diff --git a/Cargo.lock b/Cargo.lock index 03f5be16bee..141cc60882a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1313,7 +1313,7 @@ dependencies = [ "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", "ra_parser 0.1.0", "ra_text_edit 0.1.0", - "rowan 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)", + "rowan 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)", "smol_str 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", "test_utils 0.1.0", "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1584,11 +1584,11 @@ dependencies = [ [[package]] name = "rowan" -version = "0.5.5" +version = "0.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "colosseum 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "parking_lot 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", + "parking_lot 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "smol_str 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", "text_unit 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2275,7 +2275,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" "checksum relative-path 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0e7790c7f1cc73d831d28dc5a7deb316a006e7848e6a7f467cdb10a0a9e0fb1c" "checksum remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4a83fa3702a688b9359eccba92d153ac33fd2e8462f9e0e3fdf155239ea7792e" "checksum ron 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "17f52a24414403f81528b67488cf8edc4eda977d3af1646bb6b106a600ead78f" -"checksum rowan 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)" = "500ba7550373d42593a5228085bad391517378fa31ad2a84defe100dd8259fef" +"checksum rowan 0.5.6 
(registry+https://github.com/rust-lang/crates.io-index)" = "0c433ffe99ac9b96fa9882805d05eee5d750c9202fb42d0546c556e5d70d54be" "checksum rustc-demangle 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)" = "a7f4dccf6f4891ebcc0c39f9b6eb1a83b9bf5d747cb439ec6fba4f3b977038af" "checksum rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7540fc8b0c49f096ee9c961cda096467dce8084bec6bdca2fc83895fd9b28cb8" "checksum rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a" diff --git a/crates/ra_assists/src/add_derive.rs b/crates/ra_assists/src/add_derive.rs index bf7d55d6d11..f19196f53d6 100644 --- a/crates/ra_assists/src/add_derive.rs +++ b/crates/ra_assists/src/add_derive.rs @@ -9,7 +9,7 @@ use crate::{Assist, AssistCtx, AssistId}; pub(crate) fn add_derive(mut ctx: AssistCtx) -> Option { let nominal = ctx.node_at_offset::()?; - let node_start = derive_insertion_offset(nominal)?; + let node_start = derive_insertion_offset(&nominal)?; ctx.add_action(AssistId("add_derive"), "add `#[derive]`", |edit| { let derive_attr = nominal .attrs() diff --git a/crates/ra_assists/src/add_explicit_type.rs b/crates/ra_assists/src/add_explicit_type.rs index bb47a32f0a0..a69cfc8e3a3 100644 --- a/crates/ra_assists/src/add_explicit_type.rs +++ b/crates/ra_assists/src/add_explicit_type.rs @@ -27,7 +27,7 @@ pub(crate) fn add_explicit_type(mut ctx: AssistCtx) -> Option< // Infer type let db = ctx.db; let analyzer = hir::SourceAnalyzer::new(db, ctx.frange.file_id, stmt.syntax(), None); - let ty = analyzer.type_of(db, expr)?; + let ty = analyzer.type_of(db, &expr)?; // Assist not applicable if the type is unknown if is_unknown(&ty) { return None; diff --git a/crates/ra_assists/src/add_impl.rs b/crates/ra_assists/src/add_impl.rs index b81922c1d29..cebc1953967 100644 --- a/crates/ra_assists/src/add_impl.rs +++ b/crates/ra_assists/src/add_impl.rs @@ -16,7 +16,7 @@ pub(crate) fn 
add_impl(mut ctx: AssistCtx) -> Option { let start_offset = nominal.syntax().range().end(); let mut buf = String::new(); buf.push_str("\n\nimpl"); - if let Some(type_params) = type_params { + if let Some(type_params) = &type_params { type_params.syntax().text().push_to(&mut buf); } buf.push_str(" "); @@ -25,9 +25,9 @@ pub(crate) fn add_impl(mut ctx: AssistCtx) -> Option { let lifetime_params = type_params .lifetime_params() .filter_map(|it| it.lifetime_token()) - .map(|it| it.text()); + .map(|it| it.text().clone()); let type_params = - type_params.type_params().filter_map(|it| it.name()).map(|it| it.text()); + type_params.type_params().filter_map(|it| it.name()).map(|it| it.text().clone()); join(lifetime_params.chain(type_params)).surround_with("<", ">").to_buf(&mut buf); } buf.push_str(" {\n"); diff --git a/crates/ra_assists/src/add_missing_impl_members.rs b/crates/ra_assists/src/add_missing_impl_members.rs index 6ffdad0b1e5..b992a4dc802 100644 --- a/crates/ra_assists/src/add_missing_impl_members.rs +++ b/crates/ra_assists/src/add_missing_impl_members.rs @@ -5,8 +5,8 @@ use crate::{ use hir::{db::HirDatabase, HasSource}; use ra_db::FilePosition; -use ra_syntax::ast::{self, AstNode, ImplItem, ImplItemKind, NameOwner}; -use ra_syntax::{SmolStr, TreeArc}; +use ra_syntax::ast::{self, AstNode, ImplItemKind, NameOwner}; +use ra_syntax::SmolStr; #[derive(PartialEq)] enum AddMissingImplMembersMode { @@ -46,16 +46,16 @@ fn add_missing_impl_members_inner( let position = FilePosition { file_id, offset: impl_node.syntax().range().start() }; let analyzer = hir::SourceAnalyzer::new(ctx.db, position.file_id, impl_node.syntax(), None); - resolve_target_trait_def(ctx.db, &analyzer, impl_node)? + resolve_target_trait_def(ctx.db, &analyzer, &impl_node)? 
}; - let def_name = |kind| -> Option<&SmolStr> { + let def_name = |kind| -> Option { match kind { - ImplItemKind::FnDef(def) => def.name(), - ImplItemKind::TypeAliasDef(def) => def.name(), - ImplItemKind::ConstDef(def) => def.name(), + ast::ImplItemKind::FnDef(def) => def.name(), + ast::ImplItemKind::TypeAliasDef(def) => def.name(), + ast::ImplItemKind::ConstDef(def) => def.name(), } - .map(ast::Name::text) + .map(|it| it.text().clone()) }; let trait_items = trait_def.item_list()?.impl_items(); @@ -78,18 +78,13 @@ fn add_missing_impl_members_inner( ctx.add_action(AssistId(assist_id), label, |edit| { let n_existing_items = impl_item_list.impl_items().count(); - let items: Vec<_> = missing_items - .into_iter() - .map(|it| match it.kind() { - ImplItemKind::FnDef(def) => { - strip_docstring(ImplItem::cast(add_body(def).syntax()).unwrap()) - } - _ => strip_docstring(it), - }) - .collect(); + let items = missing_items.into_iter().map(|it| match it.kind() { + ImplItemKind::FnDef(def) => strip_docstring(add_body(def).into()), + _ => strip_docstring(it), + }); let mut ast_editor = AstEditor::new(impl_item_list); - ast_editor.append_items(items.iter().map(|it| &**it)); + ast_editor.append_items(items); let first_new_item = ast_editor.ast().impl_items().nth(n_existing_items).unwrap(); let cursor_position = first_new_item.syntax().range().start(); @@ -101,14 +96,14 @@ fn add_missing_impl_members_inner( ctx.build() } -fn strip_docstring(item: &ast::ImplItem) -> TreeArc { +fn strip_docstring(item: ast::ImplItem) -> ast::ImplItem { let mut ast_editor = AstEditor::new(item); ast_editor.strip_attrs_and_docs(); ast_editor.ast().to_owned() } -fn add_body(fn_def: &ast::FnDef) -> TreeArc { - let mut ast_editor = AstEditor::new(fn_def); +fn add_body(fn_def: ast::FnDef) -> ast::FnDef { + let mut ast_editor = AstEditor::new(fn_def.clone()); if fn_def.body().is_none() { ast_editor.set_body(&AstBuilder::::single_expr( &AstBuilder::::unimplemented(), @@ -123,9 +118,12 @@ fn 
resolve_target_trait_def( db: &impl HirDatabase, analyzer: &hir::SourceAnalyzer, impl_block: &ast::ImplBlock, -) -> Option> { - let ast_path = - impl_block.target_trait().map(AstNode::syntax).and_then(ast::PathType::cast)?.path()?; +) -> Option { + let ast_path = impl_block + .target_trait() + .map(|it| it.syntax().clone()) + .and_then(ast::PathType::cast)? + .path()?; match analyzer.resolve_path(db, &ast_path) { Some(hir::PathResolution::Def(hir::ModuleDef::Trait(def))) => Some(def.source(db).ast), diff --git a/crates/ra_assists/src/assist_ctx.rs b/crates/ra_assists/src/assist_ctx.rs index 34b20715400..e52085f85df 100644 --- a/crates/ra_assists/src/assist_ctx.rs +++ b/crates/ra_assists/src/assist_ctx.rs @@ -49,7 +49,7 @@ pub(crate) enum Assist { pub(crate) struct AssistCtx<'a, DB> { pub(crate) db: &'a DB, pub(crate) frange: FileRange, - source_file: &'a SourceFile, + source_file: SourceFile, should_compute_edit: bool, assist: Assist, } @@ -59,7 +59,7 @@ impl<'a, DB> Clone for AssistCtx<'a, DB> { AssistCtx { db: self.db, frange: self.frange, - source_file: self.source_file, + source_file: self.source_file.clone(), should_compute_edit: self.should_compute_edit, assist: self.assist.clone(), } @@ -104,18 +104,18 @@ impl<'a, DB: HirDatabase> AssistCtx<'a, DB> { Some(self.assist) } - pub(crate) fn token_at_offset(&self) -> TokenAtOffset> { + pub(crate) fn token_at_offset(&self) -> TokenAtOffset { find_token_at_offset(self.source_file.syntax(), self.frange.range.start()) } - pub(crate) fn node_at_offset(&self) -> Option<&'a N> { + pub(crate) fn node_at_offset(&self) -> Option { find_node_at_offset(self.source_file.syntax(), self.frange.range.start()) } - pub(crate) fn covering_element(&self) -> SyntaxElement<'a> { + pub(crate) fn covering_element(&self) -> SyntaxElement { find_covering_element(self.source_file.syntax(), self.frange.range) } - pub(crate) fn covering_node_for_range(&self, range: TextRange) -> SyntaxElement<'a> { + pub(crate) fn 
covering_node_for_range(&self, range: TextRange) -> SyntaxElement { find_covering_element(self.source_file.syntax(), range) } } @@ -139,7 +139,7 @@ impl AssistBuilder { ) { let mut replace_with = replace_with.into(); if let Some(indent) = leading_indent(node) { - replace_with = reindent(&replace_with, indent) + replace_with = reindent(&replace_with, &indent) } self.replace(node.range(), replace_with) } diff --git a/crates/ra_assists/src/ast_editor.rs b/crates/ra_assists/src/ast_editor.rs index 7b743c9f093..5fbcadfee80 100644 --- a/crates/ra_assists/src/ast_editor.rs +++ b/crates/ra_assists/src/ast_editor.rs @@ -4,18 +4,18 @@ use arrayvec::ArrayVec; use hir::Name; use ra_fmt::leading_indent; use ra_syntax::{ - ast, AstNode, Direction, InsertPosition, SourceFile, SyntaxElement, SyntaxKind::*, TreeArc, T, + ast, AstNode, Direction, InsertPosition, SourceFile, SyntaxElement, SyntaxKind::*, T, }; use ra_text_edit::TextEditBuilder; pub struct AstEditor { - original_ast: TreeArc, - ast: TreeArc, + original_ast: N, + ast: N, } impl AstEditor { - pub fn new(node: &N) -> AstEditor { - AstEditor { original_ast: node.to_owned(), ast: node.to_owned() } + pub fn new(node: N) -> AstEditor { + AstEditor { original_ast: node.clone(), ast: node } } pub fn into_text_edit(self, builder: &mut TextEditBuilder) { @@ -26,27 +26,27 @@ impl AstEditor { } pub fn ast(&self) -> &N { - &*self.ast + &self.ast } #[must_use] - fn insert_children<'a>( + fn insert_children( &self, - position: InsertPosition>, - to_insert: impl Iterator>, - ) -> TreeArc { + position: InsertPosition, + to_insert: impl Iterator, + ) -> N { let new_syntax = self.ast().syntax().insert_children(position, to_insert); - N::cast(&new_syntax).unwrap().to_owned() + N::cast(new_syntax).unwrap() } #[must_use] - fn replace_children<'a>( + fn replace_children( &self, - to_delete: RangeInclusive>, - to_insert: impl Iterator>, - ) -> TreeArc { + to_delete: RangeInclusive, + to_insert: impl Iterator, + ) -> N { let new_syntax = 
self.ast().syntax().replace_children(to_delete, to_insert); - N::cast(&new_syntax).unwrap().to_owned() + N::cast(new_syntax).unwrap() } fn do_make_multiline(&mut self) { @@ -66,16 +66,18 @@ impl AstEditor { if ws.text().contains('\n') { return; } - Some(ws) + Some(ws.clone()) } }; - let indent = leading_indent(self.ast().syntax()).unwrap_or(""); + let indent = leading_indent(self.ast().syntax()).unwrap_or("".into()); let ws = tokens::WsBuilder::new(&format!("\n{}", indent)); let to_insert = iter::once(ws.ws().into()); self.ast = match existing_ws { None => self.insert_children(InsertPosition::After(l_curly), to_insert), - Some(ws) => self.replace_children(RangeInclusive::new(ws.into(), ws.into()), to_insert), + Some(ws) => { + self.replace_children(RangeInclusive::new(ws.clone().into(), ws.into()), to_insert) + } }; } } @@ -95,7 +97,7 @@ impl AstEditor { let space = if is_multiline { ws = tokens::WsBuilder::new(&format!( "\n{} ", - leading_indent(self.ast().syntax()).unwrap_or("") + leading_indent(self.ast().syntax()).unwrap_or("".into()) )); ws.ws() } else { @@ -104,7 +106,7 @@ impl AstEditor { let mut to_insert: ArrayVec<[SyntaxElement; 4]> = ArrayVec::new(); to_insert.push(space.into()); - to_insert.push(field.syntax().into()); + to_insert.push(field.syntax().clone().into()); to_insert.push(tokens::comma().into()); macro_rules! 
after_l_curly { @@ -127,7 +129,7 @@ impl AstEditor { InsertPosition::After(comma) } else { to_insert.insert(0, tokens::comma().into()); - InsertPosition::After($anchor.syntax().into()) + InsertPosition::After($anchor.syntax().clone().into()) } }; }; @@ -144,7 +146,9 @@ impl AstEditor { None => after_l_curly!(), } } - InsertPosition::Before(anchor) => InsertPosition::Before(anchor.syntax().into()), + InsertPosition::Before(anchor) => { + InsertPosition::Before(anchor.syntax().clone().into()) + } InsertPosition::After(anchor) => after_field!(anchor), }; @@ -157,7 +161,7 @@ impl AstEditor { } impl AstEditor { - pub fn append_items<'a>(&mut self, items: impl Iterator) { + pub fn append_items(&mut self, items: impl Iterator) { let n_existing_items = self.ast().impl_items().count(); if n_existing_items == 0 { self.do_make_multiline(); @@ -165,22 +169,23 @@ impl AstEditor { items.for_each(|it| self.append_item(it)); } - pub fn append_item(&mut self, item: &ast::ImplItem) { + pub fn append_item(&mut self, item: ast::ImplItem) { let (indent, position) = match self.ast().impl_items().last() { Some(it) => ( - leading_indent(it.syntax()).unwrap_or("").to_string(), - InsertPosition::After(it.syntax().into()), + leading_indent(it.syntax()).unwrap_or_default().to_string(), + InsertPosition::After(it.syntax().clone().into()), ), None => match self.l_curly() { Some(it) => ( - " ".to_string() + leading_indent(self.ast().syntax()).unwrap_or(""), + " ".to_string() + &leading_indent(self.ast().syntax()).unwrap_or_default(), InsertPosition::After(it), ), None => return, }, }; let ws = tokens::WsBuilder::new(&format!("\n{}", indent)); - let to_insert: ArrayVec<[SyntaxElement; 2]> = [ws.ws().into(), item.syntax().into()].into(); + let to_insert: ArrayVec<[SyntaxElement; 2]> = + [ws.ws().into(), item.syntax().clone().into()].into(); self.ast = self.insert_children(position, to_insert.into_iter()); } @@ -197,9 +202,9 @@ impl AstEditor { .children_with_tokens() .find(|it| it.kind() == ATTR 
|| it.kind() == COMMENT) { - let end = match start.next_sibling_or_token() { - Some(el) if el.kind() == WHITESPACE => el, - Some(_) | None => start, + let end = match &start.next_sibling_or_token() { + Some(el) if el.kind() == WHITESPACE => el.clone(), + Some(_) | None => start.clone(), }; self.ast = self.replace_children(RangeInclusive::new(start, end), iter::empty()); } @@ -210,18 +215,18 @@ impl AstEditor { pub fn set_body(&mut self, body: &ast::Block) { let mut to_insert: ArrayVec<[SyntaxElement; 2]> = ArrayVec::new(); let old_body_or_semi: SyntaxElement = if let Some(old_body) = self.ast().body() { - old_body.syntax().into() + old_body.syntax().clone().into() } else if let Some(semi) = self.ast().semicolon_token() { to_insert.push(tokens::single_space().into()); semi.into() } else { to_insert.push(tokens::single_space().into()); - to_insert.push(body.syntax().into()); + to_insert.push(body.syntax().clone().into()); self.ast = self.insert_children(InsertPosition::Last, to_insert.into_iter()); return; }; - to_insert.push(body.syntax().into()); - let replace_range = RangeInclusive::new(old_body_or_semi, old_body_or_semi); + to_insert.push(body.syntax().clone().into()); + let replace_range = RangeInclusive::new(old_body_or_semi.clone(), old_body_or_semi); self.ast = self.replace_children(replace_range, to_insert.into_iter()) } } @@ -231,15 +236,15 @@ pub struct AstBuilder { } impl AstBuilder { - pub fn from_name(name: &Name) -> TreeArc { + pub fn from_name(name: &Name) -> ast::NamedField { ast_node_from_file_text(&format!("fn f() {{ S {{ {}: (), }} }}", name)) } - fn from_text(text: &str) -> TreeArc { + fn from_text(text: &str) -> ast::NamedField { ast_node_from_file_text(&format!("fn f() {{ S {{ {}, }} }}", text)) } - pub fn from_pieces(name: &ast::NameRef, expr: Option<&ast::Expr>) -> TreeArc { + pub fn from_pieces(name: &ast::NameRef, expr: Option<&ast::Expr>) -> ast::NamedField { match expr { Some(expr) => Self::from_text(&format!("{}: {}", name.syntax(), 
expr.syntax())), None => Self::from_text(&name.syntax().to_string()), @@ -248,36 +253,36 @@ impl AstBuilder { } impl AstBuilder { - fn from_text(text: &str) -> TreeArc { + fn from_text(text: &str) -> ast::Block { ast_node_from_file_text(&format!("fn f() {}", text)) } - pub fn single_expr(e: &ast::Expr) -> TreeArc { + pub fn single_expr(e: &ast::Expr) -> ast::Block { Self::from_text(&format!("{{ {} }}", e.syntax())) } } impl AstBuilder { - fn from_text(text: &str) -> TreeArc { + fn from_text(text: &str) -> ast::Expr { ast_node_from_file_text(&format!("fn f() {{ {}; }}", text)) } - pub fn unit() -> TreeArc { + pub fn unit() -> ast::Expr { Self::from_text("()") } - pub fn unimplemented() -> TreeArc { + pub fn unimplemented() -> ast::Expr { Self::from_text("unimplemented!()") } } impl AstBuilder { - pub fn new(text: &str) -> TreeArc { + pub fn new(text: &str) -> ast::NameRef { ast_node_from_file_text(&format!("fn f() {{ {}; }}", text)) } } -fn ast_node_from_file_text(text: &str) -> TreeArc { +fn ast_node_from_file_text(text: &str) -> N { let parse = SourceFile::parse(text); let res = parse.tree().syntax().descendants().find_map(N::cast).unwrap().to_owned(); res @@ -285,47 +290,49 @@ fn ast_node_from_file_text(text: &str) -> TreeArc { mod tokens { use once_cell::sync::Lazy; - use ra_syntax::{AstNode, SourceFile, SyntaxKind::*, SyntaxToken, TreeArc, T}; + use ra_syntax::{AstNode, Parse, SourceFile, SyntaxKind::*, SyntaxToken, T}; - static SOURCE_FILE: Lazy> = - Lazy::new(|| SourceFile::parse(",\n; ;").tree().to_owned()); + static SOURCE_FILE: Lazy> = Lazy::new(|| SourceFile::parse(",\n; ;")); - pub(crate) fn comma() -> SyntaxToken<'static> { + pub(crate) fn comma() -> SyntaxToken { SOURCE_FILE + .tree() .syntax() .descendants_with_tokens() - .filter_map(|it| it.as_token()) + .filter_map(|it| it.as_token().cloned()) .find(|it| it.kind() == T![,]) .unwrap() } - pub(crate) fn single_space() -> SyntaxToken<'static> { + pub(crate) fn single_space() -> SyntaxToken { 
SOURCE_FILE + .tree() .syntax() .descendants_with_tokens() - .filter_map(|it| it.as_token()) + .filter_map(|it| it.as_token().cloned()) .find(|it| it.kind() == WHITESPACE && it.text().as_str() == " ") .unwrap() } #[allow(unused)] - pub(crate) fn single_newline() -> SyntaxToken<'static> { + pub(crate) fn single_newline() -> SyntaxToken { SOURCE_FILE + .tree() .syntax() .descendants_with_tokens() - .filter_map(|it| it.as_token()) + .filter_map(|it| it.as_token().cloned()) .find(|it| it.kind() == WHITESPACE && it.text().as_str() == "\n") .unwrap() } - pub(crate) struct WsBuilder(TreeArc); + pub(crate) struct WsBuilder(SourceFile); impl WsBuilder { pub(crate) fn new(text: &str) -> WsBuilder { WsBuilder(SourceFile::parse(text).ok().unwrap()) } - pub(crate) fn ws(&self) -> SyntaxToken<'_> { - self.0.syntax().first_child_or_token().unwrap().as_token().unwrap() + pub(crate) fn ws(&self) -> SyntaxToken { + self.0.syntax().first_child_or_token().unwrap().as_token().cloned().unwrap() } } diff --git a/crates/ra_assists/src/auto_import.rs b/crates/ra_assists/src/auto_import.rs index f8f37e85231..0eb4bdb6225 100644 --- a/crates/ra_assists/src/auto_import.rs +++ b/crates/ra_assists/src/auto_import.rs @@ -12,25 +12,25 @@ use ra_syntax::{ SyntaxNode, TextRange, T, }; -fn collect_path_segments_raw<'a>( - segments: &mut Vec<&'a ast::PathSegment>, - mut path: &'a ast::Path, +fn collect_path_segments_raw( + segments: &mut Vec, + mut path: ast::Path, ) -> Option { let oldlen = segments.len(); loop { let mut children = path.syntax().children_with_tokens(); let (first, second, third) = ( - children.next().map(|n| (n, n.kind())), - children.next().map(|n| (n, n.kind())), - children.next().map(|n| (n, n.kind())), + children.next().map(|n| (n.clone(), n.kind())), + children.next().map(|n| (n.clone(), n.kind())), + children.next().map(|n| (n.clone(), n.kind())), ); match (first, second, third) { (Some((subpath, PATH)), Some((_, T![::])), Some((segment, PATH_SEGMENT))) => { - path = 
ast::Path::cast(subpath.as_node()?)?; - segments.push(ast::PathSegment::cast(segment.as_node()?)?); + path = ast::Path::cast(subpath.as_node()?.clone())?; + segments.push(ast::PathSegment::cast(segment.as_node()?.clone())?); } (Some((segment, PATH_SEGMENT)), _, _) => { - segments.push(ast::PathSegment::cast(segment.as_node()?)?); + segments.push(ast::PathSegment::cast(segment.as_node()?.clone())?); break; } (_, _, _) => return None, @@ -60,7 +60,7 @@ fn fmt_segments_raw(segments: &[SmolStr], buf: &mut String) { } // Returns the numeber of common segments. -fn compare_path_segments(left: &[SmolStr], right: &[&ast::PathSegment]) -> usize { +fn compare_path_segments(left: &[SmolStr], right: &[ast::PathSegment]) -> usize { left.iter().zip(right).filter(|(l, r)| compare_path_segment(l, r)).count() } @@ -81,12 +81,12 @@ fn compare_path_segment_with_name(a: &SmolStr, b: &ast::Name) -> bool { a == b.text() } -#[derive(Copy, Clone)] -enum ImportAction<'a> { +#[derive(Clone)] +enum ImportAction { Nothing, // Add a brand new use statement. 
AddNewUse { - anchor: Option<&'a SyntaxNode>, // anchor node + anchor: Option, // anchor node add_after_anchor: bool, }, @@ -94,9 +94,9 @@ enum ImportAction<'a> { AddNestedImport { // how may segments matched with the target path common_segments: usize, - path_to_split: &'a ast::Path, + path_to_split: ast::Path, // the first segment of path_to_split we want to add into the new nested list - first_segment_to_split: Option<&'a ast::PathSegment>, + first_segment_to_split: Option, // Wether to add 'self' in addition to the target path add_self: bool, }, @@ -104,20 +104,20 @@ enum ImportAction<'a> { AddInTreeList { common_segments: usize, // The UseTreeList where to add the target path - tree_list: &'a ast::UseTreeList, + tree_list: ast::UseTreeList, add_self: bool, }, } -impl<'a> ImportAction<'a> { - fn add_new_use(anchor: Option<&'a SyntaxNode>, add_after_anchor: bool) -> Self { +impl ImportAction { + fn add_new_use(anchor: Option, add_after_anchor: bool) -> Self { ImportAction::AddNewUse { anchor, add_after_anchor } } fn add_nested_import( common_segments: usize, - path_to_split: &'a ast::Path, - first_segment_to_split: Option<&'a ast::PathSegment>, + path_to_split: ast::Path, + first_segment_to_split: Option, add_self: bool, ) -> Self { ImportAction::AddNestedImport { @@ -130,14 +130,14 @@ impl<'a> ImportAction<'a> { fn add_in_tree_list( common_segments: usize, - tree_list: &'a ast::UseTreeList, + tree_list: ast::UseTreeList, add_self: bool, ) -> Self { ImportAction::AddInTreeList { common_segments, tree_list, add_self } } - fn better<'b>(left: &'b ImportAction<'a>, right: &'b ImportAction<'a>) -> &'b ImportAction<'a> { - if left.is_better(right) { + fn better(left: ImportAction, right: ImportAction) -> ImportAction { + if left.is_better(&right) { left } else { right @@ -166,12 +166,12 @@ impl<'a> ImportAction<'a> { // Find out the best ImportAction to import target path against current_use_tree. 
// If current_use_tree has a nested import the function gets called recursively on every UseTree inside a UseTreeList. -fn walk_use_tree_for_best_action<'a>( - current_path_segments: &mut Vec<&'a ast::PathSegment>, // buffer containing path segments - current_parent_use_tree_list: Option<&'a ast::UseTreeList>, // will be Some value if we are in a nested import - current_use_tree: &'a ast::UseTree, // the use tree we are currently examinating - target: &[SmolStr], // the path we want to import -) -> ImportAction<'a> { +fn walk_use_tree_for_best_action( + current_path_segments: &mut Vec, // buffer containing path segments + current_parent_use_tree_list: Option, // will be Some value if we are in a nested import + current_use_tree: ast::UseTree, // the use tree we are currently examinating + target: &[SmolStr], // the path we want to import +) -> ImportAction { // We save the number of segments in the buffer so we can restore the correct segments // before returning. Recursive call will add segments so we need to delete them. let prev_len = current_path_segments.len(); @@ -188,32 +188,36 @@ fn walk_use_tree_for_best_action<'a>( .syntax() .ancestors() .find_map(ast::UseItem::cast) - .map(AstNode::syntax), + .map(|it| it.syntax().clone()), true, ); } }; // This can happen only if current_use_tree is a direct child of a UseItem - if let Some(name) = alias.and_then(ast::NameOwner::name) { - if compare_path_segment_with_name(&target[0], name) { + if let Some(name) = alias.and_then(|it| it.name()) { + if compare_path_segment_with_name(&target[0], &name) { return ImportAction::Nothing; } } - collect_path_segments_raw(current_path_segments, path); + collect_path_segments_raw(current_path_segments, path.clone()); // We compare only the new segments added in the line just above. // The first prev_len segments were already compared in 'parent' recursive calls. 
let left = target.split_at(prev_len).1; let right = current_path_segments.split_at(prev_len).1; - let common = compare_path_segments(left, right); + let common = compare_path_segments(left, &right); let mut action = match common { 0 => ImportAction::add_new_use( // e.g: target is std::fmt and we can have // use foo::bar // We add a brand new use statement - current_use_tree.syntax().ancestors().find_map(ast::UseItem::cast).map(AstNode::syntax), + current_use_tree + .syntax() + .ancestors() + .find_map(ast::UseItem::cast) + .map(|it| it.syntax().clone()), true, ), common if common == left.len() && left.len() == right.len() => { @@ -223,9 +227,9 @@ fn walk_use_tree_for_best_action<'a>( if let Some(list) = tree_list { // In case 2 we need to add self to the nested list // unless it's already there - let has_self = list.use_trees().map(ast::UseTree::path).any(|p| { - p.and_then(ast::Path::segment) - .and_then(ast::PathSegment::kind) + let has_self = list.use_trees().map(|it| it.path()).any(|p| { + p.and_then(|it| it.segment()) + .and_then(|it| it.kind()) .filter(|k| *k == ast::PathSegmentKind::SelfKw) .is_some() }); @@ -248,7 +252,7 @@ fn walk_use_tree_for_best_action<'a>( ImportAction::add_nested_import( prev_len + common, path, - Some(segments_to_split[0]), + Some(segments_to_split[0].clone()), false, ) } @@ -263,14 +267,18 @@ fn walk_use_tree_for_best_action<'a>( .syntax() .ancestors() .find_map(ast::UseItem::cast) - .map(AstNode::syntax), + .map(|it| it.syntax().clone()), true, ); if let Some(list) = tree_list { // Case 2, check recursively if the path is already imported in the nested list for u in list.use_trees() { - let child_action = - walk_use_tree_for_best_action(current_path_segments, Some(list), u, target); + let child_action = walk_use_tree_for_best_action( + current_path_segments, + Some(list.clone()), + u, + target, + ); if child_action.is_better(&better_action) { better_action = child_action; if let ImportAction::Nothing = better_action { @@ -291,7 
+299,7 @@ fn walk_use_tree_for_best_action<'a>( ImportAction::add_nested_import( prev_len + common, path, - Some(segments_to_split[0]), + Some(segments_to_split[0].clone()), true, ) } @@ -302,7 +310,7 @@ fn walk_use_tree_for_best_action<'a>( ImportAction::add_nested_import( prev_len + common, path, - Some(segments_to_split[0]), + Some(segments_to_split[0].clone()), false, ) } @@ -311,7 +319,7 @@ fn walk_use_tree_for_best_action<'a>( // If we are inside a UseTreeList adding a use statement become adding to the existing // tree list. - action = match (current_parent_use_tree_list, action) { + action = match (current_parent_use_tree_list, action.clone()) { (Some(use_tree_list), ImportAction::AddNewUse { .. }) => { ImportAction::add_in_tree_list(prev_len, use_tree_list, false) } @@ -323,19 +331,20 @@ fn walk_use_tree_for_best_action<'a>( action } -fn best_action_for_target<'b, 'a: 'b>( - container: &'a SyntaxNode, - anchor: &'a SyntaxNode, - target: &'b [SmolStr], -) -> ImportAction<'a> { +fn best_action_for_target( + container: SyntaxNode, + anchor: SyntaxNode, + target: &[SmolStr], +) -> ImportAction { let mut storage = Vec::with_capacity(16); // this should be the only allocation let best_action = container .children() .filter_map(ast::UseItem::cast) - .filter_map(ast::UseItem::use_tree) + .filter_map(|it| it.use_tree()) .map(|u| walk_use_tree_for_best_action(&mut storage, None, u, target)) - .fold(None, |best, a| { - best.and_then(|best| Some(*ImportAction::better(&best, &a))).or_else(|| Some(a)) + .fold(None, |best, a| match best { + Some(best) => Some(ImportAction::better(best, a)), + None => Some(a), }); match best_action { @@ -386,7 +395,7 @@ fn make_assist(action: &ImportAction, target: &[SmolStr], edit: &mut TextEditBui } fn make_assist_add_new_use( - anchor: &Option<&SyntaxNode>, + anchor: &Option, after: bool, target: &[SmolStr], edit: &mut TextEditBuilder, @@ -396,7 +405,7 @@ fn make_assist_add_new_use( let mut buf = String::new(); if after { 
buf.push_str("\n"); - if let Some(spaces) = indent { + if let Some(spaces) = &indent { buf.push_str(spaces); } } @@ -405,8 +414,8 @@ fn make_assist_add_new_use( buf.push_str(";"); if !after { buf.push_str("\n\n"); - if let Some(spaces) = indent { - buf.push_str(spaces); + if let Some(spaces) = &indent { + buf.push_str(&spaces); } } let position = if after { anchor.range().end() } else { anchor.range().start() }; @@ -444,7 +453,7 @@ fn make_assist_add_in_tree_list( fn make_assist_add_nested_import( path: &ast::Path, - first_segment_to_split: &Option<&ast::PathSegment>, + first_segment_to_split: &Option, target: &[SmolStr], add_self: bool, edit: &mut TextEditBuilder, @@ -482,7 +491,7 @@ fn apply_auto_import( target: &[SmolStr], edit: &mut TextEditBuilder, ) { - let action = best_action_for_target(container, path.syntax(), target); + let action = best_action_for_target(container.clone(), path.syntax().clone(), target); make_assist(&action, target, edit); if let Some(last) = path.segment() { // Here we are assuming the assist will provide a correct use statement @@ -522,26 +531,26 @@ pub fn auto_import_text_edit( edit: &mut TextEditBuilder, ) { let container = position.ancestors().find_map(|n| { - if let Some(module) = ast::Module::cast(n) { - return module.item_list().map(ast::AstNode::syntax); + if let Some(module) = ast::Module::cast(n.clone()) { + return module.item_list().map(|it| it.syntax().clone()); } - ast::SourceFile::cast(n).map(ast::AstNode::syntax) + ast::SourceFile::cast(n).map(|it| it.syntax().clone()) }); if let Some(container) = container { - let action = best_action_for_target(container, anchor, target); + let action = best_action_for_target(container, anchor.clone(), target); make_assist(&action, target, edit); } } pub(crate) fn auto_import(mut ctx: AssistCtx) -> Option { - let path: &ast::Path = ctx.node_at_offset()?; + let path: ast::Path = ctx.node_at_offset()?; // We don't want to mess with use statements if 
path.syntax().ancestors().find_map(ast::UseItem::cast).is_some() { return None; } - let hir_path = hir::Path::from_ast(path)?; + let hir_path = hir::Path::from_ast(path.clone())?; let segments = collect_hir_path_segments(&hir_path); if segments.len() < 2 { return None; @@ -554,7 +563,7 @@ pub(crate) fn auto_import(mut ctx: AssistCtx) -> Option) -> Option) -> Option { if parent.children().any(|child| child.kind() == VISIBILITY) { return None; } - (vis_offset(parent), keyword.range()) + (vis_offset(&parent), keyword.range()) } else { let ident = ctx.token_at_offset().find(|leaf| leaf.kind() == IDENT)?; let field = ident.parent().ancestors().find_map(ast::NamedFieldDef::cast)?; @@ -65,7 +65,7 @@ fn vis_offset(node: &SyntaxNode) -> TextUnit { .unwrap_or_else(|| node.range().start()) } -fn change_vis(mut ctx: AssistCtx, vis: &ast::Visibility) -> Option { +fn change_vis(mut ctx: AssistCtx, vis: ast::Visibility) -> Option { if vis.syntax().text() == "pub" { ctx.add_action(AssistId("change_visibility"), "change to pub(crate)", |edit| { edit.target(vis.syntax().range()); diff --git a/crates/ra_assists/src/fill_match_arms.rs b/crates/ra_assists/src/fill_match_arms.rs index deef166b565..b96806ac665 100644 --- a/crates/ra_assists/src/fill_match_arms.rs +++ b/crates/ra_assists/src/fill_match_arms.rs @@ -27,7 +27,7 @@ pub(crate) fn fill_match_arms(mut ctx: AssistCtx) -> Option { if arm_iter.next() != None { @@ -44,7 +44,7 @@ pub(crate) fn fill_match_arms(mut ctx: AssistCtx) -> Option Some(e), _ => None, diff --git a/crates/ra_assists/src/flip_binexpr.rs b/crates/ra_assists/src/flip_binexpr.rs index 5e41f93461c..2e591ad3bd4 100644 --- a/crates/ra_assists/src/flip_binexpr.rs +++ b/crates/ra_assists/src/flip_binexpr.rs @@ -6,8 +6,8 @@ use crate::{Assist, AssistCtx, AssistId}; /// Flip binary expression assist. 
pub(crate) fn flip_binexpr(mut ctx: AssistCtx) -> Option { let expr = ctx.node_at_offset::()?; - let lhs = expr.lhs()?.syntax(); - let rhs = expr.rhs()?.syntax(); + let lhs = expr.lhs()?.syntax().clone(); + let rhs = expr.rhs()?.syntax().clone(); let op_range = expr.op_token()?.range(); // The assist should be applied only if the cursor is on the operator let cursor_in_range = ctx.frange.range.is_subrange(&op_range); diff --git a/crates/ra_assists/src/flip_comma.rs b/crates/ra_assists/src/flip_comma.rs index d8dba779f67..13016ae06d6 100644 --- a/crates/ra_assists/src/flip_comma.rs +++ b/crates/ra_assists/src/flip_comma.rs @@ -5,8 +5,8 @@ use crate::{Assist, AssistCtx, AssistId}; pub(crate) fn flip_comma(mut ctx: AssistCtx) -> Option { let comma = ctx.token_at_offset().find(|leaf| leaf.kind() == T![,])?; - let prev = non_trivia_sibling(comma.into(), Direction::Prev)?; - let next = non_trivia_sibling(comma.into(), Direction::Next)?; + let prev = non_trivia_sibling(comma.clone().into(), Direction::Prev)?; + let next = non_trivia_sibling(comma.clone().into(), Direction::Next)?; ctx.add_action(AssistId("flip_comma"), "flip comma", |edit| { edit.target(comma.range()); edit.replace(prev.range(), next.to_string()); diff --git a/crates/ra_assists/src/inline_local_variable.rs b/crates/ra_assists/src/inline_local_variable.rs index 554de8b4677..3c17089deb9 100644 --- a/crates/ra_assists/src/inline_local_variable.rs +++ b/crates/ra_assists/src/inline_local_variable.rs @@ -16,18 +16,18 @@ pub(crate) fn inline_local_varialbe(mut ctx: AssistCtx) -> Opt if bind_pat.is_mutable() { return None; } - let initializer_expr = let_stmt.initializer(); + let initializer_expr = let_stmt.initializer()?; let delete_range = if let Some(whitespace) = let_stmt .syntax() .next_sibling_or_token() - .and_then(|it| ast::Whitespace::cast(it.as_token()?)) + .and_then(|it| ast::Whitespace::cast(it.as_token()?.clone())) { TextRange::from_to(let_stmt.syntax().range().start(), 
whitespace.syntax().range().end()) } else { let_stmt.syntax().range() }; let analyzer = hir::SourceAnalyzer::new(ctx.db, ctx.frange.file_id, bind_pat.syntax(), None); - let refs = analyzer.find_all_refs(bind_pat); + let refs = analyzer.find_all_refs(&bind_pat); let mut wrap_in_parens = vec![true; refs.len()]; @@ -45,7 +45,7 @@ pub(crate) fn inline_local_varialbe(mut ctx: AssistCtx) -> Opt } }; - wrap_in_parens[i] = match (initializer_expr?.kind(), usage_parent.kind()) { + wrap_in_parens[i] = match (initializer_expr.kind(), usage_parent.kind()) { (ExprKind::CallExpr(_), _) | (ExprKind::IndexExpr(_), _) | (ExprKind::MethodCallExpr(_), _) @@ -71,7 +71,7 @@ pub(crate) fn inline_local_varialbe(mut ctx: AssistCtx) -> Opt }; } - let init_str = initializer_expr?.syntax().text().to_string(); + let init_str = initializer_expr.syntax().text().to_string(); let init_in_paren = format!("({})", &init_str); ctx.add_action( diff --git a/crates/ra_assists/src/introduce_variable.rs b/crates/ra_assists/src/introduce_variable.rs index f7f5ccafa59..ce28132c9af 100644 --- a/crates/ra_assists/src/introduce_variable.rs +++ b/crates/ra_assists/src/introduce_variable.rs @@ -20,8 +20,8 @@ pub(crate) fn introduce_variable(mut ctx: AssistCtx) -> Option return None; } let expr = node.ancestors().find_map(valid_target_expr)?; - let (anchor_stmt, wrap_in_block) = anchor_stmt(expr)?; - let indent = anchor_stmt.prev_sibling_or_token()?.as_token()?; + let (anchor_stmt, wrap_in_block) = anchor_stmt(expr.clone())?; + let indent = anchor_stmt.prev_sibling_or_token()?.as_token()?.clone(); if indent.kind() != WHITESPACE { return None; } @@ -37,9 +37,9 @@ pub(crate) fn introduce_variable(mut ctx: AssistCtx) -> Option }; expr.syntax().text().push_to(&mut buf); - let full_stmt = ast::ExprStmt::cast(anchor_stmt); - let is_full_stmt = if let Some(expr_stmt) = full_stmt { - Some(expr.syntax()) == expr_stmt.expr().map(|e| e.syntax()) + let full_stmt = ast::ExprStmt::cast(anchor_stmt.clone()); + let is_full_stmt 
= if let Some(expr_stmt) = &full_stmt { + Some(expr.syntax().clone()) == expr_stmt.expr().map(|e| e.syntax().clone()) } else { false }; @@ -81,7 +81,7 @@ pub(crate) fn introduce_variable(mut ctx: AssistCtx) -> Option /// Check whether the node is a valid expression which can be extracted to a variable. /// In general that's true for any expression, but in some cases that would produce invalid code. -fn valid_target_expr(node: &SyntaxNode) -> Option<&ast::Expr> { +fn valid_target_expr(node: SyntaxNode) -> Option { match node.kind() { PATH_EXPR => None, BREAK_EXPR => ast::BreakExpr::cast(node).and_then(|e| e.expr()), @@ -96,14 +96,10 @@ fn valid_target_expr(node: &SyntaxNode) -> Option<&ast::Expr> { /// to produce correct code. /// It can be a statement, the last in a block expression or a wanna be block /// expression like a lambda or match arm. -fn anchor_stmt(expr: &ast::Expr) -> Option<(&SyntaxNode, bool)> { +fn anchor_stmt(expr: ast::Expr) -> Option<(SyntaxNode, bool)> { expr.syntax().ancestors().find_map(|node| { - if ast::Stmt::cast(node).is_some() { - return Some((node, false)); - } - if let Some(expr) = node.parent().and_then(ast::Block::cast).and_then(|it| it.expr()) { - if expr.syntax() == node { + if expr.syntax() == &node { tested_by!(test_introduce_var_last_expr); return Some((node, false)); } @@ -115,6 +111,10 @@ fn anchor_stmt(expr: &ast::Expr) -> Option<(&SyntaxNode, bool)> { } } + if ast::Stmt::cast(node.clone()).is_some() { + return Some((node, false)); + } + None }) } diff --git a/crates/ra_assists/src/move_guard.rs b/crates/ra_assists/src/move_guard.rs index e1ce86a33ef..313c9ad1878 100644 --- a/crates/ra_assists/src/move_guard.rs +++ b/crates/ra_assists/src/move_guard.rs @@ -18,9 +18,9 @@ pub(crate) fn move_guard_to_arm_body(mut ctx: AssistCtx) -> Op ctx.add_action(AssistId("move_guard_to_arm_body"), "move guard to arm body", |edit| { edit.target(guard.syntax().range()); - let offseting_amount = match space_before_guard { + let offseting_amount 
= match &space_before_guard { Some(SyntaxElement::Token(tok)) => { - if let Some(_) = ast::Whitespace::cast(tok) { + if let Some(_) = ast::Whitespace::cast(tok.clone()) { let ele = space_before_guard.unwrap().range(); edit.delete(ele); ele.len() @@ -39,11 +39,11 @@ pub(crate) fn move_guard_to_arm_body(mut ctx: AssistCtx) -> Op } pub(crate) fn move_arm_cond_to_match_guard(mut ctx: AssistCtx) -> Option { - let match_arm: &MatchArm = ctx.node_at_offset::()?; + let match_arm: MatchArm = ctx.node_at_offset::()?; let last_match_pat = match_arm.pats().last()?; let arm_body = match_arm.expr()?; - let if_expr: &IfExpr = IfExpr::cast(arm_body.syntax())?; + let if_expr: IfExpr = IfExpr::cast(arm_body.syntax().clone())?; let cond = if_expr.condition()?; let then_block = if_expr.then_branch()?; @@ -65,7 +65,7 @@ pub(crate) fn move_arm_cond_to_match_guard(mut ctx: AssistCtx) edit.target(if_expr.syntax().range()); let then_only_expr = then_block.statements().next().is_none(); - match then_block.expr() { + match &then_block.expr() { Some(then_expr) if then_only_expr => { edit.replace(if_expr.syntax().range(), then_expr.syntax().text()) } diff --git a/crates/ra_assists/src/remove_dbg.rs b/crates/ra_assists/src/remove_dbg.rs index 5680f76ca51..c330bc82783 100644 --- a/crates/ra_assists/src/remove_dbg.rs +++ b/crates/ra_assists/src/remove_dbg.rs @@ -8,7 +8,7 @@ use ra_syntax::{ pub(crate) fn remove_dbg(mut ctx: AssistCtx) -> Option { let macro_call = ctx.node_at_offset::()?; - if !is_valid_macrocall(macro_call, "dbg")? { + if !is_valid_macrocall(¯o_call, "dbg")? 
{ return None; } @@ -35,7 +35,7 @@ pub(crate) fn remove_dbg(mut ctx: AssistCtx) -> Option }; let macro_content = { - let macro_args = macro_call.token_tree()?.syntax(); + let macro_args = macro_call.token_tree()?.syntax().clone(); let range = macro_args.range(); let start = range.start() + TextUnit::of_char('('); let end = range.end() - TextUnit::of_char(')'); @@ -65,7 +65,7 @@ fn is_valid_macrocall(macro_call: &ast::MacroCall, macro_name: &str) -> Option) -> Option { - let if_expr: &ast::IfExpr = ctx.node_at_offset()?; + let if_expr: ast::IfExpr = ctx.node_at_offset()?; let cond = if_expr.condition()?; let pat = cond.pat()?; let expr = cond.expr()?; @@ -25,16 +25,11 @@ pub(crate) fn replace_if_let_with_match(mut ctx: AssistCtx) -> ctx.build() } -fn build_match_expr( - expr: &ast::Expr, - pat1: &ast::Pat, - arm1: &ast::Block, - arm2: &ast::Block, -) -> String { +fn build_match_expr(expr: ast::Expr, pat1: ast::Pat, arm1: ast::Block, arm2: ast::Block) -> String { let mut buf = String::new(); buf.push_str(&format!("match {} {{\n", expr.syntax().text())); - buf.push_str(&format!(" {} => {}\n", pat1.syntax().text(), format_arm(arm1))); - buf.push_str(&format!(" _ => {}\n", format_arm(arm2))); + buf.push_str(&format!(" {} => {}\n", pat1.syntax().text(), format_arm(&arm1))); + buf.push_str(&format!(" _ => {}\n", format_arm(&arm2))); buf.push_str("}"); buf } diff --git a/crates/ra_cli/src/main.rs b/crates/ra_cli/src/main.rs index b063193cfad..375e2f508fa 100644 --- a/crates/ra_cli/src/main.rs +++ b/crates/ra_cli/src/main.rs @@ -7,7 +7,7 @@ use clap::{App, Arg, SubCommand}; use flexi_logger::Logger; use ra_ide_api::{file_structure, Analysis}; use ra_prof::profile; -use ra_syntax::{AstNode, SourceFile, TreeArc}; +use ra_syntax::{AstNode, SourceFile}; type Result = std::result::Result>; @@ -100,9 +100,9 @@ fn main() -> Result<()> { Ok(()) } -fn file() -> Result> { +fn file() -> Result { let text = read_stdin()?; - Ok(SourceFile::parse(&text).tree().to_owned()) + 
Ok(SourceFile::parse(&text).tree()) } fn read_stdin() -> Result { diff --git a/crates/ra_fmt/src/lib.rs b/crates/ra_fmt/src/lib.rs index 1c2c04ad2f0..d6e89572939 100644 --- a/crates/ra_fmt/src/lib.rs +++ b/crates/ra_fmt/src/lib.rs @@ -3,7 +3,7 @@ use itertools::Itertools; use ra_syntax::{ ast::{self, AstNode, AstToken}, - SyntaxKind, + SmolStr, SyntaxKind, SyntaxKind::*, SyntaxNode, SyntaxToken, T, }; @@ -15,12 +15,12 @@ pub fn reindent(text: &str, indent: &str) -> String { } /// If the node is on the beginning of the line, calculate indent. -pub fn leading_indent(node: &SyntaxNode) -> Option<&str> { +pub fn leading_indent(node: &SyntaxNode) -> Option { for token in prev_tokens(node.first_token()?) { - if let Some(ws) = ast::Whitespace::cast(token) { + if let Some(ws) = ast::Whitespace::cast(token.clone()) { let ws_text = ws.text(); if let Some(pos) = ws_text.rfind('\n') { - return Some(&ws_text[pos + 1..]); + return Some(ws_text[pos + 1..].into()); } } if token.text().contains('\n') { @@ -31,17 +31,17 @@ pub fn leading_indent(node: &SyntaxNode) -> Option<&str> { } fn prev_tokens(token: SyntaxToken) -> impl Iterator { - successors(token.prev_token(), |&token| token.prev_token()) + successors(token.prev_token(), |token| token.prev_token()) } -pub fn extract_trivial_expression(block: &ast::Block) -> Option<&ast::Expr> { +pub fn extract_trivial_expression(block: &ast::Block) -> Option { let expr = block.expr()?; if expr.syntax().text().contains('\n') { return None; } let non_trivial_children = block.syntax().children().filter(|it| match it.kind() { WHITESPACE | T!['{'] | T!['}'] => false, - _ => it != &expr.syntax(), + _ => it != expr.syntax(), }); if non_trivial_children.count() > 0 { return None; diff --git a/crates/ra_hir/src/adt.rs b/crates/ra_hir/src/adt.rs index 8afdac801d5..c65446df4a4 100644 --- a/crates/ra_hir/src/adt.rs +++ b/crates/ra_hir/src/adt.rs @@ -4,10 +4,7 @@ use std::sync::Arc; use ra_arena::{impl_arena_id, Arena, RawId}; -use ra_syntax::{ - 
ast::{self, NameOwner, StructKind, TypeAscriptionOwner}, - TreeArc, -}; +use ra_syntax::ast::{self, NameOwner, StructKind, TypeAscriptionOwner}; use crate::{ type_ref::TypeRef, AsName, AstDatabase, Crate, DefDatabase, Enum, EnumVariant, FieldSource, @@ -59,11 +56,11 @@ impl StructData { struct_: Struct, ) -> Arc { let src = struct_.source(db); - Arc::new(StructData::new(&*src.ast)) + Arc::new(StructData::new(&src.ast)) } } -fn variants(enum_def: &ast::EnumDef) -> impl Iterator { +fn variants(enum_def: &ast::EnumDef) -> impl Iterator { enum_def.variant_list().into_iter().flat_map(|it| it.variants()) } @@ -71,9 +68,9 @@ impl EnumVariant { pub(crate) fn source_impl( self, db: &(impl DefDatabase + AstDatabase), - ) -> Source> { + ) -> Source { let src = self.parent.source(db); - let ast = variants(&*src.ast) + let ast = variants(&src.ast) .zip(db.enum_data(self.parent).variants.iter()) .find(|(_syntax, (id, _))| *id == self.id) .unwrap() @@ -96,7 +93,7 @@ impl EnumData { pub(crate) fn enum_data_query(db: &(impl DefDatabase + AstDatabase), e: Enum) -> Arc { let src = e.source(db); let name = src.ast.name().map(|n| n.as_name()); - let variants = variants(&*src.ast) + let variants = variants(&src.ast) .map(|var| EnumVariantData { name: var.name().map(|it| it.as_name()), variant_data: Arc::new(VariantData::new(var.kind())), diff --git a/crates/ra_hir/src/code_model.rs b/crates/ra_hir/src/code_model.rs index 4fb5844f46c..779764590ca 100644 --- a/crates/ra_hir/src/code_model.rs +++ b/crates/ra_hir/src/code_model.rs @@ -4,10 +4,7 @@ pub(crate) mod docs; use std::sync::Arc; use ra_db::{CrateId, Edition, FileId, SourceRootId}; -use ra_syntax::{ - ast::{self, NameOwner, TypeAscriptionOwner}, - TreeArc, -}; +use ra_syntax::ast::{self, NameOwner, TypeAscriptionOwner}; use crate::{ adt::{EnumVariantId, StructFieldId, VariantDef}, @@ -155,8 +152,8 @@ impl_froms!( ); pub enum ModuleSource { - SourceFile(TreeArc), - Module(TreeArc), + SourceFile(ast::SourceFile), + 
Module(ast::Module), } impl ModuleSource { @@ -199,7 +196,7 @@ impl Module { self, db: &impl HirDatabase, import: ImportId, - ) -> Either, TreeArc> { + ) -> Either { let src = self.definition_source(db); let (_, source_map) = db.raw_items_with_source_map(src.file_id); source_map.get(&src.ast, import) @@ -321,8 +318,8 @@ pub struct StructField { #[derive(Debug)] pub enum FieldSource { - Named(TreeArc), - Pos(TreeArc), + Named(ast::NamedFieldDef), + Pos(ast::PosFieldDef), } impl StructField { @@ -736,7 +733,7 @@ impl ConstData { konst: Const, ) -> Arc { let node = konst.source(db).ast; - const_data_for(&*node) + const_data_for(&node) } pub(crate) fn static_data_query( @@ -744,7 +741,7 @@ impl ConstData { konst: Static, ) -> Arc { let node = konst.source(db).ast; - const_data_for(&*node) + const_data_for(&node) } } diff --git a/crates/ra_hir/src/code_model/docs.rs b/crates/ra_hir/src/code_model/docs.rs index 007ef315d02..a2b4d8e9752 100644 --- a/crates/ra_hir/src/code_model/docs.rs +++ b/crates/ra_hir/src/code_model/docs.rs @@ -71,21 +71,21 @@ pub(crate) fn documentation_query( def: DocDef, ) -> Option { match def { - DocDef::Module(it) => docs_from_ast(&*it.declaration_source(db)?.ast), + DocDef::Module(it) => docs_from_ast(&it.declaration_source(db)?.ast), DocDef::StructField(it) => match it.source(db).ast { - FieldSource::Named(named) => docs_from_ast(&*named), + FieldSource::Named(named) => docs_from_ast(&named), FieldSource::Pos(..) 
=> None, }, - DocDef::Struct(it) => docs_from_ast(&*it.source(db).ast), - DocDef::Enum(it) => docs_from_ast(&*it.source(db).ast), - DocDef::EnumVariant(it) => docs_from_ast(&*it.source(db).ast), - DocDef::Static(it) => docs_from_ast(&*it.source(db).ast), - DocDef::Const(it) => docs_from_ast(&*it.source(db).ast), - DocDef::Function(it) => docs_from_ast(&*it.source(db).ast), - DocDef::Union(it) => docs_from_ast(&*it.source(db).ast), - DocDef::Trait(it) => docs_from_ast(&*it.source(db).ast), - DocDef::TypeAlias(it) => docs_from_ast(&*it.source(db).ast), - DocDef::MacroDef(it) => docs_from_ast(&*it.source(db).ast), + DocDef::Struct(it) => docs_from_ast(&it.source(db).ast), + DocDef::Enum(it) => docs_from_ast(&it.source(db).ast), + DocDef::EnumVariant(it) => docs_from_ast(&it.source(db).ast), + DocDef::Static(it) => docs_from_ast(&it.source(db).ast), + DocDef::Const(it) => docs_from_ast(&it.source(db).ast), + DocDef::Function(it) => docs_from_ast(&it.source(db).ast), + DocDef::Union(it) => docs_from_ast(&it.source(db).ast), + DocDef::Trait(it) => docs_from_ast(&it.source(db).ast), + DocDef::TypeAlias(it) => docs_from_ast(&it.source(db).ast), + DocDef::MacroDef(it) => docs_from_ast(&it.source(db).ast), } } diff --git a/crates/ra_hir/src/code_model/src.rs b/crates/ra_hir/src/code_model/src.rs index 72451e0e70a..32bd9c661ee 100644 --- a/crates/ra_hir/src/code_model/src.rs +++ b/crates/ra_hir/src/code_model/src.rs @@ -1,4 +1,4 @@ -use ra_syntax::{ast, TreeArc}; +use ra_syntax::ast; use crate::{ ids::AstItemDef, AstDatabase, Const, DefDatabase, Enum, EnumVariant, FieldSource, Function, @@ -34,7 +34,7 @@ impl Module { pub fn declaration_source( self, db: &(impl DefDatabase + AstDatabase), - ) -> Option>> { + ) -> Option> { let def_map = db.crate_def_map(self.krate); let decl = def_map[self.module_id].declaration?; let ast = decl.to_node(db); @@ -49,62 +49,62 @@ impl HasSource for StructField { } } impl HasSource for Struct { - type Ast = TreeArc; - fn source(self, db: &(impl 
DefDatabase + AstDatabase)) -> Source> { + type Ast = ast::StructDef; + fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source { self.id.source(db) } } impl HasSource for Union { - type Ast = TreeArc; - fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source> { + type Ast = ast::StructDef; + fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source { self.id.source(db) } } impl HasSource for Enum { - type Ast = TreeArc; - fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source> { + type Ast = ast::EnumDef; + fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source { self.id.source(db) } } impl HasSource for EnumVariant { - type Ast = TreeArc; - fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source> { + type Ast = ast::EnumVariant; + fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source { self.source_impl(db) } } impl HasSource for Function { - type Ast = TreeArc; - fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source> { + type Ast = ast::FnDef; + fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source { self.id.source(db) } } impl HasSource for Const { - type Ast = TreeArc; - fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source> { + type Ast = ast::ConstDef; + fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source { self.id.source(db) } } impl HasSource for Static { - type Ast = TreeArc; - fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source> { + type Ast = ast::StaticDef; + fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source { self.id.source(db) } } impl HasSource for Trait { - type Ast = TreeArc; - fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source> { + type Ast = ast::TraitDef; + fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source { self.id.source(db) } } impl HasSource for TypeAlias { - type Ast = TreeArc; - fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source> { + type Ast = 
ast::TypeAliasDef; + fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source { self.id.source(db) } } impl HasSource for MacroDef { - type Ast = TreeArc; - fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source> { + type Ast = ast::MacroCall; + fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source { Source { file_id: self.id.0.file_id(), ast: self.id.0.to_node(db) } } } diff --git a/crates/ra_hir/src/db.rs b/crates/ra_hir/src/db.rs index da9f3e32da7..35836517630 100644 --- a/crates/ra_hir/src/db.rs +++ b/crates/ra_hir/src/db.rs @@ -2,7 +2,7 @@ use std::sync::Arc; use parking_lot::Mutex; use ra_db::{salsa, SourceDatabase}; -use ra_syntax::{ast, Parse, SmolStr, SyntaxNode, TreeArc}; +use ra_syntax::{ast, Parse, SmolStr, SyntaxNode}; use crate::{ adt::{EnumData, StructData}, @@ -62,11 +62,11 @@ pub trait AstDatabase: InternDatabase { #[salsa::transparent] #[salsa::invoke(crate::source_id::AstIdMap::file_item_query)] - fn ast_id_to_node(&self, file_id: HirFileId, ast_id: ErasedFileAstId) -> TreeArc; + fn ast_id_to_node(&self, file_id: HirFileId, ast_id: ErasedFileAstId) -> SyntaxNode; #[salsa::transparent] #[salsa::invoke(crate::ids::HirFileId::parse_or_expand_query)] - fn parse_or_expand(&self, file_id: HirFileId) -> Option>; + fn parse_or_expand(&self, file_id: HirFileId) -> Option; #[salsa::invoke(crate::ids::HirFileId::parse_macro_query)] fn parse_macro(&self, macro_file: ids::MacroFile) -> Option>; diff --git a/crates/ra_hir/src/diagnostics.rs b/crates/ra_hir/src/diagnostics.rs index c97f0656d75..0290483b389 100644 --- a/crates/ra_hir/src/diagnostics.rs +++ b/crates/ra_hir/src/diagnostics.rs @@ -1,6 +1,6 @@ use std::{any::Any, fmt}; -use ra_syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr, TextRange, TreeArc}; +use ra_syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr, TextRange}; use relative_path::RelativePathBuf; use crate::{HirDatabase, HirFileId, Name}; @@ -33,9 +33,9 @@ pub trait AstDiagnostic { } impl dyn 
Diagnostic { - pub fn syntax_node(&self, db: &impl HirDatabase) -> TreeArc { + pub fn syntax_node(&self, db: &impl HirDatabase) -> SyntaxNode { let node = db.parse_or_expand(self.file()).unwrap(); - self.syntax_node_ptr().to_node(&*node).to_owned() + self.syntax_node_ptr().to_node(&node) } pub fn downcast_ref(&self) -> Option<&D> { @@ -143,11 +143,11 @@ impl Diagnostic for MissingFields { } impl AstDiagnostic for MissingFields { - type AST = TreeArc; + type AST = ast::NamedFieldList; fn ast(&self, db: &impl HirDatabase) -> Self::AST { let root = db.parse_or_expand(self.file()).unwrap(); - let node = self.syntax_node_ptr().to_node(&*root); - ast::NamedFieldList::cast(&node).unwrap().to_owned() + let node = self.syntax_node_ptr().to_node(&root); + ast::NamedFieldList::cast(node).unwrap() } } diff --git a/crates/ra_hir/src/expr.rs b/crates/ra_hir/src/expr.rs index 3a97d97ce16..70af3f119eb 100644 --- a/crates/ra_hir/src/expr.rs +++ b/crates/ra_hir/src/expr.rs @@ -550,7 +550,7 @@ where self.exprs.alloc(block) } - fn collect_expr(&mut self, expr: &ast::Expr) -> ExprId { + fn collect_expr(&mut self, expr: ast::Expr) -> ExprId { let syntax_ptr = SyntaxNodePtr::new(expr.syntax()); match expr.kind() { ast::ExprKind::IfExpr(e) => { @@ -565,7 +565,8 @@ where .map(|b| match b { ast::ElseBranch::Block(it) => self.collect_block(it), ast::ElseBranch::IfExpr(elif) => { - let expr: &ast::Expr = ast::Expr::cast(elif.syntax()).unwrap(); + let expr: ast::Expr = + ast::Expr::cast(elif.syntax().clone()).unwrap(); self.collect_expr(expr) } }) @@ -582,7 +583,7 @@ where let else_branch = e.else_branch().map(|b| match b { ast::ElseBranch::Block(it) => self.collect_block(it), ast::ElseBranch::IfExpr(elif) => { - let expr: &ast::Expr = ast::Expr::cast(elif.syntax()).unwrap(); + let expr: ast::Expr = ast::Expr::cast(elif.syntax().clone()).unwrap(); self.collect_expr(expr) } }); @@ -689,7 +690,7 @@ where let struct_lit = if let Some(nfl) = e.named_field_list() { let fields = nfl .fields() - 
.inspect(|field| field_ptrs.push(AstPtr::new(*field))) + .inspect(|field| field_ptrs.push(AstPtr::new(field))) .map(|field| StructLitField { name: field .name_ref() @@ -699,7 +700,7 @@ where self.collect_expr(e) } else if let Some(nr) = field.name_ref() { // field shorthand - let id = self.exprs.alloc(Expr::Path(Path::from_name_ref(nr))); + let id = self.exprs.alloc(Expr::Path(Path::from_name_ref(&nr))); self.source_map .expr_map .insert(SyntaxNodePtr::new(nr.syntax()), id); @@ -837,7 +838,7 @@ where let ast_id = self .db .ast_id_map(self.current_file_id) - .ast_id(e) + .ast_id(&e) .with_file_id(self.current_file_id); if let Some(path) = e.path().and_then(Path::from_ast) { @@ -845,11 +846,11 @@ where let call_id = MacroCallLoc { def: def.id, ast_id }.id(self.db); let file_id = call_id.as_file(MacroFileKind::Expr); if let Some(node) = self.db.parse_or_expand(file_id) { - if let Some(expr) = ast::Expr::cast(&*node) { + if let Some(expr) = ast::Expr::cast(node) { log::debug!("macro expansion {}", expr.syntax().debug_dump()); let old_file_id = std::mem::replace(&mut self.current_file_id, file_id); - let id = self.collect_expr(&expr); + let id = self.collect_expr(expr); self.current_file_id = old_file_id; return id; } @@ -863,7 +864,7 @@ where } } - fn collect_expr_opt(&mut self, expr: Option<&ast::Expr>) -> ExprId { + fn collect_expr_opt(&mut self, expr: Option) -> ExprId { if let Some(expr) = expr { self.collect_expr(expr) } else { @@ -871,7 +872,7 @@ where } } - fn collect_block(&mut self, block: &ast::Block) -> ExprId { + fn collect_block(&mut self, block: ast::Block) -> ExprId { let statements = block .statements() .map(|s| match s.kind() { @@ -890,7 +891,7 @@ where self.alloc_expr(Expr::Block { statements, tail }, SyntaxNodePtr::new(block.syntax())) } - fn collect_block_opt(&mut self, block: Option<&ast::Block>) -> ExprId { + fn collect_block_opt(&mut self, block: Option) -> ExprId { if let Some(block) = block { self.collect_block(block) } else { @@ -898,7 +899,7 
@@ where } } - fn collect_pat(&mut self, pat: &ast::Pat) -> PatId { + fn collect_pat(&mut self, pat: ast::Pat) -> PatId { let pattern = match pat.kind() { ast::PatKind::BindPat(bp) => { let name = bp.name().map(|nr| nr.as_name()).unwrap_or_else(Name::missing); @@ -932,7 +933,8 @@ where let mut fields: Vec<_> = field_pat_list .bind_pats() .filter_map(|bind_pat| { - let ast_pat = ast::Pat::cast(bind_pat.syntax()).expect("bind pat is a pat"); + let ast_pat = + ast::Pat::cast(bind_pat.syntax().clone()).expect("bind pat is a pat"); let pat = self.collect_pat(ast_pat); let name = bind_pat.name()?.as_name(); Some(FieldPat { name, pat }) @@ -953,11 +955,11 @@ where ast::PatKind::LiteralPat(_) => Pat::Missing, ast::PatKind::SlicePat(_) | ast::PatKind::RangePat(_) => Pat::Missing, }; - let ptr = AstPtr::new(pat); + let ptr = AstPtr::new(&pat); self.alloc_pat(pattern, Either::A(ptr)) } - fn collect_pat_opt(&mut self, pat: Option<&ast::Pat>) -> PatId { + fn collect_pat_opt(&mut self, pat: Option) -> PatId { if let Some(pat) = pat { self.collect_pat(pat) } else { @@ -965,20 +967,20 @@ where } } - fn collect_const_body(&mut self, node: &ast::ConstDef) { + fn collect_const_body(&mut self, node: ast::ConstDef) { let body = self.collect_expr_opt(node.body()); self.body_expr = Some(body); } - fn collect_static_body(&mut self, node: &ast::StaticDef) { + fn collect_static_body(&mut self, node: ast::StaticDef) { let body = self.collect_expr_opt(node.body()); self.body_expr = Some(body); } - fn collect_fn_body(&mut self, node: &ast::FnDef) { + fn collect_fn_body(&mut self, node: ast::FnDef) { if let Some(param_list) = node.param_list() { if let Some(self_param) = param_list.self_param() { - let ptr = AstPtr::new(self_param); + let ptr = AstPtr::new(&self_param); let param_pat = self.alloc_pat( Pat::Bind { name: SELF_PARAM, @@ -1027,17 +1029,17 @@ pub(crate) fn body_with_source_map_query( DefWithBody::Const(ref c) => { let src = c.source(db); collector = ExprCollector::new(def, 
src.file_id, def.resolver(db), db); - collector.collect_const_body(&src.ast) + collector.collect_const_body(src.ast) } DefWithBody::Function(ref f) => { let src = f.source(db); collector = ExprCollector::new(def, src.file_id, def.resolver(db), db); - collector.collect_fn_body(&src.ast) + collector.collect_fn_body(src.ast) } DefWithBody::Static(ref s) => { let src = s.source(db); collector = ExprCollector::new(def, src.file_id, def.resolver(db), db); - collector.collect_static_body(&src.ast) + collector.collect_static_body(src.ast) } } diff --git a/crates/ra_hir/src/expr/scope.rs b/crates/ra_hir/src/expr/scope.rs index 28fd526847a..6589b782c49 100644 --- a/crates/ra_hir/src/expr/scope.rs +++ b/crates/ra_hir/src/expr/scope.rs @@ -190,7 +190,7 @@ mod tests { let (db, _source_root, file_id) = MockDatabase::with_single_file(&code); let file = db.parse(file_id).ok().unwrap(); - let marker: &ast::PathExpr = find_node_at_offset(file.syntax(), off).unwrap(); + let marker: ast::PathExpr = find_node_at_offset(file.syntax(), off).unwrap(); let analyzer = SourceAnalyzer::new(&db, file_id, marker.syntax(), None); let scopes = analyzer.scopes(); @@ -290,10 +290,10 @@ mod tests { let file = db.parse(file_id).ok().unwrap(); let expected_name = find_node_at_offset::(file.syntax(), expected_offset.into()) .expect("failed to find a name at the target offset"); - let name_ref: &ast::NameRef = find_node_at_offset(file.syntax(), off).unwrap(); + let name_ref: ast::NameRef = find_node_at_offset(file.syntax(), off).unwrap(); let analyzer = SourceAnalyzer::new(&db, file_id, name_ref.syntax(), None); - let local_name_entry = analyzer.resolve_local_name(name_ref).unwrap(); + let local_name_entry = analyzer.resolve_local_name(&name_ref).unwrap(); let local_name = local_name_entry.ptr().either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr()); assert_eq!(local_name.range(), expected_name.syntax().range()); diff --git a/crates/ra_hir/src/expr/validation.rs 
b/crates/ra_hir/src/expr/validation.rs index c2a10a0b5c2..82a06ca25c7 100644 --- a/crates/ra_hir/src/expr/validation.rs +++ b/crates/ra_hir/src/expr/validation.rs @@ -79,7 +79,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> { .and_then(StructLit::cast) .and_then(|lit| lit.named_field_list()) { - let field_list_ptr = AstPtr::new(field_list_node); + let field_list_ptr = AstPtr::new(&field_list_node); self.sink.push(MissingFields { file: file_id, field_list: field_list_ptr, diff --git a/crates/ra_hir/src/generics.rs b/crates/ra_hir/src/generics.rs index 07a59193f06..bcbb4988d5c 100644 --- a/crates/ra_hir/src/generics.rs +++ b/crates/ra_hir/src/generics.rs @@ -76,17 +76,17 @@ impl GenericParams { generics.parent_params = parent.map(|p| db.generic_params(p)); let start = generics.parent_params.as_ref().map(|p| p.params.len()).unwrap_or(0) as u32; match def { - GenericDef::Function(it) => generics.fill(&*it.source(db).ast, start), - GenericDef::Struct(it) => generics.fill(&*it.source(db).ast, start), - GenericDef::Union(it) => generics.fill(&*it.source(db).ast, start), - GenericDef::Enum(it) => generics.fill(&*it.source(db).ast, start), + GenericDef::Function(it) => generics.fill(&it.source(db).ast, start), + GenericDef::Struct(it) => generics.fill(&it.source(db).ast, start), + GenericDef::Union(it) => generics.fill(&it.source(db).ast, start), + GenericDef::Enum(it) => generics.fill(&it.source(db).ast, start), GenericDef::Trait(it) => { // traits get the Self type as an implicit first type parameter generics.params.push(GenericParam { idx: start, name: SELF_TYPE, default: None }); - generics.fill(&*it.source(db).ast, start + 1); + generics.fill(&it.source(db).ast, start + 1); } - GenericDef::TypeAlias(it) => generics.fill(&*it.source(db).ast, start), - GenericDef::ImplBlock(it) => generics.fill(&*it.source(db).ast, start), + GenericDef::TypeAlias(it) => generics.fill(&it.source(db).ast, start), + GenericDef::ImplBlock(it) => generics.fill(&it.source(db).ast, start), 
GenericDef::EnumVariant(_) => {} } @@ -102,9 +102,9 @@ impl GenericParams { } } - fn fill_params(&mut self, params: &ast::TypeParamList, start: u32) { + fn fill_params(&mut self, params: ast::TypeParamList, start: u32) { for (idx, type_param) in params.type_params().enumerate() { - let name = type_param.name().map(AsName::as_name).unwrap_or_else(Name::missing); + let name = type_param.name().map_or_else(Name::missing, |it| it.as_name()); let default = type_param.default_type().and_then(|t| t.path()).and_then(Path::from_ast); let param = GenericParam { idx: idx as u32 + start, name: name.clone(), default }; @@ -121,7 +121,7 @@ impl GenericParams { } } - fn fill_where_predicates(&mut self, where_clause: &ast::WhereClause) { + fn fill_where_predicates(&mut self, where_clause: ast::WhereClause) { for pred in where_clause.predicates() { let type_ref = match pred.type_ref() { Some(type_ref) => type_ref, @@ -134,7 +134,7 @@ impl GenericParams { } } - fn add_where_predicate_from_bound(&mut self, bound: &ast::TypeBound, type_ref: TypeRef) { + fn add_where_predicate_from_bound(&mut self, bound: ast::TypeBound, type_ref: TypeRef) { let path = bound .type_ref() .and_then(|tr| match tr.kind() { diff --git a/crates/ra_hir/src/ids.rs b/crates/ra_hir/src/ids.rs index 83f5c3f394d..05a18eb5680 100644 --- a/crates/ra_hir/src/ids.rs +++ b/crates/ra_hir/src/ids.rs @@ -6,7 +6,7 @@ use std::{ use mbe::MacroRules; use ra_db::{salsa, FileId}; use ra_prof::profile; -use ra_syntax::{ast, AstNode, Parse, SyntaxNode, TreeArc}; +use ra_syntax::{ast, AstNode, Parse, SyntaxNode}; use crate::{AstDatabase, AstId, DefDatabase, FileAstId, InternDatabase, Module, Source}; @@ -58,11 +58,11 @@ impl HirFileId { pub(crate) fn parse_or_expand_query( db: &impl AstDatabase, file_id: HirFileId, - ) -> Option> { + ) -> Option { match file_id.0 { - HirFileIdRepr::File(file_id) => Some(db.parse(file_id).tree().syntax().to_owned()), + HirFileIdRepr::File(file_id) => 
Some(db.parse(file_id).tree().syntax().clone()), HirFileIdRepr::Macro(macro_file) => { - db.parse_macro(macro_file).map(|it| it.tree().to_owned()) + db.parse_macro(macro_file).map(|it| it.syntax_node()) } } } @@ -123,7 +123,7 @@ pub struct MacroDefId(pub(crate) AstId); pub(crate) fn macro_def_query(db: &impl AstDatabase, id: MacroDefId) -> Option> { let macro_call = id.0.to_node(db); let arg = macro_call.token_tree()?; - let (tt, _) = mbe::ast_to_token_tree(arg).or_else(|| { + let (tt, _) = mbe::ast_to_token_tree(&arg).or_else(|| { log::warn!("fail on macro_def to token tree: {:#?}", arg); None })?; @@ -138,7 +138,7 @@ pub(crate) fn macro_arg_query(db: &impl AstDatabase, id: MacroCallId) -> Option< let loc = id.loc(db); let macro_call = loc.ast_id.to_node(db); let arg = macro_call.token_tree()?; - let (tt, _) = mbe::ast_to_token_tree(arg)?; + let (tt, _) = mbe::ast_to_token_tree(&arg)?; Some(Arc::new(tt)) } @@ -262,7 +262,7 @@ pub(crate) trait AstItemDef: salsa::InternKey + Clone { let loc = ItemLoc { module: ctx.module, ast_id: ast_id.with_file_id(ctx.file_id) }; Self::intern(ctx.db, loc) } - fn source(self, db: &(impl AstDatabase + DefDatabase)) -> Source> { + fn source(self, db: &(impl AstDatabase + DefDatabase)) -> Source { let loc = self.lookup_intern(db); let ast = loc.ast_id.to_node(db); Source { file_id: loc.ast_id.file_id(), ast } diff --git a/crates/ra_hir/src/impl_block.rs b/crates/ra_hir/src/impl_block.rs index ce134b27a4a..8e62cf66d46 100644 --- a/crates/ra_hir/src/impl_block.rs +++ b/crates/ra_hir/src/impl_block.rs @@ -4,7 +4,7 @@ use std::sync::Arc; use ra_arena::{impl_arena_id, map::ArenaMap, Arena, RawId}; use ra_syntax::{ ast::{self, AstNode}, - AstPtr, SourceFile, TreeArc, + AstPtr, SourceFile, }; use crate::{ @@ -28,9 +28,9 @@ impl ImplSourceMap { self.map.insert(impl_id, AstPtr::new(impl_block)) } - pub fn get(&self, source: &ModuleSource, impl_id: ImplId) -> TreeArc { + pub fn get(&self, source: &ModuleSource, impl_id: ImplId) -> 
ast::ImplBlock { let file = match source { - ModuleSource::SourceFile(file) => &*file, + ModuleSource::SourceFile(file) => file.clone(), ModuleSource::Module(m) => m.syntax().ancestors().find_map(SourceFile::cast).unwrap(), }; @@ -45,8 +45,8 @@ pub struct ImplBlock { } impl HasSource for ImplBlock { - type Ast = TreeArc; - fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source> { + type Ast = ast::ImplBlock; + fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source { let source_map = db.impls_in_module_with_source_map(self.module).1; let src = self.module.definition_source(db); Source { file_id: src.file_id, ast: source_map.get(&src.ast, self.impl_id) } @@ -132,9 +132,9 @@ impl ImplData { item_list .impl_items() .map(|item_node| match item_node.kind() { - ast::ImplItemKind::FnDef(it) => Function { id: ctx.to_def(it) }.into(), - ast::ImplItemKind::ConstDef(it) => Const { id: ctx.to_def(it) }.into(), - ast::ImplItemKind::TypeAliasDef(it) => TypeAlias { id: ctx.to_def(it) }.into(), + ast::ImplItemKind::FnDef(it) => Function { id: ctx.to_def(&it) }.into(), + ast::ImplItemKind::ConstDef(it) => Const { id: ctx.to_def(&it) }.into(), + ast::ImplItemKind::TypeAliasDef(it) => TypeAlias { id: ctx.to_def(&it) }.into(), }) .collect() } else { @@ -202,20 +202,20 @@ impl ModuleImplBlocks { let src = m.module.definition_source(db); let node = match &src.ast { - ModuleSource::SourceFile(node) => node.syntax(), + ModuleSource::SourceFile(node) => node.syntax().clone(), ModuleSource::Module(node) => { - node.item_list().expect("inline module should have item list").syntax() + node.item_list().expect("inline module should have item list").syntax().clone() } }; for impl_block_ast in node.children().filter_map(ast::ImplBlock::cast) { - let impl_block = ImplData::from_ast(db, src.file_id, m.module, impl_block_ast); + let impl_block = ImplData::from_ast(db, src.file_id, m.module, &impl_block_ast); let id = m.impls.alloc(impl_block); for &impl_item in &m.impls[id].items 
{ m.impls_by_def.insert(impl_item, id); } - source_map.insert(id, impl_block_ast); + source_map.insert(id, &impl_block_ast); } m diff --git a/crates/ra_hir/src/lang_item.rs b/crates/ra_hir/src/lang_item.rs index 0443d4d9a22..fd6609fb8af 100644 --- a/crates/ra_hir/src/lang_item.rs +++ b/crates/ra_hir/src/lang_item.rs @@ -1,7 +1,7 @@ use rustc_hash::FxHashMap; use std::sync::Arc; -use ra_syntax::{ast::AttrsOwner, SmolStr, TreeArc}; +use ra_syntax::{ast::AttrsOwner, SmolStr}; use crate::{ AstDatabase, Crate, DefDatabase, Enum, Function, HasSource, HirDatabase, ImplBlock, Module, @@ -95,7 +95,7 @@ impl LangItems { // Look for impl targets for impl_block in module.impl_blocks(db) { let src = impl_block.source(db); - if let Some(lang_item_name) = lang_item_name(&*src.ast) { + if let Some(lang_item_name) = lang_item_name(&src.ast) { self.items .entry(lang_item_name) .or_insert_with(|| LangItemTarget::ImplBlock(impl_block)); @@ -137,11 +137,11 @@ impl LangItems { item: T, constructor: fn(T) -> LangItemTarget, ) where - T: Copy + HasSource>, + T: Copy + HasSource, N: AttrsOwner, { let node = item.source(db).ast; - if let Some(lang_item_name) = lang_item_name(&*node) { + if let Some(lang_item_name) = lang_item_name(&node) { self.items.entry(lang_item_name).or_insert_with(|| constructor(item)); } } diff --git a/crates/ra_hir/src/name.rs b/crates/ra_hir/src/name.rs index 40c9d600224..c589f8aba6e 100644 --- a/crates/ra_hir/src/name.rs +++ b/crates/ra_hir/src/name.rs @@ -75,7 +75,7 @@ impl AsName for ast::Name { } } -impl<'a> AsName for ast::FieldKind<'a> { +impl AsName for ast::FieldKind { fn as_name(&self) -> Name { match self { ast::FieldKind::Name(nr) => nr.as_name(), diff --git a/crates/ra_hir/src/nameres/raw.rs b/crates/ra_hir/src/nameres/raw.rs index 46b2bef5b57..8517f3c430d 100644 --- a/crates/ra_hir/src/nameres/raw.rs +++ b/crates/ra_hir/src/nameres/raw.rs @@ -3,7 +3,7 @@ use std::{ops::Index, sync::Arc}; use ra_arena::{impl_arena_id, map::ArenaMap, Arena, RawId}; use 
ra_syntax::{ ast::{self, AttrsOwner, NameOwner}, - AstNode, AstPtr, SmolStr, SourceFile, TreeArc, + AstNode, AstPtr, SmolStr, SourceFile, }; use test_utils::tested_by; @@ -32,7 +32,7 @@ pub struct ImportSourceMap { } type ImportSourcePtr = Either, AstPtr>; -type ImportSource = Either, TreeArc>; +type ImportSource = Either; impl ImportSourcePtr { fn to_node(self, file: &SourceFile) -> ImportSource { @@ -50,11 +50,11 @@ impl ImportSourceMap { pub(crate) fn get(&self, source: &ModuleSource, import: ImportId) -> ImportSource { let file = match source { - ModuleSource::SourceFile(file) => &*file, + ModuleSource::SourceFile(file) => file.clone(), ModuleSource::Module(m) => m.syntax().ancestors().find_map(SourceFile::cast).unwrap(), }; - self.map[import].to_node(file) + self.map[import].to_node(&file) } } @@ -76,8 +76,8 @@ impl RawItems { source_map: ImportSourceMap::default(), }; if let Some(node) = db.parse_or_expand(file_id) { - if let Some(source_file) = ast::SourceFile::cast(&node) { - collector.process_module(None, &*source_file); + if let Some(source_file) = ast::SourceFile::cast(node) { + collector.process_module(None, source_file); } } (Arc::new(collector.raw_items), Arc::new(collector.source_map)) @@ -188,7 +188,7 @@ struct RawItemsCollector { } impl RawItemsCollector { - fn process_module(&mut self, current_module: Option, body: &impl ast::ModuleItemOwner) { + fn process_module(&mut self, current_module: Option, body: impl ast::ModuleItemOwner) { for item_or_macro in body.items_with_macros() { match item_or_macro { ast::ItemOrMacro::Macro(m) => self.add_macro(current_module, m), @@ -197,7 +197,7 @@ impl RawItemsCollector { } } - fn add_item(&mut self, current_module: Option, item: &ast::ModuleItem) { + fn add_item(&mut self, current_module: Option, item: ast::ModuleItem) { let (kind, name) = match item.kind() { ast::ModuleItemKind::Module(module) => { self.add_module(current_module, module); @@ -216,7 +216,7 @@ impl RawItemsCollector { return; } 
ast::ModuleItemKind::StructDef(it) => { - let id = self.source_ast_id_map.ast_id(it); + let id = self.source_ast_id_map.ast_id(&it); let name = it.name(); if it.is_union() { (DefKind::Union(id), name) @@ -225,22 +225,22 @@ impl RawItemsCollector { } } ast::ModuleItemKind::EnumDef(it) => { - (DefKind::Enum(self.source_ast_id_map.ast_id(it)), it.name()) + (DefKind::Enum(self.source_ast_id_map.ast_id(&it)), it.name()) } ast::ModuleItemKind::FnDef(it) => { - (DefKind::Function(self.source_ast_id_map.ast_id(it)), it.name()) + (DefKind::Function(self.source_ast_id_map.ast_id(&it)), it.name()) } ast::ModuleItemKind::TraitDef(it) => { - (DefKind::Trait(self.source_ast_id_map.ast_id(it)), it.name()) + (DefKind::Trait(self.source_ast_id_map.ast_id(&it)), it.name()) } ast::ModuleItemKind::TypeAliasDef(it) => { - (DefKind::TypeAlias(self.source_ast_id_map.ast_id(it)), it.name()) + (DefKind::TypeAlias(self.source_ast_id_map.ast_id(&it)), it.name()) } ast::ModuleItemKind::ConstDef(it) => { - (DefKind::Const(self.source_ast_id_map.ast_id(it)), it.name()) + (DefKind::Const(self.source_ast_id_map.ast_id(&it)), it.name()) } ast::ModuleItemKind::StaticDef(it) => { - (DefKind::Static(self.source_ast_id_map.ast_id(it)), it.name()) + (DefKind::Static(self.source_ast_id_map.ast_id(&it)), it.name()) } }; if let Some(name) = name { @@ -250,14 +250,14 @@ impl RawItemsCollector { } } - fn add_module(&mut self, current_module: Option, module: &ast::Module) { + fn add_module(&mut self, current_module: Option, module: ast::Module) { let name = match module.name() { Some(it) => it.as_name(), None => return, }; - let attr_path = extract_mod_path_attribute(module); - let ast_id = self.source_ast_id_map.ast_id(module); + let attr_path = extract_mod_path_attribute(&module); + let ast_id = self.source_ast_id_map.ast_id(&module); if module.has_semi() { let item = self.raw_items.modules.alloc(ModuleData::Declaration { name, ast_id, attr_path }); @@ -278,10 +278,10 @@ impl RawItemsCollector { 
tested_by!(name_res_works_for_broken_modules); } - fn add_use_item(&mut self, current_module: Option, use_item: &ast::UseItem) { + fn add_use_item(&mut self, current_module: Option, use_item: ast::UseItem) { let is_prelude = use_item.has_atom_attr("prelude_import"); - Path::expand_use_item(use_item, |path, use_tree, is_glob, alias| { + Path::expand_use_item(&use_item, |path, use_tree, is_glob, alias| { let import_data = ImportData { path, alias, is_glob, is_prelude, is_extern_crate: false }; self.push_import(current_module, import_data, Either::A(AstPtr::new(use_tree))); @@ -291,11 +291,11 @@ impl RawItemsCollector { fn add_extern_crate_item( &mut self, current_module: Option, - extern_crate: &ast::ExternCrateItem, + extern_crate: ast::ExternCrateItem, ) { if let Some(name_ref) = extern_crate.name_ref() { - let path = Path::from_name_ref(name_ref); - let alias = extern_crate.alias().and_then(|a| a.name()).map(AsName::as_name); + let path = Path::from_name_ref(&name_ref); + let alias = extern_crate.alias().and_then(|a| a.name()).map(|it| it.as_name()); let import_data = ImportData { path, alias, @@ -303,18 +303,18 @@ impl RawItemsCollector { is_prelude: false, is_extern_crate: true, }; - self.push_import(current_module, import_data, Either::B(AstPtr::new(extern_crate))); + self.push_import(current_module, import_data, Either::B(AstPtr::new(&extern_crate))); } } - fn add_macro(&mut self, current_module: Option, m: &ast::MacroCall) { + fn add_macro(&mut self, current_module: Option, m: ast::MacroCall) { let path = match m.path().and_then(Path::from_ast) { Some(it) => it, _ => return, }; let name = m.name().map(|it| it.as_name()); - let ast_id = self.source_ast_id_map.ast_id(m); + let ast_id = self.source_ast_id_map.ast_id(&m); let export = m.has_atom_attr("macro_export"); let m = self.raw_items.macros.alloc(MacroData { ast_id, path, name, export }); self.push_item(current_module, RawItem::Macro(m)); diff --git a/crates/ra_hir/src/path.rs b/crates/ra_hir/src/path.rs 
index bce9d2d4bcb..882db768162 100644 --- a/crates/ra_hir/src/path.rs +++ b/crates/ra_hir/src/path.rs @@ -47,9 +47,9 @@ pub enum PathKind { impl Path { /// Calls `cb` with all paths, represented by this use item. - pub fn expand_use_item<'a>( - item: &'a ast::UseItem, - mut cb: impl FnMut(Path, &'a ast::UseTree, bool, Option), + pub fn expand_use_item( + item: &ast::UseItem, + mut cb: impl FnMut(Path, &ast::UseTree, bool, Option), ) { if let Some(tree) = item.use_tree() { expand_use_tree(None, tree, &mut cb); @@ -57,7 +57,7 @@ impl Path { } /// Converts an `ast::Path` to `Path`. Works with use trees. - pub fn from_ast(mut path: &ast::Path) -> Option { + pub fn from_ast(mut path: ast::Path) -> Option { let mut kind = PathKind::Plain; let mut segments = Vec::new(); loop { @@ -87,7 +87,7 @@ impl Path { break; } } - path = match qualifier(path) { + path = match qualifier(&path) { Some(it) => it, None => break, }; @@ -95,7 +95,7 @@ impl Path { segments.reverse(); return Some(Path { kind, segments }); - fn qualifier(path: &ast::Path) -> Option<&ast::Path> { + fn qualifier(path: &ast::Path) -> Option { if let Some(q) = path.qualifier() { return Some(q); } @@ -136,7 +136,7 @@ impl Path { } impl GenericArgs { - pub(crate) fn from_ast(node: &ast::TypeArgList) -> Option { + pub(crate) fn from_ast(node: ast::TypeArgList) -> Option { let mut args = Vec::new(); for type_arg in node.type_args() { let type_ref = TypeRef::from_ast_opt(type_arg.type_ref()); @@ -160,10 +160,10 @@ impl From for Path { } } -fn expand_use_tree<'a>( +fn expand_use_tree( prefix: Option, - tree: &'a ast::UseTree, - cb: &mut impl FnMut(Path, &'a ast::UseTree, bool, Option), + tree: ast::UseTree, + cb: &mut impl FnMut(Path, &ast::UseTree, bool, Option), ) { if let Some(use_tree_list) = tree.use_tree_list() { let prefix = match tree.path() { @@ -188,7 +188,7 @@ fn expand_use_tree<'a>( if let Some(segment) = ast_path.segment() { if segment.kind() == Some(ast::PathSegmentKind::SelfKw) { if let Some(prefix) = 
prefix { - cb(prefix, tree, false, alias); + cb(prefix, &tree, false, alias); return; } } @@ -196,7 +196,7 @@ fn expand_use_tree<'a>( } if let Some(path) = convert_path(prefix, ast_path) { let is_glob = tree.has_star(); - cb(path, tree, is_glob, alias) + cb(path, &tree, is_glob, alias) } // FIXME: report errors somewhere // We get here if we do @@ -204,7 +204,7 @@ fn expand_use_tree<'a>( } } -fn convert_path(prefix: Option, path: &ast::Path) -> Option { +fn convert_path(prefix: Option, path: ast::Path) -> Option { let prefix = if let Some(qual) = path.qualifier() { Some(convert_path(prefix, qual)?) } else { prefix }; let segment = path.segment()?; diff --git a/crates/ra_hir/src/source_binder.rs b/crates/ra_hir/src/source_binder.rs index 071c1bb1887..e7bc4df9788 100644 --- a/crates/ra_hir/src/source_binder.rs +++ b/crates/ra_hir/src/source_binder.rs @@ -37,7 +37,7 @@ pub fn module_from_file_id(db: &impl HirDatabase, file_id: FileId) -> Option Option { let parent_module = module_from_file_id(db, file_id); let child_name = decl.name(); @@ -50,8 +50,8 @@ pub fn module_from_declaration( /// Locates the module by position in the source code. 
pub fn module_from_position(db: &impl HirDatabase, position: FilePosition) -> Option { let parse = db.parse(position.file_id); - match find_node_at_offset::(parse.tree().syntax(), position.offset) { - Some(m) if !m.has_semi() => module_from_inline(db, position.file_id, m), + match &find_node_at_offset::(parse.tree().syntax(), position.offset) { + Some(m) if !m.has_semi() => module_from_inline(db, position.file_id, m.clone()), _ => module_from_file_id(db, position.file_id), } } @@ -59,12 +59,12 @@ pub fn module_from_position(db: &impl HirDatabase, position: FilePosition) -> Op fn module_from_inline( db: &impl HirDatabase, file_id: FileId, - module: &ast::Module, + module: ast::Module, ) -> Option { assert!(!module.has_semi()); let file_id = file_id.into(); let ast_id_map = db.ast_id_map(file_id); - let item_id = ast_id_map.ast_id(module).with_file_id(file_id); + let item_id = ast_id_map.ast_id(&module).with_file_id(file_id); module_from_source(db, file_id, Some(item_id)) } @@ -127,16 +127,16 @@ fn try_get_resolver_for_node( file_id: FileId, node: &SyntaxNode, ) -> Option { - if let Some(module) = ast::Module::cast(node) { + if let Some(module) = ast::Module::cast(node.clone()) { Some(module_from_declaration(db, file_id, module)?.resolver(db)) - } else if let Some(_) = ast::SourceFile::cast(node) { + } else if let Some(_) = ast::SourceFile::cast(node.clone()) { Some(module_from_source(db, file_id.into(), None)?.resolver(db)) - } else if let Some(s) = ast::StructDef::cast(node) { + } else if let Some(s) = ast::StructDef::cast(node.clone()) { let module = module_from_child_node(db, file_id, s.syntax())?; - Some(struct_from_module(db, module, s).resolver(db)) - } else if let Some(e) = ast::EnumDef::cast(node) { + Some(struct_from_module(db, module, &s).resolver(db)) + } else if let Some(e) = ast::EnumDef::cast(node.clone()) { let module = module_from_child_node(db, file_id, e.syntax())?; - Some(enum_from_module(db, module, e).resolver(db)) + Some(enum_from_module(db, 
module, &e).resolver(db)) } else if node.kind() == FN_DEF || node.kind() == CONST_DEF || node.kind() == STATIC_DEF { Some(def_with_body_from_child_node(db, file_id, node)?.resolver(db)) } else { @@ -153,14 +153,14 @@ fn def_with_body_from_child_node( let module = module_from_child_node(db, file_id, node)?; let ctx = LocationCtx::new(db, module, file_id.into()); node.ancestors().find_map(|node| { - if let Some(def) = ast::FnDef::cast(node) { - return Some(Function { id: ctx.to_def(def) }.into()); + if let Some(def) = ast::FnDef::cast(node.clone()) { + return Some(Function { id: ctx.to_def(&def) }.into()); } - if let Some(def) = ast::ConstDef::cast(node) { - return Some(Const { id: ctx.to_def(def) }.into()); + if let Some(def) = ast::ConstDef::cast(node.clone()) { + return Some(Const { id: ctx.to_def(&def) }.into()); } - if let Some(def) = ast::StaticDef::cast(node) { - return Some(Static { id: ctx.to_def(def) }.into()); + if let Some(def) = ast::StaticDef::cast(node.clone()) { + return Some(Static { id: ctx.to_def(&def) }.into()); } None }) @@ -237,7 +237,7 @@ impl SourceAnalyzer { SourceAnalyzer { resolver: node .ancestors() - .find_map(|node| try_get_resolver_for_node(db, file_id, node)) + .find_map(|node| try_get_resolver_for_node(db, file_id, &node)) .unwrap_or_default(), body_source_map: None, infer: None, @@ -257,17 +257,17 @@ impl SourceAnalyzer { } pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option { - let expr_id = self.body_source_map.as_ref()?.node_expr(call.into())?; + let expr_id = self.body_source_map.as_ref()?.node_expr(&call.clone().into())?; self.infer.as_ref()?.method_resolution(expr_id) } pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option { - let expr_id = self.body_source_map.as_ref()?.node_expr(field.into())?; + let expr_id = self.body_source_map.as_ref()?.node_expr(&field.clone().into())?; self.infer.as_ref()?.field_resolution(expr_id) } pub fn resolve_variant(&self, struct_lit: &ast::StructLit) -> Option { - let 
expr_id = self.body_source_map.as_ref()?.node_expr(struct_lit.into())?; + let expr_id = self.body_source_map.as_ref()?.node_expr(&struct_lit.clone().into())?; self.infer.as_ref()?.variant_resolution(expr_id) } @@ -290,18 +290,18 @@ impl SourceAnalyzer { pub fn resolve_path(&self, db: &impl HirDatabase, path: &ast::Path) -> Option { if let Some(path_expr) = path.syntax().parent().and_then(ast::PathExpr::cast) { - let expr_id = self.body_source_map.as_ref()?.node_expr(path_expr.into())?; + let expr_id = self.body_source_map.as_ref()?.node_expr(&path_expr.into())?; if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_expr(expr_id) { return Some(PathResolution::AssocItem(assoc)); } } if let Some(path_pat) = path.syntax().parent().and_then(ast::PathPat::cast) { - let pat_id = self.body_source_map.as_ref()?.node_pat(path_pat.into())?; + let pat_id = self.body_source_map.as_ref()?.node_pat(&path_pat.into())?; if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_pat(pat_id) { return Some(PathResolution::AssocItem(assoc)); } } - let hir_path = crate::Path::from_ast(path)?; + let hir_path = crate::Path::from_ast(path.clone())?; let res = self.resolver.resolve_path_without_assoc_items(db, &hir_path); let res = res.clone().take_types().or_else(|| res.take_values())?; let res = match res { @@ -343,12 +343,12 @@ impl SourceAnalyzer { // FIXME: at least, this should work with any DefWithBody, but ideally // this should be hir-based altogether let fn_def = pat.syntax().ancestors().find_map(ast::FnDef::cast).unwrap(); - let ptr = Either::A(AstPtr::new(pat.into())); + let ptr = Either::A(AstPtr::new(&ast::Pat::from(pat.clone()))); fn_def .syntax() .descendants() .filter_map(ast::NameRef::cast) - .filter(|name_ref| match self.resolve_local_name(*name_ref) { + .filter(|name_ref| match self.resolve_local_name(&name_ref) { None => false, Some(entry) => entry.ptr() == ptr, }) @@ -411,7 +411,7 @@ fn scope_for( node: &SyntaxNode, ) -> Option { node.ancestors() - 
.map(SyntaxNodePtr::new) + .map(|it| SyntaxNodePtr::new(&it)) .filter_map(|ptr| source_map.syntax_expr(ptr)) .find_map(|it| scopes.scope_for(it)) } diff --git a/crates/ra_hir/src/source_id.rs b/crates/ra_hir/src/source_id.rs index 6cdb901413b..51cd65ddad5 100644 --- a/crates/ra_hir/src/source_id.rs +++ b/crates/ra_hir/src/source_id.rs @@ -5,7 +5,7 @@ use std::{ }; use ra_arena::{impl_arena_id, Arena, RawId}; -use ra_syntax::{ast, AstNode, SyntaxNode, SyntaxNodePtr, TreeArc}; +use ra_syntax::{ast, AstNode, SyntaxNode, SyntaxNodePtr}; use crate::{AstDatabase, HirFileId}; @@ -42,9 +42,9 @@ impl AstId { self.file_id } - pub(crate) fn to_node(&self, db: &impl AstDatabase) -> TreeArc { + pub(crate) fn to_node(&self, db: &impl AstDatabase) -> N { let syntax_node = db.ast_id_to_node(self.file_id, self.file_ast_id.raw); - N::cast(&syntax_node).unwrap().to_owned() + N::cast(syntax_node).unwrap() } } @@ -93,7 +93,7 @@ pub struct AstIdMap { impl AstIdMap { pub(crate) fn ast_id_map_query(db: &impl AstDatabase, file_id: HirFileId) -> Arc { let map = if let Some(node) = db.parse_or_expand(file_id) { - AstIdMap::from_source(&*node) + AstIdMap::from_source(&node) } else { AstIdMap::default() }; @@ -104,9 +104,9 @@ impl AstIdMap { db: &impl AstDatabase, file_id: HirFileId, ast_id: ErasedFileAstId, - ) -> TreeArc { + ) -> SyntaxNode { let node = db.parse_or_expand(file_id).unwrap(); - db.ast_id_map(file_id).arena[ast_id].to_node(&*node).to_owned() + db.ast_id_map(file_id).arena[ast_id].to_node(&node) } pub(crate) fn ast_id(&self, item: &N) -> FileAstId { @@ -131,7 +131,7 @@ impl AstIdMap { // change parent's id. This means that, say, adding a new function to a // trait does not change ids of top-level items, which helps caching. 
bfs(node, |it| { - if let Some(module_item) = ast::ModuleItem::cast(it) { + if let Some(module_item) = ast::ModuleItem::cast(it.clone()) { res.alloc(module_item.syntax()); } else if let Some(macro_call) = ast::MacroCall::cast(it) { res.alloc(macro_call.syntax()); @@ -146,8 +146,8 @@ impl AstIdMap { } /// Walks the subtree in bfs order, calling `f` for each node. -fn bfs(node: &SyntaxNode, mut f: impl FnMut(&SyntaxNode)) { - let mut curr_layer = vec![node]; +fn bfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode)) { + let mut curr_layer = vec![node.clone()]; let mut next_layer = vec![]; while !curr_layer.is_empty() { curr_layer.drain(..).for_each(|node| { diff --git a/crates/ra_hir/src/traits.rs b/crates/ra_hir/src/traits.rs index fc036830309..de26f1a68fe 100644 --- a/crates/ra_hir/src/traits.rs +++ b/crates/ra_hir/src/traits.rs @@ -31,9 +31,9 @@ impl TraitData { item_list .impl_items() .map(|item_node| match item_node.kind() { - ast::ImplItemKind::FnDef(it) => Function { id: ctx.to_def(it) }.into(), - ast::ImplItemKind::ConstDef(it) => Const { id: ctx.to_def(it) }.into(), - ast::ImplItemKind::TypeAliasDef(it) => TypeAlias { id: ctx.to_def(it) }.into(), + ast::ImplItemKind::FnDef(it) => Function { id: ctx.to_def(&it) }.into(), + ast::ImplItemKind::ConstDef(it) => Const { id: ctx.to_def(&it) }.into(), + ast::ImplItemKind::TypeAliasDef(it) => TypeAlias { id: ctx.to_def(&it) }.into(), }) .collect() } else { diff --git a/crates/ra_hir/src/ty/tests.rs b/crates/ra_hir/src/ty/tests.rs index 2410602a6b7..265740e5420 100644 --- a/crates/ra_hir/src/ty/tests.rs +++ b/crates/ra_hir/src/ty/tests.rs @@ -3086,7 +3086,7 @@ fn type_at_pos(db: &MockDatabase, pos: FilePosition) -> String { let file = db.parse(pos.file_id).ok().unwrap(); let expr = algo::find_node_at_offset::(file.syntax(), pos.offset).unwrap(); let analyzer = SourceAnalyzer::new(db, pos.file_id, expr.syntax(), Some(pos.offset)); - let ty = analyzer.type_of(db, expr).unwrap(); + let ty = analyzer.type_of(db, 
&expr).unwrap(); ty.display(db).to_string() } @@ -3126,7 +3126,7 @@ fn infer(content: &str) -> String { types.sort_by_key(|(ptr, _)| (ptr.range().start(), ptr.range().end())); for (syntax_ptr, ty) in &types { let node = syntax_ptr.to_node(source_file.syntax()); - let (range, text) = if let Some(self_param) = ast::SelfParam::cast(node) { + let (range, text) = if let Some(self_param) = ast::SelfParam::cast(node.clone()) { (self_param.self_kw_token().range(), "self".to_string()) } else { (syntax_ptr.range(), node.text().to_string().replace("\n", " ")) @@ -3137,7 +3137,7 @@ fn infer(content: &str) -> String { for node in source_file.syntax().descendants() { if node.kind() == FN_DEF || node.kind() == CONST_DEF || node.kind() == STATIC_DEF { - let analyzer = SourceAnalyzer::new(&db, file_id, node, None); + let analyzer = SourceAnalyzer::new(&db, file_id, &node, None); infer_def(analyzer.inference_result(), analyzer.body_source_map()); } } @@ -3179,7 +3179,7 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() { let node = algo::find_token_at_offset(file.syntax(), pos.offset).right_biased().unwrap().parent(); let events = db.log_executed(|| { - SourceAnalyzer::new(&db, pos.file_id, node, None); + SourceAnalyzer::new(&db, pos.file_id, &node, None); }); assert!(format!("{:?}", events).contains("infer")) } @@ -3200,7 +3200,7 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() { let node = algo::find_token_at_offset(file.syntax(), pos.offset).right_biased().unwrap().parent(); let events = db.log_executed(|| { - SourceAnalyzer::new(&db, pos.file_id, node, None); + SourceAnalyzer::new(&db, pos.file_id, &node, None); }); assert!(!format!("{:?}", events).contains("infer"), "{:#?}", events) } diff --git a/crates/ra_hir/src/type_ref.rs b/crates/ra_hir/src/type_ref.rs index 8aa80764821..8536ae44a15 100644 --- a/crates/ra_hir/src/type_ref.rs +++ b/crates/ra_hir/src/type_ref.rs @@ -56,7 +56,7 @@ pub enum TypeRef { impl TypeRef { /// Converts an 
`ast::TypeRef` to a `hir::TypeRef`. - pub(crate) fn from_ast(node: &ast::TypeRef) -> Self { + pub(crate) fn from_ast(node: ast::TypeRef) -> Self { use ra_syntax::ast::TypeRefKind::*; match node.kind() { ParenType(inner) => TypeRef::from_ast_opt(inner.type_ref()), @@ -95,7 +95,7 @@ impl TypeRef { } } - pub(crate) fn from_ast_opt(node: Option<&ast::TypeRef>) -> Self { + pub(crate) fn from_ast_opt(node: Option) -> Self { if let Some(node) = node { TypeRef::from_ast(node) } else { diff --git a/crates/ra_ide_api/src/call_info.rs b/crates/ra_ide_api/src/call_info.rs index 11dea7c1463..270499612bb 100644 --- a/crates/ra_ide_api/src/call_info.rs +++ b/crates/ra_ide_api/src/call_info.rs @@ -11,24 +11,24 @@ use crate::{db::RootDatabase, CallInfo, FilePosition, FunctionSignature}; /// Computes parameter information for the given call expression. pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option { let parse = db.parse(position.file_id); - let syntax = parse.tree().syntax(); + let syntax = parse.tree().syntax().clone(); // Find the calling expression and it's NameRef - let calling_node = FnCallNode::with_node(syntax, position.offset)?; + let calling_node = FnCallNode::with_node(&syntax, position.offset)?; let name_ref = calling_node.name_ref()?; let analyzer = hir::SourceAnalyzer::new(db, position.file_id, name_ref.syntax(), None); - let function = match calling_node { + let function = match &calling_node { FnCallNode::CallExpr(expr) => { //FIXME: apply subst - let (callable_def, _subst) = analyzer.type_of(db, expr.expr()?)?.as_callable()?; + let (callable_def, _subst) = analyzer.type_of(db, &expr.expr()?)?.as_callable()?; match callable_def { hir::CallableDef::Function(it) => it, //FIXME: handle other callables _ => return None, } } - FnCallNode::MethodCallExpr(expr) => analyzer.resolve_method_call(expr)?, + FnCallNode::MethodCallExpr(expr) => analyzer.resolve_method_call(&expr)?, }; let mut call_info = CallInfo::new(db, function); @@ -73,13 +73,13 
@@ pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option { - CallExpr(&'a ast::CallExpr), - MethodCallExpr(&'a ast::MethodCallExpr), +enum FnCallNode { + CallExpr(ast::CallExpr), + MethodCallExpr(ast::MethodCallExpr), } -impl<'a> FnCallNode<'a> { - fn with_node(syntax: &'a SyntaxNode, offset: TextUnit) -> Option> { +impl FnCallNode { + fn with_node(syntax: &SyntaxNode, offset: TextUnit) -> Option { if let Some(expr) = find_node_at_offset::(syntax, offset) { return Some(FnCallNode::CallExpr(expr)); } @@ -89,8 +89,8 @@ impl<'a> FnCallNode<'a> { None } - fn name_ref(&self) -> Option<&'a ast::NameRef> { - match *self { + fn name_ref(&self) -> Option { + match self { FnCallNode::CallExpr(call_expr) => Some(match call_expr.expr()?.kind() { ast::ExprKind::PathExpr(path_expr) => path_expr.path()?.segment()?.name_ref()?, _ => return None, @@ -102,8 +102,8 @@ impl<'a> FnCallNode<'a> { } } - fn arg_list(&self) -> Option<&'a ast::ArgList> { - match *self { + fn arg_list(&self) -> Option { + match self { FnCallNode::CallExpr(expr) => expr.arg_list(), FnCallNode::MethodCallExpr(expr) => expr.arg_list(), } diff --git a/crates/ra_ide_api/src/completion/complete_dot.rs b/crates/ra_ide_api/src/completion/complete_dot.rs index a5f071442b1..536ba36dff3 100644 --- a/crates/ra_ide_api/src/completion/complete_dot.rs +++ b/crates/ra_ide_api/src/completion/complete_dot.rs @@ -5,10 +5,11 @@ use rustc_hash::FxHashSet; /// Complete dot accesses, i.e. fields or methods (currently only fields). 
pub(super) fn complete_dot(acc: &mut Completions, ctx: &CompletionContext) { - let receiver_ty = match ctx.dot_receiver.and_then(|it| ctx.analyzer.type_of(ctx.db, it)) { - Some(it) => it, - None => return, - }; + let receiver_ty = + match ctx.dot_receiver.as_ref().and_then(|it| ctx.analyzer.type_of(ctx.db, it)) { + Some(it) => it, + None => return, + }; if !ctx.is_call { complete_fields(acc, ctx, receiver_ty.clone()); } diff --git a/crates/ra_ide_api/src/completion/complete_fn_param.rs b/crates/ra_ide_api/src/completion/complete_fn_param.rs index 5a117c48509..0887ef1f687 100644 --- a/crates/ra_ide_api/src/completion/complete_fn_param.rs +++ b/crates/ra_ide_api/src/completion/complete_fn_param.rs @@ -20,7 +20,7 @@ pub(super) fn complete_fn_param(acc: &mut Completions, ctx: &CompletionContext) let _ = visitor_ctx(&mut params) .visit::(process) .visit::(process) - .accept(node); + .accept(&node); } params .into_iter() @@ -38,10 +38,7 @@ pub(super) fn complete_fn_param(acc: &mut Completions, ctx: &CompletionContext) .add_to(acc) }); - fn process<'a, N: ast::FnDefOwner>( - node: &'a N, - params: &mut FxHashMap, - ) { + fn process(node: N, params: &mut FxHashMap) { node.functions().filter_map(|it| it.param_list()).flat_map(|it| it.params()).for_each( |param| { let text = param.syntax().text().to_string(); diff --git a/crates/ra_ide_api/src/completion/complete_keyword.rs b/crates/ra_ide_api/src/completion/complete_keyword.rs index 034ed934dfb..4cf34eff8a2 100644 --- a/crates/ra_ide_api/src/completion/complete_keyword.rs +++ b/crates/ra_ide_api/src/completion/complete_keyword.rs @@ -52,7 +52,7 @@ pub(super) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionConte return; } - let fn_def = match ctx.function_syntax { + let fn_def = match &ctx.function_syntax { Some(it) => it, None => return, }; @@ -65,7 +65,7 @@ pub(super) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionConte acc.add(keyword(ctx, "else", "else {$0}")); acc.add(keyword(ctx, "else 
if", "else if $0 {}")); } - if is_in_loop_body(ctx.token) { + if is_in_loop_body(&ctx.token) { if ctx.can_be_stmt { acc.add(keyword(ctx, "continue", "continue;")); acc.add(keyword(ctx, "break", "break;")); @@ -74,19 +74,19 @@ pub(super) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionConte acc.add(keyword(ctx, "break", "break")); } } - acc.add_all(complete_return(ctx, fn_def, ctx.can_be_stmt)); + acc.add_all(complete_return(ctx, &fn_def, ctx.can_be_stmt)); } -fn is_in_loop_body(leaf: SyntaxToken) -> bool { +fn is_in_loop_body(leaf: &SyntaxToken) -> bool { for node in leaf.parent().ancestors() { if node.kind() == FN_DEF || node.kind() == LAMBDA_EXPR { break; } let loop_body = visitor() - .visit::(LoopBodyOwner::loop_body) - .visit::(LoopBodyOwner::loop_body) - .visit::(LoopBodyOwner::loop_body) - .accept(node); + .visit::(|it| it.loop_body()) + .visit::(|it| it.loop_body()) + .visit::(|it| it.loop_body()) + .accept(&node); if let Some(Some(body)) = loop_body { if leaf.range().is_subrange(&body.syntax().range()) { return true; diff --git a/crates/ra_ide_api/src/completion/complete_postfix.rs b/crates/ra_ide_api/src/completion/complete_postfix.rs index 4f50622140b..c75b1c159c3 100644 --- a/crates/ra_ide_api/src/completion/complete_postfix.rs +++ b/crates/ra_ide_api/src/completion/complete_postfix.rs @@ -11,7 +11,8 @@ use ra_text_edit::TextEditBuilder; fn postfix_snippet(ctx: &CompletionContext, label: &str, detail: &str, snippet: &str) -> Builder { let edit = { - let receiver_range = ctx.dot_receiver.expect("no receiver available").syntax().range(); + let receiver_range = + ctx.dot_receiver.as_ref().expect("no receiver available").syntax().range(); let delete_range = TextRange::from_to(receiver_range.start(), ctx.source_range().end()); let mut builder = TextEditBuilder::default(); builder.replace(delete_range, snippet.to_string()); @@ -38,9 +39,9 @@ fn is_bool_or_unknown(ty: Option) -> bool { } pub(super) fn complete_postfix(acc: &mut Completions, ctx: 
&CompletionContext) { - if let Some(dot_receiver) = ctx.dot_receiver { + if let Some(dot_receiver) = &ctx.dot_receiver { let receiver_text = dot_receiver.syntax().text().to_string(); - let receiver_ty = ctx.analyzer.type_of(ctx.db, dot_receiver); + let receiver_ty = ctx.analyzer.type_of(ctx.db, &dot_receiver); if is_bool_or_unknown(receiver_ty) { postfix_snippet(ctx, "if", "if expr {}", &format!("if {} {{$0}}", receiver_text)) .add_to(acc); diff --git a/crates/ra_ide_api/src/completion/complete_scope.rs b/crates/ra_ide_api/src/completion/complete_scope.rs index 1ba8712572d..f920340557c 100644 --- a/crates/ra_ide_api/src/completion/complete_scope.rs +++ b/crates/ra_ide_api/src/completion/complete_scope.rs @@ -20,8 +20,8 @@ pub(super) fn complete_scope(acc: &mut Completions, ctx: &CompletionContext) { let mut builder = TextEditBuilder::default(); builder.replace(ctx.source_range(), name.to_string()); auto_import::auto_import_text_edit( - ctx.token.parent(), - ctx.token.parent(), + &ctx.token.parent(), + &ctx.token.parent(), &path, &mut builder, ); diff --git a/crates/ra_ide_api/src/completion/complete_struct_literal.rs b/crates/ra_ide_api/src/completion/complete_struct_literal.rs index b6216f857d6..9410f740ffe 100644 --- a/crates/ra_ide_api/src/completion/complete_struct_literal.rs +++ b/crates/ra_ide_api/src/completion/complete_struct_literal.rs @@ -4,8 +4,8 @@ use crate::completion::{CompletionContext, Completions}; /// Complete fields in fields literals. 
pub(super) fn complete_struct_literal(acc: &mut Completions, ctx: &CompletionContext) { - let (ty, variant) = match ctx.struct_lit_syntax.and_then(|it| { - Some((ctx.analyzer.type_of(ctx.db, it.into())?, ctx.analyzer.resolve_variant(it)?)) + let (ty, variant) = match ctx.struct_lit_syntax.as_ref().and_then(|it| { + Some((ctx.analyzer.type_of(ctx.db, &it.clone().into())?, ctx.analyzer.resolve_variant(it)?)) }) { Some(it) => it, _ => return, diff --git a/crates/ra_ide_api/src/completion/completion_context.rs b/crates/ra_ide_api/src/completion/completion_context.rs index 4aa84751f42..b803271abaf 100644 --- a/crates/ra_ide_api/src/completion/completion_context.rs +++ b/crates/ra_ide_api/src/completion/completion_context.rs @@ -16,11 +16,11 @@ pub(crate) struct CompletionContext<'a> { pub(super) db: &'a db::RootDatabase, pub(super) analyzer: hir::SourceAnalyzer, pub(super) offset: TextUnit, - pub(super) token: SyntaxToken<'a>, + pub(super) token: SyntaxToken, pub(super) module: Option, - pub(super) function_syntax: Option<&'a ast::FnDef>, - pub(super) use_item_syntax: Option<&'a ast::UseItem>, - pub(super) struct_lit_syntax: Option<&'a ast::StructLit>, + pub(super) function_syntax: Option, + pub(super) use_item_syntax: Option, + pub(super) struct_lit_syntax: Option, pub(super) is_param: bool, /// If a name-binding or reference to a const in a pattern. /// Irrefutable patterns (like let) are excluded. @@ -35,7 +35,7 @@ pub(crate) struct CompletionContext<'a> { /// Something is typed at the "top" level, in module or impl/trait. pub(super) is_new_item: bool, /// The receiver if this is a field or method access, i.e. writing something.<|> - pub(super) dot_receiver: Option<&'a ast::Expr>, + pub(super) dot_receiver: Option, /// If this is a call (method or function) in particular, i.e. the () are already there. 
pub(super) is_call: bool, } @@ -50,7 +50,7 @@ impl<'a> CompletionContext<'a> { let token = find_token_at_offset(original_parse.tree().syntax(), position.offset).left_biased()?; let analyzer = - hir::SourceAnalyzer::new(db, position.file_id, token.parent(), Some(position.offset)); + hir::SourceAnalyzer::new(db, position.file_id, &token.parent(), Some(position.offset)); let mut ctx = CompletionContext { db, analyzer, @@ -109,7 +109,7 @@ impl<'a> CompletionContext<'a> { if is_node::(name.syntax()) { let bind_pat = name.syntax().ancestors().find_map(ast::BindPat::cast).unwrap(); let parent = bind_pat.syntax().parent(); - if parent.and_then(ast::MatchArm::cast).is_some() + if parent.clone().and_then(ast::MatchArm::cast).is_some() || parent.and_then(ast::Condition::cast).is_some() { self.is_pat_binding = true; @@ -122,7 +122,7 @@ impl<'a> CompletionContext<'a> { } } - fn classify_name_ref(&mut self, original_file: &'a SourceFile, name_ref: &ast::NameRef) { + fn classify_name_ref(&mut self, original_file: SourceFile, name_ref: ast::NameRef) { let name_range = name_ref.syntax().range(); if name_ref.syntax().parent().and_then(ast::NamedField::cast).is_some() { self.struct_lit_syntax = find_node_at_offset(original_file.syntax(), self.offset); @@ -153,7 +153,7 @@ impl<'a> CompletionContext<'a> { None => return, }; - if let Some(segment) = ast::PathSegment::cast(parent) { + if let Some(segment) = ast::PathSegment::cast(parent.clone()) { let path = segment.parent_path(); self.is_call = path .syntax() @@ -162,7 +162,7 @@ impl<'a> CompletionContext<'a> { .and_then(|it| it.syntax().parent().and_then(ast::CallExpr::cast)) .is_some(); - if let Some(mut path) = hir::Path::from_ast(path) { + if let Some(mut path) = hir::Path::from_ast(path.clone()) { if !path.is_ident() { path.segments.pop().unwrap(); self.path_prefix = Some(path); @@ -179,7 +179,7 @@ impl<'a> CompletionContext<'a> { .syntax() .ancestors() .find_map(|node| { - if let Some(stmt) = ast::ExprStmt::cast(node) { + if let 
Some(stmt) = ast::ExprStmt::cast(node.clone()) { return Some(stmt.syntax().range() == name_ref.syntax().range()); } if let Some(block) = ast::Block::cast(node) { @@ -203,7 +203,7 @@ impl<'a> CompletionContext<'a> { } } } - if let Some(field_expr) = ast::FieldExpr::cast(parent) { + if let Some(field_expr) = ast::FieldExpr::cast(parent.clone()) { // The receiver comes before the point of insertion of the fake // ident, so it should have the same range in the non-modified file self.dot_receiver = field_expr @@ -222,7 +222,7 @@ impl<'a> CompletionContext<'a> { } } -fn find_node_with_range(syntax: &SyntaxNode, range: TextRange) -> Option<&N> { +fn find_node_with_range(syntax: &SyntaxNode, range: TextRange) -> Option { find_covering_element(syntax, range).ancestors().find_map(N::cast) } diff --git a/crates/ra_ide_api/src/diagnostics.rs b/crates/ra_ide_api/src/diagnostics.rs index 3f5b9e0a0de..affbad6cd05 100644 --- a/crates/ra_ide_api/src/diagnostics.rs +++ b/crates/ra_ide_api/src/diagnostics.rs @@ -35,8 +35,8 @@ pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec })); for node in parse.tree().syntax().descendants() { - check_unnecessary_braces_in_use_statement(&mut res, file_id, node); - check_struct_shorthand_initialization(&mut res, file_id, node); + check_unnecessary_braces_in_use_statement(&mut res, file_id, &node); + check_struct_shorthand_initialization(&mut res, file_id, &node); } let res = RefCell::new(res); let mut sink = DiagnosticSink::new(|d| { @@ -60,7 +60,7 @@ pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec }) .on::(|d| { let node = d.ast(db); - let mut ast_editor = AstEditor::new(&*node); + let mut ast_editor = AstEditor::new(node); for f in d.missed_fields.iter() { ast_editor.append_field(&AstBuilder::::from_name(f)); } @@ -94,11 +94,11 @@ fn check_unnecessary_braces_in_use_statement( file_id: FileId, node: &SyntaxNode, ) -> Option<()> { - let use_tree_list = ast::UseTreeList::cast(node)?; + let use_tree_list = 
ast::UseTreeList::cast(node.clone())?; if let Some((single_use_tree,)) = use_tree_list.use_trees().collect_tuple() { let range = use_tree_list.syntax().range(); let edit = - text_edit_for_remove_unnecessary_braces_with_self_in_use_statement(single_use_tree) + text_edit_for_remove_unnecessary_braces_with_self_in_use_statement(&single_use_tree) .unwrap_or_else(|| { let to_replace = single_use_tree.syntax().text().to_string(); let mut edit_builder = TextEditBuilder::default(); @@ -141,7 +141,7 @@ fn check_struct_shorthand_initialization( file_id: FileId, node: &SyntaxNode, ) -> Option<()> { - let struct_lit = ast::StructLit::cast(node)?; + let struct_lit = ast::StructLit::cast(node.clone())?; let named_field_list = struct_lit.named_field_list()?; for named_field in named_field_list.fields() { if let (Some(name_ref), Some(expr)) = (named_field.name_ref(), named_field.expr()) { @@ -184,7 +184,7 @@ mod tests { let parse = SourceFile::parse(code); let mut diagnostics = Vec::new(); for node in parse.tree().syntax().descendants() { - func(&mut diagnostics, FileId(0), node); + func(&mut diagnostics, FileId(0), &node); } assert!(diagnostics.is_empty()); } @@ -193,7 +193,7 @@ mod tests { let parse = SourceFile::parse(before); let mut diagnostics = Vec::new(); for node in parse.tree().syntax().descendants() { - func(&mut diagnostics, FileId(0), node); + func(&mut diagnostics, FileId(0), &node); } let diagnostic = diagnostics.pop().unwrap_or_else(|| panic!("no diagnostics for:\n{}\n", before)); diff --git a/crates/ra_ide_api/src/display/function_signature.rs b/crates/ra_ide_api/src/display/function_signature.rs index e7ad5a0d19c..644a4532b61 100644 --- a/crates/ra_ide_api/src/display/function_signature.rs +++ b/crates/ra_ide_api/src/display/function_signature.rs @@ -38,7 +38,7 @@ impl FunctionSignature { pub(crate) fn from_hir(db: &db::RootDatabase, function: hir::Function) -> Self { let doc = function.docs(db); let ast_node = function.source(db).ast; - 
FunctionSignature::from(&*ast_node).with_doc_opt(doc) + FunctionSignature::from(&ast_node).with_doc_opt(doc) } } diff --git a/crates/ra_ide_api/src/display/navigation_target.rs b/crates/ra_ide_api/src/display/navigation_target.rs index 20a8d418e90..8cc853dd143 100644 --- a/crates/ra_ide_api/src/display/navigation_target.rs +++ b/crates/ra_ide_api/src/display/navigation_target.rs @@ -5,7 +5,7 @@ use ra_syntax::{ ast::{self, DocCommentsOwner}, AstNode, AstPtr, SmolStr, SyntaxKind::{self, NAME}, - SyntaxNode, TextRange, TreeArc, + SyntaxNode, TextRange, }; use super::short_label::ShortLabel; @@ -169,7 +169,7 @@ impl NavigationTarget { let file_id = src.file_id.original_file(db); match src.ast { FieldSource::Named(it) => { - NavigationTarget::from_named(file_id, &*it, it.doc_comment_text(), it.short_label()) + NavigationTarget::from_named(file_id, &it, it.doc_comment_text(), it.short_label()) } FieldSource::Pos(it) => { NavigationTarget::from_syntax(file_id, "".into(), None, it.syntax(), None, None) @@ -179,13 +179,13 @@ impl NavigationTarget { pub(crate) fn from_def_source(db: &RootDatabase, def: D) -> NavigationTarget where - D: HasSource>, + D: HasSource, A: ast::DocCommentsOwner + ast::NameOwner + ShortLabel, { let src = def.source(db); NavigationTarget::from_named( src.file_id.original_file(db), - &*src.ast, + &src.ast, src.ast.doc_comment_text(), src.ast.short_label(), ) @@ -249,7 +249,7 @@ impl NavigationTarget { log::debug!("nav target {}", src.ast.syntax().debug_dump()); NavigationTarget::from_named( src.file_id.original_file(db), - &*src.ast, + &src.ast, src.ast.doc_comment_text(), None, ) @@ -318,22 +318,18 @@ pub(crate) fn docs_from_symbol(db: &RootDatabase, symbol: &FileSymbol) -> Option let parse = db.parse(symbol.file_id); let node = symbol.ptr.to_node(parse.tree().syntax()).to_owned(); - fn doc_comments(node: &N) -> Option { - node.doc_comment_text() - } - visitor() - .visit(doc_comments::) - .visit(doc_comments::) - .visit(doc_comments::) - 
.visit(doc_comments::) - .visit(doc_comments::) - .visit(doc_comments::) - .visit(doc_comments::) - .visit(doc_comments::) - .visit(doc_comments::) - .visit(doc_comments::) - .visit(doc_comments::) + .visit(|it: ast::FnDef| it.doc_comment_text()) + .visit(|it: ast::StructDef| it.doc_comment_text()) + .visit(|it: ast::EnumDef| it.doc_comment_text()) + .visit(|it: ast::TraitDef| it.doc_comment_text()) + .visit(|it: ast::Module| it.doc_comment_text()) + .visit(|it: ast::TypeAliasDef| it.doc_comment_text()) + .visit(|it: ast::ConstDef| it.doc_comment_text()) + .visit(|it: ast::StaticDef| it.doc_comment_text()) + .visit(|it: ast::NamedFieldDef| it.doc_comment_text()) + .visit(|it: ast::EnumVariant| it.doc_comment_text()) + .visit(|it: ast::MacroCall| it.doc_comment_text()) .accept(&node)? } @@ -345,15 +341,15 @@ pub(crate) fn description_from_symbol(db: &RootDatabase, symbol: &FileSymbol) -> let node = symbol.ptr.to_node(parse.tree().syntax()).to_owned(); visitor() - .visit(|node: &ast::FnDef| node.short_label()) - .visit(|node: &ast::StructDef| node.short_label()) - .visit(|node: &ast::EnumDef| node.short_label()) - .visit(|node: &ast::TraitDef| node.short_label()) - .visit(|node: &ast::Module| node.short_label()) - .visit(|node: &ast::TypeAliasDef| node.short_label()) - .visit(|node: &ast::ConstDef| node.short_label()) - .visit(|node: &ast::StaticDef| node.short_label()) - .visit(|node: &ast::NamedFieldDef| node.short_label()) - .visit(|node: &ast::EnumVariant| node.short_label()) + .visit(|node: ast::FnDef| node.short_label()) + .visit(|node: ast::StructDef| node.short_label()) + .visit(|node: ast::EnumDef| node.short_label()) + .visit(|node: ast::TraitDef| node.short_label()) + .visit(|node: ast::Module| node.short_label()) + .visit(|node: ast::TypeAliasDef| node.short_label()) + .visit(|node: ast::ConstDef| node.short_label()) + .visit(|node: ast::StaticDef| node.short_label()) + .visit(|node: ast::NamedFieldDef| node.short_label()) + .visit(|node: 
ast::EnumVariant| node.short_label()) .accept(&node)? } diff --git a/crates/ra_ide_api/src/display/structure.rs b/crates/ra_ide_api/src/display/structure.rs index 638484a9ba1..2e183d2f60f 100644 --- a/crates/ra_ide_api/src/display/structure.rs +++ b/crates/ra_ide_api/src/display/structure.rs @@ -24,14 +24,14 @@ pub fn file_structure(file: &SourceFile) -> Vec { for event in file.syntax().preorder() { match event { WalkEvent::Enter(node) => { - if let Some(mut symbol) = structure_node(node) { + if let Some(mut symbol) = structure_node(&node) { symbol.parent = stack.last().copied(); stack.push(res.len()); res.push(symbol); } } WalkEvent::Leave(node) => { - if structure_node(node).is_some() { + if structure_node(&node).is_some() { stack.pop().unwrap(); } } @@ -41,19 +41,20 @@ pub fn file_structure(file: &SourceFile) -> Vec { } fn structure_node(node: &SyntaxNode) -> Option { - fn decl(node: &N) -> Option { + fn decl(node: N) -> Option { decl_with_detail(node, None) } fn decl_with_ascription( - node: &N, + node: N, ) -> Option { - decl_with_type_ref(node, node.ascribed_type()) + let ty = node.ascribed_type(); + decl_with_type_ref(node, ty) } fn decl_with_type_ref( - node: &N, - type_ref: Option<&ast::TypeRef>, + node: N, + type_ref: Option, ) -> Option { let detail = type_ref.map(|type_ref| { let mut detail = String::new(); @@ -64,7 +65,7 @@ fn structure_node(node: &SyntaxNode) -> Option { } fn decl_with_detail( - node: &N, + node: N, detail: Option, ) -> Option { let name = node.name()?; @@ -82,22 +83,24 @@ fn structure_node(node: &SyntaxNode) -> Option { fn collapse_ws(node: &SyntaxNode, output: &mut String) { let mut can_insert_ws = false; - for line in node.text().chunks().flat_map(|chunk| chunk.lines()) { - let line = line.trim(); - if line.is_empty() { - if can_insert_ws { - output.push_str(" "); - can_insert_ws = false; + for chunk in node.text().chunks() { + for line in chunk.lines() { + let line = line.trim(); + if line.is_empty() { + if can_insert_ws { + 
output.push_str(" "); + can_insert_ws = false; + } + } else { + output.push_str(line); + can_insert_ws = true; } - } else { - output.push_str(line); - can_insert_ws = true; } } } visitor() - .visit(|fn_def: &ast::FnDef| { + .visit(|fn_def: ast::FnDef| { let mut detail = String::from("fn"); if let Some(type_param_list) = fn_def.type_param_list() { collapse_ws(type_param_list.syntax(), &mut detail); @@ -117,11 +120,14 @@ fn structure_node(node: &SyntaxNode) -> Option { .visit(decl::) .visit(decl::) .visit(decl::) - .visit(|td: &ast::TypeAliasDef| decl_with_type_ref(td, td.type_ref())) + .visit(|td: ast::TypeAliasDef| { + let ty = td.type_ref(); + decl_with_type_ref(td, ty) + }) .visit(decl_with_ascription::) .visit(decl_with_ascription::) .visit(decl_with_ascription::) - .visit(|im: &ast::ImplBlock| { + .visit(|im: ast::ImplBlock| { let target_type = im.target_type()?; let target_trait = im.target_trait(); let label = match target_trait { @@ -142,14 +148,14 @@ fn structure_node(node: &SyntaxNode) -> Option { }; Some(node) }) - .visit(|mc: &ast::MacroCall| { + .visit(|mc: ast::MacroCall| { let first_token = mc.syntax().first_token().unwrap(); if first_token.text().as_str() != "macro_rules" { return None; } decl(mc) }) - .accept(node)? + .accept(&node)? 
} #[cfg(test)] diff --git a/crates/ra_ide_api/src/extend_selection.rs b/crates/ra_ide_api/src/extend_selection.rs index 491b1570208..8c49960f529 100644 --- a/crates/ra_ide_api/src/extend_selection.rs +++ b/crates/ra_ide_api/src/extend_selection.rs @@ -42,7 +42,7 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option return None, TokenAtOffset::Single(l) => { if string_kinds.contains(&l.kind()) { - extend_single_word_in_comment_or_string(l, offset).unwrap_or_else(|| l.range()) + extend_single_word_in_comment_or_string(&l, offset).unwrap_or_else(|| l.range()) } else { l.range() } @@ -56,7 +56,7 @@ fn try_extend_selection(root: &SyntaxNode, range: TextRange) -> Option Option Option Option { let text: &str = leaf.text(); @@ -131,9 +131,9 @@ fn extend_ws(root: &SyntaxNode, ws: SyntaxToken, offset: TextUnit) -> TextRange ws.range() } -fn pick_best<'a>(l: SyntaxToken<'a>, r: SyntaxToken<'a>) -> SyntaxToken<'a> { - return if priority(r) > priority(l) { r } else { l }; - fn priority(n: SyntaxToken) -> usize { +fn pick_best<'a>(l: SyntaxToken, r: SyntaxToken) -> SyntaxToken { + return if priority(&r) > priority(&l) { r } else { l }; + fn priority(n: &SyntaxToken) -> usize { match n.kind() { WHITESPACE => 0, IDENT | T![self] | T![super] | T![crate] | LIFETIME => 2, @@ -156,7 +156,7 @@ fn extend_list_item(node: &SyntaxNode) -> Option { SyntaxElement::Token(it) => is_single_line_ws(it), }) .next() - .and_then(|it| it.as_token()) + .and_then(|it| it.as_token().cloned()) .filter(|node| node.kind() == T![,]) } @@ -167,7 +167,7 @@ fn extend_list_item(node: &SyntaxNode) -> Option { // Include any following whitespace when comma if after list item. 
let final_node = comma_node .next_sibling_or_token() - .and_then(|it| it.as_token()) + .and_then(|it| it.as_token().cloned()) .filter(|node| is_single_line_ws(node)) .unwrap_or(comma_node); @@ -178,8 +178,8 @@ fn extend_list_item(node: &SyntaxNode) -> Option { } fn extend_comments(comment: ast::Comment) -> Option { - let prev = adj_comments(comment, Direction::Prev); - let next = adj_comments(comment, Direction::Next); + let prev = adj_comments(&comment, Direction::Prev); + let next = adj_comments(&comment, Direction::Next); if prev != next { Some(TextRange::from_to(prev.syntax().range().start(), next.syntax().range().end())) } else { @@ -187,14 +187,14 @@ fn extend_comments(comment: ast::Comment) -> Option { } } -fn adj_comments(comment: ast::Comment, dir: Direction) -> ast::Comment { - let mut res = comment; +fn adj_comments(comment: &ast::Comment, dir: Direction) -> ast::Comment { + let mut res = comment.clone(); for element in comment.syntax().siblings_with_tokens(dir) { let token = match element.as_token() { None => break, Some(token) => token, }; - if let Some(c) = ast::Comment::cast(token) { + if let Some(c) = ast::Comment::cast(token.clone()) { res = c } else if token.kind() != WHITESPACE || token.text().contains("\n\n") { break; diff --git a/crates/ra_ide_api/src/folding_ranges.rs b/crates/ra_ide_api/src/folding_ranges.rs index 9d4855a6489..c2b981aedc1 100644 --- a/crates/ra_ide_api/src/folding_ranges.rs +++ b/crates/ra_ide_api/src/folding_ranges.rs @@ -30,7 +30,7 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec { for element in file.syntax().descendants_with_tokens() { // Fold items that span multiple lines if let Some(kind) = fold_kind(element.kind()) { - let is_multiline = match element { + let is_multiline = match &element { SyntaxElement::Node(node) => node.text().contains('\n'), SyntaxElement::Token(token) => token.text().contains('\n'), }; @@ -56,7 +56,7 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec { SyntaxElement::Node(node) 
=> { // Fold groups of imports if node.kind() == USE_ITEM && !visited_imports.contains(&node) { - if let Some(range) = contiguous_range_for_group(node, &mut visited_imports) { + if let Some(range) = contiguous_range_for_group(&node, &mut visited_imports) { res.push(Fold { range, kind: FoldKind::Imports }) } } @@ -65,7 +65,7 @@ pub(crate) fn folding_ranges(file: &SourceFile) -> Vec { if node.kind() == MODULE && !has_visibility(&node) && !visited_mods.contains(&node) { if let Some(range) = - contiguous_range_for_group_unless(node, has_visibility, &mut visited_mods) + contiguous_range_for_group_unless(&node, has_visibility, &mut visited_mods) { res.push(Fold { range, kind: FoldKind::Mods }) } @@ -88,24 +88,24 @@ fn fold_kind(kind: SyntaxKind) -> Option { } fn has_visibility(node: &SyntaxNode) -> bool { - ast::Module::cast(node).and_then(|m| m.visibility()).is_some() + ast::Module::cast(node.clone()).and_then(|m| m.visibility()).is_some() } -fn contiguous_range_for_group<'a>( - first: &'a SyntaxNode, - visited: &mut FxHashSet<&'a SyntaxNode>, +fn contiguous_range_for_group( + first: &SyntaxNode, + visited: &mut FxHashSet, ) -> Option { contiguous_range_for_group_unless(first, |_| false, visited) } -fn contiguous_range_for_group_unless<'a>( - first: &'a SyntaxNode, - unless: impl Fn(&'a SyntaxNode) -> bool, - visited: &mut FxHashSet<&'a SyntaxNode>, +fn contiguous_range_for_group_unless( + first: &SyntaxNode, + unless: impl Fn(&SyntaxNode) -> bool, + visited: &mut FxHashSet, ) -> Option { - visited.insert(first); + visited.insert(first.clone()); - let mut last = first; + let mut last = first.clone(); for element in first.siblings_with_tokens(Direction::Next) { let node = match element { SyntaxElement::Token(token) => { @@ -123,15 +123,15 @@ fn contiguous_range_for_group_unless<'a>( }; // Stop if we find a node that doesn't belong to the group - if node.kind() != first.kind() || unless(node) { + if node.kind() != first.kind() || unless(&node) { break; } - 
visited.insert(node); + visited.insert(node.clone()); last = node; } - if first != last { + if first != &last { Some(TextRange::from_to(first.range().start(), last.range().end())) } else { // The group consists of only one element, therefore it cannot be folded @@ -139,11 +139,11 @@ fn contiguous_range_for_group_unless<'a>( } } -fn contiguous_range_for_comment<'a>( - first: ast::Comment<'a>, - visited: &mut FxHashSet>, +fn contiguous_range_for_comment( + first: ast::Comment, + visited: &mut FxHashSet, ) -> Option { - visited.insert(first); + visited.insert(first.clone()); // Only fold comments of the same flavor let group_kind = first.kind(); @@ -151,11 +151,11 @@ fn contiguous_range_for_comment<'a>( return None; } - let mut last = first; + let mut last = first.clone(); for element in first.syntax().siblings_with_tokens(Direction::Next) { match element { SyntaxElement::Token(token) => { - if let Some(ws) = ast::Whitespace::cast(token) { + if let Some(ws) = ast::Whitespace::cast(token.clone()) { if !ws.spans_multiple_lines() { // Ignore whitespace without blank lines continue; @@ -163,7 +163,7 @@ fn contiguous_range_for_comment<'a>( } if let Some(c) = ast::Comment::cast(token) { if c.kind() == group_kind { - visited.insert(c); + visited.insert(c.clone()); last = c; continue; } @@ -193,7 +193,7 @@ mod tests { fn do_check(text: &str, fold_kinds: &[FoldKind]) { let (ranges, text) = extract_ranges(text, "fold"); let parse = SourceFile::parse(&text); - let folds = folding_ranges(parse.tree()); + let folds = folding_ranges(&parse.tree()); assert_eq!( folds.len(), diff --git a/crates/ra_ide_api/src/goto_definition.rs b/crates/ra_ide_api/src/goto_definition.rs index 1066bf1557a..82b5e3b5e17 100644 --- a/crates/ra_ide_api/src/goto_definition.rs +++ b/crates/ra_ide_api/src/goto_definition.rs @@ -20,13 +20,13 @@ pub(crate) fn goto_definition( position: FilePosition, ) -> Option>> { let parse = db.parse(position.file_id); - let syntax = parse.tree().syntax(); - if let 
Some(name_ref) = find_node_at_offset::(syntax, position.offset) { - let navs = reference_definition(db, position.file_id, name_ref).to_vec(); + let syntax = parse.tree().syntax().clone(); + if let Some(name_ref) = find_node_at_offset::(&syntax, position.offset) { + let navs = reference_definition(db, position.file_id, &name_ref).to_vec(); return Some(RangeInfo::new(name_ref.syntax().range(), navs.to_vec())); } - if let Some(name) = find_node_at_offset::(syntax, position.offset) { - let navs = name_definition(db, position.file_id, name)?; + if let Some(name) = find_node_at_offset::(&syntax, position.offset) { + let navs = name_definition(db, position.file_id, &name)?; return Some(RangeInfo::new(name.syntax().range(), navs)); } None @@ -94,7 +94,7 @@ pub(crate) fn name_definition( ) -> Option> { let parent = name.syntax().parent()?; - if let Some(module) = ast::Module::cast(&parent) { + if let Some(module) = ast::Module::cast(parent.clone()) { if module.has_semi() { if let Some(child_module) = hir::source_binder::module_from_declaration(db, file_id, module) @@ -114,38 +114,88 @@ pub(crate) fn name_definition( fn named_target(file_id: FileId, node: &SyntaxNode) -> Option { visitor() - .visit(|node: &ast::StructDef| { - NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label()) + .visit(|node: ast::StructDef| { + NavigationTarget::from_named( + file_id, + &node, + node.doc_comment_text(), + node.short_label(), + ) }) - .visit(|node: &ast::EnumDef| { - NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label()) + .visit(|node: ast::EnumDef| { + NavigationTarget::from_named( + file_id, + &node, + node.doc_comment_text(), + node.short_label(), + ) }) - .visit(|node: &ast::EnumVariant| { - NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label()) + .visit(|node: ast::EnumVariant| { + NavigationTarget::from_named( + file_id, + &node, + node.doc_comment_text(), + node.short_label(), + ) 
}) - .visit(|node: &ast::FnDef| { - NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label()) + .visit(|node: ast::FnDef| { + NavigationTarget::from_named( + file_id, + &node, + node.doc_comment_text(), + node.short_label(), + ) }) - .visit(|node: &ast::TypeAliasDef| { - NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label()) + .visit(|node: ast::TypeAliasDef| { + NavigationTarget::from_named( + file_id, + &node, + node.doc_comment_text(), + node.short_label(), + ) }) - .visit(|node: &ast::ConstDef| { - NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label()) + .visit(|node: ast::ConstDef| { + NavigationTarget::from_named( + file_id, + &node, + node.doc_comment_text(), + node.short_label(), + ) }) - .visit(|node: &ast::StaticDef| { - NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label()) + .visit(|node: ast::StaticDef| { + NavigationTarget::from_named( + file_id, + &node, + node.doc_comment_text(), + node.short_label(), + ) }) - .visit(|node: &ast::TraitDef| { - NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label()) + .visit(|node: ast::TraitDef| { + NavigationTarget::from_named( + file_id, + &node, + node.doc_comment_text(), + node.short_label(), + ) }) - .visit(|node: &ast::NamedFieldDef| { - NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label()) + .visit(|node: ast::NamedFieldDef| { + NavigationTarget::from_named( + file_id, + &node, + node.doc_comment_text(), + node.short_label(), + ) }) - .visit(|node: &ast::Module| { - NavigationTarget::from_named(file_id, node, node.doc_comment_text(), node.short_label()) + .visit(|node: ast::Module| { + NavigationTarget::from_named( + file_id, + &node, + node.doc_comment_text(), + node.short_label(), + ) }) - .visit(|node: &ast::MacroCall| { - NavigationTarget::from_named(file_id, node, node.doc_comment_text(), None) + 
.visit(|node: ast::MacroCall| { + NavigationTarget::from_named(file_id, &node, node.doc_comment_text(), None) }) .accept(node) } diff --git a/crates/ra_ide_api/src/goto_type_definition.rs b/crates/ra_ide_api/src/goto_type_definition.rs index 6ce5e214f06..fc4b6e1af59 100644 --- a/crates/ra_ide_api/src/goto_type_definition.rs +++ b/crates/ra_ide_api/src/goto_type_definition.rs @@ -13,15 +13,17 @@ pub(crate) fn goto_type_definition( token .parent() .ancestors() - .find(|n| ast::Expr::cast(*n).is_some() || ast::Pat::cast(*n).is_some()) + .find(|n| ast::Expr::cast(n.clone()).is_some() || ast::Pat::cast(n.clone()).is_some()) })?; - let analyzer = hir::SourceAnalyzer::new(db, position.file_id, node, None); + let analyzer = hir::SourceAnalyzer::new(db, position.file_id, &node, None); - let ty: hir::Ty = if let Some(ty) = ast::Expr::cast(node).and_then(|e| analyzer.type_of(db, e)) + let ty: hir::Ty = if let Some(ty) = + ast::Expr::cast(node.clone()).and_then(|e| analyzer.type_of(db, &e)) { ty - } else if let Some(ty) = ast::Pat::cast(node).and_then(|p| analyzer.type_of_pat(db, p)) { + } else if let Some(ty) = ast::Pat::cast(node.clone()).and_then(|p| analyzer.type_of_pat(db, &p)) + { ty } else { return None; diff --git a/crates/ra_ide_api/src/hover.rs b/crates/ra_ide_api/src/hover.rs index 253d21f48d2..e503bf6a96a 100644 --- a/crates/ra_ide_api/src/hover.rs +++ b/crates/ra_ide_api/src/hover.rs @@ -6,7 +6,7 @@ use ra_syntax::{ visit::{visitor, Visitor}, }, ast::{self, DocCommentsOwner}, - AstNode, TreeArc, + AstNode, }; use crate::{ @@ -104,7 +104,7 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option res.extend(from_def_source(db, it)), Some(Macro(it)) => { let src = it.source(db); @@ -163,7 +163,7 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option Option(file.syntax(), position.offset) { if let Some(parent) = name.syntax().parent() { let text = visitor() - .visit(|node: &ast::StructDef| { + .visit(|node: ast::StructDef| { 
hover_text(node.doc_comment_text(), node.short_label()) }) - .visit(|node: &ast::EnumDef| { + .visit(|node: ast::EnumDef| hover_text(node.doc_comment_text(), node.short_label())) + .visit(|node: ast::EnumVariant| { hover_text(node.doc_comment_text(), node.short_label()) }) - .visit(|node: &ast::EnumVariant| { + .visit(|node: ast::FnDef| hover_text(node.doc_comment_text(), node.short_label())) + .visit(|node: ast::TypeAliasDef| { hover_text(node.doc_comment_text(), node.short_label()) }) - .visit(|node: &ast::FnDef| hover_text(node.doc_comment_text(), node.short_label())) - .visit(|node: &ast::TypeAliasDef| { + .visit(|node: ast::ConstDef| { hover_text(node.doc_comment_text(), node.short_label()) }) - .visit(|node: &ast::ConstDef| { + .visit(|node: ast::StaticDef| { hover_text(node.doc_comment_text(), node.short_label()) }) - .visit(|node: &ast::StaticDef| { + .visit(|node: ast::TraitDef| { hover_text(node.doc_comment_text(), node.short_label()) }) - .visit(|node: &ast::TraitDef| { + .visit(|node: ast::NamedFieldDef| { hover_text(node.doc_comment_text(), node.short_label()) }) - .visit(|node: &ast::NamedFieldDef| { - hover_text(node.doc_comment_text(), node.short_label()) - }) - .visit(|node: &ast::Module| hover_text(node.doc_comment_text(), node.short_label())) - .visit(|node: &ast::MacroCall| hover_text(node.doc_comment_text(), None)) - .accept(parent); + .visit(|node: ast::Module| hover_text(node.doc_comment_text(), node.short_label())) + .visit(|node: ast::MacroCall| hover_text(node.doc_comment_text(), None)) + .accept(&parent); if let Some(text) = text { res.extend(text); @@ -217,8 +215,9 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option Option(db: &RootDatabase, def: D) -> Option where - D: HasSource>, + D: HasSource, A: ast::DocCommentsOwner + ast::NameOwner + ShortLabel, { let src = def.source(db); @@ -243,17 +242,17 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option Option { let parse = 
db.parse(frange.file_id); - let syntax = parse.tree().syntax(); - let leaf_node = find_covering_element(syntax, frange.range); + let leaf_node = find_covering_element(parse.tree().syntax(), frange.range); // if we picked identifier, expand to pattern/expression let node = leaf_node .ancestors() .take_while(|it| it.range() == leaf_node.range()) - .find(|&it| ast::Expr::cast(it).is_some() || ast::Pat::cast(it).is_some())?; - let analyzer = hir::SourceAnalyzer::new(db, frange.file_id, node, None); - let ty = if let Some(ty) = ast::Expr::cast(node).and_then(|e| analyzer.type_of(db, e)) { + .find(|it| ast::Expr::cast(it.clone()).is_some() || ast::Pat::cast(it.clone()).is_some())?; + let analyzer = hir::SourceAnalyzer::new(db, frange.file_id, &node, None); + let ty = if let Some(ty) = ast::Expr::cast(node.clone()).and_then(|e| analyzer.type_of(db, &e)) + { ty - } else if let Some(ty) = ast::Pat::cast(node).and_then(|p| analyzer.type_of_pat(db, p)) { + } else if let Some(ty) = ast::Pat::cast(node).and_then(|p| analyzer.type_of_pat(db, &p)) { ty } else { return None; diff --git a/crates/ra_ide_api/src/impls.rs b/crates/ra_ide_api/src/impls.rs index 6d69f36aa97..15999d4331a 100644 --- a/crates/ra_ide_api/src/impls.rs +++ b/crates/ra_ide_api/src/impls.rs @@ -9,19 +9,19 @@ pub(crate) fn goto_implementation( position: FilePosition, ) -> Option>> { let parse = db.parse(position.file_id); - let syntax = parse.tree().syntax(); + let syntax = parse.tree().syntax().clone(); let module = source_binder::module_from_position(db, position)?; - if let Some(nominal_def) = find_node_at_offset::(syntax, position.offset) { + if let Some(nominal_def) = find_node_at_offset::(&syntax, position.offset) { return Some(RangeInfo::new( nominal_def.syntax().range(), - impls_for_def(db, nominal_def, module)?, + impls_for_def(db, &nominal_def, module)?, )); - } else if let Some(trait_def) = find_node_at_offset::(syntax, position.offset) { + } else if let Some(trait_def) = 
find_node_at_offset::(&syntax, position.offset) { return Some(RangeInfo::new( trait_def.syntax().range(), - impls_for_trait(db, trait_def, module)?, + impls_for_trait(db, &trait_def, module)?, )); } diff --git a/crates/ra_ide_api/src/join_lines.rs b/crates/ra_ide_api/src/join_lines.rs index e20cb137081..50bcfb5b76a 100644 --- a/crates/ra_ide_api/src/join_lines.rs +++ b/crates/ra_ide_api/src/join_lines.rs @@ -27,7 +27,7 @@ pub fn join_lines(file: &SourceFile, range: TextRange) -> TextEdit { SyntaxElement::Token(token) => token.parent(), }; let mut edit = TextEditBuilder::default(); - for token in node.descendants_with_tokens().filter_map(|it| it.as_token()) { + for token in node.descendants_with_tokens().filter_map(|it| it.as_token().cloned()) { let range = match range.intersection(&token.range()) { Some(range) => range, None => continue, @@ -37,7 +37,7 @@ pub fn join_lines(file: &SourceFile, range: TextRange) -> TextEdit { let pos: TextUnit = (pos as u32).into(); let off = token.range().start() + range.start() + pos; if !edit.invalidates_offset(off) { - remove_newline(&mut edit, token, off); + remove_newline(&mut edit, &token, off); } } } @@ -45,7 +45,7 @@ pub fn join_lines(file: &SourceFile, range: TextRange) -> TextEdit { edit.finish() } -fn remove_newline(edit: &mut TextEditBuilder, token: SyntaxToken, offset: TextUnit) { +fn remove_newline(edit: &mut TextEditBuilder, token: &SyntaxToken, offset: TextUnit) { if token.kind() != WHITESPACE || token.text().bytes().filter(|&b| b == b'\n').count() != 1 { // The node is either the first or the last in the file let suff = &token.text()[TextRange::from_to( @@ -98,9 +98,10 @@ fn remove_newline(edit: &mut TextEditBuilder, token: SyntaxToken, offset: TextUn TextRange::from_to(prev.range().start(), token.range().end()), space.to_string(), ); - } else if let (Some(_), Some(next)) = - (prev.as_token().and_then(ast::Comment::cast), next.as_token().and_then(ast::Comment::cast)) - { + } else if let (Some(_), Some(next)) = ( + 
prev.as_token().cloned().and_then(ast::Comment::cast), + next.as_token().cloned().and_then(ast::Comment::cast), + ) { // Removes: newline (incl. surrounding whitespace), start of the next comment edit.delete(TextRange::from_to( token.range().start(), @@ -113,16 +114,16 @@ fn remove_newline(edit: &mut TextEditBuilder, token: SyntaxToken, offset: TextUn } fn has_comma_after(node: &SyntaxNode) -> bool { - match non_trivia_sibling(node.into(), Direction::Next) { + match non_trivia_sibling(node.clone().into(), Direction::Next) { Some(n) => n.kind() == T![,], _ => false, } } -fn join_single_expr_block(edit: &mut TextEditBuilder, token: SyntaxToken) -> Option<()> { +fn join_single_expr_block(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Option<()> { let block = ast::Block::cast(token.parent())?; let block_expr = ast::BlockExpr::cast(block.syntax().parent()?)?; - let expr = extract_trivial_expression(block)?; + let expr = extract_trivial_expression(&block)?; let block_range = block_expr.syntax().range(); let mut buf = expr.syntax().text().to_string(); @@ -139,7 +140,7 @@ fn join_single_expr_block(edit: &mut TextEditBuilder, token: SyntaxToken) -> Opt Some(()) } -fn join_single_use_tree(edit: &mut TextEditBuilder, token: SyntaxToken) -> Option<()> { +fn join_single_use_tree(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Option<()> { let use_tree_list = ast::UseTreeList::cast(token.parent())?; let (tree,) = use_tree_list.use_trees().collect_tuple()?; edit.replace(use_tree_list.syntax().range(), tree.syntax().text().to_string()); @@ -504,7 +505,7 @@ fn foo() { fn check_join_lines_sel(before: &str, after: &str) { let (sel, before) = extract_range(before); let parse = SourceFile::parse(&before); - let result = join_lines(parse.tree(), sel); + let result = join_lines(&parse.tree(), sel); let actual = result.apply(&before); assert_eq_text!(after, &actual); } diff --git a/crates/ra_ide_api/src/lib.rs b/crates/ra_ide_api/src/lib.rs index 9f3b18d9d15..c54d574bcc5 100644 
--- a/crates/ra_ide_api/src/lib.rs +++ b/crates/ra_ide_api/src/lib.rs @@ -50,7 +50,7 @@ use ra_db::{ salsa::{self, ParallelDatabase}, CheckCanceled, SourceDatabase, }; -use ra_syntax::{SourceFile, TextRange, TextUnit, TreeArc}; +use ra_syntax::{SourceFile, TextRange, TextUnit}; use ra_text_edit::TextEdit; use relative_path::RelativePathBuf; @@ -325,8 +325,8 @@ impl Analysis { } /// Gets the syntax tree of the file. - pub fn parse(&self, file_id: FileId) -> TreeArc { - self.db.parse(file_id).tree().to_owned() + pub fn parse(&self, file_id: FileId) -> SourceFile { + self.db.parse(file_id).tree() } /// Gets the file's `LineIndex`: data structure to convert between absolute @@ -360,7 +360,7 @@ impl Analysis { let parse = self.db.parse(frange.file_id); let file_edit = SourceFileEdit { file_id: frange.file_id, - edit: join_lines::join_lines(parse.tree(), frange.range), + edit: join_lines::join_lines(&parse.tree(), frange.range), }; SourceChange::source_file_edit("join lines", file_edit) } @@ -393,13 +393,13 @@ impl Analysis { /// file outline. pub fn file_structure(&self, file_id: FileId) -> Vec { let parse = self.db.parse(file_id); - file_structure(parse.tree()) + file_structure(&parse.tree()) } /// Returns the set of folding ranges. pub fn folding_ranges(&self, file_id: FileId) -> Vec { let parse = self.db.parse(file_id); - folding_ranges::folding_ranges(parse.tree()) + folding_ranges::folding_ranges(&parse.tree()) } /// Fuzzy searches for a symbol. 
diff --git a/crates/ra_ide_api/src/matching_brace.rs b/crates/ra_ide_api/src/matching_brace.rs index 455a5c891b6..102327fd739 100644 --- a/crates/ra_ide_api/src/matching_brace.rs +++ b/crates/ra_ide_api/src/matching_brace.rs @@ -26,7 +26,7 @@ mod tests { fn do_check(before: &str, after: &str) { let (pos, before) = extract_offset(before); let parse = SourceFile::parse(&before); - let new_pos = match matching_brace(parse.tree(), pos) { + let new_pos = match matching_brace(&parse.tree(), pos) { None => pos, Some(pos) => pos, }; diff --git a/crates/ra_ide_api/src/name_ref_kind.rs b/crates/ra_ide_api/src/name_ref_kind.rs index 67381c9c8dd..6832acf5dd2 100644 --- a/crates/ra_ide_api/src/name_ref_kind.rs +++ b/crates/ra_ide_api/src/name_ref_kind.rs @@ -26,7 +26,7 @@ pub(crate) fn classify_name_ref( // Check if it is a method if let Some(method_call) = name_ref.syntax().parent().and_then(ast::MethodCallExpr::cast) { tested_by!(goto_definition_works_for_methods); - if let Some(func) = analyzer.resolve_method_call(method_call) { + if let Some(func) = analyzer.resolve_method_call(&method_call) { return Some(Method(func)); } } @@ -40,7 +40,7 @@ pub(crate) fn classify_name_ref( .and_then(ast::MacroCall::cast) { tested_by!(goto_definition_works_for_macros); - if let Some(mac) = analyzer.resolve_macro_call(db, macro_call) { + if let Some(mac) = analyzer.resolve_macro_call(db, &macro_call) { return Some(Macro(mac)); } } @@ -48,7 +48,7 @@ pub(crate) fn classify_name_ref( // It could also be a field access if let Some(field_expr) = name_ref.syntax().parent().and_then(ast::FieldExpr::cast) { tested_by!(goto_definition_works_for_fields); - if let Some(field) = analyzer.resolve_field(field_expr) { + if let Some(field) = analyzer.resolve_field(&field_expr) { return Some(FieldAccess(field)); }; } @@ -59,7 +59,7 @@ pub(crate) fn classify_name_ref( let struct_lit = field_expr.syntax().ancestors().find_map(ast::StructLit::cast); - if let Some(ty) = struct_lit.and_then(|lit| analyzer.type_of(db, 
lit.into())) { + if let Some(ty) = struct_lit.and_then(|lit| analyzer.type_of(db, &lit.into())) { if let Some((hir::AdtDef::Struct(s), _)) = ty.as_adt() { let hir_path = hir::Path::from_name_ref(name_ref); let hir_name = hir_path.as_ident().unwrap(); @@ -73,7 +73,7 @@ pub(crate) fn classify_name_ref( // General case, a path or a local: if let Some(path) = name_ref.syntax().ancestors().find_map(ast::Path::cast) { - if let Some(resolved) = analyzer.resolve_path(db, path) { + if let Some(resolved) = analyzer.resolve_path(db, &path) { return match resolved { hir::PathResolution::Def(def) => Some(Def(def)), hir::PathResolution::LocalBinding(Either::A(pat)) => Some(Pat(pat)), diff --git a/crates/ra_ide_api/src/references.rs b/crates/ra_ide_api/src/references.rs index 0af1ae811fb..1c4cd49dcce 100644 --- a/crates/ra_ide_api/src/references.rs +++ b/crates/ra_ide_api/src/references.rs @@ -50,11 +50,11 @@ pub(crate) fn find_all_refs( position: FilePosition, ) -> Option { let parse = db.parse(position.file_id); - let (binding, analyzer) = find_binding(db, parse.tree(), position)?; - let declaration = NavigationTarget::from_bind_pat(position.file_id, binding); + let (binding, analyzer) = find_binding(db, &parse.tree(), position)?; + let declaration = NavigationTarget::from_bind_pat(position.file_id, &binding); let references = analyzer - .find_all_refs(binding) + .find_all_refs(&binding) .into_iter() .map(move |ref_desc| FileRange { file_id: position.file_id, range: ref_desc.range }) .collect::>(); @@ -63,9 +63,9 @@ pub(crate) fn find_all_refs( fn find_binding<'a>( db: &RootDatabase, - source_file: &'a SourceFile, + source_file: &SourceFile, position: FilePosition, - ) -> Option<(&'a ast::BindPat, hir::SourceAnalyzer)> { + ) -> Option<(ast::BindPat, hir::SourceAnalyzer)> { let syntax = source_file.syntax(); if let Some(binding) = find_node_at_offset::(syntax, position.offset) { let analyzer = hir::SourceAnalyzer::new(db, position.file_id, binding.syntax(), None); @@ -73,7 +73,7 
@@ pub(crate) fn find_all_refs( }; let name_ref = find_node_at_offset::(syntax, position.offset)?; let analyzer = hir::SourceAnalyzer::new(db, position.file_id, name_ref.syntax(), None); - let resolved = analyzer.resolve_local_name(name_ref)?; + let resolved = analyzer.resolve_local_name(&name_ref)?; if let Either::A(ptr) = resolved.ptr() { if let ast::PatKind::BindPat(binding) = ptr.to_node(source_file.syntax()).kind() { return Some((binding, analyzer)); @@ -89,10 +89,10 @@ pub(crate) fn rename( new_name: &str, ) -> Option { let parse = db.parse(position.file_id); - let syntax = parse.tree().syntax(); - - if let Some((ast_name, ast_module)) = find_name_and_module_at_offset(syntax, position) { - rename_mod(db, ast_name, ast_module, position, new_name) + if let Some((ast_name, ast_module)) = + find_name_and_module_at_offset(parse.tree().syntax(), position) + { + rename_mod(db, &ast_name, &ast_module, position, new_name) } else { rename_reference(db, position, new_name) } @@ -101,14 +101,10 @@ pub(crate) fn rename( fn find_name_and_module_at_offset( syntax: &SyntaxNode, position: FilePosition, -) -> Option<(&ast::Name, &ast::Module)> { - let ast_name = find_node_at_offset::(syntax, position.offset); - let ast_name_parent = ast::Module::cast(ast_name?.syntax().parent()?); - - if let (Some(ast_module), Some(name)) = (ast_name_parent, ast_name) { - return Some((name, ast_module)); - } - None +) -> Option<(ast::Name, ast::Module)> { + let ast_name = find_node_at_offset::(syntax, position.offset)?; + let ast_module = ast::Module::cast(ast_name.syntax().parent()?)?; + Some((ast_name, ast_module)) } fn source_edit_from_fileid_range( @@ -135,7 +131,8 @@ fn rename_mod( ) -> Option { let mut source_file_edits = Vec::new(); let mut file_system_edits = Vec::new(); - if let Some(module) = source_binder::module_from_declaration(db, position.file_id, &ast_module) + if let Some(module) = + source_binder::module_from_declaration(db, position.file_id, ast_module.clone()) { let src = 
module.definition_source(db); let file_id = src.file_id.as_original_file(); diff --git a/crates/ra_ide_api/src/runnables.rs b/crates/ra_ide_api/src/runnables.rs index 8cb859b3735..2009584341f 100644 --- a/crates/ra_ide_api/src/runnables.rs +++ b/crates/ra_ide_api/src/runnables.rs @@ -26,8 +26,8 @@ pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec { parse.tree().syntax().descendants().filter_map(|i| runnable(db, file_id, i)).collect() } -fn runnable(db: &RootDatabase, file_id: FileId, item: &SyntaxNode) -> Option { - if let Some(fn_def) = ast::FnDef::cast(item) { +fn runnable(db: &RootDatabase, file_id: FileId, item: SyntaxNode) -> Option { + if let Some(fn_def) = ast::FnDef::cast(item.clone()) { runnable_fn(fn_def) } else if let Some(m) = ast::Module::cast(item) { runnable_mod(db, file_id, m) @@ -36,8 +36,8 @@ fn runnable(db: &RootDatabase, file_id: FileId, item: &SyntaxNode) -> Option Option { - let name = fn_def.name()?.text(); +fn runnable_fn(fn_def: ast::FnDef) -> Option { + let name = fn_def.name()?.text().clone(); let kind = if name == "main" { RunnableKind::Bin } else if fn_def.has_atom_attr("test") { @@ -50,7 +50,7 @@ fn runnable_fn(fn_def: &ast::FnDef) -> Option { Some(Runnable { range: fn_def.syntax().range(), kind }) } -fn runnable_mod(db: &RootDatabase, file_id: FileId, module: &ast::Module) -> Option { +fn runnable_mod(db: &RootDatabase, file_id: FileId, module: ast::Module) -> Option { let has_test_function = module .item_list()? 
.items() diff --git a/crates/ra_ide_api/src/status.rs b/crates/ra_ide_api/src/status.rs index a31e1524547..d533d1742c4 100644 --- a/crates/ra_ide_api/src/status.rs +++ b/crates/ra_ide_api/src/status.rs @@ -104,7 +104,7 @@ impl FromIterator>>> for SyntaxTr let mut res = SyntaxTreeStats::default(); for entry in iter { res.total += 1; - if let Some(tree) = entry.value.and_then(|it| it).map(|it| it.tree().to_owned()) { + if let Some(tree) = entry.value.and_then(|it| it).map(|it| it.syntax_node()) { res.retained += 1; res.retained_size += tree.memory_size_of_subtree(); } diff --git a/crates/ra_ide_api/src/symbol_index.rs b/crates/ra_ide_api/src/symbol_index.rs index 9b3a45319b7..e784b5f69a1 100644 --- a/crates/ra_ide_api/src/symbol_index.rs +++ b/crates/ra_ide_api/src/symbol_index.rs @@ -61,7 +61,7 @@ fn file_symbols(db: &impl SymbolsDatabase, file_id: FileId) -> Arc db.check_canceled(); let parse = db.parse(file_id); - let symbols = source_file_to_file_symbols(parse.tree(), file_id); + let symbols = source_file_to_file_symbols(&parse.tree(), file_id); // FIXME: add macros here @@ -173,7 +173,7 @@ impl SymbolIndex { files: impl ParallelIterator)>, ) -> SymbolIndex { let symbols = files - .flat_map(|(file_id, file)| source_file_to_file_symbols(file.tree(), file_id)) + .flat_map(|(file_id, file)| source_file_to_file_symbols(&file.tree(), file_id)) .collect::>(); SymbolIndex::new(symbols) } @@ -249,7 +249,7 @@ fn source_file_to_file_symbols(source_file: &SourceFile, file_id: FileId) -> Vec for event in source_file.syntax().preorder() { match event { WalkEvent::Enter(node) => { - if let Some(mut symbol) = to_file_symbol(node, file_id) { + if let Some(mut symbol) = to_file_symbol(&node, file_id) { symbol.container_name = stack.last().cloned(); stack.push(symbol.name.clone()); @@ -258,7 +258,7 @@ fn source_file_to_file_symbols(source_file: &SourceFile, file_id: FileId) -> Vec } WalkEvent::Leave(node) => { - if to_symbol(node).is_some() { + if to_symbol(&node).is_some() { 
stack.pop(); } } @@ -269,7 +269,7 @@ fn source_file_to_file_symbols(source_file: &SourceFile, file_id: FileId) -> Vec } fn to_symbol(node: &SyntaxNode) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> { - fn decl(node: &N) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> { + fn decl(node: N) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> { let name = node.name()?; let name_range = name.syntax().range(); let name = name.text().clone(); diff --git a/crates/ra_ide_api/src/syntax_highlighting.rs b/crates/ra_ide_api/src/syntax_highlighting.rs index eb392d5da7b..16a7287899f 100644 --- a/crates/ra_ide_api/src/syntax_highlighting.rs +++ b/crates/ra_ide_api/src/syntax_highlighting.rs @@ -31,8 +31,8 @@ fn is_control_keyword(kind: SyntaxKind) -> bool { } } -fn is_variable_mutable(db: &RootDatabase, analyzer: &hir::SourceAnalyzer, pat: &ast::Pat) -> bool { - let ty = analyzer.type_of_pat(db, pat).unwrap_or(Ty::Unknown); +fn is_variable_mutable(db: &RootDatabase, analyzer: &hir::SourceAnalyzer, pat: ast::Pat) -> bool { + let ty = analyzer.type_of_pat(db, &pat).unwrap_or(Ty::Unknown); let is_ty_mut = { if let Some((_, mutability)) = ty.as_reference() { match mutability { @@ -55,7 +55,7 @@ fn is_variable_mutable(db: &RootDatabase, analyzer: &hir::SourceAnalyzer, pat: & pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec { let _p = profile("highlight"); let parse = db.parse(file_id); - let root = parse.tree().syntax(); + let root = parse.tree().syntax().clone(); fn calc_binding_hash(file_id: FileId, text: &SmolStr, shadow_count: u32) -> u64 { fn hash(x: T) -> u64 { @@ -70,6 +70,7 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec = FxHashSet::default(); let mut bindings_shadow_count: FxHashMap = FxHashMap::default(); @@ -84,14 +85,14 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec "string", ATTR => "attribute", NAME_REF => { - if let Some(name_ref) = node.as_node().and_then(ast::NameRef::cast) { + if let Some(name_ref) = 
node.as_node().cloned().and_then(ast::NameRef::cast) { // FIXME: revisit this after #1340 use crate::name_ref_kind::{classify_name_ref, NameRefKind::*}; use hir::{ImplItem, ModuleDef}; // FIXME: try to reuse the SourceAnalyzers let analyzer = hir::SourceAnalyzer::new(db, file_id, name_ref.syntax(), None); - match classify_name_ref(db, &analyzer, name_ref) { + match classify_name_ref(db, &analyzer, &name_ref) { Some(Method(_)) => "function", Some(Macro(_)) => "macro", Some(FieldAccess(_)) => "field", @@ -113,13 +114,13 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec { binding_hash = Some({ let text = - ptr.syntax_node_ptr().to_node(root).text().to_smol_string(); + ptr.syntax_node_ptr().to_node(&root).text().to_smol_string(); let shadow_count = bindings_shadow_count.entry(text.clone()).or_default(); calc_binding_hash(file_id, &text, *shadow_count) }); - if is_variable_mutable(db, &analyzer, ptr.to_node(root)) { + if is_variable_mutable(db, &analyzer, ptr.to_node(&root)) { "variable.mut" } else { "variable" @@ -134,7 +135,7 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec { - if let Some(name) = node.as_node().and_then(ast::Name::cast) { + if let Some(name) = node.as_node().cloned().and_then(ast::Name::cast) { let analyzer = hir::SourceAnalyzer::new(db, file_id, name.syntax(), None); if let Some(pat) = name.syntax().ancestors().find_map(ast::Pat::cast) { binding_hash = Some({ @@ -176,12 +177,11 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec "keyword.control", k if k.is_keyword() => "keyword", _ => { - // let analyzer = hir::SourceAnalyzer::new(db, file_id, name_ref.syntax(), None); - if let Some(macro_call) = node.as_node().and_then(ast::MacroCall::cast) { + if let Some(macro_call) = node.as_node().cloned().and_then(ast::MacroCall::cast) { if let Some(path) = macro_call.path() { if let Some(segment) = path.segment() { if let Some(name_ref) = segment.name_ref() { - 
highlighted.insert(name_ref.syntax().into()); + highlighted.insert(name_ref.syntax().clone().into()); let range_start = name_ref.syntax().range().start(); let mut range_end = name_ref.syntax().range().end(); for sibling in path.syntax().siblings_with_tokens(Direction::Next) { @@ -230,7 +230,8 @@ pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: boo let mut buf = String::new(); buf.push_str(&STYLE); buf.push_str("
");
-    let tokens = parse.tree().syntax().descendants_with_tokens().filter_map(|it| it.as_token());
+    let tokens =
+        parse.tree().syntax().descendants_with_tokens().filter_map(|it| it.as_token().cloned());
     for token in tokens {
         could_intersect.retain(|it| token.range().start() <= it.range.end());
         while let Some(r) = ranges.get(frontier) {
diff --git a/crates/ra_ide_api/src/syntax_tree.rs b/crates/ra_ide_api/src/syntax_tree.rs
index b3e08c04111..3d7373d02bc 100644
--- a/crates/ra_ide_api/src/syntax_tree.rs
+++ b/crates/ra_ide_api/src/syntax_tree.rs
@@ -18,7 +18,7 @@ pub(crate) fn syntax_tree(
         let node = match algo::find_covering_element(parse.tree().syntax(), text_range) {
             SyntaxElement::Node(node) => node,
             SyntaxElement::Token(token) => {
-                if let Some(tree) = syntax_tree_for_string(token, text_range) {
+                if let Some(tree) = syntax_tree_for_string(&token, text_range) {
                     return tree;
                 }
                 token.parent()
@@ -33,7 +33,7 @@ pub(crate) fn syntax_tree(
 
 /// Attempts parsing the selected contents of a string literal
 /// as rust syntax and returns its syntax tree
-fn syntax_tree_for_string(token: SyntaxToken, text_range: TextRange) -> Option<String> {
+fn syntax_tree_for_string(token: &SyntaxToken, text_range: TextRange) -> Option<String> {
     // When the range is inside a string
     // we'll attempt parsing it as rust syntax
     // to provide the syntax tree of the contents of the string
@@ -43,7 +43,7 @@ fn syntax_tree_for_string(token: SyntaxToken, text_range: TextRange) -> Option<S
-fn syntax_tree_for_token(node: SyntaxToken, text_range: TextRange) -> Option<String> {
+fn syntax_tree_for_token(node: &SyntaxToken, text_range: TextRange) -> Option<String> {
     // Range of the full node
     let node_range = node.range();
     let text = node.text().to_string();
diff --git a/crates/ra_ide_api/src/typing.rs b/crates/ra_ide_api/src/typing.rs
index 01eb32b2fb5..ad0ababcc0a 100644
--- a/crates/ra_ide_api/src/typing.rs
+++ b/crates/ra_ide_api/src/typing.rs
@@ -1,15 +1,16 @@
-use crate::{db::RootDatabase, SourceChange, SourceFileEdit};
 use ra_db::{FilePosition, SourceDatabase};
 use ra_fmt::leading_indent;
 use ra_syntax::{
     algo::{find_node_at_offset, find_token_at_offset, TokenAtOffset},
     ast::{self, AstToken},
-    AstNode, SourceFile,
+    AstNode, SmolStr, SourceFile,
     SyntaxKind::*,
     SyntaxToken, TextRange, TextUnit,
 };
 use ra_text_edit::{TextEdit, TextEditBuilder};
 
+use crate::{db::RootDatabase, SourceChange, SourceFileEdit};
+
 pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option {
     let parse = db.parse(position.file_id);
     let file = parse.tree();
@@ -43,15 +44,15 @@ pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<Sour
-fn node_indent<'a>(file: &'a SourceFile, token: SyntaxToken) -> Option<&'a str> {
+fn node_indent(file: &SourceFile, token: &SyntaxToken) -> Option<SmolStr> {
     let ws = match find_token_at_offset(file.syntax(), token.range().start()) {
         TokenAtOffset::Between(l, r) => {
-            assert!(r == token);
+            assert!(r == *token);
             l
         }
         TokenAtOffset::Single(n) => {
-            assert!(n == token);
-            return Some("");
+            assert!(n == *token);
+            return Some("".into());
         }
         TokenAtOffset::None => unreachable!(),
     };
@@ -60,12 +61,12 @@ fn node_indent<'a>(file: &'a SourceFile, token: SyntaxToken) -> Option<&'a str>
     }
     let text = ws.text();
     let pos = text.rfind('\n').map(|it| it + 1).unwrap_or(0);
-    Some(&text[pos..])
+    Some(text[pos..].into())
 }
 
 pub fn on_eq_typed(file: &SourceFile, eq_offset: TextUnit) -> Option {
     assert_eq!(file.syntax().text().char_at(eq_offset), Some('='));
-    let let_stmt: &ast::LetStmt = find_node_at_offset(file.syntax(), eq_offset)?;
+    let let_stmt: ast::LetStmt = find_node_at_offset(file.syntax(), eq_offset)?;
     if let_stmt.has_semi() {
         return None;
     }
@@ -141,7 +142,7 @@ mod tests {
             edit.insert(offset, "=".to_string());
             let before = edit.finish().apply(&before);
             let parse = SourceFile::parse(&before);
-            if let Some(result) = on_eq_typed(parse.tree(), offset) {
+            if let Some(result) = on_eq_typed(&parse.tree(), offset) {
                 let actual = result.apply(&before);
                 assert_eq_text!(after, &actual);
             } else {
diff --git a/crates/ra_mbe/src/mbe_expander.rs b/crates/ra_mbe/src/mbe_expander.rs
index c7c06c7fd64..f185aecb718 100644
--- a/crates/ra_mbe/src/mbe_expander.rs
+++ b/crates/ra_mbe/src/mbe_expander.rs
@@ -599,7 +599,8 @@ mod tests {
         let macro_definition =
             source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
 
-        let (definition_tt, _) = ast_to_token_tree(macro_definition.token_tree().unwrap()).unwrap();
+        let (definition_tt, _) =
+            ast_to_token_tree(&macro_definition.token_tree().unwrap()).unwrap();
         crate::MacroRules::parse(&definition_tt).unwrap()
     }
 
@@ -611,7 +612,8 @@ mod tests {
         let macro_invocation =
             source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
 
-        let (invocation_tt, _) = ast_to_token_tree(macro_invocation.token_tree().unwrap()).unwrap();
+        let (invocation_tt, _) =
+            ast_to_token_tree(&macro_invocation.token_tree().unwrap()).unwrap();
 
         expand_rule(&rules.rules[0], &invocation_tt)
     }
diff --git a/crates/ra_mbe/src/mbe_parser.rs b/crates/ra_mbe/src/mbe_parser.rs
index cddb4a7b4c9..954b84d9d7d 100644
--- a/crates/ra_mbe/src/mbe_parser.rs
+++ b/crates/ra_mbe/src/mbe_parser.rs
@@ -179,7 +179,8 @@ mod tests {
         let macro_definition =
             source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
 
-        let (definition_tt, _) = ast_to_token_tree(macro_definition.token_tree().unwrap()).unwrap();
+        let (definition_tt, _) =
+            ast_to_token_tree(&macro_definition.token_tree().unwrap()).unwrap();
         parse(&definition_tt)
     }
 
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
index bfc351f81a6..64ed6a517da 100644
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ b/crates/ra_mbe/src/syntax_bridge.rs
@@ -2,7 +2,7 @@ use crate::subtree_source::SubtreeTokenSource;
 use crate::ExpandError;
 use ra_parser::{ParseError, TreeSink};
 use ra_syntax::{
-    ast, AstNode, Parse, SmolStr, SyntaxElement, SyntaxKind, SyntaxKind::*, SyntaxNode,
+    ast, AstNode, AstToken, Parse, SmolStr, SyntaxElement, SyntaxKind, SyntaxKind::*, SyntaxNode,
     SyntaxTreeBuilder, TextRange, TextUnit, T,
 };
 use tt::buffer::{Cursor, TokenBuffer};
@@ -116,8 +116,6 @@ impl TokenMap {
 /// and strips the ending `*/`
 /// And then quote the string, which is needed to convert to `tt::Literal`
 fn doc_comment_text(comment: &ast::Comment) -> SmolStr {
-    use ast::AstToken;
-
     let prefix_len = comment.prefix().len();
     let mut text = &comment.text()[prefix_len..];
 
@@ -132,9 +130,8 @@ fn doc_comment_text(comment: &ast::Comment) -> SmolStr {
     text.into()
 }
 
-fn convert_doc_comment<'a>(token: &ra_syntax::SyntaxToken<'a>) -> Option<Vec<tt::TokenTree>> {
-    use ast::AstToken;
-    let comment = ast::Comment::cast(*token)?;
+fn convert_doc_comment(token: &ra_syntax::SyntaxToken) -> Option<Vec<tt::TokenTree>> {
+    let comment = ast::Comment::cast(token.clone())?;
     let doc = comment.kind().doc?;
 
     // Make `doc="\" Comments\""
@@ -245,7 +242,7 @@ fn convert_tt(
                 }
             }
             SyntaxElement::Node(node) => {
-                let child = convert_tt(token_map, global_offset, node)?.into();
+                let child = convert_tt(token_map, global_offset, &node)?.into();
                 token_trees.push(child);
             }
         };
diff --git a/crates/ra_mbe/src/tests.rs b/crates/ra_mbe/src/tests.rs
index 419b2c099ca..38a31109d9f 100644
--- a/crates/ra_mbe/src/tests.rs
+++ b/crates/ra_mbe/src/tests.rs
@@ -37,8 +37,8 @@ impl_froms!(TokenTree: Leaf, Subtree);
     let macro_invocation =
         source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
 
-    let (definition_tt, _) = ast_to_token_tree(macro_definition.token_tree().unwrap()).unwrap();
-    let (invocation_tt, _) = ast_to_token_tree(macro_invocation.token_tree().unwrap()).unwrap();
+    let (definition_tt, _) = ast_to_token_tree(¯o_definition.token_tree().unwrap()).unwrap();
+    let (invocation_tt, _) = ast_to_token_tree(¯o_invocation.token_tree().unwrap()).unwrap();
     let rules = crate::MacroRules::parse(&definition_tt).unwrap();
     let expansion = rules.expand(&invocation_tt).unwrap();
     assert_eq!(
@@ -53,7 +53,7 @@ pub(crate) fn create_rules(macro_definition: &str) -> MacroRules {
     let macro_definition =
         source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
 
-    let (definition_tt, _) = ast_to_token_tree(macro_definition.token_tree().unwrap()).unwrap();
+    let (definition_tt, _) = ast_to_token_tree(&macro_definition.token_tree().unwrap()).unwrap();
     crate::MacroRules::parse(&definition_tt).unwrap()
 }
 
@@ -62,34 +62,25 @@ pub(crate) fn expand(rules: &MacroRules, invocation: &str) -> tt::Subtree {
     let macro_invocation =
         source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
 
-    let (invocation_tt, _) = ast_to_token_tree(macro_invocation.token_tree().unwrap()).unwrap();
+    let (invocation_tt, _) = ast_to_token_tree(&macro_invocation.token_tree().unwrap()).unwrap();
 
     rules.expand(&invocation_tt).unwrap()
 }
 
-pub(crate) fn expand_to_items(
-    rules: &MacroRules,
-    invocation: &str,
-) -> ra_syntax::TreeArc<ast::MacroItems> {
+pub(crate) fn expand_to_items(rules: &MacroRules, invocation: &str) -> ast::MacroItems {
     let expanded = expand(rules, invocation);
-    token_tree_to_macro_items(&expanded).unwrap().tree().to_owned()
+    token_tree_to_macro_items(&expanded).unwrap().tree()
 }
 
 #[allow(unused)]
-pub(crate) fn expand_to_stmts(
-    rules: &MacroRules,
-    invocation: &str,
-) -> ra_syntax::TreeArc<ast::MacroStmts> {
+pub(crate) fn expand_to_stmts(rules: &MacroRules, invocation: &str) -> ast::MacroStmts {
     let expanded = expand(rules, invocation);
-    token_tree_to_macro_stmts(&expanded).unwrap().tree().to_owned()
+    token_tree_to_macro_stmts(&expanded).unwrap().tree()
 }
 
-pub(crate) fn expand_to_expr(
-    rules: &MacroRules,
-    invocation: &str,
-) -> ra_syntax::TreeArc<ast::Expr> {
+pub(crate) fn expand_to_expr(rules: &MacroRules, invocation: &str) -> ast::Expr {
     let expanded = expand(rules, invocation);
-    token_tree_to_expr(&expanded).unwrap().tree().to_owned()
+    token_tree_to_expr(&expanded).unwrap().tree()
 }
 
 pub(crate) fn text_to_tokentree(text: &str) -> tt::Subtree {
@@ -97,7 +88,7 @@ pub(crate) fn text_to_tokentree(text: &str) -> tt::Subtree {
     let wrapped = format!("wrap_macro!( {} )", text);
     let wrapped = ast::SourceFile::parse(&wrapped);
     let wrapped = wrapped.tree().syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-    let mut wrapped = ast_to_token_tree(wrapped).unwrap().0;
+    let mut wrapped = ast_to_token_tree(&wrapped).unwrap().0;
     wrapped.delimiter = tt::Delimiter::None;
 
     wrapped
@@ -164,8 +155,8 @@ pub(crate) fn assert_expansion(
 
     let (expanded_tree, expected_tree) = match kind {
         MacroKind::Items => {
-            let expanded_tree = token_tree_to_macro_items(&expanded).unwrap().tree().to_owned();
-            let expected_tree = token_tree_to_macro_items(&expected).unwrap().tree().to_owned();
+            let expanded_tree = token_tree_to_macro_items(&expanded).unwrap().tree();
+            let expected_tree = token_tree_to_macro_items(&expected).unwrap().tree();
 
             (
                 debug_dump_ignore_spaces(expanded_tree.syntax()).trim().to_string(),
@@ -174,8 +165,8 @@ pub(crate) fn assert_expansion(
         }
 
         MacroKind::Stmts => {
-            let expanded_tree = token_tree_to_macro_stmts(&expanded).unwrap().tree().to_owned();
-            let expected_tree = token_tree_to_macro_stmts(&expected).unwrap().tree().to_owned();
+            let expanded_tree = token_tree_to_macro_stmts(&expanded).unwrap().tree();
+            let expected_tree = token_tree_to_macro_stmts(&expected).unwrap().tree();
 
             (
                 debug_dump_ignore_spaces(expanded_tree.syntax()).trim().to_string(),
@@ -419,7 +410,7 @@ fn test_expand_to_item_list() {
             ",
     );
     let expansion = expand(&rules, "structs!(Foo, Bar);");
-    let tree = token_tree_to_macro_items(&expansion).unwrap().tree().to_owned();
+    let tree = token_tree_to_macro_items(&expansion).unwrap().tree();
     assert_eq!(
         tree.syntax().debug_dump().trim(),
         r#"
@@ -537,7 +528,7 @@ fn test_tt_to_stmts() {
     );
 
     let expanded = expand(&rules, "foo!{}");
-    let stmts = token_tree_to_macro_stmts(&expanded).unwrap().tree().to_owned();
+    let stmts = token_tree_to_macro_stmts(&expanded).unwrap().tree();
 
     assert_eq!(
         stmts.syntax().debug_dump().trim(),
diff --git a/crates/ra_syntax/Cargo.toml b/crates/ra_syntax/Cargo.toml
index 37a91ea3588..a5565de33f1 100644
--- a/crates/ra_syntax/Cargo.toml
+++ b/crates/ra_syntax/Cargo.toml
@@ -10,7 +10,7 @@ repository = "https://github.com/rust-analyzer/rust-analyzer"
 [dependencies]
 unicode-xid = "0.1.0"
 itertools = "0.8.0"
-rowan = "0.5.0"
+rowan = "0.5.6"
 
 # ideally, `serde` should be enabled by `ra_lsp_server`, but we enable it here
 # to reduce number of compilations
diff --git a/crates/ra_syntax/src/algo.rs b/crates/ra_syntax/src/algo.rs
index fad8da13235..e2de5e0e39b 100644
--- a/crates/ra_syntax/src/algo.rs
+++ b/crates/ra_syntax/src/algo.rs
@@ -9,8 +9,8 @@ pub use rowan::TokenAtOffset;
 pub fn find_token_at_offset(node: &SyntaxNode, offset: TextUnit) -> TokenAtOffset {
     match node.0.token_at_offset(offset) {
         TokenAtOffset::None => TokenAtOffset::None,
-        TokenAtOffset::Single(n) => TokenAtOffset::Single(n.into()),
-        TokenAtOffset::Between(l, r) => TokenAtOffset::Between(l.into(), r.into()),
+        TokenAtOffset::Single(n) => TokenAtOffset::Single(SyntaxToken(n)),
+        TokenAtOffset::Between(l, r) => TokenAtOffset::Between(SyntaxToken(l), SyntaxToken(r)),
     }
 }
 
@@ -22,7 +22,7 @@ pub fn find_token_at_offset(node: &SyntaxNode, offset: TextUnit) -> TokenAtOffse
 pub fn ancestors_at_offset(
     node: &SyntaxNode,
     offset: TextUnit,
-) -> impl Iterator<Item = &SyntaxNode> {
+) -> impl Iterator<Item = SyntaxNode> {
     find_token_at_offset(node, offset)
         .map(|token| token.parent().ancestors())
         .kmerge_by(|node1, node2| node1.range().len() < node2.range().len())
@@ -37,7 +37,7 @@ pub fn ancestors_at_offset(
 /// ```
 ///
 /// then the shorter node will be silently preferred.
-pub fn find_node_at_offset<N: AstNode>(syntax: &SyntaxNode, offset: TextUnit) -> Option<&N> {
+pub fn find_node_at_offset<N: AstNode>(syntax: &SyntaxNode, offset: TextUnit) -> Option<N> {
     ancestors_at_offset(syntax, offset).find_map(N::cast)
 }
 
@@ -59,5 +59,5 @@ pub fn non_trivia_sibling(element: SyntaxElement, direction: Direction) -> Optio
 }
 
 pub fn find_covering_element(root: &SyntaxNode, range: TextRange) -> SyntaxElement {
-    root.0.covering_node(range).into()
+    SyntaxElement::new(root.0.covering_node(range))
 }
diff --git a/crates/ra_syntax/src/algo/visit.rs b/crates/ra_syntax/src/algo/visit.rs
index 81a99228fd2..87bd15cc0c5 100644
--- a/crates/ra_syntax/src/algo/visit.rs
+++ b/crates/ra_syntax/src/algo/visit.rs
@@ -16,7 +16,7 @@ pub trait Visitor<'a>: Sized {
     fn visit(self, f: F) -> Vis
     where
         N: AstNode + 'a,
-        F: FnOnce(&'a N) -> Self::Output,
+        F: FnOnce(N) -> Self::Output,
     {
         Vis { inner: self, f, ph: PhantomData }
     }
@@ -29,7 +29,7 @@ pub trait VisitorCtx<'a>: Sized {
     fn visit(self, f: F) -> VisCtx
     where
         N: AstNode + 'a,
-        F: FnOnce(&'a N, Self::Ctx) -> Self::Output,
+        F: FnOnce(N, Self::Ctx) -> Self::Output,
     {
         VisCtx { inner: self, f, ph: PhantomData }
     }
@@ -74,13 +74,13 @@ impl<'a, V, N, F> Visitor<'a> for Vis
 where
     V: Visitor<'a>,
     N: AstNode + 'a,
-    F: FnOnce(&'a N) -> <V as Visitor<'a>>::Output,
+    F: FnOnce(N) -> <V as Visitor<'a>>::Output,
 {
     type Output = >::Output;
 
     fn accept(self, node: &'a SyntaxNode) -> Option {
         let Vis { inner, f, .. } = self;
-        inner.accept(node).or_else(|| N::cast(node).map(f))
+        inner.accept(node).or_else(|| N::cast(node.clone()).map(f))
     }
 }
 
@@ -95,14 +95,14 @@ impl<'a, V, N, F> VisitorCtx<'a> for VisCtx
 where
     V: VisitorCtx<'a>,
     N: AstNode + 'a,
-    F: FnOnce(&'a N, <V as VisitorCtx<'a>>::Ctx) -> <V as VisitorCtx<'a>>::Output,
+    F: FnOnce(N, <V as VisitorCtx<'a>>::Ctx) -> <V as VisitorCtx<'a>>::Output,
 {
     type Output = >::Output;
     type Ctx = >::Ctx;
 
     fn accept(self, node: &'a SyntaxNode) -> Result {
         let VisCtx { inner, f, .. } = self;
-        inner.accept(node).or_else(|ctx| match N::cast(node) {
+        inner.accept(node).or_else(|ctx| match N::cast(node.clone()) {
             None => Err(ctx),
             Some(node) => Ok(f(node, ctx)),
         })
diff --git a/crates/ra_syntax/src/ast.rs b/crates/ra_syntax/src/ast.rs
index 3dcf39f7e16..ceb603c5052 100644
--- a/crates/ra_syntax/src/ast.rs
+++ b/crates/ra_syntax/src/ast.rs
@@ -9,7 +9,7 @@ mod expr_extensions;
 use std::marker::PhantomData;
 
 use crate::{
-    syntax_node::{SyntaxNode, SyntaxNodeChildren, SyntaxToken, TreeArc},
+    syntax_node::{SyntaxNode, SyntaxNodeChildren, SyntaxToken},
     SmolStr,
 };
 
@@ -25,51 +25,49 @@ pub use self::{
 /// conversion itself has zero runtime cost: ast and syntax nodes have exactly
 /// the same representation: a pointer to the tree root and a pointer to the
 /// node itself.
-pub trait AstNode:
-    rowan::TransparentNewType<Repr = rowan::SyntaxNode> + ToOwned<Owned = TreeArc<Self>>
-{
-    fn cast(syntax: &SyntaxNode) -> Option<&Self>
+pub trait AstNode: Clone {
+    fn cast(syntax: SyntaxNode) -> Option<Self>
     where
         Self: Sized;
     fn syntax(&self) -> &SyntaxNode;
 }
 
 /// Like `AstNode`, but wraps tokens rather than interior nodes.
-pub trait AstToken<'a> {
-    fn cast(token: SyntaxToken<'a>) -> Option<Self>
+pub trait AstToken {
+    fn cast(token: SyntaxToken) -> Option<Self>
     where
         Self: Sized;
-    fn syntax(&self) -> SyntaxToken<'a>;
-    fn text(&self) -> &'a SmolStr {
+    fn syntax(&self) -> &SyntaxToken;
+    fn text(&self) -> &SmolStr {
         self.syntax().text()
     }
 }
 
 /// An iterator over `SyntaxNode` children of a particular AST type.
 #[derive(Debug)]
-pub struct AstChildren<'a, N> {
-    inner: SyntaxNodeChildren<'a>,
+pub struct AstChildren<N> {
+    inner: SyntaxNodeChildren,
     ph: PhantomData,
 }
 
-impl<'a, N> AstChildren<'a, N> {
-    fn new(parent: &'a SyntaxNode) -> Self {
+impl<N> AstChildren<N> {
+    fn new(parent: &SyntaxNode) -> Self {
         AstChildren { inner: parent.children(), ph: PhantomData }
     }
 }
 
-impl<'a, N: AstNode + 'a> Iterator for AstChildren<'a, N> {
-    type Item = &'a N;
-    fn next(&mut self) -> Option<&'a N> {
+impl<N: AstNode> Iterator for AstChildren<N> {
+    type Item = N;
+    fn next(&mut self) -> Option<N> {
         self.inner.by_ref().find_map(N::cast)
     }
 }
 
-fn child_opt<P: AstNode, C: AstNode>(parent: &P) -> Option<&C> {
+fn child_opt<P: AstNode, C: AstNode>(parent: &P) -> Option<C> {
     children(parent).next()
 }
 
-fn children(parent: &P) -> AstChildren {
+fn children(parent: &P) -> AstChildren {
     AstChildren::new(parent.syntax())
 }
 
@@ -123,7 +121,7 @@ fn test_doc_comment_preserves_indents() {
 
 #[test]
 fn test_where_predicates() {
-    fn assert_bound(text: &str, bound: Option<&TypeBound>) {
+    fn assert_bound(text: &str, bound: Option<TypeBound>) {
         assert_eq!(text, bound.unwrap().syntax().text().to_string());
     }
 
diff --git a/crates/ra_syntax/src/ast/expr_extensions.rs b/crates/ra_syntax/src/ast/expr_extensions.rs
index 4355e35875c..ca1773908a1 100644
--- a/crates/ra_syntax/src/ast/expr_extensions.rs
+++ b/crates/ra_syntax/src/ast/expr_extensions.rs
@@ -8,20 +8,20 @@ use crate::{
 };
 
 #[derive(Debug, Clone, PartialEq, Eq)]
-pub enum ElseBranch<'a> {
-    Block(&'a ast::Block),
-    IfExpr(&'a ast::IfExpr),
+pub enum ElseBranch {
+    Block(ast::Block),
+    IfExpr(ast::IfExpr),
 }
 
 impl ast::IfExpr {
-    pub fn then_branch(&self) -> Option<&ast::Block> {
+    pub fn then_branch(&self) -> Option<ast::Block> {
         self.blocks().nth(0)
     }
     pub fn else_branch(&self) -> Option {
         let res = match self.blocks().nth(1) {
             Some(block) => ElseBranch::Block(block),
             None => {
-                let elif: &ast::IfExpr = child_opt(self)?;
+                let elif: ast::IfExpr = child_opt(self)?;
                 ElseBranch::IfExpr(elif)
             }
         };
@@ -60,7 +60,7 @@ impl ast::PrefixExpr {
     }
 
     pub fn op_token(&self) -> Option {
-        self.syntax().first_child_or_token()?.as_token()
+        self.syntax().first_child_or_token()?.as_token().cloned()
     }
 }
 
@@ -132,7 +132,7 @@ pub enum BinOp {
 
 impl ast::BinExpr {
     fn op_details(&self) -> Option<(SyntaxToken, BinOp)> {
-        self.syntax().children_with_tokens().filter_map(|it| it.as_token()).find_map(|c| {
+        self.syntax().children_with_tokens().filter_map(|it| it.as_token().cloned()).find_map(|c| {
             match c.kind() {
                 T![||] => Some((c, BinOp::BooleanOr)),
                 T![&&] => Some((c, BinOp::BooleanAnd)),
@@ -178,15 +178,15 @@ impl ast::BinExpr {
         self.op_details().map(|t| t.0)
     }
 
-    pub fn lhs(&self) -> Option<&ast::Expr> {
+    pub fn lhs(&self) -> Option<ast::Expr> {
         children(self).nth(0)
     }
 
-    pub fn rhs(&self) -> Option<&ast::Expr> {
+    pub fn rhs(&self) -> Option<ast::Expr> {
         children(self).nth(1)
     }
 
-    pub fn sub_exprs(&self) -> (Option<&ast::Expr>, Option<&ast::Expr>) {
+    pub fn sub_exprs(&self) -> (Option<ast::Expr>, Option<ast::Expr>) {
         let mut children = children(self);
         let first = children.next();
         let second = children.next();
@@ -194,9 +194,9 @@ impl ast::BinExpr {
     }
 }
 
-pub enum ArrayExprKind<'a> {
-    Repeat { initializer: Option<&'a ast::Expr>, repeat: Option<&'a ast::Expr> },
-    ElementList(AstChildren<'a, ast::Expr>),
+pub enum ArrayExprKind {
+    Repeat { initializer: Option<ast::Expr>, repeat: Option<ast::Expr> },
+    ElementList(AstChildren<ast::Expr>),
 }
 
 impl ast::ArrayExpr {
@@ -275,12 +275,12 @@ impl ast::Literal {
 #[test]
 fn test_literal_with_attr() {
     let parse = ast::SourceFile::parse(r#"const _: &str = { #[attr] "Hello" };"#);
-    let lit = parse.tree.syntax().descendants().find_map(ast::Literal::cast).unwrap();
+    let lit = parse.tree().syntax().descendants().find_map(ast::Literal::cast).unwrap();
     assert_eq!(lit.token().text(), r#""Hello""#);
 }
 
 impl ast::NamedField {
-    pub fn parent_struct_lit(&self) -> &ast::StructLit {
+    pub fn parent_struct_lit(&self) -> ast::StructLit {
         self.syntax().ancestors().find_map(ast::StructLit::cast).unwrap()
     }
 }
diff --git a/crates/ra_syntax/src/ast/extensions.rs b/crates/ra_syntax/src/ast/extensions.rs
index 72a30232df2..5420f67ff07 100644
--- a/crates/ra_syntax/src/ast/extensions.rs
+++ b/crates/ra_syntax/src/ast/extensions.rs
@@ -4,7 +4,7 @@
 use itertools::Itertools;
 
 use crate::{
-    ast::{self, child_opt, children, AstNode},
+    ast::{self, child_opt, children, AstNode, SyntaxNode},
     SmolStr, SyntaxElement,
     SyntaxKind::*,
     SyntaxToken, T,
@@ -13,15 +13,20 @@ use ra_parser::SyntaxKind;
 
 impl ast::Name {
     pub fn text(&self) -> &SmolStr {
-        let ident = self.syntax().first_child_or_token().unwrap().as_token().unwrap();
-        ident.text()
+        text_of_first_token(self.syntax())
     }
 }
 
 impl ast::NameRef {
     pub fn text(&self) -> &SmolStr {
-        let ident = self.syntax().first_child_or_token().unwrap().as_token().unwrap();
-        ident.text()
+        text_of_first_token(self.syntax())
+    }
+}
+
+fn text_of_first_token(node: &SyntaxNode) -> &SmolStr {
+    match node.0.green().children().first() {
+        Some(rowan::GreenElement::Token(it)) => it.text(),
+        _ => panic!(),
     }
 }
 
@@ -50,10 +55,10 @@ impl ast::Attr {
         }
     }
 
-    pub fn as_call(&self) -> Option<(SmolStr, &ast::TokenTree)> {
+    pub fn as_call(&self) -> Option<(SmolStr, ast::TokenTree)> {
         let tt = self.value()?;
         let (_bra, attr, args, _ket) = tt.syntax().children_with_tokens().collect_tuple()?;
-        let args = ast::TokenTree::cast(args.as_node()?)?;
+        let args = ast::TokenTree::cast(args.as_node()?.clone())?;
         if attr.kind() == IDENT {
             Some((attr.as_token()?.text().clone(), args))
         } else {
@@ -86,16 +91,16 @@ impl ast::Attr {
     }
 }
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq)]
-pub enum PathSegmentKind<'a> {
-    Name(&'a ast::NameRef),
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum PathSegmentKind {
+    Name(ast::NameRef),
     SelfKw,
     SuperKw,
     CrateKw,
 }
 
 impl ast::PathSegment {
-    pub fn parent_path(&self) -> &ast::Path {
+    pub fn parent_path(&self) -> ast::Path {
         self.syntax()
             .parent()
             .and_then(ast::Path::cast)
@@ -125,7 +130,7 @@ impl ast::PathSegment {
 }
 
 impl ast::Path {
-    pub fn parent_path(&self) -> Option<&ast::Path> {
+    pub fn parent_path(&self) -> Option<ast::Path> {
         self.syntax().parent().and_then(ast::Path::cast)
     }
 }
@@ -146,7 +151,7 @@ impl ast::UseTree {
 }
 
 impl ast::UseTreeList {
-    pub fn parent_use_tree(&self) -> &ast::UseTree {
+    pub fn parent_use_tree(&self) -> ast::UseTree {
         self.syntax()
             .parent()
             .and_then(ast::UseTree::cast)
@@ -155,21 +160,21 @@ impl ast::UseTreeList {
 }
 
 impl ast::ImplBlock {
-    pub fn target_type(&self) -> Option<&ast::TypeRef> {
+    pub fn target_type(&self) -> Option<ast::TypeRef> {
         match self.target() {
             (Some(t), None) | (_, Some(t)) => Some(t),
             _ => None,
         }
     }
 
-    pub fn target_trait(&self) -> Option<&ast::TypeRef> {
+    pub fn target_trait(&self) -> Option<ast::TypeRef> {
         match self.target() {
             (Some(t), Some(_)) => Some(t),
             _ => None,
         }
     }
 
-    fn target(&self) -> (Option<&ast::TypeRef>, Option<&ast::TypeRef>) {
+    fn target(&self) -> (Option<ast::TypeRef>, Option<ast::TypeRef>) {
         let mut types = children(self);
         let first = types.next();
         let second = types.next();
@@ -182,13 +187,13 @@ impl ast::ImplBlock {
 }
 
 #[derive(Debug, Clone, PartialEq, Eq)]
-pub enum StructKind<'a> {
-    Tuple(&'a ast::PosFieldDefList),
-    Named(&'a ast::NamedFieldDefList),
+pub enum StructKind {
+    Tuple(ast::PosFieldDefList),
+    Named(ast::NamedFieldDefList),
     Unit,
 }
 
-impl StructKind<'_> {
+impl StructKind {
     fn from_node<N: AstNode>(node: &N) -> StructKind {
         if let Some(nfdl) = child_opt::<_, ast::NamedFieldDefList>(node) {
             StructKind::Named(nfdl)
@@ -218,7 +223,7 @@ impl ast::StructDef {
 }
 
 impl ast::EnumVariant {
-    pub fn parent_enum(&self) -> &ast::EnumDef {
+    pub fn parent_enum(&self) -> ast::EnumDef {
         self.syntax()
             .parent()
             .and_then(|it| it.parent())
@@ -231,10 +236,10 @@ impl ast::EnumVariant {
 }
 
 impl ast::FnDef {
-    pub fn semicolon_token(&self) -> Option<SyntaxToken<'_>> {
+    pub fn semicolon_token(&self) -> Option<SyntaxToken> {
         self.syntax()
             .last_child_or_token()
-            .and_then(|it| it.as_token())
+            .and_then(|it| it.as_token().cloned())
             .filter(|it| it.kind() == T![;])
     }
 }
@@ -258,9 +263,9 @@ impl ast::ExprStmt {
 }
 
 #[derive(Debug, Clone, PartialEq, Eq)]
-pub enum FieldKind<'a> {
-    Name(&'a ast::NameRef),
-    Index(SyntaxToken<'a>),
+pub enum FieldKind {
+    Name(ast::NameRef),
+    Index(SyntaxToken),
 }
 
 impl ast::FieldExpr {
@@ -271,6 +276,7 @@ impl ast::FieldExpr {
             .find(|c| c.kind() == SyntaxKind::INT_NUMBER || c.kind() == SyntaxKind::FLOAT_NUMBER)
             .as_ref()
             .and_then(SyntaxElement::as_token)
+            .cloned()
     }
 
     pub fn field_access(&self) -> Option<FieldKind> {
@@ -326,7 +332,7 @@ impl ast::SelfParam {
     pub fn self_kw_token(&self) -> SyntaxToken {
         self.syntax()
             .children_with_tokens()
-            .filter_map(|it| it.as_token())
+            .filter_map(|it| it.as_token().cloned())
             .find(|it| it.kind() == T![self])
             .expect("invalid tree: self param must have self")
     }
@@ -355,7 +361,7 @@ impl ast::LifetimeParam {
     pub fn lifetime_token(&self) -> Option<SyntaxToken> {
         self.syntax()
             .children_with_tokens()
-            .filter_map(|it| it.as_token())
+            .filter_map(|it| it.as_token().cloned())
             .find(|it| it.kind() == LIFETIME)
     }
 }
@@ -364,7 +370,7 @@ impl ast::WherePred {
     pub fn lifetime_token(&self) -> Option<SyntaxToken> {
         self.syntax()
             .children_with_tokens()
-            .filter_map(|it| it.as_token())
+            .filter_map(|it| it.as_token().cloned())
             .find(|it| it.kind() == LIFETIME)
     }
 }
diff --git a/crates/ra_syntax/src/ast/generated.rs b/crates/ra_syntax/src/ast/generated.rs
index 1d888e70945..a1f32025789 100644
--- a/crates/ra_syntax/src/ast/generated.rs
+++ b/crates/ra_syntax/src/ast/generated.rs
@@ -9,503 +9,365 @@
 
 #![cfg_attr(rustfmt, rustfmt_skip)]
 
-use rowan::TransparentNewType;
-
 use crate::{
     SyntaxNode, SyntaxKind::*,
-    syntax_node::{TreeArc},
     ast::{self, AstNode},
 };
 
 // Alias
-#[derive(Debug, PartialEq, Eq, Hash)]
-#[repr(transparent)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct Alias {
     pub(crate) syntax: SyntaxNode,
 }
-unsafe impl TransparentNewType for Alias {
-    type Repr = rowan::SyntaxNode;
-}
 
 impl AstNode for Alias {
-    fn cast(syntax: &SyntaxNode) -> Option<&Self> {
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
         match syntax.kind() {
-            ALIAS => Some(Alias::from_repr(syntax.into_repr())),
+            ALIAS => Some(Alias { syntax }),
             _ => None,
         }
     }
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
 
-impl ToOwned for Alias {
-    type Owned = TreeArc<Alias>;
-    fn to_owned(&self) -> TreeArc<Alias> { TreeArc::cast(self.syntax.to_owned()) }
-}
-
 
 impl ast::NameOwner for Alias {}
 impl Alias {}
 
 // ArgList
-#[derive(Debug, PartialEq, Eq, Hash)]
-#[repr(transparent)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct ArgList {
     pub(crate) syntax: SyntaxNode,
 }
-unsafe impl TransparentNewType for ArgList {
-    type Repr = rowan::SyntaxNode;
-}
 
 impl AstNode for ArgList {
-    fn cast(syntax: &SyntaxNode) -> Option<&Self> {
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
         match syntax.kind() {
-            ARG_LIST => Some(ArgList::from_repr(syntax.into_repr())),
+            ARG_LIST => Some(ArgList { syntax }),
             _ => None,
         }
     }
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
 
-impl ToOwned for ArgList {
-    type Owned = TreeArc<ArgList>;
-    fn to_owned(&self) -> TreeArc<ArgList> { TreeArc::cast(self.syntax.to_owned()) }
-}
-
 
 impl ArgList {
-    pub fn args(&self) -> impl Iterator<Item = &Expr> {
+    pub fn args(&self) -> impl Iterator<Item = Expr> {
         super::children(self)
     }
 }
 
 // ArrayExpr
-#[derive(Debug, PartialEq, Eq, Hash)]
-#[repr(transparent)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct ArrayExpr {
     pub(crate) syntax: SyntaxNode,
 }
-unsafe impl TransparentNewType for ArrayExpr {
-    type Repr = rowan::SyntaxNode;
-}
 
 impl AstNode for ArrayExpr {
-    fn cast(syntax: &SyntaxNode) -> Option<&Self> {
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
         match syntax.kind() {
-            ARRAY_EXPR => Some(ArrayExpr::from_repr(syntax.into_repr())),
+            ARRAY_EXPR => Some(ArrayExpr { syntax }),
             _ => None,
         }
     }
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
 
-impl ToOwned for ArrayExpr {
-    type Owned = TreeArc<ArrayExpr>;
-    fn to_owned(&self) -> TreeArc<ArrayExpr> { TreeArc::cast(self.syntax.to_owned()) }
-}
-
 
 impl ArrayExpr {
-    pub fn exprs(&self) -> impl Iterator<Item = &Expr> {
+    pub fn exprs(&self) -> impl Iterator<Item = Expr> {
         super::children(self)
     }
 }
 
 // ArrayType
-#[derive(Debug, PartialEq, Eq, Hash)]
-#[repr(transparent)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct ArrayType {
     pub(crate) syntax: SyntaxNode,
 }
-unsafe impl TransparentNewType for ArrayType {
-    type Repr = rowan::SyntaxNode;
-}
 
 impl AstNode for ArrayType {
-    fn cast(syntax: &SyntaxNode) -> Option<&Self> {
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
         match syntax.kind() {
-            ARRAY_TYPE => Some(ArrayType::from_repr(syntax.into_repr())),
+            ARRAY_TYPE => Some(ArrayType { syntax }),
             _ => None,
         }
     }
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
 
-impl ToOwned for ArrayType {
-    type Owned = TreeArc<ArrayType>;
-    fn to_owned(&self) -> TreeArc<ArrayType> { TreeArc::cast(self.syntax.to_owned()) }
-}
-
 
 impl ArrayType {
-    pub fn type_ref(&self) -> Option<&TypeRef> {
+    pub fn type_ref(&self) -> Option<TypeRef> {
         super::child_opt(self)
     }
 
-    pub fn expr(&self) -> Option<&Expr> {
+    pub fn expr(&self) -> Option<Expr> {
         super::child_opt(self)
     }
 }
 
 // AssocTypeArg
-#[derive(Debug, PartialEq, Eq, Hash)]
-#[repr(transparent)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct AssocTypeArg {
     pub(crate) syntax: SyntaxNode,
 }
-unsafe impl TransparentNewType for AssocTypeArg {
-    type Repr = rowan::SyntaxNode;
-}
 
 impl AstNode for AssocTypeArg {
-    fn cast(syntax: &SyntaxNode) -> Option<&Self> {
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
         match syntax.kind() {
-            ASSOC_TYPE_ARG => Some(AssocTypeArg::from_repr(syntax.into_repr())),
+            ASSOC_TYPE_ARG => Some(AssocTypeArg { syntax }),
             _ => None,
         }
     }
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
 
-impl ToOwned for AssocTypeArg {
-    type Owned = TreeArc<AssocTypeArg>;
-    fn to_owned(&self) -> TreeArc<AssocTypeArg> { TreeArc::cast(self.syntax.to_owned()) }
-}
-
 
 impl AssocTypeArg {
-    pub fn name_ref(&self) -> Option<&NameRef> {
+    pub fn name_ref(&self) -> Option<NameRef> {
         super::child_opt(self)
     }
 
-    pub fn type_ref(&self) -> Option<&TypeRef> {
+    pub fn type_ref(&self) -> Option<TypeRef> {
         super::child_opt(self)
     }
 }
 
 // Attr
-#[derive(Debug, PartialEq, Eq, Hash)]
-#[repr(transparent)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct Attr {
     pub(crate) syntax: SyntaxNode,
 }
-unsafe impl TransparentNewType for Attr {
-    type Repr = rowan::SyntaxNode;
-}
 
 impl AstNode for Attr {
-    fn cast(syntax: &SyntaxNode) -> Option<&Self> {
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
         match syntax.kind() {
-            ATTR => Some(Attr::from_repr(syntax.into_repr())),
+            ATTR => Some(Attr { syntax }),
             _ => None,
         }
     }
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
 
-impl ToOwned for Attr {
-    type Owned = TreeArc<Attr>;
-    fn to_owned(&self) -> TreeArc<Attr> { TreeArc::cast(self.syntax.to_owned()) }
-}
-
 
 impl Attr {
-    pub fn value(&self) -> Option<&TokenTree> {
+    pub fn value(&self) -> Option<TokenTree> {
         super::child_opt(self)
     }
 }
 
 // BinExpr
-#[derive(Debug, PartialEq, Eq, Hash)]
-#[repr(transparent)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct BinExpr {
     pub(crate) syntax: SyntaxNode,
 }
-unsafe impl TransparentNewType for BinExpr {
-    type Repr = rowan::SyntaxNode;
-}
 
 impl AstNode for BinExpr {
-    fn cast(syntax: &SyntaxNode) -> Option<&Self> {
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
         match syntax.kind() {
-            BIN_EXPR => Some(BinExpr::from_repr(syntax.into_repr())),
+            BIN_EXPR => Some(BinExpr { syntax }),
             _ => None,
         }
     }
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
 
-impl ToOwned for BinExpr {
-    type Owned = TreeArc<BinExpr>;
-    fn to_owned(&self) -> TreeArc<BinExpr> { TreeArc::cast(self.syntax.to_owned()) }
-}
-
 
 impl BinExpr {}
 
 // BindPat
-#[derive(Debug, PartialEq, Eq, Hash)]
-#[repr(transparent)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct BindPat {
     pub(crate) syntax: SyntaxNode,
 }
-unsafe impl TransparentNewType for BindPat {
-    type Repr = rowan::SyntaxNode;
-}
 
 impl AstNode for BindPat {
-    fn cast(syntax: &SyntaxNode) -> Option<&Self> {
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
         match syntax.kind() {
-            BIND_PAT => Some(BindPat::from_repr(syntax.into_repr())),
+            BIND_PAT => Some(BindPat { syntax }),
             _ => None,
         }
     }
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
 
-impl ToOwned for BindPat {
-    type Owned = TreeArc<BindPat>;
-    fn to_owned(&self) -> TreeArc<BindPat> { TreeArc::cast(self.syntax.to_owned()) }
-}
-
 
 impl ast::NameOwner for BindPat {}
 impl BindPat {
-    pub fn pat(&self) -> Option<&Pat> {
+    pub fn pat(&self) -> Option<Pat> {
         super::child_opt(self)
     }
 }
 
 // Block
-#[derive(Debug, PartialEq, Eq, Hash)]
-#[repr(transparent)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct Block {
     pub(crate) syntax: SyntaxNode,
 }
-unsafe impl TransparentNewType for Block {
-    type Repr = rowan::SyntaxNode;
-}
 
 impl AstNode for Block {
-    fn cast(syntax: &SyntaxNode) -> Option<&Self> {
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
         match syntax.kind() {
-            BLOCK => Some(Block::from_repr(syntax.into_repr())),
+            BLOCK => Some(Block { syntax }),
             _ => None,
         }
     }
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
 
-impl ToOwned for Block {
-    type Owned = TreeArc<Block>;
-    fn to_owned(&self) -> TreeArc<Block> { TreeArc::cast(self.syntax.to_owned()) }
-}
-
 
 impl ast::AttrsOwner for Block {}
 impl Block {
-    pub fn statements(&self) -> impl Iterator<Item = &Stmt> {
+    pub fn statements(&self) -> impl Iterator<Item = Stmt> {
         super::children(self)
     }
 
-    pub fn expr(&self) -> Option<&Expr> {
+    pub fn expr(&self) -> Option<Expr> {
         super::child_opt(self)
     }
 }
 
 // BlockExpr
-#[derive(Debug, PartialEq, Eq, Hash)]
-#[repr(transparent)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct BlockExpr {
     pub(crate) syntax: SyntaxNode,
 }
-unsafe impl TransparentNewType for BlockExpr {
-    type Repr = rowan::SyntaxNode;
-}
 
 impl AstNode for BlockExpr {
-    fn cast(syntax: &SyntaxNode) -> Option<&Self> {
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
         match syntax.kind() {
-            BLOCK_EXPR => Some(BlockExpr::from_repr(syntax.into_repr())),
+            BLOCK_EXPR => Some(BlockExpr { syntax }),
             _ => None,
         }
     }
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
 
-impl ToOwned for BlockExpr {
-    type Owned = TreeArc<BlockExpr>;
-    fn to_owned(&self) -> TreeArc<BlockExpr> { TreeArc::cast(self.syntax.to_owned()) }
-}
-
 
 impl BlockExpr {
-    pub fn block(&self) -> Option<&Block> {
+    pub fn block(&self) -> Option<Block> {
         super::child_opt(self)
     }
 }
 
 // BreakExpr
-#[derive(Debug, PartialEq, Eq, Hash)]
-#[repr(transparent)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct BreakExpr {
     pub(crate) syntax: SyntaxNode,
 }
-unsafe impl TransparentNewType for BreakExpr {
-    type Repr = rowan::SyntaxNode;
-}
 
 impl AstNode for BreakExpr {
-    fn cast(syntax: &SyntaxNode) -> Option<&Self> {
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
         match syntax.kind() {
-            BREAK_EXPR => Some(BreakExpr::from_repr(syntax.into_repr())),
+            BREAK_EXPR => Some(BreakExpr { syntax }),
             _ => None,
         }
     }
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
 
-impl ToOwned for BreakExpr {
-    type Owned = TreeArc<BreakExpr>;
-    fn to_owned(&self) -> TreeArc<BreakExpr> { TreeArc::cast(self.syntax.to_owned()) }
-}
-
 
 impl BreakExpr {
-    pub fn expr(&self) -> Option<&Expr> {
+    pub fn expr(&self) -> Option<Expr> {
         super::child_opt(self)
     }
 }
 
 // CallExpr
-#[derive(Debug, PartialEq, Eq, Hash)]
-#[repr(transparent)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct CallExpr {
     pub(crate) syntax: SyntaxNode,
 }
-unsafe impl TransparentNewType for CallExpr {
-    type Repr = rowan::SyntaxNode;
-}
 
 impl AstNode for CallExpr {
-    fn cast(syntax: &SyntaxNode) -> Option<&Self> {
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
         match syntax.kind() {
-            CALL_EXPR => Some(CallExpr::from_repr(syntax.into_repr())),
+            CALL_EXPR => Some(CallExpr { syntax }),
             _ => None,
         }
     }
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
 
-impl ToOwned for CallExpr {
-    type Owned = TreeArc<CallExpr>;
-    fn to_owned(&self) -> TreeArc<CallExpr> { TreeArc::cast(self.syntax.to_owned()) }
-}
-
 
 impl ast::ArgListOwner for CallExpr {}
 impl CallExpr {
-    pub fn expr(&self) -> Option<&Expr> {
+    pub fn expr(&self) -> Option<Expr> {
         super::child_opt(self)
     }
 }
 
 // CastExpr
-#[derive(Debug, PartialEq, Eq, Hash)]
-#[repr(transparent)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct CastExpr {
     pub(crate) syntax: SyntaxNode,
 }
-unsafe impl TransparentNewType for CastExpr {
-    type Repr = rowan::SyntaxNode;
-}
 
 impl AstNode for CastExpr {
-    fn cast(syntax: &SyntaxNode) -> Option<&Self> {
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
         match syntax.kind() {
-            CAST_EXPR => Some(CastExpr::from_repr(syntax.into_repr())),
+            CAST_EXPR => Some(CastExpr { syntax }),
             _ => None,
         }
     }
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
 
-impl ToOwned for CastExpr {
-    type Owned = TreeArc<CastExpr>;
-    fn to_owned(&self) -> TreeArc<CastExpr> { TreeArc::cast(self.syntax.to_owned()) }
-}
-
 
 impl CastExpr {
-    pub fn expr(&self) -> Option<&Expr> {
+    pub fn expr(&self) -> Option<Expr> {
         super::child_opt(self)
     }
 
-    pub fn type_ref(&self) -> Option<&TypeRef> {
+    pub fn type_ref(&self) -> Option<TypeRef> {
         super::child_opt(self)
     }
 }
 
 // Condition
-#[derive(Debug, PartialEq, Eq, Hash)]
-#[repr(transparent)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct Condition {
     pub(crate) syntax: SyntaxNode,
 }
-unsafe impl TransparentNewType for Condition {
-    type Repr = rowan::SyntaxNode;
-}
 
 impl AstNode for Condition {
-    fn cast(syntax: &SyntaxNode) -> Option<&Self> {
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
         match syntax.kind() {
-            CONDITION => Some(Condition::from_repr(syntax.into_repr())),
+            CONDITION => Some(Condition { syntax }),
             _ => None,
         }
     }
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
 
-impl ToOwned for Condition {
-    type Owned = TreeArc<Condition>;
-    fn to_owned(&self) -> TreeArc<Condition> { TreeArc::cast(self.syntax.to_owned()) }
-}
-
 
 impl Condition {
-    pub fn pat(&self) -> Option<&Pat> {
+    pub fn pat(&self) -> Option<Pat> {
         super::child_opt(self)
     }
 
-    pub fn expr(&self) -> Option<&Expr> {
+    pub fn expr(&self) -> Option<Expr> {
         super::child_opt(self)
     }
 }
 
 // ConstDef
-#[derive(Debug, PartialEq, Eq, Hash)]
-#[repr(transparent)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct ConstDef {
     pub(crate) syntax: SyntaxNode,
 }
-unsafe impl TransparentNewType for ConstDef {
-    type Repr = rowan::SyntaxNode;
-}
 
 impl AstNode for ConstDef {
-    fn cast(syntax: &SyntaxNode) -> Option<&Self> {
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
         match syntax.kind() {
-            CONST_DEF => Some(ConstDef::from_repr(syntax.into_repr())),
+            CONST_DEF => Some(ConstDef { syntax }),
             _ => None,
         }
     }
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
 
-impl ToOwned for ConstDef {
-    type Owned = TreeArc<ConstDef>;
-    fn to_owned(&self) -> TreeArc<ConstDef> { TreeArc::cast(self.syntax.to_owned()) }
-}
-
 
 impl ast::VisibilityOwner for ConstDef {}
 impl ast::NameOwner for ConstDef {}
@@ -514,93 +376,66 @@ impl ast::AttrsOwner for ConstDef {}
 impl ast::DocCommentsOwner for ConstDef {}
 impl ast::TypeAscriptionOwner for ConstDef {}
 impl ConstDef {
-    pub fn body(&self) -> Option<&Expr> {
+    pub fn body(&self) -> Option<Expr> {
         super::child_opt(self)
     }
 }
 
 // ContinueExpr
-#[derive(Debug, PartialEq, Eq, Hash)]
-#[repr(transparent)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct ContinueExpr {
     pub(crate) syntax: SyntaxNode,
 }
-unsafe impl TransparentNewType for ContinueExpr {
-    type Repr = rowan::SyntaxNode;
-}
 
 impl AstNode for ContinueExpr {
-    fn cast(syntax: &SyntaxNode) -> Option<&Self> {
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
         match syntax.kind() {
-            CONTINUE_EXPR => Some(ContinueExpr::from_repr(syntax.into_repr())),
+            CONTINUE_EXPR => Some(ContinueExpr { syntax }),
             _ => None,
         }
     }
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
 
-impl ToOwned for ContinueExpr {
-    type Owned = TreeArc<ContinueExpr>;
-    fn to_owned(&self) -> TreeArc<ContinueExpr> { TreeArc::cast(self.syntax.to_owned()) }
-}
-
 
 impl ContinueExpr {}
 
 // DynTraitType
-#[derive(Debug, PartialEq, Eq, Hash)]
-#[repr(transparent)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct DynTraitType {
     pub(crate) syntax: SyntaxNode,
 }
-unsafe impl TransparentNewType for DynTraitType {
-    type Repr = rowan::SyntaxNode;
-}
 
 impl AstNode for DynTraitType {
-    fn cast(syntax: &SyntaxNode) -> Option<&Self> {
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
         match syntax.kind() {
-            DYN_TRAIT_TYPE => Some(DynTraitType::from_repr(syntax.into_repr())),
+            DYN_TRAIT_TYPE => Some(DynTraitType { syntax }),
             _ => None,
         }
     }
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
 
-impl ToOwned for DynTraitType {
-    type Owned = TreeArc<DynTraitType>;
-    fn to_owned(&self) -> TreeArc<DynTraitType> { TreeArc::cast(self.syntax.to_owned()) }
-}
-
 
 impl ast::TypeBoundsOwner for DynTraitType {}
 impl DynTraitType {}
 
 // EnumDef
-#[derive(Debug, PartialEq, Eq, Hash)]
-#[repr(transparent)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct EnumDef {
     pub(crate) syntax: SyntaxNode,
 }
-unsafe impl TransparentNewType for EnumDef {
-    type Repr = rowan::SyntaxNode;
-}
 
 impl AstNode for EnumDef {
-    fn cast(syntax: &SyntaxNode) -> Option<&Self> {
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
         match syntax.kind() {
-            ENUM_DEF => Some(EnumDef::from_repr(syntax.into_repr())),
+            ENUM_DEF => Some(EnumDef { syntax }),
             _ => None,
         }
     }
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
 
-impl ToOwned for EnumDef {
-    type Owned = TreeArc<EnumDef>;
-    fn to_owned(&self) -> TreeArc<EnumDef> { TreeArc::cast(self.syntax.to_owned()) }
-}
-
 
 impl ast::VisibilityOwner for EnumDef {}
 impl ast::NameOwner for EnumDef {}
@@ -608,269 +443,247 @@ impl ast::TypeParamsOwner for EnumDef {}
 impl ast::AttrsOwner for EnumDef {}
 impl ast::DocCommentsOwner for EnumDef {}
 impl EnumDef {
-    pub fn variant_list(&self) -> Option<&EnumVariantList> {
+    pub fn variant_list(&self) -> Option<EnumVariantList> {
         super::child_opt(self)
     }
 }
 
 // EnumVariant
-#[derive(Debug, PartialEq, Eq, Hash)]
-#[repr(transparent)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct EnumVariant {
     pub(crate) syntax: SyntaxNode,
 }
-unsafe impl TransparentNewType for EnumVariant {
-    type Repr = rowan::SyntaxNode;
-}
 
 impl AstNode for EnumVariant {
-    fn cast(syntax: &SyntaxNode) -> Option<&Self> {
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
         match syntax.kind() {
-            ENUM_VARIANT => Some(EnumVariant::from_repr(syntax.into_repr())),
+            ENUM_VARIANT => Some(EnumVariant { syntax }),
             _ => None,
         }
     }
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
 
-impl ToOwned for EnumVariant {
-    type Owned = TreeArc<EnumVariant>;
-    fn to_owned(&self) -> TreeArc<EnumVariant> { TreeArc::cast(self.syntax.to_owned()) }
-}
-
 
 impl ast::NameOwner for EnumVariant {}
 impl ast::DocCommentsOwner for EnumVariant {}
 impl ast::AttrsOwner for EnumVariant {}
 impl EnumVariant {
-    pub fn expr(&self) -> Option<&Expr> {
+    pub fn expr(&self) -> Option<Expr> {
         super::child_opt(self)
     }
 }
 
 // EnumVariantList
-#[derive(Debug, PartialEq, Eq, Hash)]
-#[repr(transparent)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct EnumVariantList {
     pub(crate) syntax: SyntaxNode,
 }
-unsafe impl TransparentNewType for EnumVariantList {
-    type Repr = rowan::SyntaxNode;
-}
 
 impl AstNode for EnumVariantList {
-    fn cast(syntax: &SyntaxNode) -> Option<&Self> {
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
         match syntax.kind() {
-            ENUM_VARIANT_LIST => Some(EnumVariantList::from_repr(syntax.into_repr())),
+            ENUM_VARIANT_LIST => Some(EnumVariantList { syntax }),
             _ => None,
         }
     }
     fn syntax(&self) -> &SyntaxNode { &self.syntax }
 }
 
-impl ToOwned for EnumVariantList {
-    type Owned = TreeArc<EnumVariantList>;
-    fn to_owned(&self) -> TreeArc<EnumVariantList> { TreeArc::cast(self.syntax.to_owned()) }
-}
-
 
 impl EnumVariantList {
-    pub fn variants(&self) -> impl Iterator<Item = &EnumVariant> {
+    pub fn variants(&self) -> impl Iterator<Item = EnumVariant> {
         super::children(self)
     }
 }
 
 // Expr
-#[derive(Debug, PartialEq, Eq, Hash)]
-#[repr(transparent)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct Expr {
     pub(crate) syntax: SyntaxNode,
 }
-unsafe impl TransparentNewType for Expr {
-    type Repr = rowan::SyntaxNode;
-}
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq)]
-pub enum ExprKind<'a> {
-    TupleExpr(&'a TupleExpr),
-    ArrayExpr(&'a ArrayExpr),
-    ParenExpr(&'a ParenExpr),
-    PathExpr(&'a PathExpr),
-    LambdaExpr(&'a LambdaExpr),
-    IfExpr(&'a IfExpr),
-    LoopExpr(&'a LoopExpr),
-    ForExpr(&'a ForExpr),
-    WhileExpr(&'a WhileExpr),
-    ContinueExpr(&'a ContinueExpr),
-    BreakExpr(&'a BreakExpr),
-    Label(&'a Label),
-    BlockExpr(&'a BlockExpr),
-    ReturnExpr(&'a ReturnExpr),
-    MatchExpr(&'a MatchExpr),
-    StructLit(&'a StructLit),
-    CallExpr(&'a CallExpr),
-    IndexExpr(&'a IndexExpr),
-    MethodCallExpr(&'a MethodCallExpr),
-    FieldExpr(&'a FieldExpr),
-    TryExpr(&'a TryExpr),
-    TryBlockExpr(&'a TryBlockExpr),
-    CastExpr(&'a CastExpr),
-    RefExpr(&'a RefExpr),
-    PrefixExpr(&'a PrefixExpr),
-    RangeExpr(&'a RangeExpr),
-    BinExpr(&'a BinExpr),
-    Literal(&'a Literal),
-    MacroCall(&'a MacroCall),
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum ExprKind {
+    TupleExpr(TupleExpr),
+    ArrayExpr(ArrayExpr),
+    ParenExpr(ParenExpr),
+    PathExpr(PathExpr),
+    LambdaExpr(LambdaExpr),
+    IfExpr(IfExpr),
+    LoopExpr(LoopExpr),
+    ForExpr(ForExpr),
+    WhileExpr(WhileExpr),
+    ContinueExpr(ContinueExpr),
+    BreakExpr(BreakExpr),
+    Label(Label),
+    BlockExpr(BlockExpr),
+    ReturnExpr(ReturnExpr),
+    MatchExpr(MatchExpr),
+    StructLit(StructLit),
+    CallExpr(CallExpr),
+    IndexExpr(IndexExpr),
+    MethodCallExpr(MethodCallExpr),
+    FieldExpr(FieldExpr),
+    TryExpr(TryExpr),
+    TryBlockExpr(TryBlockExpr),
+    CastExpr(CastExpr),
+    RefExpr(RefExpr),
+    PrefixExpr(PrefixExpr),
+    RangeExpr(RangeExpr),
+    BinExpr(BinExpr),
+    Literal(Literal),
+    MacroCall(MacroCall),
 }
-impl<'a> From<&'a TupleExpr> for &'a Expr {
-    fn from(n: &'a TupleExpr) -> &'a Expr {
-        Expr::cast(&n.syntax).unwrap()
+impl From<TupleExpr> for Expr {
+    fn from(n: TupleExpr) -> Expr {
+        Expr::cast(n.syntax).unwrap()
     }
 }
-impl<'a> From<&'a ArrayExpr> for &'a Expr {
-    fn from(n: &'a ArrayExpr) -> &'a Expr {
-        Expr::cast(&n.syntax).unwrap()
+impl From<ArrayExpr> for Expr {
+    fn from(n: ArrayExpr) -> Expr {
+        Expr::cast(n.syntax).unwrap()
     }
 }
-impl<'a> From<&'a ParenExpr> for &'a Expr {
-    fn from(n: &'a ParenExpr) -> &'a Expr {
-        Expr::cast(&n.syntax).unwrap()
+impl From<ParenExpr> for Expr {
+    fn from(n: ParenExpr) -> Expr {
+        Expr::cast(n.syntax).unwrap()
     }
 }
-impl<'a> From<&'a PathExpr> for &'a Expr {
-    fn from(n: &'a PathExpr) -> &'a Expr {
-        Expr::cast(&n.syntax).unwrap()
+impl From<PathExpr> for Expr {
+    fn from(n: PathExpr) -> Expr {
+        Expr::cast(n.syntax).unwrap()
     }
 }
-impl<'a> From<&'a LambdaExpr> for &'a Expr {
-    fn from(n: &'a LambdaExpr) -> &'a Expr {
-        Expr::cast(&n.syntax).unwrap()
+impl From<LambdaExpr> for Expr {
+    fn from(n: LambdaExpr) -> Expr {
+        Expr::cast(n.syntax).unwrap()
     }
 }
-impl<'a> From<&'a IfExpr> for &'a Expr {
-    fn from(n: &'a IfExpr) -> &'a Expr {
-        Expr::cast(&n.syntax).unwrap()
+impl From<IfExpr> for Expr {
+    fn from(n: IfExpr) -> Expr {
+        Expr::cast(n.syntax).unwrap()
     }
 }
-impl<'a> From<&'a LoopExpr> for &'a Expr {
-    fn from(n: &'a LoopExpr) -> &'a Expr {
-        Expr::cast(&n.syntax).unwrap()
+impl From<LoopExpr> for Expr {
+    fn from(n: LoopExpr) -> Expr {
+        Expr::cast(n.syntax).unwrap()
     }
 }
-impl<'a> From<&'a ForExpr> for &'a Expr {
-    fn from(n: &'a ForExpr) -> &'a Expr {
-        Expr::cast(&n.syntax).unwrap()
+impl From<ForExpr> for Expr {
+    fn from(n: ForExpr) -> Expr {
+        Expr::cast(n.syntax).unwrap()
     }
 }
-impl<'a> From<&'a WhileExpr> for &'a Expr {
-    fn from(n: &'a WhileExpr) -> &'a Expr {
-        Expr::cast(&n.syntax).unwrap()
+impl From<WhileExpr> for Expr {
+    fn from(n: WhileExpr) -> Expr {
+        Expr::cast(n.syntax).unwrap()
     }
 }
-impl<'a> From<&'a ContinueExpr> for &'a Expr {
-    fn from(n: &'a ContinueExpr) -> &'a Expr {
-        Expr::cast(&n.syntax).unwrap()
+impl From<ContinueExpr> for Expr {
+    fn from(n: ContinueExpr) -> Expr {
+        Expr::cast(n.syntax).unwrap()
     }
 }
-impl<'a> From<&'a BreakExpr> for &'a Expr {
-    fn from(n: &'a BreakExpr) -> &'a Expr {
-        Expr::cast(&n.syntax).unwrap()
+impl From<BreakExpr> for Expr {
+    fn from(n: BreakExpr) -> Expr {
+        Expr::cast(n.syntax).unwrap()
     }
 }
-impl<'a> From<&'a Label> for &'a Expr {
-    fn from(n: &'a Label) -> &'a Expr {
-        Expr::cast(&n.syntax).unwrap()
+impl From