internal: Add offset param to token descending API

Lukas Wirth 2023-08-16 10:07:18 +02:00
parent b14770934a
commit 53b292478d
21 changed files with 185 additions and 143 deletions
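The gist of the change: every `descend_into_macros*` entry point on `Semantics` now takes the active cursor offset alongside the token and threads it down to `ExpansionInfo::map_token_down`. The offset is not used for mapping yet (see the FIXMEs below); it is plumbing so a later change can map only the sub-range of a token the cursor actually touches, e.g. an identifier inside an inline format arg. Call sites with no meaningful cursor position pass `0.into()` as an opt-out sentinel. A hypothetical caller, sketched against the new signatures (the function name `definitions_at` is made up and import paths are approximate; the types are the ones used in the diffs below):

    // Sketch only — not part of this commit; import paths are approximate.
    use hir::Semantics;
    use ide_db::{base_db::FilePosition, RootDatabase};

    fn definitions_at(db: &RootDatabase, FilePosition { file_id, offset }: FilePosition) {
        let sema = Semantics::new(db);
        let file = sema.parse(file_id).syntax().clone();
        for token in file.token_at_offset(offset) {
            // The cursor offset now rides along with the token being descended,
            // so the mapping can later account for inline format args.
            for descended in sema.descend_into_macros(token, offset) {
                let _ = descended; // classify, resolve, etc.
            }
        }
    }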

View File

@@ -37,7 +37,7 @@ use either::Either;
 use syntax::{
     algo::{self, skip_trivia_token},
     ast::{self, AstNode, HasDocComments},
-    AstPtr, Direction, SyntaxNode, SyntaxNodePtr, SyntaxToken,
+    AstPtr, Direction, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextSize,
 };
 
 use crate::{
@@ -642,6 +642,8 @@ impl ExpansionInfo {
         db: &dyn db::ExpandDatabase,
         item: Option<ast::Item>,
         token: InFile<&SyntaxToken>,
+        // FIXME: use this for range mapping, so that we can resolve inline format args
+        _relative_token_offset: Option<TextSize>,
     ) -> Option<impl Iterator<Item = InFile<SyntaxToken>> + '_> {
         assert_eq!(token.file_id, self.arg.file_id);
         let token_id_in_attr_input = if let Some(item) = item {
@@ -1051,16 +1053,6 @@ impl InFile<SyntaxToken> {
             }
         }
     }
-
-    pub fn ancestors_with_macros(
-        self,
-        db: &dyn db::ExpandDatabase,
-    ) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ {
-        self.value.parent().into_iter().flat_map({
-            let file_id = self.file_id;
-            move |parent| InFile::new(file_id, &parent).ancestors_with_macros(db)
-        })
-    }
 }
 
 #[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]

View File

@@ -170,6 +170,8 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         self.imp.is_derive_annotated(item)
     }
 
+    /// Expand the macro call with a different token tree, mapping the `token_to_map` down into the
+    /// expansion. `token_to_map` should be a token from the `speculative args` node.
     pub fn speculative_expand(
         &self,
         actual_macro_call: &ast::MacroCall,
@@ -179,6 +181,8 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         self.imp.speculative_expand(actual_macro_call, speculative_args, token_to_map)
     }
 
+    /// Expand the macro call with a different item as the input, mapping the `token_to_map` down into the
+    /// expansion. `token_to_map` should be a token from the `speculative args` node.
     pub fn speculative_expand_attr_macro(
         &self,
         actual_macro_call: &ast::Item,
@@ -201,14 +205,22 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         )
     }
 
-    /// Descend the token into macrocalls to its first mapped counterpart.
-    pub fn descend_into_macros_single(&self, token: SyntaxToken) -> SyntaxToken {
-        self.imp.descend_into_macros_single(token)
+    /// Descend the token into its macro call if it is part of one, returning the token in the
+    /// expansion that it is associated with. If `offset` points into the token's range, it will
+    /// be considered for the mapping in case of inline format args.
+    pub fn descend_into_macros_single(&self, token: SyntaxToken, offset: TextSize) -> SyntaxToken {
+        self.imp.descend_into_macros_single(token, offset)
     }
 
-    /// Descend the token into macrocalls to all its mapped counterparts.
-    pub fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
-        self.imp.descend_into_macros(token)
+    /// Descend the token into its macro call if it is part of one, returning the tokens in the
+    /// expansion that it is associated with. If `offset` points into the token's range, it will
+    /// be considered for the mapping in case of inline format args.
+    pub fn descend_into_macros(
+        &self,
+        token: SyntaxToken,
+        offset: TextSize,
+    ) -> SmallVec<[SyntaxToken; 1]> {
+        self.imp.descend_into_macros(token, offset)
     }
 
     /// Descend the token into macrocalls to all its mapped counterparts that have the same text as the input token.
@@ -217,12 +229,17 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
     pub fn descend_into_macros_with_same_text(
         &self,
         token: SyntaxToken,
+        offset: TextSize,
     ) -> SmallVec<[SyntaxToken; 1]> {
-        self.imp.descend_into_macros_with_same_text(token)
+        self.imp.descend_into_macros_with_same_text(token, offset)
     }
 
-    pub fn descend_into_macros_with_kind_preference(&self, token: SyntaxToken) -> SyntaxToken {
-        self.imp.descend_into_macros_with_kind_preference(token)
+    pub fn descend_into_macros_with_kind_preference(
+        &self,
+        token: SyntaxToken,
+        offset: TextSize,
+    ) -> SyntaxToken {
+        self.imp.descend_into_macros_with_kind_preference(token, offset)
     }
 
     /// Maps a node down by mapping its first and last token down.
@@ -665,7 +682,7 @@ impl<'db> SemanticsImpl<'db> {
         };
 
         if first == last {
-            self.descend_into_macros_impl(first, &mut |InFile { value, .. }| {
+            self.descend_into_macros_impl(first, 0.into(), &mut |InFile { value, .. }| {
                 if let Some(node) = value.parent_ancestors().find_map(N::cast) {
                     res.push(node)
                 }
@@ -674,7 +691,7 @@ impl<'db> SemanticsImpl<'db> {
         } else {
             // Descend first and last token, then zip them to look for the node they belong to
             let mut scratch: SmallVec<[_; 1]> = smallvec![];
-            self.descend_into_macros_impl(first, &mut |token| {
+            self.descend_into_macros_impl(first, 0.into(), &mut |token| {
                 scratch.push(token);
                 false
             });
@@ -682,6 +699,7 @@ impl<'db> SemanticsImpl<'db> {
             let mut scratch = scratch.into_iter();
             self.descend_into_macros_impl(
                 last,
+                0.into(),
                 &mut |InFile { value: last, file_id: last_fid }| {
                     if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() {
                         if first_fid == last_fid {
@@ -705,19 +723,27 @@ impl<'db> SemanticsImpl<'db> {
         res
     }
 
-    fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
+    fn descend_into_macros(
+        &self,
+        token: SyntaxToken,
+        offset: TextSize,
+    ) -> SmallVec<[SyntaxToken; 1]> {
         let mut res = smallvec![];
-        self.descend_into_macros_impl(token, &mut |InFile { value, .. }| {
+        self.descend_into_macros_impl(token, offset, &mut |InFile { value, .. }| {
             res.push(value);
             false
         });
         res
     }
 
-    fn descend_into_macros_with_same_text(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
+    fn descend_into_macros_with_same_text(
+        &self,
+        token: SyntaxToken,
+        offset: TextSize,
+    ) -> SmallVec<[SyntaxToken; 1]> {
         let text = token.text();
         let mut res = smallvec![];
-        self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| {
+        self.descend_into_macros_impl(token.clone(), offset, &mut |InFile { value, .. }| {
             if value.text() == text {
                 res.push(value);
             }
@@ -729,7 +755,11 @@ impl<'db> SemanticsImpl<'db> {
         res
     }
 
-    fn descend_into_macros_with_kind_preference(&self, token: SyntaxToken) -> SyntaxToken {
+    fn descend_into_macros_with_kind_preference(
+        &self,
+        token: SyntaxToken,
+        offset: TextSize,
+    ) -> SyntaxToken {
         let fetch_kind = |token: &SyntaxToken| match token.parent() {
             Some(node) => match node.kind() {
                 kind @ (SyntaxKind::NAME | SyntaxKind::NAME_REF) => {
@@ -741,7 +771,7 @@ impl<'db> SemanticsImpl<'db> {
         };
         let preferred_kind = fetch_kind(&token);
         let mut res = None;
-        self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| {
+        self.descend_into_macros_impl(token.clone(), offset, &mut |InFile { value, .. }| {
             if fetch_kind(&value) == preferred_kind {
                 res = Some(value);
                 true
@@ -755,9 +785,9 @@ impl<'db> SemanticsImpl<'db> {
         res.unwrap_or(token)
     }
 
-    fn descend_into_macros_single(&self, token: SyntaxToken) -> SyntaxToken {
+    fn descend_into_macros_single(&self, token: SyntaxToken, offset: TextSize) -> SyntaxToken {
         let mut res = token.clone();
-        self.descend_into_macros_impl(token, &mut |InFile { value, .. }| {
+        self.descend_into_macros_impl(token, offset, &mut |InFile { value, .. }| {
             res = value;
             true
         });
@@ -767,9 +797,13 @@
     fn descend_into_macros_impl(
         &self,
         token: SyntaxToken,
+        // FIXME: We might want this to be Option<TextSize> to be able to opt out of subrange
+        // mapping, specifically for node downmapping
+        offset: TextSize,
         f: &mut dyn FnMut(InFile<SyntaxToken>) -> bool,
     ) {
         let _p = profile::span("descend_into_macros");
+        let relative_token_offset = offset.checked_sub(token.text_range().start());
         let parent = match token.parent() {
             Some(it) => it,
             None => return,
@@ -796,7 +830,12 @@
                         self.cache(value, file_id);
                     }
 
-                    let mapped_tokens = expansion_info.map_token_down(self.db.upcast(), item, token)?;
+                    let mapped_tokens = expansion_info.map_token_down(
+                        self.db.upcast(),
+                        item,
+                        token,
+                        relative_token_offset,
+                    )?;
                     let len = stack.len();
 
                     // requeue the tokens we got from mapping our current token down
@@ -943,7 +982,7 @@
         offset: TextSize,
     ) -> impl Iterator<Item = impl Iterator<Item = SyntaxNode> + '_> + '_ {
         node.token_at_offset(offset)
-            .map(move |token| self.descend_into_macros(token))
+            .map(move |token| self.descend_into_macros(token, offset))
            .map(|descendants| {
                descendants.into_iter().map(move |it| self.token_ancestors_with_macros(it))
            })
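A note on the arithmetic in `descend_into_macros_impl` above: the relative offset is computed once, against the original token, and handed to `map_token_down` unchanged. `checked_sub` is what makes the `0.into()` sentinel behave as an opt-out — for any token that does not start at the very beginning of the file it yields `None` — which is also why the FIXME suggests an explicit `Option<TextSize>` parameter instead. A self-contained sketch of just that computation (only the `text-size` crate is assumed; the values are made up):

    use text_size::TextSize;

    // Mirrors `offset.checked_sub(token.text_range().start())` above.
    fn relative_token_offset(cursor: TextSize, token_start: TextSize) -> Option<TextSize> {
        cursor.checked_sub(token_start)
    }

    fn main() {
        let token_start = TextSize::from(40);
        // A real cursor inside the token: Some(position relative to the token start).
        assert_eq!(relative_token_offset(TextSize::from(42), token_start), Some(TextSize::from(2)));
        // The `0.into()` sentinel: None, i.e. no sub-range mapping.
        assert_eq!(relative_token_offset(TextSize::from(0), token_start), None);
    }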

View File

@@ -48,7 +48,7 @@ pub(crate) fn extract_expressions_from_format_string(
     let tt = fmt_string.syntax().parent().and_then(ast::TokenTree::cast)?;
 
     let expanded_t = ast::String::cast(
-        ctx.sema.descend_into_macros_with_kind_preference(fmt_string.syntax().clone()),
+        ctx.sema.descend_into_macros_with_kind_preference(fmt_string.syntax().clone(), 0.into()),
     )?;
     if !is_format_string(&expanded_t) {
         return None;

View File

@@ -750,7 +750,7 @@ impl FunctionBody {
                 .descendants_with_tokens()
                 .filter_map(SyntaxElement::into_token)
                 .filter(|it| matches!(it.kind(), SyntaxKind::IDENT | T![self]))
-                .flat_map(|t| sema.descend_into_macros(t))
+                .flat_map(|t| sema.descend_into_macros(t, 0.into()))
                 .for_each(|t| add_name_if_local(t.parent().and_then(ast::NameRef::cast)));
         }
     }

View File

@@ -117,7 +117,7 @@ pub fn get_definition(
     sema: &Semantics<'_, RootDatabase>,
     token: SyntaxToken,
 ) -> Option<Definition> {
-    for token in sema.descend_into_macros(token) {
+    for token in sema.descend_into_macros(token, 0.into()) {
         let def = IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops);
         if let Some(&[x]) = def.as_deref() {
             return Some(x);

View File

@@ -456,14 +456,14 @@ impl<'a> FindUsages<'a> {
                     it.text().trim_start_matches("r#") == name
                 })
                 .into_iter()
-                .flat_map(|token| {
+                .flat_map(move |token| {
                     // FIXME: There should be optimization potential here
                     // Currently we try to descend everything we find which
                     // means we call `Semantics::descend_into_macros` on
                     // every textual hit. That function is notoriously
                     // expensive even for things that do not get down mapped
                     // into macros.
-                    sema.descend_into_macros(token).into_iter().filter_map(|it| it.parent())
+                    sema.descend_into_macros(token, offset).into_iter().filter_map(|it| it.parent())
                 })
         };

View File

@@ -74,18 +74,20 @@ pub(crate) fn incoming_calls(
     Some(calls.into_items())
 }
 
-pub(crate) fn outgoing_calls(db: &RootDatabase, position: FilePosition) -> Option<Vec<CallItem>> {
+pub(crate) fn outgoing_calls(
+    db: &RootDatabase,
+    FilePosition { file_id, offset }: FilePosition,
+) -> Option<Vec<CallItem>> {
     let sema = Semantics::new(db);
-    let file_id = position.file_id;
     let file = sema.parse(file_id);
     let file = file.syntax();
-    let token = pick_best_token(file.token_at_offset(position.offset), |kind| match kind {
+    let token = pick_best_token(file.token_at_offset(offset), |kind| match kind {
         IDENT => 1,
         _ => 0,
     })?;
     let mut calls = CallLocations::default();
 
-    sema.descend_into_macros(token)
+    sema.descend_into_macros(token, offset)
         .into_iter()
         .filter_map(|it| it.parent_ancestors().nth(1).and_then(ast::Item::cast))
         .filter_map(|item| match item {

View File

@@ -131,19 +131,19 @@ pub(crate) fn remove_links(markdown: &str) -> String {
 // |===
 pub(crate) fn external_docs(
     db: &RootDatabase,
-    position: &FilePosition,
+    FilePosition { file_id, offset }: FilePosition,
     target_dir: Option<&OsStr>,
     sysroot: Option<&OsStr>,
 ) -> Option<DocumentationLinks> {
     let sema = &Semantics::new(db);
-    let file = sema.parse(position.file_id).syntax().clone();
-    let token = pick_best_token(file.token_at_offset(position.offset), |kind| match kind {
+    let file = sema.parse(file_id).syntax().clone();
+    let token = pick_best_token(file.token_at_offset(offset), |kind| match kind {
         IDENT | INT_NUMBER | T![self] => 3,
         T!['('] | T![')'] => 2,
         kind if kind.is_trivia() => 0,
         _ => 1,
     })?;
-    let token = sema.descend_into_macros_single(token);
+    let token = sema.descend_into_macros_single(token, offset);
 
     let node = token.parent()?;
     let definition = match_ast! {
@@ -285,7 +285,7 @@ impl DocCommentToken {
             let original_start = doc_token.text_range().start();
             let relative_comment_offset = offset - original_start - prefix_len;
 
-            sema.descend_into_macros(doc_token).into_iter().find_map(|t| {
+            sema.descend_into_macros(doc_token, offset).into_iter().find_map(|t| {
                 let (node, descended_prefix_len) = match_ast! {
                     match t {
                         ast::Comment(comment) => (t.parent()?, TextSize::try_from(comment.prefix().len()).ok()?),

View File

@@ -40,7 +40,8 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<
     // struct Bar;
     // ```
 
-    let derive = sema.descend_into_macros(tok.clone()).into_iter().find_map(|descended| {
+    let derive =
+        sema.descend_into_macros(tok.clone(), 0.into()).into_iter().find_map(|descended| {
             let hir_file = sema.hir_file_for(&descended.parent()?);
             if !hir_file.is_derive_attr_pseudo_expansion(db) {
                 return None;
@@ -58,8 +59,12 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<
             .take_while(|it| it != &token)
             .filter(|it| it.kind() == T![,])
             .count();
-        let expansion =
-            format(db, SyntaxKind::MACRO_ITEMS, position.file_id, expansions.get(idx).cloned()?);
+        let expansion = format(
+            db,
+            SyntaxKind::MACRO_ITEMS,
+            position.file_id,
+            expansions.get(idx).cloned()?,
+        );
         Some(ExpandedMacro { name, expansion })
     });

View File

@@ -17,8 +17,6 @@ use crate::FileRange;
 // Extends or shrinks the current selection to the encompassing syntactic construct
 // (expression, statement, item, module, etc). It works with multiple cursors.
 //
-// This is a standard LSP feature and not a protocol extension.
-//
 // |===
 // | Editor | Shortcut
 //
@@ -142,8 +140,10 @@ fn extend_tokens_from_range(
 
     // compute original mapped token range
     let extended = {
-        let fst_expanded = sema.descend_into_macros_single(first_token.clone());
-        let lst_expanded = sema.descend_into_macros_single(last_token.clone());
+        let fst_expanded =
+            sema.descend_into_macros_single(first_token.clone(), original_range.start());
+        let lst_expanded =
+            sema.descend_into_macros_single(last_token.clone(), original_range.end());
         let mut lca =
             algo::least_common_ancestor(&fst_expanded.parent()?, &lst_expanded.parent()?)?;
         lca = shallowest_node(&lca);
@@ -154,13 +154,16 @@
     };
 
     // Compute parent node range
-    let validate = |token: &SyntaxToken| -> bool {
-        let expanded = sema.descend_into_macros_single(token.clone());
-        let parent = match expanded.parent() {
-            Some(it) => it,
-            None => return false,
-        };
-        algo::least_common_ancestor(&extended, &parent).as_ref() == Some(&extended)
+    let validate = |offset: TextSize| {
+        let extended = &extended;
+        move |token: &SyntaxToken| -> bool {
+            let expanded = sema.descend_into_macros_single(token.clone(), offset);
+            let parent = match expanded.parent() {
+                Some(it) => it,
+                None => return false,
+            };
+            algo::least_common_ancestor(extended, &parent).as_ref() == Some(extended)
+        }
     };
 
     // Find the first and last text range under expanded parent
@@ -168,14 +171,14 @@
         let token = token.prev_token()?;
         skip_trivia_token(token, Direction::Prev)
     })
-    .take_while(validate)
+    .take_while(validate(original_range.start()))
    .last()?;
 
     let last = successors(Some(last_token), |token| {
         let token = token.next_token()?;
         skip_trivia_token(token, Direction::Next)
     })
-    .take_while(validate)
+    .take_while(validate(original_range.end()))
    .last()?;
 
     let range = first.text_range().cover(last.text_range());
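The `validate` rewrite above is the one non-mechanical change in this file: the two `take_while` calls now need different offsets (the selection's start for the backward walk, its end for the forward walk), so `validate` becomes a factory that re-borrows `extended` and returns a fresh predicate per offset. A reduced, self-contained sketch of that closure-factory shape (names and the predicate body are illustrative, not from the commit):

    fn main() {
        let extended = String::from("expanded-parent");
        // Factory: one predicate per offset, all sharing `extended` by reference.
        let validate = |offset: usize| {
            let extended = &extended; // re-borrow so `validate` can be called twice
            move |token: &String| -> bool {
                // Stand-in for: descend `token` at `offset`, check its parent is `extended`.
                !extended.is_empty() && token.len() > offset
            }
        };
        let tokens = vec!["fn".to_string(), "main".to_string(), "x".to_string()];
        let first: Vec<_> = tokens.iter().cloned().take_while(validate(1)).collect();
        let last: Vec<_> = tokens.iter().cloned().take_while(validate(0)).collect();
        println!("{first:?} {last:?}"); // ["fn", "main"] vs. all three
    }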

View File

@@ -20,16 +20,16 @@
 // - fields in patterns will navigate to the field declaration of the struct, union or variant
 pub(crate) fn goto_declaration(
     db: &RootDatabase,
-    position: FilePosition,
+    position @ FilePosition { file_id, offset }: FilePosition,
 ) -> Option<RangeInfo<Vec<NavigationTarget>>> {
     let sema = Semantics::new(db);
-    let file = sema.parse(position.file_id).syntax().clone();
+    let file = sema.parse(file_id).syntax().clone();
     let original_token = file
-        .token_at_offset(position.offset)
+        .token_at_offset(offset)
         .find(|it| matches!(it.kind(), IDENT | T![self] | T![super] | T![crate] | T![Self]))?;
     let range = original_token.text_range();
     let info: Vec<NavigationTarget> = sema
-        .descend_into_macros(original_token)
+        .descend_into_macros(original_token, offset)
         .iter()
         .filter_map(|token| {
             let parent = token.parent()?;

View File

@@ -29,12 +29,11 @@ use syntax::{ast, AstNode, AstToken, SyntaxKind::*, SyntaxToken, TextRange, T};
 // image::https://user-images.githubusercontent.com/48062697/113065563-025fbe00-91b1-11eb-83e4-a5a703610b23.gif[]
 pub(crate) fn goto_definition(
     db: &RootDatabase,
-    position: FilePosition,
+    FilePosition { file_id, offset }: FilePosition,
 ) -> Option<RangeInfo<Vec<NavigationTarget>>> {
     let sema = &Semantics::new(db);
-    let file = sema.parse(position.file_id).syntax().clone();
-    let original_token =
-        pick_best_token(file.token_at_offset(position.offset), |kind| match kind {
+    let file = sema.parse(file_id).syntax().clone();
+    let original_token = pick_best_token(file.token_at_offset(offset), |kind| match kind {
         IDENT
         | INT_NUMBER
         | LIFETIME_IDENT
@@ -51,23 +50,18 @@ pub(crate) fn goto_definition(
         _ => 1,
     })?;
     if let Some(doc_comment) = token_as_doc_comment(&original_token) {
-        return doc_comment.get_definition_with_descend_at(
-            sema,
-            position.offset,
-            |def, _, link_range| {
-                let nav = def.try_to_nav(db)?;
-                Some(RangeInfo::new(link_range, vec![nav]))
-            },
-        );
+        return doc_comment.get_definition_with_descend_at(sema, offset, |def, _, link_range| {
+            let nav = def.try_to_nav(db)?;
+            Some(RangeInfo::new(link_range, vec![nav]))
+        });
     }
     let navs = sema
-        .descend_into_macros(original_token.clone())
+        .descend_into_macros(original_token.clone(), offset)
         .into_iter()
         .filter_map(|token| {
             let parent = token.parent()?;
             if let Some(tt) = ast::TokenTree::cast(parent) {
-                if let Some(x) = try_lookup_include_path(sema, tt, token.clone(), position.file_id)
-                {
+                if let Some(x) = try_lookup_include_path(sema, tt, token.clone(), file_id) {
                     return Some(vec![x]);
                 }
             }

View File

@@ -22,20 +22,19 @@ use crate::{FilePosition, NavigationTarget, RangeInfo, TryToNav};
 // image::https://user-images.githubusercontent.com/48062697/113065566-02f85480-91b1-11eb-9288-aaad8abd8841.gif[]
 pub(crate) fn goto_implementation(
     db: &RootDatabase,
-    position: FilePosition,
+    FilePosition { file_id, offset }: FilePosition,
 ) -> Option<RangeInfo<Vec<NavigationTarget>>> {
     let sema = Semantics::new(db);
-    let source_file = sema.parse(position.file_id);
+    let source_file = sema.parse(file_id);
     let syntax = source_file.syntax().clone();
-    let original_token =
-        pick_best_token(syntax.token_at_offset(position.offset), |kind| match kind {
-            IDENT | T![self] | INT_NUMBER => 1,
-            _ => 0,
-        })?;
+    let original_token = pick_best_token(syntax.token_at_offset(offset), |kind| match kind {
+        IDENT | T![self] | INT_NUMBER => 1,
+        _ => 0,
+    })?;
     let range = original_token.text_range();
     let navs =
-        sema.descend_into_macros(original_token)
+        sema.descend_into_macros(original_token, offset)
             .into_iter()
             .filter_map(|token| token.parent().and_then(ast::NameLike::cast))
             .filter_map(|node| match &node {

View File

@@ -16,13 +16,13 @@ use crate::{FilePosition, NavigationTarget, RangeInfo, TryToNav};
 // image::https://user-images.githubusercontent.com/48062697/113020657-b560f500-917a-11eb-9007-0f809733a338.gif[]
 pub(crate) fn goto_type_definition(
     db: &RootDatabase,
-    position: FilePosition,
+    FilePosition { file_id, offset }: FilePosition,
 ) -> Option<RangeInfo<Vec<NavigationTarget>>> {
     let sema = hir::Semantics::new(db);
-    let file: ast::SourceFile = sema.parse(position.file_id);
+    let file: ast::SourceFile = sema.parse(file_id);
     let token: SyntaxToken =
-        pick_best_token(file.syntax().token_at_offset(position.offset), |kind| match kind {
+        pick_best_token(file.syntax().token_at_offset(offset), |kind| match kind {
             IDENT | INT_NUMBER | T![self] => 2,
             kind if kind.is_trivia() => 0,
             _ => 1,
@@ -37,7 +37,7 @@ pub(crate) fn goto_type_definition(
         }
     };
     let range = token.text_range();
-    sema.descend_into_macros(token)
+    sema.descend_into_macros(token, offset)
         .into_iter()
         .filter_map(|token| {
             let ty = sema

View File

@@ -15,6 +15,7 @@ use syntax::{
     SyntaxKind::{self, IDENT, INT_NUMBER},
     SyntaxNode, SyntaxToken, TextRange, T,
 };
+use text_edit::TextSize;
 
 use crate::{navigation_target::ToNav, references, NavigationTarget, TryToNav};
 
@@ -51,7 +52,7 @@ pub struct HighlightRelatedConfig {
 pub(crate) fn highlight_related(
     sema: &Semantics<'_, RootDatabase>,
     config: HighlightRelatedConfig,
-    FilePosition { offset, file_id }: FilePosition,
+    pos @ FilePosition { offset, file_id }: FilePosition,
 ) -> Option<Vec<HighlightedRange>> {
     let _p = profile::span("highlight_related");
     let syntax = sema.parse(file_id).syntax().clone();
@@ -79,7 +80,7 @@ pub(crate) fn highlight_related(
         }
         T![|] if config.closure_captures => highlight_closure_captures(sema, token, file_id),
         T![move] if config.closure_captures => highlight_closure_captures(sema, token, file_id),
-        _ if config.references => highlight_references(sema, &syntax, token, file_id),
+        _ if config.references => highlight_references(sema, &syntax, token, pos),
         _ => None,
     }
 }
@@ -129,9 +130,9 @@ fn highlight_references(
     sema: &Semantics<'_, RootDatabase>,
     node: &SyntaxNode,
     token: SyntaxToken,
-    file_id: FileId,
+    FilePosition { file_id, offset }: FilePosition,
 ) -> Option<Vec<HighlightedRange>> {
-    let defs = find_defs(sema, token.clone());
+    let defs = find_defs(sema, token.clone(), offset);
     let usages = defs
         .iter()
         .filter_map(|&d| {
@@ -455,8 +456,12 @@ fn cover_range(r0: Option<TextRange>, r1: Option<TextRange>) -> Option<TextRange> {
     }
 }
 
-fn find_defs(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> FxHashSet<Definition> {
-    sema.descend_into_macros(token)
+fn find_defs(
+    sema: &Semantics<'_, RootDatabase>,
+    token: SyntaxToken,
+    offset: TextSize,
+) -> FxHashSet<Definition> {
+    sema.descend_into_macros(token, offset)
         .into_iter()
         .filter_map(|token| IdentClass::classify_token(sema, &token))
         .map(IdentClass::definitions_no_ops)

View File

@@ -162,9 +162,9 @@ fn hover_simple(
         // prefer descending the same token kind in attribute expansions, in normal macros text
         // equivalency is more important
         let descended = if in_attr {
-            [sema.descend_into_macros_with_kind_preference(original_token.clone())].into()
+            [sema.descend_into_macros_with_kind_preference(original_token.clone(), offset)].into()
         } else {
-            sema.descend_into_macros_with_same_text(original_token.clone())
+            sema.descend_into_macros_with_same_text(original_token.clone(), offset)
         };
         let descended = || descended.iter();

View File

@@ -484,7 +484,7 @@ impl Analysis {
         sysroot: Option<&OsStr>,
     ) -> Cancellable<doc_links::DocumentationLinks> {
         self.with_db(|db| {
-            doc_links::external_docs(db, &position, target_dir, sysroot).unwrap_or_default()
+            doc_links::external_docs(db, position, target_dir, sysroot).unwrap_or_default()
         })
     }

View File

@@ -99,7 +99,7 @@ pub(crate) fn moniker(
         });
     }
     let navs = sema
-        .descend_into_macros(original_token.clone())
+        .descend_into_macros(original_token.clone(), offset)
         .into_iter()
         .filter_map(|token| {
             IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops).map(|it| {

View File

@@ -126,7 +126,7 @@ pub(crate) fn find_defs<'a>(
         )
     });
     token.map(|token| {
-        sema.descend_into_macros_with_same_text(token)
+        sema.descend_into_macros_with_same_text(token, offset)
             .into_iter()
             .filter_map(|it| ast::NameLike::cast(it.parent()?))
             .filter_map(move |name_like| {

View File

@@ -67,17 +67,20 @@ impl SignatureHelp {
 }
 
 /// Computes parameter information for the given position.
-pub(crate) fn signature_help(db: &RootDatabase, position: FilePosition) -> Option<SignatureHelp> {
+pub(crate) fn signature_help(
+    db: &RootDatabase,
+    FilePosition { file_id, offset }: FilePosition,
+) -> Option<SignatureHelp> {
     let sema = Semantics::new(db);
-    let file = sema.parse(position.file_id);
+    let file = sema.parse(file_id);
     let file = file.syntax();
     let token = file
-        .token_at_offset(position.offset)
+        .token_at_offset(offset)
         .left_biased()
         // if the cursor is sandwiched between two space tokens and the call is unclosed
         // this prevents us from leaving the CallExpression
         .and_then(|tok| algo::skip_trivia_token(tok, Direction::Prev))?;
-    let token = sema.descend_into_macros_single(token);
+    let token = sema.descend_into_macros_single(token, offset);
 
     for node in token.parent_ancestors() {
         match_ast! {

View File

@@ -395,10 +395,10 @@ fn traverse(
             NodeOrToken::Token(token) if token.kind() != COMMENT => {
                 let token = match attr_or_derive_item {
                     Some(AttrOrDerive::Attr(_)) => {
-                        sema.descend_into_macros_with_kind_preference(token)
+                        sema.descend_into_macros_with_kind_preference(token, 0.into())
                     }
                     Some(AttrOrDerive::Derive(_)) | None => {
-                        sema.descend_into_macros_single(token)
+                        sema.descend_into_macros_single(token, 0.into())
                     }
                 };
                 match token.parent().and_then(ast::NameLike::cast) {