Token-based reverse-mapping
parent 5e8c22dbf3
commit b79d678923
@@ -20,7 +20,7 @@ use ra_db::{salsa, CrateId, FileId};
 use ra_syntax::{
     algo,
     ast::{self, AstNode},
-    SyntaxNode, SyntaxToken, TextRange, TextUnit,
+    SyntaxNode, SyntaxToken, TextUnit,
 };
 
 use crate::ast_id_map::FileAstId;
@@ -79,22 +79,17 @@ impl HirFileId {
             HirFileIdRepr::MacroFile(macro_file) => {
                 let loc: MacroCallLoc = db.lookup_intern_macro(macro_file.macro_call_id);
 
-                let arg_start = loc.ast_id.to_node(db).token_tree()?.syntax().text_range().start();
-                let def_start =
-                    loc.def.ast_id.to_node(db).token_tree()?.syntax().text_range().start();
+                let arg_tt = loc.ast_id.to_node(db).token_tree()?;
+                let def_tt = loc.def.ast_id.to_node(db).token_tree()?;
 
                 let macro_def = db.macro_def(loc.def)?;
                 let (parse, exp_map) = db.parse_macro(macro_file)?;
-                let expanded = Source::new(self, parse.syntax_node());
                 let macro_arg = db.macro_arg(macro_file.macro_call_id)?;
 
-                let arg_start = (loc.ast_id.file_id, arg_start);
-                let def_start = (loc.def.ast_id.file_id, def_start);
-
                 Some(ExpansionInfo {
-                    expanded,
-                    arg_start,
-                    def_start,
+                    expanded: Source::new(self, parse.syntax_node()),
+                    arg: Source::new(loc.ast_id.file_id, arg_tt),
+                    def: Source::new(loc.ast_id.file_id, def_tt),
                     macro_arg,
                     macro_def,
                     exp_map,
@@ -159,8 +154,8 @@ impl MacroCallId {
 #[derive(Debug, Clone, PartialEq, Eq)]
 pub struct ExpansionInfo {
     expanded: Source<SyntaxNode>,
-    arg_start: (HirFileId, TextUnit),
-    def_start: (HirFileId, TextUnit),
+    arg: Source<ast::TokenTree>,
+    def: Source<ast::TokenTree>,
 
     macro_def: Arc<(db::TokenExpander, mbe::TokenMap)>,
     macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>,
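The two hunks above replace the stored `(HirFileId, TextUnit)` start offsets with the call-site and definition-site token trees wrapped in `Source`. Nothing is lost by the swap: a `Source`-wrapped tree still yields both the file id and the start offset on demand. A minimal sketch of that point, using hypothetical stand-in types rather than the real `HirFileId`, `ast::TokenTree` and `Source`:

// Hypothetical stand-ins, not the rust-analyzer definitions.
#[derive(Clone, Copy, Debug, PartialEq)]
struct FileId(u32);

#[derive(Clone, Debug)]
struct TokenTree {
    // Absolute byte range of the token tree within its file.
    range: std::ops::Range<u32>,
}

#[derive(Clone, Debug)]
struct Source<T> {
    file_id: FileId,
    ast: T,
}

impl Source<TokenTree> {
    // The old `arg_start`/`def_start` fields stored exactly this pair;
    // keeping the whole tree lets it be recomputed when needed.
    fn start(&self) -> (FileId, u32) {
        (self.file_id, self.ast.range.start)
    }
}

fn main() {
    let arg = Source { file_id: FileId(1), ast: TokenTree { range: 10..42 } };
    assert_eq!(arg.start(), (FileId(1), 10));
}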
@@ -169,8 +164,9 @@ pub struct ExpansionInfo {
 
 impl ExpansionInfo {
     pub fn map_token_down(&self, token: Source<&SyntaxToken>) -> Option<Source<SyntaxToken>> {
-        assert_eq!(token.file_id, self.arg_start.0);
-        let range = token.ast.text_range().checked_sub(self.arg_start.1)?;
+        assert_eq!(token.file_id, self.arg.file_id);
+        let range =
+            token.ast.text_range().checked_sub(self.arg.ast.syntax().text_range().start())?;
         let token_id = self.macro_arg.1.token_by_range(range)?;
         let token_id = self.macro_def.0.map_id_down(token_id);
 
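In the new `map_token_down` body, the token's absolute range is shifted into coordinates relative to the macro-call token tree (the `checked_sub` bails out if the token starts before the tree), and that relative range is what the argument's `mbe::TokenMap` resolves to a token id. A standalone sketch of just that arithmetic, with `u32` offsets and a toy map standing in for `TextRange`/`TextUnit` and `mbe::TokenMap`:

use std::ops::Range;

// Toy stand-in for mbe::TokenMap: relative source ranges -> token ids.
struct TokenMap {
    entries: Vec<(Range<u32>, u32)>,
}

impl TokenMap {
    fn token_by_range(&self, rel: Range<u32>) -> Option<u32> {
        self.entries.iter().find(|(r, _)| *r == rel).map(|(_, id)| *id)
    }
}

// Mirrors the subtraction in `map_token_down`: shift an absolute token range
// into coordinates relative to the token tree, or bail out if it lies before it.
fn relative_range(token: Range<u32>, tree_start: u32) -> Option<Range<u32>> {
    Some(token.start.checked_sub(tree_start)?..token.end.checked_sub(tree_start)?)
}

fn main() {
    let map = TokenMap { entries: vec![(0..3, 0), (4..7, 1)] };
    // A token at absolute 14..17 inside a token tree that starts at offset 10.
    let rel = relative_range(14..17, 10).unwrap();
    assert_eq!(map.token_by_range(rel), Some(1));
}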
@@ -181,25 +177,22 @@ impl ExpansionInfo {
         Some(self.expanded.with_ast(token))
     }
 
-    // FIXME: a more correct signature would be
-    // `pub fn map_token_up(&self, token: Source<&SyntaxToken>) -> Option<Source<SyntaxToken>>`
-    pub fn find_range(&self, from: TextRange) -> Option<(HirFileId, TextRange)> {
-        let token_id = look_in_rev_map(&self.exp_map, from)?;
+    pub fn map_token_up(&self, token: Source<&SyntaxToken>) -> Option<Source<SyntaxToken>> {
+        let token_id = self.exp_map.token_by_range(token.ast.text_range())?;
 
         let (token_id, origin) = self.macro_def.0.map_id_up(token_id);
-
-        let (token_map, (file_id, start_offset)) = match origin {
-            mbe::Origin::Call => (&self.macro_arg.1, self.arg_start),
-            mbe::Origin::Def => (&self.macro_def.1, self.def_start),
+        let (token_map, tt) = match origin {
+            mbe::Origin::Call => (&self.macro_arg.1, &self.arg),
+            mbe::Origin::Def => (&self.macro_def.1, &self.def),
         };
 
         let range = token_map.relative_range_of(token_id)?;
-
-        return Some((file_id, range + start_offset));
-
-        fn look_in_rev_map(exp_map: &mbe::RevTokenMap, from: TextRange) -> Option<tt::TokenId> {
-            exp_map.ranges.iter().find(|&it| it.0.is_subrange(&from)).map(|it| it.1)
-        }
+        let token = algo::find_covering_element(
+            tt.ast.syntax(),
+            range + tt.ast.syntax().text_range().start(),
+        )
+        .into_token()?;
+        Some(tt.with_ast(token))
     }
 }
 
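`map_token_up` runs the same pipeline in reverse: the token's range in the expansion resolves to a `tt::TokenId` through the reverse map, `map_id_up` reports whether that id originated in the macro call or in the macro definition, and the matching token tree plus its `TokenMap` turn the id back into an absolute range in the right file. A compressed, self-contained model of that dispatch; the types and the id scheme inside `map_id_up` are invented for the sketch:

use std::ops::Range;

#[derive(Clone, Copy, Debug, PartialEq)]
enum Origin {
    Call,
    Def,
}

// Relative range of each token inside one token tree, keyed by token id.
struct TokenMap {
    ranges: Vec<(u32, Range<u32>)>,
}

impl TokenMap {
    fn relative_range_of(&self, id: u32) -> Option<Range<u32>> {
        self.ranges.iter().find(|(i, _)| *i == id).map(|(_, r)| r.clone())
    }
}

// One token tree plus its absolute position in its file.
struct Tree {
    file: &'static str,
    start: u32,
    map: TokenMap,
}

// Stand-in for `map_id_up`: which side of the macro a token id came from.
// The "ids below 100 are call-site ids" rule is made up for the example.
fn map_id_up(id: u32) -> (u32, Origin) {
    if id < 100 { (id, Origin::Call) } else { (id - 100, Origin::Def) }
}

// Shape of `map_token_up`: pick the tree by origin, then shift the token's
// relative range back into absolute coordinates in that tree's file.
fn map_token_up(arg: &Tree, def: &Tree, id: u32) -> Option<(&'static str, Range<u32>)> {
    let (id, origin) = map_id_up(id);
    let tree = match origin {
        Origin::Call => arg,
        Origin::Def => def,
    };
    let rel = tree.map.relative_range_of(id)?;
    Some((tree.file, rel.start + tree.start..rel.end + tree.start))
}

fn main() {
    let arg = Tree { file: "main.rs", start: 10, map: TokenMap { ranges: vec![(1, 0..3)] } };
    let def = Tree { file: "macros.rs", start: 50, map: TokenMap { ranges: vec![(1, 4..9)] } };
    assert_eq!(map_token_up(&arg, &def, 1), Some(("main.rs", 10..13)));
    assert_eq!(map_token_up(&arg, &def, 101), Some(("macros.rs", 54..59)));
}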
@@ -8,15 +8,32 @@ use ra_syntax::{ast, AstNode, SyntaxNode, SyntaxToken};
 use crate::{db::RootDatabase, FileRange};
 
 pub(crate) fn original_range(db: &RootDatabase, node: Source<&SyntaxNode>) -> FileRange {
-    let text_range = node.ast.text_range();
-    let (file_id, range) = node
-        .file_id
-        .expansion_info(db)
-        .and_then(|expansion_info| expansion_info.find_range(text_range))
-        .unwrap_or((node.file_id, text_range));
+    let expansion = match node.file_id.expansion_info(db) {
+        None => {
+            return FileRange {
+                file_id: node.file_id.original_file(db),
+                range: node.ast.text_range(),
+            }
+        }
+        Some(it) => it,
+    };
+    // FIXME: the following completely wrong.
+    //
+    // *First*, we should try to map first and last tokens of node, and, if that
+    // fails, return the range of the overall macro expansions.
+    //
+    // *Second*, we should handle recurside macro expansions
 
-    // FIXME: handle recursive macro generated macro
-    FileRange { file_id: file_id.original_file(db), range }
+    let token = node
+        .ast
+        .descendants_with_tokens()
+        .filter_map(|it| it.into_token())
+        .find_map(|it| expansion.map_token_up(node.with_ast(&it)));
+
+    match token {
+        Some(it) => FileRange { file_id: it.file_id.original_file(db), range: it.ast.text_range() },
+        None => FileRange { file_id: node.file_id.original_file(db), range: node.ast.text_range() },
+    }
 }
 
 pub(crate) fn descend_into_macros(
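The rewritten `original_range` no longer goes through `find_range`: it tries to map each token of the node up through the expansion and, when none maps, falls back to the node's own range (the FIXME notes that this fallback is still wrong and that recursive expansions are not handled yet). A minimal sketch of that `find_map`-with-fallback shape, with made-up file names and a fake per-token mapping:

use std::ops::Range;

#[derive(Debug, PartialEq)]
struct Loc {
    file: &'static str,
    range: Range<u32>,
}

// Fake per-token up-mapping: succeeds only for tokens the expansion info
// "knows" about (here: even start offsets, purely for illustration).
fn map_token_up(token_range: &Range<u32>) -> Option<Loc> {
    if token_range.start % 2 == 0 {
        Some(Loc { file: "original.rs", range: token_range.clone() })
    } else {
        None
    }
}

// Shape of `original_range`: take the first token that maps up,
// otherwise fall back to the node's own range.
fn original_range(node_tokens: &[Range<u32>], node_range: Range<u32>) -> Loc {
    node_tokens
        .iter()
        .find_map(map_token_up)
        .unwrap_or(Loc { file: "expansion.rs", range: node_range })
}

fn main() {
    assert_eq!(
        original_range(&[1..2, 4..6, 7..9], 1..9),
        Loc { file: "original.rs", range: 4..6 }
    );
    assert_eq!(
        original_range(&[3..5], 3..5),
        Loc { file: "expansion.rs", range: 3..5 }
    );
}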
@@ -20,7 +20,7 @@ pub struct TokenMap {
 /// Maps relative range of the expanded syntax node to `tt::TokenId`
 #[derive(Debug, PartialEq, Eq, Default)]
 pub struct RevTokenMap {
-    pub ranges: Vec<(TextRange, tt::TokenId)>,
+    ranges: Vec<(TextRange, tt::TokenId)>,
 }
 
 /// Convert the syntax tree (what user has written) to a `TokenTree` (what macro
@@ -96,6 +96,10 @@ impl TokenMap {
 }
 
 impl RevTokenMap {
+    pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> {
+        self.ranges.iter().find(|&it| it.0 == relative_range).map(|it| it.1)
+    }
+
     pub fn range_by_token(&self, token_id: tt::TokenId) -> Option<TextRange> {
         let &(r, _) = self.ranges.iter().find(|(_, tid)| *tid == token_id)?;
         Some(r)
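`RevTokenMap` pairs each relative range in the expanded tree with the `tt::TokenId` it was produced from; the newly added `token_by_range` is the exact-range lookup that `map_token_up` uses, and `range_by_token` is its inverse. A self-contained approximation with `u32` ids and `Range<u32>` in place of `tt::TokenId` and `TextRange`:

use std::ops::Range;

// Simplified reverse map: relative ranges in the expanded tree -> token ids.
#[derive(Debug, Default, PartialEq)]
struct RevTokenMap {
    ranges: Vec<(Range<u32>, u32)>,
}

impl RevTokenMap {
    // Exact-range lookup, as used by `map_token_up`.
    fn token_by_range(&self, relative_range: Range<u32>) -> Option<u32> {
        self.ranges.iter().find(|(r, _)| *r == relative_range).map(|(_, id)| *id)
    }

    // Inverse lookup: where in the expansion a given token id landed.
    fn range_by_token(&self, token_id: u32) -> Option<Range<u32>> {
        self.ranges.iter().find(|(_, id)| *id == token_id).map(|(r, _)| r.clone())
    }
}

fn main() {
    let map = RevTokenMap { ranges: vec![(0..2, 7), (3..8, 9)] };
    assert_eq!(map.token_by_range(3..8), Some(9));
    assert_eq!(map.range_by_token(7), Some(0..2));
    assert_eq!(map.token_by_range(0..1), None); // only exact ranges match
}

Both lookups are linear scans over the stored `(range, id)` pairs, matching the `Vec`-based storage and `iter().find(..)` pattern in the hunk above.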