Aleksey Kladov 2019-02-11 21:31:54 +03:00
parent 0d34a256de
commit ae312680d6
2 changed files with 15 additions and 1 deletion


@@ -3,12 +3,15 @@ use ra_syntax::{
     ast, SyntaxKind::*, TextUnit
 };
 
+/// Maps `tt::TokenId` to the relative range of the original token.
 #[derive(Default)]
 pub struct TokenMap {
     /// Maps `tt::TokenId` to the *relative* source range.
     toknes: Vec<TextRange>,
 }
 
+/// Convert the syntax tree (what user has written) to a `TokenTree` (what macro
+/// will consume).
 pub fn ast_to_token_tree(ast: &ast::TokenTree) -> Option<(tt::Subtree, TokenMap)> {
     let mut token_map = TokenMap::default();
     let node = ast.syntax();
@@ -17,6 +20,11 @@ pub fn ast_to_token_tree(ast: &ast::TokenTree) -> Option<(tt::Subtree, TokenMap)
 }
 
 impl TokenMap {
+    pub fn relative_range_of(&self, tt: tt::TokenId) -> Option<TextRange> {
+        let idx = tt.0 as usize;
+        self.toknes.get(idx).map(|&it| it)
+    }
+
     fn alloc(&mut self, relative_range: TextRange) -> tt::TokenId {
         let id = self.toknes.len();
         self.toknes.push(relative_range);
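The two hunks above give `TokenMap` a public inverse of `alloc`: `alloc` records a token's relative source range and hands out a fresh `tt::TokenId`, and `relative_range_of` maps that id back to the recorded range. Below is a minimal, self-contained sketch of that bookkeeping; the id and range types are simplified stand-ins (a plain `(u32, u32)` offset pair), not the real `tt::TokenId` and `ra_syntax::TextRange`.

```rust
// Self-contained sketch of the TokenMap bookkeeping shown in the hunks above.
// `TokenId` and `TextRange` here are simplified stand-ins for the real types.

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
struct TokenId(u32);

/// Relative (start, end) byte offsets inside the macro invocation.
type TextRange = (u32, u32);

#[derive(Default)]
struct TokenMap {
    tokens: Vec<TextRange>,
}

impl TokenMap {
    /// Record a source range and hand out a fresh identity for it.
    fn alloc(&mut self, relative_range: TextRange) -> TokenId {
        let id = self.tokens.len();
        self.tokens.push(relative_range);
        TokenId(id as u32)
    }

    /// Map an identity back to the range it was allocated for.
    fn relative_range_of(&self, tt: TokenId) -> Option<TextRange> {
        self.tokens.get(tt.0 as usize).copied()
    }
}

fn main() {
    let mut map = TokenMap::default();
    let id = map.alloc((0, 3)); // e.g. the token `foo` at relative offsets 0..3
    assert_eq!(map.relative_range_of(id), Some((0, 3)));
    assert_eq!(map.relative_range_of(TokenId(42)), None);
}
```

The index-as-identity scheme works because ids are handed out in allocation order, so an id doubles as an index into the backing `Vec`.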


@@ -1,4 +1,4 @@
-/// `tt` crate defines a `TokenTree` datastructure: this is the interface (both
+/// `tt` crate defines a `TokenTree` data structure: this is the interface (both
 /// input and output) of macros. It closely mirrors `proc_macro` crate's
 /// `TokenTree`.
@@ -18,6 +18,12 @@ use std::fmt;
 use smol_str::SmolStr;
 
+/// Represents identity of the token.
+///
+/// For hygiene purposes, we need to track which expanded tokens originated from
+/// which source tokens. We do it by assigning a distinct identity to each
+/// source token and making sure that identities are preserved during macro
+/// expansion.
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub struct TokenId(pub u32);
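The doc comment added here states the purpose of `TokenId`: each source token gets a distinct identity, and expansion preserves those identities, so expanded tokens can be traced back to their source ranges via the `TokenMap` from the first file. The toy model below illustrates that invariant only; the `Token` type and `expand` function are hypothetical stand-ins, not the actual `tt`/`ra_mbe` machinery.

```rust
use std::collections::HashMap;

// Toy model: identities survive "expansion" unchanged, so every expanded
// token can be traced back to a source token. Not the real expansion code.

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
struct TokenId(u32);

#[derive(Debug, Clone)]
struct Token {
    id: TokenId,
    text: String,
}

/// A pretend "macro expansion": it rearranges and duplicates tokens,
/// but never touches their identities.
fn expand(input: &[Token]) -> Vec<Token> {
    let mut output = input.to_vec();
    output.extend(input.iter().cloned()); // duplicate the body; ids are preserved
    output
}

fn main() {
    let source = vec![
        Token { id: TokenId(0), text: "foo".into() },
        Token { id: TokenId(1), text: "bar".into() },
    ];

    // Side table from identity back to the source token text (a stand-in for
    // the relative ranges a TokenMap would record).
    let origin: HashMap<TokenId, String> =
        source.iter().map(|t| (t.id, t.text.clone())).collect();

    for tok in expand(&source) {
        // Every expanded token can still be traced back to a source token.
        assert!(origin.contains_key(&tok.id));
    }
}
```

Deriving `Copy`, `Eq` and `Hash` is what lets the identity be carried by value through expansion and then used as a lookup key afterwards.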