2545: Add Token id to all tt::TokenTree r=matklad a=edwin0cheng

This PR tries to add a token id to every `tt::Leaf` and `tt::Delimiter`.

~~Some tests are failing now because of #2544~~ 

~~Still blocked by a goto-definition test: see https://github.com/rust-analyzer/rust-analyzer/pull/2544#issuecomment-565572553~~

Co-authored-by: Edwin Cheng <edwin0cheng@gmail.com>
This commit is contained in:
bors[bot] 2019-12-18 11:47:50 +00:00 committed by GitHub
commit 242f0ae1d8
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
10 changed files with 272 additions and 81 deletions

View File

@ -97,11 +97,24 @@ fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, mbe::ExpandError> {
fn make_type_args(n: usize, bound: Vec<tt::TokenTree>) -> Vec<tt::TokenTree> {
let mut result = Vec::<tt::TokenTree>::new();
result.push(tt::Leaf::Punct(tt::Punct { char: '<', spacing: tt::Spacing::Alone }).into());
result.push(
tt::Leaf::Punct(tt::Punct {
char: '<',
spacing: tt::Spacing::Alone,
id: tt::TokenId::unspecified(),
})
.into(),
);
for i in 0..n {
if i > 0 {
result
.push(tt::Leaf::Punct(tt::Punct { char: ',', spacing: tt::Spacing::Alone }).into());
result.push(
tt::Leaf::Punct(tt::Punct {
char: ',',
spacing: tt::Spacing::Alone,
id: tt::TokenId::unspecified(),
})
.into(),
);
}
result.push(
tt::Leaf::Ident(tt::Ident {
@ -112,7 +125,14 @@ fn make_type_args(n: usize, bound: Vec<tt::TokenTree>) -> Vec<tt::TokenTree> {
);
result.extend(bound.iter().cloned());
}
result.push(tt::Leaf::Punct(tt::Punct { char: '>', spacing: tt::Spacing::Alone }).into());
result.push(
tt::Leaf::Punct(tt::Punct {
char: '>',
spacing: tt::Spacing::Alone,
id: tt::TokenId::unspecified(),
})
.into(),
);
result
}

View File

@ -227,7 +227,7 @@ impl ExpansionInfo {
let token_id = self.macro_arg.1.token_by_range(range)?;
let token_id = self.macro_def.0.map_id_down(token_id);
let range = self.exp_map.range_by_token(token_id)?;
let range = self.exp_map.range_by_token(token_id)?.by_kind(token.value.kind())?;
let token = algo::find_covering_element(&self.expanded.value, range).into_token()?;
@ -248,7 +248,7 @@ impl ExpansionInfo {
}
};
let range = token_map.range_by_token(token_id)?;
let range = token_map.range_by_token(token_id)?.by_kind(token.value.kind())?;
let token = algo::find_covering_element(&tt.value, range + tt.value.text_range().start())
.into_token()?;
Some((tt.with_value(token), origin))

View File

@ -16,7 +16,10 @@ macro_rules! __quote {
{
let children = $crate::__quote!($($tt)*);
let subtree = tt::Subtree {
delimiter: Some(tt::Delimiter::$delim),
delimiter: Some(tt::Delimiter {
kind: tt::DelimiterKind::$delim,
id: tt::TokenId::unspecified(),
}),
token_trees: $crate::quote::IntoTt::to_tokens(children),
};
subtree
@ -29,6 +32,7 @@ macro_rules! __quote {
tt::Leaf::Punct(tt::Punct {
char: $first,
spacing: tt::Spacing::Alone,
id: tt::TokenId::unspecified(),
}).into()
]
}
@ -40,10 +44,12 @@ macro_rules! __quote {
tt::Leaf::Punct(tt::Punct {
char: $first,
spacing: tt::Spacing::Joint,
id: tt::TokenId::unspecified(),
}).into(),
tt::Leaf::Punct(tt::Punct {
char: $sec,
spacing: tt::Spacing::Alone,
id: tt::TokenId::unspecified(),
}).into()
]
}
@ -179,15 +185,15 @@ macro_rules! impl_to_to_tokentrees {
}
impl_to_to_tokentrees! {
u32 => self { tt::Literal{text: self.to_string().into()} };
usize => self { tt::Literal{text: self.to_string().into()}};
i32 => self { tt::Literal{text: self.to_string().into()}};
u32 => self { tt::Literal{text: self.to_string().into(), id: tt::TokenId::unspecified()} };
usize => self { tt::Literal{text: self.to_string().into(), id: tt::TokenId::unspecified()}};
i32 => self { tt::Literal{text: self.to_string().into(), id: tt::TokenId::unspecified()}};
tt::Leaf => self { self };
tt::Literal => self { self };
tt::Ident => self { self };
tt::Punct => self { self };
&str => self { tt::Literal{text: format!("{:?}", self.escape_default().to_string()).into()}};
String => self { tt::Literal{text: format!("{:?}", self.escape_default().to_string()).into()}}
&str => self { tt::Literal{text: format!("{:?}", self.escape_default().to_string()).into(), id: tt::TokenId::unspecified()}};
String => self { tt::Literal{text: format!("{:?}", self.escape_default().to_string()).into(), id: tt::TokenId::unspecified()}}
}
#[cfg(test)]
@ -254,8 +260,13 @@ mod tests {
let fields =
fields.iter().map(|it| quote!(#it: self.#it.clone(), ).token_trees.clone()).flatten();
let list =
tt::Subtree { delimiter: Some(tt::Delimiter::Brace), token_trees: fields.collect() };
let list = tt::Subtree {
delimiter: Some(tt::Delimiter {
kind: tt::DelimiterKind::Brace,
id: tt::TokenId::unspecified(),
}),
token_trees: fields.collect(),
};
let quoted = quote! {
impl Clone for #struct_name {

View File

@ -67,7 +67,15 @@ impl Shift {
.token_trees
.iter()
.filter_map(|tt| match tt {
tt::TokenTree::Subtree(subtree) => max_id(subtree),
tt::TokenTree::Subtree(subtree) => {
let tree_id = max_id(subtree);
match subtree.delimiter {
Some(it) if it.id != tt::TokenId::unspecified() => {
Some(tree_id.map_or(it.id.0, |t| t.max(it.id.0)))
}
_ => tree_id,
}
}
tt::TokenTree::Leaf(tt::Leaf::Ident(ident))
if ident.id != tt::TokenId::unspecified() =>
{
@ -85,9 +93,13 @@ impl Shift {
match t {
tt::TokenTree::Leaf(leaf) => match leaf {
tt::Leaf::Ident(ident) => ident.id = self.shift(ident.id),
_ => (),
tt::Leaf::Punct(punct) => punct.id = self.shift(punct.id),
tt::Leaf::Literal(lit) => lit.id = self.shift(lit.id),
},
tt::TokenTree::Subtree(tt) => self.shift_all(tt),
tt::TokenTree::Subtree(tt) => {
tt.delimiter.as_mut().map(|it: &mut Delimiter| it.id = self.shift(it.id));
self.shift_all(tt)
}
}
}
}

View File

@ -106,7 +106,7 @@ fn match_subtree(
}
Op::TokenTree(tt::TokenTree::Subtree(lhs)) => {
let rhs = src.expect_subtree().map_err(|()| err!("expected subtree"))?;
if lhs.delimiter != rhs.delimiter {
if lhs.delimiter_kind() != rhs.delimiter_kind() {
bail!("mismatched delimiter")
}
let mut src = TtIter::new(rhs);

View File

@ -108,7 +108,12 @@ fn expand_var(ctx: &mut ExpandCtx, v: &SmolStr) -> Result<Fragment, ExpandError>
let tt = tt::Subtree {
delimiter: None,
token_trees: vec![
tt::Leaf::from(tt::Punct { char: '$', spacing: tt::Spacing::Alone }).into(),
tt::Leaf::from(tt::Punct {
char: '$',
spacing: tt::Spacing::Alone,
id: tt::TokenId::unspecified(),
})
.into(),
tt::Leaf::from(tt::Ident { text: v.clone(), id: tt::TokenId::unspecified() })
.into(),
],

View File

@ -70,11 +70,11 @@ impl<'a> SubtreeTokenSource<'a> {
}
Some(tt::TokenTree::Subtree(subtree)) => {
self.cached_cursor.set(cursor.subtree().unwrap());
cached.push(Some(convert_delim(subtree.delimiter, false)));
cached.push(Some(convert_delim(subtree.delimiter_kind(), false)));
}
None => {
if let Some(subtree) = cursor.end() {
cached.push(Some(convert_delim(subtree.delimiter, true)));
cached.push(Some(convert_delim(subtree.delimiter_kind(), true)));
self.cached_cursor.set(cursor.bump());
}
}
@ -114,11 +114,11 @@ impl<'a> TokenSource for SubtreeTokenSource<'a> {
}
}
fn convert_delim(d: Option<tt::Delimiter>, closing: bool) -> TtToken {
fn convert_delim(d: Option<tt::DelimiterKind>, closing: bool) -> TtToken {
let (kinds, texts) = match d {
Some(tt::Delimiter::Parenthesis) => ([T!['('], T![')']], "()"),
Some(tt::Delimiter::Brace) => ([T!['{'], T!['}']], "{}"),
Some(tt::Delimiter::Bracket) => ([T!['['], T![']']], "[]"),
Some(tt::DelimiterKind::Parenthesis) => ([T!['('], T![')']], "()"),
Some(tt::DelimiterKind::Brace) => ([T!['{'], T!['}']], "{}"),
Some(tt::DelimiterKind::Bracket) => ([T!['['], T![']']], "[]"),
None => ([L_DOLLAR, R_DOLLAR], ""),
};

View File

@ -5,17 +5,37 @@ use ra_syntax::{
ast, AstToken, NodeOrToken, Parse, SmolStr, SyntaxKind, SyntaxKind::*, SyntaxNode,
SyntaxTreeBuilder, TextRange, TextUnit, T,
};
use rustc_hash::FxHashMap;
use std::iter::successors;
use tt::buffer::{Cursor, TokenBuffer};
use crate::subtree_source::SubtreeTokenSource;
use crate::ExpandError;
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum TokenTextRange {
    Token(TextRange),
    Delimiter(TextRange, TextRange),
}
impl TokenTextRange {
    /// Projects this entry onto a concrete `TextRange` for the given token kind.
    ///
    /// A plain `Token` yields its stored range unconditionally. A `Delimiter`
    /// yields the open range for an opening bracket kind, the close range for
    /// a closing bracket kind, and `None` for any other kind.
    pub fn by_kind(self, kind: SyntaxKind) -> Option<TextRange> {
        match self {
            TokenTextRange::Token(range) => Some(range),
            TokenTextRange::Delimiter(open, _close)
                if matches!(kind, T!['{'] | T!['('] | T!['[']) =>
            {
                Some(open)
            }
            TokenTextRange::Delimiter(_open, close)
                if matches!(kind, T!['}'] | T![')'] | T![']']) =>
            {
                Some(close)
            }
            TokenTextRange::Delimiter(..) => None,
        }
    }
}
/// Maps `tt::TokenId` to the relative range of the original token.
#[derive(Debug, PartialEq, Eq, Default)]
pub struct TokenMap {
/// Maps `tt::TokenId` to the *relative* source range.
entries: Vec<(tt::TokenId, TextRange)>,
entries: Vec<(tt::TokenId, TokenTextRange)>,
}
/// Convert the syntax tree (what user has written) to a `TokenTree` (what macro
@ -71,17 +91,32 @@ pub fn token_tree_to_syntax_node(
impl TokenMap {
pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> {
let &(token_id, _) = self.entries.iter().find(|(_, range)| *range == relative_range)?;
let &(token_id, _) = self.entries.iter().find(|(_, range)| match range {
TokenTextRange::Token(it) => *it == relative_range,
TokenTextRange::Delimiter(open, close) => {
*open == relative_range || *close == relative_range
}
})?;
Some(token_id)
}
pub fn range_by_token(&self, token_id: tt::TokenId) -> Option<TextRange> {
pub fn range_by_token(&self, token_id: tt::TokenId) -> Option<TokenTextRange> {
let &(_, range) = self.entries.iter().find(|(tid, _)| *tid == token_id)?;
Some(range)
}
fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) {
self.entries.push((token_id, relative_range));
self.entries.push((token_id, TokenTextRange::Token(relative_range)));
}
fn insert_delim(
&mut self,
token_id: tt::TokenId,
open_relative_range: TextRange,
close_relative_range: TextRange,
) {
self.entries
.push((token_id, TokenTextRange::Delimiter(open_relative_range, close_relative_range)));
}
}
@ -121,7 +156,10 @@ fn convert_doc_comment(token: &ra_syntax::SyntaxToken) -> Option<Vec<tt::TokenTr
token_trees.push(mk_punct('!'));
}
token_trees.push(tt::TokenTree::from(tt::Subtree {
delimiter: Some(tt::Delimiter::Bracket),
delimiter: Some(tt::Delimiter {
kind: tt::DelimiterKind::Bracket,
id: tt::TokenId::unspecified(),
}),
token_trees: meta_tkns,
}));
@ -136,11 +174,15 @@ fn convert_doc_comment(token: &ra_syntax::SyntaxToken) -> Option<Vec<tt::TokenTr
}
fn mk_punct(c: char) -> tt::TokenTree {
tt::TokenTree::from(tt::Leaf::from(tt::Punct { char: c, spacing: tt::Spacing::Alone }))
tt::TokenTree::from(tt::Leaf::from(tt::Punct {
char: c,
spacing: tt::Spacing::Alone,
id: tt::TokenId::unspecified(),
}))
}
fn mk_doc_literal(comment: &ast::Comment) -> tt::TokenTree {
let lit = tt::Literal { text: doc_comment_text(comment) };
let lit = tt::Literal { text: doc_comment_text(comment), id: tt::TokenId::unspecified() };
tt::TokenTree::from(tt::Leaf::from(lit))
}
@ -186,12 +228,16 @@ impl Convertor {
.last()
.unwrap();
let (delimiter, skip_first) = match (first_child.kind(), last_child.kind()) {
(T!['('], T![')']) => (Some(tt::Delimiter::Parenthesis), true),
(T!['{'], T!['}']) => (Some(tt::Delimiter::Brace), true),
(T!['['], T![']']) => (Some(tt::Delimiter::Bracket), true),
let (delimiter_kind, skip_first) = match (first_child.kind(), last_child.kind()) {
(T!['('], T![')']) => (Some(tt::DelimiterKind::Parenthesis), true),
(T!['{'], T!['}']) => (Some(tt::DelimiterKind::Brace), true),
(T!['['], T![']']) => (Some(tt::DelimiterKind::Bracket), true),
_ => (None, false),
};
let delimiter = delimiter_kind.map(|kind| tt::Delimiter {
kind,
id: self.alloc_delim(first_child.text_range(), last_child.text_range()),
});
let mut token_trees = Vec::new();
let mut child_iter = tt.children_with_tokens().skip(skip_first as usize).peekable();
@ -223,25 +269,34 @@ impl Convertor {
.take(token.text().len() - 1)
.chain(std::iter::once(last_spacing));
for (char, spacing) in token.text().chars().zip(spacing_iter) {
token_trees.push(tt::Leaf::from(tt::Punct { char, spacing }).into());
token_trees.push(
tt::Leaf::from(tt::Punct {
char,
spacing,
id: self.alloc(token.text_range()),
})
.into(),
);
}
} else {
let child: tt::TokenTree =
if token.kind() == T![true] || token.kind() == T![false] {
tt::Leaf::from(tt::Literal { text: token.text().clone() }).into()
} else if token.kind().is_keyword()
|| token.kind() == IDENT
|| token.kind() == LIFETIME
{
let id = self.alloc(token.text_range());
let text = token.text().clone();
tt::Leaf::from(tt::Ident { text, id }).into()
} else if token.kind().is_literal() {
tt::Leaf::from(tt::Literal { text: token.text().clone() }).into()
} else {
return None;
macro_rules! make_leaf {
($i:ident) => {
tt::$i {
id: self.alloc(token.text_range()),
text: token.text().clone(),
}
.into()
};
token_trees.push(child);
}
let child: tt::Leaf = match token.kind() {
T![true] | T![false] => make_leaf!(Literal),
IDENT | LIFETIME => make_leaf!(Ident),
k if k.is_keyword() => make_leaf!(Ident),
k if k.is_literal() => make_leaf!(Literal),
_ => return None,
};
token_trees.push(child.into());
}
}
NodeOrToken::Node(node) => {
@ -266,11 +321,26 @@ impl Convertor {
self.map.insert(token_id, relative_range);
token_id
}
/// Allocates a fresh `TokenId` covering a delimiter pair and records the
/// *relative* (offset-adjusted) ranges of its open and close tokens in the map.
fn alloc_delim(
    &mut self,
    open_abs_range: TextRange,
    close_abs_range: TextRange,
) -> tt::TokenId {
    let token_id = tt::TokenId(self.next_id);
    self.next_id += 1;
    // Store source ranges relative to the start of the converted fragment.
    self.map.insert_delim(
        token_id,
        open_abs_range - self.global_offset,
        close_abs_range - self.global_offset,
    );
    token_id
}
}
struct TtTreeSink<'a> {
buf: String,
cursor: Cursor<'a>,
open_delims: FxHashMap<tt::TokenId, TextUnit>,
text_pos: TextUnit,
inner: SyntaxTreeBuilder,
token_map: TokenMap,
@ -285,6 +355,7 @@ impl<'a> TtTreeSink<'a> {
TtTreeSink {
buf: String::new(),
cursor,
open_delims: FxHashMap::default(),
text_pos: 0.into(),
inner: SyntaxTreeBuilder::default(),
roots: smallvec::SmallVec::new(),
@ -297,11 +368,11 @@ impl<'a> TtTreeSink<'a> {
}
}
fn delim_to_str(d: Option<tt::Delimiter>, closing: bool) -> SmolStr {
fn delim_to_str(d: Option<tt::DelimiterKind>, closing: bool) -> SmolStr {
let texts = match d {
Some(tt::Delimiter::Parenthesis) => "()",
Some(tt::Delimiter::Brace) => "{}",
Some(tt::Delimiter::Bracket) => "[]",
Some(tt::DelimiterKind::Parenthesis) => "()",
Some(tt::DelimiterKind::Brace) => "{}",
Some(tt::DelimiterKind::Bracket) => "[]",
None => return "".into(),
};
@ -322,34 +393,49 @@ impl<'a> TreeSink for TtTreeSink<'a> {
break;
}
match self.cursor.token_tree() {
let text: SmolStr = match self.cursor.token_tree() {
Some(tt::TokenTree::Leaf(leaf)) => {
// Mark the range if needed
if let tt::Leaf::Ident(ident) = leaf {
if kind == IDENT {
let range =
TextRange::offset_len(self.text_pos, TextUnit::of_str(&ident.text));
self.token_map.insert(ident.id, range);
}
}
let id = match leaf {
tt::Leaf::Ident(ident) => ident.id,
tt::Leaf::Punct(punct) => punct.id,
tt::Leaf::Literal(lit) => lit.id,
};
let text = SmolStr::new(format!("{}", leaf));
let range = TextRange::offset_len(self.text_pos, TextUnit::of_str(&text));
self.token_map.insert(id, range);
self.cursor = self.cursor.bump();
self.buf += &format!("{}", leaf);
text
}
Some(tt::TokenTree::Subtree(subtree)) => {
self.cursor = self.cursor.subtree().unwrap();
self.buf += &delim_to_str(subtree.delimiter, false);
if let Some(id) = subtree.delimiter.map(|it| it.id) {
self.open_delims.insert(id, self.text_pos);
}
delim_to_str(subtree.delimiter_kind(), false)
}
None => {
if let Some(parent) = self.cursor.end() {
self.cursor = self.cursor.bump();
self.buf += &delim_to_str(parent.delimiter, true);
if let Some(id) = parent.delimiter.map(|it| it.id) {
if let Some(open_delim) = self.open_delims.get(&id) {
let open_range =
TextRange::offset_len(*open_delim, TextUnit::from_usize(1));
let close_range =
TextRange::offset_len(self.text_pos, TextUnit::from_usize(1));
self.token_map.insert_delim(id, open_range, close_range);
}
}
delim_to_str(parent.delimiter_kind(), true)
} else {
continue;
}
}
};
self.buf += &text;
self.text_pos += TextUnit::of_str(&text);
}
self.text_pos += TextUnit::of_str(&self.buf);
let text = SmolStr::new(self.buf.as_str());
self.buf.clear();
self.inner.token(kind, text);
@ -495,7 +581,7 @@ mod tests {
let token_tree = ast::TokenTree::cast(token_tree).unwrap();
let tt = ast_to_token_tree(&token_tree).unwrap().0;
assert_eq!(tt.delimiter, Some(tt::Delimiter::Brace));
assert_eq!(tt.delimiter_kind(), Some(tt::DelimiterKind::Brace));
}
#[test]

View File

@ -77,13 +77,41 @@ macro_rules! foobar {
}
assert_eq!(expansion.token_trees.len(), 3);
// ($e:ident) => { foo bar $e }
// 0 1 2 3 4
assert_eq!(get_id(&expansion.token_trees[0]), Some(2));
assert_eq!(get_id(&expansion.token_trees[1]), Some(3));
// {($e:ident) => { foo bar $e }}
// 012345 67 8 9 T 12
assert_eq!(get_id(&expansion.token_trees[0]), Some(9));
assert_eq!(get_id(&expansion.token_trees[1]), Some(10));
// So baz should be 5
assert_eq!(get_id(&expansion.token_trees[2]), Some(5));
// The input args of macro call include parentheses:
// (baz)
// So baz should be 12+1+1
assert_eq!(get_id(&expansion.token_trees[2]), Some(14));
}
#[test]
fn test_token_map() {
use ra_parser::SyntaxKind::*;
use ra_syntax::T;
let macro_definition = r#"
macro_rules! foobar {
($e:ident) => { fn $e() {} }
}
"#;
let rules = create_rules(macro_definition);
let (expansion, (token_map, content)) = expand_and_map(&rules, "foobar!(baz);");
let get_text = |id, kind| -> String {
content[token_map.range_by_token(id).unwrap().by_kind(kind).unwrap()].to_string()
};
assert_eq!(expansion.token_trees.len(), 4);
// {($e:ident) => { fn $e() {} }}
// 012345 67 8 9 T12 3
assert_eq!(get_text(tt::TokenId(9), IDENT), "fn");
assert_eq!(get_text(tt::TokenId(12), T!['(']), "(");
assert_eq!(get_text(tt::TokenId(13), T!['{']), "{");
}
#[test]
@ -1441,6 +1469,23 @@ pub(crate) fn expand(rules: &MacroRules, invocation: &str) -> tt::Subtree {
rules.expand(&invocation_tt).unwrap()
}
/// Expands `invocation` with `rules` and additionally returns the expansion's
/// `TokenMap` together with the pretty-printed expanded text, so tests can map
/// token ids back to concrete source ranges.
pub(crate) fn expand_and_map(
    rules: &MacroRules,
    invocation: &str,
) -> (tt::Subtree, (TokenMap, String)) {
    let parsed = ast::SourceFile::parse(invocation).ok().unwrap();
    let call = parsed.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
    let (call_tt, _) = ast_to_token_tree(&call.token_tree().unwrap()).unwrap();
    let expanded = rules.expand(&call_tt).unwrap();
    let (parse, token_map) = token_tree_to_syntax_node(&expanded, FragmentKind::Items).unwrap();
    (expanded, (token_map, parse.syntax_node().to_string()))
}
pub(crate) enum MacroKind {
Items,
Stmts,

View File

@ -55,7 +55,13 @@ pub struct Subtree {
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum Delimiter {
pub struct Delimiter {
pub id: TokenId,
pub kind: DelimiterKind,
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum DelimiterKind {
Parenthesis,
Brace,
Bracket,
@ -64,12 +70,14 @@ pub enum Delimiter {
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Literal {
pub text: SmolStr,
pub id: TokenId,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Punct {
pub char: char,
pub spacing: Spacing,
pub id: TokenId,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@ -95,10 +103,10 @@ impl fmt::Display for TokenTree {
impl fmt::Display for Subtree {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let (l, r) = match self.delimiter {
Some(Delimiter::Parenthesis) => ("(", ")"),
Some(Delimiter::Brace) => ("{", "}"),
Some(Delimiter::Bracket) => ("[", "]"),
let (l, r) = match self.delimiter_kind() {
Some(DelimiterKind::Parenthesis) => ("(", ")"),
Some(DelimiterKind::Brace) => ("{", "}"),
Some(DelimiterKind::Bracket) => ("[", "]"),
None => ("", ""),
};
f.write_str(l)?;
@ -163,6 +171,10 @@ impl Subtree {
self.token_trees.len() + children_count
}
/// Returns just the delimiter's kind (parenthesis/brace/bracket), dropping
/// its token id — convenient for comparisons that must ignore ids.
pub fn delimiter_kind(&self) -> Option<DelimiterKind> {
self.delimiter.map(|it| it.kind)
}
}
pub mod buffer;