7159: Refactor mbe to reduce clone and copying r=edwin0cheng a=edwin0cheng

bors r+

Co-authored-by: Edwin Cheng <edwin0cheng@gmail.com>
bors[bot] 2021-01-04 18:02:54 +00:00 committed by GitHub
commit 0708bfeb72
5 changed files with 172 additions and 152 deletions
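In short: `TokenBuffer::new` becomes `TokenBuffer::from_tokens` / `TokenBuffer::from_subtree`, and `Cursor::token_tree()` now hands out a borrowed `tt::buffer::TokenTreeRef` instead of `&TokenTree`, with `TokenTreeRef::cloned()` reserved for the cases that still need an owned tree. A minimal caller-side sketch of the new API, assuming the in-tree `tt` crate as changed below (the `collect_puncts` helper is illustrative, not part of this PR):

```rust
use tt::buffer::{TokenBuffer, TokenTreeRef};

/// Walk every token of a subtree and collect punctuation characters,
/// without cloning any token trees along the way.
fn collect_puncts(subtree: &tt::Subtree) -> Vec<char> {
    // `from_subtree` borrows the subtree directly; the old `TokenBuffer::new`
    // forced callers such as `token_tree_to_syntax_node` to build a temporary
    // `[tt.clone().into()]` slice first.
    let buffer = TokenBuffer::from_subtree(subtree);
    let mut cursor = buffer.begin();
    let mut puncts = Vec::new();
    while !cursor.eof() {
        // `token_tree()` now returns a borrowed `TokenTreeRef`, so matching
        // on it copies nothing.
        if let Some(TokenTreeRef::Leaf(tt::Leaf::Punct(punct), _)) = cursor.token_tree() {
            puncts.push(punct.char);
        }
        // `bump_subtree` descends into subtrees and otherwise advances.
        cursor = cursor.bump_subtree();
    }
    puncts
}
```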

View File

@@ -58,7 +58,7 @@ impl ProcMacroExpander {
}
fn eat_punct(cursor: &mut Cursor, c: char) -> bool {
if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = cursor.token_tree() {
if let Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Punct(punct), _)) = cursor.token_tree() {
if punct.char == c {
*cursor = cursor.bump();
return true;
@@ -68,7 +68,7 @@ fn eat_punct(cursor: &mut Cursor, c: char) -> bool {
}
fn eat_subtree(cursor: &mut Cursor, kind: tt::DelimiterKind) -> bool {
if let Some(tt::TokenTree::Subtree(subtree)) = cursor.token_tree() {
if let Some(tt::buffer::TokenTreeRef::Subtree(subtree, _)) = cursor.token_tree() {
if Some(kind) == subtree.delimiter_kind() {
*cursor = cursor.bump_subtree();
return true;
@@ -78,7 +78,7 @@ fn eat_subtree(cursor: &mut Cursor, kind: tt::DelimiterKind) -> bool {
}
fn eat_ident(cursor: &mut Cursor, t: &str) -> bool {
if let Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) = cursor.token_tree() {
if let Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Ident(ident), _)) = cursor.token_tree() {
if t == ident.text.as_str() {
*cursor = cursor.bump();
return true;
@@ -88,7 +88,7 @@ fn eat_ident(cursor: &mut Cursor, t: &str) -> bool {
}
fn remove_derive_attrs(tt: &tt::Subtree) -> Option<tt::Subtree> {
let buffer = TokenBuffer::new(&tt.token_trees);
let buffer = TokenBuffer::from_tokens(&tt.token_trees);
let mut p = buffer.begin();
let mut result = tt::Subtree::default();
@@ -106,7 +106,7 @@ fn remove_derive_attrs(tt: &tt::Subtree) -> Option<tt::Subtree> {
}
}
result.token_trees.push(curr.token_tree()?.clone());
result.token_trees.push(curr.token_tree()?.cloned());
p = curr.bump();
}

View File

@@ -309,7 +309,7 @@ impl<'a> TtIter<'a> {
}
}
let buffer = TokenBuffer::new(&self.inner.as_slice());
let buffer = TokenBuffer::from_tokens(&self.inner.as_slice());
let mut src = SubtreeTokenSource::new(&buffer);
let mut sink = OffsetTokenSink { cursor: buffer.begin(), error: false };
@@ -336,11 +336,11 @@ impl<'a> TtIter<'a> {
err = Some(err!("no tokens consumed"));
}
let res = match res.len() {
1 => Some(res[0].clone()),
1 => Some(res[0].cloned()),
0 => None,
_ => Some(tt::TokenTree::Subtree(tt::Subtree {
delimiter: None,
token_trees: res.into_iter().cloned().collect(),
token_trees: res.into_iter().map(|it| it.cloned()).collect(),
})),
};
ExpandResult { value: res, err }

View File

@@ -1,129 +1,104 @@
//! FIXME: write short doc here
use parser::{Token, TokenSource};
use std::cell::{Cell, Ref, RefCell};
use syntax::{lex_single_syntax_kind, SmolStr, SyntaxKind, SyntaxKind::*, T};
use tt::buffer::{Cursor, TokenBuffer};
use tt::buffer::TokenBuffer;
#[derive(Debug, Clone, Eq, PartialEq)]
struct TtToken {
kind: SyntaxKind,
is_joint_to_next: bool,
tt: Token,
text: SmolStr,
}
pub(crate) struct SubtreeTokenSource<'a> {
cached_cursor: Cell<Cursor<'a>>,
cached: RefCell<Vec<Option<TtToken>>>,
pub(crate) struct SubtreeTokenSource {
cached: Vec<TtToken>,
curr: (Token, usize),
}
impl<'a> SubtreeTokenSource<'a> {
impl<'a> SubtreeTokenSource {
// Helper function used in test
#[cfg(test)]
pub(crate) fn text(&self) -> SmolStr {
match *self.get(self.curr.1) {
match self.cached.get(self.curr.1) {
Some(ref tt) => tt.text.clone(),
_ => SmolStr::new(""),
}
}
}
impl<'a> SubtreeTokenSource<'a> {
pub(crate) fn new(buffer: &'a TokenBuffer) -> SubtreeTokenSource<'a> {
let cursor = buffer.begin();
impl<'a> SubtreeTokenSource {
pub(crate) fn new(buffer: &TokenBuffer) -> SubtreeTokenSource {
let mut current = buffer.begin();
let mut cached = Vec::with_capacity(100);
let mut res = SubtreeTokenSource {
curr: (Token { kind: EOF, is_jointed_to_next: false }, 0),
cached_cursor: Cell::new(cursor),
cached: RefCell::new(Vec::with_capacity(10)),
};
res.curr = (res.mk_token(0), 0);
res
}
while !current.eof() {
let cursor = current;
let tt = cursor.token_tree();
fn mk_token(&self, pos: usize) -> Token {
match *self.get(pos) {
Some(ref tt) => Token { kind: tt.kind, is_jointed_to_next: tt.is_joint_to_next },
None => Token { kind: EOF, is_jointed_to_next: false },
}
}
fn get(&self, pos: usize) -> Ref<Option<TtToken>> {
fn is_lifetime(c: Cursor) -> Option<(Cursor, SmolStr)> {
let tkn = c.token_tree();
if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = tkn {
// Check if it is lifetime
if let Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Punct(punct), _)) = tt {
if punct.char == '\'' {
let next = c.bump();
if let Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) = next.token_tree() {
let res_cursor = next.bump();
let text = SmolStr::new("'".to_string() + &ident.to_string());
return Some((res_cursor, text));
let next = cursor.bump();
if let Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Ident(ident), _)) =
next.token_tree()
{
let text = SmolStr::new("'".to_string() + &ident.text);
cached.push(TtToken {
tt: Token { kind: LIFETIME_IDENT, is_jointed_to_next: false },
text,
});
current = next.bump();
continue;
} else {
panic!("Next token must be ident : {:#?}", next.token_tree());
}
}
}
None
}
if pos < self.cached.borrow().len() {
return Ref::map(self.cached.borrow(), |c| &c[pos]);
}
{
let mut cached = self.cached.borrow_mut();
while pos >= cached.len() {
let cursor = self.cached_cursor.get();
if cursor.eof() {
cached.push(None);
continue;
current = match tt {
Some(tt::buffer::TokenTreeRef::Leaf(leaf, _)) => {
cached.push(convert_leaf(&leaf));
cursor.bump()
}
if let Some((curr, text)) = is_lifetime(cursor) {
cached.push(Some(TtToken {
kind: LIFETIME_IDENT,
is_joint_to_next: false,
text,
}));
self.cached_cursor.set(curr);
continue;
Some(tt::buffer::TokenTreeRef::Subtree(subtree, _)) => {
cached.push(convert_delim(subtree.delimiter_kind(), false));
cursor.subtree().unwrap()
}
match cursor.token_tree() {
Some(tt::TokenTree::Leaf(leaf)) => {
cached.push(Some(convert_leaf(&leaf)));
self.cached_cursor.set(cursor.bump());
}
Some(tt::TokenTree::Subtree(subtree)) => {
self.cached_cursor.set(cursor.subtree().unwrap());
cached.push(Some(convert_delim(subtree.delimiter_kind(), false)));
}
None => {
if let Some(subtree) = cursor.end() {
cached.push(Some(convert_delim(subtree.delimiter_kind(), true)));
self.cached_cursor.set(cursor.bump());
}
None => {
if let Some(subtree) = cursor.end() {
cached.push(convert_delim(subtree.delimiter_kind(), true));
cursor.bump()
} else {
continue;
}
}
}
};
}
Ref::map(self.cached.borrow(), |c| &c[pos])
let mut res = SubtreeTokenSource {
curr: (Token { kind: EOF, is_jointed_to_next: false }, 0),
cached,
};
res.curr = (res.token(0), 0);
res
}
fn token(&self, pos: usize) -> Token {
match self.cached.get(pos) {
Some(it) => it.tt,
None => Token { kind: EOF, is_jointed_to_next: false },
}
}
}
impl<'a> TokenSource for SubtreeTokenSource<'a> {
impl<'a> TokenSource for SubtreeTokenSource {
fn current(&self) -> Token {
self.curr.0
}
/// Lookahead n token
fn lookahead_nth(&self, n: usize) -> Token {
self.mk_token(self.curr.1 + n)
self.token(self.curr.1 + n)
}
/// bump cursor to next token
@@ -131,13 +106,12 @@ impl<'a> TokenSource for SubtreeTokenSource<'a> {
if self.current().kind == EOF {
return;
}
self.curr = (self.mk_token(self.curr.1 + 1), self.curr.1 + 1);
self.curr = (self.token(self.curr.1 + 1), self.curr.1 + 1);
}
/// Is the current token a specified keyword?
fn is_keyword(&self, kw: &str) -> bool {
match *self.get(self.curr.1) {
match self.cached.get(self.curr.1) {
Some(ref t) => t.text == *kw,
_ => false,
}
@@ -155,7 +129,7 @@ fn convert_delim(d: Option<tt::DelimiterKind>, closing: bool) -> TtToken {
let idx = closing as usize;
let kind = kinds[idx];
let text = if !texts.is_empty() { &texts[idx..texts.len() - (1 - idx)] } else { "" };
TtToken { kind, is_joint_to_next: false, text: SmolStr::new(text) }
TtToken { tt: Token { kind, is_jointed_to_next: false }, text: SmolStr::new(text) }
}
fn convert_literal(l: &tt::Literal) -> TtToken {
@@ -169,7 +143,7 @@ fn convert_literal(l: &tt::Literal) -> TtToken {
})
.unwrap_or_else(|| panic!("Fail to convert given literal {:#?}", &l));
TtToken { kind, is_joint_to_next: false, text: l.text.clone() }
TtToken { tt: Token { kind, is_jointed_to_next: false }, text: l.text.clone() }
}
fn convert_ident(ident: &tt::Ident) -> TtToken {
@@ -180,7 +154,7 @@ fn convert_ident(ident: &tt::Ident) -> TtToken {
_ => SyntaxKind::from_keyword(ident.text.as_str()).unwrap_or(IDENT),
};
TtToken { kind, is_joint_to_next: false, text: ident.text.clone() }
TtToken { tt: Token { kind, is_jointed_to_next: false }, text: ident.text.clone() }
}
fn convert_punct(p: tt::Punct) -> TtToken {
@@ -194,7 +168,7 @@ fn convert_punct(p: tt::Punct) -> TtToken {
let s: &str = p.char.encode_utf8(&mut buf);
SmolStr::new(s)
};
TtToken { kind, is_joint_to_next: p.spacing == tt::Spacing::Joint, text }
TtToken { tt: Token { kind, is_jointed_to_next: p.spacing == tt::Spacing::Joint }, text }
}
fn convert_leaf(leaf: &tt::Leaf) -> TtToken {
@@ -208,6 +182,7 @@ fn convert_leaf(leaf: &tt::Leaf) -> TtToken {
#[cfg(test)]
mod tests {
use super::{convert_literal, TtToken};
use parser::Token;
use syntax::{SmolStr, SyntaxKind};
#[test]
@@ -218,8 +193,7 @@ mod tests {
text: SmolStr::new("-42.0")
}),
TtToken {
kind: SyntaxKind::FLOAT_NUMBER,
is_joint_to_next: false,
tt: Token { kind: SyntaxKind::FLOAT_NUMBER, is_jointed_to_next: false },
text: SmolStr::new("-42.0")
}
);

View File

@@ -70,15 +70,12 @@ pub fn token_tree_to_syntax_node(
tt: &tt::Subtree,
fragment_kind: FragmentKind,
) -> Result<(Parse<SyntaxNode>, TokenMap), ExpandError> {
let tmp;
let tokens = match tt {
tt::Subtree { delimiter: None, token_trees } => token_trees.as_slice(),
_ => {
tmp = [tt.clone().into()];
&tmp[..]
let buffer = match tt {
tt::Subtree { delimiter: None, token_trees } => {
TokenBuffer::from_tokens(token_trees.as_slice())
}
_ => TokenBuffer::from_subtree(tt),
};
let buffer = TokenBuffer::new(&tokens);
let mut token_source = SubtreeTokenSource::new(&buffer);
let mut tree_sink = TtTreeSink::new(buffer.begin());
parser::parse_fragment(&mut token_source, &mut tree_sink, fragment_kind);
@@ -414,7 +411,7 @@ trait TokenConvertor {
fn id_alloc(&mut self) -> &mut TokenIdAlloc;
}
impl<'a> SrcToken for (RawToken, &'a str) {
impl<'a> SrcToken for (&'a RawToken, &'a str) {
fn kind(&self) -> SyntaxKind {
self.0.kind
}
@@ -431,7 +428,7 @@ impl<'a> SrcToken for (RawToken, &'a str) {
impl RawConvertor<'_> {}
impl<'a> TokenConvertor for RawConvertor<'a> {
type Token = (RawToken, &'a str);
type Token = (&'a RawToken, &'a str);
fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>> {
convert_doc_comment(&doc_comment(token.1))
@@ -442,11 +439,11 @@ impl<'a> TokenConvertor for RawConvertor<'a> {
let range = TextRange::at(self.offset, token.len);
self.offset += token.len;
Some(((*token, &self.text[range]), range))
Some(((token, &self.text[range]), range))
}
fn peek(&self) -> Option<Self::Token> {
let token = self.inner.as_slice().get(0).cloned();
let token = self.inner.as_slice().get(0);
token.map(|it| {
let range = TextRange::at(self.offset, it.len);
@@ -601,17 +598,16 @@ impl<'a> TtTreeSink<'a> {
}
}
fn delim_to_str(d: Option<tt::DelimiterKind>, closing: bool) -> SmolStr {
fn delim_to_str(d: Option<tt::DelimiterKind>, closing: bool) -> &'static str {
let texts = match d {
Some(tt::DelimiterKind::Parenthesis) => "()",
Some(tt::DelimiterKind::Brace) => "{}",
Some(tt::DelimiterKind::Bracket) => "[]",
None => return "".into(),
None => return "",
};
let idx = closing as usize;
let text = &texts[idx..texts.len() - (1 - idx)];
text.into()
&texts[idx..texts.len() - (1 - idx)]
}
impl<'a> TreeSink for TtTreeSink<'a> {
@@ -626,29 +622,32 @@ impl<'a> TreeSink for TtTreeSink<'a> {
let mut last = self.cursor;
for _ in 0..n_tokens {
let tmp_str: SmolStr;
if self.cursor.eof() {
break;
}
last = self.cursor;
let text: SmolStr = match self.cursor.token_tree() {
Some(tt::TokenTree::Leaf(leaf)) => {
let text: &str = match self.cursor.token_tree() {
Some(tt::buffer::TokenTreeRef::Leaf(leaf, _)) => {
// Mark the range if needed
let (text, id) = match leaf {
tt::Leaf::Ident(ident) => (ident.text.clone(), ident.id),
tt::Leaf::Ident(ident) => (&ident.text, ident.id),
tt::Leaf::Punct(punct) => {
assert!(punct.char.is_ascii());
let char = &(punct.char as u8);
let text = std::str::from_utf8(std::slice::from_ref(char)).unwrap();
(SmolStr::new_inline(text), punct.id)
tmp_str = SmolStr::new_inline(
std::str::from_utf8(std::slice::from_ref(char)).unwrap(),
);
(&tmp_str, punct.id)
}
tt::Leaf::Literal(lit) => (lit.text.clone(), lit.id),
tt::Leaf::Literal(lit) => (&lit.text, lit.id),
};
let range = TextRange::at(self.text_pos, TextSize::of(text.as_str()));
self.token_map.insert(id, range);
self.cursor = self.cursor.bump();
text
}
Some(tt::TokenTree::Subtree(subtree)) => {
Some(tt::buffer::TokenTreeRef::Subtree(subtree, _)) => {
self.cursor = self.cursor.subtree().unwrap();
if let Some(id) = subtree.delimiter.map(|it| it.id) {
self.open_delims.insert(id, self.text_pos);
@@ -672,7 +671,7 @@ impl<'a> TreeSink for TtTreeSink<'a> {
}
};
self.buf += &text;
self.text_pos += TextSize::of(text.as_str());
self.text_pos += TextSize::of(text);
}
let text = SmolStr::new(self.buf.as_str());
@@ -682,8 +681,8 @@ impl<'a> TreeSink for TtTreeSink<'a> {
// Add whitespace between adjoint puncts
let next = last.bump();
if let (
Some(tt::TokenTree::Leaf(tt::Leaf::Punct(curr))),
Some(tt::TokenTree::Leaf(tt::Leaf::Punct(_))),
Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Punct(curr), _)),
Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Punct(_), _)),
) = (last.token_tree(), next.token_tree())
{
// Note: We always assume the semi-colon would be the last token in
@@ -742,7 +741,7 @@ mod tests {
)
.expand_tt("literals!(foo);");
let tts = &[expansion.into()];
let buffer = tt::buffer::TokenBuffer::new(tts);
let buffer = tt::buffer::TokenBuffer::from_tokens(tts);
let mut tt_src = SubtreeTokenSource::new(&buffer);
let mut tokens = vec![];
while tt_src.current().kind != EOF {

View File

@@ -1,6 +1,6 @@
//! FIXME: write short doc here
use crate::{Subtree, TokenTree};
use crate::{Leaf, Subtree, TokenTree};
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
struct EntryId(usize);
@@ -13,7 +13,7 @@ struct EntryPtr(EntryId, usize);
#[derive(Debug)]
enum Entry<'t> {
// Mimicking types from proc-macro.
Subtree(&'t TokenTree, EntryId),
Subtree(Option<&'t TokenTree>, &'t Subtree, EntryId),
Leaf(&'t TokenTree),
// End entries contain a pointer to the entry from the containing
// token tree, or None if this is the outermost level.
@@ -27,37 +27,64 @@ pub struct TokenBuffer<'t> {
buffers: Vec<Box<[Entry<'t>]>>,
}
impl<'t> TokenBuffer<'t> {
pub fn new(tokens: &'t [TokenTree]) -> TokenBuffer<'t> {
let mut buffers = vec![];
trait TokenList<'a> {
fn entries(&self) -> (Vec<(usize, (&'a Subtree, Option<&'a TokenTree>))>, Vec<Entry<'a>>);
}
let idx = TokenBuffer::new_inner(tokens, &mut buffers, None);
assert_eq!(idx, 0);
TokenBuffer { buffers }
}
fn new_inner(
tokens: &'t [TokenTree],
buffers: &mut Vec<Box<[Entry<'t>]>>,
next: Option<EntryPtr>,
) -> usize {
impl<'a> TokenList<'a> for &'a [TokenTree] {
fn entries(&self) -> (Vec<(usize, (&'a Subtree, Option<&'a TokenTree>))>, Vec<Entry<'a>>) {
// Must contain everything in tokens and then the Entry::End
let start_capacity = tokens.len() + 1;
let start_capacity = self.len() + 1;
let mut entries = Vec::with_capacity(start_capacity);
let mut children = vec![];
for (idx, tt) in tokens.iter().enumerate() {
for (idx, tt) in self.iter().enumerate() {
match tt {
TokenTree::Leaf(_) => {
entries.push(Entry::Leaf(tt));
}
TokenTree::Subtree(subtree) => {
entries.push(Entry::End(None));
children.push((idx, (subtree, tt)));
children.push((idx, (subtree, Some(tt))));
}
}
}
(children, entries)
}
}
impl<'a> TokenList<'a> for &'a Subtree {
fn entries(&self) -> (Vec<(usize, (&'a Subtree, Option<&'a TokenTree>))>, Vec<Entry<'a>>) {
// Must contain everything in tokens and then the Entry::End
let mut entries = vec![];
let mut children = vec![];
entries.push(Entry::End(None));
children.push((0usize, (*self, None)));
(children, entries)
}
}
impl<'t> TokenBuffer<'t> {
pub fn from_tokens(tokens: &'t [TokenTree]) -> TokenBuffer<'t> {
Self::new(tokens)
}
pub fn from_subtree(subtree: &'t Subtree) -> TokenBuffer<'t> {
Self::new(subtree)
}
fn new<T: TokenList<'t>>(tokens: T) -> TokenBuffer<'t> {
let mut buffers = vec![];
let idx = TokenBuffer::new_inner(tokens, &mut buffers, None);
assert_eq!(idx, 0);
TokenBuffer { buffers }
}
fn new_inner<T: TokenList<'t>>(
tokens: T,
buffers: &mut Vec<Box<[Entry<'t>]>>,
next: Option<EntryPtr>,
) -> usize {
let (children, mut entries) = tokens.entries();
entries.push(Entry::End(next));
let res = buffers.len();
@@ -65,11 +92,11 @@ impl<'t> TokenBuffer<'t> {
for (child_idx, (subtree, tt)) in children {
let idx = TokenBuffer::new_inner(
&subtree.token_trees,
subtree.token_trees.as_slice(),
buffers,
Some(EntryPtr(EntryId(res), child_idx + 1)),
);
buffers[res].as_mut()[child_idx] = Entry::Subtree(tt, EntryId(idx));
buffers[res].as_mut()[child_idx] = Entry::Subtree(tt, subtree, EntryId(idx));
}
res
@@ -87,6 +114,24 @@ impl<'t> TokenBuffer<'t> {
}
}
#[derive(Debug)]
pub enum TokenTreeRef<'a> {
Subtree(&'a Subtree, Option<&'a TokenTree>),
Leaf(&'a Leaf, &'a TokenTree),
}
impl<'a> TokenTreeRef<'a> {
pub fn cloned(&self) -> TokenTree {
match &self {
TokenTreeRef::Subtree(subtree, tt) => match tt {
Some(it) => (*it).clone(),
None => (*subtree).clone().into(),
},
TokenTreeRef::Leaf(_, tt) => (*tt).clone(),
}
}
}
/// A safe version of `Cursor` from `syn` crate https://github.com/dtolnay/syn/blob/6533607f91686545cb034d2838beea338d9d0742/src/buffer.rs#L125
#[derive(Copy, Clone, Debug)]
pub struct Cursor<'a> {
@@ -114,12 +159,11 @@ impl<'a> Cursor<'a> {
match self.entry() {
Some(Entry::End(Some(ptr))) => {
let idx = ptr.1;
if let Some(Entry::Subtree(TokenTree::Subtree(subtree), _)) =
if let Some(Entry::Subtree(_, subtree, _)) =
self.buffer.entry(&EntryPtr(ptr.0, idx - 1))
{
return Some(subtree);
}
None
}
_ => None,
@@ -134,7 +178,7 @@ impl<'a> Cursor<'a> {
/// a cursor into that subtree
pub fn subtree(self) -> Option<Cursor<'a>> {
match self.entry() {
Some(Entry::Subtree(_, entry_id)) => {
Some(Entry::Subtree(_, _, entry_id)) => {
Some(Cursor::create(self.buffer, EntryPtr(*entry_id, 0)))
}
_ => None,
@@ -142,10 +186,13 @@ impl<'a> Cursor<'a> {
}
/// If the cursor is pointing at a `TokenTree`, returns it
pub fn token_tree(self) -> Option<&'a TokenTree> {
pub fn token_tree(self) -> Option<TokenTreeRef<'a>> {
match self.entry() {
Some(Entry::Leaf(tt)) => Some(tt),
Some(Entry::Subtree(tt, _)) => Some(tt),
Some(Entry::Leaf(tt)) => match tt {
TokenTree::Leaf(leaf) => Some(TokenTreeRef::Leaf(leaf, *tt)),
TokenTree::Subtree(subtree) => Some(TokenTreeRef::Subtree(subtree, Some(tt))),
},
Some(Entry::Subtree(tt, subtree, _)) => Some(TokenTreeRef::Subtree(subtree, *tt)),
Some(Entry::End(_)) => None,
None => None,
}
@@ -172,7 +219,7 @@ impl<'a> Cursor<'a> {
/// a cursor into that subtree
pub fn bump_subtree(self) -> Cursor<'a> {
match self.entry() {
Some(Entry::Subtree(_, _)) => self.subtree().unwrap(),
Some(Entry::Subtree(_, _, _)) => self.subtree().unwrap(),
_ => self.bump(),
}
}
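
For completeness, a hedged sketch of how `TokenTreeRef::cloned` is meant to be used by callers that still occasionally need owned trees; `owned_top_level_leaves` is an illustrative helper against the API above, not code from this PR:

```rust
use tt::buffer::{TokenBuffer, TokenTreeRef};

/// Clone only the top-level leaves of a token slice, leaving everything
/// else borrowed. `cloned()` rebuilds an owned `TokenTree`; for a root
/// subtree created via `from_subtree` (which has no backing `TokenTree`)
/// it clones the `Subtree` itself and wraps it.
fn owned_top_level_leaves(tokens: &[tt::TokenTree]) -> Vec<tt::TokenTree> {
    let buffer = TokenBuffer::from_tokens(tokens);
    let mut cursor = buffer.begin();
    let mut out = Vec::new();
    while !cursor.eof() {
        if let Some(tt_ref @ TokenTreeRef::Leaf(..)) = cursor.token_tree() {
            out.push(tt_ref.cloned());
        }
        // `bump` steps over a subtree without descending into it.
        cursor = cursor.bump();
    }
    out
}
```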