Don't create an empty LazyTokenStream

Aaron Hill 2020-10-22 10:09:08 -04:00
parent 37b25e8a49
commit 920bed1213
No known key found for this signature in database
GPG Key ID: B4087E510E98B164
5 changed files with 25 additions and 19 deletions

View File

@@ -72,7 +72,7 @@ impl<'a> Parser<'a> {
             }
         })?;
         if let Some(mut attr) = attr {
-            attr.tokens = Some(tokens);
+            attr.tokens = tokens;
             attrs.push(attr);
         } else {
             break;
@@ -176,7 +176,7 @@ impl<'a> Parser<'a> {
         };
         if capture_tokens {
             let (mut item, tokens) = self.collect_tokens(do_parse)?;
-            item.tokens = Some(tokens);
+            item.tokens = tokens;
             item
         } else {
             do_parse(self)?
@@ -213,7 +213,7 @@ impl<'a> Parser<'a> {
             }
         })?;
         if let Some(mut attr) = attr {
-            attr.tokens = Some(tokens);
+            attr.tokens = tokens;
             attrs.push(attr);
         } else {
             break;

View File

@@ -1116,7 +1116,7 @@ impl<'a> Parser<'a> {
     ) -> PResult<'a, P<Expr>> {
         if needs_tokens {
             let (mut expr, tokens) = self.collect_tokens(f)?;
-            expr.tokens = Some(tokens);
+            expr.tokens = tokens;
             Ok(expr)
         } else {
             f(self)

View File

@@ -151,7 +151,7 @@ impl<'a> Parser<'a> {
         if let Some(tokens) = tokens {
             if let Some(item) = &mut item {
                 if !item.attrs.iter().any(|attr| attr.style == AttrStyle::Inner) {
-                    item.tokens = Some(tokens);
+                    item.tokens = tokens;
                 }
             }
         }

View File

@@ -1178,8 +1178,9 @@ impl<'a> Parser<'a> {
     /// Records all tokens consumed by the provided callback,
     /// including the current token. These tokens are collected
-    /// into a `TokenStream`, and returned along with the result
-    /// of the callback.
+    /// into a `LazyTokenStream`, and returned along with the result
+    /// of the callback. The returned `LazyTokenStream` will be `None`
+    /// if no tokens were captured.
     ///
     /// Note: If your callback consumes an opening delimiter
     /// (including the case where you call `collect_tokens`
@@ -1195,7 +1196,7 @@ impl<'a> Parser<'a> {
     pub fn collect_tokens<R>(
         &mut self,
         f: impl FnOnce(&mut Self) -> PResult<'a, R>,
-    ) -> PResult<'a, (R, LazyTokenStream)> {
+    ) -> PResult<'a, (R, Option<LazyTokenStream>)> {
         let start_token = (self.token.clone(), self.token_spacing);
         let mut cursor_snapshot = self.token_cursor.clone();
@@ -1205,6 +1206,11 @@ impl<'a> Parser<'a> {
         let num_calls = new_calls - cursor_snapshot.num_next_calls;
         let desugar_doc_comments = self.desugar_doc_comments;
 
+        // We didn't capture any tokens
+        if num_calls == 0 {
+            return Ok((ret, None));
+        }
+
         // Produces a `TokenStream` on-demand. Using `cursor_snapshot`
         // and `num_calls`, we can reconstruct the `TokenStream` seen
         // by the callback. This allows us to avoid producing a `TokenStream`
@@ -1233,7 +1239,7 @@ impl<'a> Parser<'a> {
         };
 
         let stream = LazyTokenStream::new(LazyTokenStreamInner::Lazy(Box::new(lazy_cb)));
-        Ok((ret, stream))
+        Ok((ret, Some(stream)))
     }
 
     /// `::{` or `::*`
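
The hunks above change `collect_tokens` to hand back an `Option<LazyTokenStream>`, returning `None` when the callback consumed no tokens instead of building an empty stream. The following standalone sketch models that contract with stand-in types (`Recorder` and `MiniStream` are hypothetical, not the real rustc internals); it only illustrates the `num_calls == 0` early return:

struct MiniStream(Vec<String>);

struct Recorder {
    tokens: Vec<String>,
    pos: usize,
}

impl Recorder {
    /// Consume one token, if any are left.
    fn bump(&mut self) -> Option<String> {
        let tok = self.tokens.get(self.pos).cloned();
        if tok.is_some() {
            self.pos += 1;
        }
        tok
    }

    /// Record every token consumed by `f`. Mirrors the new behaviour:
    /// if `f` consumed nothing, no stream is produced at all.
    fn collect_tokens<R>(&mut self, f: impl FnOnce(&mut Self) -> R) -> (R, Option<MiniStream>) {
        let start = self.pos;
        let ret = f(self);
        // We didn't capture any tokens
        if self.pos - start == 0 {
            return (ret, None);
        }
        (ret, Some(MiniStream(self.tokens[start..self.pos].to_vec())))
    }
}

fn main() {
    let mut r = Recorder { tokens: vec!["fn".into(), "main".into()], pos: 0 };

    // The callback consumes a token, so a stream is produced.
    let (_, captured) = r.collect_tokens(|this| this.bump());
    assert!(captured.is_some());

    // The callback consumes nothing, so no (empty) stream is allocated.
    let (_, captured) = r.collect_tokens(|_this| ());
    assert!(captured.is_none());
}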

View File

@@ -103,7 +103,7 @@ impl<'a> Parser<'a> {
                 // If we captured tokens during parsing (due to outer attributes),
                 // use those.
                 if item.tokens.is_none() {
-                    item.tokens = Some(tokens);
+                    item.tokens = tokens;
                 }
                 token::NtItem(item)
             }
@@ -115,7 +115,7 @@ impl<'a> Parser<'a> {
                 let (mut block, tokens) = self.collect_tokens(|this| this.parse_block())?;
                 // We have have eaten an NtBlock, which could already have tokens
                 if block.tokens.is_none() {
-                    block.tokens = Some(tokens);
+                    block.tokens = tokens;
                 }
                 token::NtBlock(block)
             }
@@ -124,7 +124,7 @@ impl<'a> Parser<'a> {
                 match stmt {
                     Some(mut s) => {
                         if s.tokens.is_none() {
-                            s.tokens = Some(tokens);
+                            s.tokens = tokens;
                         }
                         token::NtStmt(s)
                     }
@@ -137,7 +137,7 @@ impl<'a> Parser<'a> {
                 let (mut pat, tokens) = self.collect_tokens(|this| this.parse_pat(None))?;
                 // We have have eaten an NtPat, which could already have tokens
                 if pat.tokens.is_none() {
-                    pat.tokens = Some(tokens);
+                    pat.tokens = tokens;
                 }
                 token::NtPat(pat)
             }
@@ -146,7 +146,7 @@ impl<'a> Parser<'a> {
                 // If we captured tokens during parsing (due to outer attributes),
                 // use those.
                 if expr.tokens.is_none() {
-                    expr.tokens = Some(tokens);
+                    expr.tokens = tokens;
                 }
                 token::NtExpr(expr)
             }
@@ -155,7 +155,7 @@ impl<'a> Parser<'a> {
                     self.collect_tokens(|this| this.parse_literal_maybe_minus())?;
                 // We have have eaten a nonterminal, which could already have tokens
                 if lit.tokens.is_none() {
-                    lit.tokens = Some(tokens);
+                    lit.tokens = tokens;
                 }
                 token::NtLiteral(lit)
             }
@@ -163,7 +163,7 @@ impl<'a> Parser<'a> {
                 let (mut ty, tokens) = self.collect_tokens(|this| this.parse_ty())?;
                 // We have an eaten an NtTy, which could already have tokens
                 if ty.tokens.is_none() {
-                    ty.tokens = Some(tokens);
+                    ty.tokens = tokens;
                 }
                 token::NtTy(ty)
             }
@@ -183,7 +183,7 @@ impl<'a> Parser<'a> {
                     self.collect_tokens(|this| this.parse_path(PathStyle::Type))?;
                 // We have have eaten an NtPath, which could already have tokens
                 if path.tokens.is_none() {
-                    path.tokens = Some(tokens);
+                    path.tokens = tokens;
                 }
                 token::NtPath(path)
             }
@@ -191,7 +191,7 @@ impl<'a> Parser<'a> {
                 let (mut attr, tokens) = self.collect_tokens(|this| this.parse_attr_item(false))?;
                 // We may have eaten a nonterminal, which could already have tokens
                 if attr.tokens.is_none() {
-                    attr.tokens = Some(tokens);
+                    attr.tokens = tokens;
                 }
                 token::NtMeta(P(attr))
             }
@@ -201,7 +201,7 @@ impl<'a> Parser<'a> {
                     self.collect_tokens(|this| this.parse_visibility(FollowedByType::Yes))?;
                 // We may have etan an `NtVis`, which could already have tokens
                 if vis.tokens.is_none() {
-                    vis.tokens = Some(tokens);
+                    vis.tokens = tokens;
                 }
                 token::NtVis(vis)
             }
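
On the caller side the change stays small because the AST nodes' `tokens` fields are themselves `Option`s: the `Option<LazyTokenStream>` returned by `collect_tokens` is stored directly, and only when the node does not already carry tokens (for example when an already-parsed nonterminal was eaten). A minimal sketch of that pattern, using a hypothetical `Node` with a plain `Vec<String>` standing in for `LazyTokenStream`:

#[derive(Default)]
struct Node {
    tokens: Option<Vec<String>>,
}

/// Mirrors `if node.tokens.is_none() { node.tokens = tokens; }` from the
/// hunks above: tokens that were already recorded are never overwritten,
/// and a `None` capture leaves the field empty rather than storing an
/// empty stream.
fn attach_tokens(node: &mut Node, captured: Option<Vec<String>>) {
    if node.tokens.is_none() {
        node.tokens = captured;
    }
}

fn main() {
    let mut fresh = Node::default();
    attach_tokens(&mut fresh, Some(vec!["a".into()]));
    assert!(fresh.tokens.is_some());

    let mut reused = Node { tokens: Some(vec!["old".into()]) };
    attach_tokens(&mut reused, Some(vec!["new".into()]));
    assert_eq!(reused.tokens.unwrap(), vec!["old".to_string()]);
}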