Always preserve `None`-delimited groups in a captured `TokenStream`
Previously, we would silently remove any `None` delimiters when capturing a `TokenStream`, 'flattening' them to their inner tokens. This was not normally visible, since we usually have a `TokenKind::Interpolated` (which gets converted to a `None`-delimited group during macro invocation) instead of an actual `None`-delimited group.

However, there are a couple of cases where this becomes visible to proc-macros:

1. A cross-crate `macro_rules!` macro has a `None`-delimited group stored in its body (as a result of being produced by another `macro_rules!` macro). The cross-crate `macro_rules!` invocation can then expand to an attribute macro invocation, which needs to be able to see the `None`-delimited group. A minimal sketch of this case follows below.
2. A proc-macro can invoke an attribute proc-macro with its re-collected input. If there are any nonterminals present in the input, they will get re-collected into `None`-delimited groups, which will then get captured as part of the attribute macro invocation.

Both of these cases are incredibly obscure, so there hopefully won't be any breakage. This change will allow more aggressive 'flattening' of nonterminals in #82608 without losing `None`-delimited groups.
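To make case 1 concrete, here is a minimal sketch, closely modeled on the `nested-macro-rules` test that this commit updates below (the macro and struct names come from that test; nothing here is new API). The outer `macro_rules!` macro captures `$name` and `$attr_struct_name` as nonterminals and pastes them into the body of `inner_macro!`, so a downstream crate that invokes `inner_macro!` hands the attribute proc-macro tokens wrapped in `None`-delimited groups:

// Crate `nested_macro_rules` (sketch of the auxiliary test file changed below).
pub struct FirstStruct;

#[macro_export]
macro_rules! outer_macro {
    ($name:ident, $attr_struct_name:ident) => {
        #[macro_export]
        macro_rules! inner_macro {
            ($bang_macro:ident, $attr_macro:ident) => {
                // `$name` and `$attr_struct_name` were captured by `outer_macro!`,
                // so they are stored in this macro's body as `None`-delimited groups.
                $bang_macro!($name);
                #[$attr_macro] struct $attr_struct_name {}
            }
        }
    }
}

outer_macro!(FirstStruct, FirstAttrStruct);

A downstream crate then calls `nested_macro_rules::inner_macro!(print_bang, print_attr);`, which expands to an attribute macro invocation; `print_attr` must be able to see the `None`-delimited group wrapping `FirstAttrStruct`, which is exactly what the changed `create_token_stream` now preserves.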
This commit is contained in:
parent f811f14006
commit f94360fd83
@@ -98,21 +98,46 @@ impl<'a> Parser<'a> {
 }
 impl CreateTokenStream for LazyTokenStreamImpl {
     fn create_token_stream(&self) -> TokenStream {
-        // The token produced by the final call to `next` or `next_desugared`
-        // was not actually consumed by the callback. The combination
-        // of chaining the initial token and using `take` produces the desired
-        // result - we produce an empty `TokenStream` if no calls were made,
-        // and omit the final token otherwise.
+        if self.num_calls == 0 {
+            return TokenStream::new(vec![]);
+        }
+
         let mut cursor_snapshot = self.cursor_snapshot.clone();
-        let tokens = std::iter::once(self.start_token.clone())
-            .chain((0..self.num_calls).map(|_| {
-                if self.desugar_doc_comments {
+        // Don't skip `None` delimiters, since we want to pass them to
+        // proc macros. Normally, we'll end up capturing `TokenKind::Interpolated`,
+        // which gets converted to a `None`-delimited group when we invoke
+        // a proc-macro. However, it's possible to already have a `None`-delimited
+        // group in the stream (such as when parsing the output of a proc-macro,
+        // or in certain unusual cases with cross-crate `macro_rules!` macros).
+        cursor_snapshot.skip_none_delims = false;
+
+        // The token produced by the final call to `next` or `next_desugared`
+        // was not actually consumed by the callback.
+        let num_calls = self.num_calls - 1;
+        let mut i = 0;
+        let tokens =
+            std::iter::once(self.start_token.clone()).chain(std::iter::from_fn(|| {
+                if i >= num_calls {
+                    return None;
+                }
+
+                let token = if self.desugar_doc_comments {
                     cursor_snapshot.next_desugared()
                 } else {
                     cursor_snapshot.next()
+                };
+
+                // When the `LazyTokenStreamImpl` was original produced, we did *not*
+                // include `NoDelim` tokens in `num_calls`, since they are normally ignored
+                // by the parser. Therefore, we only increment our counter for other types of tokens.
+                if !matches!(
+                    token.0.kind,
+                    token::OpenDelim(token::NoDelim) | token::CloseDelim(token::NoDelim)
+                ) {
+                    i += 1;
                 }
-            }))
-            .take(self.num_calls);
+                Some(token)
+            }));
 
         make_token_stream(tokens, self.append_unglued_token.clone())
     }
@@ -172,6 +172,13 @@ struct TokenCursor {
     // appended to the captured stream when
     // we evaluate a `LazyTokenStream`
     append_unglued_token: Option<TreeAndSpacing>,
+    // If `true`, skip the delimiters for `None`-delimited groups,
+    // and just yield the inner tokens. This is `true` during
+    // normal parsing, since the parser code is not currently prepared
+    // to handle `None` delimiters. When capturing a `TokenStream`,
+    // however, we want to handle `None`-delimiters, since
+    // proc-macros always see `None`-delimited groups.
+    skip_none_delims: bool,
 }
 
 #[derive(Clone)]
@@ -184,13 +191,13 @@ struct TokenCursorFrame {
 }
 
 impl TokenCursorFrame {
-    fn new(span: DelimSpan, delim: DelimToken, tts: TokenStream) -> Self {
+    fn new(span: DelimSpan, delim: DelimToken, tts: TokenStream, skip_none_delims: bool) -> Self {
         TokenCursorFrame {
             delim,
             span,
-            open_delim: delim == token::NoDelim,
+            open_delim: delim == token::NoDelim && skip_none_delims,
             tree_cursor: tts.into_trees(),
-            close_delim: delim == token::NoDelim,
+            close_delim: delim == token::NoDelim && skip_none_delims,
         }
     }
 }
@@ -218,7 +225,7 @@ impl TokenCursor {
                 return (token, spacing);
             }
             TokenTree::Delimited(sp, delim, tts) => {
-                let frame = TokenCursorFrame::new(sp, delim, tts);
+                let frame = TokenCursorFrame::new(sp, delim, tts, self.skip_none_delims);
                 self.stack.push(mem::replace(&mut self.frame, frame));
             }
         }
@@ -276,6 +283,7 @@ impl TokenCursor {
                     .cloned()
                     .collect::<TokenStream>()
                 },
+                self.skip_none_delims,
             ),
         ));
 
@@ -371,12 +379,19 @@ impl<'a> Parser<'a> {
             prev_token: Token::dummy(),
             restrictions: Restrictions::empty(),
             expected_tokens: Vec::new(),
+            // Skip over the delimiters for `None`-delimited groups
             token_cursor: TokenCursor {
-                frame: TokenCursorFrame::new(DelimSpan::dummy(), token::NoDelim, tokens),
+                frame: TokenCursorFrame::new(
+                    DelimSpan::dummy(),
+                    token::NoDelim,
+                    tokens,
+                    /* skip_none_delims */ true,
+                ),
                 stack: Vec::new(),
                 num_next_calls: 0,
                 desugar_doc_comments,
                 append_unglued_token: None,
+                skip_none_delims: true,
             },
             desugar_doc_comments,
             unmatched_angle_bracket_count: 0,
@@ -2,14 +2,15 @@ pub struct FirstStruct;
 
 #[macro_export]
 macro_rules! outer_macro {
-    ($name:ident) => {
+    ($name:ident, $attr_struct_name:ident) => {
         #[macro_export]
         macro_rules! inner_macro {
-            ($wrapper:ident) => {
-                $wrapper!($name)
+            ($bang_macro:ident, $attr_macro:ident) => {
+                $bang_macro!($name);
+                #[$attr_macro] struct $attr_struct_name {}
             }
         }
     }
 }
 
-outer_macro!(FirstStruct);
+outer_macro!(FirstStruct, FirstAttrStruct);
@@ -1,7 +1,7 @@
 // run-pass
 // aux-build:nested-macro-rules.rs
 // aux-build:test-macros.rs
-// compile-flags: -Z span-debug
+// compile-flags: -Z span-debug -Z macro-backtrace
 // edition:2018
 
 #![no_std] // Don't load unnecessary hygiene information from std
@@ -10,14 +10,14 @@ extern crate std;
 extern crate nested_macro_rules;
 extern crate test_macros;
 
-use test_macros::print_bang;
+use test_macros::{print_bang, print_attr};
 
 use nested_macro_rules::FirstStruct;
 struct SecondStruct;
 
 fn main() {
-    nested_macro_rules::inner_macro!(print_bang);
+    nested_macro_rules::inner_macro!(print_bang, print_attr);
 
-    nested_macro_rules::outer_macro!(SecondStruct);
-    inner_macro!(print_bang);
+    nested_macro_rules::outer_macro!(SecondStruct, SecondAttrStruct);
+    inner_macro!(print_bang, print_attr);
 }
@@ -5,10 +5,32 @@ PRINT-BANG INPUT (DEBUG): TokenStream [
         stream: TokenStream [
             Ident {
                 ident: "FirstStruct",
-                span: $DIR/auxiliary/nested-macro-rules.rs:15:14: 15:25 (#7),
+                span: $DIR/auxiliary/nested-macro-rules.rs:16:14: 16:25 (#7),
             },
         ],
-        span: $DIR/auxiliary/nested-macro-rules.rs:9:27: 9:32 (#6),
+        span: $DIR/auxiliary/nested-macro-rules.rs:9:30: 9:35 (#6),
     },
 ]
+PRINT-ATTR INPUT (DISPLAY): struct FirstAttrStruct { }
+PRINT-ATTR INPUT (DEBUG): TokenStream [
+    Ident {
+        ident: "struct",
+        span: $DIR/auxiliary/nested-macro-rules.rs:10:32: 10:38 (#6),
+    },
+    Group {
+        delimiter: None,
+        stream: TokenStream [
+            Ident {
+                ident: "FirstAttrStruct",
+                span: $DIR/auxiliary/nested-macro-rules.rs:16:27: 16:42 (#7),
+            },
+        ],
+        span: $DIR/auxiliary/nested-macro-rules.rs:10:39: 10:56 (#6),
+    },
+    Group {
+        delimiter: Brace,
+        stream: TokenStream [],
+        span: $DIR/auxiliary/nested-macro-rules.rs:10:57: 10:59 (#6),
+    },
+]
 PRINT-BANG INPUT (DISPLAY): SecondStruct
@@ -18,9 +40,31 @@ PRINT-BANG INPUT (DEBUG): TokenStream [
         stream: TokenStream [
             Ident {
                 ident: "SecondStruct",
-                span: $DIR/nested-macro-rules.rs:21:38: 21:50 (#13),
+                span: $DIR/nested-macro-rules.rs:21:38: 21:50 (#16),
             },
         ],
-        span: $DIR/auxiliary/nested-macro-rules.rs:9:27: 9:32 (#12),
+        span: $DIR/auxiliary/nested-macro-rules.rs:9:30: 9:35 (#15),
     },
 ]
+PRINT-ATTR INPUT (DISPLAY): struct SecondAttrStruct { }
+PRINT-ATTR INPUT (DEBUG): TokenStream [
+    Ident {
+        ident: "struct",
+        span: $DIR/auxiliary/nested-macro-rules.rs:10:32: 10:38 (#15),
+    },
+    Group {
+        delimiter: None,
+        stream: TokenStream [
+            Ident {
+                ident: "SecondAttrStruct",
+                span: $DIR/nested-macro-rules.rs:21:52: 21:68 (#16),
+            },
+        ],
+        span: $DIR/auxiliary/nested-macro-rules.rs:10:39: 10:56 (#15),
+    },
+    Group {
+        delimiter: Brace,
+        stream: TokenStream [],
+        span: $DIR/auxiliary/nested-macro-rules.rs:10:57: 10:59 (#15),
+    },
+]