Mirror of https://github.com/rust-lang/rust.git (synced 2025-04-28 02:57:37 +00:00)
Auto merge of #98463 - mystor:expand_expr_bool, r=eddyb
proc_macro: Fix expand_expr expansion of bool literals

Previously, the expand_expr method would expand bool literals as a `Literal` token containing a `LitKind::Bool`, rather than as an `Ident`. This is not a valid token, and the `LitKind::Bool` case needs to be handled separately.

Tests were added to more deeply compare the streams in the expand-expr test suite to catch mistakes like this in the future.
This commit is contained in: commit 29554c0a12
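To illustrate the behavior this commit fixes, here is a minimal sketch of a proc macro that calls the unstable `expand_expr` API on a `bool` expression and checks that the result comes back as an `Ident` token rather than a `Literal`. The macro name `expect_bool_ident` and the crate setup are illustrative only, not part of this commit; the sketch assumes a nightly toolchain and a crate with `crate-type = ["proc-macro"]`.

```rust
#![feature(proc_macro_expand)] // unstable API; nightly only

extern crate proc_macro;

use proc_macro::{TokenStream, TokenTree};

// Illustrative macro (not part of this commit): expand the input expression
// and assert that a bool literal expands to an `Ident` token, which is the
// behavior this fix guarantees.
#[proc_macro]
pub fn expect_bool_ident(input: TokenStream) -> TokenStream {
    let expanded = input.expand_expr().expect("expand_expr failed");
    match expanded.into_iter().next() {
        Some(TokenTree::Ident(ident)) => {
            assert!(ident.to_string() == "true" || ident.to_string() == "false");
        }
        other => panic!("expected a bool Ident, found {:?}", other),
    }
    TokenStream::new()
}
```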
@@ -426,6 +426,10 @@ impl server::TokenStream for Rustc<'_, '_> {

```rust
        // We don't use `TokenStream::from_ast` as the tokenstream currently cannot
        // be recovered in the general case.
        match &expr.kind {
            ast::ExprKind::Lit(l) if l.token.kind == token::Bool => {
                Ok(tokenstream::TokenTree::token(token::Ident(l.token.symbol, false), l.span)
                    .into())
            }
            ast::ExprKind::Lit(l) => {
                Ok(tokenstream::TokenTree::token(token::Literal(l.token), l.span).into())
            }
```
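The new `token::Bool` arm relies on the fact that, in the proc-macro token model, `true` and `false` have no `Literal` representation and are surfaced as identifiers. A standalone sketch of that token-model detail follows; it uses the `proc-macro2` crate (which mirrors the `proc_macro` types and can run outside a macro expansion) purely for illustration, and the crate dependency is an assumption, not part of this commit.

```rust
// Standalone illustration (assumes `proc-macro2 = "1"` as a dependency).
use proc_macro2::{TokenStream, TokenTree};

fn main() {
    // Lex the source text `true` into a token stream.
    let ts: TokenStream = "true".parse().expect("failed to lex");
    let tt = ts.into_iter().next().expect("expected one token");

    match tt {
        // Booleans round-trip through the proc-macro API as identifiers,
        // which is why the server-side code above emits `token::Ident`.
        TokenTree::Ident(ident) => assert_eq!(ident.to_string(), "true"),
        other => panic!("expected an Ident, found {:?}", other),
    }
}
```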
@@ -10,6 +10,72 @@ extern crate proc_macro;

```rust
use proc_macro::*;
use std::str::FromStr;

// Flatten the TokenStream, removing any toplevel `Delimiter::None`s for
// comparison.
fn flatten(ts: TokenStream) -> Vec<TokenTree> {
    ts.into_iter()
        .flat_map(|tt| match &tt {
            TokenTree::Group(group) if group.delimiter() == Delimiter::None => {
                flatten(group.stream())
            }
            _ => vec![tt],
        })
        .collect()
}

// Assert that two TokenStream values are roughly equal to one-another.
fn assert_ts_eq(lhs: &TokenStream, rhs: &TokenStream) {
    let ltts = flatten(lhs.clone());
    let rtts = flatten(rhs.clone());

    if ltts.len() != rtts.len() {
        panic!(
            "expected the same number of tts ({} == {})\nlhs:\n{:#?}\nrhs:\n{:#?}",
            ltts.len(),
            rtts.len(),
            lhs,
            rhs
        )
    }

    for (ltt, rtt) in ltts.iter().zip(&rtts) {
        match (ltt, rtt) {
            (TokenTree::Group(l), TokenTree::Group(r)) => {
                assert_eq!(
                    l.delimiter(),
                    r.delimiter(),
                    "expected delimiters to match for {:?} and {:?}",
                    l,
                    r
                );
                assert_ts_eq(&l.stream(), &r.stream());
            }
            (TokenTree::Punct(l), TokenTree::Punct(r)) => assert_eq!(
                (l.as_char(), l.spacing()),
                (r.as_char(), r.spacing()),
                "expected punct to match for {:?} and {:?}",
                l,
                r
            ),
            (TokenTree::Ident(l), TokenTree::Ident(r)) => assert_eq!(
                l.to_string(),
                r.to_string(),
                "expected ident to match for {:?} and {:?}",
                l,
                r
            ),
            (TokenTree::Literal(l), TokenTree::Literal(r)) => assert_eq!(
                l.to_string(),
                r.to_string(),
                "expected literal to match for {:?} and {:?}",
                l,
                r
            ),
            (l, r) => panic!("expected type to match for {:?} and {:?}", l, r),
        }
    }
}

#[proc_macro]
pub fn expand_expr_is(input: TokenStream) -> TokenStream {
    let mut iter = input.into_iter();
```
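A short standalone sketch of why `flatten` strips `Delimiter::None` groups before comparing: the compiler may wrap interpolated tokens in invisible groups, so two streams that carry the same tokens can still differ structurally at the top level. The example uses the `proc-macro2` crate purely for illustration; it is not part of the test suite.

```rust
use proc_macro2::{Delimiter, Group, TokenStream, TokenTree};

fn main() {
    let bare: TokenStream = "1 + 2".parse().unwrap();

    // Wrap the same tokens in an invisible (`Delimiter::None`) group, similar
    // to what the compiler can do when passing macro arguments along.
    let grouped: TokenStream =
        TokenTree::Group(Group::new(Delimiter::None, bare.clone())).into();

    // Structurally the streams differ: three toplevel tokens vs. one Group.
    // Flattening toplevel `Delimiter::None` groups, as `flatten` above does,
    // lets a comparison like `assert_ts_eq` treat them as equivalent.
    assert_eq!(bare.into_iter().count(), 3);
    assert_eq!(grouped.into_iter().count(), 1);
}
```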
@@ -31,6 +97,9 @@ pub fn expand_expr_is(input: TokenStream) -> TokenStream {

```rust
        expanded.to_string()
    );

    // Also compare the raw tts to make sure they line up.
    assert_ts_eq(&expected, &expanded);

    TokenStream::new()
}
```
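For orientation, an invocation of `expand_expr_is!` from the accompanying test crate might look like the lines below. The argument convention (expected tokens first, then the expression whose expansion is checked) and the invocations themselves are assumptions for illustration, not taken from this diff.

```rust
// Hypothetical test-side usage; `returns_true!` is a made-up helper macro.
expand_expr_is!("hello", stringify!(hello));
expand_expr_is!(true, returns_true!());
```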
@@ -48,7 +117,7 @@ pub fn check_expand_expr_file(ts: TokenStream) -> TokenStream {

```rust
    // invocation expand to the same literal.
    let input_t = ts.expand_expr().expect("expand_expr failed on macro input").to_string();
    let parse_t = TokenStream::from_str("file!{}")
        .unwrap()
        .expand_expr()
        .expect("expand_expr failed on internal macro")
        .to_string();
```