Auto merge of #87264 - mystor:expand_literal, r=petrochenkov

proc_macro: Add an expand_expr method to TokenStream

This feature is aimed at giving proc macros access to powers similar to those of builtin macros such as `format_args!` or `concat!`. Those macros can accept macro calls in place of string-literal parameters, such as the format string, because they recursively expand their arguments while they themselves are being expanded.
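
For illustration (an example added here, not part of the PR text; `my_macro!` and the `help.txt` path are made up), builtin macros already compose this way today, whereas an ordinary proc macro only sees the unexpanded tokens of its arguments:

```rust
// Builtin macros eagerly expand macro calls in their string-literal positions,
// so compositions like this work today:
const BANNER: &str = concat!("crate: ", env!("CARGO_PKG_NAME"));
// (similarly `include_str!(concat!(env!("CARGO_MANIFEST_DIR"), "/help.txt"))`,
// assuming that file exists)

// A user-written proc macro invoked as `my_macro!(concat!("a", "b"))`, by
// contrast, only receives the raw tokens `concat ! ("a" , "b")` and has no way
// to expand them without help from the compiler.
```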

This is especially useful because helper macros like `concat!`, `stringify!`, and `include_str!` are often used in user code to construct string literals at compile time.

For now, this method only allows expanding macros which produce literals, although support for more kinds of expressions will be added before the method is stabilized.

In earlier versions of this PR, the method exclusively returned `Literal`, and spans on returned literals were stripped of expansion context before being returned, to be as conservative as possible about permission leakage. The method's name has since been generalized so that it can eventually support arbitrary expressions, and the context stripping has been removed (https://github.com/rust-lang/rust/pull/87264#discussion_r674863279), which should also allow more general APIs like "format_args_implicits" (https://github.com/rust-lang/rust/issues/67984) to be supported.

## API Surface

```rust
impl TokenStream {
    pub fn expand_expr(&self) -> Result<TokenStream, ExpandError>;
}

#[non_exhaustive]
pub struct ExpandError;

impl Debug for ExpandError { ... }
impl Display for ExpandError { ... }
impl Error for ExpandError {}
impl !Send for ExpandError {}
impl !Sync for ExpandError {}
```
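
A minimal sketch of how a proc macro might use this API (the `len_of` macro and its behaviour are illustrative, not part of the PR); it also shows the current literals-only restriction, since anything that does not expand to a literal yields `Err(ExpandError)`:

```rust
// Sketch of a nightly crate compiled with `crate-type = ["proc-macro"]`.
#![feature(proc_macro_expand)]
extern crate proc_macro;
use proc_macro::{Literal, TokenStream, TokenTree};

/// Illustrative only: expands to the length of the literal's source text.
#[proc_macro]
pub fn len_of(input: TokenStream) -> TokenStream {
    // Ask the compiler to expand e.g. `concat!(..)`, `stringify!(..)` or
    // `include_str!(..)` in the input down to a single literal expression.
    // A non-literal result (e.g. `1 + 2` or `foo()`) returns `Err(ExpandError)`.
    let expanded = input.expand_expr().expect("argument must expand to a literal");
    match expanded.into_iter().next() {
        Some(TokenTree::Literal(lit)) => {
            // Length of the literal as written (quotes included); a real macro
            // would parse the literal properly instead.
            let len = lit.to_string().len();
            TokenTree::Literal(Literal::usize_suffixed(len)).into()
        }
        _ => panic!("expected a single literal after expansion"),
    }
}
```

Invoked as `len_of!(concat!("Hello, ", "World"))` this would yield `14usize` (the quotes count toward `lit.to_string()`), while `len_of!(1 + 2)` would hit the `expect`, since the expanded expression is not a literal.
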
bors 2021-11-13 08:22:52 +00:00
commit 3e018ce194
8 changed files with 421 additions and 75 deletions

View File

@ -24,8 +24,9 @@ impl base::ProcMacro for BangProcMacro {
span: Span,
input: TokenStream,
) -> Result<TokenStream, ErrorReported> {
let proc_macro_backtrace = ecx.ecfg.proc_macro_backtrace;
let server = proc_macro_server::Rustc::new(ecx);
self.client.run(&EXEC_STRATEGY, server, input, ecx.ecfg.proc_macro_backtrace).map_err(|e| {
self.client.run(&EXEC_STRATEGY, server, input, proc_macro_backtrace).map_err(|e| {
let mut err = ecx.struct_span_err(span, "proc macro panicked");
if let Some(s) = e.as_str() {
err.help(&format!("message: {}", s));
@ -48,9 +49,10 @@ impl base::AttrProcMacro for AttrProcMacro {
annotation: TokenStream,
annotated: TokenStream,
) -> Result<TokenStream, ErrorReported> {
let proc_macro_backtrace = ecx.ecfg.proc_macro_backtrace;
let server = proc_macro_server::Rustc::new(ecx);
self.client
.run(&EXEC_STRATEGY, server, annotation, annotated, ecx.ecfg.proc_macro_backtrace)
.run(&EXEC_STRATEGY, server, annotation, annotated, proc_macro_backtrace)
.map_err(|e| {
let mut err = ecx.struct_span_err(span, "custom attribute panicked");
if let Some(s) = e.as_str() {
@ -97,19 +99,19 @@ impl MultiItemModifier for ProcMacroDerive {
nt_to_tokenstream(&item, &ecx.sess.parse_sess, CanSynthesizeMissingTokens::No)
};
let proc_macro_backtrace = ecx.ecfg.proc_macro_backtrace;
let server = proc_macro_server::Rustc::new(ecx);
let stream =
    match self.client.run(&EXEC_STRATEGY, server, input, ecx.ecfg.proc_macro_backtrace) {
        Ok(stream) => stream,
        Err(e) => {
            let mut err = ecx.struct_span_err(span, "proc-macro derive panicked");
            if let Some(s) = e.as_str() {
                err.help(&format!("message: {}", s));
            }
            err.emit();
            return ExpandResult::Ready(vec![]);
        }
    };
let stream = match self.client.run(&EXEC_STRATEGY, server, input, proc_macro_backtrace) {
    Ok(stream) => stream,
    Err(e) => {
        let mut err = ecx.struct_span_err(span, "proc-macro derive panicked");
        if let Some(s) = e.as_str() {
            err.help(&format!("message: {}", s));
        }
        err.emit();
        return ExpandResult::Ready(vec![]);
    }
};
let error_count_before = ecx.sess.parse_sess.span_diagnostic.err_count();
let mut parser =

View File

@ -1,4 +1,4 @@
use crate::base::{ExtCtxt, ResolverExpand};
use crate::base::ExtCtxt;
use rustc_ast as ast;
use rustc_ast::token::{self, Nonterminal, NtIdent};
@ -7,7 +7,7 @@ use rustc_ast::tokenstream::{DelimSpan, Spacing::*, TokenStream, TreeAndSpacing}
use rustc_ast_pretty::pprust;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sync::Lrc;
use rustc_errors::Diagnostic;
use rustc_errors::{Diagnostic, PResult};
use rustc_lint_defs::builtin::PROC_MACRO_BACK_COMPAT;
use rustc_lint_defs::BuiltinLintDiagnostics;
use rustc_parse::lexer::nfc_normalize;
@ -53,11 +53,11 @@ impl ToInternal<token::DelimToken> for Delimiter {
}
}
impl FromInternal<(TreeAndSpacing, &'_ mut Vec<Self>, &mut Rustc<'_>)>
impl FromInternal<(TreeAndSpacing, &'_ mut Vec<Self>, &mut Rustc<'_, '_>)>
for TokenTree<Group, Punct, Ident, Literal>
{
fn from_internal(
((tree, spacing), stack, rustc): (TreeAndSpacing, &mut Vec<Self>, &mut Rustc<'_>),
((tree, spacing), stack, rustc): (TreeAndSpacing, &mut Vec<Self>, &mut Rustc<'_, '_>),
) -> Self {
use rustc_ast::token::*;
@ -146,10 +146,10 @@ impl FromInternal<(TreeAndSpacing, &'_ mut Vec<Self>, &mut Rustc<'_>)>
SingleQuote => op!('\''),
Ident(name, false) if name == kw::DollarCrate => tt!(Ident::dollar_crate()),
Ident(name, is_raw) => tt!(Ident::new(rustc.sess, name, is_raw)),
Ident(name, is_raw) => tt!(Ident::new(rustc.sess(), name, is_raw)),
Lifetime(name) => {
let ident = symbol::Ident::new(name, span).without_first_quote();
stack.push(tt!(Ident::new(rustc.sess, ident.name, false)));
stack.push(tt!(Ident::new(rustc.sess(), ident.name, false)));
tt!(Punct::new('\'', true))
}
Literal(lit) => tt!(Literal { lit }),
@ -181,15 +181,15 @@ impl FromInternal<(TreeAndSpacing, &'_ mut Vec<Self>, &mut Rustc<'_>)>
Interpolated(nt)
if let Some((name, is_raw)) = ident_name_compatibility_hack(&nt, span, rustc) =>
{
TokenTree::Ident(Ident::new(rustc.sess, name.name, is_raw, name.span))
TokenTree::Ident(Ident::new(rustc.sess(), name.name, is_raw, name.span))
}
Interpolated(nt) => {
let stream = nt_to_tokenstream(&nt, rustc.sess, CanSynthesizeMissingTokens::No);
let stream = nt_to_tokenstream(&nt, rustc.sess(), CanSynthesizeMissingTokens::No);
TokenTree::Group(Group {
delimiter: Delimiter::None,
stream,
span: DelimSpan::from_single(span),
flatten: crate::base::pretty_printing_compatibility_hack(&nt, rustc.sess),
flatten: crate::base::pretty_printing_compatibility_hack(&nt, rustc.sess()),
})
}
@ -355,38 +355,38 @@ pub struct Literal {
span: Span,
}
pub(crate) struct Rustc<'a> {
resolver: &'a dyn ResolverExpand,
sess: &'a ParseSess,
pub(crate) struct Rustc<'a, 'b> {
ecx: &'a mut ExtCtxt<'b>,
def_site: Span,
call_site: Span,
mixed_site: Span,
span_debug: bool,
krate: CrateNum,
rebased_spans: FxHashMap<usize, Span>,
}
impl<'a> Rustc<'a> {
pub fn new(cx: &'a ExtCtxt<'_>) -> Self {
let expn_data = cx.current_expansion.id.expn_data();
impl<'a, 'b> Rustc<'a, 'b> {
pub fn new(ecx: &'a mut ExtCtxt<'b>) -> Self {
let expn_data = ecx.current_expansion.id.expn_data();
Rustc {
resolver: cx.resolver,
sess: cx.parse_sess(),
def_site: cx.with_def_site_ctxt(expn_data.def_site),
call_site: cx.with_call_site_ctxt(expn_data.call_site),
mixed_site: cx.with_mixed_site_ctxt(expn_data.call_site),
span_debug: cx.ecfg.span_debug,
def_site: ecx.with_def_site_ctxt(expn_data.def_site),
call_site: ecx.with_call_site_ctxt(expn_data.call_site),
mixed_site: ecx.with_mixed_site_ctxt(expn_data.call_site),
krate: expn_data.macro_def_id.unwrap().krate,
rebased_spans: FxHashMap::default(),
ecx,
}
}
fn sess(&self) -> &ParseSess {
self.ecx.parse_sess()
}
fn lit(&mut self, kind: token::LitKind, symbol: Symbol, suffix: Option<Symbol>) -> Literal {
Literal { lit: token::Lit::new(kind, symbol, suffix), span: server::Span::call_site(self) }
}
}
impl server::Types for Rustc<'_> {
impl server::Types for Rustc<'_, '_> {
type FreeFunctions = FreeFunctions;
type TokenStream = TokenStream;
type TokenStreamBuilder = tokenstream::TokenStreamBuilder;
@ -401,17 +401,20 @@ impl server::Types for Rustc<'_> {
type Span = Span;
}
impl server::FreeFunctions for Rustc<'_> {
impl server::FreeFunctions for Rustc<'_, '_> {
fn track_env_var(&mut self, var: &str, value: Option<&str>) {
self.sess.env_depinfo.borrow_mut().insert((Symbol::intern(var), value.map(Symbol::intern)));
self.sess()
.env_depinfo
.borrow_mut()
.insert((Symbol::intern(var), value.map(Symbol::intern)));
}
fn track_path(&mut self, path: &str) {
self.sess.file_depinfo.borrow_mut().insert(Symbol::intern(path));
self.sess().file_depinfo.borrow_mut().insert(Symbol::intern(path));
}
}
impl server::TokenStream for Rustc<'_> {
impl server::TokenStream for Rustc<'_, '_> {
fn new(&mut self) -> Self::TokenStream {
TokenStream::default()
}
@ -422,13 +425,62 @@ impl server::TokenStream for Rustc<'_> {
parse_stream_from_source_str(
FileName::proc_macro_source_code(src),
src.to_string(),
self.sess,
self.sess(),
Some(self.call_site),
)
}
fn to_string(&mut self, stream: &Self::TokenStream) -> String {
pprust::tts_to_string(stream)
}
fn expand_expr(&mut self, stream: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
// Parse the expression from our tokenstream.
let expr: PResult<'_, _> = try {
let mut p = rustc_parse::stream_to_parser(
self.sess(),
stream.clone(),
Some("proc_macro expand expr"),
);
let expr = p.parse_expr()?;
if p.token != token::Eof {
p.unexpected()?;
}
expr
};
let expr = expr.map_err(|mut err| err.emit())?;
// Perform eager expansion on the expression.
let expr = self
.ecx
.expander()
.fully_expand_fragment(crate::expand::AstFragment::Expr(expr))
.make_expr();
// NOTE: For now, limit `expand_expr` to exclusively expand to literals.
// This may be relaxed in the future.
// We don't use `nt_to_tokenstream` as the tokenstream currently cannot
// be recovered in the general case.
match &expr.kind {
ast::ExprKind::Lit(l) => {
Ok(tokenstream::TokenTree::token(token::Literal(l.token), l.span).into())
}
ast::ExprKind::Unary(ast::UnOp::Neg, e) => match &e.kind {
ast::ExprKind::Lit(l) => match l.token {
token::Lit { kind: token::Integer | token::Float, .. } => {
Ok(std::array::IntoIter::new([
// FIXME: The span of the `-` token is lost when
// parsing, so we cannot faithfully recover it here.
tokenstream::TokenTree::token(token::BinOp(token::Minus), e.span),
tokenstream::TokenTree::token(token::Literal(l.token), l.span),
])
.collect())
}
_ => Err(()),
},
_ => Err(()),
},
_ => Err(()),
}
}
fn from_token_tree(
&mut self,
tree: TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>,
@ -440,7 +492,7 @@ impl server::TokenStream for Rustc<'_> {
}
}
impl server::TokenStreamBuilder for Rustc<'_> {
impl server::TokenStreamBuilder for Rustc<'_, '_> {
fn new(&mut self) -> Self::TokenStreamBuilder {
tokenstream::TokenStreamBuilder::new()
}
@ -452,7 +504,7 @@ impl server::TokenStreamBuilder for Rustc<'_> {
}
}
impl server::TokenStreamIter for Rustc<'_> {
impl server::TokenStreamIter for Rustc<'_, '_> {
fn next(
&mut self,
iter: &mut Self::TokenStreamIter,
@ -477,7 +529,7 @@ impl server::TokenStreamIter for Rustc<'_> {
}
}
impl server::Group for Rustc<'_> {
impl server::Group for Rustc<'_, '_> {
fn new(&mut self, delimiter: Delimiter, stream: Self::TokenStream) -> Self::Group {
Group {
delimiter,
@ -506,7 +558,7 @@ impl server::Group for Rustc<'_> {
}
}
impl server::Punct for Rustc<'_> {
impl server::Punct for Rustc<'_, '_> {
fn new(&mut self, ch: char, spacing: Spacing) -> Self::Punct {
Punct::new(ch, spacing == Spacing::Joint, server::Span::call_site(self))
}
@ -524,9 +576,9 @@ impl server::Punct for Rustc<'_> {
}
}
impl server::Ident for Rustc<'_> {
impl server::Ident for Rustc<'_, '_> {
fn new(&mut self, string: &str, span: Self::Span, is_raw: bool) -> Self::Ident {
Ident::new(self.sess, Symbol::intern(string), is_raw, span)
Ident::new(self.sess(), Symbol::intern(string), is_raw, span)
}
fn span(&mut self, ident: Self::Ident) -> Self::Span {
ident.span
@ -536,10 +588,10 @@ impl server::Ident for Rustc<'_> {
}
}
impl server::Literal for Rustc<'_> {
impl server::Literal for Rustc<'_, '_> {
fn from_str(&mut self, s: &str) -> Result<Self::Literal, ()> {
let name = FileName::proc_macro_source_code(s);
let mut parser = rustc_parse::new_parser_from_source_str(self.sess, name, s.to_owned());
let mut parser = rustc_parse::new_parser_from_source_str(self.sess(), name, s.to_owned());
let first_span = parser.token.span.data();
let minus_present = parser.eat(&token::BinOp(token::Minus));
@ -675,7 +727,7 @@ impl server::Literal for Rustc<'_> {
}
}
impl server::SourceFile for Rustc<'_> {
impl server::SourceFile for Rustc<'_, '_> {
fn eq(&mut self, file1: &Self::SourceFile, file2: &Self::SourceFile) -> bool {
Lrc::ptr_eq(file1, file2)
}
@ -695,7 +747,7 @@ impl server::SourceFile for Rustc<'_> {
}
}
impl server::MultiSpan for Rustc<'_> {
impl server::MultiSpan for Rustc<'_, '_> {
fn new(&mut self) -> Self::MultiSpan {
vec![]
}
@ -704,7 +756,7 @@ impl server::MultiSpan for Rustc<'_> {
}
}
impl server::Diagnostic for Rustc<'_> {
impl server::Diagnostic for Rustc<'_, '_> {
fn new(&mut self, level: Level, msg: &str, spans: Self::MultiSpan) -> Self::Diagnostic {
let mut diag = Diagnostic::new(level.to_internal(), msg);
diag.set_span(MultiSpan::from_spans(spans));
@ -720,13 +772,13 @@ impl server::Diagnostic for Rustc<'_> {
diag.sub(level.to_internal(), msg, MultiSpan::from_spans(spans), None);
}
fn emit(&mut self, diag: Self::Diagnostic) {
self.sess.span_diagnostic.emit_diagnostic(&diag);
self.sess().span_diagnostic.emit_diagnostic(&diag);
}
}
impl server::Span for Rustc<'_> {
impl server::Span for Rustc<'_, '_> {
fn debug(&mut self, span: Self::Span) -> String {
if self.span_debug {
if self.ecx.ecfg.span_debug {
format!("{:?}", span)
} else {
format!("{:?} bytes({}..{})", span.ctxt(), span.lo().0, span.hi().0)
@ -742,7 +794,7 @@ impl server::Span for Rustc<'_> {
self.mixed_site
}
fn source_file(&mut self, span: Self::Span) -> Self::SourceFile {
self.sess.source_map().lookup_char_pos(span.lo()).file
self.sess().source_map().lookup_char_pos(span.lo()).file
}
fn parent(&mut self, span: Self::Span) -> Option<Self::Span> {
span.parent_callsite()
@ -751,11 +803,11 @@ impl server::Span for Rustc<'_> {
span.source_callsite()
}
fn start(&mut self, span: Self::Span) -> LineColumn {
let loc = self.sess.source_map().lookup_char_pos(span.lo());
let loc = self.sess().source_map().lookup_char_pos(span.lo());
LineColumn { line: loc.line, column: loc.col.to_usize() }
}
fn end(&mut self, span: Self::Span) -> LineColumn {
let loc = self.sess.source_map().lookup_char_pos(span.hi());
let loc = self.sess().source_map().lookup_char_pos(span.hi());
LineColumn { line: loc.line, column: loc.col.to_usize() }
}
fn before(&mut self, span: Self::Span) -> Self::Span {
@ -765,8 +817,8 @@ impl server::Span for Rustc<'_> {
span.shrink_to_hi()
}
fn join(&mut self, first: Self::Span, second: Self::Span) -> Option<Self::Span> {
let self_loc = self.sess.source_map().lookup_char_pos(first.lo());
let other_loc = self.sess.source_map().lookup_char_pos(second.lo());
let self_loc = self.sess().source_map().lookup_char_pos(first.lo());
let other_loc = self.sess().source_map().lookup_char_pos(second.lo());
if self_loc.file.name != other_loc.file.name {
return None;
@ -778,7 +830,7 @@ impl server::Span for Rustc<'_> {
span.with_ctxt(at.ctxt())
}
fn source_text(&mut self, span: Self::Span) -> Option<String> {
self.sess.source_map().span_to_snippet(span).ok()
self.sess().source_map().span_to_snippet(span).ok()
}
/// Saves the provided span into the metadata of
/// *the crate we are currently compiling*, which must
@ -805,10 +857,10 @@ impl server::Span for Rustc<'_> {
/// since we've loaded `my_proc_macro` from disk in order to execute it).
/// In this way, we have obtained a span pointing into `my_proc_macro`
fn save_span(&mut self, span: Self::Span) -> usize {
self.sess.save_proc_macro_span(span)
self.sess().save_proc_macro_span(span)
}
fn recover_proc_macro_span(&mut self, id: usize) -> Self::Span {
let (resolver, krate, def_site) = (self.resolver, self.krate, self.def_site);
let (resolver, krate, def_site) = (&*self.ecx.resolver, self.krate, self.def_site);
*self.rebased_spans.entry(id).or_insert_with(|| {
// FIXME: `SyntaxContext` for spans from proc macro crates is lost during encoding,
// replace it with a def-site context until we are encoding it properly.
@ -821,11 +873,11 @@ impl server::Span for Rustc<'_> {
fn ident_name_compatibility_hack(
nt: &Nonterminal,
orig_span: Span,
rustc: &mut Rustc<'_>,
rustc: &mut Rustc<'_, '_>,
) -> Option<(rustc_span::symbol::Ident, bool)> {
if let NtIdent(ident, is_raw) = nt {
if let ExpnKind::Macro(_, macro_name) = orig_span.ctxt().outer_expn_data().kind {
let source_map = rustc.sess.source_map();
let source_map = rustc.sess().source_map();
let filename = source_map.span_to_filename(orig_span);
if let FileName::Real(RealFileName::LocalPath(path)) = filename {
let matches_prefix = |prefix, filename| {
@ -846,7 +898,7 @@ fn ident_name_compatibility_hack(
let snippet = source_map.span_to_snippet(orig_span);
if snippet.as_deref() == Ok("$name") {
if time_macros_impl {
rustc.sess.buffer_lint_with_diagnostic(
rustc.sess().buffer_lint_with_diagnostic(
&PROC_MACRO_BACK_COMPAT,
orig_span,
ast::CRATE_NODE_ID,
@ -871,7 +923,7 @@ fn ident_name_compatibility_hack(
.and_then(|c| c.parse::<u32>().ok())
.map_or(false, |v| v < 40)
{
rustc.sess.buffer_lint_with_diagnostic(
rustc.sess().buffer_lint_with_diagnostic(
&PROC_MACRO_BACK_COMPAT,
orig_span,
ast::CRATE_NODE_ID,
@ -894,7 +946,7 @@ fn ident_name_compatibility_hack(
source_map.span_to_filename(rustc.def_site)
{
if macro_path.to_string_lossy().contains("pin-project-internal-0.") {
rustc.sess.buffer_lint_with_diagnostic(
rustc.sess().buffer_lint_with_diagnostic(
&PROC_MACRO_BACK_COMPAT,
orig_span,
ast::CRATE_NODE_ID,

View File

@ -62,6 +62,7 @@ macro_rules! with_api {
fn clone($self: &$S::TokenStream) -> $S::TokenStream;
fn new() -> $S::TokenStream;
fn is_empty($self: &$S::TokenStream) -> bool;
fn expand_expr($self: &$S::TokenStream) -> Result<$S::TokenStream, ()>;
fn from_str(src: &str) -> $S::TokenStream;
fn to_string($self: &$S::TokenStream) -> String;
fn from_token_tree(

View File

@ -88,12 +88,6 @@ impl !Sync for TokenStream {}
#[derive(Debug)]
pub struct LexError;
impl LexError {
fn new() -> Self {
LexError
}
}
#[stable(feature = "proc_macro_lexerror_impls", since = "1.44.0")]
impl fmt::Display for LexError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
@ -109,6 +103,28 @@ impl !Send for LexError {}
#[stable(feature = "proc_macro_lib", since = "1.15.0")]
impl !Sync for LexError {}
/// Error returned from `TokenStream::expand_expr`.
#[unstable(feature = "proc_macro_expand", issue = "90765")]
#[non_exhaustive]
#[derive(Debug)]
pub struct ExpandError;
#[unstable(feature = "proc_macro_expand", issue = "90765")]
impl fmt::Display for ExpandError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str("macro expansion failed")
}
}
#[unstable(feature = "proc_macro_expand", issue = "90765")]
impl error::Error for ExpandError {}
#[unstable(feature = "proc_macro_expand", issue = "90765")]
impl !Send for ExpandError {}
#[unstable(feature = "proc_macro_expand", issue = "90765")]
impl !Sync for ExpandError {}
impl TokenStream {
/// Returns an empty `TokenStream` containing no token trees.
#[stable(feature = "proc_macro_lib2", since = "1.29.0")]
@ -121,6 +137,24 @@ impl TokenStream {
pub fn is_empty(&self) -> bool {
self.0.is_empty()
}
/// Parses this `TokenStream` as an expression and attempts to expand any
/// macros within it. Returns the expanded `TokenStream`.
///
/// Currently only expressions expanding to literals will succeed, although
/// this may be relaxed in the future.
///
/// NOTE: In error conditions, `expand_expr` may leave macros unexpanded,
/// report an error, failing compilation, and/or return an `Err(..)`. The
/// specific behavior for any error condition, and what conditions are
/// considered errors, is unspecified and may change in the future.
#[unstable(feature = "proc_macro_expand", issue = "90765")]
pub fn expand_expr(&self) -> Result<TokenStream, ExpandError> {
match bridge::client::TokenStream::expand_expr(&self.0) {
Ok(stream) => Ok(TokenStream(stream)),
Err(_) => Err(ExpandError),
}
}
}
/// Attempts to break the string into tokens and parse those tokens into a token stream.
@ -1211,7 +1245,7 @@ impl FromStr for Literal {
fn from_str(src: &str) -> Result<Self, LexError> {
match bridge::client::Literal::from_str(src) {
Ok(literal) => Ok(Literal(literal)),
Err(()) => Err(LexError::new()),
Err(()) => Err(LexError),
}
}
}

View File

@ -0,0 +1,80 @@
// force-host
// no-prefer-dynamic
#![crate_type = "proc-macro"]
#![deny(warnings)]
#![feature(proc_macro_expand, proc_macro_span)]
extern crate proc_macro;
use proc_macro::*;
use std::str::FromStr;
#[proc_macro]
pub fn expand_expr_is(input: TokenStream) -> TokenStream {
let mut iter = input.into_iter();
let mut expected_tts = Vec::new();
loop {
match iter.next() {
Some(TokenTree::Punct(ref p)) if p.as_char() == ',' => break,
Some(tt) => expected_tts.push(tt),
None => panic!("expected comma"),
}
}
let expected = expected_tts.into_iter().collect::<TokenStream>();
let expanded = iter.collect::<TokenStream>().expand_expr().expect("expand_expr failed");
assert!(
expected.to_string() == expanded.to_string(),
"assert failed\nexpected: `{}`\nexpanded: `{}`",
expected.to_string(),
expanded.to_string()
);
TokenStream::new()
}
#[proc_macro]
pub fn expand_expr_fail(input: TokenStream) -> TokenStream {
match input.expand_expr() {
Ok(ts) => panic!("expand_expr unexpectedly succeeded: `{}`", ts),
Err(_) => TokenStream::new(),
}
}
#[proc_macro]
pub fn check_expand_expr_file(ts: TokenStream) -> TokenStream {
// Check that the passed in `file!()` invocation and a parsed `file!`
// invocation expand to the same literal.
let input_t = ts.expand_expr().expect("expand_expr failed on macro input").to_string();
let parse_t = TokenStream::from_str("file!{}")
.unwrap()
.expand_expr()
.expect("expand_expr failed on internal macro")
.to_string();
assert_eq!(input_t, parse_t);
// Check that the literal matches `Span::call_site().source_file().path()`
let expect_t =
Literal::string(&Span::call_site().source_file().path().to_string_lossy()).to_string();
assert_eq!(input_t, expect_t);
TokenStream::new()
}
#[proc_macro]
pub fn recursive_expand(_: TokenStream) -> TokenStream {
// Recursively call until we hit the recursion limit and get an error.
//
// NOTE: This doesn't panic if expansion fails because that'll cause a very
// large number of errors to fill the output.
TokenStream::from_str("recursive_expand!{}")
.unwrap()
.expand_expr()
.unwrap_or(std::iter::once(TokenTree::Literal(Literal::u32_suffixed(0))).collect())
}
#[proc_macro]
pub fn echo_pm(input: TokenStream) -> TokenStream {
input
}

View File

@ -0,0 +1 @@
Included file contents

View File

@ -0,0 +1,121 @@
// aux-build:expand-expr.rs
extern crate expand_expr;
use expand_expr::{
check_expand_expr_file, echo_pm, expand_expr_fail, expand_expr_is, recursive_expand,
};
// Check builtin macros can be expanded.
expand_expr_is!(11u32, line!());
expand_expr_is!(24u32, column!());
expand_expr_is!("Hello, World!", concat!("Hello, ", "World", "!"));
expand_expr_is!("int10floats5.3booltrue", concat!("int", 10, "floats", 5.3, "bool", true));
expand_expr_is!("Hello", concat!(r##"Hello"##));
expand_expr_is!("Included file contents\n", include_str!("auxiliary/included-file.txt"));
expand_expr_is!(b"Included file contents\n", include_bytes!("auxiliary/included-file.txt"));
expand_expr_is!(
"contents: Included file contents\n",
concat!("contents: ", include_str!("auxiliary/included-file.txt"))
);
// Correct value is checked for multiple sources.
check_expand_expr_file!(file!());
expand_expr_is!("hello", stringify!(hello));
expand_expr_is!("10 + 20", stringify!(10 + 20));
macro_rules! echo_tts {
($($t:tt)*) => { $($t)* }; //~ ERROR: expected expression, found `$`
}
macro_rules! echo_lit {
($l:literal) => {
$l
};
}
macro_rules! echo_expr {
($e:expr) => {
$e
};
}
macro_rules! simple_lit {
($l:literal) => {
expand_expr_is!($l, $l);
expand_expr_is!($l, echo_lit!($l));
expand_expr_is!($l, echo_expr!($l));
expand_expr_is!($l, echo_tts!($l));
expand_expr_is!($l, echo_pm!($l));
const _: () = {
macro_rules! mac {
() => {
$l
};
}
expand_expr_is!($l, mac!());
expand_expr_is!($l, echo_expr!(mac!()));
expand_expr_is!($l, echo_tts!(mac!()));
expand_expr_is!($l, echo_pm!(mac!()));
};
};
}
simple_lit!("Hello, World");
simple_lit!('c');
simple_lit!(b'c');
simple_lit!(10);
simple_lit!(10.0);
simple_lit!(10.0f64);
simple_lit!(-3.14159);
simple_lit!(-3.5e10);
simple_lit!(0xFEED);
simple_lit!(-0xFEED);
simple_lit!(0b0100);
simple_lit!(-0b0100);
simple_lit!("string");
simple_lit!(r##"raw string"##);
simple_lit!(b"byte string");
simple_lit!(br##"raw byte string"##);
simple_lit!(true);
simple_lit!(false);
// Ensure char escapes aren't normalized by expansion
simple_lit!("\u{0}");
simple_lit!("\0");
simple_lit!("\x00");
simple_lit!('\u{0}');
simple_lit!('\0');
simple_lit!('\x00');
simple_lit!(b"\x00");
simple_lit!(b"\0");
simple_lit!(b'\x00');
simple_lit!(b'\0');
// Extra tokens after the string literal aren't ignored
expand_expr_fail!("string"; hello); //~ ERROR: expected one of `.`, `?`, or an operator, found `;`
// Invalid expressions produce errors in addition to returning `Err(())`.
expand_expr_fail!($); //~ ERROR: expected expression, found `$`
expand_expr_fail!(echo_tts!($));
expand_expr_fail!(echo_pm!($)); //~ ERROR: expected expression, found `$`
// We get errors reported and recover during macro expansion if the macro
// doesn't produce a valid expression.
expand_expr_is!("string", echo_tts!("string"; hello)); //~ ERROR: macro expansion ignores token `hello` and any following
expand_expr_is!("string", echo_pm!("string"; hello)); //~ ERROR: macro expansion ignores token `;` and any following
// For now, fail if a non-literal expression is expanded.
expand_expr_fail!(arbitrary_expression() + "etc");
expand_expr_fail!(echo_tts!(arbitrary_expression() + "etc"));
expand_expr_fail!(echo_expr!(arbitrary_expression() + "etc"));
expand_expr_fail!(echo_pm!(arbitrary_expression() + "etc"));
const _: u32 = recursive_expand!(); //~ ERROR: recursion limit reached while expanding `recursive_expand!`
fn main() {}

View File

@ -0,0 +1,55 @@
error: expected one of `.`, `?`, or an operator, found `;`
--> $DIR/expand-expr.rs:101:27
|
LL | expand_expr_fail!("string"; hello);
| ^ expected one of `.`, `?`, or an operator
error: expected expression, found `$`
--> $DIR/expand-expr.rs:104:19
|
LL | expand_expr_fail!($);
| ^ expected expression
error: expected expression, found `$`
--> $DIR/expand-expr.rs:33:23
|
LL | ($($t:tt)*) => { $($t)* };
| ^^^^ expected expression
error: expected expression, found `$`
--> $DIR/expand-expr.rs:106:28
|
LL | expand_expr_fail!(echo_pm!($));
| ^ expected expression
error: macro expansion ignores token `hello` and any following
--> $DIR/expand-expr.rs:110:47
|
LL | expand_expr_is!("string", echo_tts!("string"; hello));
| --------------------^^^^^-- help: you might be missing a semicolon here: `;`
| |
| caused by the macro expansion here
|
= note: the usage of `echo_tts!` is likely invalid in expression context
error: macro expansion ignores token `;` and any following
--> $DIR/expand-expr.rs:111:44
|
LL | expand_expr_is!("string", echo_pm!("string"; hello));
| -----------------^-------- help: you might be missing a semicolon here: `;`
| |
| caused by the macro expansion here
|
= note: the usage of `echo_pm!` is likely invalid in expression context
error: recursion limit reached while expanding `recursive_expand!`
--> $DIR/expand-expr.rs:119:16
|
LL | const _: u32 = recursive_expand!();
| ^^^^^^^^^^^^^^^^^^^
|
= help: consider increasing the recursion limit by adding a `#![recursion_limit = "256"]` attribute to your crate (`expand_expr`)
= note: this error originates in the macro `recursive_expand` (in Nightly builds, run with -Z macro-backtrace for more info)
error: aborting due to 7 previous errors