Rollup merge of #64041 - matklad:token-stream-tt, r=petrochenkov
use TokenStream rather than &[TokenTree] for built-in macros. That way, we don't lose the jointness info.
This commit is contained in: fd46f6ed41
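The motivation is easiest to see with a small, self-contained sketch. The snippet below uses the stable proc_macro2 crate as a stand-in for the compiler-internal syntax::tokenstream types touched by this commit (the internal API differs); it only illustrates that "jointness" — whether a punctuation token is glued to the one after it — is information carried by a token stream, which a plain list of detached token trees can drop.

```rust
// Illustrative sketch only: proc_macro2 stands in for the compiler-internal
// TokenStream; the point is that Joint/Alone spacing is visible while walking
// the stream, so `=>` can be told apart from `=` followed by `>`.
use proc_macro2::{Spacing, TokenStream, TokenTree};

fn main() {
    let stream: TokenStream = "a => b".parse().unwrap();
    for tt in stream {
        if let TokenTree::Punct(p) = tt {
            // `=` prints Joint (glued to the following `>`), `>` prints Alone.
            println!("{} is {:?}", p.as_char(), p.spacing());
        }
    }
}
```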
@@ -57,12 +57,12 @@ extern crate rustc;
 extern crate rustc_driver;
 
 use syntax::parse::token::{self, Token};
-use syntax::tokenstream::TokenTree;
+use syntax::tokenstream::{TokenTree, TokenStream};
 use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacEager};
 use syntax_pos::Span;
 use rustc_driver::plugin::Registry;
 
-fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
+fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: TokenStream)
         -> Box<dyn MacResult + 'static> {
 
     static NUMERALS: &'static [(&'static str, usize)] = &[
@@ -78,7 +78,7 @@ fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
         return DummyResult::any(sp);
     }
 
-    let text = match args[0] {
+    let text = match args.into_trees().next().unwrap() {
         TokenTree::Token(Token { kind: token::Ident(s, _), .. }) => s.to_string(),
         _ => {
             cx.span_err(sp, "argument should be a single identifier");
@@ -1354,7 +1354,7 @@ impl EncodeContext<'tcx> {
         let def_id = self.tcx.hir().local_def_id(macro_def.hir_id);
         Entry {
             kind: EntryKind::MacroDef(self.lazy(MacroDef {
-                body: pprust::tokens_to_string(macro_def.body.clone()),
+                body: pprust::tts_to_string(macro_def.body.clone()),
                 legacy: macro_def.legacy,
             })),
             visibility: self.lazy(ty::Visibility::Public),
@@ -6,7 +6,7 @@ use crate::ext::base::{ExtCtxt, MacEager, MacResult};
 use crate::parse::token::{self, Token};
 use crate::ptr::P;
 use crate::symbol::kw;
-use crate::tokenstream::{TokenTree};
+use crate::tokenstream::{TokenTree, TokenStream};
 
 use smallvec::smallvec;
 use syntax_pos::Span;
@@ -27,12 +27,11 @@ pub type ErrorMap = BTreeMap<Name, ErrorInfo>;
 
 pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt<'_>,
                                    span: Span,
-                                   token_tree: &[TokenTree])
+                                   tts: TokenStream)
                                    -> Box<dyn MacResult+'cx> {
-    let code = match token_tree {
-        [
-            TokenTree::Token(Token { kind: token::Ident(code, _), .. })
-        ] => code,
+    assert_eq!(tts.len(), 1);
+    let code = match tts.into_trees().next() {
+        Some(TokenTree::Token(Token { kind: token::Ident(code, _), .. })) => code,
         _ => unreachable!()
     };
 
@@ -62,20 +61,21 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt<'_>,
 
 pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt<'_>,
                                        span: Span,
-                                       token_tree: &[TokenTree])
+                                       tts: TokenStream)
                                        -> Box<dyn MacResult+'cx> {
-    let (code, description) = match token_tree {
-        [
-            TokenTree::Token(Token { kind: token::Ident(code, _), .. })
-        ] => {
-            (*code, None)
-        },
-        [
-            TokenTree::Token(Token { kind: token::Ident(code, _), .. }),
-            TokenTree::Token(Token { kind: token::Comma, .. }),
-            TokenTree::Token(Token { kind: token::Literal(token::Lit { symbol, .. }), ..})
-        ] => {
-            (*code, Some(*symbol))
+    assert!(tts.len() == 1 || tts.len() == 3);
+    let mut cursor = tts.into_trees();
+    let code = match cursor.next() {
+        Some(TokenTree::Token(Token { kind: token::Ident(code, _), .. })) => code,
+        _ => unreachable!()
+    };
+    let description = match (cursor.next(), cursor.next()) {
+        (None, None) => None,
+        (
+            Some(TokenTree::Token(Token { kind: token::Comma, .. })),
+            Some(TokenTree::Token(Token { kind: token::Literal(token::Lit { symbol, .. }), ..}))
+        ) => {
+            Some(symbol)
         },
         _ => unreachable!()
     };
@@ -121,12 +121,12 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt<'_>,
 
 pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt<'_>,
                                           span: Span,
-                                          token_tree: &[TokenTree])
+                                          tts: TokenStream)
                                           -> Box<dyn MacResult+'cx> {
-    assert_eq!(token_tree.len(), 3);
-    let ident = match &token_tree[2] {
+    assert_eq!(tts.len(), 3);
+    let ident = match tts.into_trees().nth(2) {
         // DIAGNOSTICS ident.
-        &TokenTree::Token(Token { kind: token::Ident(name, _), span })
+        Some(TokenTree::Token(Token { kind: token::Ident(name, _), span }))
         => Ident::new(name, span),
         _ => unreachable!()
     };
 
@@ -10,7 +10,7 @@ use crate::parse::token;
 use crate::ptr::P;
 use crate::symbol::{kw, sym, Ident, Symbol};
 use crate::{ThinVec, MACRO_ARGUMENTS};
-use crate::tokenstream::{self, TokenStream, TokenTree};
+use crate::tokenstream::{self, TokenStream};
 use crate::visit::Visitor;
 
 use errors::{DiagnosticBuilder, DiagnosticId};
@@ -235,18 +235,18 @@ pub trait TTMacroExpander {
 }
 
 pub type MacroExpanderFn =
-    for<'cx> fn(&'cx mut ExtCtxt<'_>, Span, &[tokenstream::TokenTree])
+    for<'cx> fn(&'cx mut ExtCtxt<'_>, Span, TokenStream)
     -> Box<dyn MacResult+'cx>;
 
 impl<F> TTMacroExpander for F
-    where F: for<'cx> Fn(&'cx mut ExtCtxt<'_>, Span, &[tokenstream::TokenTree])
+    where F: for<'cx> Fn(&'cx mut ExtCtxt<'_>, Span, TokenStream)
     -> Box<dyn MacResult+'cx>
 {
     fn expand<'cx>(
         &self,
         ecx: &'cx mut ExtCtxt<'_>,
         span: Span,
-        input: TokenStream,
+        mut input: TokenStream,
     ) -> Box<dyn MacResult+'cx> {
         struct AvoidInterpolatedIdents;
 
@@ -268,10 +268,8 @@ impl<F> TTMacroExpander for F
                 mut_visit::noop_visit_mac(mac, self)
             }
         }
 
-        let input: Vec<_> =
-            input.trees().map(|mut tt| { AvoidInterpolatedIdents.visit_tt(&mut tt); tt }).collect();
-        (*self)(ecx, span, &input)
+        AvoidInterpolatedIdents.visit_tts(&mut input);
+        (*self)(ecx, span, input)
     }
 }
 
@@ -677,7 +675,7 @@ impl SyntaxExtension {
     }
 
     pub fn dummy_bang(edition: Edition) -> SyntaxExtension {
-        fn expander<'cx>(_: &'cx mut ExtCtxt<'_>, span: Span, _: &[TokenTree])
+        fn expander<'cx>(_: &'cx mut ExtCtxt<'_>, span: Span, _: TokenStream)
                          -> Box<dyn MacResult + 'cx> {
             DummyResult::any(span)
         }
@@ -811,9 +809,8 @@ impl<'a> ExtCtxt<'a> {
     pub fn monotonic_expander<'b>(&'b mut self) -> expand::MacroExpander<'b, 'a> {
         expand::MacroExpander::new(self, true)
     }
 
-    pub fn new_parser_from_tts(&self, tts: &[tokenstream::TokenTree]) -> parser::Parser<'a> {
-        parse::stream_to_parser(self.parse_sess, tts.iter().cloned().collect(), MACRO_ARGUMENTS)
+    pub fn new_parser_from_tts(&self, stream: TokenStream) -> parser::Parser<'a> {
+        parse::stream_to_parser(self.parse_sess, stream, MACRO_ARGUMENTS)
     }
     pub fn source_map(&self) -> &'a SourceMap { self.parse_sess.source_map() }
     pub fn parse_sess(&self) -> &'a parse::ParseSess { self.parse_sess }
@@ -1019,7 +1016,7 @@ pub fn expr_to_string(cx: &mut ExtCtxt<'_>, expr: P<ast::Expr>, err_msg: &str)
 /// done as rarely as possible).
 pub fn check_zero_tts(cx: &ExtCtxt<'_>,
                       sp: Span,
-                      tts: &[tokenstream::TokenTree],
+                      tts: TokenStream,
                       name: &str) {
     if !tts.is_empty() {
         cx.span_err(sp, &format!("{} takes no arguments", name));
@@ -1030,7 +1027,7 @@ pub fn check_zero_tts(cx: &ExtCtxt<'_>,
 /// expect exactly one string literal, or emit an error and return `None`.
 pub fn get_single_str_from_tts(cx: &mut ExtCtxt<'_>,
                                sp: Span,
-                               tts: &[tokenstream::TokenTree],
+                               tts: TokenStream,
                                name: &str)
                                -> Option<String> {
     let mut p = cx.new_parser_from_tts(tts);
@@ -1053,7 +1050,7 @@ pub fn get_single_str_from_tts(cx: &mut ExtCtxt<'_>,
 /// parsing error, emit a non-fatal error and return `None`.
 pub fn get_exprs_from_tts(cx: &mut ExtCtxt<'_>,
                           sp: Span,
-                          tts: &[tokenstream::TokenTree]) -> Option<Vec<P<ast::Expr>>> {
+                          tts: TokenStream) -> Option<Vec<P<ast::Expr>>> {
     let mut p = cx.new_parser_from_tts(tts);
     let mut es = Vec::new();
     while p.token != token::Eof {
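The helpers in ext::base keep their names but now take the stream directly, and length/emptiness checks are made on the stream itself. A minimal sketch of the check_zero_tts shape, using proc_macro2 as a stand-in for the internal types and a Result in place of reporting through ExtCtxt (both are simplifications, not the compiler's actual signature):

```rust
// Sketch of the check_zero_tts pattern with a stream-taking signature.
// proc_macro2 is a stand-in; returning Result replaces cx.span_err here.
use proc_macro2::TokenStream;

fn check_zero_tts(tts: &TokenStream, name: &str) -> Result<(), String> {
    if !tts.is_empty() {
        return Err(format!("{} takes no arguments", name));
    }
    Ok(())
}

fn main() {
    assert!(check_zero_tts(&TokenStream::new(), "line!").is_ok());
    let args: TokenStream = "oops".parse().unwrap();
    assert!(check_zero_tts(&args, "line!").is_err());
}
```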
@@ -701,7 +701,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
         path: &Path,
         span: Span,
     ) -> AstFragment {
-        let mut parser = self.cx.new_parser_from_tts(&toks.into_trees().collect::<Vec<_>>());
+        let mut parser = self.cx.new_parser_from_tts(toks);
         match parser.parse_ast_fragment(kind, false) {
             Ok(fragment) => {
                 parser.ensure_complete_parse(path, kind.name(), span);
@@ -356,11 +356,7 @@ pub fn tt_to_string(tt: tokenstream::TokenTree) -> String {
     to_string(|s| s.print_tt(tt, false))
 }
 
-pub fn tts_to_string(tts: &[tokenstream::TokenTree]) -> String {
-    tokens_to_string(tts.iter().cloned().collect())
-}
-
-pub fn tokens_to_string(tokens: TokenStream) -> String {
+pub fn tts_to_string(tokens: TokenStream) -> String {
     to_string(|s| s.print_tts(tokens, false))
 }
 
@@ -506,7 +506,7 @@ impl Cursor {
 
 impl fmt::Display for TokenStream {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.write_str(&pprust::tokens_to_string(self.clone()))
+        f.write_str(&pprust::tts_to_string(self.clone()))
     }
 }
 
@@ -8,13 +8,12 @@ use errors::DiagnosticBuilder;
 
 use syntax::ast;
 use syntax::ext::base::{self, *};
-use syntax::parse;
 use syntax::parse::token::{self, Token};
 use syntax::ptr::P;
 use syntax::symbol::{kw, sym, Symbol};
 use syntax::ast::AsmDialect;
 use syntax_pos::Span;
-use syntax::tokenstream;
+use syntax::tokenstream::{self, TokenStream};
 use syntax::{span_err, struct_span_err};
 
 enum State {
@@ -43,7 +42,7 @@ const OPTIONS: &[Symbol] = &[sym::volatile, sym::alignstack, sym::intel];
 
 pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt<'_>,
                        sp: Span,
-                       tts: &[tokenstream::TokenTree])
+                       tts: TokenStream)
                        -> Box<dyn base::MacResult + 'cx> {
     let mut inline_asm = match parse_inline_asm(cx, sp, tts) {
         Ok(Some(inline_asm)) => inline_asm,
@@ -71,20 +70,20 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt<'_>,
 fn parse_inline_asm<'a>(
     cx: &mut ExtCtxt<'a>,
     sp: Span,
-    tts: &[tokenstream::TokenTree],
+    tts: TokenStream,
 ) -> Result<Option<ast::InlineAsm>, DiagnosticBuilder<'a>> {
     // Split the tts before the first colon, to avoid `asm!("x": y)` being
     // parsed as `asm!(z)` with `z = "x": y` which is type ascription.
-    let first_colon = tts.iter()
+    let first_colon = tts.trees()
         .position(|tt| {
-            match *tt {
+            match tt {
                 tokenstream::TokenTree::Token(Token { kind: token::Colon, .. }) |
                 tokenstream::TokenTree::Token(Token { kind: token::ModSep, .. }) => true,
                 _ => false,
             }
         })
         .unwrap_or(tts.len());
-    let mut p = cx.new_parser_from_tts(&tts[first_colon..]);
+    let mut p = cx.new_parser_from_tts(tts.trees().skip(first_colon).collect());
     let mut asm = kw::Invalid;
     let mut asm_str_style = None;
     let mut outputs = Vec::new();
@@ -110,7 +109,8 @@ fn parse_inline_asm<'a>(
                 ));
             }
             // Nested parser, stop before the first colon (see above).
-            let mut p2 = cx.new_parser_from_tts(&tts[..first_colon]);
+            let mut p2 =
+                cx.new_parser_from_tts(tts.trees().take(first_colon).collect());
 
             if p2.token == token::Eof {
                 let mut err =
@@ -129,12 +129,8 @@ fn parse_inline_asm<'a>(
             // This is most likely malformed.
             if p2.token != token::Eof {
                 let mut extra_tts = p2.parse_all_token_trees()?;
-                extra_tts.extend(tts[first_colon..].iter().cloned());
-                p = parse::stream_to_parser(
-                    cx.parse_sess,
-                    extra_tts.into_iter().collect(),
-                    Some("inline assembly"),
-                );
+                extra_tts.extend(tts.trees().skip(first_colon));
+                p = cx.new_parser_from_tts(extra_tts.into_iter().collect());
             }
 
             asm = s;
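The asm! change replaces slice indexing (`&tts[..first_colon]`, `&tts[first_colon..]`) with iterator-based splitting over the stream's trees. A rough equivalent of that split, written against proc_macro2 rather than the internal Cursor type (the helper name and the buffering into a Vec are illustrative choices, not the compiler's code):

```rust
// Sketch: split a token stream at the first top-level `:`, mirroring the
// take(first_colon)/skip(first_colon) pattern in the hunk above.
use proc_macro2::{TokenStream, TokenTree};

fn split_at_first_colon(tts: TokenStream) -> (TokenStream, TokenStream) {
    let trees: Vec<TokenTree> = tts.into_iter().collect();
    let first_colon = trees
        .iter()
        .position(|tt| matches!(tt, TokenTree::Punct(p) if p.as_char() == ':'))
        .unwrap_or(trees.len());
    let head: TokenStream = trees[..first_colon].iter().cloned().collect();
    let tail: TokenStream = trees[first_colon..].iter().cloned().collect();
    (head, tail)
}

fn main() {
    let (head, tail) = split_at_first_colon("\"x\" : \"=r\"(y)".parse().unwrap());
    println!("head: {}", head);
    println!("tail: {}", tail);
}
```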
@@ -13,7 +13,7 @@ use syntax_pos::{Span, DUMMY_SP};
 pub fn expand_assert<'cx>(
     cx: &'cx mut ExtCtxt<'_>,
     sp: Span,
-    tts: &[TokenTree],
+    tts: TokenStream,
 ) -> Box<dyn MacResult + 'cx> {
     let Assert { cond_expr, custom_message } = match parse_assert(cx, sp, tts) {
         Ok(assert) => assert,
@@ -59,9 +59,9 @@ struct Assert {
 fn parse_assert<'a>(
     cx: &mut ExtCtxt<'a>,
     sp: Span,
-    tts: &[TokenTree]
+    stream: TokenStream
 ) -> Result<Assert, DiagnosticBuilder<'a>> {
-    let mut parser = cx.new_parser_from_tts(tts);
+    let mut parser = cx.new_parser_from_tts(stream);
 
     if parser.token == token::Eof {
         let mut err = cx.struct_span_err(sp, "macro requires a boolean expression as an argument");
@@ -7,14 +7,14 @@ use errors::DiagnosticBuilder;
 use syntax::ast;
 use syntax::ext::base::{self, *};
 use syntax::attr;
-use syntax::tokenstream;
+use syntax::tokenstream::TokenStream;
 use syntax::parse::token;
 use syntax_pos::Span;
 
 pub fn expand_cfg(
     cx: &mut ExtCtxt<'_>,
     sp: Span,
-    tts: &[tokenstream::TokenTree],
+    tts: TokenStream,
 ) -> Box<dyn base::MacResult + 'static> {
     let sp = cx.with_legacy_ctxt(sp);
 
@@ -33,7 +33,7 @@ pub fn expand_cfg(
 fn parse_cfg<'a>(
     cx: &mut ExtCtxt<'a>,
     sp: Span,
-    tts: &[tokenstream::TokenTree],
+    tts: TokenStream,
 ) -> Result<ast::MetaItem, DiagnosticBuilder<'a>> {
     let mut p = cx.new_parser_from_tts(tts);
 
@@ -2,11 +2,11 @@
 
 use syntax::ext::base::{self, *};
 use syntax_pos::Span;
-use syntax::tokenstream;
+use syntax::tokenstream::TokenStream;
 
 pub fn expand_compile_error<'cx>(cx: &'cx mut ExtCtxt<'_>,
                                  sp: Span,
-                                 tts: &[tokenstream::TokenTree])
+                                 tts: TokenStream)
                                  -> Box<dyn base::MacResult + 'cx> {
     let var = match get_single_str_from_tts(cx, sp, tts, "compile_error!") {
         None => return DummyResult::any(sp),
@@ -1,14 +1,14 @@
 use syntax::ast;
 use syntax::ext::base::{self, DummyResult};
 use syntax::symbol::Symbol;
-use syntax::tokenstream;
+use syntax::tokenstream::TokenStream;
 
 use std::string::String;
 
-pub fn expand_syntax_ext(
+pub fn expand_concat(
     cx: &mut base::ExtCtxt<'_>,
     sp: syntax_pos::Span,
-    tts: &[tokenstream::TokenTree],
+    tts: TokenStream,
 ) -> Box<dyn base::MacResult + 'static> {
     let es = match base::get_exprs_from_tts(cx, sp, tts) {
         Some(e) => e,
@@ -6,21 +6,21 @@ use syntax::parse::token::{self, Token};
 use syntax::ptr::P;
 use syntax_pos::Span;
 use syntax_pos::symbol::Symbol;
-use syntax::tokenstream::TokenTree;
+use syntax::tokenstream::{TokenTree, TokenStream};
 
-pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt<'_>,
-                              sp: Span,
-                              tts: &[TokenTree])
-                              -> Box<dyn base::MacResult + 'cx> {
+pub fn expand_concat_idents<'cx>(cx: &'cx mut ExtCtxt<'_>,
+                                 sp: Span,
+                                 tts: TokenStream)
+                                 -> Box<dyn base::MacResult + 'cx> {
     if tts.is_empty() {
         cx.span_err(sp, "concat_idents! takes 1 or more arguments.");
         return DummyResult::any(sp);
     }
 
     let mut res_str = String::new();
-    for (i, e) in tts.iter().enumerate() {
+    for (i, e) in tts.into_trees().enumerate() {
         if i & 1 == 1 {
-            match *e {
+            match e {
                 TokenTree::Token(Token { kind: token::Comma, .. }) => {}
                 _ => {
                     cx.span_err(sp, "concat_idents! expecting comma.");
@@ -28,7 +28,7 @@ pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt<'_>,
                 }
             }
         } else {
-            match *e {
+            match e {
                 TokenTree::Token(Token { kind: token::Ident(name, _), .. }) =>
                     res_str.push_str(&name.as_str()),
                 _ => {
@@ -7,13 +7,13 @@ use syntax::ast::{self, Ident, GenericArg};
 use syntax::ext::base::{self, *};
 use syntax::symbol::{kw, sym, Symbol};
 use syntax_pos::Span;
-use syntax::tokenstream;
+use syntax::tokenstream::TokenStream;
 
 use std::env;
 
 pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt<'_>,
                               sp: Span,
-                              tts: &[tokenstream::TokenTree])
+                              tts: TokenStream)
                               -> Box<dyn base::MacResult + 'cx> {
     let var = match get_single_str_from_tts(cx, sp, tts, "option_env!") {
         None => return DummyResult::any(sp),
@@ -45,7 +45,7 @@ pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt<'_>,
 
 pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt<'_>,
                        sp: Span,
-                       tts: &[tokenstream::TokenTree])
+                       tts: TokenStream)
                        -> Box<dyn base::MacResult + 'cx> {
     let mut exprs = match get_exprs_from_tts(cx, sp, tts) {
         Some(ref exprs) if exprs.is_empty() => {
@@ -11,7 +11,7 @@ use syntax::ext::base::{self, *};
 use syntax::parse::token;
 use syntax::ptr::P;
 use syntax::symbol::{Symbol, sym};
-use syntax::tokenstream;
+use syntax::tokenstream::TokenStream;
 use syntax_pos::{MultiSpan, Span};
 
 use rustc_data_structures::fx::{FxHashMap, FxHashSet};
@@ -126,7 +126,7 @@ struct Context<'a, 'b> {
 fn parse_args<'a>(
     ecx: &mut ExtCtxt<'a>,
     sp: Span,
-    tts: &[tokenstream::TokenTree]
+    tts: TokenStream,
 ) -> Result<(P<ast::Expr>, Vec<P<ast::Expr>>, FxHashMap<Symbol, usize>), DiagnosticBuilder<'a>> {
     let mut args = Vec::<P<ast::Expr>>::new();
     let mut names = FxHashMap::<Symbol, usize>::default();
@@ -794,7 +794,7 @@ impl<'a, 'b> Context<'a, 'b> {
 fn expand_format_args_impl<'cx>(
     ecx: &'cx mut ExtCtxt<'_>,
     mut sp: Span,
-    tts: &[tokenstream::TokenTree],
+    tts: TokenStream,
     nl: bool,
 ) -> Box<dyn base::MacResult + 'cx> {
     sp = ecx.with_def_site_ctxt(sp);
@@ -812,7 +812,7 @@ fn expand_format_args_impl<'cx>(
 pub fn expand_format_args<'cx>(
     ecx: &'cx mut ExtCtxt<'_>,
     sp: Span,
-    tts: &[tokenstream::TokenTree],
+    tts: TokenStream,
 ) -> Box<dyn base::MacResult + 'cx> {
     expand_format_args_impl(ecx, sp, tts, false)
 }
@@ -820,7 +820,7 @@ pub fn expand_format_args<'cx>(
 pub fn expand_format_args_nl<'cx>(
     ecx: &'cx mut ExtCtxt<'_>,
     sp: Span,
-    tts: &[tokenstream::TokenTree],
+    tts: TokenStream,
 ) -> Box<dyn base::MacResult + 'cx> {
     expand_format_args_impl(ecx, sp, tts, true)
 }
@@ -16,12 +16,12 @@ use syntax::ext::base::{self, *};
 use syntax::parse::token;
 use syntax::ptr::P;
 use syntax_pos::Span;
-use syntax::tokenstream;
+use syntax::tokenstream::TokenStream;
 use smallvec::smallvec;
 
 pub fn expand_global_asm<'cx>(cx: &'cx mut ExtCtxt<'_>,
                               sp: Span,
-                              tts: &[tokenstream::TokenTree]) -> Box<dyn base::MacResult + 'cx> {
+                              tts: TokenStream) -> Box<dyn base::MacResult + 'cx> {
     match parse_global_asm(cx, sp, tts) {
         Ok(Some(global_asm)) => {
             MacEager::items(smallvec![P(ast::Item {
@@ -45,7 +45,7 @@ pub fn expand_global_asm<'cx>(cx: &'cx mut ExtCtxt<'_>,
 fn parse_global_asm<'a>(
     cx: &mut ExtCtxt<'a>,
     sp: Span,
-    tts: &[tokenstream::TokenTree]
+    tts: TokenStream
 ) -> Result<Option<ast::GlobalAsm>, DiagnosticBuilder<'a>> {
     let mut p = cx.new_parser_from_tts(tts);
 
@@ -67,8 +67,8 @@ pub fn register_builtin_macros(resolver: &mut dyn syntax::ext::base::Resolver, e
         cfg: cfg::expand_cfg,
         column: source_util::expand_column,
         compile_error: compile_error::expand_compile_error,
-        concat_idents: concat_idents::expand_syntax_ext,
-        concat: concat::expand_syntax_ext,
+        concat_idents: concat_idents::expand_concat_idents,
+        concat: concat::expand_concat,
         env: env::expand_env,
         file: source_util::expand_file,
         format_args_nl: format::expand_format_args_nl,
@@ -78,7 +78,7 @@ pub fn register_builtin_macros(resolver: &mut dyn syntax::ext::base::Resolver, e
         include_str: source_util::expand_include_str,
         include: source_util::expand_include,
         line: source_util::expand_line,
-        log_syntax: log_syntax::expand_syntax_ext,
+        log_syntax: log_syntax::expand_log_syntax,
         module_path: source_util::expand_mod,
        option_env: env::expand_option_env,
         stringify: source_util::expand_stringify,
@@ -1,11 +1,11 @@
 use syntax::ext::base;
 use syntax::print;
-use syntax::tokenstream;
+use syntax::tokenstream::TokenStream;
 use syntax_pos;
 
-pub fn expand_syntax_ext<'cx>(_cx: &'cx mut base::ExtCtxt<'_>,
+pub fn expand_log_syntax<'cx>(_cx: &'cx mut base::ExtCtxt<'_>,
                               sp: syntax_pos::Span,
-                              tts: &[tokenstream::TokenTree])
+                              tts: TokenStream)
                               -> Box<dyn base::MacResult + 'cx> {
     println!("{}", print::pprust::tts_to_string(tts));
 
@@ -4,7 +4,7 @@ use syntax::parse::{self, token, DirectoryOwnership};
 use syntax::print::pprust;
 use syntax::ptr::P;
 use syntax::symbol::Symbol;
-use syntax::tokenstream;
+use syntax::tokenstream::TokenStream;
 
 use smallvec::SmallVec;
 use syntax_pos::{self, Pos, Span};
@@ -16,7 +16,7 @@ use rustc_data_structures::sync::Lrc;
 // a given file into the current one.
 
 /// line!(): expands to the current line number
-pub fn expand_line(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
+pub fn expand_line(cx: &mut ExtCtxt<'_>, sp: Span, tts: TokenStream)
                    -> Box<dyn base::MacResult+'static> {
     base::check_zero_tts(cx, sp, tts, "line!");
 
@@ -27,7 +27,7 @@ pub fn expand_line(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
 }
 
 /* column!(): expands to the current column number */
-pub fn expand_column(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
+pub fn expand_column(cx: &mut ExtCtxt<'_>, sp: Span, tts: TokenStream)
                      -> Box<dyn base::MacResult+'static> {
     base::check_zero_tts(cx, sp, tts, "column!");
 
@@ -40,7 +40,7 @@ pub fn expand_column(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
 /// file!(): expands to the current filename */
 /// The source_file (`loc.file`) contains a bunch more information we could spit
 /// out if we wanted.
-pub fn expand_file(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
+pub fn expand_file(cx: &mut ExtCtxt<'_>, sp: Span, tts: TokenStream)
                    -> Box<dyn base::MacResult+'static> {
     base::check_zero_tts(cx, sp, tts, "file!");
 
@@ -49,13 +49,13 @@ pub fn expand_file(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
     base::MacEager::expr(cx.expr_str(topmost, Symbol::intern(&loc.file.name.to_string())))
 }
 
-pub fn expand_stringify(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
+pub fn expand_stringify(cx: &mut ExtCtxt<'_>, sp: Span, tts: TokenStream)
                         -> Box<dyn base::MacResult+'static> {
     let s = pprust::tts_to_string(tts);
     base::MacEager::expr(cx.expr_str(sp, Symbol::intern(&s)))
 }
 
-pub fn expand_mod(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
+pub fn expand_mod(cx: &mut ExtCtxt<'_>, sp: Span, tts: TokenStream)
                   -> Box<dyn base::MacResult+'static> {
     base::check_zero_tts(cx, sp, tts, "module_path!");
     let mod_path = &cx.current_expansion.module.mod_path;
@@ -67,7 +67,7 @@ pub fn expand_mod(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
 /// include! : parse the given file as an expr
 /// This is generally a bad idea because it's going to behave
 /// unhygienically.
-pub fn expand_include<'cx>(cx: &'cx mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
+pub fn expand_include<'cx>(cx: &'cx mut ExtCtxt<'_>, sp: Span, tts: TokenStream)
                            -> Box<dyn base::MacResult+'cx> {
     let file = match get_single_str_from_tts(cx, sp, tts, "include!") {
         Some(f) => f,
@@ -105,7 +105,7 @@ pub fn expand_include<'cx>(cx: &'cx mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
 }
 
 // include_str! : read the given file, insert it as a literal string expr
-pub fn expand_include_str(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
+pub fn expand_include_str(cx: &mut ExtCtxt<'_>, sp: Span, tts: TokenStream)
                           -> Box<dyn base::MacResult+'static> {
     let file = match get_single_str_from_tts(cx, sp, tts, "include_str!") {
         Some(f) => f,
@@ -130,7 +130,7 @@ pub fn expand_include_str(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
     }
 }
 
-pub fn expand_include_bytes(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
+pub fn expand_include_bytes(cx: &mut ExtCtxt<'_>, sp: Span, tts: TokenStream)
                             -> Box<dyn base::MacResult+'static> {
     let file = match get_single_str_from_tts(cx, sp, tts, "include_bytes!") {
         Some(f) => f,
@@ -1,20 +1,27 @@
 use syntax::ext::base::{self, ExtCtxt};
 use syntax::symbol::kw;
 use syntax_pos::Span;
-use syntax::tokenstream::TokenTree;
+use syntax::tokenstream::{TokenTree, TokenStream};
 
 pub fn expand_trace_macros(cx: &mut ExtCtxt<'_>,
                            sp: Span,
-                           tt: &[TokenTree])
+                           tt: TokenStream)
                            -> Box<dyn base::MacResult + 'static> {
-    match tt {
-        [TokenTree::Token(token)] if token.is_keyword(kw::True) => {
-            cx.set_trace_macros(true);
-        }
-        [TokenTree::Token(token)] if token.is_keyword(kw::False) => {
-            cx.set_trace_macros(false);
-        }
-        _ => cx.span_err(sp, "trace_macros! accepts only `true` or `false`"),
+    let mut cursor = tt.into_trees();
+    let mut err = false;
+    let value = match &cursor.next() {
+        Some(TokenTree::Token(token)) if token.is_keyword(kw::True) => true,
+        Some(TokenTree::Token(token)) if token.is_keyword(kw::False) => false,
+        _ => {
+            err = true;
+            false
+        },
+    };
+    err |= cursor.next().is_some();
+    if err {
+        cx.span_err(sp, "trace_macros! accepts only `true` or `false`")
+    } else {
+        cx.set_trace_macros(value);
     }
 
     base::DummyResult::any_valid(sp)
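The trace_macros! hunk shows the general rewrite pattern in this commit: instead of slice patterns over &[TokenTree], pull trees off the stream one at a time and reject any leftovers. A standalone sketch of that shape, with proc_macro2 standing in for the internal types and parse_bool_flag as an illustrative name:

```rust
// Sketch of cursor-style matching: exactly one `true`/`false` identifier,
// nothing trailing. Mirrors the rewritten expand_trace_macros above.
use proc_macro2::{TokenStream, TokenTree};

fn parse_bool_flag(tts: TokenStream) -> Option<bool> {
    let mut cursor = tts.into_iter();
    let value = match cursor.next() {
        Some(TokenTree::Ident(id)) if id.to_string() == "true" => true,
        Some(TokenTree::Ident(id)) if id.to_string() == "false" => false,
        _ => return None,
    };
    // A second token means the invocation is malformed.
    if cursor.next().is_some() {
        return None;
    }
    Some(value)
}

fn main() {
    assert_eq!(parse_bool_flag("true".parse().unwrap()), Some(true));
    assert_eq!(parse_bool_flag("true false".parse().unwrap()), None);
}
```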
@@ -15,12 +15,12 @@ extern crate rustc;
 extern crate rustc_driver;
 
 use syntax::parse::token::{self, Token};
-use syntax::tokenstream::TokenTree;
+use syntax::tokenstream::{TokenTree, TokenStream};
 use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacEager};
 use syntax_pos::Span;
 use rustc_driver::plugin::Registry;
 
-fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
+fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: TokenStream)
         -> Box<dyn MacResult + 'static> {
 
     static NUMERALS: &'static [(&'static str, usize)] = &[
@@ -36,7 +36,7 @@ fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
         return DummyResult::any(sp);
     }
 
-    let text = match args[0] {
+    let text = match args.into_trees().next().unwrap() {
         TokenTree::Token(Token { kind: token::Ident(s, _), .. }) => s.to_string(),
         _ => {
             cx.span_err(sp, "argument should be a single identifier");