Refactor TokenStream

parent ec29011346
commit 2dc60b1180
@@ -103,7 +103,7 @@ DEPS_syntax_ext := syntax syntax_pos rustc_errors fmt_macros proc_macro
 DEPS_proc_macro := syntax syntax_pos rustc_plugin log
 DEPS_syntax_pos := serialize
 DEPS_proc_macro_tokens := syntax syntax_pos log
-DEPS_proc_macro_plugin := syntax syntax_pos rustc_plugin log proc_macro_tokens
+DEPS_proc_macro_plugin := syntax syntax_pos rustc_plugin
 
 DEPS_rustc_const_math := std syntax log serialize rustc_i128
 DEPS_rustc_const_eval := rustc_const_math rustc syntax log serialize \
src/Cargo.lock | 3 (generated)
@@ -208,10 +208,9 @@ dependencies = [
 name = "proc_macro_plugin"
 version = "0.0.0"
 dependencies = [
  "log 0.0.0",
- "proc_macro_tokens 0.0.0",
  "rustc_plugin 0.0.0",
  "syntax 0.0.0",
  "syntax_pos 0.0.0",
 ]
 
 [[package]]
@@ -82,14 +82,15 @@ pub mod __internal {
     use syntax::ast;
     use syntax::ptr::P;
     use syntax::parse::{self, token, ParseSess};
-    use syntax::tokenstream::TokenStream as TokenStream_;
+    use syntax::tokenstream::{TokenTree, TokenStream as TokenStream_};
 
     use super::{TokenStream, LexError};
 
     pub fn new_token_stream(item: P<ast::Item>) -> TokenStream {
-        TokenStream { inner: TokenStream_::from_tokens(vec![
-            token::Interpolated(Rc::new(token::NtItem(item)))
-        ])}
+        TokenStream {
+            inner: TokenTree::Token(item.span, token::Interpolated(Rc::new(token::NtItem(item))))
+                       .into()
+        }
     }
 
     pub fn token_stream_wrap(inner: TokenStream_) -> TokenStream {
@@ -175,7 +176,7 @@ impl FromStr for TokenStream {
             let tts = try!(parse::parse_tts_from_source_str(name, src, sess)
                 .map_err(parse_to_lex_err));
 
-            Ok(__internal::token_stream_wrap(TokenStream_::from_tts(tts)))
+            Ok(__internal::token_stream_wrap(tts.into_iter().collect()))
         })
     }
 }
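Both hunks above replace bespoke constructors (`from_tokens`, `from_tts`) with standard conversions. A minimal sketch of the resulting idiom, assuming this commit's `syntax::tokenstream` API (the function names below are illustrative, not part of the diff):

use syntax::tokenstream::{TokenTree, TokenStream};

fn to_stream(trees: Vec<TokenTree>) -> TokenStream {
    // Any Vec<TokenTree> can now be collected, via the new FromIterator impl.
    trees.into_iter().collect()
}

fn single(tree: TokenTree) -> TokenStream {
    // A single tree converts via the new From<TokenTree> impl.
    tree.into()
}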
@@ -8,7 +8,6 @@ path = "lib.rs"
 crate-type = ["dylib"]
 
 [dependencies]
 log = { path = "../liblog" }
 rustc_plugin = { path = "../librustc_plugin" }
 syntax = { path = "../libsyntax" }
-proc_macro_tokens = { path = "../libproc_macro_tokens" }
 syntax_pos = { path = "../libsyntax_pos" }
@@ -15,11 +15,8 @@
 //! ## Usage
 //! This crate provides the `qquote!` macro for syntax creation.
 //!
-//! The `qquote!` macro imports `syntax::ext::proc_macro_shim::prelude::*`, so you
-//! will need to `extern crate syntax` for usage. (This is a temporary solution until more
-//! of the external API in libproc_macro_tokens is stabilized to support the token construction
-//! operations that the qausiquoter relies on.) The shim file also provides additional
-//! operations, such as `build_block_emitter` (as used in the `cond` example below).
+//! The `qquote!` macro uses the crate `syntax`, so users must declare `extern crate syntax;`
+//! at the crate root. This is a temporary solution until we have better hygiene.
 //!
 //! ## Quasiquotation
 //!
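A hedged usage sketch matching the new doc comment, assuming the nightly plugin interface of this era (the exact invocation surface is not shown in this diff):

#![feature(plugin)]
#![plugin(proc_macro_plugin)]

extern crate syntax; // required at the crate root, per the doc comment above

use syntax::tokenstream::TokenStream;

fn build_add() -> TokenStream {
    // Expands at compile time into code that reconstructs the tokens `1 + 2`.
    qquote!(1 + 2)
}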
@@ -88,19 +85,20 @@
 
 extern crate rustc_plugin;
 extern crate syntax;
-extern crate proc_macro_tokens;
-#[macro_use] extern crate log;
+extern crate syntax_pos;
 
 mod qquote;
 
 use qquote::qquote;
 
 use rustc_plugin::Registry;
+use syntax::ext::base::SyntaxExtension;
+use syntax::symbol::Symbol;
 
 // ____________________________________________________________________________________________
 // Main macro definition
 
 #[plugin_registrar]
 pub fn plugin_registrar(reg: &mut Registry) {
-    reg.register_macro("qquote", qquote);
+    reg.register_syntax_extension(Symbol::intern("qquote"),
+                                  SyntaxExtension::ProcMacro(Box::new(qquote)));
 }
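The registration change is the heart of the refactor: `qquote` is no longer an `ExtCtxt`-based expander but a plain token-to-token function boxed into `SyntaxExtension::ProcMacro`. A sketch of the shape this requires, with a hypothetical `noop` macro (signature inferred from this diff):

fn noop(stream: TokenStream) -> TokenStream {
    stream // identity expansion: emit the input tokens unchanged
}

// Registration then mirrors the hunk above:
// reg.register_syntax_extension(Symbol::intern("noop"),
//                               SyntaxExtension::ProcMacro(Box::new(noop)));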
@@ -9,463 +9,223 @@
 // except according to those terms.
 
 //! # Quasiquoter
-//! This file contains the implementation internals of the quasiquoter provided by `quote!`.
-//!
-//! ## Ouput
-//! The quasiquoter produces output of the form:
-//! let tmp0 = ...;
-//! let tmp1 = ...;
-//! ...
-//! concat(from_tokens(...), concat(...))
-//!
-//! To the more explicit, the quasiquoter produces a series of bindings that each
-//! construct TokenStreams via constructing Tokens and using `from_tokens`, ultimately
-//! invoking `concat` on these bindings (and inlined expressions) to construct a
-//! TokenStream that resembles the output syntax.
-//!
-
-use proc_macro_tokens::build::*;
-use proc_macro_tokens::parse::lex;
-
-use qquote::int_build::*;
+//! This file contains the implementation internals of the quasiquoter provided by `qquote!`.
 
 use syntax::ast::Ident;
-use syntax::codemap::Span;
-use syntax::ext::base::*;
-use syntax::ext::base;
-use syntax::ext::proc_macro_shim::build_block_emitter;
-use syntax::parse::token::{self, Token};
-use syntax::print::pprust;
+use syntax::parse::token::{self, Token, Lit};
 use syntax::symbol::Symbol;
-use syntax::tokenstream::{TokenTree, TokenStream};
+use syntax::tokenstream::{self, Delimited, TokenTree, TokenStream};
+use syntax_pos::DUMMY_SP;
+
+use std::rc::Rc;
 
 // ____________________________________________________________________________________________
 // Main definition
+
 /// The user should use the macro, not this procedure.
-pub fn qquote<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[TokenTree])
-                   -> Box<base::MacResult + 'cx> {
-    debug!("\nTTs in: {:?}\n", pprust::tts_to_string(&tts[..]));
-    let output = qquoter(cx, TokenStream::from_tts(tts.clone().to_owned()));
-    debug!("\nQQ out: {}\n", pprust::tts_to_string(&output.to_tts()[..]));
-    let imports = concat(lex("use syntax::ext::proc_macro_shim::prelude::*;"),
-                         lex("use proc_macro_tokens::prelude::*;"));
-    build_block_emitter(cx, sp, build_brace_delimited(concat(imports, output)))
+pub fn qquote<'cx>(stream: TokenStream) -> TokenStream {
+    stream.quote()
 }
 
 // ____________________________________________________________________________________________
-// Datatype Definitions
-
-#[derive(Debug)]
-struct QDelimited {
-    delim: token::DelimToken,
-    open_span: Span,
-    tts: Vec<Qtt>,
-    close_span: Span,
-}
-
-#[derive(Debug)]
-enum Qtt {
-    TT(TokenTree),
-    Delimited(QDelimited),
-    QIdent(TokenTree),
-}
-
-type Bindings = Vec<(Ident, TokenStream)>;
-
-// ____________________________________________________________________________________________
-// Quasiquoter Algorithm
-// This algorithm works as follows:
-// Input: TokenStream
-// 1. Walk the TokenStream, gathering up the unquoted expressions and marking them separately.
-// 2. Hoist any unquoted term into its own let-binding via a gensym'd identifier
-// 3. Convert the body from a `complex expression` into a simplified one via `convert_complex_tts
-// 4. Stitch everything together with `concat`.
-fn qquoter<'cx>(cx: &'cx mut ExtCtxt, ts: TokenStream) -> TokenStream {
-    if ts.is_empty() {
-        return lex("TokenStream::mk_empty()");
-    }
-    let qq_res = qquote_iter(cx, 0, ts);
-    let mut bindings = qq_res.0;
-    let body = qq_res.1;
-    let mut cct_res = convert_complex_tts(cx, body);
-
-    bindings.append(&mut cct_res.0);
-
-    if bindings.is_empty() {
-        cct_res.1
-    } else {
-        debug!("BINDINGS");
-        for b in bindings.clone() {
-            debug!("{:?} = {}", b.0, pprust::tts_to_string(&b.1.to_tts()[..]));
-        }
-        TokenStream::concat(unravel(bindings), cct_res.1)
-    }
-}
-
-fn qquote_iter<'cx>(cx: &'cx mut ExtCtxt, depth: i64, ts: TokenStream) -> (Bindings, Vec<Qtt>) {
-    let mut depth = depth;
-    let mut bindings: Bindings = Vec::new();
-    let mut output: Vec<Qtt> = Vec::new();
-
-    let mut iter = ts.iter();
-
-    loop {
-        let next = iter.next();
-        if next.is_none() {
-            break;
-        }
-        let next = next.unwrap().clone();
-        match next {
-            TokenTree::Token(_, Token::Ident(id)) if is_unquote(id) => {
-                if depth == 0 {
-                    let exp = iter.next();
-                    if exp.is_none() {
-                        break;
-                    } // produce an error or something first
-                    let exp = vec![exp.unwrap().to_owned()];
-                    debug!("RHS: {:?}", exp.clone());
-                    let new_id = Ident::with_empty_ctxt(Symbol::gensym("tmp"));
-                    debug!("RHS TS: {:?}", TokenStream::from_tts(exp.clone()));
-                    debug!("RHS TS TT: {:?}", TokenStream::from_tts(exp.clone()).to_vec());
-                    bindings.push((new_id, TokenStream::from_tts(exp)));
-                    debug!("BINDINGS");
-                    for b in bindings.clone() {
-                        debug!("{:?} = {}", b.0, pprust::tts_to_string(&b.1.to_tts()[..]));
-                    }
-                    output.push(Qtt::QIdent(as_tt(Token::Ident(new_id.clone()))));
-                } else {
-                    depth = depth - 1;
-                    output.push(Qtt::TT(next.clone()));
-                }
-            }
-            TokenTree::Token(_, Token::Ident(id)) if is_qquote(id) => {
-                depth = depth + 1;
-            }
-            TokenTree::Delimited(_, ref dl) => {
-                let br = qquote_iter(cx, depth, TokenStream::from_tts(dl.tts.clone().to_owned()));
-                let mut nested_bindings = br.0;
-                let nested = br.1;
-                bindings.append(&mut nested_bindings);
-
-                let new_dl = QDelimited {
-                    delim: dl.delim,
-                    open_span: dl.open_span,
-                    tts: nested,
-                    close_span: dl.close_span,
-                };
-
-                output.push(Qtt::Delimited(new_dl));
-            }
-            t => {
-                output.push(Qtt::TT(t));
-            }
-        }
-    }
-
-    (bindings, output)
-}
-
-// ____________________________________________________________________________________________
-// Turns QQTs into a TokenStream and some Bindings.
-/// Construct a chain of concatenations.
-fn unravel_concats(tss: Vec<TokenStream>) -> TokenStream {
-    let mut pushes: Vec<TokenStream> =
-        tss.into_iter().filter(|&ref ts| !ts.is_empty()).collect();
-    let mut output = match pushes.pop() {
-        Some(ts) => ts,
-        None => {
-            return TokenStream::mk_empty();
-        }
-    };
-
-    while let Some(ts) = pushes.pop() {
-        output = build_fn_call(Ident::from_str("concat"),
-                               concat(concat(ts,
-                                             from_tokens(vec![Token::Comma])),
-                                      output));
-    }
-    output
-}
-
-/// This converts the vector of Qtts into a set of Bindings for construction and the main
-/// body as a TokenStream.
-fn convert_complex_tts<'cx>(cx: &'cx mut ExtCtxt, tts: Vec<Qtt>) -> (Bindings, TokenStream) {
-    let mut pushes: Vec<TokenStream> = Vec::new();
-    let mut bindings: Bindings = Vec::new();
-
-    let mut iter = tts.into_iter();
-
-    loop {
-        let next = iter.next();
-        if next.is_none() {
-            break;
-        }
-        let next = next.unwrap();
-        match next {
-            Qtt::TT(TokenTree::Token(_, t)) => {
-                let token_out = emit_token(t);
-                pushes.push(token_out);
-            }
-            // FIXME handle sequence repetition tokens
-            Qtt::Delimited(qdl) => {
-                debug!(" Delimited: {:?} ", qdl.tts);
-                let fresh_id = Ident::with_empty_ctxt(Symbol::gensym("qdl_tmp"));
-                let (mut nested_bindings, nested_toks) = convert_complex_tts(cx, qdl.tts);
-
-                let body = if nested_toks.is_empty() {
-                    assert!(nested_bindings.is_empty());
-                    build_mod_call(vec![Ident::from_str("TokenStream"),
-                                        Ident::from_str("mk_empty")],
-                                   TokenStream::mk_empty())
-                } else {
-                    bindings.append(&mut nested_bindings);
-                    bindings.push((fresh_id, nested_toks));
-                    TokenStream::from_tokens(vec![Token::Ident(fresh_id)])
-                };
-
-                let delimitiers = build_delim_tok(qdl.delim);
-
-                pushes.push(build_mod_call(vec![Ident::from_str("proc_macro_tokens"),
-                                                Ident::from_str("build"),
-                                                Ident::from_str("build_delimited")],
-                                           flatten(vec![body,
-                                                        lex(","),
-                                                        delimitiers].into_iter())));
-            }
-            Qtt::QIdent(t) => {
-                pushes.push(TokenStream::from_tts(vec![t]));
-                pushes.push(TokenStream::mk_empty());
-            }
-            _ => panic!("Unhandled case!"),
-        }
-
-    }
-
-    (bindings, unravel_concats(pushes))
-}
-
-// ____________________________________________________________________________________________
-// Utilities
-
-/// Unravels Bindings into a TokenStream of `let` declarations.
-fn unravel(bindings: Bindings) -> TokenStream {
-    flatten(bindings.into_iter().map(|(a, b)| build_let(a, b)))
-}
-
-/// Checks if the Ident is `unquote`.
-fn is_unquote(id: Ident) -> bool {
-    let qq = Ident::from_str("unquote");
-    id.name == qq.name // We disregard context; unquote is _reserved_
-}
-
-/// Checks if the Ident is `quote`.
-fn is_qquote(id: Ident) -> bool {
-    let qq = Ident::from_str("qquote");
-    id.name == qq.name // We disregard context; qquote is _reserved_
-}
-
-mod int_build {
-    use proc_macro_tokens::build::*;
-    use proc_macro_tokens::parse::*;
-
-    use syntax::ast::{self, Ident};
-    use syntax::codemap::{DUMMY_SP};
-    use syntax::parse::token::{self, Token, Lit};
-    use syntax::symbol::keywords;
-    use syntax::tokenstream::{TokenTree, TokenStream};
-
-    // ____________________________________________________________________________________________
-    // Emitters
-
-    pub fn emit_token(t: Token) -> TokenStream {
-        concat(lex("TokenStream::from_tokens"),
-               build_paren_delimited(build_vec(build_token_tt(t))))
-    }
-
-    pub fn emit_lit(l: Lit, n: Option<ast::Name>) -> TokenStream {
-        let suf = match n {
-            Some(n) => format!("Some(ast::Name({}))", n.as_u32()),
-            None => "None".to_string(),
-        };
-
-        let lit = match l {
-            Lit::Byte(n) => format!("Lit::Byte(Symbol::intern(\"{}\"))", n.to_string()),
-            Lit::Char(n) => format!("Lit::Char(Symbol::intern(\"{}\"))", n.to_string()),
-            Lit::Float(n) => format!("Lit::Float(Symbol::intern(\"{}\"))", n.to_string()),
-            Lit::Str_(n) => format!("Lit::Str_(Symbol::intern(\"{}\"))", n.to_string()),
-            Lit::Integer(n) => format!("Lit::Integer(Symbol::intern(\"{}\"))", n.to_string()),
-            Lit::ByteStr(n) => format!("Lit::ByteStr(Symbol::intern(\"{}\"))", n.to_string()),
-            _ => panic!("Unsupported literal"),
-        };
-
-        let res = format!("Token::Literal({},{})", lit, suf);
-        debug!("{}", res);
-        lex(&res)
-    }
-
-    // ____________________________________________________________________________________________
-    // Token Builders
-
-    pub fn build_binop_tok(bot: token::BinOpToken) -> TokenStream {
-        match bot {
-            token::BinOpToken::Plus => lex("Token::BinOp(BinOpToken::Plus)"),
-            token::BinOpToken::Minus => lex("Token::BinOp(BinOpToken::Minus)"),
-            token::BinOpToken::Star => lex("Token::BinOp(BinOpToken::Star)"),
-            token::BinOpToken::Slash => lex("Token::BinOp(BinOpToken::Slash)"),
-            token::BinOpToken::Percent => lex("Token::BinOp(BinOpToken::Percent)"),
-            token::BinOpToken::Caret => lex("Token::BinOp(BinOpToken::Caret)"),
-            token::BinOpToken::And => lex("Token::BinOp(BinOpToken::And)"),
-            token::BinOpToken::Or => lex("Token::BinOp(BinOpToken::Or)"),
-            token::BinOpToken::Shl => lex("Token::BinOp(BinOpToken::Shl)"),
-            token::BinOpToken::Shr => lex("Token::BinOp(BinOpToken::Shr)"),
-        }
-    }
-
-    pub fn build_binopeq_tok(bot: token::BinOpToken) -> TokenStream {
-        match bot {
-            token::BinOpToken::Plus => lex("Token::BinOpEq(BinOpToken::Plus)"),
-            token::BinOpToken::Minus => lex("Token::BinOpEq(BinOpToken::Minus)"),
-            token::BinOpToken::Star => lex("Token::BinOpEq(BinOpToken::Star)"),
-            token::BinOpToken::Slash => lex("Token::BinOpEq(BinOpToken::Slash)"),
-            token::BinOpToken::Percent => lex("Token::BinOpEq(BinOpToken::Percent)"),
-            token::BinOpToken::Caret => lex("Token::BinOpEq(BinOpToken::Caret)"),
-            token::BinOpToken::And => lex("Token::BinOpEq(BinOpToken::And)"),
-            token::BinOpToken::Or => lex("Token::BinOpEq(BinOpToken::Or)"),
-            token::BinOpToken::Shl => lex("Token::BinOpEq(BinOpToken::Shl)"),
-            token::BinOpToken::Shr => lex("Token::BinOpEq(BinOpToken::Shr)"),
-        }
-    }
-
-    pub fn build_delim_tok(dt: token::DelimToken) -> TokenStream {
-        match dt {
-            token::DelimToken::Paren => lex("DelimToken::Paren"),
-            token::DelimToken::Bracket => lex("DelimToken::Bracket"),
-            token::DelimToken::Brace => lex("DelimToken::Brace"),
-            token::DelimToken::NoDelim => lex("DelimToken::NoDelim"),
-        }
-    }
-
-    pub fn build_token_tt(t: Token) -> TokenStream {
-        match t {
-            Token::Eq => lex("Token::Eq"),
-            Token::Lt => lex("Token::Lt"),
-            Token::Le => lex("Token::Le"),
-            Token::EqEq => lex("Token::EqEq"),
-            Token::Ne => lex("Token::Ne"),
-            Token::Ge => lex("Token::Ge"),
-            Token::Gt => lex("Token::Gt"),
-            Token::AndAnd => lex("Token::AndAnd"),
-            Token::OrOr => lex("Token::OrOr"),
-            Token::Not => lex("Token::Not"),
-            Token::Tilde => lex("Token::Tilde"),
-            Token::BinOp(tok) => build_binop_tok(tok),
-            Token::BinOpEq(tok) => build_binopeq_tok(tok),
-            Token::At => lex("Token::At"),
-            Token::Dot => lex("Token::Dot"),
-            Token::DotDot => lex("Token::DotDot"),
-            Token::DotDotDot => lex("Token::DotDotDot"),
-            Token::Comma => lex("Token::Comma"),
-            Token::Semi => lex("Token::Semi"),
-            Token::Colon => lex("Token::Colon"),
-            Token::ModSep => lex("Token::ModSep"),
-            Token::RArrow => lex("Token::RArrow"),
-            Token::LArrow => lex("Token::LArrow"),
-            Token::FatArrow => lex("Token::FatArrow"),
-            Token::Pound => lex("Token::Pound"),
-            Token::Dollar => lex("Token::Dollar"),
-            Token::Question => lex("Token::Question"),
-            Token::OpenDelim(dt) => {
-                match dt {
-                    token::DelimToken::Paren => lex("Token::OpenDelim(DelimToken::Paren)"),
-                    token::DelimToken::Bracket => lex("Token::OpenDelim(DelimToken::Bracket)"),
-                    token::DelimToken::Brace => lex("Token::OpenDelim(DelimToken::Brace)"),
-                    token::DelimToken::NoDelim => lex("DelimToken::NoDelim"),
-                }
-            }
-            Token::CloseDelim(dt) => {
-                match dt {
-                    token::DelimToken::Paren => lex("Token::CloseDelim(DelimToken::Paren)"),
-                    token::DelimToken::Bracket => lex("Token::CloseDelim(DelimToken::Bracket)"),
-                    token::DelimToken::Brace => lex("Token::CloseDelim(DelimToken::Brace)"),
-                    token::DelimToken::NoDelim => lex("DelimToken::NoDelim"),
-                }
-            }
-            Token::Underscore => lex("_"),
-            Token::Literal(lit, sfx) => emit_lit(lit, sfx),
-            // fix ident expansion information... somehow
-            Token::Ident(ident) =>
-                lex(&format!("Token::Ident(Ident::from_str(\"{}\"))", ident.name)),
-            Token::Lifetime(ident) =>
-                lex(&format!("Token::Ident(Ident::from_str(\"{}\"))", ident.name)),
-        }
-    }
-
-    // ____________________________________________________________________________________________
-    // Conversion operators
-
-    pub fn as_tt(t: Token) -> TokenTree {
-        // FIXME do something nicer with the spans
-        TokenTree::Token(DUMMY_SP, t)
-    }
-
-    // ____________________________________________________________________________________________
-    // Build Procedures
-
-    /// Takes `input` and returns `vec![input]`.
-    pub fn build_vec(ts: TokenStream) -> TokenStream {
-        build_mac_call(Ident::from_str("vec"), ts)
-        // tts.clone().to_owned()
-    }
-
-    /// Takes `ident` and `rhs` and produces `let ident = rhs;`.
-    pub fn build_let(id: Ident, tts: TokenStream) -> TokenStream {
-        concat(from_tokens(vec![keyword_to_token_ident(keywords::Let),
-                                Token::Ident(id),
-                                Token::Eq]),
-               concat(tts, from_tokens(vec![Token::Semi])))
-    }
-
-    /// Takes `ident ...`, and `args ...` and produces `ident::...(args ...)`.
-    pub fn build_mod_call(ids: Vec<Ident>, args: TokenStream) -> TokenStream {
-        let call = from_tokens(intersperse(ids.into_iter().map(|id| Token::Ident(id)).collect(),
-                                           Token::ModSep));
-        concat(call, build_paren_delimited(args))
-    }
-
-    /// Takes `ident` and `args ...` and produces `ident(args ...)`.
-    pub fn build_fn_call(name: Ident, args: TokenStream) -> TokenStream {
-        concat(from_tokens(vec![Token::Ident(name)]), build_paren_delimited(args))
-    }
-
-    /// Takes `ident` and `args ...` and produces `ident!(args ...)`.
-    pub fn build_mac_call(name: Ident, args: TokenStream) -> TokenStream {
-        concat(from_tokens(vec![Token::Ident(name), Token::Not]),
-               build_paren_delimited(args))
-    }
-
-    // ____________________________________________________________________________________________
-    // Utilities
-
-    /// A wrapper around `TokenStream::from_tokens` to avoid extra namespace specification and
-    /// provide it as a generic operator.
-    pub fn from_tokens(tokens: Vec<Token>) -> TokenStream {
-        TokenStream::from_tokens(tokens)
-    }
-
-    pub fn intersperse<T>(vs: Vec<T>, t: T) -> Vec<T>
-        where T: Clone
-    {
-        if vs.len() < 2 {
-            return vs;
-        }
-        let mut output = vec![vs.get(0).unwrap().to_owned()];
-
-        for v in vs.into_iter().skip(1) {
-            output.push(t.clone());
-            output.push(v);
-        }
-        output
-    }
-}
+trait Quote {
+    fn quote(&self) -> TokenStream;
+}
+
+macro_rules! quote_tok {
+    (,) => { Token::Comma };
+    (.) => { Token::Dot };
+    (:) => { Token::Colon };
+    (::) => { Token::ModSep };
+    (!) => { Token::Not };
+    (<) => { Token::Lt };
+    (>) => { Token::Gt };
+    (_) => { Token::Underscore };
+    ($i:ident) => { Token::Ident(Ident::from_str(stringify!($i))) };
+}
+
+macro_rules! quote_tree {
+    ((unquote $($t:tt)*)) => { $($t)* };
+    ((quote $($t:tt)*)) => { ($($t)*).quote() };
+    (($($t:tt)*)) => { delimit(token::Paren, quote!($($t)*)) };
+    ([$($t:tt)*]) => { delimit(token::Bracket, quote!($($t)*)) };
+    ({$($t:tt)*}) => { delimit(token::Brace, quote!($($t)*)) };
+    ($t:tt) => { TokenStream::from(TokenTree::Token(DUMMY_SP, quote_tok!($t))) };
+}
+
+fn delimit(delim: token::DelimToken, stream: TokenStream) -> TokenStream {
+    TokenTree::Delimited(DUMMY_SP, Rc::new(Delimited {
+        delim: delim,
+        tts: stream.trees().cloned().collect(),
+        open_span: DUMMY_SP,
+        close_span: DUMMY_SP,
+    })).into()
+}
+
+macro_rules! quote {
+    () => { TokenStream::empty() };
+    ($($t:tt)*) => { [ $( quote_tree!($t), )* ].iter().cloned().collect::<TokenStream>() };
+}
+
+impl<T: Quote> Quote for Option<T> {
+    fn quote(&self) -> TokenStream {
+        match *self {
+            Some(ref t) => quote!(::std::option::Option::Some((quote t))),
+            None => quote!(::std::option::Option::None),
+        }
+    }
+}
+
+impl Quote for TokenStream {
+    fn quote(&self) -> TokenStream {
+        if self.is_empty() {
+            return quote!(::syntax::tokenstream::TokenStream::empty());
+        }
+
+        struct Quote<'a>(tokenstream::Cursor<'a>);
+
+        impl<'a> Iterator for Quote<'a> {
+            type Item = TokenStream;
+
+            fn next(&mut self) -> Option<TokenStream> {
+                let is_unquote = match self.0.peek() {
+                    Some(&TokenTree::Token(_, Token::Ident(ident))) if ident.name == "unquote" => {
+                        self.0.next();
+                        true
+                    }
+                    _ => false,
+                };
+
+                self.0.next().cloned().map(|tree| {
+                    let quoted_tree = if is_unquote { tree.into() } else { tree.quote() };
+                    quote!(::syntax::tokenstream::TokenStream::from((unquote quoted_tree)),)
+                })
+            }
+        }
+
+        let quoted = Quote(self.trees()).collect::<TokenStream>();
+        quote!([(unquote quoted)].iter().cloned().collect::<::syntax::tokenstream::TokenStream>())
+    }
+}
+
+impl Quote for Vec<TokenTree> {
+    fn quote(&self) -> TokenStream {
+        let stream = self.iter().cloned().collect::<TokenStream>();
+        quote!((quote stream).trees().cloned().collect::<::std::vec::Vec<_> >())
+    }
+}
+
+impl Quote for TokenTree {
+    fn quote(&self) -> TokenStream {
+        match *self {
+            TokenTree::Token(_, ref token) => quote! {
+                ::syntax::tokenstream::TokenTree::Token(::syntax::ext::quote::rt::DUMMY_SP,
+                                                        (quote token))
+            },
+            TokenTree::Delimited(_, ref delimited) => quote! {
+                ::syntax::tokenstream::TokenTree::Delimited(::syntax::ext::quote::rt::DUMMY_SP,
+                                                            (quote delimited))
+            },
+            _ => panic!("unexpected `TokenTree::Sequence` in `qquote`"),
+        }
+    }
+}
+
+impl Quote for Rc<Delimited> {
+    fn quote(&self) -> TokenStream {
+        quote!(::std::rc::Rc::new(::syntax::tokenstream::Delimited {
+            open_span: ::syntax::ext::quote::rt::DUMMY_SP,
+            close_span: ::syntax::ext::quote::rt::DUMMY_SP,
+            delim: (quote self.delim),
+            tts: (quote self.tts),
+        }))
+    }
+}
+
+impl<'a> Quote for &'a str {
+    fn quote(&self) -> TokenStream {
+        TokenTree::Token(DUMMY_SP, Token::Literal(token::Lit::Str_(Symbol::intern(self)), None))
+            .into()
+    }
+}
+
+impl Quote for Ident {
+    fn quote(&self) -> TokenStream {
+        // FIXME(jseyfried) quote hygiene
+        quote!(::syntax::ast::Ident::from_str((quote &*self.name.as_str())))
+    }
+}
+
+impl Quote for Symbol {
+    fn quote(&self) -> TokenStream {
+        quote!(::syntax::symbol::Symbol::intern((quote &*self.as_str())))
+    }
+}
+
+impl Quote for Token {
+    fn quote(&self) -> TokenStream {
+        macro_rules! gen_match {
+            ($($i:ident),*; $($t:tt)*) => {
+                match *self {
+                    $( Token::$i => quote!(::syntax::parse::token::$i), )*
+                    $( $t )*
+                }
+            }
+        }
+
+        gen_match! {
+            Eq, Lt, Le, EqEq, Ne, Ge, Gt, AndAnd, OrOr, Not, Tilde, At, Dot, DotDot, DotDotDot,
+            Comma, Semi, Colon, ModSep, RArrow, LArrow, FatArrow, Pound, Dollar, Question,
+            Underscore;
+
+            Token::OpenDelim(delim) => quote!(::syntax::parse::token::OpenDelim((quote delim))),
+            Token::CloseDelim(delim) => quote!(::syntax::parse::token::CloseDelim((quote delim))),
+            Token::BinOp(tok) => quote!(::syntax::parse::token::BinOp((quote tok))),
+            Token::BinOpEq(tok) => quote!(::syntax::parse::token::BinOpEq((quote tok))),
+            Token::Ident(ident) => quote!(::syntax::parse::token::Ident((quote ident))),
+            Token::Lifetime(ident) => quote!(::syntax::parse::token::Lifetime((quote ident))),
+            Token::Literal(lit, sfx) => quote! {
+                ::syntax::parse::token::Literal((quote lit), (quote sfx))
+            },
+            _ => panic!("Unhandled case!"),
+        }
+    }
+}
+
+impl Quote for token::BinOpToken {
+    fn quote(&self) -> TokenStream {
+        macro_rules! gen_match {
+            ($($i:ident),*) => {
+                match *self {
+                    $( token::BinOpToken::$i => quote!(::syntax::parse::token::BinOpToken::$i), )*
+                }
+            }
+        }
+
+        gen_match!(Plus, Minus, Star, Slash, Percent, Caret, And, Or, Shl, Shr)
+    }
+}
+
+impl Quote for Lit {
+    fn quote(&self) -> TokenStream {
+        macro_rules! gen_match {
+            ($($i:ident),*) => {
+                match *self {
+                    $( Lit::$i(lit) => quote!(::syntax::parse::token::Lit::$i((quote lit))), )*
+                    _ => panic!("Unsupported literal"),
+                }
+            }
+        }
+
+        gen_match!(Byte, Char, Float, Str_, Integer, ByteStr)
+    }
+}
+
+impl Quote for token::DelimToken {
+    fn quote(&self) -> TokenStream {
+        macro_rules! gen_match {
+            ($($i:ident),*) => {
+                match *self {
+                    $(token::DelimToken::$i => { quote!(::syntax::parse::token::DelimToken::$i) })*
+                }
+            }
+        }
+
+        gen_match!(Paren, Bracket, Brace, NoDelim)
+    }
+}
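The new quasiquoter above is self-applicable: each `Quote` impl emits tokens that, when compiled, rebuild the quoted value. A toy model of the same reify-by-emitting idea, independent of libsyntax, may make the pattern clearer (all names here are illustrative, with String standing in for TokenStream):

// Toy model: "quoting" a value means producing source text that
// reconstructs it, just as Quote::quote produces a TokenStream that
// reconstructs a TokenStream.
trait Quote {
    fn quote(&self) -> String;
}

impl Quote for u32 {
    fn quote(&self) -> String {
        format!("{}u32", self)
    }
}

impl<T: Quote> Quote for Option<T> {
    fn quote(&self) -> String {
        match *self {
            Some(ref t) => format!("::std::option::Option::Some({})", t.quote()),
            None => "::std::option::Option::None".to_string(),
        }
    }
}

fn main() {
    // Prints: ::std::option::Option::Some(7u32)
    println!("{}", Some(7u32).quote());
}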
@@ -18,20 +18,7 @@ use std::rc::Rc;
 /// A wrapper around `TokenStream::concat` to avoid extra namespace specification and
 /// provide TokenStream concatenation as a generic operator.
 pub fn concat(ts1: TokenStream, ts2: TokenStream) -> TokenStream {
-    TokenStream::concat(ts1, ts2)
-}
-
-/// Flatten a sequence of TokenStreams into a single TokenStream.
-pub fn flatten<T: Iterator<Item=TokenStream>>(mut iter: T) -> TokenStream {
-    match iter.next() {
-        Some(mut ts) => {
-            for next in iter {
-                ts = TokenStream::concat(ts, next);
-            }
-            ts
-        }
-        None => TokenStream::mk_empty()
-    }
+    TokenStream::concat([ts1, ts2].iter().cloned())
 }
 
 /// Checks if two identifiers have the same name, disregarding context. This allows us to
@@ -69,14 +56,12 @@ pub fn keyword_to_token_ident(kw: keywords::Keyword) -> Token {
 /// Generically takes a `ts` and delimiter and returns `ts` delimited by the specified
 /// delimiter.
 pub fn build_delimited(ts: TokenStream, delim: token::DelimToken) -> TokenStream {
-    let tts = ts.to_tts();
-    TokenStream::from_tts(vec![TokenTree::Delimited(DUMMY_SP,
-                               Rc::new(tokenstream::Delimited {
-                                   delim: delim,
-                                   open_span: DUMMY_SP,
-                                   tts: tts,
-                                   close_span: DUMMY_SP,
-                               }))])
+    TokenTree::Delimited(DUMMY_SP, Rc::new(tokenstream::Delimited {
+        delim: delim,
+        open_span: DUMMY_SP,
+        tts: ts.trees().cloned().collect(),
+        close_span: DUMMY_SP,
+    })).into()
 }
 
 /// Takes `ts` and returns `[ts]`.
@@ -96,5 +81,5 @@ pub fn build_paren_delimited(ts: TokenStream) -> TokenStream {
 
 /// Constructs `()`.
 pub fn build_empty_args() -> TokenStream {
-    build_paren_delimited(TokenStream::mk_empty())
+    build_paren_delimited(TokenStream::empty())
 }
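`concat` now delegates to the variadic `TokenStream::concat(iterator)` introduced by this commit rather than the old binary tree-merging version, and `flatten` disappears because iterator-based concatenation subsumes it. A sketch of an equivalent call site (types from `syntax::tokenstream` as of this commit; the function name is illustrative):

use syntax::tokenstream::TokenStream;

fn combine(a: TokenStream, b: TokenStream, c: TokenStream) -> TokenStream {
    // Before this commit: flatten(vec![a, b, c].into_iter()).
    // After it, concat accepts any IntoIterator<Item = TokenStream>.
    TokenStream::concat(vec![a, b, c])
}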
@@ -16,9 +16,8 @@ use syntax::tokenstream::TokenStream;
 /// Map a string to tts, using a made-up filename. For example, `lex("15")` will return a
 /// TokenStream containing the literal 15.
 pub fn lex(source_str: &str) -> TokenStream {
-    let ps = ParseSess::new();
-    TokenStream::from_tts(filemap_to_tts(&ps,
-                                         ps.codemap().new_filemap("<procmacro_lex>".to_string(),
-                                                                  None,
-                                                                  source_str.to_owned())))
+    let sess = ParseSess::new();
+    let filemap =
+        sess.codemap().new_filemap("<procmacro_lex>".to_string(), None, source_str.to_owned());
+    filemap_to_tts(&sess, filemap).into_iter().collect()
 }
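`lex` is unchanged in spirit: it still lexes a string under a made-up filename, but now builds the result by collecting token trees. A hypothetical usage, following its own doc comment:

use proc_macro_tokens::parse::lex;

fn demo() {
    // Per the doc comment: lex("15") yields a TokenStream holding the literal 15.
    let fifteen = lex("15");
    assert!(!fifteen.is_empty());
}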
@@ -73,23 +73,6 @@ pub fn dummy_spanned<T>(t: T) -> Spanned<T> {
     respan(DUMMY_SP, t)
 }
 
-/// Build a span that covers the two provided spans.
-pub fn combine_spans(sp1: Span, sp2: Span) -> Span {
-    if sp1 == DUMMY_SP && sp2 == DUMMY_SP {
-        DUMMY_SP
-    } else if sp1 == DUMMY_SP {
-        sp2
-    } else if sp2 == DUMMY_SP {
-        sp1
-    } else {
-        Span {
-            lo: if sp1.lo < sp2.lo { sp1.lo } else { sp2.lo },
-            hi: if sp1.hi > sp2.hi { sp1.hi } else { sp2.hi },
-            expn_id: if sp1.expn_id == sp2.expn_id { sp1.expn_id } else { NO_EXPANSION },
-        }
-    }
-}
-
 #[derive(Clone, Hash, Debug)]
 pub struct NameAndSpan {
     /// The format with which the macro was invoked.
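The deleted `combine_spans` became dead once `TokenStream` stopped caching a covering span. For reference, its semantics: dummy spans are ignored, otherwise the result covers both inputs, and the expansion id survives only if both sides agree. A reference model with plain integers standing in for byte positions (illustrative, not the rustc types):

use std::cmp::{min, max};

// combine(2, 5, 11, 13) == (2, 13): the result covers both inputs, gaps included.
fn combine(lo1: u32, hi1: u32, lo2: u32, hi2: u32) -> (u32, u32) {
    (min(lo1, lo2), max(hi1, hi2))
}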
@@ -364,10 +364,8 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                 kind.expect_from_annotatables(items)
             }
             SyntaxExtension::AttrProcMacro(ref mac) => {
-                let attr_toks = TokenStream::from_tts(tts_for_attr_args(&attr,
-                                                                        &self.cx.parse_sess));
-
-                let item_toks = TokenStream::from_tts(tts_for_item(&item, &self.cx.parse_sess));
+                let attr_toks = tts_for_attr_args(&attr, &self.cx.parse_sess).into_iter().collect();
+                let item_toks = tts_for_item(&item, &self.cx.parse_sess).into_iter().collect();
 
                 let tok_result = mac.expand(self.cx, attr.span, attr_toks, item_toks);
                 self.parse_expansion(tok_result, kind, name, attr.span)
@@ -467,7 +465,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                     },
                 });
 
-                let toks = TokenStream::from_tts(marked_tts);
+                let toks = marked_tts.into_iter().collect();
                 let tok_result = expandfun.expand(self.cx, span, toks);
                 Some(self.parse_expansion(tok_result, kind, extname, span))
             }
@@ -490,7 +488,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
 
     fn parse_expansion(&mut self, toks: TokenStream, kind: ExpansionKind, name: Name, span: Span)
                        -> Expansion {
-        let mut parser = self.cx.new_parser_from_tts(&toks.to_tts());
+        let mut parser = self.cx.new_parser_from_tts(&toks.trees().cloned().collect::<Vec<_>>());
        let expansion = match parser.parse_expansion(kind, false) {
            Ok(expansion) => expansion,
            Err(mut err) => {
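All three expansion-path hunks above apply the same mechanical rewrite: where a `Vec<TokenTree>` met `TokenStream::from_tts`, the vector is now collected, and where a `TokenStream` must become a `Vec<TokenTree>` again, its trees are cloned out. The round trip in one sketch (function name illustrative):

use syntax::tokenstream::{TokenTree, TokenStream};

fn round_trip(tts: Vec<TokenTree>) -> Vec<TokenTree> {
    let stream: TokenStream = tts.into_iter().collect();  // was TokenStream::from_tts(tts)
    stream.trees().cloned().collect::<Vec<_>>()           // was stream.to_tts()
}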
@@ -1,72 +0,0 @@
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! This is a shim file to ease the transition to the final procedural macro interface for
-//! Macros 2.0. It currently exposes the `libsyntax` operations that the quasiquoter's
-//! output needs to compile correctly, along with the following operators:
-//!
-//! - `build_block_emitter`, which produces a `block` output macro result from the
-//!   provided TokenStream.
-
-use ast;
-use codemap::Span;
-use parse::parser::Parser;
-use ptr::P;
-use tokenstream::TokenStream;
-use ext::base::*;
-
-/// Take a `ExtCtxt`, `Span`, and `TokenStream`, and produce a Macro Result that parses
-/// the TokenStream as a block and returns it as an `Expr`.
-pub fn build_block_emitter<'cx>(cx: &'cx mut ExtCtxt,
-                                sp: Span,
-                                output: TokenStream)
-                                -> Box<MacResult + 'cx> {
-    let parser = cx.new_parser_from_tts(&output.to_tts());
-
-    struct Result<'a> {
-        prsr: Parser<'a>,
-        span: Span,
-    }; //FIXME is this the right lifetime
-
-    impl<'a> Result<'a> {
-        fn block(&mut self) -> P<ast::Block> {
-            let res = self.prsr.parse_block().unwrap();
-            res
-        }
-    }
-
-    impl<'a> MacResult for Result<'a> {
-        fn make_expr(self: Box<Self>) -> Option<P<ast::Expr>> {
-            let mut me = *self;
-            Some(P(ast::Expr {
-                id: ast::DUMMY_NODE_ID,
-                node: ast::ExprKind::Block(me.block()),
-                span: me.span,
-                attrs: ast::ThinVec::new(),
-            }))
-
-        }
-    }
-
-    Box::new(Result {
-        prsr: parser,
-        span: sp,
-    })
-}
-
-pub mod prelude {
-    pub use super::build_block_emitter;
-    pub use ast::Ident;
-    pub use codemap::{DUMMY_SP, Span};
-    pub use ext::base::{ExtCtxt, MacResult};
-    pub use parse::token::{self, Token, DelimToken};
-    pub use symbol::keywords;
-    pub use tokenstream::{TokenTree, TokenStream};
-}
@@ -89,6 +89,9 @@ pub mod util {
 
     mod thin_vec;
     pub use self::thin_vec::ThinVec;
+
+    mod rc_slice;
+    pub use self::rc_slice::RcSlice;
 }
 
 pub mod json;
@@ -129,7 +132,6 @@ pub mod ext {
     pub mod expand;
     pub mod placeholders;
     pub mod hygiene;
-    pub mod proc_macro_shim;
     pub mod quote;
     pub mod source_util;
 
@@ -196,7 +196,7 @@ pub fn new_parser_from_tts<'a>(sess: &'a ParseSess, tts: Vec<tokenstream::TokenT
 }
 
 pub fn new_parser_from_ts<'a>(sess: &'a ParseSess, ts: tokenstream::TokenStream) -> Parser<'a> {
-    tts_to_parser(sess, ts.to_tts())
+    tts_to_parser(sess, ts.trees().cloned().collect())
 }
 
 
@@ -25,19 +25,19 @@
 //! ownership of the original.
 
 use ast::{self, AttrStyle, LitKind};
-use syntax_pos::{Span, DUMMY_SP, NO_EXPANSION};
-use codemap::{Spanned, combine_spans};
+use syntax_pos::Span;
+use codemap::Spanned;
 use ext::base;
 use ext::tt::macro_parser;
 use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
 use parse::{self, Directory};
 use parse::token::{self, Token, Lit, Nonterminal};
 use print::pprust;
+use serialize::{Decoder, Decodable, Encoder, Encodable};
 use symbol::Symbol;
+use util::RcSlice;
 
-use std::fmt;
-use std::iter::*;
-use std::ops::{self, Index};
+use std::{fmt, iter};
 use std::rc::Rc;
 
 /// A delimited sequence of token trees
@@ -323,555 +323,158 @@ impl TokenTree {
     }
 }
 
-/// #Token Streams
+/// # Token Streams
 ///
-/// TokenStreams are a syntactic abstraction over TokenTrees. The goal is for procedural
-/// macros to work over TokenStreams instead of arbitrary syntax. For now, however, we
-/// are going to cut a few corners (i.e., use some of the AST structure) when we need to
-/// for backwards compatibility.
-
-/// TokenStreams are collections of TokenTrees that represent a syntactic structure. The
-/// struct itself shouldn't be directly manipulated; the internal structure is not stable,
-/// and may be changed at any time in the future. The operators will not, however (except
-/// for signatures, later on).
-#[derive(Clone, Eq, Hash, RustcEncodable, RustcDecodable)]
+/// A `TokenStream` is an abstract sequence of tokens, organized into `TokenTree`s.
+/// The goal is for procedural macros to work with `TokenStream`s and `TokenTree`s
+/// instead of a representation of the abstract syntax tree.
+/// Today's `TokenTree`s can still contain AST via `Token::Interpolated` for back-compat.
+#[derive(Clone, Debug)]
 pub struct TokenStream {
-    ts: InternalTS,
+    kind: TokenStreamKind,
 }
 
-// This indicates the maximum size for a leaf in the concatenation algorithm.
-// If two leafs will be collectively smaller than this, they will be merged.
-// If a leaf is larger than this, it will be concatenated at the top.
-const LEAF_SIZE : usize = 32;
-
-// NB If Leaf access proves to be slow, inroducing a secondary Leaf without the bounds
-// for unsliced Leafs may lead to some performance improvemenet.
-#[derive(Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
-pub enum InternalTS {
-    Empty(Span),
-    Leaf {
-        tts: Rc<Vec<TokenTree>>,
-        offset: usize,
-        len: usize,
-        sp: Span,
-    },
-    Node {
-        left: Rc<InternalTS>,
-        right: Rc<InternalTS>,
-        len: usize,
-        sp: Span,
-    },
+#[derive(Clone, Debug)]
+enum TokenStreamKind {
+    Empty,
+    Tree(TokenTree),
+    Stream(RcSlice<TokenStream>),
 }
 
-impl fmt::Debug for TokenStream {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        self.ts.fmt(f)
+impl From<TokenTree> for TokenStream {
+    fn from(tt: TokenTree) -> TokenStream {
+        TokenStream { kind: TokenStreamKind::Tree(tt) }
     }
 }
 
-impl fmt::Debug for InternalTS {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        match *self {
-            InternalTS::Empty(..) => Ok(()),
-            InternalTS::Leaf { ref tts, offset, len, .. } => {
-                for t in tts.iter().skip(offset).take(len) {
-                    try!(write!(f, "{:?}", t));
-                }
-                Ok(())
-            }
-            InternalTS::Node { ref left, ref right, .. } => {
-                try!(left.fmt(f));
-                right.fmt(f)
-            }
-        }
+impl<T: Into<TokenStream>> iter::FromIterator<T> for TokenStream {
+    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
+        TokenStream::concat(iter.into_iter().map(Into::into))
     }
 }
 
-/// Checks if two TokenStreams are equivalent (including spans). For unspanned
-/// equality, see `eq_unspanned`.
+impl Eq for TokenStream {}
+
 impl PartialEq<TokenStream> for TokenStream {
     fn eq(&self, other: &TokenStream) -> bool {
-        self.iter().eq(other.iter())
+        self.trees().eq(other.trees())
     }
 }
 
-// NB this will disregard gaps. if we have [a|{2,5} , b|{11,13}], the resultant span
-// will be at {2,13}. Without finer-grained span structures, however, this seems to be
-// our only recourse.
-// FIXME Do something smarter to compute the expansion id.
-fn covering_span(trees: &[TokenTree]) -> Span {
-    // disregard any dummy spans we have
-    let trees = trees.iter().filter(|t| t.span() != DUMMY_SP).collect::<Vec<&TokenTree>>();
-
-    // if we're out of spans, stop
-    if trees.len() < 1 {
-        return DUMMY_SP;
-    }
-
-    // set up the initial values
-    let fst_span = trees[0].span();
-
-    let mut lo_span = fst_span.lo;
-    let mut hi_span = fst_span.hi;
-    let mut expn_id = fst_span.expn_id;
-
-    // compute the spans iteratively
-    for t in trees.iter().skip(1) {
-        let sp = t.span();
-        if sp.lo < lo_span {
-            lo_span = sp.lo;
-        }
-        if hi_span < sp.hi {
-            hi_span = sp.hi;
-        }
-        if expn_id != sp.expn_id {
-            expn_id = NO_EXPANSION;
-        }
-    }
-
-    Span {
-        lo: lo_span,
-        hi: hi_span,
-        expn_id: expn_id,
-    }
-}
-
-impl InternalTS {
-    fn len(&self) -> usize {
-        match *self {
-            InternalTS::Empty(..) => 0,
-            InternalTS::Leaf { len, .. } => len,
-            InternalTS::Node { len, .. } => len,
-        }
-    }
-
-    fn span(&self) -> Span {
-        match *self {
-            InternalTS::Empty(sp) |
-            InternalTS::Leaf { sp, .. } |
-            InternalTS::Node { sp, .. } => sp,
-        }
-    }
-
-    fn slice(&self, range: ops::Range<usize>) -> TokenStream {
-        let from = range.start;
-        let to = range.end;
-        if from == to {
-            return TokenStream::mk_empty();
-        }
-        if from > to {
-            panic!("Invalid range: {} to {}", from, to);
-        }
-        if from == 0 && to == self.len() {
-            return TokenStream { ts: self.clone() }; /* should be cheap */
-        }
-        match *self {
-            InternalTS::Empty(..) => panic!("Invalid index"),
-            InternalTS::Leaf { ref tts, offset, .. } => {
-                let offset = offset + from;
-                let len = to - from;
-                TokenStream::mk_sub_leaf(tts.clone(),
-                                         offset,
-                                         len,
-                                         covering_span(&tts[offset..offset + len]))
-            }
-            InternalTS::Node { ref left, ref right, .. } => {
-                let left_len = left.len();
-                if to <= left_len {
-                    left.slice(range)
-                } else if from >= left_len {
-                    right.slice(from - left_len..to - left_len)
-                } else {
-                    TokenStream::concat(left.slice(from..left_len), right.slice(0..to - left_len))
-                }
-            }
-        }
-    }
-
-    fn to_vec(&self) -> Vec<&TokenTree> {
-        let mut res = Vec::with_capacity(self.len());
-        fn traverse_and_append<'a>(res: &mut Vec<&'a TokenTree>, ts: &'a InternalTS) {
-            match *ts {
-                InternalTS::Empty(..) => {},
-                InternalTS::Leaf { ref tts, offset, len, .. } => {
-                    let mut to_app = tts[offset..offset + len].iter().collect();
-                    res.append(&mut to_app);
-                }
-                InternalTS::Node { ref left, ref right, .. } => {
-                    traverse_and_append(res, left);
-                    traverse_and_append(res, right);
-                }
-            }
-        }
-        traverse_and_append(&mut res, self);
-        res
-    }
-
-    fn to_tts(&self) -> Vec<TokenTree> {
-        self.to_vec().into_iter().cloned().collect::<Vec<TokenTree>>()
-    }
-
-    // Returns an internal node's children.
-    fn children(&self) -> Option<(Rc<InternalTS>, Rc<InternalTS>)> {
-        match *self {
-            InternalTS::Node { ref left, ref right, .. } => Some((left.clone(), right.clone())),
-            _ => None,
-        }
-    }
-}
-
-/// TokenStream operators include basic destructuring, boolean operations, `maybe_...`
-/// operations, and `maybe_..._prefix` operations. Boolean operations are straightforward,
-/// indicating information about the structure of the stream. The `maybe_...` operations
-/// return `Some<...>` if the tokenstream contains the appropriate item.
-///
-/// Similarly, the `maybe_..._prefix` operations potentially return a
-/// partially-destructured stream as a pair where the first element is the expected item
-/// and the second is the remainder of the stream. As anb example,
-///
-/// `maybe_path_prefix("a::b::c(a,b,c).foo()") -> (a::b::c, "(a,b,c).foo()")`
 impl TokenStream {
-    // Construct an empty node with a dummy span.
-    pub fn mk_empty() -> TokenStream {
-        TokenStream { ts: InternalTS::Empty(DUMMY_SP) }
+    pub fn empty() -> TokenStream {
+        TokenStream { kind: TokenStreamKind::Empty }
     }
 
-    // Construct an empty node with the provided span.
-    fn mk_spanned_empty(sp: Span) -> TokenStream {
-        TokenStream { ts: InternalTS::Empty(sp) }
-    }
-
-    // Construct a leaf node with a 0 offset and length equivalent to the input.
-    fn mk_leaf(tts: Rc<Vec<TokenTree>>, sp: Span) -> TokenStream {
-        let len = tts.len();
-        TokenStream {
-            ts: InternalTS::Leaf {
-                tts: tts,
-                offset: 0,
-                len: len,
-                sp: sp,
-            },
-        }
-    }
-
-    // Construct a leaf node with the provided values.
-    fn mk_sub_leaf(tts: Rc<Vec<TokenTree>>, offset: usize, len: usize, sp: Span) -> TokenStream {
-        TokenStream {
-            ts: InternalTS::Leaf {
-                tts: tts,
-                offset: offset,
-                len: len,
-                sp: sp,
-            },
-        }
-    }
-
-    // Construct an internal node with the provided values.
-    fn mk_int_node(left: Rc<InternalTS>,
-                   right: Rc<InternalTS>,
-                   len: usize,
-                   sp: Span)
-                   -> TokenStream {
-        TokenStream {
-            ts: InternalTS::Node {
-                left: left,
-                right: right,
-                len: len,
-                sp: sp,
-            },
-        }
-    }
-
-    /// Convert a vector of `TokenTree`s into a `TokenStream`.
-    pub fn from_tts(trees: Vec<TokenTree>) -> TokenStream {
-        let span = covering_span(&trees[..]);
-        TokenStream::mk_leaf(Rc::new(trees), span)
-    }
-
-    /// Convert a vector of Tokens into a TokenStream.
-    pub fn from_tokens(tokens: Vec<Token>) -> TokenStream {
-        // FIXME do something nicer with the spans
-        TokenStream::from_tts(tokens.into_iter().map(|t| TokenTree::Token(DUMMY_SP, t)).collect())
-    }
-
-    /// Manually change a TokenStream's span.
-    pub fn respan(self, span: Span) -> TokenStream {
-        match self.ts {
-            InternalTS::Empty(..) => TokenStream::mk_spanned_empty(span),
-            InternalTS::Leaf { tts, offset, len, .. } => {
-                TokenStream::mk_sub_leaf(tts, offset, len, span)
-            }
-            InternalTS::Node { left, right, len, .. } => {
-                TokenStream::mk_int_node(left, right, len, span)
-            }
-        }
-    }
-
-    /// Concatenates two TokenStreams into a new TokenStream.
-    pub fn concat(left: TokenStream, right: TokenStream) -> TokenStream {
-        // This internal procedure performs 'aggressive compacting' during concatenation as
-        // follows:
-        // - If the nodes' combined total total length is less than 32, we copy both of
-        //   them into a new vector and build a new leaf node.
-        // - If one node is an internal node and the other is a 'small' leaf (length<32),
-        //   we recur down the internal node on the appropriate side.
-        // - Otherwise, we construct a new internal node that points to them as left and
-        //   right.
-        fn concat_internal(left: Rc<InternalTS>, right: Rc<InternalTS>) -> TokenStream {
-            let llen = left.len();
-            let rlen = right.len();
-            let len = llen + rlen;
-            let span = combine_spans(left.span(), right.span());
-            if len <= LEAF_SIZE {
-                let mut new_vec = left.to_tts();
-                let mut rvec = right.to_tts();
-                new_vec.append(&mut rvec);
-                return TokenStream::mk_leaf(Rc::new(new_vec), span);
-            }
-
-            match (left.children(), right.children()) {
-                (Some((lleft, lright)), None) => {
-                    if rlen <= LEAF_SIZE {
-                        let new_right = concat_internal(lright, right);
-                        TokenStream::mk_int_node(lleft, Rc::new(new_right.ts), len, span)
-                    } else {
-                        TokenStream::mk_int_node(left, right, len, span)
-                    }
-                }
-                (None, Some((rleft, rright))) => {
-                    if rlen <= LEAF_SIZE {
-                        let new_left = concat_internal(left, rleft);
-                        TokenStream::mk_int_node(Rc::new(new_left.ts), rright, len, span)
-                    } else {
-                        TokenStream::mk_int_node(left, right, len, span)
-                    }
-                }
-                (_, _) => TokenStream::mk_int_node(left, right, len, span),
-            }
-        }
-
-        if left.is_empty() {
-            right
-        } else if right.is_empty() {
-            left
-        } else {
-            concat_internal(Rc::new(left.ts), Rc::new(right.ts))
-        }
-    }
-
-    /// Indicate if the TokenStream is empty.
     pub fn is_empty(&self) -> bool {
-        self.len() == 0
-    }
-
-    /// Return a TokenStream's length.
-    pub fn len(&self) -> usize {
-        self.ts.len()
-    }
-
-    /// Convert a TokenStream into a vector of borrowed TokenTrees.
-    pub fn to_vec(&self) -> Vec<&TokenTree> {
-        self.ts.to_vec()
-    }
-
-    /// Convert a TokenStream into a vector of TokenTrees (by cloning the TokenTrees).
-    /// (This operation is an O(n) deep copy of the underlying structure.)
-    pub fn to_tts(&self) -> Vec<TokenTree> {
-        self.ts.to_tts()
-    }
-
-    /// Return the TokenStream's span.
-    pub fn span(&self) -> Span {
-        self.ts.span()
-    }
-
-    /// Returns an iterator over a TokenStream (as a sequence of TokenTrees).
-    pub fn iter<'a>(&self) -> Iter {
-        Iter { vs: self, idx: 0 }
-    }
-
-    /// Splits a TokenStream based on the provided `&TokenTree -> bool` predicate.
-    pub fn split<P>(&self, pred: P) -> Split<P>
-        where P: FnMut(&TokenTree) -> bool
-    {
-        Split {
-            vs: self,
-            pred: pred,
-            finished: false,
-            idx: 0,
+        match self.kind {
+            TokenStreamKind::Empty => true,
+            _ => false,
         }
     }
 
-    /// Produce a slice of the input TokenStream from the `from` index, inclusive, to the
-    /// `to` index, non-inclusive.
-    pub fn slice(&self, range: ops::Range<usize>) -> TokenStream {
-        self.ts.slice(range)
+    pub fn concat<I: IntoIterator<Item = TokenStream>>(streams: I) -> TokenStream {
+        let mut streams = streams.into_iter().filter(|stream| !stream.is_empty());
+        let first_stream = match streams.next() {
+            Some(stream) => stream,
+            None => return TokenStream::empty(),
+        };
+        let second_stream = match streams.next() {
+            Some(stream) => stream,
+            None => return first_stream,
+        };
+        let mut vec = vec![first_stream, second_stream];
+        vec.extend(streams);
+        TokenStream { kind: TokenStreamKind::Stream(RcSlice::new(vec)) }
     }
 
-    /// Slice starting at the provided index, inclusive.
-    pub fn slice_from(&self, from: ops::RangeFrom<usize>) -> TokenStream {
-        self.slice(from.start..self.len())
-    }
-
-    /// Slice up to the provided index, non-inclusive.
-    pub fn slice_to(&self, to: ops::RangeTo<usize>) -> TokenStream {
-        self.slice(0..to.end)
-    }
-
-    /// Indicates where the stream is a single, delimited expression (e.g., `(a,b,c)` or
-    /// `{a,b,c}`).
-    pub fn is_delimited(&self) -> bool {
-        self.maybe_delimited().is_some()
-    }
-
-    /// Returns the inside of the delimited term as a new TokenStream.
-    pub fn maybe_delimited(&self) -> Option<TokenStream> {
-        if !(self.len() == 1) {
-            return None;
-        }
-
-        // FIXME It would be nice to change Delimited to move the Rc around the TokenTree
-        // vector directly in order to avoid the clone here.
-        match self[0] {
-            TokenTree::Delimited(_, ref rc) => Some(TokenStream::from_tts(rc.tts.clone())),
-            _ => None,
-        }
-    }
-
-    /// Indicates if the stream is exactly one identifier.
-    pub fn is_ident(&self) -> bool {
-        self.maybe_ident().is_some()
-    }
-
-    /// Returns an identifier
-    pub fn maybe_ident(&self) -> Option<ast::Ident> {
-        if !(self.len() == 1) {
-            return None;
-        }
-
-        match self[0] {
-            TokenTree::Token(_, Token::Ident(t)) => Some(t),
-            _ => None,
-        }
+    pub fn trees<'a>(&'a self) -> Cursor {
+        Cursor::new(self)
     }
 
     /// Compares two TokenStreams, checking equality without regarding span information.
     pub fn eq_unspanned(&self, other: &TokenStream) -> bool {
-        for (t1, t2) in self.iter().zip(other.iter()) {
+        for (t1, t2) in self.trees().zip(other.trees()) {
             if !t1.eq_unspanned(t2) {
                 return false;
             }
         }
         true
     }
 }
 
-/// Convert a vector of TokenTrees into a parentheses-delimited TokenStream.
-pub fn as_delimited_stream(tts: Vec<TokenTree>, delim: token::DelimToken) -> TokenStream {
-    let new_sp = covering_span(&tts);
-
-    let new_delim = Rc::new(Delimited {
-        delim: delim,
-        open_span: DUMMY_SP,
-        tts: tts,
-        close_span: DUMMY_SP,
-    });
-
-    TokenStream::from_tts(vec![TokenTree::Delimited(new_sp, new_delim)])
-}
+pub struct Cursor<'a> {
+    current_frame: CursorFrame<'a>,
+    stack: Vec<CursorFrame<'a>>,
+}
+
+impl<'a> Iterator for Cursor<'a> {
+    type Item = &'a TokenTree;
+
+    fn next(&mut self) -> Option<&'a TokenTree> {
+        let tree = self.peek();
+        self.current_frame = self.stack.pop().unwrap_or(CursorFrame::Empty);
+        tree
+    }
+}
+
+enum CursorFrame<'a> {
+    Empty,
+    Tree(&'a TokenTree),
+    Stream(&'a RcSlice<TokenStream>, usize),
+}
+
+impl<'a> CursorFrame<'a> {
+    fn new(stream: &'a TokenStream) -> Self {
+        match stream.kind {
+            TokenStreamKind::Empty => CursorFrame::Empty,
+            TokenStreamKind::Tree(ref tree) => CursorFrame::Tree(tree),
+            TokenStreamKind::Stream(ref stream) => CursorFrame::Stream(stream, 0),
+        }
+    }
+}
+
+impl<'a> Cursor<'a> {
+    fn new(stream: &'a TokenStream) -> Self {
+        Cursor {
+            current_frame: CursorFrame::new(stream),
+            stack: Vec::new(),
+        }
+    }
+
+    pub fn peek(&mut self) -> Option<&'a TokenTree> {
+        while let CursorFrame::Stream(stream, index) = self.current_frame {
+            self.current_frame = if index == stream.len() {
+                self.stack.pop().unwrap_or(CursorFrame::Empty)
+            } else {
+                self.stack.push(CursorFrame::Stream(stream, index + 1));
+                CursorFrame::new(&stream[index])
+            };
+        }
+
+        match self.current_frame {
+            CursorFrame::Empty => None,
+            CursorFrame::Tree(tree) => Some(tree),
+            CursorFrame::Stream(..) => unreachable!(),
+        }
+    }
+}
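`Cursor` is the replacement for indexed iteration: it walks the new tree-shaped `TokenStream` with an explicit stack of frames, flattening nested `Stream` nodes on the fly, with `peek` driving `next`. A usage sketch against the API introduced in this hunk (function name illustrative):

use syntax::tokenstream::TokenStream;

fn count_trees(stream: &TokenStream) -> usize {
    // trees() returns a Cursor that yields &TokenTree depth-first across
    // the Empty/Tree/Stream structure without materializing a Vec.
    stream.trees().count()
}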
|
||||
impl fmt::Display for TokenStream {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
f.write_str(&pprust::tts_to_string(&self.to_tts()))
|
||||
f.write_str(&pprust::tts_to_string(&self.trees().cloned().collect::<Vec<_>>()))
|
||||
}
|
||||
}

// FIXME Reimplement this iterator to hold onto a slice iterator for a leaf, getting the
// next leaf's iterator when the current one is exhausted.
pub struct Iter<'a> {
    vs: &'a TokenStream,
    idx: usize,
}

impl<'a> Iterator for Iter<'a> {
    type Item = &'a TokenTree;

    fn next(&mut self) -> Option<&'a TokenTree> {
        if self.vs.is_empty() || self.idx >= self.vs.len() {
            return None;
        }

        let ret = Some(&self.vs[self.idx]);
        self.idx = self.idx + 1;
        ret
    }
}

impl Encodable for TokenStream {
    fn encode<E: Encoder>(&self, encoder: &mut E) -> Result<(), E::Error> {
        self.trees().cloned().collect::<Vec<_>>().encode(encoder)
    }
}

pub struct Split<'a, P>
    where P: FnMut(&TokenTree) -> bool
{
    vs: &'a TokenStream,
    pred: P,
    finished: bool,
    idx: usize,
}

impl<'a, P> Iterator for Split<'a, P>
    where P: FnMut(&TokenTree) -> bool
{
    type Item = TokenStream;

    fn next(&mut self) -> Option<TokenStream> {
        if self.finished {
            return None;
        }
        if self.idx >= self.vs.len() {
            self.finished = true;
            return None;
        }

        let mut lookup = self.vs.iter().skip(self.idx);
        match lookup.position(|x| (self.pred)(&x)) {
            None => {
                self.finished = true;
                Some(self.vs.slice_from(self.idx..))
            }
            Some(edx) => {
                let ret = Some(self.vs.slice(self.idx..self.idx + edx));
                self.idx += edx + 1;
                ret
            }
        }
    }
}

impl Index<usize> for TokenStream {
    type Output = TokenTree;

    fn index(&self, index: usize) -> &TokenTree {
        &self.ts[index]
    }
}

impl Index<usize> for InternalTS {
    type Output = TokenTree;

    fn index(&self, index: usize) -> &TokenTree {
        if self.len() <= index {
            panic!("Index {} too large for {:?}", index, self);
        }
        match *self {
            InternalTS::Empty(..) => panic!("Invalid index"),
            InternalTS::Leaf { ref tts, offset, .. } => tts.get(index + offset).unwrap(),
            InternalTS::Node { ref left, ref right, .. } => {
                let left_len = left.len();
                if index < left_len {
                    Index::index(&**left, index)
                } else {
                    Index::index(&**right, index - left_len)
                }
            }
        }
    }
}

impl Decodable for TokenStream {
    fn decode<D: Decoder>(decoder: &mut D) -> Result<TokenStream, D::Error> {
        Vec::<TokenTree>::decode(decoder).map(|vec| vec.into_iter().collect())
    }
}
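
Serialization round-trips through a flat `Vec<TokenTree>`: `encode` collects the leaves via the cursor and `decode` rebuilds a stream with the `FromIterator` impl. A sketch of that invariant (hypothetical helper):

    // Flatten to the serialized representation, then rebuild; the result
    // carries the same leaves as the original stream.
    fn roundtrip(stream: &TokenStream) -> TokenStream {
        let tts: Vec<TokenTree> = stream.trees().cloned().collect();
        tts.into_iter().collect()
    }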

@ -880,10 +483,13 @@ impl Index<usize> for InternalTS
mod tests {
    use super::*;
    use syntax::ast::Ident;
    use syntax_pos::{Span, BytePos, NO_EXPANSION, DUMMY_SP};
    use parse::token::{self, Token};
    use syntax_pos::{Span, BytePos, NO_EXPANSION};
    use parse::token::Token;
    use util::parser_testing::string_to_tts;
    use std::rc::Rc;

    fn string_to_ts(string: &str) -> TokenStream {
        string_to_tts(string.to_owned()).into_iter().collect()
    }

    fn sp(a: u32, b: u32) -> Span {
        Span {
@ -893,239 +499,76 @@ mod tests {
        }
    }

    fn as_paren_delimited_stream(tts: Vec<TokenTree>) -> TokenStream {
        TokenStream::as_delimited_stream(tts, token::DelimToken::Paren)
    }

    #[test]
    fn test_concat() {
        let test_res = TokenStream::from_tts(string_to_tts("foo::bar::baz".to_string()));
        let test_fst = TokenStream::from_tts(string_to_tts("foo::bar".to_string()));
        let test_snd = TokenStream::from_tts(string_to_tts("::baz".to_string()));
        let eq_res = TokenStream::concat(test_fst, test_snd);
        assert_eq!(test_res.len(), 5);
        assert_eq!(eq_res.len(), 5);
        let test_res = string_to_ts("foo::bar::baz");
        let test_fst = string_to_ts("foo::bar");
        let test_snd = string_to_ts("::baz");
        let eq_res = TokenStream::concat([test_fst, test_snd].iter().cloned());
        assert_eq!(test_res.trees().count(), 5);
        assert_eq!(eq_res.trees().count(), 5);
        assert_eq!(test_res.eq_unspanned(&eq_res), true);
    }

    #[test]
    fn test_from_to_bijection() {
        let test_start = string_to_tts("foo::bar(baz)".to_string());
        let test_end = TokenStream::from_tts(string_to_tts("foo::bar(baz)".to_string())).to_tts();
        let ts = test_start.iter().cloned().collect::<TokenStream>();
        let test_end: Vec<TokenTree> = ts.trees().cloned().collect();
        assert_eq!(test_start, test_end)
    }

    #[test]
    fn test_to_from_bijection() {
        let test_start = TokenStream::from_tts(string_to_tts("foo::bar(baz)".to_string()));
        let test_end = TokenStream::from_tts(test_start.clone().to_tts());
        let test_start = string_to_ts("foo::bar(baz)");
        let test_end = test_start.trees().cloned().collect();
        assert_eq!(test_start, test_end)
    }

    #[test]
    fn test_eq_0() {
        let test_res = TokenStream::from_tts(string_to_tts("foo".to_string()));
        let test_eqs = TokenStream::from_tts(string_to_tts("foo".to_string()));
        let test_res = string_to_ts("foo");
        let test_eqs = string_to_ts("foo");
        assert_eq!(test_res, test_eqs)
    }

    #[test]
    fn test_eq_1() {
        let test_res = TokenStream::from_tts(string_to_tts("::bar::baz".to_string()));
        let test_eqs = TokenStream::from_tts(string_to_tts("::bar::baz".to_string()));
        let test_res = string_to_ts("::bar::baz");
        let test_eqs = string_to_ts("::bar::baz");
        assert_eq!(test_res, test_eqs)
    }

    #[test]
    fn test_eq_2() {
        let test_res = TokenStream::from_tts(string_to_tts("foo::bar".to_string()));
        let test_eqs = TokenStream::from_tts(string_to_tts("foo::bar::baz".to_string()));
        assert_eq!(test_res, test_eqs.slice(0..3))
    }

    #[test]
    fn test_eq_3() {
        let test_res = TokenStream::from_tts(string_to_tts("".to_string()));
        let test_eqs = TokenStream::from_tts(string_to_tts("".to_string()));
        let test_res = string_to_ts("");
        let test_eqs = string_to_ts("");
        assert_eq!(test_res, test_eqs)
    }

    #[test]
    fn test_diseq_0() {
        let test_res = TokenStream::from_tts(string_to_tts("::bar::baz".to_string()));
        let test_eqs = TokenStream::from_tts(string_to_tts("bar::baz".to_string()));
        let test_res = string_to_ts("::bar::baz");
        let test_eqs = string_to_ts("bar::baz");
        assert_eq!(test_res == test_eqs, false)
    }

    #[test]
    fn test_diseq_1() {
        let test_res = TokenStream::from_tts(string_to_tts("(bar,baz)".to_string()));
        let test_eqs = TokenStream::from_tts(string_to_tts("bar,baz".to_string()));
        let test_res = string_to_ts("(bar,baz)");
        let test_eqs = string_to_ts("bar,baz");
        assert_eq!(test_res == test_eqs, false)
    }

    #[test]
    fn test_slice_0() {
        let test_res = TokenStream::from_tts(string_to_tts("foo::bar".to_string()));
        let test_eqs = TokenStream::from_tts(string_to_tts("foo::bar::baz".to_string()));
        assert_eq!(test_res, test_eqs.slice(0..3))
    }

    #[test]
    fn test_slice_1() {
        let test_res = TokenStream::from_tts(string_to_tts("foo::bar::baz".to_string()))
            .slice(2..3);
        let test_eqs = TokenStream::from_tts(vec![TokenTree::Token(sp(5,8),
                                                  token::Ident(Ident::from_str("bar")))]);
        assert_eq!(test_res, test_eqs)
    }

    #[test]
    fn test_is_empty() {
        let test0 = TokenStream::from_tts(Vec::new());
        let test1 = TokenStream::from_tts(
            vec![TokenTree::Token(sp(0, 1), Token::Ident(Ident::from_str("a")))]
        );

        let test2 = TokenStream::from_tts(string_to_tts("foo(bar::baz)".to_string()));
        let test0: TokenStream = Vec::<TokenTree>::new().into_iter().collect();
        let test1: TokenStream =
            TokenTree::Token(sp(0, 1), Token::Ident(Ident::from_str("a"))).into();
        let test2 = string_to_ts("foo(bar::baz)");

        assert_eq!(test0.is_empty(), true);
        assert_eq!(test1.is_empty(), false);
        assert_eq!(test2.is_empty(), false);
    }

    #[test]
    fn test_is_delimited() {
        let test0 = TokenStream::from_tts(string_to_tts("foo(bar::baz)".to_string()));
        let test1 = TokenStream::from_tts(string_to_tts("(bar::baz)".to_string()));
        let test2 = TokenStream::from_tts(string_to_tts("(foo,bar,baz)".to_string()));
        let test3 = TokenStream::from_tts(string_to_tts("(foo,bar,baz)(zab,rab,oof)".to_string()));
        let test4 = TokenStream::from_tts(string_to_tts("(foo,bar,baz)foo".to_string()));
        let test5 = TokenStream::from_tts(string_to_tts("".to_string()));

        assert_eq!(test0.is_delimited(), false);
        assert_eq!(test1.is_delimited(), true);
        assert_eq!(test2.is_delimited(), true);
        assert_eq!(test3.is_delimited(), false);
        assert_eq!(test4.is_delimited(), false);
        assert_eq!(test5.is_delimited(), false);
    }

    #[test]
    fn test_is_ident() {
        let test0 = TokenStream::from_tts(string_to_tts("\"foo\"".to_string()));
        let test1 = TokenStream::from_tts(string_to_tts("5".to_string()));
        let test2 = TokenStream::from_tts(string_to_tts("foo".to_string()));
        let test3 = TokenStream::from_tts(string_to_tts("foo::bar".to_string()));
        let test4 = TokenStream::from_tts(string_to_tts("foo(bar)".to_string()));

        assert_eq!(test0.is_ident(), false);
        assert_eq!(test1.is_ident(), false);
        assert_eq!(test2.is_ident(), true);
        assert_eq!(test3.is_ident(), false);
        assert_eq!(test4.is_ident(), false);
    }

    #[test]
    fn test_maybe_delimited() {
        let test0_input = TokenStream::from_tts(string_to_tts("foo(bar::baz)".to_string()));
        let test1_input = TokenStream::from_tts(string_to_tts("(bar::baz)".to_string()));
        let test2_input = TokenStream::from_tts(string_to_tts("(foo,bar,baz)".to_string()));
        let test3_input = TokenStream::from_tts(string_to_tts("(foo,bar,baz)(zab,rab)"
            .to_string()));
        let test4_input = TokenStream::from_tts(string_to_tts("(foo,bar,baz)foo".to_string()));
        let test5_input = TokenStream::from_tts(string_to_tts("".to_string()));

        let test0 = test0_input.maybe_delimited();
        let test1 = test1_input.maybe_delimited();
        let test2 = test2_input.maybe_delimited();
        let test3 = test3_input.maybe_delimited();
        let test4 = test4_input.maybe_delimited();
        let test5 = test5_input.maybe_delimited();

        assert_eq!(test0, None);

        let test1_expected = TokenStream::from_tts(vec![TokenTree::Token(sp(1, 4),
                                                        token::Ident(Ident::from_str("bar"))),
                                                        TokenTree::Token(sp(4, 6), token::ModSep),
                                                        TokenTree::Token(sp(6, 9),
                                                        token::Ident(Ident::from_str("baz")))]);
        assert_eq!(test1, Some(test1_expected));

        let test2_expected = TokenStream::from_tts(vec![TokenTree::Token(sp(1, 4),
                                                        token::Ident(Ident::from_str("foo"))),
                                                        TokenTree::Token(sp(4, 5), token::Comma),
                                                        TokenTree::Token(sp(5, 8),
                                                        token::Ident(Ident::from_str("bar"))),
                                                        TokenTree::Token(sp(8, 9), token::Comma),
                                                        TokenTree::Token(sp(9, 12),
                                                        token::Ident(Ident::from_str("baz")))]);
        assert_eq!(test2, Some(test2_expected));

        assert_eq!(test3, None);

        assert_eq!(test4, None);

        assert_eq!(test5, None);
    }

    // pub fn maybe_ident(&self) -> Option<ast::Ident>
    #[test]
    fn test_maybe_ident() {
        let test0 = TokenStream::from_tts(string_to_tts("\"foo\"".to_string())).maybe_ident();
        let test1 = TokenStream::from_tts(string_to_tts("5".to_string())).maybe_ident();
        let test2 = TokenStream::from_tts(string_to_tts("foo".to_string())).maybe_ident();
        let test3 = TokenStream::from_tts(string_to_tts("foo::bar".to_string())).maybe_ident();
        let test4 = TokenStream::from_tts(string_to_tts("foo(bar)".to_string())).maybe_ident();

        assert_eq!(test0, None);
        assert_eq!(test1, None);
        assert_eq!(test2, Some(Ident::from_str("foo")));
        assert_eq!(test3, None);
        assert_eq!(test4, None);
    }

    #[test]
    fn test_as_delimited_stream() {
        let test0 = as_paren_delimited_stream(string_to_tts("foo,bar,".to_string()));
        let test1 = as_paren_delimited_stream(string_to_tts("baz(foo,bar)".to_string()));

        let test0_tts = vec![TokenTree::Token(sp(0, 3), token::Ident(Ident::from_str("foo"))),
                             TokenTree::Token(sp(3, 4), token::Comma),
                             TokenTree::Token(sp(4, 7), token::Ident(Ident::from_str("bar"))),
                             TokenTree::Token(sp(7, 8), token::Comma)];
        let test0_stream = TokenStream::from_tts(vec![TokenTree::Delimited(sp(0, 8),
                                                      Rc::new(Delimited {
                                                          delim: token::DelimToken::Paren,
                                                          open_span: DUMMY_SP,
                                                          tts: test0_tts,
                                                          close_span: DUMMY_SP,
                                                      }))]);

        assert_eq!(test0, test0_stream);

        let test1_tts = vec![TokenTree::Token(sp(4, 7), token::Ident(Ident::from_str("foo"))),
                             TokenTree::Token(sp(7, 8), token::Comma),
                             TokenTree::Token(sp(8, 11), token::Ident(Ident::from_str("bar")))];

        let test1_parse = vec![TokenTree::Token(sp(0, 3), token::Ident(Ident::from_str("baz"))),
                               TokenTree::Delimited(sp(3, 12),
                                                    Rc::new(Delimited {
                                                        delim: token::DelimToken::Paren,
                                                        open_span: sp(3, 4),
                                                        tts: test1_tts,
                                                        close_span: sp(11, 12),
                                                    }))];

        let test1_stream = TokenStream::from_tts(vec![TokenTree::Delimited(sp(0, 12),
                                                      Rc::new(Delimited {
                                                          delim: token::DelimToken::Paren,
                                                          open_span: DUMMY_SP,
                                                          tts: test1_parse,
                                                          close_span: DUMMY_SP,
                                                      }))]);

        assert_eq!(test1, test1_stream);
    }
}
50
src/libsyntax/util/rc_slice.rs
Normal file
@ -0,0 +1,50 @@
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use std::hash::{self, Hash};
use std::fmt;
use std::ops::Deref;
use std::rc::Rc;

#[derive(Clone)]
pub struct RcSlice<T> {
    data: Rc<Box<[T]>>,
    offset: u32,
    len: u32,
}

impl<T> RcSlice<T> {
    pub fn new(vec: Vec<T>) -> Self {
        RcSlice {
            offset: 0,
            len: vec.len() as u32,
            data: Rc::new(vec.into_boxed_slice()),
        }
    }
}

impl<T> Deref for RcSlice<T> {
    type Target = [T];
    fn deref(&self) -> &[T] {
        &self.data[self.offset as usize .. (self.offset + self.len) as usize]
    }
}

impl<T: Hash> Hash for RcSlice<T> {
    fn hash<H: hash::Hasher>(&self, state: &mut H) {
        self.deref().hash(state);
    }
}

impl<T: fmt::Debug> fmt::Debug for RcSlice<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Debug::fmt(self.deref(), f)
    }
}
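
A short sketch of the idea behind `RcSlice` (values illustrative): the boxed slice is shared behind the `Rc`, and `offset`/`len` select a window onto it, so cloning a handle never copies elements.

    fn rc_slice_demo() {
        let s = RcSlice::new(vec![1, 2, 3, 4]);
        let t = s.clone();               // bumps the Rc refcount; no element copies
        assert_eq!(&*s, &[1, 2, 3, 4]);  // Deref exposes the offset..offset+len window
        assert_eq!(&*t, &*s);
    }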
@ -15,52 +15,45 @@
#![plugin(proc_macro_plugin)]

extern crate rustc_plugin;
extern crate proc_macro_tokens;
extern crate syntax;

use proc_macro_tokens::prelude::*;

use rustc_plugin::Registry;

use syntax::ast::Ident;
use syntax::codemap::{DUMMY_SP, Span};
use syntax::ext::proc_macro_shim::build_block_emitter;
use syntax::ext::base::{ExtCtxt, MacResult};
use syntax::parse::token::{self, Token, DelimToken};
use syntax::ext::base::SyntaxExtension;
use syntax::parse::token::Token;
use syntax::symbol::Symbol;
use syntax::tokenstream::{TokenTree, TokenStream};

#[plugin_registrar]
pub fn plugin_registrar(reg: &mut Registry) {
    reg.register_macro("cond", cond);
    reg.register_syntax_extension(Symbol::intern("cond"),
                                  SyntaxExtension::ProcMacro(Box::new(cond)));
}

fn cond<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box<MacResult + 'cx> {
    let output = cond_rec(TokenStream::from_tts(tts.clone().to_owned()));
    build_block_emitter(cx, sp, output)
}

fn cond_rec(input: TokenStream) -> TokenStream {
    if input.is_empty() {
        return qquote!();
    }

    let next = input.slice(0..1);
    let rest = input.slice_from(1..);

    let clause : TokenStream = match next.maybe_delimited() {
        Some(ts) => ts,
        _ => panic!("Invalid input"),
    };

    // clause is ([test]) [rhs]
    if clause.len() < 2 { panic!("Invalid macro usage in cond: {:?}", clause) }

    let test: TokenStream = clause.slice(0..1);
    let rhs: TokenStream = clause.slice_from(1..);

    if ident_eq(&test[0], Ident::from_str("else")) || rest.is_empty() {
        qquote!({unquote(rhs)})
    } else {
        qquote!({if unquote(test) { unquote(rhs) } else { cond!(unquote(rest)) } })
    }
}

fn cond(input: TokenStream) -> TokenStream {
    let mut conds = Vec::new();
    let mut input = input.trees();
    while let Some(tree) = input.next() {
        let cond: TokenStream = match *tree {
            TokenTree::Delimited(_, ref delimited) => delimited.tts.iter().cloned().collect(),
            _ => panic!("Invalid input"),
        };
        let mut trees = cond.trees().cloned();
        let test = trees.next();
        let rhs = trees.collect::<TokenStream>();
        if rhs.is_empty() {
            panic!("Invalid macro usage in cond: {}", cond);
        }
        let is_else = match test {
            Some(TokenTree::Token(_, Token::Ident(ident))) if ident.name == "else" => true,
            _ => false,
        };
        conds.push(if is_else || input.peek().is_none() {
            qquote!({ unquote rhs })
        } else {
            qquote!(if unquote(test.unwrap()) { unquote rhs } else)
        });
    }

    conds.into_iter().collect()
}
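
The rewritten `cond` expects each clause to be one delimited tree whose first token is the test (or `else`) and whose remainder is the right-hand side, and it folds the clauses into a chain of `if`/`else` tokens. A hypothetical invocation, with the clause shape inferred from the parsing above:

    // Sketch only: expands to `if (n == 0) { 1 } else { n * fact(n - 1) }`.
    fn fact(n: u64) -> u64 {
        cond!(
            ((n == 0) 1)
            (else n * fact(n - 1))
        )
    }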

@ -14,22 +14,21 @@
#![plugin(proc_macro_plugin)]

extern crate rustc_plugin;
extern crate proc_macro_tokens;
extern crate syntax;

use syntax::ext::proc_macro_shim::prelude::*;
use proc_macro_tokens::prelude::*;

use rustc_plugin::Registry;
use syntax::ext::base::SyntaxExtension;
use syntax::symbol::Symbol;
use syntax::tokenstream::TokenStream;

#[plugin_registrar]
pub fn plugin_registrar(reg: &mut Registry) {
    reg.register_macro("hello", hello);
    reg.register_syntax_extension(Symbol::intern("hello"),
                                  SyntaxExtension::ProcMacro(Box::new(hello)));
}

// This macro is not very interesting, but it does contain delimited tokens with
// no content - `()` and `{}` - which has caused problems in the past.
fn hello<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box<MacResult + 'cx> {
    let output = qquote!({ fn hello() {} hello(); });
    build_block_emitter(cx, sp, output)
}

fn hello(_: TokenStream) -> TokenStream {
    qquote!({ fn hello() {} hello(); })
}
@ -9,15 +9,14 @@
// except according to those terms.

#![feature(plugin, plugin_registrar, rustc_private)]
#![plugin(proc_macro_plugin)]

extern crate proc_macro_tokens;
extern crate rustc_plugin;
extern crate syntax;

use proc_macro_tokens::prelude::*;
use rustc_plugin::Registry;
use syntax::ext::base::SyntaxExtension;
use syntax::ext::proc_macro_shim::prelude::*;
use syntax::tokenstream::TokenStream;
use syntax::symbol::Symbol;

#[plugin_registrar]
@ -35,23 +34,21 @@ pub fn plugin_registrar(reg: &mut Registry) {
}

fn attr_tru(_attr: TokenStream, _item: TokenStream) -> TokenStream {
    lex("fn f1() -> bool { true }")
    qquote!(fn f1() -> bool { true })
}

fn attr_identity(_attr: TokenStream, item: TokenStream) -> TokenStream {
    let source = item.to_string();
    lex(&source)
    qquote!(unquote item)
}

fn tru(_ts: TokenStream) -> TokenStream {
    lex("true")
    qquote!(true)
}

fn ret_tru(_ts: TokenStream) -> TokenStream {
    lex("return true;")
    qquote!(return true;)
}

fn identity(ts: TokenStream) -> TokenStream {
    let source = ts.to_string();
    lex(&source)
    qquote!(unquote ts)
}
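
Each body here trades a string round-trip through `lex(...)` for direct quasiquotation, with `unquote` splicing an existing `TokenStream` into the quoted tokens. The same pattern as a standalone sketch (hypothetical helper, not part of this commit):

    // Wrap an incoming item's tokens in braces by splicing them into
    // quoted output, mirroring `attr_identity` above.
    fn braced(item: TokenStream) -> TokenStream {
        qquote!({ unquote item })
    }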

@ -14,12 +14,14 @@
#![feature(rustc_private)]
#![plugin(proc_macro_plugin)]

extern crate proc_macro_tokens;
use proc_macro_tokens::prelude::*;

extern crate syntax;
extern crate syntax_pos;

use syntax::ast::Ident;
use syntax::parse::token;
use syntax::tokenstream::TokenTree;

fn main() {
    let lex_true = lex("true");
    assert_eq!(qquote!(true).eq_unspanned(&lex_true), true);
    let true_tok = TokenTree::Token(syntax_pos::DUMMY_SP, token::Ident(Ident::from_str("true")));
    assert!(qquote!(true).eq_unspanned(&true_tok.into()));
}