Rename TokenTree variants for clarity

The new names should be clearer, and fit in better with the `TTNonterminal` variant.

Renames:

- `TTTok` -> `TTToken`
- `TTDelim` -> `TTDelimited`
- `TTSeq` -> `TTSequence`
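
For orientation, a minimal sketch (not part of this commit; the `describe` helper is hypothetical) of how a downstream match over `TokenTree` reads under the new names:

```rust
use syntax::ast::{TokenTree, TTToken, TTDelimited, TTSequence, TTNonterminal};

// Hypothetical helper, for illustration only.
fn describe(tt: &TokenTree) -> &'static str {
    match *tt {
        TTToken(..)       => "a single token (was TTTok)",
        TTDelimited(..)   => "a delimited sequence of token trees (was TTDelim)",
        TTSequence(..)    => "a kleene-style repetition (was TTSeq)",
        TTNonterminal(..) => "a syntactic variable filled in by macro expansion",
    }
}
```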
Author: Brendan Zabarauskas
Date:   2014-10-22 23:35:32 +11:00
Parent: 971d776aa5
Commit: ec3f0201e7

14 changed files with 98 additions and 95 deletions


@@ -56,7 +56,7 @@ extern crate rustc;
 
 use syntax::codemap::Span;
 use syntax::parse::token::{IDENT, get_ident};
-use syntax::ast::{TokenTree, TTTok};
+use syntax::ast::{TokenTree, TTToken};
 use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacExpr};
 use syntax::ext::build::AstBuilder; // trait for expr_uint
 use rustc::plugin::Registry;
@@ -71,7 +71,7 @@ fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
         ("I", 1)];
 
     let text = match args {
-        [TTTok(_, IDENT(s, _))] => get_ident(s).to_string(),
+        [TTToken(_, IDENT(s, _))] => get_ident(s).to_string(),
         _ => {
             cx.span_err(sp, "argument should be a single identifier");
             return DummyResult::any(sp);


@@ -24,6 +24,9 @@ use std::fmt::Show;
 use std::rc::Rc;
 use serialize::{Encodable, Decodable, Encoder, Decoder};
 
+#[cfg(stage0)]
+pub use self::TTToken as TTTok;
+
 // FIXME #6993: in librustc, uses of "ident" should be replaced
 // by just "Name".
@@ -600,9 +603,9 @@ pub struct Delimiter {
 }
 
 impl Delimiter {
-    /// Convert the delimiter to a `TTTok`
+    /// Convert the delimiter to a `TTToken`
     pub fn to_tt(&self) -> TokenTree {
-        TTTok(self.span, self.token.clone())
+        TTToken(self.span, self.token.clone())
     }
 }
@@ -614,9 +617,9 @@ impl Delimiter {
 /// If the syntax extension is an MBE macro, it will attempt to match its
 /// LHS "matchers" against the provided token tree, and if it finds a
 /// match, will transcribe the RHS token tree, splicing in any captured
-/// macro_parser::matched_nonterminals into the TTNonterminals it finds.
+/// `macro_parser::matched_nonterminals` into the `TTNonterminal`s it finds.
 ///
-/// The RHS of an MBE macro is the only place a TTNonterminal or TTSeq
+/// The RHS of an MBE macro is the only place a `TTNonterminal` or `TTSequence`
 /// makes any real sense. You could write them elsewhere but nothing
 /// else knows what to do with them, so you'll probably get a syntax
 /// error.
@@ -624,18 +627,18 @@ impl Delimiter {
 #[doc="For macro invocations; parsing is delegated to the macro"]
 pub enum TokenTree {
     /// A single token
-    TTTok(Span, ::parse::token::Token),
+    TTToken(Span, ::parse::token::Token),
     /// A delimited sequence of token trees
     // FIXME(eddyb) #6308 Use Rc<[TokenTree]> after DST.
-    TTDelim(Span, Delimiter, Rc<Vec<TokenTree>>, Delimiter),
+    TTDelimited(Span, Delimiter, Rc<Vec<TokenTree>>, Delimiter),
 
     // These only make sense for right-hand-sides of MBE macros:
 
-    /// A kleene-style repetition sequence with a span, a TTForest,
+    /// A kleene-style repetition sequence with a span, a `TTForest`,
     /// an optional separator, and a boolean where true indicates
     /// zero or more (..), and false indicates one or more (+).
     // FIXME(eddyb) #6308 Use Rc<[TokenTree]> after DST.
-    TTSeq(Span, Rc<Vec<TokenTree>>, Option<::parse::token::Token>, bool),
+    TTSequence(Span, Rc<Vec<TokenTree>>, Option<::parse::token::Token>, bool),
     /// A syntactic variable that will be filled in by macro expansion.
     TTNonterminal(Span, Ident)
@@ -645,10 +648,10 @@ impl TokenTree {
     /// Returns the `Span` corresponding to this token tree.
     pub fn get_span(&self) -> Span {
         match *self {
-            TTTok(span, _) => span,
-            TTDelim(span, _, _, _) => span,
-            TTSeq(span, _, _, _) => span,
+            TTToken(span, _) => span,
+            TTDelimited(span, _, _, _) => span,
+            TTSequence(span, _, _, _) => span,
             TTNonterminal(span, _) => span,
         }
     }
 }


@@ -50,7 +50,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt,
                                    token_tree: &[TokenTree])
                                    -> Box<MacResult+'cx> {
     let code = match token_tree {
-        [ast::TTTok(_, token::IDENT(code, _))] => code,
+        [ast::TTToken(_, token::IDENT(code, _))] => code,
         _ => unreachable!()
     };
     with_registered_diagnostics(|diagnostics| {
@@ -82,12 +82,12 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt,
                                        token_tree: &[TokenTree])
                                        -> Box<MacResult+'cx> {
     let (code, description) = match token_tree {
-        [ast::TTTok(_, token::IDENT(ref code, _))] => {
+        [ast::TTToken(_, token::IDENT(ref code, _))] => {
             (code, None)
         },
-        [ast::TTTok(_, token::IDENT(ref code, _)),
-         ast::TTTok(_, token::COMMA),
-         ast::TTTok(_, token::LIT_STR_RAW(description, _))] => {
+        [ast::TTToken(_, token::IDENT(ref code, _)),
+         ast::TTToken(_, token::COMMA),
+         ast::TTToken(_, token::LIT_STR_RAW(description, _))] => {
             (code, Some(description))
         }
         _ => unreachable!()
@@ -110,7 +110,7 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt,
                                           token_tree: &[TokenTree])
                                           -> Box<MacResult+'cx> {
     let name = match token_tree {
-        [ast::TTTok(_, token::IDENT(ref name, _))] => name,
+        [ast::TTToken(_, token::IDENT(ref name, _))] => name,
         _ => unreachable!()
     };


@@ -684,8 +684,8 @@ pub fn get_single_str_from_tts(cx: &ExtCtxt,
         cx.span_err(sp, format!("{} takes 1 argument.", name).as_slice());
     } else {
         match tts[0] {
-            ast::TTTok(_, token::LIT_STR(ident)) => return Some(parse::str_lit(ident.as_str())),
-            ast::TTTok(_, token::LIT_STR_RAW(ident, _)) => {
+            ast::TTToken(_, token::LIT_STR(ident)) => return Some(parse::str_lit(ident.as_str())),
+            ast::TTToken(_, token::LIT_STR_RAW(ident, _)) => {
                 return Some(parse::raw_str_lit(ident.as_str()))
             }
             _ => {


@@ -23,7 +23,7 @@ pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]
     for (i, e) in tts.iter().enumerate() {
         if i & 1 == 1 {
             match *e {
-                ast::TTTok(_, token::COMMA) => (),
+                ast::TTToken(_, token::COMMA) => (),
                 _ => {
                     cx.span_err(sp, "concat_idents! expecting comma.");
                     return DummyResult::expr(sp);
@@ -31,7 +31,7 @@ pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]
             }
         } else {
             match *e {
-                ast::TTTok(_, token::IDENT(ident,_)) => {
+                ast::TTToken(_, token::IDENT(ident,_)) => {
                     res_str.push_str(token::get_ident(ident).get())
                 }
                 _ => {


@@ -639,10 +639,10 @@ fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
 fn mk_tt(cx: &ExtCtxt, _: Span, tt: &ast::TokenTree) -> Vec<P<ast::Stmt>> {
     match *tt {
-        ast::TTTok(sp, ref tok) => {
+        ast::TTToken(sp, ref tok) => {
             let e_sp = cx.expr_ident(sp, id_ext("_sp"));
             let e_tok = cx.expr_call(sp,
-                                     mk_ast_path(cx, sp, "TTTok"),
+                                     mk_ast_path(cx, sp, "TTToken"),
                                      vec!(e_sp, mk_token(cx, sp, tok)));
             let e_push =
                 cx.expr_method_call(sp,
@@ -651,14 +651,14 @@ fn mk_tt(cx: &ExtCtxt, _: Span, tt: &ast::TokenTree) -> Vec<P<ast::Stmt>> {
                                     vec!(e_tok));
             vec!(cx.stmt_expr(e_push))
         },
-        ast::TTDelim(sp, ref open, ref tts, ref close) => {
+        ast::TTDelimited(sp, ref open, ref tts, ref close) => {
             let mut stmts = vec![];
             stmts.extend(mk_tt(cx, sp, &open.to_tt()).into_iter());
             stmts.extend(tts.iter().flat_map(|tt| mk_tt(cx, sp, tt).into_iter()));
             stmts.extend(mk_tt(cx, sp, &close.to_tt()).into_iter());
             stmts
         },
-        ast::TTSeq(..) => fail!("TTSeq in quote!"),
+        ast::TTSequence(..) => fail!("TTSequence in quote!"),
         ast::TTNonterminal(sp, ident) => {
             // tt.extend($ident.to_tokens(ext_cx).into_iter())


@@ -20,10 +20,10 @@ pub fn expand_trace_macros(cx: &mut ExtCtxt,
                            tt: &[ast::TokenTree])
                            -> Box<base::MacResult+'static> {
     match tt {
-        [ast::TTTok(_, ref tok)] if is_keyword(keywords::True, tok) => {
+        [ast::TTToken(_, ref tok)] if is_keyword(keywords::True, tok) => {
             cx.set_trace_macros(true);
         }
-        [ast::TTTok(_, ref tok)] if is_keyword(keywords::False, tok) => {
+        [ast::TTToken(_, ref tok)] if is_keyword(keywords::False, tok) => {
             cx.set_trace_macros(false);
         }
         _ => cx.span_err(sp, "trace_macros! accepts only `true` or `false`"),


@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use ast::{Ident, Matcher_, Matcher, MatchTok, MatchNonterminal, MatchSeq, TTDelim};
+use ast::{Ident, Matcher_, Matcher, MatchTok, MatchNonterminal, MatchSeq, TTDelimited};
 use ast;
 use codemap::{Span, Spanned, DUMMY_SP};
 use ext::base::{ExtCtxt, MacResult, MacroDef};
@@ -172,7 +172,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
                 MatchedNonterminal(NtTT(ref tt)) => {
                     match **tt {
                         // ignore delimiters
-                        TTDelim(_, _, ref tts, _) => (**tts).clone(),
+                        TTDelimited(_, _, ref tts, _) => (**tts).clone(),
                         _ => cx.span_fatal(sp, "macro rhs must be delimited"),
                     }
                 },


@@ -9,7 +9,7 @@
 // except according to those terms.
 
 use ast;
-use ast::{TokenTree, TTDelim, TTTok, TTSeq, TTNonterminal, Ident};
+use ast::{TokenTree, TTDelimited, TTToken, TTSequence, TTNonterminal, Ident};
 use codemap::{Span, DUMMY_SP};
 use diagnostic::SpanHandler;
 use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
@@ -45,7 +45,7 @@ pub struct TtReader<'a> {
 }
 
 /// This can do Macro-By-Example transcription. On the other hand, if
-/// `src` contains no `TTSeq`s and `TTNonterminal`s, `interp` can (and
+/// `src` contains no `TTSequence`s and `TTNonterminal`s, `interp` can (and
 /// should) be none.
 pub fn new_tt_reader<'a>(sp_diag: &'a SpanHandler,
                          interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
@@ -130,12 +130,12 @@ fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize {
     match *t {
         // The opening and closing delimiters are both tokens, so they are
         // treated as `LisUnconstrained`.
-        TTDelim(_, _, ref tts, _) | TTSeq(_, ref tts, _, _) => {
+        TTDelimited(_, _, ref tts, _) | TTSequence(_, ref tts, _, _) => {
             tts.iter().fold(LisUnconstrained, |size, tt| {
                 size + lockstep_iter_size(tt, r)
             })
         },
-        TTTok(..) => LisUnconstrained,
+        TTToken(..) => LisUnconstrained,
         TTNonterminal(_, name) => match *lookup_cur_matched(r, name) {
             MatchedNonterminal(_) => LisUnconstrained,
             MatchedSeq(ref ads, _) => LisConstraint(ads.len(), name)
@@ -194,15 +194,15 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
             }
         }
     }
-    loop { /* because it's easiest, this handles `TTDelim` not starting
-              with a `TTTok`, even though it won't happen */
+    loop { /* because it's easiest, this handles `TTDelimited` not starting
+              with a `TTToken`, even though it won't happen */
         let t = {
             let frame = r.stack.last().unwrap();
             // FIXME(pcwalton): Bad copy.
             (*frame.forest)[frame.idx].clone()
         };
         match t {
-            TTDelim(_, open, delimed_tts, close) => {
+            TTDelimited(_, open, delimed_tts, close) => {
                 let mut tts = vec![];
                 tts.push(open.to_tt());
                 tts.extend(delimed_tts.iter().map(|x| (*x).clone()));
@@ -216,15 +216,15 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
                 });
                 // if this could be 0-length, we'd need to potentially recur here
             }
-            TTTok(sp, tok) => {
+            TTToken(sp, tok) => {
                 r.cur_span = sp;
                 r.cur_tok = tok;
                 r.stack.last_mut().unwrap().idx += 1;
                 return ret_val;
             }
-            TTSeq(sp, tts, sep, zerok) => {
+            TTSequence(sp, tts, sep, zerok) => {
                 // FIXME(pcwalton): Bad copy.
-                match lockstep_iter_size(&TTSeq(sp, tts.clone(), sep.clone(), zerok), r) {
+                match lockstep_iter_size(&TTSequence(sp, tts.clone(), sep.clone(), zerok), r) {
                     LisUnconstrained => {
                         r.sp_diag.span_fatal(
                             sp.clone(), /* blame macro writer */


@@ -569,24 +569,24 @@ pub fn noop_fold_arg<T: Folder>(Arg {id, pat, ty}: Arg, fld: &mut T) -> Arg {
 pub fn noop_fold_tt<T: Folder>(tt: &TokenTree, fld: &mut T) -> TokenTree {
     match *tt {
-        TTTok(span, ref tok) =>
-            TTTok(span, fld.fold_token(tok.clone())),
-        TTDelim(span, ref open, ref tts, ref close) =>
-            TTDelim(span,
+        TTToken(span, ref tok) =>
+            TTToken(span, fld.fold_token(tok.clone())),
+        TTDelimited(span, ref open, ref tts, ref close) =>
+            TTDelimited(span,
                         Delimiter {
                             span: open.span,
                             token: fld.fold_token(open.token.clone())
                         },
                         Rc::new(fld.fold_tts(tts.as_slice())),
                         Delimiter {
                             span: close.span,
                             token: fld.fold_token(close.token.clone())
                         }),
-        TTSeq(span, ref pattern, ref sep, is_optional) =>
-            TTSeq(span,
+        TTSequence(span, ref pattern, ref sep, is_optional) =>
+            TTSequence(span,
                        Rc::new(fld.fold_tts(pattern.as_slice())),
                        sep.clone().map(|tok| fld.fold_token(tok)),
                        is_optional),
         TTNonterminal(sp,ref ident) =>
             TTNonterminal(sp,fld.fold_ident(*ident))
     }


@@ -793,29 +793,29 @@ mod test {
         let tts = string_to_tts("macro_rules! zip (($a)=>($a))".to_string());
         let tts: &[ast::TokenTree] = tts.as_slice();
         match tts {
-            [ast::TTTok(_, _),
-             ast::TTTok(_, token::NOT),
-             ast::TTTok(_, _),
-             ast::TTDelim(_, ast::TTTok(_, token::LPAREN),
-                          ref delim_elts,
-                          ast::TTTok(_, token::RPAREN))] => {
+            [ast::TTToken(_, _),
+             ast::TTToken(_, token::NOT),
+             ast::TTToken(_, _),
+             ast::TTDelimited(_, ast::TTToken(_, token::LPAREN),
+                              ref delim_elts,
+                              ast::TTToken(_, token::RPAREN))] => {
                 let delim_elts: &[ast::TokenTree] = delim_elts.as_slice();
                 match delim_elts {
-                    [ast::TTDelim(_, ast::TTTok(_, token::LPAREN),
-                                  ref first_set,
-                                  ast::TTTok(_, token::RPAREN)),
-                     ast::TTTok(_, token::FAT_ARROW),
-                     ast::TTDelim(_, ast::TTTok(_, token::LPAREN),
-                                  ref second_set,
-                                  ast::TTTok(_, token::RPAREN))] => {
+                    [ast::TTDelimited(_, ast::TTToken(_, token::LPAREN),
+                                      ref first_set,
+                                      ast::TTToken(_, token::RPAREN)),
+                     ast::TTToken(_, token::FAT_ARROW),
+                     ast::TTDelimited(_, ast::TTToken(_, token::LPAREN),
+                                      ref second_set,
+                                      ast::TTToken(_, token::RPAREN))] => {
                         let first_set: &[ast::TokenTree] =
                             first_set.as_slice();
                         match first_set {
-                            [ast::TTTok(_, token::DOLLAR), ast::TTTok(_, _)] => {
+                            [ast::TTToken(_, token::DOLLAR), ast::TTToken(_, _)] => {
                                 let second_set: &[ast::TokenTree] =
                                     second_set.as_slice();
                                 match second_set {
-                                    [ast::TTTok(_, token::DOLLAR), ast::TTTok(_, _)] => {
+                                    [ast::TTToken(_, token::DOLLAR), ast::TTToken(_, _)] => {
                                         assert_eq!("correct","correct")
                                     }
                                     _ => assert_eq!("wrong 4","correct")
@@ -845,7 +845,7 @@ mod test {
         assert_eq!(json::encode(&tts),
                    "[\
                     {\
-                    \"variant\":\"TTTok\",\
+                    \"variant\":\"TTToken\",\
                     \"fields\":[\
                     null,\
                     {\
@@ -858,7 +858,7 @@ mod test {
                     ]\
                     },\
                     {\
-                    \"variant\":\"TTTok\",\
+                    \"variant\":\"TTToken\",\
                     \"fields\":[\
                     null,\
                     {\
@@ -871,18 +871,18 @@ mod test {
                     ]\
                     },\
                     {\
-                    \"variant\":\"TTDelim\",\
+                    \"variant\":\"TTDelimited\",\
                     \"fields\":[\
                     [\
                     {\
-                    \"variant\":\"TTTok\",\
+                    \"variant\":\"TTToken\",\
                     \"fields\":[\
                     null,\
                     \"LPAREN\"\
                     ]\
                     },\
                     {\
-                    \"variant\":\"TTTok\",\
+                    \"variant\":\"TTToken\",\
                     \"fields\":[\
                     null,\
                     {\
@@ -895,14 +895,14 @@ mod test {
                     ]\
                     },\
                     {\
-                    \"variant\":\"TTTok\",\
+                    \"variant\":\"TTToken\",\
                     \"fields\":[\
                     null,\
                     \"COLON\"\
                     ]\
                     },\
                     {\
-                    \"variant\":\"TTTok\",\
+                    \"variant\":\"TTToken\",\
                     \"fields\":[\
                     null,\
                     {\
@@ -915,7 +915,7 @@ mod test {
                     ]\
                     },\
                     {\
-                    \"variant\":\"TTTok\",\
+                    \"variant\":\"TTToken\",\
                     \"fields\":[\
                     null,\
                     \"RPAREN\"\
@@ -925,18 +925,18 @@ mod test {
                     ]\
                     },\
                     {\
-                    \"variant\":\"TTDelim\",\
+                    \"variant\":\"TTDelimited\",\
                     \"fields\":[\
                     [\
                     {\
-                    \"variant\":\"TTTok\",\
+                    \"variant\":\"TTToken\",\
                     \"fields\":[\
                     null,\
                     \"LBRACE\"\
                     ]\
                     },\
                     {\
-                    \"variant\":\"TTTok\",\
+                    \"variant\":\"TTToken\",\
                     \"fields\":[\
                     null,\
                     {\
@@ -949,14 +949,14 @@ mod test {
                     ]\
                     },\
                     {\
-                    \"variant\":\"TTTok\",\
+                    \"variant\":\"TTToken\",\
                     \"fields\":[\
                     null,\
                     \"SEMI\"\
                     ]\
                     },\
                     {\
-                    \"variant\":\"TTTok\",\
+                    \"variant\":\"TTToken\",\
                     \"fields\":[\
                     null,\
                     \"RBRACE\"\


@@ -48,7 +48,7 @@ use ast::{StmtExpr, StmtSemi, StmtMac, StructDef, StructField};
 use ast::{StructVariantKind, BiSub};
 use ast::StrStyle;
 use ast::{SelfExplicit, SelfRegion, SelfStatic, SelfValue};
-use ast::{Delimiter, TokenTree, TraitItem, TraitRef, TTDelim, TTSeq, TTTok};
+use ast::{Delimiter, TokenTree, TraitItem, TraitRef, TTDelimited, TTSequence, TTToken};
 use ast::{TTNonterminal, TupleVariantKind, Ty, Ty_, TyBot};
 use ast::{TypeField, TyFixedLengthVec, TyClosure, TyProc, TyBareFn};
 use ast::{TyTypeof, TyInfer, TypeMethod};
@@ -2526,7 +2526,7 @@ impl<'a> Parser<'a> {
     /// parse a single token tree from the input.
     pub fn parse_token_tree(&mut self) -> TokenTree {
         // FIXME #6994: currently, this is too eager. It
-        // parses token trees but also identifies TTSeq's
+        // parses token trees but also identifies TTSequence's
        // and TTNonterminal's; it's too early to know yet
         // whether something will be a nonterminal or a seq
         // yet.
@@ -2568,13 +2568,13 @@ impl<'a> Parser<'a> {
                 let seq = match seq {
                     Spanned { node, .. } => node,
                 };
-                TTSeq(mk_sp(sp.lo, p.span.hi), Rc::new(seq), s, z)
+                TTSequence(mk_sp(sp.lo, p.span.hi), Rc::new(seq), s, z)
             } else {
                 TTNonterminal(sp, p.parse_ident())
             }
         }
         _ => {
-            TTTok(p.span, p.bump_and_get())
+            TTToken(p.span, p.bump_and_get())
         }
     }
 }
@@ -2615,7 +2615,7 @@ impl<'a> Parser<'a> {
                 // Expand to cover the entire delimited token tree
                 let span = Span { hi: self.span.hi, ..pre_span };
-                TTDelim(span, open, Rc::new(tts), close)
+                TTDelimited(span, open, Rc::new(tts), close)
             }
             _ => parse_non_delim_tt_tok(self)
         }


@@ -1020,14 +1020,14 @@ impl<'a> State<'a> {
     /// expression arguments as expressions). It can be done! I think.
     pub fn print_tt(&mut self, tt: &ast::TokenTree) -> IoResult<()> {
         match *tt {
-            ast::TTDelim(_, ref open, ref tts, ref close) => {
+            ast::TTDelimited(_, ref open, ref tts, ref close) => {
                 try!(word(&mut self.s, parse::token::to_string(&open.token).as_slice()));
                 try!(space(&mut self.s));
                 try!(self.print_tts(tts.as_slice()));
                 try!(space(&mut self.s));
                 word(&mut self.s, parse::token::to_string(&close.token).as_slice())
             },
-            ast::TTTok(_, ref tk) => {
+            ast::TTToken(_, ref tk) => {
                 try!(word(&mut self.s, parse::token::to_string(tk).as_slice()));
                 match *tk {
                     parse::token::DOC_COMMENT(..) => {
@@ -1036,7 +1036,7 @@ impl<'a> State<'a> {
                     _ => Ok(())
                 }
             }
-            ast::TTSeq(_, ref tts, ref sep, zerok) => {
+            ast::TTSequence(_, ref tts, ref sep, zerok) => {
                 try!(word(&mut self.s, "$("));
                 for tt_elt in (*tts).iter() {
                     try!(self.print_tt(tt_elt));


@@ -18,7 +18,7 @@ extern crate rustc;
 
 use syntax::codemap::Span;
 use syntax::parse::token::{IDENT, get_ident};
-use syntax::ast::{TokenTree, TTTok};
+use syntax::ast::{TokenTree, TTToken};
 use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacExpr};
 use syntax::ext::build::AstBuilder; // trait for expr_uint
 use rustc::plugin::Registry;
@@ -39,7 +39,7 @@ fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
         ("I", 1)];
 
     let text = match args {
-        [TTTok(_, IDENT(s, _))] => get_ident(s).to_string(),
+        [TTToken(_, IDENT(s, _))] => get_ident(s).to_string(),
        _ => {
             cx.span_err(sp, "argument should be a single identifier");
             return DummyResult::any(sp);
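
Assuming the plugin above is registered under the macro name `rn` (an assumption; the `Registry` call and crate name are outside these hunks), a client crate would invoke it roughly like this sketch:

```rust
#![feature(phase)]

// Hypothetical crate name for the plugin shown above.
#[phase(plugin)]
extern crate roman_numerals;

fn main() {
    // Expands a single identifier spelling a roman numeral into a uint
    // at compile time, via expand_rn's match on [TTToken(_, IDENT(..))].
    assert_eq!(rn!(MMXIV), 2014u);
}
```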