2014-11-06 08:05:53 +00:00
|
|
|
pub use self::BinOpToken::*;
|
|
|
|
pub use self::Nonterminal::*;
|
|
|
|
pub use self::DelimToken::*;
|
2014-11-18 23:17:40 +00:00
|
|
|
pub use self::Lit::*;
|
2014-11-06 08:05:53 +00:00
|
|
|
pub use self::Token::*;
|
|
|
|
|
2016-09-14 05:57:16 +00:00
|
|
|
use ast::{self};
|
2017-09-15 15:28:34 +00:00
|
|
|
use parse::ParseSess;
|
|
|
|
use print::pprust;
|
2014-09-13 16:06:01 +00:00
|
|
|
use ptr::P;
|
2017-03-29 01:55:01 +00:00
|
|
|
use serialize::{Decodable, Decoder, Encodable, Encoder};
|
2016-11-16 08:21:52 +00:00
|
|
|
use symbol::keywords;
|
2017-09-15 15:28:34 +00:00
|
|
|
use syntax::parse::parse_stream_from_source_str;
|
2018-05-13 13:14:43 +00:00
|
|
|
use syntax_pos::{self, Span, FileName};
|
2018-05-21 16:02:50 +00:00
|
|
|
use syntax_pos::symbol::{self, Symbol};
|
2018-09-09 01:07:02 +00:00
|
|
|
use tokenstream::{self, DelimSpan, TokenStream, TokenTree};
|
2012-12-23 22:41:37 +00:00
|
|
|
|
2017-06-05 01:41:33 +00:00
|
|
|
use std::{cmp, fmt};
|
2018-04-19 02:36:48 +00:00
|
|
|
use std::mem;
|
2018-03-26 18:10:05 +00:00
|
|
|
use rustc_data_structures::sync::{Lrc, Lock};
|
2010-08-18 18:35:12 +00:00
|
|
|
|
2018-03-20 22:58:25 +00:00
|
|
|
#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
/// A binary-operator token, carried as the payload of `Token::BinOp` /
/// `Token::BinOpEq` (i.e. `+` vs `+=` share one `BinOpToken`).
pub enum BinOpToken {
    /// `+`
    Plus,
    /// `-`
    Minus,
    /// `*`
    Star,
    /// `/`
    Slash,
    /// `%`
    Percent,
    /// `^`
    Caret,
    /// `&`
    And,
    /// `|`
    Or,
    /// `<<`
    Shl,
    /// `>>`
    Shr,
}
|
|
|
|
|
2015-01-07 01:53:18 +00:00
|
|
|
/// A delimiter token
#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum DelimToken {
    /// A round parenthesis: `(` or `)`
    Paren,
    /// A square bracket: `[` or `]`
    Bracket,
    /// A curly brace: `{` or `}`
    Brace,
    /// An empty delimiter (no characters in the source; see `DelimToken::len`)
    NoDelim,
}
|
|
|
|
|
2017-01-23 04:58:15 +00:00
|
|
|
impl DelimToken {
|
2017-01-27 11:00:10 +00:00
|
|
|
pub fn len(self) -> usize {
|
|
|
|
if self == NoDelim { 0 } else { 1 }
|
2017-01-23 04:58:15 +00:00
|
|
|
}
|
2017-05-12 18:05:39 +00:00
|
|
|
|
|
|
|
pub fn is_empty(self) -> bool {
|
|
|
|
self == NoDelim
|
|
|
|
}
|
2017-01-23 04:58:15 +00:00
|
|
|
}
|
|
|
|
|
2018-03-20 22:58:25 +00:00
|
|
|
#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
/// The kind + symbol of a literal token. The `ast::Name` is the interned
/// source text of the literal body (without quotes/suffix).
pub enum Lit {
    /// A byte literal, e.g. `b'a'`
    Byte(ast::Name),
    /// A character literal, e.g. `'a'`
    Char(ast::Name),
    /// A literal the lexer rejected; kept so parsing can continue for recovery
    Err(ast::Name),
    /// An integer literal, e.g. `42`
    Integer(ast::Name),
    /// A float literal, e.g. `1.5`
    Float(ast::Name),
    /// A cooked string literal, e.g. `"a"`
    Str_(ast::Name),
    StrRaw(ast::Name, u16), /* raw str delimited by n hash symbols */
    /// A byte string literal, e.g. `b"a"`
    ByteStr(ast::Name),
    ByteStrRaw(ast::Name, u16), /* raw byte str delimited by n hash symbols */
}
|
|
|
|
|
2014-11-19 04:48:38 +00:00
|
|
|
impl Lit {
    /// Human-readable name of this literal kind, for use in diagnostics
    /// (e.g. "unterminated byte literal").
    crate fn literal_name(&self) -> &'static str {
        match *self {
            Byte(_) => "byte literal",
            Char(_) => "char literal",
            Err(_) => "invalid literal",
            Integer(_) => "integer literal",
            Float(_) => "float literal",
            Str_(_) | StrRaw(..) => "string literal",
            ByteStr(_) | ByteStrRaw(..) => "byte string literal"
        }
    }

    // See comments in `interpolated_to_tokenstream` for why we care about
    // *probably* equal here rather than actual equality
    //
    // Only the variant (kind) is compared; the interned symbol and raw-string
    // hash counts are deliberately ignored.
    fn probably_equal_for_proc_macro(&self, other: &Lit) -> bool {
        mem::discriminant(self) == mem::discriminant(other)
    }
}
|
|
|
|
|
2018-03-22 05:38:24 +00:00
|
|
|
pub(crate) fn ident_can_begin_expr(ident: ast::Ident, is_raw: bool) -> bool {
|
2018-03-10 05:56:40 +00:00
|
|
|
let ident_token: Token = Ident(ident, is_raw);
|
2017-01-27 05:51:20 +00:00
|
|
|
|
2017-06-29 10:16:35 +00:00
|
|
|
!ident_token.is_reserved_ident() ||
|
2017-01-27 05:51:20 +00:00
|
|
|
ident_token.is_path_segment_keyword() ||
|
|
|
|
[
|
2018-08-01 16:50:15 +00:00
|
|
|
keywords::Async.name(),
|
2017-03-03 22:41:07 +00:00
|
|
|
keywords::Do.name(),
|
2017-01-27 05:51:20 +00:00
|
|
|
keywords::Box.name(),
|
|
|
|
keywords::Break.name(),
|
|
|
|
keywords::Continue.name(),
|
|
|
|
keywords::False.name(),
|
|
|
|
keywords::For.name(),
|
|
|
|
keywords::If.name(),
|
|
|
|
keywords::Loop.name(),
|
|
|
|
keywords::Match.name(),
|
|
|
|
keywords::Move.name(),
|
|
|
|
keywords::Return.name(),
|
|
|
|
keywords::True.name(),
|
|
|
|
keywords::Unsafe.name(),
|
|
|
|
keywords::While.name(),
|
2016-12-26 13:34:03 +00:00
|
|
|
keywords::Yield.name(),
|
2018-01-29 09:53:20 +00:00
|
|
|
keywords::Static.name(),
|
2017-01-27 05:51:20 +00:00
|
|
|
].contains(&ident.name)
|
|
|
|
}
|
|
|
|
|
2018-03-10 05:56:40 +00:00
|
|
|
fn ident_can_begin_type(ident: ast::Ident, is_raw: bool) -> bool {
|
|
|
|
let ident_token: Token = Ident(ident, is_raw);
|
2017-03-16 21:47:32 +00:00
|
|
|
|
2017-06-29 10:16:35 +00:00
|
|
|
!ident_token.is_reserved_ident() ||
|
2017-03-16 21:47:32 +00:00
|
|
|
ident_token.is_path_segment_keyword() ||
|
|
|
|
[
|
2018-03-08 11:27:23 +00:00
|
|
|
keywords::Underscore.name(),
|
2017-03-16 21:47:32 +00:00
|
|
|
keywords::For.name(),
|
|
|
|
keywords::Impl.name(),
|
|
|
|
keywords::Fn.name(),
|
|
|
|
keywords::Unsafe.name(),
|
|
|
|
keywords::Extern.name(),
|
|
|
|
keywords::Typeof.name(),
|
2018-09-15 17:18:49 +00:00
|
|
|
keywords::Dyn.name(),
|
2017-03-16 21:47:32 +00:00
|
|
|
].contains(&ident.name)
|
|
|
|
}
|
|
|
|
|
2018-06-26 21:57:27 +00:00
|
|
|
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)]
/// A single token produced by the lexer (or synthesized during expansion).
pub enum Token {
    /* Expression-operator symbols. */
    /// `=`
    Eq,
    /// `<`
    Lt,
    /// `<=`
    Le,
    /// `==`
    EqEq,
    /// `!=`
    Ne,
    /// `>=`
    Ge,
    /// `>`
    Gt,
    /// `&&`
    AndAnd,
    /// `||`
    OrOr,
    /// `!`
    Not,
    /// `~`
    Tilde,
    /// A binary operator such as `+` (see `BinOpToken`)
    BinOp(BinOpToken),
    /// A compound-assignment operator such as `+=`
    BinOpEq(BinOpToken),

    /* Structural symbols */
    At,
    Dot,
    DotDot,
    DotDotDot,
    DotDotEq,
    Comma,
    Semi,
    Colon,
    /// `::`
    ModSep,
    /// `->`
    RArrow,
    /// `<-`
    LArrow,
    /// `=>`
    FatArrow,
    Pound,
    Dollar,
    Question,
    /// Used by proc macros for representing lifetimes, not generated by lexer right now.
    SingleQuote,
    /// An opening delimiter, eg. `{`
    OpenDelim(DelimToken),
    /// A closing delimiter, eg. `}`
    CloseDelim(DelimToken),

    /* Literals */
    /// A literal plus its optional suffix (e.g. the `u8` of `1u8`)
    Literal(Lit, Option<ast::Name>),

    /* Name components */
    Ident(ast::Ident, /* is_raw */ bool),
    Lifetime(ast::Ident),

    // The `LazyTokenStream` is a pure function of the `Nonterminal`,
    // and so the `LazyTokenStream` can be ignored by Eq, Hash, etc.
    Interpolated(Lrc<(Nonterminal, LazyTokenStream)>),
    // Can be expanded into several tokens.
    /// Doc comment
    DocComment(ast::Name),

    // Junk. These carry no data because we don't really care about the data
    // they *would* carry, and don't really want to allocate a new ident for
    // them. Instead, users could extract that from the associated span.

    /// Whitespace
    Whitespace,
    /// Comment
    Comment,
    /// A `#!...` shebang line; payload is the rest of the line
    Shebang(ast::Name),

    /// End of the token stream
    Eof,
}
|
2010-08-18 18:35:12 +00:00
|
|
|
|
2018-12-10 22:06:51 +00:00
|
|
|
// `Token` is used a lot. Make sure it doesn't unintentionally get bigger.
// (Only checked on x86_64 since the size depends on pointer width.)
#[cfg(target_arch = "x86_64")]
static_assert!(MEM_SIZE_OF_STATEMENT: mem::size_of::<Token>() == 16);
|
|
|
|
|
2014-10-27 12:33:30 +00:00
|
|
|
impl Token {
|
2017-03-29 01:55:01 +00:00
|
|
|
    /// Wraps a `Nonterminal` in `Token::Interpolated`, pairing it with a
    /// fresh (empty) `LazyTokenStream` cache that is filled on demand by
    /// `interpolated_to_tokenstream`.
    pub fn interpolated(nt: Nonterminal) -> Token {
        Token::Interpolated(Lrc::new((nt, LazyTokenStream::new())))
    }
|
|
|
|
|
2018-03-10 05:56:40 +00:00
|
|
|
    /// Recovers a `Token` from an `ast::Ident`. This creates a raw identifier if necessary.
    /// (`is_raw_guess` decides rawness from the identifier itself, since the
    /// `ast::Ident` does not record whether it was written `r#...`.)
    pub fn from_ast_ident(ident: ast::Ident) -> Token {
        Ident(ident, ident.is_raw_guess())
    }
|
|
|
|
|
2018-05-31 22:53:30 +00:00
|
|
|
crate fn is_like_plus(&self) -> bool {
|
2018-05-25 21:09:32 +00:00
|
|
|
match *self {
|
|
|
|
BinOp(Plus) | BinOpEq(Plus) => true,
|
|
|
|
_ => false,
|
|
|
|
}
|
|
|
|
}
|
2015-04-18 01:18:46 +00:00
|
|
|
|
2014-10-27 12:33:30 +00:00
|
|
|
    /// Returns `true` if the token can appear at the start of an expression.
    crate fn can_begin_expr(&self) -> bool {
        match *self {
            Ident(ident, is_raw) =>
                ident_can_begin_expr(ident, is_raw), // value name or keyword
            OpenDelim(..) | // tuple, array or block
            Literal(..) | // literal
            Not | // operator not
            BinOp(Minus) | // unary minus
            BinOp(Star) | // dereference
            BinOp(Or) | OrOr | // closure
            BinOp(And) | // reference
            AndAnd | // double reference
            // DotDotDot is no longer supported, but we need some way to display the error
            DotDot | DotDotDot | DotDotEq | // range notation
            Lt | BinOp(Shl) | // associated path
            ModSep | // global path
            Lifetime(..) | // labeled loop
            Pound => true, // expression attributes
            // An interpolated fragment can begin an expression only if it is
            // itself an expression-like nonterminal.
            Interpolated(ref nt) => match nt.0 {
                NtLiteral(..) |
                NtIdent(..) |
                NtExpr(..) |
                NtBlock(..) |
                NtPath(..) |
                NtLifetime(..) => true,
                _ => false,
            },
            _ => false,
        }
    }
|
|
|
|
|
2017-01-17 18:18:29 +00:00
|
|
|
    /// Returns `true` if the token can appear at the start of a type.
    crate fn can_begin_type(&self) -> bool {
        match *self {
            Ident(ident, is_raw) =>
                ident_can_begin_type(ident, is_raw), // type name or keyword
            OpenDelim(Paren) | // tuple
            OpenDelim(Bracket) | // array
            Not | // never
            BinOp(Star) | // raw pointer
            BinOp(And) | // reference
            AndAnd | // double reference
            Question | // maybe bound in trait object
            Lifetime(..) | // lifetime bound in trait object
            Lt | BinOp(Shl) | // associated path
            ModSep => true, // global path
            // An interpolated fragment can begin a type only if it is itself
            // type-like.
            Interpolated(ref nt) => match nt.0 {
                NtIdent(..) | NtTy(..) | NtPath(..) | NtLifetime(..) => true,
                _ => false,
            },
            _ => false,
        }
    }
|
|
|
|
|
2017-10-10 14:33:19 +00:00
|
|
|
/// Returns `true` if the token can appear at the start of a generic bound.
|
2018-05-31 22:53:30 +00:00
|
|
|
crate fn can_begin_bound(&self) -> bool {
|
2017-10-10 14:33:19 +00:00
|
|
|
self.is_path_start() || self.is_lifetime() || self.is_keyword(keywords::For) ||
|
|
|
|
self == &Question || self == &OpenDelim(Paren)
|
|
|
|
}
|
|
|
|
|
2014-10-27 12:33:30 +00:00
|
|
|
/// Returns `true` if the token is any literal
|
2018-05-31 22:53:30 +00:00
|
|
|
crate fn is_lit(&self) -> bool {
|
2014-10-27 12:33:30 +00:00
|
|
|
match *self {
|
2016-08-26 16:23:42 +00:00
|
|
|
Literal(..) => true,
|
|
|
|
_ => false,
|
2014-10-27 12:33:30 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-04-09 23:08:47 +00:00
|
|
|
    /// Returns `true` if the token is any literal, a minus (which can follow a literal,
    /// for example a '-42', or one of the boolean idents).
    crate fn can_begin_literal_or_bool(&self) -> bool {
        match *self {
            Literal(..) => true,
            BinOp(Minus) => true,
            // Only non-raw `true`/`false` count; `r#true` is a plain ident.
            Ident(ident, false) if ident.name == keywords::True.name() => true,
            Ident(ident, false) if ident.name == keywords::False.name() => true,
            Interpolated(ref nt) => match nt.0 {
                NtLiteral(..) => true,
                _ => false,
            },
            _ => false,
        }
    }
|
|
|
|
|
2018-03-24 16:49:50 +00:00
|
|
|
    /// Returns an identifier if this token is an identifier.
    /// Also looks through an interpolated `NtIdent` fragment.
    pub fn ident(&self) -> Option<(ast::Ident, /* is_raw */ bool)> {
        match *self {
            Ident(ident, is_raw) => Some((ident, is_raw)),
            Interpolated(ref nt) => match nt.0 {
                NtIdent(ident, is_raw) => Some((ident, is_raw)),
                _ => None,
            },
            _ => None,
        }
    }
|
2018-03-24 16:49:50 +00:00
|
|
|
/// Returns a lifetime identifier if this token is a lifetime.
|
|
|
|
pub fn lifetime(&self) -> Option<ast::Ident> {
|
|
|
|
match *self {
|
|
|
|
Lifetime(ident) => Some(ident),
|
|
|
|
Interpolated(ref nt) => match nt.0 {
|
|
|
|
NtLifetime(ident) => Some(ident),
|
|
|
|
_ => None,
|
|
|
|
},
|
|
|
|
_ => None,
|
|
|
|
}
|
|
|
|
}
|
2017-03-29 07:17:18 +00:00
|
|
|
    /// Returns `true` if the token is an identifier (including an
    /// interpolated `NtIdent`; see `Token::ident`).
    pub fn is_ident(&self) -> bool {
        self.ident().is_some()
    }
|
2018-03-24 16:49:50 +00:00
|
|
|
    /// Returns `true` if the token is a lifetime (including an interpolated
    /// `NtLifetime`; see `Token::lifetime`).
    crate fn is_lifetime(&self) -> bool {
        self.lifetime().is_some()
    }
|
2017-03-29 07:17:18 +00:00
|
|
|
|
2018-03-22 05:38:24 +00:00
|
|
|
/// Returns `true` if the token is a identifier whose name is the given
|
|
|
|
/// string slice.
|
2018-05-31 22:53:30 +00:00
|
|
|
crate fn is_ident_named(&self, name: &str) -> bool {
|
2018-03-22 05:38:24 +00:00
|
|
|
match self.ident() {
|
2018-05-26 12:12:38 +00:00
|
|
|
Some((ident, _)) => ident.as_str() == name,
|
2018-03-22 05:38:24 +00:00
|
|
|
None => false
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2014-10-27 12:33:30 +00:00
|
|
|
/// Returns `true` if the token is an interpolated path.
|
2018-05-31 22:53:30 +00:00
|
|
|
fn is_path(&self) -> bool {
|
2016-11-02 03:03:55 +00:00
|
|
|
if let Interpolated(ref nt) = *self {
|
2017-03-29 01:55:01 +00:00
|
|
|
if let NtPath(..) = nt.0 {
|
2016-11-02 03:03:55 +00:00
|
|
|
return true;
|
|
|
|
}
|
2014-10-27 12:33:30 +00:00
|
|
|
}
|
2016-11-02 03:03:55 +00:00
|
|
|
false
|
2014-10-27 12:33:30 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
/// Returns `true` if the token is either the `mut` or `const` keyword.
|
2018-05-31 22:53:30 +00:00
|
|
|
crate fn is_mutability(&self) -> bool {
|
2014-10-27 12:33:30 +00:00
|
|
|
self.is_keyword(keywords::Mut) ||
|
|
|
|
self.is_keyword(keywords::Const)
|
|
|
|
}
|
|
|
|
|
2018-05-31 22:53:30 +00:00
|
|
|
    /// Returns `true` if the token can start a qualified path: `<` or `<<`
    /// (the latter because `<<` may lex as one token where two `<`s begin
    /// nested qualified paths).
    crate fn is_qpath_start(&self) -> bool {
        self == &Lt || self == &BinOp(Shl)
    }
|
|
|
|
|
2018-05-31 22:53:30 +00:00
|
|
|
    /// Returns `true` if the token can start a path: `::`, a qualified-path
    /// opener, an interpolated path, a path-segment keyword, or any
    /// non-reserved identifier.
    crate fn is_path_start(&self) -> bool {
        self == &ModSep || self.is_qpath_start() || self.is_path() ||
        self.is_path_segment_keyword() || self.is_ident() && !self.is_reserved_ident()
    }
|
|
|
|
|
2014-10-27 12:33:30 +00:00
|
|
|
    /// Returns `true` if the token is a given keyword, `kw`.
    /// A raw identifier (`r#kw`) never counts as the keyword.
    pub fn is_keyword(&self, kw: keywords::Keyword) -> bool {
        self.ident().map(|(ident, is_raw)| ident.name == kw.name() && !is_raw).unwrap_or(false)
    }
|
|
|
|
|
2016-04-16 01:10:59 +00:00
|
|
|
pub fn is_path_segment_keyword(&self) -> bool {
|
2018-03-18 17:16:02 +00:00
|
|
|
match self.ident() {
|
2018-05-13 13:14:43 +00:00
|
|
|
Some((id, false)) => id.is_path_segment_keyword(),
|
2018-03-18 17:16:02 +00:00
|
|
|
_ => false,
|
2014-09-16 01:27:28 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-06-29 10:16:35 +00:00
|
|
|
// Returns true for reserved identifiers used internally for elided lifetimes,
|
|
|
|
// unnamed method parameters, crate root module, error recovery etc.
|
|
|
|
pub fn is_special_ident(&self) -> bool {
|
2018-03-18 17:16:02 +00:00
|
|
|
match self.ident() {
|
2018-05-13 13:14:43 +00:00
|
|
|
Some((id, false)) => id.is_special(),
|
2017-06-29 10:16:35 +00:00
|
|
|
_ => false,
|
|
|
|
}
|
2014-10-27 12:33:30 +00:00
|
|
|
}
|
|
|
|
|
2017-06-29 10:16:35 +00:00
|
|
|
/// Returns `true` if the token is a keyword used in the language.
|
2018-05-31 22:53:30 +00:00
|
|
|
crate fn is_used_keyword(&self) -> bool {
|
2018-03-18 17:16:02 +00:00
|
|
|
match self.ident() {
|
2018-05-13 13:14:43 +00:00
|
|
|
Some((id, false)) => id.is_used_keyword(),
|
2014-10-27 12:33:30 +00:00
|
|
|
_ => false,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2016-04-16 15:05:06 +00:00
|
|
|
/// Returns `true` if the token is a keyword reserved for possible future use.
|
2018-05-31 22:53:30 +00:00
|
|
|
crate fn is_unused_keyword(&self) -> bool {
|
2018-03-18 17:16:02 +00:00
|
|
|
match self.ident() {
|
2018-05-13 13:14:43 +00:00
|
|
|
Some((id, false)) => id.is_unused_keyword(),
|
2014-10-27 12:33:30 +00:00
|
|
|
_ => false,
|
|
|
|
}
|
|
|
|
}
|
2017-03-17 23:41:09 +00:00
|
|
|
|
2018-03-24 16:49:50 +00:00
|
|
|
/// Returns `true` if the token is either a special identifier or a keyword.
|
|
|
|
pub fn is_reserved_ident(&self) -> bool {
|
|
|
|
match self.ident() {
|
2018-05-13 13:14:43 +00:00
|
|
|
Some((id, false)) => id.is_reserved(),
|
2018-03-24 16:49:50 +00:00
|
|
|
_ => false,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-05-31 22:53:30 +00:00
|
|
|
    /// Joins `self` with an immediately adjacent token `joint` into the
    /// single multi-character token they spell together (e.g. `=` + `=`
    /// becomes `==`, `<` + `-` becomes `<-`). Returns `None` when the pair
    /// does not form a token.
    crate fn glue(self, joint: Token) -> Option<Token> {
        Some(match self {
            Eq => match joint {
                Eq => EqEq,
                Gt => FatArrow,
                _ => return None,
            },
            Lt => match joint {
                Eq => Le,
                Lt => BinOp(Shl),
                Le => BinOpEq(Shl),
                BinOp(Minus) => LArrow,
                _ => return None,
            },
            Gt => match joint {
                Eq => Ge,
                Gt => BinOp(Shr),
                Ge => BinOpEq(Shr),
                _ => return None,
            },
            Not => match joint {
                Eq => Ne,
                _ => return None,
            },
            BinOp(op) => match joint {
                Eq => BinOpEq(op),
                BinOp(And) if op == And => AndAnd,
                BinOp(Or) if op == Or => OrOr,
                Gt if op == Minus => RArrow,
                _ => return None,
            },
            Dot => match joint {
                Dot => DotDot,
                DotDot => DotDotDot,
                _ => return None,
            },
            DotDot => match joint {
                Dot => DotDotDot,
                Eq => DotDotEq,
                _ => return None,
            },
            Colon => match joint {
                Colon => ModSep,
                _ => return None,
            },
            // `'` followed by a non-raw identifier glues into a lifetime
            // token, interning a fresh `'name` symbol.
            SingleQuote => match joint {
                Ident(ident, false) => {
                    let name = Symbol::intern(&format!("'{}", ident));
                    Lifetime(symbol::Ident {
                        name,
                        span: ident.span,
                    })
                }
                _ => return None,
            },

            // All remaining tokens never start a multi-character token.
            Le | EqEq | Ne | Ge | AndAnd | OrOr | Tilde | BinOpEq(..) | At | DotDotDot |
            DotDotEq | Comma | Semi | ModSep | RArrow | LArrow | FatArrow | Pound | Dollar |
            Question | OpenDelim(..) | CloseDelim(..) |
            Literal(..) | Ident(..) | Lifetime(..) | Interpolated(..) | DocComment(..) |
            Whitespace | Comment | Shebang(..) | Eof => return None,
        })
    }
|
2017-07-05 15:42:13 +00:00
|
|
|
|
2017-10-24 13:04:01 +00:00
|
|
|
    /// Returns tokens that are likely to be typed accidentally instead of the current token.
    /// Enables better error recovery when the wrong token is found.
    crate fn similar_tokens(&self) -> Option<Vec<Token>> {
        match *self {
            Comma => Some(vec![Dot, Lt]),
            Semi => Some(vec![Colon]),
            _ => None
        }
    }
|
|
|
|
|
2017-09-15 15:28:34 +00:00
|
|
|
    /// Converts an `Interpolated` token back into a `TokenStream`, preferring
    /// a cached lossless stream attached to the AST node and falling back to
    /// pretty-print + reparse (which loses span information).
    ///
    /// Panics if `self` is not `Token::Interpolated`.
    pub fn interpolated_to_tokenstream(&self, sess: &ParseSess, span: Span)
        -> TokenStream
    {
        let nt = match *self {
            Token::Interpolated(ref nt) => nt,
            _ => panic!("only works on interpolated tokens"),
        };

        // An `Interpolated` token means that we have a `Nonterminal`
        // which is often a parsed AST item. At this point we now need
        // to convert the parsed AST to an actual token stream, e.g.
        // un-parse it basically.
        //
        // Unfortunately there's not really a great way to do that in a
        // guaranteed lossless fashion right now. The fallback here is
        // to just stringify the AST node and reparse it, but this loses
        // all span information.
        //
        // As a result, some AST nodes are annotated with the token
        // stream they came from. Here we attempt to extract these
        // lossless token streams before we fall back to the
        // stringification.
        let mut tokens = None;

        match nt.0 {
            Nonterminal::NtItem(ref item) => {
                tokens = prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span);
            }
            Nonterminal::NtTraitItem(ref item) => {
                tokens = prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span);
            }
            Nonterminal::NtImplItem(ref item) => {
                tokens = prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span);
            }
            // Single-token nonterminals can be rebuilt exactly.
            Nonterminal::NtIdent(ident, is_raw) => {
                let token = Token::Ident(ident, is_raw);
                tokens = Some(TokenTree::Token(ident.span, token).into());
            }
            Nonterminal::NtLifetime(ident) => {
                let token = Token::Lifetime(ident);
                tokens = Some(TokenTree::Token(ident.span, token).into());
            }
            Nonterminal::NtTT(ref tt) => {
                tokens = Some(tt.clone().into());
            }
            _ => {}
        }

        // The lazily-computed (and memoized, via `force`) stringify+reparse
        // fallback stream.
        let tokens_for_real = nt.1.force(|| {
            // FIXME(#43081): Avoid this pretty-print + reparse hack
            let source = pprust::token_to_string(self);
            let filename = FileName::macro_expansion_source_code(&source);
            parse_stream_from_source_str(filename, source, sess, Some(span))
        });

        // During early phases of the compiler the AST could get modified
        // directly (e.g., attributes added or removed) and the internal cache
        // of tokens may not be invalidated or updated. Consequently if the
        // "lossless" token stream disagrees with our actual stringification
        // (which has historically been much more battle-tested) then we go
        // with the lossy stream anyway (losing span information).
        //
        // Note that the comparison isn't `==` here to avoid comparing spans,
        // but it *also* is a "probable" equality which is a pretty weird
        // definition. We mostly want to catch actual changes to the AST
        // like a `#[cfg]` being processed or some weird `macro_rules!`
        // expansion.
        //
        // What we *don't* want to catch is the fact that a user-defined
        // literal like `0xf` is stringified as `15`, causing the cached token
        // stream to not be literal `==` token-wise (ignoring spans) to the
        // token stream we got from stringification.
        //
        // Instead the "probably equal" check here is "does each token
        // recursively have the same discriminant?" We basically don't look at
        // the token values here and assume that such fine grained token stream
        // modifications, including adding/removing typically non-semantic
        // tokens such as extra braces and commas, don't happen.
        if let Some(tokens) = tokens {
            if tokens.probably_equal_for_proc_macro(&tokens_for_real) {
                return tokens
            }
            info!("cached tokens found, but they're not \"probably equal\", \
                   going with stringified version");
        }
        return tokens_for_real
    }
|
2018-04-19 02:36:48 +00:00
|
|
|
|
|
|
|
    // See comments in `interpolated_to_tokenstream` for why we care about
    // *probably* equal here rather than actual equality
    //
    // Compares token kinds (and the payloads that are cheap and meaningful to
    // compare), deliberately ignoring spans.
    crate fn probably_equal_for_proc_macro(&self, other: &Token) -> bool {
        if mem::discriminant(self) != mem::discriminant(other) {
            return false
        }
        match (self, other) {
            (&Eq, &Eq) |
            (&Lt, &Lt) |
            (&Le, &Le) |
            (&EqEq, &EqEq) |
            (&Ne, &Ne) |
            (&Ge, &Ge) |
            (&Gt, &Gt) |
            (&AndAnd, &AndAnd) |
            (&OrOr, &OrOr) |
            (&Not, &Not) |
            (&Tilde, &Tilde) |
            (&At, &At) |
            (&Dot, &Dot) |
            (&DotDot, &DotDot) |
            (&DotDotDot, &DotDotDot) |
            (&DotDotEq, &DotDotEq) |
            (&Comma, &Comma) |
            (&Semi, &Semi) |
            (&Colon, &Colon) |
            (&ModSep, &ModSep) |
            (&RArrow, &RArrow) |
            (&LArrow, &LArrow) |
            (&FatArrow, &FatArrow) |
            (&Pound, &Pound) |
            (&Dollar, &Dollar) |
            (&Question, &Question) |
            (&Whitespace, &Whitespace) |
            (&Comment, &Comment) |
            (&Eof, &Eof) => true,

            (&BinOp(a), &BinOp(b)) |
            (&BinOpEq(a), &BinOpEq(b)) => a == b,

            (&OpenDelim(a), &OpenDelim(b)) |
            (&CloseDelim(a), &CloseDelim(b)) => a == b,

            (&DocComment(a), &DocComment(b)) |
            (&Shebang(a), &Shebang(b)) => a == b,

            (&Lifetime(a), &Lifetime(b)) => a.name == b.name,
            // `$crate` is allowed to match any identifier name.
            (&Ident(a, b), &Ident(c, d)) => b == d && (a.name == c.name ||
                                                       a.name == keywords::DollarCrate.name() ||
                                                       c.name == keywords::DollarCrate.name()),

            (&Literal(ref a, b), &Literal(ref c, d)) => {
                b == d && a.probably_equal_for_proc_macro(c)
            }

            // Interpolated fragments are never "probably equal".
            (&Interpolated(_), &Interpolated(_)) => false,

            // Unreachable unless a variant is added without updating this
            // match (the discriminant check above already matched kinds).
            _ => panic!("forgot to add a token?"),
        }
    }
|
2014-10-27 12:33:30 +00:00
|
|
|
}
|
|
|
|
|
2018-06-26 21:57:27 +00:00
|
|
|
#[derive(Clone, RustcEncodable, RustcDecodable)]
/// For interpolation during macro expansion.
/// Each variant carries an already-parsed AST fragment of the named kind.
pub enum Nonterminal {
    NtItem(P<ast::Item>),
    NtBlock(P<ast::Block>),
    NtStmt(ast::Stmt),
    NtPat(P<ast::Pat>),
    NtExpr(P<ast::Expr>),
    NtTy(P<ast::Ty>),
    NtIdent(ast::Ident, /* is_raw */ bool),
    NtLifetime(ast::Ident),
    NtLiteral(P<ast::Expr>),
    /// Stuff inside brackets for attributes
    NtMeta(ast::MetaItem),
    NtPath(ast::Path),
    NtVis(ast::Visibility),
    NtTT(TokenTree),
    // These are not exposed to macros, but are used by quasiquote.
    NtArm(ast::Arm),
    NtImplItem(ast::ImplItem),
    NtTraitItem(ast::TraitItem),
    NtForeignItem(ast::ForeignItem),
    NtGenerics(ast::Generics),
    NtWhereClause(ast::WhereClause),
    NtArg(ast::Arg),
}
|
|
|
|
|
2018-03-24 13:00:44 +00:00
|
|
|
impl PartialEq for Nonterminal {
    /// Only the token-like nonterminals (`NtIdent`, `NtLifetime`, `NtTT`)
    /// compare structurally; everything else is treated as unequal.
    fn eq(&self, rhs: &Self) -> bool {
        match (self, rhs) {
            (NtIdent(ident_lhs, is_raw_lhs), NtIdent(ident_rhs, is_raw_rhs)) =>
                ident_lhs == ident_rhs && is_raw_lhs == is_raw_rhs,
            (NtLifetime(ident_lhs), NtLifetime(ident_rhs)) => ident_lhs == ident_rhs,
            (NtTT(tt_lhs), NtTT(tt_rhs)) => tt_lhs == tt_rhs,
            // FIXME: Assume that all "complex" nonterminal are not equal, we can't compare them
            // correctly based on data from AST. This will prevent them from matching each other
            // in macros. The comparison will become possible only when each nonterminal has an
            // attached token stream from which it was parsed.
            _ => false,
        }
    }
}
|
|
|
|
|
2015-01-20 23:45:07 +00:00
|
|
|
impl fmt::Debug for Nonterminal {
|
2014-02-28 09:23:06 +00:00
|
|
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
|
|
match *self {
|
|
|
|
NtItem(..) => f.pad("NtItem(..)"),
|
|
|
|
NtBlock(..) => f.pad("NtBlock(..)"),
|
|
|
|
NtStmt(..) => f.pad("NtStmt(..)"),
|
|
|
|
NtPat(..) => f.pad("NtPat(..)"),
|
|
|
|
NtExpr(..) => f.pad("NtExpr(..)"),
|
|
|
|
NtTy(..) => f.pad("NtTy(..)"),
|
|
|
|
NtIdent(..) => f.pad("NtIdent(..)"),
|
2018-04-09 23:08:47 +00:00
|
|
|
NtLiteral(..) => f.pad("NtLiteral(..)"),
|
2014-03-26 23:14:07 +00:00
|
|
|
NtMeta(..) => f.pad("NtMeta(..)"),
|
2014-02-28 09:23:06 +00:00
|
|
|
NtPath(..) => f.pad("NtPath(..)"),
|
|
|
|
NtTT(..) => f.pad("NtTT(..)"),
|
Interpolate AST nodes in quasiquote.
This changes the `ToTokens` implementations for expressions, statements,
etc. with almost-trivial ones that produce `Interpolated(*Nt(...))`
pseudo-tokens. In this way, quasiquote now works the same way as macros
do: already-parsed AST fragments are used as-is, not reparsed.
The `ToSource` trait is removed. Quasiquote no longer involves
pretty-printing at all, which removes the need for the
`encode_with_hygiene` hack. All associated machinery is removed.
A new `Nonterminal` is added, NtArm, which the parser now interpolates.
This is just for quasiquote, not macros (although it could be in the
future).
`ToTokens` is no longer implemented for `Arg` (although this could be
added again) and `Generics` (which I don't think makes sense).
This breaks any compiler extensions that relied on the ability of
`ToTokens` to turn AST fragments back into inspectable token trees. For
this reason, this closes #16987.
As such, this is a [breaking-change].
Fixes #16472.
Fixes #15962.
Fixes #17397.
Fixes #16617.
2015-03-05 20:06:49 +00:00
|
|
|
NtArm(..) => f.pad("NtArm(..)"),
|
|
|
|
NtImplItem(..) => f.pad("NtImplItem(..)"),
|
|
|
|
NtTraitItem(..) => f.pad("NtTraitItem(..)"),
|
2018-03-11 02:16:26 +00:00
|
|
|
NtForeignItem(..) => f.pad("NtForeignItem(..)"),
|
2015-05-02 17:55:41 +00:00
|
|
|
NtGenerics(..) => f.pad("NtGenerics(..)"),
|
|
|
|
NtWhereClause(..) => f.pad("NtWhereClause(..)"),
|
2015-11-11 20:19:01 +00:00
|
|
|
NtArg(..) => f.pad("NtArg(..)"),
|
2016-04-24 16:04:01 +00:00
|
|
|
NtVis(..) => f.pad("NtVis(..)"),
|
2017-05-10 00:30:47 +00:00
|
|
|
NtLifetime(..) => f.pad("NtLifetime(..)"),
|
2014-02-28 09:23:06 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2017-03-17 23:41:09 +00:00
|
|
|
|
2018-05-31 22:53:30 +00:00
|
|
|
/// Reports whether `tok` acts as an operator: anything that is not a
/// delimiter, literal, doc comment, identifier, lifetime, interpolated
/// fragment, trivia, or EOF.
crate fn is_op(tok: &Token) -> bool {
    let is_non_op = match *tok {
        OpenDelim(..) | CloseDelim(..) | Literal(..) | DocComment(..) |
        Ident(..) | Lifetime(..) | Interpolated(..) |
        Whitespace | Comment | Shebang(..) | Eof => true,
        _ => false,
    };
    !is_non_op
}
|
2017-03-29 01:55:01 +00:00
|
|
|
|
2018-03-26 18:10:05 +00:00
|
|
|
/// A token stream that is computed on demand and cached thereafter.
///
/// The inner `Lock` holds `None` until the stream is first forced (see
/// `LazyTokenStream::force` below).
#[derive(Clone)]
pub struct LazyTokenStream(Lock<Option<TokenStream>>);
|
2017-06-05 01:41:33 +00:00
|
|
|
|
|
|
|
// `PartialEq` below always returns `true`, so equality is trivially
// reflexive, symmetric and transitive, and `Eq` holds.
impl cmp::Eq for LazyTokenStream {}
|
|
|
|
impl PartialEq for LazyTokenStream {
    // The cached stream is derived, recomputable data, so it is deliberately
    // ignored: every `LazyTokenStream` compares equal to every other.
    fn eq(&self, _: &LazyTokenStream) -> bool {
        true
    }
}
|
|
|
|
|
|
|
|
impl fmt::Debug for LazyTokenStream {
|
|
|
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
|
|
fmt::Debug::fmt(&self.clone().0.into_inner(), f)
|
|
|
|
}
|
|
|
|
}
|
2017-03-29 01:55:01 +00:00
|
|
|
|
|
|
|
impl LazyTokenStream {
|
2018-05-31 22:53:30 +00:00
|
|
|
fn new() -> Self {
|
2018-03-26 18:10:05 +00:00
|
|
|
LazyTokenStream(Lock::new(None))
|
2017-03-29 01:55:01 +00:00
|
|
|
}
|
|
|
|
|
2018-05-31 22:53:30 +00:00
|
|
|
fn force<F: FnOnce() -> TokenStream>(&self, f: F) -> TokenStream {
|
2018-03-26 18:10:05 +00:00
|
|
|
let mut opt_stream = self.0.lock();
|
2017-03-29 01:55:01 +00:00
|
|
|
if opt_stream.is_none() {
|
2018-03-26 18:10:05 +00:00
|
|
|
*opt_stream = Some(f());
|
2017-06-05 01:41:33 +00:00
|
|
|
}
|
2017-03-29 01:55:01 +00:00
|
|
|
opt_stream.clone().unwrap()
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl Encodable for LazyTokenStream {
|
|
|
|
fn encode<S: Encoder>(&self, _: &mut S) -> Result<(), S::Error> {
|
|
|
|
Ok(())
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl Decodable for LazyTokenStream {
|
|
|
|
fn decode<D: Decoder>(_: &mut D) -> Result<LazyTokenStream, D::Error> {
|
|
|
|
Ok(LazyTokenStream::new())
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl ::std::hash::Hash for LazyTokenStream {
|
2017-06-05 01:41:33 +00:00
|
|
|
fn hash<H: ::std::hash::Hasher>(&self, _hasher: &mut H) {}
|
2017-03-29 01:55:01 +00:00
|
|
|
}
|
2017-09-15 15:28:34 +00:00
|
|
|
|
|
|
|
fn prepend_attrs(sess: &ParseSess,
|
|
|
|
attrs: &[ast::Attribute],
|
|
|
|
tokens: Option<&tokenstream::TokenStream>,
|
|
|
|
span: syntax_pos::Span)
|
|
|
|
-> Option<tokenstream::TokenStream>
|
|
|
|
{
|
2017-12-09 01:32:04 +00:00
|
|
|
let tokens = tokens?;
|
2017-09-15 15:28:34 +00:00
|
|
|
if attrs.len() == 0 {
|
|
|
|
return Some(tokens.clone())
|
|
|
|
}
|
|
|
|
let mut builder = tokenstream::TokenStreamBuilder::new();
|
|
|
|
for attr in attrs {
|
|
|
|
assert_eq!(attr.style, ast::AttrStyle::Outer,
|
|
|
|
"inner attributes should prevent cached tokens from existing");
|
2018-07-19 14:06:31 +00:00
|
|
|
|
2018-10-30 14:11:24 +00:00
|
|
|
let source = pprust::attr_to_string(attr);
|
|
|
|
let macro_filename = FileName::macro_expansion_source_code(&source);
|
2018-07-19 14:06:31 +00:00
|
|
|
if attr.is_sugared_doc {
|
|
|
|
let stream = parse_stream_from_source_str(
|
2018-10-30 14:11:24 +00:00
|
|
|
macro_filename,
|
|
|
|
source,
|
2018-07-19 14:06:31 +00:00
|
|
|
sess,
|
|
|
|
Some(span),
|
|
|
|
);
|
|
|
|
builder.push(stream);
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
|
|
|
// synthesize # [ $path $tokens ] manually here
|
|
|
|
let mut brackets = tokenstream::TokenStreamBuilder::new();
|
|
|
|
|
|
|
|
// For simple paths, push the identifier directly
|
|
|
|
if attr.path.segments.len() == 1 && attr.path.segments[0].args.is_none() {
|
|
|
|
let ident = attr.path.segments[0].ident;
|
|
|
|
let token = Ident(ident, ident.as_str().starts_with("r#"));
|
|
|
|
brackets.push(tokenstream::TokenTree::Token(ident.span, token));
|
|
|
|
|
|
|
|
// ... and for more complicated paths, fall back to a reparse hack that
|
|
|
|
// should eventually be removed.
|
|
|
|
} else {
|
|
|
|
let stream = parse_stream_from_source_str(
|
2018-10-30 14:11:24 +00:00
|
|
|
macro_filename,
|
|
|
|
source,
|
2018-07-19 14:06:31 +00:00
|
|
|
sess,
|
|
|
|
Some(span),
|
|
|
|
);
|
|
|
|
brackets.push(stream);
|
|
|
|
}
|
|
|
|
|
|
|
|
brackets.push(attr.tokens.clone());
|
|
|
|
|
|
|
|
// The span we list here for `#` and for `[ ... ]` are both wrong in
|
|
|
|
// that it encompasses more than each token, but it hopefully is "good
|
|
|
|
// enough" for now at least.
|
|
|
|
builder.push(tokenstream::TokenTree::Token(attr.span, Pound));
|
2018-09-09 01:07:02 +00:00
|
|
|
let delim_span = DelimSpan::from_single(attr.span);
|
2018-11-29 23:02:04 +00:00
|
|
|
builder.push(tokenstream::TokenTree::Delimited(
|
|
|
|
delim_span, DelimToken::Bracket, brackets.build().into()));
|
2017-09-15 15:28:34 +00:00
|
|
|
}
|
|
|
|
builder.push(tokens.clone());
|
|
|
|
Some(builder.build())
|
|
|
|
}
|