pub use BinOpToken::*;
pub use Nonterminal::*;
pub use DelimToken::*;
pub use LitKind::*;
pub use TokenKind::*;

use crate::ast::{self};
use crate::parse::ParseSess;
use crate::print::pprust;
use crate::ptr::P;
use crate::symbol::kw;
use crate::syntax::parse::parse_stream_from_source_str;
use crate::tokenstream::{self, DelimSpan, TokenStream, TokenTree};

use syntax_pos::symbol::{self, Symbol};
use syntax_pos::{self, Span, FileName};
use log::info;

use std::fmt;
use std::mem;
#[cfg(target_arch = "x86_64")]
use rustc_data_structures::static_assert_size;
use rustc_data_structures::sync::Lrc;

#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum BinOpToken {
    Plus,
    Minus,
    Star,
    Slash,
    Percent,
    Caret,
    And,
    Or,
    Shl,
    Shr,
}

/// A delimiter token.
#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum DelimToken {
    /// A round parenthesis (i.e., `(` or `)`).
    Paren,
    /// A square bracket (i.e., `[` or `]`).
    Bracket,
    /// A curly brace (i.e., `{` or `}`).
    Brace,
    /// An empty delimiter.
    NoDelim,
}

impl DelimToken {
    pub fn len(self) -> usize {
        if self == NoDelim { 0 } else { 1 }
    }

    pub fn is_empty(self) -> bool {
        self == NoDelim
    }
}

#[derive(Clone, Copy, PartialEq, RustcEncodable, RustcDecodable, Debug)]
pub enum LitKind {
    Bool, // AST only, must never appear in a `Token`
    Byte,
    Char,
    Integer,
    Float,
    Str,
    StrRaw(u16), // raw string delimited by `n` hash symbols
    ByteStr,
    ByteStrRaw(u16), // raw byte string delimited by `n` hash symbols
    Err,
}
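
// For example, under these variants a raw string literal written with two hash
// marks, `r##"…"##`, would be lexed as `StrRaw(2)`: the `u16` payload records
// how many `#`s delimit the raw (byte) string.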

/// A literal token.
#[derive(Clone, Copy, PartialEq, RustcEncodable, RustcDecodable, Debug)]
pub struct Lit {
    pub kind: LitKind,
    pub symbol: Symbol,
    pub suffix: Option<Symbol>,
}

impl LitKind {
    /// An English article for the literal token kind.
    crate fn article(self) -> &'static str {
        match self {
            Integer | Err => "an",
            _ => "a",
        }
    }

    crate fn descr(self) -> &'static str {
        match self {
            Bool => panic!("literal token contains `Lit::Bool`"),
            Byte => "byte",
            Char => "char",
            Integer => "integer",
            Float => "float",
            Str | StrRaw(..) => "string",
            ByteStr | ByteStrRaw(..) => "byte string",
            Err => "error",
        }
    }

    crate fn may_have_suffix(self) -> bool {
        match self {
            Integer | Float | Err => true,
            _ => false,
        }
    }
}

impl Lit {
    pub fn new(kind: LitKind, symbol: Symbol, suffix: Option<Symbol>) -> Lit {
        Lit { kind, symbol, suffix }
    }
}
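
// As a sketch of the intended shape (not a call site from this file): the token
// for the literal `42usize` would be built roughly as
// `Lit::new(Integer, Symbol::intern("42"), Some(Symbol::intern("usize")))`,
// keeping both the digits and the suffix as uninterpreted symbols.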

pub(crate) fn ident_can_begin_expr(ident: ast::Ident, is_raw: bool) -> bool {
    let ident_token: TokenKind = Ident(ident, is_raw);

    !ident_token.is_reserved_ident() ||
    ident_token.is_path_segment_keyword() ||
    [
        kw::Async,

        // FIXME: remove when `await!(..)` syntax is removed
        // https://github.com/rust-lang/rust/issues/60610
        kw::Await,

        kw::Do,
        kw::Box,
        kw::Break,
        kw::Continue,
        kw::False,
        kw::For,
        kw::If,
        kw::Loop,
        kw::Match,
        kw::Move,
        kw::Return,
        kw::True,
        kw::Unsafe,
        kw::While,
        kw::Yield,
        kw::Static,
    ].contains(&ident.name)
}
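
// In other words: a non-reserved identifier can always begin an expression, and
// so can the reserved keywords listed above, such as `return`, `match`, `if`,
// or `move` (`return x`, `match x { .. }`), while keywords like `fn` or
// `struct` cannot.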

fn ident_can_begin_type(ident: ast::Ident, is_raw: bool) -> bool {
    let ident_token: TokenKind = Ident(ident, is_raw);

    !ident_token.is_reserved_ident() ||
    ident_token.is_path_segment_keyword() ||
    [
        kw::Underscore,
        kw::For,
        kw::Impl,
        kw::Fn,
        kw::Unsafe,
        kw::Extern,
        kw::Typeof,
        kw::Dyn,
    ].contains(&ident.name)
}

#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)]
pub enum TokenKind {
    /* Expression-operator symbols. */
    Eq,
    Lt,
    Le,
    EqEq,
    Ne,
    Ge,
    Gt,
    AndAnd,
    OrOr,
    Not,
    Tilde,
    BinOp(BinOpToken),
    BinOpEq(BinOpToken),

    /* Structural symbols */
    At,
    Dot,
    DotDot,
    DotDotDot,
    DotDotEq,
    Comma,
    Semi,
    Colon,
    ModSep,
    RArrow,
    LArrow,
    FatArrow,
    Pound,
    Dollar,
    Question,
    /// Used by proc macros for representing lifetimes, not generated by lexer right now.
    SingleQuote,
    /// An opening delimiter (e.g., `{`).
    OpenDelim(DelimToken),
    /// A closing delimiter (e.g., `}`).
    CloseDelim(DelimToken),

    /* Literals */
    Literal(Lit),

    /* Name components */
    Ident(ast::Ident, /* is_raw */ bool),
    Lifetime(ast::Ident),

    Interpolated(Lrc<Nonterminal>),

    // Can be expanded into several tokens.
    /// A doc comment.
    DocComment(ast::Name),

    // Junk. These carry no data because we don't really care about the data
    // they *would* carry, and don't really want to allocate a new ident for
    // them. Instead, users could extract that from the associated span.

    /// Whitespace.
    Whitespace,
    /// A comment.
    Comment,
    Shebang(ast::Name),

    Eof,
}

// `TokenKind` is used a lot. Make sure it doesn't unintentionally get bigger.
#[cfg(target_arch = "x86_64")]
static_assert_size!(TokenKind, 16);

impl TokenKind {
    /// Recovers a `TokenKind` from an `ast::Ident`. This creates a raw identifier if necessary.
    pub fn from_ast_ident(ident: ast::Ident) -> TokenKind {
        Ident(ident, ident.is_raw_guess())
    }

    crate fn is_like_plus(&self) -> bool {
        match *self {
            BinOp(Plus) | BinOpEq(Plus) => true,
            _ => false,
        }
    }

    /// Returns `true` if the token can appear at the start of an expression.
    crate fn can_begin_expr(&self) -> bool {
        match *self {
            Ident(ident, is_raw) =>
                ident_can_begin_expr(ident, is_raw), // value name or keyword
            OpenDelim(..) | // tuple, array or block
            Literal(..) | // literal
            Not | // operator not
            BinOp(Minus) | // unary minus
            BinOp(Star) | // dereference
            BinOp(Or) | OrOr | // closure
            BinOp(And) | // reference
            AndAnd | // double reference
            // DotDotDot is no longer supported, but we need some way to display the error
            DotDot | DotDotDot | DotDotEq | // range notation
            Lt | BinOp(Shl) | // associated path
            ModSep | // global path
            Lifetime(..) | // labeled loop
            Pound => true, // expression attributes
            Interpolated(ref nt) => match **nt {
                NtLiteral(..) |
                NtIdent(..) |
                NtExpr(..) |
                NtBlock(..) |
                NtPath(..) |
                NtLifetime(..) => true,
                _ => false,
            },
            _ => false,
        }
    }

    /// Returns `true` if the token can appear at the start of a type.
    crate fn can_begin_type(&self) -> bool {
        match *self {
            Ident(ident, is_raw) =>
                ident_can_begin_type(ident, is_raw), // type name or keyword
            OpenDelim(Paren) | // tuple
            OpenDelim(Bracket) | // array
            Not | // never
            BinOp(Star) | // raw pointer
            BinOp(And) | // reference
            AndAnd | // double reference
            Question | // maybe bound in trait object
            Lifetime(..) | // lifetime bound in trait object
            Lt | BinOp(Shl) | // associated path
            ModSep => true, // global path
            Interpolated(ref nt) => match **nt {
                NtIdent(..) | NtTy(..) | NtPath(..) | NtLifetime(..) => true,
                _ => false,
            },
            _ => false,
        }
    }

    /// Returns `true` if the token can appear at the start of a const param.
    pub fn can_begin_const_arg(&self) -> bool {
        match self {
            OpenDelim(Brace) => true,
            Interpolated(ref nt) => match **nt {
                NtExpr(..) => true,
                NtBlock(..) => true,
                NtLiteral(..) => true,
                _ => false,
            },
            _ => self.can_begin_literal_or_bool(),
        }
    }

    /// Returns `true` if the token can appear at the start of a generic bound.
    crate fn can_begin_bound(&self) -> bool {
        self.is_path_start() || self.is_lifetime() || self.is_keyword(kw::For) ||
        self == &Question || self == &OpenDelim(Paren)
    }

    pub fn lit(kind: LitKind, symbol: Symbol, suffix: Option<Symbol>) -> TokenKind {
        Literal(Lit::new(kind, symbol, suffix))
    }

    /// Returns `true` if the token is any literal.
    crate fn is_lit(&self) -> bool {
        match *self {
            Literal(..) => true,
            _ => false,
        }
    }

    crate fn expect_lit(&self) -> Lit {
        match *self {
            Literal(lit) => lit,
            _ => panic!("`expect_lit` called on non-literal"),
        }
    }

    /// Returns `true` if the token is any literal, a minus (which can prefix a literal,
    /// for example `-42`), or one of the boolean idents `true` and `false`.
    crate fn can_begin_literal_or_bool(&self) -> bool {
        match *self {
            Literal(..) => true,
            BinOp(Minus) => true,
            Ident(ident, false) if ident.name == kw::True => true,
            Ident(ident, false) if ident.name == kw::False => true,
            Interpolated(ref nt) => match **nt {
                NtLiteral(..) => true,
                _ => false,
            },
            _ => false,
        }
    }

    /// Returns an identifier if this token is an identifier.
    pub fn ident(&self) -> Option<(ast::Ident, /* is_raw */ bool)> {
        match *self {
            Ident(ident, is_raw) => Some((ident, is_raw)),
            Interpolated(ref nt) => match **nt {
                NtIdent(ident, is_raw) => Some((ident, is_raw)),
                _ => None,
            },
            _ => None,
        }
    }

    /// Returns a lifetime identifier if this token is a lifetime.
    pub fn lifetime(&self) -> Option<ast::Ident> {
        match *self {
            Lifetime(ident) => Some(ident),
            Interpolated(ref nt) => match **nt {
                NtLifetime(ident) => Some(ident),
                _ => None,
            },
            _ => None,
        }
    }

    /// Returns `true` if the token is an identifier.
    pub fn is_ident(&self) -> bool {
        self.ident().is_some()
    }

    /// Returns `true` if the token is a lifetime.
    crate fn is_lifetime(&self) -> bool {
        self.lifetime().is_some()
    }

    /// Returns `true` if the token is an identifier whose name is the given symbol.
    crate fn is_ident_named(&self, name: Symbol) -> bool {
        match self.ident() {
            Some((ident, _)) => ident.name == name,
            None => false,
        }
    }

    /// Returns `true` if the token is an interpolated path.
    fn is_path(&self) -> bool {
        if let Interpolated(ref nt) = *self {
            if let NtPath(..) = **nt {
                return true;
            }
        }
        false
    }

    /// Returns `true` if the token is either the `mut` or `const` keyword.
    crate fn is_mutability(&self) -> bool {
        self.is_keyword(kw::Mut) ||
        self.is_keyword(kw::Const)
    }

    crate fn is_qpath_start(&self) -> bool {
        self == &Lt || self == &BinOp(Shl)
    }
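
    // For instance, the qualified path `<T as Trait>::f` begins with `Lt`, and it
    // begins with `BinOp(Shl)` in the rarer case where two adjacent `<`s were glued
    // into a single `<<` token (e.g. `<<T as Trait>::U as Trait2>::f`).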

    crate fn is_path_start(&self) -> bool {
        self == &ModSep || self.is_qpath_start() || self.is_path() ||
        self.is_path_segment_keyword() || self.is_ident() && !self.is_reserved_ident()
    }

    /// Returns `true` if the token is a given keyword, `kw`.
    pub fn is_keyword(&self, kw: Symbol) -> bool {
        self.ident().map(|(ident, is_raw)| ident.name == kw && !is_raw).unwrap_or(false)
    }

    pub fn is_path_segment_keyword(&self) -> bool {
        match self.ident() {
            Some((id, false)) => id.is_path_segment_keyword(),
            _ => false,
        }
    }

    // Returns true for reserved identifiers used internally for elided lifetimes,
    // unnamed method parameters, crate root module, error recovery etc.
    pub fn is_special_ident(&self) -> bool {
        match self.ident() {
            Some((id, false)) => id.is_special(),
            _ => false,
        }
    }

    /// Returns `true` if the token is a keyword used in the language.
    crate fn is_used_keyword(&self) -> bool {
        match self.ident() {
            Some((id, false)) => id.is_used_keyword(),
            _ => false,
        }
    }

    /// Returns `true` if the token is a keyword reserved for possible future use.
    crate fn is_unused_keyword(&self) -> bool {
        match self.ident() {
            Some((id, false)) => id.is_unused_keyword(),
            _ => false,
        }
    }

    /// Returns `true` if the token is either a special identifier or a keyword.
    pub fn is_reserved_ident(&self) -> bool {
        match self.ident() {
            Some((id, false)) => id.is_reserved(),
            _ => false,
        }
    }

    crate fn glue(self, joint: TokenKind) -> Option<TokenKind> {
        Some(match self {
            Eq => match joint {
                Eq => EqEq,
                Gt => FatArrow,
                _ => return None,
            },
            Lt => match joint {
                Eq => Le,
                Lt => BinOp(Shl),
                Le => BinOpEq(Shl),
                BinOp(Minus) => LArrow,
                _ => return None,
            },
            Gt => match joint {
                Eq => Ge,
                Gt => BinOp(Shr),
                Ge => BinOpEq(Shr),
                _ => return None,
            },
            Not => match joint {
                Eq => Ne,
                _ => return None,
            },
            BinOp(op) => match joint {
                Eq => BinOpEq(op),
                BinOp(And) if op == And => AndAnd,
                BinOp(Or) if op == Or => OrOr,
                Gt if op == Minus => RArrow,
                _ => return None,
            },
            Dot => match joint {
                Dot => DotDot,
                DotDot => DotDotDot,
                _ => return None,
            },
            DotDot => match joint {
                Dot => DotDotDot,
                Eq => DotDotEq,
                _ => return None,
            },
            Colon => match joint {
                Colon => ModSep,
                _ => return None,
            },
            SingleQuote => match joint {
                Ident(ident, false) => {
                    let name = Symbol::intern(&format!("'{}", ident));
                    Lifetime(symbol::Ident {
                        name,
                        span: ident.span,
                    })
                }
                _ => return None,
            },

            Le | EqEq | Ne | Ge | AndAnd | OrOr | Tilde | BinOpEq(..) | At | DotDotDot |
            DotDotEq | Comma | Semi | ModSep | RArrow | LArrow | FatArrow | Pound | Dollar |
            Question | OpenDelim(..) | CloseDelim(..) |
            Literal(..) | Ident(..) | Lifetime(..) | Interpolated(..) | DocComment(..) |
            Whitespace | Comment | Shebang(..) | Eof => return None,
        })
    }

    /// Returns tokens that are likely to be typed accidentally instead of the current token.
    /// Enables better error recovery when the wrong token is found.
    crate fn similar_tokens(&self) -> Option<Vec<TokenKind>> {
        match *self {
            Comma => Some(vec![Dot, Lt, Semi]),
            Semi => Some(vec![Colon, Comma]),
            _ => None
        }
    }

    // See comments in `Nonterminal::to_tokenstream` for why we care about
    // *probably* equal here rather than actual equality
    crate fn probably_equal_for_proc_macro(&self, other: &TokenKind) -> bool {
        if mem::discriminant(self) != mem::discriminant(other) {
            return false
        }
        match (self, other) {
            (&Eq, &Eq) |
            (&Lt, &Lt) |
            (&Le, &Le) |
            (&EqEq, &EqEq) |
            (&Ne, &Ne) |
            (&Ge, &Ge) |
            (&Gt, &Gt) |
            (&AndAnd, &AndAnd) |
            (&OrOr, &OrOr) |
            (&Not, &Not) |
            (&Tilde, &Tilde) |
            (&At, &At) |
            (&Dot, &Dot) |
            (&DotDot, &DotDot) |
            (&DotDotDot, &DotDotDot) |
            (&DotDotEq, &DotDotEq) |
            (&Comma, &Comma) |
            (&Semi, &Semi) |
            (&Colon, &Colon) |
            (&ModSep, &ModSep) |
            (&RArrow, &RArrow) |
            (&LArrow, &LArrow) |
            (&FatArrow, &FatArrow) |
            (&Pound, &Pound) |
            (&Dollar, &Dollar) |
            (&Question, &Question) |
            (&Whitespace, &Whitespace) |
            (&Comment, &Comment) |
            (&Eof, &Eof) => true,

            (&BinOp(a), &BinOp(b)) |
            (&BinOpEq(a), &BinOpEq(b)) => a == b,

            (&OpenDelim(a), &OpenDelim(b)) |
            (&CloseDelim(a), &CloseDelim(b)) => a == b,

            (&DocComment(a), &DocComment(b)) |
            (&Shebang(a), &Shebang(b)) => a == b,

            (&Literal(a), &Literal(b)) => a == b,

            (&Lifetime(a), &Lifetime(b)) => a.name == b.name,
            (&Ident(a, b), &Ident(c, d)) => b == d && (a.name == c.name ||
                                                       a.name == kw::DollarCrate ||
                                                       c.name == kw::DollarCrate),

            (&Interpolated(_), &Interpolated(_)) => false,

            _ => panic!("forgot to add a token?"),
        }
    }
}

/// For interpolation during macro expansion.
#[derive(Clone, RustcEncodable, RustcDecodable)]
pub enum Nonterminal {
    NtItem(P<ast::Item>),
    NtBlock(P<ast::Block>),
    NtStmt(ast::Stmt),
    NtPat(P<ast::Pat>),
    NtExpr(P<ast::Expr>),
    NtTy(P<ast::Ty>),
    NtIdent(ast::Ident, /* is_raw */ bool),
    NtLifetime(ast::Ident),
    NtLiteral(P<ast::Expr>),
    /// Stuff inside brackets for attributes
    NtMeta(ast::MetaItem),
    NtPath(ast::Path),
    NtVis(ast::Visibility),
    NtTT(TokenTree),
    // Used only for passing items to proc macro attributes (they are not
    // strictly necessary for that, `Annotatable` can be converted into
    // tokens directly, but doing that naively regresses pretty-printing).
    NtTraitItem(ast::TraitItem),
    NtImplItem(ast::ImplItem),
    NtForeignItem(ast::ForeignItem),
}
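
// For instance, when a `macro_rules!` matcher captures `$e:expr`, the matched
// expression is carried through the rest of expansion as a single
// `Interpolated(NtExpr(..))` token rather than being re-lexed from source.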

impl PartialEq for Nonterminal {
    fn eq(&self, rhs: &Self) -> bool {
        match (self, rhs) {
            (NtIdent(ident_lhs, is_raw_lhs), NtIdent(ident_rhs, is_raw_rhs)) =>
                ident_lhs == ident_rhs && is_raw_lhs == is_raw_rhs,
            (NtLifetime(ident_lhs), NtLifetime(ident_rhs)) => ident_lhs == ident_rhs,
            (NtTT(tt_lhs), NtTT(tt_rhs)) => tt_lhs == tt_rhs,
            // FIXME: Assume that all "complex" nonterminals are not equal; we can't compare them
            // correctly based on data from the AST. This will prevent them from matching each other
            // in macros. The comparison will become possible only when each nonterminal has an
            // attached token stream from which it was parsed.
            _ => false,
        }
    }
}

impl fmt::Debug for Nonterminal {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match *self {
            NtItem(..) => f.pad("NtItem(..)"),
            NtBlock(..) => f.pad("NtBlock(..)"),
            NtStmt(..) => f.pad("NtStmt(..)"),
            NtPat(..) => f.pad("NtPat(..)"),
            NtExpr(..) => f.pad("NtExpr(..)"),
            NtTy(..) => f.pad("NtTy(..)"),
            NtIdent(..) => f.pad("NtIdent(..)"),
            NtLiteral(..) => f.pad("NtLiteral(..)"),
            NtMeta(..) => f.pad("NtMeta(..)"),
            NtPath(..) => f.pad("NtPath(..)"),
            NtTT(..) => f.pad("NtTT(..)"),
            NtImplItem(..) => f.pad("NtImplItem(..)"),
            NtTraitItem(..) => f.pad("NtTraitItem(..)"),
            NtForeignItem(..) => f.pad("NtForeignItem(..)"),
            NtVis(..) => f.pad("NtVis(..)"),
            NtLifetime(..) => f.pad("NtLifetime(..)"),
        }
    }
}

impl Nonterminal {
    pub fn to_tokenstream(&self, sess: &ParseSess, span: Span) -> TokenStream {
        // A `Nonterminal` is often a parsed AST item. At this point we now
        // need to convert the parsed AST to an actual token stream, e.g.
        // un-parse it basically.
        //
        // Unfortunately there's not really a great way to do that in a
        // guaranteed lossless fashion right now. The fallback here is to just
        // stringify the AST node and reparse it, but this loses all span
        // information.
        //
        // As a result, some AST nodes are annotated with the token stream they
        // came from. Here we attempt to extract these lossless token streams
        // before we fall back to the stringification.
        let tokens = match *self {
            Nonterminal::NtItem(ref item) => {
                prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
            }
            Nonterminal::NtTraitItem(ref item) => {
                prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
            }
            Nonterminal::NtImplItem(ref item) => {
                prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
            }
            Nonterminal::NtIdent(ident, is_raw) => {
                let token = Ident(ident, is_raw);
                Some(TokenTree::Token(ident.span, token).into())
            }
            Nonterminal::NtLifetime(ident) => {
                let token = Lifetime(ident);
                Some(TokenTree::Token(ident.span, token).into())
            }
            Nonterminal::NtTT(ref tt) => {
                Some(tt.clone().into())
            }
            _ => None,
        };

        // FIXME(#43081): Avoid this pretty-print + reparse hack
        let source = pprust::nonterminal_to_string(self);
        let filename = FileName::macro_expansion_source_code(&source);
        let tokens_for_real = parse_stream_from_source_str(filename, source, sess, Some(span));

        // During early phases of the compiler the AST could get modified
        // directly (e.g., attributes added or removed) and the internal cache
        // of tokens may not be invalidated or updated. Consequently if the
        // "lossless" token stream disagrees with our actual stringification
        // (which has historically been much more battle-tested) then we go
        // with the lossy stream anyway (losing span information).
        //
        // Note that the comparison isn't `==` here to avoid comparing spans,
        // but it *also* is a "probable" equality which is a pretty weird
        // definition. We mostly want to catch actual changes to the AST
        // like a `#[cfg]` being processed or some weird `macro_rules!`
        // expansion.
        //
        // What we *don't* want to catch is the fact that a user-defined
        // literal like `0xf` is stringified as `15`, causing the cached token
        // stream to not be literal `==` token-wise (ignoring spans) to the
        // token stream we got from stringification.
        //
        // Instead the "probably equal" check here is "does each token
        // recursively have the same discriminant?" We basically don't look at
        // the token values here and assume that such fine grained token stream
        // modifications, including adding/removing typically non-semantic
        // tokens such as extra braces and commas, don't happen.
        if let Some(tokens) = tokens {
            if tokens.probably_equal_for_proc_macro(&tokens_for_real) {
                return tokens
            }
            info!("cached tokens found, but they're not \"probably equal\", \
                   going with stringified version");
        }
        return tokens_for_real
    }
}

crate fn is_op(tok: &TokenKind) -> bool {
    match *tok {
        OpenDelim(..) | CloseDelim(..) | Literal(..) | DocComment(..) |
        Ident(..) | Lifetime(..) | Interpolated(..) |
        Whitespace | Comment | Shebang(..) | Eof => false,
        _ => true,
    }
}

fn prepend_attrs(sess: &ParseSess,
                 attrs: &[ast::Attribute],
                 tokens: Option<&tokenstream::TokenStream>,
                 span: syntax_pos::Span)
    -> Option<tokenstream::TokenStream>
{
    let tokens = tokens?;
    if attrs.len() == 0 {
        return Some(tokens.clone())
    }
    let mut builder = tokenstream::TokenStreamBuilder::new();
    for attr in attrs {
        assert_eq!(attr.style, ast::AttrStyle::Outer,
                   "inner attributes should prevent cached tokens from existing");

        let source = pprust::attr_to_string(attr);
        let macro_filename = FileName::macro_expansion_source_code(&source);
        if attr.is_sugared_doc {
            let stream = parse_stream_from_source_str(macro_filename, source, sess, Some(span));
            builder.push(stream);
            continue
        }

        // synthesize # [ $path $tokens ] manually here
        let mut brackets = tokenstream::TokenStreamBuilder::new();

        // For simple paths, push the identifier directly
        if attr.path.segments.len() == 1 && attr.path.segments[0].args.is_none() {
            let ident = attr.path.segments[0].ident;
            let token = Ident(ident, ident.as_str().starts_with("r#"));
            brackets.push(tokenstream::TokenTree::Token(ident.span, token));

        // ... and for more complicated paths, fall back to a reparse hack that
        // should eventually be removed.
        } else {
            let stream = parse_stream_from_source_str(macro_filename, source, sess, Some(span));
            brackets.push(stream);
        }

        brackets.push(attr.tokens.clone());

        // The span we list here for `#` and for `[ ... ]` are both wrong in
        // that it encompasses more than each token, but it hopefully is "good
        // enough" for now at least.
        builder.push(tokenstream::TokenTree::Token(attr.span, Pound));
        let delim_span = DelimSpan::from_single(attr.span);
        builder.push(tokenstream::TokenTree::Delimited(
            delim_span, DelimToken::Bracket, brackets.build().into()));
    }
    builder.push(tokens.clone());
    Some(builder.build())
}
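
// A minimal sanity-check sketch (not exhaustive) of how `TokenKind::glue` joins
// adjacent tokens into compound tokens; it only exercises punctuation, so no
// interned symbols or session state are needed.
#[cfg(test)]
mod glue_sketch_tests {
    use super::*;

    #[test]
    fn glue_builds_compound_tokens() {
        // `<` followed immediately by `=` becomes `<=`.
        assert_eq!(Lt.glue(Eq), Some(Le));
        // Two adjacent `<`s become the left-shift operator `<<`.
        assert_eq!(Lt.glue(Lt), Some(BinOp(Shl)));
        // `.` tokens build up the range punctuation `..` and `...`.
        assert_eq!(Dot.glue(Dot), Some(DotDot));
        assert_eq!(DotDot.glue(Dot), Some(DotDotDot));
        // Most pairs cannot be glued at all.
        assert_eq!(Semi.glue(Semi), None);
    }
}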