2013-05-30 10:16:33 +00:00
|
|
|
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
|
2012-12-04 00:48:01 +00:00
|
|
|
// file at the top-level directory of this distribution and at
|
|
|
|
// http://rust-lang.org/COPYRIGHT.
|
|
|
|
//
|
|
|
|
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
|
|
|
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
|
|
|
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
|
|
|
// option. This file may not be copied, modified, or distributed
|
|
|
|
// except according to those terms.
|
|
|
|
|
2014-11-06 08:05:53 +00:00
|
|
|
pub use self::BinOpToken::*;
|
|
|
|
pub use self::Nonterminal::*;
|
|
|
|
pub use self::DelimToken::*;
|
2014-11-18 23:17:40 +00:00
|
|
|
pub use self::Lit::*;
|
2014-11-06 08:05:53 +00:00
|
|
|
pub use self::Token::*;
|
|
|
|
|
2016-09-14 05:57:16 +00:00
|
|
|
use ast::{self};
|
2017-09-15 15:28:34 +00:00
|
|
|
use parse::ParseSess;
|
|
|
|
use print::pprust;
|
2014-09-13 16:06:01 +00:00
|
|
|
use ptr::P;
|
2017-03-29 01:55:01 +00:00
|
|
|
use serialize::{Decodable, Decoder, Encodable, Encoder};
|
2016-11-16 08:21:52 +00:00
|
|
|
use symbol::keywords;
|
2017-09-15 15:28:34 +00:00
|
|
|
use syntax::parse::parse_stream_from_source_str;
|
2017-12-14 07:09:19 +00:00
|
|
|
use syntax_pos::{self, Span, FileName};
|
2017-03-29 01:55:01 +00:00
|
|
|
use tokenstream::{TokenStream, TokenTree};
|
2017-09-15 15:28:34 +00:00
|
|
|
use tokenstream;
|
2012-12-23 22:41:37 +00:00
|
|
|
|
2017-06-05 01:41:33 +00:00
|
|
|
use std::cell::Cell;
|
|
|
|
use std::{cmp, fmt};
|
2014-03-27 17:28:38 +00:00
|
|
|
use std::rc::Rc;
|
2010-08-18 18:35:12 +00:00
|
|
|
|
2015-01-28 13:34:18 +00:00
|
|
|
/// A binary-operator token. Used both bare (`BinOp`) and in its
/// compound-assignment form (`BinOpEq`, e.g. `+=`).
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)]
pub enum BinOpToken {
    /// `+`
    Plus,
    /// `-`
    Minus,
    /// `*`
    Star,
    /// `/`
    Slash,
    /// `%`
    Percent,
    /// `^`
    Caret,
    /// `&`
    And,
    /// `|`
    Or,
    /// `<<`
    Shl,
    /// `>>`
    Shr,
}
|
|
|
|
|
2015-01-07 01:53:18 +00:00
|
|
|
/// A delimiter token
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)]
pub enum DelimToken {
    /// A round parenthesis: `(` or `)`
    Paren,
    /// A square bracket: `[` or `]`
    Bracket,
    /// A curly brace: `{` or `}`
    Brace,
    /// An empty delimiter
    NoDelim,
}
|
|
|
|
|
2017-01-23 04:58:15 +00:00
|
|
|
impl DelimToken {
|
2017-01-27 11:00:10 +00:00
|
|
|
pub fn len(self) -> usize {
|
|
|
|
if self == NoDelim { 0 } else { 1 }
|
2017-01-23 04:58:15 +00:00
|
|
|
}
|
2017-05-12 18:05:39 +00:00
|
|
|
|
|
|
|
pub fn is_empty(self) -> bool {
|
|
|
|
self == NoDelim
|
|
|
|
}
|
2017-01-23 04:58:15 +00:00
|
|
|
}
|
|
|
|
|
2015-01-28 13:34:18 +00:00
|
|
|
/// The kind (and interned text) of a literal token. The `ast::Name` in each
/// variant is the interned symbol for the literal's source text.
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)]
pub enum Lit {
    /// A byte literal, e.g. `b'a'`
    Byte(ast::Name),
    /// A character literal, e.g. `'a'`
    Char(ast::Name),
    /// An integer literal, e.g. `42`
    Integer(ast::Name),
    /// A floating-point literal, e.g. `4.2`
    Float(ast::Name),
    /// An ordinary ("cooked") string literal, e.g. `"a"`
    Str_(ast::Name),
    StrRaw(ast::Name, usize), /* raw str delimited by n hash symbols */
    /// A byte-string literal, e.g. `b"a"`
    ByteStr(ast::Name),
    ByteStrRaw(ast::Name, usize), /* raw byte str delimited by n hash symbols */
}
|
|
|
|
|
2014-11-19 04:48:38 +00:00
|
|
|
impl Lit {
|
|
|
|
pub fn short_name(&self) -> &'static str {
|
|
|
|
match *self {
|
|
|
|
Byte(_) => "byte",
|
|
|
|
Char(_) => "char",
|
|
|
|
Integer(_) => "integer",
|
|
|
|
Float(_) => "float",
|
2015-09-03 07:54:53 +00:00
|
|
|
Str_(_) | StrRaw(..) => "string",
|
|
|
|
ByteStr(_) | ByteStrRaw(..) => "byte string"
|
2014-11-19 04:48:38 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-01-27 05:51:20 +00:00
|
|
|
fn ident_can_begin_expr(ident: ast::Ident) -> bool {
|
|
|
|
let ident_token: Token = Ident(ident);
|
|
|
|
|
2017-06-29 10:16:35 +00:00
|
|
|
!ident_token.is_reserved_ident() ||
|
2017-01-27 05:51:20 +00:00
|
|
|
ident_token.is_path_segment_keyword() ||
|
|
|
|
[
|
2017-03-03 22:41:07 +00:00
|
|
|
keywords::Do.name(),
|
2017-01-27 05:51:20 +00:00
|
|
|
keywords::Box.name(),
|
|
|
|
keywords::Break.name(),
|
|
|
|
keywords::Continue.name(),
|
|
|
|
keywords::False.name(),
|
|
|
|
keywords::For.name(),
|
|
|
|
keywords::If.name(),
|
|
|
|
keywords::Loop.name(),
|
|
|
|
keywords::Match.name(),
|
|
|
|
keywords::Move.name(),
|
|
|
|
keywords::Return.name(),
|
|
|
|
keywords::True.name(),
|
|
|
|
keywords::Unsafe.name(),
|
|
|
|
keywords::While.name(),
|
2016-12-26 13:34:03 +00:00
|
|
|
keywords::Yield.name(),
|
2017-01-27 05:51:20 +00:00
|
|
|
].contains(&ident.name)
|
|
|
|
}
|
|
|
|
|
2017-03-16 21:47:32 +00:00
|
|
|
fn ident_can_begin_type(ident: ast::Ident) -> bool {
|
|
|
|
let ident_token: Token = Ident(ident);
|
|
|
|
|
2017-06-29 10:16:35 +00:00
|
|
|
!ident_token.is_reserved_ident() ||
|
2017-03-16 21:47:32 +00:00
|
|
|
ident_token.is_path_segment_keyword() ||
|
|
|
|
[
|
|
|
|
keywords::For.name(),
|
|
|
|
keywords::Impl.name(),
|
|
|
|
keywords::Fn.name(),
|
|
|
|
keywords::Unsafe.name(),
|
|
|
|
keywords::Extern.name(),
|
|
|
|
keywords::Typeof.name(),
|
|
|
|
].contains(&ident.name)
|
|
|
|
}
|
|
|
|
|
2015-01-28 13:34:18 +00:00
|
|
|
/// A single token produced by the lexer (or by macro interpolation).
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug)]
pub enum Token {
    /* Expression-operator symbols. */
    Eq,
    Lt,
    Le,
    EqEq,
    Ne,
    Ge,
    Gt,
    AndAnd,
    OrOr,
    Not,
    Tilde,
    BinOp(BinOpToken),
    BinOpEq(BinOpToken),

    /* Structural symbols */
    At,
    Dot,
    DotDot,
    DotDotDot,
    DotDotEq,
    DotEq, // HACK(durka42) never produced by the parser, only used for libproc_macro
    Comma,
    Semi,
    Colon,
    ModSep,
    RArrow,
    LArrow,
    FatArrow,
    Pound,
    Dollar,
    Question,
    /// An opening delimiter, eg. `{`
    OpenDelim(DelimToken),
    /// A closing delimiter, eg. `}`
    CloseDelim(DelimToken),

    /* Literals */
    // The optional `ast::Name` is the literal's suffix (e.g. the `u8` of
    // `1u8`), when present.
    Literal(Lit, Option<ast::Name>),

    /* Name components */
    Ident(ast::Ident),
    Underscore,
    Lifetime(ast::Ident),

    // The `LazyTokenStream` is a pure function of the `Nonterminal`,
    // and so the `LazyTokenStream` can be ignored by Eq, Hash, etc.
    Interpolated(Rc<(Nonterminal, LazyTokenStream)>),
    // Can be expanded into several tokens.
    /// Doc comment
    DocComment(ast::Name),

    // Junk. These carry no data because we don't really care about the data
    // they *would* carry, and don't really want to allocate a new ident for
    // them. Instead, users could extract that from the associated span.

    /// Whitespace
    Whitespace,
    /// Comment
    Comment,
    /// A `#!...` shebang line; carries the interned line text.
    Shebang(ast::Name),

    /// End of the token stream.
    Eof,
}
|
2010-08-18 18:35:12 +00:00
|
|
|
|
2014-10-27 12:33:30 +00:00
|
|
|
impl Token {
    /// Wraps a parsed `Nonterminal` in an `Interpolated` token, paired with
    /// an initially-empty, lazily computed token-stream cache.
    pub fn interpolated(nt: Nonterminal) -> Token {
        Token::Interpolated(Rc::new((nt, LazyTokenStream::new())))
    }

    /// Returns `true` if the token starts with '>'.
    pub fn is_like_gt(&self) -> bool {
        match *self {
            BinOp(Shr) | BinOpEq(Shr) | Gt | Ge => true,
            _ => false,
        }
    }

    /// Returns `true` if the token can appear at the start of an expression.
    pub fn can_begin_expr(&self) -> bool {
        match *self {
            Ident(ident) => ident_can_begin_expr(ident), // value name or keyword
            OpenDelim(..) | // tuple, array or block
            Literal(..) | // literal
            Not | // operator not
            BinOp(Minus) | // unary minus
            BinOp(Star) | // dereference
            BinOp(Or) | OrOr | // closure
            BinOp(And) | // reference
            AndAnd | // double reference
            // DotDotDot is no longer supported, but we need some way to display the error
            DotDot | DotDotDot | DotDotEq | // range notation
            Lt | BinOp(Shl) | // associated path
            ModSep | // global path
            Pound => true, // expression attributes
            Interpolated(ref nt) => match nt.0 {
                NtIdent(..) | NtExpr(..) | NtBlock(..) | NtPath(..) => true,
                _ => false,
            },
            _ => false,
        }
    }

    /// Returns `true` if the token can appear at the start of a type.
    pub fn can_begin_type(&self) -> bool {
        match *self {
            Ident(ident) => ident_can_begin_type(ident), // type name or keyword
            OpenDelim(Paren) | // tuple
            OpenDelim(Bracket) | // array
            Underscore | // placeholder
            Not | // never
            BinOp(Star) | // raw pointer
            BinOp(And) | // reference
            AndAnd | // double reference
            Question | // maybe bound in trait object
            Lifetime(..) | // lifetime bound in trait object
            Lt | BinOp(Shl) | // associated path
            ModSep => true, // global path
            Interpolated(ref nt) => match nt.0 {
                NtIdent(..) | NtTy(..) | NtPath(..) | NtLifetime(..) => true,
                _ => false,
            },
            _ => false,
        }
    }

    /// Returns `true` if the token can appear at the start of a generic bound.
    pub fn can_begin_bound(&self) -> bool {
        self.is_path_start() || self.is_lifetime() || self.is_keyword(keywords::For) ||
        self == &Question || self == &OpenDelim(Paren)
    }

    /// Returns `true` if the token is any literal
    pub fn is_lit(&self) -> bool {
        match *self {
            Literal(..) => true,
            _ => false,
        }
    }

    /// Extracts the identifier, if this token is (or interpolates) one.
    pub fn ident(&self) -> Option<ast::Ident> {
        match *self {
            Ident(ident) => Some(ident),
            Interpolated(ref nt) => match nt.0 {
                NtIdent(ident) => Some(ident.node),
                _ => None,
            },
            _ => None,
        }
    }

    /// Returns `true` if the token is an identifier.
    pub fn is_ident(&self) -> bool {
        self.ident().is_some()
    }

    /// Returns `true` if the token is a documentation comment.
    pub fn is_doc_comment(&self) -> bool {
        match *self {
            DocComment(..) => true,
            _ => false,
        }
    }

    /// Returns `true` if the token is interpolated.
    pub fn is_interpolated(&self) -> bool {
        match *self {
            Interpolated(..) => true,
            _ => false,
        }
    }

    /// Returns `true` if the token is an interpolated path.
    pub fn is_path(&self) -> bool {
        if let Interpolated(ref nt) = *self {
            if let NtPath(..) = nt.0 {
                return true;
            }
        }
        false
    }

    /// Returns `true` if the token is a lifetime.
    pub fn is_lifetime(&self) -> bool {
        match *self {
            Lifetime(..) => true,
            // An interpolated lifetime fragment also counts.
            Interpolated(ref nt) => match nt.0 {
                NtLifetime(..) => true,
                _ => false,
            },
            _ => false,
        }
    }

    /// Returns `true` if the token is either the `mut` or `const` keyword.
    pub fn is_mutability(&self) -> bool {
        self.is_keyword(keywords::Mut) ||
        self.is_keyword(keywords::Const)
    }

    /// Returns `true` if the token could start a qualified path (`<T as Trait>::...`).
    pub fn is_qpath_start(&self) -> bool {
        self == &Lt || self == &BinOp(Shl)
    }

    /// Returns `true` if the token could start any path.
    pub fn is_path_start(&self) -> bool {
        self == &ModSep || self.is_qpath_start() || self.is_path() ||
        self.is_path_segment_keyword() || self.is_ident() && !self.is_reserved_ident()
    }

    /// Returns `true` if the token is a given keyword, `kw`.
    pub fn is_keyword(&self, kw: keywords::Keyword) -> bool {
        self.ident().map(|ident| ident.name == kw.name()).unwrap_or(false)
    }

    /// Returns `true` if the token is a keyword that may appear as a path
    /// segment (`super`, `self`, `Self`, `crate`, `$crate`).
    pub fn is_path_segment_keyword(&self) -> bool {
        match self.ident() {
            Some(id) => id.name == keywords::Super.name() ||
                        id.name == keywords::SelfValue.name() ||
                        id.name == keywords::SelfType.name() ||
                        id.name == keywords::Crate.name() ||
                        id.name == keywords::DollarCrate.name(),
            None => false,
        }
    }

    // Returns true for reserved identifiers used internally for elided lifetimes,
    // unnamed method parameters, crate root module, error recovery etc.
    pub fn is_special_ident(&self) -> bool {
        // Relies on the ordering of interned keyword names: everything up to
        // and including `$crate` is a special identifier.
        match self.ident() {
            Some(id) => id.name <= keywords::DollarCrate.name(),
            _ => false,
        }
    }

    /// Returns `true` if the token is a keyword used in the language.
    pub fn is_used_keyword(&self) -> bool {
        // `as` .. `while` spans the contiguous range of in-use keywords.
        match self.ident() {
            Some(id) => id.name >= keywords::As.name() && id.name <= keywords::While.name(),
            _ => false,
        }
    }

    /// Returns `true` if the token is a keyword reserved for possible future use.
    pub fn is_unused_keyword(&self) -> bool {
        // `abstract` .. `yield` spans the contiguous range of reserved keywords.
        match self.ident() {
            Some(id) => id.name >= keywords::Abstract.name() && id.name <= keywords::Yield.name(),
            _ => false,
        }
    }

    /// Joins this token with an immediately-following `joint` token into the
    /// single multi-character token they spell (e.g. `=` + `=` -> `==`),
    /// or returns `None` if the pair does not form one.
    pub fn glue(self, joint: Token) -> Option<Token> {
        Some(match self {
            Eq => match joint {
                Eq => EqEq,
                Gt => FatArrow,
                _ => return None,
            },
            Lt => match joint {
                Eq => Le,
                Lt => BinOp(Shl),
                Le => BinOpEq(Shl),
                BinOp(Minus) => LArrow,
                _ => return None,
            },
            Gt => match joint {
                Eq => Ge,
                Gt => BinOp(Shr),
                Ge => BinOpEq(Shr),
                _ => return None,
            },
            Not => match joint {
                Eq => Ne,
                _ => return None,
            },
            BinOp(op) => match joint {
                Eq => BinOpEq(op),
                BinOp(And) if op == And => AndAnd,
                BinOp(Or) if op == Or => OrOr,
                Gt if op == Minus => RArrow,
                _ => return None,
            },
            Dot => match joint {
                Dot => DotDot,
                DotDot => DotDotDot,
                DotEq => DotDotEq,
                _ => return None,
            },
            DotDot => match joint {
                Dot => DotDotDot,
                Eq => DotDotEq,
                _ => return None,
            },
            Colon => match joint {
                Colon => ModSep,
                _ => return None,
            },

            // Tokens that are already "maximal" never glue with anything.
            Le | EqEq | Ne | Ge | AndAnd | OrOr | Tilde | BinOpEq(..) | At | DotDotDot | DotEq |
            DotDotEq | Comma | Semi | ModSep | RArrow | LArrow | FatArrow | Pound | Dollar |
            Question | OpenDelim(..) | CloseDelim(..) | Underscore => return None,

            Literal(..) | Ident(..) | Lifetime(..) | Interpolated(..) | DocComment(..) |
            Whitespace | Comment | Shebang(..) | Eof => return None,
        })
    }

    /// Returns tokens that are likely to be typed accidentally instead of the current token.
    /// Enables better error recovery when the wrong token is found.
    pub fn similar_tokens(&self) -> Option<Vec<Token>> {
        match *self {
            Comma => Some(vec![Dot, Lt]),
            Semi => Some(vec![Colon]),
            _ => None
        }
    }

    /// Returns `true` if the token is either a special identifier or a keyword.
    pub fn is_reserved_ident(&self) -> bool {
        self.is_special_ident() || self.is_used_keyword() || self.is_unused_keyword()
    }

    /// Converts an `Interpolated` token back into a `TokenStream`, reusing a
    /// lossless cached stream where one exists and otherwise falling back to
    /// pretty-printing and reparsing. Panics on non-interpolated tokens.
    pub fn interpolated_to_tokenstream(&self, sess: &ParseSess, span: Span)
        -> TokenStream
    {
        let nt = match *self {
            Token::Interpolated(ref nt) => nt,
            _ => panic!("only works on interpolated tokens"),
        };

        // An `Interpolated` token means that we have a `Nonterminal`
        // which is often a parsed AST item. At this point we now need
        // to convert the parsed AST to an actual token stream, e.g.
        // un-parse it basically.
        //
        // Unfortunately there's not really a great way to do that in a
        // guaranteed lossless fashion right now. The fallback here is
        // to just stringify the AST node and reparse it, but this loses
        // all span information.
        //
        // As a result, some AST nodes are annotated with the token
        // stream they came from. Attempt to extract these lossless
        // token streams before we fall back to the stringification.
        let mut tokens = None;

        match nt.0 {
            Nonterminal::NtItem(ref item) => {
                tokens = prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span);
            }
            Nonterminal::NtTraitItem(ref item) => {
                tokens = prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span);
            }
            Nonterminal::NtImplItem(ref item) => {
                tokens = prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span);
            }
            Nonterminal::NtIdent(ident) => {
                // Single-token nonterminals are rebuilt directly, keeping spans.
                let token = Token::Ident(ident.node);
                tokens = Some(TokenTree::Token(ident.span, token).into());
            }
            Nonterminal::NtLifetime(lifetime) => {
                let token = Token::Lifetime(lifetime.ident);
                tokens = Some(TokenTree::Token(lifetime.span, token).into());
            }
            Nonterminal::NtTT(ref tt) => {
                tokens = Some(tt.clone().into());
            }
            _ => {}
        }

        tokens.unwrap_or_else(|| {
            nt.1.force(|| {
                // FIXME(jseyfried): Avoid this pretty-print + reparse hack
                let source = pprust::token_to_string(self);
                parse_stream_from_source_str(FileName::MacroExpansion, source, sess, Some(span))
            })
        })
    }
}
|
|
|
|
|
2015-01-04 03:54:18 +00:00
|
|
|
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash)]
/// For interpolation during macro expansion.
pub enum Nonterminal {
    NtItem(P<ast::Item>),
    NtBlock(P<ast::Block>),
    NtStmt(ast::Stmt),
    NtPat(P<ast::Pat>),
    NtExpr(P<ast::Expr>),
    NtTy(P<ast::Ty>),
    NtIdent(ast::SpannedIdent),
    /// Stuff inside brackets for attributes
    NtMeta(ast::MetaItem),
    NtPath(ast::Path),
    NtVis(ast::Visibility),
    NtTT(TokenTree),
    // These are not exposed to macros, but are used by quasiquote.
    NtArm(ast::Arm),
    NtImplItem(ast::ImplItem),
    NtTraitItem(ast::TraitItem),
    NtGenerics(ast::Generics),
    NtWhereClause(ast::WhereClause),
    NtArg(ast::Arg),
    NtLifetime(ast::Lifetime),
}
|
|
|
|
|
2015-01-20 23:45:07 +00:00
|
|
|
impl fmt::Debug for Nonterminal {
|
2014-02-28 09:23:06 +00:00
|
|
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
|
|
match *self {
|
|
|
|
NtItem(..) => f.pad("NtItem(..)"),
|
|
|
|
NtBlock(..) => f.pad("NtBlock(..)"),
|
|
|
|
NtStmt(..) => f.pad("NtStmt(..)"),
|
|
|
|
NtPat(..) => f.pad("NtPat(..)"),
|
|
|
|
NtExpr(..) => f.pad("NtExpr(..)"),
|
|
|
|
NtTy(..) => f.pad("NtTy(..)"),
|
|
|
|
NtIdent(..) => f.pad("NtIdent(..)"),
|
2014-03-26 23:14:07 +00:00
|
|
|
NtMeta(..) => f.pad("NtMeta(..)"),
|
2014-02-28 09:23:06 +00:00
|
|
|
NtPath(..) => f.pad("NtPath(..)"),
|
|
|
|
NtTT(..) => f.pad("NtTT(..)"),
|
Interpolate AST nodes in quasiquote.
This changes the `ToTokens` implementations for expressions, statements,
etc. with almost-trivial ones that produce `Interpolated(*Nt(...))`
pseudo-tokens. In this way, quasiquote now works the same way as macros
do: already-parsed AST fragments are used as-is, not reparsed.
The `ToSource` trait is removed. Quasiquote no longer involves
pretty-printing at all, which removes the need for the
`encode_with_hygiene` hack. All associated machinery is removed.
A new `Nonterminal` is added, NtArm, which the parser now interpolates.
This is just for quasiquote, not macros (although it could be in the
future).
`ToTokens` is no longer implemented for `Arg` (although this could be
added again) and `Generics` (which I don't think makes sense).
This breaks any compiler extensions that relied on the ability of
`ToTokens` to turn AST fragments back into inspectable token trees. For
this reason, this closes #16987.
As such, this is a [breaking-change].
Fixes #16472.
Fixes #15962.
Fixes #17397.
Fixes #16617.
2015-03-05 20:06:49 +00:00
|
|
|
NtArm(..) => f.pad("NtArm(..)"),
|
|
|
|
NtImplItem(..) => f.pad("NtImplItem(..)"),
|
|
|
|
NtTraitItem(..) => f.pad("NtTraitItem(..)"),
|
2015-05-02 17:55:41 +00:00
|
|
|
NtGenerics(..) => f.pad("NtGenerics(..)"),
|
|
|
|
NtWhereClause(..) => f.pad("NtWhereClause(..)"),
|
2015-11-11 20:19:01 +00:00
|
|
|
NtArg(..) => f.pad("NtArg(..)"),
|
2016-04-24 16:04:01 +00:00
|
|
|
NtVis(..) => f.pad("NtVis(..)"),
|
2017-05-10 00:30:47 +00:00
|
|
|
NtLifetime(..) => f.pad("NtLifetime(..)"),
|
2014-02-28 09:23:06 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2017-03-17 23:41:09 +00:00
|
|
|
|
|
|
|
/// Returns `true` if `tok` is punctuation that acts as an operator.
/// Delimiters, literals, names, lifetimes, interpolated fragments, comments
/// and whitespace are not operators; every other token is.
pub fn is_op(tok: &Token) -> bool {
    match *tok {
        OpenDelim(..) | CloseDelim(..) | Literal(..) | DocComment(..) |
        Ident(..) | Underscore | Lifetime(..) | Interpolated(..) |
        Whitespace | Comment | Shebang(..) | Eof => false,
        _ => true,
    }
}
|
2017-03-29 01:55:01 +00:00
|
|
|
|
2017-06-05 01:41:33 +00:00
|
|
|
/// A token stream that is computed once, on demand, and then cached.
/// Stored in a `Cell` so the cache can be filled in through `&self`
/// (see `force`).
pub struct LazyTokenStream(Cell<Option<TokenStream>>);
|
|
|
|
|
|
|
|
impl Clone for LazyTokenStream {
|
|
|
|
fn clone(&self) -> Self {
|
|
|
|
let opt_stream = self.0.take();
|
|
|
|
self.0.set(opt_stream.clone());
|
|
|
|
LazyTokenStream(Cell::new(opt_stream))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// All `LazyTokenStream`s compare equal: the cached stream is a pure function
// of the `Nonterminal` it accompanies (see `Token::Interpolated`), so it
// carries no information of its own.
impl cmp::Eq for LazyTokenStream {}
impl PartialEq for LazyTokenStream {
    fn eq(&self, _other: &LazyTokenStream) -> bool {
        true
    }
}
|
|
|
|
|
|
|
|
impl fmt::Debug for LazyTokenStream {
|
|
|
|
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
|
|
fmt::Debug::fmt(&self.clone().0.into_inner(), f)
|
|
|
|
}
|
|
|
|
}
|
2017-03-29 01:55:01 +00:00
|
|
|
|
|
|
|
impl LazyTokenStream {
|
|
|
|
pub fn new() -> Self {
|
2017-06-05 01:41:33 +00:00
|
|
|
LazyTokenStream(Cell::new(None))
|
2017-03-29 01:55:01 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
pub fn force<F: FnOnce() -> TokenStream>(&self, f: F) -> TokenStream {
|
2017-06-05 01:41:33 +00:00
|
|
|
let mut opt_stream = self.0.take();
|
2017-03-29 01:55:01 +00:00
|
|
|
if opt_stream.is_none() {
|
2017-06-05 01:41:33 +00:00
|
|
|
opt_stream = Some(f());
|
|
|
|
}
|
|
|
|
self.0.set(opt_stream.clone());
|
2017-03-29 01:55:01 +00:00
|
|
|
opt_stream.clone().unwrap()
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl Encodable for LazyTokenStream {
    // The cache is deliberately not serialized; it can be recomputed on demand.
    fn encode<S: Encoder>(&self, _: &mut S) -> Result<(), S::Error> {
        Ok(())
    }
}
|
|
|
|
|
|
|
|
impl Decodable for LazyTokenStream {
    // Deserialization restores an empty cache (nothing is written by `Encodable`).
    fn decode<D: Decoder>(_: &mut D) -> Result<LazyTokenStream, D::Error> {
        Ok(LazyTokenStream::new())
    }
}
|
|
|
|
|
|
|
|
impl ::std::hash::Hash for LazyTokenStream {
    // Hashing is a no-op so tokens differing only in their cached streams
    // hash identically, consistent with the always-equal `PartialEq` impl.
    fn hash<H: ::std::hash::Hasher>(&self, _hasher: &mut H) {}
}
|
2017-09-15 15:28:34 +00:00
|
|
|
|
|
|
|
fn prepend_attrs(sess: &ParseSess,
|
|
|
|
attrs: &[ast::Attribute],
|
|
|
|
tokens: Option<&tokenstream::TokenStream>,
|
|
|
|
span: syntax_pos::Span)
|
|
|
|
-> Option<tokenstream::TokenStream>
|
|
|
|
{
|
2017-12-09 01:32:04 +00:00
|
|
|
let tokens = tokens?;
|
2017-09-15 15:28:34 +00:00
|
|
|
if attrs.len() == 0 {
|
|
|
|
return Some(tokens.clone())
|
|
|
|
}
|
|
|
|
let mut builder = tokenstream::TokenStreamBuilder::new();
|
|
|
|
for attr in attrs {
|
|
|
|
assert_eq!(attr.style, ast::AttrStyle::Outer,
|
|
|
|
"inner attributes should prevent cached tokens from existing");
|
|
|
|
// FIXME: Avoid this pretty-print + reparse hack as bove
|
2017-12-14 07:09:19 +00:00
|
|
|
let name = FileName::MacroExpansion;
|
2017-09-15 15:28:34 +00:00
|
|
|
let source = pprust::attr_to_string(attr);
|
|
|
|
let stream = parse_stream_from_source_str(name, source, sess, Some(span));
|
|
|
|
builder.push(stream);
|
|
|
|
}
|
|
|
|
builder.push(tokens.clone());
|
|
|
|
Some(builder.build())
|
|
|
|
}
|