Initial implementation of RFC 2151, Raw Identifiers

This commit is contained in:
Lymia Aluysia 2018-03-09 23:56:40 -06:00
parent 8aa27ee309
commit fad1648e0f
No known key found for this signature in database
GPG Key ID: DB2E204C989251F7
37 changed files with 475 additions and 145 deletions

View File

@ -681,7 +681,8 @@ impl TokenTree {
Dollar => op!('$'),
Question => op!('?'),
Ident(ident) | Lifetime(ident) => TokenNode::Term(Term(ident.name)),
Ident(ident, false) | Lifetime(ident) => TokenNode::Term(Term(ident.name)),
Ident(ident, true) => TokenNode::Term(Term(Symbol::intern(&format!("r#{}", ident)))),
Literal(..) | DocComment(..) => TokenNode::Literal(self::Literal(token)),
Interpolated(_) => {
@ -713,8 +714,14 @@ impl TokenTree {
},
TokenNode::Term(symbol) => {
let ident = ast::Ident { name: symbol.0, ctxt: self.span.0.ctxt() };
let sym_str = symbol.0.as_str();
let token =
if symbol.0.as_str().starts_with("'") { Lifetime(ident) } else { Ident(ident) };
if sym_str.starts_with("'") { Lifetime(ident) }
else if sym_str.starts_with("r#") {
let name = Symbol::intern(&sym_str[2..]);
let ident = ast::Ident { name, ctxt: self.span.0.ctxt() };
Ident(ident, true)
} else { Ident(ident, false) };
return TokenTree::Token(self.span.0, token).into();
}
TokenNode::Literal(token) => return TokenTree::Token(self.span.0, token.0).into(),

View File

@ -318,7 +318,10 @@ fn hash_token<'a, 'gcx, W: StableHasherResult>(
opt_name.hash_stable(hcx, hasher);
}
token::Token::Ident(ident) |
token::Token::Ident(ident, is_raw) => {
ident.name.hash_stable(hcx, hasher);
is_raw.hash_stable(hcx, hasher);
}
token::Token::Lifetime(ident) => ident.name.hash_stable(hcx, hasher),
token::Token::Interpolated(_) => {

View File

@ -41,13 +41,13 @@ impl<'a> AstValidator<'a> {
keywords::StaticLifetime.name(),
keywords::Invalid.name()];
if !valid_names.contains(&lifetime.ident.name) &&
token::Ident(lifetime.ident.without_first_quote()).is_reserved_ident() {
token::is_reserved_ident(lifetime.ident.without_first_quote()) {
self.err_handler().span_err(lifetime.span, "lifetimes cannot use keyword names");
}
}
fn check_label(&self, label: Ident, span: Span) {
if token::Ident(label.without_first_quote()).is_reserved_ident() {
if token::is_reserved_ident(label.without_first_quote()) {
self.err_handler().span_err(span, &format!("invalid label name `{}`", label.name));
}
}

View File

@ -3206,7 +3206,7 @@ impl<'a> Resolver<'a> {
// `$crate::a::b`
module = Some(self.resolve_crate_root(ident.node.ctxt, true));
continue
} else if i == 1 && !token::Ident(ident.node).is_path_segment_keyword() {
} else if i == 1 && !token::is_path_segment_keyword(ident.node) {
let prev_name = path[0].node.name;
if prev_name == keywords::Extern.name() ||
prev_name == keywords::CrateRoot.name() &&

View File

@ -268,7 +268,7 @@ impl<'a> base::Resolver for Resolver<'a> {
if k > 0 {
tokens.push(TokenTree::Token(path.span, Token::ModSep).into());
}
let tok = Token::Ident(segment.identifier);
let tok = Token::from_ast_ident(segment.identifier);
tokens.push(TokenTree::Token(path.span, tok).into());
}
}

View File

@ -625,7 +625,7 @@ impl<'a, 'b:'a> ImportResolver<'a, 'b> {
} else {
Some(self.resolve_crate_root(source.ctxt.modern(), false))
}
} else if is_extern && !token::Ident(source).is_path_segment_keyword() {
} else if is_extern && !token::is_path_segment_keyword(source) {
let crate_id =
self.crate_loader.resolve_crate_from_path(source.name, directive.span);
let crate_root =
@ -667,7 +667,7 @@ impl<'a, 'b:'a> ImportResolver<'a, 'b> {
}
PathResult::Failed(span, msg, true) => {
let (mut self_path, mut self_result) = (module_path.clone(), None);
let is_special = |ident| token::Ident(ident).is_path_segment_keyword() &&
let is_special = |ident| token::is_path_segment_keyword(ident) &&
ident.name != keywords::CrateRoot.name();
if !self_path.is_empty() && !is_special(self_path[0].node) &&
!(self_path.len() > 1 && is_special(self_path[1].node)) {

View File

@ -323,12 +323,12 @@ impl<'a> Classifier<'a> {
}
// Keywords are also included in the identifier set.
token::Ident(ident) => {
token::Ident(ident, is_raw) => {
match &*ident.name.as_str() {
"ref" | "mut" => Class::RefKeyWord,
"ref" | "mut" if !is_raw => Class::RefKeyWord,
"self" |"Self" => Class::Self_,
"false" | "true" => Class::Bool,
"self" | "Self" => Class::Self_,
"false" | "true" if !is_raw => Class::Bool,
"Option" | "Result" => Class::PreludeTy,
"Some" | "None" | "Ok" | "Err" => Class::PreludeVal,

View File

@ -112,7 +112,7 @@ impl Path {
// or starts with something like `self`/`super`/`$crate`/etc.
pub fn make_root(&self) -> Option<PathSegment> {
if let Some(ident) = self.segments.get(0).map(|seg| seg.identifier) {
if ::parse::token::Ident(ident).is_path_segment_keyword() &&
if ::parse::token::is_path_segment_keyword(ident) &&
ident.name != keywords::Crate.name() {
return None;
}

View File

@ -1106,7 +1106,8 @@ impl IntType {
impl MetaItem {
fn tokens(&self) -> TokenStream {
let ident = TokenTree::Token(self.span, Token::Ident(Ident::with_empty_ctxt(self.name)));
let ident = TokenTree::Token(self.span,
Token::from_ast_ident(Ident::with_empty_ctxt(self.name)));
TokenStream::concat(vec![ident.into(), self.node.tokens(self.span)])
}
@ -1114,9 +1115,9 @@ impl MetaItem {
where I: Iterator<Item = TokenTree>,
{
let (span, name) = match tokens.next() {
Some(TokenTree::Token(span, Token::Ident(ident))) => (span, ident.name),
Some(TokenTree::Token(span, Token::Ident(ident, _))) => (span, ident.name),
Some(TokenTree::Token(_, Token::Interpolated(ref nt))) => match nt.0 {
token::Nonterminal::NtIdent(ident) => (ident.span, ident.node.name),
token::Nonterminal::NtIdent(ident, _) => (ident.span, ident.node.name),
token::Nonterminal::NtMeta(ref meta) => return Some(meta.clone()),
_ => return None,
},
@ -1269,14 +1270,14 @@ impl LitKind {
"true"
} else {
"false"
}))),
})), false),
}
}
fn from_token(token: Token) -> Option<LitKind> {
match token {
Token::Ident(ident) if ident.name == "true" => Some(LitKind::Bool(true)),
Token::Ident(ident) if ident.name == "false" => Some(LitKind::Bool(false)),
Token::Ident(ident, false) if ident.name == "true" => Some(LitKind::Bool(true)),
Token::Ident(ident, false) if ident.name == "false" => Some(LitKind::Bool(false)),
Token::Interpolated(ref nt) => match nt.0 {
token::NtExpr(ref v) => match v.node {
ExprKind::Lit(ref lit) => Some(lit.node.clone()),

View File

@ -44,7 +44,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt,
token_tree: &[TokenTree])
-> Box<MacResult+'cx> {
let code = match (token_tree.len(), token_tree.get(0)) {
(1, Some(&TokenTree::Token(_, token::Ident(code)))) => code,
(1, Some(&TokenTree::Token(_, token::Ident(code, false)))) => code,
_ => unreachable!()
};
@ -82,10 +82,10 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt,
token_tree.get(1),
token_tree.get(2)
) {
(1, Some(&TokenTree::Token(_, token::Ident(ref code))), None, None) => {
(1, Some(&TokenTree::Token(_, token::Ident(ref code, false))), None, None) => {
(code, None)
},
(3, Some(&TokenTree::Token(_, token::Ident(ref code))),
(3, Some(&TokenTree::Token(_, token::Ident(ref code, false))),
Some(&TokenTree::Token(_, token::Comma)),
Some(&TokenTree::Token(_, token::Literal(token::StrRaw(description, _), None)))) => {
(code, Some(description))
@ -150,9 +150,9 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt,
let (crate_name, name) = match (&token_tree[0], &token_tree[2]) {
(
// Crate name.
&TokenTree::Token(_, token::Ident(ref crate_name)),
&TokenTree::Token(_, token::Ident(ref crate_name, false)),
// DIAGNOSTICS ident.
&TokenTree::Token(_, token::Ident(ref name))
&TokenTree::Token(_, token::Ident(ref name, false))
) => (*&crate_name, name),
_ => unreachable!()
};

View File

@ -229,8 +229,9 @@ impl<F> TTMacroExpander for F
impl Folder for AvoidInterpolatedIdents {
fn fold_tt(&mut self, tt: tokenstream::TokenTree) -> tokenstream::TokenTree {
if let tokenstream::TokenTree::Token(_, token::Interpolated(ref nt)) = tt {
if let token::NtIdent(ident) = nt.0 {
return tokenstream::TokenTree::Token(ident.span, token::Ident(ident.node));
if let token::NtIdent(ident, is_raw) = nt.0 {
return tokenstream::TokenTree::Token(ident.span,
token::Ident(ident.node, is_raw));
}
}
fold::noop_fold_tt(tt, self)

View File

@ -75,7 +75,7 @@ pub mod rt {
impl ToTokens for ast::Ident {
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
vec![TokenTree::Token(DUMMY_SP, token::Ident(*self))]
vec![TokenTree::Token(DUMMY_SP, Token::from_ast_ident(*self))]
}
}
@ -238,7 +238,8 @@ pub mod rt {
if i > 0 {
inner.push(TokenTree::Token(self.span, token::Colon).into());
}
inner.push(TokenTree::Token(self.span, token::Ident(segment.identifier)).into());
inner.push(TokenTree::Token(self.span,
token::Ident(segment.identifier, false)).into());
}
inner.push(self.tokens.clone());
@ -658,10 +659,10 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
token::Literal(token::ByteStr(i), suf) => return mk_lit!("ByteStr", suf, i),
token::Literal(token::ByteStrRaw(i, n), suf) => return mk_lit!("ByteStrRaw", suf, i, n),
token::Ident(ident) => {
token::Ident(ident, is_raw) => {
return cx.expr_call(sp,
mk_token_path(cx, sp, "Ident"),
vec![mk_ident(cx, sp, ident)]);
vec![mk_ident(cx, sp, ident), cx.expr_bool(sp, is_raw)]);
}
token::Lifetime(ident) => {
@ -720,7 +721,7 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, quoted: bool) -> Vec<ast::Stmt> {
match *tt {
TokenTree::Token(sp, token::Ident(ident)) if quoted => {
TokenTree::Token(sp, token::Ident(ident, _)) if quoted => {
// tt.extend($ident.to_tokens(ext_cx))
let e_to_toks =

View File

@ -365,7 +365,7 @@ pub fn parse_failure_msg(tok: Token) -> String {
/// Perform a token equality check, ignoring syntax context (that is, an unhygienic comparison)
fn token_name_eq(t1: &Token, t2: &Token) -> bool {
if let (Some(id1), Some(id2)) = (t1.ident(), t2.ident()) {
id1.name == id2.name
id1.name == id2.name && t1.is_raw_ident() == t2.is_raw_ident()
} else if let (&token::Lifetime(id1), &token::Lifetime(id2)) = (t1, t2) {
id1.name == id2.name
} else {
@ -711,9 +711,10 @@ pub fn parse(
/// The token is an identifier, but not `_`.
/// We prohibit passing `_` to macros expecting `ident` for now.
fn get_macro_ident(token: &Token) -> Option<Ident> {
fn get_macro_ident(token: &Token) -> Option<(Ident, bool)> {
match *token {
token::Ident(ident) if ident.name != keywords::Underscore.name() => Some(ident),
token::Ident(ident, is_raw) if ident.name != keywords::Underscore.name() =>
Some((ident, is_raw)),
_ => None,
}
}
@ -737,7 +738,7 @@ fn may_begin_with(name: &str, token: &Token) -> bool {
"ident" => get_macro_ident(token).is_some(),
"vis" => match *token {
// The follow-set of :vis + "priv" keyword + interpolated
Token::Comma | Token::Ident(_) | Token::Interpolated(_) => true,
Token::Comma | Token::Ident(..) | Token::Interpolated(_) => true,
_ => token.can_begin_type(),
},
"block" => match *token {
@ -746,7 +747,7 @@ fn may_begin_with(name: &str, token: &Token) -> bool {
token::NtItem(_)
| token::NtPat(_)
| token::NtTy(_)
| token::NtIdent(_)
| token::NtIdent(..)
| token::NtMeta(_)
| token::NtPath(_)
| token::NtVis(_) => false, // none of these may start with '{'.
@ -755,7 +756,7 @@ fn may_begin_with(name: &str, token: &Token) -> bool {
_ => false,
},
"path" | "meta" => match *token {
Token::ModSep | Token::Ident(_) => true,
Token::ModSep | Token::Ident(..) => true,
Token::Interpolated(ref nt) => match nt.0 {
token::NtPath(_) | token::NtMeta(_) => true,
_ => may_be_ident(&nt.0),
@ -763,7 +764,7 @@ fn may_begin_with(name: &str, token: &Token) -> bool {
_ => false,
},
"pat" => match *token {
Token::Ident(_) | // box, ref, mut, and other identifiers (can stricten)
Token::Ident(..) | // box, ref, mut, and other identifiers (can stricten)
Token::OpenDelim(token::Paren) | // tuple pattern
Token::OpenDelim(token::Bracket) | // slice pattern
Token::BinOp(token::And) | // reference
@ -823,9 +824,9 @@ fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
"expr" => token::NtExpr(panictry!(p.parse_expr())),
"ty" => token::NtTy(panictry!(p.parse_ty())),
// this could be handled like a token, since it is one
"ident" => if let Some(ident) = get_macro_ident(&p.token) {
"ident" => if let Some((ident, is_raw)) = get_macro_ident(&p.token) {
p.bump();
token::NtIdent(respan(p.prev_span, ident))
token::NtIdent(respan(p.prev_span, ident), is_raw)
} else {
let token_str = pprust::token_to_string(&p.token);
p.fatal(&format!("expected ident, found {}", &token_str)).emit();

View File

@ -831,7 +831,7 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> Result<bool, (String, &'
"pat" => match *tok {
TokenTree::Token(_, ref tok) => match *tok {
FatArrow | Comma | Eq | BinOp(token::Or) => Ok(true),
Ident(i) if i.name == "if" || i.name == "in" => Ok(true),
Ident(i, false) if i.name == "if" || i.name == "in" => Ok(true),
_ => Ok(false)
},
_ => Ok(false),
@ -840,7 +840,7 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> Result<bool, (String, &'
TokenTree::Token(_, ref tok) => match *tok {
OpenDelim(token::DelimToken::Brace) | OpenDelim(token::DelimToken::Bracket) |
Comma | FatArrow | Colon | Eq | Gt | Semi | BinOp(token::Or) => Ok(true),
Ident(i) if i.name == "as" || i.name == "where" => Ok(true),
Ident(i, false) if i.name == "as" || i.name == "where" => Ok(true),
_ => Ok(false)
},
TokenTree::MetaVarDecl(_, _, frag) if frag.name == "block" => Ok(true),
@ -860,7 +860,7 @@ fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> Result<bool, (String, &'
match *tok {
TokenTree::Token(_, ref tok) => match *tok {
Comma => Ok(true),
Ident(i) if i.name != "priv" => Ok(true),
Ident(i, is_raw) if is_raw || i.name != "priv" => Ok(true),
ref tok => Ok(tok.can_begin_type())
},
TokenTree::MetaVarDecl(_, _, frag) if frag.name == "ident"

View File

@ -296,7 +296,7 @@ where
name: keywords::DollarCrate.name(),
..ident
};
TokenTree::Token(span, token::Ident(ident))
TokenTree::Token(span, token::Ident(ident, false))
} else {
TokenTree::MetaVar(span, ident)
}

View File

@ -169,7 +169,7 @@ pub fn transcribe(cx: &ExtCtxt,
Ident { ctxt: ident.ctxt.apply_mark(cx.current_expansion.mark), ..ident };
sp = sp.with_ctxt(sp.ctxt().apply_mark(cx.current_expansion.mark));
result.push(TokenTree::Token(sp, token::Dollar).into());
result.push(TokenTree::Token(sp, token::Ident(ident)).into());
result.push(TokenTree::Token(sp, token::Ident(ident, false)).into());
}
}
quoted::TokenTree::Delimited(mut span, delimited) => {

View File

@ -578,7 +578,7 @@ pub fn noop_fold_tts<T: Folder>(tts: TokenStream, fld: &mut T) -> TokenStream {
// apply ident folder if it's an ident, apply other folds to interpolated nodes
pub fn noop_fold_token<T: Folder>(t: token::Token, fld: &mut T) -> token::Token {
match t {
token::Ident(id) => token::Ident(fld.fold_ident(id)),
token::Ident(id, is_raw) => token::Ident(fld.fold_ident(id), is_raw),
token::Lifetime(id) => token::Lifetime(fld.fold_ident(id)),
token::Interpolated(nt) => {
let nt = match Lrc::try_unwrap(nt) {
@ -630,7 +630,8 @@ pub fn noop_fold_interpolated<T: Folder>(nt: token::Nonterminal, fld: &mut T)
token::NtPat(pat) => token::NtPat(fld.fold_pat(pat)),
token::NtExpr(expr) => token::NtExpr(fld.fold_expr(expr)),
token::NtTy(ty) => token::NtTy(fld.fold_ty(ty)),
token::NtIdent(id) => token::NtIdent(Spanned::<Ident>{node: fld.fold_ident(id.node), ..id}),
token::NtIdent(id, is_raw) =>
token::NtIdent(Spanned::<Ident>{node: fld.fold_ident(id.node), ..id}, is_raw),
token::NtMeta(meta) => token::NtMeta(fld.fold_meta_item(meta)),
token::NtPath(path) => token::NtPath(fld.fold_path(path)),
token::NtTT(tt) => token::NtTT(fld.fold_tt(tt)),

View File

@ -14,7 +14,7 @@ use codemap::{CodeMap, FilePathMapping};
use errors::{FatalError, DiagnosticBuilder};
use parse::{token, ParseSess};
use str::char_at;
use symbol::Symbol;
use symbol::{Symbol, keywords};
use std_unicode::property::Pattern_White_Space;
use std::borrow::Cow;
@ -1115,26 +1115,49 @@ impl<'a> StringReader<'a> {
/// token, and updates the interner
fn next_token_inner(&mut self) -> Result<token::Token, ()> {
let c = self.ch;
if ident_start(c) &&
match (c.unwrap(), self.nextch(), self.nextnextch()) {
// Note: r as in r" or r#" is part of a raw string literal,
// b as in b' is part of a byte literal.
// They are not identifiers, and are handled further down.
('r', Some('"'), _) |
('r', Some('#'), _) |
('b', Some('"'), _) |
('b', Some('\''), _) |
('b', Some('r'), Some('"')) |
('b', Some('r'), Some('#')) => false,
_ => true,
} {
let start = self.pos;
while ident_continue(self.ch) {
self.bump();
}
// FIXME: perform NFKC normalization here. (Issue #2253)
return Ok(self.with_str_from(start, |string| token::Ident(self.mk_ident(string))));
if ident_start(c) {
let (is_ident_start, is_raw_ident) =
match (c.unwrap(), self.nextch(), self.nextnextch()) {
// r# followed by an identifier starter is a raw identifier.
// This is an exception to the r# case below.
('r', Some('#'), x) if ident_start(x) => (true, true),
// r as in r" or r#" is part of a raw string literal.
// b as in b' is part of a byte literal.
// They are not identifiers, and are handled further down.
('r', Some('"'), _) |
('r', Some('#'), _) |
('b', Some('"'), _) |
('b', Some('\''), _) |
('b', Some('r'), Some('"')) |
('b', Some('r'), Some('#')) => (false, false),
_ => (true, false),
};
if is_ident_start {
let raw_start = self.pos;
if is_raw_ident {
// Consume the 'r#' characters.
self.bump();
self.bump();
}
let start = self.pos;
while ident_continue(self.ch) {
self.bump();
}
return Ok(self.with_str_from(start, |string| {
// FIXME: perform NFKC normalization here. (Issue #2253)
let ident = self.mk_ident(string);
if is_raw_ident && (token::is_path_segment_keyword(ident) ||
ident.name == keywords::Underscore.name()) {
self.fatal_span_(raw_start, self.pos,
&format!("`r#{}` is not currently supported.", ident.name)
).raise();
}
token::Ident(ident, is_raw_ident)
}));
}
}
if is_dec_digit(c) {
@ -1801,7 +1824,7 @@ mod tests {
assert_eq!(string_reader.next_token().tok, token::Whitespace);
let tok1 = string_reader.next_token();
let tok2 = TokenAndSpan {
tok: token::Ident(id),
tok: token::Ident(id, false),
sp: Span::new(BytePos(21), BytePos(23), NO_EXPANSION),
};
assert_eq!(tok1, tok2);
@ -1811,7 +1834,7 @@ mod tests {
// read another token:
let tok3 = string_reader.next_token();
let tok4 = TokenAndSpan {
tok: token::Ident(Ident::from_str("main")),
tok: mk_ident("main"),
sp: Span::new(BytePos(24), BytePos(28), NO_EXPANSION),
};
assert_eq!(tok3, tok4);
@ -1830,7 +1853,7 @@ mod tests {
// make the identifier by looking up the string in the interner
fn mk_ident(id: &str) -> token::Token {
token::Ident(Ident::from_str(id))
token::Token::from_ast_ident(Ident::from_str(id))
}
#[test]

View File

@ -741,9 +741,9 @@ mod tests {
match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) {
(
4,
Some(&TokenTree::Token(_, token::Ident(name_macro_rules))),
Some(&TokenTree::Token(_, token::Ident(name_macro_rules, false))),
Some(&TokenTree::Token(_, token::Not)),
Some(&TokenTree::Token(_, token::Ident(name_zip))),
Some(&TokenTree::Token(_, token::Ident(name_zip, false))),
Some(&TokenTree::Delimited(_, ref macro_delimed)),
)
if name_macro_rules.name == "macro_rules"
@ -762,7 +762,7 @@ mod tests {
(
2,
Some(&TokenTree::Token(_, token::Dollar)),
Some(&TokenTree::Token(_, token::Ident(ident))),
Some(&TokenTree::Token(_, token::Ident(ident, false))),
)
if first_delimed.delim == token::Paren && ident.name == "a" => {},
_ => panic!("value 3: {:?}", *first_delimed),
@ -772,7 +772,7 @@ mod tests {
(
2,
Some(&TokenTree::Token(_, token::Dollar)),
Some(&TokenTree::Token(_, token::Ident(ident))),
Some(&TokenTree::Token(_, token::Ident(ident, false))),
)
if second_delimed.delim == token::Paren
&& ident.name == "a" => {},
@ -793,17 +793,18 @@ mod tests {
let tts = string_to_stream("fn a (b : i32) { b; }".to_string());
let expected = TokenStream::concat(vec![
TokenTree::Token(sp(0, 2), token::Ident(Ident::from_str("fn"))).into(),
TokenTree::Token(sp(3, 4), token::Ident(Ident::from_str("a"))).into(),
TokenTree::Token(sp(0, 2), token::Ident(Ident::from_str("fn"), false)).into(),
TokenTree::Token(sp(3, 4), token::Ident(Ident::from_str("a"), false)).into(),
TokenTree::Delimited(
sp(5, 14),
tokenstream::Delimited {
delim: token::DelimToken::Paren,
tts: TokenStream::concat(vec![
TokenTree::Token(sp(6, 7), token::Ident(Ident::from_str("b"))).into(),
TokenTree::Token(sp(6, 7),
token::Ident(Ident::from_str("b"), false)).into(),
TokenTree::Token(sp(8, 9), token::Colon).into(),
TokenTree::Token(sp(10, 13),
token::Ident(Ident::from_str("i32"))).into(),
token::Ident(Ident::from_str("i32"), false)).into(),
]).into(),
}).into(),
TokenTree::Delimited(
@ -811,7 +812,8 @@ mod tests {
tokenstream::Delimited {
delim: token::DelimToken::Brace,
tts: TokenStream::concat(vec![
TokenTree::Token(sp(17, 18), token::Ident(Ident::from_str("b"))).into(),
TokenTree::Token(sp(17, 18),
token::Ident(Ident::from_str("b"), false)).into(),
TokenTree::Token(sp(18, 19), token::Semi).into(),
]).into(),
}).into()

View File

@ -358,7 +358,7 @@ impl TokenCursor {
let body = TokenTree::Delimited(sp, Delimited {
delim: token::Bracket,
tts: [TokenTree::Token(sp, token::Ident(ast::Ident::from_str("doc"))),
tts: [TokenTree::Token(sp, token::Ident(ast::Ident::from_str("doc"), false)),
TokenTree::Token(sp, token::Eq),
TokenTree::Token(sp, token::Literal(
token::StrRaw(Symbol::intern(&stripped), num_of_hashes), None))]
@ -784,7 +784,7 @@ impl<'a> Parser<'a> {
fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, ast::Ident> {
match self.token {
token::Ident(i) => {
token::Ident(i, _) => {
if self.token.is_reserved_ident() {
let mut err = self.expected_ident_found();
if recover {
@ -1925,7 +1925,7 @@ impl<'a> Parser<'a> {
pub fn parse_path_segment_ident(&mut self) -> PResult<'a, ast::Ident> {
match self.token {
token::Ident(sid) if self.token.is_path_segment_keyword() => {
token::Ident(sid, _) if self.token.is_path_segment_keyword() => {
self.bump();
Ok(sid)
}
@ -2740,11 +2740,14 @@ impl<'a> Parser<'a> {
}
pub fn process_potential_macro_variable(&mut self) {
let ident = match self.token {
let (ident, is_raw) = match self.token {
token::Dollar if self.span.ctxt() != syntax_pos::hygiene::SyntaxContext::empty() &&
self.look_ahead(1, |t| t.is_ident()) => {
self.bump();
let name = match self.token { token::Ident(ident) => ident, _ => unreachable!() };
let name = match self.token {
token::Ident(ident, _) => ident,
_ => unreachable!()
};
let mut err = self.fatal(&format!("unknown macro variable `{}`", name));
err.span_label(self.span, "unknown macro variable");
err.emit();
@ -2753,13 +2756,13 @@ impl<'a> Parser<'a> {
token::Interpolated(ref nt) => {
self.meta_var_span = Some(self.span);
match nt.0 {
token::NtIdent(ident) => ident,
token::NtIdent(ident, is_raw) => (ident, is_raw),
_ => return,
}
}
_ => return,
};
self.token = token::Ident(ident.node);
self.token = token::Ident(ident.node, is_raw);
self.span = ident.span;
}
@ -4245,7 +4248,7 @@ impl<'a> Parser<'a> {
-> PResult<'a, Option<P<Item>>> {
let token_lo = self.span;
let (ident, def) = match self.token {
token::Ident(ident) if ident.name == keywords::Macro.name() => {
token::Ident(ident, false) if ident.name == keywords::Macro.name() => {
self.bump();
let ident = self.parse_ident()?;
let tokens = if self.check(&token::OpenDelim(token::Brace)) {
@ -4273,7 +4276,7 @@ impl<'a> Parser<'a> {
(ident, ast::MacroDef { tokens: tokens.into(), legacy: false })
}
token::Ident(ident) if ident.name == "macro_rules" &&
token::Ident(ident, _) if ident.name == "macro_rules" &&
self.look_ahead(1, |t| *t == token::Not) => {
let prev_span = self.prev_span;
self.complain_if_pub_macro(&vis.node, prev_span);
@ -5078,7 +5081,9 @@ impl<'a> Parser<'a> {
fn parse_self_arg(&mut self) -> PResult<'a, Option<Arg>> {
let expect_ident = |this: &mut Self| match this.token {
// Preserve hygienic context.
token::Ident(ident) => { let sp = this.span; this.bump(); codemap::respan(sp, ident) }
token::Ident(ident, _) => {
let sp = this.span; this.bump(); codemap::respan(sp, ident)
}
_ => unreachable!()
};
let isolated_self = |this: &mut Self, n| {
@ -5375,7 +5380,7 @@ impl<'a> Parser<'a> {
VisibilityKind::Inherited => Ok(()),
_ => {
let is_macro_rules: bool = match self.token {
token::Ident(sid) => sid.name == Symbol::intern("macro_rules"),
token::Ident(sid, _) => sid.name == Symbol::intern("macro_rules"),
_ => false,
};
if is_macro_rules {
@ -7016,7 +7021,7 @@ impl<'a> Parser<'a> {
fn parse_rename(&mut self) -> PResult<'a, Option<Ident>> {
if self.eat_keyword(keywords::As) {
match self.token {
token::Ident(ident) if ident.name == keywords::Underscore.name() => {
token::Ident(ident, false) if ident.name == keywords::Underscore.name() => {
self.bump(); // `_`
Ok(Some(Ident { name: ident.name.gensymed(), ..ident }))
}

View File

@ -91,8 +91,8 @@ impl Lit {
}
}
fn ident_can_begin_expr(ident: ast::Ident) -> bool {
let ident_token: Token = Ident(ident);
fn ident_can_begin_expr(ident: ast::Ident, is_raw: bool) -> bool {
let ident_token: Token = Ident(ident, is_raw);
!ident_token.is_reserved_ident() ||
ident_token.is_path_segment_keyword() ||
@ -116,8 +116,8 @@ fn ident_can_begin_expr(ident: ast::Ident) -> bool {
].contains(&ident.name)
}
fn ident_can_begin_type(ident: ast::Ident) -> bool {
let ident_token: Token = Ident(ident);
fn ident_can_begin_type(ident: ast::Ident, is_raw: bool) -> bool {
let ident_token: Token = Ident(ident, is_raw);
!ident_token.is_reserved_ident() ||
ident_token.is_path_segment_keyword() ||
@ -132,6 +132,37 @@ fn ident_can_begin_type(ident: ast::Ident) -> bool {
].contains(&ident.name)
}
/// Returns `true` if the identifier is one of the keywords permitted as a
/// path segment: `super`, `self`, `Self`, `extern`, `crate`, the crate root,
/// or `$crate`.
pub fn is_path_segment_keyword(id: ast::Ident) -> bool {
    [
        keywords::Super.name(),
        keywords::SelfValue.name(),
        keywords::SelfType.name(),
        keywords::Extern.name(),
        keywords::Crate.name(),
        keywords::CrateRoot.name(),
        keywords::DollarCrate.name(),
    ].contains(&id.name)
}
/// Returns `true` for reserved identifiers used internally for elided lifetimes,
/// unnamed method parameters, crate root module, error recovery etc.
/// NOTE(review): compares raw `Name` indices, so this assumes the special
/// identifiers are interned before (i.e. with indices `<=`) `_` — confirm
/// against the `keywords` declaration order.
pub fn is_special_ident(id: ast::Ident) -> bool {
    id.name <= keywords::Underscore.name()
}
/// Returns `true` if the token is a keyword used in the language.
/// NOTE(review): checks the interned index range `as ..= while`, which
/// assumes all in-use keywords are interned as one contiguous block —
/// confirm against the `keywords` declaration order.
pub fn is_used_keyword(id: ast::Ident) -> bool {
    id.name >= keywords::As.name() && id.name <= keywords::While.name()
}
/// Returns `true` if the token is a keyword reserved for possible future use.
/// NOTE(review): checks the interned index range `abstract ..= yield`, which
/// assumes the reserved keywords form one contiguous interned block —
/// confirm against the `keywords` declaration order.
pub fn is_unused_keyword(id: ast::Ident) -> bool {
    id.name >= keywords::Abstract.name() && id.name <= keywords::Yield.name()
}
/// Returns `true` if the token is either a special identifier or a keyword
/// (in use or reserved for future use). These are exactly the identifiers
/// that cannot be used bare and so must be spelled `r#ident` when recovered
/// via `Token::from_ast_ident`.
pub fn is_reserved_ident(id: ast::Ident) -> bool {
    is_special_ident(id) || is_used_keyword(id) || is_unused_keyword(id)
}
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug)]
pub enum Token {
/* Expression-operator symbols. */
@ -175,7 +206,7 @@ pub enum Token {
Literal(Lit, Option<ast::Name>),
/* Name components */
Ident(ast::Ident),
Ident(ast::Ident, /* is_raw */ bool),
Lifetime(ast::Ident),
// The `LazyTokenStream` is a pure function of the `Nonterminal`,
@ -203,6 +234,11 @@ impl Token {
Token::Interpolated(Lrc::new((nt, LazyTokenStream::new())))
}
/// Recovers a `Token` from an `ast::Ident`. This creates a raw identifier
/// if necessary — i.e. when the identifier's spelling is a reserved word,
/// the resulting token is marked raw so it round-trips as `r#ident`.
pub fn from_ast_ident(ident: ast::Ident) -> Token {
    let needs_raw_form = is_reserved_ident(ident);
    Ident(ident, needs_raw_form)
}
/// Returns `true` if the token starts with '>'.
pub fn is_like_gt(&self) -> bool {
match *self {
@ -214,7 +250,8 @@ impl Token {
/// Returns `true` if the token can appear at the start of an expression.
pub fn can_begin_expr(&self) -> bool {
match *self {
Ident(ident) => ident_can_begin_expr(ident), // value name or keyword
Ident(ident, is_raw) =>
ident_can_begin_expr(ident, is_raw), // value name or keyword
OpenDelim(..) | // tuple, array or block
Literal(..) | // literal
Not | // operator not
@ -239,7 +276,8 @@ impl Token {
/// Returns `true` if the token can appear at the start of a type.
pub fn can_begin_type(&self) -> bool {
match *self {
Ident(ident) => ident_can_begin_type(ident), // type name or keyword
Ident(ident, is_raw) =>
ident_can_begin_type(ident, is_raw), // type name or keyword
OpenDelim(Paren) | // tuple
OpenDelim(Bracket) | // array
Not | // never
@ -272,17 +310,32 @@ impl Token {
}
}
pub fn ident(&self) -> Option<ast::Ident> {
/// Extracts the identifier carried by this token, looking through
/// interpolated `NtIdent` nonterminals. Raw identifiers are only
/// surfaced when `allow_raw` is set; otherwise they yield `None`.
fn ident_common(&self, allow_raw: bool) -> Option<ast::Ident> {
    match *self {
        Ident(ident, is_raw) => {
            if is_raw && !allow_raw { None } else { Some(ident) }
        }
        Interpolated(ref nt) => match nt.0 {
            NtIdent(ident, is_raw) => {
                if is_raw && !allow_raw { None } else { Some(ident.node) }
            }
            _ => None,
        },
        _ => None,
    }
}
/// Returns the token's identifier, but yields `None` for raw identifiers
/// (`r#ident`) — used by the keyword predicates below so that raw
/// identifiers are never treated as keywords.
pub fn nonraw_ident(&self) -> Option<ast::Ident> {
    self.ident_common(false)
}
/// Returns `true` if the token is a raw identifier (`r#ident`).
/// Note: an interpolated `NtIdent` is not inspected here.
pub fn is_raw_ident(&self) -> bool {
    if let Ident(_, true) = *self { true } else { false }
}
/// Returns the token's identifier (raw or not), looking through
/// interpolated `NtIdent` nonterminals; `None` for non-identifier tokens.
pub fn ident(&self) -> Option<ast::Ident> {
    self.ident_common(true)
}
/// Returns `true` if the token is an identifier.
pub fn is_ident(&self) -> bool {
self.ident().is_some()
@ -351,18 +404,12 @@ impl Token {
/// Returns `true` if the token is a given keyword, `kw`.
pub fn is_keyword(&self, kw: keywords::Keyword) -> bool {
self.ident().map(|ident| ident.name == kw.name()).unwrap_or(false)
self.nonraw_ident().map(|ident| ident.name == kw.name()).unwrap_or(false)
}
pub fn is_path_segment_keyword(&self) -> bool {
match self.ident() {
Some(id) => id.name == keywords::Super.name() ||
id.name == keywords::SelfValue.name() ||
id.name == keywords::SelfType.name() ||
id.name == keywords::Extern.name() ||
id.name == keywords::Crate.name() ||
id.name == keywords::CrateRoot.name() ||
id.name == keywords::DollarCrate.name(),
match self.nonraw_ident() {
Some(id) => is_path_segment_keyword(id),
None => false,
}
}
@ -370,24 +417,24 @@ impl Token {
// Returns true for reserved identifiers used internally for elided lifetimes,
// unnamed method parameters, crate root module, error recovery etc.
pub fn is_special_ident(&self) -> bool {
match self.ident() {
Some(id) => id.name <= keywords::Underscore.name(),
match self.nonraw_ident() {
Some(id) => is_special_ident(id),
_ => false,
}
}
/// Returns `true` if the token is a keyword used in the language.
pub fn is_used_keyword(&self) -> bool {
match self.ident() {
Some(id) => id.name >= keywords::As.name() && id.name <= keywords::While.name(),
match self.nonraw_ident() {
Some(id) => is_used_keyword(id),
_ => false,
}
}
/// Returns `true` if the token is a keyword reserved for possible future use.
pub fn is_unused_keyword(&self) -> bool {
match self.ident() {
Some(id) => id.name >= keywords::Abstract.name() && id.name <= keywords::Yield.name(),
match self.nonraw_ident() {
Some(id) => is_unused_keyword(id),
_ => false,
}
}
@ -460,7 +507,10 @@ impl Token {
/// Returns `true` if the token is either a special identifier or a keyword.
pub fn is_reserved_ident(&self) -> bool {
self.is_special_ident() || self.is_used_keyword() || self.is_unused_keyword()
match self.nonraw_ident() {
Some(id) => is_reserved_ident(id),
_ => false,
}
}
pub fn interpolated_to_tokenstream(&self, sess: &ParseSess, span: Span)
@ -496,8 +546,8 @@ impl Token {
Nonterminal::NtImplItem(ref item) => {
tokens = prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span);
}
Nonterminal::NtIdent(ident) => {
let token = Token::Ident(ident.node);
Nonterminal::NtIdent(ident, is_raw) => {
let token = Token::Ident(ident.node, is_raw);
tokens = Some(TokenTree::Token(ident.span, token).into());
}
Nonterminal::NtLifetime(lifetime) => {
@ -529,7 +579,7 @@ pub enum Nonterminal {
NtPat(P<ast::Pat>),
NtExpr(P<ast::Expr>),
NtTy(P<ast::Ty>),
NtIdent(ast::SpannedIdent),
NtIdent(ast::SpannedIdent, /* is_raw */ bool),
/// Stuff inside brackets for attributes
NtMeta(ast::MetaItem),
NtPath(ast::Path),

View File

@ -250,7 +250,8 @@ pub fn token_to_string(tok: &Token) -> String {
}
/* Name components */
token::Ident(s) => s.to_string(),
token::Ident(s, false) => s.to_string(),
token::Ident(s, true) => format!("r#{}", s),
token::Lifetime(s) => s.to_string(),
/* Other */
@ -261,24 +262,25 @@ pub fn token_to_string(tok: &Token) -> String {
token::Shebang(s) => format!("/* shebang: {}*/", s),
token::Interpolated(ref nt) => match nt.0 {
token::NtExpr(ref e) => expr_to_string(e),
token::NtMeta(ref e) => meta_item_to_string(e),
token::NtTy(ref e) => ty_to_string(e),
token::NtPath(ref e) => path_to_string(e),
token::NtItem(ref e) => item_to_string(e),
token::NtBlock(ref e) => block_to_string(e),
token::NtStmt(ref e) => stmt_to_string(e),
token::NtPat(ref e) => pat_to_string(e),
token::NtIdent(ref e) => ident_to_string(e.node),
token::NtTT(ref tree) => tt_to_string(tree.clone()),
token::NtArm(ref e) => arm_to_string(e),
token::NtImplItem(ref e) => impl_item_to_string(e),
token::NtTraitItem(ref e) => trait_item_to_string(e),
token::NtGenerics(ref e) => generic_params_to_string(&e.params),
token::NtWhereClause(ref e) => where_clause_to_string(e),
token::NtArg(ref e) => arg_to_string(e),
token::NtVis(ref e) => vis_to_string(e),
token::NtLifetime(ref e) => lifetime_to_string(e),
token::NtExpr(ref e) => expr_to_string(e),
token::NtMeta(ref e) => meta_item_to_string(e),
token::NtTy(ref e) => ty_to_string(e),
token::NtPath(ref e) => path_to_string(e),
token::NtItem(ref e) => item_to_string(e),
token::NtBlock(ref e) => block_to_string(e),
token::NtStmt(ref e) => stmt_to_string(e),
token::NtPat(ref e) => pat_to_string(e),
token::NtIdent(ref e, false) => ident_to_string(e.node),
token::NtIdent(ref e, true) => format!("r#{}", ident_to_string(e.node)),
token::NtTT(ref tree) => tt_to_string(tree.clone()),
token::NtArm(ref e) => arm_to_string(e),
token::NtImplItem(ref e) => impl_item_to_string(e),
token::NtTraitItem(ref e) => trait_item_to_string(e),
token::NtGenerics(ref e) => generic_params_to_string(&e.params),
token::NtWhereClause(ref e) => where_clause_to_string(e),
token::NtArg(ref e) => arg_to_string(e),
token::NtVis(ref e) => vis_to_string(e),
token::NtLifetime(ref e) => lifetime_to_string(e),
}
}
}

View File

@ -684,7 +684,7 @@ mod tests {
with_globals(|| {
let test0: TokenStream = Vec::<TokenTree>::new().into_iter().collect();
let test1: TokenStream =
TokenTree::Token(sp(0, 1), Token::Ident(Ident::from_str("a"))).into();
TokenTree::Token(sp(0, 1), Token::Ident(Ident::from_str("a"), false)).into();
let test2 = string_to_ts("foo(bar::baz)");
assert_eq!(test0.is_empty(), true);

View File

@ -44,7 +44,8 @@ pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt,
}
} else {
match *e {
TokenTree::Token(_, token::Ident(ident)) => res_str.push_str(&ident.name.as_str()),
TokenTree::Token(_, token::Ident(ident, _)) =>
res_str.push_str(&ident.name.as_str()),
_ => {
cx.span_err(sp, "concat_idents! requires ident args.");
return DummyResult::expr(sp);

View File

@ -149,7 +149,7 @@ fn parse_args(ecx: &mut ExtCtxt,
if named || (p.token.is_ident() && p.look_ahead(1, |t| *t == token::Eq)) {
named = true;
let ident = match p.token {
token::Ident(i) => {
token::Ident(i, _) => {
p.bump();
i
}

View File

@ -11,5 +11,5 @@
// compile-flags: -Z parse-only
// With raw identifiers in the language, `r#x` now lexes as the raw
// identifier `x` rather than the start of a raw string, so the test
// uses `~` (which can never begin an identifier) as the bad delimiter.
static s: &'static str =
r#~"#"~# //~ ERROR found invalid character; only `#` is allowed in raw string delimitation
;

View File

@ -49,7 +49,7 @@ fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
}
let text = match args[0] {
TokenTree::Token(_, token::Ident(s)) => s.to_string(),
TokenTree::Token(_, token::Ident(s, _)) => s.to_string(),
_ => {
cx.span_err(sp, "argument should be a single identifier");
return DummyResult::any(sp);

View File

@ -0,0 +1,24 @@
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Checks that raw identifiers (RFC 2151, `r#ident`) are accepted in
// attribute paths and attribute arguments, and behave exactly like
// their non-raw spellings.
use std::mem;
// `r#repr(r#C, r#packed)` must be equivalent to `repr(C, packed)`.
#[r#repr(r#C, r#packed)]
struct Test {
a: bool, b: u64
}
// `r#derive(r#Debug)` must be equivalent to `derive(Debug)`.
#[r#derive(r#Debug)]
struct Test2(u32);
pub fn main() {
// repr(C, packed) removes padding: 1 (bool) + 8 (u64) = 9 bytes.
assert_eq!(mem::size_of::<Test>(), 9);
// The derived Debug output uses the plain type name, not `r#Test2`.
assert_eq!("Test2(123)", format!("{:?}", Test2(123)));
}

View File

@ -0,0 +1,29 @@
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Basic raw-identifier (RFC 2151) behavior: `r#ident` denotes the
// same identifier as `ident`, and keywords become usable as names.
// Keywords `fn` and `match` used as function and parameter names.
fn r#fn(r#match: u32) -> u32 {
r#match
}
pub fn main() {
// A keyword used as a local binding.
let r#struct = 1;
assert_eq!(1, r#struct);
// A non-keyword bound plainly can be referenced in raw form...
let foo = 2;
assert_eq!(2, r#foo);
// ...and one bound in raw form can be referenced plainly.
let r#bar = 3;
assert_eq!(3, bar);
assert_eq!(4, r#fn(4));
// Even the literal keywords `true`/`false` work as raw names.
let r#true = false;
assert_eq!(r#true, false);
}

View File

@ -0,0 +1,41 @@
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Raw identifiers (RFC 2151) in item positions: struct fields, type
// parameters, trait/method names, constructors, and patterns.
#[derive(Debug, PartialEq, Eq)]
struct IntWrapper(u32);
// The keyword `struct` used as a field name via `r#struct`.
#[derive(Debug, Ord, PartialOrd, PartialEq, Eq, Hash, Copy, Clone, Default)]
struct HasKeywordField {
r#struct: u32,
}
// `r#T` declares the same type parameter that the body names as `T`.
struct Generic<r#T>(T);
// The keyword `trait` used as a method name.
trait Trait {
fn r#trait(&self) -> u32;
}
impl Trait for Generic<u32> {
fn r#trait(&self) -> u32 {
self.0
}
}
pub fn main() {
// `r#IntWrapper` is the same constructor as `IntWrapper`.
assert_eq!(IntWrapper(1), r#IntWrapper(1));
match IntWrapper(2) {
// Raw identifiers in tuple-struct patterns and bindings.
r#IntWrapper(r#struct) => assert_eq!(2, r#struct),
}
// Derived Debug prints the field's plain name, `struct`.
assert_eq!("HasKeywordField { struct: 3 }", format!("{:?}", HasKeywordField { r#struct: 3 }));
assert_eq!(4, Generic(4).0);
assert_eq!(5, Generic(5).r#trait());
}

View File

@ -0,0 +1,47 @@
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(decl_macro)]
// Raw identifiers (RFC 2151) in macro definitions and invocations:
// macro names, matcher metavariables, and matcher tokens.
// `r#macro_rules!` invokes plain `macro_rules`; the macro defined
// here is named with the keyword `struct`, written as `r#struct`.
r#macro_rules! r#struct {
($r#struct:expr) => { $r#struct }
}
macro_rules! old_macro {
($a:expr) => {$a}
}
// A 2.0-style `macro` whose name and metavariable are raw keywords.
macro r#decl_macro($r#fn:expr) {
$r#fn
}
macro passthrough($id:ident) {
$id
}
// `a` and `r#a` are distinct tokens in a matcher: each arm only
// matches its own spelling (checked by the 6/7 asserts below).
macro_rules! test_pat_match {
(a) => { 6 };
(r#a) => { 7 };
}
pub fn main() {
// Raw invocation of a built-in macro, with a keyword-named argument.
r#println!("{struct}", r#struct = 1);
assert_eq!(2, r#struct!(2));
assert_eq!(3, r#old_macro!(3));
assert_eq!(4, decl_macro!(4));
let r#match = 5;
assert_eq!(5, passthrough!(r#match));
// stringify! preserves the `r#` prefix of a raw token.
assert_eq!("r#struct", stringify!(r#struct));
assert_eq!(6, test_pat_match!(a));
assert_eq!(7, test_pat_match!(r#a));
}

View File

@ -0,0 +1,25 @@
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// compile-flags: -Z parse-only
#![feature(dyn_trait)]
fn test_if() { // `r#if` is an ordinary identifier, so `true` is unexpected
r#if true { } //~ ERROR found `true`
}
fn test_struct() { // `r#struct` does not start an item declaration
r#struct Test; //~ ERROR found `Test`
}
fn test_union() { // likewise for `r#union`
r#union Test; //~ ERROR found `Test`
}

View File

@ -0,0 +1,20 @@
error: expected one of `!`, `.`, `::`, `;`, `?`, `{`, `}`, or an operator, found `true`
--> $DIR/raw-literal-keywords.rs:16:10
|
LL | r#if true { } //~ ERROR found `true`
| ^^^^ expected one of 8 possible tokens here
error: expected one of `!`, `.`, `::`, `;`, `?`, `{`, `}`, or an operator, found `Test`
--> $DIR/raw-literal-keywords.rs:20:14
|
LL | r#struct Test; //~ ERROR found `Test`
| ^^^^ expected one of 8 possible tokens here
error: expected one of `!`, `.`, `::`, `;`, `?`, `{`, `}`, or an operator, found `Test`
--> $DIR/raw-literal-keywords.rs:24:13
|
LL | r#union Test; //~ ERROR found `Test`
| ^^^^ expected one of 8 possible tokens here
error: aborting due to 3 previous errors

View File

@ -0,0 +1,15 @@
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// compile-flags: -Z parse-only
fn self_test(r#self: u32) { // `self` may not be spelled as a raw identifier
//~^ ERROR `r#self` is not currently supported.
}

View File

@ -0,0 +1,8 @@
error: `r#self` is not currently supported.
--> $DIR/raw-literal-self.rs:13:14
|
LL | fn self_test(r#self: u32) {
| ^^^^^^
error: aborting due to previous error

View File

@ -0,0 +1,15 @@
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// compile-flags: -Z parse-only
fn underscore_test(r#_: u32) { // `_` may not be spelled as a raw identifier
//~^ ERROR `r#_` is not currently supported.
}

View File

@ -0,0 +1,8 @@
error: `r#_` is not currently supported.
--> $DIR/raw-literal-underscore.rs:13:20
|
LL | fn underscore_test(r#_: u32) {
| ^^^
error: aborting due to previous error