mirror of https://github.com/rust-lang/rust.git
syntax: Funnel all words through a single keyword table
commit b9ae0c555b
parent f641dce852
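The commit routes every word the parser cares about through one table: keyword_table() is built from bad_expr_word_table() plus the words that stay legal as identifiers, and every word check consults it. A minimal sketch of that shape in present-day Rust (the HashSet representation and the abbreviated word lists are illustrative; the tree itself uses the pre-1.0 hashmap<str, ()> shown in the hunks below):

```rust
use std::collections::HashSet;

// Words that may not begin an expression (illustrative subset of the
// list this commit touches).
fn bad_expr_word_table() -> HashSet<&'static str> {
    ["fn", "let", "if", "loop", "ret"].into_iter().collect()
}

// Every word the language gives meaning to: the bad-expression words
// plus the ones that remain valid as identifiers.
fn keyword_table() -> HashSet<&'static str> {
    let mut keywords = bad_expr_word_table();
    keywords.extend(["as", "else", "false", "self", "static", "true", "use"]);
    keywords
}

fn main() {
    let keywords = keyword_table();
    // Every word-based parser check consults this one table.
    assert!(keywords.contains("use"));
    assert!(keywords.contains("let"));
    assert!(!keywords.contains("improt")); // a typo is simply not a keyword
}
```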
@@ -77,6 +77,7 @@ fn new_parser(sess: parse_sess, cfg: ast::crate_cfg, rdr: lexer::reader,
       mut restriction: parser::UNRESTRICTED,
       reader: rdr,
       binop_precs: prec::binop_prec_table(),
+      keywords: token::keyword_table(),
       bad_expr_words: token::bad_expr_word_table()}
 }
 
@@ -49,6 +49,7 @@ type parser = @{
     mut restriction: restriction,
     reader: reader,
     binop_precs: @[op_spec],
+    keywords: hashmap<str, ()>,
     bad_expr_words: hashmap<str, ()>
 };
 
@@ -83,6 +84,9 @@ impl parser for parser {
     fn span_fatal(sp: span, m: str) -> ! {
         self.sess.span_diagnostic.span_fatal(sp, m)
     }
+    fn bug(m: str) -> ! {
+        self.sess.span_diagnostic.span_bug(self.span, m)
+    }
     fn warn(m: str) {
         self.sess.span_diagnostic.span_warn(self.span, m)
     }
@@ -161,7 +165,15 @@ fn eat(p: parser, tok: token::token) -> bool {
     ret if p.token == tok { p.bump(); true } else { false };
 }
 
+// A sanity check that the word we are asking for is a known keyword
+fn require_keyword(p: parser, word: str) {
+    if !p.keywords.contains_key(word) {
+        p.bug(#fmt("unknown keyword: %s", word));
+    }
+}
+
 fn is_word(p: parser, word: str) -> bool {
+    require_keyword(p, word);
     ret alt p.token {
       token::IDENT(sid, false) { str::eq(word, p.get_str(sid)) }
       _ { false }
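require_keyword is the funnel's sanity check: any word the parser's own code asks about must already be in the table, so a misspelling inside the compiler fails loudly instead of producing a comparison that can never match. A hedged sketch of the same guard in current Rust, using a cut-down Parser stand-in rather than the real type:

```rust
use std::collections::HashSet;

// A cut-down stand-in for the real parser type: only the pieces the
// word helpers need.
struct Parser {
    keywords: HashSet<&'static str>,
    token: Option<String>, // the current identifier token, if any
}

impl Parser {
    // Sanity check: the word a caller asks about must come from the
    // keyword table, so a typo inside the parser fails immediately.
    fn require_keyword(&self, word: &str) {
        if !self.keywords.contains(word) {
            panic!("unknown keyword: {word}");
        }
    }

    // Does the current token spell exactly this keyword?
    fn is_word(&self, word: &str) -> bool {
        self.require_keyword(word);
        self.token.as_deref() == Some(word)
    }

    // Consume the current token if it is this keyword.
    fn eat_word(&mut self, word: &str) -> bool {
        if self.is_word(word) {
            self.token = None; // stands in for bumping to the next token
            true
        } else {
            false
        }
    }
}

fn main() {
    let mut p = Parser {
        keywords: ["use", "import", "export"].into_iter().collect(),
        token: Some("import".to_string()),
    };
    assert!(p.is_word("import"));
    assert!(p.eat_word("import"));
    // p.is_word("improt"); // would panic: the misspelling is caught at once
}
```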
@@ -169,6 +181,7 @@ fn is_word(p: parser, word: str) -> bool {
 }
 
 fn eat_word(p: parser, word: str) -> bool {
+    require_keyword(p, word);
     alt p.token {
       token::IDENT(sid, false) {
         if str::eq(word, p.get_str(sid)) {
@@ -181,6 +194,7 @@ fn eat_word(p: parser, word: str) -> bool {
 }
 
 fn expect_word(p: parser, word: str) {
+    require_keyword(p, word);
     if !eat_word(p, word) {
         p.fatal("expecting " + word + ", found " +
                 token_to_str(p.reader, p.token));
@@ -386,6 +400,9 @@ fn parse_ret_ty(p: parser) -> (ast::ret_style, @ast::ty) {
 fn region_from_name(p: parser, s: option<str>) -> ast::region {
     let r = alt s {
       some (string) {
+        // FIXME: To be consistent with our type resolution the
+        // static region should probably be resolved during type
+        // checking, not in the parser.
         if string == "static" {
             ast::re_static
         } else {
@@ -2578,14 +2595,7 @@ fn parse_view_item(p: parser) -> @ast::view_item {
 }
 
 fn is_view_item(p: parser) -> bool {
-    alt p.token {
-      token::IDENT(sid, false) {
-        let st = p.get_str(sid);
-        ret str::eq(st, "use") || str::eq(st, "import") ||
-            str::eq(st, "export");
-      }
-      _ { ret false; }
-    }
+    is_word(p, "use") || is_word(p, "import") || is_word(p, "export")
 }
 
 fn maybe_parse_view(
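Once is_word carries the sanity check, is_view_item collapses to three table-backed word tests. A self-contained sketch of that rewritten check in current Rust (the free-function shape and names here are illustrative, not the tree's API):

```rust
use std::collections::HashSet;

// Stand-alone rendering of the rewritten check: a view item begins with
// one of three words, and each word is validated against the keyword
// table before being compared to the current token.
fn is_view_item(token: &str, keywords: &HashSet<&'static str>) -> bool {
    ["use", "import", "export"].iter().any(|word| {
        assert!(keywords.contains(*word), "unknown keyword: {word}");
        token == *word
    })
}

fn main() {
    let keywords: HashSet<&'static str> =
        ["use", "import", "export", "fn", "let"].into_iter().collect();
    assert!(is_view_item("export", &keywords));
    assert!(!is_view_item("foo", &keywords));
}
```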
@@ -217,6 +217,38 @@ fn is_bad_expr_word(t: token,
     }
 }
 
+#[doc = "
+All the valid words that have meaning in the Rust language. Some of these are
+nonetheless valid as identifiers because they are unambiguous.
+"]
+fn keyword_table() -> hashmap<str, ()> {
+    let keywords = str_hash();
+    bad_expr_word_table().keys() {|word|
+        keywords.insert(word, ());
+    }
+    let other_keywords = [
+        "as",
+        "bind",
+        "else",
+        "false",
+        "implements",
+        "move",
+        "of",
+        "priv",
+        "self",
+        "send",
+        "static",
+        "to",
+        "true",
+        "use",
+        "with"
+    ];
+    for other_keywords.each {|word|
+        keywords.insert(word, ());
+    }
+    ret keywords;
+}
+
 #[doc = "
 These are the words that shouldn't be allowed as value identifiers,
 because, if used at the start of a line, they will cause the line to be
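keyword_table deliberately distinguishes two tiers: every entry is a word with language meaning, but only the bad_expr_word_table subset is barred from use as a value identifier. A small sketch of that classification in current Rust, with abbreviated word lists standing in for the real ones:

```rust
use std::collections::HashSet;

#[derive(Debug, PartialEq)]
enum WordClass {
    Plain,       // not in the table at all: an ordinary identifier
    Keyword,     // in the keyword table but still unambiguous as an identifier
    BadExprWord, // also a bad-expression word: rejected as a value identifier
}

fn classify(word: &str, keywords: &HashSet<&str>, bad_expr_words: &HashSet<&str>) -> WordClass {
    if bad_expr_words.contains(word) {
        WordClass::BadExprWord
    } else if keywords.contains(word) {
        WordClass::Keyword
    } else {
        WordClass::Plain
    }
}

fn main() {
    let bad: HashSet<&str> = ["fn", "let", "ret"].into_iter().collect();
    let mut keywords = bad.clone();
    keywords.extend(["as", "true", "use"]);
    assert_eq!(classify("let", &keywords, &bad), WordClass::BadExprWord);
    assert_eq!(classify("as", &keywords, &bad), WordClass::Keyword);
    assert_eq!(classify("foo", &keywords, &bad), WordClass::Plain);
}
```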
@@ -228,8 +260,8 @@ fn bad_expr_word_table() -> hashmap<str, ()> {
         "class", "const", "cont", "copy", "crust", "do", "else",
         "enum", "export", "fail", "fn", "for", "if", "iface",
         "impl", "import", "let", "log", "loop", "mod",
-        "mut", "native", "pure", "resource", "ret", "trait",
-        "type", "unchecked", "unsafe", "while", "new"];
+        "mut", "mutable", "native", "new", "pure", "resource",
+        "ret", "trait", "type", "unchecked", "unsafe", "while"];
     for keys.each {|word|
         words.insert(word, ());
     }