// rust/src/comp/syntax/parse/parser.rs

import std::io;
import std::ivec;
import std::str;
import std::option;
import std::option::some;
import std::option::none;
import std::either;
import std::either::left;
import std::either::right;
import std::map::hashmap;
import token::can_begin_expr;
import ex = ext::base;
import codemap::span;
import std::map::new_str_hash;
import util::interner;
import ast::node_id;
import ast::spanned;
tag restriction { UNRESTRICTED; RESTRICT_NO_CALL_EXPRS; }
tag file_type { CRATE_FILE; SOURCE_FILE; }
tag ty_or_bang { a_ty(@ast::ty); a_bang; }
type parse_sess = @{cm: codemap::codemap, mutable next_id: node_id};
fn next_node_id(sess: &parse_sess) -> node_id {
let rv = sess.next_id;
sess.next_id += 1;
ret rv;
}
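// The parser itself is an obj value closing over the reader, the current
// and buffered tokens (with their spans), the operator precedence table
// and the parse session; the parse_* functions below all take one of
// these by reference.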
type parser =
obj {
fn peek() -> token::token ;
fn bump() ;
fn look_ahead(uint) -> token::token ;
fn fatal(str) -> ! ;
fn warn(str) ;
fn restrict(restriction) ;
fn get_restriction() -> restriction ;
fn get_file_type() -> file_type ;
fn get_cfg() -> ast::crate_cfg ;
fn get_span() -> span ;
fn get_lo_pos() -> uint ;
fn get_hi_pos() -> uint ;
fn get_last_lo_pos() -> uint ;
fn get_last_hi_pos() -> uint ;
fn get_prec_table() -> @[op_spec] ;
fn get_str(token::str_num) -> str ;
fn get_reader() -> lexer::reader ;
fn get_filemap() -> codemap::filemap ;
fn get_bad_expr_words() -> hashmap[str, ()] ;
fn get_chpos() -> uint ;
fn get_byte_pos() -> uint ;
fn get_id() -> node_id ;
fn get_sess() -> parse_sess ;
};
fn new_parser_from_file(sess: parse_sess, cfg:
ast::crate_cfg, path: str,
chpos: uint, byte_pos: uint,
ftype: file_type) -> parser {
let src = io::read_whole_file_str(path);
let filemap = codemap::new_filemap(path, chpos, byte_pos);
sess.cm.files += ~[filemap];
let itr = @interner::mk(str::hash, str::eq);
let rdr = lexer::new_reader(sess.cm, src, filemap, itr);
ret new_parser(sess, cfg, rdr, ftype);
}
fn new_parser(sess: parse_sess, cfg: ast::crate_cfg, rdr: lexer::reader,
ftype: file_type) -> parser {
obj stdio_parser(sess: parse_sess,
cfg: ast::crate_cfg,
ftype: file_type,
mutable tok: token::token,
mutable tok_span: span,
mutable last_tok_span: span,
mutable buffer: [{tok: token::token, span: span}],
mutable restr: restriction,
rdr: lexer::reader,
precs: @[op_spec],
bad_words: hashmap[str, ()]) {
fn peek() -> token::token { ret tok; }
fn bump() {
last_tok_span = tok_span;
if ivec::len(buffer) == 0u {
let next = lexer::next_token(rdr);
tok = next.tok;
tok_span = {lo: next.chpos, hi: rdr.get_chpos()};
} else {
let next = ivec::pop(buffer);
tok = next.tok;
tok_span = next.span;
}
}
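// look_ahead() lexes past the current token and stashes what it reads in
// `buffer` (newest token at the front); bump() drains the back of the
// buffer first, so buffered tokens are replayed in the order they were
// lexed before the reader is consulted again.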
fn look_ahead(distance: uint) -> token::token {
while ivec::len(buffer) < distance {
let next = lexer::next_token(rdr);
let sp = {lo: next.chpos, hi: rdr.get_chpos()};
buffer = ~[{tok: next.tok, span: sp}] + buffer;
}
ret buffer.(distance - 1u).tok;
}
fn fatal(m: str) -> ! {
codemap::emit_error(some(self.get_span()), m, sess.cm);
fail;
}
fn warn(m: str) {
codemap::emit_warning(some(self.get_span()), m, sess.cm);
}
fn restrict(r: restriction) { restr = r; }
fn get_restriction() -> restriction { ret restr; }
fn get_span() -> span { ret tok_span; }
fn get_lo_pos() -> uint { ret tok_span.lo; }
fn get_hi_pos() -> uint { ret tok_span.hi; }
fn get_last_lo_pos() -> uint { ret last_tok_span.lo; }
fn get_last_hi_pos() -> uint { ret last_tok_span.hi; }
fn get_file_type() -> file_type { ret ftype; }
fn get_cfg() -> ast::crate_cfg { ret cfg; }
fn get_prec_table() -> @[op_spec] { ret precs; }
fn get_str(i: token::str_num) -> str {
ret interner::get(*rdr.get_interner(), i);
}
fn get_reader() -> lexer::reader { ret rdr; }
fn get_filemap() -> codemap::filemap { ret rdr.get_filemap(); }
fn get_bad_expr_words() -> hashmap[str, ()] { ret bad_words; }
fn get_chpos() -> uint { ret rdr.get_chpos(); }
fn get_byte_pos() -> uint { ret rdr.get_byte_pos(); }
fn get_id() -> node_id { ret next_node_id(sess); }
fn get_sess() -> parse_sess { ret sess; }
}
let tok0 = lexer::next_token(rdr);
let span0 = {lo: tok0.chpos, hi: rdr.get_chpos()};
ret stdio_parser(sess, cfg, ftype, tok0.tok, span0, span0, ~[],
UNRESTRICTED, rdr, prec_table(), bad_expr_word_table());
}
// These are the words that shouldn't be allowed as value identifiers,
// because, if used at the start of a line, they will cause the line to be
// interpreted as a specific kind of statement, which would be confusing.
fn bad_expr_word_table() -> hashmap[str, ()] {
let words = new_str_hash();
words.insert("mod", ());
words.insert("if", ());
words.insert("else", ());
words.insert("while", ());
words.insert("do", ());
words.insert("alt", ());
words.insert("for", ());
words.insert("each", ());
words.insert("break", ());
words.insert("cont", ());
words.insert("put", ());
words.insert("ret", ());
words.insert("be", ());
words.insert("fail", ());
words.insert("type", ());
words.insert("resource", ());
words.insert("check", ());
words.insert("assert", ());
words.insert("claim", ());
words.insert("prove", ());
words.insert("native", ());
words.insert("fn", ());
words.insert("block", ());
words.insert("lambda", ());
words.insert("pred", ());
words.insert("iter", ());
words.insert("import", ());
words.insert("export", ());
words.insert("let", ());
words.insert("const", ());
words.insert("log", ());
words.insert("log_err", ());
words.insert("tag", ());
words.insert("obj", ());
ret words;
}
fn unexpected(p: &parser, t: token::token) -> ! {
let s: str = "unexpected token: ";
s += token::to_str(p.get_reader(), t);
p.fatal(s);
}
fn expect(p: &parser, t: token::token) {
if p.peek() == t {
p.bump();
} else {
let s: str = "expecting ";
s += token::to_str(p.get_reader(), t);
s += ", found ";
s += token::to_str(p.get_reader(), p.peek());
p.fatal(s);
}
}
fn spanned[T](lo: uint, hi: uint, node: &T) -> spanned[T] {
ret {node: node, span: {lo: lo, hi: hi}};
}
fn parse_ident(p: &parser) -> ast::ident {
alt p.peek() {
token::IDENT(i, _) { p.bump(); ret p.get_str(i); }
_ { p.fatal("expecting ident"); }
}
}
fn parse_value_ident(p: &parser) -> ast::ident {
check_bad_word(p);
ret parse_ident(p);
}
fn eat(p: &parser, tok: &token::token) -> bool {
ret if p.peek() == tok { p.bump(); true } else { false };
}
fn is_word(p: &parser, word: &str) -> bool {
ret alt p.peek() {
token::IDENT(sid, false) { str::eq(word, p.get_str(sid)) }
_ { false }
};
}
fn eat_word(p: &parser, word: &str) -> bool {
alt p.peek() {
token::IDENT(sid, false) {
if str::eq(word, p.get_str(sid)) {
p.bump();
ret true;
} else { ret false; }
}
_ { ret false; }
}
}
fn expect_word(p: &parser, word: &str) {
if !eat_word(p, word) {
p.fatal("expecting " + word + ", found " +
token::to_str(p.get_reader(), p.peek()));
}
}
fn check_bad_word(p: &parser) {
alt p.peek() {
token::IDENT(sid, false) {
let w = p.get_str(sid);
if p.get_bad_expr_words().contains_key(w) {
p.fatal("found " + w + " in expression position");
}
}
_ { }
}
}
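// Parses the argument list and result of a fn-family type, e.g. the
// "(&int, str) -> bool" part of "fn(&int, str) -> bool"; a "-> !" result
// marks the type as non-returning (ty_bot with cf set to noreturn).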
fn parse_ty_fn(proto: ast::proto, p: &parser, lo: uint) -> ast::ty_ {
fn parse_fn_input_ty(p: &parser) -> ast::ty_arg {
let lo = p.get_lo_pos();
// Ignore arg name, if present
if is_plain_ident(p) && p.look_ahead(1u) == token::COLON {
p.bump();
p.bump();
}
let mode = ast::val;
if p.peek() == token::BINOP(token::AND) {
p.bump();
mode = ast::alias(eat_word(p, "mutable"));
}
let t = parse_ty(p);
ret spanned(lo, t.span.hi, {mode: mode, ty: t});
}
let lo = p.get_lo_pos();
let inputs =
parse_seq(token::LPAREN, token::RPAREN, some(token::COMMA),
parse_fn_input_ty, p);
// FIXME: there's no syntax for this right now anyway
// auto constrs = parse_constrs(~[], p);
let constrs: [@ast::constr] = ~[];
let output: @ast::ty;
let cf = ast::return;
if p.peek() == token::RARROW {
p.bump();
let tmp = parse_ty_or_bang(p);
alt tmp {
a_ty(t) { output = t; }
a_bang. {
output = @spanned(lo, inputs.span.hi, ast::ty_bot);
cf = ast::noreturn;
}
}
} else { output = @spanned(lo, inputs.span.hi, ast::ty_nil); }
ret ast::ty_fn(proto, inputs.node, output, cf, constrs);
}
fn parse_proto(p: &parser) -> ast::proto {
if eat_word(p, "iter") {
ret ast::proto_iter;
} else if (eat_word(p, "fn")) {
ret ast::proto_fn;
} else if (eat_word(p, "block")) {
ret ast::proto_block;
} else if (eat_word(p, "pred")) {
ret ast::proto_fn;
} else { unexpected(p, p.peek()); }
}
fn parse_ty_obj(p: &parser, hi: &mutable uint) -> ast::ty_ {
fn parse_method_sig(p: &parser) -> ast::ty_method {
let flo = p.get_lo_pos();
let proto: ast::proto = parse_proto(p);
let ident = parse_value_ident(p);
let f = parse_ty_fn(proto, p, flo);
expect(p, token::SEMI);
alt f {
ast::ty_fn(proto, inputs, output, cf, constrs) {
ret spanned(flo, output.span.hi,
{proto: proto,
ident: ident,
inputs: inputs,
output: output,
cf: cf,
constrs: constrs});
}
}
}
let meths =
parse_seq(token::LBRACE, token::RBRACE, none, parse_method_sig, p);
hi = meths.span.hi;
ret ast::ty_obj(meths.node);
}
fn parse_mt(p: &parser) -> ast::mt {
let mut = parse_mutability(p);
let t = parse_ty(p);
ret {ty: t, mut: mut};
}
fn parse_ty_field(p: &parser) -> ast::ty_field {
let lo = p.get_lo_pos();
let mut = parse_mutability(p);
let id = parse_ident(p);
expect(p, token::COLON);
let ty = parse_ty(p);
ret spanned(lo, ty.span.hi, {ident: id, mt: {ty: ty, mut: mut}});
}
// if i is the jth ident in args, return j
// otherwise, fail
fn ident_index(p: &parser, args: &[ast::arg], i: &ast::ident) -> uint {
let j = 0u;
for a: ast::arg in args { if a.ident == i { ret j; } j += 1u; }
p.fatal("Unbound variable " + i + " in constraint arg");
}
fn parse_type_constr_arg(p: &parser) -> @ast::ty_constr_arg {
let sp = p.get_span();
let carg = ast::carg_base;
expect(p, token::BINOP(token::STAR));
if p.peek() == token::DOT {
// "*..." notation for record fields
p.bump();
let pth: ast::path = parse_path(p);
carg = ast::carg_ident(pth);
}
// No literals yet, I guess?
ret @{node: carg, span: sp};
}
fn parse_constr_arg(args: &[ast::arg], p: &parser) -> @ast::constr_arg {
let sp = p.get_span();
let carg = ast::carg_base;
if p.peek() == token::BINOP(token::STAR) {
p.bump();
} else {
let i: ast::ident = parse_value_ident(p);
carg = ast::carg_ident(ident_index(p, args, i));
}
ret @{node: carg, span: sp};
}
fn parse_ty_constr(fn_args: &[ast::arg], p: &parser) -> @ast::constr {
let lo = p.get_lo_pos();
let path = parse_path(p);
let pf = bind parse_constr_arg(fn_args, _);
let args: {node: [@ast::constr_arg], span: span} =
parse_seq(token::LPAREN, token::RPAREN, some(token::COMMA), pf, p);
ret @spanned(lo, args.span.hi,
{path: path, args: args.node, id: p.get_id()});
}
fn parse_constr_in_type(p: &parser) -> @ast::ty_constr {
let lo = p.get_lo_pos();
let path = parse_path(p);
let args: [@ast::ty_constr_arg] =
parse_seq(token::LPAREN, token::RPAREN, some(token::COMMA),
parse_type_constr_arg, p).node;
let hi = p.get_lo_pos();
let tc: ast::ty_constr_ = {path: path, args: args, id: p.get_id()};
ret @spanned(lo, hi, tc);
}
fn parse_constrs[T](pser: fn(&parser) -> @ast::constr_general[T] , p: &parser)
-> [@ast::constr_general[T]] {
let constrs: [@ast::constr_general[T]] = ~[];
while true {
let constr = pser(p);
constrs += ~[constr];
if p.peek() == token::COMMA { p.bump(); } else { break; }
}
constrs
}
fn parse_type_constraints(p: &parser) -> [@ast::ty_constr] {
ret parse_constrs(parse_constr_in_type, p);
}
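// After a base type has been parsed, a '[' introduces explicit type
// parameter instantiation, e.g. "option::t[int]"; this is only accepted
// when the base type is a path.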
fn parse_ty_postfix(orig_t: ast::ty_, p: &parser) -> @ast::ty {
let lo = p.get_lo_pos();
if p.peek() == token::LBRACKET {
// This is explicit type parameter instantiation.
p.bump();
let seq =
parse_seq_to_end(token::RBRACKET, some(token::COMMA),
parse_ty, p);
alt orig_t {
ast::ty_path(pth, ann) {
let hi = p.get_hi_pos();
ret @spanned(lo, hi,
ast::ty_path(spanned(lo, hi,
{global: pth.node.global,
idents: pth.node.idents,
types: seq}), ann));
}
_ {
p.fatal("type parameter instantiation only allowed for paths");
}
}
}
ret @spanned(lo, p.get_lo_pos(), orig_t);
}
fn parse_ty_or_bang(p: &parser) -> ty_or_bang {
alt p.peek() {
token::NOT. { p.bump(); ret a_bang; }
_ { ret a_ty(parse_ty(p)); }
}
}
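// The main type grammar: built-in scalar names, machine types (i8..f64),
// "()", the "@" and "*" pointer sigils, records, vec/ivec, the
// fn/block/iter/obj types, port and chan, and finally paths; the result
// is passed through parse_ty_postfix to pick up explicit type parameters.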
fn parse_ty(p: &parser) -> @ast::ty {
let lo = p.get_lo_pos();
let hi = lo;
let t: ast::ty_;
// FIXME: do something with this
if eat_word(p, "bool") {
t = ast::ty_bool;
} else if (eat_word(p, "int")) {
t = ast::ty_int;
} else if (eat_word(p, "uint")) {
t = ast::ty_uint;
} else if (eat_word(p, "float")) {
t = ast::ty_float;
} else if (eat_word(p, "str")) {
t = ast::ty_str;
} else if (eat_word(p, "istr")) {
t = ast::ty_istr;
} else if (eat_word(p, "char")) {
t = ast::ty_char;
} else if (eat_word(p, "task")) {
t = ast::ty_task;
} else if (eat_word(p, "i8")) {
t = ast::ty_machine(ast::ty_i8);
} else if (eat_word(p, "i16")) {
t = ast::ty_machine(ast::ty_i16);
} else if (eat_word(p, "i32")) {
t = ast::ty_machine(ast::ty_i32);
} else if (eat_word(p, "i64")) {
t = ast::ty_machine(ast::ty_i64);
} else if (eat_word(p, "u8")) {
t = ast::ty_machine(ast::ty_u8);
} else if (eat_word(p, "u16")) {
t = ast::ty_machine(ast::ty_u16);
} else if (eat_word(p, "u32")) {
t = ast::ty_machine(ast::ty_u32);
} else if (eat_word(p, "u64")) {
t = ast::ty_machine(ast::ty_u64);
} else if (eat_word(p, "f32")) {
t = ast::ty_machine(ast::ty_f32);
} else if (eat_word(p, "f64")) {
t = ast::ty_machine(ast::ty_f64);
} else if (p.peek() == token::LPAREN) {
p.bump();
alt p.peek() {
token::RPAREN. { hi = p.get_hi_pos(); p.bump(); t = ast::ty_nil; }
_ {
t = parse_ty(p).node;
hi = p.get_hi_pos();
expect(p, token::RPAREN);
}
}
} else if (p.peek() == token::AT) {
p.bump();
let mt = parse_mt(p);
hi = mt.ty.span.hi;
t = ast::ty_box(mt);
} else if (p.peek() == token::BINOP(token::STAR)) {
p.bump();
let mt = parse_mt(p);
hi = mt.ty.span.hi;
t = ast::ty_ptr(mt);
} else if (p.peek() == token::LBRACE) {
let elems =
parse_seq(token::LBRACE, token::RBRACE, some(token::COMMA),
parse_ty_field, p);
hi = elems.span.hi;
t = ast::ty_rec(elems.node);
if p.peek() == token::COLON {
p.bump();
t =
ast::ty_constr(@spanned(lo, hi, t),
parse_type_constraints(p));
}
} else if (eat_word(p, "vec")) {
expect(p, token::LBRACKET);
t = ast::ty_vec(parse_mt(p));
hi = p.get_hi_pos();
expect(p, token::RBRACKET);
} else if (p.peek() == token::LBRACKET) {
expect(p, token::LBRACKET);
t = ast::ty_ivec(parse_mt(p));
hi = p.get_hi_pos();
expect(p, token::RBRACKET);
} else if (eat_word(p, "fn")) {
let flo = p.get_last_lo_pos();
t = parse_ty_fn(ast::proto_fn, p, flo);
alt t { ast::ty_fn(_, _, out, _, _) { hi = out.span.hi; } }
} else if (eat_word(p, "block")) {
let flo = p.get_last_lo_pos();
t = parse_ty_fn(ast::proto_block, p, flo);
alt t { ast::ty_fn(_, _, out, _, _) { hi = out.span.hi; } }
} else if (eat_word(p, "iter")) {
let flo = p.get_last_lo_pos();
t = parse_ty_fn(ast::proto_iter, p, flo);
alt t { ast::ty_fn(_, _, out, _, _) { hi = out.span.hi; } }
} else if (eat_word(p, "obj")) {
t = parse_ty_obj(p, hi);
} else if (eat_word(p, "port")) {
expect(p, token::LBRACKET);
t = ast::ty_port(parse_ty(p));
hi = p.get_hi_pos();
expect(p, token::RBRACKET);
} else if (eat_word(p, "chan")) {
expect(p, token::LBRACKET);
t = ast::ty_chan(parse_ty(p));
hi = p.get_hi_pos();
expect(p, token::RBRACKET);
} else if (eat_word(p, "mutable")) {
p.warn("ignoring deprecated 'mutable' type constructor");
let typ = parse_ty(p);
t = typ.node;
hi = typ.span.hi;
} else if (p.peek() == token::MOD_SEP || is_ident(p.peek())) {
let path = parse_path(p);
t = ast::ty_path(path, p.get_id());
hi = path.span.hi;
} else { p.fatal("expecting type"); }
ret parse_ty_postfix(t, p);
}
fn parse_arg(p: &parser) -> ast::arg {
let m: ast::mode = ast::val;
let i: ast::ident = parse_value_ident(p);
expect(p, token::COLON);
if eat(p, token::BINOP(token::AND)) {
m = ast::alias(eat_word(p, "mutable"));
} else if eat(p, token::BINOP(token::MINUS)) {
m = ast::move;
}
let t: @ast::ty = parse_ty(p);
ret {mode: m, ty: t, ident: i, id: p.get_id()};
}
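// The parse_seq* helpers parse a `sep`-separated sequence of items, each
// produced by `f`: parse_seq_to_before_end stops in front of the closing
// token, parse_seq_to_end also consumes it, and parse_seq additionally
// expects the opening bracket and returns the result with its span.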
fn parse_seq_to_end[T](ket: token::token, sep: option::t[token::token],
f: fn(&parser) -> T , p: &parser) -> [T] {
let val = parse_seq_to_before_end(ket, sep, f, p);
p.bump();
ret val;
}
fn parse_seq_to_before_end[T](ket: token::token, sep: option::t[token::token],
f: fn(&parser) -> T , p: &parser) -> [T] {
let first: bool = true;
let v: [T] = ~[];
while p.peek() != ket {
alt sep {
some(t) { if first { first = false; } else { expect(p, t); } }
_ { }
}
v += ~[f(p)];
}
ret v;
}
fn parse_seq[T](bra: token::token, ket: token::token,
sep: option::t[token::token], f: fn(&parser) -> T ,
p: &parser) -> spanned[[T]] {
let lo = p.get_lo_pos();
expect(p, bra);
let result = parse_seq_to_before_end[T](ket, sep, f, p);
let hi = p.get_hi_pos();
p.bump();
ret spanned(lo, hi, result);
}
fn parse_lit(p: &parser) -> ast::lit {
let sp = p.get_span();
let lit: ast::lit_ = ast::lit_nil;
if eat_word(p, "true") {
lit = ast::lit_bool(true);
} else if (eat_word(p, "false")) {
lit = ast::lit_bool(false);
} else {
alt p.peek() {
token::LIT_INT(i) { p.bump(); lit = ast::lit_int(i); }
token::LIT_UINT(u) { p.bump(); lit = ast::lit_uint(u); }
token::LIT_FLOAT(s) {
p.bump();
lit = ast::lit_float(p.get_str(s));
}
token::LIT_MACH_INT(tm, i) {
p.bump();
lit = ast::lit_mach_int(tm, i);
}
token::LIT_MACH_FLOAT(tm, s) {
p.bump();
lit = ast::lit_mach_float(tm, p.get_str(s));
}
token::LIT_CHAR(c) { p.bump(); lit = ast::lit_char(c); }
token::LIT_STR(s) {
p.bump();
lit = ast::lit_str(p.get_str(s), ast::sk_rc);
}
token::LPAREN. {
p.bump();
expect(p, token::RPAREN);
lit = ast::lit_nil;
}
t { unexpected(p, t); }
}
}
ret {node: lit, span: sp};
}
fn is_ident(t: token::token) -> bool {
alt t { token::IDENT(_, _) { ret true; } _ { } }
ret false;
}
fn is_plain_ident(p: &parser) -> bool {
ret alt p.peek() { token::IDENT(_, false) { true } _ { false } };
}
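// Parses a (possibly ::-qualified) path such as "std::option::t"; the
// type-argument list is left empty here and is filled in by
// parse_path_and_ty_param_substs when a '[' follows.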
fn parse_path(p: &parser) -> ast::path {
let lo = p.get_lo_pos();
let hi = lo;
let global;
if p.peek() == token::MOD_SEP {
global = true;
p.bump();
} else { global = false; }
let ids: [ast::ident] = ~[];
while true {
alt p.peek() {
token::IDENT(i, _) {
hi = p.get_hi_pos();
ids += ~[p.get_str(i)];
hi = p.get_hi_pos();
p.bump();
if p.peek() == token::MOD_SEP { p.bump(); } else { break; }
}
_ { break; }
}
}
ret spanned(lo, hi, {global: global, idents: ids, types: ~[]});
}
fn parse_path_and_ty_param_substs(p: &parser) -> ast::path {
let lo = p.get_lo_pos();
let path = parse_path(p);
if p.peek() == token::LBRACKET {
let seq =
parse_seq(token::LBRACKET, token::RBRACKET, some(token::COMMA),
parse_ty, p);
let hi = seq.span.hi;
path =
spanned(lo, hi,
{global: path.node.global,
idents: path.node.idents,
types: seq.node});
}
ret path;
}
fn parse_mutability(p: &parser) -> ast::mutability {
if eat_word(p, "mutable") {
if p.peek() == token::QUES { p.bump(); ret ast::maybe_mut; }
ret ast::mut;
}
ret ast::imm;
}
fn parse_field(p: &parser, sep: &token::token) -> ast::field {
let lo = p.get_lo_pos();
let m = parse_mutability(p);
let i = parse_ident(p);
expect(p, sep);
let e = parse_expr(p);
ret spanned(lo, e.span.hi, {mut: m, ident: i, expr: e});
}
fn mk_expr(p: &parser, lo: uint, hi: uint, node: &ast::expr_) -> @ast::expr {
ret @{id: p.get_id(), node: node, span: {lo: lo, hi: hi}};
}
fn mk_mac_expr(p: &parser, lo: uint, hi: uint, m: &ast::mac_) -> @ast::expr {
ret @{id: p.get_id(),
node: ast::expr_mac({node: m, span: {lo: lo, hi: hi}}),
span: {lo: lo, hi: hi}};
}
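// The bottom of the expression grammar: parenthesized and record
// expressions, the keyword-introduced forms (if, alt, for, ret, fail,
// bind, ...), vectors, anonymous objects, syntax extensions, paths and
// literals. Dot, index and call suffixes are layered on by the callers.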
fn parse_bottom_expr(p: &parser) -> @ast::expr {
let lo = p.get_lo_pos();
let hi = p.get_hi_pos();
let ex: ast::expr_;
if p.peek() == token::LPAREN {
p.bump();
alt p.peek() {
token::RPAREN. {
hi = p.get_hi_pos();
p.bump();
let lit = @spanned(lo, hi, ast::lit_nil);
ret mk_expr(p, lo, hi, ast::expr_lit(lit));
}
_ {/* fall through */ }
}
let e = parse_expr(p);
hi = p.get_hi_pos();
expect(p, token::RPAREN);
ret mk_expr(p, lo, hi, e.node);
} else if (p.peek() == token::LBRACE) {
p.bump();
if is_word(p, "mutable") ||
is_plain_ident(p) && p.look_ahead(1u) == token::COLON {
let fields = ~[parse_field(p, token::COLON)];
let base = none;
while p.peek() != token::RBRACE {
if eat_word(p, "with") { base = some(parse_expr(p)); break; }
expect(p, token::COMMA);
fields += ~[parse_field(p, token::COLON)];
}
hi = p.get_hi_pos();
expect(p, token::RBRACE);
ex = ast::expr_rec(fields, base);
} else {
let blk = parse_block_tail(p, lo);
ret mk_expr(p, blk.span.lo, blk.span.hi, ast::expr_block(blk));
}
} else if (eat_word(p, "if")) {
ret parse_if_expr(p);
} else if (eat_word(p, "for")) {
ret parse_for_expr(p);
} else if (eat_word(p, "while")) {
ret parse_while_expr(p);
} else if (eat_word(p, "do")) {
ret parse_do_while_expr(p);
} else if (eat_word(p, "alt")) {
ret parse_alt_expr(p);
} else if (eat_word(p, "spawn")) {
ret parse_spawn_expr(p);
} else if (eat_word(p, "fn")) {
ret parse_fn_expr(p, ast::proto_fn);
} else if (eat_word(p, "block")) {
ret parse_fn_expr(p, ast::proto_block);
} else if (eat_word(p, "lambda")) {
ret parse_fn_expr(p, ast::proto_closure);
} else if (p.peek() == token::LBRACKET) {
p.bump();
let mut = parse_mutability(p);
let es =
parse_seq_to_end(token::RBRACKET, some(token::COMMA), parse_expr,
p);
ex = ast::expr_vec(es, mut, ast::sk_unique);
} else if (p.peek() == token::POUND_LT) {
p.bump();
let ty = parse_ty(p);
expect(p, token::GT);
/* hack: early return to take advantage of specialized function */
ret mk_mac_expr(p, lo, p.get_hi_pos(), ast::mac_embed_type(ty))
} else if (p.peek() == token::POUND_LBRACE) {
p.bump();
let blk = ast::mac_embed_block(parse_block_tail(p, lo));
ret mk_mac_expr(p, lo, p.get_hi_pos(), blk);
} else if (p.peek() == token::ELLIPSIS) {
p.bump();
ret mk_mac_expr(p, lo, p.get_hi_pos(), ast::mac_ellipsis)
} else if (p.peek() == token::TILDE) {
p.bump();
alt p.peek() {
token::LBRACKET. { // unique array (temporary)
p.bump();
let mut = parse_mutability(p);
let es =
parse_seq_to_end(token::RBRACKET, some(token::COMMA),
parse_expr, p);
ex = ast::expr_vec(es, mut, ast::sk_unique);
}
token::LIT_STR(s) {
p.bump();
let lit =
@{node: ast::lit_str(p.get_str(s), ast::sk_unique),
span: p.get_span()};
ex = ast::expr_lit(lit);
}
_ { p.fatal("unimplemented: unique pointer creation"); }
}
} else if (eat_word(p, "obj")) {
// Anonymous object
// Only make people type () if they're actually adding new fields
let fields: option::t[[ast::anon_obj_field]] = none;
if p.peek() == token::LPAREN {
p.bump();
fields =
some(parse_seq_to_end(token::RPAREN, some(token::COMMA),
parse_anon_obj_field, p));
}
let meths: [@ast::method] = ~[];
let inner_obj: option::t[@ast::expr] = none;
expect(p, token::LBRACE);
while p.peek() != token::RBRACE {
if eat_word(p, "with") {
inner_obj = some(parse_expr(p));
} else { meths += ~[parse_method(p)]; }
}
hi = p.get_hi_pos();
expect(p, token::RBRACE);
// fields and methods may be *additional* or *overriding* fields
// and methods if there's an inner_obj, or they may be the *only*
// fields and methods if there's no inner_obj.
// We don't need to pull ".node" out of fields because it's not a
// "spanned".
let ob = {fields: fields, methods: meths, inner_obj: inner_obj};
ex = ast::expr_anon_obj(ob);
} else if (eat_word(p, "bind")) {
let e = parse_expr_res(p, RESTRICT_NO_CALL_EXPRS);
fn parse_expr_opt(p: &parser) -> option::t[@ast::expr] {
alt p.peek() {
token::UNDERSCORE. { p.bump(); ret none; }
_ { ret some(parse_expr(p)); }
}
}
let es =
parse_seq(token::LPAREN, token::RPAREN, some(token::COMMA),
parse_expr_opt, p);
hi = es.span.hi;
ex = ast::expr_bind(e, es.node);
} else if (p.peek() == token::POUND) {
let ex_ext = parse_syntax_ext(p);
hi = ex_ext.span.hi;
ex = ex_ext.node;
} else if (eat_word(p, "fail")) {
if can_begin_expr(p.peek()) {
let e = parse_expr(p);
hi = e.span.hi;
ex = ast::expr_fail(some(e));
} else { ex = ast::expr_fail(none); }
} else if (eat_word(p, "log")) {
let e = parse_expr(p);
ex = ast::expr_log(1, e);
hi = e.span.hi;
} else if (eat_word(p, "log_err")) {
let e = parse_expr(p);
ex = ast::expr_log(0, e);
hi = e.span.hi;
} else if (eat_word(p, "assert")) {
let e = parse_expr(p);
ex = ast::expr_assert(e);
hi = e.span.hi;
} else if (eat_word(p, "check")) {
/* Should be a predicate (pure boolean function) applied to
arguments that are all either slot variables or literals,
but the typechecker enforces that. */
let e = parse_expr(p);
hi = e.span.hi;
ex = ast::expr_check(ast::checked, e);
} else if (eat_word(p, "claim")) {
/* Same rules as check, except that if check-claims
is enabled (a command-line flag), then the parser turns
claims into check */
let e = parse_expr(p);
hi = e.span.hi;
ex = ast::expr_check(ast::unchecked, e);
} else if (eat_word(p, "ret")) {
if can_begin_expr(p.peek()) {
let e = parse_expr(p);
hi = e.span.hi;
ex = ast::expr_ret(some(e));
} else { ex = ast::expr_ret(none); }
} else if (eat_word(p, "break")) {
ex = ast::expr_break;
hi = p.get_hi_pos();
} else if (eat_word(p, "cont")) {
ex = ast::expr_cont;
hi = p.get_hi_pos();
} else if (eat_word(p, "put")) {
alt p.peek() {
token::SEMI. { ex = ast::expr_put(none); }
_ {
let e = parse_expr(p);
hi = e.span.hi;
ex = ast::expr_put(some(e));
}
}
} else if (eat_word(p, "be")) {
let e = parse_expr(p);
// FIXME: Is this the right place for this check?
if /*check*/ast::is_call_expr(e) {
hi = e.span.hi;
ex = ast::expr_be(e);
} else { p.fatal("Non-call expression in tail call"); }
} else if (eat_word(p, "port")) {
let ty = @spanned(lo, hi, ast::ty_infer);
if token::LBRACKET == p.peek() {
expect(p, token::LBRACKET);
ty = parse_ty(p);
expect(p, token::RBRACKET);
}
expect(p, token::LPAREN);
expect(p, token::RPAREN);
hi = p.get_hi_pos();
ex = ast::expr_port(ty);
} else if (eat_word(p, "chan")) {
expect(p, token::LPAREN);
let e = parse_expr(p);
hi = e.span.hi;
expect(p, token::RPAREN);
ex = ast::expr_chan(e);
} else if (eat_word(p, "self")) {
log "parsing a self-call...";
expect(p, token::DOT);
// The rest is a call expression.
let f: @ast::expr = parse_self_method(p);
let es =
parse_seq(token::LPAREN, token::RPAREN, some(token::COMMA),
parse_expr, p);
hi = es.span.hi;
ex = ast::expr_call(f, es.node);
} else if (p.peek() == token::MOD_SEP ||
is_ident(p.peek()) && !is_word(p, "true") &&
!is_word(p, "false")) {
check_bad_word(p);
let pth = parse_path_and_ty_param_substs(p);
hi = pth.span.hi;
ex = ast::expr_path(pth);
} else {
let lit = parse_lit(p);
hi = lit.span.hi;
ex = ast::expr_lit(@lit);
}
ret mk_expr(p, lo, hi, ex);
}
fn parse_syntax_ext(p: &parser) -> @ast::expr {
let lo = p.get_lo_pos();
expect(p, token::POUND);
ret parse_syntax_ext_naked(p, lo);
}
fn parse_syntax_ext_naked(p: &parser, lo: uint) -> @ast::expr {
let pth = parse_path(p);
if ivec::len(pth.node.idents) == 0u {
p.fatal("expected a syntax expander name");
}
//temporary for a backwards-compatible cycle:
let es = if p.peek() == token::LPAREN {
parse_seq(token::LPAREN, token::RPAREN, some(token::COMMA),
parse_expr, p)
} else {
parse_seq(token::LBRACKET, token::RBRACKET, some(token::COMMA),
parse_expr, p)
};
let hi = es.span.hi;
let e = mk_expr(p, es.span.lo, hi,
ast::expr_vec(es.node, ast::imm, ast::sk_rc));
ret mk_mac_expr(p, lo, hi, ast::mac_invoc(pth, e, none));
}
fn parse_self_method(p: &parser) -> @ast::expr {
let sp = p.get_span();
let f_name: ast::ident = parse_ident(p);
ret mk_expr(p, sp.lo, sp.hi, ast::expr_self_method(f_name));
}
fn parse_dot_or_call_expr(p: &parser) -> @ast::expr {
ret parse_dot_or_call_expr_with(p, parse_bottom_expr(p));
}
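// Wraps an already-parsed expression in any trailing suffixes: call
// argument lists (unless RESTRICT_NO_CALL_EXPRS is in force, as it is
// for the callee of "bind"), ".field" accesses, and ".(index)"
// expressions.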
fn parse_dot_or_call_expr_with(p: &parser, e: @ast::expr) -> @ast::expr {
let lo = e.span.lo;
let hi = e.span.hi;
while true {
alt p.peek() {
token::LPAREN. {
if p.get_restriction() == RESTRICT_NO_CALL_EXPRS {
ret e;
} else {
// Call expr.
let es =
parse_seq(token::LPAREN, token::RPAREN,
some(token::COMMA), parse_expr, p);
hi = es.span.hi;
e = mk_expr(p, lo, hi, ast::expr_call(e, es.node));
}
}
token::DOT. {
p.bump();
alt p.peek() {
token::IDENT(i, _) {
hi = p.get_hi_pos();
p.bump();
e = mk_expr(p, lo, hi, ast::expr_field(e, p.get_str(i)));
}
token::LPAREN. {
p.bump();
let ix = parse_expr(p);
hi = ix.span.hi;
expect(p, token::RPAREN);
e = mk_expr(p, lo, hi, ast::expr_index(e, ix));
}
t { unexpected(p, t); }
}
}
_ { ret e; }
}
}
ret e;
}
fn parse_prefix_expr(p: &parser) -> @ast::expr {
if eat_word(p, "mutable") {
p.warn("ignoring deprecated 'mutable' prefix operator");
}
let lo = p.get_lo_pos();
let hi = p.get_hi_pos();
// FIXME: can only remove this sort of thing when both typestate and
// alt-exhaustive-match checking are co-operating.
let lit = @spanned(lo, lo, ast::lit_nil);
let ex: ast::expr_ = ast::expr_lit(lit);
alt p.peek() {
token::NOT. {
p.bump();
let e = parse_prefix_expr(p);
hi = e.span.hi;
ex = ast::expr_unary(ast::not, e);
}
token::BINOP(b) {
alt b {
token::MINUS. {
p.bump();
let e = parse_prefix_expr(p);
hi = e.span.hi;
ex = ast::expr_unary(ast::neg, e);
}
token::STAR. {
p.bump();
let e = parse_prefix_expr(p);
hi = e.span.hi;
ex = ast::expr_unary(ast::deref, e);
}
_ { ret parse_dot_or_call_expr(p); }
}
}
token::AT. {
p.bump();
let m = parse_mutability(p);
let e = parse_prefix_expr(p);
hi = e.span.hi;
ex = ast::expr_unary(ast::box(m), e);
}
_ { ret parse_dot_or_call_expr(p); }
}
ret mk_expr(p, lo, hi, ex);
}
fn parse_ternary(p: &parser) -> @ast::expr {
let cond_expr = parse_binops(p);
if p.peek() == token::QUES {
p.bump();
let then_expr = parse_expr(p);
expect(p, token::COLON);
let else_expr = parse_expr(p);
ret mk_expr(p, cond_expr.span.lo, else_expr.span.hi,
ast::expr_ternary(cond_expr, then_expr, else_expr));
} else { ret cond_expr; }
}
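// Binary operator parsing is driven by the table below: a larger `prec`
// binds tighter, so e.g. "a + b * c" groups as "a + (b * c)" (mul has
// precedence 11, add 10).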
type op_spec = {tok: token::token, op: ast::binop, prec: int};
// FIXME make this a const, don't store it in parser state
fn prec_table() -> @[op_spec] {
ret @~[{tok: token::BINOP(token::STAR), op: ast::mul, prec: 11},
{tok: token::BINOP(token::SLASH), op: ast::div, prec: 11},
{tok: token::BINOP(token::PERCENT), op: ast::rem, prec: 11},
{tok: token::BINOP(token::PLUS), op: ast::add, prec: 10},
{tok: token::BINOP(token::MINUS), op: ast::sub, prec: 10},
{tok: token::BINOP(token::LSL), op: ast::lsl, prec: 9},
{tok: token::BINOP(token::LSR), op: ast::lsr, prec: 9},
{tok: token::BINOP(token::ASR), op: ast::asr, prec: 9},
{tok: token::BINOP(token::AND), op: ast::bitand, prec: 8},
{tok: token::BINOP(token::CARET), op: ast::bitxor, prec: 6},
{tok: token::BINOP(token::OR), op: ast::bitor, prec: 6},
// 'as' sits in between, at precedence 5
{tok: token::LT, op: ast::lt, prec: 4},
{tok: token::LE, op: ast::le, prec: 4},
{tok: token::GE, op: ast::ge, prec: 4},
{tok: token::GT, op: ast::gt, prec: 4},
{tok: token::EQEQ, op: ast::eq, prec: 3},
{tok: token::NE, op: ast::ne, prec: 3},
{tok: token::ANDAND, op: ast::and, prec: 2},
{tok: token::OROR, op: ast::or, prec: 1}];
}
fn parse_binops(p: &parser) -> @ast::expr {
ret parse_more_binops(p, parse_prefix_expr(p), 0);
}
const unop_prec: int = 100;
const as_prec: int = 5;
const ternary_prec: int = 0;
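// Precedence climbing: an operator is consumed here only if it binds
// tighter than `min_prec`; "as" casts are handled separately at
// precedence 5, between the bitwise operators (6) and the comparisons
// (4).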
fn parse_more_binops(p: &parser, lhs: @ast::expr, min_prec: int) ->
@ast::expr {
let peeked = p.peek();
for cur: op_spec in *p.get_prec_table() {
if cur.prec > min_prec && cur.tok == peeked {
p.bump();
let rhs = parse_more_binops(p, parse_prefix_expr(p), cur.prec);
let bin =
mk_expr(p, lhs.span.lo, rhs.span.hi,
ast::expr_binary(cur.op, lhs, rhs));
ret parse_more_binops(p, bin, min_prec);
}
}
if as_prec > min_prec && eat_word(p, "as") {
let rhs = parse_ty(p);
let _as =
mk_expr(p, lhs.span.lo, rhs.span.hi, ast::expr_cast(lhs, rhs));
ret parse_more_binops(p, _as, min_prec);
}
ret lhs;
}
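// Assignment and assignment-like forms sit above the binary-operator
// levels: plain "=", compound "op=", and the move, send, receive and
// swap operators each parse a full expression on the right-hand side.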
fn parse_assign_expr(p: &parser) -> @ast::expr {
let lo = p.get_lo_pos();
let lhs = parse_ternary(p);
alt p.peek() {
token::EQ. {
p.bump();
let rhs = parse_expr(p);
ret mk_expr(p, lo, rhs.span.hi, ast::expr_assign(lhs, rhs));
}
token::BINOPEQ(op) {
p.bump();
let rhs = parse_expr(p);
let aop = ast::add;
alt op {
token::PLUS. { aop = ast::add; }
token::MINUS. { aop = ast::sub; }
token::STAR. { aop = ast::mul; }
token::SLASH. { aop = ast::div; }
token::PERCENT. { aop = ast::rem; }
token::CARET. { aop = ast::bitxor; }
token::AND. { aop = ast::bitand; }
token::OR. { aop = ast::bitor; }
token::LSL. { aop = ast::lsl; }
token::LSR. { aop = ast::lsr; }
token::ASR. { aop = ast::asr; }
}
ret mk_expr(p, lo, rhs.span.hi, ast::expr_assign_op(aop, lhs, rhs));
}
token::LARROW. {
p.bump();
let rhs = parse_expr(p);
ret mk_expr(p, lo, rhs.span.hi, ast::expr_move(lhs, rhs));
}
token::SEND. {
p.bump();
let rhs = parse_expr(p);
ret mk_expr(p, lo, rhs.span.hi, ast::expr_send(lhs, rhs));
}
token::RECV. {
p.bump();
let rhs = parse_expr(p);
ret mk_expr(p, lo, rhs.span.hi, ast::expr_recv(lhs, rhs));
}
token::DARROW. {
p.bump();
let rhs = parse_expr(p);
ret mk_expr(p, lo, rhs.span.hi, ast::expr_swap(lhs, rhs));
}
_ {/* fall through */ }
}
ret lhs;
}
fn parse_if_expr_1(p: &parser) ->
{cond: @ast::expr,
then: ast::blk,
els: option::t[@ast::expr],
lo: uint,
hi: uint} {
let lo = p.get_last_lo_pos();
let cond = parse_expr(p);
let thn = parse_block(p);
let els: option::t[@ast::expr] = none;
let hi = thn.span.hi;
if eat_word(p, "else") {
let elexpr = parse_else_expr(p);
els = some(elexpr);
hi = elexpr.span.hi;
}
ret {cond: cond, then: thn, els: els, lo: lo, hi: hi};
}
fn parse_if_expr(p: &parser) -> @ast::expr {
if eat_word(p, "check") {
let q = parse_if_expr_1(p);
ret mk_expr(p, q.lo, q.hi, ast::expr_if_check(q.cond, q.then, q.els));
} else {
let q = parse_if_expr_1(p);
ret mk_expr(p, q.lo, q.hi, ast::expr_if(q.cond, q.then, q.els));
}
}
fn parse_fn_expr(p: &parser, proto: ast::proto) -> @ast::expr {
let lo = p.get_last_lo_pos();
let decl = parse_fn_decl(p, ast::impure_fn, ast::il_normal);
let body = parse_block(p);
let _fn = {decl: decl, proto: proto, body: body};
ret mk_expr(p, lo, body.span.hi, ast::expr_fn(_fn));
}
fn parse_else_expr(p: &parser) -> @ast::expr {
if eat_word(p, "if") {
ret parse_if_expr(p);
} else {
let blk = parse_block(p);
ret mk_expr(p, blk.span.lo, blk.span.hi, ast::expr_block(blk));
}
}
fn parse_for_expr(p: &parser) -> @ast::expr {
let lo = p.get_last_lo_pos();
let is_each = eat_word(p, "each");
let decl = parse_local(p, false);
expect_word(p, "in");
let seq = parse_expr(p);
let body = parse_block(p);
let hi = body.span.hi;
if is_each {
ret mk_expr(p, lo, hi, ast::expr_for_each(decl, seq, body));
} else { ret mk_expr(p, lo, hi, ast::expr_for(decl, seq, body)); }
}
fn parse_while_expr(p: &parser) -> @ast::expr {
let lo = p.get_last_lo_pos();
let cond = parse_expr(p);
let body = parse_block(p);
let hi = body.span.hi;
ret mk_expr(p, lo, hi, ast::expr_while(cond, body));
}
fn parse_do_while_expr(p: &parser) -> @ast::expr {
let lo = p.get_last_lo_pos();
let body = parse_block(p);
expect_word(p, "while");
let cond = parse_expr(p);
let hi = cond.span.hi;
ret mk_expr(p, lo, hi, ast::expr_do_while(body, cond));
}
fn parse_alt_expr(p: &parser) -> @ast::expr {
let lo = p.get_last_lo_pos();
let discriminant = parse_expr(p);
expect(p, token::LBRACE);
let arms: [ast::arm] = ~[];
while p.peek() != token::RBRACE {
let pats = parse_pats(p);
let blk = parse_block(p);
arms += ~[{pats: pats, block: blk}];
}
let hi = p.get_hi_pos();
p.bump();
ret mk_expr(p, lo, hi, ast::expr_alt(discriminant, arms));
}
fn parse_spawn_expr(p: &parser) -> @ast::expr {
let lo = p.get_last_lo_pos();
// FIXME: Parse domain and name
// FIXME: why no full expr?
let fn_expr = parse_bottom_expr(p);
let es =
parse_seq(token::LPAREN, token::RPAREN, some(token::COMMA),
parse_expr, p);
let hi = es.span.hi;
ret mk_expr(p, lo, hi,
ast::expr_spawn(ast::dom_implicit, option::none, fn_expr,
es.node));
}
fn parse_expr(p: &parser) -> @ast::expr {
ret parse_expr_res(p, UNRESTRICTED);
}
fn parse_expr_res(p: &parser, r: restriction) -> @ast::expr {
let old = p.get_restriction();
p.restrict(r);
let e = parse_assign_expr(p);
p.restrict(old);
ret e;
}
fn parse_initializer(p: &parser) -> option::t[ast::initializer] {
alt p.peek() {
token::EQ. {
p.bump();
ret some({op: ast::init_assign, expr: parse_expr(p)});
}
token::LARROW. {
p.bump();
ret some({op: ast::init_move, expr: parse_expr(p)});
}
// Now that the channel is the first argument to receive,
// combining it with an initializer doesn't really make sense.
// case (token::RECV) {
// p.bump();
// ret some(rec(op = ast::init_recv,
// expr = parse_expr(p)));
// }
_ {
ret none;
}
}
}
fn parse_pats(p: &parser) -> [@ast::pat] {
let pats = ~[];
while true {
pats += ~[parse_pat(p)];
if p.peek() == token::BINOP(token::OR) { p.bump(); } else { break; }
}
ret pats;
}
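// Parses a single pattern: "_", "@" box patterns, "{field: pat, _}"
// record patterns, literals, plain identifier bindings, and tag patterns
// such as "some(x)" or "none.".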
fn parse_pat(p: &parser) -> @ast::pat {
let lo = p.get_lo_pos();
let hi = p.get_hi_pos();
let pat;
alt p.peek() {
token::UNDERSCORE. { p.bump(); pat = ast::pat_wild; }
token::AT. {
p.bump();
let sub = parse_pat(p);
pat = ast::pat_box(sub);
hi = sub.span.hi;
}
token::LBRACE. {
p.bump();
let fields = ~[];
let etc = false;
let first = true;
while p.peek() != token::RBRACE {
if first { first = false; } else { expect(p, token::COMMA); }
if p.peek() == token::UNDERSCORE {
p.bump();
if p.peek() != token::RBRACE {
p.fatal("expecting }, found " +
token::to_str(p.get_reader(), p.peek()));
}
etc = true;
break;
}
let fieldname = parse_ident(p);
let subpat;
if p.peek() == token::COLON {
p.bump();
subpat = parse_pat(p);
} else {
if p.get_bad_expr_words().contains_key(fieldname) {
p.fatal("found " + fieldname + " in binding position");
}
subpat =
@{id: p.get_id(),
node: ast::pat_bind(fieldname),
span: {lo: lo, hi: hi}};
}
fields += ~[{ident: fieldname, pat: subpat}];
}
hi = p.get_hi_pos();
p.bump();
pat = ast::pat_rec(fields, etc);
}
tok {
if !is_ident(tok) || is_word(p, "true") || is_word(p, "false") {
let lit = parse_lit(p);
hi = lit.span.hi;
pat = ast::pat_lit(@lit);
} else if (is_plain_ident(p) &&
alt p.look_ahead(1u) {
token::DOT. | token::LPAREN. | token::LBRACKET. {
false
}
_ { true }
}) {
hi = p.get_hi_pos();
pat = ast::pat_bind(parse_value_ident(p));
} else {
let tag_path = parse_path_and_ty_param_substs(p);
hi = tag_path.span.hi;
let args: [@ast::pat];
alt p.peek() {
token::LPAREN. {
let a =
parse_seq(token::LPAREN, token::RPAREN,
some(token::COMMA), parse_pat, p);
args = a.node;
hi = a.span.hi;
}
token::DOT. { args = ~[]; p.bump(); }
_ { expect(p, token::LPAREN); fail; }
}
pat = ast::pat_tag(tag_path, args);
}
}
}
ret @{id: p.get_id(), node: pat, span: {lo: lo, hi: hi}};
}
fn parse_local(p: &parser, allow_init: bool) -> @ast::local {
let lo = p.get_lo_pos();
let pat = parse_pat(p);
let ty = @spanned(lo, lo, ast::ty_infer);
if eat(p, token::COLON) { ty = parse_ty(p); }
let init = if allow_init { parse_initializer(p) } else { none };
ret @spanned(lo, p.get_last_hi_pos(),
{ty: ty,
pat: pat,
init: init,
id: p.get_id()});
}
fn parse_let(p: &parser) -> @ast::decl {
let lo = p.get_lo_pos();
let locals = ~[parse_local(p, true)];
while p.peek() == token::COMMA {
p.bump();
locals += ~[parse_local(p, true)];
}
ret @spanned(lo, p.get_last_hi_pos(), ast::decl_local(locals));
}
fn parse_stmt(p: &parser) -> @ast::stmt {
if p.get_file_type() == SOURCE_FILE {
ret parse_source_stmt(p);
} else { ret parse_crate_stmt(p); }
}
fn parse_crate_stmt(p: &parser) -> @ast::stmt {
let cdir = parse_crate_directive(p, ~[]);
ret @spanned(cdir.span.lo, cdir.span.hi,
ast::stmt_crate_directive(@cdir));
}
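// A statement in ordinary source is either a "let" declaration, an item
// (possibly preceded by attributes), or a bare expression; attributes
// that are not followed by an item are an error.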
fn parse_source_stmt(p: &parser) -> @ast::stmt {
let lo = p.get_lo_pos();
if eat_word(p, "let") {
let decl = parse_let(p);
ret @spanned(lo, decl.span.hi, ast::stmt_decl(decl, p.get_id()));
} else {
let item_attrs;
alt parse_outer_attrs_or_ext(p) {
none. { item_attrs = ~[]; }
some(left(attrs)) { item_attrs = attrs; }
some(right(ext)) {
ret @spanned(lo, ext.span.hi, ast::stmt_expr(ext, p.get_id()));
}
}
let maybe_item = parse_item(p, item_attrs);
// If we have attributes then we should have an item
if ivec::len(item_attrs) > 0u {
alt maybe_item {
some(_) {/* fallthrough */ }
_ { ret p.fatal("expected item"); }
}
}
alt maybe_item {
some(i) {
let hi = i.span.hi;
let decl = @spanned(lo, hi, ast::decl_item(i));
ret @spanned(lo, hi, ast::stmt_decl(decl, p.get_id()));
}
none. {
// Remainder are line-expr stmts.
let e = parse_expr(p);
ret @spanned(lo, e.span.hi, ast::stmt_expr(e, p.get_id()));
}
_ { p.fatal("expected statement"); }
}
}
}
fn stmt_to_expr(stmt: @ast::stmt) -> option::t[@ast::expr] {
ret alt stmt.node { ast::stmt_expr(e, _) { some(e) } _ { none } };
}
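// Whether a statement must be followed by ';' when it is not in final
// block position: local declarations need one, item declarations do not,
// and expressions follow the table below (block-like forms such as if,
// alt and while do not).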
fn stmt_ends_with_semi(stmt: &ast::stmt) -> bool {
alt stmt.node {
ast::stmt_decl(d, _) {
ret alt d.node {
ast::decl_local(_) { true }
ast::decl_item(_) { false }
}
}
ast::stmt_expr(e, _) {
ret alt e.node {
ast::expr_vec(_, _, _) { true }
ast::expr_rec(_, _) { true }
ast::expr_call(_, _) { true }
ast::expr_self_method(_) { false }
ast::expr_bind(_, _) { true }
ast::expr_spawn(_, _, _, _) { true }
ast::expr_binary(_, _, _) { true }
ast::expr_unary(_, _) { true }
ast::expr_lit(_) { true }
ast::expr_cast(_, _) { true }
ast::expr_if(_, _, _) { false }
ast::expr_ternary(_, _, _) { true }
ast::expr_for(_, _, _) { false }
ast::expr_for_each(_, _, _) { false }
ast::expr_while(_, _) { false }
ast::expr_do_while(_, _) { false }
ast::expr_alt(_, _) { false }
ast::expr_fn(_) { false }
ast::expr_block(_) { false }
ast::expr_move(_, _) { true }
ast::expr_assign(_, _) { true }
ast::expr_swap(_, _) { true }
ast::expr_assign_op(_, _, _) { true }
ast::expr_send(_, _) { true }
ast::expr_recv(_, _) { true }
ast::expr_field(_, _) { true }
ast::expr_index(_, _) { true }
ast::expr_path(_) { true }
ast::expr_mac(_) { true }
ast::expr_fail(_) { true }
ast::expr_break. { true }
ast::expr_cont. { true }
ast::expr_ret(_) { true }
ast::expr_put(_) { true }
ast::expr_be(_) { true }
ast::expr_log(_, _) { true }
ast::expr_check(_, _) { true }
ast::expr_if_check(_, _, _) { false }
ast::expr_port(_) { true }
ast::expr_chan(_) { true }
ast::expr_anon_obj(_) { false }
ast::expr_assert(_) { true }
}
}
// We should not be calling this on a cdir.
ast::stmt_crate_directive(cdir) {
fail;
}
}
}
fn parse_block(p: &parser) -> ast::blk {
let lo = p.get_lo_pos();
expect(p, token::LBRACE);
be parse_block_tail(p, lo);
}
// some blocks start with "#{"...
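// The block body is read up to the closing '}': an expression in final
// position not followed by ';' becomes the block's trailing `expr`, and
// stmt_ends_with_semi decides which statements require a ';'.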
fn parse_block_tail(p: &parser, lo: uint) -> ast::blk {
let stmts: [@ast::stmt] = ~[];
let expr: option::t[@ast::expr] = none;
while p.peek() != token::RBRACE {
alt p.peek() {
token::SEMI. {
p.bump(); // empty
}
_ {
let stmt = parse_stmt(p);
alt stmt_to_expr(stmt) {
some(e) {
alt p.peek() {
token::SEMI. { p.bump(); stmts += ~[stmt]; }
token::RBRACE. { expr = some(e); }
t {
if stmt_ends_with_semi(*stmt) {
p.fatal("expected ';' or '}' after " +
"expression but found " +
token::to_str(p.get_reader(), t));
}
stmts += ~[stmt];
}
}
}
none. {
// Not an expression statement.
stmts += ~[stmt];
if p.get_file_type() == SOURCE_FILE &&
stmt_ends_with_semi(*stmt) {
expect(p, token::SEMI);
}
}
}
}
}
}
let hi = p.get_hi_pos();
p.bump();
let bloc = {stmts: stmts, expr: expr, id: p.get_id()};
ret spanned(lo, hi, bloc);
}
fn parse_ty_param(p: &parser) -> ast::ty_param {
let k = alt p.peek() {
token::TILDE. { p.bump(); ast::kind_unique }
token::AT. { p.bump(); ast::kind_shared }
_ { ast::kind_pinned }
};
ret {ident: parse_ident(p), kind: k};
}
fn parse_ty_params(p: &parser) -> [ast::ty_param] {
let ty_params: [ast::ty_param] = ~[];
if p.peek() == token::LBRACKET {
ty_params =
parse_seq(token::LBRACKET, token::RBRACKET, some(token::COMMA),
parse_ty_param, p).node;
}
ret ty_params;
}
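// Parses a declaration of the rough form "(args) : constrs -> result";
// constraint arguments are translated from bound-variable names to
// argument indices here, and a "!" result yields a noreturn decl whose
// output type is ty_bot.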
fn parse_fn_decl(p: &parser, purity: ast::purity, il: ast::inlineness)
-> ast::fn_decl {
let inputs: ast::spanned[[ast::arg]] =
parse_seq(token::LPAREN, token::RPAREN, some(token::COMMA), parse_arg,
p);
let rslt: ty_or_bang;
// Use the args list to translate each bound variable
// mentioned in a constraint to an arg index.
// Seems weird to do this in the parser, but I'm not sure how else to.
let constrs = ~[];
if p.peek() == token::COLON {
p.bump();
constrs = parse_constrs(bind parse_ty_constr(inputs.node, _), p);
}
if p.peek() == token::RARROW {
p.bump();
rslt = parse_ty_or_bang(p);
} else {
rslt = a_ty(@spanned(inputs.span.lo, inputs.span.hi, ast::ty_nil));
}
alt rslt {
a_ty(t) {
ret {inputs: inputs.node,
output: t,
purity: purity,
il: il,
cf: ast::return,
constraints: constrs};
}
a_bang. {
ret {inputs: inputs.node,
output: @spanned(p.get_lo_pos(), p.get_hi_pos(), ast::ty_bot),
purity: purity,
il: il,
cf: ast::noreturn,
constraints: constrs};
}
}
}
fn parse_fn(p: &parser, proto: ast::proto, purity: ast::purity,
il: ast::inlineness) -> ast::_fn {
let decl = parse_fn_decl(p, purity, il);
let body = parse_block(p);
ret {decl: decl, proto: proto, body: body};
}
fn parse_fn_header(p: &parser) -> {ident: ast::ident, tps: [ast::ty_param]} {
let id = parse_value_ident(p);
let ty_params = parse_ty_params(p);
ret {ident: id, tps: ty_params};
}
fn mk_item(p: &parser, lo: uint, hi: uint, ident: &ast::ident,
node: &ast::item_, attrs: &[ast::attribute]) -> @ast::item {
ret @{ident: ident,
attrs: attrs,
id: p.get_id(),
node: node,
span: {lo: lo, hi: hi}};
}
fn parse_item_fn_or_iter(p: &parser, purity: ast::purity, proto: ast::proto,
attrs: &[ast::attribute], il: ast::inlineness)
-> @ast::item {
let lo = p.get_last_lo_pos();
let t = parse_fn_header(p);
let f = parse_fn(p, proto, purity, il);
ret mk_item(p, lo, f.body.span.hi, t.ident, ast::item_fn(f, t.tps),
attrs);
}
fn parse_obj_field(p: &parser) -> ast::obj_field {
let mut = parse_mutability(p);
let ident = parse_value_ident(p);
expect(p, token::COLON);
let ty = parse_ty(p);
ret {mut: mut, ty: ty, ident: ident, id: p.get_id()};
}
fn parse_anon_obj_field(p: &parser) -> ast::anon_obj_field {
let mut = parse_mutability(p);
let ident = parse_value_ident(p);
expect(p, token::COLON);
let ty = parse_ty(p);
expect(p, token::EQ);
let expr = parse_expr(p);
ret {mut: mut, ty: ty, expr: expr, ident: ident, id: p.get_id()};
}
fn parse_method(p: &parser) -> @ast::method {
let lo = p.get_lo_pos();
let proto = parse_proto(p);
let ident = parse_value_ident(p);
let f = parse_fn(p, proto, ast::impure_fn, ast::il_normal);
let meth = {ident: ident, meth: f, id: p.get_id()};
ret @spanned(lo, f.body.span.hi, meth);
}
fn parse_item_obj(p: &parser, attrs: &[ast::attribute]) ->
@ast::item {
let lo = p.get_last_lo_pos();
let ident = parse_value_ident(p);
let ty_params = parse_ty_params(p);
let fields: ast::spanned[[ast::obj_field]] =
parse_seq(token::LPAREN, token::RPAREN, some(token::COMMA),
parse_obj_field, p);
let meths: [@ast::method] = ~[];
expect(p, token::LBRACE);
while p.peek() != token::RBRACE {
meths += ~[parse_method(p)];
}
let hi = p.get_hi_pos();
expect(p, token::RBRACE);
let ob: ast::_obj = {fields: fields.node, methods: meths};
ret mk_item(p, lo, hi, ident, ast::item_obj(ob, ty_params, p.get_id()),
attrs);
}
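// A resource item, e.g. "resource r(x: int) { ... }": the single
// argument is taken by immutable alias and the block is the destructor,
// packaged as an ordinary fn with a nil result type.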
fn parse_item_res(p: &parser, attrs: &[ast::attribute]) ->
@ast::item {
let lo = p.get_last_lo_pos();
let ident = parse_value_ident(p);
let ty_params = parse_ty_params(p);
expect(p, token::LPAREN);
let arg_ident = parse_value_ident(p);
expect(p, token::COLON);
let t = parse_ty(p);
expect(p, token::RPAREN);
let dtor = parse_block(p);
let decl =
{inputs:
~[{mode: ast::alias(false),
ty: t,
ident: arg_ident,
id: p.get_id()}],
output: @spanned(lo, lo, ast::ty_nil),
purity: ast::impure_fn,
il: ast::il_normal,
cf: ast::return,
constraints: ~[]};
let f = {decl: decl, proto: ast::proto_fn, body: dtor};
ret mk_item(p, lo, dtor.span.hi, ident,
ast::item_res(f, p.get_id(), ty_params, p.get_id()), attrs);
}
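// Parses the items of a module body up to `term` (RBRACE for a nested
// mod): any leading view items come first, and outer attributes already
// consumed by the caller are attached to the first item.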
fn parse_mod_items(p: &parser, term: token::token,
first_item_attrs: &[ast::attribute]) -> ast::_mod {
// Shouldn't be any view items since we've already parsed an item attr
let view_items =
if ivec::len(first_item_attrs) == 0u { parse_view(p) } else { ~[] };
let items: [@ast::item] = ~[];
let initial_attrs = first_item_attrs;
while p.peek() != term {
let attrs = initial_attrs + parse_outer_attributes(p);
initial_attrs = ~[];
alt parse_item(p, attrs) {
some(i) { items += ~[i]; }
_ {
p.fatal("expected item but found " +
token::to_str(p.get_reader(), p.peek()));
}
}
}
ret {view_items: view_items, items: items};
}
fn parse_item_const(p: &parser, attrs: &[ast::attribute]) -> @ast::item {
let lo = p.get_last_lo_pos();
let id = parse_value_ident(p);
expect(p, token::COLON);
let ty = parse_ty(p);
expect(p, token::EQ);
let e = parse_expr(p);
let hi = p.get_hi_pos();
expect(p, token::SEMI);
ret mk_item(p, lo, hi, id, ast::item_const(ty, e), attrs);
}
fn parse_item_mod(p: &parser, attrs: &[ast::attribute]) -> @ast::item {
let lo = p.get_last_lo_pos();
let id = parse_ident(p);
expect(p, token::LBRACE);
let inner_attrs = parse_inner_attrs_and_next(p);
let first_item_outer_attrs = inner_attrs.next;
let m = parse_mod_items(p, token::RBRACE, first_item_outer_attrs);
let hi = p.get_hi_pos();
expect(p, token::RBRACE);
ret mk_item(p, lo, hi, id, ast::item_mod(m), attrs + inner_attrs.inner);
}
fn parse_item_native_type(p: &parser, attrs: &[ast::attribute]) ->
@ast::native_item {
let t = parse_type_decl(p);
let hi = p.get_hi_pos();
expect(p, token::SEMI);
ret @{ident: t.ident,
attrs: attrs,
node: ast::native_item_ty,
id: p.get_id(),
span: {lo: t.lo, hi: hi}};
}
fn parse_item_native_fn(p: &parser, attrs: &[ast::attribute]) ->
@ast::native_item {
let lo = p.get_last_lo_pos();
let t = parse_fn_header(p);
let decl = parse_fn_decl(p, ast::impure_fn, ast::il_normal);
let link_name = none;
if p.peek() == token::EQ { p.bump(); link_name = some(parse_str(p)); }
let hi = p.get_hi_pos();
expect(p, token::SEMI);
ret @{ident: t.ident,
attrs: attrs,
node: ast::native_item_fn(link_name, decl, t.tps),
id: p.get_id(),
span: {lo: lo, hi: hi}};
}
fn parse_native_item(p: &parser, attrs: &[ast::attribute]) ->
@ast::native_item {
if eat_word(p, "type") {
ret parse_item_native_type(p, attrs);
} else if (eat_word(p, "fn")) {
ret parse_item_native_fn(p, attrs);
} else { unexpected(p, p.peek()); }
}
fn parse_native_mod_items(p: &parser, native_name: &str, abi: ast::native_abi,
first_item_attrs: &[ast::attribute])
-> ast::native_mod {
// Shouldn't be any view items since we've already parsed an item attr
let view_items =
if ivec::len(first_item_attrs) == 0u {
parse_native_view(p)
} else { ~[] };
let items: [@ast::native_item] = ~[];
let initial_attrs = first_item_attrs;
while p.peek() != token::RBRACE {
let attrs = initial_attrs + parse_outer_attributes(p);
initial_attrs = ~[];
items += ~[parse_native_item(p, attrs)];
}
ret {native_name: native_name,
abi: abi,
view_items: view_items,
items: items};
}
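// A rough, illustrative sketch of the native-mod syntax handled below: an
// optional ABI string, an optional '= <link name>' clause, and a braced list
// of native items (the library and function names are made up):
//
//     native "cdecl" mod libc = "libc.so" {
//         fn getenv(n: str) -> str;
//     }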
fn parse_item_native_mod(p: &parser, attrs: &[ast::attribute]) -> @ast::item {
let lo = p.get_last_lo_pos();
let abi = ast::native_abi_cdecl;
if !is_word(p, "mod") {
let t = parse_str(p);
if str::eq(t, "cdecl") {
} else if (str::eq(t, "rust")) {
abi = ast::native_abi_rust;
} else if (str::eq(t, "llvm")) {
abi = ast::native_abi_llvm;
} else if (str::eq(t, "rust-intrinsic")) {
abi = ast::native_abi_rust_intrinsic;
} else if (str::eq(t, "x86stdcall")) {
abi = ast::native_abi_x86stdcall;
} else { p.fatal("unsupported abi: " + t); }
}
expect_word(p, "mod");
let id = parse_ident(p);
let native_name;
if p.peek() == token::EQ {
expect(p, token::EQ);
native_name = parse_str(p);
} else { native_name = id; }
expect(p, token::LBRACE);
let more_attrs = parse_inner_attrs_and_next(p);
let inner_attrs = more_attrs.inner;
let first_item_outer_attrs = more_attrs.next;
let m =
parse_native_mod_items(p, native_name, abi, first_item_outer_attrs);
let hi = p.get_hi_pos();
expect(p, token::RBRACE);
ret mk_item(p, lo, hi, id, ast::item_native_mod(m), attrs + inner_attrs);
}
fn parse_type_decl(p: &parser) -> {lo: uint, ident: ast::ident} {
let lo = p.get_last_lo_pos();
let id = parse_ident(p);
ret {lo: lo, ident: id};
}
fn parse_item_type(p: &parser, attrs: &[ast::attribute]) -> @ast::item {
let t = parse_type_decl(p);
let tps = parse_ty_params(p);
expect(p, token::EQ);
let ty = parse_ty(p);
let hi = p.get_hi_pos();
expect(p, token::SEMI);
ret mk_item(p, t.lo, hi, t.ident, ast::item_ty(ty, tps), attrs);
}
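// A rough, illustrative sketch of the tag-item syntax handled below, in both
// the braced form with semicolon-terminated variants and the newtype form
// (the names are made up):
//
//     tag color { red; green; rgb(int, int, int); }
//     tag wrapper = int;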
fn parse_item_tag(p: &parser, attrs: &[ast::attribute]) -> @ast::item {
let lo = p.get_last_lo_pos();
let id = parse_ident(p);
let ty_params = parse_ty_params(p);
let variants: [ast::variant] = ~[];
// Newtype syntax
if p.peek() == token::EQ {
if p.get_bad_expr_words().contains_key(id) {
p.fatal("found " + id + " in tag constructor position");
}
p.bump();
let ty = parse_ty(p);
expect(p, token::SEMI);
let variant =
spanned(ty.span.lo, ty.span.hi,
{name: id,
args: ~[{ty: ty, id: p.get_id()}],
id: p.get_id()});
ret mk_item(p, lo, ty.span.hi, id,
ast::item_tag(~[variant], ty_params), attrs);
}
expect(p, token::LBRACE);
while p.peek() != token::RBRACE {
let tok = p.peek();
alt tok {
token::IDENT(name, _) {
check_bad_word(p);
let vlo = p.get_lo_pos();
p.bump();
let args: [ast::variant_arg] = ~[];
let vhi = p.get_hi_pos();
alt p.peek() {
token::LPAREN. {
let arg_tys =
parse_seq(token::LPAREN, token::RPAREN,
some(token::COMMA), parse_ty, p);
for ty: @ast::ty in arg_tys.node {
args += ~[{ty: ty, id: p.get_id()}];
}
vhi = arg_tys.span.hi;
}
_ {/* empty */ }
}
expect(p, token::SEMI);
p.get_id();
let vr = {name: p.get_str(name), args: args, id: p.get_id()};
variants += ~[spanned(vlo, vhi, vr)];
}
token::RBRACE. {/* empty */ }
_ {
p.fatal("expected name of variant or '}' but found " +
token::to_str(p.get_reader(), tok));
}
}
}
let hi = p.get_hi_pos();
p.bump();
ret mk_item(p, lo, hi, id, ast::item_tag(variants, ty_params), attrs);
}
fn parse_auth(p: &parser) -> ast::_auth {
if eat_word(p, "unsafe") {
ret ast::auth_unsafe;
} else { unexpected(p, p.peek()); }
}
fn parse_item(p: &parser, attrs: &[ast::attribute]) -> option::t[@ast::item] {
if eat_word(p, "const") {
ret some(parse_item_const(p, attrs));
} else if (eat_word(p, "inline")) {
expect_word(p, "fn");
ret some(parse_item_fn_or_iter(p, ast::impure_fn, ast::proto_fn,
attrs, ast::il_inline));
} else if (is_word(p, "fn") && p.look_ahead(1u) != token::LPAREN) {
p.bump();
ret some(parse_item_fn_or_iter(p, ast::impure_fn, ast::proto_fn,
attrs, ast::il_normal));
} else if (eat_word(p, "pred")) {
ret some(parse_item_fn_or_iter(p, ast::pure_fn, ast::proto_fn,
attrs, ast::il_normal));
} else if (eat_word(p, "iter")) {
ret some(parse_item_fn_or_iter(p, ast::impure_fn, ast::proto_iter,
attrs, ast::il_normal));
} else if (eat_word(p, "mod")) {
ret some(parse_item_mod(p, attrs));
} else if (eat_word(p, "native")) {
ret some(parse_item_native_mod(p, attrs));
}
if eat_word(p, "type") {
ret some(parse_item_type(p, attrs));
} else if (eat_word(p, "tag")) {
ret some(parse_item_tag(p, attrs));
} else if (is_word(p, "obj") && p.look_ahead(1u) != token::LPAREN) {
p.bump();
ret some(parse_item_obj(p, attrs));
} else if (eat_word(p, "resource")) {
ret some(parse_item_res(p, attrs));
} else { ret none; }
}
// A type to distinguish between the parsing of item attributes and syntax
// extensions, which both begin with token::POUND
type attr_or_ext = option::t[either::t[[ast::attribute], @ast::expr]];
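// Rough, illustrative examples of the two forms distinguished here (the
// particular attribute and extension names are assumptions):
//
//     #[cfg(test)]      // '#' followed by '[' starts an attribute
//     #fmt("%d", x)     // '#' followed by anything else is a syntax extension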
fn parse_outer_attrs_or_ext(p: &parser) -> attr_or_ext {
if p.peek() == token::POUND {
let lo = p.get_lo_pos();
p.bump();
if p.peek() == token::LBRACKET {
let first_attr = parse_attribute_naked(p, ast::attr_outer, lo);
ret some(left(~[first_attr] + parse_outer_attributes(p)));
} else if (!(p.peek() == token::LT || p.peek() == token::LBRACKET)) {
ret some(right(parse_syntax_ext_naked(p, lo)));
} else { ret none; }
} else { ret none; }
}
// Parse attributes that appear before an item
fn parse_outer_attributes(p: &parser) -> [ast::attribute] {
let attrs: [ast::attribute] = ~[];
while p.peek() == token::POUND {
attrs += ~[parse_attribute(p, ast::attr_outer)];
}
ret attrs;
}
fn parse_attribute(p: &parser, style: ast::attr_style) -> ast::attribute {
let lo = p.get_lo_pos();
expect(p, token::POUND);
ret parse_attribute_naked(p, style, lo);
}
fn parse_attribute_naked(p: &parser, style: ast::attr_style, lo: uint) ->
ast::attribute {
expect(p, token::LBRACKET);
let meta_item = parse_meta_item(p);
expect(p, token::RBRACKET);
let hi = p.get_hi_pos();
ret spanned(lo, hi, {style: style, value: *meta_item});
}
// Parse attributes that appear after the opening of an item, each terminated
// by a semicolon. In addition to a vector of inner attributes, this function
// also returns a vector that may contain the first outer attribute of the
// next item (since we can't know whether the attribute is an inner attribute
// of the containing item or an outer attribute of the first contained item
// until we see the semi).
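//
// A rough, illustrative example (the attribute names are made up): given
//
//     mod m {
//         #[doc = "about m"];   // semi => inner attribute of m
//         #[test]               // no semi => outer attribute of f
//         fn f() {}
//     }
//
// the first attribute comes back in 'inner' and the second in 'next'.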
fn parse_inner_attrs_and_next(p: &parser) ->
{inner: [ast::attribute], next: [ast::attribute]} {
let inner_attrs: [ast::attribute] = ~[];
let next_outer_attrs: [ast::attribute] = ~[];
while p.peek() == token::POUND {
let attr = parse_attribute(p, ast::attr_inner);
if p.peek() == token::SEMI {
p.bump();
inner_attrs += ~[attr];
} else {
// It's not really an inner attribute
let outer_attr =
spanned(attr.span.lo, attr.span.hi,
{style: ast::attr_outer, value: attr.node.value});
next_outer_attrs += ~[outer_attr];
break;
}
}
ret {inner: inner_attrs, next: next_outer_attrs};
}
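// Rough, illustrative examples of the three meta-item forms parsed below
// (the particular attribute names are assumptions):
//
//     #[test]                       // meta_word
//     #[link_name = "rustrt"]       // meta_name_value
//     #[cfg(target_os = "linux")]   // meta_list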
fn parse_meta_item(p: &parser) -> @ast::meta_item {
let lo = p.get_lo_pos();
let ident = parse_ident(p);
alt p.peek() {
token::EQ. {
p.bump();
let lit = parse_lit(p);
let hi = p.get_hi_pos();
ret @spanned(lo, hi, ast::meta_name_value(ident, lit));
}
token::LPAREN. {
let inner_items = parse_meta_seq(p);
let hi = p.get_hi_pos();
ret @spanned(lo, hi, ast::meta_list(ident, inner_items));
}
_ {
let hi = p.get_hi_pos();
ret @spanned(lo, hi, ast::meta_word(ident));
}
}
}
fn parse_meta_seq(p: &parser) -> [@ast::meta_item] {
ret parse_seq(token::LPAREN, token::RPAREN, some(token::COMMA),
parse_meta_item, p).node;
}
fn parse_optional_meta(p: &parser) -> [@ast::meta_item] {
alt p.peek() { token::LPAREN. { ret parse_meta_seq(p); } _ { ret ~[]; } }
}
fn parse_use(p: &parser) -> ast::view_item_ {
let ident = parse_ident(p);
let metadata = parse_optional_meta(p);
ret ast::view_item_use(ident, metadata, p.get_id());
}
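// Rough, illustrative examples of the import forms handled by the functions
// below (the module paths are made up):
//
//     import std::io;          // last segment names the binding
//     import sio = std::io;    // renamed via '='
//     import std::io::*;       // glob import; cannot be renamed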
fn parse_rest_import_name(p: &parser, first: ast::ident,
def_ident: option::t[ast::ident]) ->
ast::view_item_ {
let identifiers: [ast::ident] = ~[first];
let glob: bool = false;
while true {
alt p.peek() {
token::SEMI. { break; }
token::MOD_SEP. {
if glob { p.fatal("cannot path into a glob"); }
p.bump();
}
_ { p.fatal("expecting '::' or ';'"); }
}
alt p.peek() {
token::IDENT(_, _) { identifiers += ~[parse_ident(p)]; }
      // the lexer can't tell the different kinds of stars apart
token::BINOP(token::STAR.) {
glob = true;
p.bump();
}
_ { p.fatal("expecting an identifier, or '*'"); }
}
}
alt def_ident {
some(i) {
if glob { p.fatal("globbed imports can't be renamed"); }
ret ast::view_item_import(i, identifiers, p.get_id());
}
_ {
if glob {
ret ast::view_item_import_glob(identifiers, p.get_id());
} else {
let len = ivec::len(identifiers);
ret ast::view_item_import(identifiers.(len - 1u), identifiers,
p.get_id());
}
}
}
}
fn parse_full_import_name(p: &parser, def_ident: ast::ident) ->
ast::view_item_ {
alt p.peek() {
token::IDENT(i, _) {
p.bump();
ret parse_rest_import_name(p, p.get_str(i), some(def_ident));
}
_ { p.fatal("expecting an identifier"); }
}
}
fn parse_import(p: &parser) -> ast::view_item_ {
alt p.peek() {
token::IDENT(i, _) {
p.bump();
alt p.peek() {
token::EQ. {
p.bump();
ret parse_full_import_name(p, p.get_str(i));
}
_ { ret parse_rest_import_name(p, p.get_str(i), none); }
}
}
_ { p.fatal("expecting an identifier"); }
}
}
fn parse_export(p: &parser) -> ast::view_item_ {
let id = parse_ident(p);
ret ast::view_item_export(id, p.get_id());
}
fn parse_view_item(p: &parser) -> @ast::view_item {
let lo = p.get_lo_pos();
let the_item =
if eat_word(p, "use") {
parse_use(p)
} else if (eat_word(p, "import")) {
parse_import(p)
} else if (eat_word(p, "export")) { parse_export(p) } else { fail };
let hi = p.get_lo_pos();
expect(p, token::SEMI);
ret @spanned(lo, hi, the_item);
}
fn is_view_item(p: &parser) -> bool {
alt p.peek() {
token::IDENT(sid, false) {
let st = p.get_str(sid);
ret str::eq(st, "use") || str::eq(st, "import") ||
str::eq(st, "export");
}
_ { ret false; }
}
}
fn parse_view(p: &parser) -> [@ast::view_item] {
let items: [@ast::view_item] = ~[];
while is_view_item(p) { items += ~[parse_view_item(p)]; }
ret items;
2010-12-25 01:03:46 +00:00
}
fn parse_native_view(p: &parser) -> [@ast::view_item] {
let items: [@ast::view_item] = ~[];
while is_view_item(p) { items += ~[parse_view_item(p)]; }
ret items;
}
fn parse_crate_from_source_file(input: &str, cfg: &ast::crate_cfg,
sess: &parse_sess) -> @ast::crate {
let p = new_parser_from_file(sess, cfg, input, 0u, 0u, SOURCE_FILE);
ret parse_crate_mod(p, cfg, sess);
}
fn parse_crate_from_source_str(name: &str, source: &str, cfg: &ast::crate_cfg,
sess: &parse_sess) -> @ast::crate {
let ftype = SOURCE_FILE;
let filemap = codemap::new_filemap(name, 0u, 0u);
sess.cm.files += ~[filemap];
let itr = @interner::mk(str::hash, str::eq);
let rdr = lexer::new_reader(sess.cm, source, filemap, itr);
let p = new_parser(sess, cfg, rdr, ftype);
ret parse_crate_mod(p, cfg, sess);
}
// Parses a source module as a crate
fn parse_crate_mod(p: &parser, cfg: &ast::crate_cfg, sess: parse_sess) ->
@ast::crate {
let lo = p.get_lo_pos();
let crate_attrs = parse_inner_attrs_and_next(p);
let first_item_outer_attrs = crate_attrs.next;
let m = parse_mod_items(p, token::EOF, first_item_outer_attrs);
ret @spanned(lo, p.get_lo_pos(),
{directives: ~[],
module: m,
attrs: crate_attrs.inner,
config: p.get_cfg()});
}
fn parse_str(p: &parser) -> ast::ident {
alt p.peek() {
token::LIT_STR(s) { p.bump(); ret p.get_str(s); }
_ { fail; }
}
}
// Logic for parsing crate files (.rc)
//
// Each crate file is a sequence of directives.
//
// Each directive imperatively extends its environment with 0 or more items.
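//
// A rough, illustrative sketch of crate-file directives (the module and file
// names are made up):
//
//     mod front = "front.rs";         // source-file module
//     mod middle = "middle_dir" {     // directory module containing
//         mod ty = "ty.rs";           // further directives
//     }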
fn parse_crate_directive(p: &parser, first_outer_attr: &[ast::attribute]) ->
ast::crate_directive {
// Collect the next attributes
let outer_attrs = first_outer_attr + parse_outer_attributes(p);
// In a crate file outer attributes are only going to apply to mods
let expect_mod = ivec::len(outer_attrs) > 0u;
let lo = p.get_lo_pos();
if expect_mod || is_word(p, "mod") {
expect_word(p, "mod");
let id = parse_ident(p);
let file_opt =
alt p.peek() {
token::EQ. { p.bump(); some(parse_str(p)) }
_ { none }
};
alt p.peek() {
// mod x = "foo.rs";
token::SEMI. {
let hi = p.get_hi_pos();
p.bump();
ret spanned(lo, hi, ast::cdir_src_mod(id, file_opt, outer_attrs));
}
// mod x = "foo_dir" { ...directives... }
token::LBRACE. {
p.bump();
let inner_attrs = parse_inner_attrs_and_next(p);
let mod_attrs = outer_attrs + inner_attrs.inner;
let next_outer_attr = inner_attrs.next;
let cdirs =
parse_crate_directives(p, token::RBRACE, next_outer_attr);
let hi = p.get_hi_pos();
expect(p, token::RBRACE);
ret spanned(lo, hi,
ast::cdir_dir_mod(id, file_opt, cdirs, mod_attrs));
}
t { unexpected(p, t); }
}
} else if (eat_word(p, "auth")) {
let n = parse_path(p);
expect(p, token::EQ);
let a = parse_auth(p);
let hi = p.get_hi_pos();
expect(p, token::SEMI);
ret spanned(lo, hi, ast::cdir_auth(n, a));
} else if (is_view_item(p)) {
let vi = parse_view_item(p);
ret spanned(lo, vi.span.hi, ast::cdir_view_item(vi));
} else { ret p.fatal("expected crate directive"); }
}
fn parse_crate_directives(p: &parser, term: token::token,
first_outer_attr: &[ast::attribute]) ->
[@ast::crate_directive] {
// This is pretty ugly. If we have an outer attribute then we can't accept
// seeing the terminator next, so if we do see it then fail the same way
// parse_crate_directive would
if ivec::len(first_outer_attr) > 0u && p.peek() == term {
expect_word(p, "mod");
}
let cdirs: [@ast::crate_directive] = ~[];
while p.peek() != term {
let cdir = @parse_crate_directive(p, first_outer_attr);
cdirs += ~[cdir];
}
ret cdirs;
}
fn parse_crate_from_crate_file(input: &str, cfg: &ast::crate_cfg,
sess: &parse_sess) -> @ast::crate {
let p = new_parser_from_file(sess, cfg, input, 0u, 0u, CRATE_FILE);
let lo = p.get_lo_pos();
let prefix = std::fs::dirname(p.get_filemap().name);
let leading_attrs = parse_inner_attrs_and_next(p);
let crate_attrs = leading_attrs.inner;
let first_cdir_attr = leading_attrs.next;
let cdirs = parse_crate_directives(p, token::EOF, first_cdir_attr);
let deps: [str] = ~[];
let cx =
@{p: p,
mode: eval::mode_parse,
mutable deps: deps,
sess: sess,
mutable chpos: p.get_chpos(),
mutable byte_pos: p.get_byte_pos(),
cfg: p.get_cfg()};
let m = eval::eval_crate_directives_to_mod(cx, cdirs, prefix);
let hi = p.get_hi_pos();
expect(p, token::EOF);
ret @spanned(lo, hi,
{directives: cdirs,
module: m,
attrs: crate_attrs,
config: p.get_cfg()});
}
fn parse_crate_from_file(input: &str, cfg: &ast::crate_cfg,
sess: &parse_sess) -> @ast::crate {
if str::ends_with(input, ".rc") {
parse_crate_from_crate_file(input, cfg, sess)
} else if str::ends_with(input, ".rs") {
parse_crate_from_source_file(input, cfg, sess)
} else {
codemap::emit_error(none,
"unknown input file type: " + input,
sess.cm);
fail
}
}
//
// Local Variables:
// mode: rust
// fill-column: 78;
// indent-tabs-mode: nil
// c-basic-offset: 4
// buffer-file-coding-system: utf-8-unix
// compile-command: "make -k -C $RBUILD 2>&1 | sed -e 's/\\/x\\//x:\\//g'";
// End:
//