Getting rid of lots more vector +=. (issue #2719)

Eric Holk 2012-06-26 00:39:18 -07:00
parent 007b9d9acd
commit b9d3ad0736
34 changed files with 261 additions and 248 deletions
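
Editor's note: this commit applies one mechanical transformation throughout the tree. Each `v += [x]/~` (which in 2012-era Rust appended by building a one-element vector and concatenating it onto `v`, generally copying the accumulator) becomes `vec::push(v, x)`, a direct single-element append. The original syntax no longer compiles; the sketch below is an illustrative modern-Rust analogue of the before/after pattern (function names and values are invented for the example, not taken from the commit):

    // Old style: concatenate a one-element vector, copying the whole
    // accumulator each time -- roughly what `v += [t]/~` did.
    fn collect_concat(ts: &[i32]) -> Vec<i32> {
        let mut v = Vec::new();
        for &t in ts {
            v = [v, vec![t]].concat(); // copies all of v on every iteration
        }
        v
    }

    // New style: append in place, amortized O(1) per element --
    // the `vec::push(v, t)` form used everywhere below.
    fn collect_push(ts: &[i32]) -> Vec<i32> {
        let mut v = Vec::new();
        for &t in ts {
            v.push(t);
        }
        v
    }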

View File

@ -218,7 +218,7 @@ impl extensions<A:copy> for dvec<A> {
}
};
for ts.each { |t| v += [t]/~ };
for ts.each { |t| vec::push(v, t) };
v
}
}

View File

@ -31,7 +31,7 @@ export rsplit;
export rsplitn;
export shift;
export pop;
export push;
export push, push_all;
export grow;
export grow_fn;
export grow_set;
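
Editor's note: the newly re-exported `push_all` is used later in this commit wherever a call site appended several elements at once (for example `cc_args += ["-lrt", "-ldl"]/~` in the linker-argument code), rather than emitting a series of single pushes. A minimal modern-Rust sketch of the two operations, assuming today's `push` and `extend_from_slice` as rough equivalents (the flag strings are taken from the hunks below):

    // push: append one element; push_all / extend_from_slice: append a slice.
    fn add_link_args(cc_args: &mut Vec<String>) {
        cc_args.push("-lmorestack".to_string());      // analogue of vec::push
        cc_args.extend_from_slice(&[                  // analogue of vec::push_all
            "-lrt".to_string(),
            "-ldl".to_string(),
        ]);
    }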

View File

@ -229,9 +229,9 @@ fn map_item(i: @item, cx: ctx, v: vt) {
}
alt i.node {
item_mod(_) | item_native_mod(_) {
cx.path += [path_mod(i.ident)]/~;
vec::push(cx.path, path_mod(i.ident));
}
_ { cx.path += [path_name(i.ident)]/~; }
_ { vec::push(cx.path, path_name(i.ident)); }
}
visit::visit_item(i, cx, v);
vec::pop(cx.path);

View File

@ -298,13 +298,13 @@ fn split_class_items(cs: [@class_member]/~) -> ([ivar]/~, [@method]/~) {
for cs.each {|c|
alt c.node {
instance_var(i, t, cm, id, vis) {
vs += [{ident: /* FIXME (#2543) */ copy i,
vec::push(vs, {ident: /* FIXME (#2543) */ copy i,
ty: t,
cm: cm,
id: id,
vis: vis}]/~;
vis: vis});
}
class_method(m) { ms += [m]/~; }
class_method(m) { vec::push(ms, m); }
}
};
(vs, ms)

View File

@ -74,7 +74,7 @@ fn mk_substr_filename(cm: codemap, sp: span) -> str
}
fn next_line(file: filemap, chpos: uint, byte_pos: uint) {
file.lines += [{ch: chpos, byte: byte_pos + file.start_pos.byte}]/~;
vec::push(file.lines, {ch: chpos, byte: byte_pos + file.start_pos.byte});
}
type lookup_fn = pure fn(file_pos) -> uint;
@ -185,7 +185,9 @@ fn span_to_lines(sp: span, cm: codemap::codemap) -> @file_lines {
let lo = lookup_char_pos(cm, sp.lo);
let hi = lookup_char_pos(cm, sp.hi);
let mut lines = []/~;
for uint::range(lo.line - 1u, hi.line as uint) {|i| lines += [i]/~; };
for uint::range(lo.line - 1u, hi.line as uint) {|i|
vec::push(lines, i);
};
ret @{file: lo.file, lines: lines};
}

View File

@ -44,7 +44,7 @@ fn consume_non_eol_whitespace(rdr: string_reader) {
fn push_blank_line_comment(rdr: string_reader, &comments: [cmnt]/~) {
#debug(">>> blank-line comment");
let v: [str]/~ = []/~;
comments += [{style: blank_line, lines: v, pos: rdr.chpos}]/~;
vec::push(comments, {style: blank_line, lines: v, pos: rdr.chpos});
}
fn consume_whitespace_counting_blank_lines(rdr: string_reader,
@ -73,7 +73,7 @@ fn read_line_comments(rdr: string_reader, code_to_the_left: bool) -> cmnt {
while rdr.curr == '/' && nextch(rdr) == '/' {
let line = read_one_line_comment(rdr);
log(debug, line);
lines += [line]/~;
vec::push(lines, line);
consume_non_eol_whitespace(rdr);
}
#debug("<<< line comments");
@ -98,7 +98,7 @@ fn trim_whitespace_prefix_and_push_line(&lines: [str]/~,
} else { s1 = ""; }
} else { s1 = s; }
log(debug, "pushing line: " + s1);
lines += [s1]/~;
vec::push(lines, s1);
}
fn read_block_comment(rdr: string_reader, code_to_the_left: bool) -> cmnt {
@ -156,11 +156,11 @@ fn consume_comment(rdr: string_reader, code_to_the_left: bool,
&comments: [cmnt]/~) {
#debug(">>> consume comment");
if rdr.curr == '/' && nextch(rdr) == '/' {
comments += [read_line_comments(rdr, code_to_the_left)]/~;
vec::push(comments, read_line_comments(rdr, code_to_the_left));
} else if rdr.curr == '/' && nextch(rdr) == '*' {
comments += [read_block_comment(rdr, code_to_the_left)]/~;
vec::push(comments, read_block_comment(rdr, code_to_the_left));
} else if rdr.curr == '#' && nextch(rdr) == '!' {
comments += [read_shebang_comment(rdr, code_to_the_left)]/~;
vec::push(comments, read_shebang_comment(rdr, code_to_the_left));
} else { fail; }
#debug("<<< consume comment");
}

View File

@ -277,7 +277,7 @@ class parser {
let mut constrs: [@constr_general<T>]/~ = []/~;
loop {
let constr = pser(self);
constrs += [constr]/~;
vec::push(constrs, constr);
if self.token == token::COMMA { self.bump(); }
else { ret constrs; }
};
@ -363,7 +363,7 @@ class parser {
let mut ts = [self.parse_ty(false)]/~;
while self.token == token::COMMA {
self.bump();
ts += [self.parse_ty(false)]/~;
vec::push(ts, self.parse_ty(false));
}
let t = if vec::len(ts) == 1u { ts[0].node }
else { ty_tup(ts) };
@ -591,10 +591,10 @@ class parser {
&& self.look_ahead(1u) == token::MOD_SEP;
if is_not_last {
ids += [parse_ident(self)]/~;
vec::push(ids, parse_ident(self));
self.expect(token::MOD_SEP);
} else {
ids += [parse_last_ident(self)]/~;
vec::push(ids, parse_last_ident(self));
break;
}
}
@ -718,7 +718,7 @@ class parser {
}
let mut es = [self.parse_expr()]/~;
while self.token == token::COMMA {
self.bump(); es += [self.parse_expr()]/~;
self.bump(); vec::push(es, self.parse_expr());
}
hi = self.span.hi;
self.expect(token::RPAREN);
@ -751,7 +751,7 @@ class parser {
// record ends by an optional trailing comma
break;
}
fields += [self.parse_field(token::COLON)]/~;
vec::push(fields, self.parse_field(token::COLON));
}
hi = self.span.hi;
self.expect(token::RBRACE);
@ -1393,7 +1393,7 @@ class parser {
if self.eat_keyword("if") { guard = some(self.parse_expr()); }
if self.token == token::FAT_ARROW { self.bump(); }
let blk = self.parse_block();
arms += [{pats: pats, guard: guard, body: blk}]/~;
vec::push(arms, {pats: pats, guard: guard, body: blk});
}
let mut hi = self.span.hi;
self.bump();
@ -1438,7 +1438,7 @@ class parser {
fn parse_pats() -> [@pat]/~ {
let mut pats = []/~;
loop {
pats += [self.parse_pat()]/~;
vec::push(pats, self.parse_pat());
if self.token == token::BINOP(token::OR) { self.bump(); }
else { ret pats; }
};
@ -1499,7 +1499,7 @@ class parser {
node: pat_ident(fieldpath, none),
span: mk_sp(lo, hi)};
}
fields += [{ident: fieldname, pat: subpat}]/~;
vec::push(fields, {ident: fieldname, pat: subpat});
}
hi = self.span.hi;
self.bump();
@ -1517,7 +1517,7 @@ class parser {
let mut fields = [self.parse_pat()]/~;
while self.token == token::COMMA {
self.bump();
fields += [self.parse_pat()]/~;
vec::push(fields, self.parse_pat());
}
if vec::len(fields) == 1u { self.expect(token::COMMA); }
hi = self.span.hi;
@ -1607,7 +1607,7 @@ class parser {
let lo = self.span.lo;
let mut locals = [self.parse_local(is_mutbl, true)]/~;
while self.eat(token::COMMA) {
locals += [self.parse_local(is_mutbl, true)]/~;
vec::push(locals, self.parse_local(is_mutbl, true));
}
ret @spanned(lo, self.last_span.hi, decl_local(locals));
}
@ -1769,13 +1769,13 @@ class parser {
but found '"
+ token_to_str(self.reader, t) + "'");
}
stmts += [stmt]/~;
vec::push(stmts, stmt);
}
}
}
_ { // All other kinds of statements:
stmts += [stmt]/~;
vec::push(stmts, stmt);
if classify::stmt_ends_with_semi(*stmt) {
self.expect(token::SEMI);
@ -1964,7 +1964,7 @@ class parser {
let mut meths = []/~;
self.expect(token::LBRACE);
while !self.eat(token::RBRACE) {
meths += [self.parse_method(public)]/~;
vec::push(meths, self.parse_method(public));
}
(ident, item_impl(tps, rp, ifce, ty, meths), none)
}
@ -2099,7 +2099,7 @@ class parser {
self.expect(token::LBRACE);
let mut results = []/~;
while self.token != token::RBRACE {
results += [self.parse_single_class_item(private)]/~;
vec::push(results, self.parse_single_class_item(private));
}
self.bump();
ret members(results);
@ -2129,7 +2129,7 @@ class parser {
#debug["parse_mod_items: parse_item(attrs=%?)", attrs];
let vis = self.parse_visibility(private);
alt self.parse_item(attrs, vis) {
some(i) { items += [i]/~; }
some(i) { vec::push(items, i); }
_ {
self.fatal("expected item but found '" +
token_to_str(self.reader, self.token) + "'");
@ -2206,7 +2206,7 @@ class parser {
while self.token != token::RBRACE {
let attrs = initial_attrs + self.parse_outer_attributes();
initial_attrs = []/~;
items += [self.parse_native_item(attrs)]/~;
vec::push(items, self.parse_native_item(attrs));
}
ret {view_items: view_items,
items: items};
@ -2285,7 +2285,7 @@ class parser {
seq_sep_trailing_disallowed(token::COMMA),
{|p| p.parse_ty(false)});
for arg_tys.each {|ty|
args += [{ty: ty, id: self.get_id()}]/~;
vec::push(args, {ty: ty, id: self.get_id()});
}
} else if self.eat(token::EQ) {
have_disr = true;
@ -2295,7 +2295,7 @@ class parser {
let vr = {name: ident, attrs: variant_attrs,
args: args, id: self.get_id(),
disr_expr: disr_expr, vis: vis};
variants += [spanned(vlo, self.last_span.hi, vr)]/~;
vec::push(variants, spanned(vlo, self.last_span.hi, vr));
if !self.eat(token::COMMA) { break; }
}
@ -2399,7 +2399,7 @@ class parser {
while self.token == token::MOD_SEP {
self.bump();
let id = self.parse_ident();
path += [id]/~;
vec::push(path, id);
}
let path = @{span: mk_sp(lo, self.span.hi), global: false,
idents: path, rp: none, types: []/~};
@ -2416,7 +2416,7 @@ class parser {
token::IDENT(i, _) {
self.bump();
path += [self.get_str(i)]/~;
vec::push(path, self.get_str(i));
}
// foo::bar::{a,b,c}
@ -2459,7 +2459,7 @@ class parser {
let mut vp = [self.parse_view_path()]/~;
while self.token == token::COMMA {
self.bump();
vp += [self.parse_view_path()]/~;
vec::push(vp, self.parse_view_path());
}
ret vp;
}
@ -2494,7 +2494,7 @@ class parser {
let mut items = []/~;
while if only_imports { self.is_keyword("import") }
else { self.is_view_item() } {
items += [self.parse_view_item(attrs)]/~;
vec::push(items, self.parse_view_item(attrs));
attrs = self.parse_outer_attributes();
}
{attrs_remaining: attrs, view_items: items}
@ -2583,7 +2583,7 @@ class parser {
let mut first_outer_attr = first_outer_attr;
while self.token != term {
let cdir = @self.parse_crate_directive(first_outer_attr);
cdirs += [cdir]/~;
vec::push(cdirs, cdir);
first_outer_attr = []/~;
}
ret cdirs;

View File

@ -305,14 +305,14 @@ fn build_link_meta(sess: session, c: ast::crate, output: str,
if *attr::get_meta_item_name(meta) == "name" {
alt attr::get_meta_item_value_str(meta) {
some(v) { name = some(v); }
none { cmh_items += [meta]/~; }
none { vec::push(cmh_items, meta); }
}
} else if *attr::get_meta_item_name(meta) == "vers" {
alt attr::get_meta_item_value_str(meta) {
some(v) { vers = some(v); }
none { cmh_items += [meta]/~; }
none { vec::push(cmh_items, meta); }
}
} else { cmh_items += [meta]/~; }
} else { vec::push(cmh_items, meta); }
}
ret {name: name, vers: vers, cmh_items: cmh_items};
}
@ -591,18 +591,18 @@ fn link_binary(sess: session,
let cstore = sess.cstore;
for cstore::get_used_crate_files(cstore).each {|cratepath|
if str::ends_with(cratepath, ".rlib") {
cc_args += [cratepath]/~;
vec::push(cc_args, cratepath);
cont;
}
let cratepath = cratepath;
let dir = path::dirname(cratepath);
if dir != "" { cc_args += ["-L" + dir]/~; }
if dir != "" { vec::push(cc_args, "-L" + dir); }
let libarg = unlib(sess.targ_cfg, path::basename(cratepath));
cc_args += ["-l" + libarg]/~;
vec::push(cc_args, "-l" + libarg);
}
let ula = cstore::get_used_link_args(cstore);
for ula.each {|arg| cc_args += [arg]/~; }
for ula.each {|arg| vec::push(cc_args, arg); }
// # Native library linking
@ -613,47 +613,47 @@ fn link_binary(sess: session,
// forces to make sure that library can be found at runtime.
let addl_paths = sess.opts.addl_lib_search_paths;
for addl_paths.each {|path| cc_args += ["-L" + path]/~; }
for addl_paths.each {|path| vec::push(cc_args, "-L" + path); }
// The names of the native libraries
let used_libs = cstore::get_used_libraries(cstore);
for used_libs.each {|l| cc_args += ["-l" + l]/~; }
for used_libs.each {|l| vec::push(cc_args, "-l" + l); }
if sess.building_library {
cc_args += [lib_cmd]/~;
vec::push(cc_args, lib_cmd);
// On mac we need to tell the linker to let this library
// be rpathed
if sess.targ_cfg.os == session::os_macos {
cc_args += ["-Wl,-install_name,@rpath/"
+ path::basename(output)]/~;
vec::push(cc_args, "-Wl,-install_name,@rpath/"
+ path::basename(output));
}
}
if !sess.debugging_opt(session::no_rt) {
// Always want the runtime linked in
cc_args += ["-lrustrt"]/~;
vec::push(cc_args, "-lrustrt");
}
// On linux librt and libdl are an indirect dependencies via rustrt,
// and binutils 2.22+ won't add them automatically
if sess.targ_cfg.os == session::os_linux {
cc_args += ["-lrt", "-ldl"]/~;
vec::push_all(cc_args, ["-lrt", "-ldl"]/~);
// LLVM implements the `frem` instruction as a call to `fmod`,
// which lives in libm. Similar to above, on some linuxes we
// have to be explicit about linking to it. See #2510
cc_args += ["-lm"]/~;
vec::push(cc_args, "-lm");
}
if sess.targ_cfg.os == session::os_freebsd {
cc_args += ["-pthread", "-lrt",
vec::push_all(cc_args, ["-pthread", "-lrt",
"-L/usr/local/lib", "-lexecinfo",
"-L/usr/local/lib/gcc46",
"-L/usr/local/lib/gcc44", "-lstdc++",
"-Wl,-z,origin",
"-Wl,-rpath,/usr/local/lib/gcc46",
"-Wl,-rpath,/usr/local/lib/gcc44"]/~;
"-Wl,-rpath,/usr/local/lib/gcc44"]/~);
}
// OS X 10.6 introduced 'compact unwind info', which is produced by the
@ -661,15 +661,15 @@ fn link_binary(sess: session,
// understand how to unwind our __morestack frame, so we have to turn it
// off. This has impacted some other projects like GHC.
if sess.targ_cfg.os == session::os_macos {
cc_args += ["-Wl,-no_compact_unwind"]/~;
vec::push(cc_args, "-Wl,-no_compact_unwind");
}
// Stack growth requires statically linking a __morestack function
cc_args += ["-lmorestack"]/~;
vec::push(cc_args, "-lmorestack");
// FIXME (#2397): At some point we want to rpath our guesses as to where
// native libraries might live, based on the addl_lib_search_paths
cc_args += rpath::get_rpath_flags(sess, output);
vec::push_all(cc_args, rpath::get_rpath_flags(sess, output));
#debug("%s link args: %s", cc_prog, str::connect(cc_args, " "));
// We run 'cc' here

View File

@ -44,7 +44,7 @@ fn lookup_defs(cstore: cstore::cstore, cnum: ast::crate_num,
#debug("lookup_defs: path = %? cnum = %?", path, cnum);
for resolve_path(cstore, cnum, path).each {|elt|
let (c, data, def) = elt;
result += [decoder::lookup_def(c, data, def)]/~;
vec::push(result, decoder::lookup_def(c, data, def));
}
ret result;
}
@ -68,13 +68,13 @@ fn resolve_path(cstore: cstore::cstore, cnum: ast::crate_num,
let mut result = []/~;
for decoder::resolve_path(path, cm.data).each {|def|
if def.crate == ast::local_crate {
result += [(cnum, cm.data, def)]/~;
vec::push(result, (cnum, cm.data, def));
} else {
if cm.cnum_map.contains_key(def.crate) {
// This reexport is itself a reexport from another crate
let next_cnum = cm.cnum_map.get(def.crate);
let next_cm_data = cstore::get_crate_data(cstore, next_cnum);
result += [(next_cnum, next_cm_data.data, def)]/~;
vec::push(result, (next_cnum, next_cm_data.data, def));
}
}
}

View File

@ -113,7 +113,7 @@ fn iter_crate_data(cstore: cstore, i: fn(ast::crate_num, crate_metadata)) {
fn add_used_crate_file(cstore: cstore, lib: str) {
if !vec::contains(p(cstore).used_crate_files, lib) {
p(cstore).used_crate_files += [lib]/~;
vec::push(p(cstore).used_crate_files, lib);
}
}
@ -125,7 +125,7 @@ fn add_used_library(cstore: cstore, lib: str) -> bool {
assert lib != "";
if vec::contains(p(cstore).used_libraries, lib) { ret false; }
p(cstore).used_libraries += [lib]/~;
vec::push(p(cstore).used_libraries, lib);
ret true;
}
@ -161,7 +161,7 @@ fn get_dep_hashes(cstore: cstore) -> [@str]/~ {
let cdata = cstore::get_crate_data(cstore, cnum);
let hash = decoder::get_crate_hash(cdata.data);
#debug("Add hash[%s]: %s", cdata.name, *hash);
result += [{name: @cdata.name, hash: hash}]/~;
vec::push(result, {name: @cdata.name, hash: hash});
};
fn lteq(a: crate_hash, b: crate_hash) -> bool {
ret *a.name <= *b.name;

View File

@ -67,7 +67,7 @@ fn lookup_hash(d: ebml::doc, eq_fn: fn@([u8]/~) -> bool, hash: uint) ->
ebml::tagged_docs(bucket, belt) {|elt|
let pos = io::u64_from_be_bytes(*elt.data, elt.start, 4u) as uint;
if eq_fn(vec::slice::<u8>(*elt.data, elt.start + 4u, elt.end)) {
result += [ebml::doc_at(d.data, pos).doc]/~;
vec::push(result, ebml::doc_at(d.data, pos).doc);
}
};
ret result;
@ -170,7 +170,7 @@ fn item_ty_param_bounds(item: ebml::doc, tcx: ty::ctxt, cdata: cmd)
let bd = parse_bounds_data(p.data, p.start, cdata.cnum, tcx, {|did|
translate_def_id(cdata, did)
});
bounds += [bd]/~;
vec::push(bounds, bd);
}
@bounds
}
@ -199,7 +199,7 @@ fn enum_variant_ids(item: ebml::doc, cdata: cmd) -> [ast::def_id]/~ {
let v = tag_items_data_item_variant;
ebml::tagged_docs(item, v) {|p|
let ext = parse_def_id(ebml::doc_data(p));
ids += [{crate: cdata.cnum, node: ext.node}]/~;
vec::push(ids, {crate: cdata.cnum, node: ext.node});
};
ret ids;
}
@ -218,7 +218,7 @@ fn resolve_path(path: [ast::ident]/~, data: @[u8]/~) -> [ast::def_id]/~ {
#debug("resolve_path: looking up %s", s);
for lookup_hash(paths, eqer, hash_path(s)).each {|doc|
let did_doc = ebml::get_doc(doc, tag_def_id);
result += [parse_def_id(ebml::doc_data(did_doc))]/~;
vec::push(result, parse_def_id(ebml::doc_data(did_doc)));
}
ret result;
}
@ -235,10 +235,10 @@ fn item_path(item_doc: ebml::doc) -> ast_map::path {
ebml::docs(path_doc) {|tag, elt_doc|
if tag == tag_path_elt_mod {
let str = ebml::doc_as_str(elt_doc);
result += [ast_map::path_mod(@str)]/~;
vec::push(result, ast_map::path_mod(@str));
} else if tag == tag_path_elt_name {
let str = ebml::doc_as_str(elt_doc);
result += [ast_map::path_name(@str)]/~;
vec::push(result, ast_map::path_name(@str));
} else {
// ignore tag_path_len element
}
@ -407,7 +407,7 @@ fn get_enum_variants(cdata: cmd, id: ast::node_id, tcx: ty::ctxt)
let mut arg_tys: [ty::t]/~ = []/~;
alt ty::get(ctor_ty).struct {
ty::ty_fn(f) {
for f.inputs.each {|a| arg_tys += [a.ty]/~; }
for f.inputs.each {|a| vec::push(arg_tys, a.ty); }
}
_ { /* Nullary enum variant. */ }
}
@ -415,8 +415,8 @@ fn get_enum_variants(cdata: cmd, id: ast::node_id, tcx: ty::ctxt)
some(val) { disr_val = val; }
_ { /* empty */ }
}
infos += [@{args: arg_tys, ctor_ty: ctor_ty, name: name,
id: did, disr_val: disr_val}]/~;
vec::push(infos, @{args: arg_tys, ctor_ty: ctor_ty, name: name,
id: did, disr_val: disr_val});
disr_val += 1;
}
ret infos;
@ -432,10 +432,10 @@ fn item_impl_methods(cdata: cmd, item: ebml::doc, base_tps: uint)
ebml::tagged_docs(item, tag_item_impl_method) {|doc|
let m_did = parse_def_id(ebml::doc_data(doc));
let mth_item = lookup_item(m_did.node, cdata.data);
rslt += [@{did: translate_def_id(cdata, m_did),
vec::push(rslt, @{did: translate_def_id(cdata, m_did),
/* FIXME (maybe #2323) tjc: take a look at this. */
n_tps: item_ty_param_count(mth_item) - base_tps,
ident: item_name(mth_item)}]/~;
ident: item_name(mth_item)});
}
rslt
}
@ -458,10 +458,10 @@ fn get_impls_for_mod(cdata: cmd, m_id: ast::node_id,
let nm = item_name(item);
if alt name { some(n) { n == nm } none { true } } {
let base_tps = item_ty_param_count(item);
result += [@{
vec::push(result, @{
did: local_did, ident: nm,
methods: item_impl_methods(impl_cdata, item, base_tps)
}]/~;
});
};
}
@result
@ -482,12 +482,12 @@ fn get_iface_methods(cdata: cmd, id: ast::node_id, tcx: ty::ctxt)
tcx.diag.handler().bug(
"get_iface_methods: id has non-function type");
} };
result += [{ident: name, tps: bounds, fty: fty,
vec::push(result, {ident: name, tps: bounds, fty: fty,
purity: alt check item_family(mth) {
'u' { ast::unsafe_fn }
'f' { ast::impure_fn }
'p' { ast::pure_fn }
}, vis: ast::public}]/~;
}, vis: ast::public});
}
@result
}
@ -504,8 +504,8 @@ fn get_class_members(cdata: cmd, id: ast::node_id,
let name = item_name(an_item);
let did = class_member_id(an_item, cdata);
let mt = field_mutability(an_item);
result += [{ident: name, id: did, vis:
family_to_visibility(f), mutability: mt}]/~;
vec::push(result, {ident: name, id: did, vis:
family_to_visibility(f), mutability: mt});
}
}
result
@ -581,7 +581,7 @@ fn get_meta_items(md: ebml::doc) -> [@ast::meta_item]/~ {
ebml::tagged_docs(md, tag_meta_item_word) {|meta_item_doc|
let nd = ebml::get_doc(meta_item_doc, tag_meta_item_name);
let n = str::from_bytes(ebml::doc_data(nd));
items += [attr::mk_word_item(@n)]/~;
vec::push(items, attr::mk_word_item(@n));
};
ebml::tagged_docs(md, tag_meta_item_name_value) {|meta_item_doc|
let nd = ebml::get_doc(meta_item_doc, tag_meta_item_name);
@ -590,13 +590,13 @@ fn get_meta_items(md: ebml::doc) -> [@ast::meta_item]/~ {
let v = str::from_bytes(ebml::doc_data(vd));
// FIXME (#623): Should be able to decode meta_name_value variants,
// but currently the encoder just drops them
items += [attr::mk_name_value_item_str(@n, v)]/~;
vec::push(items, attr::mk_name_value_item_str(@n, v));
};
ebml::tagged_docs(md, tag_meta_item_list) {|meta_item_doc|
let nd = ebml::get_doc(meta_item_doc, tag_meta_item_name);
let n = str::from_bytes(ebml::doc_data(nd));
let subitems = get_meta_items(meta_item_doc);
items += [attr::mk_list_item(@n, subitems)]/~;
vec::push(items, attr::mk_list_item(@n, subitems));
};
ret items;
}
@ -653,10 +653,10 @@ fn get_crate_deps(data: @[u8]/~) -> [crate_dep]/~ {
str::from_bytes(ebml::doc_data(ebml::get_doc(doc, tag_)))
}
ebml::tagged_docs(depsdoc, tag_crate_dep) {|depdoc|
deps += [{cnum: crate_num,
vec::push(deps, {cnum: crate_num,
name: @docstr(depdoc, tag_crate_dep_name),
vers: @docstr(depdoc, tag_crate_dep_vers),
hash: @docstr(depdoc, tag_crate_dep_hash)}]/~;
hash: @docstr(depdoc, tag_crate_dep_hash)});
crate_num += 1;
};
ret deps;
@ -732,7 +732,7 @@ fn get_crate_module_paths(bytes: @[u8]/~) -> [(ast::def_id, str)]/~ {
// Collect everything by now. There might be multiple
// paths pointing to the same did. Those will be
// unified later by using the mods map
res += [(did, path)]/~;
vec::push(res, (did, path));
}
ret vec::filter(res) {|x|
let (_, xp) = x;

View File

@ -238,7 +238,7 @@ fn encode_reexport_paths(ebml_w: ebml::writer,
ecx: @encode_ctxt, &index: [entry<str>]/~) {
for ecx.reexports.each {|reexport|
let (path, def_id) = reexport;
index += [{val: path, pos: ebml_w.writer.tell()}]/~;
vec::push(index, {val: path, pos: ebml_w.writer.tell()});
ebml_w.start_tag(tag_paths_data_item);
encode_name(ebml_w, @path);
encode_def_id(ebml_w, def_id);
@ -332,7 +332,7 @@ fn encode_enum_variant_info(ecx: @encode_ctxt, ebml_w: ebml::writer,
let mut i = 0;
let vi = ty::enum_variants(ecx.tcx, {crate: local_crate, node: id});
for variants.each {|variant|
*index += [{val: variant.node.id, pos: ebml_w.writer.tell()}]/~;
vec::push(*index, {val: variant.node.id, pos: ebml_w.writer.tell()});
ebml_w.start_tag(tag_items_data_item);
encode_def_id(ebml_w, local_def(variant.node.id));
encode_family(ebml_w, 'v');
@ -433,8 +433,8 @@ fn encode_info_for_class(ecx: @encode_ctxt, ebml_w: ebml::writer,
private fields to get the offsets right */
alt ci.node {
instance_var(nm, _, mt, id, vis) {
*index += [{val: id, pos: ebml_w.writer.tell()}]/~;
*global_index += [{val: id, pos: ebml_w.writer.tell()}]/~;
vec::push(*index, {val: id, pos: ebml_w.writer.tell()});
vec::push(*global_index, {val: id, pos: ebml_w.writer.tell()});
ebml_w.start_tag(tag_items_data_item);
#debug("encode_info_for_class: doing %s %d", *nm, id);
encode_visibility(ebml_w, vis);
@ -448,8 +448,9 @@ fn encode_info_for_class(ecx: @encode_ctxt, ebml_w: ebml::writer,
class_method(m) {
alt m.vis {
public {
*index += [{val: m.id, pos: ebml_w.writer.tell()}]/~;
*global_index += [{val: m.id, pos: ebml_w.writer.tell()}]/~;
vec::push(*index, {val: m.id, pos: ebml_w.writer.tell()});
vec::push(*global_index,
{val: m.id, pos: ebml_w.writer.tell()});
let impl_path = path + [ast_map::path_name(m.ident)]/~;
#debug("encode_info_for_class: doing %s %d", *m.ident, m.id);
encode_info_for_method(ecx, ebml_w, impl_path,
@ -539,7 +540,7 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item,
fn add_to_index_(item: @item, ebml_w: ebml::writer,
index: @mut [entry<int>]/~) {
*index += [{val: item.id, pos: ebml_w.writer.tell()}]/~;
vec::push(*index, {val: item.id, pos: ebml_w.writer.tell()});
}
let add_to_index = {|copy ebml_w|add_to_index_(item, ebml_w, index)};
@ -621,7 +622,7 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item,
items, index);
/* Encode the dtor */
option::iter(m_dtor) {|dtor|
*index += [{val: dtor.node.id, pos: ebml_w.writer.tell()}]/~;
vec::push(*index, {val: dtor.node.id, pos: ebml_w.writer.tell()});
encode_info_for_fn(ecx, ebml_w, dtor.node.id, @(*item.ident
+ "_dtor"), path, if tps.len() > 0u {
some(ii_dtor(dtor, item.ident, tps,
@ -710,7 +711,7 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item,
let impl_path = path + [ast_map::path_name(item.ident)]/~;
for methods.each {|m|
*index += [{val: m.id, pos: ebml_w.writer.tell()}]/~;
vec::push(*index, {val: m.id, pos: ebml_w.writer.tell()});
encode_info_for_method(ecx, ebml_w, impl_path,
should_inline(m.attrs), item.id, m, tps + m.tps);
}
@ -745,7 +746,7 @@ fn encode_info_for_native_item(ecx: @encode_ctxt, ebml_w: ebml::writer,
index: @mut [entry<int>]/~,
path: ast_map::path, abi: native_abi) {
if !reachable(ecx, nitem.id) { ret; }
*index += [{val: nitem.id, pos: ebml_w.writer.tell()}]/~;
vec::push(*index, {val: nitem.id, pos: ebml_w.writer.tell()});
ebml_w.start_tag(tag_items_data_item);
alt nitem.node {
@ -770,7 +771,7 @@ fn encode_info_for_items(ecx: @encode_ctxt, ebml_w: ebml::writer,
crate: @crate) -> [entry<int>]/~ {
let index = @mut []/~;
ebml_w.start_tag(tag_items_data);
*index += [{val: crate_node_id, pos: ebml_w.writer.tell()}]/~;
vec::push(*index, {val: crate_node_id, pos: ebml_w.writer.tell()});
encode_info_for_mod(ecx, ebml_w, crate.node.module,
crate_node_id, []/~, @"");
visit::visit_crate(*crate, (), visit::mk_vt(@{
@ -818,7 +819,7 @@ fn encode_info_for_items(ecx: @encode_ctxt, ebml_w: ebml::writer,
fn create_index<T: copy>(index: [entry<T>]/~, hash_fn: fn@(T) -> uint) ->
[@[entry<T>]/~]/~ {
let mut buckets: [@mut [entry<T>]/~]/~ = []/~;
for uint::range(0u, 256u) {|_i| buckets += [@mut []/~]/~; };
for uint::range(0u, 256u) {|_i| vec::push(buckets, @mut []/~); };
for index.each {|elt|
let h = hash_fn(elt.val);
vec::push(*buckets[h % 256u], elt);
@ -838,7 +839,7 @@ fn encode_index<T>(ebml_w: ebml::writer, buckets: [@[entry<T>]/~]/~,
let mut bucket_locs: [uint]/~ = []/~;
ebml_w.start_tag(tag_index_buckets);
for buckets.each {|bucket|
bucket_locs += [ebml_w.writer.tell()]/~;
vec::push(bucket_locs, ebml_w.writer.tell());
ebml_w.start_tag(tag_index_buckets_bucket);
for vec::each(*bucket) {|elt|
ebml_w.start_tag(tag_index_buckets_bucket_elt);
@ -954,7 +955,7 @@ fn synthesize_crate_attrs(ecx: @encode_ctxt, crate: @crate) -> [attribute]/~ {
};
}
if !found_link_attr { attrs += [synthesize_link_attr(ecx, []/~)]/~; }
if !found_link_attr { vec::push(attrs, synthesize_link_attr(ecx, []/~)); }
ret attrs;
}
@ -971,7 +972,7 @@ fn encode_crate_deps(ebml_w: ebml::writer, cstore: cstore::cstore) {
let dep = {cnum: key, name: @val.name,
vers: decoder::get_crate_vers(val.data),
hash: decoder::get_crate_hash(val.data)};
deps += [mut dep]/~;
vec::push(deps, dep);
};
// Sort by cnum

View File

@ -89,7 +89,7 @@ fn find_library_crate_aux(cx: ctxt,
option::none::<()>
} else {
#debug("found %s with matching metadata", path);
matches += [{ident: path, data: cvec}]/~;
vec::push(matches, {ident: path, data: cvec});
option::none::<()>
}
}

View File

@ -71,7 +71,7 @@ fn parse_constrs_gen<T: copy>(st: @pstate, conv: conv_did,
':' {
loop {
next(st);
rslt += [parse_constr(st, conv, pser)]/~;
vec::push(rslt, parse_constr(st, conv, pser));
if peek(st) != ';' { break; }
}
}
@ -91,7 +91,7 @@ fn parse_ty_constrs(st: @pstate, conv: conv_did) -> [@ty::type_constr]/~ {
fn parse_path(st: @pstate) -> @ast::path {
let mut idents: [ast::ident]/~ = []/~;
fn is_last(c: char) -> bool { ret c == '(' || c == ':'; }
idents += [parse_ident_(st, is_last)]/~;
vec::push(idents, parse_ident_(st, is_last));
loop {
alt peek(st) {
':' { next(st); next(st); }
@ -100,7 +100,7 @@ fn parse_path(st: @pstate) -> @ast::path {
ret @{span: ast_util::dummy_sp(),
global: false, idents: idents,
rp: none, types: []/~};
} else { idents += [parse_ident_(st, is_last)]/~; }
} else { vec::push(idents, parse_ident_(st, is_last)); }
}
}
};
@ -124,7 +124,7 @@ fn parse_constr_arg(st: @pstate) -> ast::fn_constr_arg {
/*
else {
auto lit = parse_lit(st, conv, ',');
args += [respan(st.span, ast::carg_lit(lit))]/~;
vec::push(args, respan(st.span, ast::carg_lit(lit)));
}
*/
}
@ -151,7 +151,7 @@ fn parse_constr<T: copy>(st: @pstate, conv: conv_did,
let mut an_arg: constr_arg_general_<T>;
loop {
an_arg = pser(st);
args += [@respan(sp, an_arg)]/~;
vec::push(args, @respan(sp, an_arg));
ignore = next(st);
if ignore != ';' { break; }
}
@ -198,7 +198,7 @@ fn parse_substs(st: @pstate, conv: conv_did) -> ty::substs {
assert next(st) == '[';
let mut params: [ty::t]/~ = []/~;
while peek(st) != ']' { params += [parse_ty(st, conv)]/~; }
while peek(st) != ']' { vec::push(params, parse_ty(st, conv)); }
st.pos = st.pos + 1u;
ret {self_r: self_r,
@ -323,7 +323,7 @@ fn parse_ty(st: @pstate, conv: conv_did) -> ty::t {
let mut fields: [ty::field]/~ = []/~;
while peek(st) != ']' {
let name = @parse_str(st, '=');
fields += [{ident: name, mt: parse_mt(st, conv)}]/~;
vec::push(fields, {ident: name, mt: parse_mt(st, conv)});
}
st.pos = st.pos + 1u;
ret ty::mk_rec(st.tcx, fields);
@ -331,7 +331,7 @@ fn parse_ty(st: @pstate, conv: conv_did) -> ty::t {
'T' {
assert (next(st) == '[');
let mut params = []/~;
while peek(st) != ']' { params += [parse_ty(st, conv)]/~; }
while peek(st) != ']' { vec::push(params, parse_ty(st, conv)); }
st.pos = st.pos + 1u;
ret ty::mk_tup(st.tcx, params);
}
@ -404,7 +404,7 @@ fn parse_mt(st: @pstate, conv: conv_did) -> ty::mt {
fn parse_def(st: @pstate, conv: conv_did) -> ast::def_id {
let mut def = []/~;
while peek(st) != '|' { def += [next_byte(st)]/~; }
while peek(st) != '|' { vec::push(def, next_byte(st)); }
st.pos = st.pos + 1u;
ret conv(parse_def_id(def));
}
@ -456,7 +456,7 @@ fn parse_ty_fn(st: @pstate, conv: conv_did) -> ty::fn_ty {
'#' { ast::by_val }
};
st.pos += 1u;
inputs += [{mode: ast::expl(mode), ty: parse_ty(st, conv)}]/~;
vec::push(inputs, {mode: ast::expl(mode), ty: parse_ty(st, conv)});
}
st.pos += 1u; // eat the ']'
let cs = parse_constrs(st, conv);
@ -501,13 +501,13 @@ fn parse_bounds_data(data: @[u8]/~, start: uint,
fn parse_bounds(st: @pstate, conv: conv_did) -> @[ty::param_bound]/~ {
let mut bounds = []/~;
loop {
bounds += [alt check next(st) {
vec::push(bounds, alt check next(st) {
'S' { ty::bound_send }
'C' { ty::bound_copy }
'K' { ty::bound_const }
'I' { ty::bound_iface(parse_ty(st, conv)) }
'.' { break; }
}]/~;
});
}
@bounds
}

View File

@ -120,6 +120,6 @@ fn compute_capture_vars(tcx: ty::ctxt,
}
let mut result = []/~;
for cap_map.each_value { |cap_var| result += [cap_var]/~; }
for cap_map.each_value { |cap_var| vec::push(result, cap_var); }
ret result;
}

View File

@ -782,12 +782,12 @@ fn resolve_import(e: env, n_id: node_id, name: ast::ident,
view_path_simple(_, _, id)
| view_path_glob(_, id) {
if id == my_id { found = true; }
if found { imports += [id]/~; }
if found { vec::push(imports, id); }
}
view_path_list(_, ids, _) {
for ids.each {|id|
if id.node.id == my_id { found = true; }
if found { imports += [id.node.id]/~; }
if found { vec::push(imports, id.node.id); }
}
}
}
@ -1151,7 +1151,7 @@ fn lookup_in_scope(e: env, &&sc: scopes, sp: span, name: ident, ns: namespace,
} else if ns != ns_module {
left_fn = scope_is_fn(hd);
alt scope_closes(hd) {
some(node_id) { closing += [node_id]/~; }
some(node_id) { vec::push(closing, node_id); }
_ { }
}
}
@ -1421,7 +1421,7 @@ fn lookup_import(e: env, n_id: node_id, ns: namespace) -> option<def> {
}
resolved(val, typ, md, _, _, _) {
if e.used_imports.track {
e.used_imports.data += [n_id]/~;
vec::push(e.used_imports.data, n_id);
}
ret alt ns { ns_val { val } ns_type { typ } ns_module { md } };
}
@ -1760,7 +1760,7 @@ fn mie_span(mie: mod_index_entry) -> span {
fn check_item(e: @env, i: @ast::item, &&x: (), v: vt<()>) {
fn typaram_names(tps: [ast::ty_param]/~) -> [ident]/~ {
let mut x: [ast::ident]/~ = []/~;
for tps.each {|tp| x += [tp.ident]/~; }
for tps.each {|tp| vec::push(x, tp.ident); }
ret x;
}
visit::visit_item(i, x, v);
@ -2121,7 +2121,7 @@ fn check_exports(e: @env) {
if ! glob_is_re_exported.contains_key(id) { cont; }
iter_mod(*e, glob.def,
glob.path.span, outside) {|ident, def|
_mod.globbed_exports += [ident]/~;
vec::push(_mod.globbed_exports, ident);
maybe_add_reexport(e, id, some(def));
}
}
@ -2180,7 +2180,8 @@ fn find_impls_in_view_item(e: env, vi: @ast::view_item,
if vec::len(found) == 0u {
for vec::each(*level) {|imp|
if imp.ident == pt.idents[0] {
found += [@{ident: name with *imp}]/~;
vec::push(found,
@{ident: name with *imp});
}
}
if vec::len(found) > 0u { impls += found; }
@ -2190,7 +2191,7 @@ fn find_impls_in_view_item(e: env, vi: @ast::view_item,
} else {
lookup_imported_impls(e, id) {|is|
for vec::each(*is) {|i|
impls += [@{ident: name with *i}]/~;
vec::push(impls, @{ident: name with *i});
}
}
}
@ -2232,13 +2233,13 @@ fn find_impls_in_item(e: env, i: @ast::item, &impls: [@_impl]/~,
some(m) { is_exported(e, i.ident, m) }
_ { true }
} {
impls += [@{did: local_def(i.id),
vec::push(impls, @{did: local_def(i.id),
ident: i.ident,
methods: vec::map(mthds, {|m|
@{did: local_def(m.id),
n_tps: vec::len(m.tps),
ident: m.ident}
})}]/~;
})});
}
}
ast::item_class(tps, ifces, items, _, _, _) {
@ -2247,12 +2248,12 @@ fn find_impls_in_item(e: env, i: @ast::item, &impls: [@_impl]/~,
vec::iter(ifces) {|p|
// The def_id, in this case, identifies the combination of
// class and iface
impls += [@{did: local_def(p.id),
vec::push(impls, @{did: local_def(p.id),
ident: i.ident,
methods: vec::map(mthds, {|m|
@{did: local_def(m.id),
n_tps: n_tps + m.tps.len(),
ident: m.ident}})}]/~;
ident: m.ident}})});
}
}
_ {}
@ -2287,10 +2288,10 @@ fn find_impls_in_mod_by_id(e: env, defid: def_id, &impls: [@_impl]/~,
alt name {
some(n) {
for vec::each(*cached) {|im|
if n == im.ident { impls += [im]/~; }
if n == im.ident { vec::push(impls, im); }
}
}
_ { impls += *cached; }
_ { vec::push_all(impls, *cached); }
}
}

View File

@ -110,12 +110,12 @@ fn expand_nested_bindings(m: match, col: uint, val: ValueRef) -> match {
ast::pat_ident(name, some(inner)) {
let pats = vec::slice(br.pats, 0u, col) + [inner]/~ +
vec::slice(br.pats, col + 1u, br.pats.len());
result += [@{pats: pats,
vec::push(result, @{pats: pats,
bound: br.bound + [{ident: path_to_ident(name),
val: val}]/~
with *br}]/~;
with *br});
}
_ { result += [br]/~; }
_ { vec::push(result, br); }
}
}
result
@ -138,7 +138,7 @@ fn enter_match(dm: def_map, m: match, col: uint, val: ValueRef,
}
_ { br.bound }
};
result += [@{pats: pats, bound: bound with *br}]/~;
vec::push(result, @{pats: pats, bound: bound with *br});
}
none { }
}
@ -196,7 +196,7 @@ fn enter_rec(dm: def_map, m: match, col: uint, fields: [ast::ident]/~,
for vec::each(fpats) {|fpat|
if str::eq(*fpat.ident, *fname) { pat = fpat.pat; break; }
}
pats += [pat]/~;
vec::push(pats, pat);
}
some(pats)
}
@ -293,7 +293,7 @@ fn collect_record_fields(m: match, col: uint) -> [ast::ident]/~ {
ast::pat_rec(fs, _) {
for vec::each(fs) {|f|
if !vec::any(fields, {|x| str::eq(*f.ident, *x)}) {
fields += [f.ident]/~;
vec::push(fields, f.ident);
}
}
}
@ -404,8 +404,8 @@ fn compile_submatch(bcx: block, m: match, vals: [ValueRef]/~,
_ { }
}
if !bcx.unreachable {
exits += [{bound: m[0].bound, from: bcx.llbb,
to: data.bodycx.llbb}]/~;
vec::push(exits, {bound: m[0].bound, from: bcx.llbb,
to: data.bodycx.llbb});
}
Br(bcx, data.bodycx.llbb);
ret;
@ -436,7 +436,7 @@ fn compile_submatch(bcx: block, m: match, vals: [ValueRef]/~,
let mut rec_vals = []/~;
for vec::each(rec_fields) {|field_name|
let ix = option::get(ty::field_idx(field_name, fields));
rec_vals += [GEPi(bcx, val, [0u, ix]/~)]/~;
vec::push(rec_vals, GEPi(bcx, val, [0u, ix]/~));
}
compile_submatch(bcx, enter_rec(dm, m, col, rec_fields, val),
rec_vals + vals_left, chk, exits);
@ -451,7 +451,7 @@ fn compile_submatch(bcx: block, m: match, vals: [ValueRef]/~,
};
let mut tup_vals = []/~, i = 0u;
while i < n_tup_elts {
tup_vals += [GEPi(bcx, val, [0u, i]/~)]/~;
vec::push(tup_vals, GEPi(bcx, val, [0u, i]/~));
i += 1u;
}
compile_submatch(bcx, enter_tup(dm, m, col, val, n_tup_elts),
@ -604,7 +604,10 @@ fn make_phi_bindings(bcx: block, map: [exit_node]/~,
for vec::each(map) {|ex|
if ex.to as uint == our_block {
alt assoc(name, ex.bound) {
some(val) { llbbs += [ex.from]/~; vals += [val]/~; }
some(val) {
vec::push(llbbs, ex.from);
vec::push(vals, val);
}
none { }
}
}
@ -644,12 +647,12 @@ fn trans_alt_inner(scope_cx: block, expr: @ast::expr, arms: [ast::arm]/~,
for vec::each(arms) {|a|
let body = scope_block(bcx, a.body.info(), "case_body");
let id_map = pat_util::pat_id_map(tcx.def_map, a.pats[0]);
bodies += [body]/~;
vec::push(bodies, body);
for vec::each(a.pats) {|p|
match += [@{pats: [p]/~,
vec::push(match, @{pats: [p]/~,
bound: []/~,
data: @{bodycx: body, guard: a.guard,
id_map: id_map}}]/~;
id_map: id_map}});
}
}
@ -680,10 +683,10 @@ fn trans_alt_inner(scope_cx: block, expr: @ast::expr, arms: [ast::arm]/~,
let id_map = pat_util::pat_id_map(tcx.def_map, a.pats[0]);
if make_phi_bindings(body_cx, exit_map, id_map) {
let arm_dest = dup_for_join(dest);
arm_dests += [arm_dest]/~;
vec::push(arm_dests, arm_dest);
let mut arm_cx = trans_block(body_cx, a.body, arm_dest);
arm_cx = trans_block_cleanups(arm_cx, body_cx);
arm_cxs += [arm_cx]/~;
vec::push(arm_cxs, arm_cx);
}
i += 1u;
}

View File

@ -102,7 +102,7 @@ impl ccx_icx for @crate_ctxt {
fn insn_ctxt(s: str) -> icx_popper {
#debug("new insn_ctxt: %s", s);
if self.sess.count_llvm_insns() {
*self.stats.llvm_insn_ctxt += [s]/~;
vec::push(*self.stats.llvm_insn_ctxt, s);
}
icx_popper(self)
}
@ -172,7 +172,7 @@ fn log_fn_time(ccx: @crate_ctxt, name: str, start: time::timespec,
end: time::timespec) {
let elapsed = 1000 * ((end.sec - start.sec) as int) +
((end.nsec as int) - (start.nsec as int)) / 1000000;
*ccx.stats.fn_times += [{ident: name, time: elapsed}]/~;
vec::push(*ccx.stats.fn_times, {ident: name, time: elapsed});
}
@ -236,7 +236,7 @@ fn trans_native_call(cx: block, externs: hashmap<str, ValueRef>,
get_simple_extern_fn(cx, externs, llmod, name, n);
let mut call_args: [ValueRef]/~ = []/~;
for vec::each(args) {|a|
call_args += [a]/~;
vec::push(call_args, a);
}
ret Call(cx, llnative, call_args);
}
@ -2983,7 +2983,7 @@ fn trans_arg_expr(cx: block, arg: ty::arg, lldestty: TypeRef, e: @ast::expr,
// In the event that failure occurs before the call actually
// happens, have to cleanup this copy:
add_clean_temp_mem(bcx, val, arg.ty);
temp_cleanups += [val]/~;
vec::push(temp_cleanups, val);
}
}
}
@ -3095,10 +3095,10 @@ fn trans_args(cx: block, llenv: ValueRef, args: call_args, fn_ty: ty::t,
by_val(_) { alloc_ty(bcx, retty) }
};
llargs += [llretslot]/~;
vec::push(llargs, llretslot);
// Arg 1: Env (closure-bindings / self value)
llargs += [llenv]/~;
vec::push(llargs, llenv);
// ... then explicit args.
@ -3114,7 +3114,7 @@ fn trans_args(cx: block, llenv: ValueRef, args: call_args, fn_ty: ty::t,
e, temp_cleanups, if i == last { ret_flag }
else { none }, 0u);
bcx = r.bcx;
llargs += [r.val]/~;
vec::push(llargs, r.val);
}
}
arg_vals(vs) {
@ -3387,7 +3387,7 @@ fn trans_tup(bcx: block, elts: [@ast::expr]/~, dest: dest) -> block {
let e_ty = expr_ty(bcx, e);
bcx = trans_expr_save_in(bcx, e, dst);
add_clean_temp_mem(bcx, dst, e_ty);
temp_cleanups += [dst]/~;
vec::push(temp_cleanups, dst);
}
for vec::each(temp_cleanups) {|cleanup| revoke_clean(bcx, cleanup); }
ret bcx;
@ -3419,7 +3419,7 @@ fn trans_rec(bcx: block, fields: [ast::field]/~,
let dst = GEPi(bcx, addr, [0u, ix]/~);
bcx = trans_expr_save_in(bcx, fld.node.expr, dst);
add_clean_temp_mem(bcx, dst, ty_fields[ix].mt.ty);
temp_cleanups += [dst]/~;
vec::push(temp_cleanups, dst);
}
alt base {
some(bexp) {
@ -4264,7 +4264,7 @@ fn cleanup_and_leave(bcx: block, upto: option<BasicBlockRef>,
}
let sub_cx = sub_block(bcx, "cleanup");
Br(bcx, sub_cx.llbb);
inf.cleanup_paths += [{target: leave, dest: sub_cx.llbb}]/~;
vec::push(inf.cleanup_paths, {target: leave, dest: sub_cx.llbb});
bcx = trans_block_cleanups_(sub_cx, cur, is_lpad);
}
_ {}
@ -5059,7 +5059,7 @@ fn create_main_wrapper(ccx: @crate_ctxt, sp: span, main_llfn: ValueRef,
let llenvarg = llvm::LLVMGetParam(llfdecl, 1 as c_uint);
let mut args = [lloutputarg, llenvarg]/~;
if takes_argv {
args += [llvm::LLVMGetParam(llfdecl, 2 as c_uint)]/~;
vec::push(args, llvm::LLVMGetParam(llfdecl, 2 as c_uint));
}
Call(bcx, main_llfn, args);
build_return(bcx);
@ -5369,10 +5369,10 @@ fn create_module_map(ccx: @crate_ctxt) -> ValueRef {
for ccx.module_data.each {|key, val|
let elt = C_struct([p2i(ccx, C_cstr(ccx, key)),
p2i(ccx, val)]/~);
elts += [elt]/~;
vec::push(elts, elt);
};
let term = C_struct([C_int(ccx, 0), C_int(ccx, 0)]/~);
elts += [term]/~;
vec::push(elts, term);
llvm::LLVMSetInitializer(map, C_array(elttype, elts));
ret map;
}
@ -5410,10 +5410,10 @@ fn fill_crate_map(ccx: @crate_ctxt, map: ValueRef) {
let cr = str::as_c_str(nm, {|buf|
llvm::LLVMAddGlobal(ccx.llmod, ccx.int_type, buf)
});
subcrates += [p2i(ccx, cr)]/~;
vec::push(subcrates, p2i(ccx, cr));
i += 1;
}
subcrates += [C_int(ccx, 0)]/~;
vec::push(subcrates, C_int(ccx, 0));
llvm::LLVMSetInitializer(map, C_struct(
[p2i(ccx, create_module_map(ccx)),
C_array(ccx.int_type, subcrates)]/~));
@ -5448,7 +5448,7 @@ fn crate_ctxt_to_encode_parms(cx: @crate_ctxt)
ast_map::path_to_str(*path)
}
};
reexports += [(path, def.id)]/~;
vec::push(reexports, (path, def.id));
}
}
ret reexports;

View File

@ -430,7 +430,7 @@ fn GEP(cx: block, Pointer: ValueRef, Indices: [ValueRef]/~) -> ValueRef {
// in C_i32()
fn GEPi(cx: block, base: ValueRef, ixs: [uint]/~) -> ValueRef {
let mut v: [ValueRef]/~ = []/~;
for vec::each(ixs) {|i| v += [C_i32(i as i32)]/~; }
for vec::each(ixs) {|i| vec::push(v, C_i32(i as i32)); }
count_insn(cx, "gepi");
ret InBoundsGEP(cx, base, v);
}

View File

@ -128,12 +128,12 @@ fn mk_closure_tys(tcx: ty::ctxt,
// Compute the closed over data
for vec::each(bound_values) {|bv|
bound_tys += [alt bv {
vec::push(bound_tys, alt bv {
env_copy(_, t, _) { t }
env_move(_, t, _) { t }
env_ref(_, t, _) { t }
env_expr(_, t) { t }
}]/~;
});
}
let bound_data_ty = ty::mk_tup(tcx, bound_tys);
// FIXME[mono] remove tuple of tydescs from closure types (#2531)
@ -247,7 +247,7 @@ fn store_environment(bcx: block,
env_expr(e, _) {
bcx = base::trans_expr_save_in(bcx, e, bound_data);
add_clean_temp_mem(bcx, bound_data, bound_tys[i]);
temp_cleanups += [bound_data]/~;
vec::push(temp_cleanups, bound_data);
}
env_copy(val, ty, owned) {
let val1 = load_if_immediate(bcx, val, ty);
@ -303,18 +303,18 @@ fn build_closure(bcx0: block,
capture::cap_ref {
assert ck == ty::ck_block;
ty = ty::mk_mut_ptr(tcx, ty);
env_vals += [env_ref(lv.val, ty, lv.kind)]/~;
vec::push(env_vals, env_ref(lv.val, ty, lv.kind));
}
capture::cap_copy {
let mv = alt check ccx.maps.last_use_map.find(id) {
none { false }
some(vars) { (*vars).contains(nid) }
};
if mv { env_vals += [env_move(lv.val, ty, lv.kind)]/~; }
else { env_vals += [env_copy(lv.val, ty, lv.kind)]/~; }
if mv { vec::push(env_vals, env_move(lv.val, ty, lv.kind)); }
else { vec::push(env_vals, env_copy(lv.val, ty, lv.kind)); }
}
capture::cap_move {
env_vals += [env_move(lv.val, ty, lv.kind)]/~;
vec::push(env_vals, env_move(lv.val, ty, lv.kind));
}
capture::cap_drop {
assert lv.kind == owned;
@ -435,7 +435,7 @@ fn trans_bind_1(cx: block, outgoing_fty: ty::t,
let ccx = cx.ccx();
let mut bound: [@ast::expr]/~ = []/~;
for vec::each(args) {|argopt|
alt argopt { none { } some(e) { bound += [e]/~; } }
alt argopt { none { } some(e) { vec::push(bound, e); } }
}
let mut bcx = f_res.bcx;
if dest == ignore {
@ -758,13 +758,13 @@ fn trans_bind_thunk(ccx: @crate_ctxt,
}
ast::by_ref | ast::by_mutbl_ref | ast::by_move { }
}
llargs += [val]/~;
vec::push(llargs, val);
b += 1u;
}
// Arg will be provided when the thunk is invoked.
none {
llargs += [llvm::LLVMGetParam(llthunk, a as c_uint)]/~;
vec::push(llargs, llvm::LLVMGetParam(llthunk, a as c_uint));
a += 1u;
}
}

View File

@ -236,8 +236,8 @@ fn add_clean(cx: block, val: ValueRef, ty: ty::t) {
ty_to_str(cx.ccx().tcx, ty)];
let cleanup_type = cleanup_type(cx.tcx(), ty);
in_scope_cx(cx) {|info|
info.cleanups += [clean({|a|base::drop_ty(a, val, ty)},
cleanup_type)]/~;
vec::push(info.cleanups, clean({|a|base::drop_ty(a, val, ty)},
cleanup_type));
scope_clean_changed(info);
}
}
@ -256,8 +256,8 @@ fn add_clean_temp(cx: block, val: ValueRef, ty: ty::t) {
}
}
in_scope_cx(cx) {|info|
info.cleanups += [clean_temp(val, {|a|do_drop(a, val, ty)},
cleanup_type)]/~;
vec::push(info.cleanups, clean_temp(val, {|a|do_drop(a, val, ty)},
cleanup_type));
scope_clean_changed(info);
}
}
@ -268,8 +268,9 @@ fn add_clean_temp_mem(cx: block, val: ValueRef, ty: ty::t) {
ty_to_str(cx.ccx().tcx, ty)];
let cleanup_type = cleanup_type(cx.tcx(), ty);
in_scope_cx(cx) {|info|
info.cleanups += [clean_temp(val, {|a|base::drop_ty(a, val, ty)},
cleanup_type)]/~;
vec::push(info.cleanups,
clean_temp(val, {|a|base::drop_ty(a, val, ty)},
cleanup_type));
scope_clean_changed(info);
}
}
@ -277,8 +278,8 @@ fn add_clean_free(cx: block, ptr: ValueRef, shared: bool) {
let free_fn = if shared { {|a|base::trans_unique_free(a, ptr)} }
else { {|a|base::trans_free(a, ptr)} };
in_scope_cx(cx) {|info|
info.cleanups += [clean_temp(ptr, free_fn,
normal_exit_and_unwind)]/~;
vec::push(info.cleanups, clean_temp(ptr, free_fn,
normal_exit_and_unwind));
scope_clean_changed(info);
}
}
@ -849,7 +850,7 @@ fn C_postr(s: str) -> ValueRef {
fn C_zero_byte_arr(size: uint) -> ValueRef unsafe {
let mut i = 0u;
let mut elts: [ValueRef]/~ = []/~;
while i < size { elts += [C_u8(0u)]/~; i += 1u; }
while i < size { vec::push(elts, C_u8(0u)); i += 1u; }
ret llvm::LLVMConstArray(T_i8(), vec::unsafe::to_ptr(elts),
elts.len() as c_uint);
}

View File

@ -403,9 +403,9 @@ fn create_derived_type(type_tag: int, file: ValueRef, name: str, line: int,
fn add_member(cx: @struct_ctxt, name: str, line: int, size: int, align: int,
ty: ValueRef) {
cx.members += [create_derived_type(MemberTag, cx.file, name, line,
vec::push(cx.members, create_derived_type(MemberTag, cx.file, name, line,
size * 8, align * 8, cx.total_size,
ty)]/~;
ty));
cx.total_size += size * 8;
}
@ -550,10 +550,10 @@ fn create_ty(_cx: @crate_ctxt, _t: ty::t, _ty: @ast::ty)
ty::ty_rec(fields) {
let fs = []/~;
for field in fields {
fs += [{node: {ident: field.ident,
vec::push(fs, {node: {ident: field.ident,
mt: {ty: t_to_ty(cx, field.mt.ty, span),
mutbl: field.mt.mutbl}},
span: span}]/~;
span: span});
}
ast::ty_rec(fs)
}

View File

@ -292,21 +292,21 @@ fn llreg_ty(cls: [x86_64_reg_class]/~) -> TypeRef {
while i < e {
alt cls[i] {
integer_class {
tys += [T_i64()]/~;
vec::push(tys, T_i64());
}
sse_fv_class {
let vec_len = llvec_len(vec::tailn(cls, i + 1u)) * 2u;
let vec_ty = llvm::LLVMVectorType(T_f32(),
vec_len as c_uint);
tys += [vec_ty]/~;
vec::push(tys, vec_ty);
i += vec_len;
cont;
}
sse_fs_class {
tys += [T_f32()]/~;
vec::push(tys, T_f32());
}
sse_ds_class {
tys += [T_f64()]/~;
vec::push(tys, T_f64());
}
_ {
fail "llregtype: unhandled class";
@ -375,8 +375,8 @@ fn x86_64_tys(atys: [TypeRef]/~,
let mut attrs = []/~;
for vec::each(atys) {|t|
let (ty, attr) = x86_64_ty(t, is_pass_byval, ByValAttribute);
arg_tys += [ty]/~;
attrs += [attr]/~;
vec::push(arg_tys, ty);
vec::push(attrs, attr);
}
let mut (ret_ty, ret_attr) = x86_64_ty(rty, is_ret_bysret,
StructRetAttribute);
@ -617,7 +617,7 @@ fn trans_native_mod(ccx: @crate_ctxt,
} else {
load_inbounds(bcx, llargbundle, [0u, i]/~)
};
llargvals += [llargval]/~;
vec::push(llargvals, llargval);
i += 1u;
}
}
@ -625,7 +625,7 @@ fn trans_native_mod(ccx: @crate_ctxt,
while i < n {
let llargval = load_inbounds(bcx, llargbundle,
[0u, i]/~);
llargvals += [llargval]/~;
vec::push(llargvals, llargval);
i += 1u;
}
}
@ -952,12 +952,12 @@ fn trans_crust_fn(ccx: @crate_ctxt, path: ast_map::path, decl: ast::fn_decl,
let mut i = 0u;
let n = vec::len(tys.arg_tys);
let llretptr = load_inbounds(bcx, llargbundle, [0u, n]/~);
llargvals += [llretptr]/~;
vec::push(llargvals, llretptr);
let llenvptr = C_null(T_opaque_box_ptr(bcx.ccx()));
llargvals += [llenvptr]/~;
vec::push(llargvals, llenvptr);
while i < n {
let llargval = load_inbounds(bcx, llargbundle, [0u, i]/~);
llargvals += [llargval]/~;
vec::push(llargvals, llargval);
i += 1u;
}
ret llargvals;

View File

@ -31,10 +31,10 @@ fn type_of_fn(cx: @crate_ctxt, inputs: [ty::arg]/~,
let mut atys: [TypeRef]/~ = []/~;
// Arg 0: Output pointer.
atys += [T_ptr(type_of(cx, output))]/~;
vec::push(atys, T_ptr(type_of(cx, output)));
// Arg 1: Environment
atys += [T_opaque_box_ptr(cx)]/~;
vec::push(atys, T_opaque_box_ptr(cx));
// ... then explicit args.
atys += type_of_explicit_args(cx, inputs);
@ -137,7 +137,7 @@ fn type_of(cx: @crate_ctxt, t: ty::t) -> TypeRef {
let mut tys: [TypeRef]/~ = []/~;
for vec::each(fields) {|f|
let mt_ty = f.mt.ty;
tys += [type_of(cx, mt_ty)]/~;
vec::push(tys, type_of(cx, mt_ty));
}
T_struct(tys)
}
@ -147,7 +147,7 @@ fn type_of(cx: @crate_ctxt, t: ty::t) -> TypeRef {
ty::ty_tup(elts) {
let mut tys = []/~;
for vec::each(elts) {|elt|
tys += [type_of(cx, elt)]/~;
vec::push(tys, type_of(cx, elt));
}
T_struct(tys)
}

View File

@ -553,7 +553,7 @@ fn exprs_to_constr_args(tcx: ty::ctxt,
args: [@expr]/~) -> [@constr_arg_use]/~ {
let f = {|a|expr_to_constr_arg(tcx, a)};
let mut rslt: [@constr_arg_use]/~ = []/~;
for args.each {|e| rslt += [f(e)]/~; }
for args.each {|e| vec::push(rslt, f(e)); }
rslt
}
@ -589,7 +589,7 @@ fn substitute_constr_args(cx: ty::ctxt, actuals: [@expr]/~, c: @ty::constr) ->
tsconstr {
let mut rslt: [@constr_arg_use]/~ = []/~;
for c.node.args.each {|a|
rslt += [substitute_arg(cx, actuals, a)]/~;
vec::push(rslt, substitute_arg(cx, actuals, a));
}
ret {path: c.node.path,
def_id: c.node.id,
@ -668,7 +668,7 @@ fn find_instances(_fcx: fn_ctxt, subst: subst,
let old_bit_num = d.node.bit_num;
let newv = replace(subst, d);
alt find_instance_(newv, v) {
some(d1) {res += [{from: old_bit_num, to: d1}]/~}
some(d1) {vec::push(res, {from: old_bit_num, to: d1})}
_ {}
}
} else {}
@ -710,12 +710,12 @@ fn replace(subst: subst, d: pred_args) -> [constr_arg_general_<inst>]/~ {
alt c.node {
carg_ident(p) {
alt find_in_subst(p.node, subst) {
some(newv) { rslt += [carg_ident(newv)]/~; }
_ { rslt += [c.node]/~; }
some(newv) { vec::push(rslt, carg_ident(newv)); }
_ { vec::push(rslt, c.node); }
}
}
_ {
rslt += [c.node]/~;
vec::push(rslt, c.node);
}
}
}
@ -866,7 +866,9 @@ fn args_mention<T>(args: [@constr_arg_use]/~,
ret false;
}
fn use_var(fcx: fn_ctxt, v: node_id) { *fcx.enclosing.used_vars += [v]/~; }
fn use_var(fcx: fn_ctxt, v: node_id) {
vec::push(*fcx.enclosing.used_vars, v);
}
fn op_to_oper_ty(io: init_op) -> oper_type {
alt io { init_move { oper_move } _ { oper_assign } }
@ -924,14 +926,14 @@ type binding = {lhs: [dest]/~, rhs: option<initializer>};
fn local_to_bindings(tcx: ty::ctxt, loc: @local) -> binding {
let mut lhs = []/~;
pat_bindings(tcx.def_map, loc.node.pat) {|p_id, _s, name|
lhs += [local_dest({ident: path_to_ident(name), node: p_id})]/~;
vec::push(lhs, local_dest({ident: path_to_ident(name), node: p_id}));
};
{lhs: lhs, rhs: loc.node.init}
}
fn locals_to_bindings(tcx: ty::ctxt, locals: [@local]/~) -> [binding]/~ {
let mut rslt = []/~;
for locals.each {|loc| rslt += [local_to_bindings(tcx, loc)]/~; }
for locals.each {|loc| vec::push(rslt, local_to_bindings(tcx, loc)); }
ret rslt;
}
@ -941,7 +943,7 @@ fn callee_modes(fcx: fn_ctxt, callee: node_id) -> [mode]/~ {
alt ty::get(ty).struct {
ty::ty_fn({inputs: args, _}) {
let mut modes = []/~;
for args.each {|arg| modes += [arg.mode]/~; }
for args.each {|arg| vec::push(modes, arg.mode); }
ret modes;
}
_ {

View File

@ -14,8 +14,10 @@ type ctxt = {cs: @mut [sp_constr]/~, tcx: ty::ctxt};
fn collect_pred(e: @expr, cx: ctxt, v: visit::vt<ctxt>) {
alt e.node {
expr_check(_, ch) { *cx.cs += [expr_to_constr(cx.tcx, ch)]/~; }
expr_if_check(ex, _, _) { *cx.cs += [expr_to_constr(cx.tcx, ex)]/~; }
expr_check(_, ch) { vec::push(*cx.cs, expr_to_constr(cx.tcx, ch)); }
expr_if_check(ex, _, _) {
vec::push(*cx.cs, expr_to_constr(cx.tcx, ex));
}
// If it's a call, generate appropriate instances of the
// call's constraints.
@ -24,7 +26,7 @@ fn collect_pred(e: @expr, cx: ctxt, v: visit::vt<ctxt>) {
let ct: sp_constr =
respan(c.span,
aux::substitute_constr_args(cx.tcx, operands, c));
*cx.cs += [ct]/~;
vec::push(*cx.cs, ct);
}
}
_ { }

View File

@ -255,7 +255,7 @@ fn find_pre_post_expr(fcx: fn_ctxt, e: @expr) {
/* copy */
let mut args = operands;
args += [operator]/~;
vec::push(args, operator);
find_pre_post_exprs(fcx, args, e.id);
/* see if the call has any constraints on its type */
@ -314,7 +314,7 @@ fn find_pre_post_expr(fcx: fn_ctxt, e: @expr) {
}
expr_rec(fields, maybe_base) {
let mut es = field_exprs(fields);
alt maybe_base { none {/* no-op */ } some(b) { es += [b]/~; } }
alt maybe_base { none {/* no-op */ } some(b) { vec::push(es, b); } }
find_pre_post_exprs(fcx, es, e.id);
}
expr_tup(elts) { find_pre_post_exprs(fcx, elts, e.id); }
@ -398,7 +398,7 @@ fn find_pre_post_expr(fcx: fn_ctxt, e: @expr) {
ret block_pp(fcx.ccx, an_alt.body);
}
let mut alt_pps = []/~;
for alts.each {|a| alt_pps += [do_an_alt(fcx, a)]/~; }
for alts.each {|a| vec::push(alt_pps, do_an_alt(fcx, a)); }
fn combine_pp(antec: pre_and_post, fcx: fn_ctxt, &&pp: pre_and_post,
&&next: pre_and_post) -> pre_and_post {
union(pp.precondition, seq_preconds(fcx, [antec, next]/~));
@ -555,20 +555,20 @@ fn find_pre_post_block(fcx: fn_ctxt, b: blk) {
option::map::<@expr, ()>(b.node.expr, do_inner);
let mut pps: [pre_and_post]/~ = []/~;
for b.node.stmts.each {|s| pps += [stmt_pp(fcx.ccx, *s)]/~; }
for b.node.stmts.each {|s| vec::push(pps, stmt_pp(fcx.ccx, *s)); }
alt b.node.expr {
none {/* no-op */ }
some(e) { pps += [expr_pp(fcx.ccx, e)]/~; }
some(e) { vec::push(pps, expr_pp(fcx.ccx, e)); }
}
let block_precond = seq_preconds(fcx, pps);
let mut postconds = []/~;
for pps.each {|pp| postconds += [get_post(pp)]/~; }
for pps.each {|pp| vec::push(postconds, get_post(pp)); }
/* A block may be empty, so this next line ensures that the postconds
vector is non-empty. */
postconds += [block_precond]/~;
vec::push(postconds, block_precond);
let mut block_postcond = empty_poststate(nv);
/* conservative approximation */

View File

@ -2281,7 +2281,7 @@ fn occurs_check(tcx: ctxt, sp: span, vid: tv_vid, rt: t) {
fn vars_in_type(ty: t) -> [tv_vid]/~ {
let mut rslt = []/~;
walk_ty(ty) {|ty|
alt get(ty).struct { ty_var(v) { rslt += [v]/~; } _ { } }
alt get(ty).struct { ty_var(v) { vec::push(rslt, v); } _ { } }
}
rslt
}
@ -2837,8 +2837,8 @@ fn class_field_tys(items: [@class_member]/~) -> [field_ty]/~ {
for items.each {|it|
alt it.node {
instance_var(nm, _, cm, id, vis) {
rslt += [{ident: nm, id: ast_util::local_def(id),
vis: vis, mutability: cm}]/~;
vec::push(rslt, {ident: nm, id: ast_util::local_def(id),
vis: vis, mutability: cm});
}
class_method(_) { }
}
@ -2874,9 +2874,9 @@ fn class_item_fields(cx:ctxt, did: ast::def_id,
for lookup_class_fields(cx, did).each {|f|
// consider all instance vars mut, because the
// constructor may mutate all vars
rslt += [{ident: f.ident, mt:
vec::push(rslt, {ident: f.ident, mt:
{ty: lookup_field_type(cx, did, f.id, substs),
mutbl: frob_mutability(f.mutability)}}]/~;
mutbl: frob_mutability(f.mutability)}});
}
rslt
}

View File

@ -339,7 +339,7 @@ fn ast_ty_to_ty<AC: ast_conv, RS: region_scope copy>(
ast::ty_constr(t, cs) {
let mut out_cs = []/~;
for cs.each {|constr|
out_cs += [ty::ast_constr_to_constr(tcx, constr)]/~;
vec::push(out_cs, ty::ast_constr_to_constr(tcx, constr));
}
ty::mk_constr(tcx, ast_ty_to_ty(self, rscope, t), out_cs)
}

View File

@ -1424,7 +1424,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt,
for elts.eachi {|i, e|
check_expr(fcx, e, flds.map {|fs| fs[i]});
let ety = fcx.expr_ty(e);
elt_ts += [ety]/~;
vec::push(elt_ts, ety);
}
let typ = ty::mk_tup(tcx, elt_ts);
fcx.write_ty(id, typ);
@ -1826,14 +1826,14 @@ fn check_enum_variants(ccx: @crate_ctxt,
ccx.tcx.sess.span_err(v.span,
"discriminator value already exists");
}
disr_vals += [disr_val]/~;
vec::push(disr_vals, disr_val);
let ctor_ty = ty::node_id_to_type(ccx.tcx, v.node.id);
let arg_tys = if v.node.args.len() > 0u {
ty::ty_fn_args(ctor_ty).map {|a| a.ty }
} else { []/~ };
variants += [@{args: arg_tys, ctor_ty: ctor_ty,
vec::push(variants, @{args: arg_tys, ctor_ty: ctor_ty,
name: v.node.name, id: local_def(v.node.id),
disr_val: disr_val}]/~;
disr_val: disr_val});
disr_val += 1;
}
@ -1913,7 +1913,7 @@ fn check_constraints(fcx: @fn_ctxt, cs: [@ast::constr]/~,
for cs.each {|c|
let mut c_args = []/~;
for c.node.args.each {|a|
c_args += [
vec::push(c_args,
// "base" should not occur in a fn type thing, as of
// yet, b/c we don't allow constraints on the return type
@ -1953,7 +1953,7 @@ fn check_constraints(fcx: @fn_ctxt, cs: [@ast::constr]/~,
carg_ident index out of bounds");
}
}
}]/~;
});
}
let p_op: ast::expr_ = ast::expr_path(c.node.path);
let oper: @ast::expr = @{id: c.node.id, node: p_op, span: c.span};

View File

@ -12,7 +12,7 @@ fn replace_bound_regions_in_fn_ty(
fn_ty: ty::fn_ty} {
let mut all_tys = ty::tys_in_fn_ty(fn_ty);
for self_ty.each { |t| all_tys += [t]/~ }
for self_ty.each { |t| vec::push(all_tys, t) }
#debug["replace_bound_regions_in_fn_ty(self_ty=%?, fn_ty=%s, all_tys=%?)",
self_ty.map { |t| ty_to_str(tcx, t) },

View File

@ -18,8 +18,8 @@ fn lookup_vtables(fcx: @fn_ctxt, isc: resolve::iscopes, sp: span,
alt bound {
ty::bound_iface(i_ty) {
let i_ty = ty::subst(tcx, substs, i_ty);
result += [lookup_vtable(fcx, isc, sp, ty, i_ty,
allow_unsafe)]/~;
vec::push(result, lookup_vtable(fcx, isc, sp, ty, i_ty,
allow_unsafe));
}
_ {}
}
@ -144,7 +144,8 @@ fn lookup_vtable(fcx: @fn_ctxt, isc: resolve::iscopes, sp: span,
iface_tps, im.did);
let subres = lookup_vtables(fcx, isc, sp,
im_bs, substs_f, false);
found += [vtable_static(im.did, substs_f.tps, subres)]/~;
vec::push(found,
vtable_static(im.did, substs_f.tps, subres));
}
alt found.len() {

View File

@ -43,7 +43,7 @@ fn resolve_type_vars_for_node(wbcx: wb_ctxt, sp: span, id: ast::node_id)
let mut new_tps = []/~;
for substs.tps.each {|subst|
alt resolve_type_vars_in_type(fcx, sp, subst) {
some(t) { new_tps += [t]/~; }
some(t) { vec::push(new_tps, t); }
none { wbcx.success = false; ret none; }
}
}

View File

@ -31,7 +31,7 @@ fn field_expr(f: ast::field) -> @ast::expr { ret f.node.expr; }
fn field_exprs(fields: [ast::field]/~) -> [@ast::expr]/~ {
let mut es = []/~;
for fields.each {|f| es += [f.node.expr]/~; }
for fields.each {|f| vec::push(es, f.node.expr); }
ret es;
}