add mut decls to rustc and make them mandatory

Niko Matsakis 2012-03-15 09:47:03 -04:00
parent d7be4abdae
commit b653a18416
70 changed files with 955 additions and 923 deletions
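What "mandatory mut decls" means in practice: every local variable that is later reassigned or appended to now has to be declared with "let mut" rather than plain "let", and that is the only kind of change the hunks below make. As a rough illustration (not code from this commit), here is a present-day Rust sketch of the same rule, loosely modeled on the sanitize function touched in one of the hunks below; the 2012 dialect in the diff differs syntactically (alt, ret, 0u-style literals), but the let / let mut distinction being introduced is the one Rust still has today.

fn sanitize(s: &str) -> String {
    // 'result' is appended to inside the loop, so under the rule this commit
    // enforces it must be declared 'mut'; with a plain 'let', the push calls
    // below would be rejected.
    let mut result = String::new();
    for c in s.chars() {
        match c {
            '@' => result.push_str("_sbox_"),
            '~' => result.push_str("_ubox_"),
            _ => result.push(c),
        }
    }
    // 'prefix' is never modified after initialization, so it stays a plain 'let'.
    let prefix = "_ZN";
    format!("{}{}", prefix, result)
}

fn main() {
    println!("{}", sanitize("@foo~bar"));
}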


@ -53,7 +53,8 @@ $$(TBIN$(1)_T_$(2)_H_$(3))/rustc$$(X): \
$$(RUSTC_INPUTS) \ $$(RUSTC_INPUTS) \
$$(TLIBRUSTC_DEFAULT$(1)_T_$(2)_H_$(3)) $$(TLIBRUSTC_DEFAULT$(1)_T_$(2)_H_$(3))
@$$(call E, compile_and_link: $$@) @$$(call E, compile_and_link: $$@)
$$(STAGE$(1)_T_$(2)_H_$(3)) -o $$@ $$< $$(STAGE$(1)_T_$(2)_H_$(3)) $$(ENFORCE_MUT_VARS_$(1)) \
-o $$@ $$<
$$(TLIB$(1)_T_$(2)_H_$(3))/$$(CFG_LIBRUSTC): \ $$(TLIB$(1)_T_$(2)_H_$(3))/$$(CFG_LIBRUSTC): \
$$(COMPILER_CRATE) $$(COMPILER_INPUTS) \ $$(COMPILER_CRATE) $$(COMPILER_INPUTS) \
@ -62,7 +63,8 @@ $$(TLIB$(1)_T_$(2)_H_$(3))/$$(CFG_LIBRUSTC): \
$$(TCORELIB_DEFAULT$(1)_T_$(2)_H_$(3)) \ $$(TCORELIB_DEFAULT$(1)_T_$(2)_H_$(3)) \
$$(TSTDLIB_DEFAULT$(1)_T_$(2)_H_$(3)) $$(TSTDLIB_DEFAULT$(1)_T_$(2)_H_$(3))
@$$(call E, compile_and_link: $$@) @$$(call E, compile_and_link: $$@)
$$(STAGE$(1)_T_$(2)_H_$(3)) -o $$@ $$< && touch $$@ $$(STAGE$(1)_T_$(2)_H_$(3)) $$(ENFORCE_MUT_VARS_$(1)) \
-o $$@ $$< && touch $$@
endef endef


@ -116,7 +116,7 @@ mod write {
let opts = sess.opts; let opts = sess.opts;
if opts.time_llvm_passes { llvm::LLVMRustEnableTimePasses(); } if opts.time_llvm_passes { llvm::LLVMRustEnableTimePasses(); }
link_intrinsics(sess, llmod); link_intrinsics(sess, llmod);
let pm = mk_pass_manager(); let mut pm = mk_pass_manager();
let td = mk_target_data( let td = mk_target_data(
sess.targ_cfg.target_strs.data_layout); sess.targ_cfg.target_strs.data_layout);
llvm::LLVMAddTargetData(td.lltd, pm.llpm); llvm::LLVMAddTargetData(td.lltd, pm.llpm);
@ -165,7 +165,7 @@ mod write {
llvm::LLVMPassManagerBuilderDispose(FPMB); llvm::LLVMPassManagerBuilderDispose(FPMB);
llvm::LLVMRunPassManager(fpm.llpm, llmod); llvm::LLVMRunPassManager(fpm.llpm, llmod);
let threshold = 225u; let mut threshold = 225u;
if opts.optimize == 3u { threshold = 275u; } if opts.optimize == 3u { threshold = 275u; }
let MPMB = llvm::LLVMPassManagerBuilderCreate(); let MPMB = llvm::LLVMPassManagerBuilderCreate();
@ -195,7 +195,7 @@ mod write {
let LLVMOptDefault = 2 as c_int; // -O2, -Os let LLVMOptDefault = 2 as c_int; // -O2, -Os
let LLVMOptAggressive = 3 as c_int; // -O3 let LLVMOptAggressive = 3 as c_int; // -O3
let CodeGenOptLevel; let mut CodeGenOptLevel;
alt check opts.optimize { alt check opts.optimize {
0u { CodeGenOptLevel = LLVMOptNone; } 0u { CodeGenOptLevel = LLVMOptNone; }
1u { CodeGenOptLevel = LLVMOptLess; } 1u { CodeGenOptLevel = LLVMOptLess; }
@ -203,7 +203,7 @@ mod write {
3u { CodeGenOptLevel = LLVMOptAggressive; } 3u { CodeGenOptLevel = LLVMOptAggressive; }
} }
let FileType; let mut FileType;
if opts.output_type == output_type_object || if opts.output_type == output_type_object ||
opts.output_type == output_type_exe { opts.output_type == output_type_exe {
FileType = LLVMObjectFile; FileType = LLVMObjectFile;
@ -362,9 +362,9 @@ fn build_link_meta(sess: session, c: ast::crate, output: str,
fn provided_link_metas(sess: session, c: ast::crate) -> fn provided_link_metas(sess: session, c: ast::crate) ->
provided_metas { provided_metas {
let name: option<str> = none; let mut name: option<str> = none;
let vers: option<str> = none; let mut vers: option<str> = none;
let cmh_items: [@ast::meta_item] = []; let mut cmh_items: [@ast::meta_item] = [];
let linkage_metas = attr::find_linkage_metas(c.node.attrs); let linkage_metas = attr::find_linkage_metas(c.node.attrs);
attr::require_unique_names(sess, linkage_metas); attr::require_unique_names(sess, linkage_metas);
for meta: @ast::meta_item in linkage_metas { for meta: @ast::meta_item in linkage_metas {
@ -433,7 +433,8 @@ fn build_link_meta(sess: session, c: ast::crate, output: str,
none { none {
let name = let name =
{ {
let os = str::split_char(path::basename(output), '.'); let mut os =
str::split_char(path::basename(output), '.');
if (vec::len(os) < 2u) { if (vec::len(os) < 2u) {
sess.fatal(#fmt("output file name %s doesn't\ sess.fatal(#fmt("output file name %s doesn't\
appear to have an extension", output)); appear to have an extension", output));
@ -494,7 +495,7 @@ fn symbol_hash(tcx: ty::ctxt, sha: sha1, t: ty::t, link_meta: link_meta) ->
} }
fn get_symbol_hash(ccx: @crate_ctxt, t: ty::t) -> str { fn get_symbol_hash(ccx: @crate_ctxt, t: ty::t) -> str {
let hash = ""; let mut hash = "";
alt ccx.type_sha1s.find(t) { alt ccx.type_sha1s.find(t) {
some(h) { hash = h; } some(h) { hash = h; }
none { none {
@ -509,7 +510,7 @@ fn get_symbol_hash(ccx: @crate_ctxt, t: ty::t) -> str {
// Name sanitation. LLVM will happily accept identifiers with weird names, but // Name sanitation. LLVM will happily accept identifiers with weird names, but
// gas doesn't! // gas doesn't!
fn sanitize(s: str) -> str { fn sanitize(s: str) -> str {
let result = ""; let mut result = "";
str::chars_iter(s) {|c| str::chars_iter(s) {|c|
alt c { alt c {
'@' { result += "_sbox_"; } '@' { result += "_sbox_"; }
@ -536,7 +537,7 @@ fn sanitize(s: str) -> str {
fn mangle(ss: path) -> str { fn mangle(ss: path) -> str {
// Follow C++ namespace-mangling style // Follow C++ namespace-mangling style
let n = "_ZN"; // Begin name-sequence. let mut n = "_ZN"; // Begin name-sequence.
for s in ss { for s in ss {
alt s { path_name(s) | path_mod(s) { alt s { path_name(s) | path_mod(s) {
@ -597,7 +598,7 @@ fn link_binary(sess: session,
} else { ret filename; } } else { ret filename; }
}; };
fn rmext(filename: str) -> str { fn rmext(filename: str) -> str {
let parts = str::split_char(filename, '.'); let mut parts = str::split_char(filename, '.');
vec::pop(parts); vec::pop(parts);
ret str::connect(parts, "."); ret str::connect(parts, ".");
} }
@ -636,11 +637,11 @@ fn link_binary(sess: session,
if sess.targ_cfg.os == session::os_win32 { "gcc" } else { "cc" }; if sess.targ_cfg.os == session::os_win32 { "gcc" } else { "cc" };
// The invocations of cc share some flags across platforms // The invocations of cc share some flags across platforms
let cc_args = let mut cc_args =
[stage] + sess.targ_cfg.target_strs.cc_args + [stage] + sess.targ_cfg.target_strs.cc_args +
["-o", output, obj_filename]; ["-o", output, obj_filename];
let lib_cmd; let mut lib_cmd;
let os = sess.targ_cfg.os; let os = sess.targ_cfg.os;
if os == session::os_macos { if os == session::os_macos {
lib_cmd = "-dynamiclib"; lib_cmd = "-dynamiclib";


@ -130,14 +130,13 @@ fn get_relative_to(abs1: path::path, abs2: path::path) -> path::path {
assert len2 > 0u; assert len2 > 0u;
let max_common_path = uint::min(len1, len2) - 1u; let max_common_path = uint::min(len1, len2) - 1u;
let start_idx = 0u; let mut start_idx = 0u;
while start_idx < max_common_path while start_idx < max_common_path
&& split1[start_idx] == split2[start_idx] { && split1[start_idx] == split2[start_idx] {
start_idx += 1u; start_idx += 1u;
} }
let path = []; let mut path = [];
uint::range(start_idx, len1 - 1u) {|_i| path += [".."]; }; uint::range(start_idx, len1 - 1u) {|_i| path += [".."]; };
path += vec::slice(split2, start_idx, len2 - 1u); path += vec::slice(split2, start_idx, len2 - 1u);
@ -179,7 +178,7 @@ fn get_install_prefix_rpath(cwd: path::path, target_triple: str) -> str {
fn minimize_rpaths(rpaths: [str]) -> [str] { fn minimize_rpaths(rpaths: [str]) -> [str] {
let set = map::str_hash::<()>(); let set = map::str_hash::<()>();
let minimized = []; let mut minimized = [];
for rpath in rpaths { for rpath in rpaths {
if !set.contains_key(rpath) { if !set.contains_key(rpath) {
minimized += [rpath]; minimized += [rpath];


@ -34,7 +34,7 @@ fn declare_upcalls(targ_cfg: @session::config,
fn decl(llmod: ModuleRef, prefix: str, name: str, fn decl(llmod: ModuleRef, prefix: str, name: str,
tys: [TypeRef], rv: TypeRef) -> tys: [TypeRef], rv: TypeRef) ->
ValueRef { ValueRef {
let arg_tys: [TypeRef] = []; let mut arg_tys: [TypeRef] = [];
for t: TypeRef in tys { arg_tys += [t]; } for t: TypeRef in tys { arg_tys += [t]; }
let fn_ty = T_fn(arg_tys, rv); let fn_ty = T_fn(arg_tys, rv);
ret base::decl_cdecl_fn(llmod, prefix + name, fn_ty); ret base::decl_cdecl_fn(llmod, prefix + name, fn_ty);


@ -195,8 +195,8 @@ fn highlight_lines(cm: codemap::codemap, sp: span,
// arbitrarily only print up to six lines of the error // arbitrarily only print up to six lines of the error
let max_lines = 6u; let max_lines = 6u;
let elided = false; let mut elided = false;
let display_lines = lines.lines; let mut display_lines = lines.lines;
if vec::len(display_lines) > max_lines { if vec::len(display_lines) > max_lines {
display_lines = vec::slice(display_lines, 0u, max_lines); display_lines = vec::slice(display_lines, 0u, max_lines);
elided = true; elided = true;
@ -210,8 +210,8 @@ fn highlight_lines(cm: codemap::codemap, sp: span,
if elided { if elided {
let last_line = display_lines[vec::len(display_lines) - 1u]; let last_line = display_lines[vec::len(display_lines) - 1u];
let s = #fmt["%s:%u ", fm.name, last_line + 1u]; let s = #fmt["%s:%u ", fm.name, last_line + 1u];
let indent = str::len(s); let mut indent = str::len(s);
let out = ""; let mut out = "";
while indent > 0u { out += " "; indent -= 1u; } while indent > 0u { out += " "; indent -= 1u; }
out += "...\n"; out += "...\n";
io::stderr().write_str(out); io::stderr().write_str(out);
@ -221,22 +221,22 @@ fn highlight_lines(cm: codemap::codemap, sp: span,
// If there's one line at fault we can easily point to the problem // If there's one line at fault we can easily point to the problem
if vec::len(lines.lines) == 1u { if vec::len(lines.lines) == 1u {
let lo = codemap::lookup_char_pos(cm, sp.lo); let lo = codemap::lookup_char_pos(cm, sp.lo);
let digits = 0u; let mut digits = 0u;
let num = (lines.lines[0] + 1u) / 10u; let mut num = (lines.lines[0] + 1u) / 10u;
// how many digits must be indent past? // how many digits must be indent past?
while num > 0u { num /= 10u; digits += 1u; } while num > 0u { num /= 10u; digits += 1u; }
// indent past |name:## | and the 0-offset column location // indent past |name:## | and the 0-offset column location
let left = str::len(fm.name) + digits + lo.col + 3u; let mut left = str::len(fm.name) + digits + lo.col + 3u;
let s = ""; let mut s = "";
while left > 0u { str::push_char(s, ' '); left -= 1u; } while left > 0u { str::push_char(s, ' '); left -= 1u; }
s += "^"; s += "^";
let hi = codemap::lookup_char_pos(cm, sp.hi); let hi = codemap::lookup_char_pos(cm, sp.hi);
if hi.col != lo.col { if hi.col != lo.col {
// the ^ already takes up one space // the ^ already takes up one space
let width = hi.col - lo.col - 1u; let mut width = hi.col - lo.col - 1u;
while width > 0u { str::push_char(s, '~'); width -= 1u; } while width > 0u { str::push_char(s, '~'); width -= 1u; }
} }
io::stderr().write_str(s + "\n"); io::stderr().write_str(s + "\n");


@ -66,7 +66,7 @@ fn build_configuration(sess: session, argv0: str, input: str) ->
fn parse_cfgspecs(cfgspecs: [str]) -> ast::crate_cfg { fn parse_cfgspecs(cfgspecs: [str]) -> ast::crate_cfg {
// FIXME: It would be nice to use the parser to parse all varieties of // FIXME: It would be nice to use the parser to parse all varieties of
// meta_item here. At the moment we just support the meta_word variant. // meta_item here. At the moment we just support the meta_word variant.
let words = []; let mut words = [];
for s: str in cfgspecs { words += [attr::mk_word_item(s)]; } for s: str in cfgspecs { words += [attr::mk_word_item(s)]; }
ret words; ret words;
} }
@ -106,8 +106,8 @@ fn compile_upto(sess: session, cfg: ast::crate_cfg,
outputs: option<output_filenames>) outputs: option<output_filenames>)
-> {crate: @ast::crate, tcx: option<ty::ctxt>} { -> {crate: @ast::crate, tcx: option<ty::ctxt>} {
let time_passes = sess.opts.time_passes; let time_passes = sess.opts.time_passes;
let crate = time(time_passes, "parsing", let mut crate = time(time_passes, "parsing",
bind parse_input(sess, cfg, input)); bind parse_input(sess, cfg, input));
if upto == cu_parse { ret {crate: crate, tcx: none}; } if upto == cu_parse { ret {crate: crate, tcx: none}; }
sess.building_library = session::building_library( sess.building_library = session::building_library(
@ -261,7 +261,7 @@ fn pretty_print_input(sess: session, cfg: ast::crate_cfg, input: str,
}; };
let {crate, tcx} = compile_upto(sess, cfg, input, upto, none); let {crate, tcx} = compile_upto(sess, cfg, input, upto, none);
let ann: pprust::pp_ann = pprust::no_ann(); let mut ann: pprust::pp_ann = pprust::no_ann();
alt ppm { alt ppm {
ppm_typed { ppm_typed {
ann = {pre: ann_paren_for_expr, ann = {pre: ann_paren_for_expr,
@ -362,7 +362,7 @@ fn build_session_options(match: getopts::match,
let parse_only = opt_present(match, "parse-only"); let parse_only = opt_present(match, "parse-only");
let no_trans = opt_present(match, "no-trans"); let no_trans = opt_present(match, "no-trans");
let lint_opts = []; let mut lint_opts = [];
if opt_present(match, "no-lint-ctypes") { if opt_present(match, "no-lint-ctypes") {
lint_opts += [(lint::ctypes, false)]; lint_opts += [(lint::ctypes, false)];
} }
@ -388,7 +388,7 @@ fn build_session_options(match: getopts::match,
let time_llvm_passes = opt_present(match, "time-llvm-passes"); let time_llvm_passes = opt_present(match, "time-llvm-passes");
let sysroot_opt = getopts::opt_maybe_str(match, "sysroot"); let sysroot_opt = getopts::opt_maybe_str(match, "sysroot");
let target_opt = getopts::opt_maybe_str(match, "target"); let target_opt = getopts::opt_maybe_str(match, "target");
let no_asm_comments = getopts::opt_present(match, "no-asm-comments"); let mut no_asm_comments = getopts::opt_present(match, "no-asm-comments");
alt output_type { alt output_type {
// unless we're emitting huamn-readable assembly, omit comments. // unless we're emitting huamn-readable assembly, omit comments.
link::output_type_llvm_assembly | link::output_type_assembly {} link::output_type_llvm_assembly | link::output_type_assembly {}
@ -531,8 +531,8 @@ fn build_output_filenames(ifile: str,
ofile: option<str>, ofile: option<str>,
sess: session) sess: session)
-> output_filenames { -> output_filenames {
let obj_path = ""; let mut obj_path = "";
let out_path: str = ""; let mut out_path: str = "";
let sopts = sess.opts; let sopts = sess.opts;
let stop_after_codegen = let stop_after_codegen =
sopts.output_type != link::output_type_exe || sopts.output_type != link::output_type_exe ||


@ -11,7 +11,7 @@ import rustc::syntax::codemap;
import rustc::driver::diagnostic; import rustc::driver::diagnostic;
fn version(argv0: str) { fn version(argv0: str) {
let vers = "unknown version"; let mut vers = "unknown version";
let env_vers = #env["CFG_VERSION"]; let env_vers = #env["CFG_VERSION"];
if str::len(env_vers) != 0u { vers = env_vers; } if str::len(env_vers) != 0u { vers = env_vers; }
io::stdout().write_str(#fmt["%s %s\n", argv0, vers]); io::stdout().write_str(#fmt["%s %s\n", argv0, vers]);
@ -73,7 +73,8 @@ fn run_compiler(args: [str], demitter: diagnostic::emitter) {
// Don't display log spew by default. Can override with RUST_LOG. // Don't display log spew by default. Can override with RUST_LOG.
logging::console_off(); logging::console_off();
let args = args, binary = vec::shift(args); let mut args = args;
let binary = vec::shift(args);
if vec::len(args) == 0u { usage(binary); ret; } if vec::len(args) == 0u { usage(binary); ret; }


@ -37,7 +37,7 @@ export native_abi;
// From a list of crate attributes get only the meta_items that impact crate // From a list of crate attributes get only the meta_items that impact crate
// linkage // linkage
fn find_linkage_metas(attrs: [ast::attribute]) -> [@ast::meta_item] { fn find_linkage_metas(attrs: [ast::attribute]) -> [@ast::meta_item] {
let metas: [@ast::meta_item] = []; let mut metas: [@ast::meta_item] = [];
for attr: ast::attribute in find_attrs_by_name(attrs, "link") { for attr: ast::attribute in find_attrs_by_name(attrs, "link") {
alt attr.node.value.node { alt attr.node.value.node {
ast::meta_list(_, items) { metas += items; } ast::meta_list(_, items) { metas += items; }
@ -141,7 +141,7 @@ fn attr_meta(attr: ast::attribute) -> @ast::meta_item { @attr.node.value }
// Get the meta_items from inside a vector of attributes // Get the meta_items from inside a vector of attributes
fn attr_metas(attrs: [ast::attribute]) -> [@ast::meta_item] { fn attr_metas(attrs: [ast::attribute]) -> [@ast::meta_item] {
let mitems = []; let mut mitems = [];
for a: ast::attribute in attrs { mitems += [attr_meta(a)]; } for a: ast::attribute in attrs { mitems += [attr_meta(a)]; }
ret mitems; ret mitems;
} }
@ -198,12 +198,12 @@ fn sort_meta_items(items: [@ast::meta_item]) -> [@ast::meta_item] {
} }
// This is sort of stupid here, converting to a vec of mutables and back // This is sort of stupid here, converting to a vec of mutables and back
let v: [mutable @ast::meta_item] = [mutable]; let mut v: [mutable @ast::meta_item] = [mutable];
for mi: @ast::meta_item in items { v += [mutable mi]; } for mi: @ast::meta_item in items { v += [mutable mi]; }
std::sort::quick_sort(lteq, v); std::sort::quick_sort(lteq, v);
let v2: [@ast::meta_item] = []; let mut v2: [@ast::meta_item] = [];
for mi: @ast::meta_item in v { v2 += [mi]; } for mi: @ast::meta_item in v { v2 += [mi]; }
ret v2; ret v2;
} }


@ -267,7 +267,7 @@ fn mk_test_desc_vec_ty(cx: test_ctxt) -> @ast::ty {
fn mk_test_desc_vec(cx: test_ctxt) -> @ast::expr { fn mk_test_desc_vec(cx: test_ctxt) -> @ast::expr {
#debug("building test vector from %u tests", vec::len(cx.testfns)); #debug("building test vector from %u tests", vec::len(cx.testfns));
let descs = []; let mut descs = [];
for test: test in cx.testfns { for test: test in cx.testfns {
let test_ = test; // Satisfy alias analysis let test_ = test; // Satisfy alias analysis
descs += [mk_test_desc_rec(cx, test_)]; descs += [mk_test_desc_rec(cx, test_)];


@ -965,8 +965,8 @@ fn type_to_str_inner(names: type_names, outer0: [TypeRef], ty: TypeRef) ->
let kind: int = llvm::LLVMGetTypeKind(ty) as int; let kind: int = llvm::LLVMGetTypeKind(ty) as int;
fn tys_str(names: type_names, outer: [TypeRef], tys: [TypeRef]) -> str { fn tys_str(names: type_names, outer: [TypeRef], tys: [TypeRef]) -> str {
let s: str = ""; let mut s: str = "";
let first: bool = true; let mut first: bool = true;
for t: TypeRef in tys { for t: TypeRef in tys {
if first { first = false; } else { s += ", "; } if first { first = false; } else { s += ", "; }
s += type_to_str_inner(names, outer, t); s += type_to_str_inner(names, outer, t);
@ -989,7 +989,7 @@ fn type_to_str_inner(names: type_names, outer0: [TypeRef], ty: TypeRef) ->
ret "i" + int::str(llvm::LLVMGetIntTypeWidth(ty) as int); ret "i" + int::str(llvm::LLVMGetIntTypeWidth(ty) as int);
} }
9 { 9 {
let s = "fn("; let mut s = "fn(";
let out_ty: TypeRef = llvm::LLVMGetReturnType(ty); let out_ty: TypeRef = llvm::LLVMGetReturnType(ty);
let n_args = llvm::LLVMCountParamTypes(ty) as uint; let n_args = llvm::LLVMCountParamTypes(ty) as uint;
let args: [TypeRef] = vec::from_elem::<TypeRef>(n_args, 0 as TypeRef); let args: [TypeRef] = vec::from_elem::<TypeRef>(n_args, 0 as TypeRef);
@ -1002,7 +1002,7 @@ fn type_to_str_inner(names: type_names, outer0: [TypeRef], ty: TypeRef) ->
ret s; ret s;
} }
10 { 10 {
let s: str = "{"; let mut s: str = "{";
let n_elts = llvm::LLVMCountStructElementTypes(ty) as uint; let n_elts = llvm::LLVMCountStructElementTypes(ty) as uint;
let elts: [TypeRef] = vec::from_elem::<TypeRef>(n_elts, 0 as TypeRef); let elts: [TypeRef] = vec::from_elem::<TypeRef>(n_elts, 0 as TypeRef);
unsafe { unsafe {
@ -1018,7 +1018,7 @@ fn type_to_str_inner(names: type_names, outer0: [TypeRef], ty: TypeRef) ->
uint::str(llvm::LLVMGetArrayLength(ty) as uint) + "]"; uint::str(llvm::LLVMGetArrayLength(ty) as uint) + "]";
} }
12 { 12 {
let i: uint = 0u; let mut i: uint = 0u;
for tout: TypeRef in outer0 { for tout: TypeRef in outer0 {
i += 1u; i += 1u;
if tout as int == ty as int { if tout as int == ty as int {


@ -109,7 +109,7 @@ enum astencode_tag { // Reserves 0x50 -- 0x6f
fn hash_node_id(&&node_id: int) -> uint { ret 177573u ^ (node_id as uint); } fn hash_node_id(&&node_id: int) -> uint { ret 177573u ^ (node_id as uint); }
fn hash_path(&&s: str) -> uint { fn hash_path(&&s: str) -> uint {
let h = 5381u; let mut h = 5381u;
for ch: u8 in str::bytes(s) { h = (h << 5u) + h ^ (ch as uint); } for ch: u8 in str::bytes(s) { h = (h << 5u) + h ^ (ch as uint); }
ret h; ret h;
} }


@ -68,7 +68,7 @@ fn visit_item(e: env, i: @ast::item) {
} }
none { i.ident } none { i.ident }
}; };
let already_added = false; let mut already_added = false;
if vec::len(attr::find_attrs_by_name(i.attrs, "nolink")) == 0u { if vec::len(attr::find_attrs_by_name(i.attrs, "nolink")) == 0u {
already_added = !cstore::add_used_library(cstore, native_name); already_added = !cstore::add_used_library(cstore, native_name);
} }


@ -40,7 +40,7 @@ fn get_type_param_count(cstore: cstore::cstore, def: ast::def_id) -> uint {
fn lookup_defs(cstore: cstore::cstore, cnum: ast::crate_num, fn lookup_defs(cstore: cstore::cstore, cnum: ast::crate_num,
path: [ast::ident]) -> [ast::def] { path: [ast::ident]) -> [ast::def] {
let result = []; let mut result = [];
#debug("lookup_defs: path = %? cnum = %?", path, cnum); #debug("lookup_defs: path = %? cnum = %?", path, cnum);
for (c, data, def) in resolve_path(cstore, cnum, path) { for (c, data, def) in resolve_path(cstore, cnum, path) {
result += [decoder::lookup_def(c, data, def)]; result += [decoder::lookup_def(c, data, def)];
@ -64,7 +64,7 @@ fn resolve_path(cstore: cstore::cstore, cnum: ast::crate_num,
let cm = cstore::get_crate_data(cstore, cnum); let cm = cstore::get_crate_data(cstore, cnum);
#debug("resolve_path %s in crates[%d]:%s", #debug("resolve_path %s in crates[%d]:%s",
str::connect(path, "::"), cnum, cm.name); str::connect(path, "::"), cnum, cm.name);
let result = []; let mut result = [];
for def in decoder::resolve_path(path, cm.data) { for def in decoder::resolve_path(path, cm.data) {
if def.crate == ast::local_crate { if def.crate == ast::local_crate {
result += [(cnum, cm.data, def)]; result += [(cnum, cm.data, def)];


@ -141,7 +141,7 @@ fn find_use_stmt_cnum(cstore: cstore,
// sorted by crate name. // sorted by crate name.
fn get_dep_hashes(cstore: cstore) -> [str] { fn get_dep_hashes(cstore: cstore) -> [str] {
type crate_hash = {name: str, hash: str}; type crate_hash = {name: str, hash: str};
let result = []; let mut result = [];
p(cstore).use_crate_map.values {|cnum| p(cstore).use_crate_map.values {|cnum|
let cdata = cstore::get_crate_data(cstore, cnum); let cdata = cstore::get_crate_data(cstore, cnum);


@ -60,7 +60,7 @@ fn lookup_hash(d: ebml::doc, eq_fn: fn@([u8]) -> bool, hash: uint) ->
let {tag:_, doc:bucket} = ebml::doc_at(d.data, pos); let {tag:_, doc:bucket} = ebml::doc_at(d.data, pos);
// Awkward logic because we can't ret from foreach yet // Awkward logic because we can't ret from foreach yet
let result: [ebml::doc] = []; let mut result: [ebml::doc] = [];
let belt = tag_index_buckets_bucket_elt; let belt = tag_index_buckets_bucket_elt;
ebml::tagged_docs(bucket, belt) {|elt| ebml::tagged_docs(bucket, belt) {|elt|
let pos = io::u64_from_be_bytes(*elt.data, elt.start, 4u) as uint; let pos = io::u64_from_be_bytes(*elt.data, elt.start, 4u) as uint;
@ -107,7 +107,7 @@ fn item_symbol(item: ebml::doc) -> str {
} }
fn item_parent_item(d: ebml::doc) -> option<ast::def_id> { fn item_parent_item(d: ebml::doc) -> option<ast::def_id> {
let found = none; let mut found = none;
ebml::tagged_docs(d, tag_items_data_parent_item) {|did| ebml::tagged_docs(d, tag_items_data_parent_item) {|did|
found = some(parse_def_id(ebml::doc_data(did))); found = some(parse_def_id(ebml::doc_data(did)));
} }
@ -142,7 +142,7 @@ fn item_type(item_id: ast::def_id, item: ebml::doc,
fn item_impl_iface(item: ebml::doc, tcx: ty::ctxt, cdata: cmd) fn item_impl_iface(item: ebml::doc, tcx: ty::ctxt, cdata: cmd)
-> option<ty::t> { -> option<ty::t> {
let result = none; let mut result = none;
ebml::tagged_docs(item, tag_impl_iface) {|ity| ebml::tagged_docs(item, tag_impl_iface) {|ity|
let t = parse_ty_data(ity.data, cdata.cnum, ity.start, tcx, {|did| let t = parse_ty_data(ity.data, cdata.cnum, ity.start, tcx, {|did|
translate_def_id(cdata, did) translate_def_id(cdata, did)
@ -154,7 +154,7 @@ fn item_impl_iface(item: ebml::doc, tcx: ty::ctxt, cdata: cmd)
fn item_ty_param_bounds(item: ebml::doc, tcx: ty::ctxt, cdata: cmd) fn item_ty_param_bounds(item: ebml::doc, tcx: ty::ctxt, cdata: cmd)
-> @[ty::param_bounds] { -> @[ty::param_bounds] {
let bounds = []; let mut bounds = [];
ebml::tagged_docs(item, tag_items_data_item_ty_param_bounds) {|p| ebml::tagged_docs(item, tag_items_data_item_ty_param_bounds) {|p|
let bd = parse_bounds_data(p.data, p.start, cdata.cnum, tcx, {|did| let bd = parse_bounds_data(p.data, p.start, cdata.cnum, tcx, {|did|
translate_def_id(cdata, did) translate_def_id(cdata, did)
@ -165,14 +165,14 @@ fn item_ty_param_bounds(item: ebml::doc, tcx: ty::ctxt, cdata: cmd)
} }
fn item_ty_param_count(item: ebml::doc) -> uint { fn item_ty_param_count(item: ebml::doc) -> uint {
let n = 0u; let mut n = 0u;
ebml::tagged_docs(item, tag_items_data_item_ty_param_bounds, ebml::tagged_docs(item, tag_items_data_item_ty_param_bounds,
{|_p| n += 1u; }); {|_p| n += 1u; });
n n
} }
fn enum_variant_ids(item: ebml::doc, cdata: cmd) -> [ast::def_id] { fn enum_variant_ids(item: ebml::doc, cdata: cmd) -> [ast::def_id] {
let ids: [ast::def_id] = []; let mut ids: [ast::def_id] = [];
let v = tag_items_data_item_variant; let v = tag_items_data_item_variant;
ebml::tagged_docs(item, v) {|p| ebml::tagged_docs(item, v) {|p|
let ext = parse_def_id(ebml::doc_data(p)); let ext = parse_def_id(ebml::doc_data(p));
@ -191,7 +191,7 @@ fn resolve_path(path: [ast::ident], data: @[u8]) -> [ast::def_id] {
let md = ebml::doc(data); let md = ebml::doc(data);
let paths = ebml::get_doc(md, tag_paths); let paths = ebml::get_doc(md, tag_paths);
let eqer = bind eq_item(_, s); let eqer = bind eq_item(_, s);
let result: [ast::def_id] = []; let mut result: [ast::def_id] = [];
#debug("resolve_path: looking up %s", s); #debug("resolve_path: looking up %s", s);
for doc: ebml::doc in lookup_hash(paths, eqer, hash_path(s)) { for doc: ebml::doc in lookup_hash(paths, eqer, hash_path(s)) {
let did_doc = ebml::get_doc(doc, tag_def_id); let did_doc = ebml::get_doc(doc, tag_def_id);
@ -206,7 +206,7 @@ fn item_path(item_doc: ebml::doc) -> ast_map::path {
let len_doc = ebml::get_doc(path_doc, tag_path_len); let len_doc = ebml::get_doc(path_doc, tag_path_len);
let len = ebml::doc_as_u32(len_doc) as uint; let len = ebml::doc_as_u32(len_doc) as uint;
let result = []; let mut result = [];
vec::reserve(result, len); vec::reserve(result, len);
ebml::docs(path_doc) {|tag, elt_doc| ebml::docs(path_doc) {|tag, elt_doc|
@ -250,7 +250,7 @@ fn lookup_def(cnum: ast::crate_num, data: @[u8], did_: ast::def_id) ->
'm' { ast::def_mod(did) } 'm' { ast::def_mod(did) }
'n' { ast::def_native_mod(did) } 'n' { ast::def_native_mod(did) }
'v' { 'v' {
let tid = option::get(item_parent_item(item)); let mut tid = option::get(item_parent_item(item));
tid = {crate: cnum, node: tid.node}; tid = {crate: cnum, node: tid.node};
ast::def_variant(tid, did) ast::def_variant(tid, did)
} }
@ -279,7 +279,7 @@ fn get_impl_iface(cdata: cmd, id: ast::node_id, tcx: ty::ctxt)
fn get_impl_method(cdata: cmd, id: ast::node_id, name: str) -> ast::def_id { fn get_impl_method(cdata: cmd, id: ast::node_id, name: str) -> ast::def_id {
let items = ebml::get_doc(ebml::doc(cdata.data), tag_items); let items = ebml::get_doc(ebml::doc(cdata.data), tag_items);
let found = none; let mut found = none;
ebml::tagged_docs(find_item(id, items), tag_item_method) {|mid| ebml::tagged_docs(find_item(id, items), tag_item_method) {|mid|
let m_did = parse_def_id(ebml::doc_data(mid)); let m_did = parse_def_id(ebml::doc_data(mid));
if item_name(find_item(m_did.node, items)) == name { if item_name(find_item(m_did.node, items)) == name {
@ -290,7 +290,7 @@ fn get_impl_method(cdata: cmd, id: ast::node_id, name: str) -> ast::def_id {
} }
fn item_is_intrinsic(cdata: cmd, id: ast::node_id) -> bool { fn item_is_intrinsic(cdata: cmd, id: ast::node_id) -> bool {
let intrinsic = false; let mut intrinsic = false;
ebml::tagged_docs(lookup_item(id, cdata.data), tag_item_is_intrinsic, ebml::tagged_docs(lookup_item(id, cdata.data), tag_item_is_intrinsic,
{|_i| intrinsic = true;}); {|_i| intrinsic = true;});
intrinsic intrinsic
@ -332,15 +332,15 @@ fn get_enum_variants(cdata: cmd, id: ast::node_id, tcx: ty::ctxt)
let data = cdata.data; let data = cdata.data;
let items = ebml::get_doc(ebml::doc(data), tag_items); let items = ebml::get_doc(ebml::doc(data), tag_items);
let item = find_item(id, items); let item = find_item(id, items);
let infos: [ty::variant_info] = []; let mut infos: [ty::variant_info] = [];
let variant_ids = enum_variant_ids(item, cdata); let variant_ids = enum_variant_ids(item, cdata);
let disr_val = 0; let mut disr_val = 0;
for did: ast::def_id in variant_ids { for did: ast::def_id in variant_ids {
let item = find_item(did.node, items); let item = find_item(did.node, items);
let ctor_ty = item_type({crate: cdata.cnum, node: id}, item, let ctor_ty = item_type({crate: cdata.cnum, node: id}, item,
tcx, cdata); tcx, cdata);
let name = item_name(item); let name = item_name(item);
let arg_tys: [ty::t] = []; let mut arg_tys: [ty::t] = [];
alt ty::get(ctor_ty).struct { alt ty::get(ctor_ty).struct {
ty::ty_fn(f) { ty::ty_fn(f) {
for a: ty::arg in f.inputs { arg_tys += [a.ty]; } for a: ty::arg in f.inputs { arg_tys += [a.ty]; }
@ -360,7 +360,7 @@ fn get_enum_variants(cdata: cmd, id: ast::node_id, tcx: ty::ctxt)
fn item_impl_methods(cdata: cmd, item: ebml::doc, base_tps: uint) fn item_impl_methods(cdata: cmd, item: ebml::doc, base_tps: uint)
-> [@middle::resolve::method_info] { -> [@middle::resolve::method_info] {
let rslt = []; let mut rslt = [];
ebml::tagged_docs(item, tag_item_method) {|doc| ebml::tagged_docs(item, tag_item_method) {|doc|
let m_did = parse_def_id(ebml::doc_data(doc)); let m_did = parse_def_id(ebml::doc_data(doc));
let mth_item = lookup_item(m_did.node, cdata.data); let mth_item = lookup_item(m_did.node, cdata.data);
@ -375,7 +375,8 @@ fn get_impls_for_mod(cdata: cmd, m_id: ast::node_id,
name: option<ast::ident>) name: option<ast::ident>)
-> @[@middle::resolve::_impl] { -> @[@middle::resolve::_impl] {
let data = cdata.data; let data = cdata.data;
let mod_item = lookup_item(m_id, data), result = []; let mod_item = lookup_item(m_id, data);
let mut result = [];
ebml::tagged_docs(mod_item, tag_mod_impl) {|doc| ebml::tagged_docs(mod_item, tag_mod_impl) {|doc|
let did = translate_def_id(cdata, parse_def_id(ebml::doc_data(doc))); let did = translate_def_id(cdata, parse_def_id(ebml::doc_data(doc)));
let item = lookup_item(did.node, data), nm = item_name(item); let item = lookup_item(did.node, data), nm = item_name(item);
@ -391,7 +392,8 @@ fn get_impls_for_mod(cdata: cmd, m_id: ast::node_id,
fn get_iface_methods(cdata: cmd, id: ast::node_id, tcx: ty::ctxt) fn get_iface_methods(cdata: cmd, id: ast::node_id, tcx: ty::ctxt)
-> @[ty::method] { -> @[ty::method] {
let data = cdata.data; let data = cdata.data;
let item = lookup_item(id, data), result = []; let item = lookup_item(id, data);
let mut result = [];
ebml::tagged_docs(item, tag_item_method) {|mth| ebml::tagged_docs(item, tag_item_method) {|mth|
let bounds = item_ty_param_bounds(mth, tcx, cdata); let bounds = item_ty_param_bounds(mth, tcx, cdata);
let name = item_name(mth); let name = item_name(mth);
@ -413,7 +415,8 @@ fn get_iface_methods(cdata: cmd, id: ast::node_id, tcx: ty::ctxt)
fn get_class_members(cdata: cmd, id: ast::node_id, fn get_class_members(cdata: cmd, id: ast::node_id,
family: char) -> [ty::field_ty] { family: char) -> [ty::field_ty] {
let data = cdata.data; let data = cdata.data;
let item = lookup_item(id, data), result = []; let item = lookup_item(id, data);
let mut result = [];
ebml::tagged_docs(item, tag_items_data_item) {|an_item| ebml::tagged_docs(item, tag_items_data_item) {|an_item|
if item_family(an_item) == family { if item_family(an_item) == family {
let name = item_name(an_item); let name = item_name(an_item);
@ -487,7 +490,7 @@ fn item_family_to_str(fam: char) -> str {
} }
fn get_meta_items(md: ebml::doc) -> [@ast::meta_item] { fn get_meta_items(md: ebml::doc) -> [@ast::meta_item] {
let items: [@ast::meta_item] = []; let mut items: [@ast::meta_item] = [];
ebml::tagged_docs(md, tag_meta_item_word) {|meta_item_doc| ebml::tagged_docs(md, tag_meta_item_word) {|meta_item_doc|
let nd = ebml::get_doc(meta_item_doc, tag_meta_item_name); let nd = ebml::get_doc(meta_item_doc, tag_meta_item_name);
let n = str::from_bytes(ebml::doc_data(nd)); let n = str::from_bytes(ebml::doc_data(nd));
@ -512,7 +515,7 @@ fn get_meta_items(md: ebml::doc) -> [@ast::meta_item] {
} }
fn get_attributes(md: ebml::doc) -> [ast::attribute] { fn get_attributes(md: ebml::doc) -> [ast::attribute] {
let attrs: [ast::attribute] = []; let mut attrs: [ast::attribute] = [];
alt ebml::maybe_get_doc(md, tag_attributes) { alt ebml::maybe_get_doc(md, tag_attributes) {
option::some(attrs_d) { option::some(attrs_d) {
ebml::tagged_docs(attrs_d, tag_attribute) {|attr_doc| ebml::tagged_docs(attrs_d, tag_attribute) {|attr_doc|
@ -554,10 +557,10 @@ fn get_crate_attributes(data: @[u8]) -> [ast::attribute] {
type crate_dep = {cnum: ast::crate_num, ident: str}; type crate_dep = {cnum: ast::crate_num, ident: str};
fn get_crate_deps(data: @[u8]) -> [crate_dep] { fn get_crate_deps(data: @[u8]) -> [crate_dep] {
let deps: [crate_dep] = []; let mut deps: [crate_dep] = [];
let cratedoc = ebml::doc(data); let cratedoc = ebml::doc(data);
let depsdoc = ebml::get_doc(cratedoc, tag_crate_deps); let depsdoc = ebml::get_doc(cratedoc, tag_crate_deps);
let crate_num = 1; let mut crate_num = 1;
ebml::tagged_docs(depsdoc, tag_crate_dep) {|depdoc| ebml::tagged_docs(depsdoc, tag_crate_dep) {|depdoc|
let depname = str::from_bytes(ebml::doc_data(depdoc)); let depname = str::from_bytes(ebml::doc_data(depdoc));
deps += [{cnum: crate_num, ident: depname}]; deps += [{cnum: crate_num, ident: depname}];
@ -615,7 +618,7 @@ fn get_crate_module_paths(bytes: @[u8]) -> [(ast::def_id, str)] {
// find all module (path, def_ids), which are not // find all module (path, def_ids), which are not
// fowarded path due to renamed import or reexport // fowarded path due to renamed import or reexport
let res = []; let mut res = [];
let mods = map::str_hash(); let mods = map::str_hash();
iter_crate_items(bytes) {|path, did| iter_crate_items(bytes) {|path, did|
let m = mod_of_path(path); let m = mod_of_path(path);


@ -180,8 +180,8 @@ fn encode_module_item_paths(ebml_w: ebml::writer, ecx: @encode_ctxt,
fn encode_item_paths(ebml_w: ebml::writer, ecx: @encode_ctxt, crate: @crate) fn encode_item_paths(ebml_w: ebml::writer, ecx: @encode_ctxt, crate: @crate)
-> [entry<str>] { -> [entry<str>] {
let index: [entry<str>] = []; let mut index: [entry<str>] = [];
let path: [str] = []; let mut path: [str] = [];
ebml_w.start_tag(tag_paths); ebml_w.start_tag(tag_paths);
encode_module_item_paths(ebml_w, ecx, crate.node.module, path, index); encode_module_item_paths(ebml_w, ecx, crate.node.module, path, index);
encode_reexport_paths(ebml_w, ecx, index); encode_reexport_paths(ebml_w, ecx, index);
@ -283,8 +283,8 @@ fn encode_enum_variant_info(ecx: @encode_ctxt, ebml_w: ebml::writer,
id: node_id, variants: [variant], id: node_id, variants: [variant],
path: ast_map::path, index: @mutable [entry<int>], path: ast_map::path, index: @mutable [entry<int>],
ty_params: [ty_param]) { ty_params: [ty_param]) {
let disr_val = 0; let mut disr_val = 0;
let i = 0; let mut i = 0;
let vi = ty::enum_variants(ecx.ccx.tcx, {crate: local_crate, node: id}); let vi = ty::enum_variants(ecx.ccx.tcx, {crate: local_crate, node: id});
for variant: variant in variants { for variant: variant in variants {
*index += [{val: variant.node.id, pos: ebml_w.writer.tell()}]; *index += [{val: variant.node.id, pos: ebml_w.writer.tell()}];
@ -604,7 +604,7 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item,
encode_type_param_bounds(ebml_w, ecx, tps); encode_type_param_bounds(ebml_w, ecx, tps);
encode_type(ecx, ebml_w, node_id_to_type(tcx, item.id)); encode_type(ecx, ebml_w, node_id_to_type(tcx, item.id));
encode_name(ebml_w, item.ident); encode_name(ebml_w, item.ident);
let i = 0u; let mut i = 0u;
for mty in *ty::iface_methods(tcx, local_def(item.id)) { for mty in *ty::iface_methods(tcx, local_def(item.id)) {
ebml_w.start_tag(tag_item_method); ebml_w.start_tag(tag_item_method);
encode_name(ebml_w, mty.ident); encode_name(ebml_w, mty.ident);
@ -695,14 +695,14 @@ fn encode_info_for_items(ecx: @encode_ctxt, ebml_w: ebml::writer,
fn create_index<T: copy>(index: [entry<T>], hash_fn: fn@(T) -> uint) -> fn create_index<T: copy>(index: [entry<T>], hash_fn: fn@(T) -> uint) ->
[@[entry<T>]] { [@[entry<T>]] {
let buckets: [@mutable [entry<T>]] = []; let mut buckets: [@mutable [entry<T>]] = [];
uint::range(0u, 256u) {|_i| buckets += [@mutable []]; }; uint::range(0u, 256u) {|_i| buckets += [@mutable []]; };
for elt: entry<T> in index { for elt: entry<T> in index {
let h = hash_fn(elt.val); let h = hash_fn(elt.val);
*buckets[h % 256u] += [elt]; *buckets[h % 256u] += [elt];
} }
let buckets_frozen = []; let mut buckets_frozen = [];
for bucket: @mutable [entry<T>] in buckets { for bucket: @mutable [entry<T>] in buckets {
buckets_frozen += [@*bucket]; buckets_frozen += [@*bucket];
} }
@ -713,7 +713,7 @@ fn encode_index<T>(ebml_w: ebml::writer, buckets: [@[entry<T>]],
write_fn: fn(io::writer, T)) { write_fn: fn(io::writer, T)) {
let writer = ebml_w.writer; let writer = ebml_w.writer;
ebml_w.start_tag(tag_index); ebml_w.start_tag(tag_index);
let bucket_locs: [uint] = []; let mut bucket_locs: [uint] = [];
ebml_w.start_tag(tag_index_buckets); ebml_w.start_tag(tag_index_buckets);
for bucket: @[entry<T>] in buckets { for bucket: @[entry<T>] in buckets {
bucket_locs += [ebml_w.writer.tell()]; bucket_locs += [ebml_w.writer.tell()];
@ -815,8 +815,8 @@ fn synthesize_crate_attrs(ecx: @encode_ctxt, crate: @crate) -> [attribute] {
ret attr::mk_attr(link_item); ret attr::mk_attr(link_item);
} }
let attrs: [attribute] = []; let mut attrs: [attribute] = [];
let found_link_attr = false; let mut found_link_attr = false;
for attr: attribute in crate.node.attrs { for attr: attribute in crate.node.attrs {
attrs += attrs +=
if attr::get_attr_name(attr) != "link" { if attr::get_attr_name(attr) != "link" {
@ -844,7 +844,7 @@ fn encode_crate_deps(ebml_w: ebml::writer, cstore: cstore::cstore) {
type numname = {crate: crate_num, ident: str}; type numname = {crate: crate_num, ident: str};
// Pull the cnums and names out of cstore // Pull the cnums and names out of cstore
let pairs: [mutable numname] = [mutable]; let mut pairs: [mutable numname] = [mutable];
cstore::iter_crate_data(cstore) {|key, val| cstore::iter_crate_data(cstore) {|key, val|
pairs += [mutable {crate: key, ident: val.name}]; pairs += [mutable {crate: key, ident: val.name}];
}; };
@ -854,7 +854,7 @@ fn encode_crate_deps(ebml_w: ebml::writer, cstore: cstore::cstore) {
std::sort::quick_sort(lteq, pairs); std::sort::quick_sort(lteq, pairs);
// Sanity-check the crate numbers // Sanity-check the crate numbers
let expected_cnum = 1; let mut expected_cnum = 1;
for n: numname in pairs { for n: numname in pairs {
assert (n.crate == expected_cnum); assert (n.crate == expected_cnum);
expected_cnum += 1; expected_cnum += 1;


@ -42,7 +42,7 @@ fn parse_ident(st: @pstate, last: char) -> ast::ident {
fn parse_ident_(st: @pstate, is_last: fn@(char) -> bool) -> fn parse_ident_(st: @pstate, is_last: fn@(char) -> bool) ->
ast::ident { ast::ident {
let rslt = ""; let mut rslt = "";
while !is_last(peek(st)) { while !is_last(peek(st)) {
rslt += str::from_byte(next_byte(st)); rslt += str::from_byte(next_byte(st));
} }
@ -64,7 +64,7 @@ fn parse_ret_ty(st: @pstate, conv: conv_did) -> (ast::ret_style, ty::t) {
} }
fn parse_constrs(st: @pstate, conv: conv_did) -> [@ty::constr] { fn parse_constrs(st: @pstate, conv: conv_did) -> [@ty::constr] {
let rslt: [@ty::constr] = []; let mut rslt: [@ty::constr] = [];
alt peek(st) { alt peek(st) {
':' { ':' {
do { do {
@ -79,7 +79,7 @@ fn parse_constrs(st: @pstate, conv: conv_did) -> [@ty::constr] {
// FIXME less copy-and-paste // FIXME less copy-and-paste
fn parse_ty_constrs(st: @pstate, conv: conv_did) -> [@ty::type_constr] { fn parse_ty_constrs(st: @pstate, conv: conv_did) -> [@ty::type_constr] {
let rslt: [@ty::type_constr] = []; let mut rslt: [@ty::type_constr] = [];
alt peek(st) { alt peek(st) {
':' { ':' {
do { do {
@ -93,7 +93,7 @@ fn parse_ty_constrs(st: @pstate, conv: conv_did) -> [@ty::type_constr] {
} }
fn parse_path(st: @pstate) -> @ast::path { fn parse_path(st: @pstate) -> @ast::path {
let idents: [ast::ident] = []; let mut idents: [ast::ident] = [];
fn is_last(c: char) -> bool { ret c == '(' || c == ':'; } fn is_last(c: char) -> bool { ret c == '(' || c == ':'; }
idents += [parse_ident_(st, is_last)]; idents += [parse_ident_(st, is_last)];
loop { loop {
@ -145,12 +145,12 @@ fn parse_constr<T: copy>(st: @pstate, conv: conv_did,
pser: fn(@pstate) -> ast::constr_arg_general_<T>) pser: fn(@pstate) -> ast::constr_arg_general_<T>)
-> @ty::constr_general<T> { -> @ty::constr_general<T> {
let sp = ast_util::dummy_sp(); // FIXME: use a real span let sp = ast_util::dummy_sp(); // FIXME: use a real span
let args: [@sp_constr_arg<T>] = []; let mut args: [@sp_constr_arg<T>] = [];
let pth = parse_path(st); let pth = parse_path(st);
let ignore: char = next(st); let mut ignore: char = next(st);
assert (ignore == '('); assert (ignore == '(');
let def = parse_def(st, conv); let def = parse_def(st, conv);
let an_arg: constr_arg_general_<T>; let mut an_arg: constr_arg_general_<T>;
do { do {
an_arg = pser(st); an_arg = pser(st);
// FIXME use a real span // FIXME use a real span
@ -203,7 +203,7 @@ fn parse_ty(st: @pstate, conv: conv_did) -> ty::t {
't' { 't' {
assert (next(st) == '['); assert (next(st) == '[');
let def = parse_def(st, conv); let def = parse_def(st, conv);
let params: [ty::t] = []; let mut params: [ty::t] = [];
while peek(st) != ']' { params += [parse_ty(st, conv)]; } while peek(st) != ']' { params += [parse_ty(st, conv)]; }
st.pos = st.pos + 1u; st.pos = st.pos + 1u;
ret ty::mk_enum(st.tcx, def, params); ret ty::mk_enum(st.tcx, def, params);
@ -211,7 +211,7 @@ fn parse_ty(st: @pstate, conv: conv_did) -> ty::t {
'x' { 'x' {
assert (next(st) == '['); assert (next(st) == '[');
let def = parse_def(st, conv); let def = parse_def(st, conv);
let params: [ty::t] = []; let mut params: [ty::t] = [];
while peek(st) != ']' { params += [parse_ty(st, conv)]; } while peek(st) != ']' { params += [parse_ty(st, conv)]; }
st.pos = st.pos + 1u; st.pos = st.pos + 1u;
ret ty::mk_iface(st.tcx, def, params); ret ty::mk_iface(st.tcx, def, params);
@ -222,7 +222,7 @@ fn parse_ty(st: @pstate, conv: conv_did) -> ty::t {
} }
's' { 's' {
assert next(st) == '['; assert next(st) == '[';
let params = []; let mut params = [];
while peek(st) != ']' { params += [parse_ty(st, conv)]; } while peek(st) != ']' { params += [parse_ty(st, conv)]; }
st.pos += 1u; st.pos += 1u;
ret ty::mk_self(st.tcx, params); ret ty::mk_self(st.tcx, params);
@ -233,9 +233,9 @@ fn parse_ty(st: @pstate, conv: conv_did) -> ty::t {
'I' { ret ty::mk_vec(st.tcx, parse_mt(st, conv)); } 'I' { ret ty::mk_vec(st.tcx, parse_mt(st, conv)); }
'R' { 'R' {
assert (next(st) == '['); assert (next(st) == '[');
let fields: [ty::field] = []; let mut fields: [ty::field] = [];
while peek(st) != ']' { while peek(st) != ']' {
let name = ""; let mut name = "";
while peek(st) != '=' { while peek(st) != '=' {
name += str::from_byte(next_byte(st)); name += str::from_byte(next_byte(st));
} }
@ -247,7 +247,7 @@ fn parse_ty(st: @pstate, conv: conv_did) -> ty::t {
} }
'T' { 'T' {
assert (next(st) == '['); assert (next(st) == '[');
let params = []; let mut params = [];
while peek(st) != ']' { params += [parse_ty(st, conv)]; } while peek(st) != ']' { params += [parse_ty(st, conv)]; }
st.pos = st.pos + 1u; st.pos = st.pos + 1u;
ret ty::mk_tup(st.tcx, params); ret ty::mk_tup(st.tcx, params);
@ -260,7 +260,7 @@ fn parse_ty(st: @pstate, conv: conv_did) -> ty::t {
assert (next(st) == '['); assert (next(st) == '[');
let def = parse_def(st, conv); let def = parse_def(st, conv);
let inner = parse_ty(st, conv); let inner = parse_ty(st, conv);
let params: [ty::t] = []; let mut params: [ty::t] = [];
while peek(st) != ']' { params += [parse_ty(st, conv)]; } while peek(st) != ']' { params += [parse_ty(st, conv)]; }
st.pos = st.pos + 1u; st.pos = st.pos + 1u;
ret ty::mk_res(st.tcx, def, inner, params); ret ty::mk_res(st.tcx, def, inner, params);
@ -309,7 +309,7 @@ fn parse_ty(st: @pstate, conv: conv_did) -> ty::t {
#debug("saw a ["); #debug("saw a [");
let did = parse_def(st, conv); let did = parse_def(st, conv);
#debug("parsed a def_id %?", did); #debug("parsed a def_id %?", did);
let params: [ty::t] = []; let mut params: [ty::t] = [];
while peek(st) != ']' { params += [parse_ty(st, conv)]; } while peek(st) != ']' { params += [parse_ty(st, conv)]; }
assert (next(st) == ']'); assert (next(st) == ']');
ret ty::mk_class(st.tcx, did, params); ret ty::mk_class(st.tcx, did, params);
@ -319,7 +319,7 @@ fn parse_ty(st: @pstate, conv: conv_did) -> ty::t {
} }
fn parse_mt(st: @pstate, conv: conv_did) -> ty::mt { fn parse_mt(st: @pstate, conv: conv_did) -> ty::mt {
let m; let mut m;
alt peek(st) { alt peek(st) {
'm' { next(st); m = ast::m_mutbl; } 'm' { next(st); m = ast::m_mutbl; }
'?' { next(st); m = ast::m_const; } '?' { next(st); m = ast::m_const; }
@ -329,14 +329,14 @@ fn parse_mt(st: @pstate, conv: conv_did) -> ty::mt {
} }
fn parse_def(st: @pstate, conv: conv_did) -> ast::def_id { fn parse_def(st: @pstate, conv: conv_did) -> ast::def_id {
let def = []; let mut def = [];
while peek(st) != '|' { def += [next_byte(st)]; } while peek(st) != '|' { def += [next_byte(st)]; }
st.pos = st.pos + 1u; st.pos = st.pos + 1u;
ret conv(parse_def_id(def)); ret conv(parse_def_id(def));
} }
fn parse_int(st: @pstate) -> int { fn parse_int(st: @pstate) -> int {
let n = 0; let mut n = 0;
loop { loop {
let cur = peek(st); let cur = peek(st);
if cur < '0' || cur > '9' { ret n; } if cur < '0' || cur > '9' { ret n; }
@ -347,7 +347,7 @@ fn parse_int(st: @pstate) -> int {
} }
fn parse_hex(st: @pstate) -> uint { fn parse_hex(st: @pstate) -> uint {
let n = 0u; let mut n = 0u;
loop { loop {
let cur = peek(st); let cur = peek(st);
if (cur < '0' || cur > '9') && (cur < 'a' || cur > 'f') { ret n; } if (cur < '0' || cur > '9') && (cur < 'a' || cur > 'f') { ret n; }
@ -361,7 +361,7 @@ fn parse_hex(st: @pstate) -> uint {
fn parse_ty_fn(st: @pstate, conv: conv_did) -> ty::fn_ty { fn parse_ty_fn(st: @pstate, conv: conv_did) -> ty::fn_ty {
assert (next(st) == '['); assert (next(st) == '[');
let inputs: [ty::arg] = []; let mut inputs: [ty::arg] = [];
while peek(st) != ']' { while peek(st) != ']' {
let mode = alt check peek(st) { let mode = alt check peek(st) {
'&' { ast::by_mutbl_ref } '&' { ast::by_mutbl_ref }
@ -383,7 +383,7 @@ fn parse_ty_fn(st: @pstate, conv: conv_did) -> ty::fn_ty {
// Rust metadata parsing // Rust metadata parsing
fn parse_def_id(buf: [u8]) -> ast::def_id { fn parse_def_id(buf: [u8]) -> ast::def_id {
let colon_idx = 0u; let mut colon_idx = 0u;
let len = vec::len(buf); let len = vec::len(buf);
while colon_idx < len && buf[colon_idx] != ':' as u8 { colon_idx += 1u; } while colon_idx < len && buf[colon_idx] != ':' as u8 { colon_idx += 1u; }
if colon_idx == len { if colon_idx == len {
@ -393,8 +393,8 @@ fn parse_def_id(buf: [u8]) -> ast::def_id {
let crate_part = vec::slice::<u8>(buf, 0u, colon_idx); let crate_part = vec::slice::<u8>(buf, 0u, colon_idx);
let def_part = vec::slice::<u8>(buf, colon_idx + 1u, len); let def_part = vec::slice::<u8>(buf, colon_idx + 1u, len);
let crate_part_vec = []; let mut crate_part_vec = [];
let def_part_vec = []; let mut def_part_vec = [];
for b: u8 in crate_part { crate_part_vec += [b]; } for b: u8 in crate_part { crate_part_vec += [b]; }
for b: u8 in def_part { def_part_vec += [b]; } for b: u8 in def_part { def_part_vec += [b]; }
@ -421,7 +421,7 @@ fn parse_bounds_data(data: @[u8], start: uint,
} }
fn parse_bounds(st: @pstate, conv: conv_did) -> @[ty::param_bound] { fn parse_bounds(st: @pstate, conv: conv_did) -> @[ty::param_bound] {
let bounds = []; let mut bounds = [];
loop { loop {
bounds += [alt check next(st) { bounds += [alt check next(st) {
'S' { ty::bound_send } 'S' { ty::bound_send }


@ -65,8 +65,8 @@ fn enc_ty(w: io::writer, cx: @ctxt, t: ty::t) {
let end = w.tell(); let end = w.tell();
let len = end - pos; let len = end - pos;
fn estimate_sz(u: uint) -> uint { fn estimate_sz(u: uint) -> uint {
let n = u; let mut n = u;
let len = 0u; let mut len = 0u;
while n != 0u { len += 1u; n = n >> 4u; } while n != 0u { len += 1u; n = n >> 4u; }
ret len; ret len;
} }
@ -256,7 +256,7 @@ fn enc_ty_fn(w: io::writer, cx: @ctxt, ft: ty::fn_ty) {
enc_ty(w, cx, arg.ty); enc_ty(w, cx, arg.ty);
} }
w.write_char(']'); w.write_char(']');
let colon = true; let mut colon = true;
for c: @ty::constr in ft.constraints { for c: @ty::constr in ft.constraints {
if colon { if colon {
w.write_char(':'); w.write_char(':');
@ -276,7 +276,7 @@ fn enc_constr(w: io::writer, cx: @ctxt, c: @ty::constr) {
w.write_char('('); w.write_char('(');
w.write_str(cx.ds(c.node.id)); w.write_str(cx.ds(c.node.id));
w.write_char('|'); w.write_char('|');
let semi = false; let mut semi = false;
for a: @constr_arg in c.node.args { for a: @constr_arg in c.node.args {
if semi { w.write_char(';'); } else { semi = true; } if semi { w.write_char(';'); } else { semi = true; }
alt a.node { alt a.node {
@ -293,7 +293,7 @@ fn enc_ty_constr(w: io::writer, cx: @ctxt, c: @ty::type_constr) {
w.write_char('('); w.write_char('(');
w.write_str(cx.ds(c.node.id)); w.write_str(cx.ds(c.node.id));
w.write_char('|'); w.write_char('|');
let semi = false; let mut semi = false;
for a: @ty::ty_constr_arg in c.node.args { for a: @ty::ty_constr_arg in c.node.args {
if semi { w.write_char(';'); } else { semi = true; } if semi { w.write_char(';'); } else { semi = true; }
alt a.node { alt a.node {


@ -96,7 +96,7 @@ fn visit_fn(cx: @ctx, _fk: visit::fn_kind, decl: ast::fn_decl,
} }
fn visit_expr(cx: @ctx, ex: @ast::expr, sc: scope, v: vt<scope>) { fn visit_expr(cx: @ctx, ex: @ast::expr, sc: scope, v: vt<scope>) {
let handled = true; let mut handled = true;
alt ex.node { alt ex.node {
ast::expr_call(f, args, _) { ast::expr_call(f, args, _) {
check_call(*cx, sc, f, args); check_call(*cx, sc, f, args);
@ -213,9 +213,9 @@ fn check_call(cx: ctx, sc: scope, f: @ast::expr, args: [@ast::expr])
-> [binding] { -> [binding] {
let fty = ty::expr_ty(cx.tcx, f); let fty = ty::expr_ty(cx.tcx, f);
let arg_ts = ty::ty_fn_args(fty); let arg_ts = ty::ty_fn_args(fty);
let mut_roots: [{arg: uint, node: node_id}] = []; let mut mut_roots: [{arg: uint, node: node_id}] = [];
let bindings = []; let mut bindings = [];
let i = 0u; let mut i = 0u;
for arg_t: ty::arg in arg_ts { for arg_t: ty::arg in arg_ts {
let arg = args[i]; let arg = args[i];
let root = expr_root(cx, arg, false); let root = expr_root(cx, arg, false);
@ -251,9 +251,9 @@ fn check_call(cx: ctx, sc: scope, f: @ast::expr, args: [@ast::expr])
_ { true } _ { true }
}; };
if f_may_close { if f_may_close {
let i = 0u; let mut i = 0u;
for b in bindings { for b in bindings {
let unsfe = vec::len(b.unsafe_tys) > 0u; let mut unsfe = vec::len(b.unsafe_tys) > 0u;
alt b.root_var { alt b.root_var {
some(rid) { some(rid) {
for o in sc.bs { for o in sc.bs {
@ -271,10 +271,10 @@ fn check_call(cx: ctx, sc: scope, f: @ast::expr, args: [@ast::expr])
i += 1u; i += 1u;
} }
} }
let j = 0u; let mut j = 0u;
for b in bindings { for b in bindings {
for unsafe_ty in b.unsafe_tys { for unsafe_ty in b.unsafe_tys {
let i = 0u; let mut i = 0u;
for arg_t: ty::arg in arg_ts { for arg_t: ty::arg in arg_ts {
let mut_alias = let mut_alias =
(ast::by_mutbl_ref == ty::arg_mode(cx.tcx, arg_t)); (ast::by_mutbl_ref == ty::arg_mode(cx.tcx, arg_t));
@ -294,7 +294,7 @@ fn check_call(cx: ctx, sc: scope, f: @ast::expr, args: [@ast::expr])
// Ensure we're not passing a root by mutable alias. // Ensure we're not passing a root by mutable alias.
for {node: node, arg: arg} in mut_roots { for {node: node, arg: arg} in mut_roots {
let i = 0u; let mut i = 0u;
for b in bindings { for b in bindings {
if i != arg { if i != arg {
alt b.root_var { alt b.root_var {
@ -319,17 +319,17 @@ fn check_alt(cx: ctx, input: @ast::expr, arms: [ast::arm], sc: scope,
v: vt<scope>) { v: vt<scope>) {
v.visit_expr(input, sc, v); v.visit_expr(input, sc, v);
let orig_invalid = *sc.invalid; let orig_invalid = *sc.invalid;
let all_invalid = orig_invalid; let mut all_invalid = orig_invalid;
let root = expr_root(cx, input, true); let root = expr_root(cx, input, true);
for a: ast::arm in arms { for a: ast::arm in arms {
let new_bs = sc.bs; let mut new_bs = sc.bs;
let root_var = path_def_id(cx, root.ex); let root_var = path_def_id(cx, root.ex);
let pat_id_map = pat_util::pat_id_map(cx.tcx.def_map, a.pats[0]); let pat_id_map = pat_util::pat_id_map(cx.tcx.def_map, a.pats[0]);
type info = { type info = {
id: node_id, id: node_id,
mutable unsafe_tys: [unsafe_ty], mutable unsafe_tys: [unsafe_ty],
span: span}; span: span};
let binding_info: [info] = []; let mut binding_info: [info] = [];
for pat in a.pats { for pat in a.pats {
for proot in pattern_roots(cx.tcx, root.mutbl, pat) { for proot in pattern_roots(cx.tcx, root.mutbl, pat) {
let canon_id = pat_id_map.get(proot.name); let canon_id = pat_id_map.get(proot.name);
@ -361,7 +361,7 @@ fn check_for(cx: ctx, local: @ast::local, seq: @ast::expr, blk: ast::blk,
// If this is a mutable vector, don't allow it to be touched. // If this is a mutable vector, don't allow it to be touched.
let seq_t = ty::expr_ty(cx.tcx, seq); let seq_t = ty::expr_ty(cx.tcx, seq);
let cur_mutbl = root.mutbl; let mut cur_mutbl = root.mutbl;
alt ty::get(seq_t).struct { alt ty::get(seq_t).struct {
ty::ty_vec(mt) { ty::ty_vec(mt) {
if mt.mutbl != ast::m_imm { if mt.mutbl != ast::m_imm {
@ -371,7 +371,7 @@ fn check_for(cx: ctx, local: @ast::local, seq: @ast::expr, blk: ast::blk,
_ {} _ {}
} }
let root_var = path_def_id(cx, root.ex); let root_var = path_def_id(cx, root.ex);
let new_bs = sc.bs; let mut new_bs = sc.bs;
for proot in pattern_roots(cx.tcx, cur_mutbl, local.node.pat) { for proot in pattern_roots(cx.tcx, cur_mutbl, local.node.pat) {
new_bs += [mk_binding(cx, proot.id, proot.span, root_var, new_bs += [mk_binding(cx, proot.id, proot.span, root_var,
unsafe_set(proot.mutbl))]; unsafe_set(proot.mutbl))];
@ -445,7 +445,7 @@ fn check_loop(cx: ctx, sc: scope, checker: fn()) {
} }
fn test_scope(cx: ctx, sc: scope, b: binding, p: @ast::path) { fn test_scope(cx: ctx, sc: scope, b: binding, p: @ast::path) {
let prob = find_invalid(b.node_id, *sc.invalid); let mut prob = find_invalid(b.node_id, *sc.invalid);
alt b.root_var { alt b.root_var {
some(dn) { some(dn) {
for other in sc.bs { for other in sc.bs {
@ -560,12 +560,12 @@ fn copy_is_expensive(tcx: ty::ctxt, ty: ty::t) -> bool {
ty::ty_str | ty::ty_vec(_) | ty::ty_param(_, _) { 50u } ty::ty_str | ty::ty_vec(_) | ty::ty_param(_, _) { 50u }
ty::ty_uniq(mt) { 1u + score_ty(tcx, mt.ty) } ty::ty_uniq(mt) { 1u + score_ty(tcx, mt.ty) }
ty::ty_enum(_, ts) | ty::ty_tup(ts) { ty::ty_enum(_, ts) | ty::ty_tup(ts) {
let sum = 0u; let mut sum = 0u;
for t in ts { sum += score_ty(tcx, t); } for t in ts { sum += score_ty(tcx, t); }
sum sum
} }
ty::ty_rec(fs) { ty::ty_rec(fs) {
let sum = 0u; let mut sum = 0u;
for f in fs { sum += score_ty(tcx, f.mt.ty); } for f in fs { sum += score_ty(tcx, f.mt.ty); }
sum sum
} }
@ -628,7 +628,7 @@ fn pattern_roots(tcx: ty::ctxt, mutbl: option<unsafe_ty>, pat: @ast::pat)
} }
} }
} }
let set = []; let mut set = [];
walk(tcx, mutbl, pat, set); walk(tcx, mutbl, pat, set);
ret set; ret set;
} }
@ -638,7 +638,7 @@ fn pattern_roots(tcx: ty::ctxt, mutbl: option<unsafe_ty>, pat: @ast::pat)
fn expr_root(cx: ctx, ex: @ast::expr, autoderef: bool) fn expr_root(cx: ctx, ex: @ast::expr, autoderef: bool)
-> {ex: @ast::expr, mutbl: option<unsafe_ty>} { -> {ex: @ast::expr, mutbl: option<unsafe_ty>} {
let base_root = mutbl::expr_root(cx.tcx, ex, autoderef); let base_root = mutbl::expr_root(cx.tcx, ex, autoderef);
let unsafe_ty = none; let mut unsafe_ty = none;
for d in *base_root.ds { for d in *base_root.ds {
if d.mutbl { unsafe_ty = some(contains(d.outer_t)); break; } if d.mutbl { unsafe_ty = some(contains(d.outer_t)); break; }
} }
@ -651,7 +651,7 @@ fn unsafe_set(from: option<unsafe_ty>) -> [unsafe_ty] {
fn find_invalid(id: node_id, lst: list<@invalid>) fn find_invalid(id: node_id, lst: list<@invalid>)
-> option<@invalid> { -> option<@invalid> {
let cur = lst; let mut cur = lst;
loop { loop {
alt cur { alt cur {
list::nil { ret none; } list::nil { ret none; }
@ -664,9 +664,9 @@ fn find_invalid(id: node_id, lst: list<@invalid>)
} }
fn join_invalid(a: list<@invalid>, b: list<@invalid>) -> list<@invalid> { fn join_invalid(a: list<@invalid>, b: list<@invalid>) -> list<@invalid> {
let result = a; let mut result = a;
list::iter(b) {|elt| list::iter(b) {|elt|
let found = false; let mut found = false;
list::iter(a) {|e| if e == elt { found = true; } } list::iter(a) {|e| if e == elt { found = true; } }
if !found { result = list::cons(elt, @result); } if !found { result = list::cons(elt, @result); }
} }
@ -674,7 +674,7 @@ fn join_invalid(a: list<@invalid>, b: list<@invalid>) -> list<@invalid> {
} }
fn filter_invalid(src: list<@invalid>, bs: [binding]) -> list<@invalid> { fn filter_invalid(src: list<@invalid>, bs: [binding]) -> list<@invalid> {
let out = list::nil, cur = src; let mut out = list::nil, cur = src;
while cur != list::nil { while cur != list::nil {
alt cur { alt cur {
list::cons(head, tail) { list::cons(head, tail) {


@ -26,7 +26,7 @@ fn visit_expr(ex: @expr, cx: ctx, v: visit::vt<ctx>) {
expr_call(f, args, _) { expr_call(f, args, _) {
cx.allow_block = true; cx.allow_block = true;
v.visit_expr(f, cx, v); v.visit_expr(f, cx, v);
let i = 0u; let mut i = 0u;
for arg_t in ty::ty_fn_args(ty::expr_ty(cx.tcx, f)) { for arg_t in ty::ty_fn_args(ty::expr_ty(cx.tcx, f)) {
cx.allow_block = (ty::arg_mode(cx.tcx, arg_t) == by_ref); cx.allow_block = (ty::arg_mode(cx.tcx, arg_t) == by_ref);
v.visit_expr(args[i], cx, v); v.visit_expr(args[i], cx, v);


@ -128,7 +128,7 @@ fn compute_capture_vars(tcx: ty::ctxt,
} }
} }
let result = []; let mut result = [];
cap_map.values { |cap_var| result += [cap_var]; } cap_map.values { |cap_var| result += [cap_var]; }
ret result; ret result;
} }


@ -35,12 +35,12 @@ fn check_expr(tcx: ty::ctxt, ex: @expr, &&s: (), v: visit::vt<()>) {
} }
fn check_arms(tcx: ty::ctxt, arms: [arm]) { fn check_arms(tcx: ty::ctxt, arms: [arm]) {
let i = 0; let mut i = 0;
/* Check for unreachable patterns */ /* Check for unreachable patterns */
for arm: arm in arms { for arm: arm in arms {
for arm_pat: @pat in arm.pats { for arm_pat: @pat in arm.pats {
let reachable = true; let mut reachable = true;
let j = 0; let mut j = 0;
while j < i { while j < i {
if option::is_none(arms[j].guard) { if option::is_none(arms[j].guard) {
for prev_pat: @pat in arms[j].pats { for prev_pat: @pat in arms[j].pats {
@ -121,7 +121,7 @@ fn check_exhaustive(tcx: ty::ctxt, sp: span, pats: [@pat]) {
} }
} }
ty::ty_bool { ty::ty_bool {
let saw_true = false, saw_false = false; let mut saw_true = false, saw_false = false;
for p in pats { for p in pats {
alt raw_pat(p).node { alt raw_pat(p).node {
pat_lit(@{node: expr_lit(@{node: lit_bool(b), _}), _}) { pat_lit(@{node: expr_lit(@{node: lit_bool(b), _}), _}) {
@ -192,7 +192,7 @@ fn check_exhaustive_enum(tcx: ty::ctxt, enum_id: def_id, sp: span,
fn pattern_supersedes(tcx: ty::ctxt, a: @pat, b: @pat) -> bool { fn pattern_supersedes(tcx: ty::ctxt, a: @pat, b: @pat) -> bool {
fn patterns_supersede(tcx: ty::ctxt, as: [@pat], bs: [@pat]) -> bool { fn patterns_supersede(tcx: ty::ctxt, as: [@pat], bs: [@pat]) -> bool {
let i = 0; let mut i = 0;
for a: @pat in as { for a: @pat in as {
if !pattern_supersedes(tcx, a, bs[i]) { ret false; } if !pattern_supersedes(tcx, a, bs[i]) { ret false; }
i += 1; i += 1;
@ -203,7 +203,7 @@ fn pattern_supersedes(tcx: ty::ctxt, a: @pat, b: @pat) -> bool {
fbs: [field_pat]) -> bool { fbs: [field_pat]) -> bool {
let wild = @{id: 0, node: pat_wild, span: dummy_sp()}; let wild = @{id: 0, node: pat_wild, span: dummy_sp()};
for fa: field_pat in fas { for fa: field_pat in fas {
let pb = wild; let mut pb = wild;
for fb: field_pat in fbs { for fb: field_pat in fbs {
if fa.ident == fb.ident { pb = fb.pat; } if fa.ident == fb.ident { pb = fb.pat; }
} }


@ -49,11 +49,11 @@ fn collect_freevars(def_map: resolve::def_map, blk: ast::blk)
visit::visit_expr(expr, depth + 1, v); visit::visit_expr(expr, depth + 1, v);
} }
ast::expr_path(path) { ast::expr_path(path) {
let i = 0; let mut i = 0;
alt def_map.find(expr.id) { alt def_map.find(expr.id) {
none { fail ("Not found: " + path_to_str(path)) } none { fail ("Not found: " + path_to_str(path)) }
some(df) { some(df) {
let def = df; let mut def = df;
while i < depth { while i < depth {
alt copy def { alt copy def {
ast::def_upvar(_, inner, _) { def = *inner; } ast::def_upvar(_, inner, _) { def = *inner; }


@ -173,7 +173,7 @@ fn check_expr(e: @expr, cx: ctx, v: visit::vt<ctx>) {
for a in args { alt a { some(ex) { maybe_copy(cx, ex); } _ {} } } for a in args { alt a { some(ex) { maybe_copy(cx, ex); } _ {} } }
} }
expr_call(f, args, _) { expr_call(f, args, _) {
let i = 0u; let mut i = 0u;
for arg_t in ty::ty_fn_args(ty::expr_ty(cx.tcx, f)) { for arg_t in ty::ty_fn_args(ty::expr_ty(cx.tcx, f)) {
alt ty::arg_mode(cx.tcx, arg_t) { alt ty::arg_mode(cx.tcx, arg_t) {
by_copy { maybe_copy(cx, args[i]); } by_copy { maybe_copy(cx, args[i]); }


@ -108,7 +108,8 @@ fn visit_expr(ex: @expr, cx: ctx, v: visit::vt<ctx>) {
} }
expr_alt(input, arms, _) { expr_alt(input, arms, _) {
v.visit_expr(input, cx, v); v.visit_expr(input, cx, v);
let before = cx.current, sets = []; let before = cx.current;
let mut sets = [];
for arm in arms { for arm in arms {
cx.current = before; cx.current = before;
v.visit_arm(arm, cx, v); v.visit_arm(arm, cx, v);
@ -118,7 +119,7 @@ fn visit_expr(ex: @expr, cx: ctx, v: visit::vt<ctx>) {
} }
expr_if(cond, then, els) { expr_if(cond, then, els) {
v.visit_expr(cond, cx, v); v.visit_expr(cond, cx, v);
let cur = cx.current; let mut cur = cx.current;
visit::visit_block(then, cx, v); visit::visit_block(then, cx, v);
cx.current <-> cur; cx.current <-> cur;
visit::visit_expr_opt(els, cx, v); visit::visit_expr_opt(els, cx, v);
@ -164,7 +165,7 @@ fn visit_expr(ex: @expr, cx: ctx, v: visit::vt<ctx>) {
} }
expr_call(f, args, _) { expr_call(f, args, _) {
v.visit_expr(f, cx, v); v.visit_expr(f, cx, v);
let fns = []; let mut fns = [];
let arg_ts = ty::ty_fn_args(ty::expr_ty(cx.tcx, f)); let arg_ts = ty::ty_fn_args(ty::expr_ty(cx.tcx, f));
vec::iter2(args, arg_ts) {|arg, arg_t| vec::iter2(args, arg_ts) {|arg, arg_t|
alt arg.node { alt arg.node {
@ -204,7 +205,7 @@ fn visit_stmt(s: @stmt, cx: ctx, v: visit::vt<ctx>) {
stmt_decl(@{node: decl_local(ls), _}, _) { stmt_decl(@{node: decl_local(ls), _}, _) {
shadow_in_current(cx, {|id| shadow_in_current(cx, {|id|
for local in ls { for local in ls {
let found = false; let mut found = false;
pat_util::pat_bindings(cx.tcx.def_map, local.node.pat, pat_util::pat_bindings(cx.tcx.def_map, local.node.pat,
{|pid, _a, _b| {|pid, _a, _b|
if pid == id { found = true; } if pid == id { found = true; }
@ -247,7 +248,7 @@ fn visit_fn(fk: visit::fn_kind, decl: fn_decl, body: blk,
} }
_ {} _ {}
} }
let old_cur = [], old_blocks = nil; let mut old_cur = [], old_blocks = nil;
cx.blocks <-> old_blocks; cx.blocks <-> old_blocks;
cx.current <-> old_cur; cx.current <-> old_cur;
visit::visit_fn(fk, decl, body, sp, id, cx, v); visit::visit_fn(fk, decl, body, sp, id, cx, v);
@ -272,7 +273,7 @@ fn visit_block(tp: block_type, cx: ctx, visit: fn()) {
} }
fn add_block_exit(cx: ctx, tp: block_type) -> bool { fn add_block_exit(cx: ctx, tp: block_type) -> bool {
let cur = cx.blocks; let mut cur = cx.blocks;
while cur != nil { while cur != nil {
alt cur { alt cur {
cons(b, tail) { cons(b, tail) {
@ -293,12 +294,13 @@ fn add_block_exit(cx: ctx, tp: block_type) -> bool {
} }
fn join_branches(branches: [set]) -> set { fn join_branches(branches: [set]) -> set {
let found: set = [], i = 0u, l = vec::len(branches); let mut found: set = [], i = 0u;
let l = vec::len(branches);
for set in branches { for set in branches {
i += 1u; i += 1u;
for {def, uses} in set { for {def, uses} in set {
if !vec::any(found, {|v| v.def == def}) { if !vec::any(found, {|v| v.def == def}) {
let j = i, nne = uses; let mut j = i, nne = uses;
while j < l { while j < l {
for {def: d2, uses} in branches[j] { for {def: d2, uses} in branches[j] {
if d2 == def { if d2 == def {
@ -331,7 +333,7 @@ fn leave_fn(cx: ctx) {
} }
fn shadow_in_current(cx: ctx, p: fn(node_id) -> bool) { fn shadow_in_current(cx: ctx, p: fn(node_id) -> bool) {
let out = []; let mut out = [];
cx.current <-> out; cx.current <-> out;
for e in out { if !p(e.def) { cx.current += [e]; } } for e in out { if !p(e.def) { cx.current += [e]; } }
} }


@ -66,7 +66,7 @@ fn merge_opts(attrs: [ast::attribute], cmd_opts: [(option, bool)]) ->
ret false; ret false;
} }
let result = cmd_opts; let mut result = cmd_opts;
let lint_metas = let lint_metas =
attr::attr_metas(attr::find_attrs_by_name(attrs, "lint")); attr::attr_metas(attr::find_attrs_by_name(attrs, "lint"));


@ -15,7 +15,7 @@ type deref = @{mutbl: bool, kind: deref_t, outer_t: ty::t};
fn expr_root(tcx: ty::ctxt, ex: @expr, autoderef: bool) -> fn expr_root(tcx: ty::ctxt, ex: @expr, autoderef: bool) ->
{ex: @expr, ds: @[deref]} { {ex: @expr, ds: @[deref]} {
fn maybe_auto_unbox(tcx: ty::ctxt, t: ty::t) -> {t: ty::t, ds: [deref]} { fn maybe_auto_unbox(tcx: ty::ctxt, t: ty::t) -> {t: ty::t, ds: [deref]} {
let ds = [], t = t; let mut ds = [], t = t;
loop { loop {
alt ty::get(t).struct { alt ty::get(t).struct {
ty::ty_box(mt) | ty::ty_uniq(mt) | ty::ty_rptr(_, mt) { ty::ty_box(mt) | ty::ty_uniq(mt) | ty::ty_rptr(_, mt) {
@ -42,12 +42,12 @@ fn expr_root(tcx: ty::ctxt, ex: @expr, autoderef: bool) ->
} }
ret {t: t, ds: ds}; ret {t: t, ds: ds};
} }
let ds: [deref] = [], ex = ex; let mut ds: [deref] = [], ex = ex;
loop { loop {
alt copy ex.node { alt copy ex.node {
expr_field(base, ident, _) { expr_field(base, ident, _) {
let auto_unbox = maybe_auto_unbox(tcx, ty::expr_ty(tcx, base)); let auto_unbox = maybe_auto_unbox(tcx, ty::expr_ty(tcx, base));
let is_mutbl = false; let mut is_mutbl = false;
alt ty::get(auto_unbox.t).struct { alt ty::get(auto_unbox.t).struct {
ty::ty_rec(fields) { ty::ty_rec(fields) {
for fld: ty::field in fields { for fld: ty::field in fields {
@ -83,7 +83,7 @@ fn expr_root(tcx: ty::ctxt, ex: @expr, autoderef: bool) ->
expr_unary(op, base) { expr_unary(op, base) {
if op == deref { if op == deref {
let base_t = ty::expr_ty(tcx, base); let base_t = ty::expr_ty(tcx, base);
let is_mutbl = false, ptr = false; let mut is_mutbl = false, ptr = false;
alt ty::get(base_t).struct { alt ty::get(base_t).struct {
ty::ty_box(mt) { is_mutbl = mt.mutbl == m_mutbl; } ty::ty_box(mt) { is_mutbl = mt.mutbl == m_mutbl; }
ty::ty_uniq(mt) { is_mutbl = mt.mutbl == m_mutbl; } ty::ty_uniq(mt) { is_mutbl = mt.mutbl == m_mutbl; }
@ -236,7 +236,7 @@ fn check_move_rhs(cx: @ctx, src: @expr) {
fn check_call(cx: @ctx, f: @expr, args: [@expr]) { fn check_call(cx: @ctx, f: @expr, args: [@expr]) {
let arg_ts = ty::ty_fn_args(ty::expr_ty(cx.tcx, f)); let arg_ts = ty::ty_fn_args(ty::expr_ty(cx.tcx, f));
let i = 0u; let mut i = 0u;
for arg_t: ty::arg in arg_ts { for arg_t: ty::arg in arg_ts {
alt ty::resolved_mode(cx.tcx, arg_t.mode) { alt ty::resolved_mode(cx.tcx, arg_t.mode) {
by_mutbl_ref { check_lval(cx, args[i], msg_mutbl_ref); } by_mutbl_ref { check_lval(cx, args[i], msg_mutbl_ref); }
@ -249,7 +249,7 @@ fn check_call(cx: @ctx, f: @expr, args: [@expr]) {
fn check_bind(cx: @ctx, f: @expr, args: [option<@expr>]) { fn check_bind(cx: @ctx, f: @expr, args: [option<@expr>]) {
let arg_ts = ty::ty_fn_args(ty::expr_ty(cx.tcx, f)); let arg_ts = ty::ty_fn_args(ty::expr_ty(cx.tcx, f));
let i = 0u; let mut i = 0u;
for arg in args { for arg in args {
alt arg { alt arg {
some(expr) { some(expr) {


@ -63,7 +63,7 @@ fn walk_pat(pat: @pat, it: fn(@pat)) {
} }
fn pat_binding_ids(dm: resolve::def_map, pat: @pat) -> [node_id] { fn pat_binding_ids(dm: resolve::def_map, pat: @pat) -> [node_id] {
let found = []; let mut found = [];
pat_bindings(dm, pat) {|b_id, _sp, _pt| found += [b_id]; }; pat_bindings(dm, pat) {|b_id, _sp, _pt| found += [b_id]; };
ret found; ret found;
} }


@ -88,7 +88,7 @@ fn region_to_scope(region_map: @region_map, region: ty::region)
// `superscope` and false otherwise. // `superscope` and false otherwise.
fn scope_contains(region_map: @region_map, superscope: ast::node_id, fn scope_contains(region_map: @region_map, superscope: ast::node_id,
subscope: ast::node_id) -> bool { subscope: ast::node_id) -> bool {
let subscope = subscope; let mut subscope = subscope;
while superscope != subscope { while superscope != subscope {
alt region_map.parents.find(subscope) { alt region_map.parents.find(subscope) {
none { ret false; } none { ret false; }
@ -140,7 +140,7 @@ fn resolve_ty(ty: @ast::ty, cx: ctxt, visitor: visit::vt<ctxt>) {
// If at item scope, introduce or reuse a binding. If at // If at item scope, introduce or reuse a binding. If at
// block scope, require that the binding be introduced. // block scope, require that the binding be introduced.
let bindings = cx.bindings; let bindings = cx.bindings;
let region; let mut region;
alt list::find(*bindings, {|b| ident == b.name}) { alt list::find(*bindings, {|b| ident == b.name}) {
some(binding) { region = ty::re_named(binding.id); } some(binding) { region = ty::re_named(binding.id); }
none { none {
@ -301,7 +301,7 @@ fn resolve_local(local: @ast::local, cx: ctxt, visitor: visit::vt<ctxt>) {
fn resolve_item(item: @ast::item, cx: ctxt, visitor: visit::vt<ctxt>) { fn resolve_item(item: @ast::item, cx: ctxt, visitor: visit::vt<ctxt>) {
// Items create a new outer block scope as far as we're concerned. // Items create a new outer block scope as far as we're concerned.
let parent; let mut parent;
let mut self_binding = cx.self_binding; let mut self_binding = cx.self_binding;
alt item.node { alt item.node {
ast::item_fn(_, _, _) | ast::item_enum(_, _) { ast::item_fn(_, _, _) | ast::item_enum(_, _) {


@ -251,7 +251,7 @@ fn map_crate(e: @env, c: @ast::crate) {
} }
fn path_from_scope(sc: scopes, n: str) -> str { fn path_from_scope(sc: scopes, n: str) -> str {
let path = n + "::"; let mut path = n + "::";
list::iter(sc) {|s| list::iter(sc) {|s|
alt s { alt s {
scope_item(i) { path = i.ident + "::" + path; } scope_item(i) { path = i.ident + "::" + path; }
@ -439,7 +439,8 @@ fn resolve_names(e: @env, c: @ast::crate) {
} }
} }
fn walk_tps(e: @env, tps: [ast::ty_param], sc: scopes, v: vt<scopes>) { fn walk_tps(e: @env, tps: [ast::ty_param], sc: scopes, v: vt<scopes>) {
let outer_current_tp = e.current_tp, current = 0u; let outer_current_tp = e.current_tp;
let mut current = 0u;
for tp in tps { for tp in tps {
e.current_tp = some(current); e.current_tp = some(current);
for bound in *tp.bounds { for bound in *tp.bounds {
@ -653,8 +654,8 @@ fn visit_local_with_scope(e: @env, loc: @local, sc:scopes, v:vt<scopes>) {
fn follow_import(e: env, sc: scopes, path: [ident], sp: span) -> fn follow_import(e: env, sc: scopes, path: [ident], sp: span) ->
option<def> { option<def> {
let path_len = vec::len(path); let path_len = vec::len(path);
let dcur = lookup_in_scope_strict(e, sc, sp, path[0], ns_module); let mut dcur = lookup_in_scope_strict(e, sc, sp, path[0], ns_module);
let i = 1u; let mut i = 1u;
loop { loop {
alt dcur { alt dcur {
some(dcur_def) { some(dcur_def) {
@ -711,7 +712,7 @@ fn resolve_import(e: env, defid: ast::def_id, name: ast::ident,
// resolution of this import. // resolution of this import.
fn find_imports_after(e: env, id: node_id, sc: scopes) -> [node_id] { fn find_imports_after(e: env, id: node_id, sc: scopes) -> [node_id] {
fn lst(my_id: node_id, vis: [@view_item]) -> [node_id] { fn lst(my_id: node_id, vis: [@view_item]) -> [node_id] {
let imports = [], found = false; let mut imports = [], found = false;
for vi in vis { for vi in vis {
iter_effective_import_paths(*vi) {|vp| iter_effective_import_paths(*vi) {|vp|
alt vp.node { alt vp.node {
@ -753,7 +754,7 @@ fn resolve_import(e: env, defid: ast::def_id, name: ast::ident,
// This function has cleanup code at the end. Do not return without going // This function has cleanup code at the end. Do not return without going
// through that. // through that.
e.imports.insert(defid.node, resolving(sp)); e.imports.insert(defid.node, resolving(sp));
let ignored = find_imports_after(e, defid.node, sc); let mut ignored = find_imports_after(e, defid.node, sc);
e.ignored_imports <-> ignored; e.ignored_imports <-> ignored;
let n_idents = vec::len(ids); let n_idents = vec::len(ids);
let end_id = ids[n_idents - 1u]; let end_id = ids[n_idents - 1u];
@ -766,10 +767,10 @@ fn resolve_import(e: env, defid: ast::def_id, name: ast::ident,
unresolved_err(e, in_scope(sc), sp, ids[0], ns_name(ns_module)); unresolved_err(e, in_scope(sc), sp, ids[0], ns_name(ns_module));
} }
some(dcur_) { some(dcur_) {
let dcur = dcur_, i = 1u; let mut dcur = dcur_, i = 1u;
loop { loop {
if i == n_idents - 1u { if i == n_idents - 1u {
let impls = []; let mut impls = [];
find_impls_in_mod(e, dcur, impls, some(end_id)); find_impls_in_mod(e, dcur, impls, some(end_id));
register(e, defid.node, in_mod(dcur), sp, name, {|ns| register(e, defid.node, in_mod(dcur), sp, name, {|ns|
lookup_in_mod(e, dcur, sp, end_id, ns, outside) lookup_in_mod(e, dcur, sp, end_id, ns, outside)
@ -817,7 +818,7 @@ enum ctxt { in_mod(def), in_scope(scopes), }
fn unresolved_err(e: env, cx: ctxt, sp: span, name: ident, kind: str) { fn unresolved_err(e: env, cx: ctxt, sp: span, name: ident, kind: str) {
fn find_fn_or_mod_scope(sc: scopes) -> option<scope> { fn find_fn_or_mod_scope(sc: scopes) -> option<scope> {
let sc = sc; let mut sc = sc;
loop { loop {
alt sc { alt sc {
cons(cur, rest) { cons(cur, rest) {
@ -834,7 +835,7 @@ fn unresolved_err(e: env, cx: ctxt, sp: span, name: ident, kind: str) {
} }
}; };
} }
let path = name; let mut path = name;
alt cx { alt cx {
in_scope(sc) { in_scope(sc) {
alt find_fn_or_mod_scope(sc) { alt find_fn_or_mod_scope(sc) {
@ -884,8 +885,8 @@ fn lookup_path_strict(e: env, sc: scopes, sp: span, pth: ast::path_,
alt dcur_ { alt dcur_ {
none { ret none; } none { ret none; }
some(dcur__) { some(dcur__) {
let i = 1u; let mut i = 1u;
let dcur = dcur__; let mut dcur = dcur__;
while i < n_idents { while i < n_idents {
let curns = if n_idents == i + 1u { ns } else { ns_module }; let curns = if n_idents == i + 1u { ns } else { ns_module };
alt lookup_in_mod_strict(e, dcur, sp, pth.idents[i], alt lookup_in_mod_strict(e, dcur, sp, pth.idents[i],
@ -1059,18 +1060,18 @@ fn lookup_in_scope(e: env, sc: scopes, sp: span, name: ident, ns: namespace,
} }
ret none; ret none;
} }
let left_fn = false; let mut left_fn = false;
let closing = []; let mut closing = [];
// Used to determine whether self is in scope // Used to determine whether self is in scope
let left_fn_level2 = false; let mut left_fn_level2 = false;
let sc = sc; let mut sc = sc;
loop { loop {
alt copy sc { alt copy sc {
nil { ret none; } nil { ret none; }
cons(hd, tl) { cons(hd, tl) {
alt in_scope(e, sp, name, hd, ns) { alt in_scope(e, sp, name, hd, ns) {
some(df_) { some(df_) {
let df = df_; let mut df = df_;
let local = def_is_local(df), self_scope = def_is_self(df); let local = def_is_local(df), self_scope = def_is_self(df);
if check_capture && if check_capture &&
(left_fn && local || left_fn_level2 && self_scope (left_fn && local || left_fn_level2 && self_scope
@ -1082,7 +1083,7 @@ fn lookup_in_scope(e: env, sc: scopes, sp: span, name: ident, ns: namespace,
}; };
e.sess.span_fatal(sp, msg); e.sess.span_fatal(sp, msg);
} else if local || self_scope { } else if local || self_scope {
let i = vec::len(closing); let mut i = vec::len(closing);
while i > 0u { while i > 0u {
i -= 1u; i -= 1u;
#debug["name=%s df=%?", name, df]; #debug["name=%s df=%?", name, df];
@ -1112,7 +1113,7 @@ fn lookup_in_scope(e: env, sc: scopes, sp: span, name: ident, ns: namespace,
fn lookup_in_ty_params(e: env, name: ident, ty_params: [ast::ty_param]) fn lookup_in_ty_params(e: env, name: ident, ty_params: [ast::ty_param])
-> option<def> { -> option<def> {
let n = 0u; let mut n = 0u;
for tp: ast::ty_param in ty_params { for tp: ast::ty_param in ty_params {
if str::eq(tp.ident, name) && alt e.current_tp { if str::eq(tp.ident, name) && alt e.current_tp {
some(cur) { n < cur } none { true } some(cur) { n < cur } none { true }
@ -1123,7 +1124,7 @@ fn lookup_in_ty_params(e: env, name: ident, ty_params: [ast::ty_param])
} }
fn lookup_in_pat(e: env, name: ident, pat: @ast::pat) -> option<node_id> { fn lookup_in_pat(e: env, name: ident, pat: @ast::pat) -> option<node_id> {
let found = none; let mut found = none;
pat_util::pat_bindings(e.def_map, pat) {|p_id, _sp, n| pat_util::pat_bindings(e.def_map, pat) {|p_id, _sp, n|
if str::eq(path_to_ident(n), name) if str::eq(path_to_ident(n), name)
@ -1176,7 +1177,7 @@ fn lookup_in_class(parent_id: def_id,
fn lookup_in_block(e: env, name: ident, sp: span, b: ast::blk_, pos: uint, fn lookup_in_block(e: env, name: ident, sp: span, b: ast::blk_, pos: uint,
loc_pos: uint, ns: namespace) -> option<def> { loc_pos: uint, ns: namespace) -> option<def> {
let i = vec::len(b.stmts); let mut i = vec::len(b.stmts);
while i > 0u { while i > 0u {
i -= 1u; i -= 1u;
let st = b.stmts[i]; let st = b.stmts[i];
@ -1185,7 +1186,7 @@ fn lookup_in_block(e: env, name: ident, sp: span, b: ast::blk_, pos: uint,
alt d.node { alt d.node {
ast::decl_local(locs) { ast::decl_local(locs) {
if i <= pos { if i <= pos {
let j = vec::len(locs); let mut j = vec::len(locs);
while j > 0u { while j > 0u {
j -= 1u; j -= 1u;
let loc = locs[j]; let loc = locs[j];
@ -1240,7 +1241,7 @@ fn lookup_in_block(e: env, name: ident, sp: span, b: ast::blk_, pos: uint,
} }
for vi in b.view_items { for vi in b.view_items {
let is_import = false; let mut is_import = false;
alt vi.node { alt vi.node {
ast::view_item_import(_) { is_import = true; } ast::view_item_import(_) { is_import = true; }
_ {} _ {}
@ -1344,7 +1345,7 @@ fn lookup_in_mod(e: env, m: def, sp: span, name: ident, ns: namespace,
// examining a module in an external crate // examining a module in an external crate
let cached = e.ext_cache.find({did: defid, ident: name, ns: ns}); let cached = e.ext_cache.find({did: defid, ident: name, ns: ns});
if !is_none(cached) { ret cached; } if !is_none(cached) { ret cached; }
let path = [name]; let mut path = [name];
if defid.node != ast::crate_node_id { if defid.node != ast::crate_node_id {
path = cstore::get_path(e.cstore, defid) + path; path = cstore::get_path(e.cstore, defid) + path;
} }
@ -1430,7 +1431,7 @@ fn is_exported(e: env, i: ident, m: @indexed_mod) -> bool {
// `f` returns `none` for every element, `list_search` returns `none`. // `f` returns `none` for every element, `list_search` returns `none`.
fn list_search<T: copy, U: copy>(ls: list<T>, f: fn(T) -> option<U>) fn list_search<T: copy, U: copy>(ls: list<T>, f: fn(T) -> option<U>)
-> option<U> { -> option<U> {
let ls = ls; let mut ls = ls;
loop { loop {
alt ls { alt ls {
cons(hd, tl) { cons(hd, tl) {
@ -1624,7 +1625,7 @@ fn index_mod(md: ast::_mod) -> mod_index {
} }
ast::item_enum(variants, _) { ast::item_enum(variants, _) {
add_to_index(index, it.ident, mie_item(it)); add_to_index(index, it.ident, mie_item(it));
let variant_idx: uint = 0u; let mut variant_idx: uint = 0u;
for v: ast::variant in variants { for v: ast::variant in variants {
add_to_index(index, v.node.name, add_to_index(index, v.node.name,
mie_enum_variant(variant_idx, variants, mie_enum_variant(variant_idx, variants,
@ -1716,10 +1717,10 @@ fn check_for_collisions(e: @env, c: ast::crate) {
} }
fn check_mod_name(e: env, name: ident, entries: list<mod_index_entry>) { fn check_mod_name(e: env, name: ident, entries: list<mod_index_entry>) {
let saw_mod = false; let mut saw_mod = false;
let saw_type = false; let mut saw_type = false;
let saw_value = false; let mut saw_value = false;
let entries = entries; let mut entries = entries;
fn dup(e: env, sp: span, word: str, name: ident) { fn dup(e: env, sp: span, word: str, name: ident) {
e.sess.span_fatal(sp, "duplicate definition of " + word + name); e.sess.span_fatal(sp, "duplicate definition of " + word + name);
} }
@ -1761,7 +1762,7 @@ fn mie_span(mie: mod_index_entry) -> span {
fn check_item(e: @env, i: @ast::item, &&x: (), v: vt<()>) { fn check_item(e: @env, i: @ast::item, &&x: (), v: vt<()>) {
fn typaram_names(tps: [ast::ty_param]) -> [ident] { fn typaram_names(tps: [ast::ty_param]) -> [ident] {
let x: [ast::ident] = []; let mut x: [ast::ident] = [];
for tp: ast::ty_param in tps { x += [tp.ident]; } for tp: ast::ty_param in tps { x += [tp.ident]; }
ret x; ret x;
} }
@ -1791,7 +1792,7 @@ fn check_arm(e: @env, a: ast::arm, &&x: (), v: vt<()>) {
let ch0 = checker(*e, "binding"); let ch0 = checker(*e, "binding");
check_pat(e, ch0, a.pats[0]); check_pat(e, ch0, a.pats[0]);
let seen0 = ch0.seen; let seen0 = ch0.seen;
let i = vec::len(a.pats); let mut i = vec::len(a.pats);
while i > 1u { while i > 1u {
i -= 1u; i -= 1u;
let ch = checker(*e, "binding"); let ch = checker(*e, "binding");
@ -1992,7 +1993,7 @@ fn check_exports(e: @env) {
fn check_export(e: @env, ident: str, _mod: @indexed_mod, fn check_export(e: @env, ident: str, _mod: @indexed_mod,
export_id: node_id, vi: @view_item) { export_id: node_id, vi: @view_item) {
let found_something = false; let mut found_something = false;
if _mod.index.contains_key(ident) { if _mod.index.contains_key(ident) {
found_something = true; found_something = true;
let xs = _mod.index.get(ident); let xs = _mod.index.get(ident);
@ -2051,7 +2052,7 @@ fn check_exports(e: @env) {
let parent_id = check_enum_ok(e, span, id, _mod); let parent_id = check_enum_ok(e, span, id, _mod);
add_export(e, export_id, local_def(parent_id), false); add_export(e, export_id, local_def(parent_id), false);
for variant_id in ids { for variant_id in ids {
let found = false; let mut found = false;
alt _mod.index.find(variant_id.node.name) { alt _mod.index.find(variant_id.node.name) {
some(ms) { some(ms) {
list::iter(ms) {|m| list::iter(ms) {|m|
@ -2158,7 +2159,7 @@ fn find_impls_in_view_item(e: env, vi: @ast::view_item,
iter_effective_import_paths(*vi) { |vp| iter_effective_import_paths(*vi) { |vp|
alt vp.node { alt vp.node {
ast::view_path_simple(name, pt, id) { ast::view_path_simple(name, pt, id) {
let found = []; let mut found = [];
if vec::len(*pt) == 1u { if vec::len(*pt) == 1u {
option::may(sc) {|sc| option::may(sc) {|sc|
list::iter(sc) {|level| list::iter(sc) {|level|
@ -2223,14 +2224,14 @@ fn find_impls_in_item(e: env, i: @ast::item, &impls: [@_impl],
fn find_impls_in_mod_by_id(e: env, defid: def_id, &impls: [@_impl], fn find_impls_in_mod_by_id(e: env, defid: def_id, &impls: [@_impl],
name: option<ident>) { name: option<ident>) {
let cached; let mut cached;
alt e.impl_cache.find(defid) { alt e.impl_cache.find(defid) {
some(some(v)) { cached = v; } some(some(v)) { cached = v; }
some(none) { ret; } some(none) { ret; }
none { none {
e.impl_cache.insert(defid, none); e.impl_cache.insert(defid, none);
cached = if defid.crate == ast::local_crate { cached = if defid.crate == ast::local_crate {
let tmp = []; let mut tmp = [];
let mi = e.mod_map.get(defid.node); let mi = e.mod_map.get(defid.node);
let md = option::get(mi.m); let md = option::get(mi.m);
for vi in md.view_items { for vi in md.view_items {
@ -2268,7 +2269,7 @@ fn find_impls_in_mod(e: env, m: def, &impls: [@_impl],
fn visit_block_with_impl_scope(e: @env, b: ast::blk, sc: iscopes, fn visit_block_with_impl_scope(e: @env, b: ast::blk, sc: iscopes,
v: vt<iscopes>) { v: vt<iscopes>) {
let impls = []; let mut impls = [];
for vi in b.node.view_items { for vi in b.node.view_items {
find_impls_in_view_item(*e, vi, impls, some(sc)); find_impls_in_view_item(*e, vi, impls, some(sc));
} }
@ -2286,7 +2287,7 @@ fn visit_block_with_impl_scope(e: @env, b: ast::blk, sc: iscopes,
fn visit_mod_with_impl_scope(e: @env, m: ast::_mod, s: span, id: node_id, fn visit_mod_with_impl_scope(e: @env, m: ast::_mod, s: span, id: node_id,
sc: iscopes, v: vt<iscopes>) { sc: iscopes, v: vt<iscopes>) {
let impls = []; let mut impls = [];
for vi in m.view_items { for vi in m.view_items {
find_impls_in_view_item(*e, vi, impls, some(sc)); find_impls_in_view_item(*e, vi, impls, some(sc));
} }


@ -40,7 +40,8 @@ enum opt_result {
range_result(result, result), range_result(result, result),
} }
fn trans_opt(bcx: block, o: opt) -> opt_result { fn trans_opt(bcx: block, o: opt) -> opt_result {
let ccx = bcx.ccx(), bcx = bcx; let ccx = bcx.ccx();
let mut bcx = bcx;
alt o { alt o {
lit(l) { lit(l) {
alt l.node { alt l.node {
@ -101,7 +102,7 @@ fn has_nested_bindings(m: match, col: uint) -> bool {
} }
fn expand_nested_bindings(m: match, col: uint, val: ValueRef) -> match { fn expand_nested_bindings(m: match, col: uint, val: ValueRef) -> match {
let result = []; let mut result = [];
for br in m { for br in m {
alt br.pats[col].node { alt br.pats[col].node {
ast::pat_ident(name, some(inner)) { ast::pat_ident(name, some(inner)) {
@ -122,7 +123,7 @@ type enter_pat = fn(@ast::pat) -> option<[@ast::pat]>;
fn enter_match(dm: def_map, m: match, col: uint, val: ValueRef, fn enter_match(dm: def_map, m: match, col: uint, val: ValueRef,
e: enter_pat) -> match { e: enter_pat) -> match {
let result = []; let mut result = [];
for br: match_branch in m { for br: match_branch in m {
alt e(br.pats[col]) { alt e(br.pats[col]) {
some(sub) { some(sub) {
@ -185,9 +186,9 @@ fn enter_rec(dm: def_map, m: match, col: uint, fields: [ast::ident],
enter_match(dm, m, col, val) {|p| enter_match(dm, m, col, val) {|p|
alt p.node { alt p.node {
ast::pat_rec(fpats, _) { ast::pat_rec(fpats, _) {
let pats = []; let mut pats = [];
for fname: ast::ident in fields { for fname: ast::ident in fields {
let pat = dummy; let mut pat = dummy;
for fpat: ast::field_pat in fpats { for fpat: ast::field_pat in fpats {
if str::eq(fpat.ident, fname) { pat = fpat.pat; break; } if str::eq(fpat.ident, fname) { pat = fpat.pat; break; }
} }
@ -237,7 +238,7 @@ fn get_options(ccx: @crate_ctxt, m: match, col: uint) -> [opt] {
set += [val]; set += [val];
} }
let found = []; let mut found = [];
for br in m { for br in m {
let cur = br.pats[col]; let cur = br.pats[col];
if pat_is_variant(ccx.tcx.def_map, cur) { if pat_is_variant(ccx.tcx.def_map, cur) {
@ -258,13 +259,14 @@ fn get_options(ccx: @crate_ctxt, m: match, col: uint) -> [opt] {
fn extract_variant_args(bcx: block, pat_id: ast::node_id, fn extract_variant_args(bcx: block, pat_id: ast::node_id,
vdefs: {enm: def_id, var: def_id}, val: ValueRef) -> vdefs: {enm: def_id, var: def_id}, val: ValueRef) ->
{vals: [ValueRef], bcx: block} { {vals: [ValueRef], bcx: block} {
let ccx = bcx.fcx.ccx, bcx = bcx; let ccx = bcx.fcx.ccx;
let mut bcx = bcx;
let enum_ty_substs = alt check ty::get(node_id_type(bcx, pat_id)).struct { let enum_ty_substs = alt check ty::get(node_id_type(bcx, pat_id)).struct {
ty::ty_enum(id, tps) { assert id == vdefs.enm; tps } ty::ty_enum(id, tps) { assert id == vdefs.enm; tps }
}; };
let blobptr = val; let mut blobptr = val;
let variants = ty::enum_variants(ccx.tcx, vdefs.enm); let variants = ty::enum_variants(ccx.tcx, vdefs.enm);
let args = []; let mut args = [];
let size = ty::enum_variant_with_id(ccx.tcx, vdefs.enm, let size = ty::enum_variant_with_id(ccx.tcx, vdefs.enm,
vdefs.var).args.len(); vdefs.var).args.len();
if size > 0u && (*variants).len() != 1u { if size > 0u && (*variants).len() != 1u {
@ -272,7 +274,7 @@ fn extract_variant_args(bcx: block, pat_id: ast::node_id,
PointerCast(bcx, val, T_opaque_enum_ptr(ccx)); PointerCast(bcx, val, T_opaque_enum_ptr(ccx));
blobptr = GEPi(bcx, enumptr, [0, 1]); blobptr = GEPi(bcx, enumptr, [0, 1]);
} }
let i = 0u; let mut i = 0u;
let vdefs_tg = vdefs.enm; let vdefs_tg = vdefs.enm;
let vdefs_var = vdefs.var; let vdefs_var = vdefs.var;
while i < size { while i < size {
@ -286,7 +288,7 @@ fn extract_variant_args(bcx: block, pat_id: ast::node_id,
} }
fn collect_record_fields(m: match, col: uint) -> [ast::ident] { fn collect_record_fields(m: match, col: uint) -> [ast::ident] {
let fields = []; let mut fields = [];
for br: match_branch in m { for br: match_branch in m {
alt br.pats[col].node { alt br.pats[col].node {
ast::pat_rec(fs, _) { ast::pat_rec(fs, _) {
@ -336,12 +338,12 @@ fn pick_col(m: match) -> uint {
} }
let scores = vec::to_mut(vec::from_elem(m[0].pats.len(), 0u)); let scores = vec::to_mut(vec::from_elem(m[0].pats.len(), 0u));
for br: match_branch in m { for br: match_branch in m {
let i = 0u; let mut i = 0u;
for p: @ast::pat in br.pats { scores[i] += score(p); i += 1u; } for p: @ast::pat in br.pats { scores[i] += score(p); i += 1u; }
} }
let max_score = 0u; let mut max_score = 0u;
let best_col = 0u; let mut best_col = 0u;
let i = 0u; let mut i = 0u;
for score: uint in scores { for score: uint in scores {
// Irrefutable columns always go first, they'd only be duplicated in // Irrefutable columns always go first, they'd only be duplicated in
// the branches. // the branches.
@ -356,7 +358,8 @@ fn pick_col(m: match) -> uint {
fn compile_submatch(bcx: block, m: match, vals: [ValueRef], fn compile_submatch(bcx: block, m: match, vals: [ValueRef],
chk: option<mk_fail>, &exits: [exit_node]) { chk: option<mk_fail>, &exits: [exit_node]) {
let bcx = bcx, tcx = bcx.tcx(), dm = tcx.def_map; let mut bcx = bcx;
let tcx = bcx.tcx(), dm = tcx.def_map;
if m.len() == 0u { Br(bcx, option::get(chk)()); ret; } if m.len() == 0u { Br(bcx, option::get(chk)()); ret; }
if m[0].pats.len() == 0u { if m[0].pats.len() == 0u {
let data = m[0].data; let data = m[0].data;
@ -394,7 +397,7 @@ fn compile_submatch(bcx: block, m: match, vals: [ValueRef],
let vals_left = vec::slice(vals, 0u, col) + let vals_left = vec::slice(vals, 0u, col) +
vec::slice(vals, col + 1u, vals.len()); vec::slice(vals, col + 1u, vals.len());
let ccx = bcx.fcx.ccx; let ccx = bcx.fcx.ccx;
let pat_id = 0; let mut pat_id = 0;
for br: match_branch in m { for br: match_branch in m {
// Find a real id (we're adding placeholder wildcard patterns, but // Find a real id (we're adding placeholder wildcard patterns, but
// each column is guaranteed to have at least one real pattern) // each column is guaranteed to have at least one real pattern)
@ -405,7 +408,7 @@ fn compile_submatch(bcx: block, m: match, vals: [ValueRef],
// Separate path for extracting and binding record fields // Separate path for extracting and binding record fields
if rec_fields.len() > 0u { if rec_fields.len() > 0u {
let fields = ty::get_fields(node_id_type(bcx, pat_id)); let fields = ty::get_fields(node_id_type(bcx, pat_id));
let rec_vals = []; let mut rec_vals = [];
for field_name: ast::ident in rec_fields { for field_name: ast::ident in rec_fields {
let ix = option::get(ty::field_idx(field_name, fields)); let ix = option::get(ty::field_idx(field_name, fields));
rec_vals += [GEPi(bcx, val, [0, ix as int])]; rec_vals += [GEPi(bcx, val, [0, ix as int])];
@ -421,7 +424,7 @@ fn compile_submatch(bcx: block, m: match, vals: [ValueRef],
ty::ty_tup(elts) { elts.len() } ty::ty_tup(elts) { elts.len() }
_ { ccx.sess.bug("non-tuple type in tuple pattern"); } _ { ccx.sess.bug("non-tuple type in tuple pattern"); }
}; };
let tup_vals = [], i = 0u; let mut tup_vals = [], i = 0u;
while i < n_tup_elts { while i < n_tup_elts {
tup_vals += [GEPi(bcx, val, [0, i as int])]; tup_vals += [GEPi(bcx, val, [0, i as int])];
i += 1u; i += 1u;
@ -450,8 +453,8 @@ fn compile_submatch(bcx: block, m: match, vals: [ValueRef],
// Decide what kind of branch we need // Decide what kind of branch we need
let opts = get_options(ccx, m, col); let opts = get_options(ccx, m, col);
enum branch_kind { no_branch, single, switch, compare, } enum branch_kind { no_branch, single, switch, compare, }
let kind = no_branch; let mut kind = no_branch;
let test_val = val; let mut test_val = val;
if opts.len() > 0u { if opts.len() > 0u {
alt opts[0] { alt opts[0] {
var(_, vdef) { var(_, vdef) {
@ -493,11 +496,12 @@ fn compile_submatch(bcx: block, m: match, vals: [ValueRef],
let defaults = enter_default(dm, m, col, val); let defaults = enter_default(dm, m, col, val);
let exhaustive = option::is_none(chk) && defaults.len() == 0u; let exhaustive = option::is_none(chk) && defaults.len() == 0u;
let len = opts.len(), i = 0u; let len = opts.len();
let mut i = 0u;
// Compile subtrees for each option // Compile subtrees for each option
for opt in opts { for opt in opts {
i += 1u; i += 1u;
let opt_cx = else_cx; let mut opt_cx = else_cx;
if !exhaustive || i < len { if !exhaustive || i < len {
opt_cx = sub_block(bcx, "match_case"); opt_cx = sub_block(bcx, "match_case");
alt kind { alt kind {
@ -533,8 +537,8 @@ fn compile_submatch(bcx: block, m: match, vals: [ValueRef],
_ { } _ { }
} }
} else if kind == compare { Br(bcx, else_cx.llbb); } } else if kind == compare { Br(bcx, else_cx.llbb); }
let size = 0u; let mut size = 0u;
let unpacked = []; let mut unpacked = [];
alt opt { alt opt {
var(_, vdef) { var(_, vdef) {
let args = extract_variant_args(opt_cx, pat_id, vdef, val); let args = extract_variant_args(opt_cx, pat_id, vdef, val);
@ -561,10 +565,10 @@ fn compile_submatch(bcx: block, m: match, vals: [ValueRef],
fn make_phi_bindings(bcx: block, map: [exit_node], fn make_phi_bindings(bcx: block, map: [exit_node],
ids: pat_util::pat_id_map) -> bool { ids: pat_util::pat_id_map) -> bool {
let our_block = bcx.llbb as uint; let our_block = bcx.llbb as uint;
let success = true, bcx = bcx; let mut success = true, bcx = bcx;
ids.items {|name, node_id| ids.items {|name, node_id|
let llbbs = []; let mut llbbs = [];
let vals = []; let mut vals = [];
for ex: exit_node in map { for ex: exit_node in map {
if ex.to as uint == our_block { if ex.to as uint == our_block {
alt assoc(name, ex.bound) { alt assoc(name, ex.bound) {
@ -613,7 +617,7 @@ fn trans_alt(bcx: block, expr: @ast::expr, arms: [ast::arm],
fn trans_alt_inner(scope_cx: block, expr: @ast::expr, arms: [ast::arm], fn trans_alt_inner(scope_cx: block, expr: @ast::expr, arms: [ast::arm],
mode: ast::alt_mode, dest: dest) -> block { mode: ast::alt_mode, dest: dest) -> block {
let bcx = scope_cx, tcx = bcx.tcx(); let bcx = scope_cx, tcx = bcx.tcx();
let bodies = [], match = []; let mut bodies = [], match = [];
let {bcx, val, _} = trans_temp_expr(bcx, expr); let {bcx, val, _} = trans_temp_expr(bcx, expr);
if bcx.unreachable { ret bcx; } if bcx.unreachable { ret bcx; }
@ -647,19 +651,19 @@ fn trans_alt_inner(scope_cx: block, expr: @ast::expr, arms: [ast::arm],
} }
ast::alt_exhaustive { none } ast::alt_exhaustive { none }
}; };
let exit_map = []; let mut exit_map = [];
let t = node_id_type(bcx, expr.id); let t = node_id_type(bcx, expr.id);
let {bcx, val: spilled} = spill_if_immediate(bcx, val, t); let {bcx, val: spilled} = spill_if_immediate(bcx, val, t);
compile_submatch(bcx, match, [spilled], mk_fail, exit_map); compile_submatch(bcx, match, [spilled], mk_fail, exit_map);
let arm_cxs = [], arm_dests = [], i = 0u; let mut arm_cxs = [], arm_dests = [], i = 0u;
for a in arms { for a in arms {
let body_cx = bodies[i]; let body_cx = bodies[i];
let id_map = pat_util::pat_id_map(tcx.def_map, a.pats[0]); let id_map = pat_util::pat_id_map(tcx.def_map, a.pats[0]);
if make_phi_bindings(body_cx, exit_map, id_map) { if make_phi_bindings(body_cx, exit_map, id_map) {
let arm_dest = dup_for_join(dest); let arm_dest = dup_for_join(dest);
arm_dests += [arm_dest]; arm_dests += [arm_dest];
let arm_cx = trans_block(body_cx, a.body, arm_dest); let mut arm_cx = trans_block(body_cx, a.body, arm_dest);
arm_cx = trans_block_cleanups(arm_cx, body_cx); arm_cx = trans_block_cleanups(arm_cx, body_cx);
arm_cxs += [arm_cx]; arm_cxs += [arm_cx];
} }
@ -671,7 +675,8 @@ fn trans_alt_inner(scope_cx: block, expr: @ast::expr, arms: [ast::arm],
// Not alt-related, but similar to the pattern-munging code above // Not alt-related, but similar to the pattern-munging code above
fn bind_irrefutable_pat(bcx: block, pat: @ast::pat, val: ValueRef, fn bind_irrefutable_pat(bcx: block, pat: @ast::pat, val: ValueRef,
make_copy: bool) -> block { make_copy: bool) -> block {
let ccx = bcx.fcx.ccx, bcx = bcx; let ccx = bcx.fcx.ccx;
let mut bcx = bcx;
// Necessary since bind_irrefutable_pat is called outside trans_alt // Necessary since bind_irrefutable_pat is called outside trans_alt
alt pat.node { alt pat.node {
@ -694,7 +699,7 @@ fn bind_irrefutable_pat(bcx: block, pat: @ast::pat, val: ValueRef,
ast::pat_enum(_, sub) { ast::pat_enum(_, sub) {
let vdefs = ast_util::variant_def_ids(ccx.tcx.def_map.get(pat.id)); let vdefs = ast_util::variant_def_ids(ccx.tcx.def_map.get(pat.id));
let args = extract_variant_args(bcx, pat.id, vdefs, val); let args = extract_variant_args(bcx, pat.id, vdefs, val);
let i = 0; let mut i = 0;
for argval: ValueRef in args.vals { for argval: ValueRef in args.vals {
bcx = bind_irrefutable_pat(bcx, sub[i], argval, make_copy); bcx = bind_irrefutable_pat(bcx, sub[i], argval, make_copy);
i += 1; i += 1;
@ -710,7 +715,7 @@ fn bind_irrefutable_pat(bcx: block, pat: @ast::pat, val: ValueRef,
} }
} }
ast::pat_tup(elems) { ast::pat_tup(elems) {
let i = 0u; let mut i = 0u;
for elem in elems { for elem in elems {
let fldptr = GEPi(bcx, val, [0, i as int]); let fldptr = GEPi(bcx, val, [0, i as int]);
bcx = bind_irrefutable_pat(bcx, elem, fldptr, make_copy); bcx = bind_irrefutable_pat(bcx, elem, fldptr, make_copy);


@ -83,7 +83,7 @@ fn dup_for_join(dest: dest) -> dest {
fn join_returns(parent_cx: block, in_cxs: [block], fn join_returns(parent_cx: block, in_cxs: [block],
in_ds: [dest], out_dest: dest) -> block { in_ds: [dest], out_dest: dest) -> block {
let out = sub_block(parent_cx, "join"); let out = sub_block(parent_cx, "join");
let reachable = false, i = 0u, phi = none; let mut reachable = false, i = 0u, phi = none;
for cx in in_cxs { for cx in in_cxs {
if !cx.unreachable { if !cx.unreachable {
Br(cx, out.llbb); Br(cx, out.llbb);
@ -192,7 +192,7 @@ fn trans_native_call(cx: block, externs: hashmap<str, ValueRef>,
let n = args.len() as int; let n = args.len() as int;
let llnative: ValueRef = let llnative: ValueRef =
get_simple_extern_fn(cx, externs, llmod, name, n); get_simple_extern_fn(cx, externs, llmod, name, n);
let call_args: [ValueRef] = []; let mut call_args: [ValueRef] = [];
for a: ValueRef in args { for a: ValueRef in args {
call_args += [ZExtOrBitCast(cx, a, cx.ccx().int_type)]; call_args += [ZExtOrBitCast(cx, a, cx.ccx().int_type)];
} }
@ -288,7 +288,8 @@ fn opaque_box_body(bcx: block,
// header. // header.
fn trans_malloc_boxed_raw(bcx: block, t: ty::t, fn trans_malloc_boxed_raw(bcx: block, t: ty::t,
&static_ti: option<@tydesc_info>) -> result { &static_ti: option<@tydesc_info>) -> result {
let bcx = bcx, ccx = bcx.ccx(); let mut bcx = bcx;
let ccx = bcx.ccx();
// Grab the TypeRef type of box_ptr, because that's what trans_raw_malloc // Grab the TypeRef type of box_ptr, because that's what trans_raw_malloc
// wants. // wants.
@ -308,7 +309,7 @@ fn trans_malloc_boxed_raw(bcx: block, t: ty::t,
// initializes the reference count to 1, and pulls out the body and rc // initializes the reference count to 1, and pulls out the body and rc
fn trans_malloc_boxed(bcx: block, t: ty::t) -> fn trans_malloc_boxed(bcx: block, t: ty::t) ->
{bcx: block, box: ValueRef, body: ValueRef} { {bcx: block, box: ValueRef, body: ValueRef} {
let ti = none; let mut ti = none;
let {bcx, val:box} = trans_malloc_boxed_raw(bcx, t, ti); let {bcx, val:box} = trans_malloc_boxed_raw(bcx, t, ti);
let body = GEPi(bcx, box, [0, abi::box_field_body]); let body = GEPi(bcx, box, [0, abi::box_field_body]);
ret {bcx: bcx, box: box, body: body}; ret {bcx: bcx, box: box, body: body};
@ -317,7 +318,7 @@ fn trans_malloc_boxed(bcx: block, t: ty::t) ->
// Type descriptor and type glue stuff // Type descriptor and type glue stuff
fn get_tydesc_simple(bcx: block, t: ty::t) -> result { fn get_tydesc_simple(bcx: block, t: ty::t) -> result {
let ti = none; let mut ti = none;
get_tydesc(bcx, t, ti) get_tydesc(bcx, t, ti)
} }
@ -396,12 +397,10 @@ fn note_unique_llvm_symbol(ccx: @crate_ctxt, sym: str) {
// Generates the declaration for (but doesn't emit) a type descriptor. // Generates the declaration for (but doesn't emit) a type descriptor.
fn declare_tydesc(ccx: @crate_ctxt, t: ty::t) -> @tydesc_info { fn declare_tydesc(ccx: @crate_ctxt, t: ty::t) -> @tydesc_info {
log(debug, "+++ declare_tydesc " + ty_to_str(ccx.tcx, t)); log(debug, "+++ declare_tydesc " + ty_to_str(ccx.tcx, t));
let llsize;
let llalign;
let llty = type_of(ccx, t); let llty = type_of(ccx, t);
llsize = llsize_of(ccx, llty); let llsize = llsize_of(ccx, llty);
llalign = llalign_of(ccx, llty); let llalign = llalign_of(ccx, llty);
let name; let mut name;
//XXX this triggers duplicate LLVM symbols //XXX this triggers duplicate LLVM symbols
if false /*ccx.sess.opts.debuginfo*/ { if false /*ccx.sess.opts.debuginfo*/ {
name = mangle_internal_name_by_type_only(ccx, t, "tydesc"); name = mangle_internal_name_by_type_only(ccx, t, "tydesc");
@ -427,7 +426,7 @@ type glue_helper = fn@(block, ValueRef, ty::t);
fn declare_generic_glue(ccx: @crate_ctxt, t: ty::t, llfnty: TypeRef, fn declare_generic_glue(ccx: @crate_ctxt, t: ty::t, llfnty: TypeRef,
name: str) -> ValueRef { name: str) -> ValueRef {
let name = name; let name = name;
let fn_nm; let mut fn_nm;
//XXX this triggers duplicate LLVM symbols //XXX this triggers duplicate LLVM symbols
if false /*ccx.sess.opts.debuginfo*/ { if false /*ccx.sess.opts.debuginfo*/ {
fn_nm = mangle_internal_name_by_type_only(ccx, t, "glue_" + name); fn_nm = mangle_internal_name_by_type_only(ccx, t, "glue_" + name);
@ -527,7 +526,7 @@ fn emit_tydescs(ccx: @crate_ctxt) {
} }
fn make_take_glue(cx: block, v: ValueRef, t: ty::t) { fn make_take_glue(cx: block, v: ValueRef, t: ty::t) {
let bcx = cx; let mut bcx = cx;
// NB: v is a *pointer* to type t here, not a direct value. // NB: v is a *pointer* to type t here, not a direct value.
bcx = alt ty::get(t).struct { bcx = alt ty::get(t).struct {
ty::ty_box(_) | ty::ty_opaque_box { ty::ty_box(_) | ty::ty_opaque_box {
@ -567,7 +566,7 @@ fn incr_refcnt_of_boxed(cx: block, box_ptr: ValueRef) -> block {
maybe_validate_box(cx, box_ptr); maybe_validate_box(cx, box_ptr);
let rc_ptr = GEPi(cx, box_ptr, [0, abi::box_field_refcnt]); let rc_ptr = GEPi(cx, box_ptr, [0, abi::box_field_refcnt]);
let rc = Load(cx, rc_ptr); let rc = Load(cx, rc_ptr);
rc = Add(cx, rc, C_int(ccx, 1)); let rc = Add(cx, rc, C_int(ccx, 1));
Store(cx, rc, rc_ptr); Store(cx, rc, rc_ptr);
ret cx; ret cx;
} }
@ -819,10 +818,10 @@ fn iter_structural_ty(cx: block, av: ValueRef, t: ty::t,
if variant.args.len() == 0u { ret cx; } if variant.args.len() == 0u { ret cx; }
let fn_ty = variant.ctor_ty; let fn_ty = variant.ctor_ty;
let ccx = cx.ccx(); let ccx = cx.ccx();
let cx = cx; let mut cx = cx;
alt ty::get(fn_ty).struct { alt ty::get(fn_ty).struct {
ty::ty_fn({inputs: args, _}) { ty::ty_fn({inputs: args, _}) {
let j = 0u; let mut j = 0u;
let v_id = variant.id; let v_id = variant.id;
for a: ty::arg in args { for a: ty::arg in args {
let rslt = GEP_enum(cx, a_tup, tid, v_id, tps, j); let rslt = GEP_enum(cx, a_tup, tid, v_id, tps, j);
@ -841,10 +840,10 @@ fn iter_structural_ty(cx: block, av: ValueRef, t: ty::t,
/* /*
Typestate constraint that shows the unimpl case doesn't happen? Typestate constraint that shows the unimpl case doesn't happen?
*/ */
let cx = cx; let mut cx = cx;
alt ty::get(t).struct { alt ty::get(t).struct {
ty::ty_rec(fields) { ty::ty_rec(fields) {
let i: int = 0; let mut i: int = 0;
for fld: ty::field in fields { for fld: ty::field in fields {
let llfld_a = GEPi(cx, av, [0, i]); let llfld_a = GEPi(cx, av, [0, i]);
cx = f(cx, llfld_a, fld.mt.ty); cx = f(cx, llfld_a, fld.mt.ty);
@ -852,7 +851,7 @@ fn iter_structural_ty(cx: block, av: ValueRef, t: ty::t,
} }
} }
ty::ty_tup(args) { ty::ty_tup(args) {
let i = 0; let mut i = 0;
for arg in args { for arg in args {
let llfld_a = GEPi(cx, av, [0, i]); let llfld_a = GEPi(cx, av, [0, i]);
cx = f(cx, llfld_a, arg); cx = f(cx, llfld_a, arg);
@ -894,7 +893,7 @@ fn iter_structural_ty(cx: block, av: ValueRef, t: ty::t,
"enum-iter-variant-" + "enum-iter-variant-" +
int::to_str(variant.disr_val, 10u)); int::to_str(variant.disr_val, 10u));
AddCase(llswitch, C_int(ccx, variant.disr_val), variant_cx.llbb); AddCase(llswitch, C_int(ccx, variant.disr_val), variant_cx.llbb);
variant_cx = let variant_cx =
iter_variant(variant_cx, llunion_a_ptr, variant, tps, tid, f); iter_variant(variant_cx, llunion_a_ptr, variant, tps, tid, f);
Br(variant_cx, next_cx.llbb); Br(variant_cx, next_cx.llbb);
} }
@ -902,7 +901,7 @@ fn iter_structural_ty(cx: block, av: ValueRef, t: ty::t,
} }
ty::ty_class(did, tps) { ty::ty_class(did, tps) {
// a class is like a record type // a class is like a record type
let i: int = 0; let mut i: int = 0;
for fld: ty::field in ty::class_items_as_fields(cx.tcx(), did) { for fld: ty::field in ty::class_items_as_fields(cx.tcx(), did) {
let llfld_a = GEPi(cx, av, [0, i]); let llfld_a = GEPi(cx, av, [0, i]);
cx = f(cx, llfld_a, fld.mt.ty); cx = f(cx, llfld_a, fld.mt.ty);
@ -981,7 +980,7 @@ fn call_tydesc_glue_full(cx: block, v: ValueRef, tydesc: ValueRef,
lazily_emit_tydesc_glue(cx.ccx(), field, static_ti); lazily_emit_tydesc_glue(cx.ccx(), field, static_ti);
if cx.unreachable { ret; } if cx.unreachable { ret; }
let static_glue_fn = none; let mut static_glue_fn = none;
alt static_ti { alt static_ti {
none {/* no-op */ } none {/* no-op */ }
some(sti) { some(sti) {
@ -996,18 +995,18 @@ fn call_tydesc_glue_full(cx: block, v: ValueRef, tydesc: ValueRef,
} }
let llrawptr = PointerCast(cx, v, T_ptr(T_i8())); let llrawptr = PointerCast(cx, v, T_ptr(T_i8()));
let lltydescs = let lltydescs = GEPi(cx, tydesc, [0, abi::tydesc_field_first_param]);
GEPi(cx, tydesc, [0, abi::tydesc_field_first_param]); let lltydescs = Load(cx, lltydescs);
lltydescs = Load(cx, lltydescs);
let llfn; let llfn = {
alt static_glue_fn { alt static_glue_fn {
none { none {
let llfnptr = GEPi(cx, tydesc, [0, field]); let llfnptr = GEPi(cx, tydesc, [0, field]);
llfn = Load(cx, llfnptr); Load(cx, llfnptr)
} }
some(sgf) { llfn = sgf; } some(sgf) { sgf }
} }
};
Call(cx, llfn, [C_null(T_ptr(T_nil())), C_null(T_ptr(T_nil())), Call(cx, llfn, [C_null(T_ptr(T_nil())), C_null(T_ptr(T_nil())),
lltydescs, llrawptr]); lltydescs, llrawptr]);
@ -1015,7 +1014,7 @@ fn call_tydesc_glue_full(cx: block, v: ValueRef, tydesc: ValueRef,
fn call_tydesc_glue(cx: block, v: ValueRef, t: ty::t, field: int) -> fn call_tydesc_glue(cx: block, v: ValueRef, t: ty::t, field: int) ->
block { block {
let ti: option<@tydesc_info> = none::<@tydesc_info>; let mut ti: option<@tydesc_info> = none;
let {bcx: bcx, val: td} = get_tydesc(cx, t, ti); let {bcx: bcx, val: td} = get_tydesc(cx, t, ti);
call_tydesc_glue_full(bcx, v, td, field, ti); call_tydesc_glue_full(bcx, v, td, field, ti);
ret bcx; ret bcx;
@ -1030,19 +1029,18 @@ fn call_cmp_glue(cx: block, lhs: ValueRef, rhs: ValueRef, t: ty::t,
let r = spill_if_immediate(bcx, lhs, t); let r = spill_if_immediate(bcx, lhs, t);
let lllhs = r.val; let lllhs = r.val;
bcx = r.bcx; let bcx = r.bcx;
r = spill_if_immediate(bcx, rhs, t); let r = spill_if_immediate(bcx, rhs, t);
let llrhs = r.val; let llrhs = r.val;
bcx = r.bcx; let bcx = r.bcx;
let llrawlhsptr = BitCast(bcx, lllhs, T_ptr(T_i8())); let llrawlhsptr = BitCast(bcx, lllhs, T_ptr(T_i8()));
let llrawrhsptr = BitCast(bcx, llrhs, T_ptr(T_i8())); let llrawrhsptr = BitCast(bcx, llrhs, T_ptr(T_i8()));
r = get_tydesc_simple(bcx, t); let r = get_tydesc_simple(bcx, t);
let lltydesc = r.val; let lltydesc = r.val;
bcx = r.bcx; let bcx = r.bcx;
let lltydescs = let lltydescs = GEPi(bcx, lltydesc, [0, abi::tydesc_field_first_param]);
GEPi(bcx, lltydesc, [0, abi::tydesc_field_first_param]); let lltydescs = Load(bcx, lltydescs);
lltydescs = Load(bcx, lltydescs);
let llfn = bcx.ccx().upcalls.cmp_type; let llfn = bcx.ccx().upcalls.cmp_type;
@ -1161,7 +1159,8 @@ fn copy_val(cx: block, action: copy_action, dst: ValueRef,
fn copy_val_no_check(bcx: block, action: copy_action, dst: ValueRef, fn copy_val_no_check(bcx: block, action: copy_action, dst: ValueRef,
src: ValueRef, t: ty::t) -> block { src: ValueRef, t: ty::t) -> block {
let ccx = bcx.ccx(), bcx = bcx; let ccx = bcx.ccx();
let mut bcx = bcx;
if ty::type_is_scalar(t) { if ty::type_is_scalar(t) {
Store(bcx, src, dst); Store(bcx, src, dst);
ret bcx; ret bcx;
@ -1190,8 +1189,9 @@ fn copy_val_no_check(bcx: block, action: copy_action, dst: ValueRef,
// doesn't need to be dropped. // doesn't need to be dropped.
fn move_val(cx: block, action: copy_action, dst: ValueRef, fn move_val(cx: block, action: copy_action, dst: ValueRef,
src: lval_result, t: ty::t) -> block { src: lval_result, t: ty::t) -> block {
let src_val = src.val; let mut src_val = src.val;
let tcx = cx.tcx(), cx = cx; let tcx = cx.tcx();
let mut cx = cx;
if ty::type_is_scalar(t) { if ty::type_is_scalar(t) {
if src.kind == owned { src_val = Load(cx, src_val); } if src.kind == owned { src_val = Load(cx, src_val); }
Store(cx, src_val, dst); Store(cx, src_val, dst);
@ -1285,7 +1285,7 @@ fn trans_unary(bcx: block, op: ast::unop, e: @ast::expr,
ret store_in_dest(bcx, neg, dest); ret store_in_dest(bcx, neg, dest);
} }
ast::box(_) { ast::box(_) {
let {bcx, box, body} = trans_malloc_boxed(bcx, e_ty); let mut {bcx, box, body} = trans_malloc_boxed(bcx, e_ty);
add_clean_free(bcx, box, false); add_clean_free(bcx, box, false);
// Cast the body type to the type of the value. This is needed to // Cast the body type to the type of the value. This is needed to
// make enums work, since enums have a different LLVM type depending // make enums work, since enums have a different LLVM type depending
@ -1309,7 +1309,7 @@ fn trans_unary(bcx: block, op: ast::unop, e: @ast::expr,
} }
fn trans_addr_of(cx: block, e: @ast::expr, dest: dest) -> block { fn trans_addr_of(cx: block, e: @ast::expr, dest: dest) -> block {
let {bcx, val, kind} = trans_temp_lval(cx, e); let mut {bcx, val, kind} = trans_temp_lval(cx, e);
let ety = expr_ty(cx, e); let ety = expr_ty(cx, e);
let is_immediate = ty::type_is_immediate(ety); let is_immediate = ty::type_is_immediate(ety);
if (kind == temporary && is_immediate) || kind == owned_imm { if (kind == temporary && is_immediate) || kind == owned_imm {
@ -1327,13 +1327,14 @@ fn trans_compare(cx: block, op: ast::binop, lhs: ValueRef,
} }
// Determine the operation we need. // Determine the operation we need.
let llop; let llop = {
alt op { alt op {
ast::eq | ast::ne { llop = C_u8(abi::cmp_glue_op_eq); } ast::eq | ast::ne { C_u8(abi::cmp_glue_op_eq) }
ast::lt | ast::ge { llop = C_u8(abi::cmp_glue_op_lt); } ast::lt | ast::ge { C_u8(abi::cmp_glue_op_lt) }
ast::le | ast::gt { llop = C_u8(abi::cmp_glue_op_le); } ast::le | ast::gt { C_u8(abi::cmp_glue_op_le) }
_ { cx.tcx().sess.bug("trans_compare got non-comparison-op"); } _ { cx.tcx().sess.bug("trans_compare got non-comparison-op"); }
} }
};
let rs = call_cmp_glue(cx, lhs, rhs, rhs_t, llop); let rs = call_cmp_glue(cx, lhs, rhs, rhs_t, llop);
@ -1392,8 +1393,10 @@ fn trans_eager_binop(cx: block, op: ast::binop, lhs: ValueRef,
lhs_t: ty::t, rhs: ValueRef, rhs_t: ty::t, dest: dest) lhs_t: ty::t, rhs: ValueRef, rhs_t: ty::t, dest: dest)
-> block { -> block {
if dest == ignore { ret cx; } if dest == ignore { ret cx; }
let intype = lhs_t; let intype = {
if ty::type_is_bot(intype) { intype = rhs_t; } if ty::type_is_bot(lhs_t) { rhs_t }
else { lhs_t }
};
let is_float = ty::type_is_fp(intype); let is_float = ty::type_is_fp(intype);
let rhs = cast_shift_expr_rhs(cx, op, lhs, rhs); let rhs = cast_shift_expr_rhs(cx, op, lhs, rhs);
@ -1401,7 +1404,8 @@ fn trans_eager_binop(cx: block, op: ast::binop, lhs: ValueRef,
if op == ast::add && ty::type_is_sequence(intype) { if op == ast::add && ty::type_is_sequence(intype) {
ret tvec::trans_add(cx, intype, lhs, rhs, dest); ret tvec::trans_add(cx, intype, lhs, rhs, dest);
} }
let cx = cx, val = alt op { let mut cx = cx;
let val = alt op {
ast::add { ast::add {
if is_float { FAdd(cx, lhs, rhs) } if is_float { FAdd(cx, lhs, rhs) }
else { Add(cx, lhs, rhs) } else { Add(cx, lhs, rhs) }
@ -1487,8 +1491,8 @@ fn trans_assign_op(bcx: block, ex: @ast::expr, op: ast::binop,
} }
fn autoderef(cx: block, v: ValueRef, t: ty::t) -> result_t { fn autoderef(cx: block, v: ValueRef, t: ty::t) -> result_t {
let v1: ValueRef = v; let mut v1: ValueRef = v;
let t1: ty::t = t; let mut t1: ty::t = t;
let ccx = cx.ccx(); let ccx = cx.ccx();
loop { loop {
alt ty::get(t1).struct { alt ty::get(t1).struct {
@ -1602,7 +1606,7 @@ fn trans_if(cx: block, cond: @ast::expr, thn: ast::blk,
option::may(els) {|e| else_cx.block_span = some(e.span); } option::may(els) {|e| else_cx.block_span = some(e.span); }
CondBr(bcx, cond_val, then_cx.llbb, else_cx.llbb); CondBr(bcx, cond_val, then_cx.llbb, else_cx.llbb);
let then_bcx = trans_block(then_cx, thn, then_dest); let then_bcx = trans_block(then_cx, thn, then_dest);
then_bcx = trans_block_cleanups(then_bcx, then_cx); let then_bcx = trans_block_cleanups(then_bcx, then_cx);
// Calling trans_block directly instead of trans_expr // Calling trans_block directly instead of trans_expr
// because trans_expr will create another scope block // because trans_expr will create another scope block
// context for the block, but we've already got the // context for the block, but we've already got the
@ -1623,7 +1627,7 @@ fn trans_if(cx: block, cond: @ast::expr, thn: ast::blk,
} }
_ { else_cx } _ { else_cx }
}; };
else_bcx = trans_block_cleanups(else_bcx, else_cx); let else_bcx = trans_block_cleanups(else_bcx, else_cx);
ret join_returns(cx, [then_bcx, else_bcx], [then_dest, else_dest], dest); ret join_returns(cx, [then_bcx, else_bcx], [then_dest, else_dest], dest);
} }
@ -1640,7 +1644,7 @@ fn trans_for(cx: block, local: @ast::local, seq: @ast::expr,
T_ptr(type_of(bcx.ccx(), t))); T_ptr(type_of(bcx.ccx(), t)));
let bcx = alt::bind_irrefutable_pat(scope_cx, local.node.pat, let bcx = alt::bind_irrefutable_pat(scope_cx, local.node.pat,
curr, false); curr, false);
bcx = trans_block(bcx, body, ignore); let bcx = trans_block(bcx, body, ignore);
cleanup_and_Br(bcx, scope_cx, next_cx.llbb); cleanup_and_Br(bcx, scope_cx, next_cx.llbb);
ret next_cx; ret next_cx;
} }
@ -1649,7 +1653,7 @@ fn trans_for(cx: block, local: @ast::local, seq: @ast::expr,
let seq_ty = expr_ty(cx, seq); let seq_ty = expr_ty(cx, seq);
let {bcx: bcx, val: seq} = trans_temp_expr(cx, seq); let {bcx: bcx, val: seq} = trans_temp_expr(cx, seq);
let seq = PointerCast(bcx, seq, T_ptr(ccx.opaque_vec_type)); let seq = PointerCast(bcx, seq, T_ptr(ccx.opaque_vec_type));
let fill = tvec::get_fill(bcx, seq); let mut fill = tvec::get_fill(bcx, seq);
if ty::type_is_str(seq_ty) { if ty::type_is_str(seq_ty) {
fill = Sub(bcx, fill, C_int(ccx, 1)); fill = Sub(bcx, fill, C_int(ccx, 1));
} }
@ -1777,9 +1781,9 @@ fn make_mono_id(ccx: @crate_ctxt, item: ast::def_id, substs: [ty::t],
let precise_param_ids = alt vtables { let precise_param_ids = alt vtables {
some(vts) { some(vts) {
let bounds = ty::lookup_item_type(ccx.tcx, item).bounds; let bounds = ty::lookup_item_type(ccx.tcx, item).bounds;
let i = 0u; let mut i = 0u;
vec::map2(*bounds, substs, {|bounds, subst| vec::map2(*bounds, substs, {|bounds, subst|
let v = []; let mut v = [];
for bound in *bounds { for bound in *bounds {
alt bound { alt bound {
ty::bound_iface(_) { ty::bound_iface(_) {
@ -1848,7 +1852,7 @@ fn monomorphic_fn(ccx: @crate_ctxt, fn_id: ast::def_id, real_substs: [ty::t],
} }
let tpt = ty::lookup_item_type(ccx.tcx, fn_id); let tpt = ty::lookup_item_type(ccx.tcx, fn_id);
let item_ty = tpt.ty; let mut item_ty = tpt.ty;
let map_node = ccx.tcx.items.get(fn_id.node); let map_node = ccx.tcx.items.get(fn_id.node);
// Get the path so that we can create a symbol // Get the path so that we can create a symbol
@ -1949,7 +1953,7 @@ fn maybe_instantiate_inline(ccx: @crate_ctxt, fn_id: ast::def_id)
} }
csearch::found_parent(parent_id, ast::ii_item(item)) { csearch::found_parent(parent_id, ast::ii_item(item)) {
ccx.external.insert(parent_id, some(item.id)); ccx.external.insert(parent_id, some(item.id));
let my_id = 0; let mut my_id = 0;
alt check item.node { alt check item.node {
ast::item_enum(_, _) { ast::item_enum(_, _) {
let vs_here = ty::enum_variants(ccx.tcx, local_def(item.id)); let vs_here = ty::enum_variants(ccx.tcx, local_def(item.id));
@ -1995,9 +1999,11 @@ fn lval_intrinsic_fn(bcx: block, val: ValueRef, tys: [ty::t],
vec::tailn(args, first_real_arg), out_ty) vec::tailn(args, first_real_arg), out_ty)
} }
let bcx = bcx, ccx = bcx.ccx(); let mut bcx = bcx;
let ccx = bcx.ccx();
let tds = vec::map(tys, {|t| let tds = vec::map(tys, {|t|
let ti = none, td_res = get_tydesc(bcx, t, ti); let mut ti = none;
let td_res = get_tydesc(bcx, t, ti);
bcx = td_res.bcx; bcx = td_res.bcx;
lazily_emit_all_tydesc_glue(ccx, ti); lazily_emit_all_tydesc_glue(ccx, ti);
td_res.val td_res.val
@ -2029,8 +2035,8 @@ fn lval_static_fn_inner(bcx: block, fn_id: ast::def_id, id: ast::node_id,
} else { fn_id }; } else { fn_id };
if fn_id.crate == ast::local_crate && tys.len() > 0u { if fn_id.crate == ast::local_crate && tys.len() > 0u {
let {val, must_cast, intrinsic} = monomorphic_fn(ccx, fn_id, tys, let mut {val, must_cast, intrinsic} =
vtables); monomorphic_fn(ccx, fn_id, tys, vtables);
if intrinsic { ret lval_intrinsic_fn(bcx, val, tys, id); } if intrinsic { ret lval_intrinsic_fn(bcx, val, tys, id); }
if must_cast { if must_cast {
val = PointerCast(bcx, val, T_ptr(type_of_fn_from_ty( val = PointerCast(bcx, val, T_ptr(type_of_fn_from_ty(
@ -2039,7 +2045,7 @@ fn lval_static_fn_inner(bcx: block, fn_id: ast::def_id, id: ast::node_id,
ret {bcx: bcx, val: val, kind: owned, env: null_env, tds: none}; ret {bcx: bcx, val: val, kind: owned, env: null_env, tds: none};
} }
let val = if fn_id.crate == ast::local_crate { let mut val = if fn_id.crate == ast::local_crate {
// Internal reference. // Internal reference.
get_item_val(ccx, fn_id.node) get_item_val(ccx, fn_id.node)
} else { } else {
@ -2218,14 +2224,15 @@ fn trans_index(cx: block, ex: @ast::expr, base: @ast::expr,
let ccx = cx.ccx(); let ccx = cx.ccx();
// Cast to an LLVM integer. Rust is less strict than LLVM in this regard. // Cast to an LLVM integer. Rust is less strict than LLVM in this regard.
let ix_val;
let ix_size = llsize_of_real(cx.ccx(), val_ty(ix.val)); let ix_size = llsize_of_real(cx.ccx(), val_ty(ix.val));
let int_size = llsize_of_real(cx.ccx(), ccx.int_type); let int_size = llsize_of_real(cx.ccx(), ccx.int_type);
if ix_size < int_size { let ix_val = if ix_size < int_size {
ix_val = ZExt(bcx, ix.val, ccx.int_type); ZExt(bcx, ix.val, ccx.int_type)
} else if ix_size > int_size { } else if ix_size > int_size {
ix_val = Trunc(bcx, ix.val, ccx.int_type); Trunc(bcx, ix.val, ccx.int_type)
} else { ix_val = ix.val; } } else {
ix.val
};
let unit_ty = node_id_type(cx, ex.id); let unit_ty = node_id_type(cx, ex.id);
let llunitty = type_of(ccx, unit_ty); let llunitty = type_of(ccx, unit_ty);
@ -2236,7 +2243,7 @@ fn trans_index(cx: block, ex: @ast::expr, base: @ast::expr,
let lim = tvec::get_fill(bcx, v); let lim = tvec::get_fill(bcx, v);
let body = tvec::get_dataptr(bcx, v, type_of(ccx, unit_ty)); let body = tvec::get_dataptr(bcx, v, type_of(ccx, unit_ty));
let bounds_check = ICmp(bcx, lib::llvm::IntUGE, scaled_ix, lim); let bounds_check = ICmp(bcx, lib::llvm::IntUGE, scaled_ix, lim);
bcx = with_cond(bcx, bounds_check) {|bcx| let bcx = with_cond(bcx, bounds_check) {|bcx|
// fail: bad bounds check. // fail: bad bounds check.
trans_fail(bcx, some(ex.span), "bounds check") trans_fail(bcx, some(ex.span), "bounds check")
}; };
@ -2440,8 +2447,8 @@ fn trans_arg_expr(cx: block, arg: ty::arg, lldestty: TypeRef, e: @ast::expr,
let e_ty = expr_ty(cx, e); let e_ty = expr_ty(cx, e);
let is_bot = ty::type_is_bot(e_ty); let is_bot = ty::type_is_bot(e_ty);
let lv = trans_temp_lval(cx, e); let lv = trans_temp_lval(cx, e);
let bcx = lv.bcx; let mut bcx = lv.bcx;
let val = lv.val; let mut val = lv.val;
let arg_mode = ty::resolved_mode(ccx.tcx, arg.mode); let arg_mode = ty::resolved_mode(ccx.tcx, arg.mode);
if is_bot { if is_bot {
// For values of type _|_, we generate an // For values of type _|_, we generate an
@ -2450,7 +2457,8 @@ fn trans_arg_expr(cx: block, arg: ty::arg, lldestty: TypeRef, e: @ast::expr,
// to have type lldestty (the callee's expected type). // to have type lldestty (the callee's expected type).
val = llvm::LLVMGetUndef(lldestty); val = llvm::LLVMGetUndef(lldestty);
} else if arg_mode == ast::by_ref || arg_mode == ast::by_val { } else if arg_mode == ast::by_ref || arg_mode == ast::by_val {
let copied = false, imm = ty::type_is_immediate(e_ty); let mut copied = false;
let imm = ty::type_is_immediate(e_ty);
if arg_mode == ast::by_ref && lv.kind != owned && imm { if arg_mode == ast::by_ref && lv.kind != owned && imm {
val = do_spill_noroot(bcx, val); val = do_spill_noroot(bcx, val);
copied = true; copied = true;
@ -2515,12 +2523,12 @@ fn trans_args(cx: block, llenv: ValueRef, args: call_args, fn_ty: ty::t,
dest: dest, generic_intrinsic: bool) dest: dest, generic_intrinsic: bool)
-> {bcx: block, args: [ValueRef], retslot: ValueRef} { -> {bcx: block, args: [ValueRef], retslot: ValueRef} {
let temp_cleanups = []; let mut temp_cleanups = [];
let arg_tys = ty::ty_fn_args(fn_ty); let arg_tys = ty::ty_fn_args(fn_ty);
let llargs: [ValueRef] = []; let mut llargs: [ValueRef] = [];
let ccx = cx.ccx(); let ccx = cx.ccx();
let bcx = cx; let mut bcx = cx;
let retty = ty::ty_fn_ret(fn_ty); let retty = ty::ty_fn_ret(fn_ty);
// Arg 0: Output pointer. // Arg 0: Output pointer.
@ -2555,7 +2563,7 @@ fn trans_args(cx: block, llenv: ValueRef, args: call_args, fn_ty: ty::t,
alt args { alt args {
arg_exprs(es) { arg_exprs(es) {
let llarg_tys = type_of_explicit_args(ccx, arg_tys); let llarg_tys = type_of_explicit_args(ccx, arg_tys);
let i = 0u; let mut i = 0u;
for e: @ast::expr in es { for e: @ast::expr in es {
let r = trans_arg_expr(bcx, arg_tys[i], llarg_tys[i], let r = trans_arg_expr(bcx, arg_tys[i], llarg_tys[i],
e, temp_cleanups); e, temp_cleanups);
@ -2594,9 +2602,10 @@ fn trans_call_inner(in_cx: block, fn_expr_ty: ty::t, ret_ty: ty::t,
-> block { -> block {
with_scope(in_cx, "call") {|cx| with_scope(in_cx, "call") {|cx|
let f_res = get_callee(cx); let f_res = get_callee(cx);
let bcx = f_res.bcx, ccx = cx.ccx(); let mut bcx = f_res.bcx;
let ccx = cx.ccx();
let faddr = f_res.val; let mut faddr = f_res.val;
let llenv = alt f_res.env { let llenv = alt f_res.env {
null_env { null_env {
llvm::LLVMGetUndef(T_opaque_box_ptr(ccx)) llvm::LLVMGetUndef(T_opaque_box_ptr(ccx))
@ -2622,7 +2631,7 @@ fn trans_call_inner(in_cx: block, fn_expr_ty: ty::t, ret_ty: ty::t,
option::is_some(f_res.tds)) option::is_some(f_res.tds))
}; };
bcx = args_res.bcx; bcx = args_res.bcx;
let llargs = args_res.args; let mut llargs = args_res.args;
option::may(f_res.tds) {|vals| option::may(f_res.tds) {|vals|
llargs = vec::slice(llargs, 0u, first_real_arg) + vals + llargs = vec::slice(llargs, 0u, first_real_arg) + vals +
vec::tailn(llargs, first_real_arg); vec::tailn(llargs, first_real_arg);
@ -2680,7 +2689,7 @@ fn invoke_(bcx: block, llfn: ValueRef, llargs: [ValueRef],
fn get_landing_pad(bcx: block) -> BasicBlockRef { fn get_landing_pad(bcx: block) -> BasicBlockRef {
fn in_lpad_scope_cx(bcx: block, f: fn(scope_info)) { fn in_lpad_scope_cx(bcx: block, f: fn(scope_info)) {
let bcx = bcx; let mut bcx = bcx;
loop { loop {
alt bcx.kind { alt bcx.kind {
block_scope(info) { block_scope(info) {
@ -2694,7 +2703,7 @@ fn get_landing_pad(bcx: block) -> BasicBlockRef {
} }
} }
let cached = none, pad_bcx = bcx; // Guaranteed to be set below let mut cached = none, pad_bcx = bcx; // Guaranteed to be set below
in_lpad_scope_cx(bcx) {|info| in_lpad_scope_cx(bcx) {|info|
// If there is a valid landing pad still around, use it // If there is a valid landing pad still around, use it
alt info.landing_pad { alt info.landing_pad {
@ -2740,7 +2749,7 @@ fn get_landing_pad(bcx: block) -> BasicBlockRef {
} }
fn trans_tup(bcx: block, elts: [@ast::expr], dest: dest) -> block { fn trans_tup(bcx: block, elts: [@ast::expr], dest: dest) -> block {
let bcx = bcx; let mut bcx = bcx;
let addr = alt dest { let addr = alt dest {
ignore { ignore {
for ex in elts { bcx = trans_expr(bcx, ex, ignore); } for ex in elts { bcx = trans_expr(bcx, ex, ignore); }
@ -2749,7 +2758,7 @@ fn trans_tup(bcx: block, elts: [@ast::expr], dest: dest) -> block {
save_in(pos) { pos } save_in(pos) { pos }
_ { bcx.tcx().sess.bug("trans_tup: weird dest"); } _ { bcx.tcx().sess.bug("trans_tup: weird dest"); }
}; };
let temp_cleanups = [], i = 0; let mut temp_cleanups = [], i = 0;
for e in elts { for e in elts {
let dst = GEPi(bcx, addr, [0, i]); let dst = GEPi(bcx, addr, [0, i]);
let e_ty = expr_ty(bcx, e); let e_ty = expr_ty(bcx, e);
@ -2766,7 +2775,7 @@ fn trans_rec(bcx: block, fields: [ast::field],
base: option<@ast::expr>, id: ast::node_id, base: option<@ast::expr>, id: ast::node_id,
dest: dest) -> block { dest: dest) -> block {
let t = node_id_type(bcx, id); let t = node_id_type(bcx, id);
let bcx = bcx; let mut bcx = bcx;
let addr = alt dest { let addr = alt dest {
ignore { ignore {
for fld in fields { for fld in fields {
@ -2782,7 +2791,7 @@ fn trans_rec(bcx: block, fields: [ast::field],
ty::ty_rec(f) { f } ty::ty_rec(f) { f }
_ { bcx.tcx().sess.bug("trans_rec: id doesn't\ _ { bcx.tcx().sess.bug("trans_rec: id doesn't\
have a record type") } }; have a record type") } };
let temp_cleanups = []; let mut temp_cleanups = [];
for fld in fields { for fld in fields {
let ix = option::get(vec::position(ty_fields, {|ft| let ix = option::get(vec::position(ty_fields, {|ft|
str::eq(fld.node.ident, ft.ident) str::eq(fld.node.ident, ft.ident)
@ -2794,7 +2803,8 @@ fn trans_rec(bcx: block, fields: [ast::field],
} }
alt base { alt base {
some(bexp) { some(bexp) {
let {bcx: cx, val: base_val} = trans_temp_expr(bcx, bexp), i = 0; let {bcx: cx, val: base_val} = trans_temp_expr(bcx, bexp);
let mut i = 0;
bcx = cx; bcx = cx;
// Copy over inherited fields // Copy over inherited fields
for tf in ty_fields { for tf in ty_fields {
@ -2831,7 +2841,7 @@ fn trans_expr_save_in(bcx: block, e: @ast::expr, dest: ValueRef)
// trans_expr_save_in. For intermediates where you don't care about lval-ness, // trans_expr_save_in. For intermediates where you don't care about lval-ness,
// use trans_temp_expr. // use trans_temp_expr.
fn trans_temp_lval(bcx: block, e: @ast::expr) -> lval_result { fn trans_temp_lval(bcx: block, e: @ast::expr) -> lval_result {
let bcx = bcx; let mut bcx = bcx;
if expr_is_lval(bcx, e) { if expr_is_lval(bcx, e) {
ret trans_lval(bcx, e); ret trans_lval(bcx, e);
} else { } else {
@ -2846,7 +2856,7 @@ fn trans_temp_lval(bcx: block, e: @ast::expr) -> lval_result {
ret {bcx: bcx, val: *cell, kind: temporary}; ret {bcx: bcx, val: *cell, kind: temporary};
} else { } else {
let {bcx, val: scratch} = alloc_ty(bcx, ty); let {bcx, val: scratch} = alloc_ty(bcx, ty);
bcx = trans_expr_save_in(bcx, e, scratch); let bcx = trans_expr_save_in(bcx, e, scratch);
add_clean_temp(bcx, scratch, ty); add_clean_temp(bcx, scratch, ty);
ret {bcx: bcx, val: scratch, kind: temporary}; ret {bcx: bcx, val: scratch, kind: temporary};
} }
@ -2856,7 +2866,7 @@ fn trans_temp_lval(bcx: block, e: @ast::expr) -> lval_result {
// Use only for intermediate values. See trans_expr and trans_expr_save_in for // Use only for intermediate values. See trans_expr and trans_expr_save_in for
// expressions that must 'end up somewhere' (or get ignored). // expressions that must 'end up somewhere' (or get ignored).
fn trans_temp_expr(bcx: block, e: @ast::expr) -> result { fn trans_temp_expr(bcx: block, e: @ast::expr) -> result {
let {bcx, val, kind} = trans_temp_lval(bcx, e); let mut {bcx, val, kind} = trans_temp_lval(bcx, e);
if kind == owned { if kind == owned {
val = load_if_immediate(bcx, val, expr_ty(bcx, e)); val = load_if_immediate(bcx, val, expr_ty(bcx, e));
} }
@ -3044,8 +3054,8 @@ fn trans_expr(bcx: block, e: @ast::expr, dest: dest) -> block {
let t = expr_ty(bcx, src); let t = expr_ty(bcx, src);
let {bcx: bcx, val: tmp_alloc} = alloc_ty(rhs_res.bcx, t); let {bcx: bcx, val: tmp_alloc} = alloc_ty(rhs_res.bcx, t);
// Swap through a temporary. // Swap through a temporary.
bcx = move_val(bcx, INIT, tmp_alloc, lhs_res, t); let bcx = move_val(bcx, INIT, tmp_alloc, lhs_res, t);
bcx = move_val(bcx, INIT, lhs_res.val, rhs_res, t); let bcx = move_val(bcx, INIT, lhs_res.val, rhs_res, t);
ret move_val(bcx, INIT, rhs_res.val, lval_owned(bcx, tmp_alloc), t); ret move_val(bcx, INIT, rhs_res.val, lval_owned(bcx, tmp_alloc), t);
} }
ast::expr_assign_op(op, dst, src) { ast::expr_assign_op(op, dst, src) {
@ -3099,7 +3109,7 @@ fn trans_expr(bcx: block, e: @ast::expr, dest: dest) -> block {
fn lval_to_dps(bcx: block, e: @ast::expr, dest: dest) -> block { fn lval_to_dps(bcx: block, e: @ast::expr, dest: dest) -> block {
let lv = trans_lval(bcx, e), ccx = bcx.ccx(); let lv = trans_lval(bcx, e), ccx = bcx.ccx();
let {bcx, val, kind} = lv; let mut {bcx, val, kind} = lv;
let last_use = kind == owned && ccx.maps.last_uses.contains_key(e.id); let last_use = kind == owned && ccx.maps.last_uses.contains_key(e.id);
let ty = expr_ty(bcx, e); let ty = expr_ty(bcx, e);
alt dest { alt dest {
@ -3129,7 +3139,7 @@ fn lval_to_dps(bcx: block, e: @ast::expr, dest: dest) -> block {
fn do_spill(cx: block, v: ValueRef, t: ty::t) -> result { fn do_spill(cx: block, v: ValueRef, t: ty::t) -> result {
// We have a value but we have to spill it, and root it, to pass by alias. // We have a value but we have to spill it, and root it, to pass by alias.
let bcx = cx; let mut bcx = cx;
if ty::type_is_bot(t) { if ty::type_is_bot(t) {
ret rslt(bcx, C_null(T_ptr(T_i8()))); ret rslt(bcx, C_null(T_ptr(T_i8())));
@ -3219,7 +3229,7 @@ fn trans_check_expr(bcx: block, e: @ast::expr, s: str) -> block {
fn trans_fail_expr(bcx: block, sp_opt: option<span>, fn trans_fail_expr(bcx: block, sp_opt: option<span>,
fail_expr: option<@ast::expr>) -> block { fail_expr: option<@ast::expr>) -> block {
let bcx = bcx; let mut bcx = bcx;
alt fail_expr { alt fail_expr {
some(expr) { some(expr) {
let ccx = bcx.ccx(), tcx = ccx.tcx; let ccx = bcx.ccx(), tcx = ccx.tcx;
@ -3253,19 +3263,20 @@ fn trans_fail(bcx: block, sp_opt: option<span>, fail_str: str) ->
fn trans_fail_value(bcx: block, sp_opt: option<span>, fn trans_fail_value(bcx: block, sp_opt: option<span>,
V_fail_str: ValueRef) -> block { V_fail_str: ValueRef) -> block {
let ccx = bcx.ccx(); let ccx = bcx.ccx();
let V_filename; let {V_filename, V_line} = alt sp_opt {
let V_line;
alt sp_opt {
some(sp) { some(sp) {
let sess = bcx.sess(); let sess = bcx.sess();
let loc = codemap::lookup_char_pos(sess.parse_sess.cm, sp.lo); let loc = codemap::lookup_char_pos(sess.parse_sess.cm, sp.lo);
V_filename = C_cstr(bcx.ccx(), loc.file.name); {V_filename: C_cstr(bcx.ccx(), loc.file.name),
V_line = loc.line as int; V_line: loc.line as int}
} }
none { V_filename = C_cstr(bcx.ccx(), "<runtime>"); V_line = 0; } none {
} {V_filename: C_cstr(bcx.ccx(), "<runtime>"),
V_line: 0}
}
};
let V_str = PointerCast(bcx, V_fail_str, T_ptr(T_i8())); let V_str = PointerCast(bcx, V_fail_str, T_ptr(T_i8()));
V_filename = PointerCast(bcx, V_filename, T_ptr(T_i8())); let V_filename = PointerCast(bcx, V_filename, T_ptr(T_i8()));
let args = [V_str, V_filename, C_int(ccx, V_line)]; let args = [V_str, V_filename, C_int(ccx, V_line)];
let bcx = invoke(bcx, bcx.ccx().upcalls._fail, args); let bcx = invoke(bcx, bcx.ccx().upcalls._fail, args);
Unreachable(bcx); Unreachable(bcx);
@ -3275,7 +3286,8 @@ fn trans_fail_value(bcx: block, sp_opt: option<span>,
fn trans_break_cont(bcx: block, to_end: bool) fn trans_break_cont(bcx: block, to_end: bool)
-> block { -> block {
// Locate closest loop block, outputting cleanup as we go. // Locate closest loop block, outputting cleanup as we go.
let unwind = bcx, target = bcx; let mut unwind = bcx;
let mut target = bcx;
loop { loop {
alt unwind.kind { alt unwind.kind {
block_scope({is_loop: some({cnt, brk}), _}) { block_scope({is_loop: some({cnt, brk}), _}) {
@ -3313,7 +3325,7 @@ fn trans_cont(cx: block) -> block {
} }
fn trans_ret(bcx: block, e: option<@ast::expr>) -> block { fn trans_ret(bcx: block, e: option<@ast::expr>) -> block {
let bcx = bcx; let mut bcx = bcx;
alt e { alt e {
some(x) { bcx = trans_expr_save_in(bcx, x, bcx.fcx.llretptr); } some(x) { bcx = trans_expr_save_in(bcx, x, bcx.fcx.llretptr); }
_ {} _ {}
@ -3347,7 +3359,7 @@ fn init_local(bcx: block, local: @ast::local) -> block {
"init_local: Someone forgot to document why it's\ "init_local: Someone forgot to document why it's\
safe to assume local.node.init isn't none!"); } safe to assume local.node.init isn't none!"); }
}; };
let {bcx, val, kind} = trans_temp_lval(bcx, initexpr); let mut {bcx, val, kind} = trans_temp_lval(bcx, initexpr);
if kind != temporary { if kind != temporary {
if kind == owned { val = Load(bcx, val); } if kind == owned { val = Load(bcx, val); }
let rs = take_ty_immediate(bcx, val, ty); let rs = take_ty_immediate(bcx, val, ty);
@ -3359,7 +3371,7 @@ fn init_local(bcx: block, local: @ast::local) -> block {
} }
}; };
let bcx = bcx; let mut bcx = bcx;
alt local.node.init { alt local.node.init {
some(init) { some(init) {
if init.op == ast::init_assign || !expr_is_lval(bcx, init.expr) { if init.op == ast::init_assign || !expr_is_lval(bcx, init.expr) {
@ -3392,7 +3404,7 @@ fn trans_stmt(cx: block, s: ast::stmt) -> block {
add_span_comment(cx, s.span, stmt_to_str(s)); add_span_comment(cx, s.span, stmt_to_str(s));
} }
let bcx = cx; let mut bcx = cx;
debuginfo::update_source_pos(cx, s.span); debuginfo::update_source_pos(cx, s.span);
alt s.node { alt s.node {
@ -3422,10 +3434,9 @@ fn trans_stmt(cx: block, s: ast::stmt) -> block {
// next three functions instead. // next three functions instead.
fn new_block(cx: fn_ctxt, parent: block_parent, kind: block_kind, fn new_block(cx: fn_ctxt, parent: block_parent, kind: block_kind,
name: str, block_span: option<span>) -> block { name: str, block_span: option<span>) -> block {
let s = ""; let s = if cx.ccx.sess.opts.save_temps || cx.ccx.sess.opts.debuginfo {
if cx.ccx.sess.opts.save_temps || cx.ccx.sess.opts.debuginfo { cx.ccx.names(name)
s = cx.ccx.names(name); } else { "" };
}
let llbb: BasicBlockRef = str::as_c_str(s, {|buf| let llbb: BasicBlockRef = str::as_c_str(s, {|buf|
llvm::LLVMAppendBasicBlock(cx.llfn, buf) llvm::LLVMAppendBasicBlock(cx.llfn, buf)
}); });
@ -3499,7 +3510,7 @@ fn raw_block(fcx: fn_ctxt, llbb: BasicBlockRef) -> block {
fn trans_block_cleanups(bcx: block, cleanup_cx: block) -> fn trans_block_cleanups(bcx: block, cleanup_cx: block) ->
block { block {
if bcx.unreachable { ret bcx; } if bcx.unreachable { ret bcx; }
let bcx = bcx; let mut bcx = bcx;
alt check cleanup_cx.kind { alt check cleanup_cx.kind {
block_scope({cleanups, _}) { block_scope({cleanups, _}) {
vec::riter(cleanups) {|cu| vec::riter(cleanups) {|cu|
@ -3515,7 +3526,7 @@ fn trans_block_cleanups(bcx: block, cleanup_cx: block) ->
// instruction. // instruction.
fn cleanup_and_leave(bcx: block, upto: option<BasicBlockRef>, fn cleanup_and_leave(bcx: block, upto: option<BasicBlockRef>,
leave: option<BasicBlockRef>) { leave: option<BasicBlockRef>) {
let cur = bcx, bcx = bcx; let mut cur = bcx, bcx = bcx;
loop { loop {
alt cur.kind { alt cur.kind {
block_scope(info) if info.cleanups.len() > 0u { block_scope(info) if info.cleanups.len() > 0u {
@ -3644,7 +3655,7 @@ fn alloc_local(cx: block, local: @ast::local) -> block {
fn trans_block(bcx: block, b: ast::blk, dest: dest) fn trans_block(bcx: block, b: ast::blk, dest: dest)
-> block { -> block {
let bcx = bcx; let mut bcx = bcx;
block_locals(b) {|local| bcx = alloc_local(bcx, local); }; block_locals(b) {|local| bcx = alloc_local(bcx, local); };
for s: @ast::stmt in b.node.stmts { for s: @ast::stmt in b.node.stmts {
debuginfo::update_source_pos(bcx, b.span); debuginfo::update_source_pos(bcx, b.span);
@ -3727,7 +3738,7 @@ fn create_llargs_for_fn_args(cx: fn_ctxt,
ty_self: self_arg, ty_self: self_arg,
args: [ast::arg]) { args: [ast::arg]) {
// Skip the implicit arguments 0, and 1. // Skip the implicit arguments 0, and 1.
let arg_n = first_real_arg; let mut arg_n = first_real_arg;
alt ty_self { alt ty_self {
impl_self(tt) { impl_self(tt) {
cx.llself = some({v: cx.llenv, t: tt}); cx.llself = some({v: cx.llenv, t: tt});
@ -3751,7 +3762,7 @@ fn create_llargs_for_fn_args(cx: fn_ctxt,
fn copy_args_to_allocas(fcx: fn_ctxt, bcx: block, args: [ast::arg], fn copy_args_to_allocas(fcx: fn_ctxt, bcx: block, args: [ast::arg],
arg_tys: [ty::arg]) -> block { arg_tys: [ty::arg]) -> block {
let tcx = bcx.tcx(); let tcx = bcx.tcx();
let arg_n: uint = 0u, bcx = bcx; let mut arg_n: uint = 0u, bcx = bcx;
let epic_fail = fn@() -> ! { let epic_fail = fn@() -> ! {
tcx.sess.bug("someone forgot\ tcx.sess.bug("someone forgot\
to document an invariant in copy_args_to_allocas!"); to document an invariant in copy_args_to_allocas!");
@ -3816,7 +3827,8 @@ fn trans_closure(ccx: @crate_ctxt, path: path, decl: ast::fn_decl,
// Create the first basic block in the function and keep a handle on it to // Create the first basic block in the function and keep a handle on it to
// pass to finish_fn later. // pass to finish_fn later.
let bcx_top = top_scope_block(fcx, some(body.span)), bcx = bcx_top; let bcx_top = top_scope_block(fcx, some(body.span));
let mut bcx = bcx_top;
let lltop = bcx.llbb; let lltop = bcx.llbb;
let block_ty = node_id_type(bcx, body.node.id); let block_ty = node_id_type(bcx, body.node.id);
@ -3880,7 +3892,7 @@ fn trans_res_ctor(ccx: @crate_ctxt, path: path, dtor: ast::fn_decl,
let fcx = new_fn_ctxt_w_id(ccx, path, llfndecl, ctor_id, let fcx = new_fn_ctxt_w_id(ccx, path, llfndecl, ctor_id,
none, param_substs, none); none, param_substs, none);
create_llargs_for_fn_args(fcx, no_self, dtor.inputs); create_llargs_for_fn_args(fcx, no_self, dtor.inputs);
let bcx = top_scope_block(fcx, none), lltop = bcx.llbb; let mut bcx = top_scope_block(fcx, none), lltop = bcx.llbb;
let fty = node_id_type(bcx, ctor_id); let fty = node_id_type(bcx, ctor_id);
let arg_t = ty::ty_fn_args(fty)[0].ty; let arg_t = ty::ty_fn_args(fty)[0].ty;
let arg = alt fcx.llargs.find(dtor.inputs[0].id) { let arg = alt fcx.llargs.find(dtor.inputs[0].id) {
@ -3905,7 +3917,7 @@ fn trans_enum_variant(ccx: @crate_ctxt, enum_id: ast::node_id,
param_substs: option<param_substs>, param_substs: option<param_substs>,
llfndecl: ValueRef) { llfndecl: ValueRef) {
// Translate variant arguments to function arguments. // Translate variant arguments to function arguments.
let fn_args = [], i = 0u; let mut fn_args = [], i = 0u;
for varg in variant.node.args { for varg in variant.node.args {
fn_args += [{mode: ast::expl(ast::by_copy), fn_args += [{mode: ast::expl(ast::by_copy),
ty: varg.ty, ty: varg.ty,
@ -3919,7 +3931,7 @@ fn trans_enum_variant(ccx: @crate_ctxt, enum_id: ast::node_id,
some(substs) { substs.tys } some(substs) { substs.tys }
none { [] } none { [] }
}; };
let bcx = top_scope_block(fcx, none), lltop = bcx.llbb; let mut bcx = top_scope_block(fcx, none), lltop = bcx.llbb;
let arg_tys = ty::ty_fn_args(node_id_type(bcx, variant.node.id)); let arg_tys = ty::ty_fn_args(node_id_type(bcx, variant.node.id));
bcx = copy_args_to_allocas(fcx, bcx, fn_args, arg_tys); bcx = copy_args_to_allocas(fcx, bcx, fn_args, arg_tys);
@ -3933,7 +3945,7 @@ fn trans_enum_variant(ccx: @crate_ctxt, enum_id: ast::node_id,
Store(bcx, C_int(ccx, disr), lldiscrimptr); Store(bcx, C_int(ccx, disr), lldiscrimptr);
GEPi(bcx, llenumptr, [0, 1]) GEPi(bcx, llenumptr, [0, 1])
}; };
let i = 0u; let mut i = 0u;
let t_id = local_def(enum_id); let t_id = local_def(enum_id);
let v_id = local_def(variant.node.id); let v_id = local_def(variant.node.id);
for va: ast::variant_arg in variant.node.args { for va: ast::variant_arg in variant.node.args {
@ -4107,7 +4119,7 @@ fn trans_item(ccx: @crate_ctxt, item: ast::item) {
if tps.len() == 0u { if tps.len() == 0u {
let degen = variants.len() == 1u; let degen = variants.len() == 1u;
let vi = ty::enum_variants(ccx.tcx, local_def(item.id)); let vi = ty::enum_variants(ccx.tcx, local_def(item.id));
let i = 0; let mut i = 0;
for variant: ast::variant in variants { for variant: ast::variant in variants {
if variant.node.args.len() > 0u { if variant.node.args.len() > 0u {
let llfn = get_item_val(ccx, variant.node.id); let llfn = get_item_val(ccx, variant.node.id);
@ -4149,10 +4161,11 @@ fn trans_item(ccx: @crate_ctxt, item: ast::item) {
node: ast::ty_infer, node: ast::ty_infer,
span: ctor.node.body.span}; span: ctor.node.body.span};
// kludgy // kludgy
let ty_args = [], i = 0u; let mut ty_args = [], i = 0u;
for tp in tps { for tp in tps {
ty_args += [ty::mk_param(ccx.tcx, i, ty_args += [ty::mk_param(ccx.tcx, i,
local_def(tps[i].id))]; local_def(tps[i].id))];
i += 1u;
} }
let rslt_ty = ty::mk_class(ccx.tcx, let rslt_ty = ty::mk_class(ccx.tcx,
local_def(item.id), local_def(item.id),
@ -4285,7 +4298,7 @@ fn create_main_wrapper(ccx: @crate_ctxt, sp: span, main_llfn: ValueRef,
let lloutputarg = llvm::LLVMGetParam(llfdecl, 0 as c_uint); let lloutputarg = llvm::LLVMGetParam(llfdecl, 0 as c_uint);
let llenvarg = llvm::LLVMGetParam(llfdecl, 1 as c_uint); let llenvarg = llvm::LLVMGetParam(llfdecl, 1 as c_uint);
let args = [lloutputarg, llenvarg]; let mut args = [lloutputarg, llenvarg];
if takes_argv { args += [llvm::LLVMGetParam(llfdecl, 2 as c_uint)]; } if takes_argv { args += [llvm::LLVMGetParam(llfdecl, 2 as c_uint)]; }
Call(bcx, main_llfn, args); Call(bcx, main_llfn, args);
build_return(bcx); build_return(bcx);
@ -4357,7 +4370,7 @@ fn get_item_val(ccx: @crate_ctxt, id: ast::node_id) -> ValueRef {
alt ccx.item_vals.find(id) { alt ccx.item_vals.find(id) {
some(v) { v } some(v) { v }
none { none {
let exprt = false; let mut exprt = false;
let val = alt check ccx.tcx.items.get(id) { let val = alt check ccx.tcx.items.get(id) {
ast_map::node_item(i, pth) { ast_map::node_item(i, pth) {
let my_path = *pth + [path_name(i.ident)]; let my_path = *pth + [path_name(i.ident)];
@ -4445,7 +4458,8 @@ fn trans_constant(ccx: @crate_ctxt, it: @ast::item) {
ast::item_enum(variants, _) { ast::item_enum(variants, _) {
let vi = ty::enum_variants(ccx.tcx, {crate: ast::local_crate, let vi = ty::enum_variants(ccx.tcx, {crate: ast::local_crate,
node: it.id}); node: it.id});
let i = 0, path = item_path(ccx, it); let mut i = 0;
let path = item_path(ccx, it);
for variant in variants { for variant in variants {
let p = path + [path_name(variant.node.name), let p = path + [path_name(variant.node.name),
path_name("discrim")]; path_name("discrim")];
@ -4550,7 +4564,7 @@ fn create_module_map(ccx: @crate_ctxt) -> ValueRef {
llvm::LLVMAddGlobal(ccx.llmod, maptype, buf) llvm::LLVMAddGlobal(ccx.llmod, maptype, buf)
}); });
lib::llvm::SetLinkage(map, lib::llvm::InternalLinkage); lib::llvm::SetLinkage(map, lib::llvm::InternalLinkage);
let elts: [ValueRef] = []; let mut elts: [ValueRef] = [];
ccx.module_data.items {|key, val| ccx.module_data.items {|key, val|
let elt = C_struct([p2i(ccx, C_cstr(ccx, key)), let elt = C_struct([p2i(ccx, C_cstr(ccx, key)),
p2i(ccx, val)]); p2i(ccx, val)]);
@ -4567,7 +4581,7 @@ fn decl_crate_map(sess: session::session, mapname: str,
llmod: ModuleRef) -> ValueRef { llmod: ModuleRef) -> ValueRef {
let targ_cfg = sess.targ_cfg; let targ_cfg = sess.targ_cfg;
let int_type = T_int(targ_cfg); let int_type = T_int(targ_cfg);
let n_subcrates = 1; let mut n_subcrates = 1;
let cstore = sess.cstore; let cstore = sess.cstore;
while cstore::have_crate_data(cstore, n_subcrates) { n_subcrates += 1; } while cstore::have_crate_data(cstore, n_subcrates) { n_subcrates += 1; }
let mapname = if sess.building_library { mapname } else { "toplevel" }; let mapname = if sess.building_library { mapname } else { "toplevel" };
@ -4583,8 +4597,8 @@ fn decl_crate_map(sess: session::session, mapname: str,
// FIXME use hashed metadata instead of crate names once we have that // FIXME use hashed metadata instead of crate names once we have that
fn fill_crate_map(ccx: @crate_ctxt, map: ValueRef) { fn fill_crate_map(ccx: @crate_ctxt, map: ValueRef) {
let subcrates: [ValueRef] = []; let mut subcrates: [ValueRef] = [];
let i = 1; let mut i = 1;
let cstore = ccx.sess.cstore; let cstore = ccx.sess.cstore;
while cstore::have_crate_data(cstore, i) { while cstore::have_crate_data(cstore, i) {
let nm = "_rust_crate_map_" + cstore::get_crate_data(cstore, i).name; let nm = "_rust_crate_map_" + cstore::get_crate_data(cstore, i).name;
@ -4604,7 +4618,7 @@ fn write_metadata(cx: @crate_ctxt, crate: @ast::crate) {
if !cx.sess.building_library { ret; } if !cx.sess.building_library { ret; }
let llmeta = C_bytes(metadata::encoder::encode_metadata(cx, crate)); let llmeta = C_bytes(metadata::encoder::encode_metadata(cx, crate));
let llconst = C_struct([llmeta]); let llconst = C_struct([llmeta]);
let llglobal = str::as_c_str("rust_metadata", {|buf| let mut llglobal = str::as_c_str("rust_metadata", {|buf|
llvm::LLVMAddGlobal(cx.llmod, val_ty(llconst), buf) llvm::LLVMAddGlobal(cx.llmod, val_ty(llconst), buf)
}); });
llvm::LLVMSetInitializer(llglobal, llconst); llvm::LLVMSetInitializer(llglobal, llconst);


@ -338,7 +338,7 @@ fn GEP(cx: block, Pointer: ValueRef, Indices: [ValueRef]) -> ValueRef {
// Simple wrapper around GEP that takes an array of ints and wraps them // Simple wrapper around GEP that takes an array of ints and wraps them
// in C_i32() // in C_i32()
fn GEPi(cx: block, base: ValueRef, ixs: [int]) -> ValueRef { fn GEPi(cx: block, base: ValueRef, ixs: [int]) -> ValueRef {
let v: [ValueRef] = []; let mut v: [ValueRef] = [];
for i: int in ixs { v += [C_i32(i as i32)]; } for i: int in ixs { v += [C_i32(i as i32)]; }
ret InBoundsGEP(cx, base, v); ret InBoundsGEP(cx, base, v);
} }

@ -123,7 +123,7 @@ fn mk_tuplified_uniq_cbox_ty(tcx: ty::ctxt, cdata_ty: ty::t) -> ty::t {
fn mk_closure_tys(tcx: ty::ctxt, fn mk_closure_tys(tcx: ty::ctxt,
bound_values: [environment_value]) bound_values: [environment_value])
-> (ty::t, [ty::t]) { -> (ty::t, [ty::t]) {
let bound_tys = []; let mut bound_tys = [];
// Compute the closed over data // Compute the closed over data
for bv in bound_values { for bv in bound_values {
@ -169,8 +169,8 @@ fn allocate_cbox(bcx: block,
} }
// Allocate and initialize the box: // Allocate and initialize the box:
let ti = none; let mut ti = none;
let temp_cleanups = []; let mut temp_cleanups = [];
let (bcx, box) = alt ck { let (bcx, box) = alt ck {
ty::ck_box { ty::ck_box {
let {bcx, val: box} = trans_malloc_boxed_raw(bcx, cdata_ty, ti); let {bcx, val: box} = trans_malloc_boxed_raw(bcx, cdata_ty, ti);
@ -219,7 +219,7 @@ fn store_environment(bcx: block,
mk_closure_tys(tcx, bound_values); mk_closure_tys(tcx, bound_values);
// allocate closure in the heap // allocate closure in the heap
let (bcx, llbox, temp_cleanups) = let mut (bcx, llbox, temp_cleanups) =
allocate_cbox(bcx, ck, cdata_ty); allocate_cbox(bcx, ck, cdata_ty);
// cbox_ty has the form of a tuple: (a, b, c) we want a ptr to a // cbox_ty has the form of a tuple: (a, b, c) we want a ptr to a
@ -232,6 +232,7 @@ fn store_environment(bcx: block,
#debug["tuplify_box_ty = %s", ty_to_str(tcx, cbox_ty)]; #debug["tuplify_box_ty = %s", ty_to_str(tcx, cbox_ty)];
// Copy expr values into boxed bindings. // Copy expr values into boxed bindings.
let mut bcx = bcx;
vec::iteri(bound_values) { |i, bv| vec::iteri(bound_values) { |i, bv|
#debug["Copy %s into closure", ev_to_str(ccx, bv)]; #debug["Copy %s into closure", ev_to_str(ccx, bv)];
@ -287,15 +288,16 @@ fn build_closure(bcx0: block,
ck: ty::closure_kind, ck: ty::closure_kind,
id: ast::node_id) -> closure_result { id: ast::node_id) -> closure_result {
// If we need to, package up the iterator body to call // If we need to, package up the iterator body to call
let env_vals = []; let mut env_vals = [];
let bcx = bcx0, ccx = bcx.ccx(), tcx = ccx.tcx; let mut bcx = bcx0;
let ccx = bcx.ccx(), tcx = ccx.tcx;
// Package up the captured upvars // Package up the captured upvars
vec::iter(cap_vars) { |cap_var| vec::iter(cap_vars) { |cap_var|
#debug["Building closure: captured variable %?", cap_var]; #debug["Building closure: captured variable %?", cap_var];
let lv = trans_local_var(bcx, cap_var.def); let lv = trans_local_var(bcx, cap_var.def);
let nid = ast_util::def_id_of_def(cap_var.def).node; let nid = ast_util::def_id_of_def(cap_var.def).node;
let ty = node_id_type(bcx, nid); let mut ty = node_id_type(bcx, nid);
alt cap_var.mode { alt cap_var.mode {
capture::cap_ref { capture::cap_ref {
assert ck == ty::ck_block; assert ck == ty::ck_block;
@ -336,12 +338,12 @@ fn load_environment(fcx: fn_ctxt,
let llcdata = base::opaque_box_body(bcx, cdata_ty, fcx.llenv); let llcdata = base::opaque_box_body(bcx, cdata_ty, fcx.llenv);
// Populate the upvars from the environment. // Populate the upvars from the environment.
let i = 0u; let mut i = 0u;
vec::iter(cap_vars) { |cap_var| vec::iter(cap_vars) { |cap_var|
alt cap_var.mode { alt cap_var.mode {
capture::cap_drop { /* ignore */ } capture::cap_drop { /* ignore */ }
_ { _ {
let upvarptr = let mut upvarptr =
GEPi(bcx, llcdata, [0, abi::closure_body_bindings, i as int]); GEPi(bcx, llcdata, [0, abi::closure_body_bindings, i as int]);
alt ck { alt ck {
ty::ck_block { upvarptr = Load(bcx, upvarptr); } ty::ck_block { upvarptr = Load(bcx, upvarptr); }
@ -410,11 +412,11 @@ fn trans_bind_1(cx: block, outgoing_fty: ty::t,
dest: dest) -> block { dest: dest) -> block {
assert option::is_none(f_res.tds); assert option::is_none(f_res.tds);
let ccx = cx.ccx(); let ccx = cx.ccx();
let bound: [@ast::expr] = []; let mut bound: [@ast::expr] = [];
for argopt: option<@ast::expr> in args { for argopt: option<@ast::expr> in args {
alt argopt { none { } some(e) { bound += [e]; } } alt argopt { none { } some(e) { bound += [e]; } }
} }
let bcx = f_res.bcx; let mut bcx = f_res.bcx;
if dest == ignore { if dest == ignore {
for ex in bound { bcx = trans_expr(bcx, ex, ignore); } for ex in bound { bcx = trans_expr(bcx, ex, ignore); }
ret bcx; ret bcx;
@ -648,7 +650,7 @@ fn trans_bind_thunk(ccx: @crate_ctxt,
// Create a new function context and block context for the thunk, and hold // Create a new function context and block context for the thunk, and hold
// onto a pointer to the first block in the function for later use. // onto a pointer to the first block in the function for later use.
let fcx = new_fn_ctxt(ccx, path, llthunk, none); let fcx = new_fn_ctxt(ccx, path, llthunk, none);
let bcx = top_scope_block(fcx, none); let mut bcx = top_scope_block(fcx, none);
let lltop = bcx.llbb; let lltop = bcx.llbb;
// Since we might need to construct derived tydescs that depend on // Since we might need to construct derived tydescs that depend on
// our bound tydescs, we need to load tydescs out of the environment // our bound tydescs, we need to load tydescs out of the environment
@ -701,18 +703,18 @@ fn trans_bind_thunk(ccx: @crate_ctxt,
let outgoing_args = ty::ty_fn_args(outgoing_fty); let outgoing_args = ty::ty_fn_args(outgoing_fty);
// Set up the three implicit arguments to the thunk. // Set up the three implicit arguments to the thunk.
let llargs: [ValueRef] = [fcx.llretptr, lltargetenv]; let mut llargs: [ValueRef] = [fcx.llretptr, lltargetenv];
let a: uint = first_real_arg; // retptr, env come first let mut a: uint = first_real_arg; // retptr, env come first
let b: int = starting_idx; let mut b: int = starting_idx;
let outgoing_arg_index: uint = 0u; let mut outgoing_arg_index: uint = 0u;
for arg: option<@ast::expr> in args { for arg: option<@ast::expr> in args {
let out_arg = outgoing_args[outgoing_arg_index]; let out_arg = outgoing_args[outgoing_arg_index];
alt arg { alt arg {
// Arg provided at binding time; thunk copies it from // Arg provided at binding time; thunk copies it from
// closure. // closure.
some(e) { some(e) {
let val = let mut val =
GEPi(bcx, llcdata, [0, abi::closure_body_bindings, b]); GEPi(bcx, llcdata, [0, abi::closure_body_bindings, b]);
alt ty::resolved_mode(tcx, out_arg.mode) { alt ty::resolved_mode(tcx, out_arg.mode) {
@ -744,9 +746,8 @@ fn trans_bind_thunk(ccx: @crate_ctxt,
// This is necessary because the type of the function that we have // This is necessary because the type of the function that we have
// in the closure does not know how many type descriptors the function // in the closure does not know how many type descriptors the function
// needs to take. // needs to take.
let lltargetty = let lltargetty = type_of_fn_from_ty(ccx, outgoing_fty);
type_of_fn_from_ty(ccx, outgoing_fty); let lltargetfn = PointerCast(bcx, lltargetfn, T_ptr(lltargetty));
lltargetfn = PointerCast(bcx, lltargetfn, T_ptr(lltargetty));
Call(bcx, lltargetfn, llargs); Call(bcx, lltargetfn, llargs);
build_return(bcx); build_return(bcx);
finish_fn(fcx, lltop); finish_fn(fcx, lltop);
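Note (illustrative, not from the patch): several hunks in this file split a combined `let a = ..., b = ...;` into separate statements so that only the names that are actually rebound carry `mut`. A hedged sketch of that split, with made-up names (sum_upto, xs, total, limit) rather than the compiler's own:

// before the change this could be one declaration:
//   let total = 0u, limit = vec::len(xs);
// with mandatory mut decls, only the rebound bindings are marked mutable
fn sum_upto(xs: [uint]) -> uint {
    let mut total = 0u;
    let limit = vec::len(xs);
    let mut i = 0u;
    while i < limit { total += xs[i]; i += 1u; }
    ret total;
}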

@ -254,7 +254,7 @@ fn add_clean_free(cx: block, ptr: ValueRef, shared: bool) {
// drop glue checks whether it is zero. // drop glue checks whether it is zero.
fn revoke_clean(cx: block, val: ValueRef) { fn revoke_clean(cx: block, val: ValueRef) {
in_scope_cx(cx) {|info| in_scope_cx(cx) {|info|
let i = 0u; let mut i = 0u;
for cu in info.cleanups { for cu in info.cleanups {
alt cu { alt cu {
clean_temp(v, _) if v == val { clean_temp(v, _) if v == val {
@ -356,7 +356,7 @@ fn struct_elt(llstructty: TypeRef, n: uint) -> TypeRef unsafe {
} }
fn in_scope_cx(cx: block, f: fn(scope_info)) { fn in_scope_cx(cx: block, f: fn(scope_info)) {
let cur = cx; let mut cur = cx;
loop { loop {
alt cur.kind { alt cur.kind {
block_scope(info) { f(info); ret; } block_scope(info) { f(info); ret; }
@ -762,8 +762,8 @@ fn C_postr(s: str) -> ValueRef {
} }
fn C_zero_byte_arr(size: uint) -> ValueRef unsafe { fn C_zero_byte_arr(size: uint) -> ValueRef unsafe {
let i = 0u; let mut i = 0u;
let elts: [ValueRef] = []; let mut elts: [ValueRef] = [];
while i < size { elts += [C_u8(0u)]; i += 1u; } while i < size { elts += [C_u8(0u)]; i += 1u; }
ret llvm::LLVMConstArray(T_i8(), vec::unsafe::to_ptr(elts), ret llvm::LLVMConstArray(T_i8(), vec::unsafe::to_ptr(elts),
elts.len() as c_uint); elts.len() as c_uint);
@ -809,11 +809,11 @@ enum mono_param_id {
} }
type mono_id = @{def: ast::def_id, params: [mono_param_id]}; type mono_id = @{def: ast::def_id, params: [mono_param_id]};
fn hash_mono_id(&&mi: mono_id) -> uint { fn hash_mono_id(&&mi: mono_id) -> uint {
let h = syntax::ast_util::hash_def_id(mi.def); let mut h = syntax::ast_util::hash_def_id(mi.def);
for param in mi.params { for param in mi.params {
h = h * alt param { h = h * alt param {
mono_precise(ty, vts) { mono_precise(ty, vts) {
let h = ty::type_id(ty); let mut h = ty::type_id(ty);
option::may(vts) {|vts| option::may(vts) {|vts|
for vt in vts { h += hash_mono_id(vt); } for vt in vts { h += hash_mono_id(vt); }
} }
@ -843,7 +843,7 @@ fn align_to(cx: block, off: ValueRef, align: ValueRef) -> ValueRef {
} }
fn path_str(p: path) -> str { fn path_str(p: path) -> str {
let r = "", first = true; let mut r = "", first = true;
for e in p { for e in p {
alt e { ast_map::path_name(s) | ast_map::path_mod(s) { alt e { ast_map::path_name(s) | ast_map::path_mod(s) {
if first { first = false; } if first { first = false; }

@ -235,7 +235,7 @@ fn line_from_span(cm: codemap::codemap, sp: span) -> uint {
fn create_block(cx: block) -> @metadata<block_md> { fn create_block(cx: block) -> @metadata<block_md> {
let cache = get_cache(cx.ccx()); let cache = get_cache(cx.ccx());
let cx = cx; let mut cx = cx;
while option::is_none(cx.block_span) { while option::is_none(cx.block_span) {
alt cx.parent { alt cx.parent {
parent_some(b) { cx = b; } parent_some(b) { cx = b; }

@ -31,7 +31,7 @@ fn trans_impl(ccx: @crate_ctxt, path: path, name: ast::ident,
fn trans_self_arg(bcx: block, base: @ast::expr) -> result { fn trans_self_arg(bcx: block, base: @ast::expr) -> result {
let basety = expr_ty(bcx, base); let basety = expr_ty(bcx, base);
let m_by_ref = ast::expl(ast::by_ref); let m_by_ref = ast::expl(ast::by_ref);
let temp_cleanups = []; let mut temp_cleanups = [];
let result = trans_arg_expr(bcx, {mode: m_by_ref, ty: basety}, let result = trans_arg_expr(bcx, {mode: m_by_ref, ty: basety},
T_ptr(type_of::type_of(bcx.ccx(), basety)), T_ptr(type_of::type_of(bcx.ccx(), basety)),
base, temp_cleanups); base, temp_cleanups);
@ -147,7 +147,7 @@ fn trans_iface_callee(bcx: block, base: @ast::expr,
fn find_vtable_in_fn_ctxt(ps: param_substs, n_param: uint, n_bound: uint) fn find_vtable_in_fn_ctxt(ps: param_substs, n_param: uint, n_bound: uint)
-> typeck::vtable_origin { -> typeck::vtable_origin {
let vtable_off = n_bound, i = 0u; let mut vtable_off = n_bound, i = 0u;
// Vtables are stored in a flat array, finding the right one is // Vtables are stored in a flat array, finding the right one is
// somewhat awkward // somewhat awkward
for bounds in *ps.bounds { for bounds in *ps.bounds {
@ -258,7 +258,7 @@ fn trans_cast(bcx: block, val: @ast::expr, id: ast::node_id, dest: dest)
if dest == ignore { ret trans_expr(bcx, val, ignore); } if dest == ignore { ret trans_expr(bcx, val, ignore); }
let ccx = bcx.ccx(); let ccx = bcx.ccx();
let v_ty = expr_ty(bcx, val); let v_ty = expr_ty(bcx, val);
let {bcx, box, body} = trans_malloc_boxed(bcx, v_ty); let mut {bcx, box, body} = trans_malloc_boxed(bcx, v_ty);
add_clean_free(bcx, box, false); add_clean_free(bcx, box, false);
bcx = trans_expr_save_in(bcx, val, body); bcx = trans_expr_save_in(bcx, val, body);
revoke_clean(bcx, box); revoke_clean(bcx, box);
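Note (illustrative sketch, not from the patch): the trans_cast hunk above shows the same rule applied to record destructuring, where the whole pattern is declared `mut` once any destructured name is reassigned. The record and names below (widen, w, h) are assumptions made up for the example:

// `w` is rebound below, so the destructuring binding is marked mut
fn widen(p: {w: uint, h: uint}) -> {w: uint, h: uint} {
    let mut {w, h} = p;
    w += 1u;
    ret {w: w, h: h};
}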

@ -142,7 +142,7 @@ fn classify_ty(ty: TypeRef) -> [x86_64_reg_class] {
if vec::is_empty(tys) { if vec::is_empty(tys) {
classify(T_i64(), cls, i, off); classify(T_i64(), cls, i, off);
} else { } else {
let field_off = off; let mut field_off = off;
for ty in tys { for ty in tys {
field_off = align(field_off, ty); field_off = align(field_off, ty);
classify(ty, cls, i, field_off); classify(ty, cls, i, field_off);
@ -159,7 +159,7 @@ fn classify_ty(ty: TypeRef) -> [x86_64_reg_class] {
let misalign = off % t_align; let misalign = off % t_align;
if misalign != 0u { if misalign != 0u {
let i = off / 8u; let mut i = off / 8u;
let e = (off + t_size + 7u) / 8u; let e = (off + t_size + 7u) / 8u;
while i < e { while i < e {
unify(cls, i, memory_class); unify(cls, i, memory_class);
@ -193,7 +193,7 @@ fn classify_ty(ty: TypeRef) -> [x86_64_reg_class] {
} }
fn fixup(ty: TypeRef, cls: [mut x86_64_reg_class]) { fn fixup(ty: TypeRef, cls: [mut x86_64_reg_class]) {
let i = 0u; let mut i = 0u;
let e = vec::len(cls); let e = vec::len(cls);
if vec::len(cls) > 2u && if vec::len(cls) > 2u &&
llvm::LLVMGetTypeKind(ty) as int == 10 /* struct */ { llvm::LLVMGetTypeKind(ty) as int == 10 /* struct */ {
@ -250,7 +250,7 @@ fn classify_ty(ty: TypeRef) -> [x86_64_reg_class] {
fn llreg_ty(cls: [x86_64_reg_class]) -> TypeRef { fn llreg_ty(cls: [x86_64_reg_class]) -> TypeRef {
fn llvec_len(cls: [x86_64_reg_class]) -> uint { fn llvec_len(cls: [x86_64_reg_class]) -> uint {
let len = 1u; let mut len = 1u;
for c in cls { for c in cls {
if c != sseup_class { if c != sseup_class {
break; break;
@ -260,8 +260,8 @@ fn llreg_ty(cls: [x86_64_reg_class]) -> TypeRef {
ret len; ret len;
} }
let tys = []; let mut tys = [];
let i = 0u; let mut i = 0u;
let e = vec::len(cls); let e = vec::len(cls);
while i < e { while i < e {
alt cls[i] { alt cls[i] {
@ -329,9 +329,9 @@ fn x86_64_tys(atys: [TypeRef],
fn x86_64_ty(ty: TypeRef, fn x86_64_ty(ty: TypeRef,
is_mem_cls: fn(cls: [x86_64_reg_class]) -> bool, is_mem_cls: fn(cls: [x86_64_reg_class]) -> bool,
attr: Attribute) -> (x86_64_llty, option<Attribute>) { attr: Attribute) -> (x86_64_llty, option<Attribute>) {
let cast = false; let mut cast = false;
let ty_attr = option::none; let mut ty_attr = option::none;
let llty = ty; let mut llty = ty;
if !is_reg_ty(ty) { if !is_reg_ty(ty) {
let cls = classify_ty(ty); let cls = classify_ty(ty);
if is_mem_cls(cls) { if is_mem_cls(cls) {
@ -345,15 +345,15 @@ fn x86_64_tys(atys: [TypeRef],
ret ({ cast: cast, ty: llty }, ty_attr); ret ({ cast: cast, ty: llty }, ty_attr);
} }
let arg_tys = []; let mut arg_tys = [];
let attrs = []; let mut attrs = [];
for t in atys { for t in atys {
let (ty, attr) = x86_64_ty(t, is_pass_byval, ByValAttribute); let (ty, attr) = x86_64_ty(t, is_pass_byval, ByValAttribute);
arg_tys += [ty]; arg_tys += [ty];
attrs += [attr]; attrs += [attr];
} }
let (ret_ty, ret_attr) = x86_64_ty(rty, is_ret_bysret, let mut (ret_ty, ret_attr) = x86_64_ty(rty, is_ret_bysret,
StructRetAttribute); StructRetAttribute);
let sret = option::is_some(ret_attr); let sret = option::is_some(ret_attr);
if sret { if sret {
arg_tys = [ret_ty] + arg_tys; arg_tys = [ret_ty] + arg_tys;
@ -557,14 +557,14 @@ fn trans_native_mod(ccx: @crate_ctxt,
fn build_args(bcx: block, tys: @c_stack_tys, fn build_args(bcx: block, tys: @c_stack_tys,
llargbundle: ValueRef) -> [ValueRef] { llargbundle: ValueRef) -> [ValueRef] {
let llargvals = []; let mut llargvals = [];
let i = 0u; let mut i = 0u;
let n = vec::len(tys.arg_tys); let n = vec::len(tys.arg_tys);
alt tys.x86_64_tys { alt tys.x86_64_tys {
some(x86_64) { some(x86_64) {
let atys = x86_64.arg_tys; let mut atys = x86_64.arg_tys;
let attrs = x86_64.attrs; let mut attrs = x86_64.attrs;
if x86_64.sret { if x86_64.sret {
let llretptr = GEPi(bcx, llargbundle, [0, n as int]); let llretptr = GEPi(bcx, llargbundle, [0, n as int]);
let llretloc = Load(bcx, llretptr); let llretloc = Load(bcx, llretptr);
@ -575,9 +575,9 @@ fn trans_native_mod(ccx: @crate_ctxt,
while i < n { while i < n {
let llargval = if atys[i].cast { let llargval = if atys[i].cast {
let arg_ptr = GEPi(bcx, llargbundle, let arg_ptr = GEPi(bcx, llargbundle,
[0, i as int]); [0, i as int]);
arg_ptr = BitCast(bcx, arg_ptr, let arg_ptr = BitCast(bcx, arg_ptr,
T_ptr(atys[i].ty)); T_ptr(atys[i].ty));
Load(bcx, arg_ptr) Load(bcx, arg_ptr)
} else if option::is_some(attrs[i]) { } else if option::is_some(attrs[i]) {
GEPi(bcx, llargbundle, [0, i as int]) GEPi(bcx, llargbundle, [0, i as int])
@ -668,7 +668,8 @@ fn trans_native_mod(ccx: @crate_ctxt,
fn build_args(bcx: block, tys: @c_stack_tys, fn build_args(bcx: block, tys: @c_stack_tys,
llwrapfn: ValueRef, llargbundle: ValueRef) { llwrapfn: ValueRef, llargbundle: ValueRef) {
let i = 0u, n = vec::len(tys.arg_tys); let mut i = 0u;
let n = vec::len(tys.arg_tys);
let implicit_args = first_real_arg; // ret + env let implicit_args = first_real_arg; // ret + env
while i < n { while i < n {
let llargval = llvm::LLVMGetParam( let llargval = llvm::LLVMGetParam(
@ -691,7 +692,7 @@ fn trans_native_mod(ccx: @crate_ctxt,
build_args, build_ret); build_args, build_ret);
} }
let cc = lib::llvm::CCallConv; let mut cc = lib::llvm::CCallConv;
alt abi { alt abi {
ast::native_abi_rust_intrinsic { ast::native_abi_rust_intrinsic {
for item in native_mod.items { get_item_val(ccx, item.id); } for item in native_mod.items { get_item_val(ccx, item.id); }
@ -735,8 +736,8 @@ fn trans_crust_fn(ccx: @crate_ctxt, path: ast_map::path, decl: ast::fn_decl,
fn build_args(bcx: block, tys: @c_stack_tys, fn build_args(bcx: block, tys: @c_stack_tys,
llargbundle: ValueRef) -> [ValueRef] { llargbundle: ValueRef) -> [ValueRef] {
let llargvals = []; let mut llargvals = [];
let i = 0u; let mut i = 0u;
let n = vec::len(tys.arg_tys); let n = vec::len(tys.arg_tys);
let llretptr = load_inbounds(bcx, llargbundle, [0, n as int]); let llretptr = load_inbounds(bcx, llargbundle, [0, n as int]);
llargvals += [llretptr]; llargvals += [llretptr];
@ -770,9 +771,9 @@ fn trans_crust_fn(ccx: @crate_ctxt, path: ast_map::path, decl: ast::fn_decl,
llwrapfn: ValueRef, llargbundle: ValueRef) { llwrapfn: ValueRef, llargbundle: ValueRef) {
alt tys.x86_64_tys { alt tys.x86_64_tys {
option::some(x86_64) { option::some(x86_64) {
let atys = x86_64.arg_tys; let mut atys = x86_64.arg_tys;
let attrs = x86_64.attrs; let mut attrs = x86_64.attrs;
let j = 0u; let mut j = 0u;
let llretptr = if x86_64.sret { let llretptr = if x86_64.sret {
atys = vec::tail(atys); atys = vec::tail(atys);
attrs = vec::tail(attrs); attrs = vec::tail(attrs);
@ -785,18 +786,20 @@ fn trans_crust_fn(ccx: @crate_ctxt, path: ast_map::path, decl: ast::fn_decl,
alloca(bcx, tys.ret_ty) alloca(bcx, tys.ret_ty)
}; };
let i = 0u, n = vec::len(atys); let mut i = 0u;
let n = vec::len(atys);
while i < n { while i < n {
let argval = llvm::LLVMGetParam(llwrapfn, let mut argval =
(i + j) as c_uint); llvm::LLVMGetParam(llwrapfn, (i + j) as c_uint);
if option::is_some(attrs[i]) { if option::is_some(attrs[i]) {
argval = Load(bcx, argval); argval = Load(bcx, argval);
store_inbounds(bcx, argval, llargbundle, store_inbounds(bcx, argval, llargbundle,
[0, i as int]); [0, i as int]);
} else if atys[i].cast { } else if atys[i].cast {
let argptr = GEPi(bcx, llargbundle, let argptr = GEPi(bcx, llargbundle,
[0, i as int]); [0, i as int]);
argptr = BitCast(bcx, argptr, T_ptr(atys[i].ty)); let argptr = BitCast(bcx, argptr,
T_ptr(atys[i].ty));
Store(bcx, argval, argptr); Store(bcx, argval, argptr);
} else { } else {
store_inbounds(bcx, argval, llargbundle, store_inbounds(bcx, argval, llargbundle,
@ -808,7 +811,8 @@ fn trans_crust_fn(ccx: @crate_ctxt, path: ast_map::path, decl: ast::fn_decl,
} }
_ { _ {
let llretptr = alloca(bcx, tys.ret_ty); let llretptr = alloca(bcx, tys.ret_ty);
let i = 0u, n = vec::len(tys.arg_tys); let mut i = 0u;
let n = vec::len(tys.arg_tys);
while i < n { while i < n {
let llargval = llvm::LLVMGetParam(llwrapfn, let llargval = llvm::LLVMGetParam(llwrapfn,
i as c_uint); i as c_uint);

@ -30,7 +30,7 @@ fn find_reachable(crate_mod: _mod, exp_map: resolve::exp_map,
} }
fn traverse_exports(cx: ctx, vis: [@view_item]) -> bool { fn traverse_exports(cx: ctx, vis: [@view_item]) -> bool {
let found_export = false; let mut found_export = false;
for vi in vis { for vi in vis {
alt vi.node { alt vi.node {
view_item_export(vps) { view_item_export(vps) {

@ -62,7 +62,7 @@ const shape_class: u8 = 30u8;
const shape_rptr: u8 = 31u8; const shape_rptr: u8 = 31u8;
fn hash_res_info(ri: res_info) -> uint { fn hash_res_info(ri: res_info) -> uint {
let h = 5381u; let mut h = 5381u;
h *= 33u; h *= 33u;
h += ri.did.crate as uint; h += ri.did.crate as uint;
h *= 33u; h *= 33u;
@ -103,11 +103,11 @@ fn largest_variants(ccx: @crate_ctxt, tag_id: ast::def_id) -> [uint] {
// FIXME: We could do better here; e.g. we know that any variant that // FIXME: We could do better here; e.g. we know that any variant that
// contains (T,T) must be as least as large as any variant that contains // contains (T,T) must be as least as large as any variant that contains
// just T. // just T.
let ranges = []; let mut ranges = [];
let variants = ty::enum_variants(ccx.tcx, tag_id); let variants = ty::enum_variants(ccx.tcx, tag_id);
for variant: ty::variant_info in *variants { for variant: ty::variant_info in *variants {
let bounded = true; let mut bounded = true;
let min_size = 0u, min_align = 0u; let mut min_size = 0u, min_align = 0u;
for elem_t: ty::t in variant.args { for elem_t: ty::t in variant.args {
if ty::type_has_params(elem_t) { if ty::type_has_params(elem_t) {
// FIXME: We could do better here; this causes us to // FIXME: We could do better here; this causes us to
@ -127,16 +127,16 @@ fn largest_variants(ccx: @crate_ctxt, tag_id: ast::def_id) -> [uint] {
} }
// Initialize the candidate set to contain all variants. // Initialize the candidate set to contain all variants.
let candidates = [mutable]; let mut candidates = [mutable];
for variant in *variants { candidates += [mutable true]; } for variant in *variants { candidates += [mutable true]; }
// Do a pairwise comparison among all variants still in the candidate set. // Do a pairwise comparison among all variants still in the candidate set.
// Throw out any variant that we know has size and alignment at least as // Throw out any variant that we know has size and alignment at least as
// small as some other variant. // small as some other variant.
let i = 0u; let mut i = 0u;
while i < vec::len(ranges) - 1u { while i < vec::len(ranges) - 1u {
if candidates[i] { if candidates[i] {
let j = i + 1u; let mut j = i + 1u;
while j < vec::len(ranges) { while j < vec::len(ranges) {
if candidates[j] { if candidates[j] {
if ranges[i].size.bounded && ranges[i].align.bounded && if ranges[i].size.bounded && ranges[i].align.bounded &&
@ -159,8 +159,8 @@ fn largest_variants(ccx: @crate_ctxt, tag_id: ast::def_id) -> [uint] {
} }
// Return the resulting set. // Return the resulting set.
let result = []; let mut result = [];
i = 0u; let mut i = 0u;
while i < vec::len(candidates) { while i < vec::len(candidates) {
if candidates[i] { result += [i]; } if candidates[i] { result += [i]; }
i += 1u; i += 1u;
@ -178,12 +178,12 @@ type size_align = {size: u16, align: u8};
fn compute_static_enum_size(ccx: @crate_ctxt, largest_variants: [uint], fn compute_static_enum_size(ccx: @crate_ctxt, largest_variants: [uint],
did: ast::def_id) -> size_align { did: ast::def_id) -> size_align {
let max_size = 0u16; let mut max_size = 0u16;
let max_align = 1u8; let mut max_align = 1u8;
let variants = ty::enum_variants(ccx.tcx, did); let variants = ty::enum_variants(ccx.tcx, did);
for vid: uint in largest_variants { for vid: uint in largest_variants {
// We increment a "virtual data pointer" to compute the size. // We increment a "virtual data pointer" to compute the size.
let lltys = []; let mut lltys = [];
for typ: ty::t in variants[vid].args { for typ: ty::t in variants[vid].args {
lltys += [type_of::type_of(ccx, typ)]; lltys += [type_of::type_of(ccx, typ)];
} }
@ -308,7 +308,7 @@ fn shape_of(ccx: @crate_ctxt, t: ty::t, ty_param_map: [uint]) -> [u8] {
ty::ty_float(ast::ty_f32) { [shape_f32] } ty::ty_float(ast::ty_f32) { [shape_f32] }
ty::ty_float(ast::ty_f64) { [shape_f64] } ty::ty_float(ast::ty_f64) { [shape_f64] }
ty::ty_str { ty::ty_str {
let s = [shape_vec]; let mut s = [shape_vec];
add_bool(s, true); // type is POD add_bool(s, true); // type is POD
let unit_ty = ty::mk_mach_uint(ccx.tcx, ast::ty_u8); let unit_ty = ty::mk_mach_uint(ccx.tcx, ast::ty_u8);
add_substr(s, shape_of(ccx, unit_ty, ty_param_map)); add_substr(s, shape_of(ccx, unit_ty, ty_param_map));
@ -320,7 +320,7 @@ fn shape_of(ccx: @crate_ctxt, t: ty::t, ty_param_map: [uint]) -> [u8] {
tk_unit { [s_variant_enum_t(ccx.tcx)] } tk_unit { [s_variant_enum_t(ccx.tcx)] }
tk_enum { [s_variant_enum_t(ccx.tcx)] } tk_enum { [s_variant_enum_t(ccx.tcx)] }
tk_newtype | tk_complex { tk_newtype | tk_complex {
let s = [shape_enum], id; let mut s = [shape_enum], id;
alt ccx.shape_cx.tag_id_to_index.find(did) { alt ccx.shape_cx.tag_id_to_index.find(did) {
none { none {
id = ccx.shape_cx.next_tag_id; id = ccx.shape_cx.next_tag_id;
@ -344,18 +344,18 @@ fn shape_of(ccx: @crate_ctxt, t: ty::t, ty_param_map: [uint]) -> [u8] {
} }
ty::ty_box(_) | ty::ty_opaque_box { [shape_box] } ty::ty_box(_) | ty::ty_opaque_box { [shape_box] }
ty::ty_uniq(mt) { ty::ty_uniq(mt) {
let s = [shape_uniq]; let mut s = [shape_uniq];
add_substr(s, shape_of(ccx, mt.ty, ty_param_map)); add_substr(s, shape_of(ccx, mt.ty, ty_param_map));
s s
} }
ty::ty_vec(mt) { ty::ty_vec(mt) {
let s = [shape_vec]; let mut s = [shape_vec];
add_bool(s, ty::type_is_pod(ccx.tcx, mt.ty)); add_bool(s, ty::type_is_pod(ccx.tcx, mt.ty));
add_substr(s, shape_of(ccx, mt.ty, ty_param_map)); add_substr(s, shape_of(ccx, mt.ty, ty_param_map));
s s
} }
ty::ty_rec(fields) { ty::ty_rec(fields) {
let s = [shape_struct], sub = []; let mut s = [shape_struct], sub = [];
for f: field in fields { for f: field in fields {
sub += shape_of(ccx, f.mt.ty, ty_param_map); sub += shape_of(ccx, f.mt.ty, ty_param_map);
} }
@ -363,7 +363,7 @@ fn shape_of(ccx: @crate_ctxt, t: ty::t, ty_param_map: [uint]) -> [u8] {
s s
} }
ty::ty_tup(elts) { ty::ty_tup(elts) {
let s = [shape_struct], sub = []; let mut s = [shape_struct], sub = [];
for elt in elts { for elt in elts {
sub += shape_of(ccx, elt, ty_param_map); sub += shape_of(ccx, elt, ty_param_map);
} }
@ -373,7 +373,7 @@ fn shape_of(ccx: @crate_ctxt, t: ty::t, ty_param_map: [uint]) -> [u8] {
ty::ty_iface(_, _) { [shape_box_fn] } ty::ty_iface(_, _) { [shape_box_fn] }
ty::ty_class(_, _) { [shape_class] } ty::ty_class(_, _) { [shape_class] }
ty::ty_rptr(_, tm) { ty::ty_rptr(_, tm) {
let s = [shape_rptr]; let mut s = [shape_rptr];
add_substr(s, shape_of(ccx, tm.ty, ty_param_map)); add_substr(s, shape_of(ccx, tm.ty, ty_param_map));
s s
} }
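shape_of keeps appending to a mutable byte vector: a tag byte, optional flags, then nested sub-shapes via add_substr. A minimal sketch of such helpers in current Rust; the tag value and the two-byte little-endian length prefix are assumptions for illustration, not the compiler's real shape encoding:

    const SHAPE_VEC: u8 = 0x10; // hypothetical tag value

    fn add_bool(s: &mut Vec<u8>, b: bool) {
        s.push(if b { 1 } else { 0 });
    }

    fn add_u16(s: &mut Vec<u8>, v: u16) {
        s.extend_from_slice(&v.to_le_bytes());
    }

    // A nested shape is written as a u16 length followed by its bytes.
    fn add_substr(s: &mut Vec<u8>, sub: &[u8]) {
        add_u16(s, sub.len() as u16);
        s.extend_from_slice(sub);
    }

    // e.g. the ty_vec case: tag, POD flag, then the element's shape.
    fn vec_shape(elem_shape: &[u8], elem_is_pod: bool) -> Vec<u8> {
        let mut s = vec![SHAPE_VEC];
        add_bool(&mut s, elem_is_pod);
        add_substr(&mut s, elem_shape);
        s
    }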
@ -382,7 +382,7 @@ fn shape_of(ccx: @crate_ctxt, t: ty::t, ty_param_map: [uint]) -> [u8] {
let ri = {did: did, tps: tps}; let ri = {did: did, tps: tps};
let id = interner::intern(ccx.shape_cx.resources, ri); let id = interner::intern(ccx.shape_cx.resources, ri);
let s = [shape_res]; let mut s = [shape_res];
add_u16(s, id as u16); add_u16(s, id as u16);
add_u16(s, vec::len(tps) as u16); add_u16(s, vec::len(tps) as u16);
for tp: ty::t in tps { for tp: ty::t in tps {
@ -414,11 +414,11 @@ fn shape_of(ccx: @crate_ctxt, t: ty::t, ty_param_map: [uint]) -> [u8] {
// FIXME: We might discover other variants as we traverse these. Handle this. // FIXME: We might discover other variants as we traverse these. Handle this.
fn shape_of_variant(ccx: @crate_ctxt, v: ty::variant_info, fn shape_of_variant(ccx: @crate_ctxt, v: ty::variant_info,
ty_param_count: uint) -> [u8] { ty_param_count: uint) -> [u8] {
let ty_param_map = []; let mut ty_param_map = [];
let i = 0u; let mut i = 0u;
while i < ty_param_count { ty_param_map += [i]; i += 1u; } while i < ty_param_count { ty_param_map += [i]; i += 1u; }
let s = []; let mut s = [];
for t: ty::t in v.args { s += shape_of(ccx, t, ty_param_map); } for t: ty::t in v.args { s += shape_of(ccx, t, ty_param_map); }
ret s; ret s;
} }
@ -427,9 +427,9 @@ fn gen_enum_shapes(ccx: @crate_ctxt) -> ValueRef {
// Loop over all the enum variants and write their shapes into a // Loop over all the enum variants and write their shapes into a
// data buffer. As we do this, it's possible for us to discover // data buffer. As we do this, it's possible for us to discover
// new enums, so we must do this first. // new enums, so we must do this first.
let i = 0u; let mut i = 0u;
let data = []; let mut data = [];
let offsets = []; let mut offsets = [];
while i < vec::len(ccx.shape_cx.tag_order) { while i < vec::len(ccx.shape_cx.tag_order) {
let did = ccx.shape_cx.tag_order[i]; let did = ccx.shape_cx.tag_order[i];
let variants = ty::enum_variants(ccx.tcx, did); let variants = ty::enum_variants(ccx.tcx, did);
@ -453,12 +453,12 @@ fn gen_enum_shapes(ccx: @crate_ctxt) -> ValueRef {
// info records for each enum) and the info space (which contains offsets // info records for each enum) and the info space (which contains offsets
// to each variant shape). As we do so, build up the header. // to each variant shape). As we do so, build up the header.
let header = []; let mut header = [];
let info = []; let mut info = [];
let header_sz = 2u16 * ccx.shape_cx.next_tag_id; let header_sz = 2u16 * ccx.shape_cx.next_tag_id;
let data_sz = vec::len(data) as u16; let data_sz = vec::len(data) as u16;
let info_sz = 0u16; let mut info_sz = 0u16;
for did_: ast::def_id in ccx.shape_cx.tag_order { for did_: ast::def_id in ccx.shape_cx.tag_order {
let did = did_; // Satisfy alias checker. let did = did_; // Satisfy alias checker.
let num_variants = vec::len(*ty::enum_variants(ccx.tcx, did)) as u16; let num_variants = vec::len(*ty::enum_variants(ccx.tcx, did)) as u16;
@ -470,7 +470,7 @@ fn gen_enum_shapes(ccx: @crate_ctxt) -> ValueRef {
// variant. Also construct the largest-variant table for each enum, which // variant. Also construct the largest-variant table for each enum, which
// contains the variants that the size-of operation needs to look at. // contains the variants that the size-of operation needs to look at.
let lv_table = []; let mut lv_table = [];
i = 0u; i = 0u;
for did_: ast::def_id in ccx.shape_cx.tag_order { for did_: ast::def_id in ccx.shape_cx.tag_order {
let did = did_; // Satisfy alias checker. let did = did_; // Satisfy alias checker.
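gen_enum_shapes, per the comments above, lays out three regions: a header with one entry per enum, an info region with one entry per variant, and the raw shape data. A self-contained sketch of that offset-table construction in current Rust; the u16 offsets and what exactly each entry stores are simplified here:

    // For each enum, `header` records where its variant entries start in `info`;
    // for each variant, `info` records where its shape bytes start in `data`.
    fn build_shape_tables(enums: &[Vec<Vec<u8>>]) -> (Vec<u16>, Vec<u16>, Vec<u8>) {
        let mut header = Vec::new();
        let mut info = Vec::new();
        let mut data = Vec::new();
        for variants in enums {
            header.push(info.len() as u16);
            for shape in variants {
                info.push(data.len() as u16);
                data.extend_from_slice(shape);
            }
        }
        (header, info, data)
    }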
@ -518,8 +518,8 @@ fn gen_enum_shapes(ccx: @crate_ctxt) -> ValueRef {
} }
fn gen_resource_shapes(ccx: @crate_ctxt) -> ValueRef { fn gen_resource_shapes(ccx: @crate_ctxt) -> ValueRef {
let dtors = []; let mut dtors = [];
let i = 0u; let mut i = 0u;
let len = interner::len(ccx.shape_cx.resources); let len = interner::len(ccx.shape_cx.resources);
while i < len { while i < len {
let ri = interner::get(ccx.shape_cx.resources, i); let ri = interner::get(ccx.shape_cx.resources, i);
@ -593,13 +593,13 @@ fn static_size_of_enum(cx: @crate_ctxt, t: ty::t) -> uint {
alt ty::get(t).struct { alt ty::get(t).struct {
ty::ty_enum(tid, subtys) { ty::ty_enum(tid, subtys) {
// Compute max(variant sizes). // Compute max(variant sizes).
let max_size = 0u; let mut max_size = 0u;
let variants = ty::enum_variants(cx.tcx, tid); let variants = ty::enum_variants(cx.tcx, tid);
for variant: ty::variant_info in *variants { for variant: ty::variant_info in *variants {
let tup_ty = simplify_type(cx.tcx, let tup_ty = simplify_type(cx.tcx,
ty::mk_tup(cx.tcx, variant.args)); ty::mk_tup(cx.tcx, variant.args));
// Perform any type parameter substitutions. // Perform any type parameter substitutions.
tup_ty = ty::substitute_type_params(cx.tcx, subtys, tup_ty); let tup_ty = ty::substitute_type_params(cx.tcx, subtys, tup_ty);
// Here we possibly do a recursive call. // Here we possibly do a recursive call.
let this_size = let this_size =
llsize_of_real(cx, type_of::type_of(cx, tup_ty)); llsize_of_real(cx, type_of::type_of(cx, tup_ty));


@ -67,7 +67,7 @@ fn duplicate(bcx: block, vptr: ValueRef, vec_ty: ty::t) -> result {
let size = Add(bcx, fill, llsize_of(ccx, ccx.opaque_vec_type)); let size = Add(bcx, fill, llsize_of(ccx, ccx.opaque_vec_type));
let {bcx: bcx, val: newptr} = let {bcx: bcx, val: newptr} =
trans_shared_malloc(bcx, val_ty(vptr), size); trans_shared_malloc(bcx, val_ty(vptr), size);
let bcx = call_memmove(bcx, newptr, vptr, size).bcx; let mut bcx = call_memmove(bcx, newptr, vptr, size).bcx;
let unit_ty = ty::sequence_element_type(bcx.tcx(), vec_ty); let unit_ty = ty::sequence_element_type(bcx.tcx(), vec_ty);
Store(bcx, fill, GEPi(bcx, newptr, [0, abi::vec_elt_alloc])); Store(bcx, fill, GEPi(bcx, newptr, [0, abi::vec_elt_alloc]));
if ty::type_needs_drop(bcx.tcx(), unit_ty) { if ty::type_needs_drop(bcx.tcx(), unit_ty) {
@ -88,7 +88,8 @@ fn make_free_glue(bcx: block, vptr: ValueRef, vec_ty: ty::t) ->
fn trans_vec(bcx: block, args: [@ast::expr], id: ast::node_id, fn trans_vec(bcx: block, args: [@ast::expr], id: ast::node_id,
dest: dest) -> block { dest: dest) -> block {
let ccx = bcx.ccx(), bcx = bcx; let ccx = bcx.ccx();
let mut bcx = bcx;
if dest == base::ignore { if dest == base::ignore {
for arg in args { for arg in args {
bcx = base::trans_expr(bcx, arg, base::ignore); bcx = base::trans_expr(bcx, arg, base::ignore);
@ -96,16 +97,15 @@ fn trans_vec(bcx: block, args: [@ast::expr], id: ast::node_id,
ret bcx; ret bcx;
} }
let vec_ty = node_id_type(bcx, id); let vec_ty = node_id_type(bcx, id);
let {bcx: bcx, let mut {bcx: bcx,
val: vptr, val: vptr,
unit_ty: unit_ty, unit_ty: unit_ty,
llunitty: llunitty} = llunitty: llunitty} = alloc(bcx, vec_ty, args.len());
alloc(bcx, vec_ty, args.len());
add_clean_free(bcx, vptr, true); add_clean_free(bcx, vptr, true);
// Store the individual elements. // Store the individual elements.
let dataptr = get_dataptr(bcx, vptr, llunitty); let dataptr = get_dataptr(bcx, vptr, llunitty);
let i = 0u, temp_cleanups = [vptr]; let mut i = 0u, temp_cleanups = [vptr];
for e in args { for e in args {
let lleltptr = InBoundsGEP(bcx, dataptr, [C_uint(ccx, i)]); let lleltptr = InBoundsGEP(bcx, dataptr, [C_uint(ccx, i)]);
bcx = base::trans_expr_save_in(bcx, e, lleltptr); bcx = base::trans_expr_save_in(bcx, e, lleltptr);
@ -145,7 +145,7 @@ fn trans_append(bcx: block, vec_ty: ty::t, lhsptr: ValueRef,
let self_append = ICmp(bcx, lib::llvm::IntEQ, lhs, rhs); let self_append = ICmp(bcx, lib::llvm::IntEQ, lhs, rhs);
let lfill = get_fill(bcx, lhs); let lfill = get_fill(bcx, lhs);
let rfill = get_fill(bcx, rhs); let rfill = get_fill(bcx, rhs);
let new_fill = Add(bcx, lfill, rfill); let mut new_fill = Add(bcx, lfill, rfill);
if strings { new_fill = Sub(bcx, new_fill, C_int(ccx, 1)); } if strings { new_fill = Sub(bcx, new_fill, C_int(ccx, 1)); }
let opaque_lhs = PointerCast(bcx, lhsptr, let opaque_lhs = PointerCast(bcx, lhsptr,
T_ptr(T_ptr(ccx.opaque_vec_type))); T_ptr(T_ptr(ccx.opaque_vec_type)));
@ -156,7 +156,7 @@ fn trans_append(bcx: block, vec_ty: ty::t, lhsptr: ValueRef,
let rhs = Select(bcx, self_append, lhs, rhs); let rhs = Select(bcx, self_append, lhs, rhs);
let lhs_data = get_dataptr(bcx, lhs, llunitty); let lhs_data = get_dataptr(bcx, lhs, llunitty);
let lhs_off = lfill; let mut lhs_off = lfill;
if strings { lhs_off = Sub(bcx, lhs_off, C_int(ccx, 1)); } if strings { lhs_off = Sub(bcx, lhs_off, C_int(ccx, 1)); }
let write_ptr = pointer_add(bcx, lhs_data, lhs_off); let write_ptr = pointer_add(bcx, lhs_data, lhs_off);
let write_ptr_ptr = do_spill_noroot(bcx, write_ptr); let write_ptr_ptr = do_spill_noroot(bcx, write_ptr);
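In trans_append above, the combined fill is the sum of both fills with one byte subtracted for strings, and the write offset into the left vector is likewise pulled back by one for strings, presumably so the copy starts on top of the terminating NUL. The arithmetic in isolation, as a sketch:

    // Total fill of lhs ++ rhs; strings share a single terminator byte.
    fn appended_fill(lhs_fill: usize, rhs_fill: usize, is_str: bool) -> usize {
        let fill = lhs_fill + rhs_fill;
        if is_str { fill - 1 } else { fill }
    }

    // Byte offset inside lhs at which the copied rhs elements begin.
    fn append_write_offset(lhs_fill: usize, is_str: bool) -> usize {
        if is_str { lhs_fill - 1 } else { lhs_fill }
    }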
@ -174,8 +174,8 @@ fn trans_append_literal(bcx: block, vptrptr: ValueRef, vec_ty: ty::t,
vals: [@ast::expr]) -> block { vals: [@ast::expr]) -> block {
let ccx = bcx.ccx(); let ccx = bcx.ccx();
let elt_ty = ty::sequence_element_type(bcx.tcx(), vec_ty); let elt_ty = ty::sequence_element_type(bcx.tcx(), vec_ty);
let ti = none; let mut ti = none;
let {bcx: bcx, val: td} = get_tydesc(bcx, elt_ty, ti); let mut {bcx: bcx, val: td} = get_tydesc(bcx, elt_ty, ti);
base::lazily_emit_tydesc_glue(ccx, abi::tydesc_field_take_glue, ti); base::lazily_emit_tydesc_glue(ccx, abi::tydesc_field_take_glue, ti);
let opaque_v = PointerCast(bcx, vptrptr, let opaque_v = PointerCast(bcx, vptrptr,
T_ptr(T_ptr(ccx.opaque_vec_type))); T_ptr(T_ptr(ccx.opaque_vec_type)));
@ -206,7 +206,7 @@ fn trans_add(bcx: block, vec_ty: ty::t, lhs: ValueRef,
let lhs_fill = get_fill(bcx, lhs); let lhs_fill = get_fill(bcx, lhs);
let rhs_fill = get_fill(bcx, rhs); let rhs_fill = get_fill(bcx, rhs);
let new_fill = Add(bcx, lhs_fill, rhs_fill); let new_fill = Add(bcx, lhs_fill, rhs_fill);
let {bcx: bcx, val: new_vec_ptr} = alloc_raw(bcx, new_fill, new_fill); let mut {bcx: bcx, val: new_vec_ptr} = alloc_raw(bcx, new_fill, new_fill);
new_vec_ptr = PointerCast(bcx, new_vec_ptr, T_ptr(T_vec(ccx, llunitty))); new_vec_ptr = PointerCast(bcx, new_vec_ptr, T_ptr(T_vec(ccx, llunitty)));
let write_ptr_ptr = do_spill_noroot let write_ptr_ptr = do_spill_noroot
@ -223,7 +223,7 @@ fn trans_add(bcx: block, vec_ty: ty::t, lhs: ValueRef,
}; };
let bcx = iter_vec_raw(bcx, lhs, vec_ty, lhs_fill, copy_fn); let bcx = iter_vec_raw(bcx, lhs, vec_ty, lhs_fill, copy_fn);
bcx = iter_vec_raw(bcx, rhs, vec_ty, rhs_fill, copy_fn); let bcx = iter_vec_raw(bcx, rhs, vec_ty, rhs_fill, copy_fn);
ret base::store_in_dest(bcx, new_vec_ptr, dest); ret base::store_in_dest(bcx, new_vec_ptr, dest);
} }
@ -253,7 +253,7 @@ fn iter_vec_raw(bcx: block, vptr: ValueRef, vec_ty: ty::t,
let body_cx = sub_block(header_cx, "iter_vec_loop_body"); let body_cx = sub_block(header_cx, "iter_vec_loop_body");
let next_cx = sub_block(header_cx, "iter_vec_next"); let next_cx = sub_block(header_cx, "iter_vec_next");
CondBr(header_cx, not_yet_at_end, body_cx.llbb, next_cx.llbb); CondBr(header_cx, not_yet_at_end, body_cx.llbb, next_cx.llbb);
body_cx = f(body_cx, data_ptr, unit_ty); let body_cx = f(body_cx, data_ptr, unit_ty);
AddIncomingToPhi(data_ptr, InBoundsGEP(body_cx, data_ptr, AddIncomingToPhi(data_ptr, InBoundsGEP(body_cx, data_ptr,
[C_int(ccx, 1)]), body_cx.llbb); [C_int(ccx, 1)]), body_cx.llbb);
Br(body_cx, header_cx.llbb); Br(body_cx, header_cx.llbb);


@ -19,7 +19,7 @@ fn type_of_explicit_args(cx: @crate_ctxt, inputs: [ty::arg]) -> [TypeRef] {
} }
fn type_of_fn(cx: @crate_ctxt, inputs: [ty::arg], output: ty::t) -> TypeRef { fn type_of_fn(cx: @crate_ctxt, inputs: [ty::arg], output: ty::t) -> TypeRef {
let atys: [TypeRef] = []; let mut atys: [TypeRef] = [];
// Arg 0: Output pointer. // Arg 0: Output pointer.
atys += [T_ptr(type_of(cx, output))]; atys += [T_ptr(type_of(cx, output))];
@ -57,7 +57,7 @@ fn type_of(cx: @crate_ctxt, t: ty::t) -> TypeRef {
ty::ty_ptr(mt) { T_ptr(type_of(cx, mt.ty)) } ty::ty_ptr(mt) { T_ptr(type_of(cx, mt.ty)) }
ty::ty_rptr(_, mt) { T_ptr(type_of(cx, mt.ty)) } ty::ty_rptr(_, mt) { T_ptr(type_of(cx, mt.ty)) }
ty::ty_rec(fields) { ty::ty_rec(fields) {
let tys: [TypeRef] = []; let mut tys: [TypeRef] = [];
for f: ty::field in fields { for f: ty::field in fields {
let mt_ty = f.mt.ty; let mt_ty = f.mt.ty;
tys += [type_of(cx, mt_ty)]; tys += [type_of(cx, mt_ty)];
@ -73,7 +73,7 @@ fn type_of(cx: @crate_ctxt, t: ty::t) -> TypeRef {
ty::ty_param(_, _) { T_typaram(cx.tn) } ty::ty_param(_, _) { T_typaram(cx.tn) }
ty::ty_type { T_ptr(cx.tydesc_type) } ty::ty_type { T_ptr(cx.tydesc_type) }
ty::ty_tup(elts) { ty::ty_tup(elts) {
let tys = []; let mut tys = [];
for elt in elts { for elt in elts {
tys += [type_of(cx, elt)]; tys += [type_of(cx, elt)];
} }
@ -82,14 +82,12 @@ fn type_of(cx: @crate_ctxt, t: ty::t) -> TypeRef {
ty::ty_opaque_closure_ptr(_) { T_opaque_box_ptr(cx) } ty::ty_opaque_closure_ptr(_) { T_opaque_box_ptr(cx) }
ty::ty_constr(subt,_) { type_of(cx, subt) } ty::ty_constr(subt,_) { type_of(cx, subt) }
ty::ty_class(did, _) { ty::ty_class(did, _) {
let tys: [TypeRef] = [];
// only instance vars are record fields at runtime // only instance vars are record fields at runtime
let fields = lookup_class_fields(cx.tcx, did); let fields = lookup_class_fields(cx.tcx, did);
for f in fields { let tys = vec::map(fields) {|f|
let t = ty::lookup_field_type(cx.tcx, did, f.id); let t = ty::lookup_field_type(cx.tcx, did, f.id);
let fty = type_of(cx, t); type_of(cx, t)
tys += [fty]; };
}
T_struct(tys) T_struct(tys)
} }
ty::ty_self(_) { cx.tcx.sess.unimpl("type_of: ty_self \ ty::ty_self(_) { cx.tcx.sess.unimpl("type_of: ty_self \
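The ty_class hunk above replaces a push-in-a-loop accumulation with vec::map, which also avoids a mutable accumulator. The same refactor expressed in current Rust, with placeholder Field/TypeRef types standing in for the real ones:

    #[derive(Clone, Copy)]
    struct TypeRef(u32);
    struct Field { ty: TypeRef }

    fn type_of(t: TypeRef) -> TypeRef { t } // stand-in for the real lowering

    // before: mutable accumulator filled by a loop
    fn field_types_loop(fields: &[Field]) -> Vec<TypeRef> {
        let mut tys = Vec::new();
        for f in fields {
            tys.push(type_of(f.ty));
        }
        tys
    }

    // after: the same result as a map, no mutable binding needed
    fn field_types_map(fields: &[Field]) -> Vec<TypeRef> {
        fields.iter().map(|f| type_of(f.ty)).collect()
    }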


@ -86,7 +86,7 @@ fn type_uses_for(ccx: @crate_ctxt, fn_id: def_id, n_tps: uint)
} }
fn type_needs(cx: ctx, use: uint, ty: ty::t) { fn type_needs(cx: ctx, use: uint, ty: ty::t) {
let done = true; let mut done = true;
// Optimization -- don't descend type if all params already have this use // Optimization -- don't descend type if all params already have this use
for u in cx.uses { if u & use != use { done = false } } for u in cx.uses { if u & use != use { done = false } }
if !done { type_needs_inner(cx, use, ty); } if !done { type_needs_inner(cx, use, ty); }
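type_needs, shown above, only walks the type when at least one type parameter is still missing the requested use bits. A compact sketch of that early-out with uses modeled as plain bit masks; Ty and the inner walk are stubs:

    struct Ty;

    fn type_needs_inner(_uses: &mut [u32], _wanted: u32, _ty: &Ty) {
        // would descend into the type and mark parameter uses
    }

    fn type_needs(uses: &mut [u32], wanted: u32, ty: &Ty) {
        // optimization: skip the traversal when every parameter already has `wanted`
        let done = uses.iter().all(|u| u & wanted == wanted);
        if !done {
            type_needs_inner(uses, wanted, ty);
        }
    }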


@ -12,7 +12,7 @@ fn trans_uniq(bcx: block, contents: @ast::expr,
let uniq_ty = node_id_type(bcx, node_id); let uniq_ty = node_id_type(bcx, node_id);
let {bcx, val: llptr} = alloc_uniq(bcx, uniq_ty); let {bcx, val: llptr} = alloc_uniq(bcx, uniq_ty);
add_clean_free(bcx, llptr, true); add_clean_free(bcx, llptr, true);
bcx = trans_expr_save_in(bcx, contents, llptr); let bcx = trans_expr_save_in(bcx, contents, llptr);
revoke_clean(bcx, llptr); revoke_clean(bcx, llptr);
ret store_in_dest(bcx, llptr, dest); ret store_in_dest(bcx, llptr, dest);
} }


@ -35,8 +35,8 @@ fn def_id_to_str(d: def_id) -> str {
} }
fn comma_str(args: [@constr_arg_use]) -> str { fn comma_str(args: [@constr_arg_use]) -> str {
let rslt = ""; let mut rslt = "";
let comma = false; let mut comma = false;
for a: @constr_arg_use in args { for a: @constr_arg_use in args {
if comma { rslt += ", "; } else { comma = true; } if comma { rslt += ", "; } else { comma = true; }
alt a.node { alt a.node {
@ -64,8 +64,8 @@ fn constraint_to_str(tcx: ty::ctxt, c: sp_constr) -> str {
} }
fn tritv_to_str(fcx: fn_ctxt, v: tritv::t) -> str { fn tritv_to_str(fcx: fn_ctxt, v: tritv::t) -> str {
let s = ""; let mut s = "";
let comma = false; let mut comma = false;
for p: norm_constraint in constraints(fcx) { for p: norm_constraint in constraints(fcx) {
alt tritv_get(v, p.bit_num) { alt tritv_get(v, p.bit_num) {
dont_care { } dont_care { }
@ -101,7 +101,7 @@ fn log_tritv_err(fcx: fn_ctxt, v: tritv::t) {
} }
fn tos(v: [uint]) -> str { fn tos(v: [uint]) -> str {
let rslt = ""; let mut rslt = "";
for i: uint in v { for i: uint in v {
if i == 0u { if i == 0u {
rslt += "0"; rslt += "0";
@ -524,7 +524,7 @@ fn norm_a_constraint(id: def_id, c: constraint) -> [norm_constraint] {
ret [{bit_num: n, c: respan(sp, ninit(id.node, i))}]; ret [{bit_num: n, c: respan(sp, ninit(id.node, i))}];
} }
cpred(p, descs) { cpred(p, descs) {
let rslt: [norm_constraint] = []; let mut rslt: [norm_constraint] = [];
for pd: pred_args in *descs { for pd: pred_args in *descs {
rslt += rslt +=
[{bit_num: pd.node.bit_num, [{bit_num: pd.node.bit_num,
@ -539,7 +539,7 @@ fn norm_a_constraint(id: def_id, c: constraint) -> [norm_constraint] {
// Tried to write this as an iterator, but I got a // Tried to write this as an iterator, but I got a
// non-exhaustive match in trans. // non-exhaustive match in trans.
fn constraints(fcx: fn_ctxt) -> [norm_constraint] { fn constraints(fcx: fn_ctxt) -> [norm_constraint] {
let rslt: [norm_constraint] = []; let mut rslt: [norm_constraint] = [];
fcx.enclosing.constrs.items {|key, val| fcx.enclosing.constrs.items {|key, val|
rslt += norm_a_constraint(key, val); rslt += norm_a_constraint(key, val);
}; };
@ -604,7 +604,7 @@ fn expr_to_constr_arg(tcx: ty::ctxt, e: @expr) -> @constr_arg_use {
fn exprs_to_constr_args(tcx: ty::ctxt, args: [@expr]) -> [@constr_arg_use] { fn exprs_to_constr_args(tcx: ty::ctxt, args: [@expr]) -> [@constr_arg_use] {
let f = bind expr_to_constr_arg(tcx, _); let f = bind expr_to_constr_arg(tcx, _);
let rslt: [@constr_arg_use] = []; let mut rslt: [@constr_arg_use] = [];
for e: @expr in args { rslt += [f(e)]; } for e: @expr in args { rslt += [f(e)]; }
rslt rslt
} }
@ -638,7 +638,7 @@ fn pred_args_to_str(p: pred_args) -> str {
fn substitute_constr_args(cx: ty::ctxt, actuals: [@expr], c: @ty::constr) -> fn substitute_constr_args(cx: ty::ctxt, actuals: [@expr], c: @ty::constr) ->
tsconstr { tsconstr {
let rslt: [@constr_arg_use] = []; let mut rslt: [@constr_arg_use] = [];
for a: @constr_arg in c.node.args { for a: @constr_arg in c.node.args {
rslt += [substitute_arg(cx, actuals, a)]; rslt += [substitute_arg(cx, actuals, a)];
} }
@ -663,7 +663,7 @@ fn substitute_arg(cx: ty::ctxt, actuals: [@expr], a: @constr_arg) ->
fn pred_args_matches(pattern: [constr_arg_general_<inst>], desc: pred_args) -> fn pred_args_matches(pattern: [constr_arg_general_<inst>], desc: pred_args) ->
bool { bool {
let i = 0u; let mut i = 0u;
for c: @constr_arg_use in desc.node.args { for c: @constr_arg_use in desc.node.args {
let n = pattern[i]; let n = pattern[i];
alt c.node { alt c.node {
@ -700,7 +700,7 @@ type subst = [{from: inst, to: inst}];
fn find_instances(_fcx: fn_ctxt, subst: subst, c: constraint) -> fn find_instances(_fcx: fn_ctxt, subst: subst, c: constraint) ->
[{from: uint, to: uint}] { [{from: uint, to: uint}] {
let rslt = []; let mut rslt = [];
if vec::len(subst) == 0u { ret rslt; } if vec::len(subst) == 0u { ret rslt; }
alt c { alt c {
@ -733,7 +733,7 @@ fn find_in_subst_bool(s: subst, id: node_id) -> bool {
} }
fn insts_to_str(stuff: [constr_arg_general_<inst>]) -> str { fn insts_to_str(stuff: [constr_arg_general_<inst>]) -> str {
let rslt = "<"; let mut rslt = "<";
for i: constr_arg_general_<inst> in stuff { for i: constr_arg_general_<inst> in stuff {
rslt += rslt +=
" " + " " +
@ -748,7 +748,7 @@ fn insts_to_str(stuff: [constr_arg_general_<inst>]) -> str {
} }
fn replace(subst: subst, d: pred_args) -> [constr_arg_general_<inst>] { fn replace(subst: subst, d: pred_args) -> [constr_arg_general_<inst>] {
let rslt: [constr_arg_general_<inst>] = []; let mut rslt: [constr_arg_general_<inst>] = [];
for c: @constr_arg_use in d.node.args { for c: @constr_arg_use in d.node.args {
alt c.node { alt c.node {
carg_ident(p) { carg_ident(p) {
@ -844,7 +844,7 @@ fn copy_in_poststate(fcx: fn_ctxt, post: poststate, dest: inst, src: inst,
fn copy_in_poststate_two(fcx: fn_ctxt, src_post: poststate, fn copy_in_poststate_two(fcx: fn_ctxt, src_post: poststate,
target_post: poststate, dest: inst, src: inst, target_post: poststate, dest: inst, src: inst,
ty: oper_type) { ty: oper_type) {
let subst; let mut subst;
alt ty { alt ty {
oper_swap { subst = [{from: dest, to: src}, {from: src, to: dest}]; } oper_swap { subst = [{from: dest, to: src}, {from: src, to: dest}]; }
oper_assign_op { oper_assign_op {
@ -911,7 +911,7 @@ fn forget_in_poststate(fcx: fn_ctxt, p: poststate, dead_v: node_id) -> bool {
// In the poststate given by parent_exp, clear the bits // In the poststate given by parent_exp, clear the bits
// for any constraints mentioning dead_v // for any constraints mentioning dead_v
let d = local_node_id_to_local_def_id(fcx, dead_v); let d = local_node_id_to_local_def_id(fcx, dead_v);
let changed = false; let mut changed = false;
alt d { alt d {
some(d_id) { some(d_id) {
for c: norm_constraint in constraints(fcx) { for c: norm_constraint in constraints(fcx) {
@ -930,7 +930,7 @@ fn forget_in_poststate_still_init(fcx: fn_ctxt, p: poststate, dead_v: node_id)
// In the poststate given by parent_exp, clear the bits // In the poststate given by parent_exp, clear the bits
// for any constraints mentioning dead_v // for any constraints mentioning dead_v
let d = local_node_id_to_local_def_id(fcx, dead_v); let d = local_node_id_to_local_def_id(fcx, dead_v);
let changed = false; let mut changed = false;
alt d { alt d {
some(d_id) { some(d_id) {
for c: norm_constraint in constraints(fcx) { for c: norm_constraint in constraints(fcx) {
@ -1013,7 +1013,7 @@ fn do_nothing<T>(_fk: visit::fn_kind, _decl: fn_decl, _body: blk,
fn args_to_constr_args(tcx: ty::ctxt, args: [arg], fn args_to_constr_args(tcx: ty::ctxt, args: [arg],
indices: [@sp_constr_arg<uint>]) -> [@constr_arg_use] { indices: [@sp_constr_arg<uint>]) -> [@constr_arg_use] {
let actuals: [@constr_arg_use] = []; let mut actuals: [@constr_arg_use] = [];
let num_args = vec::len(args); let num_args = vec::len(args);
for a: @sp_constr_arg<uint> in indices { for a: @sp_constr_arg<uint> in indices {
actuals += actuals +=
@ -1052,7 +1052,7 @@ fn ast_constr_to_sp_constr(tcx: ty::ctxt, args: [arg], c: @constr) ->
type binding = {lhs: [inst], rhs: option<initializer>}; type binding = {lhs: [inst], rhs: option<initializer>};
fn local_to_bindings(tcx: ty::ctxt, loc: @local) -> binding { fn local_to_bindings(tcx: ty::ctxt, loc: @local) -> binding {
let lhs = []; let mut lhs = [];
pat_bindings(tcx.def_map, loc.node.pat) {|p_id, _s, name| pat_bindings(tcx.def_map, loc.node.pat) {|p_id, _s, name|
lhs += [{ident: path_to_ident(name), node: p_id}]; lhs += [{ident: path_to_ident(name), node: p_id}];
}; };
@ -1060,7 +1060,7 @@ fn local_to_bindings(tcx: ty::ctxt, loc: @local) -> binding {
} }
fn locals_to_bindings(tcx: ty::ctxt, locals: [@local]) -> [binding] { fn locals_to_bindings(tcx: ty::ctxt, locals: [@local]) -> [binding] {
let rslt = []; let mut rslt = [];
for loc in locals { rslt += [local_to_bindings(tcx, loc)]; } for loc in locals { rslt += [local_to_bindings(tcx, loc)]; }
ret rslt; ret rslt;
} }
@ -1070,7 +1070,7 @@ fn callee_modes(fcx: fn_ctxt, callee: node_id) -> [mode] {
ty::node_id_to_type(fcx.ccx.tcx, callee)); ty::node_id_to_type(fcx.ccx.tcx, callee));
alt ty::get(ty).struct { alt ty::get(ty).struct {
ty::ty_fn({inputs: args, _}) { ty::ty_fn({inputs: args, _}) {
let modes = []; let mut modes = [];
for arg: ty::arg in args { modes += [arg.mode]; } for arg: ty::arg in args { modes += [arg.mode]; }
ret modes; ret modes;
} }
@ -1092,8 +1092,8 @@ fn callee_arg_init_ops(fcx: fn_ctxt, callee: node_id) -> [init_op] {
} }
fn anon_bindings(ops: [init_op], es: [@expr]) -> [binding] { fn anon_bindings(ops: [init_op], es: [@expr]) -> [binding] {
let bindings: [binding] = []; let mut bindings: [binding] = [];
let i = 0; let mut i = 0;
for op: init_op in ops { for op: init_op in ops {
bindings += [{lhs: [], rhs: some({op: op, expr: es[i]})}]; bindings += [{lhs: [], rhs: some({op: op, expr: es[i]})}];
i += 1; i += 1;
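anon_bindings, at the end of the file above, wraps each call argument in a binding with an empty left-hand side so that arguments flow through the same machinery as let initializers. The pairing itself, sketched with simplified stand-in types:

    #[derive(Clone, Copy)]
    enum InitOp { Assign, Move }

    struct Binding<'a> {
        lhs: Vec<u32>,                  // bound node ids; empty for call arguments
        rhs: Option<(InitOp, &'a str)>, // the initializing expression, if any
    }

    fn anon_bindings<'a>(ops: &[InitOp], exprs: &[&'a str]) -> Vec<Binding<'a>> {
        ops.iter()
            .zip(exprs)
            .map(|(&op, &e)| Binding { lhs: Vec::new(), rhs: Some((op, e)) })
            .collect()
    }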


@ -55,7 +55,7 @@ fn seq_trit(u: trit, v: trit) -> trit {
// 1 in q and 0 in p, it's 1 in the result; however, // 1 in q and 0 in p, it's 1 in the result; however,
// if it's 0 in q and 1 in p, it's 0 in the result // if it's 0 in q and 1 in p, it's 0 in the result
fn seq_tritv(p: postcond, q: postcond) { fn seq_tritv(p: postcond, q: postcond) {
let i = 0u; let mut i = 0u;
assert (p.nbits == q.nbits); assert (p.nbits == q.nbits);
while i < p.nbits { while i < p.nbits {
tritv_set(i, p, seq_trit(tritv_get(p, i), tritv_get(q, i))); tritv_set(i, p, seq_trit(tritv_get(p, i), tritv_get(q, i)));
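seq_tritv sequences two postconditions bit by bit: where the second one is definite it wins, and where it is don't-care the first shows through, which matches the comment above about a 1 in q overriding a 0 in p and vice versa. A sketch of that three-valued override (modern Rust, names invented here):

    #[derive(Clone, Copy, PartialEq, Debug)]
    enum Trit { DontCare, True, False }

    // q is the later postcondition; it overrides p except where it says nothing.
    fn seq_trit(p: Trit, q: Trit) -> Trit {
        match q {
            Trit::DontCare => p,
            definite => definite,
        }
    }

    fn seq_tritv(p: &mut [Trit], q: &[Trit]) {
        assert_eq!(p.len(), q.len());
        for (pi, &qi) in p.iter_mut().zip(q) {
            *pi = seq_trit(*pi, qi);
        }
    }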
@ -146,7 +146,7 @@ fn relax_precond_block_inner(b: blk, cx: relax_ctxt,
fn relax_precond_block(fcx: fn_ctxt, i: node_id, b: blk) { fn relax_precond_block(fcx: fn_ctxt, i: node_id, b: blk) {
let cx = {fcx: fcx, i: i}; let cx = {fcx: fcx, i: i};
let visitor = visit::default_visitor::<relax_ctxt>(); let visitor = visit::default_visitor::<relax_ctxt>();
visitor = let visitor =
@{visit_block: relax_precond_block_inner, @{visit_block: relax_precond_block_inner,
visit_expr: relax_precond_expr, visit_expr: relax_precond_expr,
visit_stmt: relax_precond_stmt, visit_stmt: relax_precond_stmt,


@ -49,7 +49,7 @@ fn check_states_expr(e: @expr, fcx: fn_ctxt, v: visit::vt<fn_ctxt>) {
*/ */
if !implies(pres, prec) { if !implies(pres, prec) {
let s = ""; let mut s = "";
let diff = first_difference_string(fcx, prec, pres); let diff = first_difference_string(fcx, prec, pres);
s += s +=
"unsatisfied precondition constraint (for example, " + diff + "unsatisfied precondition constraint (for example, " + diff +
@ -79,7 +79,7 @@ fn check_states_stmt(s: @stmt, fcx: fn_ctxt, v: visit::vt<fn_ctxt>) {
log_tritv(fcx, pres); log_tritv(fcx, pres);
if !implies(pres, prec) { if !implies(pres, prec) {
let ss = ""; let mut ss = "";
let diff = first_difference_string(fcx, prec, pres); let diff = first_difference_string(fcx, prec, pres);
ss += ss +=
"unsatisfied precondition constraint (for example, " + diff + "unsatisfied precondition constraint (for example, " + diff +
@ -182,13 +182,13 @@ fn check_crate(cx: ty::ctxt, crate: @crate) {
/* Compute the pre and postcondition for every subexpression */ /* Compute the pre and postcondition for every subexpression */
let vtor = visit::default_visitor::<crate_ctxt>(); let vtor = visit::default_visitor::<crate_ctxt>();
vtor = @{visit_fn: fn_pre_post with *vtor}; let vtor = @{visit_fn: fn_pre_post with *vtor};
visit::visit_crate(*crate, ccx, visit::mk_vt(vtor)); visit::visit_crate(*crate, ccx, visit::mk_vt(vtor));
/* Check the pre- and postcondition against the pre- and poststate /* Check the pre- and postcondition against the pre- and poststate
for every expression */ for every expression */
let vtor = visit::default_visitor::<crate_ctxt>(); let vtor = visit::default_visitor::<crate_ctxt>();
vtor = @{visit_fn: fn_states with *vtor}; let vtor = @{visit_fn: fn_states with *vtor};
visit::visit_crate(*crate, ccx, visit::mk_vt(vtor)); visit::visit_crate(*crate, ccx, visit::mk_vt(vtor));
} }
// //
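check_crate above builds a default visitor and overrides a single callback with record-update syntax; the new code shadows the binding instead of mutating it, which is the pattern this commit applies throughout. The same shadow-and-update idiom sketched in current Rust, with a toy visitor type in place of the real one:

    struct Visitor {
        visit_fn: fn(&str),
        visit_expr: fn(&str),
    }

    fn default_visitor() -> Visitor {
        fn ignore(_: &str) {}
        Visitor { visit_fn: ignore, visit_expr: ignore }
    }

    fn fn_pre_post(name: &str) {
        println!("computing pre/post for {name}");
    }

    fn make_visitor() -> Visitor {
        let vtor = default_visitor();
        // override one field, shadowing the binding rather than mutating it
        let vtor = Visitor { visit_fn: fn_pre_post, ..vtor };
        vtor
    }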


@ -48,8 +48,7 @@ fn find_locals(tcx: ty::ctxt,
id: node_id) -> ctxt { id: node_id) -> ctxt {
let cx: ctxt = {cs: @mutable [], tcx: tcx}; let cx: ctxt = {cs: @mutable [], tcx: tcx};
let visitor = visit::default_visitor::<ctxt>(); let visitor = visit::default_visitor::<ctxt>();
let visitor =
visitor =
@{visit_local: collect_local, @{visit_local: collect_local,
visit_expr: collect_pred, visit_expr: collect_pred,
visit_fn: bind do_nothing(_, _, _, _, _, _, _) visit_fn: bind do_nothing(_, _, _, _, _, _, _)
@ -100,7 +99,7 @@ fn mk_fn_info(ccx: crate_ctxt,
id: node_id) { id: node_id) {
let name = visit::name_of_fn(fk); let name = visit::name_of_fn(fk);
let res_map = new_def_hash::<constraint>(); let res_map = new_def_hash::<constraint>();
let next: uint = 0u; let mut next: uint = 0u;
let cx: ctxt = find_locals(ccx.tcx, fk, f_decl, f_body, f_sp, id); let cx: ctxt = find_locals(ccx.tcx, fk, f_decl, f_body, f_sp, id);
/* now we have to add bit nums for both the constraints /* now we have to add bit nums for both the constraints
@ -111,7 +110,7 @@ fn mk_fn_info(ccx: crate_ctxt,
} }
/* if this function has any constraints, instantiate them to the /* if this function has any constraints, instantiate them to the
argument names and add them */ argument names and add them */
let sc; let mut sc;
for c: @constr in f_decl.constraints { for c: @constr in f_decl.constraints {
sc = ast_constr_to_sp_constr(cx.tcx, f_decl.inputs, c); sc = ast_constr_to_sp_constr(cx.tcx, f_decl.inputs, c);
next = add_constraint(cx.tcx, sc, next, res_map); next = add_constraint(cx.tcx, sc, next, res_map);


@ -299,7 +299,7 @@ fn find_pre_post_expr(fcx: fn_ctxt, e: @expr) {
expr_call(operator, operands, _) { expr_call(operator, operands, _) {
/* copy */ /* copy */
let args = operands; let mut args = operands;
args += [operator]; args += [operator];
find_pre_post_exprs(fcx, args, e.id); find_pre_post_exprs(fcx, args, e.id);
@ -355,7 +355,7 @@ fn find_pre_post_expr(fcx: fn_ctxt, e: @expr) {
set_pre_and_post(fcx.ccx, e.id, p.precondition, p.postcondition); set_pre_and_post(fcx.ccx, e.id, p.precondition, p.postcondition);
} }
expr_rec(fields, maybe_base) { expr_rec(fields, maybe_base) {
let es = field_exprs(fields); let mut es = field_exprs(fields);
alt maybe_base { none {/* no-op */ } some(b) { es += [b]; } } alt maybe_base { none {/* no-op */ } some(b) { es += [b]; } }
find_pre_post_exprs(fcx, es, e.id); find_pre_post_exprs(fcx, es, e.id);
} }
@ -433,7 +433,7 @@ fn find_pre_post_expr(fcx: fn_ctxt, e: @expr) {
expr_do_while(body, test) { expr_do_while(body, test) {
find_pre_post_block(fcx, body); find_pre_post_block(fcx, body);
find_pre_post_expr(fcx, test); find_pre_post_expr(fcx, test);
let loop_postcond = let mut loop_postcond =
seq_postconds(fcx, seq_postconds(fcx,
[block_postcond(fcx.ccx, body), [block_postcond(fcx.ccx, body),
expr_postcond(fcx.ccx, test)]); expr_postcond(fcx.ccx, test)]);
@ -452,7 +452,7 @@ fn find_pre_post_expr(fcx: fn_ctxt, e: @expr) {
expr_loop(body) { expr_loop(body) {
find_pre_post_block(fcx, body); find_pre_post_block(fcx, body);
/* Infinite loop: if control passes it, everything is true. */ /* Infinite loop: if control passes it, everything is true. */
let loop_postcond = false_postcond(num_local_vars); let mut loop_postcond = false_postcond(num_local_vars);
/* Conservative approximation: if the body has any nonlocal exits, /* Conservative approximation: if the body has any nonlocal exits,
the poststate is blank since we don't know what parts of it the poststate is blank since we don't know what parts of it
execute. */ execute. */
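For an infinite loop, the comment above notes that any code after it is unreachable, so the loop's postcondition can claim every constraint holds; if the body can break or continue out, that is weakened to a blank postcondition. A sketch of that choice, with the postcondition reduced to plain booleans for illustration:

    // Postcondition after `loop { ... }`, over `n` typestate constraints:
    // code after an infinite loop is unreachable, so every constraint may be
    // assumed to hold; if the body can break out, assume nothing instead.
    fn infinite_loop_postcond(n: usize, has_nonlocal_exits: bool) -> Vec<bool> {
        if has_nonlocal_exits {
            vec![false; n]
        } else {
            vec![true; n]
        }
    }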
@ -476,7 +476,7 @@ fn find_pre_post_expr(fcx: fn_ctxt, e: @expr) {
find_pre_post_block(fcx, an_alt.body); find_pre_post_block(fcx, an_alt.body);
ret block_pp(fcx.ccx, an_alt.body); ret block_pp(fcx.ccx, an_alt.body);
} }
let alt_pps = []; let mut alt_pps = [];
for a: arm in alts { alt_pps += [do_an_alt(fcx, a)]; } for a: arm in alts { alt_pps += [do_an_alt(fcx, a)]; }
fn combine_pp(antec: pre_and_post, fcx: fn_ctxt, &&pp: pre_and_post, fn combine_pp(antec: pre_and_post, fcx: fn_ctxt, &&pp: pre_and_post,
&&next: pre_and_post) -> pre_and_post { &&next: pre_and_post) -> pre_and_post {
@ -499,7 +499,7 @@ fn find_pre_post_expr(fcx: fn_ctxt, e: @expr) {
copy_pre_post(fcx.ccx, e.id, operator); copy_pre_post(fcx.ccx, e.id, operator);
} }
expr_fail(maybe_val) { expr_fail(maybe_val) {
let prestate; let mut prestate;
alt maybe_val { alt maybe_val {
none { prestate = empty_prestate(num_local_vars); } none { prestate = empty_prestate(num_local_vars); }
some(fail_val) { some(fail_val) {
@ -534,10 +534,10 @@ fn find_pre_post_expr(fcx: fn_ctxt, e: @expr) {
expr_bind(operator, maybe_args) { expr_bind(operator, maybe_args) {
let args = []; let mut args = [];
let cmodes = callee_modes(fcx, operator.id); let mut cmodes = callee_modes(fcx, operator.id);
let modes = []; let mut modes = [];
let i = 0; let mut i = 0;
for expr_opt: option<@expr> in maybe_args { for expr_opt: option<@expr> in maybe_args {
alt expr_opt { alt expr_opt {
none {/* no-op */ } none {/* no-op */ }
@ -562,7 +562,7 @@ fn find_pre_post_stmt(fcx: fn_ctxt, s: stmt) {
stmt_decl(adecl, id) { stmt_decl(adecl, id) {
alt adecl.node { alt adecl.node {
decl_local(alocals) { decl_local(alocals) {
let e_pp; let mut e_pp;
let prev_pp = empty_pre_post(num_constraints(fcx.enclosing)); let prev_pp = empty_pre_post(num_constraints(fcx.enclosing));
for alocal in alocals { for alocal in alocals {
alt alocal.node.init { alt alocal.node.init {
@ -578,7 +578,7 @@ fn find_pre_post_stmt(fcx: fn_ctxt, s: stmt) {
initialized to the postcondition */ initialized to the postcondition */
copy_pre_post(fcx.ccx, id, an_init.expr); copy_pre_post(fcx.ccx, id, an_init.expr);
let p = none; let mut p = none;
alt an_init.expr.node { alt an_init.expr.node {
expr_path(_p) { p = some(_p); } expr_path(_p) { p = some(_p); }
_ { } _ { }
@ -676,7 +676,7 @@ fn find_pre_post_block(fcx: fn_ctxt, b: blk) {
let do_inner = bind do_inner_(fcx, _); let do_inner = bind do_inner_(fcx, _);
option::map::<@expr, ()>(b.node.expr, do_inner); option::map::<@expr, ()>(b.node.expr, do_inner);
let pps: [pre_and_post] = []; let mut pps: [pre_and_post] = [];
for s: @stmt in b.node.stmts { pps += [stmt_pp(fcx.ccx, *s)]; } for s: @stmt in b.node.stmts { pps += [stmt_pp(fcx.ccx, *s)]; }
alt b.node.expr { alt b.node.expr {
none {/* no-op */ } none {/* no-op */ }
@ -685,14 +685,14 @@ fn find_pre_post_block(fcx: fn_ctxt, b: blk) {
let block_precond = seq_preconds(fcx, pps); let block_precond = seq_preconds(fcx, pps);
let postconds = []; let mut postconds = [];
for pp: pre_and_post in pps { postconds += [get_post(pp)]; } for pp: pre_and_post in pps { postconds += [get_post(pp)]; }
/* A block may be empty, so this next line ensures that the postconds /* A block may be empty, so this next line ensures that the postconds
vector is non-empty. */ vector is non-empty. */
postconds += [block_precond]; postconds += [block_precond];
let block_postcond = empty_poststate(nv); let mut block_postcond = empty_poststate(nv);
/* conservative approximation */ /* conservative approximation */
if !has_nonlocal_exits(b) { if !has_nonlocal_exits(b) {


@ -65,8 +65,8 @@ fn handle_fail(fcx: fn_ctxt, pres:prestate, post:poststate) {
fn seq_states(fcx: fn_ctxt, pres: prestate, bindings: [binding]) -> fn seq_states(fcx: fn_ctxt, pres: prestate, bindings: [binding]) ->
{changed: bool, post: poststate} { {changed: bool, post: poststate} {
let changed = false; let mut changed = false;
let post = tritv_clone(pres); let mut post = tritv_clone(pres);
for b: binding in bindings { for b: binding in bindings {
alt b.rhs { alt b.rhs {
some(an_init) { some(an_init) {
@ -103,7 +103,7 @@ fn seq_states(fcx: fn_ctxt, pres: prestate, bindings: [binding]) ->
fn find_pre_post_state_sub(fcx: fn_ctxt, pres: prestate, e: @expr, fn find_pre_post_state_sub(fcx: fn_ctxt, pres: prestate, e: @expr,
parent: node_id, c: option<tsconstr>) -> bool { parent: node_id, c: option<tsconstr>) -> bool {
let changed = find_pre_post_state_expr(fcx, pres, e); let mut changed = find_pre_post_state_expr(fcx, pres, e);
changed = set_prestate_ann(fcx.ccx, parent, pres) || changed; changed = set_prestate_ann(fcx.ccx, parent, pres) || changed;
@ -120,7 +120,7 @@ fn find_pre_post_state_sub(fcx: fn_ctxt, pres: prestate, e: @expr,
fn find_pre_post_state_two(fcx: fn_ctxt, pres: prestate, lhs: @expr, fn find_pre_post_state_two(fcx: fn_ctxt, pres: prestate, lhs: @expr,
rhs: @expr, parent: node_id, ty: oper_type) -> rhs: @expr, parent: node_id, ty: oper_type) ->
bool { bool {
let changed = set_prestate_ann(fcx.ccx, parent, pres); let mut changed = set_prestate_ann(fcx.ccx, parent, pres);
changed = find_pre_post_state_expr(fcx, pres, lhs) || changed; changed = find_pre_post_state_expr(fcx, pres, lhs) || changed;
changed = changed =
find_pre_post_state_expr(fcx, expr_poststate(fcx.ccx, lhs), rhs) || find_pre_post_state_expr(fcx, expr_poststate(fcx.ccx, lhs), rhs) ||
@ -183,7 +183,7 @@ fn find_pre_post_state_two(fcx: fn_ctxt, pres: prestate, lhs: @expr,
fn find_pre_post_state_call(fcx: fn_ctxt, pres: prestate, a: @expr, fn find_pre_post_state_call(fcx: fn_ctxt, pres: prestate, a: @expr,
id: node_id, ops: [init_op], bs: [@expr], id: node_id, ops: [init_op], bs: [@expr],
cf: ret_style) -> bool { cf: ret_style) -> bool {
let changed = find_pre_post_state_expr(fcx, pres, a); let mut changed = find_pre_post_state_expr(fcx, pres, a);
// FIXME: This could be a typestate constraint // FIXME: This could be a typestate constraint
if vec::len(bs) != vec::len(ops) { if vec::len(bs) != vec::len(ops) {
fcx.ccx.tcx.sess.span_bug(a.span, fcx.ccx.tcx.sess.span_bug(a.span,
@ -199,7 +199,7 @@ fn find_pre_post_state_exprs(fcx: fn_ctxt, pres: prestate, id: node_id,
ops: [init_op], es: [@expr], cf: ret_style) -> ops: [init_op], es: [@expr], cf: ret_style) ->
bool { bool {
let rs = seq_states(fcx, pres, anon_bindings(ops, es)); let rs = seq_states(fcx, pres, anon_bindings(ops, es));
let changed = rs.changed | set_prestate_ann(fcx.ccx, id, pres); let mut changed = rs.changed | set_prestate_ann(fcx.ccx, id, pres);
/* if this is a failing call, it sets everything as initialized */ /* if this is a failing call, it sets everything as initialized */
alt cf { alt cf {
noreturn { noreturn {
@ -218,7 +218,7 @@ fn find_pre_post_state_loop(fcx: fn_ctxt, pres: prestate, l: @local,
// ever grow larger? It seems like it can't? // ever grow larger? It seems like it can't?
let loop_pres = intersect_states(pres, block_poststate(fcx.ccx, body)); let loop_pres = intersect_states(pres, block_poststate(fcx.ccx, body));
let changed = let mut changed =
set_prestate_ann(fcx.ccx, id, loop_pres) | set_prestate_ann(fcx.ccx, id, loop_pres) |
find_pre_post_state_expr(fcx, pres, index); find_pre_post_state_expr(fcx, pres, index);
@ -260,7 +260,7 @@ fn gen_if_local(fcx: fn_ctxt, p: poststate, e: @expr) -> bool {
fn join_then_else(fcx: fn_ctxt, antec: @expr, conseq: blk, fn join_then_else(fcx: fn_ctxt, antec: @expr, conseq: blk,
maybe_alt: option<@expr>, id: node_id, chk: if_ty, maybe_alt: option<@expr>, id: node_id, chk: if_ty,
pres: prestate) -> bool { pres: prestate) -> bool {
let changed = let mut changed =
set_prestate_ann(fcx.ccx, id, pres) | set_prestate_ann(fcx.ccx, id, pres) |
find_pre_post_state_expr(fcx, pres, antec); find_pre_post_state_expr(fcx, pres, antec);
@ -305,7 +305,7 @@ fn join_then_else(fcx: fn_ctxt, antec: @expr, conseq: blk,
find_pre_post_state_expr(fcx, expr_poststate(fcx.ccx, antec), find_pre_post_state_expr(fcx, expr_poststate(fcx.ccx, antec),
altern); altern);
let conseq_prestate = expr_poststate(fcx.ccx, antec); let mut conseq_prestate = expr_poststate(fcx.ccx, antec);
alt chk { alt chk {
if_check { if_check {
let c: sp_constr = expr_to_constr(fcx.ccx.tcx, antec); let c: sp_constr = expr_to_constr(fcx.ccx.tcx, antec);
@ -371,10 +371,10 @@ fn find_pre_post_state_expr(fcx: fn_ctxt, pres: prestate, e: @expr) -> bool {
controlflow_expr(fcx.ccx, operator)); controlflow_expr(fcx.ccx, operator));
} }
expr_bind(operator, maybe_args) { expr_bind(operator, maybe_args) {
let args = []; let mut args = [];
let callee_ops = callee_arg_init_ops(fcx, operator.id); let callee_ops = callee_arg_init_ops(fcx, operator.id);
let ops = []; let mut ops = [];
let i = 0; let mut i = 0;
for a_opt: option<@expr> in maybe_args { for a_opt: option<@expr> in maybe_args {
alt a_opt { alt a_opt {
none {/* no-op */ } none {/* no-op */ }
@ -402,7 +402,7 @@ fn find_pre_post_state_expr(fcx: fn_ctxt, pres: prestate, e: @expr) -> bool {
} }
expr_rec(fields, maybe_base) { expr_rec(fields, maybe_base) {
let exs = field_exprs(fields); let exs = field_exprs(fields);
let changed = let mut changed =
find_pre_post_state_exprs(fcx, pres, e.id, find_pre_post_state_exprs(fcx, pres, e.id,
vec::from_elem(vec::len(fields), vec::from_elem(vec::len(fields),
init_assign), init_assign),
@ -435,7 +435,7 @@ fn find_pre_post_state_expr(fcx: fn_ctxt, pres: prestate, e: @expr) -> bool {
// lhs and rhs in constraints // lhs and rhs in constraints
} }
expr_ret(maybe_ret_val) { expr_ret(maybe_ret_val) {
let changed = set_prestate_ann(fcx.ccx, e.id, pres); let mut changed = set_prestate_ann(fcx.ccx, e.id, pres);
/* normally, everything is true if execution continues after /* normally, everything is true if execution continues after
a ret expression (since execution never continues locally a ret expression (since execution never continues locally
after a ret expression */ after a ret expression */
@ -455,7 +455,7 @@ fn find_pre_post_state_expr(fcx: fn_ctxt, pres: prestate, e: @expr) -> bool {
ret changed; ret changed;
} }
expr_be(val) { expr_be(val) {
let changed = set_prestate_ann(fcx.ccx, e.id, pres); let mut changed = set_prestate_ann(fcx.ccx, e.id, pres);
let post = false_postcond(num_constrs); let post = false_postcond(num_constrs);
// except for the "diverges" bit... // except for the "diverges" bit...
kill_poststate_(fcx, fcx.enclosing.i_diverge, post); kill_poststate_(fcx, fcx.enclosing.i_diverge, post);
@ -468,7 +468,7 @@ fn find_pre_post_state_expr(fcx: fn_ctxt, pres: prestate, e: @expr) -> bool {
} }
expr_binary(bop, l, r) { expr_binary(bop, l, r) {
if lazy_binop(bop) { if lazy_binop(bop) {
let changed = find_pre_post_state_expr(fcx, pres, l); let mut changed = find_pre_post_state_expr(fcx, pres, l);
changed |= changed |=
find_pre_post_state_expr(fcx, expr_poststate(fcx.ccx, l), r); find_pre_post_state_expr(fcx, expr_poststate(fcx.ccx, l), r);
ret changed | set_prestate_ann(fcx.ccx, e.id, pres) | ret changed | set_prestate_ann(fcx.ccx, e.id, pres) |
@ -492,7 +492,7 @@ fn find_pre_post_state_expr(fcx: fn_ctxt, pres: prestate, e: @expr) -> bool {
let loop_pres = let loop_pres =
intersect_states(block_poststate(fcx.ccx, body), pres); intersect_states(block_poststate(fcx.ccx, body), pres);
let changed = let mut changed =
set_prestate_ann(fcx.ccx, e.id, loop_pres) | set_prestate_ann(fcx.ccx, e.id, loop_pres) |
find_pre_post_state_expr(fcx, loop_pres, test) | find_pre_post_state_expr(fcx, loop_pres, test) |
find_pre_post_state_block(fcx, expr_poststate(fcx.ccx, test), find_pre_post_state_block(fcx, expr_poststate(fcx.ccx, test),
@ -514,7 +514,7 @@ fn find_pre_post_state_expr(fcx: fn_ctxt, pres: prestate, e: @expr) -> bool {
expr_do_while(body, test) { expr_do_while(body, test) {
let loop_pres = intersect_states(expr_poststate(fcx.ccx, test), pres); let loop_pres = intersect_states(expr_poststate(fcx.ccx, test), pres);
let changed = set_prestate_ann(fcx.ccx, e.id, loop_pres); let mut changed = set_prestate_ann(fcx.ccx, e.id, loop_pres);
changed |= find_pre_post_state_block(fcx, loop_pres, body); changed |= find_pre_post_state_block(fcx, loop_pres, body);
/* conservative approximation: if the body of the loop /* conservative approximation: if the body of the loop
could break or cont, we revert to the prestate could break or cont, we revert to the prestate
@ -548,7 +548,7 @@ fn find_pre_post_state_expr(fcx: fn_ctxt, pres: prestate, e: @expr) -> bool {
expr_loop(body) { expr_loop(body) {
let loop_pres = let loop_pres =
intersect_states(block_poststate(fcx.ccx, body), pres); intersect_states(block_poststate(fcx.ccx, body), pres);
let changed = set_prestate_ann(fcx.ccx, e.id, loop_pres) let mut changed = set_prestate_ann(fcx.ccx, e.id, loop_pres)
| find_pre_post_state_block(fcx, loop_pres, body); | find_pre_post_state_block(fcx, loop_pres, body);
/* conservative approximation: if a loop contains a break /* conservative approximation: if a loop contains a break
or cont, we assume nothing about the poststate */ or cont, we assume nothing about the poststate */
@ -569,11 +569,11 @@ fn find_pre_post_state_expr(fcx: fn_ctxt, pres: prestate, e: @expr) -> bool {
ret find_pre_post_state_two(fcx, pres, val, sub, e.id, oper_pure); ret find_pre_post_state_two(fcx, pres, val, sub, e.id, oper_pure);
} }
expr_alt(val, alts, _) { expr_alt(val, alts, _) {
let changed = let mut changed =
set_prestate_ann(fcx.ccx, e.id, pres) | set_prestate_ann(fcx.ccx, e.id, pres) |
find_pre_post_state_expr(fcx, pres, val); find_pre_post_state_expr(fcx, pres, val);
let e_post = expr_poststate(fcx.ccx, val); let e_post = expr_poststate(fcx.ccx, val);
let a_post; let mut a_post;
if vec::len(alts) > 0u { if vec::len(alts) > 0u {
a_post = false_postcond(num_constrs); a_post = false_postcond(num_constrs);
for an_alt: arm in alts { for an_alt: arm in alts {
@ -660,7 +660,7 @@ fn find_pre_post_state_stmt(fcx: fn_ctxt, pres: prestate, s: @stmt) -> bool {
termination (don't want to set changed to true termination (don't want to set changed to true
for intermediate changes) */ for intermediate changes) */
let changed = let mut changed =
set_poststate(stmt_ann, c_and_p.post) | c_and_p.changed; set_poststate(stmt_ann, c_and_p.post) | c_and_p.changed;
#debug("Summary: stmt = "); #debug("Summary: stmt = ");
@ -681,7 +681,7 @@ fn find_pre_post_state_stmt(fcx: fn_ctxt, pres: prestate, s: @stmt) -> bool {
} }
} }
stmt_expr(ex, _) | stmt_semi(ex, _) { stmt_expr(ex, _) | stmt_semi(ex, _) {
let changed = let mut changed =
find_pre_post_state_expr(fcx, pres, ex) | find_pre_post_state_expr(fcx, pres, ex) |
set_prestate(stmt_ann, expr_prestate(fcx.ccx, ex)) | set_prestate(stmt_ann, expr_prestate(fcx.ccx, ex)) |
set_poststate(stmt_ann, expr_poststate(fcx.ccx, ex)); set_poststate(stmt_ann, expr_poststate(fcx.ccx, ex));
@ -707,17 +707,17 @@ fn find_pre_post_state_stmt(fcx: fn_ctxt, pres: prestate, s: @stmt) -> bool {
fn find_pre_post_state_block(fcx: fn_ctxt, pres0: prestate, b: blk) -> bool { fn find_pre_post_state_block(fcx: fn_ctxt, pres0: prestate, b: blk) -> bool {
/* First, set the pre-states and post-states for every expression */ /* First, set the pre-states and post-states for every expression */
let pres = pres0; let mut pres = pres0;
/* Iterate over each stmt. The new prestate is <pres>. The poststate /* Iterate over each stmt. The new prestate is <pres>. The poststate
consists of improving <pres> with whatever variables this stmt consists of improving <pres> with whatever variables this stmt
initializes. Then <pres> becomes the new poststate. */ initializes. Then <pres> becomes the new poststate. */
let changed = false; let mut changed = false;
for s: @stmt in b.node.stmts { for s: @stmt in b.node.stmts {
changed |= find_pre_post_state_stmt(fcx, pres, s); changed |= find_pre_post_state_stmt(fcx, pres, s);
pres = stmt_poststate(fcx.ccx, *s); pres = stmt_poststate(fcx.ccx, *s);
} }
let post = pres; let mut post = pres;
alt b.node.expr { alt b.node.expr {
none { } none { }
some(e) { some(e) {
@ -766,7 +766,7 @@ fn find_pre_post_state_fn(fcx: fn_ctxt,
set_in_prestate_constr(fcx, tsc, block_pre); set_in_prestate_constr(fcx, tsc, block_pre);
} }
let changed = find_pre_post_state_block(fcx, block_pre, f_body); let mut changed = find_pre_post_state_block(fcx, block_pre, f_body);
// Treat the tail expression as a return statement // Treat the tail expression as a return statement
alt f_body.node.expr { alt f_body.node.expr {
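find_pre_post_state_block, per its comment above, threads the state through the statements: each statement is analyzed against the current prestate and its poststate becomes the prestate of the next. A minimal sketch of that fold, with the state reduced to a vector of initialized-flags and a made-up per-statement transfer function:

    #[derive(Clone, PartialEq)]
    struct State(Vec<bool>);

    // Hypothetical transfer function: a statement initializes some variables.
    fn stmt_poststate(stmt_inits: &[usize], pres: &State) -> State {
        let mut post = pres.clone();
        for &i in stmt_inits {
            post.0[i] = true;
        }
        post
    }

    // Thread the state through the block and report whether the final state moved.
    fn block_poststate(stmts: &[Vec<usize>], pres0: &State, old_final: &State) -> (State, bool) {
        let mut pres = pres0.clone();
        for stmt in stmts {
            pres = stmt_poststate(stmt, &pres);
        }
        let changed = pres != *old_final;
        (pres, changed)
    }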


@ -160,10 +160,10 @@ fn change(changed: bool, old: trit, newv: trit) -> bool {
} }
fn tritv_difference(p1: t, p2: t) -> bool { fn tritv_difference(p1: t, p2: t) -> bool {
let i: uint = 0u; let mut i: uint = 0u;
assert (p1.nbits == p2.nbits); assert (p1.nbits == p2.nbits);
let sz: uint = p1.nbits; let sz: uint = p1.nbits;
let changed = false; let mut changed = false;
while i < sz { while i < sz {
let old = tritv_get(p1, i); let old = tritv_get(p1, i);
let newv = trit_minus(old, tritv_get(p2, i)); let newv = trit_minus(old, tritv_get(p2, i));
@ -175,10 +175,10 @@ fn tritv_difference(p1: t, p2: t) -> bool {
} }
fn tritv_union(p1: t, p2: t) -> bool { fn tritv_union(p1: t, p2: t) -> bool {
let i: uint = 0u; let mut i: uint = 0u;
assert (p1.nbits == p2.nbits); assert (p1.nbits == p2.nbits);
let sz: uint = p1.nbits; let sz: uint = p1.nbits;
let changed = false; let mut changed = false;
while i < sz { while i < sz {
let old = tritv_get(p1, i); let old = tritv_get(p1, i);
let newv = trit_or(old, tritv_get(p2, i)); let newv = trit_or(old, tritv_get(p2, i));
@ -190,10 +190,10 @@ fn tritv_union(p1: t, p2: t) -> bool {
} }
fn tritv_intersect(p1: t, p2: t) -> bool { fn tritv_intersect(p1: t, p2: t) -> bool {
let i: uint = 0u; let mut i: uint = 0u;
assert (p1.nbits == p2.nbits); assert (p1.nbits == p2.nbits);
let sz: uint = p1.nbits; let sz: uint = p1.nbits;
let changed = false; let mut changed = false;
while i < sz { while i < sz {
let old = tritv_get(p1, i); let old = tritv_get(p1, i);
let newv = trit_and(old, tritv_get(p2, i)); let newv = trit_and(old, tritv_get(p2, i));
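tritv_difference, tritv_union and tritv_intersect above all share one shape: walk both vectors, combine corresponding trits, write the result back, and remember whether anything changed. That shared pattern, factored out over a caller-supplied combiner as a sketch:

    #[derive(Clone, Copy, PartialEq)]
    enum Trit { DontCare, True, False }

    // Combine `src` into `dst` element-wise with `f`; return true if `dst` changed.
    fn combine_tritv(dst: &mut [Trit], src: &[Trit], f: impl Fn(Trit, Trit) -> Trit) -> bool {
        assert_eq!(dst.len(), src.len());
        let mut changed = false;
        for (d, &s) in dst.iter_mut().zip(src) {
            let new = f(*d, s);
            if new != *d {
                *d = new;
                changed = true;
            }
        }
        changed
    }

The three entry points would then differ only in the combiner they pass (trit_or, trit_and, or trit_minus in the original).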
@ -238,17 +238,17 @@ fn tritv_copy(target: t, source: t) -> bool {
} }
fn tritv_set_all(v: t) { fn tritv_set_all(v: t) {
let i: uint = 0u; let mut i: uint = 0u;
while i < v.nbits { tritv_set(i, v, ttrue); i += 1u; } while i < v.nbits { tritv_set(i, v, ttrue); i += 1u; }
} }
fn tritv_clear(v: t) { fn tritv_clear(v: t) {
let i: uint = 0u; let mut i: uint = 0u;
while i < v.nbits { tritv_set(i, v, dont_care); i += 1u; } while i < v.nbits { tritv_set(i, v, dont_care); i += 1u; }
} }
fn tritv_kill(v: t) { fn tritv_kill(v: t) {
let i: uint = 0u; let mut i: uint = 0u;
while i < v.nbits { tritv_set(i, v, tfalse); i += 1u; } while i < v.nbits { tritv_set(i, v, tfalse); i += 1u; }
} }
@ -259,7 +259,7 @@ fn tritv_clone(v: t) -> t {
} }
fn tritv_doesntcare(v: t) -> bool { fn tritv_doesntcare(v: t) -> bool {
let i: uint = 0u; let mut i: uint = 0u;
while i < v.nbits { while i < v.nbits {
if tritv_get(v, i) != dont_care { ret false; } if tritv_get(v, i) != dont_care { ret false; }
i += 1u; i += 1u;
@ -268,8 +268,8 @@ fn tritv_doesntcare(v: t) -> bool {
} }
fn to_vec(v: t) -> [uint] { fn to_vec(v: t) -> [uint] {
let i: uint = 0u; let mut i: uint = 0u;
let rslt: [uint] = []; let mut rslt: [uint] = [];
while i < v.nbits { while i < v.nbits {
rslt += rslt +=
[alt tritv_get(v, i) { [alt tritv_get(v, i) {
@ -283,8 +283,8 @@ fn to_vec(v: t) -> [uint] {
} }
fn to_str(v: t) -> str { fn to_str(v: t) -> str {
let i: uint = 0u; let mut i: uint = 0u;
let rs: str = ""; let mut rs: str = "";
while i < v.nbits { while i < v.nbits {
rs += rs +=
alt tritv_get(v, i) { alt tritv_get(v, i) {


@ -316,7 +316,7 @@ enum param_bound {
} }
fn param_bounds_to_kind(bounds: param_bounds) -> kind { fn param_bounds_to_kind(bounds: param_bounds) -> kind {
let kind = kind_noncopyable; let mut kind = kind_noncopyable;
for bound in *bounds { for bound in *bounds {
alt bound { alt bound {
bound_copy { bound_copy {
@ -392,7 +392,7 @@ fn mk_t_with_id(cx: ctxt, st: sty, o_def_id: option<ast::def_id>) -> t {
some(t) { unsafe { ret unsafe::reinterpret_cast(t); } } some(t) { unsafe { ret unsafe::reinterpret_cast(t); } }
_ {} _ {}
} }
let has_params = false, has_vars = false, has_rptrs = false; let mut has_params = false, has_vars = false, has_rptrs = false;
fn derive_flags(&has_params: bool, &has_vars: bool, &has_rptrs: bool, fn derive_flags(&has_params: bool, &has_vars: bool, &has_rptrs: bool,
tt: t) { tt: t) {
let t = get(tt); let t = get(tt);
@ -595,7 +595,7 @@ enum fold_mode {
} }
fn fold_ty(cx: ctxt, fld: fold_mode, ty_0: t) -> t { fn fold_ty(cx: ctxt, fld: fold_mode, ty_0: t) -> t {
let ty = ty_0; let mut ty = ty_0;
let tb = get(ty); let tb = get(ty);
alt fld { alt fld {
@ -631,7 +631,7 @@ fn fold_ty(cx: ctxt, fld: fold_mode, ty_0: t) -> t {
ty = mk_self(cx, vec::map(subtys, {|t| fold_ty(cx, fld, t) })); ty = mk_self(cx, vec::map(subtys, {|t| fold_ty(cx, fld, t) }));
} }
ty_rec(fields) { ty_rec(fields) {
let new_fields: [field] = []; let mut new_fields: [field] = [];
for fl: field in fields { for fl: field in fields {
let new_ty = fold_ty(cx, fld, fl.mt.ty); let new_ty = fold_ty(cx, fld, fl.mt.ty);
let new_mt = {ty: new_ty, mutbl: fl.mt.mutbl}; let new_mt = {ty: new_ty, mutbl: fl.mt.mutbl};
@ -640,12 +640,12 @@ fn fold_ty(cx: ctxt, fld: fold_mode, ty_0: t) -> t {
ty = mk_rec(cx, new_fields); ty = mk_rec(cx, new_fields);
} }
ty_tup(ts) { ty_tup(ts) {
let new_ts = []; let mut new_ts = [];
for tt in ts { new_ts += [fold_ty(cx, fld, tt)]; } for tt in ts { new_ts += [fold_ty(cx, fld, tt)]; }
ty = mk_tup(cx, new_ts); ty = mk_tup(cx, new_ts);
} }
ty_fn(f) { ty_fn(f) {
let new_args: [arg] = []; let mut new_args: [arg] = [];
for a: arg in f.inputs { for a: arg in f.inputs {
let new_ty = fold_ty(cx, fld, a.ty); let new_ty = fold_ty(cx, fld, a.ty);
new_args += [{mode: a.mode, ty: new_ty}]; new_args += [{mode: a.mode, ty: new_ty}];
@ -655,7 +655,7 @@ fn fold_ty(cx: ctxt, fld: fold_mode, ty_0: t) -> t {
with f}); with f});
} }
ty_res(did, subty, tps) { ty_res(did, subty, tps) {
let new_tps = []; let mut new_tps = [];
for tp: t in tps { new_tps += [fold_ty(cx, fld, tp)]; } for tp: t in tps { new_tps += [fold_ty(cx, fld, tp)]; }
ty = mk_res(cx, did, fold_ty(cx, fld, subty), new_tps); ty = mk_res(cx, did, fold_ty(cx, fld, subty), new_tps);
} }
@ -805,7 +805,7 @@ fn type_needs_drop(cx: ctxt, ty: t) -> bool {
none {/* fall through */ } none {/* fall through */ }
} }
let accum = false; let mut accum = false;
let result = alt get(ty).struct { let result = alt get(ty).struct {
// scalar types // scalar types
ty_nil | ty_bot | ty_bool | ty_int(_) | ty_float(_) | ty_uint(_) | ty_nil | ty_bot | ty_bool | ty_int(_) | ty_float(_) | ty_uint(_) |
@ -906,19 +906,19 @@ fn type_kind(cx: ctxt, ty: t) -> kind {
ty_vec(tm) | ty_uniq(tm) { type_kind(cx, tm.ty) } ty_vec(tm) | ty_uniq(tm) { type_kind(cx, tm.ty) }
// Records lower to the lowest of their members. // Records lower to the lowest of their members.
ty_rec(flds) { ty_rec(flds) {
let lowest = kind_sendable; let mut lowest = kind_sendable;
for f in flds { lowest = lower_kind(lowest, type_kind(cx, f.mt.ty)); } for f in flds { lowest = lower_kind(lowest, type_kind(cx, f.mt.ty)); }
lowest lowest
} }
// Tuples lower to the lowest of their members. // Tuples lower to the lowest of their members.
ty_tup(tys) { ty_tup(tys) {
let lowest = kind_sendable; let mut lowest = kind_sendable;
for ty in tys { lowest = lower_kind(lowest, type_kind(cx, ty)); } for ty in tys { lowest = lower_kind(lowest, type_kind(cx, ty)); }
lowest lowest
} }
// Enums lower to the lowest of their variants. // Enums lower to the lowest of their variants.
ty_enum(did, tps) { ty_enum(did, tps) {
let lowest = kind_sendable; let mut lowest = kind_sendable;
for variant in *enum_variants(cx, did) { for variant in *enum_variants(cx, did) {
for aty in variant.args { for aty in variant.args {
// Perform any type parameter substitutions. // Perform any type parameter substitutions.
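type_kind for aggregates folds lower_kind over the members, starting from the most permissive kind (sendable) and lowering whenever a member is more restricted. A small sketch of that fold; the noncopyable < copyable < sendable numeric ordering is an assumption used only to express "take the lower of the two":

    #[derive(Clone, Copy, PartialEq, Debug)]
    enum Kind { Noncopyable = 0, Copyable = 1, Sendable = 2 }

    fn lower_kind(a: Kind, b: Kind) -> Kind {
        if (a as u8) <= (b as u8) { a } else { b }
    }

    // Records, tuples and enums are only as capable as their least capable member.
    fn aggregate_kind(member_kinds: &[Kind]) -> Kind {
        member_kinds.iter().copied().fold(Kind::Sendable, lower_kind)
    }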
@ -1038,7 +1038,7 @@ fn type_is_signed(ty: t) -> bool {
// Whether a type is Plain Old Data -- meaning it does not contain pointers // Whether a type is Plain Old Data -- meaning it does not contain pointers
// that the cycle collector might care about. // that the cycle collector might care about.
fn type_is_pod(cx: ctxt, ty: t) -> bool { fn type_is_pod(cx: ctxt, ty: t) -> bool {
let result = true; let mut result = true;
alt get(ty).struct { alt get(ty).struct {
// Scalar types // Scalar types
ty_nil | ty_bot | ty_bool | ty_int(_) | ty_float(_) | ty_uint(_) | ty_nil | ty_bot | ty_bool | ty_int(_) | ty_float(_) | ty_uint(_) |
@ -1053,7 +1053,7 @@ fn type_is_pod(cx: ctxt, ty: t) -> bool {
let tup_ty = mk_tup(cx, variant.args); let tup_ty = mk_tup(cx, variant.args);
// Perform any type parameter substitutions. // Perform any type parameter substitutions.
tup_ty = substitute_type_params(cx, tps, tup_ty); let tup_ty = substitute_type_params(cx, tps, tup_ty);
if !type_is_pod(cx, tup_ty) { result = false; } if !type_is_pod(cx, tup_ty) { result = false; }
} }
} }
@ -1108,7 +1108,7 @@ fn type_param(ty: t) -> option<uint> {
// Returns a vec of all the type variables // Returns a vec of all the type variables
// occurring in t. It may contain duplicates. // occurring in t. It may contain duplicates.
fn vars_in_type(ty: t) -> [int] { fn vars_in_type(ty: t) -> [int] {
let rslt = []; let mut rslt = [];
walk_ty(ty) {|ty| walk_ty(ty) {|ty|
alt get(ty).struct { ty_var(v) { rslt += [v]; } _ { } } alt get(ty).struct { ty_var(v) { rslt += [v]; } _ { } }
} }
@ -1116,7 +1116,7 @@ fn vars_in_type(ty: t) -> [int] {
} }
fn type_autoderef(cx: ctxt, t: t) -> t { fn type_autoderef(cx: ctxt, t: t) -> t {
let t1 = t; let mut t1 = t;
loop { loop {
alt get(t1).struct { alt get(t1).struct {
ty_box(mt) | ty_uniq(mt) | ty::ty_rptr(_, mt) { t1 = mt.ty; } ty_box(mt) | ty_uniq(mt) | ty::ty_rptr(_, mt) { t1 = mt.ty; }
@ -1145,12 +1145,12 @@ fn hash_type_structure(st: sty) -> uint {
} }
fn hash_subty(id: uint, subty: t) -> uint { (id << 2u) + type_id(subty) } fn hash_subty(id: uint, subty: t) -> uint { (id << 2u) + type_id(subty) }
fn hash_subtys(id: uint, subtys: [t]) -> uint { fn hash_subtys(id: uint, subtys: [t]) -> uint {
let h = id; let mut h = id;
for s in subtys { h = (h << 2u) + type_id(s) } for s in subtys { h = (h << 2u) + type_id(s) }
h h
} }
fn hash_type_constr(id: uint, c: @type_constr) -> uint { fn hash_type_constr(id: uint, c: @type_constr) -> uint {
let h = id; let mut h = id;
h = (h << 2u) + hash_def(h, c.node.id); h = (h << 2u) + hash_def(h, c.node.id);
// FIXME this makes little sense // FIXME this makes little sense
for a in c.node.args { for a in c.node.args {
@ -1190,27 +1190,27 @@ fn hash_type_structure(st: sty) -> uint {
} }
ty_str { 17u } ty_str { 17u }
ty_enum(did, tys) { ty_enum(did, tys) {
let h = hash_def(18u, did); let mut h = hash_def(18u, did);
for typ: t in tys { h = hash_subty(h, typ); } for typ: t in tys { h = hash_subty(h, typ); }
h h
} }
ty_box(mt) { hash_subty(19u, mt.ty) } ty_box(mt) { hash_subty(19u, mt.ty) }
ty_vec(mt) { hash_subty(21u, mt.ty) } ty_vec(mt) { hash_subty(21u, mt.ty) }
ty_rec(fields) { ty_rec(fields) {
let h = 26u; let mut h = 26u;
for f in fields { h = hash_subty(h, f.mt.ty); } for f in fields { h = hash_subty(h, f.mt.ty); }
h h
} }
ty_tup(ts) { hash_subtys(25u, ts) } ty_tup(ts) { hash_subtys(25u, ts) }
ty_fn(f) { ty_fn(f) {
let h = 27u; let mut h = 27u;
for a in f.inputs { h = hash_subty(h, a.ty); } for a in f.inputs { h = hash_subty(h, a.ty); }
hash_subty(h, f.output) hash_subty(h, f.output)
} }
ty_var(v) { hash_uint(30u, v as uint) } ty_var(v) { hash_uint(30u, v as uint) }
ty_param(pid, did) { hash_def(hash_uint(31u, pid), did) } ty_param(pid, did) { hash_def(hash_uint(31u, pid), did) }
ty_self(ts) { ty_self(ts) {
let h = 28u; let mut h = 28u;
for t in ts { h = hash_subty(h, t); } for t in ts { h = hash_subty(h, t); }
h h
} }
@ -1218,21 +1218,21 @@ fn hash_type_structure(st: sty) -> uint {
ty_bot { 34u } ty_bot { 34u }
ty_ptr(mt) { hash_subty(35u, mt.ty) } ty_ptr(mt) { hash_subty(35u, mt.ty) }
ty_rptr(region, mt) { ty_rptr(region, mt) {
let h = (46u << 2u) + hash_region(region); let mut h = (46u << 2u) + hash_region(region);
hash_subty(h, mt.ty) hash_subty(h, mt.ty)
} }
ty_res(did, sub, tps) { ty_res(did, sub, tps) {
let h = hash_subty(hash_def(18u, did), sub); let mut h = hash_subty(hash_def(18u, did), sub);
hash_subtys(h, tps) hash_subtys(h, tps)
} }
ty_constr(t, cs) { ty_constr(t, cs) {
let h = hash_subty(36u, t); let mut h = hash_subty(36u, t);
for c in cs { h = (h << 2u) + hash_type_constr(h, c); } for c in cs { h = (h << 2u) + hash_type_constr(h, c); }
h h
} }
ty_uniq(mt) { hash_subty(37u, mt.ty) } ty_uniq(mt) { hash_subty(37u, mt.ty) }
ty_iface(did, tys) { ty_iface(did, tys) {
let h = hash_def(40u, did); let mut h = hash_def(40u, did);
for typ: t in tys { h = hash_subty(h, typ); } for typ: t in tys { h = hash_subty(h, typ); }
h h
} }
@ -1241,7 +1241,7 @@ fn hash_type_structure(st: sty) -> uint {
ty_opaque_closure_ptr(ck_uniq) { 43u } ty_opaque_closure_ptr(ck_uniq) { 43u }
ty_opaque_box { 44u } ty_opaque_box { 44u }
ty_class(did, tys) { ty_class(did, tys) {
let h = hash_def(45u, did); let mut h = hash_def(45u, did);
for typ: t in tys { h = hash_subty(h, typ); } for typ: t in tys { h = hash_subty(h, typ); }
h h
} }
@ -1270,7 +1270,7 @@ fn arg_eq<T>(eq: fn(T, T) -> bool,
fn args_eq<T>(eq: fn(T, T) -> bool, fn args_eq<T>(eq: fn(T, T) -> bool,
a: [@sp_constr_arg<T>], a: [@sp_constr_arg<T>],
b: [@sp_constr_arg<T>]) -> bool { b: [@sp_constr_arg<T>]) -> bool {
let i: uint = 0u; let mut i: uint = 0u;
for arg: @sp_constr_arg<T> in a { for arg: @sp_constr_arg<T> in a {
if !arg_eq(eq, arg, b[i]) { ret false; } if !arg_eq(eq, arg, b[i]) { ret false; }
i += 1u; i += 1u;
@ -1287,7 +1287,7 @@ fn constr_eq(c: @constr, d: @constr) -> bool {
fn constrs_eq(cs: [@constr], ds: [@constr]) -> bool { fn constrs_eq(cs: [@constr], ds: [@constr]) -> bool {
if vec::len(cs) != vec::len(ds) { ret false; } if vec::len(cs) != vec::len(ds) { ret false; }
let i = 0u; let mut i = 0u;
for c: @constr in cs { if !constr_eq(c, ds[i]) { ret false; } i += 1u; } for c: @constr in cs { if !constr_eq(c, ds[i]) { ret false; } i += 1u; }
ret true; ret true;
} }
@ -1411,7 +1411,7 @@ fn stmt_node_id(s: @ast::stmt) -> ast::node_id {
} }
fn field_idx(id: ast::ident, fields: [field]) -> option<uint> { fn field_idx(id: ast::ident, fields: [field]) -> option<uint> {
let i = 0u; let mut i = 0u;
for f in fields { if f.ident == id { ret some(i); } i += 1u; } for f in fields { if f.ident == id { ret some(i); } i += 1u; }
ret none; ret none;
} }
@ -1429,7 +1429,7 @@ fn get_fields(rec_ty:t) -> [field] {
} }
fn method_idx(id: ast::ident, meths: [method]) -> option<uint> { fn method_idx(id: ast::ident, meths: [method]) -> option<uint> {
let i = 0u; let mut i = 0u;
for m in meths { if m.ident == id { ret some(i); } i += 1u; } for m in meths { if m.ident == id { ret some(i); } i += 1u; }
ret none; ret none;
} }
@ -1676,7 +1676,7 @@ mod unify {
ufind::grow(vb.sets, (key as uint) + 1u); ufind::grow(vb.sets, (key as uint) + 1u);
let root = ufind::find(vb.sets, key as uint); let root = ufind::find(vb.sets, key as uint);
let result_type = typ; let mut result_type = typ;
alt smallintmap::find(vb.types, root) { alt smallintmap::find(vb.types, root) {
some(old_type) { some(old_type) {
alt unify_step(cx, old_type, typ, variance, {|v| ok(v)}) { alt unify_step(cx, old_type, typ, variance, {|v| ok(v)}) {
@ -1756,10 +1756,9 @@ mod unify {
let expected_arg_len = vec::len(expected.node.args); let expected_arg_len = vec::len(expected.node.args);
let actual_arg_len = vec::len(actual_constr.node.args); let actual_arg_len = vec::len(actual_constr.node.args);
if expected_arg_len != actual_arg_len { ret err_res; } if expected_arg_len != actual_arg_len { ret err_res; }
let i = 0u; let mut i = 0u;
let actual;
for a: @ty_constr_arg in expected.node.args { for a: @ty_constr_arg in expected.node.args {
actual = actual_constr.node.args[i]; let actual = actual_constr.node.args[i];
alt a.node { alt a.node {
carg_base { carg_base {
alt actual.node { carg_base { } _ { ret err_res; } } alt actual.node { carg_base { } _ { ret err_res; } }
@ -2010,18 +2009,17 @@ mod unify {
cx: @uctxt, e_region: region, a_region: region, cx: @uctxt, e_region: region, a_region: region,
variance: variance, variance: variance,
nxt: fn(region) -> ures<T>) -> ures<T> { nxt: fn(region) -> ures<T>) -> ures<T> {
let sub, super; let {sub, super} = alt variance {
alt variance { covariant { {sub: a_region, super: e_region} }
covariant { super = e_region; sub = a_region; } contravariant { {sub: e_region, super: a_region} }
contravariant { super = a_region; sub = e_region; } invariant {
invariant { ret if e_region == a_region {
ret if e_region == a_region {
nxt(e_region) nxt(e_region)
} else { } else {
err(terr_regions_differ(true, e_region, a_region)) err(terr_regions_differ(true, e_region, a_region))
}; };
} }
} };
// FIXME: This is wrong. We should be keeping a set of region bindings // FIXME: This is wrong. We should be keeping a set of region bindings
// around. // around.
@ -2187,19 +2185,18 @@ mod unify {
ret unify_step(cx, expected, actual, covariant, {|v| ok(v)}); ret unify_step(cx, expected, actual, covariant, {|v| ok(v)});
} }
fn dump_var_bindings(tcx: ctxt, vb: @var_bindings) { fn dump_var_bindings(tcx: ctxt, vb: @var_bindings) {
let i = 0u; let mut i = 0u;
while i < vec::len::<ufind::node>(vb.sets.nodes) { while i < vec::len::<ufind::node>(vb.sets.nodes) {
let sets = ""; let mut sets = "";
let j = 0u; let mut j = 0u;
while j < vec::len::<option<uint>>(vb.sets.nodes) { while j < vec::len::<option<uint>>(vb.sets.nodes) {
if ufind::find(vb.sets, j) == i { sets += #fmt[" %u", j]; } if ufind::find(vb.sets, j) == i { sets += #fmt[" %u", j]; }
j += 1u; j += 1u;
} }
let typespec; let typespec = alt smallintmap::find::<t>(vb.types, i) {
alt smallintmap::find::<t>(vb.types, i) { none { "" }
none { typespec = ""; } some(typ) { " =" + ty_to_str(tcx, typ) }
some(typ) { typespec = " =" + ty_to_str(tcx, typ); } };
}
#error("set %u:%s%s", i, typespec, sets); #error("set %u:%s%s", i, typespec, sets);
i += 1u; i += 1u;
} }
@ -2224,7 +2221,7 @@ mod unify {
alt smallintmap::find::<t>(vb.types, root_id) { alt smallintmap::find::<t>(vb.types, root_id) {
none { *unresolved = some(vid); ret mk_var(tcx, vid); } none { *unresolved = some(vid); ret mk_var(tcx, vid); }
some(rt) { some(rt) {
let give_up = false; let mut give_up = false;
std::list::iter(vars_seen) {|v| std::list::iter(vars_seen) {|v|
if v == vid { if v == vid {
give_up = true; give_up = true;
@ -2466,7 +2463,7 @@ fn enum_variants(cx: ctxt, id: ast::def_id) -> @[variant_info] {
// moved there to avoid having to call eval_const_expr twice. // moved there to avoid having to call eval_const_expr twice.
alt cx.items.get(id.node) { alt cx.items.get(id.node) {
ast_map::node_item(@{node: ast::item_enum(variants, _), _}, _) { ast_map::node_item(@{node: ast::item_enum(variants, _), _}, _) {
let disr_val = -1; let mut disr_val = -1;
@vec::map(variants, {|variant| @vec::map(variants, {|variant|
let ctor_ty = node_id_to_type(cx, variant.node.id); let ctor_ty = node_id_to_type(cx, variant.node.id);
let arg_tys = if vec::len(variant.node.args) > 0u { let arg_tys = if vec::len(variant.node.args) > 0u {
@ -2502,7 +2499,7 @@ fn enum_variants(cx: ctxt, id: ast::def_id) -> @[variant_info] {
fn enum_variant_with_id(cx: ctxt, enum_id: ast::def_id, fn enum_variant_with_id(cx: ctxt, enum_id: ast::def_id,
variant_id: ast::def_id) -> variant_info { variant_id: ast::def_id) -> variant_info {
let variants = enum_variants(cx, enum_id); let variants = enum_variants(cx, enum_id);
let i = 0u; let mut i = 0u;
while i < vec::len::<variant_info>(*variants) { while i < vec::len::<variant_info>(*variants) {
let variant = variants[i]; let variant = variants[i];
if def_eq(variant.id, variant_id) { ret variant; } if def_eq(variant.id, variant_id) { ret variant; }
@ -2614,7 +2611,7 @@ fn lookup_class_method_by_name(cx:ctxt, did: ast::def_id, name: ident,
} }
fn class_field_tys(items: [@class_item]) -> [field_ty] { fn class_field_tys(items: [@class_item]) -> [field_ty] {
let rslt = []; let mut rslt = [];
for it in items { for it in items {
alt it.node.decl { alt it.node.decl {
instance_var(nm, _, _, id) { instance_var(nm, _, _, id) {
@ -2630,7 +2627,7 @@ fn class_field_tys(items: [@class_item]) -> [field_ty] {
// Return a list of fields corresponding to the class's items // Return a list of fields corresponding to the class's items
// (as if the class was a record). trans uses this // (as if the class was a record). trans uses this
fn class_items_as_fields(cx:ctxt, did: ast::def_id) -> [field] { fn class_items_as_fields(cx:ctxt, did: ast::def_id) -> [field] {
let rslt = []; let mut rslt = [];
for f in lookup_class_fields(cx, did) { for f in lookup_class_fields(cx, did) {
// consider all instance vars mutable, because the // consider all instance vars mutable, because the
// constructor may mutate all vars // constructor may mutate all vars
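Every hunk in the file above applies the same mechanical rule: a local that is reassigned after its declaration must now be written `let mut`. A minimal sketch of the before/after in the era's syntax — `count_true` and `flags` are hypothetical names for illustration, not code from this patch:

    // Before: now rejected, because `n` is reassigned without being declared `mut`.
    //     let n = 0u;
    //     for f: bool in flags { if f { n += 1u; } }
    //
    // After: the form this patch converts such locals to.
    fn count_true(flags: [bool]) -> uint {
        let mut n = 0u;                      // `mut` because `n` is updated in the loop
        for f: bool in flags { if f { n += 1u; } }
        ret n;
    }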

View File

@ -307,7 +307,7 @@ fn ast_ty_to_ty(tcx: ty::ctxt, mode: mode, &&ast_ty: @ast::ty) -> ty::t {
} }
// The typedef is type-parametric. Do the type substitution. // The typedef is type-parametric. Do the type substitution.
let param_bindings: [ty::t] = []; let mut param_bindings: [ty::t] = [];
if vec::len(args) != vec::len(*ty_param_bounds_and_ty.bounds) { if vec::len(args) != vec::len(*ty_param_bounds_and_ty.bounds) {
tcx.sess.span_fatal(sp, "wrong number of type arguments for a \ tcx.sess.span_fatal(sp, "wrong number of type arguments for a \
polymorphic type"); polymorphic type");
@ -377,11 +377,10 @@ fn ast_ty_to_ty(tcx: ty::ctxt, mode: mode, &&ast_ty: @ast::ty) -> ty::t {
ty::mk_tup(tcx, flds) ty::mk_tup(tcx, flds)
} }
ast::ty_rec(fields) { ast::ty_rec(fields) {
let flds: [field] = []; let flds = vec::map(fields) {|f|
for f: ast::ty_field in fields {
let tm = ast_mt_to_mt(tcx, use_site, mode, f.node.mt); let tm = ast_mt_to_mt(tcx, use_site, mode, f.node.mt);
flds += [{ident: f.node.ident, mt: tm}]; {ident: f.node.ident, mt: tm}
} };
ty::mk_rec(tcx, flds) ty::mk_rec(tcx, flds)
} }
ast::ty_fn(proto, decl) { ast::ty_fn(proto, decl) {
@ -456,10 +455,9 @@ fn ast_ty_to_ty(tcx: ty::ctxt, mode: mode, &&ast_ty: @ast::ty) -> ty::t {
} }
} }
ast::ty_constr(t, cs) { ast::ty_constr(t, cs) {
let out_cs = []; let out_cs = vec::map(cs) {|constr|
for constr: @ast::ty_constr in cs { ty::ast_constr_to_constr(tcx, constr)
out_cs += [ty::ast_constr_to_constr(tcx, constr)]; };
}
ty::mk_constr(tcx, do_ast_ty_to_ty(tcx, use_site, mode, t), ty::mk_constr(tcx, do_ast_ty_to_ty(tcx, use_site, mode, t),
out_cs) out_cs)
} }
@ -608,10 +606,9 @@ fn ty_of_fn_decl(tcx: ty::ctxt,
let input_tys = vec::map(decl.inputs) {|a| ty_of_arg(tcx, mode, a) }; let input_tys = vec::map(decl.inputs) {|a| ty_of_arg(tcx, mode, a) };
let output_ty = ast_ty_to_ty(tcx, mode, decl.output); let output_ty = ast_ty_to_ty(tcx, mode, decl.output);
let out_constrs = []; let out_constrs = vec::map(decl.constraints) {|constr|
for constr: @ast::constr in decl.constraints { ty::ast_constr_to_constr(tcx, constr)
out_constrs += [ty::ast_constr_to_constr(tcx, constr)]; };
}
{proto: proto, inputs: input_tys, {proto: proto, inputs: input_tys,
output: output_ty, ret_style: decl.cf, constraints: out_constrs} output: output_ty, ret_style: decl.cf, constraints: out_constrs}
} }
@ -627,10 +624,10 @@ fn ty_of_fn(tcx: ty::ctxt, mode: mode, decl: ast::fn_decl,
fn ty_of_native_fn_decl(tcx: ty::ctxt, mode: mode, decl: ast::fn_decl, fn ty_of_native_fn_decl(tcx: ty::ctxt, mode: mode, decl: ast::fn_decl,
ty_params: [ast::ty_param], def_id: ast::def_id) ty_params: [ast::ty_param], def_id: ast::def_id)
-> ty::ty_param_bounds_and_ty { -> ty::ty_param_bounds_and_ty {
let input_tys = [], bounds = ty_param_bounds(tcx, mode, ty_params); let bounds = ty_param_bounds(tcx, mode, ty_params);
for a: ast::arg in decl.inputs { let input_tys = vec::map(decl.inputs) {|a|
input_tys += [ty_of_arg(tcx, mode, a)]; ty_of_arg(tcx, mode, a)
} };
let output_ty = ast_ty_to_ty(tcx, mode, decl.output); let output_ty = ast_ty_to_ty(tcx, mode, decl.output);
let t_fn = ty::mk_fn(tcx, {proto: ast::proto_bare, let t_fn = ty::mk_fn(tcx, {proto: ast::proto_bare,
@ -644,12 +641,12 @@ fn ty_of_native_fn_decl(tcx: ty::ctxt, mode: mode, decl: ast::fn_decl,
} }
fn ty_param_bounds(tcx: ty::ctxt, mode: mode, params: [ast::ty_param]) fn ty_param_bounds(tcx: ty::ctxt, mode: mode, params: [ast::ty_param])
-> @[ty::param_bounds] { -> @[ty::param_bounds] {
let result = []; let mut result = [];
for param in params { for param in params {
result += [alt tcx.ty_param_bounds.find(param.id) { result += [alt tcx.ty_param_bounds.find(param.id) {
some(bs) { bs } some(bs) { bs }
none { none {
let bounds = []; let mut bounds = [];
for b in *param.bounds { for b in *param.bounds {
bounds += [alt b { bounds += [alt b {
ast::bound_send { ty::bound_send } ast::bound_send { ty::bound_send }
@ -728,7 +725,8 @@ fn write_bot(tcx: ty::ctxt, node_id: ast::node_id) {
fn mk_ty_params(tcx: ty::ctxt, atps: [ast::ty_param]) fn mk_ty_params(tcx: ty::ctxt, atps: [ast::ty_param])
-> {bounds: @[ty::param_bounds], params: [ty::t]} { -> {bounds: @[ty::param_bounds], params: [ty::t]} {
let i = 0u, bounds = ty_param_bounds(tcx, m_collect, atps); let mut i = 0u;
let bounds = ty_param_bounds(tcx, m_collect, atps);
{bounds: bounds, {bounds: bounds,
params: vec::map(atps, {|atp| params: vec::map(atps, {|atp|
let t = ty::mk_param(tcx, i, local_def(atp.id)); let t = ty::mk_param(tcx, i, local_def(atp.id));
@ -766,7 +764,7 @@ fn compare_impl_method(tcx: ty::ctxt, sp: span, impl_m: ty::method,
let substs = substs + vec::from_fn(vec::len(*if_m.tps), {|i| let substs = substs + vec::from_fn(vec::len(*if_m.tps), {|i|
ty::mk_param(tcx, i + impl_tps, {crate: 0, node: 0}) ty::mk_param(tcx, i + impl_tps, {crate: 0, node: 0})
}); });
let if_fty = ty::mk_fn(tcx, if_m.fty); let mut if_fty = ty::mk_fn(tcx, if_m.fty);
if_fty = ty::substitute_type_params(tcx, substs, if_fty); if_fty = ty::substitute_type_params(tcx, substs, if_fty);
if ty::type_has_vars(if_fty) { if ty::type_has_vars(if_fty) {
if_fty = fixup_self_in_method_ty(tcx, if_fty, substs, if_fty = fixup_self_in_method_ty(tcx, if_fty, substs,
@ -800,7 +798,7 @@ fn fixup_self_in_method_ty(cx: ty::ctxt, mty: ty::t, m_substs: [ty::t],
if vec::len(tps) > 0u { if vec::len(tps) > 0u {
// Move the substs into the type param system of the // Move the substs into the type param system of the
// context. // context.
let substs = vec::map(tps, {|t| let mut substs = vec::map(tps, {|t|
let f = fixup_self_in_method_ty(cx, t, m_substs, let f = fixup_self_in_method_ty(cx, t, m_substs,
self); self);
ty::substitute_type_params(cx, m_substs, f) ty::substitute_type_params(cx, m_substs, f)
@ -876,7 +874,7 @@ mod collect {
} else { } else {
// As above, tell ast_ty_to_ty() that trans_ty_item_to_ty() // As above, tell ast_ty_to_ty() that trans_ty_item_to_ty()
// should be called to resolve named types. // should be called to resolve named types.
let args: [arg] = []; let mut args: [arg] = [];
for va: ast::variant_arg in variant.node.args { for va: ast::variant_arg in variant.node.args {
let arg_ty = ast_ty_to_ty(tcx, m_collect, va.ty); let arg_ty = ast_ty_to_ty(tcx, m_collect, va.ty);
args += [{mode: ast::expl(ast::by_copy), ty: arg_ty}]; args += [{mode: ast::expl(ast::by_copy), ty: arg_ty}];
@ -925,7 +923,7 @@ mod collect {
fn convert_methods(tcx: ty::ctxt, ms: [@ast::method], fn convert_methods(tcx: ty::ctxt, ms: [@ast::method],
i_bounds: @[ty::param_bounds], maybe_self: option<ty::t>) i_bounds: @[ty::param_bounds], maybe_self: option<ty::t>)
-> [{mty: ty::method, id: ast::node_id, span: span}] { -> [{mty: ty::method, id: ast::node_id, span: span}] {
let my_methods = []; let mut my_methods = [];
for m in ms { for m in ms {
alt maybe_self { alt maybe_self {
some(selfty) { some(selfty) {
@ -1111,7 +1109,7 @@ mod unify {
// FIXME This is almost a duplicate of ty::type_autoderef, with structure_of // FIXME This is almost a duplicate of ty::type_autoderef, with structure_of
// instead of ty::struct. // instead of ty::struct.
fn do_autoderef(fcx: @fn_ctxt, sp: span, t: ty::t) -> ty::t { fn do_autoderef(fcx: @fn_ctxt, sp: span, t: ty::t) -> ty::t {
let t1 = t; let mut t1 = t;
loop { loop {
alt structure_of(fcx, sp, t1) { alt structure_of(fcx, sp, t1) {
ty::ty_box(inner) | ty::ty_uniq(inner) | ty::ty_rptr(_, inner) { ty::ty_box(inner) | ty::ty_uniq(inner) | ty::ty_rptr(_, inner) {
@ -1185,8 +1183,8 @@ mod demand {
ty_param_substs_0: [ty::t]) -> ty_param_substs_0: [ty::t]) ->
ty_param_substs_and_ty { ty_param_substs_and_ty {
let ty_param_substs: [mutable ty::t] = [mutable]; let mut ty_param_substs: [mutable ty::t] = [mutable];
let ty_param_subst_var_ids: [int] = []; let mut ty_param_subst_var_ids: [int] = [];
for ty_param_subst: ty::t in ty_param_substs_0 { for ty_param_subst: ty::t in ty_param_substs_0 {
// Generate a type variable and unify it with the type parameter // Generate a type variable and unify it with the type parameter
// substitution. We will then pull out these type variables. // substitution. We will then pull out these type variables.
@ -1199,7 +1197,7 @@ mod demand {
fn mk_result(fcx: @fn_ctxt, result_ty: ty::t, fn mk_result(fcx: @fn_ctxt, result_ty: ty::t,
ty_param_subst_var_ids: [int]) -> ty_param_subst_var_ids: [int]) ->
ty_param_substs_and_ty { ty_param_substs_and_ty {
let result_ty_param_substs: [ty::t] = []; let mut result_ty_param_substs: [ty::t] = [];
for var_id: int in ty_param_subst_var_ids { for var_id: int in ty_param_subst_var_ids {
let tp_subst = ty::mk_var(fcx.ccx.tcx, var_id); let tp_subst = ty::mk_var(fcx.ccx.tcx, var_id);
result_ty_param_substs += [tp_subst]; result_ty_param_substs += [tp_subst];
@ -1241,7 +1239,7 @@ fn are_compatible(fcx: @fn_ctxt, expected: ty::t, actual: ty::t) -> bool {
// Returns the types of the arguments to a enum variant. // Returns the types of the arguments to a enum variant.
fn variant_arg_types(ccx: @crate_ctxt, _sp: span, vid: ast::def_id, fn variant_arg_types(ccx: @crate_ctxt, _sp: span, vid: ast::def_id,
enum_ty_params: [ty::t]) -> [ty::t] { enum_ty_params: [ty::t]) -> [ty::t] {
let result: [ty::t] = []; let mut result: [ty::t] = [];
let tpt = ty::lookup_item_type(ccx.tcx, vid); let tpt = ty::lookup_item_type(ccx.tcx, vid);
alt ty::get(tpt.ty).struct { alt ty::get(tpt.ty).struct {
ty::ty_fn(f) { ty::ty_fn(f) {
@ -1298,7 +1296,7 @@ mod writeback {
write_ty(tcx, id, t); write_ty(tcx, id, t);
alt tcx.node_type_substs.find(id) { alt tcx.node_type_substs.find(id) {
some(substs) { some(substs) {
let new_substs = []; let mut new_substs = [];
for subst: ty::t in substs { for subst: ty::t in substs {
alt resolve_type_vars_in_type(fcx, sp, subst) { alt resolve_type_vars_in_type(fcx, sp, subst) {
some(t) { new_substs += [t]; } some(t) { new_substs += [t]; }
@ -1454,7 +1452,7 @@ fn gather_locals(ccx: @crate_ctxt,
// Add formal parameters. // Add formal parameters.
let args = ty::ty_fn_args(ty::node_id_to_type(ccx.tcx, id)); let args = ty::ty_fn_args(ty::node_id_to_type(ccx.tcx, id));
let i = 0u; let mut i = 0u;
for arg: ty::arg in args { for arg: ty::arg in args {
assign(decl.inputs[i].id, some(arg.ty)); assign(decl.inputs[i].id, some(arg.ty));
i += 1u; i += 1u;
@ -1600,7 +1598,7 @@ fn check_pat_variant(pcx: pat_ctxt, pat: @ast::pat, path: @ast::path,
// Get the number of arguments in this enum variant. // Get the number of arguments in this enum variant.
let arg_types = variant_arg_types(pcx.fcx.ccx, pat.span, let arg_types = variant_arg_types(pcx.fcx.ccx, pat.span,
v_def_ids.var, expected_tps); v_def_ids.var, expected_tps);
arg_types = vec::map(arg_types, let arg_types = vec::map(arg_types,
bind instantiate_self_regions(pcx.fcx.ccx.tcx, bind instantiate_self_regions(pcx.fcx.ccx.tcx,
pcx.pat_region, pcx.pat_region,
_)); _));
@ -1669,7 +1667,7 @@ fn check_pat(pcx: pat_ctxt, pat: @ast::pat, expected: ty::t) {
ast::pat_ident(name, sub) ast::pat_ident(name, sub)
if !pat_util::pat_is_variant(tcx.def_map, pat) { if !pat_util::pat_is_variant(tcx.def_map, pat) {
let vid = lookup_local(pcx.fcx, pat.span, pat.id); let vid = lookup_local(pcx.fcx, pat.span, pat.id);
let typ = ty::mk_var(tcx, vid); let mut typ = ty::mk_var(tcx, vid);
typ = demand::simple(pcx.fcx, pat.span, expected, typ); typ = demand::simple(pcx.fcx, pat.span, expected, typ);
let canon_id = pcx.map.get(path_to_ident(name)); let canon_id = pcx.map.get(path_to_ident(name));
if canon_id != pat.id { if canon_id != pat.id {
@ -1690,16 +1688,15 @@ fn check_pat(pcx: pat_ctxt, pat: @ast::pat, expected: ty::t) {
check_pat_variant(pcx, pat, path, subpats, expected); check_pat_variant(pcx, pat, path, subpats, expected);
} }
ast::pat_rec(fields, etc) { ast::pat_rec(fields, etc) {
let ex_fields; let ex_fields = alt structure_of(pcx.fcx, pat.span, expected) {
alt structure_of(pcx.fcx, pat.span, expected) { ty::ty_rec(fields) { fields }
ty::ty_rec(fields) { ex_fields = fields; }
_ { _ {
tcx.sess.span_fatal tcx.sess.span_fatal
(pat.span, (pat.span,
#fmt["mismatched types: expected `%s` but found record", #fmt["mismatched types: expected `%s` but found record",
ty_to_str(tcx, expected)]); ty_to_str(tcx, expected)]);
} }
} };
let f_count = vec::len(fields); let f_count = vec::len(fields);
let ex_f_count = vec::len(ex_fields); let ex_f_count = vec::len(ex_fields);
if ex_f_count < f_count || !etc && ex_f_count > f_count { if ex_f_count < f_count || !etc && ex_f_count > f_count {
@ -1728,16 +1725,15 @@ fn check_pat(pcx: pat_ctxt, pat: @ast::pat, expected: ty::t) {
write_ty(tcx, pat.id, expected); write_ty(tcx, pat.id, expected);
} }
ast::pat_tup(elts) { ast::pat_tup(elts) {
let ex_elts; let ex_elts = alt structure_of(pcx.fcx, pat.span, expected) {
alt structure_of(pcx.fcx, pat.span, expected) { ty::ty_tup(elts) { elts }
ty::ty_tup(elts) { ex_elts = elts; }
_ { _ {
tcx.sess.span_fatal tcx.sess.span_fatal
(pat.span, (pat.span,
#fmt["mismatched types: expected `%s`, found tuple", #fmt["mismatched types: expected `%s`, found tuple",
ty_to_str(tcx, expected)]); ty_to_str(tcx, expected)]);
} }
} };
let e_count = vec::len(elts); let e_count = vec::len(elts);
if e_count != vec::len(ex_elts) { if e_count != vec::len(ex_elts) {
tcx.sess.span_fatal tcx.sess.span_fatal
@ -1745,7 +1741,7 @@ fn check_pat(pcx: pat_ctxt, pat: @ast::pat, expected: ty::t) {
with %u fields, found one with %u \ with %u fields, found one with %u \
fields", vec::len(ex_elts), e_count]); fields", vec::len(ex_elts), e_count]);
} }
let i = 0u; let mut i = 0u;
for elt in elts { for elt in elts {
check_pat(pcx, elt, ex_elts[i]); check_pat(pcx, elt, ex_elts[i]);
i += 1u; i += 1u;
@ -1878,7 +1874,8 @@ fn lookup_method(fcx: @fn_ctxt, expr: @ast::expr, node_id: ast::node_id,
alt lookup_method_inner(fcx, expr, name, ty) { alt lookup_method_inner(fcx, expr, name, ty) {
some({method_ty: fty, n_tps: method_n_tps, substs, origin, self_sub}) { some({method_ty: fty, n_tps: method_n_tps, substs, origin, self_sub}) {
let tcx = fcx.ccx.tcx; let tcx = fcx.ccx.tcx;
let substs = substs, n_tps = vec::len(substs), n_tys = vec::len(tps); let mut substs = substs;
let n_tps = vec::len(substs), n_tys = vec::len(tps);
let has_self = ty::type_has_vars(fty); let has_self = ty::type_has_vars(fty);
if method_n_tps + n_tps > 0u { if method_n_tps + n_tps > 0u {
if n_tys == 0u || n_tys != method_n_tps { if n_tys == 0u || n_tys != method_n_tps {
@ -1904,13 +1901,13 @@ fn lookup_method(fcx: @fn_ctxt, expr: @ast::expr, node_id: ast::node_id,
} }
if has_self && !option::is_none(self_sub) { if has_self && !option::is_none(self_sub) {
let fty = ty::node_id_to_type(tcx, node_id); let fty = ty::node_id_to_type(tcx, node_id);
fty = fixup_self_in_method_ty( let fty = fixup_self_in_method_ty(
tcx, fty, substs, option::get(self_sub)); tcx, fty, substs, option::get(self_sub));
write_ty(tcx, node_id, fty); write_ty(tcx, node_id, fty);
} }
if ty::type_has_rptrs(ty::ty_fn_ret(fty)) { if ty::type_has_rptrs(ty::ty_fn_ret(fty)) {
let fty = ty::node_id_to_type(tcx, node_id); let fty = ty::node_id_to_type(tcx, node_id);
fty = fixup_self_region_in_method_ty(fcx, fty, expr); let fty = fixup_self_region_in_method_ty(fcx, fty, expr);
write_ty(tcx, node_id, fty); write_ty(tcx, node_id, fty);
} }
some(origin) some(origin)
@ -1929,7 +1926,7 @@ fn lookup_method_inner_(tcx: ty::ctxt, ms: [ty::method],
-> option<{method_ty: ty::t, n_tps: uint, substs: [ty::t], -> option<{method_ty: ty::t, n_tps: uint, substs: [ty::t],
origin: method_origin, self_sub: option<self_subst>}> { origin: method_origin, self_sub: option<self_subst>}> {
#debug("lookup_method_inner_: %? %? %s", ms, parent, name); #debug("lookup_method_inner_: %? %? %s", ms, parent, name);
let i = 0u; let mut i = 0u;
for m in ms { for m in ms {
if m.ident == name { if m.ident == name {
let fty = ty::mk_fn(tcx, {proto: ast::proto_box with m.fty}); let fty = ty::mk_fn(tcx, {proto: ast::proto_box with m.fty});
@ -1977,7 +1974,7 @@ fn lookup_method_inner(fcx: @fn_ctxt, expr: @ast::expr,
// First, see whether this is an interface-bounded parameter // First, see whether this is an interface-bounded parameter
alt ty::get(ty).struct { alt ty::get(ty).struct {
ty::ty_param(n, did) { ty::ty_param(n, did) {
let bound_n = 0u; let mut bound_n = 0u;
for bound in *tcx.ty_param_bounds.get(did.node) { for bound in *tcx.ty_param_bounds.get(did.node) {
alt bound { alt bound {
ty::bound_iface(t) { ty::bound_iface(t) {
@ -2038,7 +2035,7 @@ fn lookup_method_inner(fcx: @fn_ctxt, expr: @ast::expr,
} }
} }
let result = none, complained = false; let mut result = none, complained = false;
std::list::iter(fcx.ccx.impl_map.get(expr.id)) {|impls| std::list::iter(fcx.ccx.impl_map.get(expr.id)) {|impls|
if option::is_some(result) { ret; } if option::is_some(result) { ret; }
for @{did, methods, _} in *impls { for @{did, methods, _} in *impls {
@ -2224,7 +2221,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, expr: @ast::expr, unify: unifier,
let sty = structure_of(fcx, sp, fty); let sty = structure_of(fcx, sp, fty);
// Grab the argument types // Grab the argument types
let arg_tys = alt sty { let mut arg_tys = alt sty {
ty::ty_fn({inputs: arg_tys, _}) { arg_tys } ty::ty_fn({inputs: arg_tys, _}) { arg_tys }
_ { _ {
fcx.ccx.tcx.sess.span_fatal(sp, "mismatched types: \ fcx.ccx.tcx.sess.span_fatal(sp, "mismatched types: \
@ -2269,8 +2266,8 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, expr: @ast::expr, unify: unifier,
// of arguments when we typecheck the functions. This isn't really the // of arguments when we typecheck the functions. This isn't really the
// right way to do this. // right way to do this.
let check_args = fn@(check_blocks: bool) -> bool { let check_args = fn@(check_blocks: bool) -> bool {
let i = 0u; let mut i = 0u;
let bot = false; let mut bot = false;
for a_opt in args { for a_opt in args {
alt a_opt { alt a_opt {
some(a) { some(a) {
@ -2306,7 +2303,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, expr: @ast::expr, unify: unifier,
fn check_call(fcx: @fn_ctxt, sp: span, id: ast::node_id, f: @ast::expr, fn check_call(fcx: @fn_ctxt, sp: span, id: ast::node_id, f: @ast::expr,
args: [@ast::expr]) args: [@ast::expr])
-> check_call_or_bind_result { -> check_call_or_bind_result {
let args_opt_0: [option<@ast::expr>] = []; let mut args_opt_0: [option<@ast::expr>] = [];
for arg: @ast::expr in args { for arg: @ast::expr in args {
args_opt_0 += [some::<@ast::expr>(arg)]; args_opt_0 += [some::<@ast::expr>(arg)];
} }
@ -2451,7 +2448,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, expr: @ast::expr, unify: unifier,
let tcx = fcx.ccx.tcx; let tcx = fcx.ccx.tcx;
let id = expr.id; let id = expr.id;
let bot = false; let mut bot = false;
alt expr.node { alt expr.node {
ast::expr_lit(lit) { ast::expr_lit(lit) {
let typ = check_lit(fcx.ccx, lit); let typ = check_lit(fcx.ccx, lit);
@ -2484,7 +2481,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, expr: @ast::expr, unify: unifier,
} }
ast::expr_unary(unop, oper) { ast::expr_unary(unop, oper) {
bot = check_expr(fcx, oper); bot = check_expr(fcx, oper);
let oper_t = expr_ty(tcx, oper); let mut oper_t = expr_ty(tcx, oper);
alt unop { alt unop {
ast::box(mutbl) { ast::box(mutbl) {
oper_t = ty::mk_box(tcx, {ty: oper_t, mutbl: mutbl}); oper_t = ty::mk_box(tcx, {ty: oper_t, mutbl: mutbl});
@ -2540,7 +2537,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, expr: @ast::expr, unify: unifier,
} }
ast::expr_addr_of(mutbl, oper) { ast::expr_addr_of(mutbl, oper) {
bot = check_expr(fcx, oper); bot = check_expr(fcx, oper);
let oper_t = expr_ty(tcx, oper); let mut oper_t = expr_ty(tcx, oper);
let region = region_of(fcx, oper); let region = region_of(fcx, oper);
let tm = { ty: oper_t, mutbl: mutbl }; let tm = { ty: oper_t, mutbl: mutbl };
@ -2637,7 +2634,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, expr: @ast::expr, unify: unifier,
} }
ast::expr_for(decl, seq, body) { ast::expr_for(decl, seq, body) {
bot = check_expr(fcx, seq); bot = check_expr(fcx, seq);
let elt_ty; let mut elt_ty;
let ety = expr_ty(tcx, seq); let ety = expr_ty(tcx, seq);
alt structure_of(fcx, expr.span, ety) { alt structure_of(fcx, expr.span, ety) {
ty::ty_vec(vec_elt_ty) { elt_ty = vec_elt_ty.ty; } ty::ty_vec(vec_elt_ty) { elt_ty = vec_elt_ty.ty; }
@ -2687,8 +2684,8 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, expr: @ast::expr, unify: unifier,
} }
} }
// Now typecheck the blocks. // Now typecheck the blocks.
let result_ty = next_ty_var(fcx); let mut result_ty = next_ty_var(fcx);
let arm_non_bot = false; let mut arm_non_bot = false;
for arm: ast::arm in arms { for arm: ast::arm in arms {
alt arm.guard { alt arm.guard {
some(e) { check_expr_with(fcx, e, ty::mk_bool(tcx)); } some(e) { check_expr_with(fcx, e, ty::mk_bool(tcx)); }
@ -2739,7 +2736,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, expr: @ast::expr, unify: unifier,
// TODO: Perform substitutions on the return type. // TODO: Perform substitutions on the return type.
// Pull the argument and return types out. // Pull the argument and return types out.
let proto, arg_tys, rt, cf, constrs; let mut proto, arg_tys, rt, cf, constrs;
alt structure_of(fcx, expr.span, expr_ty(tcx, f)) { alt structure_of(fcx, expr.span, expr_ty(tcx, f)) {
// FIXME: // FIXME:
// probably need to munge the constrs to drop constraints // probably need to munge the constrs to drop constraints
@ -2768,8 +2765,8 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, expr: @ast::expr, unify: unifier,
// For each blank argument, add the type of that argument // For each blank argument, add the type of that argument
// to the resulting function type. // to the resulting function type.
let out_args = []; let mut out_args = [];
let i = 0u; let mut i = 0u;
while i < vec::len(args) { while i < vec::len(args) {
alt args[i] { alt args[i] {
some(_) {/* no-op */ } some(_) {/* no-op */ }
@ -2826,7 +2823,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, expr: @ast::expr, unify: unifier,
write_ty(tcx, id, typ); write_ty(tcx, id, typ);
} }
ast::expr_tup(elts) { ast::expr_tup(elts) {
let elt_ts = []; let mut elt_ts = [];
vec::reserve(elt_ts, vec::len(elts)); vec::reserve(elt_ts, vec::len(elts));
for e in elts { for e in elts {
check_expr(fcx, e); check_expr(fcx, e);
@ -2838,7 +2835,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, expr: @ast::expr, unify: unifier,
} }
ast::expr_rec(fields, base) { ast::expr_rec(fields, base) {
alt base { none {/* no-op */ } some(b_0) { check_expr(fcx, b_0); } } alt base { none {/* no-op */ } some(b_0) { check_expr(fcx, b_0); } }
let fields_t: [spanned<field>] = []; let mut fields_t: [spanned<field>] = [];
for f: ast::field in fields { for f: ast::field in fields {
bot |= check_expr(fcx, f.node.expr); bot |= check_expr(fcx, f.node.expr);
let expr_t = expr_ty(tcx, f.node.expr); let expr_t = expr_ty(tcx, f.node.expr);
@ -2858,7 +2855,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, expr: @ast::expr, unify: unifier,
some(bexpr) { some(bexpr) {
bot |= check_expr(fcx, bexpr); bot |= check_expr(fcx, bexpr);
let bexpr_t = expr_ty(tcx, bexpr); let bexpr_t = expr_ty(tcx, bexpr);
let base_fields: [field] = []; let mut base_fields: [field] = [];
alt structure_of(fcx, expr.span, bexpr_t) { alt structure_of(fcx, expr.span, bexpr_t) {
ty::ty_rec(flds) { base_fields = flds; } ty::ty_rec(flds) { base_fields = flds; }
_ { _ {
@ -2868,7 +2865,7 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, expr: @ast::expr, unify: unifier,
} }
write_ty(tcx, id, bexpr_t); write_ty(tcx, id, bexpr_t);
for f: spanned<ty::field> in fields_t { for f: spanned<ty::field> in fields_t {
let found = false; let mut found = false;
for bf: ty::field in base_fields { for bf: ty::field in base_fields {
if str::eq(f.node.ident, bf.ident) { if str::eq(f.node.ident, bf.ident) {
demand::simple(fcx, f.span, bf.mt.ty, f.node.mt.ty); demand::simple(fcx, f.span, bf.mt.ty, f.node.mt.ty);
@ -2889,7 +2886,8 @@ fn check_expr_with_unifier(fcx: @fn_ctxt, expr: @ast::expr, unify: unifier,
let expr_t = structurally_resolved_type(fcx, expr.span, let expr_t = structurally_resolved_type(fcx, expr.span,
expr_ty(tcx, base)); expr_ty(tcx, base));
let base_t = do_autoderef(fcx, expr.span, expr_t); let base_t = do_autoderef(fcx, expr.span, expr_t);
let handled = false, n_tys = vec::len(tys); let mut handled = false;
let n_tys = vec::len(tys);
alt structure_of(fcx, expr.span, base_t) { alt structure_of(fcx, expr.span, base_t) {
ty::ty_rec(fields) { ty::ty_rec(fields) {
alt ty::field_idx(field, fields) { alt ty::field_idx(field, fields) {
@ -3060,7 +3058,7 @@ fn check_decl_initializer(fcx: @fn_ctxt, nid: ast::node_id,
} }
fn check_decl_local(fcx: @fn_ctxt, local: @ast::local) -> bool { fn check_decl_local(fcx: @fn_ctxt, local: @ast::local) -> bool {
let bot = false; let mut bot = false;
let t = ty::mk_var(fcx.ccx.tcx, fcx.locals.get(local.node.id)); let t = ty::mk_var(fcx.ccx.tcx, fcx.locals.get(local.node.id));
write_ty(fcx.ccx.tcx, local.node.id, t); write_ty(fcx.ccx.tcx, local.node.id, t);
@ -3086,8 +3084,8 @@ fn check_decl_local(fcx: @fn_ctxt, local: @ast::local) -> bool {
} }
fn check_stmt(fcx: @fn_ctxt, stmt: @ast::stmt) -> bool { fn check_stmt(fcx: @fn_ctxt, stmt: @ast::stmt) -> bool {
let node_id; let mut node_id;
let bot = false; let mut bot = false;
alt stmt.node { alt stmt.node {
ast::stmt_decl(decl, id) { ast::stmt_decl(decl, id) {
node_id = id; node_id = id;
@ -3127,8 +3125,8 @@ fn check_block(fcx0: @fn_ctxt, blk: ast::blk) -> bool {
ast::unsafe_blk { @{purity: ast::unsafe_fn with *fcx0} } ast::unsafe_blk { @{purity: ast::unsafe_fn with *fcx0} }
ast::default_blk { fcx0 } ast::default_blk { fcx0 }
}; };
let bot = false; let mut bot = false;
let warned = false; let mut warned = false;
for s: @ast::stmt in blk.node.stmts { for s: @ast::stmt in blk.node.stmts {
if bot && !warned && if bot && !warned &&
alt s.node { alt s.node {
@ -3191,8 +3189,8 @@ fn check_enum_variants(ccx: @crate_ctxt, sp: span, vs: [ast::variant],
locals: int_hash::<int>(), locals: int_hash::<int>(),
next_var_id: @mutable 0, next_var_id: @mutable 0,
ccx: ccx}; ccx: ccx};
let disr_vals: [int] = []; let mut disr_vals: [int] = [];
let disr_val = 0; let mut disr_val = 0;
for v in vs { for v in vs {
alt v.node.disr_expr { alt v.node.disr_expr {
some(e) { some(e) {
@ -3223,7 +3221,7 @@ fn check_enum_variants(ccx: @crate_ctxt, sp: span, vs: [ast::variant],
disr_vals += [disr_val]; disr_vals += [disr_val];
disr_val += 1; disr_val += 1;
} }
let outer = true, did = local_def(id); let mut outer = true, did = local_def(id);
if ty::type_structurally_contains(ccx.tcx, rty, {|sty| if ty::type_structurally_contains(ccx.tcx, rty, {|sty|
alt sty { alt sty {
ty::ty_enum(id, _) if id == did { ty::ty_enum(id, _) if id == did {
@ -3287,7 +3285,7 @@ fn check_pred_expr(fcx: @fn_ctxt, e: @ast::expr) -> bool {
} }
fn check_constraints(fcx: @fn_ctxt, cs: [@ast::constr], args: [ast::arg]) { fn check_constraints(fcx: @fn_ctxt, cs: [@ast::constr], args: [ast::arg]) {
let c_args; let mut c_args;
let num_args = vec::len(args); let num_args = vec::len(args);
for c: @ast::constr in cs { for c: @ast::constr in cs {
c_args = []; c_args = [];
@ -3384,7 +3382,7 @@ fn check_fn(ccx: @crate_ctxt,
} }
let args = ty::ty_fn_args(ty::node_id_to_type(ccx.tcx, id)); let args = ty::ty_fn_args(ty::node_id_to_type(ccx.tcx, id));
let i = 0u; let mut i = 0u;
for arg: ty::arg in args { for arg: ty::arg in args {
write_ty(ccx.tcx, decl.inputs[i].id, arg.ty); write_ty(ccx.tcx, decl.inputs[i].id, arg.ty);
i += 1u; i += 1u;
@ -3442,7 +3440,7 @@ fn check_item(ccx: @crate_ctxt, it: @ast::item) {
ast::item_impl(tps, _, ty, ms) { ast::item_impl(tps, _, ty, ms) {
let self_ty = ast_ty_to_ty(ccx.tcx, m_check, ty); let self_ty = ast_ty_to_ty(ccx.tcx, m_check, ty);
let self_region = ty::re_self({crate: ast::local_crate, node: it.id}); let self_region = ty::re_self({crate: ast::local_crate, node: it.id});
self_ty = instantiate_self_regions(ccx.tcx, self_region, self_ty); let self_ty = instantiate_self_regions(ccx.tcx, self_region, self_ty);
ccx.self_infos += [self_impl(self_ty)]; ccx.self_infos += [self_impl(self_ty)];
for m in ms { check_method(ccx, m); } for m in ms { check_method(ccx, m); }
vec::pop(ccx.self_infos); vec::pop(ccx.self_infos);
@ -3493,7 +3491,7 @@ fn check_main_fn_ty(tcx: ty::ctxt, main_id: ast::node_id, main_span: span) {
} }
_ {} _ {}
} }
let ok = vec::len(constraints) == 0u; let mut ok = vec::len(constraints) == 0u;
ok &= ty::type_is_nil(output); ok &= ty::type_is_nil(output);
let num_args = vec::len(inputs); let num_args = vec::len(inputs);
ok &= num_args == 0u || num_args == 1u && ok &= num_args == 0u || num_args == 1u &&
@ -3534,7 +3532,8 @@ mod vtable {
fn lookup_vtables(fcx: @fn_ctxt, isc: resolve::iscopes, sp: span, fn lookup_vtables(fcx: @fn_ctxt, isc: resolve::iscopes, sp: span,
bounds: @[ty::param_bounds], tys: [ty::t], bounds: @[ty::param_bounds], tys: [ty::t],
allow_unsafe: bool) -> vtable_res { allow_unsafe: bool) -> vtable_res {
let tcx = fcx.ccx.tcx, result = [], i = 0u; let tcx = fcx.ccx.tcx;
let mut result = [], i = 0u;
for ty in tys { for ty in tys {
for bound in *bounds[i] { for bound in *bounds[i] {
alt bound { alt bound {
@ -3561,7 +3560,7 @@ mod vtable {
let ty = fixup_ty(fcx, sp, ty); let ty = fixup_ty(fcx, sp, ty);
alt ty::get(ty).struct { alt ty::get(ty).struct {
ty::ty_param(n, did) { ty::ty_param(n, did) {
let n_bound = 0u; let mut n_bound = 0u;
for bound in *tcx.ty_param_bounds.get(did.node) { for bound in *tcx.ty_param_bounds.get(did.node) {
alt bound { alt bound {
ty::bound_iface(ity) { ty::bound_iface(ity) {
@ -3594,7 +3593,7 @@ mod vtable {
ret vtable_iface(did, tps); ret vtable_iface(did, tps);
} }
_ { _ {
let found = none; let mut found = none;
std::list::iter(isc) {|impls| std::list::iter(isc) {|impls|
if option::is_some(found) { ret; } if option::is_some(found) { ret; }
for im in *impls { for im in *impls {
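Not every reassigned local in this file gains a `mut`: where a loop only accumulated into a fresh vector, the patch instead rewrites it with `vec::map` (see the `ast::ty_rec`, `ty_constr`, and `ty_of_native_fn_decl` hunks above). A hedged sketch of that rewrite — `incr_all` and `xs` are hypothetical, `vec::map` is used exactly as elsewhere in this file:

    // Old shape -- would now require `let mut out`:
    //     let out = [];
    //     for x: int in xs { out += [x + 1]; }
    //
    // Shape the patch prefers when the only mutation is building the vector:
    fn incr_all(xs: [int]) -> [int] {
        vec::map(xs) {|x| x + 1 }            // no mutable local at all
    }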

View File

@ -142,8 +142,8 @@ fn float_ty_to_str(t: float_ty) -> str {
} }
fn is_exported(i: ident, m: _mod) -> bool { fn is_exported(i: ident, m: _mod) -> bool {
let local = false; let mut local = false;
let parent_enum : option<ident> = none; let mut parent_enum : option<ident> = none;
for it: @item in m.items { for it: @item in m.items {
if it.ident == i { local = true; } if it.ident == i { local = true; }
alt it.node { alt it.node {
@ -159,7 +159,7 @@ fn is_exported(i: ident, m: _mod) -> bool {
} }
if local { break; } if local { break; }
} }
let has_explicit_exports = false; let mut has_explicit_exports = false;
for vi: @view_item in m.view_items { for vi: @view_item in m.view_items {
alt vi.node { alt vi.node {
view_item_export(vps) { view_item_export(vps) {
@ -438,7 +438,7 @@ pure fn class_item_ident(ci: @class_item) -> ident {
type ivar = {ident: ident, ty: @ty, cm: class_mutability, id: node_id}; type ivar = {ident: ident, ty: @ty, cm: class_mutability, id: node_id};
fn split_class_items(cs: [@class_item]) -> ([ivar], [@method]) { fn split_class_items(cs: [@class_item]) -> ([ivar], [@method]) {
let vs = [], ms = []; let mut vs = [], ms = [];
for c in cs { for c in cs {
alt c.node.decl { alt c.node.decl {
instance_var(i, t, cm, id) { instance_var(i, t, cm, id) {

View File

@ -56,8 +56,8 @@ fn lookup_line(map: codemap, pos: uint, lookup: lookup_fn)
-> {fm: filemap, line: uint} -> {fm: filemap, line: uint}
{ {
let len = vec::len(map.files); let len = vec::len(map.files);
let a = 0u; let mut a = 0u;
let b = len; let mut b = len;
while b - a > 1u { while b - a > 1u {
let m = (a + b) / 2u; let m = (a + b) / 2u;
if lookup(map.files[m].start_pos) > pos { b = m; } else { a = m; } if lookup(map.files[m].start_pos) > pos { b = m; } else { a = m; }
@ -150,7 +150,7 @@ type file_lines = {file: filemap, lines: [uint]};
fn span_to_lines(sp: span, cm: codemap::codemap) -> @file_lines { fn span_to_lines(sp: span, cm: codemap::codemap) -> @file_lines {
let lo = lookup_char_pos(cm, sp.lo); let lo = lookup_char_pos(cm, sp.lo);
let hi = lookup_char_pos(cm, sp.hi); let hi = lookup_char_pos(cm, sp.hi);
let lines = []; let mut lines = [];
uint::range(lo.line - 1u, hi.line as uint) {|i| lines += [i]; }; uint::range(lo.line - 1u, hi.line as uint) {|i| lines += [i]; };
ret @{file: lo.file, lines: lines}; ret @{file: lo.file, lines: lines};
} }

View File

@ -67,7 +67,7 @@ fn mk_vec_e(cx: ext_ctxt, sp: span, exprs: [@ast::expr]) ->
fn mk_rec_e(cx: ext_ctxt, sp: span, fn mk_rec_e(cx: ext_ctxt, sp: span,
fields: [{ident: ast::ident, ex: @ast::expr}]) -> fields: [{ident: ast::ident, ex: @ast::expr}]) ->
@ast::expr { @ast::expr {
let astfields: [ast::field] = []; let mut astfields: [ast::field] = [];
for field: {ident: ast::ident, ex: @ast::expr} in fields { for field: {ident: ast::ident, ex: @ast::expr} in fields {
let ident = field.ident; let ident = field.ident;
let val = field.ex; let val = field.ex;

View File

@ -11,7 +11,7 @@ fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg,
cx.span_fatal(sp, "#concat_idents requires a vector argument .") cx.span_fatal(sp, "#concat_idents requires a vector argument .")
} }
}; };
let res: ast::ident = ""; let mut res: ast::ident = "";
for e: @ast::expr in args { for e: @ast::expr in args {
res += expr_to_ident(cx, e, "expected an ident"); res += expr_to_ident(cx, e, "expected an ident");
} }

View File

@ -56,9 +56,9 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, pieces: [piece], args: [@ast::expr])
fn make_rt_conv_expr(cx: ext_ctxt, sp: span, cnv: conv) -> @ast::expr { fn make_rt_conv_expr(cx: ext_ctxt, sp: span, cnv: conv) -> @ast::expr {
fn make_flags(cx: ext_ctxt, sp: span, flags: [flag]) -> @ast::expr { fn make_flags(cx: ext_ctxt, sp: span, flags: [flag]) -> @ast::expr {
let flagexprs: [@ast::expr] = []; let mut flagexprs: [@ast::expr] = [];
for f: flag in flags { for f: flag in flags {
let fstr; let mut fstr;
alt f { alt f {
flag_left_justify { fstr = "flag_left_justify"; } flag_left_justify { fstr = "flag_left_justify"; }
flag_left_zero_pad { fstr = "flag_left_zero_pad"; } flag_left_zero_pad { fstr = "flag_left_zero_pad"; }
@ -85,7 +85,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, pieces: [piece], args: [@ast::expr])
} }
} }
fn make_ty(cx: ext_ctxt, sp: span, t: ty) -> @ast::expr { fn make_ty(cx: ext_ctxt, sp: span, t: ty) -> @ast::expr {
let rt_type; let mut rt_type;
alt t { alt t {
ty_hex(c) { ty_hex(c) {
alt c { alt c {
@ -249,8 +249,8 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span, pieces: [piece], args: [@ast::expr])
} }
} }
let fmt_sp = args[0].span; let fmt_sp = args[0].span;
let n = 0u; let mut n = 0u;
let tmp_expr = mk_str(cx, sp, ""); let mut tmp_expr = mk_str(cx, sp, "");
let nargs = vec::len::<@ast::expr>(args); let nargs = vec::len::<@ast::expr>(args);
for pc: piece in pieces { for pc: piece in pieces {
alt pc { alt pc {

View File

@ -136,7 +136,7 @@ fn expand_ast(ecx: ext_ctxt, _sp: span,
arg: ast::mac_arg, body: ast::mac_body) arg: ast::mac_arg, body: ast::mac_body)
-> @ast::expr -> @ast::expr
{ {
let what = "expr"; let mut what = "expr";
option::may(arg) {|arg| option::may(arg) {|arg|
let args: [@ast::expr] = let args: [@ast::expr] =
alt arg.node { alt arg.node {
@ -211,10 +211,10 @@ fn finish<T: qq_helper>
// ^^ check that the spans are non-overlapping // ^^ check that the spans are non-overlapping
} }
let str2 = ""; let mut str2 = "";
enum state {active, skip(uint), blank}; enum state {active, skip(uint), blank};
let state = active; let mut state = active;
let i = 0u, j = 0u; let mut i = 0u, j = 0u;
let g_len = vec::len(cx.gather); let g_len = vec::len(cx.gather);
str::chars_iter(*str) {|ch| str::chars_iter(*str) {|ch|
if (j < g_len && i == cx.gather[j].lo) { if (j < g_len && i == cx.gather[j].lo) {
@ -260,7 +260,7 @@ fn finish<T: qq_helper>
"cfg"), "cfg"),
mk_access_(cx,sp, session_call(), "parse_sess")] mk_access_(cx,sp, session_call(), "parse_sess")]
); );
let rcall = pcall; let mut rcall = pcall;
if (g_len > 0u) { if (g_len > 0u) {
rcall = mk_call(cx,sp, rcall = mk_call(cx,sp,
["syntax", "ext", "qquote", "replace"], ["syntax", "ext", "qquote", "replace"],

View File

@ -74,8 +74,8 @@ type selector = fn@(matchable) -> match_result;
fn elts_to_ell(cx: ext_ctxt, elts: [@expr]) -> fn elts_to_ell(cx: ext_ctxt, elts: [@expr]) ->
{pre: [@expr], rep: option<@expr>, post: [@expr]} { {pre: [@expr], rep: option<@expr>, post: [@expr]} {
let idx: uint = 0u; let mut idx: uint = 0u;
let res = none; let mut res = none;
for elt: @expr in elts { for elt: @expr in elts {
alt elt.node { alt elt.node {
expr_mac(m) { expr_mac(m) {
@ -104,7 +104,7 @@ fn elts_to_ell(cx: ext_ctxt, elts: [@expr]) ->
fn option_flatten_map<T: copy, U: copy>(f: fn@(T) -> option<U>, v: [T]) -> fn option_flatten_map<T: copy, U: copy>(f: fn@(T) -> option<U>, v: [T]) ->
option<[U]> { option<[U]> {
let res = []; let mut res = [];
for elem: T in v { for elem: T in v {
alt f(elem) { none { ret none; } some(fv) { res += [fv]; } } alt f(elem) { none { ret none; } some(fv) { res += [fv]; } }
} }
@ -169,7 +169,7 @@ fn use_selectors_to_bind(b: binders, e: @expr) -> option<bindings> {
for sel: selector in b.literal_ast_matchers { for sel: selector in b.literal_ast_matchers {
alt sel(match_expr(e)) { none { ret none; } _ { } } alt sel(match_expr(e)) { none { ret none; } _ { } }
} }
let never_mind: bool = false; let mut never_mind: bool = false;
b.real_binders.items {|key, val| b.real_binders.items {|key, val|
alt val(match_expr(e)) { alt val(match_expr(e)) {
none { never_mind = true; } none { never_mind = true; }
@ -211,7 +211,7 @@ fn transcribe(cx: ext_ctxt, b: bindings, body: @expr) -> @expr {
/* helper: descend into a matcher */ /* helper: descend into a matcher */
fn follow(m: arb_depth<matchable>, idx_path: @mutable [uint]) -> fn follow(m: arb_depth<matchable>, idx_path: @mutable [uint]) ->
arb_depth<matchable> { arb_depth<matchable> {
let res: arb_depth<matchable> = m; let mut res: arb_depth<matchable> = m;
for idx: uint in *idx_path { for idx: uint in *idx_path {
alt res { alt res {
leaf(_) { ret res;/* end of the line */ } leaf(_) { ret res;/* end of the line */ }
@ -263,11 +263,11 @@ fn transcribe_exprs(cx: ext_ctxt, b: bindings, idx_path: @mutable [uint],
recur: fn@(&&@expr) -> @expr, exprs: [@expr]) -> [@expr] { recur: fn@(&&@expr) -> @expr, exprs: [@expr]) -> [@expr] {
alt elts_to_ell(cx, exprs) { alt elts_to_ell(cx, exprs) {
{pre: pre, rep: repeat_me_maybe, post: post} { {pre: pre, rep: repeat_me_maybe, post: post} {
let res = vec::map(pre, recur); let mut res = vec::map(pre, recur);
alt repeat_me_maybe { alt repeat_me_maybe {
none { } none { }
some(repeat_me) { some(repeat_me) {
let repeat: option<{rep_count: uint, name: ident}> = none; let mut repeat: option<{rep_count: uint, name: ident}> = none;
/* we need to walk over all the free vars in lockstep, except for /* we need to walk over all the free vars in lockstep, except for
the leaves, which are just duplicated */ the leaves, which are just duplicated */
free_vars(b, repeat_me) {|fv| free_vars(b, repeat_me) {|fv|
@ -301,7 +301,7 @@ fn transcribe_exprs(cx: ext_ctxt, b: bindings, idx_path: @mutable [uint],
} }
some({rep_count: rc, _}) { some({rep_count: rc, _}) {
/* Whew, we now know how how many times to repeat */ /* Whew, we now know how how many times to repeat */
let idx: uint = 0u; let mut idx: uint = 0u;
while idx < rc { while idx < rc {
*idx_path += [idx]; *idx_path += [idx];
res += [recur(repeat_me)]; // whew! res += [recur(repeat_me)]; // whew!
@ -598,8 +598,8 @@ fn p_t_s_r_ellipses(cx: ext_ctxt, repeat_me: @expr, offset: uint, s: selector,
match_expr(e) { match_expr(e) {
alt e.node { alt e.node {
expr_vec(arg_elts, _) { expr_vec(arg_elts, _) {
let elts = []; let mut elts = [];
let idx = offset; let mut idx = offset;
while idx < vec::len(arg_elts) { while idx < vec::len(arg_elts) {
elts += [leaf(match_expr(arg_elts[idx]))]; elts += [leaf(match_expr(arg_elts[idx]))];
idx += 1u; idx += 1u;
@ -645,7 +645,7 @@ fn p_t_s_r_length(cx: ext_ctxt, len: uint, at_least: bool, s: selector,
fn p_t_s_r_actual_vector(cx: ext_ctxt, elts: [@expr], _repeat_after: bool, fn p_t_s_r_actual_vector(cx: ext_ctxt, elts: [@expr], _repeat_after: bool,
s: selector, b: binders) { s: selector, b: binders) {
let idx: uint = 0u; let mut idx: uint = 0u;
while idx < vec::len(elts) { while idx < vec::len(elts) {
fn select(cx: ext_ctxt, m: matchable, idx: uint) -> match_result { fn select(cx: ext_ctxt, m: matchable, idx: uint) -> match_result {
ret alt m { ret alt m {
@ -678,8 +678,8 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
} }
}; };
let macro_name: option<str> = none; let mut macro_name: option<str> = none;
let clauses: [@clause] = []; let mut clauses: [@clause] = [];
for arg: @expr in args { for arg: @expr in args {
alt arg.node { alt arg.node {
expr_vec(elts, mutbl) { expr_vec(elts, mutbl) {

View File

@ -339,7 +339,7 @@ fn noop_fold_pat(p: pat_, fld: ast_fold) -> pat_ {
pat_enum(fld.fold_path(pth), vec::map(pats, fld.fold_pat)) pat_enum(fld.fold_path(pth), vec::map(pats, fld.fold_pat))
} }
pat_rec(fields, etc) { pat_rec(fields, etc) {
let fs = []; let mut fs = [];
for f: ast::field_pat in fields { for f: ast::field_pat in fields {
fs += [{ident: f.ident, pat: fld.fold_pat(f.pat)}]; fs += [{ident: f.ident, pat: fld.fold_pat(f.pat)}];
} }

View File

@ -28,8 +28,8 @@ fn eval_crate_directives_to_mod(cx: ctx, cdirs: [@ast::crate_directive],
option::from_maybe(suffix, "none")); option::from_maybe(suffix, "none"));
let (cview_items, citems, cattrs) let (cview_items, citems, cattrs)
= parse_companion_mod(cx, prefix, suffix); = parse_companion_mod(cx, prefix, suffix);
let view_items: [@ast::view_item] = []; let mut view_items: [@ast::view_item] = [];
let items: [@ast::item] = []; let mut items: [@ast::item] = [];
eval_crate_directives(cx, cdirs, prefix, view_items, items); eval_crate_directives(cx, cdirs, prefix, view_items, items);
ret ({view_items: view_items + cview_items, ret ({view_items: view_items + cview_items,
items: items + citems}, items: items + citems},

View File

@ -129,7 +129,7 @@ fn consume_any_line_comment(rdr: reader) {
} }
fn consume_block_comment(rdr: reader) { fn consume_block_comment(rdr: reader) {
let level: int = 1; let mut level: int = 1;
while level > 0 { while level > 0 {
if rdr.is_eof() { rdr.fatal("unterminated block comment"); } if rdr.is_eof() { rdr.fatal("unterminated block comment"); }
if rdr.curr == '/' && rdr.next() == '*' { if rdr.curr == '/' && rdr.next() == '*' {
@ -150,8 +150,8 @@ fn consume_block_comment(rdr: reader) {
} }
fn scan_exponent(rdr: reader) -> option<str> { fn scan_exponent(rdr: reader) -> option<str> {
let c = rdr.curr; let mut c = rdr.curr;
let rslt = ""; let mut rslt = "";
if c == 'e' || c == 'E' { if c == 'e' || c == 'E' {
str::push_char(rslt, c); str::push_char(rslt, c);
rdr.bump(); rdr.bump();
@ -168,7 +168,7 @@ fn scan_exponent(rdr: reader) -> option<str> {
} }
fn scan_digits(rdr: reader, radix: uint) -> str { fn scan_digits(rdr: reader, radix: uint) -> str {
let rslt = ""; let mut rslt = "";
loop { loop {
let c = rdr.curr; let c = rdr.curr;
if c == '_' { rdr.bump(); cont; } if c == '_' { rdr.bump(); cont; }
@ -183,7 +183,7 @@ fn scan_digits(rdr: reader, radix: uint) -> str {
} }
fn scan_number(c: char, rdr: reader) -> token::token { fn scan_number(c: char, rdr: reader) -> token::token {
let num_str, base = 10u, c = c, n = rdr.next(); let mut num_str, base = 10u, c = c, n = rdr.next();
if c == '0' && n == 'x' { if c == '0' && n == 'x' {
rdr.bump(); rdr.bump();
rdr.bump(); rdr.bump();
@ -197,8 +197,11 @@ fn scan_number(c: char, rdr: reader) -> token::token {
c = rdr.curr; c = rdr.curr;
n = rdr.next(); n = rdr.next();
if c == 'u' || c == 'i' { if c == 'u' || c == 'i' {
let signed = c == 'i', tp = if signed { either::left(ast::ty_i) } let signed = c == 'i';
else { either::right(ast::ty_u) }; let mut tp = {
if signed { either::left(ast::ty_i) }
else { either::right(ast::ty_u) }
};
rdr.bump(); rdr.bump();
c = rdr.curr; c = rdr.curr;
if c == '8' { if c == '8' {
@ -232,7 +235,7 @@ fn scan_number(c: char, rdr: reader) -> token::token {
either::right(t) { ret token::LIT_UINT(parsed, t); } either::right(t) { ret token::LIT_UINT(parsed, t); }
} }
} }
let is_float = false; let mut is_float = false;
if rdr.curr == '.' && !(is_alpha(rdr.next()) || rdr.next() == '_') { if rdr.curr == '.' && !(is_alpha(rdr.next()) || rdr.next() == '_') {
is_float = true; is_float = true;
rdr.bump(); rdr.bump();
@ -279,7 +282,7 @@ fn scan_number(c: char, rdr: reader) -> token::token {
} }
fn scan_numeric_escape(rdr: reader, n_hex_digits: uint) -> char { fn scan_numeric_escape(rdr: reader, n_hex_digits: uint) -> char {
let accum_int = 0, i = n_hex_digits; let mut accum_int = 0, i = n_hex_digits;
while i != 0u { while i != 0u {
let n = rdr.curr; let n = rdr.curr;
rdr.bump(); rdr.bump();
@ -302,8 +305,8 @@ fn next_token(rdr: reader) -> {tok: token::token, chpos: uint, bpos: uint} {
} }
fn next_token_inner(rdr: reader) -> token::token { fn next_token_inner(rdr: reader) -> token::token {
let accum_str = ""; let mut accum_str = "";
let c = rdr.curr; let mut c = rdr.curr;
if (c >= 'a' && c <= 'z') if (c >= 'a' && c <= 'z')
|| (c >= 'A' && c <= 'Z') || (c >= 'A' && c <= 'Z')
|| c == '_' || c == '_'
@ -377,7 +380,7 @@ fn next_token_inner(rdr: reader) -> token::token {
'$' { '$' {
rdr.bump(); rdr.bump();
if is_dec_digit(rdr.curr) { if is_dec_digit(rdr.curr) {
let val = dec_digit_val(rdr.curr) as uint; let mut val = dec_digit_val(rdr.curr) as uint;
while is_dec_digit(rdr.next()) { while is_dec_digit(rdr.next()) {
rdr.bump(); rdr.bump();
val = val * 10u + (dec_digit_val(rdr.curr) as uint); val = val * 10u + (dec_digit_val(rdr.curr) as uint);
@ -441,7 +444,7 @@ fn next_token_inner(rdr: reader) -> token::token {
} }
'\'' { '\'' {
rdr.bump(); rdr.bump();
let c2 = rdr.curr; let mut c2 = rdr.curr;
rdr.bump(); rdr.bump();
if c2 == '\\' { if c2 == '\\' {
let escaped = rdr.curr; let escaped = rdr.curr;
@ -548,7 +551,7 @@ enum cmnt_style {
type cmnt = {style: cmnt_style, lines: [str], pos: uint}; type cmnt = {style: cmnt_style, lines: [str], pos: uint};
fn read_to_eol(rdr: reader) -> str { fn read_to_eol(rdr: reader) -> str {
let val = ""; let mut val = "";
while rdr.curr != '\n' && !rdr.is_eof() { while rdr.curr != '\n' && !rdr.is_eof() {
str::push_char(val, rdr.curr); str::push_char(val, rdr.curr);
rdr.bump(); rdr.bump();
@ -591,7 +594,7 @@ fn consume_whitespace_counting_blank_lines(rdr: reader, &comments: [cmnt]) {
fn read_line_comments(rdr: reader, code_to_the_left: bool) -> cmnt { fn read_line_comments(rdr: reader, code_to_the_left: bool) -> cmnt {
#debug(">>> line comments"); #debug(">>> line comments");
let p = rdr.chpos; let p = rdr.chpos;
let lines: [str] = []; let mut lines: [str] = [];
while rdr.curr == '/' && rdr.next() == '/' { while rdr.curr == '/' && rdr.next() == '/' {
let line = read_one_line_comment(rdr); let line = read_one_line_comment(rdr);
log(debug, line); log(debug, line);
@ -605,14 +608,14 @@ fn read_line_comments(rdr: reader, code_to_the_left: bool) -> cmnt {
} }
fn all_whitespace(s: str, begin: uint, end: uint) -> bool { fn all_whitespace(s: str, begin: uint, end: uint) -> bool {
let i: uint = begin; let mut i: uint = begin;
while i != end { if !is_whitespace(s[i] as char) { ret false; } i += 1u; } while i != end { if !is_whitespace(s[i] as char) { ret false; } i += 1u; }
ret true; ret true;
} }
fn trim_whitespace_prefix_and_push_line(&lines: [str], fn trim_whitespace_prefix_and_push_line(&lines: [str],
s: str, col: uint) unsafe { s: str, col: uint) unsafe {
let s1; let mut s1;
if all_whitespace(s, 0u, col) { if all_whitespace(s, 0u, col) {
if col < str::len(s) { if col < str::len(s) {
s1 = str::slice(s, col, str::len(s)); s1 = str::slice(s, col, str::len(s));
@ -625,12 +628,12 @@ fn trim_whitespace_prefix_and_push_line(&lines: [str],
fn read_block_comment(rdr: reader, code_to_the_left: bool) -> cmnt { fn read_block_comment(rdr: reader, code_to_the_left: bool) -> cmnt {
#debug(">>> block comment"); #debug(">>> block comment");
let p = rdr.chpos; let p = rdr.chpos;
let lines: [str] = []; let mut lines: [str] = [];
let col: uint = rdr.col; let mut col: uint = rdr.col;
rdr.bump(); rdr.bump();
rdr.bump(); rdr.bump();
let curr_line = "/*"; let mut curr_line = "/*";
let level: int = 1; let mut level: int = 1;
while level > 0 { while level > 0 {
#debug("=== block comment level %d", level); #debug("=== block comment level %d", level);
if rdr.is_eof() { rdr.fatal("unterminated block comment"); } if rdr.is_eof() { rdr.fatal("unterminated block comment"); }
@ -658,7 +661,7 @@ fn read_block_comment(rdr: reader, code_to_the_left: bool) -> cmnt {
if str::len(curr_line) != 0u { if str::len(curr_line) != 0u {
trim_whitespace_prefix_and_push_line(lines, curr_line, col); trim_whitespace_prefix_and_push_line(lines, curr_line, col);
} }
let style = if code_to_the_left { trailing } else { isolated }; let mut style = if code_to_the_left { trailing } else { isolated };
consume_non_eol_whitespace(rdr); consume_non_eol_whitespace(rdr);
if !rdr.is_eof() && rdr.curr != '\n' && vec::len(lines) == 1u { if !rdr.is_eof() && rdr.curr != '\n' && vec::len(lines) == 1u {
style = mixed; style = mixed;
@ -704,12 +707,12 @@ fn gather_comments_and_literals(cm: codemap::codemap,
let itr = @interner::mk::<str>(str::hash, str::eq); let itr = @interner::mk::<str>(str::hash, str::eq);
let rdr = new_reader(cm, span_diagnostic, let rdr = new_reader(cm, span_diagnostic,
codemap::new_filemap(path, src, 0u, 0u), itr); codemap::new_filemap(path, src, 0u, 0u), itr);
let comments: [cmnt] = []; let mut comments: [cmnt] = [];
let literals: [lit] = []; let mut literals: [lit] = [];
let first_read: bool = true; let mut first_read: bool = true;
while !rdr.is_eof() { while !rdr.is_eof() {
loop { loop {
let code_to_the_left = !first_read; let mut code_to_the_left = !first_read;
consume_non_eol_whitespace(rdr); consume_non_eol_whitespace(rdr);
if rdr.curr == '\n' { if rdr.curr == '\n' {
code_to_the_left = false; code_to_the_left = false;
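Most of the lexer edits above are the same rule applied to accumulators: rslt, accum_str, val and the nesting counter level are all mutated across loop iterations, so each gains mut. A small sketch of the digit-scanning loop in current syntax; the function name and the &str input are assumptions for the example, the real code pulls characters through the rdr reader shown in the diff:

    fn scan_digits(src: &str, radix: u32) -> String {
        // The output string is appended to on every iteration, so once the
        // keyword is mandatory it must be declared mut.
        let mut rslt = String::new();
        for c in src.chars() {
            if c == '_' { continue; }
            if c.to_digit(radix).is_some() { rslt.push(c); } else { break; }
        }
        rslt
    }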

View File

@ -169,7 +169,7 @@ fn expect(p: parser, t: token::token) {
if p.token == t { if p.token == t {
p.bump(); p.bump();
} else { } else {
let s: str = "expecting '"; let mut s: str = "expecting '";
s += token::to_str(p.reader, t); s += token::to_str(p.reader, t);
s += "' but found '"; s += "' but found '";
s += token::to_str(p.reader, p.token); s += token::to_str(p.reader, p.token);
@ -185,7 +185,7 @@ fn expect_gt(p: parser) {
} else if p.token == token::BINOP(token::ASR) { } else if p.token == token::BINOP(token::ASR) {
p.swap(token::BINOP(token::LSR), p.span.lo + 1u, p.span.hi); p.swap(token::BINOP(token::LSR), p.span.lo + 1u, p.span.hi);
} else { } else {
let s: str = "expecting "; let mut s: str = "expecting ";
s += token::to_str(p.reader, token::GT); s += token::to_str(p.reader, token::GT);
s += ", found "; s += ", found ";
s += token::to_str(p.reader, p.token); s += token::to_str(p.reader, p.token);
@ -313,14 +313,14 @@ fn parse_ty_field(p: parser) -> ast::ty_field {
// if i is the jth ident in args, return j // if i is the jth ident in args, return j
// otherwise, fail // otherwise, fail
fn ident_index(p: parser, args: [ast::arg], i: ast::ident) -> uint { fn ident_index(p: parser, args: [ast::arg], i: ast::ident) -> uint {
let j = 0u; let mut j = 0u;
for a: ast::arg in args { if a.ident == i { ret j; } j += 1u; } for a: ast::arg in args { if a.ident == i { ret j; } j += 1u; }
p.fatal("unbound variable `" + i + "` in constraint arg"); p.fatal("unbound variable `" + i + "` in constraint arg");
} }
fn parse_type_constr_arg(p: parser) -> @ast::ty_constr_arg { fn parse_type_constr_arg(p: parser) -> @ast::ty_constr_arg {
let sp = p.span; let sp = p.span;
let carg = ast::carg_base; let mut carg = ast::carg_base;
expect(p, token::BINOP(token::STAR)); expect(p, token::BINOP(token::STAR));
if p.token == token::DOT { if p.token == token::DOT {
// "*..." notation for record fields // "*..." notation for record fields
@ -334,7 +334,7 @@ fn parse_type_constr_arg(p: parser) -> @ast::ty_constr_arg {
fn parse_constr_arg(args: [ast::arg], p: parser) -> @ast::constr_arg { fn parse_constr_arg(args: [ast::arg], p: parser) -> @ast::constr_arg {
let sp = p.span; let sp = p.span;
let carg = ast::carg_base; let mut carg = ast::carg_base;
if p.token == token::BINOP(token::STAR) { if p.token == token::BINOP(token::STAR) {
p.bump(); p.bump();
} else { } else {
@ -369,7 +369,7 @@ fn parse_constr_in_type(p: parser) -> @ast::ty_constr {
fn parse_constrs<T: copy>(pser: fn(parser) -> @ast::constr_general<T>, fn parse_constrs<T: copy>(pser: fn(parser) -> @ast::constr_general<T>,
p: parser) -> p: parser) ->
[@ast::constr_general<T>] { [@ast::constr_general<T>] {
let constrs: [@ast::constr_general<T>] = []; let mut constrs: [@ast::constr_general<T>] = [];
loop { loop {
let constr = pser(p); let constr = pser(p);
constrs += [constr]; constrs += [constr];
@ -463,7 +463,7 @@ fn parse_ty(p: parser, colons_before_params: bool) -> @ast::ty {
p.bump(); p.bump();
ast::ty_nil ast::ty_nil
} else { } else {
let ts = [parse_ty(p, false)]; let mut ts = [parse_ty(p, false)];
while p.token == token::COMMA { while p.token == token::COMMA {
p.bump(); p.bump();
ts += [parse_ty(p, false)]; ts += [parse_ty(p, false)];
@ -564,8 +564,8 @@ fn parse_fn_block_arg(p: parser) -> ast::arg {
fn parse_seq_to_before_gt<T: copy>(sep: option<token::token>, fn parse_seq_to_before_gt<T: copy>(sep: option<token::token>,
f: fn(parser) -> T, f: fn(parser) -> T,
p: parser) -> [T] { p: parser) -> [T] {
let first = true; let mut first = true;
let v = []; let mut v = [];
while p.token != token::GT && p.token != token::BINOP(token::LSR) && while p.token != token::GT && p.token != token::BINOP(token::LSR) &&
p.token != token::BINOP(token::ASR) { p.token != token::BINOP(token::ASR) {
alt sep { alt sep {
@ -622,8 +622,8 @@ fn seq_sep_none() -> seq_sep {
fn parse_seq_to_before_end<T: copy>(ket: token::token, fn parse_seq_to_before_end<T: copy>(ket: token::token,
sep: seq_sep, sep: seq_sep,
f: fn(parser) -> T, p: parser) -> [T] { f: fn(parser) -> T, p: parser) -> [T] {
let first: bool = true; let mut first: bool = true;
let v: [T] = []; let mut v: [T] = [];
while p.token != ket { while p.token != ket {
alt sep.sep { alt sep.sep {
some(t) { if first { first = false; } else { expect(p, t); } } some(t) { if first { first = false; } else { expect(p, t); } }
@ -701,7 +701,8 @@ fn is_plain_ident(p: parser) -> bool {
fn parse_path(p: parser) -> @ast::path { fn parse_path(p: parser) -> @ast::path {
let lo = p.span.lo; let lo = p.span.lo;
let global = eat(p, token::MOD_SEP), ids = [parse_ident(p)]; let global = eat(p, token::MOD_SEP);
let mut ids = [parse_ident(p)];
while p.look_ahead(1u) != token::LT && eat(p, token::MOD_SEP) { while p.look_ahead(1u) != token::LT && eat(p, token::MOD_SEP) {
ids += [parse_ident(p)]; ids += [parse_ident(p)];
} }
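parse_path above shows the other recurring edit: a multi-binding let is split in two when only one of its bindings is mutated, so that mut can be attached to exactly the binding that needs it (global stays immutable, ids keeps growing). Roughly the same split in current syntax, with invented inputs standing in for the parser state:

    fn parse_path_like(first: &str, rest: &[&str]) -> (bool, Vec<String>) {
        // One let per binding: the flag stays plain, only the growing vector is mut.
        let global = first.starts_with("::");
        let mut ids = vec![first.trim_start_matches("::").to_string()];
        for id in rest {
            ids.push(id.to_string());
        }
        (global, ids)
    }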
@ -800,9 +801,9 @@ fn to_expr(e: pexpr) -> @ast::expr {
fn parse_bottom_expr(p: parser) -> pexpr { fn parse_bottom_expr(p: parser) -> pexpr {
let lo = p.span.lo; let lo = p.span.lo;
let hi = p.span.hi; let mut hi = p.span.hi;
let ex: ast::expr_; let mut ex: ast::expr_;
alt have_dollar(p) { alt have_dollar(p) {
some(x) {ret pexpr(mk_mac_expr(p, lo, p.span.hi, x));} some(x) {ret pexpr(mk_mac_expr(p, lo, p.span.hi, x));}
@ -817,7 +818,7 @@ fn parse_bottom_expr(p: parser) -> pexpr {
let lit = @spanned(lo, hi, ast::lit_nil); let lit = @spanned(lo, hi, ast::lit_nil);
ret mk_pexpr(p, lo, hi, ast::expr_lit(lit)); ret mk_pexpr(p, lo, hi, ast::expr_lit(lit));
} }
let es = [parse_expr(p)]; let mut es = [parse_expr(p)];
while p.token == token::COMMA { p.bump(); es += [parse_expr(p)]; } while p.token == token::COMMA { p.bump(); es += [parse_expr(p)]; }
hi = p.span.hi; hi = p.span.hi;
expect(p, token::RPAREN); expect(p, token::RPAREN);
@ -832,8 +833,8 @@ fn parse_bottom_expr(p: parser) -> pexpr {
p.bump(); p.bump();
if is_word(p, "mut") || is_word(p, "mutable") || if is_word(p, "mut") || is_word(p, "mutable") ||
is_plain_ident(p) && p.look_ahead(1u) == token::COLON { is_plain_ident(p) && p.look_ahead(1u) == token::COLON {
let fields = [parse_field(p, token::COLON)]; let mut fields = [parse_field(p, token::COLON)];
let base = none; let mut base = none;
while p.token != token::RBRACE { while p.token != token::RBRACE {
if eat_word(p, "with") { base = some(parse_expr(p)); break; } if eat_word(p, "with") { base = some(parse_expr(p)); break; }
expect(p, token::COMMA); expect(p, token::COMMA);
@ -1011,7 +1012,7 @@ fn parse_syntax_ext_naked(p: parser, lo: uint) -> @ast::expr {
let pth = parse_path(p); let pth = parse_path(p);
//temporary for a backwards-compatible cycle: //temporary for a backwards-compatible cycle:
let sep = seq_sep(token::COMMA); let sep = seq_sep(token::COMMA);
let e = none; let mut e = none;
if (p.token == token::LPAREN || p.token == token::LBRACKET) { if (p.token == token::LPAREN || p.token == token::LBRACKET) {
let es = let es =
if p.token == token::LPAREN { if p.token == token::LPAREN {
@ -1025,11 +1026,11 @@ fn parse_syntax_ext_naked(p: parser, lo: uint) -> @ast::expr {
e = some(mk_expr(p, es.span.lo, hi, e = some(mk_expr(p, es.span.lo, hi,
ast::expr_vec(es.node, ast::m_imm))); ast::expr_vec(es.node, ast::m_imm)));
} }
let b = none; let mut b = none;
if p.token == token::LBRACE { if p.token == token::LBRACE {
p.bump(); p.bump();
let lo = p.span.lo; let lo = p.span.lo;
let depth = 1u; let mut depth = 1u;
while (depth > 0u) { while (depth > 0u) {
alt (p.token) { alt (p.token) {
token::LBRACE {depth += 1u;} token::LBRACE {depth += 1u;}
@ -1055,9 +1056,9 @@ fn permits_call(p: parser) -> bool {
} }
fn parse_dot_or_call_expr_with(p: parser, e0: pexpr) -> pexpr { fn parse_dot_or_call_expr_with(p: parser, e0: pexpr) -> pexpr {
let e = e0; let mut e = e0;
let lo = e.span.lo; let lo = e.span.lo;
let hi = e.span.hi; let mut hi = e.span.hi;
loop { loop {
// expr.f // expr.f
if eat(p, token::DOT) { if eat(p, token::DOT) {
@ -1132,9 +1133,9 @@ fn parse_dot_or_call_expr_with(p: parser, e0: pexpr) -> pexpr {
fn parse_prefix_expr(p: parser) -> pexpr { fn parse_prefix_expr(p: parser) -> pexpr {
let lo = p.span.lo; let lo = p.span.lo;
let hi = p.span.hi; let mut hi = p.span.hi;
let ex; let mut ex;
alt p.token { alt p.token {
token::NOT { token::NOT {
p.bump(); p.bump();
@ -1261,7 +1262,7 @@ fn parse_assign_expr(p: parser) -> @ast::expr {
token::BINOPEQ(op) { token::BINOPEQ(op) {
p.bump(); p.bump();
let rhs = parse_expr(p); let rhs = parse_expr(p);
let aop = ast::add; let mut aop;
alt op { alt op {
token::PLUS { aop = ast::add; } token::PLUS { aop = ast::add; }
token::MINUS { aop = ast::subtract; } token::MINUS { aop = ast::subtract; }
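The aop hunk just above is a slightly different case: the old code initialized it to a placeholder ast::add only to overwrite it in the alt arms, and the new code drops the placeholder and writes let mut aop; with no initializer. In today's Rust the same value is usually produced directly by a match expression, which needs no mut at all; a hedged sketch with invented operator and token names, not the parser's actual types:

    enum BinOp { Add, Subtract, Mul }

    fn binop_for(tok: char) -> BinOp {
        // Computing the result as one expression avoids both the placeholder
        // initializer and the assign-in-every-arm pattern seen in the diff.
        let aop = match tok {
            '+' => BinOp::Add,
            '-' => BinOp::Subtract,
            _ => BinOp::Mul,
        };
        aop
    }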
@ -1302,8 +1303,8 @@ fn parse_if_expr_1(p: parser) ->
let lo = p.last_span.lo; let lo = p.last_span.lo;
let cond = parse_expr(p); let cond = parse_expr(p);
let thn = parse_block(p); let thn = parse_block(p);
let els: option<@ast::expr> = none; let mut els: option<@ast::expr> = none;
let hi = thn.span.hi; let mut hi = thn.span.hi;
if eat_word(p, "else") { if eat_word(p, "else") {
let elexpr = parse_else_expr(p); let elexpr = parse_else_expr(p);
els = some(elexpr); els = some(elexpr);
@ -1337,7 +1338,7 @@ fn parse_capture_clause(p: parser) -> @ast::capture_clause {
} }
fn eat_ident_list(p: parser) -> [@ast::capture_item] { fn eat_ident_list(p: parser) -> [@ast::capture_item] {
let res = []; let mut res = [];
loop { loop {
alt p.token { alt p.token {
token::IDENT(_, _) { token::IDENT(_, _) {
@ -1355,8 +1356,8 @@ fn parse_capture_clause(p: parser) -> @ast::capture_clause {
}; };
} }
let copies = []; let mut copies = [];
let moves = []; let mut moves = [];
if eat(p, token::LBRACKET) { if eat(p, token::LBRACKET) {
while !eat(p, token::RBRACKET) { while !eat(p, token::RBRACKET) {
@ -1407,7 +1408,7 @@ fn parse_for_expr(p: parser) -> @ast::expr {
expect_word(p, "in"); expect_word(p, "in");
let seq = parse_expr(p); let seq = parse_expr(p);
let body = parse_block_no_value(p); let body = parse_block_no_value(p);
let hi = body.span.hi; let mut hi = body.span.hi;
ret mk_expr(p, lo, hi, ast::expr_for(decl, seq, body)); ret mk_expr(p, lo, hi, ast::expr_for(decl, seq, body));
} }
@ -1415,7 +1416,7 @@ fn parse_while_expr(p: parser) -> @ast::expr {
let lo = p.last_span.lo; let lo = p.last_span.lo;
let cond = parse_expr(p); let cond = parse_expr(p);
let body = parse_block_no_value(p); let body = parse_block_no_value(p);
let hi = body.span.hi; let mut hi = body.span.hi;
ret mk_expr(p, lo, hi, ast::expr_while(cond, body)); ret mk_expr(p, lo, hi, ast::expr_while(cond, body));
} }
@ -1424,14 +1425,14 @@ fn parse_do_while_expr(p: parser) -> @ast::expr {
let body = parse_block_no_value(p); let body = parse_block_no_value(p);
expect_word(p, "while"); expect_word(p, "while");
let cond = parse_expr(p); let cond = parse_expr(p);
let hi = cond.span.hi; let mut hi = cond.span.hi;
ret mk_expr(p, lo, hi, ast::expr_do_while(body, cond)); ret mk_expr(p, lo, hi, ast::expr_do_while(body, cond));
} }
fn parse_loop_expr(p: parser) -> @ast::expr { fn parse_loop_expr(p: parser) -> @ast::expr {
let lo = p.last_span.lo; let lo = p.last_span.lo;
let body = parse_block_no_value(p); let body = parse_block_no_value(p);
let hi = body.span.hi; let mut hi = body.span.hi;
ret mk_expr(p, lo, hi, ast::expr_loop(body)); ret mk_expr(p, lo, hi, ast::expr_loop(body));
} }
@ -1441,15 +1442,15 @@ fn parse_alt_expr(p: parser) -> @ast::expr {
else { ast::alt_exhaustive }; else { ast::alt_exhaustive };
let discriminant = parse_expr(p); let discriminant = parse_expr(p);
expect(p, token::LBRACE); expect(p, token::LBRACE);
let arms: [ast::arm] = []; let mut arms: [ast::arm] = [];
while p.token != token::RBRACE { while p.token != token::RBRACE {
let pats = parse_pats(p); let pats = parse_pats(p);
let guard = none; let mut guard = none;
if eat_word(p, "if") { guard = some(parse_expr(p)); } if eat_word(p, "if") { guard = some(parse_expr(p)); }
let blk = parse_block(p); let blk = parse_block(p);
arms += [{pats: pats, guard: guard, body: blk}]; arms += [{pats: pats, guard: guard, body: blk}];
} }
let hi = p.span.hi; let mut hi = p.span.hi;
p.bump(); p.bump();
ret mk_expr(p, lo, hi, ast::expr_alt(discriminant, arms, mode)); ret mk_expr(p, lo, hi, ast::expr_alt(discriminant, arms, mode));
} }
@ -1497,7 +1498,7 @@ fn parse_initializer(p: parser) -> option<ast::initializer> {
} }
fn parse_pats(p: parser) -> [@ast::pat] { fn parse_pats(p: parser) -> [@ast::pat] {
let pats = []; let mut pats = [];
loop { loop {
pats += [parse_pat(p)]; pats += [parse_pat(p)];
if p.token == token::BINOP(token::OR) { p.bump(); } else { ret pats; } if p.token == token::BINOP(token::OR) { p.bump(); } else { ret pats; }
@ -1506,8 +1507,8 @@ fn parse_pats(p: parser) -> [@ast::pat] {
fn parse_pat(p: parser) -> @ast::pat { fn parse_pat(p: parser) -> @ast::pat {
let lo = p.span.lo; let lo = p.span.lo;
let hi = p.span.hi; let mut hi = p.span.hi;
let pat; let mut pat;
alt p.token { alt p.token {
token::UNDERSCORE { p.bump(); pat = ast::pat_wild; } token::UNDERSCORE { p.bump(); pat = ast::pat_wild; }
token::AT { token::AT {
@ -1524,9 +1525,9 @@ fn parse_pat(p: parser) -> @ast::pat {
} }
token::LBRACE { token::LBRACE {
p.bump(); p.bump();
let fields = []; let mut fields = [];
let etc = false; let mut etc = false;
let first = true; let mut first = true;
while p.token != token::RBRACE { while p.token != token::RBRACE {
if first { first = false; } else { expect(p, token::COMMA); } if first { first = false; } else { expect(p, token::COMMA); }
@ -1545,7 +1546,7 @@ fn parse_pat(p: parser) -> @ast::pat {
let hi1 = p.last_span.lo; let hi1 = p.last_span.lo;
let fieldpath = ast_util::ident_to_path(ast_util::mk_sp(lo1, hi1), let fieldpath = ast_util::ident_to_path(ast_util::mk_sp(lo1, hi1),
fieldname); fieldname);
let subpat; let mut subpat;
if p.token == token::COLON { if p.token == token::COLON {
p.bump(); p.bump();
subpat = parse_pat(p); subpat = parse_pat(p);
@ -1572,7 +1573,7 @@ fn parse_pat(p: parser) -> @ast::pat {
let expr = mk_expr(p, lo, hi, ast::expr_lit(lit)); let expr = mk_expr(p, lo, hi, ast::expr_lit(lit));
pat = ast::pat_lit(expr); pat = ast::pat_lit(expr);
} else { } else {
let fields = [parse_pat(p)]; let mut fields = [parse_pat(p)];
while p.token == token::COMMA { while p.token == token::COMMA {
p.bump(); p.bump();
fields += [parse_pat(p)]; fields += [parse_pat(p)];
@ -1606,7 +1607,7 @@ fn parse_pat(p: parser) -> @ast::pat {
} else { } else {
let enum_path = parse_path_and_ty_param_substs(p, true); let enum_path = parse_path_and_ty_param_substs(p, true);
hi = enum_path.span.hi; hi = enum_path.span.hi;
let args: [@ast::pat]; let mut args: [@ast::pat];
alt p.token { alt p.token {
token::LPAREN { token::LPAREN {
let a = let a =
@ -1635,9 +1636,9 @@ fn parse_local(p: parser, is_mutbl: bool,
allow_init: bool) -> @ast::local { allow_init: bool) -> @ast::local {
let lo = p.span.lo; let lo = p.span.lo;
let pat = parse_pat(p); let pat = parse_pat(p);
let ty = @{id: p.get_id(), let mut ty = @{id: p.get_id(),
node: ast::ty_infer, node: ast::ty_infer,
span: ast_util::mk_sp(lo, lo)}; span: ast_util::mk_sp(lo, lo)};
if eat(p, token::COLON) { ty = parse_ty(p, false); } if eat(p, token::COLON) { ty = parse_ty(p, false); }
let init = if allow_init { parse_initializer(p) } else { none }; let init = if allow_init { parse_initializer(p) } else { none };
ret @spanned(lo, p.last_span.hi, ret @spanned(lo, p.last_span.hi,
@ -1648,7 +1649,7 @@ fn parse_local(p: parser, is_mutbl: bool,
fn parse_let(p: parser) -> @ast::decl { fn parse_let(p: parser) -> @ast::decl {
let is_mutbl = eat_word(p, "mut"); let is_mutbl = eat_word(p, "mut");
let lo = p.span.lo; let lo = p.span.lo;
let locals = [parse_local(p, is_mutbl, true)]; let mut locals = [parse_local(p, is_mutbl, true)];
while eat(p, token::COMMA) { while eat(p, token::COMMA) {
locals += [parse_local(p, is_mutbl, true)]; locals += [parse_local(p, is_mutbl, true)];
} }
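parse_let and parse_local are also where the keyword is read out of source text (eat_word(p, "mut")), so these hunks show both sides of the change: the parser accepting the surface form, and rustc's own locals being rewritten to use it. The surface form itself, in a minimal current-syntax example:

    fn count_commas(s: &str) -> usize {
        // A binding that is later assigned is introduced with let mut;
        // plain let followed by n += 1 is what the new check rejects.
        let mut n = 0;
        for c in s.chars() {
            if c == ',' { n += 1; }
        }
        n
    }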
@ -1657,7 +1658,7 @@ fn parse_let(p: parser) -> @ast::decl {
/* assumes "let" token has already been consumed */ /* assumes "let" token has already been consumed */
fn parse_instance_var(p:parser) -> (ast::class_member, codemap::span) { fn parse_instance_var(p:parser) -> (ast::class_member, codemap::span) {
let is_mutbl = ast::class_immutable; let mut is_mutbl = ast::class_immutable;
let lo = p.span.lo; let lo = p.span.lo;
if eat_word(p, "mut") || eat_word(p, "mutable") { if eat_word(p, "mut") || eat_word(p, "mutable") {
is_mutbl = ast::class_mutable; is_mutbl = ast::class_mutable;
@ -1687,7 +1688,7 @@ fn parse_stmt(p: parser, first_item_attrs: [ast::attribute]) -> @ast::stmt {
let decl = parse_let(p); let decl = parse_let(p);
ret @spanned(lo, decl.span.hi, ast::stmt_decl(decl, p.get_id())); ret @spanned(lo, decl.span.hi, ast::stmt_decl(decl, p.get_id()));
} else { } else {
let item_attrs; let mut item_attrs;
alt parse_outer_attrs_or_ext(p, first_item_attrs) { alt parse_outer_attrs_or_ext(p, first_item_attrs) {
none { item_attrs = []; } none { item_attrs = []; }
some(left(attrs)) { item_attrs = attrs; } some(left(attrs)) { item_attrs = attrs; }
@ -1700,7 +1701,7 @@ fn parse_stmt(p: parser, first_item_attrs: [ast::attribute]) -> @ast::stmt {
alt parse_item(p, item_attrs) { alt parse_item(p, item_attrs) {
some(i) { some(i) {
let hi = i.span.hi; let mut hi = i.span.hi;
let decl = @spanned(lo, hi, ast::decl_item(i)); let decl = @spanned(lo, hi, ast::decl_item(i));
ret @spanned(lo, hi, ast::stmt_decl(decl, p.get_id())); ret @spanned(lo, hi, ast::stmt_decl(decl, p.get_id()));
} }
@ -1805,10 +1806,10 @@ fn parse_block_tail(p: parser, lo: uint, s: ast::blk_check_mode) -> ast::blk {
fn parse_block_tail_(p: parser, lo: uint, s: ast::blk_check_mode, fn parse_block_tail_(p: parser, lo: uint, s: ast::blk_check_mode,
first_item_attrs: [ast::attribute]) -> ast::blk { first_item_attrs: [ast::attribute]) -> ast::blk {
let stmts = []; let mut stmts = [];
let expr = none; let mut expr = none;
let view_items = maybe_parse_view_import_only(p, first_item_attrs); let view_items = maybe_parse_view_import_only(p, first_item_attrs);
let initial_attrs = first_item_attrs; let mut initial_attrs = first_item_attrs;
if p.token == token::RBRACE && !vec::is_empty(initial_attrs) { if p.token == token::RBRACE && !vec::is_empty(initial_attrs) {
p.fatal("expected item"); p.fatal("expected item");
@ -1854,7 +1855,7 @@ fn parse_block_tail_(p: parser, lo: uint, s: ast::blk_check_mode,
} }
} }
} }
let hi = p.span.hi; let mut hi = p.span.hi;
p.bump(); p.bump();
let bloc = {view_items: view_items, stmts: stmts, expr: expr, let bloc = {view_items: view_items, stmts: stmts, expr: expr,
id: p.get_id(), rules: s}; id: p.get_id(), rules: s};
@ -1862,7 +1863,7 @@ fn parse_block_tail_(p: parser, lo: uint, s: ast::blk_check_mode,
} }
fn parse_ty_param(p: parser) -> ast::ty_param { fn parse_ty_param(p: parser) -> ast::ty_param {
let bounds = []; let mut bounds = [];
let ident = parse_ident(p); let ident = parse_ident(p);
if eat(p, token::COLON) { if eat(p, token::COLON) {
while p.token != token::COMMA && p.token != token::GT { while p.token != token::COMMA && p.token != token::GT {
@ -1888,7 +1889,7 @@ fn parse_fn_decl(p: parser, purity: ast::purity)
// Use the args list to translate each bound variable // Use the args list to translate each bound variable
// mentioned in a constraint to an arg index. // mentioned in a constraint to an arg index.
// Seems weird to do this in the parser, but I'm not sure how else to. // Seems weird to do this in the parser, but I'm not sure how else to.
let constrs = []; let mut constrs = [];
if p.token == token::COLON { if p.token == token::COLON {
p.bump(); p.bump();
constrs = parse_constrs({|x| parse_ty_constr(inputs.node, x) }, p); constrs = parse_constrs({|x| parse_ty_constr(inputs.node, x) }, p);
@ -1990,7 +1991,7 @@ fn parse_item_impl(p: parser, attrs: [ast::attribute]) -> @ast::item {
fn wrap_path(p: parser, pt: @ast::path) -> @ast::ty { fn wrap_path(p: parser, pt: @ast::path) -> @ast::ty {
@{id: p.get_id(), node: ast::ty_path(pt, p.get_id()), span: pt.span} @{id: p.get_id(), node: ast::ty_path(pt, p.get_id()), span: pt.span}
} }
let (ident, tps) = if !is_word(p, "of") { let mut (ident, tps) = if !is_word(p, "of") {
if p.token == token::LT { (none, parse_ty_params(p)) } if p.token == token::LT { (none, parse_ty_params(p)) }
else { (some(parse_ident(p)), parse_ty_params(p)) } else { (some(parse_ident(p)), parse_ty_params(p)) }
} else { (none, []) }; } else { (none, []) };
@ -2006,7 +2007,8 @@ fn parse_item_impl(p: parser, attrs: [ast::attribute]) -> @ast::item {
none { expect_word(p, "of"); fail; } none { expect_word(p, "of"); fail; }
}; };
expect_word(p, "for"); expect_word(p, "for");
let ty = parse_ty(p, false), meths = []; let ty = parse_ty(p, false);
let mut meths = [];
expect(p, token::LBRACE); expect(p, token::LBRACE);
while !eat(p, token::RBRACE) { meths += [parse_method(p)]; } while !eat(p, token::RBRACE) { meths += [parse_method(p)]; }
ret mk_item(p, lo, p.last_span.hi, ident, ret mk_item(p, lo, p.last_span.hi, ident,
@ -2044,9 +2046,9 @@ fn parse_item_class(p: parser, attrs: [ast::attribute]) -> @ast::item {
let class_path = ident_to_path(p.last_span, class_name); let class_path = ident_to_path(p.last_span, class_name);
let ty_params = parse_ty_params(p); let ty_params = parse_ty_params(p);
expect(p, token::LBRACE); expect(p, token::LBRACE);
let items: [@ast::class_item] = []; let mut items: [@ast::class_item] = [];
let ctor_id = p.get_id(); let ctor_id = p.get_id();
let the_ctor : option<(ast::fn_decl, ast::blk, codemap::span)> = none; let mut the_ctor : option<(ast::fn_decl, ast::blk, codemap::span)> = none;
while p.token != token::RBRACE { while p.token != token::RBRACE {
alt parse_class_item(p, class_path) { alt parse_class_item(p, class_path) {
ctor_decl(a_fn_decl, blk, s) { ctor_decl(a_fn_decl, blk, s) {
@ -2109,7 +2111,7 @@ enum class_contents { ctor_decl(ast::fn_decl, ast::blk, codemap::span),
// FIXME: refactor // FIXME: refactor
else if eat_word(p, "priv") { else if eat_word(p, "priv") {
expect(p, token::LBRACE); expect(p, token::LBRACE);
let results = []; let mut results = [];
while p.token != token::RBRACE { while p.token != token::RBRACE {
if eat_word(p, "let") { if eat_word(p, "let") {
let a_var = parse_instance_var(p); let a_var = parse_instance_var(p);
@ -2142,8 +2144,8 @@ fn parse_mod_items(p: parser, term: token::token,
first_item_attrs: [ast::attribute]) -> ast::_mod { first_item_attrs: [ast::attribute]) -> ast::_mod {
// Shouldn't be any view items since we've already parsed an item attr // Shouldn't be any view items since we've already parsed an item attr
let view_items = maybe_parse_view(p, first_item_attrs); let view_items = maybe_parse_view(p, first_item_attrs);
let items: [@ast::item] = []; let mut items: [@ast::item] = [];
let initial_attrs = first_item_attrs; let mut initial_attrs = first_item_attrs;
while p.token != term { while p.token != term {
let attrs = initial_attrs + parse_outer_attributes(p); let attrs = initial_attrs + parse_outer_attributes(p);
#debug["parse_mod_items: parse_item(attrs=%?)", attrs]; #debug["parse_mod_items: parse_item(attrs=%?)", attrs];
@ -2173,7 +2175,7 @@ fn parse_item_const(p: parser, attrs: [ast::attribute]) -> @ast::item {
let ty = parse_ty(p, false); let ty = parse_ty(p, false);
expect(p, token::EQ); expect(p, token::EQ);
let e = parse_expr(p); let e = parse_expr(p);
let hi = p.span.hi; let mut hi = p.span.hi;
expect(p, token::SEMI); expect(p, token::SEMI);
ret mk_item(p, lo, hi, id, ast::item_const(ty, e), attrs); ret mk_item(p, lo, hi, id, ast::item_const(ty, e), attrs);
} }
@ -2185,7 +2187,7 @@ fn parse_item_mod(p: parser, attrs: [ast::attribute]) -> @ast::item {
let inner_attrs = parse_inner_attrs_and_next(p); let inner_attrs = parse_inner_attrs_and_next(p);
let first_item_outer_attrs = inner_attrs.next; let first_item_outer_attrs = inner_attrs.next;
let m = parse_mod_items(p, token::RBRACE, first_item_outer_attrs); let m = parse_mod_items(p, token::RBRACE, first_item_outer_attrs);
let hi = p.span.hi; let mut hi = p.span.hi;
expect(p, token::RBRACE); expect(p, token::RBRACE);
ret mk_item(p, lo, hi, id, ast::item_mod(m), attrs + inner_attrs.inner); ret mk_item(p, lo, hi, id, ast::item_mod(m), attrs + inner_attrs.inner);
} }
@ -2195,7 +2197,7 @@ fn parse_item_native_fn(p: parser, attrs: [ast::attribute],
let lo = p.last_span.lo; let lo = p.last_span.lo;
let t = parse_fn_header(p); let t = parse_fn_header(p);
let decl = parse_fn_decl(p, purity); let decl = parse_fn_decl(p, purity);
let hi = p.span.hi; let mut hi = p.span.hi;
expect(p, token::SEMI); expect(p, token::SEMI);
ret @{ident: t.ident, ret @{ident: t.ident,
attrs: attrs, attrs: attrs,
@ -2223,8 +2225,8 @@ fn parse_native_mod_items(p: parser, first_item_attrs: [ast::attribute]) ->
if vec::len(first_item_attrs) == 0u { if vec::len(first_item_attrs) == 0u {
parse_native_view(p) parse_native_view(p)
} else { [] }; } else { [] };
let items: [@ast::native_item] = []; let mut items: [@ast::native_item] = [];
let initial_attrs = first_item_attrs; let mut initial_attrs = first_item_attrs;
while p.token != token::RBRACE { while p.token != token::RBRACE {
let attrs = initial_attrs + parse_outer_attributes(p); let attrs = initial_attrs + parse_outer_attributes(p);
initial_attrs = []; initial_attrs = [];
@ -2243,7 +2245,7 @@ fn parse_item_native_mod(p: parser, attrs: [ast::attribute]) -> @ast::item {
let inner_attrs = more_attrs.inner; let inner_attrs = more_attrs.inner;
let first_item_outer_attrs = more_attrs.next; let first_item_outer_attrs = more_attrs.next;
let m = parse_native_mod_items(p, first_item_outer_attrs); let m = parse_native_mod_items(p, first_item_outer_attrs);
let hi = p.span.hi; let mut hi = p.span.hi;
expect(p, token::RBRACE); expect(p, token::RBRACE);
ret mk_item(p, lo, hi, id, ast::item_native_mod(m), attrs + inner_attrs); ret mk_item(p, lo, hi, id, ast::item_native_mod(m), attrs + inner_attrs);
} }
@ -2259,7 +2261,7 @@ fn parse_item_type(p: parser, attrs: [ast::attribute]) -> @ast::item {
let tps = parse_ty_params(p); let tps = parse_ty_params(p);
expect(p, token::EQ); expect(p, token::EQ);
let ty = parse_ty(p, false); let ty = parse_ty(p, false);
let hi = p.span.hi; let mut hi = p.span.hi;
expect(p, token::SEMI); expect(p, token::SEMI);
ret mk_item(p, t.lo, hi, t.ident, ast::item_ty(ty, tps), attrs); ret mk_item(p, t.lo, hi, t.ident, ast::item_ty(ty, tps), attrs);
} }
@ -2268,7 +2270,7 @@ fn parse_item_enum(p: parser, attrs: [ast::attribute]) -> @ast::item {
let lo = p.last_span.lo; let lo = p.last_span.lo;
let id = parse_ident(p); let id = parse_ident(p);
let ty_params = parse_ty_params(p); let ty_params = parse_ty_params(p);
let variants: [ast::variant] = []; let mut variants: [ast::variant] = [];
// Newtype syntax // Newtype syntax
if p.token == token::EQ { if p.token == token::EQ {
if p.bad_expr_words.contains_key(id) { if p.bad_expr_words.contains_key(id) {
@ -2289,13 +2291,13 @@ fn parse_item_enum(p: parser, attrs: [ast::attribute]) -> @ast::item {
} }
expect(p, token::LBRACE); expect(p, token::LBRACE);
let all_nullary = true, have_disr = false; let mut all_nullary = true, have_disr = false;
while p.token != token::RBRACE { while p.token != token::RBRACE {
let variant_attrs = parse_outer_attributes(p); let variant_attrs = parse_outer_attributes(p);
let vlo = p.span.lo; let vlo = p.span.lo;
let ident = parse_value_ident(p); let ident = parse_value_ident(p);
let args = [], disr_expr = none; let mut args = [], disr_expr = none;
if p.token == token::LPAREN { if p.token == token::LPAREN {
all_nullary = false; all_nullary = false;
let arg_tys = parse_seq(token::LPAREN, token::RPAREN, let arg_tys = parse_seq(token::LPAREN, token::RPAREN,
@ -2416,7 +2418,7 @@ fn parse_outer_attrs_or_ext(
// Parse attributes that appear before an item // Parse attributes that appear before an item
fn parse_outer_attributes(p: parser) -> [ast::attribute] { fn parse_outer_attributes(p: parser) -> [ast::attribute] {
let attrs: [ast::attribute] = []; let mut attrs: [ast::attribute] = [];
while p.token == token::POUND { while p.token == token::POUND {
attrs += [parse_attribute(p, ast::attr_outer)]; attrs += [parse_attribute(p, ast::attr_outer)];
} }
@ -2434,7 +2436,7 @@ fn parse_attribute_naked(p: parser, style: ast::attr_style, lo: uint) ->
expect(p, token::LBRACKET); expect(p, token::LBRACKET);
let meta_item = parse_meta_item(p); let meta_item = parse_meta_item(p);
expect(p, token::RBRACKET); expect(p, token::RBRACKET);
let hi = p.span.hi; let mut hi = p.span.hi;
ret spanned(lo, hi, {style: style, value: *meta_item}); ret spanned(lo, hi, {style: style, value: *meta_item});
} }
@ -2446,8 +2448,8 @@ fn parse_attribute_naked(p: parser, style: ast::attr_style, lo: uint) ->
// until we see the semi). // until we see the semi).
fn parse_inner_attrs_and_next(p: parser) -> fn parse_inner_attrs_and_next(p: parser) ->
{inner: [ast::attribute], next: [ast::attribute]} { {inner: [ast::attribute], next: [ast::attribute]} {
let inner_attrs: [ast::attribute] = []; let mut inner_attrs: [ast::attribute] = [];
let next_outer_attrs: [ast::attribute] = []; let mut next_outer_attrs: [ast::attribute] = [];
while p.token == token::POUND { while p.token == token::POUND {
if p.look_ahead(1u) != token::LBRACKET { if p.look_ahead(1u) != token::LBRACKET {
// This is an extension // This is an extension
@ -2476,16 +2478,16 @@ fn parse_meta_item(p: parser) -> @ast::meta_item {
token::EQ { token::EQ {
p.bump(); p.bump();
let lit = parse_lit(p); let lit = parse_lit(p);
let hi = p.span.hi; let mut hi = p.span.hi;
ret @spanned(lo, hi, ast::meta_name_value(ident, lit)); ret @spanned(lo, hi, ast::meta_name_value(ident, lit));
} }
token::LPAREN { token::LPAREN {
let inner_items = parse_meta_seq(p); let inner_items = parse_meta_seq(p);
let hi = p.span.hi; let mut hi = p.span.hi;
ret @spanned(lo, hi, ast::meta_list(ident, inner_items)); ret @spanned(lo, hi, ast::meta_list(ident, inner_items));
} }
_ { _ {
let hi = p.span.hi; let mut hi = p.span.hi;
ret @spanned(lo, hi, ast::meta_word(ident)); ret @spanned(lo, hi, ast::meta_word(ident));
} }
} }
@ -2509,7 +2511,7 @@ fn parse_use(p: parser) -> ast::view_item_ {
fn parse_view_path(p: parser) -> @ast::view_path { fn parse_view_path(p: parser) -> @ast::view_path {
let lo = p.span.lo; let lo = p.span.lo;
let first_ident = parse_ident(p); let first_ident = parse_ident(p);
let path = [first_ident]; let mut path = [first_ident];
#debug("parsed view_path: %s", first_ident); #debug("parsed view_path: %s", first_ident);
alt p.token { alt p.token {
token::EQ { token::EQ {
@ -2521,7 +2523,7 @@ fn parse_view_path(p: parser) -> @ast::view_path {
let id = parse_ident(p); let id = parse_ident(p);
path += [id]; path += [id];
} }
let hi = p.span.hi; let mut hi = p.span.hi;
ret @spanned(lo, hi, ret @spanned(lo, hi,
ast::view_path_simple(first_ident, ast::view_path_simple(first_ident,
@path, p.get_id())); @path, p.get_id()));
@ -2545,7 +2547,7 @@ fn parse_view_path(p: parser) -> @ast::view_path {
parse_seq(token::LBRACE, token::RBRACE, parse_seq(token::LBRACE, token::RBRACE,
seq_sep(token::COMMA), seq_sep(token::COMMA),
parse_path_list_ident, p).node; parse_path_list_ident, p).node;
let hi = p.span.hi; let mut hi = p.span.hi;
ret @spanned(lo, hi, ret @spanned(lo, hi,
ast::view_path_list(@path, idents, ast::view_path_list(@path, idents,
p.get_id())); p.get_id()));
@ -2554,7 +2556,7 @@ fn parse_view_path(p: parser) -> @ast::view_path {
// foo::bar::* // foo::bar::*
token::BINOP(token::STAR) { token::BINOP(token::STAR) {
p.bump(); p.bump();
let hi = p.span.hi; let mut hi = p.span.hi;
ret @spanned(lo, hi, ret @spanned(lo, hi,
ast::view_path_glob(@path, ast::view_path_glob(@path,
p.get_id())); p.get_id()));
@ -2566,7 +2568,7 @@ fn parse_view_path(p: parser) -> @ast::view_path {
} }
_ { } _ { }
} }
let hi = p.span.hi; let mut hi = p.span.hi;
let last = path[vec::len(path) - 1u]; let last = path[vec::len(path) - 1u];
ret @spanned(lo, hi, ret @spanned(lo, hi,
ast::view_path_simple(last, @path, ast::view_path_simple(last, @path,
@ -2574,7 +2576,7 @@ fn parse_view_path(p: parser) -> @ast::view_path {
} }
fn parse_view_paths(p: parser) -> [@ast::view_path] { fn parse_view_paths(p: parser) -> [@ast::view_path] {
let vp = [parse_view_path(p)]; let mut vp = [parse_view_path(p)];
while p.token == token::COMMA { while p.token == token::COMMA {
p.bump(); p.bump();
vp += [parse_view_path(p)]; vp += [parse_view_path(p)];
@ -2594,7 +2596,7 @@ fn parse_view_item(p: parser) -> @ast::view_item {
} else { } else {
fail fail
}; };
let hi = p.span.lo; let mut hi = p.span.lo;
expect(p, token::SEMI); expect(p, token::SEMI);
ret @spanned(lo, hi, the_item); ret @spanned(lo, hi, the_item);
} }
@ -2630,7 +2632,7 @@ fn maybe_parse_view_while(
f: fn@(parser) -> bool) -> [@ast::view_item] { f: fn@(parser) -> bool) -> [@ast::view_item] {
if vec::len(first_item_attrs) == 0u { if vec::len(first_item_attrs) == 0u {
let items = []; let mut items = [];
while f(p) { items += [parse_view_item(p)]; } while f(p) { items += [parse_view_item(p)]; }
ret items; ret items;
} else { } else {
@ -2729,7 +2731,7 @@ fn parse_crate_directive(p: parser, first_outer_attr: [ast::attribute]) ->
alt p.token { alt p.token {
// mod x = "foo.rs"; // mod x = "foo.rs";
token::SEMI { token::SEMI {
let hi = p.span.hi; let mut hi = p.span.hi;
p.bump(); p.bump();
ret spanned(lo, hi, ast::cdir_src_mod(id, outer_attrs)); ret spanned(lo, hi, ast::cdir_src_mod(id, outer_attrs));
} }
@ -2741,7 +2743,7 @@ fn parse_crate_directive(p: parser, first_outer_attr: [ast::attribute]) ->
let next_outer_attr = inner_attrs.next; let next_outer_attr = inner_attrs.next;
let cdirs = let cdirs =
parse_crate_directives(p, token::RBRACE, next_outer_attr); parse_crate_directives(p, token::RBRACE, next_outer_attr);
let hi = p.span.hi; let mut hi = p.span.hi;
expect(p, token::RBRACE); expect(p, token::RBRACE);
ret spanned(lo, hi, ret spanned(lo, hi,
ast::cdir_dir_mod(id, cdirs, mod_attrs)); ast::cdir_dir_mod(id, cdirs, mod_attrs));
@ -2765,8 +2767,8 @@ fn parse_crate_directives(p: parser, term: token::token,
expect_word(p, "mod"); expect_word(p, "mod");
} }
let cdirs: [@ast::crate_directive] = []; let mut cdirs: [@ast::crate_directive] = [];
let first_outer_attr = first_outer_attr; let mut first_outer_attr = first_outer_attr;
while p.token != term { while p.token != term {
let cdir = @parse_crate_directive(p, first_outer_attr); let cdir = @parse_crate_directive(p, first_outer_attr);
cdirs += [cdir]; cdirs += [cdir];
@ -2793,7 +2795,7 @@ fn parse_crate_from_crate_file(input: str, cfg: ast::crate_cfg,
let (companionmod, _) = path::splitext(path::basename(input)); let (companionmod, _) = path::splitext(path::basename(input));
let (m, attrs) = eval::eval_crate_directives_to_mod( let (m, attrs) = eval::eval_crate_directives_to_mod(
cx, cdirs, prefix, option::some(companionmod)); cx, cdirs, prefix, option::some(companionmod));
let hi = p.span.hi; let mut hi = p.span.hi;
expect(p, token::EOF); expect(p, token::EOF);
ret @spanned(lo, hi, ret @spanned(lo, hi,
{directives: cdirs, {directives: cdirs,

View File

@ -137,7 +137,7 @@ fn to_str(r: reader, t: token) -> str {
/* Literals */ /* Literals */
LIT_INT(c, ast::ty_char) { LIT_INT(c, ast::ty_char) {
// FIXME: escape. // FIXME: escape.
let tmp = "'"; let mut tmp = "'";
str::push_char(tmp, c as char); str::push_char(tmp, c as char);
str::push_char(tmp, '\''); str::push_char(tmp, '\'');
ret tmp; ret tmp;

View File

@ -74,9 +74,9 @@ fn buf_str(toks: [mutable token], szs: [mutable int], left: uint, right: uint,
lim: uint) -> str { lim: uint) -> str {
let n = vec::len(toks); let n = vec::len(toks);
assert (n == vec::len(szs)); assert (n == vec::len(szs));
let i = left; let mut i = left;
let L = lim; let mut L = lim;
let s = "["; let mut s = "[";
while i != right && L != 0u { while i != right && L != 0u {
L -= 1u; L -= 1u;
if i != left { s += ", "; } if i != left { s += ", "; }
@ -399,7 +399,8 @@ impl printer for printer {
} }
fn get_top() -> print_stack_elt { fn get_top() -> print_stack_elt {
let n = vec::len(self.print_stack); let n = vec::len(self.print_stack);
let top: print_stack_elt = {offset: 0, pbreak: broken(inconsistent)}; let mut top: print_stack_elt =
{offset: 0, pbreak: broken(inconsistent)};
if n != 0u { top = self.print_stack[n - 1u]; } if n != 0u { top = self.print_stack[n - 1u]; }
ret top; ret top;
} }
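get_top is the default-then-maybe-overwrite flavor of the same change: top starts from a fallback value and is replaced only when the print stack is non-empty, so its declaration becomes mut. A sketch of the pattern in current syntax, with a tuple standing in for the print_stack_elt record:

    fn get_top(print_stack: &[(isize, bool)]) -> (isize, bool) {
        // The fallback value may be overwritten below, hence mut.
        let mut top = (0, true);
        if let Some(last) = print_stack.last() {
            top = *last;
        }
        top
    }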

View File

@ -268,7 +268,7 @@ fn synth_comment(s: ps, text: str) {
fn commasep<IN>(s: ps, b: breaks, elts: [IN], op: fn(ps, IN)) { fn commasep<IN>(s: ps, b: breaks, elts: [IN], op: fn(ps, IN)) {
box(s, 0u, b); box(s, 0u, b);
let first = true; let mut first = true;
for elt: IN in elts { for elt: IN in elts {
if first { first = false; } else { word_space(s, ","); } if first { first = false; } else { word_space(s, ","); }
op(s, elt); op(s, elt);
@ -281,7 +281,7 @@ fn commasep_cmnt<IN>(s: ps, b: breaks, elts: [IN], op: fn(ps, IN),
get_span: fn(IN) -> codemap::span) { get_span: fn(IN) -> codemap::span) {
box(s, 0u, b); box(s, 0u, b);
let len = vec::len::<IN>(elts); let len = vec::len::<IN>(elts);
let i = 0u; let mut i = 0u;
for elt: IN in elts { for elt: IN in elts {
maybe_print_comment(s, get_span(elt).hi); maybe_print_comment(s, get_span(elt).hi);
op(s, elt); op(s, elt);
@ -626,7 +626,7 @@ fn print_method(s: ps, meth: @ast::method) {
} }
fn print_outer_attributes(s: ps, attrs: [ast::attribute]) { fn print_outer_attributes(s: ps, attrs: [ast::attribute]) {
let count = 0; let mut count = 0;
for attr: ast::attribute in attrs { for attr: ast::attribute in attrs {
alt attr.node.style { alt attr.node.style {
ast::attr_outer { print_attribute(s, attr); count += 1; } ast::attr_outer { print_attribute(s, attr); count += 1; }
@ -637,7 +637,7 @@ fn print_outer_attributes(s: ps, attrs: [ast::attribute]) {
} }
fn print_inner_attributes(s: ps, attrs: [ast::attribute]) { fn print_inner_attributes(s: ps, attrs: [ast::attribute]) {
let count = 0; let mut count = 0;
for attr: ast::attribute in attrs { for attr: ast::attribute in attrs {
alt attr.node.style { alt attr.node.style {
ast::attr_inner { ast::attr_inner {
@ -856,7 +856,7 @@ fn print_expr(s: ps, &&expr: @ast::expr) {
} }
ast::expr_call(func, args, has_block) { ast::expr_call(func, args, has_block) {
print_expr_parens_if_not_bot(s, func); print_expr_parens_if_not_bot(s, func);
let base_args = args, blk = none; let mut base_args = args, blk = none;
if has_block { blk = some(vec::pop(base_args)); } if has_block { blk = some(vec::pop(base_args)); }
if !has_block || vec::len(base_args) > 0u { if !has_block || vec::len(base_args) > 0u {
popen(s); popen(s);
@ -952,7 +952,7 @@ fn print_expr(s: ps, &&expr: @ast::expr) {
space(s.s); space(s.s);
cbox(s, alt_indent_unit); cbox(s, alt_indent_unit);
ibox(s, 0u); ibox(s, 0u);
let first = true; let mut first = true;
for p: @ast::pat in arm.pats { for p: @ast::pat in arm.pats {
if first { if first {
first = false; first = false;
@ -1169,7 +1169,7 @@ fn print_for_decl(s: ps, loc: @ast::local, coll: @ast::expr) {
fn print_path(s: ps, &&path: @ast::path, colons_before_params: bool) { fn print_path(s: ps, &&path: @ast::path, colons_before_params: bool) {
maybe_print_comment(s, path.span.lo); maybe_print_comment(s, path.span.lo);
if path.node.global { word(s.s, "::"); } if path.node.global { word(s.s, "::"); }
let first = true; let mut first = true;
for id: ast::ident in path.node.idents { for id: ast::ident in path.node.idents {
if first { first = false; } else { word(s.s, "::"); } if first { first = false; } else { word(s.s, "::"); }
word(s.s, id); word(s.s, id);
@ -1383,7 +1383,7 @@ fn print_meta_item(s: ps, &&item: @ast::meta_item) {
} }
fn print_simple_path(s: ps, path: ast::simple_path) { fn print_simple_path(s: ps, path: ast::simple_path) {
let first = true; let mut first = true;
for id in path { for id in path {
if first { first = false; } else { word(s.s, "::"); } if first { first = false; } else { word(s.s, "::"); }
word(s.s, id); word(s.s, id);
@ -1530,14 +1530,14 @@ fn print_ty_fn(s: ps, opt_proto: option<ast::proto>,
fn maybe_print_trailing_comment(s: ps, span: codemap::span, fn maybe_print_trailing_comment(s: ps, span: codemap::span,
next_pos: option<uint>) { next_pos: option<uint>) {
let cm; let mut cm;
alt s.cm { some(ccm) { cm = ccm; } _ { ret; } } alt s.cm { some(ccm) { cm = ccm; } _ { ret; } }
alt next_comment(s) { alt next_comment(s) {
some(cmnt) { some(cmnt) {
if cmnt.style != lexer::trailing { ret; } if cmnt.style != lexer::trailing { ret; }
let span_line = codemap::lookup_char_pos(cm, span.hi); let span_line = codemap::lookup_char_pos(cm, span.hi);
let comment_line = codemap::lookup_char_pos(cm, cmnt.pos); let comment_line = codemap::lookup_char_pos(cm, cmnt.pos);
let next = cmnt.pos + 1u; let mut next = cmnt.pos + 1u;
alt next_pos { none { } some(p) { next = p; } } alt next_pos { none { } some(p) { next = p; } }
if span.hi < cmnt.pos && cmnt.pos < next && if span.hi < cmnt.pos && cmnt.pos < next &&
span_line.line == comment_line.line { span_line.line == comment_line.line {
@ -1689,9 +1689,9 @@ fn print_string(s: ps, st: str) {
} }
fn escape_str(st: str, to_escape: char) -> str { fn escape_str(st: str, to_escape: char) -> str {
let out: str = ""; let mut out: str = "";
let len = str::len(st); let len = str::len(st);
let i = 0u; let mut i = 0u;
while i < len { while i < len {
alt st[i] as char { alt st[i] as char {
'\n' { out += "\\n"; } '\n' { out += "\\n"; }
@ -1731,8 +1731,8 @@ fn next_comment(s: ps) -> option<lexer::cmnt> {
fn constr_args_to_str<T>(f: fn@(T) -> str, args: [@ast::sp_constr_arg<T>]) -> fn constr_args_to_str<T>(f: fn@(T) -> str, args: [@ast::sp_constr_arg<T>]) ->
str { str {
let comma = false; let mut comma = false;
let s = "("; let mut s = "(";
for a: @ast::sp_constr_arg<T> in args { for a: @ast::sp_constr_arg<T> in args {
if comma { s += ", "; } else { comma = true; } if comma { s += ", "; } else { comma = true; }
s += constr_arg_to_str::<T>(f, a.node); s += constr_arg_to_str::<T>(f, a.node);
@ -1775,7 +1775,7 @@ fn ty_constr_to_str(&&c: @ast::ty_constr) -> str {
} }
fn constrs_str<T>(constrs: [T], elt: fn(T) -> str) -> str { fn constrs_str<T>(constrs: [T], elt: fn(T) -> str) -> str {
let s = "", colon = true; let mut s = "", colon = true;
for c in constrs { for c in constrs {
if colon { s += " : "; colon = false; } else { s += ", "; } if colon { s += " : "; colon = false; } else { s += ", "; }
s += elt(c); s += elt(c);

View File

@ -12,7 +12,7 @@ fn def_eq(a: ast::def_id, b: ast::def_id) -> bool {
} }
fn hash_def(d: ast::def_id) -> uint { fn hash_def(d: ast::def_id) -> uint {
let h = 5381u; let mut h = 5381u;
h = (h << 5u) + h ^ (d.crate as uint); h = (h << 5u) + h ^ (d.crate as uint);
h = (h << 5u) + h ^ (d.node as uint); h = (h << 5u) + h ^ (d.node as uint);
ret h; ret h;
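hash_def needs the keyword because h is folded over both components of the id; the (h << 5u) + h term is just h * 33, the familiar multiply-then-xor hash step. A self-contained sketch in current Rust (wrapping arithmetic is used so the sketch cannot overflow-panic in debug builds; the 2012 code used plain uint operations):

    fn hash_def(krate: usize, node: usize) -> usize {
        // (h << 5) + h == h * 33; h is updated twice, so it is declared mut.
        let mut h: usize = 5381;
        h = h.wrapping_shl(5).wrapping_add(h) ^ krate;
        h = h.wrapping_shl(5).wrapping_add(h) ^ node;
        h
    }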
@ -27,7 +27,7 @@ fn new_def_hash<V: copy>() -> std::map::hashmap<ast::def_id, V> {
fn field_expr(f: ast::field) -> @ast::expr { ret f.node.expr; } fn field_expr(f: ast::field) -> @ast::expr { ret f.node.expr; }
fn field_exprs(fields: [ast::field]) -> [@ast::expr] { fn field_exprs(fields: [ast::field]) -> [@ast::expr] {
let es = []; let mut es = [];
for f: ast::field in fields { es += [f.node.expr]; } for f: ast::field in fields { es += [f.node.expr]; }
ret es; ret es;
} }

View File

@ -130,10 +130,10 @@ fn get_cargo_root() -> result<path, str> {
fn get_cargo_root_nearest() -> result<path, str> { fn get_cargo_root_nearest() -> result<path, str> {
result::chain(get_cargo_root()) { |p| result::chain(get_cargo_root()) { |p|
let cwd = os::getcwd(); let cwd = os::getcwd();
let dirname = path::dirname(cwd); let mut dirname = path::dirname(cwd);
let dirpath = path::split(dirname); let mut dirpath = path::split(dirname);
let cwd_cargo = path::connect(cwd, ".cargo"); let cwd_cargo = path::connect(cwd, ".cargo");
let par_cargo = path::connect(dirname, ".cargo"); let mut par_cargo = path::connect(dirname, ".cargo");
if os::path_is_dir(cwd_cargo) || cwd_cargo == p { if os::path_is_dir(cwd_cargo) || cwd_cargo == p {
ret result::ok(cwd_cargo); ret result::ok(cwd_cargo);

View File

@ -60,10 +60,10 @@ fn ty_to_str(cx: ctxt, typ: t) -> str {
fn fn_to_str(cx: ctxt, proto: ast::proto, ident: option<ast::ident>, fn fn_to_str(cx: ctxt, proto: ast::proto, ident: option<ast::ident>,
inputs: [arg], output: t, cf: ast::ret_style, inputs: [arg], output: t, cf: ast::ret_style,
constrs: [@constr]) -> str { constrs: [@constr]) -> str {
let s = proto_to_str(proto); let mut s = proto_to_str(proto);
alt ident { some(i) { s += " "; s += i; } _ { } } alt ident { some(i) { s += " "; s += i; } _ { } }
s += "("; s += "(";
let strs = []; let mut strs = [];
for a: arg in inputs { strs += [fn_input_to_str(cx, a)]; } for a: arg in inputs { strs += [fn_input_to_str(cx, a)]; }
s += str::connect(strs, ", "); s += str::connect(strs, ", ");
s += ")"; s += ")";
@ -137,12 +137,12 @@ fn ty_to_str(cx: ctxt, typ: t) -> str {
ty_vec(tm) { "[" + mt_to_str(cx, tm) + "]" } ty_vec(tm) { "[" + mt_to_str(cx, tm) + "]" }
ty_type { "type" } ty_type { "type" }
ty_rec(elems) { ty_rec(elems) {
let strs: [str] = []; let mut strs: [str] = [];
for fld: field in elems { strs += [field_to_str(cx, fld)]; } for fld: field in elems { strs += [field_to_str(cx, fld)]; }
"{" + str::connect(strs, ",") + "}" "{" + str::connect(strs, ",") + "}"
} }
ty_tup(elems) { ty_tup(elems) {
let strs = []; let mut strs = [];
for elem in elems { strs += [ty_to_str(cx, elem)]; } for elem in elems { strs += [ty_to_str(cx, elem)]; }
"(" + str::connect(strs, ",") + ")" "(" + str::connect(strs, ",") + ")"
} }
@ -165,7 +165,7 @@ fn ty_to_str(cx: ctxt, typ: t) -> str {
} }
fn ty_to_short_str(cx: ctxt, typ: t) -> str { fn ty_to_short_str(cx: ctxt, typ: t) -> str {
let s = encoder::encoded_ty(cx, typ); let mut s = encoder::encoded_ty(cx, typ);
if str::len(s) >= 32u { s = str::slice(s, 0u, 32u); } if str::len(s) >= 32u { s = str::slice(s, 0u, 32u); }
ret s; ret s;
} }
@ -176,8 +176,8 @@ fn constr_to_str(c: @constr) -> str {
} }
fn constrs_str(constrs: [@constr]) -> str { fn constrs_str(constrs: [@constr]) -> str {
let s = ""; let mut s = "";
let colon = true; let mut colon = true;
for c: @constr in constrs { for c: @constr in constrs {
if colon { s += " : "; colon = false; } else { s += ", "; } if colon { s += " : "; colon = false; } else { s += ", "; }
s += constr_to_str(c); s += constr_to_str(c);
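The ppaux string builders follow the separator-flag idiom: an output accumulator plus a boolean that flips after the first element, both now declared mut. The same join in current syntax, as a compact sketch:

    fn constrs_str(constrs: &[String]) -> String {
        // Both the output and the flag are reassigned inside the loop.
        let mut s = String::new();
        let mut colon = true;
        for c in constrs {
            if colon { s.push_str(" : "); colon = false; } else { s.push_str(", "); }
            s.push_str(c);
        }
        s
    }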