mirror of https://github.com/rust-lang/rust.git
synced 2025-05-14 02:49:40 +00:00

m1!{...} -> m1!(...)

This commit is contained in:
parent 226fd87199
commit 29f32b4a72
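The change below is purely mechanical: macro invocations that used brace delimiters (fmt!{...}, debug!{...}, error!{...}, move_it!{...}, and similar) now use parentheses with the arguments left untouched, and a few macro_rules! definitions switch their outer delimiter the same way. A minimal sketch of the pattern, using fmt! as it appears in this diff (the surrounding describe function is hypothetical, not part of the commit):

    // before: brace-delimited macro invocation (hypothetical example)
    fn describe(name: ~str) -> ~str {
        fmt!{"package: %s", name}
    }

    // after: parenthesized macro invocation, arguments unchanged
    fn describe(name: ~str) -> ~str {
        fmt!("package: %s", name)
    }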
@@ -371,7 +371,7 @@ fn rest(s: ~str, start: uint) -> ~str {
 fn need_dir(s: ~str) {
 if os::path_is_dir(s) { return; }
 if !os::make_dir(s, 493_i32 /* oct: 755 */) {
-fail fmt!{"can't make_dir %s", s};
+fail fmt!("can't make_dir %s", s);
 }
 }

@@ -389,7 +389,7 @@ fn valid_pkg_name(s: ~str) -> bool {

 fn parse_source(name: ~str, j: json::json) -> source {
 if !valid_pkg_name(name) {
-fail fmt!{"'%s' is an invalid source name", name};
+fail fmt!("'%s' is an invalid source name", name);
 }

 match j {
@@ -432,11 +432,11 @@ fn try_parse_sources(filename: ~str, sources: map::hashmap<~str, source>) {
 ok(json::dict(j)) => {
 for j.each |k, v| {
 sources.insert(k, parse_source(k, v));
-debug!{"source: %s", k};
+debug!("source: %s", k);
 }
 }
 ok(_) => fail ~"malformed sources.json",
-err(e) => fail fmt!{"%s:%s", filename, e.to_str()}
+err(e) => fail fmt!("%s:%s", filename, e.to_str())
 }
 }

@@ -559,7 +559,7 @@ fn load_source_info(c: cargo, src: source) {
 ~"(source info is not a dict)");
 }
 err(e) => {
-warn(fmt!{"%s:%s", src.name, e.to_str()});
+warn(fmt!("%s:%s", src.name, e.to_str()));
 }
 };
 }
@@ -588,7 +588,7 @@ fn load_source_packages(c: cargo, src: source) {
 ~"(packages is not a list)");
 }
 err(e) => {
-warn(fmt!{"%s:%s", src.name, e.to_str()});
+warn(fmt!("%s:%s", src.name, e.to_str()));
 }
 };
 }
@@ -597,7 +597,7 @@ fn build_cargo_options(argv: ~[~str]) -> options {
 let matches = match getopts::getopts(argv, opts()) {
 result::ok(m) => m,
 result::err(f) => {
-fail fmt!{"%s", getopts::fail_str(f)};
+fail fmt!("%s", getopts::fail_str(f));
 }
 };

@@ -707,11 +707,11 @@ fn run_in_buildpath(what: ~str, path: ~str, subdir: ~str, cf: ~str,
 extra_flags: ~[~str]) -> option<~str> {
 let buildpath = path::connect(path, subdir);
 need_dir(buildpath);
-debug!{"%s: %s -> %s", what, cf, buildpath};
+debug!("%s: %s -> %s", what, cf, buildpath);
 let p = run::program_output(rustc_sysroot(),
 ~[~"--out-dir", buildpath, cf] + extra_flags);
 if p.status != 0 {
-error(fmt!{"rustc failed: %d\n%s\n%s", p.status, p.err, p.out});
+error(fmt!("rustc failed: %d\n%s\n%s", p.status, p.err, p.out));
 return none;
 }
 some(buildpath)
@@ -738,7 +738,7 @@ fn install_one_crate(c: cargo, path: ~str, cf: ~str) {
 if (exec_suffix != ~"" && str::ends_with(ct, exec_suffix)) ||
 (exec_suffix == ~"" && !str::starts_with(path::basename(ct),
 ~"lib")) {
-debug!{" bin: %s", ct};
+debug!(" bin: %s", ct);
 install_to_dir(ct, c.bindir);
 if c.opts.mode == system_mode {
 // FIXME (#2662): Put this file in PATH / symlink it so it can
@@ -746,7 +746,7 @@ fn install_one_crate(c: cargo, path: ~str, cf: ~str) {
 // `cargo install -G rustray` and `rustray file.obj`
 }
 } else {
-debug!{" lib: %s", ct};
+debug!(" lib: %s", ct);
 install_to_dir(ct, c.libdir);
 }
 }
@@ -758,7 +758,7 @@ fn rustc_sysroot() -> ~str {
 some(path) => {
 let path = ~[path, ~"..", ~"bin", ~"rustc"];
 let rustc = path::normalize(path::connect_many(path));
-debug!{" rustc: %s", rustc};
+debug!(" rustc: %s", rustc);
 rustc
 }
 none => ~"rustc"
@@ -766,7 +766,7 @@ fn rustc_sysroot() -> ~str {
 }

 fn install_source(c: cargo, path: ~str) {
-debug!{"source: %s", path};
+debug!("source: %s", path);
 os::change_dir(path);

 let mut cratefiles = ~[];
@@ -792,7 +792,7 @@ fn install_source(c: cargo, path: ~str) {
 let wd_base = c.workdir + path::path_sep();
 let wd = match tempfile::mkdtemp(wd_base, ~"") {
 some(wd) => wd,
-none => fail fmt!{"needed temp dir: %s", wd_base}
+none => fail fmt!("needed temp dir: %s", wd_base)
 };

 install_query(c, wd, query);
@@ -825,7 +825,7 @@ fn install_curl(c: cargo, wd: ~str, url: ~str) {
 let p = run::program_output(~"curl", ~[~"-f", ~"-s", ~"-o",
 tarpath, url]);
 if p.status != 0 {
-fail fmt!{"fetch of %s failed: %s", url, p.err};
+fail fmt!("fetch of %s failed: %s", url, p.err);
 }
 run::run_program(~"tar", ~[~"-x", ~"--strip-components=1",
 ~"-C", wd, ~"-f", tarpath]);
@@ -846,7 +846,7 @@ fn install_package(c: cargo, src: ~str, wd: ~str, pkg: package) {
 _ => ~"curl"
 };

-info(fmt!{"installing %s/%s via %s...", src, pkg.name, method});
+info(fmt!("installing %s/%s via %s...", src, pkg.name, method));

 match method {
 ~"git" => install_git(c, wd, url, copy pkg.reference),
@@ -1076,7 +1076,7 @@ fn cmd_install(c: cargo) unsafe {
 let wd_base = c.workdir + path::path_sep();
 let wd = match tempfile::mkdtemp(wd_base, ~"") {
 some(wd) => wd,
-none => fail fmt!{"needed temp dir: %s", wd_base}
+none => fail fmt!("needed temp dir: %s", wd_base)
 };

 if vec::len(c.opts.free) == 2u {
@@ -1084,7 +1084,7 @@ fn cmd_install(c: cargo) unsafe {
 let status = run::run_program(~"cp", ~[~"-R", cwd, wd]);

 if status != 0 {
-fail fmt!{"could not copy directory: %s", cwd};
+fail fmt!("could not copy directory: %s", cwd);
 }

 install_source(c, wd);
@@ -1120,7 +1120,7 @@ fn sync_one_file(c: cargo, dir: ~str, src: source) -> bool {
 let mut has_src_file = false;

 if !os::copy_file(path::connect(url, ~"packages.json"), pkgfile) {
-error(fmt!{"fetch for source %s (url %s) failed", name, url});
+error(fmt!("fetch for source %s (url %s) failed", name, url));
 return false;
 }

@@ -1136,7 +1136,7 @@ fn sync_one_file(c: cargo, dir: ~str, src: source) -> bool {
 let p = run::program_output(~"curl",
 ~[~"-f", ~"-s", ~"-o", keyfile, u]);
 if p.status != 0 {
-error(fmt!{"fetch for source %s (key %s) failed", name, u});
+error(fmt!("fetch for source %s (key %s) failed", name, u));
 return false;
 }
 pgp::add(c.root, keyfile);
@@ -1148,8 +1148,8 @@ fn sync_one_file(c: cargo, dir: ~str, src: source) -> bool {
 let r = pgp::verify(c.root, pkgfile, sigfile, f);

 if !r {
-error(fmt!{"signature verification failed for source %s",
-name});
+error(fmt!("signature verification failed for source %s",
+name));
 return false;
 }

@@ -1157,8 +1157,8 @@ fn sync_one_file(c: cargo, dir: ~str, src: source) -> bool {
 let e = pgp::verify(c.root, srcfile, srcsigfile, f);

 if !e {
-error(fmt!{"signature verification failed for source %s",
-name});
+error(fmt!("signature verification failed for source %s",
+name));
 return false;
 }
 }
@@ -1178,7 +1178,7 @@ fn sync_one_file(c: cargo, dir: ~str, src: source) -> bool {
 os::remove_file(pkgfile);
 os::remove_file(sigfile);

-info(fmt!{"synced source: %s", name});
+info(fmt!("synced source: %s", name));

 return true;
 }
@@ -1194,7 +1194,7 @@ fn sync_one_git(c: cargo, dir: ~str, src: source) -> bool {

 fn rollback(name: ~str, dir: ~str, insecure: bool) {
 fn msg(name: ~str, insecure: bool) {
-error(fmt!{"could not rollback source: %s", name});
+error(fmt!("could not rollback source: %s", name));

 if insecure {
 warn(~"a past security check failed on source " +
@@ -1220,20 +1220,20 @@ fn sync_one_git(c: cargo, dir: ~str, src: source) -> bool {
 let p = run::program_output(~"git", ~[~"clone", url, dir]);

 if p.status != 0 {
-error(fmt!{"fetch for source %s (url %s) failed", name, url});
+error(fmt!("fetch for source %s (url %s) failed", name, url));
 return false;
 }
 }
 else {
 if !os::change_dir(dir) {
-error(fmt!{"fetch for source %s (url %s) failed", name, url});
+error(fmt!("fetch for source %s (url %s) failed", name, url));
 return false;
 }

 let p = run::program_output(~"git", ~[~"pull"]);

 if p.status != 0 {
-error(fmt!{"fetch for source %s (url %s) failed", name, url});
+error(fmt!("fetch for source %s (url %s) failed", name, url));
 return false;
 }
 }
@@ -1245,7 +1245,7 @@ fn sync_one_git(c: cargo, dir: ~str, src: source) -> bool {
 let p = run::program_output(~"curl",
 ~[~"-f", ~"-s", ~"-o", keyfile, u]);
 if p.status != 0 {
-error(fmt!{"fetch for source %s (key %s) failed", name, u});
+error(fmt!("fetch for source %s (key %s) failed", name, u));
 rollback(name, dir, false);
 return false;
 }
@@ -1258,8 +1258,8 @@ fn sync_one_git(c: cargo, dir: ~str, src: source) -> bool {
 let r = pgp::verify(c.root, pkgfile, sigfile, f);

 if !r {
-error(fmt!{"signature verification failed for source %s",
-name});
+error(fmt!("signature verification failed for source %s",
+name));
 rollback(name, dir, false);
 return false;
 }
@@ -1268,8 +1268,8 @@ fn sync_one_git(c: cargo, dir: ~str, src: source) -> bool {
 let e = pgp::verify(c.root, srcfile, srcsigfile, f);

 if !e {
-error(fmt!{"signature verification failed for source %s",
-name});
+error(fmt!("signature verification failed for source %s",
+name));
 rollback(name, dir, false);
 return false;
 }
@@ -1280,7 +1280,7 @@ fn sync_one_git(c: cargo, dir: ~str, src: source) -> bool {

 os::remove_file(keyfile);

-info(fmt!{"synced source: %s", name});
+info(fmt!("synced source: %s", name));

 return true;
 }
@@ -1306,7 +1306,7 @@ fn sync_one_curl(c: cargo, dir: ~str, src: source) -> bool {
 ~[~"-f", ~"-s", ~"-o", pkgfile, url]);

 if p.status != 0 {
-error(fmt!{"fetch for source %s (url %s) failed", name, url});
+error(fmt!("fetch for source %s (url %s) failed", name, url));
 return false;
 }
 if smart {
@@ -1325,7 +1325,7 @@ fn sync_one_curl(c: cargo, dir: ~str, src: source) -> bool {
 let p = run::program_output(~"curl",
 ~[~"-f", ~"-s", ~"-o", keyfile, u]);
 if p.status != 0 {
-error(fmt!{"fetch for source %s (key %s) failed", name, u});
+error(fmt!("fetch for source %s (key %s) failed", name, u));
 return false;
 }
 pgp::add(c.root, keyfile);
@@ -1344,15 +1344,15 @@ fn sync_one_curl(c: cargo, dir: ~str, src: source) -> bool {
 let mut p = run::program_output(~"curl", ~[~"-f", ~"-s", ~"-o",
 sigfile, url]);
 if p.status != 0 {
-error(fmt!{"fetch for source %s (sig %s) failed", name, url});
+error(fmt!("fetch for source %s (sig %s) failed", name, url));
 return false;
 }

 let r = pgp::verify(c.root, pkgfile, sigfile, f);

 if !r {
-error(fmt!{"signature verification failed for source %s",
-name});
+error(fmt!("signature verification failed for source %s",
+name));
 return false;
 }

@@ -1363,8 +1363,8 @@ fn sync_one_curl(c: cargo, dir: ~str, src: source) -> bool {
 ~[~"-f", ~"-s", ~"-o",
 srcsigfile, url]);
 if p.status != 0 {
-error(fmt!{"fetch for source %s (sig %s) failed",
-name, url});
+error(fmt!("fetch for source %s (sig %s) failed",
+name, url));
 return false;
 }

@@ -1392,7 +1392,7 @@ fn sync_one_curl(c: cargo, dir: ~str, src: source) -> bool {
 os::remove_file(pkgfile);
 os::remove_file(sigfile);

-info(fmt!{"synced source: %s", name});
+info(fmt!("synced source: %s", name));

 return true;
 }
@@ -1401,7 +1401,7 @@ fn sync_one(c: cargo, src: source) {
 let name = src.name;
 let dir = path::connect(c.sourcedir, name);

-info(fmt!{"syncing source: %s...", name});
+info(fmt!("syncing source: %s...", name));

 need_dir(dir);

@@ -1428,20 +1428,20 @@ fn cmd_init(c: cargo) {
 let p =
 run::program_output(~"curl", ~[~"-f", ~"-s", ~"-o", srcfile, srcurl]);
 if p.status != 0 {
-error(fmt!{"fetch of sources.json failed: %s", p.out});
+error(fmt!("fetch of sources.json failed: %s", p.out));
 return;
 }

 let p =
 run::program_output(~"curl", ~[~"-f", ~"-s", ~"-o", sigfile, sigurl]);
 if p.status != 0 {
-error(fmt!{"fetch of sources.json.sig failed: %s", p.out});
+error(fmt!("fetch of sources.json.sig failed: %s", p.out));
 return;
 }

 let r = pgp::verify(c.root, srcfile, sigfile, pgp::signing_key_fp());
 if !r {
-error(fmt!{"signature verification failed for '%s'", srcfile});
+error(fmt!("signature verification failed for '%s'", srcfile));
 return;
 }

@@ -1449,7 +1449,7 @@ fn cmd_init(c: cargo) {
 os::remove_file(srcfile);
 os::remove_file(sigfile);

-info(fmt!{"initialized .cargo in %s", c.root});
+info(fmt!("initialized .cargo in %s", c.root));
 }

 fn print_pkg(s: source, p: package) {
@@ -1490,14 +1490,14 @@ fn cmd_list(c: cargo) {
 if vec::len(c.opts.free) >= 3u {
 do vec::iter_between(c.opts.free, 2u, vec::len(c.opts.free)) |name| {
 if !valid_pkg_name(name) {
-error(fmt!{"'%s' is an invalid source name", name});
+error(fmt!("'%s' is an invalid source name", name));
 } else {
 match c.sources.find(name) {
 some(source) => {
 print_source(source);
 }
 none => {
-error(fmt!{"no such source: %s", name});
+error(fmt!("no such source: %s", name));
 }
 }
 }
@@ -1527,7 +1527,7 @@ fn cmd_search(c: cargo) {
 n += 1;
 }
 });
-info(fmt!{"found %d packages", n});
+info(fmt!("found %d packages", n));
 }

 fn install_to_dir(srcfile: ~str, destdir: ~str) {
@@ -1535,9 +1535,9 @@ fn install_to_dir(srcfile: ~str, destdir: ~str) {

 let status = run::run_program(~"cp", ~[~"-r", srcfile, newfile]);
 if status == 0 {
-info(fmt!{"installed: '%s'", newfile});
+info(fmt!("installed: '%s'", newfile));
 } else {
-error(fmt!{"could not install: '%s'", newfile});
+error(fmt!("could not install: '%s'", newfile));
 }
 }

@@ -1595,22 +1595,22 @@ fn dump_sources(c: cargo) {
 writer.write_str(json::to_str(root));
 }
 result::err(e) => {
-error(fmt!{"could not dump sources: %s", e});
+error(fmt!("could not dump sources: %s", e));
 }
 }
 }

 fn copy_warn(srcfile: ~str, destfile: ~str) {
 if !os::copy_file(srcfile, destfile) {
-warn(fmt!{"copying %s to %s failed", srcfile, destfile});
+warn(fmt!("copying %s to %s failed", srcfile, destfile));
 }
 }

 fn cmd_sources(c: cargo) {
 if vec::len(c.opts.free) < 3u {
 for c.sources.each_value |v| {
-info(fmt!{"%s (%s) via %s",
-v.name, v.url, v.method});
+info(fmt!("%s (%s) via %s",
+v.name, v.url, v.method));
 }
 return;
 }
@@ -1635,13 +1635,13 @@ fn cmd_sources(c: cargo) {
 let url = c.opts.free[4u];

 if !valid_pkg_name(name) {
-error(fmt!{"'%s' is an invalid source name", name});
+error(fmt!("'%s' is an invalid source name", name));
 return;
 }

 match c.sources.find(name) {
 some(source) => {
-error(fmt!{"source already exists: %s", name});
+error(fmt!("source already exists: %s", name));
 }
 none => {
 c.sources.insert(name, @{
@@ -1652,7 +1652,7 @@ fn cmd_sources(c: cargo) {
 mut keyfp: none,
 mut packages: ~[mut]
 });
-info(fmt!{"added source: %s", name});
+info(fmt!("added source: %s", name));
 }
 }
 }
@@ -1665,17 +1665,17 @@ fn cmd_sources(c: cargo) {
 let name = c.opts.free[3u];

 if !valid_pkg_name(name) {
-error(fmt!{"'%s' is an invalid source name", name});
+error(fmt!("'%s' is an invalid source name", name));
 return;
 }

 match c.sources.find(name) {
 some(source) => {
 c.sources.remove(name);
-info(fmt!{"removed source: %s", name});
+info(fmt!("removed source: %s", name));
 }
 none => {
-error(fmt!{"no such source: %s", name});
+error(fmt!("no such source: %s", name));
 }
 }
 }
@@ -1689,7 +1689,7 @@ fn cmd_sources(c: cargo) {
 let url = c.opts.free[4u];

 if !valid_pkg_name(name) {
-error(fmt!{"'%s' is an invalid source name", name});
+error(fmt!("'%s' is an invalid source name", name));
 return;
 }

@@ -1703,10 +1703,10 @@ fn cmd_sources(c: cargo) {

 c.sources.insert(name, source);

-info(fmt!{"changed source url: '%s' to '%s'", old, url});
+info(fmt!("changed source url: '%s' to '%s'", old, url));
 }
 none => {
-error(fmt!{"no such source: %s", name});
+error(fmt!("no such source: %s", name));
 }
 }
 }
@@ -1720,7 +1720,7 @@ fn cmd_sources(c: cargo) {
 let method = c.opts.free[4u];

 if !valid_pkg_name(name) {
-error(fmt!{"'%s' is an invalid source name", name});
+error(fmt!("'%s' is an invalid source name", name));
 return;
 }

@@ -1736,11 +1736,11 @@ fn cmd_sources(c: cargo) {

 c.sources.insert(name, source);

-info(fmt!{"changed source method: '%s' to '%s'", old,
-method});
+info(fmt!("changed source method: '%s' to '%s'", old,
+method));
 }
 none => {
-error(fmt!{"no such source: %s", name});
+error(fmt!("no such source: %s", name));
 }
 }
 }
@@ -1754,11 +1754,11 @@ fn cmd_sources(c: cargo) {
 let newn = c.opts.free[4u];

 if !valid_pkg_name(name) {
-error(fmt!{"'%s' is an invalid source name", name});
+error(fmt!("'%s' is an invalid source name", name));
 return;
 }
 if !valid_pkg_name(newn) {
-error(fmt!{"'%s' is an invalid source name", newn});
+error(fmt!("'%s' is an invalid source name", newn));
 return;
 }

@@ -1766,10 +1766,10 @@ fn cmd_sources(c: cargo) {
 some(source) => {
 c.sources.remove(name);
 c.sources.insert(newn, source);
-info(fmt!{"renamed source: %s to %s", name, newn});
+info(fmt!("renamed source: %s to %s", name, newn));
 }
 none => {
-error(fmt!{"no such source: %s", name});
+error(fmt!("no such source: %s", name));
 }
 }
 }
@@ -63,20 +63,20 @@ fn parse_config(args: ~[~str]) -> config {

 fn log_config(config: config) {
 let c = config;
-logv(c, fmt!{"configuration:"});
-logv(c, fmt!{"compile_lib_path: %s", config.compile_lib_path});
-logv(c, fmt!{"run_lib_path: %s", config.run_lib_path});
-logv(c, fmt!{"rustc_path: %s", config.rustc_path});
-logv(c, fmt!{"src_base: %s", config.src_base});
-logv(c, fmt!{"build_base: %s", config.build_base});
-logv(c, fmt!{"stage_id: %s", config.stage_id});
-logv(c, fmt!{"mode: %s", mode_str(config.mode)});
-logv(c, fmt!{"run_ignored: %b", config.run_ignored});
-logv(c, fmt!{"filter: %s", opt_str(config.filter)});
-logv(c, fmt!{"runtool: %s", opt_str(config.runtool)});
-logv(c, fmt!{"rustcflags: %s", opt_str(config.rustcflags)});
-logv(c, fmt!{"verbose: %b", config.verbose});
-logv(c, fmt!{"\n"});
+logv(c, fmt!("configuration:"));
+logv(c, fmt!("compile_lib_path: %s", config.compile_lib_path));
+logv(c, fmt!("run_lib_path: %s", config.run_lib_path));
+logv(c, fmt!("rustc_path: %s", config.rustc_path));
+logv(c, fmt!("src_base: %s", config.src_base));
+logv(c, fmt!("build_base: %s", config.build_base));
+logv(c, fmt!("stage_id: %s", config.stage_id));
+logv(c, fmt!("mode: %s", mode_str(config.mode)));
+logv(c, fmt!("run_ignored: %b", config.run_ignored));
+logv(c, fmt!("filter: %s", opt_str(config.filter)));
+logv(c, fmt!("runtool: %s", opt_str(config.runtool)));
+logv(c, fmt!("rustcflags: %s", opt_str(config.rustcflags)));
+logv(c, fmt!("verbose: %b", config.verbose));
+logv(c, fmt!("\n"));
 }

 fn opt_str(maybestr: option<~str>) -> ~str {
@@ -129,11 +129,11 @@ fn test_opts(config: config) -> test::test_opts {
 }

 fn make_tests(config: config) -> ~[test::test_desc] {
-debug!{"making tests from %s", config.src_base};
+debug!("making tests from %s", config.src_base);
 let mut tests = ~[];
 for os::list_dir_path(config.src_base).each |file| {
 let file = file;
-debug!{"inspecting file %s", file};
+debug!("inspecting file %s", file);
 if is_test(config, file) {
 vec::push(tests, make_test(config, file))
 }
@@ -175,7 +175,7 @@ fn make_test(config: config, testfile: ~str) ->
 }

 fn make_test_name(config: config, testfile: ~str) -> ~str {
-fmt!{"[%s] %s", mode_str(config.mode), testfile}
+fmt!("[%s] %s", mode_str(config.mode), testfile)
 }

 fn make_test_closure(config: config, testfile: ~str) -> test::test_fn {
@@ -45,7 +45,7 @@ fn parse_expected(line_num: uint, line: ~str) -> ~[expected_error] unsafe {
 while idx < len && line[idx] == (' ' as u8) { idx += 1u; }
 let msg = str::slice(line, idx, len);

-debug!{"line=%u kind=%s msg=%s", line_num - adjust_line, kind, msg};
+debug!("line=%u kind=%s msg=%s", line_num - adjust_line, kind, msg);

 return ~[{line: line_num - adjust_line, kind: kind, msg: msg}];
 }
@@ -109,7 +109,7 @@ fn parse_exec_env(line: ~str) -> option<(~str, ~str)> {
 match strs.len() {
 1u => (strs[0], ~""),
 2u => (strs[0], strs[1]),
-n => fail fmt!{"Expected 1 or 2 strings, not %u", n}
+n => fail fmt!("Expected 1 or 2 strings, not %u", n)
 }
 }
 }
@@ -138,7 +138,7 @@ fn parse_name_value_directive(line: ~str,
 option::some(colon) => {
 let value = str::slice(line, colon + str::len(keycolon),
 str::len(line));
-debug!{"%s: %s", directive, value};
+debug!("%s: %s", directive, value);
 option::some(value)
 }
 option::none => option::none
@@ -16,7 +16,7 @@ fn run(config: config, testfile: ~str) {
 // We're going to be dumping a lot of info. Start on a new line.
 io::stdout().write_str(~"\n\n");
 }
-debug!{"running %s", testfile};
+debug!("running %s", testfile);
 let props = load_props(testfile);
 match config.mode {
 mode_compile_fail => run_cfail_test(config, props, testfile),
@@ -68,8 +68,8 @@ fn check_correct_failure_status(procres: procres) {
 const rust_err: int = 101;
 if procres.status != rust_err {
 fatal_procres(
-fmt!{"failure produced the wrong error code: %d",
-procres.status},
+fmt!("failure produced the wrong error code: %d",
+procres.status),
 procres);
 }
 }
@@ -96,11 +96,11 @@ fn run_pretty_test(config: config, props: test_props, testfile: ~str) {

 let mut round = 0;
 while round < rounds {
-logv(config, fmt!{"pretty-printing round %d", round});
+logv(config, fmt!("pretty-printing round %d", round));
 let procres = print_source(config, testfile, srcs[round]);

 if procres.status != 0 {
-fatal_procres(fmt!{"pretty-printing failed in round %d", round},
+fatal_procres(fmt!("pretty-printing failed in round %d", round),
 procres);
 }

@@ -151,7 +151,7 @@ fn run_pretty_test(config: config, props: test_props, testfile: ~str) {
 if expected != actual {
 error(~"pretty-printed source does not match expected source");
 let msg =
-fmt!{"\n\
+fmt!("\n\
 expected:\n\
 ------------------------------------------\n\
 %s\n\
@@ -161,7 +161,7 @@ actual:\n\
 %s\n\
 ------------------------------------------\n\
 \n",
-expected, actual};
+expected, actual);
 io::stdout().write_str(msg);
 fail;
 }
@@ -201,10 +201,10 @@ fn check_error_patterns(props: test_props,
 let mut done = false;
 for str::split_char(procres.stderr, '\n').each |line| {
 if str::contains(line, next_err_pat) {
-debug!{"found error pattern %s", next_err_pat};
+debug!("found error pattern %s", next_err_pat);
 next_err_idx += 1u;
 if next_err_idx == vec::len(props.error_patterns) {
-debug!{"found all error patterns"};
+debug!("found all error patterns");
 done = true;
 break;
 }
@@ -217,11 +217,11 @@ fn check_error_patterns(props: test_props,
 vec::slice(props.error_patterns, next_err_idx,
 vec::len(props.error_patterns));
 if vec::len(missing_patterns) == 1u {
-fatal_procres(fmt!{"error pattern '%s' not found!",
-missing_patterns[0]}, procres);
+fatal_procres(fmt!("error pattern '%s' not found!",
+missing_patterns[0]), procres);
 } else {
 for missing_patterns.each |pattern| {
-error(fmt!{"error pattern '%s' not found!", pattern});
+error(fmt!("error pattern '%s' not found!", pattern));
 }
 fatal_procres(~"multiple error patterns not found", procres);
 }
@@ -240,7 +240,7 @@ fn check_expected_errors(expected_errors: ~[errors::expected_error],
 }

 let prefixes = vec::map(expected_errors, |ee| {
-fmt!{"%s:%u:", testfile, ee.line}
+fmt!("%s:%u:", testfile, ee.line)
 });

 // Scan and extract our error/warning messages,
@@ -253,8 +253,8 @@ fn check_expected_errors(expected_errors: ~[errors::expected_error],
 let mut was_expected = false;
 for vec::eachi(expected_errors) |i, ee| {
 if !found_flags[i] {
-debug!{"prefix=%s ee.kind=%s ee.msg=%s line=%s",
-prefixes[i], ee.kind, ee.msg, line};
+debug!("prefix=%s ee.kind=%s ee.msg=%s line=%s",
+prefixes[i], ee.kind, ee.msg, line);
 if (str::starts_with(line, prefixes[i]) &&
 str::contains(line, ee.kind) &&
 str::contains(line, ee.msg)) {
@@ -272,7 +272,7 @@ fn check_expected_errors(expected_errors: ~[errors::expected_error],

 if !was_expected && (str::contains(line, ~"error") ||
 str::contains(line, ~"warning")) {
-fatal_procres(fmt!{"unexpected error pattern '%s'!", line},
+fatal_procres(fmt!("unexpected error pattern '%s'!", line),
 procres);
 }
 }
@@ -280,8 +280,8 @@ fn check_expected_errors(expected_errors: ~[errors::expected_error],
 for uint::range(0u, vec::len(found_flags)) |i| {
 if !found_flags[i] {
 let ee = expected_errors[i];
-fatal_procres(fmt!{"expected %s on line %u not found: %s",
-ee.kind, ee.line, ee.msg}, procres);
+fatal_procres(fmt!("expected %s on line %u not found: %s",
+ee.kind, ee.line, ee.msg), procres);
 }
 }
 }
@@ -330,7 +330,7 @@ fn compose_and_run_compiler(
 config.compile_lib_path, option::none);
 if auxres.status != 0 {
 fatal_procres(
-fmt!{"auxiliary build of %s failed to compile: ", abs_ab},
+fmt!("auxiliary build of %s failed to compile: ", abs_ab),
 auxres);
 }
 }
@@ -342,7 +342,7 @@ fn compose_and_run_compiler(

 fn ensure_dir(path: Path) {
 if os::path_is_dir(path) { return; }
 if !os::make_dir(path, 0x1c0i32) {
-fail fmt!{"can't make dir %s", path};
+fail fmt!("can't make dir %s", path);
 }
 }

@@ -414,7 +414,7 @@ fn program_output(config: config, testfile: ~str, lib_path: ~str, prog: ~str,
 let cmdline =
 {
 let cmdline = make_cmdline(lib_path, prog, args);
-logv(config, fmt!{"executing %s", cmdline});
+logv(config, fmt!("executing %s", cmdline));
 cmdline
 };
 let res = procsrv::run(lib_path, prog, args, env, input);
@@ -430,19 +430,19 @@ fn program_output(config: config, testfile: ~str, lib_path: ~str, prog: ~str,
 #[cfg(target_os = "macos")]
 #[cfg(target_os = "freebsd")]
 fn make_cmdline(_libpath: ~str, prog: ~str, args: ~[~str]) -> ~str {
-fmt!{"%s %s", prog, str::connect(args, ~" ")}
+fmt!("%s %s", prog, str::connect(args, ~" "))
 }

 #[cfg(target_os = "win32")]
 fn make_cmdline(libpath: ~str, prog: ~str, args: ~[~str]) -> ~str {
-fmt!{"%s %s %s", lib_path_cmd_prefix(libpath), prog,
-str::connect(args, ~" ")}
+fmt!("%s %s %s", lib_path_cmd_prefix(libpath), prog,
+str::connect(args, ~" "))
 }

 // Build the LD_LIBRARY_PATH variable as it would be seen on the command line
 // for diagnostic purposes
 fn lib_path_cmd_prefix(path: ~str) -> ~str {
-fmt!{"%s=\"%s\"", util::lib_path_env_var(), util::make_new_path(path)}
+fmt!("%s=\"%s\"", util::lib_path_env_var(), util::make_new_path(path))
 }

 fn dump_output(config: config, testfile: ~str, out: ~str, err: ~str) {
@@ -475,13 +475,13 @@ fn output_testname(testfile: ~str) -> ~str {
 fn output_base_name(config: config, testfile: ~str) -> ~str {
 let base = config.build_base;
 let filename = output_testname(testfile);
-fmt!{"%s%s.%s", base, filename, config.stage_id}
+fmt!("%s%s.%s", base, filename, config.stage_id)
 }

 fn maybe_dump_to_stdout(config: config, out: ~str, err: ~str) {
 if config.verbose {
-let sep1 = fmt!{"------%s------------------------------", ~"stdout"};
-let sep2 = fmt!{"------%s------------------------------", ~"stderr"};
+let sep1 = fmt!("------%s------------------------------", ~"stdout");
+let sep2 = fmt!("------%s------------------------------", ~"stderr");
 let sep3 = ~"------------------------------------------";
 io::stdout().write_line(sep1);
 io::stdout().write_line(out);
@@ -491,13 +491,13 @@ fn maybe_dump_to_stdout(config: config, out: ~str, err: ~str) {
 }
 }

-fn error(err: ~str) { io::stdout().write_line(fmt!{"\nerror: %s", err}); }
+fn error(err: ~str) { io::stdout().write_line(fmt!("\nerror: %s", err)); }

 fn fatal(err: ~str) -> ! { error(err); fail; }

 fn fatal_procres(err: ~str, procres: procres) -> ! {
 let msg =
-fmt!{"\n\
+fmt!("\n\
 error: %s\n\
 command: %s\n\
 stdout:\n\
@@ -509,7 +509,7 @@ stderr:\n\
 %s\n\
 ------------------------------------------\n\
 \n",
-err, procres.cmdline, procres.stdout, procres.stderr};
+err, procres.cmdline, procres.stdout, procres.stderr);
 io::stdout().write_str(msg);
 fail;
 }
@@ -9,7 +9,7 @@ fn make_new_path(path: ~str) -> ~str {
 // maintain the current value while adding our own
 match getenv(lib_path_env_var()) {
 option::some(curr) => {
-fmt!{"%s%s%s", path, path_div(), curr}
+fmt!("%s%s%s", path, path_div(), curr)
 }
 option::none => path
 }
@@ -26,5 +26,5 @@ fn main() {
 assert (!vec_equal(~[5, 5], ~[4, 5], builtin_equal_int));
 assert (vec_equal(~[5, 5], ~[5, 5], builtin_equal_int));

-error!{"Pass"};
+error!("Pass");
 }
@@ -242,8 +242,8 @@ fn check_variants_T<T: copy>(
 replacer: fn@(ast::crate, uint, T, test_mode) -> ast::crate,
 cx: context
 ) {
-error!{"%s contains %u %s objects", filename,
-vec::len(things), thing_label};
+error!("%s contains %u %s objects", filename,
+vec::len(things), thing_label);

 // Assuming we're not generating any token_trees
 let intr = syntax::parse::token::mk_fake_ident_interner();
@@ -274,9 +274,9 @@ fn check_variants_T<T: copy>(
 check_roundtrip_convergence(str3, 1u);
 }
 tm_run => {
-let file_label = fmt!{"rusttmp/%s_%s_%u_%u",
+let file_label = fmt!("rusttmp/%s_%s_%u_%u",
 last_part(filename),
-thing_label, i, j};
+thing_label, i, j);
 let safe_to_run = !(content_is_dangerous_to_run(*str3)
 || has_raw_pointers(*crate2));
 check_whole_compiler(*str3, file_label, safe_to_run);
@@ -386,7 +386,7 @@ fn check_compiling(filename: ~str) -> happiness {
 stage1/bin/rustc",
 ~[filename]);

-//error!{"Status: %d", p.status};
+//error!("Status: %d", p.status);
 if p.status == 0 {
 passed
 } else if p.err != ~"" {
@@ -515,9 +515,9 @@ fn check_roundtrip_convergence(code: @~str, maxIters: uint) {
 }

 if oldv == newv {
-error!{"Converged after %u iterations", i};
+error!("Converged after %u iterations", i);
 } else {
-error!{"Did not converge after %u iterations!", i};
+error!("Did not converge after %u iterations!", i);
 write_file(~"round-trip-a.rs", *oldv);
 write_file(~"round-trip-b.rs", *newv);
 run::run_program(~"diff",
@@ -528,12 +528,12 @@ fn check_roundtrip_convergence(code: @~str, maxIters: uint) {
 }

 fn check_convergence(files: ~[~str]) {
-error!{"pp convergence tests: %u files", vec::len(files)};
+error!("pp convergence tests: %u files", vec::len(files));
 for files.each |file| {
 if !file_might_not_converge(file) {
 let s = @result::get(io::read_whole_file_str(file));
 if !content_might_not_converge(*s) {
-error!{"pp converge: %s", file};
+error!("pp converge: %s", file);
 // Change from 7u to 2u once
 // https://github.com/mozilla/rust/issues/850 is fixed
 check_roundtrip_convergence(s, 7u);
@@ -545,8 +545,8 @@ fn check_convergence(files: ~[~str]) {
 fn check_variants(files: ~[~str], cx: context) {
 for files.each |file| {
 if cx.mode == tm_converge && file_might_not_converge(file) {
-error!{"Skipping convergence test based on\
-file_might_not_converge"};
+error!("Skipping convergence test based on\
+file_might_not_converge");
 again;
 }

@@ -568,7 +568,7 @@ fn check_variants(files: ~[~str], cx: context) {
 file,
 s, ~[], sess);
 io::with_str_reader(*s, |rdr| {
-error!{"%s",
+error!("%s",
 as_str(|a| pprust::print_crate(
 sess.cm,
 // Assuming no token_trees
@@ -578,7 +578,7 @@ fn check_variants(files: ~[~str], cx: context) {
 file,
 rdr, a,
 pprust::no_ann(),
-false) )}
+false) ))
 });
 check_variants_of_ast(*crate, sess.cm, file, cx);
 }
@@ -586,21 +586,21 @@ fn check_variants(files: ~[~str], cx: context) {

 fn main(args: ~[~str]) {
 if vec::len(args) != 2u {
-error!{"usage: %s <testdir>", args[0]};
+error!("usage: %s <testdir>", args[0]);
 return;
 }
 let mut files = ~[];
 let root = args[1];

 find_rust_files(files, root);
-error!{"== check_convergence =="};
+error!("== check_convergence ==");
 check_convergence(files);
-error!{"== check_variants: converge =="};
+error!("== check_variants: converge ==");
 check_variants(files, { mode: tm_converge });
-error!{"== check_variants: run =="};
+error!("== check_variants: run ==");
 check_variants(files, { mode: tm_run });

-error!{"Fuzzer done"};
+error!("Fuzzer done");
 }

 // Local Variables:
@@ -21,7 +21,7 @@ combinations at the moment.

 Example:

-debug!{"hello, %s!", "world"};
+debug!("hello, %s!", "world");

 */

@@ -446,7 +446,7 @@ mod test {
 #[test]
 fn fmt_slice() {
 let s = "abc";
-let _s = fmt!{"%s", s};
+let _s = fmt!("%s", s);
 }
 }

@@ -11,7 +11,7 @@
 * ~~~
 * let delayed_fib = future::spawn {|| fib(5000) };
 * make_a_sandwich();
-* io::println(fmt!{"fib(5000) = %?", delayed_fib.get()})
+* io::println(fmt!("fib(5000) = %?", delayed_fib.get()))
 * ~~~
 */

@@ -64,9 +64,9 @@ fn from_value<A>(+val: A) -> Future<A> {
 })
 }

-macro_rules! move_it {
+macro_rules! move_it (
 {$x:expr} => { unsafe { let y <- *ptr::addr_of($x); y } }
-}
+)

 fn from_port<A:send>(+port: future_pipe::client::waiting<A>) -> Future<A> {
 #[doc = "
@@ -82,7 +82,7 @@ fn from_port<A:send>(+port: future_pipe::client::waiting<A>) -> Future<A> {
 port_ <-> *port;
 let port = option::unwrap(port_);
 match recv(port) {
-future_pipe::completed(data) => move_it!{data}
+future_pipe::completed(data) => move_it!(data)
 }
 }
 }
|
||||
t += 1;
|
||||
}
|
||||
|
||||
let m = u8to64_le!{self.tail, 0};
|
||||
let m = u8to64_le!(self.tail, 0);
|
||||
|
||||
self.v3 ^= m;
|
||||
compress!{self.v0, self.v1, self.v2, self.v3};
|
||||
compress!{self.v0, self.v1, self.v2, self.v3};
|
||||
compress!(self.v0, self.v1, self.v2, self.v3);
|
||||
compress!(self.v0, self.v1, self.v2, self.v3);
|
||||
self.v0 ^= m;
|
||||
|
||||
self.ntail = 0;
|
||||
@ -191,11 +191,11 @@ impl &SipState : io::Writer {
|
||||
|
||||
let mut i = needed;
|
||||
while i < end {
|
||||
let mi = u8to64_le!{msg, i};
|
||||
let mi = u8to64_le!(msg, i);
|
||||
|
||||
self.v3 ^= mi;
|
||||
compress!{self.v0, self.v1, self.v2, self.v3};
|
||||
compress!{self.v0, self.v1, self.v2, self.v3};
|
||||
compress!(self.v0, self.v1, self.v2, self.v3);
|
||||
compress!(self.v0, self.v1, self.v2, self.v3);
|
||||
self.v0 ^= mi;
|
||||
|
||||
i += 8;
|
||||
@ -246,15 +246,15 @@ impl &SipState : Streaming {
|
||||
if self.ntail > 6 { b |= self.tail[6] as u64 << 48; }
|
||||
|
||||
v3 ^= b;
|
||||
compress!{v0, v1, v2, v3};
|
||||
compress!{v0, v1, v2, v3};
|
||||
compress!(v0, v1, v2, v3);
|
||||
compress!(v0, v1, v2, v3);
|
||||
v0 ^= b;
|
||||
|
||||
v2 ^= 0xff;
|
||||
compress!{v0, v1, v2, v3};
|
||||
compress!{v0, v1, v2, v3};
|
||||
compress!{v0, v1, v2, v3};
|
||||
compress!{v0, v1, v2, v3};
|
||||
compress!(v0, v1, v2, v3);
|
||||
compress!(v0, v1, v2, v3);
|
||||
compress!(v0, v1, v2, v3);
|
||||
compress!(v0, v1, v2, v3);
|
||||
|
||||
return (v0 ^ v1 ^ v2 ^ v3);
|
||||
}
|
||||
@ -373,10 +373,10 @@ fn test_siphash() {
|
||||
}
|
||||
|
||||
while t < 64 {
|
||||
debug!{"siphash test %?", t};
|
||||
let vec = u8to64_le!{vecs[t], 0};
|
||||
debug!("siphash test %?", t);
|
||||
let vec = u8to64_le!(vecs[t], 0);
|
||||
let out = hash_bytes_keyed(buf, k0, k1);
|
||||
debug!{"got %?, expected %?", out, vec};
|
||||
debug!("got %?, expected %?", out, vec);
|
||||
assert vec == out;
|
||||
|
||||
stream_full.reset();
|
||||
@ -384,7 +384,7 @@ fn test_siphash() {
|
||||
let f = stream_full.result_str();
|
||||
let i = stream_inc.result_str();
|
||||
let v = to_hex_str(&vecs[t]);
|
||||
debug!{"%d: (%s) => inc=%s full=%s", t, v, i, f};
|
||||
debug!("%d: (%s) => inc=%s full=%s", t, v, i, f);
|
||||
|
||||
assert f == i && f == v;
|
||||
|
||||
|
@@ -97,8 +97,8 @@ impl T: iter::Times {
 `for int::range(0, x) |_i| { /* anything */ }`."]
 pure fn times(it: fn() -> bool) {
 if self < 0 {
-fail fmt!{"The .times method expects a nonnegative number, \
-but found %?", self};
+fail fmt!("The .times method expects a nonnegative number, \
+but found %?", self);
 }
 let mut i = self;
 while i > 0 {
@@ -114,8 +114,8 @@ impl T: iter::TimesIx {
 pure fn timesi(it: fn(uint) -> bool) {
 let slf = self as uint;
 if slf < 0u {
-fail fmt!{"The .timesi method expects a nonnegative number, \
-but found %?", self};
+fail fmt!("The .timesi method expects a nonnegative number, \
+but found %?", self);
 }
 let mut i = 0u;
 while i < slf {
@@ -292,7 +292,7 @@ impl ByteBuf: Reader {
 return b as int;
 }
 // FIXME (#2738): implement this
-fn unread_byte(_byte: int) { error!{"Unimplemented: unread_byte"}; fail; }
+fn unread_byte(_byte: int) { error!("Unimplemented: unread_byte"); fail; }
 fn eof() -> bool { self.pos == self.len }
 fn seek(offset: int, whence: SeekStyle) {
 let pos = self.pos;
@@ -358,7 +358,7 @@ impl *libc::FILE: Writer {
 let nout = libc::fwrite(vbuf as *c_void, len as size_t,
 1u as size_t, self);
 if nout < 1 as size_t {
-error!{"error writing buffer"};
+error!("error writing buffer");
 log(error, os::last_os_error());
 fail;
 }
@@ -393,7 +393,7 @@ impl fd_t: Writer {
 let vb = ptr::const_offset(vbuf, count) as *c_void;
 let nout = libc::write(self, vb, len as size_t);
 if nout < 0 as ssize_t {
-error!{"error writing buffer"};
+error!("error writing buffer");
 log(error, os::last_os_error());
 fail;
 }
@@ -402,11 +402,11 @@ impl fd_t: Writer {
 }
 }
 fn seek(_offset: int, _whence: SeekStyle) {
-error!{"need 64-bit foreign calls for seek, sorry"};
+error!("need 64-bit foreign calls for seek, sorry");
 fail;
 }
 fn tell() -> uint {
-error!{"need 64-bit foreign calls for tell, sorry"};
+error!("need 64-bit foreign calls for tell, sorry");
 fail;
 }
 fn flush() -> int { 0 }
@@ -453,7 +453,7 @@ fn mk_file_writer(path: ~str, flags: ~[FileFlag])
 (S_IRUSR | S_IWUSR) as c_int)
 };
 if fd < (0 as c_int) {
-result::err(fmt!{"error opening %s: %s", path, os::last_os_error()})
+result::err(fmt!("error opening %s: %s", path, os::last_os_error()))
 } else {
 result::ok(fd_writer(fd, true))
 }
@@ -176,7 +176,7 @@ pure fn find<A: copy,IA:BaseIter<A>>(self: IA,
 #[test]
 fn test_enumerate() {
 enumerate(["0", "1", "2"]) {|i,j|
-assert fmt!{"%u",i} == j;
+assert fmt!("%u",i) == j;
 }
 }

@@ -254,7 +254,7 @@ fn test_flat_map_with_list() {
 let a = bind vec::iter(~[0, 1, 2, 3], _);
 let b = bind flat_map(a, repeat, _);
 let c = to_vec(b);
-debug!{"c = %?", c};
+debug!("c = %?", c);
 assert c == ~[1, 2, 2, 3, 3, 3];
 }

@@ -265,7 +265,7 @@ fn test_repeat() {
 c += ~[(i * i)];
 i += 1u;
 };
-debug!{"c = %?", c};
+debug!("c = %?", c);
 assert c == ~[0u, 1u, 4u, 9u, 16u];
 }

@@ -1036,7 +1036,7 @@ mod tests {
 assert (libc::fclose(ostream) == (0u as c_int));
 let rs = os::copy_file(in, out);
 if (!os::path_exists(in)) {
-fail (fmt!{"%s doesn't exist", in});
+fail (fmt!("%s doesn't exist", in));
 }
 assert(rs);
 let rslt = run::run_program(~"diff", ~[in, out]);
@@ -103,9 +103,9 @@ export rt;
 #[doc(hidden)]
 const SPIN_COUNT: uint = 0;

-macro_rules! move_it {
+macro_rules! move_it (
 { $x:expr } => { unsafe { let y <- *ptr::addr_of($x); y } }
-}
+)

 #[doc(hidden)]
 enum state {
@@ -335,15 +335,15 @@ struct buffer_resource<T: send> {
 let buffer: ~buffer<T>;
 new(+b: ~buffer<T>) {
 //let p = ptr::addr_of(*b);
-//error!{"take %?", p};
+//error!("take %?", p);
 atomic_add_acq(&mut b.header.ref_count, 1);
 self.buffer = b;
 }

 drop unsafe {
-let b = move_it!{self.buffer};
+let b = move_it!(self.buffer);
 //let p = ptr::addr_of(*b);
-//error!{"drop %?", p};
+//error!("drop %?", p);
 let old_count = atomic_sub_rel(&mut b.header.ref_count, 1);
 //let old_count = atomic_xchng_rel(b.header.ref_count, 0);
 if old_count == 1 {
@@ -377,7 +377,7 @@ fn send<T: send, Tbuffer: send>(+p: send_packet_buffered<T, Tbuffer>,
 }
 full => fail ~"duplicate send",
 blocked => {
-debug!{"waking up task for %?", p_};
+debug!("waking up task for %?", p_);
 let old_task = swap_task(&mut p.header.blocked_task, ptr::null());
 if !old_task.is_null() {
 rustrt::task_signal_event(
@@ -461,7 +461,7 @@ fn try_recv<T: send, Tbuffer: send>(+p: recv_packet_buffered<T, Tbuffer>)
 blocked);
 match old_state {
 empty => {
-debug!{"no data available on %?, going to sleep.", p_};
+debug!("no data available on %?, going to sleep.", p_);
 if count == 0 {
 wait_event(this);
 }
@@ -474,7 +474,7 @@ fn try_recv<T: send, Tbuffer: send>(+p: recv_packet_buffered<T, Tbuffer>)
 // sometimes blocking the thing we are waiting on.
 task::yield();
 }
-debug!{"woke up, p.state = %?", copy p.header.state};
+debug!("woke up, p.state = %?", copy p.header.state);
 }
 blocked => if first {
 fail ~"blocking on already blocked packet"
@@ -603,7 +603,7 @@ fn wait_many<T: selectable>(pkts: &[T]) -> uint {
 }

 while !data_avail {
-debug!{"sleeping on %? packets", pkts.len()};
+debug!("sleeping on %? packets", pkts.len());
 let event = wait_event(this) as *packet_header;
 let pos = vec::position(pkts, |p| p.header() == event);

@@ -612,11 +612,11 @@ fn wait_many<T: selectable>(pkts: &[T]) -> uint {
 ready_packet = i;
 data_avail = true;
 }
-none => debug!{"ignoring spurious event, %?", event}
+none => debug!("ignoring spurious event, %?", event)
 }
 }

-debug!{"%?", pkts[ready_packet]};
+debug!("%?", pkts[ready_packet]);

 for pkts.each |p| { unsafe{ (*p.header()).unblock()} }

@@ -725,7 +725,7 @@ struct send_packet_buffered<T: send, Tbuffer: send> {
 let mut p: option<*packet<T>>;
 let mut buffer: option<buffer_resource<Tbuffer>>;
 new(p: *packet<T>) {
-//debug!{"take send %?", p};
+//debug!("take send %?", p);
 self.p = some(p);
 unsafe {
 self.buffer = some(
@@ -735,17 +735,17 @@ struct send_packet_buffered<T: send, Tbuffer: send> {
 }
 drop {
 //if self.p != none {
-// debug!{"drop send %?", option::get(self.p)};
+// debug!("drop send %?", option::get(self.p));
 //}
 if self.p != none {
 let mut p = none;
 p <-> self.p;
 sender_terminate(option::unwrap(p))
 }
-//unsafe { error!{"send_drop: %?",
+//unsafe { error!("send_drop: %?",
 // if self.buffer == none {
 // "none"
-// } else { "some" }}; }
+// } else { "some" }); }
 }
 fn unwrap() -> *packet<T> {
 let mut p = none;
@@ -766,7 +766,7 @@ struct send_packet_buffered<T: send, Tbuffer: send> {
 }

 fn reuse_buffer() -> buffer_resource<Tbuffer> {
-//error!{"send reuse_buffer"};
+//error!("send reuse_buffer");
 let mut tmp = none;
 tmp <-> self.buffer;
 option::unwrap(tmp)
@@ -786,7 +786,7 @@ struct recv_packet_buffered<T: send, Tbuffer: send> : selectable {
 let mut p: option<*packet<T>>;
 let mut buffer: option<buffer_resource<Tbuffer>>;
 new(p: *packet<T>) {
-//debug!{"take recv %?", p};
+//debug!("take recv %?", p);
 self.p = some(p);
 unsafe {
 self.buffer = some(
@@ -796,17 +796,17 @@ struct recv_packet_buffered<T: send, Tbuffer: send> : selectable {
 }
 drop {
 //if self.p != none {
-// debug!{"drop recv %?", option::get(self.p)};
+// debug!("drop recv %?", option::get(self.p));
 //}
 if self.p != none {
 let mut p = none;
 p <-> self.p;
 receiver_terminate(option::unwrap(p))
 }
-//unsafe { error!{"recv_drop: %?",
+//unsafe { error!("recv_drop: %?",
 // if self.buffer == none {
 // "none"
-// } else { "some" }}; }
+// } else { "some" }); }
 }
 fn unwrap() -> *packet<T> {
 let mut p = none;
@@ -827,7 +827,7 @@ struct recv_packet_buffered<T: send, Tbuffer: send> : selectable {
 }

 fn reuse_buffer() -> buffer_resource<Tbuffer> {
-//error!{"recv reuse_buffer"};
+//error!("recv reuse_buffer");
 let mut tmp = none;
 tmp <-> self.buffer;
 option::unwrap(tmp)
@@ -991,8 +991,8 @@ impl<T: send> port<T>: recv<T> {
 endp <-> self.endp;
 match move pipes::try_recv(unwrap(endp)) {
 some(streamp::data(x, endp)) => {
-self.endp = some(move_it!{endp});
-some(move_it!{x})
+self.endp = some(move_it!(endp));
+some(move_it!(x))
 }
 none => none
 }
@@ -69,7 +69,7 @@ unsafe fn chan_from_global_ptr<T: send>(
 log(debug,~"BEFORE COMPARE AND SWAP");
 let swapped = compare_and_swap(
 global, 0u, unsafe::reinterpret_cast(ch));
-log(debug,fmt!{"AFTER .. swapped? %?", swapped});
+log(debug,fmt!("AFTER .. swapped? %?", swapped));

 if swapped {
 // Success!
@@ -328,7 +328,7 @@ mod tests {
 let ra = rand::seeded_rng(seed);
 // Regression test that isaac is actually using the above vector
 let r = ra.next();
-error!{"%?", r};
+error!("%?", r);
 assert r == 890007737u32 // on x86_64
 || r == 2935188040u32; // on x86
 }
@@ -21,7 +21,7 @@ pure fn get<T: copy, U>(res: result<T, U>) -> T {
 match res {
 ok(t) => t,
 err(the_err) => unchecked {
-fail fmt!{"get called on error result: %?", the_err}
+fail fmt!("get called on error result: %?", the_err)
 }
 }
 }
@@ -106,7 +106,7 @@ fn with_envp<T>(env: &option<~[(~str,~str)]>,

 for vec::each(es) |e| {
 let (k,v) = e;
-let t = @(fmt!{"%s=%s", k, v});
+let t = @(fmt!("%s=%s", k, v));
 vec::push(tmps, t);
 vec::push_all(ptrs, str::as_c_str(*t, |b| ~[b]));
 }
@@ -131,7 +131,7 @@ fn with_envp<T>(env: &option<~[(~str,~str)]>,
 let mut blk : ~[u8] = ~[];
 for vec::each(es) |e| {
 let (k,v) = e;
-let t = fmt!{"%s=%s", k, v};
+let t = fmt!("%s=%s", k, v);
 let mut v : ~[u8] = ::unsafe::reinterpret_cast(t);
 blk += v;
 ::unsafe::forget(v);
@@ -332,7 +332,7 @@ fn program_output(prog: &str, args: &[~str]) ->
 fn writeclose(fd: c_int, s: &str) {
 import io::WriterUtil;

-error!{"writeclose %d, %s", fd as int, s};
+error!("writeclose %d, %s", fd as int, s);
 let writer = io::fd_writer(fd, false);
 writer.write_str(s);

@@ -76,7 +76,7 @@ mod linear {
 len_buckets: uint) -> uint {
 let n = (idx + 1) % len_buckets;
 unsafe{ // argh. log not considered pure.
-debug!{"next_bucket(%?, %?) = %?", idx, len_buckets, n};
+debug!("next_bucket(%?, %?) = %?", idx, len_buckets, n);
 }
 return n;
 }
@@ -154,15 +154,15 @@ mod linear {
 match self.bucket_for_key_with_hash(self.buckets, hash, &k) {
 TableFull => {fail ~"Internal logic error";}
 FoundHole(idx) => {
-debug!{"insert fresh (%?->%?) at idx %?, hash %?",
-k, v, idx, hash};
+debug!("insert fresh (%?->%?) at idx %?, hash %?",
+k, v, idx, hash);
 self.buckets[idx] = some({hash: hash, key: k, value: v});
 self.size += 1;
 return true;
 }
 FoundEntry(idx) => {
-debug!{"insert overwrite (%?->%?) at idx %?, hash %?",
-k, v, idx, hash};
+debug!("insert overwrite (%?->%?) at idx %?, hash %?",
+k, v, idx, hash);
 self.buckets[idx] = some({hash: hash, key: k, value: v});
 return false;
 }
@@ -308,7 +308,7 @@ mod linear {
 fn get(&const self, k: &K) -> V {
 let value = self.find(k);
 if value.is_none() {
-fail fmt!{"No entry found for key: %?", k};
+fail fmt!("No entry found for key: %?", k);
 }
 option::unwrap(value)
 }
@@ -16,7 +16,7 @@ struct Frame {

 fn walk_stack(visit: fn(Frame) -> bool) {

-debug!{"beginning stack walk"};
+debug!("beginning stack walk");

 do frame_address |frame_pointer| {
 let mut frame_address: *Word = unsafe {
@@ -25,14 +25,14 @@ fn walk_stack(visit: fn(Frame) -> bool) {
 loop {
 let fr = Frame(frame_address);

-debug!{"frame: %x", unsafe { reinterpret_cast(fr.fp) }};
+debug!("frame: %x", unsafe { reinterpret_cast(fr.fp) });
 visit(fr);

 unsafe {
 let next_fp: **Word = reinterpret_cast(frame_address);
 frame_address = *next_fp;
 if *frame_address == 0u {
-debug!{"encountered task_start_wrapper. ending walk"};
+debug!("encountered task_start_wrapper. ending walk");
 // This is the task_start_wrapper_frame. There is
 // no stack beneath it and it is a foreign frame.
 break;
@@ -1464,7 +1464,7 @@ pure fn is_char_boundary(s: &str, index: uint) -> bool {
 * let i = 0u;
 * while i < str::len(s) {
 * let {ch, next} = str::char_range_at(s, i);
-* std::io::println(fmt!{"%u: %c",i,ch});
+* std::io::println(fmt!("%u: %c",i,ch));
 * i = next;
 * }
 * ~~~
@@ -2138,7 +2138,7 @@ mod tests {
 fn t(s: ~str, c: char, u: ~[~str]) {
 log(debug, ~"split_byte: " + s);
 let v = split_char(s, c);
-debug!{"split_byte to: %?", v};
+debug!("split_byte to: %?", v);
 assert vec::all2(v, u, |a,b| a == b);
 }
 t(~"abc.hello.there", '.', ~[~"abc", ~"hello", ~"there"]);
@@ -2167,8 +2167,8 @@ mod tests {
 fn t(s: ~str, c: char, n: uint, u: ~[~str]) {
 log(debug, ~"splitn_byte: " + s);
 let v = splitn_char(s, c, n);
-debug!{"split_byte to: %?", v};
-debug!{"comparing vs. %?", u};
+debug!("split_byte to: %?", v);
+debug!("comparing vs. %?", u);
 assert vec::all2(v, u, |a,b| a == b);
 }
 t(~"abc.hello.there", '.', 0u, ~[~"abc.hello.there"]);
@@ -1031,10 +1031,10 @@ fn kill_taskgroup(state: TaskGroupInner, me: *rust_task, is_main: bool) {

 // FIXME (#2912): Work around core-vs-coretest function duplication. Can't use
 // a proper closure because the #[test]s won't understand. Have to fake it.
-macro_rules! taskgroup_key {
+macro_rules! taskgroup_key (
 // Use a "code pointer" value that will never be a real code pointer.
 {} => (unsafe::transmute((-2 as uint, 0u)))
-}
+)

 fn gen_child_taskgroup(linked: bool, supervised: bool)
 -> (TaskGroupArc, AncestorList, bool) {
@@ -274,37 +274,37 @@ mod tests {
 fn reccyeq(a: reccy, b: reccy) -> bool {
 return a.x == b.x && a.y == b.y && taggyeq(a.t, b.t);
 }
-debug!{"*** test boxes"};
+debug!("*** test boxes");
 test_boxes(@5, @72, @64, @175);
-debug!{"*** end test boxes"};
-debug!{"test parameterized: int"};
+debug!("*** end test boxes");
+debug!("test parameterized: int");
 let eq1: eqfn<int> = inteq;
 test_parameterized::<int>(eq1, 5, 72, 64, 175);
-debug!{"*** test parameterized: @int"};
+debug!("*** test parameterized: @int");
 let eq2: eqfn<@int> = intboxeq;
 test_parameterized::<@int>(eq2, @5, @72, @64, @175);
-debug!{"*** end test parameterized @int"};
-debug!{"test parameterized: taggy"};
+debug!("*** end test parameterized @int");
+debug!("test parameterized: taggy");
 let eq3: eqfn<taggy> = taggyeq;
 test_parameterized::<taggy>(eq3, one(1), two(1, 2), three(1, 2, 3),
 two(17, 42));

-debug!{"*** test parameterized: taggypar<int>"};
+debug!("*** test parameterized: taggypar<int>");
 let eq4: eqfn<taggypar<int>> = |x,y| taggypareq::<int>(x, y);
 test_parameterized::<taggypar<int>>(eq4, onepar::<int>(1),
 twopar::<int>(1, 2),
 threepar::<int>(1, 2, 3),
 twopar::<int>(17, 42));
-debug!{"*** end test parameterized: taggypar::<int>"};
+debug!("*** end test parameterized: taggypar::<int>");

-debug!{"*** test parameterized: reccy"};
+debug!("*** test parameterized: reccy");
 let reccy1: reccy = {x: 1, y: 2, t: one(1)};
 let reccy2: reccy = {x: 345, y: 2, t: two(1, 2)};
 let reccy3: reccy = {x: 1, y: 777, t: three(1, 2, 3)};
 let reccy4: reccy = {x: 19, y: 252, t: two(17, 42)};
 let eq5: eqfn<reccy> = reccyeq;
 test_parameterized::<reccy>(eq5, reccy1, reccy2, reccy3, reccy4);
-debug!{"*** end test parameterized: reccy"};
-debug!{"*** done"};
+debug!("*** end test parameterized: reccy");
+debug!("*** done");
 }
 }
@ -70,7 +70,7 @@ fn vuint_at(data: &[u8], start: uint) -> {val: uint, next: uint} {
(data[start + 2u] as uint) << 8u |
(data[start + 3u] as uint),
next: start + 4u};
} else { error!{"vint too big"}; fail; }
} else { error!("vint too big"); fail; }
}

fn doc(data: @~[u8]) -> doc {
@ -106,7 +106,7 @@ fn get_doc(d: doc, tg: uint) -> doc {
match maybe_get_doc(d, tg) {
some(d) => return d,
none => {
error!{"failed to find block with tag %u", tg};
error!("failed to find block with tag %u", tg);
fail;
}
}
@ -186,7 +186,7 @@ fn write_sized_vuint(w: io::Writer, n: uint, size: uint) {
n as u8]),
4u => w.write(&[0x10u8 | ((n >> 24_u) as u8), (n >> 16_u) as u8,
(n >> 8_u) as u8, n as u8]),
_ => fail fmt!{"vint to write too big: %?", n}
_ => fail fmt!("vint to write too big: %?", n)
};
}

@ -195,7 +195,7 @@ fn write_vuint(w: io::Writer, n: uint) {
if n < 0x4000_u { write_sized_vuint(w, n, 2u); return; }
if n < 0x200000_u { write_sized_vuint(w, n, 3u); return; }
if n < 0x10000000_u { write_sized_vuint(w, n, 4u); return; }
fail fmt!{"vint to write too big: %?", n};
fail fmt!("vint to write too big: %?", n);
}

fn writer(w: io::Writer) -> writer {
@ -206,7 +206,7 @@ fn writer(w: io::Writer) -> writer {
// FIXME (#2741): Provide a function to write the standard ebml header.
impl writer {
fn start_tag(tag_id: uint) {
debug!{"Start tag %u", tag_id};
debug!("Start tag %u", tag_id);

// Write the enum ID:
write_vuint(self.writer, tag_id);
@ -225,7 +225,7 @@ impl writer {
write_sized_vuint(self.writer, size, 4u);
self.writer.seek(cur_pos as int, io::SeekSet);

debug!{"End tag (size = %u)", size};
debug!("End tag (size = %u)", size);
}

fn wr_tag(tag_id: uint, blk: fn()) {
@ -289,12 +289,12 @@ impl writer {
}

fn wr_bytes(b: &[u8]) {
debug!{"Write %u bytes", vec::len(b)};
debug!("Write %u bytes", vec::len(b));
self.writer.write(b);
}

fn wr_str(s: ~str) {
debug!{"Write str: %?", s};
debug!("Write str: %?", s);
self.writer.write(str::bytes(s));
}
}
@ -417,29 +417,29 @@ priv impl ebml_deserializer {
self.pos = r_doc.end;
let str = ebml::doc_as_str(r_doc);
if lbl != str {
fail fmt!{"Expected label %s but found %s", lbl, str};
fail fmt!("Expected label %s but found %s", lbl, str);
}
}
}
}

fn next_doc(exp_tag: ebml_serializer_tag) -> ebml::doc {
debug!{". next_doc(exp_tag=%?)", exp_tag};
debug!(". next_doc(exp_tag=%?)", exp_tag);
if self.pos >= self.parent.end {
fail ~"no more documents in current node!";
}
let {tag: r_tag, doc: r_doc} =
ebml::doc_at(self.parent.data, self.pos);
debug!{"self.parent=%?-%? self.pos=%? r_tag=%? r_doc=%?-%?",
debug!("self.parent=%?-%? self.pos=%? r_tag=%? r_doc=%?-%?",
copy self.parent.start, copy self.parent.end,
copy self.pos, r_tag, r_doc.start, r_doc.end};
copy self.pos, r_tag, r_doc.start, r_doc.end);
if r_tag != (exp_tag as uint) {
fail fmt!{"expected EMBL doc with tag %? but found tag %?",
exp_tag, r_tag};
fail fmt!("expected EMBL doc with tag %? but found tag %?",
exp_tag, r_tag);
}
if r_doc.end > self.parent.end {
fail fmt!{"invalid EBML, child extends to 0x%x, parent to 0x%x",
r_doc.end, self.parent.end};
fail fmt!("invalid EBML, child extends to 0x%x, parent to 0x%x",
r_doc.end, self.parent.end);
}
self.pos = r_doc.end;
return r_doc;
@ -458,7 +458,7 @@ priv impl ebml_deserializer {

fn _next_uint(exp_tag: ebml_serializer_tag) -> uint {
let r = ebml::doc_as_u32(self.next_doc(exp_tag));
debug!{"_next_uint exp_tag=%? result=%?", exp_tag, r};
debug!("_next_uint exp_tag=%? result=%?", exp_tag, r);
return r as uint;
}
}
@ -473,7 +473,7 @@ impl ebml_deserializer: serialization::deserializer {
fn read_uint() -> uint {
let v = ebml::doc_as_u64(self.next_doc(es_uint));
if v > (core::uint::max_value as u64) {
fail fmt!{"uint %? too large for this architecture", v};
fail fmt!("uint %? too large for this architecture", v);
}
return v as uint;
}
@ -485,7 +485,7 @@ impl ebml_deserializer: serialization::deserializer {
fn read_int() -> int {
let v = ebml::doc_as_u64(self.next_doc(es_int)) as i64;
if v > (int::max_value as i64) || v < (int::min_value as i64) {
fail fmt!{"int %? out of range for this architecture", v};
fail fmt!("int %? out of range for this architecture", v);
}
return v as int;
}
@ -500,67 +500,67 @@ impl ebml_deserializer: serialization::deserializer {

// Compound types:
fn read_enum<T>(name: ~str, f: fn() -> T) -> T {
debug!{"read_enum(%s)", name};
debug!("read_enum(%s)", name);
self._check_label(name);
self.push_doc(self.next_doc(es_enum), f)
}

fn read_enum_variant<T>(f: fn(uint) -> T) -> T {
debug!{"read_enum_variant()"};
debug!("read_enum_variant()");
let idx = self._next_uint(es_enum_vid);
debug!{" idx=%u", idx};
debug!(" idx=%u", idx);
do self.push_doc(self.next_doc(es_enum_body)) {
f(idx)
}
}

fn read_enum_variant_arg<T>(idx: uint, f: fn() -> T) -> T {
debug!{"read_enum_variant_arg(idx=%u)", idx};
debug!("read_enum_variant_arg(idx=%u)", idx);
f()
}

fn read_vec<T>(f: fn(uint) -> T) -> T {
debug!{"read_vec()"};
debug!("read_vec()");
do self.push_doc(self.next_doc(es_vec)) {
let len = self._next_uint(es_vec_len);
debug!{" len=%u", len};
debug!(" len=%u", len);
f(len)
}
}

fn read_vec_elt<T>(idx: uint, f: fn() -> T) -> T {
debug!{"read_vec_elt(idx=%u)", idx};
debug!("read_vec_elt(idx=%u)", idx);
self.push_doc(self.next_doc(es_vec_elt), f)
}

fn read_box<T>(f: fn() -> T) -> T {
debug!{"read_box()"};
debug!("read_box()");
f()
}

fn read_uniq<T>(f: fn() -> T) -> T {
debug!{"read_uniq()"};
debug!("read_uniq()");
f()
}

fn read_rec<T>(f: fn() -> T) -> T {
debug!{"read_rec()"};
debug!("read_rec()");
f()
}

fn read_rec_field<T>(f_name: ~str, f_idx: uint, f: fn() -> T) -> T {
debug!{"read_rec_field(%s, idx=%u)", f_name, f_idx};
debug!("read_rec_field(%s, idx=%u)", f_name, f_idx);
self._check_label(f_name);
f()
}

fn read_tup<T>(sz: uint, f: fn() -> T) -> T {
debug!{"read_tup(sz=%u)", sz};
debug!("read_tup(sz=%u)", sz);
f()
}

fn read_tup_elt<T>(idx: uint, f: fn() -> T) -> T {
debug!{"read_tup_elt(idx=%u)", idx};
debug!("read_tup_elt(idx=%u)", idx);
f()
}
}
@ -613,14 +613,14 @@ fn test_option_int() {
}

fn test_v(v: option<int>) {
debug!{"v == %?", v};
debug!("v == %?", v);
let mbuf = io::mem_buffer();
let ebml_w = ebml::writer(io::mem_buffer_writer(mbuf));
serialize_0(ebml_w, v);
let ebml_doc = ebml::doc(@io::mem_buffer_buf(mbuf));
let deser = ebml_deserializer(ebml_doc);
let v1 = deserialize_0(deser);
debug!{"v1 == %?", v1};
debug!("v1 == %?", v1);
assert v == v1;
}

@ -314,7 +314,7 @@ fn opt_vals(m: matches, nm: ~str) -> ~[optval] {
return match find_opt(m.opts, mkname(nm)) {
some(id) => m.vals[id],
none => {
error!{"No option '%s' defined", nm};
error!("No option '%s' defined", nm);
fail
}
};

@ -628,7 +628,7 @@ impl json: to_str::ToStr {

impl error: to_str::ToStr {
fn to_str() -> ~str {
fmt!{"%u:%u: %s", self.line, self.col, *self.msg}
fmt!("%u:%u: %s", self.line, self.col, *self.msg)
}
}

@ -137,16 +137,16 @@ mod chained {
loop {
match copy e0.next {
none => {
debug!{"search_tbl: absent, comp %u, hash %u, idx %u",
comp, h, idx};
debug!("search_tbl: absent, comp %u, hash %u, idx %u",
comp, h, idx);
return not_found;
}
some(e1) => {
comp += 1u;
if e1.hash == h && self.eqer(&e1.key, k) {
debug!{"search_tbl: present, comp %u, \
debug!("search_tbl: present, comp %u, \
hash %u, idx %u",
comp, h, idx};
comp, h, idx);
return found_after(e0, e1);
} else {
e0 = e1;
@ -160,14 +160,14 @@ mod chained {
let idx = h % vec::len(self.chains);
match copy self.chains[idx] {
none => {
debug!{"search_tbl: none, comp %u, hash %u, idx %u",
0u, h, idx};
debug!("search_tbl: none, comp %u, hash %u, idx %u",
0u, h, idx);
return not_found;
}
some(e) => {
if e.hash == h && self.eqer(&e.key, k) {
debug!{"search_tbl: present, comp %u, hash %u, idx %u",
1u, h, idx};
debug!("search_tbl: present, comp %u, hash %u, idx %u",
1u, h, idx);
return found_first(idx, e);
} else {
return self.search_rem(k, h, idx, e);
@ -277,7 +277,7 @@ mod chained {
fn get(+k: K) -> V {
let opt_v = self.find(k);
if opt_v.is_none() {
fail fmt!{"Key not found in table: %?", k};
fail fmt!("Key not found in table: %?", k);
}
option::unwrap(opt_v)
}
@ -563,14 +563,14 @@ mod tests {

#[test]
fn test_simple() {
debug!{"*** starting test_simple"};
debug!("*** starting test_simple");
pure fn eq_uint(x: &uint, y: &uint) -> bool { *x == *y }
pure fn uint_id(x: &uint) -> uint { *x }
let hasher_uint: map::hashfn<uint> = uint_id;
let eqer_uint: map::eqfn<uint> = eq_uint;
let hasher_str: map::hashfn<~str> = str::hash;
let eqer_str: map::eqfn<~str> = str::eq;
debug!{"uint -> uint"};
debug!("uint -> uint");
let hm_uu: map::hashmap<uint, uint> =
map::hashmap::<uint, uint>(copy hasher_uint, copy eqer_uint);
assert (hm_uu.insert(10u, 12u));
@ -586,7 +586,7 @@ mod tests {
let ten: ~str = ~"ten";
let eleven: ~str = ~"eleven";
let twelve: ~str = ~"twelve";
debug!{"str -> uint"};
debug!("str -> uint");
let hm_su: map::hashmap<~str, uint> =
map::hashmap::<~str, uint>(copy hasher_str, copy eqer_str);
assert (hm_su.insert(~"ten", 12u));
@ -600,7 +600,7 @@ mod tests {
assert (hm_su.get(~"twelve") == 14u);
assert (!hm_su.insert(~"twelve", 12u));
assert (hm_su.get(~"twelve") == 12u);
debug!{"uint -> str"};
debug!("uint -> str");
let hm_us: map::hashmap<uint, ~str> =
map::hashmap::<uint, ~str>(copy hasher_uint, copy eqer_uint);
assert (hm_us.insert(10u, ~"twelve"));
@ -613,7 +613,7 @@ mod tests {
assert hm_us.get(12u) == ~"fourteen";
assert (!hm_us.insert(12u, ~"twelve"));
assert hm_us.get(12u) == ~"twelve";
debug!{"str -> str"};
debug!("str -> str");
let hm_ss: map::hashmap<~str, ~str> =
map::hashmap::<~str, ~str>(copy hasher_str, copy eqer_str);
assert (hm_ss.insert(ten, ~"twelve"));
@ -626,7 +626,7 @@ mod tests {
assert hm_ss.get(~"twelve") == ~"fourteen";
assert (!hm_ss.insert(~"twelve", ~"twelve"));
assert hm_ss.get(~"twelve") == ~"twelve";
debug!{"*** finished test_simple"};
debug!("*** finished test_simple");
}

@ -635,11 +635,11 @@ mod tests {
*/
#[test]
fn test_growth() {
debug!{"*** starting test_growth"};
debug!("*** starting test_growth");
let num_to_insert: uint = 64u;
pure fn eq_uint(x: &uint, y: &uint) -> bool { *x == *y }
pure fn uint_id(x: &uint) -> uint { *x }
debug!{"uint -> uint"};
debug!("uint -> uint");
let hasher_uint: map::hashfn<uint> = uint_id;
let eqer_uint: map::eqfn<uint> = eq_uint;
let hm_uu: map::hashmap<uint, uint> =
@ -647,26 +647,26 @@ mod tests {
let mut i: uint = 0u;
while i < num_to_insert {
assert (hm_uu.insert(i, i * i));
debug!{"inserting %u -> %u", i, i*i};
debug!("inserting %u -> %u", i, i*i);
i += 1u;
}
debug!{"-----"};
debug!("-----");
i = 0u;
while i < num_to_insert {
debug!{"get(%u) = %u", i, hm_uu.get(i)};
debug!("get(%u) = %u", i, hm_uu.get(i));
assert (hm_uu.get(i) == i * i);
i += 1u;
}
assert (hm_uu.insert(num_to_insert, 17u));
assert (hm_uu.get(num_to_insert) == 17u);
debug!{"-----"};
debug!("-----");
i = 0u;
while i < num_to_insert {
debug!{"get(%u) = %u", i, hm_uu.get(i)};
debug!("get(%u) = %u", i, hm_uu.get(i));
assert (hm_uu.get(i) == i * i);
i += 1u;
}
debug!{"str -> str"};
debug!("str -> str");
let hasher_str: map::hashfn<~str> = str::hash;
let eqer_str: map::eqfn<~str> = str::eq;
let hm_ss: map::hashmap<~str, ~str> =
@ -674,17 +674,17 @@ mod tests {
i = 0u;
while i < num_to_insert {
assert hm_ss.insert(uint::to_str(i, 2u), uint::to_str(i * i, 2u));
debug!{"inserting \"%s\" -> \"%s\"",
debug!("inserting \"%s\" -> \"%s\"",
uint::to_str(i, 2u),
uint::to_str(i*i, 2u)};
uint::to_str(i*i, 2u));
i += 1u;
}
debug!{"-----"};
debug!("-----");
i = 0u;
while i < num_to_insert {
debug!{"get(\"%s\") = \"%s\"",
debug!("get(\"%s\") = \"%s\"",
uint::to_str(i, 2u),
hm_ss.get(uint::to_str(i, 2u))};
hm_ss.get(uint::to_str(i, 2u)));
assert hm_ss.get(uint::to_str(i, 2u)) == uint::to_str(i * i, 2u);
i += 1u;
}
@ -692,21 +692,21 @@ mod tests {
uint::to_str(17u, 2u)));
assert hm_ss.get(uint::to_str(num_to_insert, 2u)) ==
uint::to_str(17u, 2u);
debug!{"-----"};
debug!("-----");
i = 0u;
while i < num_to_insert {
debug!{"get(\"%s\") = \"%s\"",
debug!("get(\"%s\") = \"%s\"",
uint::to_str(i, 2u),
hm_ss.get(uint::to_str(i, 2u))};
hm_ss.get(uint::to_str(i, 2u)));
assert hm_ss.get(uint::to_str(i, 2u)) == uint::to_str(i * i, 2u);
i += 1u;
}
debug!{"*** finished test_growth"};
debug!("*** finished test_growth");
}

#[test]
fn test_removal() {
debug!{"*** starting test_removal"};
debug!("*** starting test_removal");
let num_to_insert: uint = 64u;
fn eq(x: &uint, y: &uint) -> bool { *x == *y }
fn hash(u: &uint) -> uint {
@ -724,12 +724,12 @@ mod tests {
let mut i: uint = 0u;
while i < num_to_insert {
assert (hm.insert(i, i * i));
debug!{"inserting %u -> %u", i, i*i};
debug!("inserting %u -> %u", i, i*i);
i += 1u;
}
assert (hm.size() == num_to_insert);
debug!{"-----"};
debug!{"removing evens"};
debug!("-----");
debug!("removing evens");
i = 0u;
while i < num_to_insert {
let v = hm.remove(i);
@ -737,44 +737,44 @@ mod tests {
i += 2u;
}
assert (hm.size() == num_to_insert / 2u);
debug!{"-----"};
debug!("-----");
i = 1u;
while i < num_to_insert {
debug!{"get(%u) = %u", i, hm.get(i)};
debug!("get(%u) = %u", i, hm.get(i));
assert (hm.get(i) == i * i);
i += 2u;
}
debug!{"-----"};
debug!("-----");
i = 1u;
while i < num_to_insert {
debug!{"get(%u) = %u", i, hm.get(i)};
debug!("get(%u) = %u", i, hm.get(i));
assert (hm.get(i) == i * i);
i += 2u;
}
debug!{"-----"};
debug!("-----");
i = 0u;
while i < num_to_insert {
assert (hm.insert(i, i * i));
debug!{"inserting %u -> %u", i, i*i};
debug!("inserting %u -> %u", i, i*i);
i += 2u;
}
assert (hm.size() == num_to_insert);
debug!{"-----"};
debug!("-----");
i = 0u;
while i < num_to_insert {
debug!{"get(%u) = %u", i, hm.get(i)};
debug!("get(%u) = %u", i, hm.get(i));
assert (hm.get(i) == i * i);
i += 1u;
}
debug!{"-----"};
debug!("-----");
assert (hm.size() == num_to_insert);
i = 0u;
while i < num_to_insert {
debug!{"get(%u) = %u", i, hm.get(i)};
debug!("get(%u) = %u", i, hm.get(i));
assert (hm.get(i) == i * i);
i += 1u;
}
debug!{"*** finished test_removal"};
debug!("*** finished test_removal");
}

#[test]

@ -88,7 +88,7 @@ fn get_addr(++node: ~str, iotask: iotask)
-> result::result<~[ip_addr], ip_get_addr_err> unsafe {
do core::comm::listen |output_ch| {
do str::as_buf(node) |node_ptr, len| {
log(debug, fmt!{"slice len %?", len});
log(debug, fmt!("slice len %?", len));
let handle = create_uv_getaddrinfo_t();
let handle_ptr = ptr::addr_of(handle);
let handle_data: get_addr_data = {
@ -161,10 +161,10 @@ mod v4 {
}
});
if vec::len(parts) != 4u {
result::err(fmt!{"'%s' doesn't have 4 parts", ip})
result::err(fmt!("'%s' doesn't have 4 parts", ip))
}
else if vec::contains(parts, 256u) {
result::err(fmt!{"invalid octal in addr '%s'", ip})
result::err(fmt!("invalid octal in addr '%s'", ip))
}
else {
result::ok({a: parts[0] as u8, b: parts[1] as u8,
@ -185,8 +185,8 @@ mod v4 {

let new_addr = uv_ip4_addr(ip, 22);
let reformatted_name = uv_ip4_name(&new_addr);
log(debug, fmt!{"try_parse_addr: input ip: %s reparsed ip: %s",
ip, reformatted_name});
log(debug, fmt!("try_parse_addr: input ip: %s reparsed ip: %s",
ip, reformatted_name));
let ref_ip_rep_result = parse_to_ipv4_rep(reformatted_name);
if result::is_err(ref_ip_rep_result) {
let err_str = result::get_err(ref_ip_rep_result);
@ -230,13 +230,13 @@ mod v6 {
// need to figure out how to establish a parse failure..
let new_addr = uv_ip6_addr(ip, 22);
let reparsed_name = uv_ip6_name(&new_addr);
log(debug, fmt!{"v6::try_parse_addr ip: '%s' reparsed '%s'",
ip, reparsed_name});
log(debug, fmt!("v6::try_parse_addr ip: '%s' reparsed '%s'",
ip, reparsed_name));
// '::' appears to be uv_ip6_name() returns for bogus
// parses..
if ip != ~"::" && reparsed_name == ~"::" {
result::err({err_msg:fmt!{"failed to parse '%s'",
ip}})
result::err({err_msg:fmt!("failed to parse '%s'",
ip)})
}
else {
result::ok(ipv6(new_addr))
@ -257,7 +257,7 @@ extern fn get_addr_cb(handle: *uv_getaddrinfo_t, status: libc::c_int,
if status == 0i32 {
if res != (ptr::null::<addrinfo>()) {
let mut out_vec = ~[];
log(debug, fmt!{"initial addrinfo: %?", res});
log(debug, fmt!("initial addrinfo: %?", res));
let mut curr_addr = res;
loop {
let new_ip_addr = if ll::is_ipv4_addrinfo(curr_addr) {
@ -284,11 +284,11 @@ extern fn get_addr_cb(handle: *uv_getaddrinfo_t, status: libc::c_int,
}
else {
curr_addr = next_addr;
log(debug, fmt!{"next_addr addrinfo: %?", curr_addr});
log(debug, fmt!("next_addr addrinfo: %?", curr_addr));
}
}
log(debug, fmt!{"successful process addrinfo result, len: %?",
vec::len(out_vec)});
log(debug, fmt!("successful process addrinfo result, len: %?",
vec::len(out_vec)));
(*handle_data).output_ch.send(result::ok(out_vec));
}
else {
@ -320,19 +320,19 @@ mod test {
fn test_ip_ipv6_parse_and_format_ip() {
let localhost_str = ~"::1";
let format_result = format_addr(v6::parse_addr(localhost_str));
log(debug, fmt!{"results: expected: '%s' actual: '%s'",
localhost_str, format_result});
log(debug, fmt!("results: expected: '%s' actual: '%s'",
localhost_str, format_result));
assert format_result == localhost_str;
}
#[test]
fn test_ip_ipv4_bad_parse() {
match v4::try_parse_addr(~"b4df00d") {
result::err(err_info) => {
log(debug, fmt!{"got error as expected %?", err_info});
log(debug, fmt!("got error as expected %?", err_info));
assert true;
}
result::ok(addr) => {
fail fmt!{"Expected failure, but got addr %?", addr};
fail fmt!("Expected failure, but got addr %?", addr);
}
}
}
@ -341,11 +341,11 @@ mod test {
fn test_ip_ipv6_bad_parse() {
match v6::try_parse_addr(~"::,~2234k;") {
result::err(err_info) => {
log(debug, fmt!{"got error as expected %?", err_info});
log(debug, fmt!("got error as expected %?", err_info));
assert true;
}
result::ok(addr) => {
fail fmt!{"Expected failure, but got addr %?", addr};
fail fmt!("Expected failure, but got addr %?", addr);
}
}
}
@ -361,15 +361,15 @@ mod test {
// note really sure how to realiably test/assert
// this.. mostly just wanting to see it work, atm.
let results = result::unwrap(ga_result);
log(debug, fmt!{"test_get_addr: Number of results for %s: %?",
localhost_name, vec::len(results)});
log(debug, fmt!("test_get_addr: Number of results for %s: %?",
localhost_name, vec::len(results)));
for vec::each(results) |r| {
let ipv_prefix = match r {
ipv4(_) => ~"IPv4",
ipv6(_) => ~"IPv6"
};
log(debug, fmt!{"test_get_addr: result %s: '%s'",
ipv_prefix, format_addr(r)});
log(debug, fmt!("test_get_addr: result %s: '%s'",
ipv_prefix, format_addr(r)));
}
// at least one result.. this is going to vary from system
// to system, based on stuff like the contents of /etc/hosts

@ -140,15 +140,15 @@ fn connect(-input_ip: ip::ip_addr, port: uint,
iotask: iotask
};
let socket_data_ptr = ptr::addr_of(*socket_data);
log(debug, fmt!{"tcp_connect result_ch %?", conn_data.result_ch});
log(debug, fmt!("tcp_connect result_ch %?", conn_data.result_ch));
// get an unsafe representation of our stream_handle_ptr that
// we can send into the interact cb to be handled in libuv..
log(debug, fmt!{"stream_handle_ptr outside interact %?",
stream_handle_ptr});
log(debug, fmt!("stream_handle_ptr outside interact %?",
stream_handle_ptr));
do iotask::interact(iotask) |loop_ptr| {
log(debug, ~"in interact cb for tcp client connect..");
log(debug, fmt!{"stream_handle_ptr in interact %?",
stream_handle_ptr});
log(debug, fmt!("stream_handle_ptr in interact %?",
stream_handle_ptr));
match uv::ll::tcp_init( loop_ptr, stream_handle_ptr) {
0i32 => {
log(debug, ~"tcp_init successful");
@ -165,7 +165,7 @@ fn connect(-input_ip: ip::ip_addr, port: uint,
// info.. should probably add an additional
// rust type that actually is closer to
// what the libuv API expects (ip str + port num)
log(debug, fmt!{"addr: %?", addr});
log(debug, fmt!("addr: %?", addr));
let in_addr = uv::ll::ip4_addr(addr_str, port as int);
uv::ll::tcp_connect(
connect_req_ptr,
@ -174,7 +174,7 @@ fn connect(-input_ip: ip::ip_addr, port: uint,
tcp_connect_on_connect_cb)
}
ip::ipv6(addr) => {
log(debug, fmt!{"addr: %?", addr});
log(debug, fmt!("addr: %?", addr));
let in_addr = uv::ll::ip6_addr(addr_str, port as int);
uv::ll::tcp_connect6(
connect_req_ptr,
@ -333,7 +333,7 @@ fn read_start(sock: tcp_socket)
fn read_stop(sock: tcp_socket,
-read_port: comm::Port<result::result<~[u8], tcp_err_data>>) ->
result::result<(), tcp_err_data> unsafe {
log(debug, fmt!{"taking the read_port out of commission %?", read_port});
log(debug, fmt!("taking the read_port out of commission %?", read_port));
let socket_data = ptr::addr_of(*sock.socket_data);
read_stop_common_impl(socket_data)
}
@ -617,13 +617,13 @@ fn listen_common(-host_ip: ip::ip_addr, port: uint, backlog: uint,
let addr_str = ip::format_addr(loc_ip);
let bind_result = match loc_ip {
ip::ipv4(addr) => {
log(debug, fmt!{"addr: %?", addr});
log(debug, fmt!("addr: %?", addr));
let in_addr = uv::ll::ip4_addr(addr_str, port as int);
uv::ll::tcp_bind(server_stream_ptr,
ptr::addr_of(in_addr))
}
ip::ipv6(addr) => {
log(debug, fmt!{"addr: %?", addr});
log(debug, fmt!("addr: %?", addr));
let in_addr = uv::ll::ip6_addr(addr_str, port as int);
uv::ll::tcp_bind6(server_stream_ptr,
ptr::addr_of(in_addr))
@ -661,8 +661,8 @@ fn listen_common(-host_ip: ip::ip_addr, port: uint, backlog: uint,
match setup_result {
some(err_data) => {
do iotask::interact(iotask) |loop_ptr| {
log(debug, fmt!{"tcp::listen post-kill recv hl interact %?",
loop_ptr});
log(debug, fmt!("tcp::listen post-kill recv hl interact %?",
loop_ptr));
(*server_data_ptr).active = false;
uv::ll::close(server_stream_ptr, tcp_lfc_close_cb);
};
@ -677,8 +677,8 @@ fn listen_common(-host_ip: ip::ip_addr, port: uint, backlog: uint,
result::err(address_in_use)
}
_ => {
log(debug, fmt!{"Got '%s' '%s' libuv error",
err_data.err_name, err_data.err_msg});
log(debug, fmt!("Got '%s' '%s' libuv error",
err_data.err_name, err_data.err_msg));
result::err(
generic_listen_err(err_data.err_name, err_data.err_msg))
}
@ -688,8 +688,8 @@ fn listen_common(-host_ip: ip::ip_addr, port: uint, backlog: uint,
on_establish_cb(kill_ch);
let kill_result = core::comm::recv(kill_po);
do iotask::interact(iotask) |loop_ptr| {
log(debug, fmt!{"tcp::listen post-kill recv hl interact %?",
loop_ptr});
log(debug, fmt!("tcp::listen post-kill recv hl interact %?",
loop_ptr));
(*server_data_ptr).active = false;
uv::ll::close(server_stream_ptr, tcp_lfc_close_cb);
};
@ -765,8 +765,8 @@ impl @tcp_socket_buf: io::Reader {
if err_data.err_name == ~"EOF" {
break;
} else {
debug!{"ERROR sock_buf as io::reader.read err %? %?",
err_data.err_name, err_data.err_msg};
debug!("ERROR sock_buf as io::reader.read err %? %?",
err_data.err_name, err_data.err_msg);

return 0;
}
@ -798,7 +798,7 @@ impl @tcp_socket_buf: io::Reader {
false // noop
}
fn seek(dist: int, seek: io::SeekStyle) {
log(debug, fmt!{"tcp_socket_buf seek stub %? %?", dist, seek});
log(debug, fmt!("tcp_socket_buf seek stub %? %?", dist, seek));
// noop
}
fn tell() -> uint {
@ -815,12 +815,12 @@ impl @tcp_socket_buf: io::Writer {
vec::slice(data, 0, vec::len(data)));
if w_result.is_err() {
let err_data = w_result.get_err();
log(debug, fmt!{"ERROR sock_buf as io::writer.writer err: %? %?",
err_data.err_name, err_data.err_msg});
log(debug, fmt!("ERROR sock_buf as io::writer.writer err: %? %?",
err_data.err_name, err_data.err_msg));
}
}
fn seek(dist: int, seek: io::SeekStyle) {
log(debug, fmt!{"tcp_socket_buf seek stub %? %?", dist, seek});
log(debug, fmt!("tcp_socket_buf seek stub %? %?", dist, seek));
// noop
}
fn tell() -> uint {
@ -845,14 +845,14 @@ fn tear_down_socket_data(socket_data: @tcp_socket_data) unsafe {
let close_data_ptr = ptr::addr_of(close_data);
let stream_handle_ptr = (*socket_data).stream_handle_ptr;
do iotask::interact((*socket_data).iotask) |loop_ptr| {
log(debug, fmt!{"interact dtor for tcp_socket stream %? loop %?",
stream_handle_ptr, loop_ptr});
log(debug, fmt!("interact dtor for tcp_socket stream %? loop %?",
stream_handle_ptr, loop_ptr));
uv::ll::set_data_for_uv_handle(stream_handle_ptr,
close_data_ptr);
uv::ll::close(stream_handle_ptr, tcp_socket_dtor_close_cb);
};
core::comm::recv(closed_po);
log(debug, fmt!{"about to free socket_data at %?", socket_data});
log(debug, fmt!("about to free socket_data at %?", socket_data));
rustrt::rust_uv_current_kernel_free(stream_handle_ptr
as *libc::c_void);
log(debug, ~"exiting dtor for tcp_socket");
@ -931,7 +931,7 @@ fn read_start_common_impl(socket_data: *tcp_socket_data)
let start_ch = core::comm::chan(start_po);
log(debug, ~"in tcp::read_start before interact loop");
do iotask::interact((*socket_data).iotask) |loop_ptr| {
log(debug, fmt!{"in tcp::read_start interact cb %?", loop_ptr});
log(debug, fmt!("in tcp::read_start interact cb %?", loop_ptr));
match uv::ll::read_start(stream_handle_ptr as *uv::ll::uv_stream_t,
on_alloc_cb,
on_tcp_read_cb) {
@ -971,7 +971,7 @@ fn write_common_impl(socket_data_ptr: *tcp_socket_data,
};
let write_data_ptr = ptr::addr_of(write_data);
do iotask::interact((*socket_data_ptr).iotask) |loop_ptr| {
log(debug, fmt!{"in interact cb for tcp::write %?", loop_ptr});
log(debug, fmt!("in interact cb for tcp::write %?", loop_ptr));
match uv::ll::write(write_req_ptr,
stream_handle_ptr,
write_buf_vec_ptr,
@ -1075,8 +1075,8 @@ impl uv::ll::uv_err_data: to_tcp_err {
extern fn on_tcp_read_cb(stream: *uv::ll::uv_stream_t,
nread: libc::ssize_t,
++buf: uv::ll::uv_buf_t) unsafe {
log(debug, fmt!{"entering on_tcp_read_cb stream: %? nread: %?",
stream, nread});
log(debug, fmt!("entering on_tcp_read_cb stream: %? nread: %?",
stream, nread));
let loop_ptr = uv::ll::get_loop_for_uv_handle(stream);
let socket_data_ptr = uv::ll::get_data_for_uv_handle(stream)
as *tcp_socket_data;
@ -1084,8 +1084,8 @@ extern fn on_tcp_read_cb(stream: *uv::ll::uv_stream_t,
// incoming err.. probably eof
-1 => {
let err_data = uv::ll::get_last_err_data(loop_ptr).to_tcp_err();
log(debug, fmt!{"on_tcp_read_cb: incoming err.. name %? msg %?",
err_data.err_name, err_data.err_msg});
log(debug, fmt!("on_tcp_read_cb: incoming err.. name %? msg %?",
err_data.err_name, err_data.err_msg));
let reader_ch = (*socket_data_ptr).reader_ch;
core::comm::send(reader_ch, result::err(err_data));
}
@ -1094,7 +1094,7 @@ extern fn on_tcp_read_cb(stream: *uv::ll::uv_stream_t,
// have data
_ => {
// we have data
log(debug, fmt!{"tcp on_read_cb nread: %d", nread as int});
log(debug, fmt!("tcp on_read_cb nread: %d", nread as int));
let reader_ch = (*socket_data_ptr).reader_ch;
let buf_base = uv::ll::get_base_from_buf(buf);
let new_bytes = vec::unsafe::from_buf(buf_base, nread as uint);
@ -1110,10 +1110,10 @@ extern fn on_alloc_cb(handle: *libc::c_void,
-> uv::ll::uv_buf_t unsafe {
log(debug, ~"tcp read on_alloc_cb!");
let char_ptr = uv::ll::malloc_buf_base_of(suggested_size);
log(debug, fmt!{"tcp read on_alloc_cb h: %? char_ptr: %u sugsize: %u",
log(debug, fmt!("tcp read on_alloc_cb h: %? char_ptr: %u sugsize: %u",
handle,
char_ptr as uint,
suggested_size as uint});
suggested_size as uint));
uv::ll::buf_init(char_ptr, suggested_size as uint)
}

@ -1160,11 +1160,11 @@ extern fn stream_error_close_cb(handle: *uv::ll::uv_tcp_t) unsafe {
let data = uv::ll::get_data_for_uv_handle(handle) as
*connect_req_data;
core::comm::send((*data).closed_signal_ch, ());
log(debug, fmt!{"exiting steam_error_close_cb for %?", handle});
log(debug, fmt!("exiting steam_error_close_cb for %?", handle));
}

extern fn tcp_connect_close_cb(handle: *uv::ll::uv_tcp_t) unsafe {
log(debug, fmt!{"closed client tcp handle %?", handle});
log(debug, fmt!("closed client tcp handle %?", handle));
}

extern fn tcp_connect_on_connect_cb(connect_req_ptr: *uv::ll::uv_connect_t,
@ -1172,7 +1172,7 @@ extern fn tcp_connect_on_connect_cb(connect_req_ptr: *uv::ll::uv_connect_t,
let conn_data_ptr = (uv::ll::get_data_for_req(connect_req_ptr)
as *connect_req_data);
let result_ch = (*conn_data_ptr).result_ch;
log(debug, fmt!{"tcp_connect result_ch %?", result_ch});
log(debug, fmt!("tcp_connect result_ch %?", result_ch));
let tcp_stream_ptr =
uv::ll::get_stream_handle_from_connect_req(connect_req_ptr);
match status {
@ -1184,8 +1184,8 @@ extern fn tcp_connect_on_connect_cb(connect_req_ptr: *uv::ll::uv_connect_t,
log(debug, ~"error in tcp_connect_on_connect_cb");
let loop_ptr = uv::ll::get_loop_for_uv_handle(tcp_stream_ptr);
let err_data = uv::ll::get_last_err_data(loop_ptr);
log(debug, fmt!{"err_data %? %?", err_data.err_name,
err_data.err_msg});
log(debug, fmt!("err_data %? %?", err_data.err_name,
err_data.err_msg));
core::comm::send(result_ch, conn_failure(err_data));
uv::ll::set_data_for_uv_handle(tcp_stream_ptr,
conn_data_ptr);
@ -1314,10 +1314,10 @@ mod test {
assert actual_resp_result.is_ok();
let actual_resp = actual_resp_result.get();
let actual_req = core::comm::recv(server_result_po);
log(debug, fmt!{"REQ: expected: '%s' actual: '%s'",
expected_req, actual_req});
log(debug, fmt!{"RESP: expected: '%s' actual: '%s'",
expected_resp, actual_resp});
log(debug, fmt!("REQ: expected: '%s' actual: '%s'",
expected_req, actual_req));
log(debug, fmt!("RESP: expected: '%s' actual: '%s'",
expected_resp, actual_resp));
assert str::contains(actual_req, expected_req);
assert str::contains(actual_resp, expected_resp);
}
@ -1453,27 +1453,27 @@ mod test {
};

let actual_req = core::comm::recv(server_result_po);
log(debug, fmt!{"REQ: expected: '%s' actual: '%s'",
expected_req, actual_req});
log(debug, fmt!{"RESP: expected: '%s' actual: '%s'",
expected_resp, actual_resp});
log(debug, fmt!("REQ: expected: '%s' actual: '%s'",
expected_req, actual_req));
log(debug, fmt!("RESP: expected: '%s' actual: '%s'",
expected_resp, actual_resp));
assert str::contains(actual_req, expected_req);
assert str::contains(actual_resp, expected_resp);
}

fn buf_write(+w: io::Writer, val: ~str) {
log(debug, fmt!{"BUF_WRITE: val len %?", str::len(val)});
log(debug, fmt!("BUF_WRITE: val len %?", str::len(val)));
do str::byte_slice(val) |b_slice| {
log(debug, fmt!{"BUF_WRITE: b_slice len %?",
vec::len(b_slice)});
log(debug, fmt!("BUF_WRITE: b_slice len %?",
vec::len(b_slice)));
w.write(b_slice)
}
}

fn buf_read(+r: io::Reader, len: uint) -> ~str {
let new_bytes = r.read_bytes(len);
log(debug, fmt!{"in buf_read.. new_bytes len: %?",
vec::len(new_bytes)});
log(debug, fmt!("in buf_read.. new_bytes len: %?",
vec::len(new_bytes)));
str::from_bytes(new_bytes)
}

@ -1485,8 +1485,8 @@ mod test {
let listen_result = listen(server_ip_addr, server_port, 128u, iotask,
// on_establish_cb -- called when listener is set up
|kill_ch| {
log(debug, fmt!{"establish_cb %?",
kill_ch});
log(debug, fmt!("establish_cb %?",
kill_ch));
core::comm::send(cont_ch, ());
},
// risky to run this on the loop, but some users
@ -1518,8 +1518,8 @@ mod test {
match received_req_bytes {
result::ok(data) => {
log(debug, ~"SERVER: got REQ str::from_bytes..");
log(debug, fmt!{"SERVER: REQ data len: %?",
vec::len(data)});
log(debug, fmt!("SERVER: REQ data len: %?",
vec::len(data)));
server_ch.send(
str::from_bytes(data));
log(debug, ~"SERVER: before write");
@ -1528,8 +1528,8 @@ mod test {
core::comm::send(kill_ch, none);
}
result::err(err_data) => {
log(debug, fmt!{"SERVER: error recvd: %s %s",
err_data.err_name, err_data.err_msg});
log(debug, fmt!("SERVER: error recvd: %s %s",
err_data.err_name, err_data.err_msg));
core::comm::send(kill_ch, some(err_data));
server_ch.send(~"");
}
@ -1546,8 +1546,8 @@ mod test {
if result::is_err(listen_result) {
match result::get_err(listen_result) {
generic_listen_err(name, msg) => {
fail fmt!{"SERVER: exited abnormally name %s msg %s",
name, msg};
fail fmt!("SERVER: exited abnormally name %s msg %s",
name, msg);
}
access_denied => {
fail ~"SERVER: exited abnormally, got access denied..";
@ -1558,7 +1558,7 @@ mod test {
}
}
let ret_val = server_ch.recv();
log(debug, fmt!{"SERVER: exited and got return val: '%s'", ret_val});
log(debug, fmt!("SERVER: exited and got return val: '%s'", ret_val));
ret_val
}

@ -1568,12 +1568,12 @@ mod test {
let listen_result = listen(server_ip_addr, server_port, 128u, iotask,
// on_establish_cb -- called when listener is set up
|kill_ch| {
log(debug, fmt!{"establish_cb %?",
kill_ch});
log(debug, fmt!("establish_cb %?",
kill_ch));
},
|new_conn, kill_ch| {
fail fmt!{"SERVER: shouldn't be called.. %? %?",
new_conn, kill_ch};
fail fmt!("SERVER: shouldn't be called.. %? %?",
new_conn, kill_ch);
});
// err check on listen_result
if result::is_err(listen_result) {
@ -1609,8 +1609,8 @@ mod test {
else {
client_ch.send(str::from_bytes(read_result.get()));
let ret_val = client_ch.recv();
log(debug, fmt!{"CLIENT: after client_ch recv ret: '%s'",
ret_val});
log(debug, fmt!("CLIENT: after client_ch recv ret: '%s'",
ret_val));
ok(ret_val)
}
}
@ -1622,8 +1622,8 @@ mod test {
if result::is_err(write_result) {
log(debug, ~"tcp_write_single: write failed!");
let err_data = result::get_err(write_result);
log(debug, fmt!{"tcp_write_single err name: %s msg: %s",
err_data.err_name, err_data.err_msg});
log(debug, fmt!("tcp_write_single err name: %s msg: %s",
err_data.err_name, err_data.err_msg));
// meh. torn on what to do here.
fail ~"tcp_write_single failed";
}

@ -47,11 +47,11 @@ fn map_slices<A: copy send, B: copy send>(
let len = end - base;
let slice = (ptr::offset(p, base),
len * sys::size_of::<A>());
log(info, fmt!{"pre-slice: %?", (base, slice)});
log(info, fmt!("pre-slice: %?", (base, slice)));
let slice : &[A] =
unsafe::reinterpret_cast(slice);
log(info, fmt!{"slice: %?",
(base, vec::len(slice), end - base)});
log(info, fmt!("slice: %?",
(base, vec::len(slice), end - base)));
assert(vec::len(slice) == end - base);
f(base, slice)
}
@ -62,7 +62,7 @@ fn map_slices<A: copy send, B: copy send>(
}
log(info, ~"tasks spawned");

log(info, fmt!{"num_tasks: %?", (num_tasks, futures.len())});
log(info, fmt!("num_tasks: %?", (num_tasks, futures.len())));
assert(num_tasks == futures.len());

let r = do futures.map() |ys| {

@ -8,63 +8,63 @@ impl Writer: serializer {
}

fn emit_uint(v: uint) {
self.write_str(fmt!{"%?u", v});
self.write_str(fmt!("%?u", v));
}

fn emit_u64(v: u64) {
self.write_str(fmt!{"%?_u64", v});
self.write_str(fmt!("%?_u64", v));
}

fn emit_u32(v: u32) {
self.write_str(fmt!{"%?_u32", v});
self.write_str(fmt!("%?_u32", v));
}

fn emit_u16(v: u16) {
self.write_str(fmt!{"%?_u16", v});
self.write_str(fmt!("%?_u16", v));
}

fn emit_u8(v: u8) {
self.write_str(fmt!{"%?_u8", v});
self.write_str(fmt!("%?_u8", v));
}

fn emit_int(v: int) {
self.write_str(fmt!{"%?", v});
self.write_str(fmt!("%?", v));
}

fn emit_i64(v: i64) {
self.write_str(fmt!{"%?_i64", v});
self.write_str(fmt!("%?_i64", v));
}

fn emit_i32(v: i32) {
self.write_str(fmt!{"%?_i32", v});
self.write_str(fmt!("%?_i32", v));
}

fn emit_i16(v: i16) {
self.write_str(fmt!{"%?_i16", v});
self.write_str(fmt!("%?_i16", v));
}

fn emit_i8(v: i8) {
self.write_str(fmt!{"%?_i8", v});
self.write_str(fmt!("%?_i8", v));
}

fn emit_bool(v: bool) {
self.write_str(fmt!{"%b", v});
self.write_str(fmt!("%b", v));
}

fn emit_float(v: float) {
self.write_str(fmt!{"%?_f", v});
self.write_str(fmt!("%?_f", v));
}

fn emit_f64(v: f64) {
self.write_str(fmt!{"%?_f64", v});
self.write_str(fmt!("%?_f64", v));
}

fn emit_f32(v: f32) {
self.write_str(fmt!{"%?_f32", v});
self.write_str(fmt!("%?_f32", v));
}

fn emit_str(v: &str) {
self.write_str(fmt!{"%?", v});
self.write_str(fmt!("%?", v));
}

fn emit_enum(_name: ~str, f: fn()) {

@ -27,7 +27,7 @@ fn mk<T: copy>() -> smallintmap<T> {
*/
#[inline(always)]
fn insert<T: copy>(self: smallintmap<T>, key: uint, val: T) {
//io::println(fmt!{"%?", key});
//io::println(fmt!("%?", key));
self.v.grow_set_elt(key, none, some(val));
}

@ -50,7 +50,7 @@ pure fn find<T: copy>(self: smallintmap<T>, key: uint) -> option<T> {
pure fn get<T: copy>(self: smallintmap<T>, key: uint) -> T {
match find(self, key) {
none => {
error!{"smallintmap::get(): key not present"};
error!("smallintmap::get(): key not present");
fail;
}
some(v) => return v

@ -250,7 +250,7 @@ mod test_qsort {
let pairs = vec::zip(expected, immut_names);
for vec::each(pairs) |p| {
let (a, b) = p;
debug!{"%d %d", a, b};
debug!("%d %d", a, b);
assert (a == b);
}
}

@ -110,9 +110,9 @@ fn run_tests_console(opts: test_opts,
te_filtered(filtered_tests) => {
st.total = vec::len(filtered_tests);
let noun = if st.total != 1u { ~"tests" } else { ~"test" };
st.out.write_line(fmt!{"\nrunning %u %s", st.total, noun});
st.out.write_line(fmt!("\nrunning %u %s", st.total, noun));
}
te_wait(test) => st.out.write_str(fmt!{"test %s ... ", test.name}),
te_wait(test) => st.out.write_str(fmt!("test %s ... ", test.name)),
te_result(test, result) => {
match st.log_out {
some(f) => write_log(f, result, test),
@ -145,7 +145,7 @@ fn run_tests_console(opts: test_opts,
~[io::Create, io::Truncate]) {
result::ok(w) => some(w),
result::err(s) => {
fail(fmt!{"can't open output file: %s", s})
fail(fmt!("can't open output file: %s", s))
}
},
none => none
@ -170,23 +170,23 @@ fn run_tests_console(opts: test_opts,
print_failures(st);
}

st.out.write_str(fmt!{"\nresult: "});
st.out.write_str(fmt!("\nresult: "));
if success {
// There's no parallelism at this point so it's safe to use color
write_ok(st.out, true);
} else { write_failed(st.out, true); }
st.out.write_str(fmt!{". %u passed; %u failed; %u ignored\n\n", st.passed,
st.failed, st.ignored});
st.out.write_str(fmt!(". %u passed; %u failed; %u ignored\n\n", st.passed,
st.failed, st.ignored));

return success;

fn write_log(out: io::Writer, result: test_result, test: test_desc) {
out.write_line(fmt!{"%s %s",
out.write_line(fmt!("%s %s",
match result {
tr_ok => ~"ok",
tr_failed => ~"failed",
tr_ignored => ~"ignored"
}, test.name});
}, test.name));
}

fn write_ok(out: io::Writer, use_color: bool) {
@ -218,7 +218,7 @@ fn print_failures(st: console_test_state) {
let failures = vec::map(failures, |test| test.name);
let failures = sort::merge_sort(str::le, failures);
for vec::each(failures) |name| {
st.out.write_line(fmt!{" %s", name});
st.out.write_line(fmt!(" %s", name));
}
}

@ -279,7 +279,7 @@ fn run_tests(opts: test_opts, tests: ~[test_desc],
// It's tempting to just spawn all the tests at once, but since we have
// many tests that run in other processes we would be making a big mess.
let concurrency = get_concurrency();
debug!{"using %u test tasks", concurrency};
debug!("using %u test tasks", concurrency);

let total = vec::len(filtered_tests);
let mut run_idx = 0u;

@ -200,9 +200,9 @@ fn strptime(s: ~str, format: ~str) -> result<tm, ~str> {
if c == ch {
ok(next)
} else {
err(fmt!{"Expected %?, found %?",
err(fmt!("Expected %?, found %?",
str::from_char(c),
str::from_char(ch)})
str::from_char(ch)))
}
}

@ -511,7 +511,7 @@ fn strptime(s: ~str, format: ~str) -> result<tm, ~str> {
}
'%' => parse_char(s, pos, '%'),
ch => {
err(fmt!{"unknown formatting type: %?", str::from_char(ch)})
err(fmt!("unknown formatting type: %?", str::from_char(ch)))
}
}
}
@ -625,70 +625,70 @@ fn strftime(format: ~str, tm: tm) -> ~str {
11 => ~"Dec",
_ => die()
},
'C' => fmt!{"%02d", (tm.tm_year as int + 1900) / 100},
'C' => fmt!("%02d", (tm.tm_year as int + 1900) / 100),
'c' => {
fmt!{"%s %s %s %s %s",
fmt!("%s %s %s %s %s",
parse_type('a', tm),
parse_type('b', tm),
parse_type('e', tm),
parse_type('T', tm),
parse_type('Y', tm)}
parse_type('Y', tm))
}
'D' | 'x' => {
fmt!{"%s/%s/%s",
fmt!("%s/%s/%s",
parse_type('m', tm),
parse_type('d', tm),
parse_type('y', tm)}
parse_type('y', tm))
}
'd' => fmt!{"%02d", tm.tm_mday as int},
'e' => fmt!{"%2d", tm.tm_mday as int},
'd' => fmt!("%02d", tm.tm_mday as int),
'e' => fmt!("%2d", tm.tm_mday as int),
'F' => {
fmt!{"%s-%s-%s",
fmt!("%s-%s-%s",
parse_type('Y', tm),
parse_type('m', tm),
parse_type('d', tm)}
parse_type('d', tm))
}
//'G' {}
//'g' {}
'H' => fmt!{"%02d", tm.tm_hour as int},
'H' => fmt!("%02d", tm.tm_hour as int),
'I' => {
let mut h = tm.tm_hour as int;
if h == 0 { h = 12 }
if h > 12 { h -= 12 }
fmt!{"%02d", h}
fmt!("%02d", h)
}
'j' => fmt!{"%03d", tm.tm_yday as int + 1},
'k' => fmt!{"%2d", tm.tm_hour as int},
'j' => fmt!("%03d", tm.tm_yday as int + 1),
'k' => fmt!("%2d", tm.tm_hour as int),
'l' => {
let mut h = tm.tm_hour as int;
if h == 0 { h = 12 }
if h > 12 { h -= 12 }
fmt!{"%2d", h}
fmt!("%2d", h)
}
'M' => fmt!{"%02d", tm.tm_min as int},
'm' => fmt!{"%02d", tm.tm_mon as int + 1},
'M' => fmt!("%02d", tm.tm_min as int),
'm' => fmt!("%02d", tm.tm_mon as int + 1),
'n' => ~"\n",
'P' => if tm.tm_hour as int < 12 { ~"am" } else { ~"pm" },
'p' => if tm.tm_hour as int < 12 { ~"AM" } else { ~"PM" },
'R' => {
fmt!{"%s:%s",
fmt!("%s:%s",
parse_type('H', tm),
parse_type('M', tm)}
parse_type('M', tm))
}
'r' => {
fmt!{"%s:%s:%s %s",
fmt!("%s:%s:%s %s",
parse_type('I', tm),
parse_type('M', tm),
parse_type('S', tm),
parse_type('p', tm)}
parse_type('p', tm))
}
'S' => fmt!{"%02d", tm.tm_sec as int},
's' => fmt!{"%d", tm.to_timespec().sec as int},
'S' => fmt!("%02d", tm.tm_sec as int),
's' => fmt!("%d", tm.to_timespec().sec as int),
'T' | 'X' => {
fmt!{"%s:%s:%s",
fmt!("%s:%s:%s",
parse_type('H', tm),
parse_type('M', tm),
parse_type('S', tm)}
parse_type('S', tm))
}
't' => ~"\t",
//'U' {}
@ -698,24 +698,24 @@ fn strftime(format: ~str, tm: tm) -> ~str {
}
//'V' {}
'v' => {
fmt!{"%s-%s-%s",
fmt!("%s-%s-%s",
parse_type('e', tm),
parse_type('b', tm),
parse_type('Y', tm)}
parse_type('Y', tm))
}
//'W' {}
'w' => int::str(tm.tm_wday as int),
//'X' {}
//'x' {}
'Y' => int::str(tm.tm_year as int + 1900),
'y' => fmt!{"%02d", (tm.tm_year as int + 1900) % 100},
'y' => fmt!("%02d", (tm.tm_year as int + 1900) % 100),
'Z' => tm.tm_zone,
'z' => {
let sign = if tm.tm_gmtoff > 0_i32 { '+' } else { '-' };
let mut m = i32::abs(tm.tm_gmtoff) / 60_i32;
let h = m / 60_i32;
m -= h * 60_i32;
fmt!{"%c%02d%02d", sign, h as int, m as int}
fmt!("%c%02d%02d", sign, h as int, m as int)
}
//'+' {}
'%' => ~"%",
@ -807,7 +807,7 @@ impl tm {
let mut m = i32::abs(self.tm_gmtoff) / 60_i32;
let h = m / 60_i32;
m -= h * 60_i32;
s + fmt!{"%c%02d:%02d", sign, h as int, m as int}
s + fmt!("%c%02d:%02d", sign, h as int, m as int)
}
}
}
@ -888,7 +888,7 @@ mod tests {
let time = { sec: 1234567890_i64, nsec: 54321_i32 };
let local = at(time);

error!{"time_at: %?", local};
error!("time_at: %?", local);

assert local.tm_sec == 30_i32;
assert local.tm_min == 31_i32;
@ -1094,7 +1094,7 @@ mod tests {
let utc = at_utc(time);
let local = at(time);

error!{"test_ctime: %? %?", utc.ctime(), local.ctime()};
error!("test_ctime: %? %?", utc.ctime(), local.ctime());

assert utc.ctime() == ~"Fri Feb 13 23:31:30 2009";
assert local.ctime() == ~"Fri Feb 13 15:31:30 2009";

@ -108,8 +108,8 @@ fn recv_timeout<T: copy send>(iotask: iotask,
// FIXME: This could be written clearer (#2618)
either::either(
|left_val| {
log(debug, fmt!{"recv_time .. left_val %?",
left_val});
log(debug, fmt!("recv_time .. left_val %?",
left_val));
none
}, |right_val| {
some(*right_val)
@ -120,7 +120,7 @@ fn recv_timeout<T: copy send>(iotask: iotask,
// INTERNAL API
extern fn delayed_send_cb(handle: *uv::ll::uv_timer_t,
status: libc::c_int) unsafe {
log(debug, fmt!{"delayed_send_cb handle %? status %?", handle, status});
log(debug, fmt!("delayed_send_cb handle %? status %?", handle, status));
let timer_done_ch =
*(uv::ll::get_data_for_uv_handle(handle) as *comm::Chan<()>);
let stop_result = uv::ll::timer_stop(handle);
@ -136,7 +136,7 @@ extern fn delayed_send_cb(handle: *uv::ll::uv_timer_t,
}

extern fn delayed_send_close_cb(handle: *uv::ll::uv_timer_t) unsafe {
log(debug, fmt!{"delayed_send_close_cb handle %?", handle});
log(debug, fmt!("delayed_send_close_cb handle %?", handle));
let timer_done_ch =
*(uv::ll::get_data_for_uv_handle(handle) as *comm::Chan<()>);
comm::send(timer_done_ch, ());

@ -37,10 +37,10 @@ fn get_monitor_task_gl() -> iotask unsafe {

let monitor_loop_chan_ptr = rustrt::rust_uv_get_kernel_global_chan_ptr();

debug!{"ENTERING global_loop::get() loop chan: %?",
monitor_loop_chan_ptr};
debug!("ENTERING global_loop::get() loop chan: %?",
monitor_loop_chan_ptr);

debug!{"before priv::chan_from_global_ptr"};
debug!("before priv::chan_from_global_ptr");
type monchan = Chan<iotask>;

let monitor_ch =
@ -50,31 +50,31 @@ fn get_monitor_task_gl() -> iotask unsafe {
(task::SingleThreaded)
.unlinked()
}) |msg_po| {
debug!{"global monitor task starting"};
debug!("global monitor task starting");

// As a weak task the runtime will notify us when to exit
do weaken_task() |weak_exit_po| {
debug!{"global monitor task is now weak"};
debug!("global monitor task is now weak");
let hl_loop = spawn_loop();
loop {
debug!{"in outer_loop..."};
debug!("in outer_loop...");
match select2(weak_exit_po, msg_po) {
Left(weak_exit) => {
// all normal tasks have ended, tell the
// libuv loop to tear_down, then exit
debug!{"weak_exit_po recv'd msg: %?", weak_exit};
debug!("weak_exit_po recv'd msg: %?", weak_exit);
iotask::exit(hl_loop);
break;
}
Right(fetch_ch) => {
debug!{"hl_loop req recv'd: %?", fetch_ch};
debug!("hl_loop req recv'd: %?", fetch_ch);
fetch_ch.send(hl_loop);
}
}
}
debug!{"global monitor task is leaving weakend state"};
debug!("global monitor task is leaving weakend state");
};
debug!{"global monitor task exiting"};
debug!("global monitor task exiting");
};

// once we have a chan to the monitor loop, we ask it for
@ -91,14 +91,14 @@ fn spawn_loop() -> iotask unsafe {
// The I/O loop task also needs to be weak so it doesn't keep
// the runtime alive
do weaken_task |weak_exit_po| {
debug!{"global libuv task is now weak %?", weak_exit_po};
debug!("global libuv task is now weak %?", weak_exit_po);
task_body();

// We don't wait for the exit message on weak_exit_po
// because the monitor task will tell the uv loop when to
// exit

debug!{"global libuv task is leaving weakened state"};
debug!("global libuv task is leaving weakened state");
}
}
};
@ -112,8 +112,8 @@ mod test {
timer_ptr as *libc::c_void) as *comm::Chan<bool>;
let exit_ch = *exit_ch_ptr;
core::comm::send(exit_ch, true);
log(debug, fmt!{"EXIT_CH_PTR simple_timer_close_cb exit_ch_ptr: %?",
exit_ch_ptr});
log(debug, fmt!("EXIT_CH_PTR simple_timer_close_cb exit_ch_ptr: %?",
exit_ch_ptr));
}
extern fn simple_timer_cb(timer_ptr: *ll::uv_timer_t,
_status: libc::c_int) unsafe {
@ -133,8 +133,8 @@ mod test {
let exit_po = core::comm::port::<bool>();
let exit_ch = core::comm::chan(exit_po);
let exit_ch_ptr = ptr::addr_of(exit_ch);
log(debug, fmt!{"EXIT_CH_PTR newly created exit_ch_ptr: %?",
exit_ch_ptr});
log(debug, fmt!("EXIT_CH_PTR newly created exit_ch_ptr: %?",
exit_ch_ptr));
let timer_handle = ll::timer_t();
let timer_ptr = ptr::addr_of(timer_handle);
do iotask::interact(iotask) |loop_ptr| {

@ -30,9 +30,9 @@ fn spawn_iotask(-task: task::TaskBuilder) -> iotask {
do listen |iotask_ch| {

do task.sched_mode(task::SingleThreaded).spawn {
debug!{"entering libuv task"};
debug!("entering libuv task");
run_loop(iotask_ch);
debug!{"libuv task exiting"};
debug!("libuv task exiting");
};

iotask_ch.recv()
@ -137,8 +137,8 @@ fn send_msg(iotask: iotask,
extern fn wake_up_cb(async_handle: *ll::uv_async_t,
status: int) unsafe {

log(debug, fmt!{"wake_up_cb extern.. handle: %? status: %?",
async_handle, status});
log(debug, fmt!("wake_up_cb extern.. handle: %? status: %?",
async_handle, status));

let loop_ptr = ll::get_loop_for_uv_handle(async_handle);
let data = ll::get_data_for_uv_handle(async_handle) as *iotask_loop_data;
@ -161,22 +161,22 @@ fn begin_teardown(data: *iotask_loop_data) unsafe {
extern fn tear_down_close_cb(handle: *ll::uv_async_t) unsafe {
let loop_ptr = ll::get_loop_for_uv_handle(handle);
let loop_refs = ll::loop_refcount(loop_ptr);
log(debug, fmt!{"tear_down_close_cb called, closing handle at %? refs %?",
handle, loop_refs});
log(debug, fmt!("tear_down_close_cb called, closing handle at %? refs %?",
handle, loop_refs));
assert loop_refs == 1i32;
}

#[cfg(test)]
mod test {
extern fn async_close_cb(handle: *ll::uv_async_t) unsafe {
log(debug, fmt!{"async_close_cb handle %?", handle});
log(debug, fmt!("async_close_cb handle %?", handle));
let exit_ch = (*(ll::get_data_for_uv_handle(handle)
as *ah_data)).exit_ch;
core::comm::send(exit_ch, ());
}
extern fn async_handle_cb(handle: *ll::uv_async_t, status: libc::c_int)
unsafe {
log(debug, fmt!{"async_handle_cb handle %? status %?",handle,status});
log(debug, fmt!("async_handle_cb handle %? status %?",handle,status));
ll::close(handle, async_close_cb);
}
type ah_data = {
@ -214,13 +214,13 @@ mod test {
}

extern fn lifetime_handle_close(handle: *libc::c_void) unsafe {
log(debug, fmt!{"lifetime_handle_close ptr %?", handle});
log(debug, fmt!("lifetime_handle_close ptr %?", handle));
}

extern fn lifetime_async_callback(handle: *libc::c_void,
status: libc::c_int) {
log(debug, fmt!{"lifetime_handle_close ptr %? status %?",
handle, status});
log(debug, fmt!("lifetime_handle_close ptr %? status %?",
handle, status));
}

#[test]

@ -707,8 +707,8 @@ unsafe fn tcp_connect(connect_ptr: *uv_connect_t,
addr_ptr: *sockaddr_in,
++after_connect_cb: *u8)
-> libc::c_int {
log(debug, fmt!{"b4 foreign tcp_connect--addr port: %u cb: %u",
(*addr_ptr).sin_port as uint, after_connect_cb as uint});
log(debug, fmt!("b4 foreign tcp_connect--addr port: %u cb: %u",
(*addr_ptr).sin_port as uint, after_connect_cb as uint));
return rustrt::rust_uv_tcp_connect(connect_ptr, tcp_handle_ptr,
after_connect_cb, addr_ptr);
}
@ -788,10 +788,10 @@ unsafe fn async_send(async_handle: *uv_async_t) {
unsafe fn buf_init(++input: *u8, len: uint) -> uv_buf_t {
let out_buf = { base: ptr::null(), len: 0 as libc::size_t };
let out_buf_ptr = ptr::addr_of(out_buf);
log(debug, fmt!{"buf_init - input %u len %u out_buf: %u",
log(debug, fmt!("buf_init - input %u len %u out_buf: %u",
input as uint,
len as uint,
out_buf_ptr as uint});
out_buf_ptr as uint));
// yuck :/
rustrt::rust_uv_buf_init(out_buf_ptr, input, len as size_t);
//let result = rustrt::rust_uv_buf_init_2(input, len as size_t);
@ -799,9 +799,9 @@ unsafe fn buf_init(++input: *u8, len: uint) -> uv_buf_t {
let res_base = get_base_from_buf(out_buf);
let res_len = get_len_from_buf(out_buf);
//let res_base = get_base_from_buf(result);
log(debug, fmt!{"buf_init - result %u len %u",
log(debug, fmt!("buf_init - result %u len %u",
res_base as uint,
res_len as uint});
res_len as uint));
return out_buf;
//return result;
}
@ -845,8 +845,8 @@ unsafe fn ip6_name(src: &sockaddr_in6) -> ~str {
0u8,0u8,0u8,0u8,0u8,0u8];
do vec::as_buf(dst) |dst_buf, size| {
let src_unsafe_ptr = assimilate(src);
log(debug, fmt!{"val of src *sockaddr_in6: %? sockaddr_in6: %?",
src_unsafe_ptr, src});
log(debug, fmt!("val of src *sockaddr_in6: %? sockaddr_in6: %?",
src_unsafe_ptr, src));
let result = rustrt::rust_uv_ip6_name(src_unsafe_ptr,
dst_buf, size as libc::size_t);
match result {
@ -962,8 +962,8 @@ unsafe fn get_last_err_info(uv_loop: *libc::c_void) -> ~str {
let err_ptr = ptr::addr_of(err);
let err_name = str::unsafe::from_c_str(err_name(err_ptr));
let err_msg = str::unsafe::from_c_str(strerror(err_ptr));
return fmt!{"LIBUV ERROR: name: %s msg: %s",
err_name, err_msg};
return fmt!("LIBUV ERROR: name: %s msg: %s",
err_name, err_msg);
}

unsafe fn get_last_err_data(uv_loop: *libc::c_void) -> uv_err_data {
@ -1013,8 +1013,8 @@ mod test {
};

extern fn after_close_cb(handle: *libc::c_void) {
log(debug, fmt!{"after uv_close! handle ptr: %?",
handle});
log(debug, fmt!("after uv_close! handle ptr: %?",
handle));
}

extern fn on_alloc_cb(handle: *libc::c_void,
@ -1022,10 +1022,10 @@ mod test {
-> uv_buf_t unsafe {
log(debug, ~"on_alloc_cb!");
let char_ptr = malloc_buf_base_of(suggested_size);
log(debug, fmt!{"on_alloc_cb h: %? char_ptr: %u sugsize: %u",
log(debug, fmt!("on_alloc_cb h: %? char_ptr: %u sugsize: %u",
handle,
char_ptr as uint,
suggested_size as uint});
suggested_size as uint));
return buf_init(char_ptr, suggested_size as uint);
}

@ -1033,11 +1033,11 @@ mod test {
nread: libc::ssize_t,
++buf: uv_buf_t) unsafe {
let nread = nread as int;
log(debug, fmt!{"CLIENT entering on_read_cb nred: %d",
nread});
log(debug, fmt!("CLIENT entering on_read_cb nred: %d",
nread));
if (nread > 0) {
// we have data
log(debug, fmt!{"CLIENT read: data! nread: %d", nread});
log(debug, fmt!("CLIENT read: data! nread: %d", nread));
read_stop(stream);
let client_data =
get_data_for_uv_handle(stream as *libc::c_void)
@ -1065,20 +1065,20 @@ mod test {

extern fn on_write_complete_cb(write_req: *uv_write_t,
status: libc::c_int) unsafe {
log(debug, fmt!{"CLIENT beginning on_write_complete_cb status: %d",
status as int});
log(debug, fmt!("CLIENT beginning on_write_complete_cb status: %d",
status as int));
let stream = get_stream_handle_from_write_req(write_req);
log(debug, fmt!{"CLIENT on_write_complete_cb: tcp:%d write_handle:%d",
stream as int, write_req as int});
log(debug, fmt!("CLIENT on_write_complete_cb: tcp:%d write_handle:%d",
stream as int, write_req as int));
let result = read_start(stream, on_alloc_cb, on_read_cb);
log(debug, fmt!{"CLIENT ending on_write_complete_cb .. status: %d",
result as int});
log(debug, fmt!("CLIENT ending on_write_complete_cb .. status: %d",
result as int));
}

extern fn on_connect_cb(connect_req_ptr: *uv_connect_t,
status: libc::c_int) unsafe {
log(debug, fmt!{"beginning on_connect_cb .. status: %d",
status as int});
log(debug, fmt!("beginning on_connect_cb .. status: %d",
status as int));
let stream =
get_stream_handle_from_connect_req(connect_req_ptr);
if (status == 0i32) {
@ -1087,14 +1087,14 @@ mod test {
connect_req_ptr as *libc::c_void)
as *request_wrapper;
let write_handle = (*client_data).write_req;
log(debug, fmt!{"on_connect_cb: tcp: %d write_hdl: %d",
stream as int, write_handle as int});
log(debug, fmt!("on_connect_cb: tcp: %d write_hdl: %d",
stream as int, write_handle as int));
let write_result = write(write_handle,
stream as *libc::c_void,
(*client_data).req_buf,
on_write_complete_cb);
log(debug, fmt!{"on_connect_cb: write() status: %d",
write_result as int});
log(debug, fmt!("on_connect_cb: write() status: %d",
write_result as int));
}
else {
let test_loop = get_loop_for_uv_handle(
@ -1121,7 +1121,7 @@ mod test {
// data field in our uv_connect_t struct
let req_str_bytes = str::bytes(req_str);
let req_msg_ptr: *u8 = vec::unsafe::to_ptr(req_str_bytes);
log(debug, fmt!{"req_msg ptr: %u", req_msg_ptr as uint});
log(debug, fmt!("req_msg ptr: %u", req_msg_ptr as uint));
let req_msg = ~[
buf_init(req_msg_ptr, vec::len(req_str_bytes))
];
@ -1129,9 +1129,9 @@ mod test {
// this to C..
let write_handle = write_t();
let write_handle_ptr = ptr::addr_of(write_handle);
log(debug, fmt!{"tcp req: tcp stream: %d write_handle: %d",
log(debug, fmt!("tcp req: tcp stream: %d write_handle: %d",
tcp_handle_ptr as int,
write_handle_ptr as int});
write_handle_ptr as int));
let client_data = { writer_handle: write_handle_ptr,
req_buf: ptr::addr_of(req_msg),
read_chan: client_chan };
@ -1145,12 +1145,12 @@ mod test {
let addr = ip4_addr(ip, port);
// FIXME ref #2064
let addr_ptr = ptr::addr_of(addr);
log(debug, fmt!{"after build addr in rust. port: %u",
addr.sin_port as uint});
log(debug, fmt!("after build addr in rust. port: %u",
addr.sin_port as uint));

// this should set up the connection request..
log(debug, fmt!{"b4 call tcp_connect connect cb: %u ",
on_connect_cb as uint});
log(debug, fmt!("b4 call tcp_connect connect cb: %u ",
on_connect_cb as uint));
let tcp_connect_result = tcp_connect(
connect_req_ptr, tcp_handle_ptr,
addr_ptr, on_connect_cb);
@ -1181,8 +1181,8 @@ mod test {
}

extern fn server_after_close_cb(handle: *libc::c_void) unsafe {
log(debug, fmt!{"SERVER server stream closed, should exit.. h: %?",
handle});
log(debug, fmt!("SERVER server stream closed, should exit.. h: %?",
handle));
}

extern fn client_stream_after_close_cb(handle: *libc::c_void)
@ -1210,15 +1210,15 @@ mod test {
let nread = nread as int;
if (nread > 0) {
// we have data
log(debug, fmt!{"SERVER read: data! nread: %d", nread});
log(debug, fmt!("SERVER read: data! nread: %d", nread));

// pull out the contents of the write from the client
let buf_base = get_base_from_buf(buf);
let buf_len = get_len_from_buf(buf) as uint;
log(debug, fmt!{"SERVER buf base: %u, len: %u, nread: %d",
log(debug, fmt!("SERVER buf base: %u, len: %u, nread: %d",
buf_base as uint,
buf_len as uint,
nread});
nread));
let bytes = vec::unsafe::from_buf(buf_base, buf_len);
let request_str = str::from_bytes(bytes);

@ -1238,8 +1238,8 @@ mod test {
client_stream_ptr as *libc::c_void,
(*client_data).server_resp_buf,
after_server_resp_write);
log(debug, fmt!{"SERVER: resp write result: %d",
write_result as int});
log(debug, fmt!("SERVER: resp write result: %d",
write_result as int));
if (write_result != 0i32) {
log(debug, ~"bad result for server resp write()");
log(debug, get_last_err_info(
@ -1273,8 +1273,8 @@ mod test {
server_stream_ptr as *libc::c_void);
if status != 0i32 {
let err_msg = get_last_err_info(test_loop);
log(debug, fmt!{"server_connect_cb: non-zero status: %?",
err_msg});
log(debug, fmt!("server_connect_cb: non-zero status: %?",
err_msg));
return;
}
let server_data = get_data_for_uv_handle(
@ -1301,20 +1301,20 @@ mod test {
log(debug, ~"successful server read start");
}
else {
log(debug, fmt!{"server_connection_cb: bad read:%d",
read_result as int});
log(debug, fmt!("server_connection_cb: bad read:%d",
read_result as int));
assert false;
}
}
else {
log(debug, fmt!{"server_connection_cb: bad accept: %d",
accept_result as int});
log(debug, fmt!("server_connection_cb: bad accept: %d",
accept_result as int));
assert false;
}
}
else {
log(debug, fmt!{"server_connection_cb: bad client init: %d",
client_init_result as int});
log(debug, fmt!("server_connection_cb: bad client init: %d",
client_init_result as int));
assert false;
}
}
@ -1333,8 +1333,8 @@ mod test {
};

extern fn async_close_cb(handle: *libc::c_void) {
log(debug, fmt!{"SERVER: closing async cb... h: %?",
handle});
log(debug, fmt!("SERVER: closing async cb... h: %?",
handle));
}

extern fn continue_async_cb(async_handle: *uv_async_t,
@ -1369,7 +1369,7 @@ mod test {

let resp_str_bytes = str::bytes(server_resp_msg);
let resp_msg_ptr: *u8 = vec::unsafe::to_ptr(resp_str_bytes);
log(debug, fmt!{"resp_msg ptr: %u", resp_msg_ptr as uint});
log(debug, fmt!("resp_msg ptr: %u", resp_msg_ptr as uint));
let resp_msg = ~[
buf_init(resp_msg_ptr, vec::len(resp_str_bytes))
];
@ -1428,26 +1428,26 @@ mod test {
log(debug, ~"server uv::run() has returned");
}
else {
log(debug, fmt!{"uv_async_init failure: %d",
async_result as int});
log(debug, fmt!("uv_async_init failure: %d",
async_result as int));
assert false;
}
}
else {
log(debug, fmt!{"non-zero result on uv_listen: %d",
listen_result as int});
log(debug, fmt!("non-zero result on uv_listen: %d",
listen_result as int));
assert false;
}
}
else {
log(debug, fmt!{"non-zero result on uv_tcp_bind: %d",
bind_result as int});
log(debug, fmt!("non-zero result on uv_tcp_bind: %d",
bind_result as int));
assert false;
}
}
else {
log(debug, fmt!{"non-zero result on uv_tcp_init: %d",
tcp_init_result as int});
log(debug, fmt!("non-zero result on uv_tcp_init: %d",
tcp_init_result as int));
assert false;
}
loop_delete(test_loop);
@ -1524,8 +1524,8 @@ mod test {
fn test_uv_ll_struct_size_uv_tcp_t() {
let foreign_handle_size = rustrt::rust_uv_helper_uv_tcp_t_size();
let rust_handle_size = sys::size_of::<uv_tcp_t>();
let output = fmt!{"uv_tcp_t -- foreign: %u rust: %u",
foreign_handle_size as uint, rust_handle_size};
let output = fmt!("uv_tcp_t -- foreign: %u rust: %u",
foreign_handle_size as uint, rust_handle_size);
log(debug, output);
assert foreign_handle_size as uint == rust_handle_size;
}
@ -1535,8 +1535,8 @@ mod test {
let foreign_handle_size =
rustrt::rust_uv_helper_uv_connect_t_size();
let rust_handle_size = sys::size_of::<uv_connect_t>();
let output = fmt!{"uv_connect_t -- foreign: %u rust: %u",
foreign_handle_size as uint, rust_handle_size};
let output = fmt!("uv_connect_t -- foreign: %u rust: %u",
foreign_handle_size as uint, rust_handle_size);
log(debug, output);
assert foreign_handle_size as uint == rust_handle_size;
}
@ -1546,8 +1546,8 @@ mod test {
let foreign_handle_size =
rustrt::rust_uv_helper_uv_buf_t_size();
let rust_handle_size = sys::size_of::<uv_buf_t>();
let output = fmt!{"uv_buf_t -- foreign: %u rust: %u",
foreign_handle_size as uint, rust_handle_size};
let output = fmt!("uv_buf_t -- foreign: %u rust: %u",
foreign_handle_size as uint, rust_handle_size);
log(debug, output);
assert foreign_handle_size as uint == rust_handle_size;
}
@ -1557,8 +1557,8 @@ mod test {
let foreign_handle_size =
rustrt::rust_uv_helper_uv_write_t_size();
let rust_handle_size = sys::size_of::<uv_write_t>();
let output = fmt!{"uv_write_t -- foreign: %u rust: %u",
foreign_handle_size as uint, rust_handle_size};
let output = fmt!("uv_write_t -- foreign: %u rust: %u",
foreign_handle_size as uint, rust_handle_size);
log(debug, output);
assert foreign_handle_size as uint == rust_handle_size;
}
@ -1569,8 +1569,8 @@ mod test {
let foreign_handle_size =
rustrt::rust_uv_helper_sockaddr_in_size();
let rust_handle_size = sys::size_of::<sockaddr_in>();
let output = fmt!{"sockaddr_in -- foreign: %u rust: %u",
foreign_handle_size as uint, rust_handle_size};
let output = fmt!("sockaddr_in -- foreign: %u rust: %u",
foreign_handle_size as uint, rust_handle_size);
log(debug, output);
assert foreign_handle_size as uint == rust_handle_size;
}
@ -1580,8 +1580,8 @@ mod test {
let foreign_handle_size =
rustrt::rust_uv_helper_sockaddr_in6_size();
let rust_handle_size = sys::size_of::<sockaddr_in6>();
let output = fmt!{"sockaddr_in6 -- foreign: %u rust: %u",
foreign_handle_size as uint, rust_handle_size};
let output = fmt!("sockaddr_in6 -- foreign: %u rust: %u",
foreign_handle_size as uint, rust_handle_size);
log(debug, output);
// FIXME #1645 .. rust appears to pad structs to the nearest byte..?
// .. can't get the uv::ll::sockaddr_in6 to == 28 :/
@ -1595,8 +1595,8 @@ mod test {
let foreign_handle_size =
rustrt::rust_uv_helper_addr_in_size();
let rust_handle_size = sys::size_of::<addr_in>();
let output = fmt!{"addr_in -- foreign: %u rust: %u",
foreign_handle_size as uint, rust_handle_size};
let output = fmt!("addr_in -- foreign: %u rust: %u",
foreign_handle_size as uint, rust_handle_size);
log(debug, output);
// FIXME #1645 .. see note above about struct padding
assert (4u+foreign_handle_size as uint) == rust_handle_size;
@ -1608,8 +1608,8 @@ mod test {
let foreign_handle_size =
rustrt::rust_uv_helper_uv_async_t_size();
let rust_handle_size = sys::size_of::<uv_async_t>();
let output = fmt!{"uv_async_t -- foreign: %u rust: %u",
foreign_handle_size as uint, rust_handle_size};
let output = fmt!("uv_async_t -- foreign: %u rust: %u",
foreign_handle_size as uint, rust_handle_size);
log(debug, output);
assert foreign_handle_size as uint == rust_handle_size;
}
@ -1620,8 +1620,8 @@ mod test {
let foreign_handle_size =
rustrt::rust_uv_helper_uv_timer_t_size();
let rust_handle_size = sys::size_of::<uv_timer_t>();
let output = fmt!{"uv_timer_t -- foreign: %u rust: %u",
foreign_handle_size as uint, rust_handle_size};
let output = fmt!("uv_timer_t -- foreign: %u rust: %u",
foreign_handle_size as uint, rust_handle_size);
log(debug, output);
assert foreign_handle_size as uint == rust_handle_size;
}
@ -1633,8 +1633,8 @@ mod test {
let foreign_handle_size =
rustrt::rust_uv_helper_uv_getaddrinfo_t_size();
let rust_handle_size = sys::size_of::<uv_getaddrinfo_t>();
let output = fmt!{"uv_getaddrinfo_t -- foreign: %u rust: %u",
foreign_handle_size as uint, rust_handle_size};
let output = fmt!("uv_getaddrinfo_t -- foreign: %u rust: %u",
foreign_handle_size as uint, rust_handle_size);
log(debug, output);
assert foreign_handle_size as uint == rust_handle_size;
}
@ -1646,8 +1646,8 @@ mod test {
let foreign_handle_size =
rustrt::rust_uv_helper_addrinfo_size();
let rust_handle_size = sys::size_of::<addrinfo>();
let output = fmt!{"addrinfo -- foreign: %u rust: %u",
foreign_handle_size as uint, rust_handle_size};
let output = fmt!("addrinfo -- foreign: %u rust: %u",
foreign_handle_size as uint, rust_handle_size);
log(debug, output);
assert foreign_handle_size as uint == rust_handle_size;
}

@ -25,7 +25,7 @@ fn path_ident_to_str(p: path, i: ident, itr: ident_interner) -> ~str {
//FIXME /* FIXME (#2543) */ copy *i
*itr.get(i)
} else {
fmt!{"%s::%s", path_to_str(p, itr), *itr.get(i)}
fmt!("%s::%s", path_to_str(p, itr), *itr.get(i))
}
}

@ -296,7 +296,7 @@ fn map_stmt(stmt: @stmt, cx: ctx, v: vt) {
fn node_id_to_str(map: map, id: node_id, itr: ident_interner) -> ~str {
match map.find(id) {
none => {
fmt!{"unknown node (id=%d)", id}
fmt!("unknown node (id=%d)", id)
}
some(node_item(item, path)) => {
let path_str = path_ident_to_str(*path, item.ident, itr);
@ -315,48 +315,48 @@ fn node_id_to_str(map: map, id: node_id, itr: ident_interner) -> ~str {
fmt!("%s %s (id=%?)", item_str, path_str, id)
}
some(node_foreign_item(item, abi, path)) => {
fmt!{"foreign item %s with abi %? (id=%?)",
path_ident_to_str(*path, item.ident, itr), abi, id}
fmt!("foreign item %s with abi %? (id=%?)",
path_ident_to_str(*path, item.ident, itr), abi, id)
}
some(node_method(m, impl_did, path)) => {
fmt!{"method %s in %s (id=%?)",
*itr.get(m.ident), path_to_str(*path, itr), id}
fmt!("method %s in %s (id=%?)",
*itr.get(m.ident), path_to_str(*path, itr), id)
}
some(node_trait_method(tm, impl_did, path)) => {
let m = ast_util::trait_method_to_ty_method(*tm);
fmt!{"method %s in %s (id=%?)",
*itr.get(m.ident), path_to_str(*path, itr), id}
fmt!("method %s in %s (id=%?)",
*itr.get(m.ident), path_to_str(*path, itr), id)
}
some(node_variant(variant, def_id, path)) => {
fmt!{"variant %s in %s (id=%?)",
*itr.get(variant.node.name), path_to_str(*path, itr), id}
fmt!("variant %s in %s (id=%?)",
*itr.get(variant.node.name), path_to_str(*path, itr), id)
}
some(node_expr(expr)) => {
fmt!{"expr %s (id=%?)", pprust::expr_to_str(expr, itr), id}
fmt!("expr %s (id=%?)", pprust::expr_to_str(expr, itr), id)
}
some(node_stmt(stmt)) => {
fmt!{"stmt %s (id=%?)",
pprust::stmt_to_str(*stmt, itr), id}
fmt!("stmt %s (id=%?)",
pprust::stmt_to_str(*stmt, itr), id)
}
// FIXMEs are as per #2410
some(node_export(_, path)) => {
fmt!{"export %s (id=%?)", // add more info here
path_to_str(*path, itr), id}
fmt!("export %s (id=%?)", // add more info here
path_to_str(*path, itr), id)
}
some(node_arg(_, _)) => { // add more info here
fmt!{"arg (id=%?)", id}
fmt!("arg (id=%?)", id)
}
some(node_local(_)) => { // add more info here
fmt!{"local (id=%?)", id}
fmt!("local (id=%?)", id)
}
some(node_ctor(*)) => { // add more info here
fmt!{"node_ctor (id=%?)", id}
fmt!("node_ctor (id=%?)", id)
}
some(node_dtor(*)) => { // add more info here
fmt!{"node_dtor (id=%?)", id}
fmt!("node_dtor (id=%?)", id)
}
some(node_block(_)) => {
fmt!{"block"}
fmt!("block")
}
}
}

@ -368,7 +368,7 @@ fn require_unique_names(diagnostic: span_handler,
// FIXME: How do I silence the warnings? --pcw (#2619)
if map.contains_key(name) {
diagnostic.span_fatal(meta.span,
fmt!{"duplicate meta item `%s`", name});
fmt!("duplicate meta item `%s`", name));
}
map.insert(name, ());
}

@ -73,7 +73,7 @@ fn new_filemap(+filename: filename, src: @~str,
fn mk_substr_filename(cm: codemap, sp: span) -> ~str
{
let pos = lookup_char_pos(cm, sp.lo);
return fmt!{"<%s:%u:%u>", pos.file.name, pos.line, pos.col};
return fmt!("<%s:%u:%u>", pos.file.name, pos.line, pos.col);
}

fn next_line(file: filemap, chpos: uint, byte_pos: uint) {
@ -93,7 +93,7 @@ fn lookup_line(map: codemap, pos: uint, lookup: lookup_fn)
if lookup(map.files[m].start_pos) > pos { b = m; } else { a = m; }
}
if (a >= len) {
fail fmt!{"position %u does not resolve to a source location", pos}
fail fmt!("position %u does not resolve to a source location", pos)
}
let f = map.files[a];
a = 0u;
@ -166,15 +166,15 @@ type span = {lo: uint, hi: uint, expn_info: expn_info};
fn span_to_str_no_adj(sp: span, cm: codemap) -> ~str {
let lo = lookup_char_pos(cm, sp.lo);
let hi = lookup_char_pos(cm, sp.hi);
return fmt!{"%s:%u:%u: %u:%u", lo.file.name,
lo.line, lo.col, hi.line, hi.col}
return fmt!("%s:%u:%u: %u:%u", lo.file.name,
lo.line, lo.col, hi.line, hi.col)
}

fn span_to_str(sp: span, cm: codemap) -> ~str {
let lo = lookup_char_pos_adj(cm, sp.lo);
let hi = lookup_char_pos_adj(cm, sp.hi);
return fmt!{"%s:%u:%u: %u:%u", lo.filename,
lo.line, lo.col, hi.line, hi.col}
return fmt!("%s:%u:%u: %u:%u", lo.filename,
lo.line, lo.col, hi.line, hi.col)
}

type file_lines = {file: filemap, lines: ~[uint]};

@ -91,8 +91,8 @@ impl handler_t: handler {
0u => return,
1u => s = ~"aborting due to previous error",
_ => {
s = fmt!{"aborting due to %u previous errors",
self.err_count};
s = fmt!("aborting due to %u previous errors",
self.err_count);
}
}
self.fatal(s);
@ -113,7 +113,7 @@ impl handler_t: handler {
}

fn ice_msg(msg: ~str) -> ~str {
fmt!{"internal compiler error: %s", msg}
fmt!("internal compiler error: %s", msg)
}

fn mk_span_handler(handler: handler, cm: codemap::codemap) -> span_handler {
@ -168,16 +168,16 @@ fn print_diagnostic(topic: ~str, lvl: level, msg: ~str) {
let use_color = term::color_supported() &&
io::stderr().get_type() == io::Screen;
if str::is_not_empty(topic) {
io::stderr().write_str(fmt!{"%s ", topic});
io::stderr().write_str(fmt!("%s ", topic));
}
if use_color {
term::fg(io::stderr(), diagnosticcolor(lvl));
}
io::stderr().write_str(fmt!{"%s:", diagnosticstr(lvl)});
io::stderr().write_str(fmt!("%s:", diagnosticstr(lvl)));
if use_color {
term::reset(io::stderr());
}
io::stderr().write_str(fmt!{" %s\n", msg});
io::stderr().write_str(fmt!(" %s\n", msg));
}

fn emit(cmsp: option<(codemap::codemap, span)>,
@ -212,13 +212,13 @@ fn highlight_lines(cm: codemap::codemap, sp: span,
}
// Print the offending lines
for display_lines.each |line| {
io::stderr().write_str(fmt!{"%s:%u ", fm.name, line + 1u});
io::stderr().write_str(fmt!("%s:%u ", fm.name, line + 1u));
let s = codemap::get_line(fm, line as int) + ~"\n";
io::stderr().write_str(s);
}
if elided {
let last_line = display_lines[vec::len(display_lines) - 1u];
let s = fmt!{"%s:%u ", fm.name, last_line + 1u};
let s = fmt!("%s:%u ", fm.name, last_line + 1u);
let mut indent = str::len(s);
let mut out = ~"";
while indent > 0u { out += ~" "; indent -= 1u; }
@ -257,7 +257,7 @@ fn print_macro_backtrace(cm: codemap::codemap, sp: span) {
let ss = option::map_default(ei.callie.span, @~"",
|span| @codemap::span_to_str(span, cm));
print_diagnostic(*ss, note,
fmt!{"in expansion of #%s", ei.callie.name});
fmt!("in expansion of #%s", ei.callie.name));
let ss = codemap::span_to_str(ei.call_site, cm);
print_diagnostic(ss, note, ~"expansion site");
print_macro_backtrace(cm, ei.call_site);

@ -356,7 +356,7 @@ fn ser_variant(cx: ext_ctxt,
argfn: fn(-@ast::expr, uint, ast::blk) -> @ast::expr)
-> ast::arm {
let vnames = do vec::from_fn(vec::len(tys)) |i| {
cx.parse_sess().interner.intern(@fmt!{"__v%u", i})
cx.parse_sess().interner.intern(@fmt!("__v%u", i))
};
let pats = do vec::from_fn(vec::len(tys)) |i| {
cx.binder_pat(tys[i].span, vnames[i])
@ -406,7 +406,7 @@ fn ser_ty(cx: ext_ctxt, tps: ser_tps_map,

ast::ty_bot => {
cx.span_err(
ty.span, fmt!{"Cannot serialize bottom type"});
ty.span, fmt!("Cannot serialize bottom type"));
~[]
}

@ -553,7 +553,7 @@ fn mk_ser_fn(cx: ext_ctxt, span: span, name: ast::ident,
ident: cx.ident_of(~"__s" + cx.str_of(tp.ident)),
id: cx.next_id()});

debug!{"tp_inputs = %?", tp_inputs};
debug!("tp_inputs = %?", tp_inputs);

let ser_inputs: ~[ast::arg] =
@ -574,7 +574,7 @@ fn mk_ser_fn(cx: ext_ctxt, span: span, name: ast::ident,
tp.ident,
fn@(v: @ast::expr) -> ~[@ast::stmt] {
let f = cx.var_ref(span, arg_ident);
debug!{"serializing type arg %s", cx.str_of(arg_ident)};
debug!("serializing type arg %s", cx.str_of(arg_ident));
~[#ast[stmt]{$(f)($(v));}]
});
}
@ -765,7 +765,7 @@ fn mk_deser_fn(cx: ext_ctxt, span: span,
ident: cx.ident_of(~"__d" + cx.str_of(tp.ident)),
id: cx.next_id()});

debug!{"tp_inputs = %?", tp_inputs};
debug!("tp_inputs = %?", tp_inputs);

let deser_inputs: ~[ast::arg] =
vec::append(~[{mode: ast::expl(ast::by_ref),

@ -267,21 +267,21 @@ fn get_mac_args(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
match max {
some(max) if ! (min <= elts_len && elts_len <= max) => {
cx.span_fatal(sp,
fmt!{"#%s takes between %u and %u arguments.",
name, min, max});
fmt!("#%s takes between %u and %u arguments.",
name, min, max));
}
none if ! (min <= elts_len) => {
cx.span_fatal(sp, fmt!{"#%s needs at least %u arguments.",
name, min});
cx.span_fatal(sp, fmt!("#%s needs at least %u arguments.",
name, min));
}
_ => return elts /* we are good */
}
}
_ => {
cx.span_fatal(sp, fmt!{"#%s: malformed invocation", name})
cx.span_fatal(sp, fmt!("#%s: malformed invocation", name))
}
},
none => cx.span_fatal(sp, fmt!{"#%s: missing arguments", name})
none => cx.span_fatal(sp, fmt!("#%s: missing arguments", name))
}
}

@ -31,12 +31,12 @@ fn expand_expr(exts: hashmap<~str, syntax_extension>, cx: ext_ctxt,
match exts.find(*extname) {
none => {
cx.span_fatal(pth.span,
fmt!{"macro undefined: '%s'", *extname})
fmt!("macro undefined: '%s'", *extname))
}
some(item_decorator(_)) => {
cx.span_fatal(
pth.span,
fmt!{"%s can only be used as a decorator", *extname});
fmt!("%s can only be used as a decorator", *extname));
}
some(normal({expander: exp, span: exp_sp})) => {
let expanded = exp(cx, mac.span, args, body);
@ -56,8 +56,8 @@ fn expand_expr(exts: hashmap<~str, syntax_extension>, cx: ext_ctxt,
}
some(expr_tt(_)) => {
cx.span_fatal(pth.span,
fmt!{"this tt-style macro should be \
invoked '%s!{...}'", *extname})
fmt!("this tt-style macro should be \
invoked '%s!(...)'", *extname))
}
some(item_tt(*)) => {
cx.span_fatal(pth.span,
@ -76,14 +76,14 @@ fn expand_expr(exts: hashmap<~str, syntax_extension>, cx: ext_ctxt,
match exts.find(*extname) {
none => {
cx.span_fatal(pth.span,
fmt!{"macro undefined: '%s'", *extname})
fmt!("macro undefined: '%s'", *extname))
}
some(expr_tt({expander: exp, span: exp_sp})) => {
let expanded = match exp(cx, mac.span, tts) {
mr_expr(e) => e,
_ => cx.span_fatal(
pth.span, fmt!{"non-expr macro in expr pos: %s",
*extname})
pth.span, fmt!("non-expr macro in expr pos: %s",
*extname))
};

cx.bt_push(expanded_from({call_site: s,
@ -110,8 +110,8 @@ fn expand_expr(exts: hashmap<~str, syntax_extension>, cx: ext_ctxt,
}
_ => {
cx.span_fatal(pth.span,
fmt!{"'%s' is not a tt-style macro",
*extname})
fmt!("'%s' is not a tt-style macro",
*extname))
}

}
@ -202,7 +202,7 @@ fn expand_item_mac(exts: hashmap<~str, syntax_extension>,
match exts.find(*extname) {
none => {
cx.span_fatal(pth.span,
fmt!{"macro undefined: '%s'", *extname})
fmt!("macro undefined: '%s'", *extname))
}
some(item_tt(expand)) => {
let expanded = expand.expander(cx, it.span, it.ident, tts);
@ -223,7 +223,7 @@ fn expand_item_mac(exts: hashmap<~str, syntax_extension>,
return maybe_it
}
_ => cx.span_fatal(it.span,
fmt!{"%s is not a legal here", *extname})
fmt!("%s is not a legal here", *extname))
}
}
_ => cx.span_bug(it.span, ~"invalid item macro invocation")

@ -18,7 +18,7 @@ fn expand_syntax_ext(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
expr_to_str(cx, args[0],
~"first argument to #fmt must be a string literal.");
let fmtspan = args[0].span;
debug!{"Format string:"};
debug!("Format string:");
log(debug, fmt);
fn parse_fmt_err_(cx: ext_ctxt, sp: span, msg: ~str) -> ! {
cx.span_fatal(sp, msg);
@ -193,15 +193,15 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span,
fn log_conv(c: conv) {
match c.param {
some(p) => { log(debug, ~"param: " + int::to_str(p, 10u)); }
_ => debug!{"param: none"}
_ => debug!("param: none")
}
for c.flags.each |f| {
match f {
flag_left_justify => debug!{"flag: left justify"},
flag_left_zero_pad => debug!{"flag: left zero pad"},
flag_space_for_sign => debug!{"flag: left space pad"},
flag_sign_always => debug!{"flag: sign always"},
flag_alternate => debug!{"flag: alternate"}
flag_left_justify => debug!("flag: left justify"),
flag_left_zero_pad => debug!("flag: left zero pad"),
flag_space_for_sign => debug!("flag: left space pad"),
flag_sign_always => debug!("flag: sign always"),
flag_alternate => debug!("flag: alternate")
}
}
match c.width {
@ -209,33 +209,33 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span,
debug, ~"width: count is " + int::to_str(i, 10u)),
count_is_param(i) => log(
debug, ~"width: count is param " + int::to_str(i, 10u)),
count_is_next_param => debug!{"width: count is next param"},
count_implied => debug!{"width: count is implied"}
count_is_next_param => debug!("width: count is next param"),
count_implied => debug!("width: count is implied")
}
match c.precision {
count_is(i) => log(
debug, ~"prec: count is " + int::to_str(i, 10u)),
count_is_param(i) => log(
debug, ~"prec: count is param " + int::to_str(i, 10u)),
count_is_next_param => debug!{"prec: count is next param"},
count_implied => debug!{"prec: count is implied"}
count_is_next_param => debug!("prec: count is next param"),
count_implied => debug!("prec: count is implied")
}
match c.ty {
ty_bool => debug!{"type: bool"},
ty_str => debug!{"type: str"},
ty_char => debug!{"type: char"},
ty_bool => debug!("type: bool"),
ty_str => debug!("type: str"),
ty_char => debug!("type: char"),
ty_int(s) => match s {
signed => debug!{"type: signed"},
unsigned => debug!{"type: unsigned"}
signed => debug!("type: signed"),
unsigned => debug!("type: unsigned")
},
ty_bits => debug!{"type: bits"},
ty_bits => debug!("type: bits"),
ty_hex(cs) => match cs {
case_upper => debug!{"type: uhex"},
case_lower => debug!{"type: lhex"},
case_upper => debug!("type: uhex"),
case_lower => debug!("type: lhex"),
},
ty_octal => debug!{"type: octal"},
ty_float => debug!{"type: float"},
ty_poly => debug!{"type: poly"}
ty_octal => debug!("type: octal"),
ty_float => debug!("type: float"),
ty_poly => debug!("type: poly")
}
}
let fmt_sp = args[0].span;
@ -254,7 +254,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span,
~"not enough arguments to #fmt " +
~"for the given format string");
}
debug!{"Building conversion:"};
debug!("Building conversion:");
log_conv(conv);
let arg_expr = args[n];
let c_expr = make_new_conv(cx, fmt_sp, conv, arg_expr);
@ -266,8 +266,8 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span,

if expected_nargs < nargs {
cx.span_fatal
(sp, fmt!{"too many arguments to #fmt. found %u, expected %u",
nargs, expected_nargs});
(sp, fmt!("too many arguments to #fmt. found %u, expected %u",
nargs, expected_nargs));
}

let arg_vec = mk_fixed_vec_e(cx, fmt_sp, piece_exprs);

@ -32,9 +32,9 @@ impl ext_ctxt: proto::visitor<(), (), ()> {
if state.messages.len() == 0 {
self.span_warn(
state.span, // use a real span!
fmt!{"state %s contains no messages, \
fmt!("state %s contains no messages, \
consider stepping to a terminal state instead",
state.name})
state.name))
}
}

@ -48,8 +48,8 @@ impl ext_ctxt: proto::visitor<(), (), ()> {
// track span information.
self.span_err(
proto.get_state(next).span,
fmt!{"message %s steps to undefined state, %s",
name, next});
fmt!("message %s steps to undefined state, %s",
name, next));
}
else {
let next = proto.get_state(next);
@ -57,11 +57,11 @@ impl ext_ctxt: proto::visitor<(), (), ()> {
if next.ty_params.len() != next_tys.len() {
self.span_err(
next.span, // use a real span
fmt!{"message %s target (%s) \
fmt!("message %s target (%s) \
needs %u type parameters, but got %u",
name, next.name,
next.ty_params.len(),
next_tys.len()});
next_tys.len()));
}
}
}

@ -32,7 +32,7 @@ import std::bitv::{bitv};
import ast_builder::empty_span;

fn analyze(proto: protocol, _cx: ext_ctxt) {
debug!{"initializing colive analysis"};
debug!("initializing colive analysis");
let num_states = proto.num_states();
let colive = do (copy proto.states).map_to_vec |state| {
let bv = ~bitv(num_states, false);
@ -46,7 +46,7 @@ fn analyze(proto: protocol, _cx: ext_ctxt) {
let mut changed = true;
while changed {
changed = false;
debug!{"colive iteration %?", i};
debug!("colive iteration %?", i);
for colive.eachi |i, this_colive| {
let this = proto.get_state_by_id(i);
for this_colive.ones |j| {
@ -59,7 +59,7 @@ fn analyze(proto: protocol, _cx: ext_ctxt) {
i += 1;
}

debug!{"colive analysis complete"};
debug!("colive analysis complete");

// Determine if we're bounded
let mut self_live = ~[];
@ -72,20 +72,20 @@ fn analyze(proto: protocol, _cx: ext_ctxt) {
if self_live.len() > 0 {
let states = str::connect(self_live.map(|s| s.name), ~" ");

debug!{"protocol %s is unbounded due to loops involving: %s",
proto.name, states};
debug!("protocol %s is unbounded due to loops involving: %s",
proto.name, states);

// Someday this will be configurable with a warning
//cx.span_warn(empty_span(),
// fmt!{"protocol %s is unbounded due to loops \
// fmt!("protocol %s is unbounded due to loops \
// involving these states: %s",
// *proto.name,
// states});
// states));

proto.bounded = some(false);
}
else {
debug!{"protocol %s is bounded. yay!", proto.name};
debug!("protocol %s is bounded. yay!", proto.name);
proto.bounded = some(true);
}
}
@ -39,7 +39,7 @@ trait gen_init {

impl message: gen_send {
fn gen_send(cx: ext_ctxt, try: bool) -> @ast::item {
debug!{"pipec: gen_send"};
debug!("pipec: gen_send");
match self {
message(id, span, tys, this,
some({state: next, tys: next_tys})) => {
@ -75,9 +75,9 @@ impl message: gen_send {
body += fmt!("let %s = pipes::send_packet_buffered(\
ptr::addr_of(b.buffer.data.%s));\n",
sp, next.name);
body += fmt!{"let %s = pipes::recv_packet_buffered(\
body += fmt!("let %s = pipes::recv_packet_buffered(\
ptr::addr_of(b.buffer.data.%s));\n",
rp, next.name};
rp, next.name);
}
else {
let pat = match (this.dir, next.dir) {
@ -97,7 +97,7 @@ impl message: gen_send {
~", "));

if !try {
body += fmt!{"pipes::send(pipe, message);\n"};
body += fmt!("pipes::send(pipe, message);\n");
// return the new channel
body += ~"c }";
}
@ -127,7 +127,7 @@ impl message: gen_send {
}

message(id, span, tys, this, none) => {
debug!{"pipec: no next state"};
debug!("pipec: no next state");
let arg_names = tys.mapi(|i, _ty| (~"x_" + i.to_str()));

let args_ast = (arg_names, tys).map(
@ -150,13 +150,13 @@ impl message: gen_send {
};

let mut body = ~"{ ";
body += fmt!{"let message = %s::%s%s;\n",
body += fmt!("let message = %s::%s%s;\n",
this.proto.name,
self.name(),
message_args};
message_args);

if !try {
body += fmt!{"pipes::send(pipe, message);\n"};
body += fmt!("pipes::send(pipe, message);\n");
body += ~" }";
} else {
body += fmt!("if pipes::send(pipe, message) { \
@ -192,7 +192,7 @@ impl message: gen_send {

impl state: to_type_decls {
fn to_type_decls(cx: ext_ctxt) -> ~[@ast::item] {
debug!{"pipec: to_type_decls"};
debug!("pipec: to_type_decls");
// This compiles into two different type declarations. Say the
// state is called ping. This will generate both `ping` and
// `ping_message`. The first contains data that the user cares
@ -238,7 +238,7 @@ impl state: to_type_decls {
}

fn to_endpoint_decls(cx: ext_ctxt, dir: direction) -> ~[@ast::item] {
debug!{"pipec: to_endpoint_decls"};
debug!("pipec: to_endpoint_decls");
let dir = match dir {
send => (*self).dir,
recv => (*self).dir.reverse()
@ -293,7 +293,7 @@ impl protocol: gen_init {
fn gen_init(cx: ext_ctxt) -> @ast::item {
let ext_cx = cx;

debug!{"gen_init"};
debug!("gen_init");
let start_state = self.states[0];

let body = if !self.is_bounded() {
@ -320,12 +320,12 @@ impl protocol: gen_init {
}
};

cx.parse_item(fmt!{"fn init%s() -> (client::%s, server::%s)\
cx.parse_item(fmt!("fn init%s() -> (client::%s, server::%s)\
{ import pipes::has_buffer; %s }",
start_state.ty_params.to_source(cx),
start_state.to_ty(cx).to_source(cx),
start_state.to_ty(cx).to_source(cx),
body.to_source(cx)})
body.to_source(cx)))
}

fn gen_buffer_init(ext_cx: ext_ctxt) -> @ast::expr {
@ -337,7 +337,7 @@ impl protocol: gen_init {
}

fn gen_init_bounded(ext_cx: ext_ctxt) -> @ast::expr {
debug!{"gen_init_bounded"};
debug!("gen_init_bounded");
let buffer_fields = self.gen_buffer_init(ext_cx);

let buffer = #ast {
@ -349,11 +349,11 @@ impl protocol: gen_init {
ext_cx.block(
self.states.map_to_vec(
|s| ext_cx.parse_stmt(
fmt!{"data.%s.set_buffer(buffer)",
s.name})),
fmt!("data.%s.set_buffer(buffer)",
s.name))),
ext_cx.parse_expr(
fmt!{"ptr::addr_of(data.%s)",
self.states[0].name})));
fmt!("ptr::addr_of(data.%s)",
self.states[0].name))));

#ast {{
let buffer = $(buffer);
@ -490,7 +490,7 @@ impl ext_ctxt: ext_ctxt_parse_utils {
match res {
some(ast) => ast,
none => {
error!{"Parse error with ```\n%s\n```", s};
error!("Parse error with ```\n%s\n```", s);
fail
}
}

@ -152,9 +152,9 @@ struct protocol_ {
let bounded = self.bounded.get();
bounded
//if bounded && self.has_ty_params() {
// debug!{"protocol %s has is bounded, but type parameters\
// debug!("protocol %s has is bounded, but type parameters\
// are not yet supported.",
// *self.name};
// *self.name);
// false
//}
//else { bounded }

@ -205,7 +205,7 @@ fn finish<T: qq_helper>
{
let cm = ecx.codemap();
let str = @codemap::span_to_snippet(body.span, cm);
debug!{"qquote--str==%?", str};
debug!("qquote--str==%?", str);
let fname = codemap::mk_substr_filename(cm, body.span);
let node = parse_from_source_str
(f, fname, codemap::fss_internal(body.span), str,
@ -231,7 +231,7 @@ fn finish<T: qq_helper>
do str::chars_iter(*str) |ch| {
if (j < g_len && i == cx.gather[j].lo) {
assert ch == '$';
let repl = fmt!{"$%u ", j};
let repl = fmt!("$%u ", j);
state = skip(str::char_len(repl));
str2 += repl;
}

@ -255,9 +255,9 @@ fn free_vars(b: bindings, e: @expr, it: fn(ident)) {

fn wrong_occurs(cx: ext_ctxt, l: ident, l_c: uint, r: ident, r_c: uint)
-> ~str {
fmt!{"'%s' occurs %u times, but '%s' occurs %u times",
fmt!("'%s' occurs %u times, but '%s' occurs %u times",
*cx.parse_sess().interner.get(l), l_c,
*cx.parse_sess().interner.get(r), r_c}
*cx.parse_sess().interner.get(r), r_c)
}

/* handle sequences (anywhere in the AST) of exprs, either real or ...ed */

@ -13,7 +13,7 @@ export expand_include;
export expand_include_str;
export expand_include_bin;

/* line!{}: expands to the current line number */
/* line!(): expands to the current line number */
fn expand_line(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
_body: ast::mac_body) -> @ast::expr {
get_mac_args(cx, sp, arg, 0u, option::some(0u), ~"line");
@ -21,7 +21,7 @@ fn expand_line(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
return mk_uint(cx, sp, loc.line);
}

/* col!{}: expands to the current column number */
/* col!(): expands to the current column number */
fn expand_col(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
_body: ast::mac_body) -> @ast::expr {
get_mac_args(cx, sp, arg, 0u, option::some(0u), ~"col");
@ -29,7 +29,7 @@ fn expand_col(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
return mk_uint(cx, sp, loc.col);
}

/* file!{}: expands to the current filename */
/* file!(): expands to the current filename */
/* The filemap (`loc.file`) contains a bunch more information we could spit
 * out if we wanted. */
fn expand_file(cx: ext_ctxt, sp: span, arg: ast::mac_arg,

@ -274,15 +274,15 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher])
let nts = str::connect(vec::map(bb_eis, |ei| {
match ei.elts[ei.idx].node {
match_nonterminal(bind,name,_) => {
fmt!{"%s ('%s')", *sess.interner.get(name),
*sess.interner.get(bind)}
fmt!("%s ('%s')", *sess.interner.get(name),
*sess.interner.get(bind))
}
_ => fail
} }), ~" or ");
return error(sp, fmt!{
return error(sp, fmt!(
"Local ambiguity: multiple parsing options: \
built-in NTs %s or %u other options.",
nts, next_eis.len()});
nts, next_eis.len()));
} else if (bb_eis.len() == 0u && next_eis.len() == 0u) {
return failure(sp, ~"No rules expected the token "
+ to_str(rdr.interner(), tok));

@ -112,9 +112,9 @@ fn lockstep_iter_size(t: token_tree, r: tt_reader) -> lis {
lis_constraint(r_len, r_id) => {
let l_n = *r.interner.get(l_id);
let r_n = *r.interner.get(r_id);
lis_contradiction(fmt!{"Inconsistent lockstep iteration: \
lis_contradiction(fmt!("Inconsistent lockstep iteration: \
'%s' has %u items, but '%s' has %u",
l_n, l_len, r_n, r_len})
l_n, l_len, r_n, r_len))
}
}
}
@ -231,8 +231,8 @@ fn tt_next_token(&&r: tt_reader) -> {tok: token, sp: span} {
matched_seq(*) => {
r.sp_diag.span_fatal(
copy r.cur_span, /* blame the macro writer */
fmt!{"variable '%s' is still repeating at this depth",
*r.interner.get(ident)});
fmt!("variable '%s' is still repeating at this depth",
*r.interner.get(ident)));
}
}
}

@ -118,7 +118,7 @@ fn consume_non_eol_whitespace(rdr: string_reader) {
}

fn push_blank_line_comment(rdr: string_reader, &comments: ~[cmnt]) {
debug!{">>> blank-line comment"};
debug!(">>> blank-line comment");
let v: ~[~str] = ~[];
vec::push(comments, {style: blank_line, lines: v, pos: rdr.chpos});
}
@ -136,9 +136,9 @@ fn consume_whitespace_counting_blank_lines(rdr: string_reader,

fn read_shebang_comment(rdr: string_reader, code_to_the_left: bool,
&comments: ~[cmnt]) {
debug!{">>> shebang comment"};
debug!(">>> shebang comment");
let p = rdr.chpos;
debug!{"<<< shebang comment"};
debug!("<<< shebang comment");
vec::push(comments, {
style: if code_to_the_left { trailing } else { isolated },
lines: ~[read_one_line_comment(rdr)],
@ -148,7 +148,7 @@ fn read_shebang_comment(rdr: string_reader, code_to_the_left: bool,

fn read_line_comments(rdr: string_reader, code_to_the_left: bool,
&comments: ~[cmnt]) {
debug!{">>> line comments"};
debug!(">>> line comments");
let p = rdr.chpos;
let mut lines: ~[~str] = ~[];
while rdr.curr == '/' && nextch(rdr) == '/' {
@ -160,7 +160,7 @@ fn read_line_comments(rdr: string_reader, code_to_the_left: bool,
vec::push(lines, line);
consume_non_eol_whitespace(rdr);
}
debug!{"<<< line comments"};
debug!("<<< line comments");
if !lines.is_empty() {
vec::push(comments, {
style: if code_to_the_left { trailing } else { isolated },
@ -193,7 +193,7 @@ fn trim_whitespace_prefix_and_push_line(&lines: ~[~str],

fn read_block_comment(rdr: string_reader, code_to_the_left: bool,
&comments: ~[cmnt]) {
debug!{">>> block comment"};
debug!(">>> block comment");
let p = rdr.chpos;
let mut lines: ~[~str] = ~[];
let mut col: uint = rdr.col;
@ -215,7 +215,7 @@ fn read_block_comment(rdr: string_reader, code_to_the_left: bool,
let mut curr_line = ~"/*";
let mut level: int = 1;
while level > 0 {
debug!{"=== block comment level %d", level};
debug!("=== block comment level %d", level);
if is_eof(rdr) {(rdr as reader).fatal(~"unterminated block comment");}
if rdr.curr == '\n' {
trim_whitespace_prefix_and_push_line(lines, curr_line, col);
@ -246,7 +246,7 @@ fn read_block_comment(rdr: string_reader, code_to_the_left: bool,
if !is_eof(rdr) && rdr.curr != '\n' && vec::len(lines) == 1u {
style = mixed;
}
debug!{"<<< block comment"};
debug!("<<< block comment");
vec::push(comments, {style: style, lines: lines, pos: p});
}

@ -258,7 +258,7 @@ fn peeking_at_comment(rdr: string_reader) -> bool {

fn consume_comment(rdr: string_reader, code_to_the_left: bool,
&comments: ~[cmnt]) {
debug!{">>> consume comment"};
debug!(">>> consume comment");
if rdr.curr == '/' && nextch(rdr) == '/' {
read_line_comments(rdr, code_to_the_left, comments);
} else if rdr.curr == '/' && nextch(rdr) == '*' {
@ -266,7 +266,7 @@ fn consume_comment(rdr: string_reader, code_to_the_left: bool,
} else if rdr.curr == '#' && nextch(rdr) == '!' {
read_shebang_comment(rdr, code_to_the_left, comments);
} else { fail; }
debug!{"<<< consume comment"};
debug!("<<< consume comment");
}

type lit = {lit: ~str, pos: uint};

@ -115,7 +115,7 @@ impl parser: parser_common {
// A sanity check that the word we are asking for is a known keyword
fn require_keyword(word: ~str) {
if !self.keywords.contains_key_ref(&word) {
self.bug(fmt!{"unknown keyword: %s", word});
self.bug(fmt!("unknown keyword: %s", word));
}
}

@ -20,9 +20,9 @@ fn eval_crate_directives(cx: ctx,
fn eval_crate_directives_to_mod(cx: ctx, cdirs: ~[@ast::crate_directive],
prefix: ~str, suffix: option<~str>)
-> (ast::_mod, ~[ast::attribute]) {
debug!{"eval crate prefix: %s", prefix};
debug!{"eval crate suffix: %s",
option::get_default(suffix, ~"none")};
debug!("eval crate prefix: %s", prefix);
debug!("eval crate suffix: %s",
option::get_default(suffix, ~"none"));
let (cview_items, citems, cattrs)
= parse_companion_mod(cx, prefix, suffix);
let mut view_items: ~[@ast::view_item] = ~[];
@ -63,9 +63,9 @@ fn parse_companion_mod(cx: ctx, prefix: ~str, suffix: option<~str>)
}

let modpath = companion_file(prefix, suffix);
debug!{"looking for companion mod %s", modpath};
debug!("looking for companion mod %s", modpath);
if file_exists(modpath) {
debug!{"found companion mod"};
debug!("found companion mod");
let (p0, r0) = new_parser_etc_from_file(cx.sess, cx.cfg,
modpath, SOURCE_FILE);
let inner_attrs = p0.parse_inner_attrs_and_next();

@ -419,8 +419,8 @@ fn scan_number(c: char, rdr: string_reader) -> token::token {
}
let parsed = option::get(u64::from_str_radix(num_str, base as u64));

debug!{"lexing %s as an unsuffixed integer literal",
num_str};
debug!("lexing %s as an unsuffixed integer literal",
num_str);
return token::LIT_INT_UNSUFFIXED(parsed as i64);
}
}
@ -431,7 +431,7 @@ fn scan_numeric_escape(rdr: string_reader, n_hex_digits: uint) -> char {
let n = rdr.curr;
bump(rdr);
if !is_hex_digit(n) {
rdr.fatal(fmt!{"illegal numeric character escape: %d", n as int});
rdr.fatal(fmt!("illegal numeric character escape: %d", n as int));
}
accum_int *= 16;
accum_int += hex_digit_val(n);
@ -578,7 +578,7 @@ fn next_token_inner(rdr: string_reader) -> token::token {
'u' => { c2 = scan_numeric_escape(rdr, 4u); }
'U' => { c2 = scan_numeric_escape(rdr, 8u); }
c2 => {
rdr.fatal(fmt!{"unknown character escape: %d", c2 as int});
rdr.fatal(fmt!("unknown character escape: %d", c2 as int));
}
}
}
@ -593,8 +593,8 @@ fn next_token_inner(rdr: string_reader) -> token::token {
bump(rdr);
while rdr.curr != '"' {
if is_eof(rdr) {
rdr.fatal(fmt!{"unterminated double quote string: %s",
get_str_from(rdr, n)});
rdr.fatal(fmt!("unterminated double quote string: %s",
get_str_from(rdr, n)));
}

let ch = rdr.curr;
@ -621,7 +621,7 @@ fn next_token_inner(rdr: string_reader) -> token::token {
str::push_char(accum_str, scan_numeric_escape(rdr, 8u));
}
c2 => {
rdr.fatal(fmt!{"unknown string escape: %d", c2 as int});
rdr.fatal(fmt!("unknown string escape: %d", c2 as int));
}
}
}
@ -656,7 +656,7 @@ fn next_token_inner(rdr: string_reader) -> token::token {
'/' => { return binop(rdr, token::SLASH); }
'^' => { return binop(rdr, token::CARET); }
'%' => { return binop(rdr, token::PERCENT); }
c => { rdr.fatal(fmt!{"unknown start of token: %d", c as int}); }
c => { rdr.fatal(fmt!("unknown start of token: %d", c as int)); }
}
}

@ -126,7 +126,7 @@ enum view_item_parse_mode {
/* The expr situation is not as complex as I thought it would be.
The important thing is to make sure that lookahead doesn't balk
at INTERPOLATED tokens */
macro_rules! maybe_whole_expr {
macro_rules! maybe_whole_expr (
{$p:expr} => { match copy $p.token {
INTERPOLATED(token::nt_expr(e)) => {
$p.bump();
@ -139,9 +139,9 @@ macro_rules! maybe_whole_expr {
}
_ => ()
}}
}
)

macro_rules! maybe_whole {
macro_rules! maybe_whole (
{$p:expr, $constructor:ident} => { match copy $p.token {
INTERPOLATED(token::$constructor(x)) => { $p.bump(); return x; }
_ => ()
@ -166,7 +166,7 @@ macro_rules! maybe_whole {
_ => ()
}}

}
)


pure fn maybe_append(+lhs: ~[attribute], rhs: option<~[attribute]>)
@ -325,13 +325,13 @@ struct parser {
let self_ty = if is_static { static_sty } else { self_ty };

let hi = p.last_span.hi;
debug!{"parse_trait_methods(): trait method signature ends in \
debug!("parse_trait_methods(): trait method signature ends in \
`%s`",
token_to_str(p.reader, p.token)};
token_to_str(p.reader, p.token));
match p.token {
token::SEMI => {
p.bump();
debug!{"parse_trait_methods(): parsing required method"};
debug!("parse_trait_methods(): parsing required method");
// NB: at the moment, visibility annotations on required
// methods are ignored; this could change.
required({ident: ident, attrs: attrs,
@ -340,7 +340,7 @@ struct parser {
id: p.get_id(), span: mk_sp(lo, hi)})
}
token::LBRACE => {
debug!{"parse_trait_methods(): parsing provided method"};
debug!("parse_trait_methods(): parsing provided method");
let (inner_attrs, body) =
p.parse_inner_attrs_and_block(true);
let attrs = vec::append(attrs, inner_attrs);
@ -441,7 +441,7 @@ struct parser {
}

fn parse_ty(colons_before_params: bool) -> @ty {
maybe_whole!{self, nt_ty};
maybe_whole!(self, nt_ty);

let lo = self.span.lo;

@ -708,7 +708,7 @@ struct parser {
parse_ident: fn(parser) -> ident,
parse_last_ident: fn(parser) -> ident) -> @path {

maybe_whole!{self, nt_path};
maybe_whole!(self, nt_path);
let lo = self.span.lo;
let global = self.eat(token::MOD_SEP);
let mut ids = ~[];
@ -735,9 +735,9 @@ struct parser {
}

fn parse_path_with_tps(colons: bool) -> @path {
debug!{"parse_path_with_tps(colons=%b)", colons};
debug!("parse_path_with_tps(colons=%b)", colons);

maybe_whole!{self, nt_path};
maybe_whole!(self, nt_path);
let lo = self.span.lo;
let path = self.parse_path_without_tps();
if colons && !self.eat(token::MOD_SEP) {
@ -829,7 +829,7 @@ struct parser {
}

fn parse_bottom_expr() -> pexpr {
maybe_whole_expr!{self};
maybe_whole_expr!(self);
let lo = self.span.lo;
let mut hi = self.span.hi;

@ -1197,7 +1197,7 @@ struct parser {
}

fn parse_token_tree() -> token_tree {
maybe_whole!{deref self, nt_tt};
maybe_whole!(deref self, nt_tt);

fn parse_tt_tok(p: parser, delim_ok: bool) -> token_tree {
match p.token {
@ -1249,7 +1249,7 @@ struct parser {
fn parse_matchers() -> ~[matcher] {
// unification of matchers and token_trees would vastly improve
// the interpolation of matchers
maybe_whole!{self, nt_matchers};
maybe_whole!(self, nt_matchers);
let name_idx = @mut 0u;
return match self.token {
token::LBRACE | token::LPAREN | token::LBRACKET => {
@ -1598,9 +1598,9 @@ struct parser {
// There may be other types of expressions that can
// represent the callee in `for` and `do` expressions
// but they aren't represented by tests
debug!{"sugary call on %?", e.node};
debug!("sugary call on %?", e.node);
self.span_fatal(
lo, fmt!{"`%s` must be followed by a block call", keyword});
lo, fmt!("`%s` must be followed by a block call", keyword));
}
}
}
@ -1801,7 +1801,7 @@ struct parser {
}

fn parse_pat(refutable: bool) -> @pat {
maybe_whole!{self, nt_pat};
maybe_whole!(self, nt_pat);

let lo = self.span.lo;
let mut hi = self.span.hi;
@ -2077,7 +2077,7 @@ struct parser {
}

fn parse_stmt(+first_item_attrs: ~[attribute]) -> @stmt {
maybe_whole!{self, nt_stmt};
maybe_whole!(self, nt_stmt);

fn check_expected_item(p: parser, current_attrs: ~[attribute]) {
// If we have attributes then we should have an item
@ -2140,7 +2140,7 @@ struct parser {
fn parse_inner_attrs_and_block(parse_attrs: bool)
-> (~[attribute], blk) {

maybe_whole!{pair_empty self, nt_block};
maybe_whole!(pair_empty self, nt_block);

fn maybe_parse_inner_attrs_and_next(p: parser, parse_attrs: bool) ->
{inner: ~[attribute], next: ~[attribute]} {
@ -2812,7 +2812,7 @@ struct parser {
token_to_str(self.reader, self.token) + ~"`");
}
}
debug!{"parse_mod_items: attrs=%?", attrs};
debug!("parse_mod_items: attrs=%?", attrs);
}

if first && attrs_remaining.len() > 0u {
@ -3139,7 +3139,7 @@ struct parser {

fn parse_item_or_view_item(+attrs: ~[attribute], items_allowed: bool)
-> item_or_view_item {
maybe_whole!{iovi self,nt_item};
maybe_whole!(iovi self,nt_item);
let lo = self.span.lo;

let visibility;
@ -3303,7 +3303,7 @@ struct parser {
let lo = self.span.lo;
let first_ident = self.parse_ident();
let mut path = ~[first_ident];
debug!{"parsed view_path: %s", *self.id_to_str(first_ident)};
debug!("parsed view_path: %s", *self.id_to_str(first_ident));
match self.token {
token::EQ => {
// x = foo::bar

@ -63,7 +63,7 @@ enum token { STRING(@~str, int), BREAK(break_t), BEGIN(begin_t), END, EOF, }

fn tok_str(++t: token) -> ~str {
match t {
STRING(s, len) => return fmt!{"STR(%s,%d)", *s, len},
STRING(s, len) => return fmt!("STR(%s,%d)", *s, len),
BREAK(_) => return ~"BREAK",
BEGIN(_) => return ~"BEGIN",
END => return ~"END",
@ -81,7 +81,7 @@ fn buf_str(toks: ~[mut token], szs: ~[mut int], left: uint, right: uint,
while i != right && L != 0u {
L -= 1u;
if i != left { s += ~", "; }
s += fmt!{"%d=%s", szs[i], tok_str(toks[i])};
s += fmt!("%d=%s", szs[i], tok_str(toks[i]));
i += 1u;
i %= n;
}
@ -99,7 +99,7 @@ fn mk_printer(out: io::Writer, linewidth: uint) -> printer {
// Yes 3, it makes the ring buffers big enough to never
// fall behind.
let n: uint = 3u * linewidth;
debug!{"mk_printer %u", linewidth};
debug!("mk_printer %u", linewidth);
let token: ~[mut token] = vec::to_mut(vec::from_elem(n, EOF));
let size: ~[mut int] = vec::to_mut(vec::from_elem(n, 0));
let scan_stack: ~[mut uint] = vec::to_mut(vec::from_elem(n, 0u));
@ -237,7 +237,7 @@ impl printer {
// be very careful with this!
fn replace_last_token(t: token) { self.token[self.right] = t; }
fn pretty_print(t: token) {
debug!{"pp ~[%u,%u]", self.left, self.right};
debug!("pp ~[%u,%u]", self.left, self.right);
match t {
EOF => {
if !self.scan_stack_empty {
@ -254,18 +254,18 @@ impl printer {
self.left = 0u;
self.right = 0u;
} else { self.advance_right(); }
debug!{"pp BEGIN(%d)/buffer ~[%u,%u]",
b.offset, self.left, self.right};
debug!("pp BEGIN(%d)/buffer ~[%u,%u]",
b.offset, self.left, self.right);
self.token[self.right] = t;
self.size[self.right] = -self.right_total;
self.scan_push(self.right);
}
END => {
if self.scan_stack_empty {
debug!{"pp END/print ~[%u,%u]", self.left, self.right};
debug!("pp END/print ~[%u,%u]", self.left, self.right);
self.print(t, 0);
} else {
debug!{"pp END/buffer ~[%u,%u]", self.left, self.right};
debug!("pp END/buffer ~[%u,%u]", self.left, self.right);
self.advance_right();
self.token[self.right] = t;
self.size[self.right] = -1;
@ -279,8 +279,8 @@ impl printer {
self.left = 0u;
self.right = 0u;
} else { self.advance_right(); }
debug!{"pp BREAK(%d)/buffer ~[%u,%u]",
b.offset, self.left, self.right};
debug!("pp BREAK(%d)/buffer ~[%u,%u]",
b.offset, self.left, self.right);
self.check_stack(0);
self.scan_push(self.right);
self.token[self.right] = t;
@ -289,12 +289,12 @@ impl printer {
}
STRING(s, len) => {
if self.scan_stack_empty {
debug!{"pp STRING('%s')/print ~[%u,%u]",
*s, self.left, self.right};
debug!("pp STRING('%s')/print ~[%u,%u]",
*s, self.left, self.right);
self.print(t, len);
} else {
debug!{"pp STRING('%s')/buffer ~[%u,%u]",
*s, self.left, self.right};
debug!("pp STRING('%s')/buffer ~[%u,%u]",
*s, self.left, self.right);
self.advance_right();
self.token[self.right] = t;
self.size[self.right] = len;
@ -305,14 +305,14 @@ impl printer {
}
}
fn check_stream() {
debug!{"check_stream ~[%u, %u] with left_total=%d, right_total=%d",
self.left, self.right, self.left_total, self.right_total};
debug!("check_stream ~[%u, %u] with left_total=%d, right_total=%d",
self.left, self.right, self.left_total, self.right_total);
if self.right_total - self.left_total > self.space {
debug!{"scan window is %d, longer than space on line (%d)",
self.right_total - self.left_total, self.space};
debug!("scan window is %d, longer than space on line (%d)",
self.right_total - self.left_total, self.space);
if !self.scan_stack_empty {
if self.left == self.scan_stack[self.bottom] {
debug!{"setting %u to infinity and popping", self.left};
debug!("setting %u to infinity and popping", self.left);
self.size[self.scan_pop_bottom()] = size_infinity;
}
}
@ -321,7 +321,7 @@ impl printer {
}
}
fn scan_push(x: uint) {
debug!{"scan_push %u", x};
debug!("scan_push %u", x);
if self.scan_stack_empty {
self.scan_stack_empty = false;
} else {
@ -357,8 +357,8 @@ impl printer {
assert (self.right != self.left);
}
fn advance_left(++x: token, L: int) {
debug!{"advnce_left ~[%u,%u], sizeof(%u)=%d", self.left, self.right,
self.left, L};
debug!("advnce_left ~[%u,%u], sizeof(%u)=%d", self.left, self.right,
self.left, L);
if L >= 0 {
self.print(x, L);
match x {
@ -398,13 +398,13 @@ impl printer {
}
}
fn print_newline(amount: int) {
debug!{"NEWLINE %d", amount};
debug!("NEWLINE %d", amount);
self.out.write_str(~"\n");
self.pending_indentation = 0;
self.indent(amount);
}
fn indent(amount: int) {
debug!{"INDENT %d", amount};
debug!("INDENT %d", amount);
self.pending_indentation += amount;
}
fn get_top() -> print_stack_elt {
@ -423,24 +423,24 @@ impl printer {
self.out.write_str(s);
}
fn print(x: token, L: int) {
debug!{"print %s %d (remaining line space=%d)", tok_str(x), L,
self.space};
debug!("print %s %d (remaining line space=%d)", tok_str(x), L,
self.space);
log(debug, buf_str(self.token, self.size, self.left, self.right, 6u));
match x {
BEGIN(b) => {
if L > self.space {
let col = self.margin - self.space + b.offset;
debug!{"print BEGIN -> push broken block at col %d", col};
debug!("print BEGIN -> push broken block at col %d", col);
self.print_stack.push({offset: col,
pbreak: broken(b.breaks)});
} else {
debug!{"print BEGIN -> push fitting block"};
debug!("print BEGIN -> push fitting block");
self.print_stack.push({offset: 0,
pbreak: fits});
}
}
END => {
debug!{"print END -> pop END"};
debug!("print END -> pop END");
assert (self.print_stack.len() != 0u);
self.print_stack.pop();
}
@ -448,25 +448,25 @@ impl printer {
let top = self.get_top();
match top.pbreak {
fits => {
debug!{"print BREAK(%d) in fitting block", b.blank_space};
debug!("print BREAK(%d) in fitting block", b.blank_space);
self.space -= b.blank_space;
self.indent(b.blank_space);
}
broken(consistent) => {
debug!{"print BREAK(%d+%d) in consistent block",
top.offset, b.offset};
debug!("print BREAK(%d+%d) in consistent block",
top.offset, b.offset);
self.print_newline(top.offset + b.offset);
self.space = self.margin - (top.offset + b.offset);
}
broken(inconsistent) => {
if L > self.space {
debug!{"print BREAK(%d+%d) w/ newline in inconsistent",
top.offset, b.offset};
debug!("print BREAK(%d+%d) w/ newline in inconsistent",
top.offset, b.offset);
self.print_newline(top.offset + b.offset);
self.space = self.margin - (top.offset + b.offset);
} else {
debug!{"print BREAK(%d) w/o newline in inconsistent",
b.blank_space};
debug!("print BREAK(%d) w/o newline in inconsistent",
b.blank_space);
self.indent(b.blank_space);
self.space -= b.blank_space;
}
@ -474,7 +474,7 @@ impl printer {
}
}
STRING(s, len) => {
debug!{"print STRING(%s)", *s};
debug!("print STRING(%s)", *s);
assert (L == len);
// assert L <= space;
self.space -= len;

@ -937,14 +937,14 @@ fn print_mac(s: ps, m: ast::mac) {
bclose(s, m.span);
}
ast::mac_ellipsis => word(s.s, ~"..."),
ast::mac_var(v) => word(s.s, fmt!{"$%u", v}),
ast::mac_var(v) => word(s.s, fmt!("$%u", v)),
_ => { /* fixme */ }
}
}

fn print_vstore(s: ps, t: ast::vstore) {
match t {
ast::vstore_fixed(some(i)) => word(s.s, fmt!{"%u", i}),
ast::vstore_fixed(some(i)) => word(s.s, fmt!("%u", i)),
ast::vstore_fixed(none) => word(s.s, ~"_"),
ast::vstore_uniq => word(s.s, ~"~"),
ast::vstore_box => word(s.s, ~"@"),

@ -343,7 +343,7 @@ fn build_link_meta(sess: session, c: ast::crate, output: ~str,
metas: provided_metas,
dep_hashes: ~[~str]) -> ~str {
fn len_and_str(s: ~str) -> ~str {
return fmt!{"%u_%s", str::len(s), s};
return fmt!("%u_%s", str::len(s), s);
}

fn len_and_str_lit(l: ast::lit) -> ~str {
@ -379,8 +379,8 @@ fn build_link_meta(sess: session, c: ast::crate, output: ~str,

fn warn_missing(sess: session, name: ~str, default: ~str) {
if !sess.building_library { return; }
sess.warn(fmt!{"missing crate link meta `%s`, using `%s` as default",
name, default});
sess.warn(fmt!("missing crate link meta `%s`, using `%s` as default",
name, default));
}

fn crate_meta_name(sess: session, _crate: ast::crate,
@ -393,8 +393,8 @@ fn build_link_meta(sess: session, c: ast::crate, output: ~str,
let mut os =
str::split_char(path::basename(output), '.');
if (vec::len(os) < 2u) {
sess.fatal(fmt!{"output file name `%s` doesn't\
appear to have an extension", output});
sess.fatal(fmt!("output file name `%s` doesn't\
appear to have an extension", output));
}
vec::pop(os);
str::connect(os, ~".")
@ -505,7 +505,7 @@ fn mangle(sess: session, ss: path) -> ~str {
for ss.each |s| {
match s { path_name(s) | path_mod(s) => {
let sani = sanitize(sess.str_of(s));
n += fmt!{"%u%s", str::len(sani), sani};
n += fmt!("%u%s", str::len(sani), sani);
} }
}
n += ~"E"; // End name-sequence.
@ -581,12 +581,12 @@ fn link_binary(sess: session,

let output = if sess.building_library {
let long_libname =
os::dll_filename(fmt!{"%s-%s-%s",
lm.name, lm.extras_hash, lm.vers});
debug!{"link_meta.name: %s", lm.name};
debug!{"long_libname: %s", long_libname};
debug!{"out_filename: %s", out_filename};
debug!{"dirname(out_filename): %s", path::dirname(out_filename)};
os::dll_filename(fmt!("%s-%s-%s",
lm.name, lm.extras_hash, lm.vers));
debug!("link_meta.name: %s", lm.name);
debug!("long_libname: %s", long_libname);
debug!("out_filename: %s", out_filename);
debug!("dirname(out_filename): %s", path::dirname(out_filename));

path::connect(path::dirname(out_filename), long_libname)
} else { out_filename };
@ -703,14 +703,14 @@ fn link_binary(sess: session,
// extern libraries might live, based on the addl_lib_search_paths
vec::push_all(cc_args, rpath::get_rpath_flags(sess, output));

debug!{"%s link args: %s", cc_prog, str::connect(cc_args, ~" ")};
debug!("%s link args: %s", cc_prog, str::connect(cc_args, ~" "));
// We run 'cc' here
let prog = run::program_output(cc_prog, cc_args);
if 0 != prog.status {
sess.err(fmt!{"linking with `%s` failed with code %d",
cc_prog, prog.status});
sess.note(fmt!{"%s arguments: %s",
cc_prog, str::connect(cc_args, ~" ")});
sess.err(fmt!("linking with `%s` failed with code %d",
cc_prog, prog.status));
sess.note(fmt!("%s arguments: %s",
cc_prog, str::connect(cc_args, ~" ")));
sess.note(prog.err + prog.out);
sess.abort_if_errors();
}
@ -723,8 +723,8 @@ fn link_binary(sess: session,
// Remove the temporary object file if we aren't saving temps
if !sess.opts.save_temps {
if ! os::remove_file(obj_filename) {
sess.warn(fmt!{"failed to delete object file `%s`",
obj_filename});
sess.warn(fmt!("failed to delete object file `%s`",
obj_filename));
}
}
}

@ -21,7 +21,7 @@ fn get_rpath_flags(sess: session::session, out_filename: ~str) -> ~[~str] {
return ~[];
}

debug!{"preparing the RPATH!"};
debug!("preparing the RPATH!");

let cwd = os::getcwd();
let sysroot = sess.filesearch.sysroot();
@ -45,20 +45,20 @@ fn get_sysroot_absolute_rt_lib(sess: session::session) -> path::Path {
}

fn rpaths_to_flags(rpaths: ~[~str]) -> ~[~str] {
vec::map(rpaths, |rpath| fmt!{"-Wl,-rpath,%s",rpath} )
vec::map(rpaths, |rpath| fmt!("-Wl,-rpath,%s",rpath) )
}

fn get_rpaths(os: session::os, cwd: path::Path, sysroot: path::Path,
output: path::Path, libs: ~[path::Path],
target_triple: ~str) -> ~[~str] {
debug!{"cwd: %s", cwd};
debug!{"sysroot: %s", sysroot};
debug!{"output: %s", output};
debug!{"libs:"};
debug!("cwd: %s", cwd);
debug!("sysroot: %s", sysroot);
debug!("output: %s", output);
debug!("libs:");
for libs.each |libpath| {
debug!{" %s", libpath};
debug!(" %s", libpath);
}
debug!{"target_triple: %s", target_triple};
debug!("target_triple: %s", target_triple);

// Use relative paths to the libraries. Binaries can be moved
// as long as they maintain the relative relationship to the
@ -73,9 +73,9 @@ fn get_rpaths(os: session::os, cwd: path::Path, sysroot: path::Path,
let fallback_rpaths = ~[get_install_prefix_rpath(cwd, target_triple)];

fn log_rpaths(desc: ~str, rpaths: ~[~str]) {
debug!{"%s rpaths:", desc};
debug!("%s rpaths:", desc);
for rpaths.each |rpath| {
debug!{" %s", rpath};
debug!(" %s", rpath);
}
}

@ -124,8 +124,8 @@ fn get_rpath_relative_to_output(os: session::os,
fn get_relative_to(abs1: path::Path, abs2: path::Path) -> path::Path {
assert path::path_is_absolute(abs1);
assert path::path_is_absolute(abs2);
debug!{"finding relative path from %s to %s",
abs1, abs2};
debug!("finding relative path from %s to %s",
abs1, abs2);
let normal1 = path::normalize(abs1);
let normal2 = path::normalize(abs2);
let split1 = path::split(normal1);
@ -171,7 +171,7 @@ fn get_absolute(cwd: path::Path, lib: path::Path) -> path::Path {
}

fn get_install_prefix_rpath(cwd: path::Path, target_triple: ~str) -> ~str {
let install_prefix = env!{"CFG_PREFIX"};
let install_prefix = env!("CFG_PREFIX");

if install_prefix == ~"" {
fail ~"rustc compiled without CFG_PREFIX environment variable";
@ -222,7 +222,7 @@ mod test {
#[test]
fn test_prefix_rpath() {
let res = get_install_prefix_rpath(~"/usr/lib", ~"triple");
let d = path::connect(env!{"CFG_PREFIX"}, ~"/lib/rustc/triple/lib");
let d = path::connect(env!("CFG_PREFIX"), ~"/lib/rustc/triple/lib");
assert str::ends_with(res, d);
}

@ -116,8 +116,8 @@ fn time<T>(do_it: bool, what: ~str, thunk: fn() -> T) -> T {
let start = std::time::precise_time_s();
let rv = thunk();
let end = std::time::precise_time_s();
io::stdout().write_str(fmt!{"time: %3.3f s\t%s\n",
end - start, what});
io::stdout().write_str(fmt!("time: %3.3f s\t%s\n",
end - start, what));
return rv;
}

@ -403,7 +403,7 @@ fn host_triple() -> ~str {
// FIXME (#2400): Instead of grabbing the host triple we really should
// be grabbing (at compile time) the target triple that this rustc is
// built with and calling that (at runtime) the host triple.
let ht = env!{"CFG_HOST_TRIPLE"};
let ht = env!("CFG_HOST_TRIPLE");
return if ht != ~"" {
ht
} else {
@ -438,8 +438,8 @@ fn build_session_options(matches: getopts::matches,
let lint_name = str::replace(lint_name, ~"-", ~"_");
match lint_dict.find(lint_name) {
none => {
early_error(demitter, fmt!{"unknown %s flag: %s",
level_name, lint_name});
early_error(demitter, fmt!("unknown %s flag: %s",
level_name, lint_name));
}
some(lint) => {
vec::push(lint_opts, (lint.lint, level));
@ -458,7 +458,7 @@ fn build_session_options(matches: getopts::matches,
if name == debug_flag { this_bit = bit; break; }
}
if this_bit == 0u {
early_error(demitter, fmt!{"unknown debug flag: %s", debug_flag})
early_error(demitter, fmt!("unknown debug flag: %s", debug_flag))
}
debugging_opts |= this_bit;
}

@ -21,14 +21,14 @@ import rustc::middle::lint;

fn version(argv0: ~str) {
let mut vers = ~"unknown version";
let env_vers = env!{"CFG_VERSION"};
let env_vers = env!("CFG_VERSION");
if str::len(env_vers) != 0u { vers = env_vers; }
io::println(fmt!{"%s %s", argv0, vers});
io::println(fmt!{"host: %s", host_triple()});
io::println(fmt!("%s %s", argv0, vers));
io::println(fmt!("host: %s", host_triple()));
}

fn usage(argv0: ~str) {
io::println(fmt!{"Usage: %s [options] <input>\n", argv0} +
io::println(fmt!("Usage: %s [options] <input>\n", argv0) +
~"
Options:

@ -85,14 +85,14 @@ fn describe_warnings() {
fn padded(max: uint, s: ~str) -> ~str {
str::from_bytes(vec::from_elem(max - s.len(), ' ' as u8)) + s
}
io::println(fmt!{"\nAvailable lint checks:\n"});
io::println(fmt!{" %s %7.7s %s",
padded(max_key, ~"name"), ~"default", ~"meaning"});
io::println(fmt!{" %s %7.7s %s\n",
padded(max_key, ~"----"), ~"-------", ~"-------"});
io::println(fmt!("\nAvailable lint checks:\n"));
io::println(fmt!(" %s %7.7s %s",
padded(max_key, ~"name"), ~"default", ~"meaning"));
io::println(fmt!(" %s %7.7s %s\n",
padded(max_key, ~"----"), ~"-------", ~"-------"));
for lint_dict.each |k, v| {
let k = str::replace(k, ~"_", ~"-");
io::println(fmt!{" %s %7.7s %s",
io::println(fmt!(" %s %7.7s %s",
padded(max_key, k),
match v.default {
lint::allow => ~"allow",
@ -100,16 +100,16 @@ fn describe_warnings() {
lint::deny => ~"deny",
lint::forbid => ~"forbid"
},
v.desc});
v.desc));
}
io::println(~"");
}

fn describe_debug_flags() {
io::println(fmt!{"\nAvailable debug options:\n"});
io::println(fmt!("\nAvailable debug options:\n"));
for session::debugging_opts_map().each |pair| {
let (name, desc, _) = pair;
io::println(fmt!{" -Z%-20s -- %s", name, desc});
io::println(fmt!(" -Z%-20s -- %s", name, desc));
}
}

@ -7,7 +7,7 @@ export inject_intrinsic;
fn inject_intrinsic(sess: session,
crate: @ast::crate) -> @ast::crate {

let intrinsic_module = @include_str!{"intrinsic.rs"};
let intrinsic_module = @include_str!("intrinsic.rs");

let item = parse::parse_item_from_source_str(~"<intrinsic>",
intrinsic_module,

@ -100,8 +100,8 @@ fn fold_item(cx: test_ctxt, &&i: @ast::item, fld: fold::ast_fold) ->
option<@ast::item> {

vec::push(cx.path, i.ident);
debug!{"current path: %s",
ast_util::path_name_i(cx.path, cx.sess.parse_sess.interner)};
debug!("current path: %s",
ast_util::path_name_i(cx.path, cx.sess.parse_sess.interner));

if is_test_fn(i) {
match i.node {
@ -111,12 +111,12 @@ fn fold_item(cx: test_ctxt, &&i: @ast::item, fld: fold::ast_fold) ->
~"unsafe functions cannot be used for tests");
}
_ => {
debug!{"this is a test function"};
debug!("this is a test function");
let test = {span: i.span,
path: cx.path, ignore: is_ignored(cx, i),
should_fail: should_fail(i)};
cx.testfns.push(test);
debug!{"have %u test functions", cx.testfns.len()};
debug!("have %u test functions", cx.testfns.len());
}
}
}
@ -203,8 +203,8 @@ fn mk_test_module(cx: test_ctxt) -> @ast::item {
vis: ast::public,
span: dummy_sp()};

debug!{"Synthetic test module:\n%s\n",
pprust::item_to_str(@item, cx.sess.intr())};
debug!("Synthetic test module:\n%s\n",
pprust::item_to_str(@item, cx.sess.intr()));

return @item;
}
@ -280,7 +280,7 @@ fn mk_test_desc_vec_ty(cx: test_ctxt) -> @ast::ty {
}

fn mk_test_desc_vec(cx: test_ctxt) -> @ast::expr {
debug!{"building test vector from %u tests", cx.testfns.len()};
debug!("building test vector from %u tests", cx.testfns.len());
let mut descs = ~[];
for cx.testfns.each |test| {
vec::push(descs, mk_test_desc_rec(cx, test));
@ -300,8 +300,8 @@ fn mk_test_desc_rec(cx: test_ctxt, test: test) -> @ast::expr {
let span = test.span;
let path = test.path;

debug!{"encoding %s", ast_util::path_name_i(path,
cx.sess.parse_sess.interner)};
debug!("encoding %s", ast_util::path_name_i(path,
cx.sess.parse_sess.interner));

let name_lit: ast::lit =
nospan(ast::lit_str(@ast_util::path_name_i(path, cx.sess.parse_sess

@ -1090,7 +1090,7 @@ fn type_to_str_inner(names: type_names, outer0: ~[TypeRef], ty: TypeRef) ->
if addrspace == 0u {
~""
} else {
fmt!{"addrspace(%u)", addrspace}
fmt!("addrspace(%u)", addrspace)
}
};
return addrstr + ~"*" +

@ -45,11 +45,11 @@ type cache_entry = {
};

fn dump_crates(crate_cache: DVec<cache_entry>) {
debug!{"resolved crates:"};
debug!("resolved crates:");
for crate_cache.each |entry| {
debug!{"cnum: %?", entry.cnum};
debug!{"span: %?", entry.span};
debug!{"hash: %?", entry.hash};
debug!("cnum: %?", entry.cnum);
debug!("span: %?", entry.span);
debug!("hash: %?", entry.hash);
}
}

@ -73,7 +73,7 @@ fn warn_if_multiple_versions(e: env, diag: span_handler,

if matches.len() != 1u {
diag.handler().warn(
fmt!{"using multiple versions of crate `%s`", name});
fmt!("using multiple versions of crate `%s`", name));
for matches.each |match_| {
diag.span_note(match_.span, ~"used here");
let attrs = ~[
@ -99,7 +99,7 @@ type env = @{diag: span_handler,
fn visit_view_item(e: env, i: @ast::view_item) {
match i.node {
ast::view_item_use(ident, meta_items, id) => {
debug!{"resolving use stmt. ident: %?, meta: %?", ident, meta_items};
debug!("resolving use stmt. ident: %?, meta: %?", ident, meta_items);
let cnum = resolve_crate(e, ident, meta_items, ~"", i.span);
cstore::add_use_stmt_cnum(e.cstore, id, cnum);
}
@ -236,7 +236,7 @@ fn resolve_crate(e: env, ident: ast::ident, metas: ~[@ast::meta_item],

// Go through the crate metadata and load any crates that it references
fn resolve_crate_deps(e: env, cdata: @~[u8]) -> cstore::cnum_map {
debug!{"resolving deps of external crate"};
debug!("resolving deps of external crate");
// The map from crate numbers in the crate we're resolving to local crate
// numbers
let cnum_map = int_hash::<ast::crate_num>();
@ -244,17 +244,17 @@ fn resolve_crate_deps(e: env, cdata: @~[u8]) -> cstore::cnum_map {
let extrn_cnum = dep.cnum;
let cname = dep.name;
let cmetas = metas_with(dep.vers, ~"vers", ~[]);
debug!{"resolving dep crate %s ver: %s hash: %s",
*e.intr.get(dep.name), dep.vers, dep.hash};
debug!("resolving dep crate %s ver: %s hash: %s",
*e.intr.get(dep.name), dep.vers, dep.hash);
match existing_match(e, metas_with_ident(*e.intr.get(cname), cmetas),
dep.hash) {
some(local_cnum) => {
debug!{"already have it"};
debug!("already have it");
// We've already seen this crate
cnum_map.insert(extrn_cnum, local_cnum);
}
none => {
debug!{"need to load it"};
debug!("need to load it");
// This is a new one so we've got to load it
// FIXME (#2404): Need better error reporting than just a bogus
// span.

@ -137,17 +137,17 @@ fn get_field_type(tcx: ty::ctxt, class_id: ast::def_id,
let cstore = tcx.cstore;
let cdata = cstore::get_crate_data(cstore, class_id.crate);
let all_items = ebml::get_doc(ebml::doc(cdata.data), tag_items);
debug!{"Looking up %?", class_id};
debug!("Looking up %?", class_id);
let class_doc = expect(tcx.diag,
decoder::maybe_find_item(class_id.node, all_items),
|| fmt!{"get_field_type: class ID %? not found",
class_id} );
debug!{"looking up %? : %?", def, class_doc};
|| fmt!("get_field_type: class ID %? not found",
class_id) );
debug!("looking up %? : %?", def, class_doc);
let the_field = expect(tcx.diag,
decoder::maybe_find_item(def.node, class_doc),
|| fmt!{"get_field_type: in class %?, field ID %? not found",
class_id, def} );
debug!{"got field data %?", the_field};
|| fmt!("get_field_type: in class %?, field ID %? not found",
class_id, def) );
debug!("got field data %?", the_field);
let ty = decoder::item_type(def, the_field, tcx, cdata);
return {bounds: @~[],
region_param: none,

@ -163,14 +163,14 @@ fn get_dep_hashes(cstore: cstore) -> ~[~str] {
for p(cstore).use_crate_map.each_value |cnum| {
let cdata = cstore::get_crate_data(cstore, cnum);
let hash = decoder::get_crate_hash(cdata.data);
debug!{"Add hash[%s]: %s", cdata.name, hash};
debug!("Add hash[%s]: %s", cdata.name, hash);
vec::push(result, {name: cdata.name, hash: hash});
};
pure fn lteq(a: &crate_hash, b: &crate_hash) -> bool {a.name <= b.name}
let sorted = std::sort::merge_sort(lteq, result);
debug!{"sorted:"};
debug!("sorted:");
for sorted.each |x| {
debug!{" hash[%s]: %s", x.name, x.hash};
debug!(" hash[%s]: %s", x.name, x.hash);
}
fn mapper(ch: crate_hash) -> ~str { return ch.hash; }
return vec::map(sorted, mapper);

@ -100,7 +100,7 @@ fn find_item(item_id: int, items: ebml::doc) -> ebml::doc {
fn lookup_item(item_id: int, data: @~[u8]) -> ebml::doc {
let items = ebml::get_doc(ebml::doc(data), tag_items);
match maybe_find_item(item_id, items) {
none => fail(fmt!{"lookup_item: id not found: %d", item_id}),
none => fail(fmt!("lookup_item: id not found: %d", item_id)),
some(d) => d
}
}
@ -379,8 +379,8 @@ fn get_class_method(intr: ident_interner, cdata: cmd, id: ast::node_id,
let mut found = none;
let cls_items = match maybe_find_item(id, items) {
some(it) => it,
none => fail (fmt!{"get_class_method: class id not found \
when looking up method %s", *intr.get(name)})
none => fail (fmt!("get_class_method: class id not found \
when looking up method %s", *intr.get(name)))
};
for ebml::tagged_docs(cls_items, tag_item_trait_method) |mid| {
let m_did = item_def_id(mid, cdata);
@ -390,8 +390,8 @@ fn get_class_method(intr: ident_interner, cdata: cmd, id: ast::node_id,
}
match found {
some(found) => found,
none => fail (fmt!{"get_class_method: no method named %s",
*intr.get(name)})
none => fail (fmt!("get_class_method: no method named %s",
*intr.get(name)))
}
}

@ -400,8 +400,8 @@ fn class_dtor(cdata: cmd, id: ast::node_id) -> option<ast::def_id> {
let mut found = none;
let cls_items = match maybe_find_item(id, items) {
some(it) => it,
none => fail (fmt!{"class_dtor: class id not found \
when looking up dtor for %d", id})
none => fail (fmt!("class_dtor: class id not found \
when looking up dtor for %d", id))
};
for ebml::tagged_docs(cls_items, tag_item_dtor) |doc| {
let doc1 = ebml::get_doc(doc, tag_def_id);
@ -461,7 +461,7 @@ fn each_path(intr: ident_interner, cdata: cmd, f: fn(path_entry) -> bool) {
let def_id = item_def_id(item_doc, cdata);

// Construct the def for this item.
debug!{"(each_path) yielding explicit item: %s", path};
debug!("(each_path) yielding explicit item: %s", path);
let def_like = item_to_def_like(item_doc, def_id, cdata.cnum);

// Hand the information off to the iteratee.
@ -539,7 +539,7 @@ fn maybe_get_item_ast(intr: ident_interner, cdata: cmd, tcx: ty::ctxt,
id: ast::node_id,
decode_inlined_item: decode_inlined_item
) -> csearch::found_ast {
debug!{"Looking up item: %d", id};
debug!("Looking up item: %d", id);
let item_doc = lookup_item(id, cdata.data);
let path = vec::init(item_path(intr, item_doc));
match decode_inlined_item(cdata, tcx, path, item_doc) {
@ -609,7 +609,7 @@ fn get_self_ty(item: ebml::doc) -> ast::self_ty_ {
'm' => { ast::m_mutbl }
'c' => { ast::m_const }
_ => {
fail fmt!{"unknown mutability character: `%c`", ch as char}
fail fmt!("unknown mutability character: `%c`", ch as char)
}
}
}
@ -626,7 +626,7 @@ fn get_self_ty(item: ebml::doc) -> ast::self_ty_ {
'~' => { return ast::sty_uniq(get_mutability(string[1])); }
'&' => { return ast::sty_region(get_mutability(string[1])); }
_ => {
fail fmt!{"unknown self type code: `%c`", self_ty_kind as char};
fail fmt!("unknown self type code: `%c`", self_ty_kind as char);
}
}
}
@ -658,8 +658,8 @@ fn get_impls_for_mod(intr: ident_interner, cdata: cmd,
for ebml::tagged_docs(mod_item, tag_mod_impl) |doc| {
let did = ebml::with_doc_data(doc, |d| parse_def_id(d));
let local_did = translate_def_id(cdata, did);
debug!{"(get impls for mod) getting did %? for '%?'",
local_did, name};
debug!("(get impls for mod) getting did %? for '%?'",
local_did, name);
// The impl may be defined in a different crate. Ask the caller
// to give us the metadata
let impl_cdata = get_cdata(local_did.crate);
@ -797,7 +797,7 @@ fn describe_def(items: ebml::doc, id: ast::def_id) -> ~str {
if id.crate != ast::local_crate { return ~"external"; }
let it = match maybe_find_item(id.node, items) {
some(it) => it,
none => fail (fmt!{"describe_def: item not found %?", id})
none => fail (fmt!("describe_def: item not found %?", id))
};
return item_family_to_str(item_family(it));
}
@ -877,16 +877,16 @@ fn get_attributes(md: ebml::doc) -> ~[ast::attribute] {
fn list_meta_items(intr: ident_interner,
meta_items: ebml::doc, out: io::Writer) {
for get_meta_items(meta_items).each |mi| {
out.write_str(fmt!{"%s\n", pprust::meta_item_to_str(mi, intr)});
out.write_str(fmt!("%s\n", pprust::meta_item_to_str(mi, intr)));
}
}

fn list_crate_attributes(intr: ident_interner, md: ebml::doc, hash: ~str,
out: io::Writer) {
out.write_str(fmt!{"=Crate Attributes (%s)=\n", hash});
out.write_str(fmt!("=Crate Attributes (%s)=\n", hash));

for get_attributes(md).each |attr| {
out.write_str(fmt!{"%s\n", pprust::attribute_to_str(attr, intr)});
out.write_str(fmt!("%s\n", pprust::attribute_to_str(attr, intr)));
}

out.write_str(~"\n\n");
@ -922,8 +922,8 @@ fn list_crate_deps(intr: ident_interner, data: @~[u8], out: io::Writer) {

for get_crate_deps(intr, data).each |dep| {
out.write_str(
fmt!{"%d %s-%s-%s\n",
dep.cnum, *intr.get(dep.name), dep.hash, dep.vers});
fmt!("%d %s-%s-%s\n",
dep.cnum, *intr.get(dep.name), dep.hash, dep.vers));
}

out.write_str(~"\n");

@ -126,7 +126,7 @@ fn encode_family(ebml_w: ebml::writer, c: char) {
ebml_w.end_tag();
}

fn def_to_str(did: def_id) -> ~str { fmt!{"%d:%d", did.crate, did.node} }
fn def_to_str(did: def_id) -> ~str { fmt!("%d:%d", did.crate, did.node) }

fn encode_ty_type_param_bounds(ebml_w: ebml::writer, ecx: @encode_ctxt,
params: @~[ty::param_bounds]) {
@ -178,7 +178,7 @@ fn encode_symbol(ecx: @encode_ctxt, ebml_w: ebml::writer, id: node_id) {
some(x) => x,
none => {
ecx.diag.handler().bug(
fmt!{"encode_symbol: id not found %d", id});
fmt!("encode_symbol: id not found %d", id));
}
};
ebml_w.writer.write(str::bytes(sym));
@ -265,20 +265,20 @@ fn encode_info_for_mod(ecx: @encode_ctxt, ebml_w: ebml::writer, md: _mod,
encode_def_id(ebml_w, local_def(id));
encode_family(ebml_w, 'm');
encode_name(ecx, ebml_w, name);
debug!{"(encoding info for module) encoding info for module ID %d", id};
debug!("(encoding info for module) encoding info for module ID %d", id);

// Encode info about all the module children.
for md.items.each |item| {
match item.node {
item_impl(*) | item_class(*) => {
let (ident, did) = (item.ident, item.id);
debug!{"(encoding info for module) ... encoding impl %s \
debug!("(encoding info for module) ... encoding impl %s \
(%?/%?), exported? %?",
ecx.tcx.sess.str_of(ident),
did,
ast_map::node_id_to_str(ecx.tcx.items, did, ecx.tcx
.sess.parse_sess.interner),
ast_util::is_exported(ident, md)};
ast_util::is_exported(ident, md));

ebml_w.start_tag(tag_mod_impl);
ebml_w.wr_str(def_to_str(local_def(did)));
@ -378,8 +378,8 @@ fn encode_info_for_class(ecx: @encode_ctxt, ebml_w: ebml::writer,
vec::push(*global_index, {val: id,
pos: ebml_w.writer.tell()});
ebml_w.start_tag(tag_items_data_item);
debug!{"encode_info_for_class: doing %s %d",
tcx.sess.str_of(nm), id};
debug!("encode_info_for_class: doing %s %d",
tcx.sess.str_of(nm), id);
encode_visibility(ebml_w, vis);
encode_name(ecx, ebml_w, nm);
encode_path(ecx, ebml_w, path, ast_map::path_name(nm));
@ -400,8 +400,8 @@ fn encode_info_for_class(ecx: @encode_ctxt, ebml_w: ebml::writer,
{val: m.id, pos: ebml_w.writer.tell()});
let impl_path = vec::append_one(path,
ast_map::path_name(m.ident));
debug!{"encode_info_for_class: doing %s %d",
ecx.tcx.sess.str_of(m.ident), m.id};
debug!("encode_info_for_class: doing %s %d",
ecx.tcx.sess.str_of(m.ident), m.id);
encode_info_for_method(ecx, ebml_w, impl_path,
should_inline(m.attrs), id, m,
vec::append(class_tps, m.tps));
@ -423,9 +423,9 @@ fn encode_info_for_fn(ecx: @encode_ctxt, ebml_w: ebml::writer,
encode_family(ebml_w, purity_fn_family(decl.purity));
encode_type_param_bounds(ebml_w, ecx, tps);
let its_ty = node_id_to_type(ecx.tcx, id);
debug!{"fn name = %s ty = %s its node id = %d",
debug!("fn name = %s ty = %s its node id = %d",
ecx.tcx.sess.str_of(ident),
util::ppaux::ty_to_str(ecx.tcx, its_ty), id};
util::ppaux::ty_to_str(ecx.tcx, its_ty), id);
encode_type(ecx, ebml_w, its_ty);
encode_path(ecx, ebml_w, path, ast_map::path_name(ident));
match item {
@ -443,8 +443,8 @@ fn encode_info_for_method(ecx: @encode_ctxt, ebml_w: ebml::writer,
impl_path: ast_map::path, should_inline: bool,
parent_id: node_id,
m: @method, all_tps: ~[ty_param]) {
debug!{"encode_info_for_method: %d %s %u", m.id,
ecx.tcx.sess.str_of(m.ident), all_tps.len()};
debug!("encode_info_for_method: %d %s %u", m.id,
ecx.tcx.sess.str_of(m.ident), all_tps.len());
ebml_w.start_tag(tag_items_data_item);
encode_def_id(ebml_w, local_def(m.id));
encode_family(ebml_w, purity_fn_family(m.decl.purity));
@ -668,8 +668,8 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::writer, item: @item,

/* Encode the constructor */
for struct_def.ctor.each |ctor| {
debug!{"encoding info for ctor %s %d",
ecx.tcx.sess.str_of(item.ident), ctor.node.id};
debug!("encoding info for ctor %s %d",
ecx.tcx.sess.str_of(item.ident), ctor.node.id);
vec::push(*index, {
val: ctor.node.id,
pos: ebml_w.writer.tell()

@ -62,7 +62,7 @@ fn mk_filesearch(maybe_sysroot: option<Path>,
}

let sysroot = get_sysroot(maybe_sysroot);
debug!{"using sysroot = %s", sysroot};
debug!("using sysroot = %s", sysroot);
{sysroot: sysroot,
addl_lib_search_paths: addl_lib_search_paths,
target_triple: target_triple} as filesearch
@ -71,16 +71,16 @@ fn mk_filesearch(maybe_sysroot: option<Path>,
fn search<T: copy>(filesearch: filesearch, pick: pick<T>) -> option<T> {
let mut rslt = none;
for filesearch.lib_search_paths().each |lib_search_path| {
debug!{"searching %s", lib_search_path};
debug!("searching %s", lib_search_path);
for os::list_dir_path(lib_search_path).each |path| {
debug!{"testing %s", path};
debug!("testing %s", path);
let maybe_picked = pick(path);
if option::is_some(maybe_picked) {
debug!{"picked %s", path};
debug!("picked %s", path);
rslt = maybe_picked;
break;
} else {
debug!{"rejected %s", path};
debug!("rejected %s", path);
}
}
if option::is_some(rslt) { break; }
@ -168,7 +168,7 @@ fn get_cargo_lib_path_nearest() -> result<Path, ~str> {
// The name of the directory rustc expects libraries to be located.
// On Unix should be "lib", on windows "bin"
fn libdir() -> ~str {
let libdir = env!{"CFG_LIBDIR"};
let libdir = env!("CFG_LIBDIR");
if str::is_empty(libdir) {
fail ~"rustc compiled without CFG_LIBDIR environment variable";
}

@ -43,8 +43,8 @@ fn load_library_crate(cx: ctxt) -> {ident: ~str, data: @~[u8]} {
some(t) => return t,
none => {
cx.diag.span_fatal(
cx.span, fmt!{"can't find crate for `%s`",
*cx.intr.get(cx.ident)});
cx.span, fmt!("can't find crate for `%s`",
*cx.intr.get(cx.ident)));
}
}
}
@ -74,27 +74,27 @@ fn find_library_crate_aux(cx: ctxt,

let mut matches = ~[];
filesearch::search(filesearch, |path| {
debug!{"inspecting file %s", path};
debug!("inspecting file %s", path);
let f: ~str = path::basename(path);
if !(str::starts_with(f, prefix) && str::ends_with(f, suffix)) {
debug!{"skipping %s, doesn't look like %s*%s", path, prefix,
suffix};
debug!("skipping %s, doesn't look like %s*%s", path, prefix,
suffix);
option::none::<()>
} else {
debug!{"%s is a candidate", path};
debug!("%s is a candidate", path);
match get_metadata_section(cx.os, path) {
option::some(cvec) => {
if !crate_matches(cvec, cx.metas, cx.hash) {
debug!{"skipping %s, metadata doesn't match", path};
debug!("skipping %s, metadata doesn't match", path);
option::none::<()>
} else {
debug!{"found %s with matching metadata", path};
debug!("found %s with matching metadata", path);
vec::push(matches, {ident: path, data: cvec});
option::none::<()>
}
}
_ => {
debug!{"could not load metadata for %s", path};
debug!("could not load metadata for %s", path);
option::none::<()>
}
}
@ -107,10 +107,10 @@ fn find_library_crate_aux(cx: ctxt,
some(matches[0])
} else {
cx.diag.span_err(
cx.span, fmt!{"multiple matching crates for `%s`", crate_name});
cx.span, fmt!("multiple matching crates for `%s`", crate_name));
cx.diag.handler().note(~"candidates:");
for matches.each |match_| {
cx.diag.handler().note(fmt!{"path: %s", match_.ident});
cx.diag.handler().note(fmt!("path: %s", match_.ident));
let attrs = decoder::get_crate_attributes(match_.data);
note_linkage_attrs(cx.intr, cx.diag, attrs);
}
@ -137,8 +137,8 @@ fn crate_name_from_metas(metas: ~[@ast::meta_item]) -> ~str {
fn note_linkage_attrs(intr: ident_interner, diag: span_handler,
attrs: ~[ast::attribute]) {
for attr::find_linkage_metas(attrs).each |mi| {
diag.handler().note(fmt!{"meta: %s",
pprust::meta_item_to_str(mi,intr)});
diag.handler().note(fmt!("meta: %s",
pprust::meta_item_to_str(mi,intr)));
}
}

@ -156,8 +156,8 @@ fn crate_matches(crate_data: @~[u8], metas: ~[@ast::meta_item],
fn metadata_matches(extern_metas: ~[@ast::meta_item],
local_metas: ~[@ast::meta_item]) -> bool {

debug!{"matching %u metadata requirements against %u items",
vec::len(local_metas), vec::len(extern_metas)};
debug!("matching %u metadata requirements against %u items",
vec::len(local_metas), vec::len(extern_metas));

for local_metas.each |needed| {
if !attr::contains(extern_metas, needed) {

@ -308,16 +308,16 @@ fn parse_ty(st: @pstate, conv: conv_did) -> ty::t {
}
'B' => ty::mk_opaque_box(st.tcx),
'a' => {
debug!{"saw a class"};
debug!("saw a class");
assert (next(st) == '[');
debug!{"saw a ["};
debug!("saw a [");
let did = parse_def(st, conv);
debug!{"parsed a def_id %?", did};
debug!("parsed a def_id %?", did);
let substs = parse_substs(st, conv);
assert (next(st) == ']');
return ty::mk_class(st.tcx, did, substs);
}
c => { error!{"unexpected char in type string: %c", c}; fail;}
c => { error!("unexpected char in type string: %c", c); fail;}
}
}

@ -403,7 +403,7 @@ fn parse_def_id(buf: &[u8]) -> ast::def_id {
let len = vec::len(buf);
while colon_idx < len && buf[colon_idx] != ':' as u8 { colon_idx += 1u; }
if colon_idx == len {
error!{"didn't find ':' when parsing def id"};
error!("didn't find ':' when parsing def id");
fail;
}

@ -412,13 +412,13 @@ fn parse_def_id(buf: &[u8]) -> ast::def_id {

let crate_num = match uint::parse_buf(crate_part, 10u) {
some(cn) => cn as int,
none => fail (fmt!{"internal error: parse_def_id: crate number \
expected, but found %?", crate_part})
none => fail (fmt!("internal error: parse_def_id: crate number \
expected, but found %?", crate_part))
};
let def_num = match uint::parse_buf(def_part, 10u) {
some(dn) => dn as int,
none => fail (fmt!{"internal error: parse_def_id: id expected, but \
found %?", def_part})
none => fail (fmt!("internal error: parse_def_id: id expected, but \
found %?", def_part))
};
return {crate: crate_num, node: def_num};
}

@ -298,15 +298,15 @@ fn enc_sty(w: io::Writer, cx: @ctxt, st: ty::sty) {
ty::ty_opaque_closure_ptr(ty::ck_uniq) => w.write_str(&"C~"),
ty::ty_opaque_box => w.write_char('B'),
ty::ty_class(def, substs) => {
debug!{"~~~~ %s", ~"a["};
debug!("~~~~ %s", ~"a[");
w.write_str(&"a[");
let s = cx.ds(def);
debug!{"~~~~ %s", s};
debug!("~~~~ %s", s);
w.write_str(s);
debug!{"~~~~ %s", ~"|"};
debug!("~~~~ %s", ~"|");
w.write_char('|');
enc_substs(w, cx, substs);
debug!{"~~~~ %s", ~"]"};
debug!("~~~~ %s", ~"]");
w.write_char(']');
}
}

@ -82,10 +82,10 @@ fn encode_inlined_item(ecx: @e::encode_ctxt,
path: ast_map::path,
ii: ast::inlined_item,
maps: maps) {
debug!{"> Encoding inlined item: %s::%s (%u)",
debug!("> Encoding inlined item: %s::%s (%u)",
ast_map::path_to_str(path, ecx.tcx.sess.parse_sess.interner),
ecx.tcx.sess.str_of(ii.ident()),
ebml_w.writer.tell()};
ebml_w.writer.tell());

let id_range = ast_util::compute_id_range_for_inlined_item(ii);
do ebml_w.wr_tag(c::tag_ast as uint) {
@ -94,10 +94,10 @@ fn encode_inlined_item(ecx: @e::encode_ctxt,
encode_side_tables_for_ii(ecx, maps, ebml_w, ii);
}

debug!{"< Encoded inlined fn: %s::%s (%u)",
debug!("< Encoded inlined fn: %s::%s (%u)",
ast_map::path_to_str(path, ecx.tcx.sess.parse_sess.interner),
ecx.tcx.sess.str_of(ii.ident()),
ebml_w.writer.tell()};
ebml_w.writer.tell());
}

fn decode_inlined_item(cdata: cstore::crate_metadata,
@ -109,8 +109,8 @@ fn decode_inlined_item(cdata: cstore::crate_metadata,
match par_doc.opt_child(c::tag_ast) {
none => none,
some(ast_doc) => {
debug!{"> Decoding inlined fn: %s::?",
ast_map::path_to_str(path, tcx.sess.parse_sess.interner)};
debug!("> Decoding inlined fn: %s::?",
ast_map::path_to_str(path, tcx.sess.parse_sess.interner));
let ast_dsr = ebml::ebml_deserializer(ast_doc);
let from_id_range = ast_util::deserialize_id_range(ast_dsr);
let to_id_range = reserve_id_range(dcx.tcx.sess, from_id_range);
@ -121,15 +121,15 @@ fn decode_inlined_item(cdata: cstore::crate_metadata,
let ii = renumber_ast(xcx, raw_ii);
ast_map::map_decoded_item(tcx.sess.diagnostic(),
dcx.tcx.items, path, ii);
debug!{"Fn named: %s", tcx.sess.str_of(ii.ident())};
debug!("Fn named: %s", tcx.sess.str_of(ii.ident()));
decode_side_tables(xcx, ast_doc);
debug!{"< Decoded inlined fn: %s::%s",
debug!("< Decoded inlined fn: %s::%s",
ast_map::path_to_str(path, tcx.sess.parse_sess.interner),
tcx.sess.str_of(ii.ident())};
tcx.sess.str_of(ii.ident()));
match ii {
ast::ii_item(i) => {
debug!{">>> DECODED ITEM >>>\n%s\n<<< DECODED ITEM <<<",
syntax::print::pprust::item_to_str(i, tcx.sess.intr())};
debug!(">>> DECODED ITEM >>>\n%s\n<<< DECODED ITEM <<<",
syntax::print::pprust::item_to_str(i, tcx.sess.intr()));
}
_ => { }
}
@ -650,7 +650,7 @@ fn encode_side_tables_for_id(ecx: @e::encode_ctxt,
id: ast::node_id) {
let tcx = ecx.tcx;

debug!{"Encoding side tables for id %d", id};
debug!("Encoding side tables for id %d", id);

do option::iter(tcx.def_map.find(id)) |def| {
do ebml_w.tag(c::tag_table_def) {
@ -838,9 +838,9 @@ fn decode_side_tables(xcx: extended_decode_ctxt,
let id0 = entry_doc[c::tag_table_id as uint].as_int();
let id = xcx.tr_id(id0);

debug!{">> Side table document with tag 0x%x \
debug!(">> Side table document with tag 0x%x \
found for id %d (orig %d)",
tag, id, id0};
tag, id, id0);

if tag == (c::tag_table_mutbl as uint) {
dcx.maps.mutbl_map.insert(id, ());
@ -889,11 +889,11 @@ fn decode_side_tables(xcx: extended_decode_ctxt,
dcx.tcx.borrowings.insert(id, borrow);
} else {
xcx.dcx.tcx.sess.bug(
fmt!{"unknown tag found in side tables: %x", tag});
fmt!("unknown tag found in side tables: %x", tag));
}
}

debug!{">< Side table doc loaded"};
debug!(">< Side table doc loaded");
}
}

@ -947,8 +947,8 @@ fn roundtrip(in_item: @ast::item) {
let out_str =
io::with_str_writer(|w| ast::serialize_item(w, *out_item) );

debug!{"expected string: %s", exp_str};
debug!{"actual string : %s", out_str};
debug!("expected string: %s", exp_str);
debug!("actual string : %s", out_str);

assert exp_str == out_str;
}

@ -255,16 +255,16 @@ fn check_crate(tcx: ty::ctxt,
|
||||
|
||||
if tcx.sess.borrowck_stats() {
|
||||
io::println(~"--- borrowck stats ---");
|
||||
io::println(fmt!{"paths requiring guarantees: %u",
|
||||
bccx.guaranteed_paths});
|
||||
io::println(fmt!{"paths requiring loans : %s",
|
||||
make_stat(bccx, bccx.loaned_paths_same)});
|
||||
io::println(fmt!{"paths requiring imm loans : %s",
|
||||
make_stat(bccx, bccx.loaned_paths_imm)});
|
||||
io::println(fmt!{"stable paths : %s",
|
||||
make_stat(bccx, bccx.stable_paths)});
|
||||
io::println(fmt!{"paths requiring purity : %s",
|
||||
make_stat(bccx, bccx.req_pure_paths)});
|
||||
io::println(fmt!("paths requiring guarantees: %u",
|
||||
bccx.guaranteed_paths));
|
||||
io::println(fmt!("paths requiring loans : %s",
|
||||
make_stat(bccx, bccx.loaned_paths_same)));
|
||||
io::println(fmt!("paths requiring imm loans : %s",
|
||||
make_stat(bccx, bccx.loaned_paths_imm)));
|
||||
io::println(fmt!("stable paths : %s",
|
||||
make_stat(bccx, bccx.stable_paths)));
|
||||
io::println(fmt!("paths requiring purity : %s",
|
||||
make_stat(bccx, bccx.req_pure_paths)));
|
||||
}
|
||||
|
||||
return (bccx.root_map, bccx.mutbl_map);
|
||||
@ -272,7 +272,7 @@ fn check_crate(tcx: ty::ctxt,
|
||||
fn make_stat(bccx: borrowck_ctxt, stat: uint) -> ~str {
|
||||
let stat_f = stat as float;
|
||||
let total = bccx.guaranteed_paths as float;
|
||||
fmt!{"%u (%.0f%%)", stat , stat_f * 100f / total}
|
||||
fmt!("%u (%.0f%%)", stat , stat_f * 100f / total)
|
||||
}
|
||||
}
|
||||
|
||||
@ -412,8 +412,8 @@ impl borrowck_ctxt {
|
||||
fn report(err: bckerr) {
|
||||
self.span_err(
|
||||
err.cmt.span,
|
||||
fmt!{"illegal borrow: %s",
|
||||
self.bckerr_code_to_str(err.code)});
|
||||
fmt!("illegal borrow: %s",
|
||||
self.bckerr_code_to_str(err.code)));
|
||||
}
|
||||
|
||||
fn span_err(s: span, m: ~str) {
|
||||
@ -439,8 +439,8 @@ impl borrowck_ctxt {
|
||||
fn bckerr_code_to_str(code: bckerr_code) -> ~str {
|
||||
match code {
|
||||
err_mutbl(req, act) => {
|
||||
fmt!{"creating %s alias to aliasable, %s memory",
|
||||
self.mut_to_str(req), self.mut_to_str(act)}
|
||||
fmt!("creating %s alias to aliasable, %s memory",
|
||||
self.mut_to_str(req), self.mut_to_str(act))
|
||||
}
|
||||
err_mut_uniq => {
|
||||
~"unique value in aliasable, mutable location"
|
||||
@ -455,16 +455,16 @@ impl borrowck_ctxt {
|
||||
~"rooting is not permitted"
|
||||
}
|
||||
err_out_of_root_scope(super_scope, sub_scope) => {
|
||||
fmt!{"managed value would have to be rooted for %s, \
|
||||
fmt!("managed value would have to be rooted for %s, \
|
||||
but can only be rooted for %s",
|
||||
explain_region(self.tcx, sub_scope),
|
||||
explain_region(self.tcx, super_scope)}
|
||||
explain_region(self.tcx, super_scope))
|
||||
}
|
||||
err_out_of_scope(super_scope, sub_scope) => {
|
||||
fmt!{"borrowed pointer must be valid for %s, \
|
||||
fmt!("borrowed pointer must be valid for %s, \
|
||||
but the borrowed value is only valid for %s",
|
||||
explain_region(self.tcx, sub_scope),
|
||||
explain_region(self.tcx, super_scope)}
|
||||
explain_region(self.tcx, super_scope))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -156,12 +156,12 @@ impl check_loan_ctxt {
callee_span: span) {
let tcx = self.tcx();

debug!{"check_pure_callee_or_arg(pc=%?, expr=%?, \
debug!("check_pure_callee_or_arg(pc=%?, expr=%?, \
callee_id=%d, ty=%s)",
pc,
opt_expr.map(|e| pprust::expr_to_str(e, tcx.sess.intr()) ),
callee_id,
ty_to_str(self.tcx(), ty::node_id_to_type(tcx, callee_id))};
ty_to_str(self.tcx(), ty::node_id_to_type(tcx, callee_id)));

// Purity rules: an expr B is a legal callee or argument to a
// call within a pure function A if at least one of the

@ -204,8 +204,8 @@ impl check_loan_ctxt {
ast::impure_fn | ast::unsafe_fn | ast::extern_fn => {
self.report_purity_error(
pc, callee_span,
fmt!{"access to %s function",
pprust::purity_to_str(fn_ty.purity)});
fmt!("access to %s function",
pprust::purity_to_str(fn_ty.purity)));
}
}
}

@ -256,14 +256,14 @@ impl check_loan_ctxt {
(m_mutbl, m_imm) | (m_imm, m_mutbl) => {
self.bccx.span_err(
new_loan.cmt.span,
fmt!{"loan of %s as %s \
fmt!("loan of %s as %s \
conflicts with prior loan",
self.bccx.cmt_to_str(new_loan.cmt),
self.bccx.mut_to_str(new_loan.mutbl)});
self.bccx.mut_to_str(new_loan.mutbl)));
self.bccx.span_note(
old_loan.cmt.span,
fmt!{"prior loan as %s granted here",
self.bccx.mut_to_str(old_loan.mutbl)});
fmt!("prior loan as %s granted here",
self.bccx.mut_to_str(old_loan.mutbl)));
}
}
}

@ -293,8 +293,8 @@ impl check_loan_ctxt {
fn check_assignment(at: assignment_type, ex: @ast::expr) {
let cmt = self.bccx.cat_expr(ex);

debug!{"check_assignment(cmt=%s)",
self.bccx.cmt_to_repr(cmt)};
debug!("check_assignment(cmt=%s)",
self.bccx.cmt_to_repr(cmt));

if self.in_ctor && self.is_self_field(cmt)
&& at.checked_by_liveness() {

@ -375,12 +375,12 @@ impl check_loan_ctxt {
m_imm => {
self.bccx.span_err(
ex.span,
fmt!{"%s prohibited due to outstanding loan",
at.ing_form(self.bccx.cmt_to_str(cmt))});
fmt!("%s prohibited due to outstanding loan",
at.ing_form(self.bccx.cmt_to_str(cmt))));
self.bccx.span_note(
loan.cmt.span,
fmt!{"loan of %s granted here",
self.bccx.cmt_to_str(loan.cmt)});
fmt!("loan of %s granted here",
self.bccx.cmt_to_str(loan.cmt)));
return;
}
}

@ -407,17 +407,17 @@ impl check_loan_ctxt {
pc_pure_fn => {
self.tcx().sess.span_err(
sp,
fmt!{"%s prohibited in pure context", msg});
fmt!("%s prohibited in pure context", msg));
}
pc_cmt(e) => {
if self.reported.insert(e.cmt.id, ()) {
self.tcx().sess.span_err(
e.cmt.span,
fmt!{"illegal borrow unless pure: %s",
self.bccx.bckerr_code_to_str(e.code)});
fmt!("illegal borrow unless pure: %s",
self.bccx.bckerr_code_to_str(e.code)));
self.tcx().sess.span_note(
sp,
fmt!{"impure due to %s", msg});
fmt!("impure due to %s", msg));
}
}
}

@ -429,8 +429,8 @@ impl check_loan_ctxt {
}

fn check_move_out_from_cmt(cmt: cmt) {
debug!{"check_move_out_from_cmt(cmt=%s)",
self.bccx.cmt_to_repr(cmt)};
debug!("check_move_out_from_cmt(cmt=%s)",
self.bccx.cmt_to_repr(cmt));

match cmt.cat {
// Rvalues, locals, and arguments can be moved:

@ -447,7 +447,7 @@ impl check_loan_ctxt {
_ => {
self.bccx.span_err(
cmt.span,
fmt!{"moving out of %s", self.bccx.cmt_to_str(cmt)});
fmt!("moving out of %s", self.bccx.cmt_to_str(cmt)));
return;
}
}

@ -462,12 +462,12 @@ impl check_loan_ctxt {
for self.walk_loans_of(cmt.id, lp) |loan| {
self.bccx.span_err(
cmt.span,
fmt!{"moving out of %s prohibited due to outstanding loan",
self.bccx.cmt_to_str(cmt)});
fmt!("moving out of %s prohibited due to outstanding loan",
self.bccx.cmt_to_str(cmt)));
self.bccx.span_note(
loan.cmt.span,
fmt!{"loan of %s granted here",
self.bccx.cmt_to_str(loan.cmt)});
fmt!("loan of %s granted here",
self.bccx.cmt_to_str(loan.cmt)));
return;
}
}

@ -482,8 +482,8 @@ impl check_loan_ctxt {
some(lp) => lp
};
for self.walk_loans_of(cmt.id, lp) |_loan| {
debug!{"Removing last use entry %? due to outstanding loan",
expr.id};
debug!("Removing last use entry %? due to outstanding loan",
expr.id);
self.bccx.last_use_map.remove(expr.id);
return;
}

@ -527,7 +527,7 @@ fn check_loans_in_fn(fk: visit::fn_kind, decl: ast::fn_decl, body: ast::blk,
sp: span, id: ast::node_id, &&self: check_loan_ctxt,
visitor: visit::vt<check_loan_ctxt>) {

debug!{"purity on entry=%?", copy self.declared_purity};
debug!("purity on entry=%?", copy self.declared_purity);
do save_and_restore(self.in_ctor) {
do save_and_restore(self.declared_purity) {
do save_and_restore(self.fn_args) {

@ -563,7 +563,7 @@ fn check_loans_in_fn(fk: visit::fn_kind, decl: ast::fn_decl, body: ast::blk,
}
}
}
debug!{"purity on exit=%?", copy self.declared_purity};
debug!("purity on exit=%?", copy self.declared_purity);
}

fn check_loans_in_local(local: @ast::local,
@ -90,8 +90,8 @@ fn req_loans_in_expr(ex: @ast::expr,
let tcx = bccx.tcx;
let old_root_ub = self.root_ub;

debug!{"req_loans_in_expr(ex=%s)",
pprust::expr_to_str(ex, tcx.sess.intr())};
debug!("req_loans_in_expr(ex=%s)",
pprust::expr_to_str(ex, tcx.sess.intr()));

// If this expression is borrowed, have to ensure it remains valid:
for tcx.borrowings.find(ex.id).each |borrow| {

@ -257,10 +257,10 @@ impl gather_loan_ctxt {

self.bccx.guaranteed_paths += 1;

debug!{"guarantee_valid(cmt=%s, req_mutbl=%s, scope_r=%s)",
debug!("guarantee_valid(cmt=%s, req_mutbl=%s, scope_r=%s)",
self.bccx.cmt_to_repr(cmt),
self.bccx.mut_to_str(req_mutbl),
region_to_str(self.tcx(), scope_r)};
region_to_str(self.tcx(), scope_r));
let _i = indenter();

match cmt.lp {

@ -289,7 +289,7 @@ impl gather_loan_ctxt {
if self.tcx().sess.borrowck_note_loan() {
self.bccx.span_note(
cmt.span,
fmt!{"immutable loan required"});
fmt!("immutable loan required"));
}
} else {
self.bccx.loaned_paths_same += 1;

@ -343,7 +343,7 @@ impl gather_loan_ctxt {
if self.tcx().sess.borrowck_note_pure() {
self.bccx.span_note(
cmt.span,
fmt!{"purity required"});
fmt!("purity required"));
}
}
_ => {

@ -57,9 +57,9 @@ impl loan_ctxt {
}

fn loan(cmt: cmt, req_mutbl: ast::mutability) -> bckres<()> {
debug!{"loan(%s, %s)",
debug!("loan(%s, %s)",
self.bccx.cmt_to_repr(cmt),
self.bccx.mut_to_str(req_mutbl)};
self.bccx.mut_to_str(req_mutbl));
let _i = indenter();

// see stable() above; should only be called when `cmt` is lendable

@ -58,9 +58,9 @@ priv impl &preserve_ctxt {
fn tcx() -> ty::ctxt { self.bccx.tcx }

fn preserve(cmt: cmt) -> bckres<preserve_condition> {
debug!{"preserve(cmt=%s, root_ub=%?, root_managed_data=%b)",
debug!("preserve(cmt=%s, root_ub=%?, root_managed_data=%b)",
self.bccx.cmt_to_repr(cmt), self.root_ub,
self.root_managed_data};
self.root_managed_data);
let _i = indenter();

match cmt.cat {

@ -155,7 +155,7 @@ priv impl &preserve_ctxt {
// otherwise we have no guarantee the pointer will stay
// live, so we must root the pointer (i.e., inc the ref
// count) for the duration of the loan.
debug!{"base.mutbl = %?", self.bccx.mut_to_str(base.mutbl)};
debug!("base.mutbl = %?", self.bccx.mut_to_str(base.mutbl));
if base.mutbl == m_imm {
let non_rooting_ctxt =
preserve_ctxt({root_managed_data: false with **self});

@ -164,12 +164,12 @@ priv impl &preserve_ctxt {
ok(pc_ok)
}
ok(pc_if_pure(_)) => {
debug!{"must root @T, otherwise purity req'd"};
debug!("must root @T, otherwise purity req'd");
self.attempt_root(cmt, base, derefs)
}
err(e) => {
debug!{"must root @T, err: %s",
self.bccx.bckerr_code_to_str(e.code)};
debug!("must root @T, err: %s",
self.bccx.bckerr_code_to_str(e.code));
self.attempt_root(cmt, base, derefs)
}
}

@ -43,16 +43,16 @@ fn check_capture_clause(tcx: ty::ctxt,
if !vec::any(*freevars, |fv| fv.def == cap_def ) {
tcx.sess.span_warn(
cap_item.span,
fmt!{"captured variable `%s` not used in closure",
tcx.sess.str_of(cap_item.name)});
fmt!("captured variable `%s` not used in closure",
tcx.sess.str_of(cap_item.name)));
}

let cap_def_id = ast_util::def_id_of_def(cap_def).node;
if !seen_defs.insert(cap_def_id, ()) {
tcx.sess.span_err(
cap_item.span,
fmt!{"variable `%s` captured more than once",
tcx.sess.str_of(cap_item.name)});
fmt!("variable `%s` captured more than once",
tcx.sess.str_of(cap_item.name)));
}
}
}

@ -67,8 +67,8 @@ fn compute_capture_vars(tcx: ty::ctxt,
// first add entries for anything explicitly named in the cap clause

for (*cap_clause).each |cap_item| {
debug!{"Doing capture var: %s (%?)",
tcx.sess.str_of(cap_item.name), cap_item.id};
debug!("Doing capture var: %s (%?)",
tcx.sess.str_of(cap_item.name), cap_item.id);

let cap_def = tcx.def_map.get(cap_item.id);
let cap_def_id = ast_util::def_id_of_def(cap_def).node;

@ -214,7 +214,7 @@ fn check_block(b: blk, cx: ctx, v: visit::vt<ctx>) {
}

fn check_expr(e: @expr, cx: ctx, v: visit::vt<ctx>) {
debug!{"kind::check_expr(%s)", expr_to_str(e, cx.tcx.sess.intr())};
debug!("kind::check_expr(%s)", expr_to_str(e, cx.tcx.sess.intr()));

// Handle any kind bounds on type parameters
do option::iter(cx.tcx.node_type_substs.find(e.id)) |ts| {

@ -433,7 +433,7 @@ fn check_imm_free_var(cx: ctx, def: def, sp: span) {
_ => {
cx.tcx.sess.span_bug(
sp,
fmt!{"unknown def for free variable: %?", def});
fmt!("unknown def for free variable: %?", def));
}
}
}

@ -524,7 +524,7 @@ fn check_cast_for_escaping_regions(
some(ty::re_bound(*)) | some(ty::re_var(*)) => {
cx.tcx.sess.span_bug(
source.span,
fmt!{"bad region found in kind: %?", target_substs.self_r});
fmt!("bad region found in kind: %?", target_substs.self_r));
}
}

@ -126,8 +126,8 @@ struct LanguageItemCollector {
some(original_def_id)
if original_def_id != item_def_id => {

self.session.err(fmt!{"duplicate entry for `%s`",
value});
self.session.err(fmt!("duplicate entry for `%s`",
value));
}
some(_) | none => {
// OK.

@ -185,7 +185,7 @@ struct LanguageItemCollector {
for self.item_refs.each |key, item_ref| {
match copy *item_ref {
none => {
self.session.err(fmt!{"no item found for `%s`", key});
self.session.err(fmt!("no item found for `%s`", key));
}
some(did) => {
// OK.
@ -251,8 +251,8 @@ impl ctxt {
self.span_lint(
new_ctxt.get_level(unrecognized_lint),
meta.span,
fmt!{"unknown `%s` attribute: `%s`",
level_to_str(level), lintname});
fmt!("unknown `%s` attribute: `%s`",
level_to_str(level), lintname));
}
some(lint) => {

@ -261,9 +261,9 @@ impl ctxt {
self.span_lint(
forbid,
meta.span,
fmt!{"%s(%s) overruled by outer forbid(%s)",
fmt!("%s(%s) overruled by outer forbid(%s)",
level_to_str(level),
lintname, lintname});
lintname, lintname));
}

// we do multiple unneeded copies of the

@ -486,7 +486,7 @@ fn check_item_non_camel_case_types(cx: ty::ctxt, it: @ast::item) {
}

fn check_pat(tcx: ty::ctxt, pat: @ast::pat) {
debug!{"lint check_pat pat=%s", pat_to_str(pat, tcx.sess.intr())};
debug!("lint check_pat pat=%s", pat_to_str(pat, tcx.sess.intr()));

do pat_bindings(tcx.def_map, pat) |binding_mode, id, span, path| {
match binding_mode {

@ -498,8 +498,8 @@ fn check_pat(tcx: ty::ctxt, pat: @ast::pat) {
tcx.sess.span_lint(
deprecated_pattern, id, id,
span,
fmt!{"binding `%s` should use ref or copy mode",
tcx.sess.str_of(path_to_ident(path))});
fmt!("binding `%s` should use ref or copy mode",
tcx.sess.str_of(path_to_ident(path))));
}
}
}

@ -508,7 +508,7 @@ fn check_pat(tcx: ty::ctxt, pat: @ast::pat) {

fn check_fn(tcx: ty::ctxt, fk: visit::fn_kind, decl: ast::fn_decl,
_body: ast::blk, span: span, id: ast::node_id) {
debug!{"lint check_fn fk=%? id=%?", fk, id};
debug!("lint check_fn fk=%? id=%?", fk, id);

// don't complain about blocks, since they tend to get their modes
// specified from the outside

@ -523,10 +523,10 @@ fn check_fn(tcx: ty::ctxt, fk: visit::fn_kind, decl: ast::fn_decl,
let mut counter = 0;
do vec::iter2(fn_ty.inputs, decl.inputs) |arg_ty, arg_ast| {
counter += 1;
debug!{"arg %d, ty=%s, mode=%s",
debug!("arg %d, ty=%s, mode=%s",
counter,
ty_to_str(tcx, arg_ty.ty),
mode_to_str(arg_ast.mode)};
mode_to_str(arg_ast.mode));
match arg_ast.mode {
ast::expl(ast::by_copy) => {
/* always allow by-copy */

@ -536,7 +536,7 @@ fn check_fn(tcx: ty::ctxt, fk: visit::fn_kind, decl: ast::fn_decl,
tcx.sess.span_lint(
deprecated_mode, id, id,
span,
fmt!{"argument %d uses an explicit mode", counter});
fmt!("argument %d uses an explicit mode", counter));
}

ast::infer(_) => {

@ -545,9 +545,9 @@ fn check_fn(tcx: ty::ctxt, fk: visit::fn_kind, decl: ast::fn_decl,
tcx.sess.span_lint(
deprecated_mode, id, id,
span,
fmt!{"argument %d uses the default mode \
fmt!("argument %d uses the default mode \
but shouldn't",
counter});
counter));
}
}
}

@ -153,11 +153,11 @@ fn check_crate(tcx: ty::ctxt,
}

impl live_node: to_str::ToStr {
fn to_str() -> ~str { fmt!{"ln(%u)", *self} }
fn to_str() -> ~str { fmt!("ln(%u)", *self) }
}

impl variable: to_str::ToStr {
fn to_str() -> ~str { fmt!{"v(%u)", *self} }
fn to_str() -> ~str { fmt!("v(%u)", *self) }
}

// ______________________________________________________________________

@ -243,7 +243,7 @@ struct ir_maps {
vec::push(self.lnks, lnk);
self.num_live_nodes += 1u;

debug!{"%s is of kind %?", ln.to_str(), lnk};
debug!("%s is of kind %?", ln.to_str(), lnk);

ln
}

@ -252,7 +252,7 @@ struct ir_maps {
let ln = self.add_live_node(lnk);
self.live_node_map.insert(node_id, ln);

debug!{"%s is node %d", ln.to_str(), node_id};
debug!("%s is node %d", ln.to_str(), node_id);
}

fn add_variable(vk: var_kind) -> variable {

@ -271,7 +271,7 @@ struct ir_maps {
}
}

debug!{"%s is %?", v.to_str(), vk};
debug!("%s is %?", v.to_str(), vk);

v
}

@ -281,7 +281,7 @@ struct ir_maps {
some(var) => var,
none => {
self.tcx.sess.span_bug(
span, fmt!{"No variable registered for id %d", node_id});
span, fmt!("No variable registered for id %d", node_id));
}
}
}

@ -314,7 +314,7 @@ struct ir_maps {

fn add_last_use(expr_id: node_id, var: variable) {
let vk = self.var_kinds[*var];
debug!{"Node %d is a last use of variable %?", expr_id, vk};
debug!("Node %d is a last use of variable %?", expr_id, vk);
match vk {
vk_arg(id, name, by_move) |
vk_arg(id, name, by_copy) |

@ -332,7 +332,7 @@ struct ir_maps {
}
vk_arg(_, _, by_ref) | vk_arg(_, _, by_mutbl_ref) |
vk_arg(_, _, by_val) | vk_self | vk_field(_) | vk_implicit_ret => {
debug!{"--but it is not owned"};
debug!("--but it is not owned");
}
}
}

@ -340,17 +340,17 @@ struct ir_maps {

fn visit_fn(fk: visit::fn_kind, decl: fn_decl, body: blk,
sp: span, id: node_id, &&self: @ir_maps, v: vt<@ir_maps>) {
debug!{"visit_fn: id=%d", id};
debug!("visit_fn: id=%d", id);
let _i = util::common::indenter();

// swap in a new set of IR maps for this function body:
let fn_maps = @ir_maps(self.tcx, self.method_map,
self.last_use_map);

debug!{"creating fn_maps: %x", ptr::addr_of(*fn_maps) as uint};
debug!("creating fn_maps: %x", ptr::addr_of(*fn_maps) as uint);

for decl.inputs.each |arg| {
debug!{"adding argument %d", arg.id};
debug!("adding argument %d", arg.id);
let mode = ty::resolved_mode(self.tcx, arg.mode);
(*fn_maps).add_variable(vk_arg(arg.id, arg.ident, mode));
};
@ -405,7 +405,7 @@ fn add_class_fields(self: @ir_maps, did: def_id) {
fn visit_local(local: @local, &&self: @ir_maps, vt: vt<@ir_maps>) {
let def_map = self.tcx.def_map;
do pat_util::pat_bindings(def_map, local.node.pat) |_bm, p_id, sp, path| {
debug!{"adding local variable %d", p_id};
debug!("adding local variable %d", p_id);
let name = ast_util::path_to_ident(path);
(*self).add_live_node_for_node(p_id, lnk_vdef(sp));
(*self).add_variable(vk_local(p_id, name));

@ -418,7 +418,7 @@ fn visit_expr(expr: @expr, &&self: @ir_maps, vt: vt<@ir_maps>) {
// live nodes required for uses or definitions of variables:
expr_path(_) => {
let def = self.tcx.def_map.get(expr.id);
debug!{"expr %d: path that leads to %?", expr.id, def};
debug!("expr %d: path that leads to %?", expr.id, def);
if relevant_def(def).is_some() {
(*self).add_live_node_for_node(expr.id, lnk_expr(expr.span));
}

@ -541,8 +541,8 @@ struct liveness {
// code have to agree about which AST nodes are worth
// creating liveness nodes for.
self.tcx.sess.span_bug(
span, fmt!{"No live node registered for node %d",
node_id});
span, fmt!("No live node registered for node %d",
node_id));
}
}
}

@ -665,7 +665,7 @@ struct liveness {
wr.write_str(~"[ln(");
wr.write_uint(*ln);
wr.write_str(~") of kind ");
wr.write_str(fmt!{"%?", copy self.ir.lnks[*ln]});
wr.write_str(fmt!("%?", copy self.ir.lnks[*ln]));
wr.write_str(~" reads");
self.write_vars(wr, ln, |idx| self.users[idx].reader );
wr.write_str(~" writes");

@ -696,8 +696,8 @@ struct liveness {
self.indices2(ln, succ_ln, |idx, succ_idx| {
self.users[idx] = self.users[succ_idx]
});
debug!{"init_from_succ(ln=%s, succ=%s)",
self.ln_str(ln), self.ln_str(succ_ln)};
debug!("init_from_succ(ln=%s, succ=%s)",
self.ln_str(ln), self.ln_str(succ_ln));
}

fn merge_from_succ(ln: live_node, succ_ln: live_node,

@ -716,8 +716,8 @@ struct liveness {
}
}

debug!{"merge_from_succ(ln=%s, succ=%s, first_merge=%b, changed=%b)",
ln.to_str(), self.ln_str(succ_ln), first_merge, changed};
debug!("merge_from_succ(ln=%s, succ=%s, first_merge=%b, changed=%b)",
ln.to_str(), self.ln_str(succ_ln), first_merge, changed);
return changed;

fn copy_if_invalid(src: live_node, &dst: live_node) -> bool {

@ -739,8 +739,8 @@ struct liveness {
self.users[idx].reader = invalid_node();
self.users[idx].writer = invalid_node();

debug!{"%s defines %s (idx=%u): %s", writer.to_str(), var.to_str(),
idx, self.ln_str(writer)};
debug!("%s defines %s (idx=%u): %s", writer.to_str(), var.to_str(),
idx, self.ln_str(writer));
}

// Either read, write, or both depending on the acc bitset

@ -763,8 +763,8 @@ struct liveness {
self.users[idx].used = true;
}

debug!{"%s accesses[%x] %s: %s",
ln.to_str(), acc, var.to_str(), self.ln_str(ln)};
debug!("%s accesses[%x] %s: %s",
ln.to_str(), acc, var.to_str(), self.ln_str(ln));
}

// _______________________________________________________________________

@ -779,14 +779,14 @@ struct liveness {
});

// hack to skip the loop unless #debug is enabled:
debug!{"^^ liveness computation results for body %d (entry=%s)",
debug!("^^ liveness computation results for body %d (entry=%s)",
{
for uint::range(0u, self.ir.num_live_nodes) |ln_idx| {
#debug["%s", self.ln_str(live_node(ln_idx))];
}
body.node.id
},
entry_ln.to_str()};
entry_ln.to_str());

entry_ln
}

@ -1372,7 +1372,7 @@ fn check_local(local: @local, &&self: @liveness, vt: vt<@liveness>) {
// No initializer: the variable might be unused; if not, it
// should not be live at this point.

debug!{"check_local() with no initializer"};
debug!("check_local() with no initializer");
do (*self).pat_bindings(local.node.pat) |ln, var, sp| {
if !self.warn_about_unused(sp, ln, var) {
match (*self).live_on_exit(ln, var) {

@ -1492,8 +1492,8 @@ impl @liveness {
none => { /* ok */ }
some(lnk_exit) => {
self.tcx.sess.span_err(
sp, fmt!{"field `self.%s` is never initialized",
self.tcx.sess.str_of(nm)});
sp, fmt!("field `self.%s` is never initialized",
self.tcx.sess.str_of(nm)));
}
some(lnk) => {
self.report_illegal_read(

@ -1531,8 +1531,8 @@ impl @liveness {
}

fn check_move_from_var(span: span, ln: live_node, var: variable) {
debug!{"check_move_from_var(%s, %s)",
ln.to_str(), var.to_str()};
debug!("check_move_from_var(%s, %s)",
ln.to_str(), var.to_str());

match (*self).live_on_exit(ln, var) {
none => {}

@ -1548,8 +1548,8 @@ impl @liveness {
}

fn check_move_from_expr(expr: @expr, vt: vt<@liveness>) {
debug!{"check_move_from_expr(node %d: %s)",
expr.id, expr_to_str(expr, self.tcx.sess.intr())};
debug!("check_move_from_expr(node %d: %s)",
expr.id, expr_to_str(expr, self.tcx.sess.intr()));

if self.ir.method_map.contains_key(expr.id) {
// actually an rvalue, since this calls a method

@ -1645,7 +1645,7 @@ impl @liveness {
some(lnk) => {
self.tcx.sess.span_bug(
orig_span,
fmt!{"illegal writer: %?", lnk});
fmt!("illegal writer: %?", lnk));
}
none => {}
}

@ -1664,15 +1664,15 @@ impl @liveness {
vk_arg(_, name, _) => {
self.tcx.sess.span_err(
move_span,
fmt!{"illegal move from argument `%s`, which is not \
copy or move mode", self.tcx.sess.str_of(name)});
fmt!("illegal move from argument `%s`, which is not \
copy or move mode", self.tcx.sess.str_of(name)));
return;
}
vk_field(name) => {
self.tcx.sess.span_err(
move_span,
fmt!{"illegal move from field `%s`",
self.tcx.sess.str_of(name)});
fmt!("illegal move from field `%s`",
self.tcx.sess.str_of(name)));
return;
}
vk_self => {

@ -1685,8 +1685,8 @@ impl @liveness {
vk_local(*) | vk_implicit_ret => {
self.tcx.sess.span_bug(
move_span,
fmt!{"illegal reader (%?) for `%?`",
lnk, vk});
fmt!("illegal reader (%?) for `%?`",
lnk, vk));
}
}
}
@ -1713,18 +1713,18 @@ impl @liveness {
lnk_freevar(span) => {
self.tcx.sess.span_err(
span,
fmt!{"capture of %s: `%s`", msg, name});
fmt!("capture of %s: `%s`", msg, name));
}
lnk_expr(span) => {
self.tcx.sess.span_err(
span,
fmt!{"use of %s: `%s`", msg, name});
fmt!("use of %s: `%s`", msg, name));
}
lnk_exit |
lnk_vdef(_) => {
self.tcx.sess.span_bug(
chk_span,
fmt!{"illegal reader: %?", lnk});
fmt!("illegal reader: %?", lnk));
}
}
}

@ -1781,11 +1781,11 @@ impl @liveness {

if is_assigned {
self.tcx.sess.span_warn(
sp, fmt!{"variable `%s` is assigned to, \
but never used", name});
sp, fmt!("variable `%s` is assigned to, \
but never used", name));
} else {
self.tcx.sess.span_warn(
sp, fmt!{"unused variable: `%s`", name});
sp, fmt!("unused variable: `%s`", name));
}
}
return true;

@ -1798,7 +1798,7 @@ impl @liveness {
for self.should_warn(var).each |name| {
self.tcx.sess.span_warn(
sp,
fmt!{"value assigned to `%s` is never read", name});
fmt!("value assigned to `%s` is never read", name));
}
}
}

@ -149,8 +149,8 @@ fn deref_kind(tcx: ty::ctxt, t: ty::t) -> deref_kind {
some(k) => k,
none => {
tcx.sess.bug(
fmt!{"deref_cat() invoked on non-derefable type %s",
ty_to_str(tcx, t)});
fmt!("deref_cat() invoked on non-derefable type %s",
ty_to_str(tcx, t)));
}
}
}

@ -262,8 +262,8 @@ impl &mem_categorization_ctxt {
}

fn cat_expr(expr: @ast::expr) -> cmt {
debug!{"cat_expr: id=%d expr=%s",
expr.id, pprust::expr_to_str(expr, self.tcx.sess.intr())};
debug!("cat_expr: id=%d expr=%s",
expr.id, pprust::expr_to_str(expr, self.tcx.sess.intr()));

let tcx = self.tcx;
let expr_ty = tcx.ty(expr);

@ -279,8 +279,8 @@ impl &mem_categorization_ctxt {
none => {
tcx.sess.span_bug(
e_base.span,
fmt!{"Explicit deref of non-derefable type `%s`",
ty_to_str(tcx, tcx.ty(e_base))});
fmt!("Explicit deref of non-derefable type `%s`",
ty_to_str(tcx, tcx.ty(e_base))));
}
}
}

@ -468,9 +468,9 @@ impl &mem_categorization_ctxt {
none => {
self.tcx.sess.span_bug(
node.span(),
fmt!{"Cannot find field `%s` in type `%s`",
fmt!("Cannot find field `%s` in type `%s`",
self.tcx.sess.str_of(f_name),
ty_to_str(self.tcx, base_cmt.ty)});
ty_to_str(self.tcx, base_cmt.ty)));
}
};
let m = self.inherited_mutability(base_cmt.mutbl, f_mutbl);

@ -533,8 +533,8 @@ impl &mem_categorization_ctxt {
none => {
self.tcx.sess.span_bug(
expr.span,
fmt!{"Explicit index of non-index type `%s`",
ty_to_str(self.tcx, base_cmt.ty)});
fmt!("Explicit index of non-index type `%s`",
ty_to_str(self.tcx, base_cmt.ty)));
}
};

@ -655,9 +655,9 @@ impl &mem_categorization_ctxt {

let _i = indenter();
let tcx = self.tcx;
debug!{"cat_pattern: id=%d pat=%s cmt=%s",
debug!("cat_pattern: id=%d pat=%s cmt=%s",
pat.id, pprust::pat_to_str(pat, tcx.sess.intr()),
self.cmt_to_repr(cmt)};
self.cmt_to_repr(cmt));

match pat.node {
ast::pat_wild => {

@ -672,7 +672,7 @@ impl &mem_categorization_ctxt {
let enum_did = match self.tcx.def_map.find(pat.id) {
some(ast::def_variant(enum_did, _)) => enum_did,
e => tcx.sess.span_bug(pat.span,
fmt!{"resolved to %?, not variant", e})
fmt!("resolved to %?, not variant", e))
};

for subpats.each |subpat| {

@ -737,15 +737,15 @@ impl &mem_categorization_ctxt {
cat_special(sk_heap_upvar) => ~"heap-upvar",
cat_stack_upvar(_) => ~"stack-upvar",
cat_rvalue => ~"rvalue",
cat_local(node_id) => fmt!{"local(%d)", node_id},
cat_binding(node_id) => fmt!{"binding(%d)", node_id},
cat_arg(node_id) => fmt!{"arg(%d)", node_id},
cat_local(node_id) => fmt!("local(%d)", node_id),
cat_binding(node_id) => fmt!("binding(%d)", node_id),
cat_arg(node_id) => fmt!("arg(%d)", node_id),
cat_deref(cmt, derefs, ptr) => {
fmt!{"%s->(%s, %u)", self.cat_to_repr(cmt.cat),
self.ptr_sigil(ptr), derefs}
fmt!("%s->(%s, %u)", self.cat_to_repr(cmt.cat),
self.ptr_sigil(ptr), derefs)
}
cat_comp(cmt, comp) => {
fmt!{"%s.%s", self.cat_to_repr(cmt.cat), self.comp_to_repr(comp)}
fmt!("%s.%s", self.cat_to_repr(cmt.cat), self.comp_to_repr(comp))
}
cat_discr(cmt, _) => self.cat_to_repr(cmt.cat)
}

@ -780,29 +780,29 @@ impl &mem_categorization_ctxt {
fn lp_to_str(lp: @loan_path) -> ~str {
match *lp {
lp_local(node_id) => {
fmt!{"local(%d)", node_id}
fmt!("local(%d)", node_id)
}
lp_arg(node_id) => {
fmt!{"arg(%d)", node_id}
fmt!("arg(%d)", node_id)
}
lp_deref(lp, ptr) => {
fmt!{"%s->(%s)", self.lp_to_str(lp),
self.ptr_sigil(ptr)}
fmt!("%s->(%s)", self.lp_to_str(lp),
self.ptr_sigil(ptr))
}
lp_comp(lp, comp) => {
fmt!{"%s.%s", self.lp_to_str(lp),
self.comp_to_repr(comp)}
fmt!("%s.%s", self.lp_to_str(lp),
self.comp_to_repr(comp))
}
}
}

fn cmt_to_repr(cmt: cmt) -> ~str {
fmt!{"{%s id:%d m:%s lp:%s ty:%s}",
fmt!("{%s id:%d m:%s lp:%s ty:%s}",
self.cat_to_repr(cmt.cat),
cmt.id,
self.mut_to_str(cmt.mutbl),
cmt.lp.map_default(~"none", |p| self.lp_to_str(p) ),
ty_to_str(self.tcx, cmt.ty)}
ty_to_str(self.tcx, cmt.ty))
}

fn cmt_to_str(cmt: cmt) -> ~str {

@ -818,8 +818,8 @@ impl &mem_categorization_ctxt {
cat_local(_) => mut_str + ~" local variable",
cat_binding(_) => ~"pattern binding",
cat_arg(_) => ~"argument",
cat_deref(_, _, pk) => fmt!{"dereference of %s %s pointer",
mut_str, self.ptr_sigil(pk)},
cat_deref(_, _, pk) => fmt!("dereference of %s %s pointer",
mut_str, self.ptr_sigil(pk)),
cat_stack_upvar(_) => {
~"captured outer " + mut_str + ~" variable in a stack closure"
}
@ -196,7 +196,7 @@ fn parent_id(cx: ctxt, span: span) -> ast::node_id {
/// Records the current parent (if any) as the parent of `child_id`.
fn record_parent(cx: ctxt, child_id: ast::node_id) {
for cx.parent.each |parent_id| {
debug!{"parent of node %d is node %d", child_id, parent_id};
debug!("parent of node %d is node %d", child_id, parent_id);
cx.region_map.insert(child_id, parent_id);
}
}

@ -255,13 +255,13 @@ fn resolve_expr(expr: @ast::expr, cx: ctxt, visitor: visit::vt<ctxt>) {
let mut new_cx = cx;
match expr.node {
ast::expr_call(*) => {
debug!{"node %d: %s", expr.id, pprust::expr_to_str(expr,
cx.sess.intr())};
debug!("node %d: %s", expr.id, pprust::expr_to_str(expr,
cx.sess.intr()));
new_cx.parent = some(expr.id);
}
ast::expr_match(subexpr, _, _) => {
debug!{"node %d: %s", expr.id, pprust::expr_to_str(expr,
cx.sess.intr())};
debug!("node %d: %s", expr.id, pprust::expr_to_str(expr,
cx.sess.intr()));
new_cx.parent = some(expr.id);
}
ast::expr_fn(_, _, _, cap_clause) |

@ -314,9 +314,9 @@ fn resolve_fn(fk: visit::fn_kind, decl: ast::fn_decl, body: ast::blk,
}
};

debug!{"visiting fn with body %d. cx.parent: %? \
debug!("visiting fn with body %d. cx.parent: %? \
fn_cx.parent: %?",
body.node.id, cx.parent, fn_cx.parent};
body.node.id, cx.parent, fn_cx.parent);

for decl.inputs.each |input| {
cx.region_map.insert(input.id, body.node.id);

@ -441,10 +441,10 @@ impl determine_rp_ctxt {
some(v) => join_variance(v, variance)
};

debug!["add_rp() variance for %s: %? == %? ^ %?",
debug!("add_rp() variance for %s: %? == %? ^ %?",
ast_map::node_id_to_str(self.ast_map, id,
self.sess.parse_sess.interner),
joined_variance, old_variance, variance];
joined_variance, old_variance, variance);

if some(joined_variance) != old_variance {
self.region_paramd_items.insert(id, joined_variance);

@ -458,13 +458,13 @@ impl determine_rp_ctxt {
/// contains a value of type `from`, so if `from` is
/// region-parameterized, so is the current item.
fn add_dep(from: ast::node_id) {
debug!["add dependency from %d -> %d (%s -> %s) with variance %?",
debug!("add dependency from %d -> %d (%s -> %s) with variance %?",
from, self.item_id,
ast_map::node_id_to_str(self.ast_map, from,
self.sess.parse_sess.interner),
ast_map::node_id_to_str(self.ast_map, self.item_id,
self.sess.parse_sess.interner),
copy self.ambient_variance];
copy self.ambient_variance);
let vec = match self.dep_map.find(from) {
some(vec) => vec,
none => {

@ -525,7 +525,7 @@ impl determine_rp_ctxt {
let old_anon_implies_rp = self.anon_implies_rp;
self.item_id = item_id;
self.anon_implies_rp = anon_implies_rp;
debug!{"with_item_id(%d, %b)", item_id, anon_implies_rp};
debug!("with_item_id(%d, %b)", item_id, anon_implies_rp);
let _i = util::common::indenter();
f();
self.item_id = old_item_id;

@ -590,8 +590,8 @@ fn determine_rp_in_ty(ty: @ast::ty,
// locations)
match ty.node {
ast::ty_rptr(r, _) => {
debug!["referenced rptr type %s",
pprust::ty_to_str(ty, cx.sess.intr())];
debug!("referenced rptr type %s",
pprust::ty_to_str(ty, cx.sess.intr()));

if cx.region_is_relevant(r) {
cx.add_rp(cx.item_id, cx.add_variance(rv_contravariant))

@ -623,8 +623,8 @@ fn determine_rp_in_ty(ty: @ast::ty,
match csearch::get_region_param(cstore, did) {
none => {}
some(variance) => {
debug!["reference to external, rp'd type %s",
pprust::ty_to_str(ty, cx.sess.intr())];
debug!("reference to external, rp'd type %s",
pprust::ty_to_str(ty, cx.sess.intr()));
cx.add_rp(cx.item_id, cx.add_variance(variance))
}
}

@ -743,7 +743,7 @@ fn determine_rp_in_crate(sess: session,
while cx.worklist.len() != 0 {
let c_id = cx.worklist.pop();
let c_variance = cx.region_paramd_items.get(c_id);
debug!["popped %d from worklist", c_id];
debug!("popped %d from worklist", c_id);
match cx.dep_map.find(c_id) {
none => {}
some(deps) => {

@ -1243,9 +1243,9 @@ struct Resolver {
if self.block_needs_anonymous_module(block) {
let block_id = block.node.id;

debug!{"(building reduced graph for block) creating a new \
debug!("(building reduced graph for block) creating a new \
anonymous module for block %d",
block_id};
block_id);

let parent_module = self.get_module_from_parent(parent);
let new_module = @Module(BlockParentLink(parent_module, block_id),

@ -1414,8 +1414,8 @@ struct Resolver {
// Define or reuse the module node.
match child_name_bindings.module_def {
NoModuleDef => {
debug!{"(building reduced graph for external crate) \
autovivifying %s", ident_str};
debug!("(building reduced graph for external crate) \
autovivifying %s", ident_str);
let parent_link = self.get_parent_link(new_parent,
ident);
(*child_name_bindings).define_module(parent_link,

@ -1445,12 +1445,12 @@ struct Resolver {
// written, we can't process this impl now. We'll get it
// later.

debug!{"(building reduced graph for external crate) \
ignoring impl %s", final_ident_str};
debug!("(building reduced graph for external crate) \
ignoring impl %s", final_ident_str);
}
dl_field => {
debug!{"(building reduced graph for external crate) \
ignoring field %s", final_ident_str};
debug!("(building reduced graph for external crate) \
ignoring field %s", final_ident_str);
}
}
}

@ -1508,14 +1508,14 @@ struct Resolver {
let mut i = 0u;
let mut prev_unresolved_imports = 0u;
loop {
debug!{"(resolving imports) iteration %u, %u imports left",
i, self.unresolved_imports};
debug!("(resolving imports) iteration %u, %u imports left",
i, self.unresolved_imports);

let module_root = (*self.graph_root).get_module();
self.resolve_imports_for_module_subtree(module_root);

if self.unresolved_imports == 0u {
debug!{"(resolving imports) success"};
debug!("(resolving imports) success");
break;
}

@ -1535,8 +1535,8 @@ struct Resolver {
* submodules.
*/
fn resolve_imports_for_module_subtree(module_: @Module) {
debug!{"(resolving imports for module subtree) resolving %s",
self.module_to_str(module_)};
debug!("(resolving imports for module subtree) resolving %s",
self.module_to_str(module_));
self.resolve_imports_for_module(module_);

for module_.children.each |_name, child_node| {

@ -1558,9 +1558,9 @@ struct Resolver {
/// Attempts to resolve imports for the given module only.
fn resolve_imports_for_module(module_: @Module) {
if (*module_).all_imports_resolved() {
debug!{"(resolving imports for module) all imports resolved for \
debug!("(resolving imports for module) all imports resolved for \
%s",
self.module_to_str(module_)};
self.module_to_str(module_));
return;
}
@ -1616,10 +1616,10 @@ struct Resolver {
let mut resolution_result;
let module_path = import_directive.module_path;

debug!{"(resolving import for module) resolving import `%s::...` in \
debug!("(resolving import for module) resolving import `%s::...` in \
`%s`",
self.atoms_to_str((*module_path).get()),
self.module_to_str(module_)};
self.module_to_str(module_));

// One-level renaming imports of the form `import foo = bar;` are
// handled specially.

@ -1700,16 +1700,16 @@ struct Resolver {
target: Atom, source: Atom)
-> ResolveResult<()> {

debug!{"(resolving single import) resolving `%s` = `%s::%s` from \
debug!("(resolving single import) resolving `%s` = `%s::%s` from \
`%s`",
self.session.str_of(target),
self.module_to_str(containing_module),
self.session.str_of(source),
self.module_to_str(module_)};
self.module_to_str(module_));

if !self.name_is_exported(containing_module, source) {
debug!{"(resolving single import) name `%s` is unexported",
self.session.str_of(source)};
debug!("(resolving single import) name `%s` is unexported",
self.session.str_of(source));
return Failed;
}

@ -1756,8 +1756,8 @@ struct Resolver {
// able to resolve this import.

if containing_module.glob_count > 0u {
debug!{"(resolving single import) unresolved glob; \
bailing out"};
debug!("(resolving single import) unresolved glob; \
bailing out");
return Indeterminate;
}

@ -1820,8 +1820,8 @@ struct Resolver {
}
some(_) => {
// The import is unresolved. Bail out.
debug!{"(resolving single import) unresolved import; \
bailing out"};
debug!("(resolving single import) unresolved import; \
bailing out");
return Indeterminate;
}
}

@ -1834,13 +1834,13 @@ struct Resolver {

match module_result {
BoundResult(target_module, name_bindings) => {
debug!{"(resolving single import) found module binding"};
debug!("(resolving single import) found module binding");
import_resolution.module_target =
some(Target(target_module, name_bindings));
}
UnboundResult => {
debug!{"(resolving single import) didn't find module \
binding"};
debug!("(resolving single import) didn't find module \
binding");
}
UnknownResult => {
fail ~"module result should be known at this point";

@ -1880,7 +1880,7 @@ struct Resolver {
assert import_resolution.outstanding_references >= 1u;
import_resolution.outstanding_references -= 1u;

debug!{"(resolving single import) successfully resolved import"};
debug!("(resolving single import) successfully resolved import");
return Success(());
}

@ -1902,8 +1902,8 @@ struct Resolver {
// (including globs).

if !(*containing_module).all_imports_resolved() {
debug!{"(resolving glob import) target module has unresolved \
imports; bailing out"};
debug!("(resolving glob import) target module has unresolved \
imports; bailing out");
return Indeterminate;
}

@ -1914,15 +1914,15 @@ struct Resolver {
|atom, target_import_resolution| {

if !self.name_is_exported(containing_module, atom) {
debug!{"(resolving glob import) name `%s` is unexported",
self.session.str_of(atom)};
debug!("(resolving glob import) name `%s` is unexported",
self.session.str_of(atom));
again;
}

debug!{"(resolving glob import) writing module resolution \
debug!("(resolving glob import) writing module resolution \
%? into `%s`",
is_none(target_import_resolution.module_target),
self.module_to_str(module_)};
self.module_to_str(module_));

// Here we merge two import resolutions.
match module_.import_resolutions.find(atom) {

@ -1978,8 +1978,8 @@ struct Resolver {
// Add all children from the containing module.
for containing_module.children.each |atom, name_bindings| {
if !self.name_is_exported(containing_module, atom) {
debug!{"(resolving glob import) name `%s` is unexported",
self.session.str_of(atom)};
debug!("(resolving glob import) name `%s` is unexported",
self.session.str_of(atom));
again;
}

@ -1997,31 +1997,31 @@ struct Resolver {
}

debug!{"(resolving glob import) writing resolution `%s` in `%s` \
debug!("(resolving glob import) writing resolution `%s` in `%s` \
to `%s`",
self.session.str_of(atom),
self.module_to_str(containing_module),
self.module_to_str(module_)};
self.module_to_str(module_));

// Merge the child item into the import resolution.
if (*name_bindings).defined_in_namespace(ModuleNS) {
debug!{"(resolving glob import) ... for module target"};
debug!("(resolving glob import) ... for module target");
dest_import_resolution.module_target =
some(Target(containing_module, name_bindings));
}
if (*name_bindings).defined_in_namespace(ValueNS) {
debug!{"(resolving glob import) ... for value target"};
debug!("(resolving glob import) ... for value target");
dest_import_resolution.value_target =
some(Target(containing_module, name_bindings));
}
if (*name_bindings).defined_in_namespace(TypeNS) {
debug!{"(resolving glob import) ... for type target"};
debug!("(resolving glob import) ... for type target");
dest_import_resolution.type_target =
some(Target(containing_module, name_bindings));
}
}

debug!{"(resolving glob import) successfully resolved import"};
debug!("(resolving glob import) successfully resolved import");
return Success(());
}
@ -2050,9 +2050,9 @@ struct Resolver {
return Failed;
}
Indeterminate => {
debug!{"(resolving module path for import) module \
debug!("(resolving module path for import) module \
resolution is indeterminate: %s",
self.session.str_of(name)};
self.session.str_of(name));
return Indeterminate;
}
Success(target) => {

@ -2060,9 +2060,9 @@ struct Resolver {
NoModuleDef => {
// Not a module.
self.session.span_err(span,
fmt!{"not a module: %s",
fmt!("not a module: %s",
self.session.
str_of(name)});
str_of(name)));
return Failed;
}
ModuleDef(module_) => {

@ -2091,10 +2091,10 @@ struct Resolver {
let module_path_len = (*module_path).len();
assert module_path_len > 0u;

debug!{"(resolving module path for import) processing `%s` rooted at \
debug!("(resolving module path for import) processing `%s` rooted at \
`%s`",
self.atoms_to_str((*module_path).get()),
self.module_to_str(module_)};
self.module_to_str(module_));

// The first element of the module path must be in the current scope
// chain.

@ -2107,8 +2107,8 @@ struct Resolver {
return Failed;
}
Indeterminate => {
debug!{"(resolving module path for import) indeterminate; \
bailing"};
debug!("(resolving module path for import) indeterminate; \
bailing");
return Indeterminate;
}
Success(resulting_module) => {

@ -2128,11 +2128,11 @@ struct Resolver {
namespace: Namespace)
-> ResolveResult<Target> {

debug!{"(resolving item in lexical scope) resolving `%s` in \
debug!("(resolving item in lexical scope) resolving `%s` in \
namespace %? in `%s`",
self.session.str_of(name),
namespace,
self.module_to_str(module_)};
self.module_to_str(module_));

// The current module node is handled specially. First, check for
// its immediate children.

@ -2159,9 +2159,9 @@ struct Resolver {
match (*import_resolution).target_for_namespace(namespace) {
none => {
// Not found; continue.
debug!{"(resolving item in lexical scope) found \
debug!("(resolving item in lexical scope) found \
import resolution, but not in namespace %?",
namespace};
namespace);
}
some(target) => {
import_resolution.used = true;

@ -2178,8 +2178,8 @@ struct Resolver {
match search_module.parent_link {
NoParentLink => {
// No more parents. This module was unresolved.
debug!{"(resolving item in lexical scope) unresolved \
module"};
debug!("(resolving item in lexical scope) unresolved \
module");
return Failed;
}
ModuleParentLink(parent_module_node, _) |

@ -2198,8 +2198,8 @@ struct Resolver {
// We couldn't see through the higher scope because of an
// unresolved import higher up. Bail.

debug!{"(resolving item in lexical scope) indeterminate \
higher scope; bailing"};
debug!("(resolving item in lexical scope) indeterminate \
higher scope; bailing");
return Indeterminate;
}
Success(target) => {

@ -2217,8 +2217,8 @@ struct Resolver {
Success(target) => {
match target.bindings.module_def {
NoModuleDef => {
error!{"!!! (resolving module in lexical scope) module
wasn't actually a module!"};
error!("!!! (resolving module in lexical scope) module
wasn't actually a module!");
return Failed;
}
ModuleDef(module_) => {

@ -2227,13 +2227,13 @@ struct Resolver {
}
}
Indeterminate => {
debug!{"(resolving module in lexical scope) indeterminate; \
bailing"};
debug!("(resolving module in lexical scope) indeterminate; \
bailing");
return Indeterminate;
}
Failed => {
debug!{"(resolving module in lexical scope) failed to \
resolve"};
debug!("(resolving module in lexical scope) failed to \
resolve");
return Failed;
}
}

@ -2255,13 +2255,13 @@ struct Resolver {
xray: XrayFlag)
-> ResolveResult<Target> {

debug!{"(resolving name in module) resolving `%s` in `%s`",
debug!("(resolving name in module) resolving `%s` in `%s`",
self.session.str_of(name),
self.module_to_str(module_)};
self.module_to_str(module_));

if xray == NoXray && !self.name_is_exported(module_, name) {
debug!{"(resolving name in module) name `%s` is unexported",
self.session.str_of(name)};
debug!("(resolving name in module) name `%s` is unexported",
self.session.str_of(name));
return Failed;
}

@ -2270,7 +2270,7 @@ struct Resolver {
some(name_bindings)
if (*name_bindings).defined_in_namespace(namespace) => {

debug!{"(resolving name in module) found node as child"};
debug!("(resolving name in module) found node as child");
return Success(Target(module_, name_bindings));
}
some(_) | none => {

@ -2282,7 +2282,7 @@ struct Resolver {
// we bail out; we don't know its imports yet.

if module_.glob_count > 0u {
debug!{"(resolving name in module) module has glob; bailing out"};
debug!("(resolving name in module) module has glob; bailing out");
return Indeterminate;
}

@ -2290,20 +2290,20 @@ struct Resolver {
match module_.import_resolutions.find(name) {
some(import_resolution) => {
if import_resolution.outstanding_references != 0u {
debug!{"(resolving name in module) import unresolved; \
bailing out"};
debug!("(resolving name in module) import unresolved; \
bailing out");
return Indeterminate;
}

match (*import_resolution).target_for_namespace(namespace) {
none => {
debug!{"(resolving name in module) name found, but \
debug!("(resolving name in module) name found, but \
not in namespace %?",
namespace};
namespace);
}
some(target) => {
debug!{"(resolving name in module) resolved to \
import"};
debug!("(resolving name in module) resolved to \
import");
import_resolution.used = true;
return Success(copy target);
}

@ -2315,8 +2315,8 @@ struct Resolver {
}

// We're out of luck.
debug!{"(resolving name in module) failed to resolve %s",
self.session.str_of(name)};
debug!("(resolving name in module) failed to resolve %s",
self.session.str_of(name));
return Failed;
}
@ -2341,81 +2341,81 @@ struct Resolver {
}
}

debug!{"(resolving one-level naming result) resolving import `%s` = \
debug!("(resolving one-level naming result) resolving import `%s` = \
`%s` in `%s`",
self.session.str_of(target_name),
self.session.str_of(source_name),
self.module_to_str(module_)};
self.module_to_str(module_));

// Find the matching items in the lexical scope chain for every
// namespace. If any of them come back indeterminate, this entire
// import is indeterminate.

let mut module_result;
debug!{"(resolving one-level naming result) searching for module"};
debug!("(resolving one-level naming result) searching for module");
match self.resolve_item_in_lexical_scope(module_,
source_name,
ModuleNS) {

Failed => {
debug!{"(resolving one-level renaming import) didn't find \
module result"};
debug!("(resolving one-level renaming import) didn't find \
module result");
module_result = none;
}
Indeterminate => {
debug!{"(resolving one-level renaming import) module result \
is indeterminate; bailing"};
debug!("(resolving one-level renaming import) module result \
is indeterminate; bailing");
return Indeterminate;
}
Success(name_bindings) => {
debug!{"(resolving one-level renaming import) module result \
found"};
debug!("(resolving one-level renaming import) module result \
found");
module_result = some(copy name_bindings);
}
}

let mut value_result;
debug!{"(resolving one-level naming result) searching for value"};
debug!("(resolving one-level naming result) searching for value");
match self.resolve_item_in_lexical_scope(module_,
source_name,
ValueNS) {

Failed => {
debug!{"(resolving one-level renaming import) didn't find \
value result"};
debug!("(resolving one-level renaming import) didn't find \
value result");
value_result = none;
}
Indeterminate => {
debug!{"(resolving one-level renaming import) value result \
is indeterminate; bailing"};
debug!("(resolving one-level renaming import) value result \
is indeterminate; bailing");
return Indeterminate;
}
Success(name_bindings) => {
debug!{"(resolving one-level renaming import) value result \
found"};
debug!("(resolving one-level renaming import) value result \
found");
value_result = some(copy name_bindings);
}
}

let mut type_result;
debug!{"(resolving one-level naming result) searching for type"};
debug!("(resolving one-level naming result) searching for type");
match self.resolve_item_in_lexical_scope(module_,
source_name,
TypeNS) {

Failed => {
debug!{"(resolving one-level renaming import) didn't find \
type result"};
debug!("(resolving one-level renaming import) didn't find \
type result");
type_result = none;
}
Indeterminate => {
debug!{"(resolving one-level renaming import) type result is \
indeterminate; bailing"};
debug!("(resolving one-level renaming import) type result is \
indeterminate; bailing");
return Indeterminate;
}
Success(name_bindings) => {
debug!{"(resolving one-level renaming import) type result \
found"};
debug!("(resolving one-level renaming import) type result \
found");
|
||||
type_result = some(copy name_bindings);
|
||||
}
|
||||
}
|
||||
@ -2455,11 +2455,11 @@ struct Resolver {
|
||||
import resolution name by now";
|
||||
}
|
||||
some(import_resolution) => {
|
||||
debug!{"(resolving one-level renaming import) writing module \
|
||||
debug!("(resolving one-level renaming import) writing module \
|
||||
result %? for `%s` into `%s`",
|
||||
is_none(module_result),
|
||||
self.session.str_of(target_name),
|
||||
self.module_to_str(module_)};
|
||||
self.module_to_str(module_));
|
||||
|
||||
import_resolution.module_target = module_result;
|
||||
import_resolution.value_target = value_result;
|
||||
@ -2470,7 +2470,7 @@ struct Resolver {
|
||||
}
|
||||
}
|
||||
|
||||
debug!{"(resolving one-level renaming import) successfully resolved"};
|
||||
debug!("(resolving one-level renaming import) successfully resolved");
|
||||
return Success(());
|
||||
}
|
||||
|
||||
@ -2526,9 +2526,9 @@ struct Resolver {
|
||||
}
|
||||
some(_) => {
|
||||
// Bail out.
|
||||
debug!{"(recording exports for module subtree) not recording \
|
||||
debug!("(recording exports for module subtree) not recording \
|
||||
exports for `%s`",
|
||||
self.module_to_str(module_)};
|
||||
self.module_to_str(module_));
|
||||
return;
|
||||
}
|
||||
}
|
||||
@ -2638,17 +2638,17 @@ struct Resolver {
|
||||
some(name) => {
|
||||
match orig_module.children.find(name) {
|
||||
none => {
|
||||
debug!{"!!! (with scope) didn't find `%s` in `%s`",
|
||||
debug!("!!! (with scope) didn't find `%s` in `%s`",
|
||||
self.session.str_of(name),
|
||||
self.module_to_str(orig_module)};
|
||||
self.module_to_str(orig_module));
|
||||
}
|
||||
some(name_bindings) => {
|
||||
match (*name_bindings).get_module_if_available() {
|
||||
none => {
|
||||
debug!{"!!! (with scope) didn't find module \
|
||||
debug!("!!! (with scope) didn't find module \
|
||||
for `%s` in `%s`",
|
||||
self.session.str_of(name),
|
||||
self.module_to_str(orig_module)};
|
||||
self.module_to_str(orig_module));
|
||||
}
|
||||
some(module_) => {
|
||||
self.current_module = module_;
|
||||
@ -2794,7 +2794,7 @@ struct Resolver {
|
||||
|
||||
// XXX: This shouldn't be unsafe!
|
||||
fn resolve_crate() unsafe {
|
||||
debug!{"(resolving crate) starting"};
|
||||
debug!("(resolving crate) starting");
|
||||
|
||||
// XXX: This is awful!
|
||||
let this = ptr::addr_of(self);
|
||||
@ -2816,8 +2816,8 @@ struct Resolver {
|
||||
}
|
||||
|
||||
fn resolve_item(item: @item, visitor: ResolveVisitor) {
|
||||
debug!{"(resolving item) resolving %s",
|
||||
self.session.str_of(item.ident)};
|
||||
debug!("(resolving item) resolving %s",
|
||||
self.session.str_of(item.ident));
|
||||
|
||||
// Items with the !resolve_unexported attribute are X-ray contexts.
|
||||
// This is used to allow the test runner to run unexported tests.
|
||||
@ -2874,8 +2874,8 @@ struct Resolver {
|
||||
// definition of the trait into the definition
|
||||
// map.
|
||||
|
||||
debug!{"(resolving trait) found trait def: \
|
||||
%?", def};
|
||||
debug!("(resolving trait) found trait def: \
|
||||
%?", def);
|
||||
|
||||
self.record_def(trt.ref_id, def);
|
||||
}
|
||||
@ -3010,8 +3010,8 @@ struct Resolver {
|
||||
|
||||
for (*type_parameters).eachi |index, type_parameter| {
|
||||
let name = type_parameter.ident;
|
||||
debug!{"with_type_parameter_rib: %d %d", node_id,
|
||||
type_parameter.id};
|
||||
debug!("with_type_parameter_rib: %d %d", node_id,
|
||||
type_parameter.id);
|
||||
let def_like = dl_def(def_ty_param
|
||||
(local_def(type_parameter.id),
|
||||
index + initial_index));
|
||||
@ -3126,8 +3126,8 @@ struct Resolver {
|
||||
|
||||
self.resolve_type(argument.ty, visitor);
|
||||
|
||||
debug!{"(resolving function) recorded argument `%s`",
|
||||
self.session.str_of(name)};
|
||||
debug!("(resolving function) recorded argument `%s`",
|
||||
self.session.str_of(name));
|
||||
}
|
||||
|
||||
self.resolve_type(declaration.output, visitor);
|
||||
@ -3137,7 +3137,7 @@ struct Resolver {
|
||||
// Resolve the function body.
|
||||
self.resolve_block(block, visitor);
|
||||
|
||||
debug!{"(resolving function) leaving function"};
|
||||
debug!("(resolving function) leaving function");
|
||||
}
|
||||
|
||||
(*self.label_ribs).pop();
|
||||
@ -3193,7 +3193,7 @@ struct Resolver {
|
||||
// definition of the trait into the definition
|
||||
// map.
|
||||
|
||||
debug!{"(resolving class) found trait def: %?", def};
|
||||
debug!("(resolving class) found trait def: %?", def);
|
||||
|
||||
self.record_def(trt.ref_id, def);
|
||||
|
||||
@ -3359,7 +3359,7 @@ struct Resolver {
|
||||
visitor: ResolveVisitor) {
|
||||
|
||||
// Write the implementations in scope into the module metadata.
|
||||
debug!{"(resolving module) resolving module ID %d", id};
|
||||
debug!("(resolving module) resolving module ID %d", id);
|
||||
visit_mod(module_, span, id, (), visitor);
|
||||
}
|
||||
|
||||
@ -3411,17 +3411,17 @@ struct Resolver {
|
||||
none => {
|
||||
self.session.span_err(
|
||||
p.span,
|
||||
fmt!{"variable `%s` from pattern #1 is \
|
||||
fmt!("variable `%s` from pattern #1 is \
|
||||
not bound in pattern #%u",
|
||||
self.session.str_of(key), i + 1});
|
||||
self.session.str_of(key), i + 1));
|
||||
}
|
||||
some(binding_i) => {
|
||||
if binding_0.binding_mode != binding_i.binding_mode {
|
||||
self.session.span_err(
|
||||
binding_i.span,
|
||||
fmt!{"variable `%s` is bound with different \
|
||||
fmt!("variable `%s` is bound with different \
|
||||
mode in pattern #%u than in pattern #1",
|
||||
self.session.str_of(key), i + 1});
|
||||
self.session.str_of(key), i + 1));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -3431,9 +3431,9 @@ struct Resolver {
|
||||
if !map_0.contains_key(key) {
|
||||
self.session.span_err(
|
||||
binding.span,
|
||||
fmt!{"variable `%s` from pattern #%u is \
|
||||
fmt!("variable `%s` from pattern #%u is \
|
||||
not bound in pattern #1",
|
||||
self.session.str_of(key), i + 1});
|
||||
self.session.str_of(key), i + 1));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -3459,7 +3459,7 @@ struct Resolver {
|
||||
}
|
||||
|
||||
fn resolve_block(block: blk, visitor: ResolveVisitor) {
|
||||
debug!{"(resolving block) entering block"};
|
||||
debug!("(resolving block) entering block");
|
||||
(*self.value_ribs).push(@Rib(NormalRibKind));
|
||||
|
||||
// Move down in the graph, if there's an anonymous module rooted here.
|
||||
@ -3467,8 +3467,8 @@ struct Resolver {
|
||||
match self.current_module.anonymous_children.find(block.node.id) {
|
||||
none => { /* Nothing to do. */ }
|
||||
some(anonymous_module) => {
|
||||
debug!{"(resolving block) found anonymous module, moving \
|
||||
down"};
|
||||
debug!("(resolving block) found anonymous module, moving \
|
||||
down");
|
||||
self.current_module = anonymous_module;
|
||||
}
|
||||
}
|
||||
@ -3480,7 +3480,7 @@ struct Resolver {
|
||||
self.current_module = orig_module;
|
||||
|
||||
(*self.value_ribs).pop();
|
||||
debug!{"(resolving block) leaving block"};
|
||||
debug!("(resolving block) leaving block");
|
||||
}
|
||||
|
||||
fn resolve_type(ty: @ty, visitor: ResolveVisitor) {
|
||||
@ -3495,8 +3495,8 @@ struct Resolver {
|
||||
let mut result_def;
|
||||
match self.resolve_path(path, TypeNS, true, visitor) {
|
||||
some(def) => {
|
||||
debug!{"(resolving type) resolved `%s` to type",
|
||||
self.session.str_of(path.idents.last())};
|
||||
debug!("(resolving type) resolved `%s` to type",
|
||||
self.session.str_of(path.idents.last()));
|
||||
result_def = some(def);
|
||||
}
|
||||
none => {
|
||||
@ -3532,19 +3532,19 @@ struct Resolver {
|
||||
match copy result_def {
|
||||
some(def) => {
|
||||
// Write the result into the def map.
|
||||
debug!{"(resolving type) writing resolution for `%s` \
|
||||
debug!("(resolving type) writing resolution for `%s` \
|
||||
(id %d)",
|
||||
connect(path.idents.map(
|
||||
|x| self.session.str_of(x)), ~"::"),
|
||||
path_id};
|
||||
path_id);
|
||||
self.record_def(path_id, def);
|
||||
}
|
||||
none => {
|
||||
self.session.span_err
|
||||
(ty.span, fmt!{"use of undeclared type name `%s`",
|
||||
(ty.span, fmt!("use of undeclared type name `%s`",
|
||||
connect(path.idents.map(
|
||||
|x| self.session.str_of(x)),
|
||||
~"::")});
|
||||
~"::")));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -3582,19 +3582,19 @@ struct Resolver {
|
||||
|
||||
match self.resolve_enum_variant_or_const(atom) {
|
||||
FoundEnumVariant(def) if mode == RefutableMode => {
|
||||
debug!{"(resolving pattern) resolving `%s` to \
|
||||
debug!("(resolving pattern) resolving `%s` to \
|
||||
enum variant",
|
||||
self.session.str_of(atom)};
|
||||
self.session.str_of(atom));
|
||||
|
||||
self.record_def(pattern.id, def);
|
||||
}
|
||||
FoundEnumVariant(_) => {
|
||||
self.session.span_err(pattern.span,
|
||||
fmt!{"declaration of `%s` \
|
||||
fmt!("declaration of `%s` \
|
||||
shadows an enum \
|
||||
that's in scope",
|
||||
self.session
|
||||
.str_of(atom)});
|
||||
.str_of(atom)));
|
||||
}
|
||||
FoundConst => {
|
||||
self.session.span_err(pattern.span,
|
||||
@ -3603,8 +3603,8 @@ struct Resolver {
|
||||
in scope");
|
||||
}
|
||||
EnumVariantOrConstNotFound => {
|
||||
debug!{"(resolving pattern) binding `%s`",
|
||||
self.session.str_of(atom)};
|
||||
debug!("(resolving pattern) binding `%s`",
|
||||
self.session.str_of(atom));
|
||||
|
||||
let is_mutable = mutability == Mutable;
|
||||
|
||||
@ -3647,10 +3647,10 @@ struct Resolver {
|
||||
// in the same disjunct, which is an
|
||||
// error
|
||||
self.session.span_err(pattern.span,
|
||||
fmt!{"Identifier %s is bound more \
|
||||
fmt!("Identifier %s is bound more \
|
||||
than once in the same pattern",
|
||||
path_to_str(path, self.session
|
||||
.intr())});
|
||||
.intr())));
|
||||
}
|
||||
// Not bound in the same pattern: do nothing
|
||||
}
|
||||
@ -3678,9 +3678,9 @@ struct Resolver {
|
||||
some(_) => {
|
||||
self.session.span_err(
|
||||
path.span,
|
||||
fmt!{"not an enum variant: %s",
|
||||
fmt!("not an enum variant: %s",
|
||||
self.session.str_of(
|
||||
path.idents.last())});
|
||||
path.idents.last())));
|
||||
}
|
||||
none => {
|
||||
self.session.span_err(path.span,
|
||||
@ -3835,9 +3835,9 @@ struct Resolver {
|
||||
-> NameDefinition {
|
||||
|
||||
if xray == NoXray && !self.name_is_exported(containing_module, name) {
|
||||
debug!{"(resolving definition of name in module) name `%s` is \
|
||||
debug!("(resolving definition of name in module) name `%s` is \
|
||||
unexported",
|
||||
self.session.str_of(name)};
|
||||
self.session.str_of(name));
|
||||
return NoNameDefinition;
|
||||
}
|
||||
|
||||
@ -3918,9 +3918,9 @@ struct Resolver {
|
||||
|
||||
Failed => {
|
||||
self.session.span_err(path.span,
|
||||
fmt!{"use of undeclared module `%s`",
|
||||
fmt!("use of undeclared module `%s`",
|
||||
self.atoms_to_str(
|
||||
(*module_path_atoms).get())});
|
||||
(*module_path_atoms).get())));
|
||||
return none;
|
||||
}
|
||||
|
||||
@ -3942,9 +3942,9 @@ struct Resolver {
|
||||
// We failed to resolve the name. Report an error.
|
||||
self.session.span_err(
|
||||
path.span,
|
||||
fmt!{"unresolved name: %s::%s",
|
||||
fmt!("unresolved name: %s::%s",
|
||||
self.atoms_to_str((*module_path_atoms).get()),
|
||||
self.session.str_of(name)});
|
||||
self.session.str_of(name)));
|
||||
return none;
|
||||
}
|
||||
ChildNameDefinition(def) | ImportNameDefinition(def) => {
|
||||
@ -3971,9 +3971,9 @@ struct Resolver {
|
||||
|
||||
Failed => {
|
||||
self.session.span_err(path.span,
|
||||
fmt!{"use of undeclared module `::%s`",
|
||||
fmt!("use of undeclared module `::%s`",
|
||||
self.atoms_to_str
|
||||
((*module_path_atoms).get())});
|
||||
((*module_path_atoms).get())));
|
||||
return none;
|
||||
}
|
||||
|
||||
@ -3995,9 +3995,9 @@ struct Resolver {
|
||||
// We failed to resolve the name. Report an error.
|
||||
self.session.span_err(
|
||||
path.span,
|
||||
fmt!{"unresolved name: %s::%s", self.atoms_to_str(
|
||||
fmt!("unresolved name: %s::%s", self.atoms_to_str(
|
||||
(*module_path_atoms).get()),
|
||||
self.session.str_of(name)});
|
||||
self.session.str_of(name)));
|
||||
return none;
|
||||
}
|
||||
ChildNameDefinition(def) | ImportNameDefinition(def) => {
|
||||
@ -4028,10 +4028,10 @@ struct Resolver {
|
||||
|
||||
match copy search_result {
|
||||
some(dl_def(def)) => {
|
||||
debug!{"(resolving path in local ribs) resolved `%s` to \
|
||||
debug!("(resolving path in local ribs) resolved `%s` to \
|
||||
local: %?",
|
||||
self.session.str_of(ident),
|
||||
def};
|
||||
def);
|
||||
return some(def);
|
||||
}
|
||||
some(dl_field) | some(dl_impl(_)) | none => {
|
||||
@ -4056,9 +4056,9 @@ struct Resolver {
|
||||
bindings with no def for that namespace?!";
|
||||
}
|
||||
some(def) => {
|
||||
debug!{"(resolving item path in lexical scope) \
|
||||
debug!("(resolving item path in lexical scope) \
|
||||
resolved `%s` to item",
|
||||
self.session.str_of(ident)};
|
||||
self.session.str_of(ident));
|
||||
return some(def.def);
|
||||
}
|
||||
}
|
||||
@ -4090,18 +4090,18 @@ struct Resolver {
|
||||
match self.resolve_path(path, ValueNS, true, visitor) {
|
||||
some(def) => {
|
||||
// Write the result into the def map.
|
||||
debug!{"(resolving expr) resolved `%s`",
|
||||
debug!("(resolving expr) resolved `%s`",
|
||||
connect(path.idents.map(
|
||||
|x| self.session.str_of(x)), ~"::")};
|
||||
|x| self.session.str_of(x)), ~"::"));
|
||||
self.record_def(expr.id, def);
|
||||
}
|
||||
none => {
|
||||
self.session.span_err(
|
||||
expr.span,
|
||||
fmt!{"unresolved name: %s",
|
||||
fmt!("unresolved name: %s",
|
||||
connect(path.idents.map(
|
||||
|x| self.session.str_of(x)),
|
||||
~"::")});
|
||||
~"::")));
|
||||
}
|
||||
}
|
||||
|
||||
@ -4149,10 +4149,10 @@ struct Resolver {
|
||||
_ => {
|
||||
self.session.span_err(
|
||||
path.span,
|
||||
fmt!{"`%s` does not name a structure",
|
||||
fmt!("`%s` does not name a structure",
|
||||
connect(path.idents.map(
|
||||
|x| self.session.str_of(x)),
|
||||
~"::")});
|
||||
~"::")));
|
||||
}
|
||||
}
|
||||
|
||||
@ -4340,11 +4340,11 @@ struct Resolver {
|
||||
|
||||
match self.trait_info.find(trait_def_id) {
|
||||
some(trait_info) if trait_info.contains_key(name) => {
|
||||
debug!{"(adding trait info if containing method) found trait \
|
||||
debug!("(adding trait info if containing method) found trait \
|
||||
%d:%d for method '%s'",
|
||||
trait_def_id.crate,
|
||||
trait_def_id.node,
|
||||
self.session.str_of(name)};
|
||||
self.session.str_of(name));
|
||||
(*found_traits).push(trait_def_id);
|
||||
}
|
||||
some(_) | none => {
|
||||
@ -4360,7 +4360,7 @@ struct Resolver {
|
||||
}
|
||||
|
||||
fn record_def(node_id: node_id, def: def) {
|
||||
debug!{"(recording def) recording %? for %?", def, node_id};
|
||||
debug!("(recording def) recording %? for %?", def, node_id);
|
||||
self.def_map.insert(node_id, def);
|
||||
}
|
||||
|
||||
@ -4393,9 +4393,9 @@ struct Resolver {
|
||||
}
|
||||
some(_) => {
|
||||
// Bail out.
|
||||
debug!{"(checking for unused imports in module subtree) not \
|
||||
debug!("(checking for unused imports in module subtree) not \
|
||||
checking for unused imports for `%s`",
|
||||
self.module_to_str(module_)};
|
||||
self.module_to_str(module_));
|
||||
return;
|
||||
}
|
||||
}
|
||||
@ -4491,14 +4491,14 @@ struct Resolver {
|
||||
}
|
||||
|
||||
fn dump_module(module_: @Module) {
|
||||
debug!{"Dump of module `%s`:", self.module_to_str(module_)};
|
||||
debug!("Dump of module `%s`:", self.module_to_str(module_));
|
||||
|
||||
debug!{"Children:"};
|
||||
debug!("Children:");
|
||||
for module_.children.each |name, _child| {
|
||||
debug!{"* %s", self.session.str_of(name)};
|
||||
debug!("* %s", self.session.str_of(name));
|
||||
}
|
||||
|
||||
debug!{"Import resolutions:"};
|
||||
debug!("Import resolutions:");
|
||||
for module_.import_resolutions.each |name, import_resolution| {
|
||||
let mut module_repr;
|
||||
match (*import_resolution).target_for_namespace(ModuleNS) {
|
||||
@ -4527,9 +4527,9 @@ struct Resolver {
|
||||
}
|
||||
}
|
||||
|
||||
debug!{"* %s:%s%s%s",
|
||||
debug!("* %s:%s%s%s",
|
||||
self.session.str_of(name),
|
||||
module_repr, value_repr, type_repr};
|
||||
module_repr, value_repr, type_repr);
|
||||
}
|
||||
}
|
||||
}
@ -67,8 +67,8 @@ enum dest {

fn dest_str(ccx: @crate_ctxt, d: dest) -> ~str {
match d {
by_val(v) => fmt!{"by_val(%s)", val_str(ccx.tn, *v)},
save_in(v) => fmt!{"save_in(%s)", val_str(ccx.tn, v)},
by_val(v) => fmt!("by_val(%s)", val_str(ccx.tn, *v)),
save_in(v) => fmt!("save_in(%s)", val_str(ccx.tn, v)),
ignore => ~"ignore"
}
}
@ -100,7 +100,7 @@ trait get_insn_ctxt {

impl @crate_ctxt: get_insn_ctxt {
fn insn_ctxt(s: &str) -> icx_popper {
debug!{"new insn_ctxt: %s", s};
debug!("new insn_ctxt: %s", s);
if self.sess.count_llvm_insns() {
vec::push(*self.stats.llvm_insn_ctxt, str::from_slice(s));
}
@ -504,7 +504,7 @@ fn declare_tydesc(ccx: @crate_ctxt, t: ty::t) -> @tydesc_info {
mangle_internal_name_by_type_only(ccx, t, ~"tydesc")
} else { mangle_internal_name_by_seq(ccx, ~"tydesc") };
note_unique_llvm_symbol(ccx, name);
log(debug, fmt!{"+++ declare_tydesc %s %s", ty_to_str(ccx.tcx, t), name});
log(debug, fmt!("+++ declare_tydesc %s %s", ty_to_str(ccx.tcx, t), name));
let gvar = str::as_c_str(name, |buf| {
llvm::LLVMAddGlobal(ccx.llmod, ccx.tydesc_type, buf)
});
@ -1116,52 +1116,52 @@ fn lazily_emit_tydesc_glue(ccx: @crate_ctxt, field: uint,
|
||||
match ti.take_glue {
|
||||
some(_) => (),
|
||||
none => {
|
||||
debug!{"+++ lazily_emit_tydesc_glue TAKE %s",
|
||||
ppaux::ty_to_str(ccx.tcx, ti.ty)};
|
||||
debug!("+++ lazily_emit_tydesc_glue TAKE %s",
|
||||
ppaux::ty_to_str(ccx.tcx, ti.ty));
|
||||
let glue_fn = declare_generic_glue(ccx, ti.ty, llfnty, ~"take");
|
||||
ti.take_glue = some(glue_fn);
|
||||
make_generic_glue(ccx, ti.ty, glue_fn, make_take_glue, ~"take");
|
||||
debug!{"--- lazily_emit_tydesc_glue TAKE %s",
|
||||
ppaux::ty_to_str(ccx.tcx, ti.ty)};
|
||||
debug!("--- lazily_emit_tydesc_glue TAKE %s",
|
||||
ppaux::ty_to_str(ccx.tcx, ti.ty));
|
||||
}
|
||||
}
|
||||
} else if field == abi::tydesc_field_drop_glue {
|
||||
match ti.drop_glue {
|
||||
some(_) => (),
|
||||
none => {
|
||||
debug!{"+++ lazily_emit_tydesc_glue DROP %s",
|
||||
ppaux::ty_to_str(ccx.tcx, ti.ty)};
|
||||
debug!("+++ lazily_emit_tydesc_glue DROP %s",
|
||||
ppaux::ty_to_str(ccx.tcx, ti.ty));
|
||||
let glue_fn = declare_generic_glue(ccx, ti.ty, llfnty, ~"drop");
|
||||
ti.drop_glue = some(glue_fn);
|
||||
make_generic_glue(ccx, ti.ty, glue_fn, make_drop_glue, ~"drop");
|
||||
debug!{"--- lazily_emit_tydesc_glue DROP %s",
|
||||
ppaux::ty_to_str(ccx.tcx, ti.ty)};
|
||||
debug!("--- lazily_emit_tydesc_glue DROP %s",
|
||||
ppaux::ty_to_str(ccx.tcx, ti.ty));
|
||||
}
|
||||
}
|
||||
} else if field == abi::tydesc_field_free_glue {
|
||||
match ti.free_glue {
|
||||
some(_) => (),
|
||||
none => {
|
||||
debug!{"+++ lazily_emit_tydesc_glue FREE %s",
|
||||
ppaux::ty_to_str(ccx.tcx, ti.ty)};
|
||||
debug!("+++ lazily_emit_tydesc_glue FREE %s",
|
||||
ppaux::ty_to_str(ccx.tcx, ti.ty));
|
||||
let glue_fn = declare_generic_glue(ccx, ti.ty, llfnty, ~"free");
|
||||
ti.free_glue = some(glue_fn);
|
||||
make_generic_glue(ccx, ti.ty, glue_fn, make_free_glue, ~"free");
|
||||
debug!{"--- lazily_emit_tydesc_glue FREE %s",
|
||||
ppaux::ty_to_str(ccx.tcx, ti.ty)};
|
||||
debug!("--- lazily_emit_tydesc_glue FREE %s",
|
||||
ppaux::ty_to_str(ccx.tcx, ti.ty));
|
||||
}
|
||||
}
|
||||
} else if field == abi::tydesc_field_visit_glue {
|
||||
match ti.visit_glue {
|
||||
some(_) => (),
|
||||
none => {
|
||||
debug!{"+++ lazily_emit_tydesc_glue VISIT %s",
|
||||
ppaux::ty_to_str(ccx.tcx, ti.ty)};
|
||||
debug!("+++ lazily_emit_tydesc_glue VISIT %s",
|
||||
ppaux::ty_to_str(ccx.tcx, ti.ty));
|
||||
let glue_fn = declare_generic_glue(ccx, ti.ty, llfnty, ~"visit");
|
||||
ti.visit_glue = some(glue_fn);
|
||||
make_generic_glue(ccx, ti.ty, glue_fn, make_visit_glue, ~"visit");
|
||||
debug!{"--- lazily_emit_tydesc_glue VISIT %s",
|
||||
ppaux::ty_to_str(ccx.tcx, ti.ty)};
|
||||
debug!("--- lazily_emit_tydesc_glue VISIT %s",
|
||||
ppaux::ty_to_str(ccx.tcx, ti.ty));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1722,7 +1722,7 @@ fn trans_eager_binop(cx: block, span: span, op: ast::binop, lhs: ValueRef,
|
||||
|
||||
fn trans_assign_op(bcx: block, ex: @ast::expr, op: ast::binop,
|
||||
dst: @ast::expr, src: @ast::expr) -> block {
|
||||
debug!{"%s", expr_to_str(ex, bcx.tcx().sess.parse_sess.interner)};
|
||||
debug!("%s", expr_to_str(ex, bcx.tcx().sess.parse_sess.interner));
|
||||
let _icx = bcx.insn_ctxt("trans_assign_op");
|
||||
let t = expr_ty(bcx, src);
|
||||
let lhs_res = trans_lval(bcx, dst);
|
||||
@ -1732,9 +1732,9 @@ fn trans_assign_op(bcx: block, ex: @ast::expr, op: ast::binop,
|
||||
match bcx.ccx().maps.method_map.find(ex.id) {
|
||||
some(origin) => {
|
||||
let bcx = lhs_res.bcx;
|
||||
debug!{"user-defined method callee_id: %s",
|
||||
debug!("user-defined method callee_id: %s",
|
||||
ast_map::node_id_to_str(bcx.tcx().items, ex.callee_id,
|
||||
bcx.sess().parse_sess.interner)};
|
||||
bcx.sess().parse_sess.interner));
|
||||
let fty = node_id_type(bcx, ex.callee_id);
|
||||
|
||||
let dty = expr_ty(bcx, dst);
|
||||
@ -1770,7 +1770,7 @@ fn root_value(bcx: block, val: ValueRef, ty: ty::t,
|
||||
if bcx.sess().trace() {
|
||||
trans_trace(
|
||||
bcx, none,
|
||||
fmt!{"preserving until end of scope %d", scope_id});
|
||||
fmt!("preserving until end of scope %d", scope_id));
|
||||
}
|
||||
|
||||
let root_loc = alloca_zeroed(bcx, type_of(bcx.ccx(), ty));
|
||||
@ -1789,9 +1789,9 @@ fn autoderef(cx: block, e_id: ast::node_id,
|
||||
let ccx = cx.ccx();
|
||||
let mut derefs = 0u;
|
||||
while derefs < max {
|
||||
debug!{"autoderef(e_id=%d, v1=%s, t1=%s, derefs=%u)",
|
||||
debug!("autoderef(e_id=%d, v1=%s, t1=%s, derefs=%u)",
|
||||
e_id, val_str(ccx.tn, v1), ppaux::ty_to_str(ccx.tcx, t1),
|
||||
derefs};
|
||||
derefs);
|
||||
|
||||
// root the autoderef'd value, if necessary:
|
||||
derefs += 1u;
|
||||
@ -2140,8 +2140,8 @@ fn monomorphic_fn(ccx: @crate_ctxt, fn_id: ast::def_id,
|
||||
|
||||
match ccx.monomorphized.find(hash_id) {
|
||||
some(val) => {
|
||||
debug!{"leaving monomorphic fn %s",
|
||||
ty::item_path_str(ccx.tcx, fn_id)};
|
||||
debug!("leaving monomorphic fn %s",
|
||||
ty::item_path_str(ccx.tcx, fn_id));
|
||||
return {val: val, must_cast: must_cast};
|
||||
}
|
||||
none => ()
|
||||
@ -2151,9 +2151,9 @@ fn monomorphic_fn(ccx: @crate_ctxt, fn_id: ast::def_id,
|
||||
let mut llitem_ty = tpt.ty;
|
||||
|
||||
let map_node = session::expect(ccx.sess, ccx.tcx.items.find(fn_id.node),
|
||||
|| fmt!{"While monomorphizing %?, couldn't find it in the item map \
|
||||
|| fmt!("While monomorphizing %?, couldn't find it in the item map \
|
||||
(may have attempted to monomorphize an item defined in a different \
|
||||
crate?)", fn_id});
|
||||
crate?)", fn_id));
|
||||
// Get the path so that we can create a symbol
|
||||
let (pt, name, span) = match map_node {
|
||||
ast_map::node_item(i, pt) => (pt, i.ident, i.span),
|
||||
@ -2287,7 +2287,7 @@ fn monomorphic_fn(ccx: @crate_ctxt, fn_id: ast::def_id,
|
||||
};
|
||||
ccx.monomorphizing.insert(fn_id, depth);
|
||||
|
||||
debug!{"leaving monomorphic fn %s", ty::item_path_str(ccx.tcx, fn_id)};
|
||||
debug!("leaving monomorphic fn %s", ty::item_path_str(ccx.tcx, fn_id));
|
||||
{val: lldecl, must_cast: must_cast}
|
||||
}
|
||||
|
||||
@ -2297,8 +2297,8 @@ fn maybe_instantiate_inline(ccx: @crate_ctxt, fn_id: ast::def_id)
|
||||
match ccx.external.find(fn_id) {
|
||||
some(some(node_id)) => {
|
||||
// Already inline
|
||||
debug!{"maybe_instantiate_inline(%s): already inline as node id %d",
|
||||
ty::item_path_str(ccx.tcx, fn_id), node_id};
|
||||
debug!("maybe_instantiate_inline(%s): already inline as node id %d",
|
||||
ty::item_path_str(ccx.tcx, fn_id), node_id);
|
||||
local_def(node_id)
|
||||
}
|
||||
some(none) => fn_id, // Not inlinable
|
||||
@ -2487,8 +2487,8 @@ fn trans_local_var(cx: block, def: ast::def) -> local_var_result {
|
||||
return {val: slf, kind: lv_owned};
|
||||
}
|
||||
_ => {
|
||||
cx.sess().unimpl(fmt!{"unsupported def type in trans_local_var: %?",
|
||||
def});
|
||||
cx.sess().unimpl(fmt!("unsupported def type in trans_local_var: %?",
|
||||
def));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -2576,7 +2576,7 @@ fn trans_rec_field_inner(bcx: block, val: ValueRef, ty: ty::t,
|
||||
|
||||
let ix = field_idx_strict(bcx.tcx(), sp, field, fields);
|
||||
|
||||
debug!{"val = %s ix = %u", bcx.val_str(val), ix};
|
||||
debug!("val = %s ix = %u", bcx.val_str(val), ix);
|
||||
|
||||
/* self is a class with a dtor, which means we
|
||||
have to select out the object itself
|
||||
@ -2628,8 +2628,8 @@ fn trans_index(cx: block, ex: @ast::expr, base: @ast::expr,
|
||||
len = Sub(bcx, len, C_uint(bcx.ccx(), 1u));
|
||||
}
|
||||
|
||||
debug!{"trans_index: base %s", val_str(bcx.ccx().tn, base)};
|
||||
debug!{"trans_index: len %s", val_str(bcx.ccx().tn, len)};
|
||||
debug!("trans_index: base %s", val_str(bcx.ccx().tn, base));
|
||||
debug!("trans_index: len %s", val_str(bcx.ccx().tn, len));
|
||||
|
||||
let bounds_check = ICmp(bcx, lib::llvm::IntUGE, scaled_ix, len);
|
||||
let bcx = do with_cond(bcx, bounds_check) |bcx| {
|
||||
@ -2687,8 +2687,8 @@ fn trans_lval(cx: block, e: @ast::expr) -> lval_result {
|
||||
let lv = unrooted(cx, e);
|
||||
|
||||
if !cx.sess().no_asm_comments() {
|
||||
add_comment(cx, fmt!{"preserving until end of scope %d",
|
||||
scope_id});
|
||||
add_comment(cx, fmt!("preserving until end of scope %d",
|
||||
scope_id));
|
||||
}
|
||||
|
||||
let _icx = lv.bcx.insn_ctxt("root_value_lval");
|
||||
@ -2749,7 +2749,7 @@ fn trans_lval(cx: block, e: @ast::expr) -> lval_result {
|
||||
* wrong address space and thus be the wrong type.
|
||||
*/
|
||||
fn non_gc_box_cast(cx: block, val: ValueRef) -> ValueRef {
|
||||
debug!{"non_gc_box_cast"};
|
||||
debug!("non_gc_box_cast");
|
||||
add_comment(cx, ~"non_gc_box_cast");
|
||||
assert(llvm::LLVMGetPointerAddressSpace(val_ty(val)) as uint == 1u);
|
||||
let non_gc_t = T_ptr(llvm::LLVMGetElementType(val_ty(val)));
|
||||
@ -2904,7 +2904,7 @@ fn trans_arg_expr(cx: block, arg: ty::arg, lldestty: TypeRef, e: @ast::expr,
|
||||
-> result {
|
||||
let _icx = cx.insn_ctxt("trans_arg_expr");
|
||||
let ccx = cx.ccx();
|
||||
debug!{"+++ trans_arg_expr on %s", expr_to_str(e, ccx.sess.intr())};
|
||||
debug!("+++ trans_arg_expr on %s", expr_to_str(e, ccx.sess.intr()));
|
||||
let e_ty = expr_ty(cx, e);
|
||||
let is_bot = ty::type_is_bot(e_ty);
|
||||
|
||||
@ -2925,7 +2925,7 @@ fn trans_arg_expr(cx: block, arg: ty::arg, lldestty: TypeRef, e: @ast::expr,
|
||||
|
||||
// auto-deref value as required (this only applies to method
|
||||
// call receivers) of method
|
||||
debug!{" pre-deref value: %s", val_str(lv.bcx.ccx().tn, lv.val)};
|
||||
debug!(" pre-deref value: %s", val_str(lv.bcx.ccx().tn, lv.val));
|
||||
let {lv, e_ty} = if derefs == 0u {
|
||||
{lv: lv, e_ty: e_ty}
|
||||
} else {
|
||||
@ -2937,11 +2937,11 @@ fn trans_arg_expr(cx: block, arg: ty::arg, lldestty: TypeRef, e: @ast::expr,
|
||||
};
|
||||
|
||||
// borrow value (convert from @T to &T and so forth)
|
||||
debug!{" pre-adaptation value: %s", val_str(lv.bcx.ccx().tn, lv.val)};
|
||||
debug!(" pre-adaptation value: %s", val_str(lv.bcx.ccx().tn, lv.val));
|
||||
let {lv, ty: e_ty} = adapt_borrowed_value(lv, e, e_ty);
|
||||
let mut bcx = lv.bcx;
|
||||
let mut val = lv.val;
|
||||
debug!{" adapted value: %s", val_str(bcx.ccx().tn, val)};
|
||||
debug!(" adapted value: %s", val_str(bcx.ccx().tn, val));
|
||||
|
||||
// finally, deal with the various modes
|
||||
let arg_mode = ty::resolved_mode(ccx.tcx, arg.mode);
|
||||
@ -2993,11 +2993,11 @@ fn trans_arg_expr(cx: block, arg: ty::arg, lldestty: TypeRef, e: @ast::expr,
|
||||
}
|
||||
|
||||
if !is_bot && arg.ty != e_ty || ty::type_has_params(arg.ty) {
|
||||
debug!{" casting from %s", val_str(bcx.ccx().tn, val)};
|
||||
debug!(" casting from %s", val_str(bcx.ccx().tn, val));
|
||||
val = PointerCast(bcx, val, lldestty);
|
||||
}
|
||||
|
||||
debug!{"--- trans_arg_expr passing %s", val_str(bcx.ccx().tn, val)};
|
||||
debug!("--- trans_arg_expr passing %s", val_str(bcx.ccx().tn, val));
|
||||
return rslt(bcx, val);
|
||||
}
|
||||
|
||||
@ -3038,9 +3038,9 @@ fn adapt_borrowed_value(lv: lval_result,
|
||||
let (base, len) = tvec::get_base_and_len(bcx, val, e_ty);
|
||||
let p = alloca(bcx, T_struct(~[T_ptr(llunit_ty), ccx.int_type]));
|
||||
|
||||
debug!{"adapt_borrowed_value: adapting %s to %s",
|
||||
debug!("adapt_borrowed_value: adapting %s to %s",
|
||||
val_str(bcx.ccx().tn, val),
|
||||
val_str(bcx.ccx().tn, p)};
|
||||
val_str(bcx.ccx().tn, p));
|
||||
|
||||
Store(bcx, base, GEPi(bcx, p, ~[0u, abi::slice_elt_base]));
|
||||
Store(bcx, len, GEPi(bcx, p, ~[0u, abi::slice_elt_len]));
|
||||
@ -3683,9 +3683,9 @@ fn lval_result_to_result(lv: lval_result, ty: ty::t) -> result {
|
||||
fn add_root_cleanup(bcx: block, scope_id: ast::node_id,
|
||||
root_loc: ValueRef, ty: ty::t) {
|
||||
|
||||
debug!{"add_root_cleanup(bcx=%s, scope_id=%d, root_loc=%s, ty=%s)",
|
||||
debug!("add_root_cleanup(bcx=%s, scope_id=%d, root_loc=%s, ty=%s)",
|
||||
bcx.to_str(), scope_id, val_str(bcx.ccx().tn, root_loc),
|
||||
ppaux::ty_to_str(bcx.ccx().tcx, ty)};
|
||||
ppaux::ty_to_str(bcx.ccx().tcx, ty));
|
||||
|
||||
let bcx_scope = find_bcx_for_scope(bcx, scope_id);
|
||||
add_clean_temp_mem(bcx_scope, root_loc, ty);
|
||||
@ -3700,7 +3700,7 @@ fn add_root_cleanup(bcx: block, scope_id: ast::node_id,
|
||||
_ => {
|
||||
match bcx_sid.parent {
|
||||
none => bcx.tcx().sess.bug(
|
||||
fmt!{"no enclosing scope with id %d", scope_id}),
|
||||
fmt!("no enclosing scope with id %d", scope_id)),
|
||||
some(bcx_par) => bcx_par
|
||||
}
|
||||
}
|
||||
@ -3724,16 +3724,16 @@ fn trans_expr(bcx: block, e: @ast::expr, dest: dest) -> block {
|
||||
return match bcx.ccx().maps.root_map.find({id:e.id, derefs:0u}) {
|
||||
none => unrooted(bcx, e, dest),
|
||||
some(scope_id) => {
|
||||
debug!{"expression %d found in root map with scope %d",
|
||||
e.id, scope_id};
|
||||
debug!("expression %d found in root map with scope %d",
|
||||
e.id, scope_id);
|
||||
|
||||
let ty = expr_ty(bcx, e);
|
||||
let root_loc = alloca_zeroed(bcx, type_of(bcx.ccx(), ty));
|
||||
let bcx = unrooted(bcx, e, save_in(root_loc));
|
||||
|
||||
if !bcx.sess().no_asm_comments() {
|
||||
add_comment(bcx, fmt!{"preserving until end of scope %d",
|
||||
scope_id});
|
||||
add_comment(bcx, fmt!("preserving until end of scope %d",
|
||||
scope_id));
|
||||
}
|
||||
|
||||
let _icx = bcx.insn_ctxt("root_value_expr");
|
||||
@ -3812,9 +3812,9 @@ fn trans_expr(bcx: block, e: @ast::expr, dest: dest) -> block {
|
||||
ast::expr_fn_block(decl, body, cap_clause) => {
|
||||
match check ty::get(expr_ty(bcx, e)).struct {
|
||||
ty::ty_fn({proto, _}) => {
|
||||
debug!{"translating fn_block %s with type %s",
|
||||
debug!("translating fn_block %s with type %s",
|
||||
expr_to_str(e, tcx.sess.intr()),
|
||||
ppaux::ty_to_str(tcx, expr_ty(bcx, e))};
|
||||
ppaux::ty_to_str(tcx, expr_ty(bcx, e)));
|
||||
return closure::trans_expr_fn(bcx, proto, decl, body,
|
||||
e.id, cap_clause, none, dest);
|
||||
}
|
||||
@ -3945,8 +3945,8 @@ fn lval_to_dps(bcx: block, e: @ast::expr, dest: dest) -> block {
|
||||
let ty = expr_ty(bcx, e);
|
||||
let lv = trans_lval(bcx, e);
|
||||
let last_use = (lv.kind == lv_owned && last_use_map.contains_key(e.id));
|
||||
debug!{"is last use (%s) = %b, %d", expr_to_str(e, bcx.ccx().sess.intr()),
|
||||
last_use, lv.kind as int};
|
||||
debug!("is last use (%s) = %b, %d", expr_to_str(e, bcx.ccx().sess.intr()),
|
||||
last_use, lv.kind as int);
|
||||
lval_result_to_dps(lv, ty, last_use, dest)
|
||||
}
|
||||
|
||||
@ -4297,7 +4297,7 @@ fn init_local(bcx: block, local: @ast::local) -> block {
|
||||
|
||||
fn trans_stmt(cx: block, s: ast::stmt) -> block {
|
||||
let _icx = cx.insn_ctxt("trans_stmt");
|
||||
debug!{"trans_stmt(%s)", stmt_to_str(s, cx.tcx().sess.intr())};
|
||||
debug!("trans_stmt(%s)", stmt_to_str(s, cx.tcx().sess.intr()));
|
||||
|
||||
if !cx.sess().no_asm_comments() {
|
||||
add_span_comment(cx, s.span, stmt_to_str(s, cx.ccx().sess.intr()));
|
||||
@ -4434,12 +4434,12 @@ fn cleanup_and_leave(bcx: block, upto: option<BasicBlockRef>,
|
||||
let mut cur = bcx, bcx = bcx;
|
||||
let is_lpad = leave == none;
|
||||
loop {
|
||||
debug!{"cleanup_and_leave: leaving %s", cur.to_str()};
|
||||
debug!("cleanup_and_leave: leaving %s", cur.to_str());
|
||||
|
||||
if bcx.sess().trace() {
|
||||
trans_trace(
|
||||
bcx, none,
|
||||
fmt!{"cleanup_and_leave(%s)", cur.to_str()});
|
||||
fmt!("cleanup_and_leave(%s)", cur.to_str()));
|
||||
}
|
||||
|
||||
match cur.kind {
|
||||
@ -5133,9 +5133,9 @@ fn register_fn_fuller(ccx: @crate_ctxt, sp: span, path: path,
|
||||
let llfn: ValueRef = decl_fn(ccx.llmod, ps, cc, llfty);
|
||||
ccx.item_symbols.insert(node_id, ps);
|
||||
|
||||
debug!{"register_fn_fuller created fn %s for item %d with path %s",
|
||||
debug!("register_fn_fuller created fn %s for item %d with path %s",
|
||||
val_str(ccx.tn, llfn), node_id,
|
||||
ast_map::path_to_str(path, ccx.sess.parse_sess.interner)};
|
||||
ast_map::path_to_str(path, ccx.sess.parse_sess.interner));
|
||||
|
||||
let is_main = is_main_name(path) && !ccx.sess.building_library;
|
||||
if is_main { create_main_wrapper(ccx, sp, llfn, node_type); }
|
||||
@ -5276,8 +5276,8 @@ fn get_dtor_symbol(ccx: @crate_ctxt, path: path, id: ast::node_id,
|
||||
mono_ty)
|
||||
}
|
||||
none => {
|
||||
ccx.sess.bug(fmt!{"get_dtor_symbol: not monomorphizing and \
|
||||
couldn't find a symbol for dtor %?", path});
|
||||
ccx.sess.bug(fmt!("get_dtor_symbol: not monomorphizing and \
|
||||
couldn't find a symbol for dtor %?", path));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -5285,7 +5285,7 @@ fn get_dtor_symbol(ccx: @crate_ctxt, path: path, id: ast::node_id,
|
||||
}
|
||||
|
||||
fn get_item_val(ccx: @crate_ctxt, id: ast::node_id) -> ValueRef {
|
||||
debug!{"get_item_val(id=`%?`)", id};
|
||||
debug!("get_item_val(id=`%?`)", id);
|
||||
let tcx = ccx.tcx;
|
||||
match ccx.item_vals.find(id) {
|
||||
some(v) => v,
|
||||
@ -5316,7 +5316,7 @@ fn get_item_val(ccx: @crate_ctxt, id: ast::node_id) -> ValueRef {
|
||||
}
|
||||
}
|
||||
ast_map::node_trait_method(trait_method, _, pth) => {
|
||||
debug!{"get_item_val(): processing a node_trait_method"};
|
||||
debug!("get_item_val(): processing a node_trait_method");
|
||||
match *trait_method {
|
||||
ast::required(_) => {
|
||||
ccx.sess.bug(~"unexpected variant: required trait method in \
|
||||
@ -5527,7 +5527,7 @@ fn trap(bcx: block) {
|
||||
|
||||
fn push_rtcall(ccx: @crate_ctxt, name: ~str, did: ast::def_id) {
|
||||
if ccx.rtcalls.contains_key(name) {
|
||||
fail fmt!{"multiple definitions for runtime call %s", name};
|
||||
fail fmt!("multiple definitions for runtime call %s", name);
|
||||
}
|
||||
ccx.rtcalls.insert(name, did);
|
||||
}
|
||||
@ -5597,7 +5597,7 @@ fn gather_rtcalls(ccx: @crate_ctxt, crate: @ast::crate) {
|
||||
~[~"exchange_free", ~"exchange_malloc", ~"fail", ~"free", ~"malloc"];
|
||||
for vec::each(expected_rtcalls) |name| {
|
||||
if !ccx.rtcalls.contains_key(name) {
|
||||
fail fmt!{"no definition for runtime call %s", name};
|
||||
fail fmt!("no definition for runtime call %s", name);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -5867,25 +5867,25 @@ fn trans_crate(sess: session::session,
|
||||
write_metadata(ccx, crate);
|
||||
if ccx.sess.trans_stats() {
|
||||
io::println(~"--- trans stats ---");
|
||||
io::println(fmt!{"n_static_tydescs: %u",
|
||||
ccx.stats.n_static_tydescs});
|
||||
io::println(fmt!{"n_glues_created: %u",
|
||||
ccx.stats.n_glues_created});
|
||||
io::println(fmt!{"n_null_glues: %u", ccx.stats.n_null_glues});
|
||||
io::println(fmt!{"n_real_glues: %u", ccx.stats.n_real_glues});
|
||||
io::println(fmt!("n_static_tydescs: %u",
|
||||
ccx.stats.n_static_tydescs));
|
||||
io::println(fmt!("n_glues_created: %u",
|
||||
ccx.stats.n_glues_created));
|
||||
io::println(fmt!("n_null_glues: %u", ccx.stats.n_null_glues));
|
||||
io::println(fmt!("n_real_glues: %u", ccx.stats.n_real_glues));
|
||||
|
||||
// FIXME (#2280): this temporary shouldn't be
|
||||
// necessary, but seems to be, for borrowing.
|
||||
let times = copy *ccx.stats.fn_times;
|
||||
for vec::each(times) |timing| {
|
||||
io::println(fmt!{"time: %s took %d ms", timing.ident,
|
||||
timing.time});
|
||||
io::println(fmt!("time: %s took %d ms", timing.ident,
|
||||
timing.time));
|
||||
}
|
||||
}
|
||||
|
||||
if ccx.sess.count_llvm_insns() {
|
||||
for ccx.stats.llvm_insns.each |k, v| {
|
||||
io::println(fmt!{"%-7u %s", v, k});
|
||||
io::println(fmt!("%-7u %s", v, k));
|
||||
}
|
||||
}
|
||||
return (llmod, link_meta);
|
||||
|
@ -142,10 +142,10 @@ fn Invoke(cx: block, Fn: ValueRef, Args: ~[ValueRef],
|
||||
if cx.unreachable { return; }
|
||||
assert (!cx.terminated);
|
||||
cx.terminated = true;
|
||||
debug!{"Invoke(%s with arguments (%s))",
|
||||
debug!("Invoke(%s with arguments (%s))",
|
||||
val_str(cx.ccx().tn, Fn),
|
||||
str::connect(vec::map(Args, |a| val_str(cx.ccx().tn, a)),
|
||||
~", ")};
|
||||
~", "));
|
||||
unsafe {
|
||||
count_insn(cx, "invoke");
|
||||
llvm::LLVMBuildInvoke(B(cx), Fn, vec::unsafe::to_ptr(Args),
|
||||
@ -413,9 +413,9 @@ fn Load(cx: block, PointerVal: ValueRef) -> ValueRef {
|
||||
|
||||
fn Store(cx: block, Val: ValueRef, Ptr: ValueRef) {
|
||||
if cx.unreachable { return; }
|
||||
debug!{"Store %s -> %s",
|
||||
debug!("Store %s -> %s",
|
||||
val_str(cx.ccx().tn, Val),
|
||||
val_str(cx.ccx().tn, Ptr)};
|
||||
val_str(cx.ccx().tn, Ptr));
|
||||
count_insn(cx, "store");
|
||||
llvm::LLVMBuildStore(B(cx), Val, Ptr);
|
||||
}
|
||||
@ -673,9 +673,9 @@ fn Call(cx: block, Fn: ValueRef, Args: ~[ValueRef]) -> ValueRef {
|
||||
unsafe {
|
||||
count_insn(cx, "call");
|
||||
|
||||
debug!{"Call(Fn=%s, Args=%?)",
|
||||
debug!("Call(Fn=%s, Args=%?)",
|
||||
val_str(cx.ccx().tn, Fn),
|
||||
Args.map(|arg| val_str(cx.ccx().tn, arg))};
|
||||
Args.map(|arg| val_str(cx.ccx().tn, arg)));
|
||||
|
||||
return llvm::LLVMBuildCall(B(cx), Fn, vec::unsafe::to_ptr(Args),
|
||||
Args.len() as c_uint, noname());
|
||||
|
@ -100,12 +100,12 @@ enum environment_value {
|
||||
|
||||
fn ev_to_str(ccx: @crate_ctxt, ev: environment_value) -> ~str {
|
||||
match ev {
|
||||
env_copy(v, t, lk) => fmt!{"copy(%s,%s)", val_str(ccx.tn, v),
|
||||
ty_to_str(ccx.tcx, t)},
|
||||
env_move(v, t, lk) => fmt!{"move(%s,%s)", val_str(ccx.tn, v),
|
||||
ty_to_str(ccx.tcx, t)},
|
||||
env_ref(v, t, lk) => fmt!{"ref(%s,%s)", val_str(ccx.tn, v),
|
||||
ty_to_str(ccx.tcx, t)}
|
||||
env_copy(v, t, lk) => fmt!("copy(%s,%s)", val_str(ccx.tn, v),
|
||||
ty_to_str(ccx.tcx, t)),
|
||||
env_move(v, t, lk) => fmt!("move(%s,%s)", val_str(ccx.tn, v),
|
||||
ty_to_str(ccx.tcx, t)),
|
||||
env_ref(v, t, lk) => fmt!("ref(%s,%s)", val_str(ccx.tn, v),
|
||||
ty_to_str(ccx.tcx, t))
|
||||
}
|
||||
}
|
||||
|
||||
@ -129,7 +129,7 @@ fn mk_closure_tys(tcx: ty::ctxt,
|
||||
});
|
||||
}
|
||||
let cdata_ty = ty::mk_tup(tcx, bound_tys);
|
||||
debug!{"cdata_ty=%s", ty_to_str(tcx, cdata_ty)};
|
||||
debug!("cdata_ty=%s", ty_to_str(tcx, cdata_ty));
|
||||
return cdata_ty;
|
||||
}
|
||||
|
||||
@ -195,16 +195,16 @@ fn store_environment(bcx: block,
|
||||
let cboxptr_ty = ty::mk_ptr(tcx, {ty:cbox_ty, mutbl:ast::m_imm});
|
||||
|
||||
let llbox = PointerCast(bcx, llbox, type_of(ccx, cboxptr_ty));
|
||||
debug!{"tuplify_box_ty = %s", ty_to_str(tcx, cbox_ty)};
|
||||
debug!("tuplify_box_ty = %s", ty_to_str(tcx, cbox_ty));
|
||||
|
||||
// Copy expr values into boxed bindings.
|
||||
let mut bcx = bcx;
|
||||
do vec::iteri(bound_values) |i, bv| {
|
||||
debug!{"Copy %s into closure", ev_to_str(ccx, bv)};
|
||||
debug!("Copy %s into closure", ev_to_str(ccx, bv));
|
||||
|
||||
if !ccx.sess.no_asm_comments() {
|
||||
add_comment(bcx, fmt!{"Copy %s into closure",
|
||||
ev_to_str(ccx, bv)});
|
||||
add_comment(bcx, fmt!("Copy %s into closure",
|
||||
ev_to_str(ccx, bv)));
|
||||
}
|
||||
|
||||
let bound_data = GEPi(bcx, llbox,
|
||||
@ -225,9 +225,9 @@ fn store_environment(bcx: block,
|
||||
bcx = move_val(bcx, INIT, bound_data, src, ty);
|
||||
}
|
||||
env_ref(val, ty, lv_owned) => {
|
||||
debug!{"> storing %s into %s",
|
||||
debug!("> storing %s into %s",
|
||||
val_str(bcx.ccx().tn, val),
|
||||
val_str(bcx.ccx().tn, bound_data)};
|
||||
val_str(bcx.ccx().tn, bound_data));
|
||||
Store(bcx, val, bound_data);
|
||||
}
|
||||
env_ref(val, ty, lv_owned_imm) => {
|
||||
@ -259,13 +259,13 @@ fn build_closure(bcx0: block,
|
||||
|
||||
// Package up the captured upvars
|
||||
do vec::iter(cap_vars) |cap_var| {
|
||||
debug!{"Building closure: captured variable %?", cap_var};
|
||||
debug!("Building closure: captured variable %?", cap_var);
|
||||
let lv = trans_local_var(bcx, cap_var.def);
|
||||
let nid = ast_util::def_id_of_def(cap_var.def).node;
|
||||
debug!{"Node id is %s",
|
||||
debug!("Node id is %s",
|
||||
syntax::ast_map::node_id_to_str
|
||||
(bcx.ccx().tcx.items, nid,
|
||||
bcx.ccx().sess.parse_sess.interner)};
|
||||
bcx.ccx().sess.parse_sess.interner));
|
||||
let mut ty = node_id_type(bcx, nid);
|
||||
match cap_var.mode {
|
||||
capture::cap_ref => {
|
||||
|
@ -256,9 +256,9 @@ fn cleanup_type(cx: ty::ctxt, ty: ty::t) -> cleantype {
|
||||
|
||||
fn add_clean(cx: block, val: ValueRef, ty: ty::t) {
|
||||
if !ty::type_needs_drop(cx.tcx(), ty) { return; }
|
||||
debug!{"add_clean(%s, %s, %s)",
|
||||
debug!("add_clean(%s, %s, %s)",
|
||||
cx.to_str(), val_str(cx.ccx().tn, val),
|
||||
ty_to_str(cx.ccx().tcx, ty)};
|
||||
ty_to_str(cx.ccx().tcx, ty));
|
||||
let cleanup_type = cleanup_type(cx.tcx(), ty);
|
||||
do in_scope_cx(cx) |info| {
|
||||
vec::push(info.cleanups, clean(|a| base::drop_ty(a, val, ty),
|
||||
@ -268,9 +268,9 @@ fn add_clean(cx: block, val: ValueRef, ty: ty::t) {
|
||||
}
|
||||
fn add_clean_temp(cx: block, val: ValueRef, ty: ty::t) {
|
||||
if !ty::type_needs_drop(cx.tcx(), ty) { return; }
|
||||
debug!{"add_clean_temp(%s, %s, %s)",
|
||||
debug!("add_clean_temp(%s, %s, %s)",
|
||||
cx.to_str(), val_str(cx.ccx().tn, val),
|
||||
ty_to_str(cx.ccx().tcx, ty)};
|
||||
ty_to_str(cx.ccx().tcx, ty));
|
||||
let cleanup_type = cleanup_type(cx.tcx(), ty);
|
||||
fn do_drop(bcx: block, val: ValueRef, ty: ty::t) ->
|
||||
block {
|
||||
@ -288,9 +288,9 @@ fn add_clean_temp(cx: block, val: ValueRef, ty: ty::t) {
|
||||
}
|
||||
fn add_clean_temp_mem(cx: block, val: ValueRef, ty: ty::t) {
|
||||
if !ty::type_needs_drop(cx.tcx(), ty) { return; }
|
||||
debug!{"add_clean_temp_mem(%s, %s, %s)",
|
||||
debug!("add_clean_temp_mem(%s, %s, %s)",
|
||||
cx.to_str(), val_str(cx.ccx().tn, val),
|
||||
ty_to_str(cx.ccx().tcx, ty)};
|
||||
ty_to_str(cx.ccx().tcx, ty));
|
||||
let cleanup_type = cleanup_type(cx.tcx(), ty);
|
||||
do in_scope_cx(cx) |info| {
|
||||
vec::push(info.cleanups,
|
||||
@ -475,8 +475,8 @@ fn in_scope_cx(cx: block, f: fn(scope_info)) {
|
||||
fn block_parent(cx: block) -> block {
|
||||
match cx.parent {
|
||||
some(b) => b,
|
||||
none => cx.sess().bug(fmt!{"block_parent called on root block %?",
|
||||
cx})
|
||||
none => cx.sess().bug(fmt!("block_parent called on root block %?",
|
||||
cx))
|
||||
}
|
||||
}
|
||||
|
||||
@ -496,10 +496,10 @@ impl block {
|
||||
fn to_str() -> ~str {
|
||||
match self.node_info {
|
||||
some(node_info) => {
|
||||
fmt!{"[block %d]", node_info.id}
|
||||
fmt!("[block %d]", node_info.id)
|
||||
}
|
||||
none => {
|
||||
fmt!{"[block %x]", ptr::addr_of(*self) as uint}
|
||||
fmt!("[block %x]", ptr::addr_of(*self) as uint)
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -876,7 +876,7 @@ fn C_cstr(cx: @crate_ctxt, s: ~str) -> ValueRef {
|
||||
llvm::LLVMConstString(buf, str::len(s) as c_uint, False)
|
||||
};
|
||||
let g =
|
||||
str::as_c_str(fmt!{"str%u", cx.names(~"str")},
|
||||
str::as_c_str(fmt!("str%u", cx.names(~"str")),
|
||||
|buf| llvm::LLVMAddGlobal(cx.llmod, val_ty(sc), buf));
|
||||
llvm::LLVMSetInitializer(g, sc);
|
||||
llvm::LLVMSetGlobalConstant(g, True);
|
||||
@ -930,7 +930,7 @@ fn C_bytes(bytes: ~[u8]) -> ValueRef unsafe {
|
||||
|
||||
fn C_shape(ccx: @crate_ctxt, bytes: ~[u8]) -> ValueRef {
|
||||
let llshape = C_bytes(bytes);
|
||||
let llglobal = str::as_c_str(fmt!{"shape%u", ccx.names(~"shape")}, |buf| {
|
||||
let llglobal = str::as_c_str(fmt!("shape%u", ccx.names(~"shape")), |buf| {
|
||||
llvm::LLVMAddGlobal(ccx.llmod, val_ty(llshape), buf)
|
||||
});
|
||||
llvm::LLVMSetInitializer(llglobal, llshape);
|
||||
@ -1025,8 +1025,8 @@ fn field_idx_strict(cx: ty::ctxt, sp: span, ident: ast::ident,
|
||||
-> uint {
|
||||
match ty::field_idx(ident, fields) {
|
||||
none => cx.sess.span_bug(
|
||||
sp, fmt!{"base expr doesn't appear to \
|
||||
have a field named %s", cx.sess.str_of(ident)}),
|
||||
sp, fmt!("base expr doesn't appear to \
|
||||
have a field named %s", cx.sess.str_of(ident))),
|
||||
some(i) => i
|
||||
}
|
||||
}
@ -179,7 +179,7 @@ fn create_compile_unit(cx: @crate_ctxt)
lli32(DW_LANG_RUST),
llstr(crate_name),
llstr(work_dir),
llstr(env!{"CFG_VERSION"}),
llstr(env!("CFG_VERSION")),
lli1(true), // deprecated: main compile unit
lli1(cx.sess.opts.optimize != session::No),
llstr(~""), // flags (???)
@ -712,7 +712,7 @@ fn create_function(fcx: fn_ctxt) -> @metadata<subprogram_md> {
let cx = fcx.ccx;
let dbg_cx = option::get(cx.dbg_cx);

debug!{"~~"};
debug!("~~");
log(debug, fcx.id);

let sp = option::get(fcx.span);