auto merge of #14414 : richo/rust/features/nerf_unused_string_fns, r=alexcrichton

This should block on #14323
bors 2014-05-27 17:46:48 -07:00
commit 911cc9c352
387 changed files with 3016 additions and 3012 deletions
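The change itself is mechanical: call sites that build a `String` through the old `.to_strbuf()`, `.into_strbuf()`, or `.to_owned()` helpers are switched to `.to_string()` / `.into_string()`. A minimal sketch of the before/after pattern (illustrative only; the names below are made up and not taken from the diff):

```rust
fn main() {
    // Before this change, a &str was turned into a String with the
    // StrBuf-era helpers, e.g.:
    //     let stage_id = raw.unwrap().to_strbuf();
    //     let banner = "hello".to_owned();

    // After the rename, everything funnels through to_string():
    let raw: Option<&str> = Some("stage2");  // stand-in for an opt_str() result
    let stage_id = raw.unwrap().to_string(); // was .to_strbuf()
    let banner = "hello".to_string();        // was .to_owned()
    println!("{} {}", stage_id, banner);
}
```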


@ -50,7 +50,7 @@ fn start(argc: int, argv: **u8) -> int {
pub fn main() {
let args = os::args();
let config = parse_config(args.move_iter()
.map(|x| x.to_strbuf())
.map(|x| x.to_string())
.collect());
log_config(&config);
run_tests(&config);
@ -134,15 +134,15 @@ pub fn parse_config(args: Vec<String> ) -> Config {
Config {
compile_lib_path: matches.opt_str("compile-lib-path")
.unwrap()
.to_strbuf(),
run_lib_path: matches.opt_str("run-lib-path").unwrap().to_strbuf(),
.to_string(),
run_lib_path: matches.opt_str("run-lib-path").unwrap().to_string(),
rustc_path: opt_path(matches, "rustc-path"),
clang_path: matches.opt_str("clang-path").map(|s| Path::new(s)),
llvm_bin_path: matches.opt_str("llvm-bin-path").map(|s| Path::new(s)),
src_base: opt_path(matches, "src-base"),
build_base: opt_path(matches, "build-base"),
aux_base: opt_path(matches, "aux-base"),
stage_id: matches.opt_str("stage-id").unwrap().to_strbuf(),
stage_id: matches.opt_str("stage-id").unwrap().to_string(),
mode: FromStr::from_str(matches.opt_str("mode")
.unwrap()
.as_slice()).expect("invalid mode"),
@ -156,32 +156,32 @@ pub fn parse_config(args: Vec<String> ) -> Config {
ratchet_noise_percent:
matches.opt_str("ratchet-noise-percent")
.and_then(|s| from_str::<f64>(s.as_slice())),
runtool: matches.opt_str("runtool").map(|x| x.to_strbuf()),
runtool: matches.opt_str("runtool").map(|x| x.to_string()),
host_rustcflags: matches.opt_str("host-rustcflags")
.map(|x| x.to_strbuf()),
.map(|x| x.to_string()),
target_rustcflags: matches.opt_str("target-rustcflags")
.map(|x| x.to_strbuf()),
.map(|x| x.to_string()),
jit: matches.opt_present("jit"),
target: opt_str2(matches.opt_str("target").map(|x| x.to_strbuf())),
host: opt_str2(matches.opt_str("host").map(|x| x.to_strbuf())),
target: opt_str2(matches.opt_str("target").map(|x| x.to_string())),
host: opt_str2(matches.opt_str("host").map(|x| x.to_string())),
android_cross_path: opt_path(matches, "android-cross-path"),
adb_path: opt_str2(matches.opt_str("adb-path")
.map(|x| x.to_strbuf())),
.map(|x| x.to_string())),
adb_test_dir: opt_str2(matches.opt_str("adb-test-dir")
.map(|x| x.to_strbuf())),
.map(|x| x.to_string())),
adb_device_status:
"arm-linux-androideabi" ==
opt_str2(matches.opt_str("target")
.map(|x| x.to_strbuf())).as_slice() &&
.map(|x| x.to_string())).as_slice() &&
"(none)" !=
opt_str2(matches.opt_str("adb-test-dir")
.map(|x| x.to_strbuf())).as_slice() &&
.map(|x| x.to_string())).as_slice() &&
!opt_str2(matches.opt_str("adb-test-dir")
.map(|x| x.to_strbuf())).is_empty(),
.map(|x| x.to_string())).is_empty(),
lldb_python_dir: matches.opt_str("lldb-python-dir")
.map(|x| x.to_strbuf()),
.map(|x| x.to_string()),
test_shard: test::opt_shard(matches.opt_str("test-shard")
.map(|x| x.to_strbuf())),
.map(|x| x.to_string())),
verbose: matches.opt_present("verbose")
}
}
@ -201,7 +201,7 @@ pub fn log_config(config: &Config) {
opt_str(&config.filter
.as_ref()
.map(|re| {
re.to_str().into_strbuf()
re.to_str().into_string()
}))));
logv(c, format_strbuf!("runtool: {}", opt_str(&config.runtool)));
logv(c, format_strbuf!("host-rustcflags: {}",
@ -218,7 +218,7 @@ pub fn log_config(config: &Config) {
logv(c, format_strbuf!("adb_device_status: {}",
config.adb_device_status));
match config.test_shard {
None => logv(c, "test_shard: (all)".to_strbuf()),
None => logv(c, "test_shard: (all)".to_string()),
Some((a,b)) => logv(c, format_strbuf!("test_shard: {}.{}", a, b))
}
logv(c, format_strbuf!("verbose: {}", config.verbose));
@ -234,7 +234,7 @@ pub fn opt_str<'a>(maybestr: &'a Option<String>) -> &'a str {
pub fn opt_str2(maybestr: Option<String>) -> String {
match maybestr {
None => "(none)".to_strbuf(),
None => "(none)".to_string(),
Some(s) => s,
}
}
@ -314,10 +314,10 @@ pub fn is_test(config: &Config, testfile: &Path) -> bool {
// Pretty-printer does not work with .rc files yet
let valid_extensions =
match config.mode {
Pretty => vec!(".rs".to_owned()),
_ => vec!(".rc".to_owned(), ".rs".to_owned())
Pretty => vec!(".rs".to_string()),
_ => vec!(".rc".to_string(), ".rs".to_string())
};
let invalid_prefixes = vec!(".".to_owned(), "#".to_owned(), "~".to_owned());
let invalid_prefixes = vec!(".".to_string(), "#".to_string(), "~".to_string());
let name = testfile.filename_str().unwrap();
let mut valid = false;
@ -367,7 +367,7 @@ pub fn make_test_name(config: &Config, testfile: &Path) -> test::TestName {
pub fn make_test_closure(config: &Config, testfile: &Path) -> test::TestFn {
let config = (*config).clone();
// FIXME (#9639): This needs to handle non-utf8 paths
let testfile = testfile.as_str().unwrap().to_strbuf();
let testfile = testfile.as_str().unwrap().to_string();
test::DynTestFn(proc() {
runtest::run(config, testfile)
})
@ -376,7 +376,7 @@ pub fn make_test_closure(config: &Config, testfile: &Path) -> test::TestFn {
pub fn make_metrics_test_closure(config: &Config, testfile: &Path) -> test::TestFn {
let config = (*config).clone();
// FIXME (#9639): This needs to handle non-utf8 paths
let testfile = testfile.as_str().unwrap().to_strbuf();
let testfile = testfile.as_str().unwrap().to_string();
test::DynMetricFn(proc(mm) {
runtest::run_metrics(config, testfile, mm)
})


@ -31,8 +31,8 @@ pub fn load_errors(re: &Regex, testfile: &Path) -> Vec<ExpectedError> {
fn parse_expected(line_num: uint, line: &str, re: &Regex) -> Option<ExpectedError> {
re.captures(line).and_then(|caps| {
let adjusts = caps.name("adjusts").len();
let kind = caps.name("kind").to_ascii().to_lower().into_str().to_strbuf();
let msg = caps.name("msg").trim().to_strbuf();
let kind = caps.name("kind").to_ascii().to_lower().into_str().to_string();
let msg = caps.name("msg").trim().to_string();
debug!("line={} kind={} msg={}", line_num, kind, msg);
Some(ExpectedError {


@ -170,23 +170,23 @@ fn iter_header(testfile: &Path, it: |&str| -> bool) -> bool {
}
fn parse_error_pattern(line: &str) -> Option<String> {
parse_name_value_directive(line, "error-pattern".to_strbuf())
parse_name_value_directive(line, "error-pattern".to_string())
}
fn parse_aux_build(line: &str) -> Option<String> {
parse_name_value_directive(line, "aux-build".to_strbuf())
parse_name_value_directive(line, "aux-build".to_string())
}
fn parse_compile_flags(line: &str) -> Option<String> {
parse_name_value_directive(line, "compile-flags".to_strbuf())
parse_name_value_directive(line, "compile-flags".to_string())
}
fn parse_run_flags(line: &str) -> Option<String> {
parse_name_value_directive(line, "run-flags".to_strbuf())
parse_name_value_directive(line, "run-flags".to_string())
}
fn parse_check_line(line: &str) -> Option<String> {
parse_name_value_directive(line, "check".to_strbuf())
parse_name_value_directive(line, "check".to_string())
}
fn parse_force_host(line: &str) -> bool {
@ -206,15 +206,15 @@ fn parse_no_pretty_expanded(line: &str) -> bool {
}
fn parse_exec_env(line: &str) -> Option<(String, String)> {
parse_name_value_directive(line, "exec-env".to_strbuf()).map(|nv| {
parse_name_value_directive(line, "exec-env".to_string()).map(|nv| {
// nv is either FOO or FOO=BAR
let mut strs: Vec<String> = nv.as_slice()
.splitn('=', 1)
.map(|s| s.to_strbuf())
.map(|s| s.to_string())
.collect();
match strs.len() {
1u => (strs.pop().unwrap(), "".to_strbuf()),
1u => (strs.pop().unwrap(), "".to_string()),
2u => {
let end = strs.pop().unwrap();
(strs.pop().unwrap(), end)
@ -225,7 +225,7 @@ fn parse_exec_env(line: &str) -> Option<(String, String)> {
}
fn parse_pp_exact(line: &str, testfile: &Path) -> Option<Path> {
match parse_name_value_directive(line, "pp-exact".to_strbuf()) {
match parse_name_value_directive(line, "pp-exact".to_string()) {
Some(s) => Some(Path::new(s)),
None => {
if parse_name_directive(line, "pp-exact") {
@ -247,7 +247,7 @@ pub fn parse_name_value_directive(line: &str, directive: String)
match line.find_str(keycolon.as_slice()) {
Some(colon) => {
let value = line.slice(colon + keycolon.len(),
line.len()).to_strbuf();
line.len()).to_string();
debug!("{}: {}", directive, value);
Some(value)
}


@ -15,7 +15,7 @@ use std::unstable::dynamic_lib::DynamicLibrary;
fn target_env(lib_path: &str, prog: &str) -> Vec<(String, String)> {
let prog = if cfg!(windows) {prog.slice_to(prog.len() - 4)} else {prog};
let mut aux_path = prog.to_strbuf();
let mut aux_path = prog.to_string();
aux_path.push_str(".libaux");
// Need to be sure to put both the lib_path and the aux path in the dylib
@ -27,7 +27,7 @@ fn target_env(lib_path: &str, prog: &str) -> Vec<(String, String)> {
// Remove the previous dylib search path var
let var = DynamicLibrary::envvar();
let mut env: Vec<(String,String)> =
os::env().move_iter().map(|(a,b)|(a.to_strbuf(), b.to_strbuf())).collect();
os::env().move_iter().map(|(a,b)|(a.to_string(), b.to_string())).collect();
match env.iter().position(|&(ref k, _)| k.as_slice() == var) {
Some(i) => { env.remove(i); }
None => {}
@ -35,8 +35,8 @@ fn target_env(lib_path: &str, prog: &str) -> Vec<(String, String)> {
// Add the new dylib search path var
let newpath = DynamicLibrary::create_path(path.as_slice());
env.push((var.to_strbuf(),
str::from_utf8(newpath.as_slice()).unwrap().to_strbuf()));
env.push((var.to_string(),
str::from_utf8(newpath.as_slice()).unwrap().to_string()));
return env;
}
@ -59,8 +59,8 @@ pub fn run(lib_path: &str,
Some(Result {
status: status,
out: str::from_utf8(output.as_slice()).unwrap().to_strbuf(),
err: str::from_utf8(error.as_slice()).unwrap().to_strbuf()
out: str::from_utf8(output.as_slice()).unwrap().to_string(),
err: str::from_utf8(error.as_slice()).unwrap().to_string()
})
},
Err(..) => None


@ -73,7 +73,7 @@ fn run_cfail_test(config: &Config, props: &TestProps, testfile: &Path) {
let proc_res = compile_test(config, props, testfile);
if proc_res.status.success() {
fatal_ProcRes("compile-fail test compiled successfully!".to_strbuf(),
fatal_ProcRes("compile-fail test compiled successfully!".to_string(),
&proc_res);
}
@ -83,7 +83,7 @@ fn run_cfail_test(config: &Config, props: &TestProps, testfile: &Path) {
if !expected_errors.is_empty() {
if !props.error_patterns.is_empty() {
fatal("both error pattern and expected errors \
specified".to_strbuf());
specified".to_string());
}
check_expected_errors(expected_errors, testfile, &proc_res);
} else {
@ -97,7 +97,7 @@ fn run_rfail_test(config: &Config, props: &TestProps, testfile: &Path) {
let proc_res = compile_test(config, props, testfile);
if !proc_res.status.success() {
fatal_ProcRes("compilation failed!".to_strbuf(), &proc_res);
fatal_ProcRes("compilation failed!".to_string(), &proc_res);
}
exec_compiled_test(config, props, testfile)
@ -108,7 +108,7 @@ fn run_rfail_test(config: &Config, props: &TestProps, testfile: &Path) {
// The value our Makefile configures valgrind to return on failure
static VALGRIND_ERR: int = 100;
if proc_res.status.matches_exit_status(VALGRIND_ERR) {
fatal_ProcRes("run-fail test isn't valgrind-clean!".to_strbuf(),
fatal_ProcRes("run-fail test isn't valgrind-clean!".to_string(),
&proc_res);
}
@ -132,35 +132,35 @@ fn run_rpass_test(config: &Config, props: &TestProps, testfile: &Path) {
let mut proc_res = compile_test(config, props, testfile);
if !proc_res.status.success() {
fatal_ProcRes("compilation failed!".to_strbuf(), &proc_res);
fatal_ProcRes("compilation failed!".to_string(), &proc_res);
}
proc_res = exec_compiled_test(config, props, testfile);
if !proc_res.status.success() {
fatal_ProcRes("test run failed!".to_strbuf(), &proc_res);
fatal_ProcRes("test run failed!".to_string(), &proc_res);
}
} else {
let proc_res = jit_test(config, props, testfile);
if !proc_res.status.success() {
fatal_ProcRes("jit failed!".to_strbuf(), &proc_res);
fatal_ProcRes("jit failed!".to_string(), &proc_res);
}
}
}
fn run_pretty_test(config: &Config, props: &TestProps, testfile: &Path) {
if props.pp_exact.is_some() {
logv(config, "testing for exact pretty-printing".to_strbuf());
logv(config, "testing for exact pretty-printing".to_string());
} else {
logv(config, "testing for converging pretty-printing".to_strbuf());
logv(config, "testing for converging pretty-printing".to_string());
}
let rounds =
match props.pp_exact { Some(_) => 1, None => 2 };
let src = File::open(testfile).read_to_end().unwrap();
let src = str::from_utf8(src.as_slice()).unwrap().to_strbuf();
let src = str::from_utf8(src.as_slice()).unwrap().to_string();
let mut srcs = vec!(src);
let mut round = 0;
@ -169,7 +169,7 @@ fn run_pretty_test(config: &Config, props: &TestProps, testfile: &Path) {
let proc_res = print_source(config,
props,
testfile,
(*srcs.get(round)).to_strbuf(),
(*srcs.get(round)).to_string(),
"normal");
if !proc_res.status.success() {
@ -187,7 +187,7 @@ fn run_pretty_test(config: &Config, props: &TestProps, testfile: &Path) {
Some(ref file) => {
let filepath = testfile.dir_path().join(file);
let s = File::open(&filepath).read_to_end().unwrap();
str::from_utf8(s.as_slice()).unwrap().to_strbuf()
str::from_utf8(s.as_slice()).unwrap().to_string()
}
None => { (*srcs.get(srcs.len() - 2u)).clone() }
};
@ -195,9 +195,9 @@ fn run_pretty_test(config: &Config, props: &TestProps, testfile: &Path) {
if props.pp_exact.is_some() {
// Now we have to care about line endings
let cr = "\r".to_strbuf();
actual = actual.replace(cr.as_slice(), "").to_strbuf();
expected = expected.replace(cr.as_slice(), "").to_strbuf();
let cr = "\r".to_string();
actual = actual.replace(cr.as_slice(), "").to_string();
expected = expected.replace(cr.as_slice(), "").to_string();
}
compare_source(expected.as_slice(), actual.as_slice());
@ -206,7 +206,7 @@ fn run_pretty_test(config: &Config, props: &TestProps, testfile: &Path) {
let proc_res = typecheck_source(config, props, testfile, actual);
if !proc_res.status.success() {
fatal_ProcRes("pretty-printed source does not typecheck".to_strbuf(),
fatal_ProcRes("pretty-printed source does not typecheck".to_string(),
&proc_res);
}
if props.no_pretty_expanded { return }
@ -238,7 +238,7 @@ fn run_pretty_test(config: &Config, props: &TestProps, testfile: &Path) {
make_pp_args(config,
props,
testfile,
pretty_type.to_strbuf()),
pretty_type.to_string()),
props.exec_env.clone(),
config.compile_lib_path.as_slice(),
Some(src))
@ -250,16 +250,16 @@ fn run_pretty_test(config: &Config, props: &TestProps, testfile: &Path) {
pretty_type: String) -> ProcArgs {
let aux_dir = aux_output_dir_name(config, testfile);
// FIXME (#9639): This needs to handle non-utf8 paths
let mut args = vec!("-".to_strbuf(),
"--pretty".to_strbuf(),
let mut args = vec!("-".to_string(),
"--pretty".to_string(),
pretty_type,
format_strbuf!("--target={}", config.target),
"-L".to_strbuf(),
aux_dir.as_str().unwrap().to_strbuf());
"-L".to_string(),
aux_dir.as_str().unwrap().to_string());
args.push_all_move(split_maybe_args(&config.target_rustcflags));
args.push_all_move(split_maybe_args(&props.compile_flags));
return ProcArgs {
prog: config.rustc_path.as_str().unwrap().to_strbuf(),
prog: config.rustc_path.as_str().unwrap().to_string(),
args: args,
};
}
@ -267,7 +267,7 @@ fn run_pretty_test(config: &Config, props: &TestProps, testfile: &Path) {
fn compare_source(expected: &str, actual: &str) {
if expected != actual {
error("pretty-printed source does not match expected \
source".to_strbuf());
source".to_string());
println!("\n\
expected:\n\
------------------------------------------\n\
@ -297,19 +297,19 @@ actual:\n\
config.target.as_slice()
};
// FIXME (#9639): This needs to handle non-utf8 paths
let mut args = vec!("-".to_strbuf(),
"--no-trans".to_strbuf(),
"--crate-type=lib".to_strbuf(),
let mut args = vec!("-".to_string(),
"--no-trans".to_string(),
"--crate-type=lib".to_string(),
format_strbuf!("--target={}", target),
"-L".to_strbuf(),
config.build_base.as_str().unwrap().to_strbuf(),
"-L".to_strbuf(),
aux_dir.as_str().unwrap().to_strbuf());
"-L".to_string(),
config.build_base.as_str().unwrap().to_string(),
"-L".to_string(),
aux_dir.as_str().unwrap().to_string());
args.push_all_move(split_maybe_args(&config.target_rustcflags));
args.push_all_move(split_maybe_args(&props.compile_flags));
// FIXME (#9639): This needs to handle non-utf8 paths
return ProcArgs {
prog: config.rustc_path.as_str().unwrap().to_strbuf(),
prog: config.rustc_path.as_str().unwrap().to_string(),
args: args,
};
}
@ -324,12 +324,12 @@ fn run_debuginfo_gdb_test(config: &Config, props: &TestProps, testfile: &Path) {
let config = &mut config;
let DebuggerCommands { commands, check_lines, .. } = parse_debugger_commands(testfile, "gdb");
let mut cmds = commands.connect("\n").to_strbuf();
let mut cmds = commands.connect("\n").to_string();
// compile test file (it should have 'compile-flags:-g' in the header)
let compiler_run_result = compile_test(config, props, testfile);
if !compiler_run_result.status.success() {
fatal_ProcRes("compilation failed!".to_strbuf(), &compiler_run_result);
fatal_ProcRes("compilation failed!".to_string(), &compiler_run_result);
}
let exe_file = make_exe_name(config, testfile);
@ -339,17 +339,17 @@ fn run_debuginfo_gdb_test(config: &Config, props: &TestProps, testfile: &Path) {
match config.target.as_slice() {
"arm-linux-androideabi" => {
cmds = cmds.replace("run", "continue").to_strbuf();
cmds = cmds.replace("run", "continue").to_string();
// write debugger script
let script_str = ["set charset UTF-8".to_strbuf(),
let script_str = ["set charset UTF-8".to_string(),
format_strbuf!("file {}",
exe_file.as_str()
.unwrap()
.to_strbuf()),
"target remote :5039".to_strbuf(),
.to_string()),
"target remote :5039".to_string(),
cmds,
"quit".to_strbuf()].connect("\n");
"quit".to_string()].connect("\n");
debug!("script_str = {}", script_str);
dump_output_file(config,
testfile,
@ -360,24 +360,24 @@ fn run_debuginfo_gdb_test(config: &Config, props: &TestProps, testfile: &Path) {
procsrv::run("",
config.adb_path.as_slice(),
[
"push".to_strbuf(),
exe_file.as_str().unwrap().to_strbuf(),
"push".to_string(),
exe_file.as_str().unwrap().to_string(),
config.adb_test_dir.clone()
],
vec!(("".to_strbuf(), "".to_strbuf())),
Some("".to_strbuf()))
vec!(("".to_string(), "".to_string())),
Some("".to_string()))
.expect(format_strbuf!("failed to exec `{}`",
config.adb_path));
procsrv::run("",
config.adb_path.as_slice(),
[
"forward".to_strbuf(),
"tcp:5039".to_strbuf(),
"tcp:5039".to_strbuf()
"forward".to_string(),
"tcp:5039".to_string(),
"tcp:5039".to_string()
],
vec!(("".to_strbuf(), "".to_strbuf())),
Some("".to_strbuf()))
vec!(("".to_string(), "".to_string())),
Some("".to_string()))
.expect(format_strbuf!("failed to exec `{}`", config.adb_path));
let adb_arg = format_strbuf!("export LD_LIBRARY_PATH={}; \
@ -392,12 +392,12 @@ fn run_debuginfo_gdb_test(config: &Config, props: &TestProps, testfile: &Path) {
config.adb_path
.as_slice(),
[
"shell".to_strbuf(),
"shell".to_string(),
adb_arg.clone()
],
vec!(("".to_strbuf(),
"".to_strbuf())),
Some("".to_strbuf()))
vec!(("".to_string(),
"".to_string())),
Some("".to_string()))
.expect(format_strbuf!("failed to exec `{}`",
config.adb_path));
loop {
@ -413,16 +413,16 @@ fn run_debuginfo_gdb_test(config: &Config, props: &TestProps, testfile: &Path) {
}
let tool_path = match config.android_cross_path.as_str() {
Some(x) => x.to_strbuf(),
None => fatal("cannot find android cross path".to_strbuf())
Some(x) => x.to_string(),
None => fatal("cannot find android cross path".to_string())
};
let debugger_script = make_out_name(config, testfile, "debugger.script");
// FIXME (#9639): This needs to handle non-utf8 paths
let debugger_opts =
vec!("-quiet".to_strbuf(),
"-batch".to_strbuf(),
"-nx".to_strbuf(),
vec!("-quiet".to_string(),
"-batch".to_string(),
"-nx".to_string(),
format_strbuf!("-command={}",
debugger_script.as_str().unwrap()));
@ -434,7 +434,7 @@ fn run_debuginfo_gdb_test(config: &Config, props: &TestProps, testfile: &Path) {
} = procsrv::run("",
gdb_path.as_slice(),
debugger_opts.as_slice(),
vec!(("".to_strbuf(), "".to_strbuf())),
vec!(("".to_string(), "".to_string())),
None)
.expect(format_strbuf!("failed to exec `{}`", gdb_path));
let cmdline = {
@ -457,9 +457,9 @@ fn run_debuginfo_gdb_test(config: &Config, props: &TestProps, testfile: &Path) {
_=> {
// write debugger script
let script_str = [
"set charset UTF-8".to_strbuf(),
"set charset UTF-8".to_string(),
cmds,
"quit\n".to_strbuf()
"quit\n".to_string()
].connect("\n");
debug!("script_str = {}", script_str);
dump_output_file(config,
@ -470,23 +470,23 @@ fn run_debuginfo_gdb_test(config: &Config, props: &TestProps, testfile: &Path) {
// run debugger script with gdb
#[cfg(windows)]
fn debugger() -> String {
"gdb.exe".to_strbuf()
"gdb.exe".to_string()
}
#[cfg(unix)]
fn debugger() -> String {
"gdb".to_strbuf()
"gdb".to_string()
}
let debugger_script = make_out_name(config, testfile, "debugger.script");
// FIXME (#9639): This needs to handle non-utf8 paths
let debugger_opts =
vec!("-quiet".to_strbuf(),
"-batch".to_strbuf(),
"-nx".to_strbuf(),
vec!("-quiet".to_string(),
"-batch".to_string(),
"-nx".to_string(),
format_strbuf!("-command={}",
debugger_script.as_str().unwrap()),
exe_file.as_str().unwrap().to_strbuf());
exe_file.as_str().unwrap().to_string());
proc_args = ProcArgs {
prog: debugger(),
args: debugger_opts,
@ -501,7 +501,7 @@ fn run_debuginfo_gdb_test(config: &Config, props: &TestProps, testfile: &Path) {
}
if !debugger_run_result.status.success() {
fatal("gdb failed to execute".to_strbuf());
fatal("gdb failed to execute".to_string());
}
check_debugger_output(&debugger_run_result, check_lines.as_slice());
@ -512,7 +512,7 @@ fn run_debuginfo_lldb_test(config: &Config, props: &TestProps, testfile: &Path)
if config.lldb_python_dir.is_none() {
fatal("Can't run LLDB test because LLDB's python path is not \
set.".to_strbuf());
set.".to_string());
}
let mut config = Config {
@ -526,7 +526,7 @@ fn run_debuginfo_lldb_test(config: &Config, props: &TestProps, testfile: &Path)
// compile test file (it should have 'compile-flags:-g' in the header)
let compile_result = compile_test(config, props, testfile);
if !compile_result.status.success() {
fatal_ProcRes("compilation failed!".to_strbuf(), &compile_result);
fatal_ProcRes("compilation failed!".to_string(), &compile_result);
}
let exe_file = make_exe_name(config, testfile);
@ -569,7 +569,7 @@ fn run_debuginfo_lldb_test(config: &Config, props: &TestProps, testfile: &Path)
let debugger_run_result = run_lldb(config, &exe_file, &debugger_script);
if !debugger_run_result.status.success() {
fatal_ProcRes("Error while running LLDB".to_strbuf(),
fatal_ProcRes("Error while running LLDB".to_string(),
&debugger_run_result);
}
@ -589,8 +589,8 @@ fn run_debuginfo_lldb_test(config: &Config, props: &TestProps, testfile: &Path)
process.wait_with_output().unwrap();
(status,
str::from_utf8(output.as_slice()).unwrap().to_strbuf(),
str::from_utf8(error.as_slice()).unwrap().to_strbuf())
str::from_utf8(output.as_slice()).unwrap().to_string(),
str::from_utf8(error.as_slice()).unwrap().to_string())
},
Err(e) => {
fatal(format_strbuf!("Failed to setup Python process for \
@ -636,13 +636,13 @@ fn parse_debugger_commands(file_path: &Path, debugger_prefix: &str)
header::parse_name_value_directive(
line.as_slice(),
command_directive.to_strbuf()).map(|cmd| {
command_directive.to_string()).map(|cmd| {
commands.push(cmd)
});
header::parse_name_value_directive(
line.as_slice(),
check_directive.to_strbuf()).map(|cmd| {
check_directive.to_string()).map(|cmd| {
check_lines.push(cmd)
});
}
@ -669,16 +669,16 @@ fn cleanup_debug_info_options(options: &Option<String>) -> Option<String> {
// Remove options that are either unwanted (-O) or may lead to duplicates due to RUSTFLAGS.
let options_to_remove = [
"-O".to_strbuf(),
"-g".to_strbuf(),
"--debuginfo".to_strbuf()
"-O".to_string(),
"-g".to_string(),
"--debuginfo".to_string()
];
let new_options =
split_maybe_args(options).move_iter()
.filter(|x| !options_to_remove.contains(x))
.collect::<Vec<String>>()
.connect(" ")
.to_strbuf();
.to_string();
Some(new_options)
}
@ -692,7 +692,7 @@ fn check_debugger_output(debugger_run_result: &ProcRes, check_lines: &[String])
s.as_slice()
.trim()
.split_str("[...]")
.map(|x| x.to_strbuf())
.map(|x| x.to_string())
.collect()
}).collect();
// check if each line in props.check_lines appears in the
@ -749,7 +749,7 @@ fn check_error_patterns(props: &TestProps,
}
if proc_res.status.success() {
fatal("process did not return an error status".to_strbuf());
fatal("process did not return an error status".to_string());
}
let mut next_err_idx = 0u;
@ -784,7 +784,7 @@ fn check_error_patterns(props: &TestProps,
for pattern in missing_patterns.iter() {
error(format_strbuf!("error pattern '{}' not found!", *pattern));
}
fatal_ProcRes("multiple error patterns not found".to_strbuf(),
fatal_ProcRes("multiple error patterns not found".to_string(),
proc_res);
}
}
@ -792,7 +792,7 @@ fn check_error_patterns(props: &TestProps,
fn check_no_compiler_crash(proc_res: &ProcRes) {
for line in proc_res.stderr.as_slice().lines() {
if line.starts_with("error: internal compiler error:") {
fatal_ProcRes("compiler encountered internal error".to_strbuf(),
fatal_ProcRes("compiler encountered internal error".to_string(),
proc_res);
}
}
@ -807,7 +807,7 @@ fn check_expected_errors(expected_errors: Vec<errors::ExpectedError> ,
expected_errors.len(), false);
if proc_res.status.success() {
fatal("process did not return an error status".to_strbuf());
fatal("process did not return an error status".to_string());
}
let prefixes = expected_errors.iter().map(|ee| {
@ -824,7 +824,7 @@ fn check_expected_errors(expected_errors: Vec<errors::ExpectedError> ,
c
}
} ).collect();
str::from_chars(c.as_slice()).to_strbuf()
str::from_chars(c.as_slice()).to_string()
}
#[cfg(target_os = "win32")]
@ -983,15 +983,15 @@ fn compile_test(config: &Config, props: &TestProps,
}
fn jit_test(config: &Config, props: &TestProps, testfile: &Path) -> ProcRes {
compile_test_(config, props, testfile, ["--jit".to_strbuf()])
compile_test_(config, props, testfile, ["--jit".to_string()])
}
fn compile_test_(config: &Config, props: &TestProps,
testfile: &Path, extra_args: &[String]) -> ProcRes {
let aux_dir = aux_output_dir_name(config, testfile);
// FIXME (#9639): This needs to handle non-utf8 paths
let link_args = vec!("-L".to_strbuf(),
aux_dir.as_str().unwrap().to_strbuf());
let link_args = vec!("-L".to_string(),
aux_dir.as_str().unwrap().to_string());
let args = make_compile_args(config,
props,
link_args.append(extra_args),
@ -1034,7 +1034,7 @@ fn compose_and_run_compiler(
let aux_dir = aux_output_dir_name(config, testfile);
// FIXME (#9639): This needs to handle non-utf8 paths
let extra_link_args = vec!("-L".to_owned(), aux_dir.as_str().unwrap().to_owned());
let extra_link_args = vec!("-L".to_string(), aux_dir.as_str().unwrap().to_string());
for rel_ab in props.aux_builds.iter() {
let abs_ab = config.aux_base.join(rel_ab.as_slice());
@ -1042,14 +1042,14 @@ fn compose_and_run_compiler(
let crate_type = if aux_props.no_prefer_dynamic {
Vec::new()
} else {
vec!("--crate-type=dylib".to_strbuf())
vec!("--crate-type=dylib".to_string())
};
let aux_args =
make_compile_args(config,
&aux_props,
crate_type.append(
extra_link_args.iter()
.map(|x| x.to_strbuf())
.map(|x| x.to_string())
.collect::<Vec<_>>()
.as_slice()),
|a,b| {
@ -1118,26 +1118,26 @@ fn make_compile_args(config: &Config,
config.target.as_slice()
};
// FIXME (#9639): This needs to handle non-utf8 paths
let mut args = vec!(testfile.as_str().unwrap().to_strbuf(),
"-L".to_strbuf(),
config.build_base.as_str().unwrap().to_strbuf(),
let mut args = vec!(testfile.as_str().unwrap().to_string(),
"-L".to_string(),
config.build_base.as_str().unwrap().to_string(),
format_strbuf!("--target={}", target));
args.push_all(extras.as_slice());
if !props.no_prefer_dynamic {
args.push("-C".to_strbuf());
args.push("prefer-dynamic".to_strbuf());
args.push("-C".to_string());
args.push("prefer-dynamic".to_string());
}
let path = match xform_file {
ThisFile(path) => {
args.push("-o".to_strbuf());
args.push("-o".to_string());
path
}
ThisDirectory(path) => {
args.push("--out-dir".to_strbuf());
args.push("--out-dir".to_string());
path
}
};
args.push(path.as_str().unwrap().to_strbuf());
args.push(path.as_str().unwrap().to_string());
if props.force_host {
args.push_all_move(split_maybe_args(&config.host_rustcflags));
} else {
@ -1145,7 +1145,7 @@ fn make_compile_args(config: &Config,
}
args.push_all_move(split_maybe_args(&props.compile_flags));
return ProcArgs {
prog: config.rustc_path.as_str().unwrap().to_strbuf(),
prog: config.rustc_path.as_str().unwrap().to_string(),
args: args,
};
}
@ -1176,7 +1176,7 @@ fn make_run_args(config: &Config, props: &TestProps, testfile: &Path) ->
let exe_file = make_exe_name(config, testfile);
// FIXME (#9639): This needs to handle non-utf8 paths
args.push(exe_file.as_str().unwrap().to_strbuf());
args.push(exe_file.as_str().unwrap().to_string());
// Add the arguments in the run_flags directive
args.push_all_move(split_maybe_args(&props.run_flags));
@ -1197,7 +1197,7 @@ fn split_maybe_args(argstr: &Option<String>) -> Vec<String> {
if s.is_whitespace() {
None
} else {
Some(s.to_strbuf())
Some(s.to_string())
}
}).collect()
}
@ -1342,7 +1342,7 @@ fn _arm_exec_compiled_test(config: &Config,
let mut tvec: Vec<String> = args.prog
.as_slice()
.split('/')
.map(|ts| ts.to_strbuf())
.map(|ts| ts.to_string())
.collect();
let prog_short = tvec.pop().unwrap();
@ -1350,12 +1350,12 @@ fn _arm_exec_compiled_test(config: &Config,
let copy_result = procsrv::run("",
config.adb_path.as_slice(),
[
"push".to_strbuf(),
"push".to_string(),
args.prog.clone(),
config.adb_test_dir.clone()
],
vec!(("".to_strbuf(), "".to_strbuf())),
Some("".to_strbuf()))
vec!(("".to_string(), "".to_string())),
Some("".to_string()))
.expect(format_strbuf!("failed to exec `{}`", config.adb_path));
if config.verbose {
@ -1371,7 +1371,7 @@ fn _arm_exec_compiled_test(config: &Config,
let mut runargs = Vec::new();
// run test via adb_run_wrapper
runargs.push("shell".to_strbuf());
runargs.push("shell".to_string());
for (key, val) in env.move_iter() {
runargs.push(format_strbuf!("{}={}", key, val));
}
@ -1381,18 +1381,18 @@ fn _arm_exec_compiled_test(config: &Config,
runargs.push(format_strbuf!("{}", prog_short));
for tv in args.args.iter() {
runargs.push(tv.to_strbuf());
runargs.push(tv.to_string());
}
procsrv::run("",
config.adb_path.as_slice(),
runargs.as_slice(),
vec!(("".to_strbuf(), "".to_strbuf())), Some("".to_strbuf()))
vec!(("".to_string(), "".to_string())), Some("".to_string()))
.expect(format_strbuf!("failed to exec `{}`", config.adb_path));
// get exitcode of result
runargs = Vec::new();
runargs.push("shell".to_strbuf());
runargs.push("cat".to_strbuf());
runargs.push("shell".to_string());
runargs.push("cat".to_string());
runargs.push(format_strbuf!("{}/{}.exitcode",
config.adb_test_dir,
prog_short));
@ -1401,8 +1401,8 @@ fn _arm_exec_compiled_test(config: &Config,
procsrv::run("",
config.adb_path.as_slice(),
runargs.as_slice(),
vec!(("".to_strbuf(), "".to_strbuf())),
Some("".to_strbuf()))
vec!(("".to_string(), "".to_string())),
Some("".to_string()))
.expect(format_strbuf!("failed to exec `{}`", config.adb_path));
let mut exitcode: int = 0;
@ -1416,8 +1416,8 @@ fn _arm_exec_compiled_test(config: &Config,
// get stdout of result
runargs = Vec::new();
runargs.push("shell".to_strbuf());
runargs.push("cat".to_strbuf());
runargs.push("shell".to_string());
runargs.push("cat".to_string());
runargs.push(format_strbuf!("{}/{}.stdout",
config.adb_test_dir,
prog_short));
@ -1426,14 +1426,14 @@ fn _arm_exec_compiled_test(config: &Config,
procsrv::run("",
config.adb_path.as_slice(),
runargs.as_slice(),
vec!(("".to_strbuf(), "".to_strbuf())),
Some("".to_strbuf()))
vec!(("".to_string(), "".to_string())),
Some("".to_string()))
.expect(format_strbuf!("failed to exec `{}`", config.adb_path));
// get stderr of result
runargs = Vec::new();
runargs.push("shell".to_strbuf());
runargs.push("cat".to_strbuf());
runargs.push("shell".to_string());
runargs.push("cat".to_string());
runargs.push(format_strbuf!("{}/{}.stderr",
config.adb_test_dir,
prog_short));
@ -1442,8 +1442,8 @@ fn _arm_exec_compiled_test(config: &Config,
procsrv::run("",
config.adb_path.as_slice(),
runargs.as_slice(),
vec!(("".to_strbuf(), "".to_strbuf())),
Some("".to_strbuf()))
vec!(("".to_string(), "".to_string())),
Some("".to_string()))
.expect(format_strbuf!("failed to exec `{}`", config.adb_path));
dump_output(config,
@ -1469,15 +1469,15 @@ fn _arm_push_aux_shared_library(config: &Config, testfile: &Path) {
let copy_result = procsrv::run("",
config.adb_path.as_slice(),
[
"push".to_strbuf(),
"push".to_string(),
file.as_str()
.unwrap()
.to_strbuf(),
config.adb_test_dir.to_strbuf()
.to_string(),
config.adb_test_dir.to_string()
],
vec!(("".to_strbuf(),
"".to_strbuf())),
Some("".to_strbuf()))
vec!(("".to_string(),
"".to_string())),
Some("".to_string()))
.expect(format_strbuf!("failed to exec `{}`",
config.adb_path));
@ -1509,12 +1509,12 @@ fn compile_test_and_save_bitcode(config: &Config, props: &TestProps,
testfile: &Path) -> ProcRes {
let aux_dir = aux_output_dir_name(config, testfile);
// FIXME (#9639): This needs to handle non-utf8 paths
let link_args = vec!("-L".to_strbuf(),
aux_dir.as_str().unwrap().to_strbuf());
let llvm_args = vec!("--emit=obj".to_strbuf(),
"--crate-type=lib".to_strbuf(),
"-C".to_strbuf(),
"save-temps".to_strbuf());
let link_args = vec!("-L".to_string(),
aux_dir.as_str().unwrap().to_string());
let llvm_args = vec!("--emit=obj".to_string(),
"--crate-type=lib".to_string(),
"-C".to_string(),
"save-temps".to_string());
let args = make_compile_args(config,
props,
link_args.append(llvm_args.as_slice()),
@ -1529,12 +1529,12 @@ fn compile_cc_with_clang_and_save_bitcode(config: &Config, _props: &TestProps,
let testcc = testfile.with_extension("cc");
let proc_args = ProcArgs {
// FIXME (#9639): This needs to handle non-utf8 paths
prog: config.clang_path.get_ref().as_str().unwrap().to_strbuf(),
args: vec!("-c".to_strbuf(),
"-emit-llvm".to_strbuf(),
"-o".to_strbuf(),
bitcodefile.as_str().unwrap().to_strbuf(),
testcc.as_str().unwrap().to_strbuf())
prog: config.clang_path.get_ref().as_str().unwrap().to_string(),
args: vec!("-c".to_string(),
"-emit-llvm".to_string(),
"-o".to_string(),
bitcodefile.as_str().unwrap().to_string(),
testcc.as_str().unwrap().to_string())
};
compose_and_run(config, testfile, proc_args, Vec::new(), "", None)
}
@ -1548,10 +1548,10 @@ fn extract_function_from_bitcode(config: &Config, _props: &TestProps,
let prog = config.llvm_bin_path.get_ref().join("llvm-extract");
let proc_args = ProcArgs {
// FIXME (#9639): This needs to handle non-utf8 paths
prog: prog.as_str().unwrap().to_strbuf(),
prog: prog.as_str().unwrap().to_string(),
args: vec!(format_strbuf!("-func={}", fname),
format_strbuf!("-o={}", extracted_bc.as_str().unwrap()),
bitcodefile.as_str().unwrap().to_strbuf())
bitcodefile.as_str().unwrap().to_string())
};
compose_and_run(config, testfile, proc_args, Vec::new(), "", None)
}
@ -1565,9 +1565,9 @@ fn disassemble_extract(config: &Config, _props: &TestProps,
let prog = config.llvm_bin_path.get_ref().join("llvm-dis");
let proc_args = ProcArgs {
// FIXME (#9639): This needs to handle non-utf8 paths
prog: prog.as_str().unwrap().to_strbuf(),
prog: prog.as_str().unwrap().to_string(),
args: vec!(format_strbuf!("-o={}", extracted_ll.as_str().unwrap()),
extracted_bc.as_str().unwrap().to_strbuf())
extracted_bc.as_str().unwrap().to_string())
};
compose_and_run(config, testfile, proc_args, Vec::new(), "", None)
}
@ -1584,44 +1584,44 @@ fn run_codegen_test(config: &Config, props: &TestProps,
testfile: &Path, mm: &mut MetricMap) {
if config.llvm_bin_path.is_none() {
fatal("missing --llvm-bin-path".to_strbuf());
fatal("missing --llvm-bin-path".to_string());
}
if config.clang_path.is_none() {
fatal("missing --clang-path".to_strbuf());
fatal("missing --clang-path".to_string());
}
let mut proc_res = compile_test_and_save_bitcode(config, props, testfile);
if !proc_res.status.success() {
fatal_ProcRes("compilation failed!".to_strbuf(), &proc_res);
fatal_ProcRes("compilation failed!".to_string(), &proc_res);
}
proc_res = extract_function_from_bitcode(config, props, "test", testfile, "");
if !proc_res.status.success() {
fatal_ProcRes("extracting 'test' function failed".to_strbuf(),
fatal_ProcRes("extracting 'test' function failed".to_string(),
&proc_res);
}
proc_res = disassemble_extract(config, props, testfile, "");
if !proc_res.status.success() {
fatal_ProcRes("disassembling extract failed".to_strbuf(), &proc_res);
fatal_ProcRes("disassembling extract failed".to_string(), &proc_res);
}
let mut proc_res = compile_cc_with_clang_and_save_bitcode(config, props, testfile);
if !proc_res.status.success() {
fatal_ProcRes("compilation failed!".to_strbuf(), &proc_res);
fatal_ProcRes("compilation failed!".to_string(), &proc_res);
}
proc_res = extract_function_from_bitcode(config, props, "test", testfile, "clang");
if !proc_res.status.success() {
fatal_ProcRes("extracting 'test' function failed".to_strbuf(),
fatal_ProcRes("extracting 'test' function failed".to_string(),
&proc_res);
}
proc_res = disassemble_extract(config, props, testfile, "clang");
if !proc_res.status.success() {
fatal_ProcRes("disassembling extract failed".to_strbuf(), &proc_res);
fatal_ProcRes("disassembling extract failed".to_string(), &proc_res);
}
let base = output_base_name(config, testfile);


@ -41,15 +41,15 @@ pub fn make_new_path(path: &str) -> String {
Some(curr) => {
format_strbuf!("{}{}{}", path, path_div(), curr)
}
None => path.to_str().to_strbuf()
None => path.to_str().to_string()
}
}
#[cfg(target_os = "win32")]
pub fn lib_path_env_var() -> String { "PATH".to_strbuf() }
pub fn lib_path_env_var() -> String { "PATH".to_string() }
#[cfg(target_os = "win32")]
pub fn path_div() -> String { ";".to_strbuf() }
pub fn path_div() -> String { ";".to_string() }
pub fn logv(config: &Config, s: String) {
debug!("{}", s);


@ -8,7 +8,7 @@ Use [`ToStr`](../std/to_str/trait.ToStr.html).
~~~
let x: int = 42;
let y: String = x.to_str().to_strbuf();
let y: String = x.to_str().to_string();
~~~
**String to int**
@ -65,7 +65,7 @@ To return an Owned String use the str helper function
use std::str;
let x: Option<String> =
str::from_utf8([ 104u8, 105u8 ]).map(|x| x.to_strbuf());
str::from_utf8([ 104u8, 105u8 ]).map(|x| x.to_string());
let y: String = x.unwrap();
~~~
@ -211,13 +211,13 @@ fn open(Door(name): Door<Closed>) -> Door<Open> {
Door::<Open>(name)
}
let _ = close(Door::<Open>("front".to_strbuf()));
let _ = close(Door::<Open>("front".to_string()));
~~~
Attempting to close a closed door is prevented statically:
~~~ {.ignore}
let _ = close(Door::<Closed>("front".to_strbuf())); // error: mismatched types: expected `main::Door<main::Open>` but found `main::Door<main::Closed>`
let _ = close(Door::<Closed>("front".to_string())); // error: mismatched types: expected `main::Door<main::Open>` but found `main::Door<main::Closed>`
~~~
# FFI (Foreign Function Interface)


@ -467,7 +467,7 @@ fn stringifier(channel: &sync::DuplexStream<String, uint>) {
let mut value: uint;
loop {
value = channel.recv();
channel.send(value.to_str().to_strbuf());
channel.send(value.to_str().to_string());
if value == 0 { break; }
}
}
@ -492,7 +492,7 @@ extern crate sync;
# let mut value: uint;
# loop {
# value = channel.recv();
# channel.send(value.to_str().to_strbuf());
# channel.send(value.to_str().to_string());
# if value == 0u { break; }
# }
# }


@ -1264,8 +1264,8 @@ enum Animal {
Cat { name: String, weight: f64 }
}
let mut a: Animal = Dog("Cocoa".to_strbuf(), 37.2);
a = Cat { name: "Spotty".to_strbuf(), weight: 2.7 };
let mut a: Animal = Dog("Cocoa".to_string(), 37.2);
a = Cat { name: "Spotty".to_string(), weight: 2.7 };
~~~~
In this example, `Cat` is a _struct-like enum variant_,
@ -3538,7 +3538,7 @@ allocated on the heap (unlike closures). An example of creating and calling a
procedure:
```rust
let string = "Hello".to_owned();
let string = "Hello".to_string();
// Creates a new procedure, passing it to the `spawn` function.
spawn(proc() {
@ -3578,7 +3578,7 @@ trait Printable {
}
impl Printable for int {
fn to_string(&self) -> String { self.to_str().to_strbuf() }
fn to_string(&self) -> String { self.to_str().to_string() }
}
fn print(a: Box<Printable>) {


@ -2236,7 +2236,7 @@ impl Printable for String {
}
# 1.print();
# ("foo".to_strbuf()).print();
# ("foo".to_string()).print();
~~~~
Methods defined in an impl for a trait may be called just like
@ -2286,7 +2286,7 @@ impl Printable for bool {}
impl Printable for f32 {}
# 1.print();
# ("foo".to_strbuf()).print();
# ("foo".to_string()).print();
# true.print();
# 3.14159.print();
~~~~


@ -547,7 +547,7 @@ mod tests {
let arena = TypedArena::new();
for _ in range(0, 100000) {
arena.alloc(Noncopy {
string: "hello world".to_strbuf(),
string: "hello world".to_string(),
array: vec!( 1, 2, 3, 4, 5 ),
});
}
@ -558,7 +558,7 @@ mod tests {
let arena = TypedArena::new();
b.iter(|| {
arena.alloc(Noncopy {
string: "hello world".to_strbuf(),
string: "hello world".to_string(),
array: vec!( 1, 2, 3, 4, 5 ),
})
})
@ -568,7 +568,7 @@ mod tests {
pub fn bench_noncopy_nonarena(b: &mut Bencher) {
b.iter(|| {
box Noncopy {
string: "hello world".to_strbuf(),
string: "hello world".to_string(),
array: vec!( 1, 2, 3, 4, 5 ),
}
})
@ -579,7 +579,7 @@ mod tests {
let arena = Arena::new();
b.iter(|| {
arena.alloc(|| Noncopy {
string: "hello world".to_strbuf(),
string: "hello world".to_string(),
array: vec!( 1, 2, 3, 4, 5 ),
})
})


@ -988,10 +988,10 @@ mod tests {
#[test]
fn test_to_str() {
let zerolen = Bitv::new(0u, false);
assert_eq!(zerolen.to_str(), "".to_owned());
assert_eq!(zerolen.to_str(), "".to_string());
let eightbits = Bitv::new(8u, false);
assert_eq!(eightbits.to_str(), "00000000".to_owned());
assert_eq!(eightbits.to_str(), "00000000".to_string());
}
#[test]
@ -1014,7 +1014,7 @@ mod tests {
let mut b = bitv::Bitv::new(2, false);
b.set(0, true);
b.set(1, false);
assert_eq!(b.to_str(), "10".to_owned());
assert_eq!(b.to_str(), "10".to_string());
}
#[test]
@ -1343,7 +1343,7 @@ mod tests {
#[test]
fn test_from_bools() {
assert!(from_bools([true, false, true, true]).to_str() ==
"1011".to_owned());
"1011".to_string());
}
#[test]


@ -778,70 +778,70 @@ mod test_btree {
//Tests the functionality of the insert methods (which are unfinished).
#[test]
fn insert_test_one() {
let b = BTree::new(1, "abc".to_owned(), 2);
let is_insert = b.insert(2, "xyz".to_owned());
let b = BTree::new(1, "abc".to_string(), 2);
let is_insert = b.insert(2, "xyz".to_string());
//println!("{}", is_insert.clone().to_str());
assert!(is_insert.root.is_leaf());
}
#[test]
fn insert_test_two() {
let leaf_elt_1 = LeafElt::new(1, "aaa".to_owned());
let leaf_elt_2 = LeafElt::new(2, "bbb".to_owned());
let leaf_elt_3 = LeafElt::new(3, "ccc".to_owned());
let leaf_elt_1 = LeafElt::new(1, "aaa".to_string());
let leaf_elt_2 = LeafElt::new(2, "bbb".to_string());
let leaf_elt_3 = LeafElt::new(3, "ccc".to_string());
let n = Node::new_leaf(vec!(leaf_elt_1, leaf_elt_2, leaf_elt_3));
let b = BTree::new_with_node_len(n, 3, 2);
//println!("{}", b.clone().insert(4, "ddd".to_owned()).to_str());
assert!(b.insert(4, "ddd".to_owned()).root.is_leaf());
//println!("{}", b.clone().insert(4, "ddd".to_string()).to_str());
assert!(b.insert(4, "ddd".to_string()).root.is_leaf());
}
#[test]
fn insert_test_three() {
let leaf_elt_1 = LeafElt::new(1, "aaa".to_owned());
let leaf_elt_2 = LeafElt::new(2, "bbb".to_owned());
let leaf_elt_3 = LeafElt::new(3, "ccc".to_owned());
let leaf_elt_4 = LeafElt::new(4, "ddd".to_owned());
let leaf_elt_1 = LeafElt::new(1, "aaa".to_string());
let leaf_elt_2 = LeafElt::new(2, "bbb".to_string());
let leaf_elt_3 = LeafElt::new(3, "ccc".to_string());
let leaf_elt_4 = LeafElt::new(4, "ddd".to_string());
let n = Node::new_leaf(vec!(leaf_elt_1, leaf_elt_2, leaf_elt_3, leaf_elt_4));
let b = BTree::new_with_node_len(n, 3, 2);
//println!("{}", b.clone().insert(5, "eee".to_owned()).to_str());
assert!(!b.insert(5, "eee".to_owned()).root.is_leaf());
//println!("{}", b.clone().insert(5, "eee".to_string()).to_str());
assert!(!b.insert(5, "eee".to_string()).root.is_leaf());
}
#[test]
fn insert_test_four() {
let leaf_elt_1 = LeafElt::new(1, "aaa".to_owned());
let leaf_elt_2 = LeafElt::new(2, "bbb".to_owned());
let leaf_elt_3 = LeafElt::new(3, "ccc".to_owned());
let leaf_elt_4 = LeafElt::new(4, "ddd".to_owned());
let leaf_elt_1 = LeafElt::new(1, "aaa".to_string());
let leaf_elt_2 = LeafElt::new(2, "bbb".to_string());
let leaf_elt_3 = LeafElt::new(3, "ccc".to_string());
let leaf_elt_4 = LeafElt::new(4, "ddd".to_string());
let n = Node::new_leaf(vec!(leaf_elt_1, leaf_elt_2, leaf_elt_3, leaf_elt_4));
let mut b = BTree::new_with_node_len(n, 3, 2);
b = b.clone().insert(5, "eee".to_owned());
b = b.clone().insert(6, "fff".to_owned());
b = b.clone().insert(7, "ggg".to_owned());
b = b.clone().insert(8, "hhh".to_owned());
b = b.clone().insert(0, "omg".to_owned());
b = b.clone().insert(5, "eee".to_string());
b = b.clone().insert(6, "fff".to_string());
b = b.clone().insert(7, "ggg".to_string());
b = b.clone().insert(8, "hhh".to_string());
b = b.clone().insert(0, "omg".to_string());
//println!("{}", b.clone().to_str());
assert!(!b.root.is_leaf());
}
#[test]
fn bsearch_test_one() {
let b = BTree::new(1, "abc".to_owned(), 2);
let b = BTree::new(1, "abc".to_string(), 2);
assert_eq!(Some(1), b.root.bsearch_node(2));
}
#[test]
fn bsearch_test_two() {
let b = BTree::new(1, "abc".to_owned(), 2);
let b = BTree::new(1, "abc".to_string(), 2);
assert_eq!(Some(0), b.root.bsearch_node(0));
}
#[test]
fn bsearch_test_three() {
let leaf_elt_1 = LeafElt::new(1, "aaa".to_owned());
let leaf_elt_2 = LeafElt::new(2, "bbb".to_owned());
let leaf_elt_3 = LeafElt::new(4, "ccc".to_owned());
let leaf_elt_4 = LeafElt::new(5, "ddd".to_owned());
let leaf_elt_1 = LeafElt::new(1, "aaa".to_string());
let leaf_elt_2 = LeafElt::new(2, "bbb".to_string());
let leaf_elt_3 = LeafElt::new(4, "ccc".to_string());
let leaf_elt_4 = LeafElt::new(5, "ddd".to_string());
let n = Node::new_leaf(vec!(leaf_elt_1, leaf_elt_2, leaf_elt_3, leaf_elt_4));
let b = BTree::new_with_node_len(n, 3, 2);
assert_eq!(Some(2), b.root.bsearch_node(3));
@ -849,10 +849,10 @@ mod test_btree {
#[test]
fn bsearch_test_four() {
let leaf_elt_1 = LeafElt::new(1, "aaa".to_owned());
let leaf_elt_2 = LeafElt::new(2, "bbb".to_owned());
let leaf_elt_3 = LeafElt::new(4, "ccc".to_owned());
let leaf_elt_4 = LeafElt::new(5, "ddd".to_owned());
let leaf_elt_1 = LeafElt::new(1, "aaa".to_string());
let leaf_elt_2 = LeafElt::new(2, "bbb".to_string());
let leaf_elt_3 = LeafElt::new(4, "ccc".to_string());
let leaf_elt_4 = LeafElt::new(5, "ddd".to_string());
let n = Node::new_leaf(vec!(leaf_elt_1, leaf_elt_2, leaf_elt_3, leaf_elt_4));
let b = BTree::new_with_node_len(n, 3, 2);
assert_eq!(Some(4), b.root.bsearch_node(800));
@ -861,15 +861,15 @@ mod test_btree {
//Tests the functionality of the get method.
#[test]
fn get_test() {
let b = BTree::new(1, "abc".to_owned(), 2);
let b = BTree::new(1, "abc".to_string(), 2);
let val = b.get(1);
assert_eq!(val, Some("abc".to_owned()));
assert_eq!(val, Some("abc".to_string()));
}
//Tests the BTree's clone() method.
#[test]
fn btree_clone_test() {
let b = BTree::new(1, "abc".to_owned(), 2);
let b = BTree::new(1, "abc".to_string(), 2);
let b2 = b.clone();
assert!(b.root == b2.root)
}
@ -877,32 +877,32 @@ mod test_btree {
//Tests the BTree's cmp() method when one node is "less than" another.
#[test]
fn btree_cmp_test_less() {
let b = BTree::new(1, "abc".to_owned(), 2);
let b2 = BTree::new(2, "bcd".to_owned(), 2);
let b = BTree::new(1, "abc".to_string(), 2);
let b2 = BTree::new(2, "bcd".to_string(), 2);
assert!(&b.cmp(&b2) == &Less)
}
//Tests the BTree's cmp() method when two nodes are equal.
#[test]
fn btree_cmp_test_eq() {
let b = BTree::new(1, "abc".to_owned(), 2);
let b2 = BTree::new(1, "bcd".to_owned(), 2);
let b = BTree::new(1, "abc".to_string(), 2);
let b2 = BTree::new(1, "bcd".to_string(), 2);
assert!(&b.cmp(&b2) == &Equal)
}
//Tests the BTree's cmp() method when one node is "greater than" another.
#[test]
fn btree_cmp_test_greater() {
let b = BTree::new(1, "abc".to_owned(), 2);
let b2 = BTree::new(2, "bcd".to_owned(), 2);
let b = BTree::new(1, "abc".to_string(), 2);
let b2 = BTree::new(2, "bcd".to_string(), 2);
assert!(&b2.cmp(&b) == &Greater)
}
//Tests the BTree's to_str() method.
#[test]
fn btree_tostr_test() {
let b = BTree::new(1, "abc".to_owned(), 2);
assert_eq!(b.to_str(), "Key: 1, value: abc;".to_owned())
let b = BTree::new(1, "abc".to_string(), 2);
assert_eq!(b.to_str(), "Key: 1, value: abc;".to_string())
}
}


@ -2040,9 +2040,9 @@ mod test_map {
let mut m = HashMap::new();
let (foo, bar, baz) = (1,2,3);
m.insert("foo".to_owned(), foo);
m.insert("bar".to_owned(), bar);
m.insert("baz".to_owned(), baz);
m.insert("foo".to_string(), foo);
m.insert("bar".to_string(), bar);
m.insert("baz".to_string(), baz);
assert_eq!(m.find_equiv(&("foo")), Some(&foo));
@ -2327,8 +2327,8 @@ mod test_set {
let set_str = format!("{}", set);
assert!(set_str == "{1, 2}".to_owned() || set_str == "{2, 1}".to_owned());
assert_eq!(format!("{}", empty), "{}".to_owned());
assert!(set_str == "{1, 2}".to_string() || set_str == "{2, 1}".to_string());
assert_eq!(format!("{}", empty), "{}".to_string());
}
}


@ -271,22 +271,22 @@ mod tests {
#[test]
fn test_put_update() {
let mut cache: LruCache<String, Vec<u8>> = LruCache::new(1);
cache.put("1".to_strbuf(), vec![10, 10]);
cache.put("1".to_strbuf(), vec![10, 19]);
assert_opt_eq(cache.get(&"1".to_strbuf()), vec![10, 19]);
cache.put("1".to_string(), vec![10, 10]);
cache.put("1".to_string(), vec![10, 19]);
assert_opt_eq(cache.get(&"1".to_string()), vec![10, 19]);
assert_eq!(cache.len(), 1);
}
#[test]
fn test_expire_lru() {
let mut cache: LruCache<String, String> = LruCache::new(2);
cache.put("foo1".to_strbuf(), "bar1".to_strbuf());
cache.put("foo2".to_strbuf(), "bar2".to_strbuf());
cache.put("foo3".to_strbuf(), "bar3".to_strbuf());
assert!(cache.get(&"foo1".to_strbuf()).is_none());
cache.put("foo2".to_strbuf(), "bar2update".to_strbuf());
cache.put("foo4".to_strbuf(), "bar4".to_strbuf());
assert!(cache.get(&"foo3".to_strbuf()).is_none());
cache.put("foo1".to_string(), "bar1".to_string());
cache.put("foo2".to_string(), "bar2".to_string());
cache.put("foo3".to_string(), "bar3".to_string());
assert!(cache.get(&"foo1".to_string()).is_none());
cache.put("foo2".to_string(), "bar2update".to_string());
cache.put("foo4".to_string(), "bar4".to_string());
assert!(cache.get(&"foo3".to_string()).is_none());
}
#[test]
@ -319,15 +319,15 @@ mod tests {
cache.put(1, 10);
cache.put(2, 20);
cache.put(3, 30);
assert_eq!(cache.to_str(), "{3: 30, 2: 20, 1: 10}".to_owned());
assert_eq!(cache.to_str(), "{3: 30, 2: 20, 1: 10}".to_string());
cache.put(2, 22);
assert_eq!(cache.to_str(), "{2: 22, 3: 30, 1: 10}".to_owned());
assert_eq!(cache.to_str(), "{2: 22, 3: 30, 1: 10}".to_string());
cache.put(6, 60);
assert_eq!(cache.to_str(), "{6: 60, 2: 22, 3: 30}".to_owned());
assert_eq!(cache.to_str(), "{6: 60, 2: 22, 3: 30}".to_string());
cache.get(&3);
assert_eq!(cache.to_str(), "{3: 30, 6: 60, 2: 22}".to_owned());
assert_eq!(cache.to_str(), "{3: 30, 6: 60, 2: 22}".to_string());
cache.change_capacity(2);
assert_eq!(cache.to_str(), "{3: 30, 6: 60}".to_owned());
assert_eq!(cache.to_str(), "{3: 30, 6: 60}".to_string());
}
#[test]
@ -338,6 +338,6 @@ mod tests {
cache.clear();
assert!(cache.get(&1).is_none());
assert!(cache.get(&2).is_none());
assert_eq!(cache.to_str(), "{}".to_owned());
assert_eq!(cache.to_str(), "{}".to_string());
}
}


@ -140,7 +140,7 @@ pub struct RadixFmt<T, R>(T, R);
///
/// ~~~
/// use std::fmt::radix;
/// assert_eq!(format!("{}", radix(55, 36)), "1j".to_owned());
/// assert_eq!(format!("{}", radix(55, 36)), "1j".to_string());
/// ~~~
pub fn radix<T>(x: T, base: u8) -> RadixFmt<T, Radix> {
RadixFmt(x, Radix::new(base))


@ -552,7 +552,7 @@ mod tests {
#[test]
fn test_replace() {
let mut x = Some("test".to_owned());
let mut x = Some("test".to_string());
let y = replace(&mut x, None);
assert!(x.is_none());
assert!(y.is_some());
@ -576,7 +576,7 @@ mod tests {
}
unsafe {
assert!(Vec::from_slice([76u8]) == transmute("L".to_owned()));
assert!(Vec::from_slice([76u8]) == transmute("L".to_string()));
}
}
}


@ -194,7 +194,7 @@ impl<T> Option<T> {
/// to the value inside the original.
///
/// ```
/// let num_as_str: Option<String> = Some("10".to_strbuf());
/// let num_as_str: Option<String> = Some("10".to_string());
/// // First, cast `Option<String>` to `Option<&String>` with `as_ref`,
/// // then consume *that* with `map`, leaving `num_as_str` on the stack.
/// let num_as_int: Option<uint> = num_as_str.as_ref().map(|n| n.len());
@ -281,7 +281,7 @@ impl<T> Option<T> {
/// Convert an `Option<String>` into an `Option<uint>`, consuming the original:
///
/// ```
/// let num_as_str: Option<String> = Some("10".to_strbuf());
/// let num_as_str: Option<String> = Some("10".to_string());
/// // `Option::map` takes self *by value*, consuming `num_as_str`
/// let num_as_int: Option<uint> = num_as_str.map(|n| n.len());
/// ```
@ -620,7 +620,7 @@ mod tests {
#[test]
fn test_get_str() {
let x = "test".to_strbuf();
let x = "test".to_string();
let addr_x = x.as_slice().as_ptr();
let opt = Some(x);
let y = opt.unwrap();
@ -746,7 +746,7 @@ mod tests {
#[test]
fn test_unwrap() {
assert_eq!(Some(1).unwrap(), 1);
let s = Some("hello".to_strbuf()).unwrap();
let s = Some("hello".to_string()).unwrap();
assert_eq!(s.as_slice(), "hello");
}


@ -925,15 +925,15 @@ pub trait MutableVector<'a, T> {
/// # Example
///
/// ```rust
/// let mut v = ~["foo".to_owned(), "bar".to_owned(), "baz".to_owned()];
/// let mut v = ~["foo".to_string(), "bar".to_string(), "baz".to_string()];
///
/// unsafe {
/// // `"baz".to_owned()` is deallocated.
/// v.unsafe_set(2, "qux".to_owned());
/// // `"baz".to_string()` is deallocated.
/// v.unsafe_set(2, "qux".to_string());
///
/// // Out of bounds: could cause a crash, or overwriting
/// // other data, or something else.
/// // v.unsafe_set(10, "oops".to_owned());
/// // v.unsafe_set(10, "oops".to_string());
/// }
/// ```
unsafe fn unsafe_set(self, index: uint, val: T);
@ -945,10 +945,10 @@ pub trait MutableVector<'a, T> {
/// # Example
///
/// ```rust
/// let mut v = ["foo".to_owned(), "bar".to_owned()];
/// let mut v = ["foo".to_string(), "bar".to_string()];
///
/// // memory leak! `"bar".to_owned()` is not deallocated.
/// unsafe { v.init_elem(1, "baz".to_owned()); }
/// // memory leak! `"bar".to_string()` is not deallocated.
/// unsafe { v.init_elem(1, "baz".to_string()); }
/// ```
unsafe fn init_elem(self, i: uint, val: T);


@ -249,7 +249,7 @@ impl<'a> Parser<'a> {
/// String, but I think it does when this eventually uses conditions so it
/// might as well start using it now.
fn err(&mut self, msg: &str) {
self.errors.push(msg.to_strbuf());
self.errors.push(msg.to_string());
}
/// Optionally consumes the specified character. If the character is not at


@ -50,7 +50,7 @@
//!
//! fn main() {
//! let args: Vec<String> = os::args().iter()
//! .map(|x| x.to_strbuf())
//! .map(|x| x.to_string())
//! .collect();
//!
//! let program = args.get(0).clone();
@ -215,14 +215,14 @@ impl Name {
if nm.len() == 1u {
Short(nm.char_at(0u))
} else {
Long(nm.to_strbuf())
Long(nm.to_string())
}
}
fn to_str(&self) -> String {
match *self {
Short(ch) => ch.to_str().to_strbuf(),
Long(ref s) => s.to_strbuf()
Short(ch) => ch.to_str().to_string(),
Long(ref s) => s.to_string()
}
}
}
@ -362,7 +362,7 @@ impl Matches {
}
match vals.get(0) {
&Val(ref s) => Some((*s).clone()),
_ => Some(def.to_strbuf())
_ => Some(def.to_string())
}
}
@ -394,10 +394,10 @@ pub fn reqopt(short_name: &str, long_name: &str, desc: &str, hint: &str) -> OptG
let len = short_name.len();
assert!(len == 1 || len == 0);
OptGroup {
short_name: short_name.to_strbuf(),
long_name: long_name.to_strbuf(),
hint: hint.to_strbuf(),
desc: desc.to_strbuf(),
short_name: short_name.to_string(),
long_name: long_name.to_string(),
hint: hint.to_string(),
desc: desc.to_string(),
hasarg: Yes,
occur: Req
}
@ -408,10 +408,10 @@ pub fn optopt(short_name: &str, long_name: &str, desc: &str, hint: &str) -> OptG
let len = short_name.len();
assert!(len == 1 || len == 0);
OptGroup {
short_name: short_name.to_strbuf(),
long_name: long_name.to_strbuf(),
hint: hint.to_strbuf(),
desc: desc.to_strbuf(),
short_name: short_name.to_string(),
long_name: long_name.to_string(),
hint: hint.to_string(),
desc: desc.to_string(),
hasarg: Yes,
occur: Optional
}
@ -422,10 +422,10 @@ pub fn optflag(short_name: &str, long_name: &str, desc: &str) -> OptGroup {
let len = short_name.len();
assert!(len == 1 || len == 0);
OptGroup {
short_name: short_name.to_strbuf(),
long_name: long_name.to_strbuf(),
hint: "".to_strbuf(),
desc: desc.to_strbuf(),
short_name: short_name.to_string(),
long_name: long_name.to_string(),
hint: "".to_string(),
desc: desc.to_string(),
hasarg: No,
occur: Optional
}
@ -437,10 +437,10 @@ pub fn optflagmulti(short_name: &str, long_name: &str, desc: &str) -> OptGroup {
let len = short_name.len();
assert!(len == 1 || len == 0);
OptGroup {
short_name: short_name.to_strbuf(),
long_name: long_name.to_strbuf(),
hint: "".to_strbuf(),
desc: desc.to_strbuf(),
short_name: short_name.to_string(),
long_name: long_name.to_string(),
hint: "".to_string(),
desc: desc.to_string(),
hasarg: No,
occur: Multi
}
@ -451,10 +451,10 @@ pub fn optflagopt(short_name: &str, long_name: &str, desc: &str, hint: &str) ->
let len = short_name.len();
assert!(len == 1 || len == 0);
OptGroup {
short_name: short_name.to_strbuf(),
long_name: long_name.to_strbuf(),
hint: hint.to_strbuf(),
desc: desc.to_strbuf(),
short_name: short_name.to_string(),
long_name: long_name.to_string(),
hint: hint.to_string(),
desc: desc.to_string(),
hasarg: Maybe,
occur: Optional
}
@ -466,10 +466,10 @@ pub fn optmulti(short_name: &str, long_name: &str, desc: &str, hint: &str) -> Op
let len = short_name.len();
assert!(len == 1 || len == 0);
OptGroup {
short_name: short_name.to_strbuf(),
long_name: long_name.to_strbuf(),
hint: hint.to_strbuf(),
desc: desc.to_strbuf(),
short_name: short_name.to_string(),
long_name: long_name.to_string(),
hint: hint.to_string(),
desc: desc.to_string(),
hasarg: Yes,
occur: Multi
}
@ -485,10 +485,10 @@ pub fn opt(short_name: &str,
let len = short_name.len();
assert!(len == 1 || len == 0);
OptGroup {
short_name: short_name.to_strbuf(),
long_name: long_name.to_strbuf(),
hint: hint.to_strbuf(),
desc: desc.to_strbuf(),
short_name: short_name.to_string(),
long_name: long_name.to_string(),
hint: hint.to_string(),
desc: desc.to_string(),
hasarg: hasarg,
occur: occur
}
@ -548,11 +548,11 @@ pub fn getopts(args: &[String], optgrps: &[OptGroup]) -> Result {
let tail = cur.as_slice().slice(2, curlen);
let tail_eq: Vec<&str> = tail.split('=').collect();
if tail_eq.len() <= 1 {
names = vec!(Long(tail.to_strbuf()));
names = vec!(Long(tail.to_string()));
} else {
names =
vec!(Long((*tail_eq.get(0)).to_strbuf()));
i_arg = Some((*tail_eq.get(1)).to_strbuf());
vec!(Long((*tail_eq.get(0)).to_string()));
i_arg = Some((*tail_eq.get(1)).to_string());
}
} else {
let mut j = 1;
@ -582,7 +582,7 @@ pub fn getopts(args: &[String], optgrps: &[OptGroup]) -> Result {
};
if arg_follows && j < curlen {
i_arg = Some(cur.as_slice()
.slice(j, curlen).to_strbuf());
.slice(j, curlen).to_string());
break;
} else {
last_valid_opt_id = None;
@ -671,7 +671,7 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String {
hasarg: hasarg,
..} = (*optref).clone();
let mut row = String::from_owned_str(" ".repeat(4));
let mut row = " ".repeat(4);
// short option
match short_name.len() {
@ -728,7 +728,7 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String {
each_split_within(desc_normalized_whitespace.as_slice(),
54,
|substr| {
desc_rows.push(substr.to_owned());
desc_rows.push(substr.to_string());
true
});
@ -900,19 +900,19 @@ fn each_split_within<'a>(ss: &'a str, lim: uint, it: |&'a str| -> bool)
fn test_split_within() {
fn t(s: &str, i: uint, u: &[String]) {
let mut v = Vec::new();
each_split_within(s, i, |s| { v.push(s.to_strbuf()); true });
each_split_within(s, i, |s| { v.push(s.to_string()); true });
assert!(v.iter().zip(u.iter()).all(|(a,b)| a == b));
}
t("", 0, []);
t("", 15, []);
t("hello", 15, ["hello".to_strbuf()]);
t("hello", 15, ["hello".to_string()]);
t("\nMary had a little lamb\nLittle lamb\n", 15, [
"Mary had a".to_strbuf(),
"little lamb".to_strbuf(),
"Little lamb".to_strbuf()
"Mary had a".to_string(),
"little lamb".to_string(),
"Little lamb".to_string()
]);
t("\nMary had a little lamb\nLittle lamb\n", ::std::uint::MAX,
["Mary had a little lamb\nLittle lamb".to_strbuf()]);
["Mary had a little lamb\nLittle lamb".to_string()]);
}
#[cfg(test)]
@ -935,25 +935,25 @@ mod tests {
// Tests for reqopt
#[test]
fn test_reqopt() {
let long_args = vec!("--test=20".to_strbuf());
let long_args = vec!("--test=20".to_string());
let opts = vec!(reqopt("t", "test", "testing", "TEST"));
let rs = getopts(long_args.as_slice(), opts.as_slice());
match rs {
Ok(ref m) => {
assert!(m.opt_present("test"));
assert_eq!(m.opt_str("test").unwrap(), "20".to_strbuf());
assert_eq!(m.opt_str("test").unwrap(), "20".to_string());
assert!(m.opt_present("t"));
assert_eq!(m.opt_str("t").unwrap(), "20".to_strbuf());
assert_eq!(m.opt_str("t").unwrap(), "20".to_string());
}
_ => { fail!("test_reqopt failed (long arg)"); }
}
let short_args = vec!("-t".to_strbuf(), "20".to_strbuf());
let short_args = vec!("-t".to_string(), "20".to_string());
match getopts(short_args.as_slice(), opts.as_slice()) {
Ok(ref m) => {
assert!((m.opt_present("test")));
assert_eq!(m.opt_str("test").unwrap(), "20".to_strbuf());
assert_eq!(m.opt_str("test").unwrap(), "20".to_string());
assert!((m.opt_present("t")));
assert_eq!(m.opt_str("t").unwrap(), "20".to_strbuf());
assert_eq!(m.opt_str("t").unwrap(), "20".to_string());
}
_ => { fail!("test_reqopt failed (short arg)"); }
}
@ -961,7 +961,7 @@ mod tests {
#[test]
fn test_reqopt_missing() {
let args = vec!("blah".to_strbuf());
let args = vec!("blah".to_string());
let opts = vec!(reqopt("t", "test", "testing", "TEST"));
let rs = getopts(args.as_slice(), opts.as_slice());
match rs {
@ -972,14 +972,14 @@ mod tests {
#[test]
fn test_reqopt_no_arg() {
let long_args = vec!("--test".to_strbuf());
let long_args = vec!("--test".to_string());
let opts = vec!(reqopt("t", "test", "testing", "TEST"));
let rs = getopts(long_args.as_slice(), opts.as_slice());
match rs {
Err(f) => check_fail_type(f, ArgumentMissing_),
_ => fail!()
}
let short_args = vec!("-t".to_strbuf());
let short_args = vec!("-t".to_string());
match getopts(short_args.as_slice(), opts.as_slice()) {
Err(f) => check_fail_type(f, ArgumentMissing_),
_ => fail!()
@ -988,7 +988,7 @@ mod tests {
#[test]
fn test_reqopt_multi() {
let args = vec!("--test=20".to_strbuf(), "-t".to_strbuf(), "30".to_strbuf());
let args = vec!("--test=20".to_string(), "-t".to_string(), "30".to_string());
let opts = vec!(reqopt("t", "test", "testing", "TEST"));
let rs = getopts(args.as_slice(), opts.as_slice());
match rs {
@ -1000,25 +1000,25 @@ mod tests {
// Tests for optopt
#[test]
fn test_optopt() {
let long_args = vec!("--test=20".to_strbuf());
let long_args = vec!("--test=20".to_string());
let opts = vec!(optopt("t", "test", "testing", "TEST"));
let rs = getopts(long_args.as_slice(), opts.as_slice());
match rs {
Ok(ref m) => {
assert!(m.opt_present("test"));
assert_eq!(m.opt_str("test").unwrap(), "20".to_strbuf());
assert_eq!(m.opt_str("test").unwrap(), "20".to_string());
assert!((m.opt_present("t")));
assert_eq!(m.opt_str("t").unwrap(), "20".to_strbuf());
assert_eq!(m.opt_str("t").unwrap(), "20".to_string());
}
_ => fail!()
}
let short_args = vec!("-t".to_strbuf(), "20".to_strbuf());
let short_args = vec!("-t".to_string(), "20".to_string());
match getopts(short_args.as_slice(), opts.as_slice()) {
Ok(ref m) => {
assert!((m.opt_present("test")));
assert_eq!(m.opt_str("test").unwrap(), "20".to_strbuf());
assert_eq!(m.opt_str("test").unwrap(), "20".to_string());
assert!((m.opt_present("t")));
assert_eq!(m.opt_str("t").unwrap(), "20".to_strbuf());
assert_eq!(m.opt_str("t").unwrap(), "20".to_string());
}
_ => fail!()
}
@ -1026,7 +1026,7 @@ mod tests {
#[test]
fn test_optopt_missing() {
let args = vec!("blah".to_strbuf());
let args = vec!("blah".to_string());
let opts = vec!(optopt("t", "test", "testing", "TEST"));
let rs = getopts(args.as_slice(), opts.as_slice());
match rs {
@ -1040,14 +1040,14 @@ mod tests {
#[test]
fn test_optopt_no_arg() {
let long_args = vec!("--test".to_strbuf());
let long_args = vec!("--test".to_string());
let opts = vec!(optopt("t", "test", "testing", "TEST"));
let rs = getopts(long_args.as_slice(), opts.as_slice());
match rs {
Err(f) => check_fail_type(f, ArgumentMissing_),
_ => fail!()
}
let short_args = vec!("-t".to_strbuf());
let short_args = vec!("-t".to_string());
match getopts(short_args.as_slice(), opts.as_slice()) {
Err(f) => check_fail_type(f, ArgumentMissing_),
_ => fail!()
@ -1056,7 +1056,7 @@ mod tests {
#[test]
fn test_optopt_multi() {
let args = vec!("--test=20".to_strbuf(), "-t".to_strbuf(), "30".to_strbuf());
let args = vec!("--test=20".to_string(), "-t".to_string(), "30".to_string());
let opts = vec!(optopt("t", "test", "testing", "TEST"));
let rs = getopts(args.as_slice(), opts.as_slice());
match rs {
@ -1068,7 +1068,7 @@ mod tests {
// Tests for optflag
#[test]
fn test_optflag() {
let long_args = vec!("--test".to_strbuf());
let long_args = vec!("--test".to_string());
let opts = vec!(optflag("t", "test", "testing"));
let rs = getopts(long_args.as_slice(), opts.as_slice());
match rs {
@ -1078,7 +1078,7 @@ mod tests {
}
_ => fail!()
}
let short_args = vec!("-t".to_strbuf());
let short_args = vec!("-t".to_string());
match getopts(short_args.as_slice(), opts.as_slice()) {
Ok(ref m) => {
assert!(m.opt_present("test"));
@ -1090,7 +1090,7 @@ mod tests {
#[test]
fn test_optflag_missing() {
let args = vec!("blah".to_strbuf());
let args = vec!("blah".to_string());
let opts = vec!(optflag("t", "test", "testing"));
let rs = getopts(args.as_slice(), opts.as_slice());
match rs {
@ -1104,7 +1104,7 @@ mod tests {
#[test]
fn test_optflag_long_arg() {
let args = vec!("--test=20".to_strbuf());
let args = vec!("--test=20".to_string());
let opts = vec!(optflag("t", "test", "testing"));
let rs = getopts(args.as_slice(), opts.as_slice());
match rs {
@ -1118,7 +1118,7 @@ mod tests {
#[test]
fn test_optflag_multi() {
let args = vec!("--test".to_strbuf(), "-t".to_strbuf());
let args = vec!("--test".to_string(), "-t".to_string());
let opts = vec!(optflag("t", "test", "testing"));
let rs = getopts(args.as_slice(), opts.as_slice());
match rs {
@ -1129,14 +1129,14 @@ mod tests {
#[test]
fn test_optflag_short_arg() {
let args = vec!("-t".to_strbuf(), "20".to_strbuf());
let args = vec!("-t".to_string(), "20".to_string());
let opts = vec!(optflag("t", "test", "testing"));
let rs = getopts(args.as_slice(), opts.as_slice());
match rs {
Ok(ref m) => {
// The next variable after the flag is just a free argument
assert!(*m.free.get(0) == "20".to_strbuf());
assert!(*m.free.get(0) == "20".to_string());
}
_ => fail!()
}
@ -1145,7 +1145,7 @@ mod tests {
// Tests for optflagmulti
#[test]
fn test_optflagmulti_short1() {
let args = vec!("-v".to_strbuf());
let args = vec!("-v".to_string());
let opts = vec!(optflagmulti("v", "verbose", "verbosity"));
let rs = getopts(args.as_slice(), opts.as_slice());
match rs {
@ -1158,7 +1158,7 @@ mod tests {
#[test]
fn test_optflagmulti_short2a() {
let args = vec!("-v".to_strbuf(), "-v".to_strbuf());
let args = vec!("-v".to_string(), "-v".to_string());
let opts = vec!(optflagmulti("v", "verbose", "verbosity"));
let rs = getopts(args.as_slice(), opts.as_slice());
match rs {
@ -1171,7 +1171,7 @@ mod tests {
#[test]
fn test_optflagmulti_short2b() {
let args = vec!("-vv".to_strbuf());
let args = vec!("-vv".to_string());
let opts = vec!(optflagmulti("v", "verbose", "verbosity"));
let rs = getopts(args.as_slice(), opts.as_slice());
match rs {
@ -1184,7 +1184,7 @@ mod tests {
#[test]
fn test_optflagmulti_long1() {
let args = vec!("--verbose".to_strbuf());
let args = vec!("--verbose".to_string());
let opts = vec!(optflagmulti("v", "verbose", "verbosity"));
let rs = getopts(args.as_slice(), opts.as_slice());
match rs {
@ -1197,7 +1197,7 @@ mod tests {
#[test]
fn test_optflagmulti_long2() {
let args = vec!("--verbose".to_strbuf(), "--verbose".to_strbuf());
let args = vec!("--verbose".to_string(), "--verbose".to_string());
let opts = vec!(optflagmulti("v", "verbose", "verbosity"));
let rs = getopts(args.as_slice(), opts.as_slice());
match rs {
@ -1210,8 +1210,8 @@ mod tests {
#[test]
fn test_optflagmulti_mix() {
let args = vec!("--verbose".to_strbuf(), "-v".to_strbuf(),
"-vv".to_strbuf(), "verbose".to_strbuf());
let args = vec!("--verbose".to_string(), "-v".to_string(),
"-vv".to_string(), "verbose".to_string());
let opts = vec!(optflagmulti("v", "verbose", "verbosity"));
let rs = getopts(args.as_slice(), opts.as_slice());
match rs {
@ -1226,25 +1226,25 @@ mod tests {
// Tests for optmulti
#[test]
fn test_optmulti() {
let long_args = vec!("--test=20".to_strbuf());
let long_args = vec!("--test=20".to_string());
let opts = vec!(optmulti("t", "test", "testing", "TEST"));
let rs = getopts(long_args.as_slice(), opts.as_slice());
match rs {
Ok(ref m) => {
assert!((m.opt_present("test")));
assert_eq!(m.opt_str("test").unwrap(), "20".to_strbuf());
assert_eq!(m.opt_str("test").unwrap(), "20".to_string());
assert!((m.opt_present("t")));
assert_eq!(m.opt_str("t").unwrap(), "20".to_strbuf());
assert_eq!(m.opt_str("t").unwrap(), "20".to_string());
}
_ => fail!()
}
let short_args = vec!("-t".to_strbuf(), "20".to_strbuf());
let short_args = vec!("-t".to_string(), "20".to_string());
match getopts(short_args.as_slice(), opts.as_slice()) {
Ok(ref m) => {
assert!((m.opt_present("test")));
assert_eq!(m.opt_str("test").unwrap(), "20".to_strbuf());
assert_eq!(m.opt_str("test").unwrap(), "20".to_string());
assert!((m.opt_present("t")));
assert_eq!(m.opt_str("t").unwrap(), "20".to_strbuf());
assert_eq!(m.opt_str("t").unwrap(), "20".to_string());
}
_ => fail!()
}
@ -1252,7 +1252,7 @@ mod tests {
#[test]
fn test_optmulti_missing() {
let args = vec!("blah".to_strbuf());
let args = vec!("blah".to_string());
let opts = vec!(optmulti("t", "test", "testing", "TEST"));
let rs = getopts(args.as_slice(), opts.as_slice());
match rs {
@ -1266,14 +1266,14 @@ mod tests {
#[test]
fn test_optmulti_no_arg() {
let long_args = vec!("--test".to_strbuf());
let long_args = vec!("--test".to_string());
let opts = vec!(optmulti("t", "test", "testing", "TEST"));
let rs = getopts(long_args.as_slice(), opts.as_slice());
match rs {
Err(f) => check_fail_type(f, ArgumentMissing_),
_ => fail!()
}
let short_args = vec!("-t".to_strbuf());
let short_args = vec!("-t".to_string());
match getopts(short_args.as_slice(), opts.as_slice()) {
Err(f) => check_fail_type(f, ArgumentMissing_),
_ => fail!()
@ -1282,18 +1282,18 @@ mod tests {
#[test]
fn test_optmulti_multi() {
let args = vec!("--test=20".to_strbuf(), "-t".to_strbuf(), "30".to_strbuf());
let args = vec!("--test=20".to_string(), "-t".to_string(), "30".to_string());
let opts = vec!(optmulti("t", "test", "testing", "TEST"));
let rs = getopts(args.as_slice(), opts.as_slice());
match rs {
Ok(ref m) => {
assert!(m.opt_present("test"));
assert_eq!(m.opt_str("test").unwrap(), "20".to_strbuf());
assert_eq!(m.opt_str("test").unwrap(), "20".to_string());
assert!(m.opt_present("t"));
assert_eq!(m.opt_str("t").unwrap(), "20".to_strbuf());
assert_eq!(m.opt_str("t").unwrap(), "20".to_string());
let pair = m.opt_strs("test");
assert!(*pair.get(0) == "20".to_strbuf());
assert!(*pair.get(1) == "30".to_strbuf());
assert!(*pair.get(0) == "20".to_string());
assert!(*pair.get(1) == "30".to_string());
}
_ => fail!()
}
@ -1301,14 +1301,14 @@ mod tests {
#[test]
fn test_unrecognized_option() {
let long_args = vec!("--untest".to_strbuf());
let long_args = vec!("--untest".to_string());
let opts = vec!(optmulti("t", "test", "testing", "TEST"));
let rs = getopts(long_args.as_slice(), opts.as_slice());
match rs {
Err(f) => check_fail_type(f, UnrecognizedOption_),
_ => fail!()
}
let short_args = vec!("-u".to_strbuf());
let short_args = vec!("-u".to_string());
match getopts(short_args.as_slice(), opts.as_slice()) {
Err(f) => check_fail_type(f, UnrecognizedOption_),
_ => fail!()
@ -1318,22 +1318,22 @@ mod tests {
#[test]
fn test_combined() {
let args =
vec!("prog".to_strbuf(),
"free1".to_strbuf(),
"-s".to_strbuf(),
"20".to_strbuf(),
"free2".to_strbuf(),
"--flag".to_strbuf(),
"--long=30".to_strbuf(),
"-f".to_strbuf(),
"-m".to_strbuf(),
"40".to_strbuf(),
"-m".to_strbuf(),
"50".to_strbuf(),
"-n".to_strbuf(),
"-A B".to_strbuf(),
"-n".to_strbuf(),
"-60 70".to_strbuf());
vec!("prog".to_string(),
"free1".to_string(),
"-s".to_string(),
"20".to_string(),
"free2".to_string(),
"--flag".to_string(),
"--long=30".to_string(),
"-f".to_string(),
"-m".to_string(),
"40".to_string(),
"-m".to_string(),
"50".to_string(),
"-n".to_string(),
"-A B".to_string(),
"-n".to_string(),
"-60 70".to_string());
let opts =
vec!(optopt("s", "something", "something", "SOMETHING"),
optflag("", "flag", "a flag"),
@ -1345,19 +1345,19 @@ mod tests {
let rs = getopts(args.as_slice(), opts.as_slice());
match rs {
Ok(ref m) => {
assert!(*m.free.get(0) == "prog".to_strbuf());
assert!(*m.free.get(1) == "free1".to_strbuf());
assert_eq!(m.opt_str("s").unwrap(), "20".to_strbuf());
assert!(*m.free.get(2) == "free2".to_strbuf());
assert!(*m.free.get(0) == "prog".to_string());
assert!(*m.free.get(1) == "free1".to_string());
assert_eq!(m.opt_str("s").unwrap(), "20".to_string());
assert!(*m.free.get(2) == "free2".to_string());
assert!((m.opt_present("flag")));
assert_eq!(m.opt_str("long").unwrap(), "30".to_strbuf());
assert_eq!(m.opt_str("long").unwrap(), "30".to_string());
assert!((m.opt_present("f")));
let pair = m.opt_strs("m");
assert!(*pair.get(0) == "40".to_strbuf());
assert!(*pair.get(1) == "50".to_strbuf());
assert!(*pair.get(0) == "40".to_string());
assert!(*pair.get(1) == "50".to_string());
let pair = m.opt_strs("n");
assert!(*pair.get(0) == "-A B".to_strbuf());
assert!(*pair.get(1) == "-60 70".to_strbuf());
assert!(*pair.get(0) == "-A B".to_string());
assert!(*pair.get(1) == "-60 70".to_string());
assert!((!m.opt_present("notpresent")));
}
_ => fail!()
@ -1370,68 +1370,68 @@ mod tests {
optopt("", "encrypt", "encrypt", "ENCRYPT"),
optopt("f", "", "flag", "FLAG"));
let args_single = vec!("-e".to_strbuf(), "foo".to_strbuf());
let args_single = vec!("-e".to_string(), "foo".to_string());
let matches_single = &match getopts(args_single.as_slice(),
opts.as_slice()) {
result::Ok(m) => m,
result::Err(_) => fail!()
};
assert!(matches_single.opts_present(["e".to_strbuf()]));
assert!(matches_single.opts_present(["encrypt".to_strbuf(), "e".to_strbuf()]));
assert!(matches_single.opts_present(["e".to_strbuf(), "encrypt".to_strbuf()]));
assert!(!matches_single.opts_present(["encrypt".to_strbuf()]));
assert!(!matches_single.opts_present(["thing".to_strbuf()]));
assert!(matches_single.opts_present(["e".to_string()]));
assert!(matches_single.opts_present(["encrypt".to_string(), "e".to_string()]));
assert!(matches_single.opts_present(["e".to_string(), "encrypt".to_string()]));
assert!(!matches_single.opts_present(["encrypt".to_string()]));
assert!(!matches_single.opts_present(["thing".to_string()]));
assert!(!matches_single.opts_present([]));
assert_eq!(matches_single.opts_str(["e".to_strbuf()]).unwrap(), "foo".to_strbuf());
assert_eq!(matches_single.opts_str(["e".to_strbuf(), "encrypt".to_strbuf()]).unwrap(),
"foo".to_strbuf());
assert_eq!(matches_single.opts_str(["encrypt".to_strbuf(), "e".to_strbuf()]).unwrap(),
"foo".to_strbuf());
assert_eq!(matches_single.opts_str(["e".to_string()]).unwrap(), "foo".to_string());
assert_eq!(matches_single.opts_str(["e".to_string(), "encrypt".to_string()]).unwrap(),
"foo".to_string());
assert_eq!(matches_single.opts_str(["encrypt".to_string(), "e".to_string()]).unwrap(),
"foo".to_string());
let args_both = vec!("-e".to_strbuf(), "foo".to_strbuf(), "--encrypt".to_strbuf(),
"foo".to_strbuf());
let args_both = vec!("-e".to_string(), "foo".to_string(), "--encrypt".to_string(),
"foo".to_string());
let matches_both = &match getopts(args_both.as_slice(),
opts.as_slice()) {
result::Ok(m) => m,
result::Err(_) => fail!()
};
assert!(matches_both.opts_present(["e".to_strbuf()]));
assert!(matches_both.opts_present(["encrypt".to_strbuf()]));
assert!(matches_both.opts_present(["encrypt".to_strbuf(), "e".to_strbuf()]));
assert!(matches_both.opts_present(["e".to_strbuf(), "encrypt".to_strbuf()]));
assert!(!matches_both.opts_present(["f".to_strbuf()]));
assert!(!matches_both.opts_present(["thing".to_strbuf()]));
assert!(matches_both.opts_present(["e".to_string()]));
assert!(matches_both.opts_present(["encrypt".to_string()]));
assert!(matches_both.opts_present(["encrypt".to_string(), "e".to_string()]));
assert!(matches_both.opts_present(["e".to_string(), "encrypt".to_string()]));
assert!(!matches_both.opts_present(["f".to_string()]));
assert!(!matches_both.opts_present(["thing".to_string()]));
assert!(!matches_both.opts_present([]));
assert_eq!(matches_both.opts_str(["e".to_strbuf()]).unwrap(), "foo".to_strbuf());
assert_eq!(matches_both.opts_str(["encrypt".to_strbuf()]).unwrap(), "foo".to_strbuf());
assert_eq!(matches_both.opts_str(["e".to_strbuf(), "encrypt".to_strbuf()]).unwrap(),
"foo".to_strbuf());
assert_eq!(matches_both.opts_str(["encrypt".to_strbuf(), "e".to_strbuf()]).unwrap(),
"foo".to_strbuf());
assert_eq!(matches_both.opts_str(["e".to_string()]).unwrap(), "foo".to_string());
assert_eq!(matches_both.opts_str(["encrypt".to_string()]).unwrap(), "foo".to_string());
assert_eq!(matches_both.opts_str(["e".to_string(), "encrypt".to_string()]).unwrap(),
"foo".to_string());
assert_eq!(matches_both.opts_str(["encrypt".to_string(), "e".to_string()]).unwrap(),
"foo".to_string());
}
#[test]
fn test_nospace() {
let args = vec!("-Lfoo".to_strbuf(), "-M.".to_strbuf());
let args = vec!("-Lfoo".to_string(), "-M.".to_string());
let opts = vec!(optmulti("L", "", "library directory", "LIB"),
optmulti("M", "", "something", "MMMM"));
let matches = &match getopts(args.as_slice(), opts.as_slice()) {
result::Ok(m) => m,
result::Err(_) => fail!()
};
assert!(matches.opts_present(["L".to_strbuf()]));
assert_eq!(matches.opts_str(["L".to_strbuf()]).unwrap(), "foo".to_strbuf());
assert!(matches.opts_present(["M".to_strbuf()]));
assert_eq!(matches.opts_str(["M".to_strbuf()]).unwrap(), ".".to_strbuf());
assert!(matches.opts_present(["L".to_string()]));
assert_eq!(matches.opts_str(["L".to_string()]).unwrap(), "foo".to_string());
assert!(matches.opts_present(["M".to_string()]));
assert_eq!(matches.opts_str(["M".to_string()]).unwrap(), ".".to_string());
}
#[test]
fn test_long_to_short() {
let mut short = Opt {
name: Long("banana".to_strbuf()),
name: Long("banana".to_string()),
hasarg: Yes,
occur: Req,
aliases: Vec::new(),
@ -1450,7 +1450,7 @@ mod tests {
let opts = vec!(
optflagmulti("a", "apple", "Desc"));
let args = vec!("-a".to_strbuf(), "--apple".to_strbuf(), "-a".to_strbuf());
let args = vec!("-a".to_string(), "--apple".to_string(), "-a".to_string());
let matches = getopts(args.as_slice(), opts.as_slice()).unwrap();
assert_eq!(3, matches.opt_count("a"));
@ -1477,7 +1477,7 @@ Options:
-k --kiwi Desc
-p [VAL] Desc
-l VAL Desc
".to_strbuf();
".to_string();
let generated_usage = usage("Usage: fruits", optgroups.as_slice());
@ -1504,7 +1504,7 @@ Options:
-k --kiwi This is a long description which won't be wrapped..+..
-a --apple This is a long description which _will_ be
wrapped..+..
".to_strbuf();
".to_string();
let usage = usage("Usage: fruits", optgroups.as_slice());
@ -1530,7 +1530,7 @@ Options:
-a --apple This description has some characters that could
confuse the line wrapping; an apple costs 0.51 in
some parts of Europe.
".to_strbuf();
".to_string();
let usage = usage("Usage: fruits", optgroups.as_slice());
@ -1549,7 +1549,7 @@ Options:
optflagopt("p", "", "Desc", "VAL"),
optmulti("l", "", "Desc", "VAL"));
let expected = "Usage: fruits -b VAL [-a VAL] [-k] [-p [VAL]] [-l VAL]..".to_strbuf();
let expected = "Usage: fruits -b VAL [-a VAL] [-k] [-p [VAL]] [-l VAL]..".to_string();
let generated_usage = short_usage("fruits", optgroups.as_slice());
debug!("expected: <<{}>>", expected);
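As an aside (plain std, not part of the getopts changes): the tests above now build their argument vectors with `to_string()`, and the parsing hunk splits `--name=value` tokens on the first `=`. A minimal sketch of that shape:

```rust
// Minimal sketch, std only: owned argument vector plus the `--name=value`
// split performed in the getopts parsing hunk above.
fn main() {
    let args: Vec<String> = vec!["--test=20".to_string(), "-t".to_string(), "30".to_string()];

    // Strip the leading "--" and split at the first '='.
    let tail = &args[0][2..];
    let mut parts = tail.splitn(2, '=');
    assert_eq!(parts.next(), Some("test"));
    assert_eq!(parts.next(), Some("20"));
}
```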


@ -767,7 +767,7 @@ mod test {
#[test]
fn test_pattern_escape() {
let s = "_[_]_?_*_!_";
assert_eq!(Pattern::escape(s), "_[[]_[]]_[?]_[*]_!_".to_strbuf());
assert_eq!(Pattern::escape(s), "_[[]_[]]_[?]_[*]_!_".to_string());
assert!(Pattern::new(Pattern::escape(s).as_slice()).matches(s));
}
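As an aside, a simplified re-creation (not the crate's actual implementation) of the escaping behaviour this test asserts: each glob metacharacter is wrapped in brackets so it matches literally.

```rust
// Hypothetical stand-in for Pattern::escape: wrap ?, *, [ and ] in brackets.
fn escape(s: &str) -> String {
    let mut out = String::new();
    for c in s.chars() {
        match c {
            '?' | '*' | '[' | ']' => {
                out.push('[');
                out.push(c);
                out.push(']');
            }
            other => out.push(other),
        }
    }
    out
}

fn main() {
    assert_eq!(escape("_[_]_?_*_!_"), "_[[]_[]]_[?]_[*]_!_");
}
```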


@ -435,8 +435,8 @@ impl<'a> LabelText<'a> {
/// Renders text as string suitable for a label in a .dot file.
pub fn escape(&self) -> String {
match self {
&LabelStr(ref s) => s.as_slice().escape_default().to_strbuf(),
&EscStr(ref s) => LabelText::escape_str(s.as_slice()).to_strbuf(),
&LabelStr(ref s) => s.as_slice().escape_default().to_string(),
&EscStr(ref s) => LabelText::escape_str(s.as_slice()).to_string(),
}
}
}
@ -666,7 +666,7 @@ mod tests {
render(&g, &mut writer).unwrap();
let mut r = BufReader::new(writer.get_ref());
match r.read_to_str() {
Ok(string) => Ok(string.to_strbuf()),
Ok(string) => Ok(string.to_string()),
Err(err) => Err(err),
}
}


@ -75,34 +75,34 @@ fn hex_float_lit_err(s: &str) -> Option<(uint, String)> {
let mut i = 0;
if chars.peek() == Some(&'-') { chars.next(); i+= 1 }
if chars.next() != Some('0') {
return Some((i, "Expected '0'".to_strbuf()));
return Some((i, "Expected '0'".to_string()));
} i+=1;
if chars.next() != Some('x') {
return Some((i, "Expected 'x'".to_strbuf()));
return Some((i, "Expected 'x'".to_string()));
} i+=1;
let mut d_len = 0;
for _ in chars.take_while(|c| c.is_digit_radix(16)) { chars.next(); i+=1; d_len += 1;}
if chars.next() != Some('.') {
return Some((i, "Expected '.'".to_strbuf()));
return Some((i, "Expected '.'".to_string()));
} i+=1;
let mut f_len = 0;
for _ in chars.take_while(|c| c.is_digit_radix(16)) { chars.next(); i+=1; f_len += 1;}
if d_len == 0 && f_len == 0 {
return Some((i, "Expected digits before or after decimal \
point".to_strbuf()));
point".to_string()));
}
if chars.next() != Some('p') {
return Some((i, "Expected 'p'".to_strbuf()));
return Some((i, "Expected 'p'".to_string()));
} i+=1;
if chars.peek() == Some(&'-') { chars.next(); i+= 1 }
let mut e_len = 0;
for _ in chars.take_while(|c| c.is_digit()) { chars.next(); i+=1; e_len += 1}
if e_len == 0 {
return Some((i, "Expected exponent digits".to_strbuf()));
return Some((i, "Expected exponent digits".to_string()));
}
match chars.next() {
None => None,
Some(_) => Some((i, "Expected end of string".to_strbuf()))
Some(_) => Some((i, "Expected end of string".to_string()))
}
}


@ -64,7 +64,7 @@ pub fn parse_logging_spec(spec: &str) -> Vec<LogDirective> {
}
};
dirs.push(LogDirective {
name: name.map(|s| s.to_strbuf()),
name: name.map(|s| s.to_string()),
level: log_level,
});
}
@ -80,13 +80,13 @@ mod tests {
let dirs = parse_logging_spec("crate1::mod1=1,crate1::mod2,crate2=4");
let dirs = dirs.as_slice();
assert_eq!(dirs.len(), 3);
assert_eq!(dirs[0].name, Some("crate1::mod1".to_strbuf()));
assert_eq!(dirs[0].name, Some("crate1::mod1".to_string()));
assert_eq!(dirs[0].level, 1);
assert_eq!(dirs[1].name, Some("crate1::mod2".to_strbuf()));
assert_eq!(dirs[1].name, Some("crate1::mod2".to_string()));
assert_eq!(dirs[1].level, ::MAX_LOG_LEVEL);
assert_eq!(dirs[2].name, Some("crate2".to_strbuf()));
assert_eq!(dirs[2].name, Some("crate2".to_string()));
assert_eq!(dirs[2].level, 4);
}
@ -96,7 +96,7 @@ mod tests {
let dirs = parse_logging_spec("crate1::mod1=1=2,crate2=4");
let dirs = dirs.as_slice();
assert_eq!(dirs.len(), 1);
assert_eq!(dirs[0].name, Some("crate2".to_strbuf()));
assert_eq!(dirs[0].name, Some("crate2".to_string()));
assert_eq!(dirs[0].level, 4);
}
@ -106,7 +106,7 @@ mod tests {
let dirs = parse_logging_spec("crate1::mod1=noNumber,crate2=4");
let dirs = dirs.as_slice();
assert_eq!(dirs.len(), 1);
assert_eq!(dirs[0].name, Some("crate2".to_strbuf()));
assert_eq!(dirs[0].name, Some("crate2".to_string()));
assert_eq!(dirs[0].level, 4);
}
@ -116,7 +116,7 @@ mod tests {
let dirs = parse_logging_spec("crate1::mod1=wrong,crate2=warn");
let dirs = dirs.as_slice();
assert_eq!(dirs.len(), 1);
assert_eq!(dirs[0].name, Some("crate2".to_strbuf()));
assert_eq!(dirs[0].name, Some("crate2".to_string()));
assert_eq!(dirs[0].level, ::WARN);
}
@ -128,7 +128,7 @@ mod tests {
assert_eq!(dirs.len(), 2);
assert_eq!(dirs[0].name, None);
assert_eq!(dirs[0].level, 2);
assert_eq!(dirs[1].name, Some("crate2".to_strbuf()));
assert_eq!(dirs[1].name, Some("crate2".to_string()));
assert_eq!(dirs[1].level, 4);
}
}
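As an aside, a simplified stand-in (not the real parser, and without named levels such as `warn`) for the spec format these tests exercise: directives are split on `,` and `=`, and malformed ones are dropped, which is why `crate1::mod1=1=2` and `crate1::mod1=noNumber` each leave only one directive.

```rust
// Simplified sketch of a RUST_LOG-style spec parser.
fn parse_spec(spec: &str) -> Vec<(Option<String>, u32)> {
    let mut dirs = Vec::new();
    for part in spec.split(',').filter(|s| !s.is_empty()) {
        let mut kv = part.splitn(2, '=');
        let name = kv.next().unwrap_or("").to_string();
        match kv.next() {
            // "crate2=4": explicit numeric level; malformed levels are dropped.
            Some(level) => {
                if let Ok(level) = level.parse::<u32>() {
                    dirs.push((Some(name), level));
                }
            }
            // "crate1::mod2": no level given, treat as "log everything".
            None => dirs.push((Some(name), u32::MAX)),
        }
    }
    dirs
}

fn main() {
    let dirs = parse_spec("crate1::mod1=1,crate1::mod2,crate2=4");
    assert_eq!(dirs.len(), 3);
    assert_eq!(dirs[0], (Some("crate1::mod1".to_string()), 1));
    assert_eq!(dirs[2], (Some("crate2".to_string()), 4));
}
```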


@ -366,11 +366,11 @@ mod tests {
fn match_full_path() {
let dirs = [
LogDirective {
name: Some("crate2".to_strbuf()),
name: Some("crate2".to_string()),
level: 3
},
LogDirective {
name: Some("crate1::mod1".to_strbuf()),
name: Some("crate1::mod1".to_string()),
level: 2
}
];
@ -383,8 +383,8 @@ mod tests {
#[test]
fn no_match() {
let dirs = [
LogDirective { name: Some("crate2".to_strbuf()), level: 3 },
LogDirective { name: Some("crate1::mod1".to_strbuf()), level: 2 }
LogDirective { name: Some("crate2".to_string()), level: 3 },
LogDirective { name: Some("crate1::mod1".to_string()), level: 2 }
];
assert!(!enabled(2, "crate3", dirs.iter()));
}
@ -392,8 +392,8 @@ mod tests {
#[test]
fn match_beginning() {
let dirs = [
LogDirective { name: Some("crate2".to_strbuf()), level: 3 },
LogDirective { name: Some("crate1::mod1".to_strbuf()), level: 2 }
LogDirective { name: Some("crate2".to_string()), level: 3 },
LogDirective { name: Some("crate1::mod1".to_string()), level: 2 }
];
assert!(enabled(3, "crate2::mod1", dirs.iter()));
}
@ -401,9 +401,9 @@ mod tests {
#[test]
fn match_beginning_longest_match() {
let dirs = [
LogDirective { name: Some("crate2".to_strbuf()), level: 3 },
LogDirective { name: Some("crate2::mod".to_strbuf()), level: 4 },
LogDirective { name: Some("crate1::mod1".to_strbuf()), level: 2 }
LogDirective { name: Some("crate2".to_string()), level: 3 },
LogDirective { name: Some("crate2::mod".to_string()), level: 4 },
LogDirective { name: Some("crate1::mod1".to_string()), level: 2 }
];
assert!(enabled(4, "crate2::mod1", dirs.iter()));
assert!(!enabled(4, "crate2", dirs.iter()));
@ -413,7 +413,7 @@ mod tests {
fn match_default() {
let dirs = [
LogDirective { name: None, level: 3 },
LogDirective { name: Some("crate1::mod1".to_strbuf()), level: 2 }
LogDirective { name: Some("crate1::mod1".to_string()), level: 2 }
];
assert!(enabled(2, "crate1::mod1", dirs.iter()));
assert!(enabled(3, "crate2::mod2", dirs.iter()));
@ -423,7 +423,7 @@ mod tests {
fn zero_level() {
let dirs = [
LogDirective { name: None, level: 3 },
LogDirective { name: Some("crate1::mod1".to_strbuf()), level: 0 }
LogDirective { name: Some("crate1::mod1".to_string()), level: 0 }
];
assert!(!enabled(1, "crate1::mod1", dirs.iter()));
assert!(enabled(3, "crate2::mod2", dirs.iter()));


@ -106,7 +106,7 @@ fn get_error(s: c_int) -> IoError {
use std::io;
let err_str = unsafe {
CString::new(gai_strerror(s), false).as_str().unwrap().to_strbuf()
CString::new(gai_strerror(s), false).as_str().unwrap().to_string()
};
IoError {
kind: io::OtherIoError,


@ -1103,24 +1103,24 @@ mod tests {
assert_eq!(
test_wrapper("prog", ["aaa", "bbb", "ccc"]),
"prog aaa bbb ccc".to_owned()
"prog aaa bbb ccc".to_string()
);
assert_eq!(
test_wrapper("C:\\Program Files\\blah\\blah.exe", ["aaa"]),
"\"C:\\Program Files\\blah\\blah.exe\" aaa".to_owned()
"\"C:\\Program Files\\blah\\blah.exe\" aaa".to_string()
);
assert_eq!(
test_wrapper("C:\\Program Files\\test", ["aa\"bb"]),
"\"C:\\Program Files\\test\" aa\\\"bb".to_owned()
"\"C:\\Program Files\\test\" aa\\\"bb".to_string()
);
assert_eq!(
test_wrapper("echo", ["a b c"]),
"echo \"a b c\"".to_owned()
"echo \"a b c\"".to_string()
);
assert_eq!(
test_wrapper("\u03c0\u042f\u97f3\u00e6\u221e", []),
"\u03c0\u042f\u97f3\u00e6\u221e".to_owned()
"\u03c0\u042f\u97f3\u00e6\u221e".to_string()
);
}
}


@ -629,7 +629,7 @@ impl ToStrRadix for BigUint {
fn fill_concat(v: &[BigDigit], radix: uint, l: uint) -> String {
if v.is_empty() {
return "0".to_strbuf()
return "0".to_string()
}
let mut s = String::with_capacity(v.len() * l);
for n in v.iter().rev() {
@ -637,7 +637,7 @@ impl ToStrRadix for BigUint {
s.push_str("0".repeat(l - ss.len()).as_slice());
s.push_str(ss.as_slice());
}
s.as_slice().trim_left_chars('0').to_strbuf()
s.as_slice().trim_left_chars('0').to_string()
}
}
}
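As an aside (a plain-integer stand-in, not the BigUint code): the digit strings `to_str_radix` produces are the ones the `to_str_pairs` table below lists for `0xff`. A minimal sketch:

```rust
// Simplified radix formatting for a machine-sized integer (radix <= 36).
fn to_str_radix(mut n: u64, radix: u64) -> String {
    if n == 0 {
        return "0".to_string();
    }
    let digits = b"0123456789abcdefghijklmnopqrstuvwxyz";
    let mut out = Vec::new();
    while n > 0 {
        out.push(digits[(n % radix) as usize]);
        n /= radix;
    }
    out.reverse();
    String::from_utf8(out).unwrap()
}

fn main() {
    assert_eq!(to_str_radix(0xff, 2), "11111111");
    assert_eq!(to_str_radix(0xff, 8), "377");
    assert_eq!(to_str_radix(0xff, 10), "255");
    assert_eq!(to_str_radix(0xff, 16), "ff");
}
```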
@ -1214,7 +1214,7 @@ impl ToStrRadix for BigInt {
fn to_str_radix(&self, radix: uint) -> String {
match self.sign {
Plus => self.data.to_str_radix(radix),
Zero => "0".to_strbuf(),
Zero => "0".to_string(),
Minus => format_strbuf!("-{}", self.data.to_str_radix(radix)),
}
}
@ -2032,35 +2032,35 @@ mod biguint_tests {
fn to_str_pairs() -> Vec<(BigUint, Vec<(uint, String)>)> {
let bits = BigDigit::bits;
vec!(( Zero::zero(), vec!(
(2, "0".to_strbuf()), (3, "0".to_strbuf())
(2, "0".to_string()), (3, "0".to_string())
)), ( BigUint::from_slice([ 0xff ]), vec!(
(2, "11111111".to_strbuf()),
(3, "100110".to_strbuf()),
(4, "3333".to_strbuf()),
(5, "2010".to_strbuf()),
(6, "1103".to_strbuf()),
(7, "513".to_strbuf()),
(8, "377".to_strbuf()),
(9, "313".to_strbuf()),
(10, "255".to_strbuf()),
(11, "212".to_strbuf()),
(12, "193".to_strbuf()),
(13, "168".to_strbuf()),
(14, "143".to_strbuf()),
(15, "120".to_strbuf()),
(16, "ff".to_strbuf())
(2, "11111111".to_string()),
(3, "100110".to_string()),
(4, "3333".to_string()),
(5, "2010".to_string()),
(6, "1103".to_string()),
(7, "513".to_string()),
(8, "377".to_string()),
(9, "313".to_string()),
(10, "255".to_string()),
(11, "212".to_string()),
(12, "193".to_string()),
(13, "168".to_string()),
(14, "143".to_string()),
(15, "120".to_string()),
(16, "ff".to_string())
)), ( BigUint::from_slice([ 0xfff ]), vec!(
(2, "111111111111".to_strbuf()),
(4, "333333".to_strbuf()),
(16, "fff".to_strbuf())
(2, "111111111111".to_string()),
(4, "333333".to_string()),
(16, "fff".to_string())
)), ( BigUint::from_slice([ 1, 2 ]), vec!(
(2,
format_strbuf!("10{}1", "0".repeat(bits - 1))),
(4,
format_strbuf!("2{}1", "0".repeat(bits / 2 - 1))),
(10, match bits {
32 => "8589934593".to_strbuf(),
16 => "131073".to_strbuf(),
32 => "8589934593".to_string(),
16 => "131073".to_string(),
_ => fail!()
}),
(16,
@ -2075,8 +2075,8 @@ mod biguint_tests {
"0".repeat(bits / 2 - 1),
"0".repeat(bits / 2 - 1))),
(10, match bits {
32 => "55340232229718589441".to_strbuf(),
16 => "12885032961".to_strbuf(),
32 => "55340232229718589441".to_string(),
16 => "12885032961".to_string(),
_ => fail!()
}),
(16,


@ -349,14 +349,14 @@ mod test {
#[test]
fn test_to_str() {
fn test(c : Complex64, s: String) {
assert_eq!(c.to_str().to_strbuf(), s);
assert_eq!(c.to_str().to_string(), s);
}
test(_0_0i, "0+0i".to_strbuf());
test(_1_0i, "1+0i".to_strbuf());
test(_0_1i, "0+1i".to_strbuf());
test(_1_1i, "1+1i".to_strbuf());
test(_neg1_1i, "-1+1i".to_strbuf());
test(-_neg1_1i, "1-1i".to_strbuf());
test(_05_05i, "0.5+0.5i".to_strbuf());
test(_0_0i, "0+0i".to_string());
test(_1_0i, "1+0i".to_string());
test(_0_1i, "0+1i".to_string());
test(_1_1i, "1+1i".to_string());
test(_neg1_1i, "-1+1i".to_string());
test(-_neg1_1i, "1-1i".to_string());
test(_05_05i, "0.5+0.5i".to_string());
}
}
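As an aside (a minimal stand-in, not `Complex::to_str` itself): the expected strings above follow the `a+bi` convention with an explicit sign on the imaginary part.

```rust
// Hypothetical formatter reproducing the "a+bi" strings asserted above.
fn complex_to_string(re: f64, im: f64) -> String {
    if im < 0.0 {
        format!("{}-{}i", re, -im)
    } else {
        format!("{}+{}i", re, im)
    }
}

fn main() {
    assert_eq!(complex_to_string(0.0, 0.0), "0+0i");
    assert_eq!(complex_to_string(-1.0, 1.0), "-1+1i");
    assert_eq!(complex_to_string(1.0, -1.0), "1-1i");
    assert_eq!(complex_to_string(0.5, 0.5), "0.5+0.5i");
}
```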


@ -559,14 +559,14 @@ mod test {
fn test_to_from_str() {
fn test(r: Rational, s: String) {
assert_eq!(FromStr::from_str(s.as_slice()), Some(r));
assert_eq!(r.to_str().to_strbuf(), s);
assert_eq!(r.to_str().to_string(), s);
}
test(_1, "1/1".to_strbuf());
test(_0, "0/1".to_strbuf());
test(_1_2, "1/2".to_strbuf());
test(_3_2, "3/2".to_strbuf());
test(_2, "2/1".to_strbuf());
test(_neg1_2, "-1/2".to_strbuf());
test(_1, "1/1".to_string());
test(_0, "0/1".to_string());
test(_1_2, "1/2".to_string());
test(_3_2, "3/2".to_string());
test(_2, "2/1".to_string());
test(_neg1_2, "-1/2".to_string());
}
#[test]
fn test_from_str_fail() {
@ -586,28 +586,28 @@ mod test {
fn test(r: Rational, s: String, n: uint) {
assert_eq!(FromStrRadix::from_str_radix(s.as_slice(), n),
Some(r));
assert_eq!(r.to_str_radix(n).to_strbuf(), s);
assert_eq!(r.to_str_radix(n).to_string(), s);
}
fn test3(r: Rational, s: String) { test(r, s, 3) }
fn test16(r: Rational, s: String) { test(r, s, 16) }
test3(_1, "1/1".to_strbuf());
test3(_0, "0/1".to_strbuf());
test3(_1_2, "1/2".to_strbuf());
test3(_3_2, "10/2".to_strbuf());
test3(_2, "2/1".to_strbuf());
test3(_neg1_2, "-1/2".to_strbuf());
test3(_neg1_2 / _2, "-1/11".to_strbuf());
test3(_1, "1/1".to_string());
test3(_0, "0/1".to_string());
test3(_1_2, "1/2".to_string());
test3(_3_2, "10/2".to_string());
test3(_2, "2/1".to_string());
test3(_neg1_2, "-1/2".to_string());
test3(_neg1_2 / _2, "-1/11".to_string());
test16(_1, "1/1".to_strbuf());
test16(_0, "0/1".to_strbuf());
test16(_1_2, "1/2".to_strbuf());
test16(_3_2, "3/2".to_strbuf());
test16(_2, "2/1".to_strbuf());
test16(_neg1_2, "-1/2".to_strbuf());
test16(_neg1_2 / _2, "-1/4".to_strbuf());
test16(Ratio::new(13,15), "d/f".to_strbuf());
test16(_1_2*_1_2*_1_2*_1_2, "1/10".to_strbuf());
test16(_1, "1/1".to_string());
test16(_0, "0/1".to_string());
test16(_1_2, "1/2".to_string());
test16(_3_2, "3/2".to_string());
test16(_2, "2/1".to_string());
test16(_neg1_2, "-1/2".to_string());
test16(_neg1_2 / _2, "-1/4".to_string());
test16(Ratio::new(13,15), "d/f".to_string());
test16(_1_2*_1_2*_1_2*_1_2, "1/10".to_string());
}
#[test]


@ -222,7 +222,7 @@ impl<'a> Parser<'a> {
self.caps += 1;
self.stack.push(Paren(self.flags,
self.caps,
"".to_strbuf()))
"".to_string()))
}
}
')' => {
@ -796,7 +796,7 @@ impl<'a> Parser<'a> {
}
if self.cur() == ':' {
// Save the old flags with the opening paren.
self.stack.push(Paren(self.flags, 0, "".to_strbuf()));
self.stack.push(Paren(self.flags, 0, "".to_string()));
}
self.flags = flags;
return Ok(())
@ -922,7 +922,7 @@ impl<'a> Parser<'a> {
fn err<T>(&self, msg: &str) -> Result<T, Error> {
Err(Error {
pos: self.chari,
msg: msg.to_strbuf(),
msg: msg.to_string(),
})
}
@ -942,7 +942,7 @@ impl<'a> Parser<'a> {
}
fn slice(&self, start: uint, end: uint) -> String {
str::from_chars(self.chars.as_slice().slice(start, end)).to_strbuf()
str::from_chars(self.chars.as_slice().slice(start, end)).to_string()
}
}


@ -154,7 +154,7 @@ impl Regex {
let ast = try!(parse::parse(re));
let (prog, names) = Program::new(ast);
Ok(Dynamic(Dynamic {
original: re.to_strbuf(),
original: re.to_string(),
names: names,
prog: prog,
}))
@ -539,8 +539,8 @@ enum NamesIter<'a> {
impl<'a> Iterator<Option<String>> for NamesIter<'a> {
fn next(&mut self) -> Option<Option<String>> {
match *self {
NamesIterNative(ref mut i) => i.next().map(|x| x.map(|s| s.to_strbuf())),
NamesIterDynamic(ref mut i) => i.next().map(|x| x.as_ref().map(|s| s.to_strbuf())),
NamesIterNative(ref mut i) => i.next().map(|x| x.map(|s| s.to_string())),
NamesIterDynamic(ref mut i) => i.next().map(|x| x.as_ref().map(|s| s.to_string())),
}
}
}
@ -764,8 +764,8 @@ impl<'t> Captures<'t> {
format_strbuf!("{}{}",
pre,
match from_str::<uint>(name.as_slice()) {
None => self.name(name).to_strbuf(),
Some(i) => self.at(i).to_strbuf(),
None => self.name(name).to_string(),
Some(i) => self.at(i).to_string(),
})
});
let re = Regex::new(r"\$\$").unwrap();


@ -161,7 +161,7 @@ fn gen_text(n: uint) -> String {
*b = '\n' as u8
}
}
str::from_utf8(bytes.as_slice()).unwrap().to_strbuf()
str::from_utf8(bytes.as_slice()).unwrap().to_string()
}
throughput!(easy0_32, easy0(), 32)


@ -97,7 +97,7 @@ fn native(cx: &mut ExtCtxt, sp: codemap::Span, tts: &[ast::TokenTree])
let mut gen = NfaGen {
cx: &*cx, sp: sp, prog: prog,
names: re.names_iter().collect(), original: re.as_str().to_strbuf(),
names: re.names_iter().collect(), original: re.as_str().to_string(),
};
MacExpr::new(gen.code())
}
@ -611,7 +611,7 @@ fn parse(cx: &mut ExtCtxt, tts: &[ast::TokenTree]) -> Option<String> {
let regex = match entry.node {
ast::ExprLit(lit) => {
match lit.node {
ast::LitStr(ref s, _) => s.to_str().to_strbuf(),
ast::LitStr(ref s, _) => s.to_str().to_string(),
_ => {
cx.span_err(entry.span, format!(
"expected string literal but got `{}`",


@ -139,7 +139,7 @@ impl<'a> Archive<'a> {
let output = str::from_utf8(output.output.as_slice()).unwrap();
// use lines_any because windows delimits output with `\r\n` instead of
// just `\n`
output.lines_any().map(|s| s.to_strbuf()).collect()
output.lines_any().map(|s| s.to_string()).collect()
}
fn add_archive(&mut self, archive: &Path, name: &str,


@ -15,15 +15,15 @@ use syntax::abi;
pub fn get_target_strs(target_triple: String, target_os: abi::Os) -> target_strs::t {
let cc_args = if target_triple.as_slice().contains("thumb") {
vec!("-mthumb".to_strbuf())
vec!("-mthumb".to_string())
} else {
vec!("-marm".to_strbuf())
vec!("-marm".to_string())
};
return target_strs::t {
module_asm: "".to_strbuf(),
module_asm: "".to_string(),
meta_sect_name:
meta_section_name(cfg_os_to_meta_os(target_os)).to_strbuf(),
meta_section_name(cfg_os_to_meta_os(target_os)).to_string(),
data_layout: match target_os {
abi::OsMacos => {
@ -31,7 +31,7 @@ pub fn get_target_strs(target_triple: String, target_os: abi::Os) -> target_strs
-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64\
-f32:32:32-f64:64:64\
-v64:64:64-v128:64:128\
-a0:0:64-n32".to_strbuf()
-a0:0:64-n32".to_string()
}
abi::OsWin32 => {
@ -39,7 +39,7 @@ pub fn get_target_strs(target_triple: String, target_os: abi::Os) -> target_strs
-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64\
-f32:32:32-f64:64:64\
-v64:64:64-v128:64:128\
-a0:0:64-n32".to_strbuf()
-a0:0:64-n32".to_string()
}
abi::OsLinux => {
@ -47,7 +47,7 @@ pub fn get_target_strs(target_triple: String, target_os: abi::Os) -> target_strs
-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64\
-f32:32:32-f64:64:64\
-v64:64:64-v128:64:128\
-a0:0:64-n32".to_strbuf()
-a0:0:64-n32".to_string()
}
abi::OsAndroid => {
@ -55,7 +55,7 @@ pub fn get_target_strs(target_triple: String, target_os: abi::Os) -> target_strs
-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64\
-f32:32:32-f64:64:64\
-v64:64:64-v128:64:128\
-a0:0:64-n32".to_strbuf()
-a0:0:64-n32".to_string()
}
abi::OsFreebsd => {
@ -63,7 +63,7 @@ pub fn get_target_strs(target_triple: String, target_os: abi::Os) -> target_strs
-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64\
-f32:32:32-f64:64:64\
-v64:64:64-v128:64:128\
-a0:0:64-n32".to_strbuf()
-a0:0:64-n32".to_string()
}
},


@ -81,7 +81,7 @@ pub fn WriteOutputFile(
let result = llvm::LLVMRustWriteOutputFile(
target, pm, m, output, file_type);
if !result {
llvm_err(sess, "could not write output".to_strbuf());
llvm_err(sess, "could not write output".to_string());
}
})
}
@ -554,7 +554,7 @@ pub fn crate_id_hash(crate_id: &CrateId) -> String {
// not by path.
let mut s = Sha256::new();
s.input_str(crate_id.short_name_with_version().as_slice());
truncated_hash_result(&mut s).as_slice().slice_to(8).to_strbuf()
truncated_hash_result(&mut s).as_slice().slice_to(8).to_string()
}
// FIXME (#9639): This needs to handle non-utf8 `out_filestem` values
@ -570,7 +570,7 @@ pub fn build_link_meta(krate: &ast::Crate, out_filestem: &str) -> LinkMeta {
fn truncated_hash_result(symbol_hasher: &mut Sha256) -> String {
let output = symbol_hasher.result_bytes();
// 64 bits should be enough to avoid collisions.
output.slice_to(8).to_hex().to_strbuf()
output.slice_to(8).to_hex().to_string()
}
@ -597,7 +597,7 @@ fn symbol_hash(tcx: &ty::ctxt,
fn get_symbol_hash(ccx: &CrateContext, t: ty::t) -> String {
match ccx.type_hashcodes.borrow().find(&t) {
Some(h) => return h.to_strbuf(),
Some(h) => return h.to_string(),
None => {}
}
@ -649,7 +649,7 @@ pub fn sanitize(s: &str) -> String {
if result.len() > 0u &&
result.as_slice()[0] != '_' as u8 &&
! char::is_XID_start(result.as_slice()[0] as char) {
return format!("_{}", result.as_slice()).to_strbuf();
return format!("_{}", result.as_slice()).to_string();
}
return result;
@ -703,7 +703,7 @@ pub fn exported_name(path: PathElems, hash: &str, vers: &str) -> String {
let vers = if vers.len() > 0 && !char::is_XID_start(vers.char_at(0)) {
format!("v{}", vers)
} else {
vers.to_owned()
vers.to_string()
};
mangle(path, Some(hash), Some(vers.as_slice()))
@ -759,7 +759,7 @@ pub fn output_lib_filename(id: &CrateId) -> String {
pub fn get_cc_prog(sess: &Session) -> String {
match sess.opts.cg.linker {
Some(ref linker) => return linker.to_strbuf(),
Some(ref linker) => return linker.to_string(),
None => {}
}
@ -770,13 +770,13 @@ pub fn get_cc_prog(sess: &Session) -> String {
match sess.targ_cfg.os {
abi::OsWin32 => "gcc",
_ => "cc",
}.to_strbuf()
}.to_string()
}
pub fn get_ar_prog(sess: &Session) -> String {
match sess.opts.cg.ar {
Some(ref ar) => (*ar).clone(),
None => "ar".to_strbuf()
None => "ar".to_string()
}
}


@ -15,10 +15,10 @@ use syntax::abi;
pub fn get_target_strs(target_triple: String, target_os: abi::Os) -> target_strs::t {
return target_strs::t {
module_asm: "".to_strbuf(),
module_asm: "".to_string(),
meta_sect_name:
meta_section_name(cfg_os_to_meta_os(target_os)).to_strbuf(),
meta_section_name(cfg_os_to_meta_os(target_os)).to_string(),
data_layout: match target_os {
abi::OsMacos => {
@ -26,7 +26,7 @@ pub fn get_target_strs(target_triple: String, target_os: abi::Os) -> target_strs
-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64\
-f32:32:32-f64:64:64\
-v64:64:64-v128:64:128\
-a0:0:64-n32".to_strbuf()
-a0:0:64-n32".to_string()
}
abi::OsWin32 => {
@ -34,7 +34,7 @@ pub fn get_target_strs(target_triple: String, target_os: abi::Os) -> target_strs
-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64\
-f32:32:32-f64:64:64\
-v64:64:64-v128:64:128\
-a0:0:64-n32".to_strbuf()
-a0:0:64-n32".to_string()
}
abi::OsLinux => {
@ -42,7 +42,7 @@ pub fn get_target_strs(target_triple: String, target_os: abi::Os) -> target_strs
-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64\
-f32:32:32-f64:64:64\
-v64:64:64-v128:64:128\
-a0:0:64-n32".to_strbuf()
-a0:0:64-n32".to_string()
}
abi::OsAndroid => {
@ -50,7 +50,7 @@ pub fn get_target_strs(target_triple: String, target_os: abi::Os) -> target_strs
-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64\
-f32:32:32-f64:64:64\
-v64:64:64-v128:64:128\
-a0:0:64-n32".to_strbuf()
-a0:0:64-n32".to_string()
}
abi::OsFreebsd => {
@ -58,7 +58,7 @@ pub fn get_target_strs(target_triple: String, target_os: abi::Os) -> target_strs
-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64\
-f32:32:32-f64:64:64\
-v64:64:64-v128:64:128\
-a0:0:64-n32".to_strbuf()
-a0:0:64-n32".to_string()
}
},


@ -33,9 +33,9 @@ pub fn get_rpath_flags(sess: &Session, out_filename: &Path) -> Vec<String> {
let mut flags = Vec::new();
if sess.targ_cfg.os == abi::OsFreebsd {
flags.push_all(["-Wl,-rpath,/usr/local/lib/gcc46".to_strbuf(),
"-Wl,-rpath,/usr/local/lib/gcc44".to_strbuf(),
"-Wl,-z,origin".to_strbuf()]);
flags.push_all(["-Wl,-rpath,/usr/local/lib/gcc46".to_string(),
"-Wl,-rpath,/usr/local/lib/gcc44".to_string(),
"-Wl,-z,origin".to_string()]);
}
debug!("preparing the RPATH!");
@ -145,7 +145,7 @@ pub fn get_install_prefix_rpath(sysroot: &Path, target_triple: &str) -> String {
path.push(&tlib);
let path = os::make_absolute(&path);
// FIXME (#9639): This needs to handle non-utf8 paths
path.as_str().expect("non-utf8 component in rpath").to_strbuf()
path.as_str().expect("non-utf8 component in rpath").to_string()
}
pub fn minimize_rpaths(rpaths: &[String]) -> Vec<String> {
@ -169,12 +169,12 @@ mod test {
#[test]
fn test_rpaths_to_flags() {
let flags = rpaths_to_flags([
"path1".to_strbuf(),
"path2".to_strbuf()
"path1".to_string(),
"path2".to_string()
]);
assert_eq!(flags,
vec!("-Wl,-rpath,path1".to_strbuf(),
"-Wl,-rpath,path2".to_strbuf()));
vec!("-Wl,-rpath,path1".to_string(),
"-Wl,-rpath,path2".to_string()));
}
#[test]
@ -201,35 +201,35 @@ mod test {
#[test]
fn test_minimize1() {
let res = minimize_rpaths([
"rpath1".to_strbuf(),
"rpath2".to_strbuf(),
"rpath1".to_strbuf()
"rpath1".to_string(),
"rpath2".to_string(),
"rpath1".to_string()
]);
assert!(res.as_slice() == [
"rpath1".to_strbuf(),
"rpath2".to_strbuf()
"rpath1".to_string(),
"rpath2".to_string()
]);
}
#[test]
fn test_minimize2() {
let res = minimize_rpaths([
"1a".to_strbuf(),
"2".to_strbuf(),
"2".to_strbuf(),
"1a".to_strbuf(),
"4a".to_strbuf(),
"1a".to_strbuf(),
"2".to_strbuf(),
"3".to_strbuf(),
"4a".to_strbuf(),
"3".to_strbuf()
"1a".to_string(),
"2".to_string(),
"2".to_string(),
"1a".to_string(),
"4a".to_string(),
"1a".to_string(),
"2".to_string(),
"3".to_string(),
"4a".to_string(),
"3".to_string()
]);
assert!(res.as_slice() == [
"1a".to_strbuf(),
"2".to_strbuf(),
"4a".to_strbuf(),
"3".to_strbuf()
"1a".to_string(),
"2".to_string(),
"4a".to_string(),
"3".to_string()
]);
}
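As an aside (a simplified stand-in for `minimize_rpaths`): the behaviour `test_minimize1` and `test_minimize2` assert is order-preserving de-duplication.

```rust
use std::collections::HashSet;

// Keep the first occurrence of each rpath, preserving order.
fn minimize(rpaths: &[String]) -> Vec<String> {
    let mut seen = HashSet::new();
    rpaths
        .iter()
        .filter(|r| seen.insert(r.to_string()))
        .cloned()
        .collect()
}

fn main() {
    let res = minimize(&[
        "1a".to_string(),
        "2".to_string(),
        "2".to_string(),
        "1a".to_string(),
    ]);
    assert_eq!(res, vec!["1a".to_string(), "2".to_string()]);
}
```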


@ -61,7 +61,7 @@ pub struct Svh {
impl Svh {
pub fn new(hash: &str) -> Svh {
assert!(hash.len() == 16);
Svh { hash: hash.to_strbuf() }
Svh { hash: hash.to_string() }
}
pub fn as_str<'a>(&'a self) -> &'a str {


@ -17,10 +17,10 @@ use syntax::abi;
pub fn get_target_strs(target_triple: String, target_os: abi::Os)
-> target_strs::t {
return target_strs::t {
module_asm: "".to_strbuf(),
module_asm: "".to_string(),
meta_sect_name:
meta_section_name(cfg_os_to_meta_os(target_os)).to_strbuf(),
meta_section_name(cfg_os_to_meta_os(target_os)).to_string(),
data_layout: match target_os {
abi::OsMacos => {
@ -28,27 +28,27 @@ pub fn get_target_strs(target_triple: String, target_os: abi::Os)
-i32:32:32-i64:32:64\
-f32:32:32-f64:32:64-v64:64:64\
-v128:128:128-a0:0:64-f80:128:128\
-n8:16:32".to_strbuf()
-n8:16:32".to_string()
}
abi::OsWin32 => {
"e-p:32:32-f64:64:64-i64:64:64-f80:32:32-n8:16:32".to_strbuf()
"e-p:32:32-f64:64:64-i64:64:64-f80:32:32-n8:16:32".to_string()
}
abi::OsLinux => {
"e-p:32:32-f64:32:64-i64:32:64-f80:32:32-n8:16:32".to_strbuf()
"e-p:32:32-f64:32:64-i64:32:64-f80:32:32-n8:16:32".to_string()
}
abi::OsAndroid => {
"e-p:32:32-f64:32:64-i64:32:64-f80:32:32-n8:16:32".to_strbuf()
"e-p:32:32-f64:32:64-i64:32:64-f80:32:32-n8:16:32".to_string()
}
abi::OsFreebsd => {
"e-p:32:32-f64:32:64-i64:32:64-f80:32:32-n8:16:32".to_strbuf()
"e-p:32:32-f64:32:64-i64:32:64-f80:32:32-n8:16:32".to_string()
}
},
target_triple: target_triple,
cc_args: vec!("-m32".to_strbuf()),
cc_args: vec!("-m32".to_string()),
};
}


@ -16,45 +16,45 @@ use syntax::abi;
pub fn get_target_strs(target_triple: String, target_os: abi::Os) -> target_strs::t {
return target_strs::t {
module_asm: "".to_strbuf(),
module_asm: "".to_string(),
meta_sect_name:
meta_section_name(cfg_os_to_meta_os(target_os)).to_strbuf(),
meta_section_name(cfg_os_to_meta_os(target_os)).to_string(),
data_layout: match target_os {
abi::OsMacos => {
"e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-\
f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-\
s0:64:64-f80:128:128-n8:16:32:64".to_strbuf()
s0:64:64-f80:128:128-n8:16:32:64".to_string()
}
abi::OsWin32 => {
// FIXME: Test this. Copied from linux (#2398)
"e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-\
f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-\
s0:64:64-f80:128:128-n8:16:32:64-S128".to_strbuf()
s0:64:64-f80:128:128-n8:16:32:64-S128".to_string()
}
abi::OsLinux => {
"e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-\
f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-\
s0:64:64-f80:128:128-n8:16:32:64-S128".to_strbuf()
s0:64:64-f80:128:128-n8:16:32:64-S128".to_string()
}
abi::OsAndroid => {
"e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-\
f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-\
s0:64:64-f80:128:128-n8:16:32:64-S128".to_strbuf()
s0:64:64-f80:128:128-n8:16:32:64-S128".to_string()
}
abi::OsFreebsd => {
"e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-\
f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-\
s0:64:64-f80:128:128-n8:16:32:64-S128".to_strbuf()
s0:64:64-f80:128:128-n8:16:32:64-S128".to_string()
}
},
target_triple: target_triple,
cc_args: vec!("-m64".to_strbuf()),
cc_args: vec!("-m64".to_string()),
};
}


@ -107,7 +107,7 @@ pub fn basic_options() -> Options {
output_types: Vec::new(),
addl_lib_search_paths: RefCell::new(HashSet::new()),
maybe_sysroot: None,
target_triple: driver::host_triple().to_strbuf(),
target_triple: driver::host_triple().to_string(),
cfg: Vec::new(),
test: false,
parse_only: false,
@ -252,14 +252,14 @@ macro_rules! cgoptions(
fn parse_opt_string(slot: &mut Option<String>, v: Option<&str>) -> bool {
match v {
Some(s) => { *slot = Some(s.to_strbuf()); true },
Some(s) => { *slot = Some(s.to_string()); true },
None => false,
}
}
fn parse_string(slot: &mut String, v: Option<&str>) -> bool {
match v {
Some(s) => { *slot = s.to_strbuf(); true },
Some(s) => { *slot = s.to_string(); true },
None => false,
}
}
@ -269,7 +269,7 @@ macro_rules! cgoptions(
match v {
Some(s) => {
for s in s.words() {
slot.push(s.to_strbuf());
slot.push(s.to_string());
}
true
},
@ -287,9 +287,9 @@ cgoptions!(
"system linker to link outputs with"),
link_args: Vec<String> = (Vec::new(), parse_list,
"extra arguments to pass to the linker (space separated)"),
target_cpu: String = ("generic".to_strbuf(), parse_string,
target_cpu: String = ("generic".to_string(), parse_string,
"select target processor (llc -mcpu=help for details)"),
target_feature: String = ("".to_strbuf(), parse_string,
target_feature: String = ("".to_string(), parse_string,
"target specific attributes (llc -mattr=help for details)"),
passes: Vec<String> = (Vec::new(), parse_list,
"a list of extra LLVM passes to run (space separated)"),
@ -311,7 +311,7 @@ cgoptions!(
"prefer dynamic linking to static linking"),
no_integrated_as: bool = (false, parse_bool,
"use an external assembler rather than LLVM's integrated one"),
relocation_model: String = ("pic".to_strbuf(), parse_string,
relocation_model: String = ("pic".to_string(), parse_string,
"choose the relocation model to use (llc -relocation-model for details)"),
)
@ -557,8 +557,8 @@ pub fn optgroups() -> Vec<getopts::OptGroup> {
// Convert strings provided as --cfg [cfgspec] into a crate_cfg
fn parse_cfgspecs(cfgspecs: Vec<String> ) -> ast::CrateConfig {
cfgspecs.move_iter().map(|s| {
parse::parse_meta_from_source_str("cfgspec".to_strbuf(),
s.to_strbuf(),
parse::parse_meta_from_source_str("cfgspec".to_string(),
s.to_string(),
Vec::new(),
&parse::new_parse_sess())
}).collect::<ast::CrateConfig>()
@ -602,7 +602,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
.collect::<Vec<_>>()
.append(matches.opt_strs(level_name).as_slice());
for lint_name in flags.iter() {
let lint_name = lint_name.replace("-", "_").into_strbuf();
let lint_name = lint_name.replace("-", "_").into_string();
match lint_dict.find_equiv(&lint_name) {
None => {
early_error(format!("unknown {} flag: {}",
@ -667,8 +667,8 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
let sysroot_opt = matches.opt_str("sysroot").map(|m| Path::new(m));
let target = match matches.opt_str("target") {
Some(supplied_target) => supplied_target.to_strbuf(),
None => driver::host_triple().to_strbuf(),
Some(supplied_target) => supplied_target.to_string(),
None => driver::host_triple().to_string(),
};
let opt_level = {
if (debugging_opts & NO_OPT) != 0 {
@ -723,7 +723,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
let cfg = parse_cfgspecs(matches.opt_strs("cfg")
.move_iter()
.map(|x| x.to_strbuf())
.map(|x| x.to_string())
.collect());
let test = matches.opt_present("test");
let write_dependency_info = (matches.opt_present("dep-info"),
@ -787,7 +787,7 @@ mod test {
#[test]
fn test_switch_implies_cfg_test() {
let matches =
&match getopts(["--test".to_strbuf()], optgroups().as_slice()) {
&match getopts(["--test".to_string()], optgroups().as_slice()) {
Ok(m) => m,
Err(f) => fail!("test_switch_implies_cfg_test: {}", f.to_err_msg())
};
@ -802,7 +802,7 @@ mod test {
#[test]
fn test_switch_implies_cfg_test_unless_cfg_test() {
let matches =
&match getopts(["--test".to_strbuf(), "--cfg=test".to_strbuf()],
&match getopts(["--test".to_string(), "--cfg=test".to_string()],
optgroups().as_slice()) {
Ok(m) => m,
Err(f) => {


@ -106,13 +106,13 @@ pub fn compile_input(sess: Session,
* (e.g. source from stdin or a string)
*/
pub fn anon_src() -> String {
"<anon>".to_strbuf()
"<anon>".to_string()
}
pub fn source_name(input: &Input) -> String {
match *input {
// FIXME (#9639): This needs to handle non-utf8 paths
FileInput(ref ifile) => ifile.as_str().unwrap().to_strbuf(),
FileInput(ref ifile) => ifile.as_str().unwrap().to_string(),
StrInput(_) => anon_src()
}
}
@ -127,8 +127,8 @@ pub enum Input {
impl Input {
fn filestem(&self) -> String {
match *self {
FileInput(ref ifile) => ifile.filestem_str().unwrap().to_strbuf(),
StrInput(_) => "rust_out".to_strbuf(),
FileInput(ref ifile) => ifile.filestem_str().unwrap().to_string(),
StrInput(_) => "rust_out".to_string(),
}
}
}
@ -142,8 +142,8 @@ pub fn phase_1_parse_input(sess: &Session, cfg: ast::CrateConfig, input: &Input)
parse::parse_crate_from_file(&(*file), cfg.clone(), &sess.parse_sess)
}
StrInput(ref src) => {
parse::parse_crate_from_source_str(anon_src().to_strbuf(),
src.to_strbuf(),
parse::parse_crate_from_source_str(anon_src().to_string(),
src.to_string(),
cfg.clone(),
&sess.parse_sess)
}
@ -497,7 +497,7 @@ fn write_out_deps(sess: &Session,
// write Makefile-compatible dependency rules
let files: Vec<String> = sess.codemap().files.borrow()
.iter().filter(|fmap| fmap.is_real_file())
.map(|fmap| fmap.name.to_strbuf())
.map(|fmap| fmap.name.to_string())
.collect();
let mut file = try!(io::File::create(&deps_filename));
for path in out_filenames.iter() {
@ -533,20 +533,20 @@ impl pprust::PpAnn for IdentifiedAnnotation {
match node {
pprust::NodeItem(item) => {
try!(pp::space(&mut s.s));
s.synth_comment(item.id.to_str().to_strbuf())
s.synth_comment(item.id.to_str().to_string())
}
pprust::NodeBlock(blk) => {
try!(pp::space(&mut s.s));
s.synth_comment((format!("block {}", blk.id)).to_strbuf())
s.synth_comment((format!("block {}", blk.id)).to_string())
}
pprust::NodeExpr(expr) => {
try!(pp::space(&mut s.s));
try!(s.synth_comment(expr.id.to_str().to_strbuf()));
try!(s.synth_comment(expr.id.to_str().to_string()));
s.pclose()
}
pprust::NodePat(pat) => {
try!(pp::space(&mut s.s));
s.synth_comment((format!("pat {}", pat.id)).to_strbuf())
s.synth_comment((format!("pat {}", pat.id)).to_string())
}
}
}
@ -629,7 +629,7 @@ pub fn pretty_print_input(sess: Session,
pprust::print_crate(sess.codemap(),
sess.diagnostic(),
&krate,
src_name.to_strbuf(),
src_name.to_string(),
&mut rdr,
out,
&IdentifiedAnnotation,
@ -644,7 +644,7 @@ pub fn pretty_print_input(sess: Session,
pprust::print_crate(annotation.analysis.ty_cx.sess.codemap(),
annotation.analysis.ty_cx.sess.diagnostic(),
&krate,
src_name.to_strbuf(),
src_name.to_string(),
&mut rdr,
out,
&annotation,
@ -677,7 +677,7 @@ pub fn pretty_print_input(sess: Session,
pprust::print_crate(sess.codemap(),
sess.diagnostic(),
&krate,
src_name.to_strbuf(),
src_name.to_string(),
&mut rdr,
out,
&pprust::NoAnn,
@ -694,7 +694,7 @@ fn print_flowgraph<W:io::Writer>(analysis: CrateAnalysis,
let cfg = cfg::CFG::new(ty_cx, block);
let lcfg = LabelledCFG { ast_map: &ty_cx.map,
cfg: &cfg,
name: format!("block{}", block.id).to_strbuf(), };
name: format!("block{}", block.id).to_string(), };
debug!("cfg: {:?}", cfg);
let r = dot::render(&lcfg, &mut out);
return expand_err_details(r);
@ -705,7 +705,7 @@ fn print_flowgraph<W:io::Writer>(analysis: CrateAnalysis,
let m = "graphviz::render failed";
io::IoError {
detail: Some(match orig_detail {
None => m.into_strbuf(),
None => m.into_string(),
Some(d) => format_strbuf!("{}: {}", m, d)
}),
..ioerr
@ -738,7 +738,7 @@ pub fn collect_crate_types(session: &Session,
ast::CRATE_NODE_ID,
a.span,
"invalid `crate_type` \
value".to_strbuf());
value".to_string());
None
}
_ => {
@ -746,7 +746,7 @@ pub fn collect_crate_types(session: &Session,
ast::CRATE_NODE_ID,
a.span,
"`crate_type` requires a \
value".to_strbuf());
value".to_string());
None
}
}
@ -832,7 +832,7 @@ pub fn build_output_filenames(input: &Input,
let crateid = attr::find_crateid(attrs);
match crateid {
None => {}
Some(crateid) => stem = crateid.name.to_strbuf(),
Some(crateid) => stem = crateid.name.to_string(),
}
OutputFilenames {
out_directory: dirpath,
@ -854,7 +854,7 @@ pub fn build_output_filenames(input: &Input,
}
OutputFilenames {
out_directory: out_file.dir_path(),
out_filestem: out_file.filestem_str().unwrap().to_strbuf(),
out_filestem: out_file.filestem_str().unwrap().to_string(),
single_output_file: ofile,
}
}


@ -57,7 +57,7 @@ fn run_compiler(args: &[String]) {
if ifile == "-" {
let contents = io::stdin().read_to_end().unwrap();
let src = str::from_utf8(contents.as_slice()).unwrap()
.to_strbuf();
.to_string();
(StrInput(src), None)
} else {
(FileInput(Path::new(ifile)), Some(Path::new(ifile)))
@ -84,7 +84,7 @@ fn run_compiler(args: &[String]) {
}
let r = matches.opt_strs("Z");
if r.contains(&("ls".to_strbuf())) {
if r.contains(&("ls".to_string())) {
match input {
FileInput(ref ifile) => {
let mut stdout = io::stdout();
@ -233,7 +233,7 @@ pub fn handle_options(mut args: Vec<String>) -> Option<getopts::Matches> {
return None;
}
if cg_flags.contains(&"passes=list".to_strbuf()) {
if cg_flags.contains(&"passes=list".to_string()) {
unsafe { ::lib::llvm::llvm::LLVMRustPrintPasses(); }
return None;
}
@ -332,8 +332,8 @@ fn parse_crate_attrs(sess: &Session, input: &Input) ->
}
StrInput(ref src) => {
parse::parse_crate_attrs_from_source_str(
driver::anon_src().to_strbuf(),
src.to_strbuf(),
driver::anon_src().to_string(),
src.to_string(),
Vec::new(),
&sess.parse_sess)
}
@ -399,10 +399,10 @@ fn monitor(f: proc():Send) {
}
let xs = [
"the compiler hit an unexpected failure path. this is a bug.".to_owned(),
"the compiler hit an unexpected failure path. this is a bug.".to_string(),
format!("we would appreciate a bug report: {}",
BUG_REPORT_URL),
"run with `RUST_BACKTRACE=1` for a backtrace".to_owned(),
"run with `RUST_BACKTRACE=1` for a backtrace".to_string(),
];
for note in xs.iter() {
emitter.emit(None, note.as_slice(), diagnostic::Note)


@ -361,7 +361,7 @@ pub fn check_crate(sess: &Session, krate: &ast::Crate) {
sess.add_lint(lint::UnknownFeatures,
ast::CRATE_NODE_ID,
mi.span,
"unknown feature".to_strbuf());
"unknown feature".to_string());
}
}
}


@ -168,7 +168,7 @@ fn generate_test_harness(sess: &Session, krate: ast::Crate)
cx.ext_cx.bt_push(ExpnInfo {
call_site: DUMMY_SP,
callee: NameAndSpan {
name: "test".to_strbuf(),
name: "test".to_string(),
format: MacroAttribute,
span: None
}


@ -123,7 +123,7 @@ pub mod lib {
pub fn main() {
let args = std::os::args().iter()
.map(|x| x.to_strbuf())
.map(|x| x.to_string())
.collect::<Vec<_>>();
std::os::set_exit_status(driver::main_args(args.as_slice()));
}


@ -1873,7 +1873,7 @@ impl TypeNames {
}
pub fn associate_type(&self, s: &str, t: &Type) {
assert!(self.named_types.borrow_mut().insert(s.to_strbuf(),
assert!(self.named_types.borrow_mut().insert(s.to_string(),
t.to_ref()));
}
@ -1886,13 +1886,13 @@ impl TypeNames {
let s = llvm::LLVMTypeToString(ty.to_ref());
let ret = from_c_str(s);
free(s as *mut c_void);
ret.to_strbuf()
ret.to_string()
}
}
pub fn types_to_str(&self, tys: &[Type]) -> String {
let strs: Vec<String> = tys.iter().map(|t| self.type_to_str(*t)).collect();
format_strbuf!("[{}]", strs.connect(",").to_strbuf())
format_strbuf!("[{}]", strs.connect(",").to_string())
}
pub fn val_to_str(&self, val: ValueRef) -> String {
@ -1900,7 +1900,7 @@ impl TypeNames {
let s = llvm::LLVMValueToString(val);
let ret = from_c_str(s);
free(s as *mut c_void);
ret.to_strbuf()
ret.to_string()
}
}
}


@ -165,7 +165,7 @@ fn extract_crate_info(e: &Env, i: &ast::ViewItem) -> Option<CrateInfo> {
None => from_str(ident.get().to_str().as_slice()).unwrap()
};
Some(CrateInfo {
ident: ident.get().to_strbuf(),
ident: ident.get().to_string(),
crate_id: crate_id,
id: id,
should_link: should_link(i),
@ -251,7 +251,7 @@ fn visit_item(e: &Env, i: &ast::Item) {
} else {
e.sess
.cstore
.add_used_library(n.get().to_strbuf(), kind);
.add_used_library(n.get().to_string(), kind);
}
}
None => {}
@ -293,7 +293,7 @@ fn register_crate<'a>(e: &mut Env,
// Stash paths for top-most crate locally if necessary.
let crate_paths = if root.is_none() {
Some(CratePaths {
ident: ident.to_strbuf(),
ident: ident.to_string(),
dylib: lib.dylib.clone(),
rlib: lib.rlib.clone(),
})
@ -308,7 +308,7 @@ fn register_crate<'a>(e: &mut Env,
let loader::Library{ dylib, rlib, metadata } = lib;
let cmeta = Rc::new( cstore::crate_metadata {
name: crate_id.name.to_strbuf(),
name: crate_id.name.to_string(),
data: metadata,
cnum_map: cnum_map,
cnum: cnum,
@ -442,11 +442,11 @@ impl<'a> CrateLoader for Loader<'a> {
};
let macros = decoder::get_exported_macros(library.metadata.as_slice());
let registrar = decoder::get_macro_registrar_fn(library.metadata.as_slice()).map(|id| {
decoder::get_symbol(library.metadata.as_slice(), id).to_strbuf()
decoder::get_symbol(library.metadata.as_slice(), id).to_string()
});
let mc = MacroCrate {
lib: library.dylib.clone(),
macros: macros.move_iter().map(|x| x.to_strbuf()).collect(),
macros: macros.move_iter().map(|x| x.to_string()).collect(),
registrar_symbol: registrar,
};
if should_link {


@ -211,14 +211,14 @@ pub fn get_field_type(tcx: &ty::ctxt, class_id: ast::DefId,
decoder::maybe_find_item(class_id.node, all_items),
|| {
(format!("get_field_type: class ID {:?} not found",
class_id)).to_strbuf()
class_id)).to_string()
});
let the_field = expect(tcx.sess.diagnostic(),
decoder::maybe_find_item(def.node, class_doc),
|| {
(format!("get_field_type: in class {:?}, field ID {:?} not found",
class_id,
def)).to_strbuf()
def)).to_string()
});
let ty = decoder::item_type(def, the_field, tcx, &*cdata);
ty::ty_param_bounds_and_ty {


@ -201,7 +201,7 @@ impl CStore {
pub fn add_used_link_args(&self, args: &str) {
for s in args.split(' ') {
self.used_link_args.borrow_mut().push(s.to_strbuf());
self.used_link_args.borrow_mut().push(s.to_string());
}
}


@ -186,7 +186,7 @@ fn item_method_sort(item: ebml::Doc) -> char {
}
fn item_symbol(item: ebml::Doc) -> String {
reader::get_doc(item, tag_items_data_item_symbol).as_str().to_strbuf()
reader::get_doc(item, tag_items_data_item_symbol).as_str().to_string()
}
fn item_parent_item(d: ebml::Doc) -> Option<ast::DefId> {
@ -1095,7 +1095,7 @@ pub fn get_crate_deps(data: &[u8]) -> Vec<CrateDep> {
let mut crate_num = 1;
fn docstr(doc: ebml::Doc, tag_: uint) -> String {
let d = reader::get_doc(doc, tag_);
d.as_str_slice().to_strbuf()
d.as_str_slice().to_string()
}
reader::tagged_docs(depsdoc, tag_crate_dep, |depdoc| {
let crate_id =
@ -1145,7 +1145,7 @@ pub fn maybe_get_crate_id(data: &[u8]) -> Option<CrateId> {
pub fn get_crate_triple(data: &[u8]) -> String {
let cratedoc = reader::Doc(data);
let triple_doc = reader::maybe_get_doc(cratedoc, tag_crate_triple);
triple_doc.expect("No triple in crate").as_str().to_strbuf()
triple_doc.expect("No triple in crate").as_str().to_string()
}
pub fn get_crate_id(data: &[u8]) -> CrateId {
@ -1247,7 +1247,7 @@ pub fn get_native_libraries(cdata: Cmd)
let name_doc = reader::get_doc(lib_doc, tag_native_libraries_name);
let kind: cstore::NativeLibaryKind =
FromPrimitive::from_u32(reader::doc_as_u32(kind_doc)).unwrap();
let name = name_doc.as_str().to_strbuf();
let name = name_doc.as_str().to_string();
result.push((kind, name));
true
});
@ -1264,7 +1264,7 @@ pub fn get_exported_macros(data: &[u8]) -> Vec<String> {
tag_exported_macros);
let mut result = Vec::new();
reader::tagged_docs(macros, tag_macro_def, |macro_doc| {
result.push(macro_doc.as_str().to_strbuf());
result.push(macro_doc.as_str().to_string());
true
});
result
@ -1316,7 +1316,7 @@ pub fn get_method_arg_names(cdata: Cmd, id: ast::NodeId) -> Vec<String> {
match reader::maybe_get_doc(method_doc, tag_method_argument_names) {
Some(args_doc) => {
reader::tagged_docs(args_doc, tag_method_argument_name, |name_doc| {
ret.push(name_doc.as_str_slice().to_strbuf());
ret.push(name_doc.as_str_slice().to_string());
true
});
}


@ -1738,7 +1738,7 @@ fn encode_dylib_dependency_formats(ebml_w: &mut Encoder, ecx: &EncodeContext) {
slot.map(|kind| (format!("{}:{}", i + 1, match kind {
cstore::RequireDynamic => "d",
cstore::RequireStatic => "s",
})).to_strbuf())
})).to_string())
}).collect::<Vec<String>>();
ebml_w.writer.write(s.connect(",").as_bytes());
}
@ -1909,5 +1909,5 @@ pub fn encoded_ty(tcx: &ty::ctxt, t: ty::t) -> String {
tcx: tcx,
abbrevs: &RefCell::new(HashMap::new())
}, t);
str::from_utf8_owned(Vec::from_slice(wr.get_ref())).unwrap().to_strbuf()
str::from_utf8_owned(Vec::from_slice(wr.get_ref())).unwrap().to_string()
}


@ -187,7 +187,7 @@ static PATH_ENTRY_SEPARATOR: &'static str = ":";
/// Returns RUST_PATH as a string, without default paths added
pub fn get_rust_path() -> Option<String> {
os::getenv("RUST_PATH").map(|x| x.to_strbuf())
os::getenv("RUST_PATH").map(|x| x.to_string())
}
/// Returns the value of RUST_PATH, as a list
@ -251,26 +251,26 @@ fn find_libdir(sysroot: &Path) -> String {
#[cfg(target_word_size = "64")]
fn primary_libdir_name() -> String {
"lib64".to_strbuf()
"lib64".to_string()
}
#[cfg(target_word_size = "32")]
fn primary_libdir_name() -> String {
"lib32".to_strbuf()
"lib32".to_string()
}
fn secondary_libdir_name() -> String {
"lib".to_strbuf()
"lib".to_string()
}
}
#[cfg(windows)]
fn find_libdir(_sysroot: &Path) -> String {
"bin".to_strbuf()
"bin".to_string()
}
// The name of rustc's own place to organize libraries.
// Used to be "rustc", now the default is "rustlib"
pub fn rustlibdir() -> String {
"rustlib".to_strbuf()
"rustlib".to_string()
}


@ -327,13 +327,13 @@ impl<'a> Context<'a> {
Some(..) => {} // check the hash
// hash is irrelevant, no version specified
None => return Some(hash.to_strbuf())
None => return Some(hash.to_string())
}
debug!("matching -- {}, vers ok", file);
// hashes in filenames are prefixes of the "true hash"
if self.id_hash == hash.as_slice() {
debug!("matching -- {}, hash ok", file);
Some(hash.to_strbuf())
Some(hash.to_string())
} else {
None
}
@ -423,7 +423,7 @@ impl<'a> Context<'a> {
info!("Rejecting via crate triple: expected {} got {}", self.triple, triple);
self.rejected_via_triple.push(CrateMismatch {
path: libpath.clone(),
got: triple.to_strbuf()
got: triple.to_string()
});
return false;
}
@ -435,7 +435,7 @@ impl<'a> Context<'a> {
info!("Rejecting via hash: expected {} got {}", *myhash, hash);
self.rejected_via_hash.push(CrateMismatch {
path: libpath.clone(),
got: myhash.as_str().to_strbuf()
got: myhash.as_str().to_string()
});
false
} else {


@ -451,7 +451,7 @@ fn parse_fn_style(c: char) -> FnStyle {
fn parse_abi_set(st: &mut PState) -> abi::Abi {
assert_eq!(next(st), '[');
scan(st, |c| c == ']', |bytes| {
let abi_str = str::from_utf8(bytes).unwrap().to_owned();
let abi_str = str::from_utf8(bytes).unwrap().to_string();
abi::lookup(abi_str.as_slice()).expect(abi_str)
})
}


@ -243,7 +243,7 @@ impl<'a> CheckLoanCtxt<'a> {
if restr.loan_path != loan2.loan_path { continue; }
let old_pronoun = if new_loan.loan_path == old_loan.loan_path {
"it".to_strbuf()
"it".to_string()
} else {
format!("`{}`",
self.bccx.loan_path_to_str(&*old_loan.loan_path))


@ -609,7 +609,7 @@ impl<'a> BorrowckCtxt<'a> {
}
err_out_of_scope(..) => {
let msg = match opt_loan_path(&err.cmt) {
None => "borrowed value".to_strbuf(),
None => "borrowed value".to_string(),
Some(lp) => {
format_strbuf!("`{}`", self.loan_path_to_str(&*lp))
}
@ -826,7 +826,7 @@ impl Repr for Loan {
self.kind,
self.gen_scope,
self.kill_scope,
self.restrictions.repr(tcx))).to_strbuf()
self.restrictions.repr(tcx))).to_string()
}
}
@ -834,7 +834,7 @@ impl Repr for Restriction {
fn repr(&self, tcx: &ty::ctxt) -> String {
(format!("Restriction({}, {:x})",
self.loan_path.repr(tcx),
self.set.bits as uint)).to_strbuf()
self.set.bits as uint)).to_string()
}
}
@ -842,17 +842,17 @@ impl Repr for LoanPath {
fn repr(&self, tcx: &ty::ctxt) -> String {
match self {
&LpVar(id) => {
(format!("$({})", tcx.map.node_to_str(id))).to_strbuf()
(format!("$({})", tcx.map.node_to_str(id))).to_string()
}
&LpExtend(ref lp, _, LpDeref(_)) => {
(format!("{}.*", lp.repr(tcx))).to_strbuf()
(format!("{}.*", lp.repr(tcx))).to_string()
}
&LpExtend(ref lp, _, LpInterior(ref interior)) => {
(format!("{}.{}",
lp.repr(tcx),
interior.repr(tcx))).to_strbuf()
interior.repr(tcx))).to_string()
}
}
}


@ -43,7 +43,7 @@ fn replace_newline_with_backslash_l(s: String) -> String {
if last_two.as_slice() != ['\\', 'l'] {
s = s.append("\\l");
}
s.to_strbuf()
s.to_string()
} else {
s
}


@ -164,8 +164,8 @@ fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, pats: Vec<@Pat> ) {
match ty::get(ty).sty {
ty::ty_bool => {
match *ctor {
val(const_bool(true)) => Some("true".to_strbuf()),
val(const_bool(false)) => Some("false".to_strbuf()),
val(const_bool(true)) => Some("true".to_string()),
val(const_bool(false)) => Some("false".to_string()),
_ => None
}
}
@ -180,7 +180,7 @@ fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, pats: Vec<@Pat> ) {
Some(v) => {
Some(token::get_ident(v.name).get()
.to_str()
.into_strbuf())
.into_string())
}
None => {
fail!("check_exhaustive: bad variant in ctor")
@ -201,7 +201,7 @@ fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, pats: Vec<@Pat> ) {
};
let msg = format_strbuf!("non-exhaustive patterns{}", match ext {
Some(ref s) => format_strbuf!(": {} not covered", *s),
None => "".to_strbuf()
None => "".to_string()
});
cx.tcx.sess.span_err(sp, msg.as_slice());
}


@ -134,7 +134,7 @@ impl<'a> Visitor<bool> for CheckStaticVisitor<'a> {
if ty::has_dtor(self.tcx, did) {
self.report_error(e.span,
Some("static items are not allowed to have \
destructors".to_strbuf()));
destructors".to_string()));
return;
}
}


@ -314,8 +314,8 @@ pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
Ok(const_float(f)) => Ok(const_float(-f)),
Ok(const_int(i)) => Ok(const_int(-i)),
Ok(const_uint(i)) => Ok(const_uint(-i)),
Ok(const_str(_)) => Err("negate on string".to_strbuf()),
Ok(const_bool(_)) => Err("negate on boolean".to_strbuf()),
Ok(const_str(_)) => Err("negate on string".to_string()),
Ok(const_bool(_)) => Err("negate on boolean".to_string()),
ref err => ((*err).clone())
}
}
@ -324,7 +324,7 @@ pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
Ok(const_int(i)) => Ok(const_int(!i)),
Ok(const_uint(i)) => Ok(const_uint(!i)),
Ok(const_bool(b)) => Ok(const_bool(!b)),
_ => Err("not on float or string".to_strbuf())
_ => Err("not on float or string".to_string())
}
}
ExprBinary(op, a, b) => {
@ -343,7 +343,7 @@ pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
BiNe => fromb(a != b),
BiGe => fromb(a >= b),
BiGt => fromb(a > b),
_ => Err("can't do this op on floats".to_strbuf())
_ => Err("can't do this op on floats".to_string())
}
}
(Ok(const_int(a)), Ok(const_int(b))) => {
@ -352,12 +352,12 @@ pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
BiSub => Ok(const_int(a - b)),
BiMul => Ok(const_int(a * b)),
BiDiv if b == 0 => {
Err("attempted to divide by zero".to_strbuf())
Err("attempted to divide by zero".to_string())
}
BiDiv => Ok(const_int(a / b)),
BiRem if b == 0 => {
Err("attempted remainder with a divisor of \
zero".to_strbuf())
zero".to_string())
}
BiRem => Ok(const_int(a % b)),
BiAnd | BiBitAnd => Ok(const_int(a & b)),
@ -379,12 +379,12 @@ pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
BiSub => Ok(const_uint(a - b)),
BiMul => Ok(const_uint(a * b)),
BiDiv if b == 0 => {
Err("attempted to divide by zero".to_strbuf())
Err("attempted to divide by zero".to_string())
}
BiDiv => Ok(const_uint(a / b)),
BiRem if b == 0 => {
Err("attempted remainder with a divisor of \
zero".to_strbuf())
zero".to_string())
}
BiRem => Ok(const_uint(a % b)),
BiAnd | BiBitAnd => Ok(const_uint(a & b)),
@ -405,14 +405,14 @@ pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
match op {
BiShl => Ok(const_int(a << b)),
BiShr => Ok(const_int(a >> b)),
_ => Err("can't do this op on an int and uint".to_strbuf())
_ => Err("can't do this op on an int and uint".to_string())
}
}
(Ok(const_uint(a)), Ok(const_int(b))) => {
match op {
BiShl => Ok(const_uint(a << b)),
BiShr => Ok(const_uint(a >> b)),
_ => Err("can't do this op on a uint and int".to_strbuf())
_ => Err("can't do this op on a uint and int".to_string())
}
}
(Ok(const_bool(a)), Ok(const_bool(b))) => {
@ -424,10 +424,10 @@ pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
BiBitOr => a | b,
BiEq => a == b,
BiNe => a != b,
_ => return Err("can't do this op on bools".to_strbuf())
_ => return Err("can't do this op on bools".to_string())
}))
}
_ => Err("bad operands for binary".to_strbuf())
_ => Err("bad operands for binary".to_string())
}
}
ExprCast(base, target_ty) => {
@ -452,7 +452,7 @@ pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
const_uint(u) => Ok(const_float(u as f64)),
const_int(i) => Ok(const_float(i as f64)),
const_float(f) => Ok(const_float(f)),
_ => Err("can't cast float to str".to_strbuf()),
_ => Err("can't cast float to str".to_string()),
}
}
ty::ty_uint(_) => {
@ -460,7 +460,7 @@ pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
const_uint(u) => Ok(const_uint(u)),
const_int(i) => Ok(const_uint(i as u64)),
const_float(f) => Ok(const_uint(f as u64)),
_ => Err("can't cast str to uint".to_strbuf()),
_ => Err("can't cast str to uint".to_string()),
}
}
ty::ty_int(_) | ty::ty_bool => {
@ -468,10 +468,10 @@ pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
const_uint(u) => Ok(const_int(u as i64)),
const_int(i) => Ok(const_int(i)),
const_float(f) => Ok(const_int(f as i64)),
_ => Err("can't cast str to int".to_strbuf()),
_ => Err("can't cast str to int".to_string()),
}
}
_ => Err("can't cast this type".to_strbuf())
_ => Err("can't cast this type".to_string())
}
}
}
@ -479,7 +479,7 @@ pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
ExprPath(_) => {
match lookup_const(tcx.ty_ctxt(), e) {
Some(actual_e) => eval_const_expr_partial(tcx.ty_ctxt(), actual_e),
None => Err("non-constant path in constant expr".to_strbuf())
None => Err("non-constant path in constant expr".to_string())
}
}
ExprLit(lit) => Ok(lit_to_const(lit)),
@ -492,7 +492,7 @@ pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
None => Ok(const_int(0i64))
}
}
_ => Err("unsupported constant expr".to_strbuf())
_ => Err("unsupported constant expr".to_string())
}
}


@ -102,14 +102,14 @@ impl<'a, O:DataFlowOperator> pprust::PpAnn for DataFlowContext<'a, O> {
let gens_str = if gens.iter().any(|&u| u != 0) {
format!(" gen: {}", bits_to_str(gens))
} else {
"".to_strbuf()
"".to_string()
};
let kills = self.kills.slice(start, end);
let kills_str = if kills.iter().any(|&u| u != 0) {
format!(" kill: {}", bits_to_str(kills))
} else {
"".to_strbuf()
"".to_string()
};
try!(ps.synth_comment(format_strbuf!("id {}: {}{}{}",


@ -587,8 +587,8 @@ fn check_sized(tcx: &ty::ctxt, ty: ty::t, name: String, sp: Span) {
// Check that any variables in a pattern have types with statically known size.
fn check_pat(cx: &mut Context, pat: &Pat) {
let var_name = match pat.node {
PatWild => Some("_".to_strbuf()),
PatIdent(_, ref path, _) => Some(path_to_str(path).to_strbuf()),
PatWild => Some("_".to_string()),
PatIdent(_, ref path, _) => Some(path_to_str(path).to_string()),
_ => None
};


@ -162,7 +162,7 @@ fn live_node_kind_to_str(lnk: LiveNodeKind, cx: &ty::ctxt) -> String {
VarDefNode(s) => {
format_strbuf!("Var def node [{}]", cm.span_to_str(s))
}
ExitNode => "Exit node".to_strbuf(),
ExitNode => "Exit node".to_string(),
}
}
@ -325,9 +325,9 @@ impl<'a> IrMaps<'a> {
fn variable_name(&self, var: Variable) -> String {
match self.var_kinds.get(var.get()) {
&Local(LocalInfo { ident: nm, .. }) | &Arg(_, nm) => {
token::get_ident(nm).get().to_str().to_strbuf()
token::get_ident(nm).get().to_str().to_string()
},
&ImplicitRet => "<implicit-ret>".to_strbuf()
&ImplicitRet => "<implicit-ret>".to_string()
}
}
@ -760,7 +760,7 @@ impl<'a> Liveness<'a> {
self.write_vars(wr, ln, |idx| self.users.get(idx).writer);
write!(wr, " precedes {}]", self.successors.get(ln.get()).to_str());
}
str::from_utf8(wr.unwrap().as_slice()).unwrap().to_strbuf()
str::from_utf8(wr.unwrap().as_slice()).unwrap().to_string()
}
fn init_empty(&mut self, ln: LiveNode, succ_ln: LiveNode) {


@ -1096,24 +1096,24 @@ impl<'t,TYPER:Typer> MemCategorizationContext<'t,TYPER> {
pub fn cmt_to_str(&self, cmt: &cmt_) -> String {
match cmt.cat {
cat_static_item => {
"static item".to_strbuf()
"static item".to_string()
}
cat_copied_upvar(_) => {
"captured outer variable in a proc".to_strbuf()
"captured outer variable in a proc".to_string()
}
cat_rvalue(..) => {
"non-lvalue".to_strbuf()
"non-lvalue".to_string()
}
cat_local(_) => {
"local variable".to_strbuf()
"local variable".to_string()
}
cat_arg(..) => {
"argument".to_strbuf()
"argument".to_string()
}
cat_deref(ref base, _, pk) => {
match base.cat {
cat_upvar(..) => {
"captured outer variable".to_strbuf()
"captured outer variable".to_string()
}
_ => {
format_strbuf!("dereference of `{}`-pointer",
@ -1122,22 +1122,22 @@ impl<'t,TYPER:Typer> MemCategorizationContext<'t,TYPER> {
}
}
cat_interior(_, InteriorField(NamedField(_))) => {
"field".to_strbuf()
"field".to_string()
}
cat_interior(_, InteriorField(PositionalField(_))) => {
"anonymous field".to_strbuf()
"anonymous field".to_string()
}
cat_interior(_, InteriorElement(VecElement)) => {
"vec content".to_strbuf()
"vec content".to_string()
}
cat_interior(_, InteriorElement(StrElement)) => {
"str content".to_strbuf()
"str content".to_string()
}
cat_interior(_, InteriorElement(OtherElement)) => {
"indexed content".to_strbuf()
"indexed content".to_string()
}
cat_upvar(..) => {
"captured outer variable".to_strbuf()
"captured outer variable".to_string()
}
cat_discr(ref cmt, _) => {
self.cmt_to_str(&**cmt)
@ -1303,10 +1303,10 @@ impl Repr for InteriorKind {
fn repr(&self, _tcx: &ty::ctxt) -> String {
match *self {
InteriorField(NamedField(fld)) => {
token::get_name(fld).get().to_str().to_strbuf()
token::get_name(fld).get().to_str().to_string()
}
InteriorField(PositionalField(i)) => format_strbuf!("\\#{:?}", i),
InteriorElement(_) => "[]".to_strbuf(),
InteriorElement(_) => "[]".to_string(),
}
}
}


@ -374,7 +374,7 @@ enum FieldName {
impl<'a> PrivacyVisitor<'a> {
// used when debugging
fn nodestr(&self, id: ast::NodeId) -> String {
self.tcx.map.node_to_str(id).to_strbuf()
self.tcx.map.node_to_str(id).to_string()
}
// Determines whether the given definition is public from the point of view
@ -1400,7 +1400,7 @@ impl<'a> Visitor<()> for VisiblePrivateTypesVisitor<'a> {
lint::VisiblePrivateTypes,
path_id, p.span,
"private type in exported type \
signature".to_strbuf());
signature".to_string());
}
}
_ => {}


@ -2097,9 +2097,9 @@ impl<'a> Resolver<'a> {
-> String {
match subclass {
SingleImport(_, source) => {
token::get_ident(source).get().to_strbuf()
token::get_ident(source).get().to_string()
}
GlobImport => "*".to_strbuf()
GlobImport => "*".to_string()
}
}
@ -2113,7 +2113,7 @@ impl<'a> Resolver<'a> {
(format!("{}::{}",
self.idents_to_str(idents),
self.import_directive_subclass_to_str(
subclass))).to_strbuf()
subclass))).to_string()
}
}
@ -3314,7 +3314,7 @@ impl<'a> Resolver<'a> {
debug!("(computing exports) YES: export '{}' => {:?}",
name, def_id_of_def(d));
exports2.push(Export2 {
name: name.get().to_strbuf(),
name: name.get().to_string(),
def_id: def_id_of_def(d)
});
}
@ -4591,7 +4591,7 @@ impl<'a> Resolver<'a> {
.add_lint(UnnecessaryQualification,
id,
path.span,
"unnecessary qualification".to_strbuf());
"unnecessary qualification".to_string());
}
_ => ()
}
@ -5051,7 +5051,7 @@ impl<'a> Resolver<'a> {
*values.get(smallest) <= max_distance &&
name != maybes.get(smallest).get() {
Some(maybes.get(smallest).get().to_strbuf())
Some(maybes.get(smallest).get().to_string())
} else {
None
@ -5145,7 +5145,7 @@ impl<'a> Resolver<'a> {
// limit search to 5 to reduce the number
// of stupid suggestions
self.find_best_match_for_name(wrong_name.as_slice(), 5)
.map_or("".to_strbuf(),
.map_or("".to_string(),
|x| format!("`{}`", x))
}
Field =>
@ -5426,7 +5426,7 @@ impl<'a> Resolver<'a> {
.add_lint(UnusedImports,
id,
p.span,
"unused import".to_strbuf());
"unused import".to_string());
}
},
}
@ -5450,7 +5450,7 @@ impl<'a> Resolver<'a> {
self.session.add_lint(UnusedImports,
id,
span,
"unused import".to_strbuf());
"unused import".to_string());
}
let (v_priv, t_priv) = match self.last_private.find(&id) {
@ -5518,7 +5518,7 @@ impl<'a> Resolver<'a> {
collect_mod(&mut idents, module);
if idents.len() == 0 {
return "???".to_strbuf();
return "???".to_string();
}
self.idents_to_str(idents.move_iter().rev()
.collect::<Vec<ast::Ident>>()
@ -5540,18 +5540,18 @@ impl<'a> Resolver<'a> {
for (&name, import_resolution) in import_resolutions.iter() {
let value_repr;
match import_resolution.target_for_namespace(ValueNS) {
None => { value_repr = "".to_owned(); }
None => { value_repr = "".to_string(); }
Some(_) => {
value_repr = " value:?".to_owned();
value_repr = " value:?".to_string();
// FIXME #4954
}
}
let type_repr;
match import_resolution.target_for_namespace(TypeNS) {
None => { type_repr = "".to_owned(); }
None => { type_repr = "".to_string(); }
Some(_) => {
type_repr = " type:?".to_owned();
type_repr = " type:?".to_string();
// FIXME #4954
}
}


@ -112,7 +112,7 @@ impl<'a> TypeFolder for SubstFolder<'a> {
let root_msg = match self.root_ty {
Some(root) => format!(" in the substitution of `{}`",
root.repr(self.tcx)),
None => "".to_strbuf()
None => "".to_string()
};
let m = format!("can't use type parameters from outer \
function{}; try using a local type \
@ -134,7 +134,7 @@ impl<'a> TypeFolder for SubstFolder<'a> {
let root_msg = match self.root_ty {
Some(root) => format!(" in the substitution of `{}`",
root.repr(self.tcx)),
None => "".to_strbuf()
None => "".to_string()
};
let m = format!("missing `Self` type param{}",
root_msg);


@ -65,7 +65,7 @@ pub fn trans_inline_asm<'a>(bcx: &'a Block<'a>, ia: &ast::InlineAsm)
let mut constraints =
String::from_str(constraints.iter()
.map(|s| s.get().to_strbuf())
.map(|s| s.get().to_string())
.collect::<Vec<String>>()
.connect(",")
.as_slice());
@ -136,11 +136,11 @@ pub fn trans_inline_asm<'a>(bcx: &'a Block<'a>, ia: &ast::InlineAsm)
#[cfg(target_arch = "arm")]
#[cfg(target_arch = "mips")]
fn getClobbers() -> String {
"".to_strbuf()
"".to_string()
}
#[cfg(target_arch = "x86")]
#[cfg(target_arch = "x86_64")]
fn getClobbers() -> String {
"~{dirflag},~{fpsr},~{flags}".to_strbuf()
"~{dirflag},~{fpsr},~{flags}".to_string()
}


@ -216,7 +216,7 @@ pub fn get_extern_fn(ccx: &CrateContext,
None => {}
}
let f = decl_fn(ccx.llmod, name, cc, ty, output);
externs.insert(name.to_strbuf(), f);
externs.insert(name.to_string(), f);
f
}
@ -232,7 +232,7 @@ fn get_extern_rust_fn(ccx: &CrateContext, fn_ty: ty::t, name: &str, did: ast::De
set_llvm_fn_attrs(attrs.as_slice(), f)
});
ccx.externs.borrow_mut().insert(name.to_strbuf(), f);
ccx.externs.borrow_mut().insert(name.to_string(), f);
f
}
@ -271,7 +271,7 @@ pub fn get_extern_const(externs: &mut ExternMap, llmod: ModuleRef,
let c = name.with_c_str(|buf| {
llvm::LLVMAddGlobal(llmod, ty.to_ref(), buf)
});
externs.insert(name.to_strbuf(), c);
externs.insert(name.to_string(), c);
return c;
}
}
@ -1106,9 +1106,9 @@ pub fn new_fn_ctxt<'a>(ccx: &'a CrateContext,
debug!("new_fn_ctxt(path={}, id={}, param_substs={})",
if id == -1 {
"".to_owned()
"".to_string()
} else {
ccx.tcx.map.path_to_str(id).to_owned()
ccx.tcx.map.path_to_str(id).to_string()
},
id, param_substs.map(|s| s.repr(ccx.tcx())));
@ -1406,7 +1406,7 @@ pub fn trans_fn(ccx: &CrateContext,
param_substs: Option<&param_substs>,
id: ast::NodeId,
attrs: &[ast::Attribute]) {
let _s = StatRecorder::new(ccx, ccx.tcx.map.path_to_str(id).to_strbuf());
let _s = StatRecorder::new(ccx, ccx.tcx.map.path_to_str(id).to_string());
debug!("trans_fn(param_substs={})", param_substs.map(|s| s.repr(ccx.tcx())));
let _icx = push_ctxt("trans_fn");
let output_type = ty::ty_fn_ret(ty::node_id_to_type(ccx.tcx(), id));
@ -1923,15 +1923,15 @@ fn exported_name(ccx: &CrateContext, id: ast::NodeId,
ty: ty::t, attrs: &[ast::Attribute]) -> String {
match attr::first_attr_value_str_by_name(attrs, "export_name") {
// Use provided name
Some(name) => name.get().to_strbuf(),
Some(name) => name.get().to_string(),
_ => ccx.tcx.map.with_path(id, |mut path| {
if attr::contains_name(attrs, "no_mangle") {
// Don't mangle
path.last().unwrap().to_str().to_strbuf()
path.last().unwrap().to_str().to_string()
} else {
match weak_lang_items::link_name(attrs) {
Some(name) => name.get().to_strbuf(),
Some(name) => name.get().to_string(),
None => {
// Usual name mangling
mangle_exported_name(ccx, path, ty, id)
@ -2328,7 +2328,7 @@ pub fn trans_crate(krate: ast::Crate,
let llmod = ccx.llmod;
let mut reachable: Vec<String> = ccx.reachable.iter().filter_map(|id| {
ccx.item_symbols.borrow().find(id).map(|s| s.to_strbuf())
ccx.item_symbols.borrow().find(id).map(|s| s.to_string())
}).collect();
// Make sure that some other crucial symbols are not eliminated from the
@ -2337,13 +2337,13 @@ pub fn trans_crate(krate: ast::Crate,
// symbol. This symbol is required for use by the libmorestack library that
// we link in, so we must ensure that this symbol is not internalized (if
// defined in the crate).
reachable.push("main".to_strbuf());
reachable.push("rust_stack_exhausted".to_strbuf());
reachable.push("main".to_string());
reachable.push("rust_stack_exhausted".to_string());
// referenced from .eh_frame section on some platforms
reachable.push("rust_eh_personality".to_strbuf());
reachable.push("rust_eh_personality".to_string());
// referenced from rt/rust_try.ll
reachable.push("rust_eh_personality_catch".to_strbuf());
reachable.push("rust_eh_personality_catch".to_string());
let metadata_module = ccx.metadata_llmod;
let formats = ccx.tcx.dependency_formats.borrow().clone();


@ -358,7 +358,7 @@ pub fn trans_fn_ref_with_vtables(
let map_node = session::expect(
ccx.sess(),
tcx.map.find(def_id.node),
|| "local item should be in ast map".to_strbuf());
|| "local item should be in ast map".to_string());
match map_node {
ast_map::NodeForeignItem(_) => {


@ -443,11 +443,11 @@ impl<'a> Block<'a> {
pub fn sess(&self) -> &'a Session { self.fcx.ccx.sess() }
pub fn ident(&self, ident: Ident) -> String {
token::get_ident(ident).get().to_strbuf()
token::get_ident(ident).get().to_string()
}
pub fn node_id_to_str(&self, id: ast::NodeId) -> String {
self.tcx().map.node_to_str(id).to_strbuf()
self.tcx().map.node_to_str(id).to_string()
}
pub fn expr_to_str(&self, e: &ast::Expr) -> String {


@ -736,7 +736,7 @@ pub fn create_function_debug_context(cx: &CrateContext,
let containing_scope = namespace_node.scope;
(linkage_name, containing_scope)
} else {
(function_name.as_slice().to_strbuf(), file_metadata)
(function_name.as_slice().to_string(), file_metadata)
};
// Clang sets this parameter to the opening brace of the function's block, so let's do this too.
@ -1119,7 +1119,7 @@ fn file_metadata(cx: &CrateContext, full_path: &str) -> DIFile {
});
let mut created_files = debug_context(cx).created_files.borrow_mut();
created_files.insert(full_path.to_strbuf(), file_metadata);
created_files.insert(full_path.to_string(), file_metadata);
return file_metadata;
}
@ -1146,28 +1146,28 @@ fn basic_type_metadata(cx: &CrateContext, t: ty::t) -> DIType {
debug!("basic_type_metadata: {:?}", ty::get(t));
let (name, encoding) = match ty::get(t).sty {
ty::ty_nil => ("()".to_owned(), DW_ATE_unsigned),
ty::ty_bot => ("!".to_owned(), DW_ATE_unsigned),
ty::ty_bool => ("bool".to_owned(), DW_ATE_boolean),
ty::ty_char => ("char".to_owned(), DW_ATE_unsigned_char),
ty::ty_nil => ("()".to_string(), DW_ATE_unsigned),
ty::ty_bot => ("!".to_string(), DW_ATE_unsigned),
ty::ty_bool => ("bool".to_string(), DW_ATE_boolean),
ty::ty_char => ("char".to_string(), DW_ATE_unsigned_char),
ty::ty_int(int_ty) => match int_ty {
ast::TyI => ("int".to_owned(), DW_ATE_signed),
ast::TyI8 => ("i8".to_owned(), DW_ATE_signed),
ast::TyI16 => ("i16".to_owned(), DW_ATE_signed),
ast::TyI32 => ("i32".to_owned(), DW_ATE_signed),
ast::TyI64 => ("i64".to_owned(), DW_ATE_signed)
ast::TyI => ("int".to_string(), DW_ATE_signed),
ast::TyI8 => ("i8".to_string(), DW_ATE_signed),
ast::TyI16 => ("i16".to_string(), DW_ATE_signed),
ast::TyI32 => ("i32".to_string(), DW_ATE_signed),
ast::TyI64 => ("i64".to_string(), DW_ATE_signed)
},
ty::ty_uint(uint_ty) => match uint_ty {
ast::TyU => ("uint".to_owned(), DW_ATE_unsigned),
ast::TyU8 => ("u8".to_owned(), DW_ATE_unsigned),
ast::TyU16 => ("u16".to_owned(), DW_ATE_unsigned),
ast::TyU32 => ("u32".to_owned(), DW_ATE_unsigned),
ast::TyU64 => ("u64".to_owned(), DW_ATE_unsigned)
ast::TyU => ("uint".to_string(), DW_ATE_unsigned),
ast::TyU8 => ("u8".to_string(), DW_ATE_unsigned),
ast::TyU16 => ("u16".to_string(), DW_ATE_unsigned),
ast::TyU32 => ("u32".to_string(), DW_ATE_unsigned),
ast::TyU64 => ("u64".to_string(), DW_ATE_unsigned)
},
ty::ty_float(float_ty) => match float_ty {
ast::TyF32 => ("f32".to_owned(), DW_ATE_float),
ast::TyF64 => ("f64".to_owned(), DW_ATE_float),
ast::TyF128 => ("f128".to_owned(), DW_ATE_float)
ast::TyF32 => ("f32".to_string(), DW_ATE_float),
ast::TyF64 => ("f64".to_string(), DW_ATE_float),
ast::TyF128 => ("f128".to_string(), DW_ATE_float)
},
_ => cx.sess().bug("debuginfo::basic_type_metadata - t is invalid type")
};
@ -1255,9 +1255,9 @@ impl StructMemberDescriptionFactory {
self.fields.iter().enumerate().map(|(i, field)| {
let name = if field.ident.name == special_idents::unnamed_field.name {
"".to_strbuf()
"".to_string()
} else {
token::get_ident(field.ident).get().to_strbuf()
token::get_ident(field.ident).get().to_string()
};
let offset = if self.is_simd {
@ -1366,7 +1366,7 @@ impl TupleMemberDescriptionFactory {
-> Vec<MemberDescription> {
self.component_types.iter().map(|&component_type| {
MemberDescription {
name: "".to_strbuf(),
name: "".to_string(),
llvm_type: type_of::type_of(cx, component_type),
type_metadata: type_metadata(cx, component_type, self.span),
offset: ComputedMemberOffset,
@ -1444,7 +1444,7 @@ impl GeneralMemberDescriptionFactory {
self.file_metadata,
codemap::DUMMY_SP);
MemberDescription {
name: "".to_strbuf(),
name: "".to_string(),
llvm_type: variant_llvm_type,
type_metadata: variant_type_metadata,
offset: FixedMemberOffset { bytes: 0 },
@ -1464,7 +1464,7 @@ impl EnumVariantMemberDescriptionFactory {
-> Vec<MemberDescription> {
self.args.iter().enumerate().map(|(i, &(ref name, ty))| {
MemberDescription {
name: name.to_strbuf(),
name: name.to_string(),
llvm_type: type_of::type_of(cx, ty),
type_metadata: match self.discriminant_type_metadata {
Some(metadata) if i == 0 => metadata,
@ -1513,21 +1513,21 @@ fn describe_enum_variant(cx: &CrateContext,
Some(ref names) => {
names.iter()
.map(|ident| {
token::get_ident(*ident).get().to_str().into_strbuf()
token::get_ident(*ident).get().to_str().into_string()
}).collect()
}
None => variant_info.args.iter().map(|_| "".to_strbuf()).collect()
None => variant_info.args.iter().map(|_| "".to_string()).collect()
};
// If this is not a univariant enum, there is also the (unnamed) discriminant field
if discriminant_type_metadata.is_some() {
arg_names.insert(0, "".to_strbuf());
arg_names.insert(0, "".to_string());
}
// Build an array of (field name, field type) pairs to be captured in the factory closure.
let args: Vec<(String, ty::t)> = arg_names.iter()
.zip(struct_def.fields.iter())
.map(|(s, &t)| (s.to_strbuf(), t))
.map(|(s, &t)| (s.to_string(), t))
.collect();
let member_description_factory =
@ -1876,7 +1876,7 @@ fn boxed_type_metadata(cx: &CrateContext,
-> DICompositeType {
let box_type_name = match content_type_name {
Some(content_type_name) => format!("Boxed<{}>", content_type_name),
None => "BoxedType".to_strbuf()
None => "BoxedType".to_string()
};
let box_llvm_type = Type::at_box(cx, content_llvm_type);
@ -1891,31 +1891,31 @@ fn boxed_type_metadata(cx: &CrateContext,
let member_descriptions = [
MemberDescription {
name: "refcnt".to_strbuf(),
name: "refcnt".to_string(),
llvm_type: *member_llvm_types.get(0),
type_metadata: type_metadata(cx, int_type, codemap::DUMMY_SP),
offset: ComputedMemberOffset,
},
MemberDescription {
name: "drop_glue".to_strbuf(),
name: "drop_glue".to_string(),
llvm_type: *member_llvm_types.get(1),
type_metadata: nil_pointer_type_metadata,
offset: ComputedMemberOffset,
},
MemberDescription {
name: "prev".to_strbuf(),
name: "prev".to_string(),
llvm_type: *member_llvm_types.get(2),
type_metadata: nil_pointer_type_metadata,
offset: ComputedMemberOffset,
},
MemberDescription {
name: "next".to_strbuf(),
name: "next".to_string(),
llvm_type: *member_llvm_types.get(3),
type_metadata: nil_pointer_type_metadata,
offset: ComputedMemberOffset,
},
MemberDescription {
name: "val".to_strbuf(),
name: "val".to_string(),
llvm_type: *member_llvm_types.get(4),
type_metadata: content_type_metadata,
offset: ComputedMemberOffset,
@ -2004,19 +2004,19 @@ fn vec_metadata(cx: &CrateContext,
let member_descriptions = [
MemberDescription {
name: "fill".to_strbuf(),
name: "fill".to_string(),
llvm_type: *member_llvm_types.get(0),
type_metadata: int_type_metadata,
offset: ComputedMemberOffset,
},
MemberDescription {
name: "alloc".to_strbuf(),
name: "alloc".to_string(),
llvm_type: *member_llvm_types.get(1),
type_metadata: int_type_metadata,
offset: ComputedMemberOffset,
},
MemberDescription {
name: "elements".to_strbuf(),
name: "elements".to_string(),
llvm_type: *member_llvm_types.get(2),
type_metadata: array_type_metadata,
offset: ComputedMemberOffset,
@ -2061,13 +2061,13 @@ fn vec_slice_metadata(cx: &CrateContext,
let member_descriptions = [
MemberDescription {
name: "data_ptr".to_strbuf(),
name: "data_ptr".to_string(),
llvm_type: *member_llvm_types.get(0),
type_metadata: type_metadata(cx, data_ptr_type, span),
offset: ComputedMemberOffset,
},
MemberDescription {
name: "length".to_strbuf(),
name: "length".to_string(),
llvm_type: *member_llvm_types.get(1),
type_metadata: type_metadata(cx, ty::mk_uint(), span),
offset: ComputedMemberOffset,


@ -89,7 +89,7 @@ impl Dest {
pub fn to_str(&self, ccx: &CrateContext) -> String {
match *self {
SaveIn(v) => format_strbuf!("SaveIn({})", ccx.tn.val_to_str(v)),
Ignore => "Ignore".to_strbuf()
Ignore => "Ignore".to_string()
}
}
}


@ -163,7 +163,7 @@ pub fn register_static(ccx: &CrateContext,
});
lib::llvm::SetLinkage(g1, linkage);
let mut real_name = "_rust_extern_with_linkage_".to_strbuf();
let mut real_name = "_rust_extern_with_linkage_".to_string();
real_name.push_str(ident.get());
let g2 = real_name.with_c_str(|buf| {
llvm::LLVMAddGlobal(ccx.llmod, llty.to_ref(), buf)
@ -465,7 +465,7 @@ pub fn trans_foreign_mod(ccx: &CrateContext, foreign_mod: &ast::ForeignMod) {
}
ccx.item_symbols.borrow_mut().insert(foreign_item.id,
lname.get().to_strbuf());
lname.get().to_string());
}
}


@ -190,7 +190,7 @@ impl<'a, 'b> Reflector<'a, 'b> {
ty::ty_rptr(_, ref mt) => {
match ty::get(mt.ty).sty {
ty::ty_vec(ref mt, None) => {
let (name, extra) = ("slice".to_owned(), Vec::new());
let (name, extra) = ("slice".to_string(), Vec::new());
let extra = extra.append(self.c_mt(mt).as_slice());
self.visit(format!("evec_{}", name).as_slice(),
extra.as_slice())


@ -228,7 +228,7 @@ pub fn trans_uniq_vstore<'a>(bcx: &'a Block<'a>,
content_expr: &ast::Expr)
-> DatumBlock<'a, Expr> {
/*!
* ~[...] and "...".to_owned() allocate boxes in the exchange heap and write
* ~[...] and "...".to_string() allocate boxes in the exchange heap and write
* the array elements into them.
*/
@ -236,7 +236,7 @@ pub fn trans_uniq_vstore<'a>(bcx: &'a Block<'a>,
let fcx = bcx.fcx;
let ccx = fcx.ccx;
// Handle "".to_owned().
// Handle "".to_string().
match content_expr.node {
ast::ExprLit(lit) => {
match lit.node {


@ -3205,7 +3205,7 @@ pub fn field_idx_strict(tcx: &ctxt, name: ast::Name, fields: &[field])
"no field named `{}` found in the list of fields `{:?}`",
token::get_name(name),
fields.iter()
.map(|f| token::get_ident(f.ident).get().to_strbuf())
.map(|f| token::get_ident(f.ident).get().to_string())
.collect::<Vec<String>>()).as_slice());
}
@ -3237,26 +3237,26 @@ pub fn ty_sort_str(cx: &ctxt, t: t) -> String {
}
ty_enum(id, _) => format_strbuf!("enum {}", item_path_str(cx, id)),
ty_box(_) => "@-ptr".to_strbuf(),
ty_uniq(_) => "box".to_strbuf(),
ty_vec(_, _) => "vector".to_strbuf(),
ty_ptr(_) => "*-ptr".to_strbuf(),
ty_rptr(_, _) => "&-ptr".to_strbuf(),
ty_bare_fn(_) => "extern fn".to_strbuf(),
ty_closure(_) => "fn".to_strbuf(),
ty_box(_) => "@-ptr".to_string(),
ty_uniq(_) => "box".to_string(),
ty_vec(_, _) => "vector".to_string(),
ty_ptr(_) => "*-ptr".to_string(),
ty_rptr(_, _) => "&-ptr".to_string(),
ty_bare_fn(_) => "extern fn".to_string(),
ty_closure(_) => "fn".to_string(),
ty_trait(ref inner) => {
format_strbuf!("trait {}", item_path_str(cx, inner.def_id))
}
ty_struct(id, _) => {
format_strbuf!("struct {}", item_path_str(cx, id))
}
ty_tup(_) => "tuple".to_strbuf(),
ty_infer(TyVar(_)) => "inferred type".to_strbuf(),
ty_infer(IntVar(_)) => "integral variable".to_strbuf(),
ty_infer(FloatVar(_)) => "floating-point variable".to_strbuf(),
ty_param(_) => "type parameter".to_strbuf(),
ty_self(_) => "self".to_strbuf(),
ty_err => "type error".to_strbuf(),
ty_tup(_) => "tuple".to_string(),
ty_infer(TyVar(_)) => "inferred type".to_string(),
ty_infer(IntVar(_)) => "integral variable".to_string(),
ty_infer(FloatVar(_)) => "floating-point variable".to_string(),
ty_param(_) => "type parameter".to_string(),
ty_self(_) => "self".to_string(),
ty_err => "type error".to_string(),
}
}
@ -3272,13 +3272,13 @@ pub fn type_err_to_str(cx: &ctxt, err: &type_err) -> String {
fn tstore_to_closure(s: &TraitStore) -> String {
match s {
&UniqTraitStore => "proc".to_strbuf(),
&RegionTraitStore(..) => "closure".to_strbuf()
&UniqTraitStore => "proc".to_string(),
&RegionTraitStore(..) => "closure".to_string()
}
}
match *err {
terr_mismatch => "types differ".to_strbuf(),
terr_mismatch => "types differ".to_string(),
terr_fn_style_mismatch(values) => {
format_strbuf!("expected {} fn but found {} fn",
values.expected.to_str(),
@ -3299,13 +3299,13 @@ pub fn type_err_to_str(cx: &ctxt, err: &type_err) -> String {
tstore_to_closure(&values.expected),
tstore_to_closure(&values.found))
}
terr_mutability => "values differ in mutability".to_strbuf(),
terr_mutability => "values differ in mutability".to_string(),
terr_box_mutability => {
"boxed values differ in mutability".to_strbuf()
"boxed values differ in mutability".to_string()
}
terr_vec_mutability => "vectors differ in mutability".to_strbuf(),
terr_ptr_mutability => "pointers differ in mutability".to_strbuf(),
terr_ref_mutability => "references differ in mutability".to_strbuf(),
terr_vec_mutability => "vectors differ in mutability".to_string(),
terr_ptr_mutability => "pointers differ in mutability".to_string(),
terr_ref_mutability => "references differ in mutability".to_string(),
terr_ty_param_size(values) => {
format_strbuf!("expected a type with {} type params \
but found one with {} type params",
@ -3325,7 +3325,7 @@ pub fn type_err_to_str(cx: &ctxt, err: &type_err) -> String {
values.found)
}
terr_record_mutability => {
"record elements differ in mutability".to_strbuf()
"record elements differ in mutability".to_string()
}
terr_record_fields(values) => {
format_strbuf!("expected a record with field `{}` but found one \
@ -3334,16 +3334,16 @@ pub fn type_err_to_str(cx: &ctxt, err: &type_err) -> String {
token::get_ident(values.found))
}
terr_arg_count => {
"incorrect number of function parameters".to_strbuf()
"incorrect number of function parameters".to_string()
}
terr_regions_does_not_outlive(..) => {
"lifetime mismatch".to_strbuf()
"lifetime mismatch".to_string()
}
terr_regions_not_same(..) => {
"lifetimes are not the same".to_strbuf()
"lifetimes are not the same".to_string()
}
terr_regions_no_overlap(..) => {
"lifetimes do not intersect".to_strbuf()
"lifetimes do not intersect".to_string()
}
terr_regions_insufficiently_polymorphic(br, _) => {
format_strbuf!("expected bound lifetime parameter {}, \
@ -3385,7 +3385,7 @@ pub fn type_err_to_str(cx: &ctxt, err: &type_err) -> String {
}
}
terr_integer_as_char => {
"expected an integral type but found `char`".to_strbuf()
"expected an integral type but found `char`".to_string()
}
terr_int_mismatch(ref values) => {
format_strbuf!("expected `{}` but found `{}`",
@ -3714,7 +3714,7 @@ pub fn substd_enum_variants(cx: &ctxt,
}
pub fn item_path_str(cx: &ctxt, id: ast::DefId) -> String {
with_path(cx, id, |path| ast_map::path_to_str(path)).to_strbuf()
with_path(cx, id, |path| ast_map::path_to_str(path)).to_string()
}
pub enum DtorKind {


@ -166,14 +166,14 @@ pub fn check_pat_variant(pcx: &pat_ctxt, pat: &ast::Pat, path: &ast::Path,
// See [Note-Type-error-reporting] in middle/typeck/infer/mod.rs
fcx.infcx().type_error_message_str_with_expected(pat.span,
|expected, actual| {
expected.map_or("".to_strbuf(), |e| {
expected.map_or("".to_string(), |e| {
format_strbuf!("mismatched types: expected `{}` but \
found {}",
e,
actual)
})},
Some(expected),
"a structure pattern".to_strbuf(),
"a structure pattern".to_string(),
None);
fcx.write_error(pat.id);
kind_name = "[error]";
@ -221,7 +221,7 @@ pub fn check_pat_variant(pcx: &pat_ctxt, pat: &ast::Pat, path: &ast::Path,
// See [Note-Type-error-reporting] in middle/typeck/infer/mod.rs
fcx.infcx().type_error_message_str_with_expected(pat.span,
|expected, actual| {
expected.map_or("".to_strbuf(),
expected.map_or("".to_string(),
|e| {
format_strbuf!("mismatched types: expected `{}` but \
found {}",
@ -230,7 +230,7 @@ pub fn check_pat_variant(pcx: &pat_ctxt, pat: &ast::Pat, path: &ast::Path,
})
},
Some(expected),
"an enum or structure pattern".to_strbuf(),
"an enum or structure pattern".to_string(),
None);
fcx.write_error(pat.id);
kind_name = "[error]";
@ -459,7 +459,7 @@ pub fn check_pat(pcx: &pat_ctxt, pat: &ast::Pat, expected: ty::t) {
debug!("pat_range ending type: {:?}", e_ty);
if !require_same_types(
tcx, Some(fcx.infcx()), false, pat.span, b_ty, e_ty,
|| "mismatched types in range".to_strbuf())
|| "mismatched types in range".to_string())
{
// no-op
} else if !ty::type_is_numeric(b_ty) && !ty::type_is_char(b_ty) {
@ -553,7 +553,7 @@ pub fn check_pat(pcx: &pat_ctxt, pat: &ast::Pat, expected: ty::t) {
// See [Note-Type-error-reporting] in middle/typeck/infer/mod.rs
fcx.infcx().type_error_message_str_with_expected(pat.span,
|expected, actual| {
expected.map_or("".to_strbuf(),
expected.map_or("".to_string(),
|e| {
format_strbuf!("mismatched types: expected \
`{}` but found {}",
@ -561,7 +561,7 @@ pub fn check_pat(pcx: &pat_ctxt, pat: &ast::Pat, expected: ty::t) {
actual)
})},
Some(expected),
"a structure pattern".to_strbuf(),
"a structure pattern".to_string(),
None);
match tcx.def_map.borrow().find(&pat.id) {
Some(&ast::DefStruct(supplied_def_id)) => {
@ -620,7 +620,7 @@ pub fn check_pat(pcx: &pat_ctxt, pat: &ast::Pat, expected: ty::t) {
fcx.infcx().type_error_message_str_with_expected(pat.span,
|expected,
actual| {
expected.map_or("".to_strbuf(), |e| {
expected.map_or("".to_string(), |e| {
format_strbuf!("mismatched types: expected `{}` \
but found {}",
e,
@ -628,7 +628,7 @@ pub fn check_pat(pcx: &pat_ctxt, pat: &ast::Pat, expected: ty::t) {
}
)},
Some(expected),
"tuple".to_strbuf(),
"tuple".to_string(),
Some(&type_error));
fcx.write_error(pat.id);
}
@ -659,7 +659,7 @@ pub fn check_pat(pcx: &pat_ctxt, pat: &ast::Pat, expected: ty::t) {
fcx.infcx().type_error_message_str_with_expected(
pat.span,
|expected, actual| {
expected.map_or("".to_strbuf(),
expected.map_or("".to_string(),
|e| {
format_strbuf!("mismatched types: expected `{}` but \
found {}",
@ -668,7 +668,7 @@ pub fn check_pat(pcx: &pat_ctxt, pat: &ast::Pat, expected: ty::t) {
})
},
Some(expected),
"a vector pattern".to_strbuf(),
"a vector pattern".to_string(),
None);
fcx.write_error(pat.id);
};
@ -682,7 +682,7 @@ pub fn check_pat(pcx: &pat_ctxt, pat: &ast::Pat, expected: ty::t) {
fcx.type_error_message(pat.span,
|_| {
"unique vector patterns are no \
longer supported".to_strbuf()
longer supported".to_string()
},
expected,
None);
@ -750,7 +750,7 @@ pub fn check_pointer_pat(pcx: &pat_ctxt,
fcx.infcx().type_error_message_str_with_expected(
span,
|expected, actual| {
expected.map_or("".to_strbuf(), |e| {
expected.map_or("".to_string(), |e| {
format_strbuf!("mismatched types: expected `{}` but \
found {}",
e,


@ -2707,7 +2707,7 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
let error = if vst == ast::ExprVstoreSlice {
"`&\"string\"` has been removed; use `\"string\"` instead"
} else {
"`~\"string\"` has been removed; use `\"string\".to_owned()` instead"
"`~\"string\"` has been removed; use `\"string\".to_string()` instead"
};
tcx.sess.span_err(expr.span, error);
ty::mk_err()
@ -3355,7 +3355,7 @@ fn check_expr_with_unifier(fcx: &FnCtxt,
ppaux::ty_to_str(tcx, fcx.expr_ty(expr)),
match expected {
Some(t) => ppaux::ty_to_str(tcx, t),
_ => "empty".to_strbuf()
_ => "empty".to_string()
});
unifier();
@ -3511,7 +3511,7 @@ pub fn check_block_with_expected(fcx: &FnCtxt,
.add_lint(UnreachableCode,
s_id,
s.span,
"unreachable statement".to_strbuf());
"unreachable statement".to_string());
warned = true;
}
if ty::type_is_bot(s_ty) {
@ -3538,7 +3538,7 @@ pub fn check_block_with_expected(fcx: &FnCtxt,
.add_lint(UnreachableCode,
e.id,
e.span,
"unreachable expression".to_strbuf());
"unreachable expression".to_string());
}
check_expr_with_opt_hint(fcx, e, expected);
let ety = fcx.expr_ty(e);
@ -4077,7 +4077,7 @@ pub fn structurally_resolved_type(fcx: &FnCtxt, sp: Span, tp: ty::t) -> ty::t {
_ => {
fcx.type_error_message(sp, |_actual| {
"the type of this value must be known in this \
context".to_strbuf()
context".to_string()
}, tp, None);
demand::suptype(fcx, sp, ty::mk_err(), tp);
tp


@ -808,7 +808,7 @@ impl<'a> Rebuilder<'a> {
// choice of lifetime name deterministic and thus easier to test.
let mut names = Vec::new();
for rn in region_names.iter() {
let lt_name = token::get_name(*rn).get().to_owned();
let lt_name = token::get_name(*rn).get().to_string();
names.push(lt_name);
}
names.sort();
@ -1227,12 +1227,12 @@ impl<'a> ErrorReportingHelpers for InferCtxt<'a> {
fn report_inference_failure(&self,
var_origin: RegionVariableOrigin) {
let var_description = match var_origin {
infer::MiscVariable(_) => "".to_strbuf(),
infer::PatternRegion(_) => " for pattern".to_strbuf(),
infer::AddrOfRegion(_) => " for borrow expression".to_strbuf(),
infer::AddrOfSlice(_) => " for slice expression".to_strbuf(),
infer::Autoref(_) => " for autoref".to_strbuf(),
infer::Coercion(_) => " for automatic coercion".to_strbuf(),
infer::MiscVariable(_) => "".to_string(),
infer::PatternRegion(_) => " for pattern".to_string(),
infer::AddrOfRegion(_) => " for borrow expression".to_string(),
infer::AddrOfSlice(_) => " for slice expression".to_string(),
infer::Autoref(_) => " for autoref".to_string(),
infer::Coercion(_) => " for automatic coercion".to_string(),
infer::LateBoundRegion(_, br) => {
format!(" for {}in function call",
bound_region_to_str(self.tcx, "lifetime parameter ", true, br))
@ -1475,7 +1475,7 @@ impl LifeGiver {
fn with_taken(taken: &[ast::Lifetime]) -> LifeGiver {
let mut taken_ = HashSet::new();
for lt in taken.iter() {
let lt_name = token::get_name(lt.name).get().to_strbuf();
let lt_name = token::get_name(lt.name).get().to_string();
taken_.insert(lt_name);
}
LifeGiver {


@ -36,7 +36,7 @@ impl<'f> Glb<'f> {
impl<'f> Combine for Glb<'f> {
fn infcx<'a>(&'a self) -> &'a InferCtxt<'a> { self.get_ref().infcx }
fn tag(&self) -> String { "glb".to_strbuf() }
fn tag(&self) -> String { "glb".to_string() }
fn a_is_expected(&self) -> bool { self.get_ref().a_is_expected }
fn trace(&self) -> TypeTrace { self.get_ref().trace.clone() }


@ -35,7 +35,7 @@ impl<'f> Lub<'f> {
impl<'f> Combine for Lub<'f> {
fn infcx<'a>(&'a self) -> &'a InferCtxt<'a> { self.get_ref().infcx }
fn tag(&self) -> String { "lub".to_strbuf() }
fn tag(&self) -> String { "lub".to_string() }
fn a_is_expected(&self) -> bool { self.get_ref().a_is_expected }
fn trace(&self) -> TypeTrace { self.get_ref().trace.clone() }


@ -247,10 +247,10 @@ pub enum fixup_err {
pub fn fixup_err_to_str(f: fixup_err) -> String {
match f {
unresolved_int_ty(_) => "unconstrained integral type".to_strbuf(),
unresolved_ty(_) => "unconstrained type".to_strbuf(),
cyclic_ty(_) => "cyclic type of infinite size".to_strbuf(),
unresolved_region(_) => "unconstrained region".to_strbuf(),
unresolved_int_ty(_) => "unconstrained integral type".to_string(),
unresolved_ty(_) => "unconstrained type".to_string(),
cyclic_ty(_) => "cyclic type of infinite size".to_string(),
unresolved_region(_) => "unconstrained region".to_string(),
region_var_bound_by_region_var(r1, r2) => {
format_strbuf!("region var {:?} bound by another region var {:?}; \
this is a bug in rustc",
@ -729,7 +729,7 @@ impl<'a> InferCtxt<'a> {
err: Option<&ty::type_err>) {
debug!("hi! expected_ty = {:?}, actual_ty = {}", expected_ty, actual_ty);
let error_str = err.map_or("".to_strbuf(), |t_err| {
let error_str = err.map_or("".to_string(), |t_err| {
format!(" ({})", ty::type_err_to_str(self.tcx, t_err))
});
let resolved_expected = expected_ty.map(|e_ty| {


@ -35,7 +35,7 @@ impl<'f> Sub<'f> {
impl<'f> Combine for Sub<'f> {
fn infcx<'a>(&'a self) -> &'a InferCtxt<'a> { self.get_ref().infcx }
fn tag(&self) -> String { "sub".to_strbuf() }
fn tag(&self) -> String { "sub".to_string() }
fn a_is_expected(&self) -> bool { self.get_ref().a_is_expected }
fn trace(&self) -> TypeTrace { self.get_ref().trace.clone() }


@ -46,11 +46,11 @@ static EMPTY_SOURCE_STR: &str = "/* Hello, world! */";
fn setup_env(test_name: &str, source_string: &str) -> Env {
let messages = @DVec();
let matches = getopts(vec!("-Z".to_owned(), "verbose".to_owned()), optgroups()).get();
let matches = getopts(vec!("-Z".to_string(), "verbose".to_string()), optgroups()).get();
let diag = diagnostic::collect(messages);
let sessopts = build_session_options("rustc".to_owned(), &matches, diag);
let sessopts = build_session_options("rustc".to_string(), &matches, diag);
let sess = build_session(sessopts, None, diag);
let cfg = build_configuration(sess, "whatever".to_owned(), str_input("".to_owned()));
let cfg = build_configuration(sess, "whatever".to_string(), str_input("".to_string()));
let dm = HashMap();
let amap = HashMap();
let freevars = HashMap();

View File

@ -56,7 +56,7 @@ impl<V:InferStr> InferStr for Bound<V> {
fn inf_str(&self, cx: &InferCtxt) -> String {
match *self {
Some(ref v) => v.inf_str(cx),
None => "none".to_strbuf()
None => "none".to_string()
}
}
}
@ -82,13 +82,13 @@ impl<V:Vid + ToStr,T:InferStr> InferStr for VarValue<V, T> {
impl InferStr for IntVarValue {
fn inf_str(&self, _cx: &InferCtxt) -> String {
self.to_str().to_strbuf()
self.to_str().to_string()
}
}
impl InferStr for ast::FloatTy {
fn inf_str(&self, _cx: &InferCtxt) -> String {
self.to_str().to_strbuf()
self.to_str().to_string()
}
}

View File

@ -99,7 +99,7 @@ pub fn explain_region_and_span(cx: &ctxt, region: ty::Region)
format_strbuf!("the anonymous lifetime \\#{} defined on",
idx + 1)
}
BrFresh(_) => "an anonymous lifetime defined on".to_strbuf(),
BrFresh(_) => "an anonymous lifetime defined on".to_string(),
_ => {
format_strbuf!("the lifetime {} as defined on",
bound_region_ptr_to_str(cx, fr.bound_region))
@ -123,9 +123,9 @@ pub fn explain_region_and_span(cx: &ctxt, region: ty::Region)
}
}
ReStatic => { ("the static lifetime".to_strbuf(), None) }
ReStatic => { ("the static lifetime".to_string(), None) }
ReEmpty => { ("the empty lifetime".to_strbuf(), None) }
ReEmpty => { ("the empty lifetime".to_string(), None) }
// I believe these cases should not occur (except when debugging,
// perhaps)
@ -164,8 +164,8 @@ pub fn bound_region_to_str(cx: &ctxt,
token::get_name(name),
space_str)
}
BrAnon(_) => prefix.to_strbuf(),
BrFresh(_) => prefix.to_strbuf(),
BrAnon(_) => prefix.to_string(),
BrFresh(_) => prefix.to_string(),
}
}
@ -188,16 +188,16 @@ pub fn region_to_str(cx: &ctxt, prefix: &str, space: bool, region: Region) -> St
// to fit that into a short string. Hence the recommendation to use
// `explain_region()` or `note_and_explain_region()`.
match region {
ty::ReScope(_) => prefix.to_strbuf(),
ty::ReScope(_) => prefix.to_string(),
ty::ReEarlyBound(_, _, name) => {
token::get_name(name).get().to_strbuf()
token::get_name(name).get().to_string()
}
ty::ReLateBound(_, br) => bound_region_to_str(cx, prefix, space, br),
ty::ReFree(ref fr) => bound_region_to_str(cx, prefix, space, fr.bound_region),
ty::ReInfer(ReSkolemized(_, br)) => {
bound_region_to_str(cx, prefix, space, br)
}
ty::ReInfer(ReVar(_)) => prefix.to_strbuf(),
ty::ReInfer(ReVar(_)) => prefix.to_string(),
ty::ReStatic => format_strbuf!("{}'static{}", prefix, space_str),
ty::ReEmpty => format_strbuf!("{}'<empty>{}", prefix, space_str),
}
@ -205,8 +205,8 @@ pub fn region_to_str(cx: &ctxt, prefix: &str, space: bool, region: Region) -> St
pub fn mutability_to_str(m: ast::Mutability) -> String {
match m {
ast::MutMutable => "mut ".to_strbuf(),
ast::MutImmutable => "".to_strbuf(),
ast::MutMutable => "mut ".to_string(),
ast::MutImmutable => "".to_string(),
}
}
@ -216,7 +216,7 @@ pub fn mt_to_str(cx: &ctxt, m: &mt) -> String {
pub fn trait_store_to_str(cx: &ctxt, s: ty::TraitStore) -> String {
match s {
ty::UniqTraitStore => "Box ".to_strbuf(),
ty::UniqTraitStore => "Box ".to_string(),
ty::RegionTraitStore(r, m) => {
format_strbuf!("{}{}",
region_ptr_to_str(cx, r),
@ -238,12 +238,12 @@ pub fn fn_sig_to_str(cx: &ctxt, typ: &ty::FnSig) -> String {
}
pub fn trait_ref_to_str(cx: &ctxt, trait_ref: &ty::TraitRef) -> String {
trait_ref.user_string(cx).to_strbuf()
trait_ref.user_string(cx).to_string()
}
pub fn ty_to_str(cx: &ctxt, typ: t) -> String {
fn fn_input_to_str(cx: &ctxt, input: ty::t) -> String {
ty_to_str(cx, input).to_strbuf()
ty_to_str(cx, input).to_string()
}
fn bare_fn_to_str(cx: &ctxt,
fn_style: ast::FnStyle,
@ -351,15 +351,15 @@ pub fn ty_to_str(cx: &ctxt, typ: t) -> String {
// pretty print the structural type representation:
return match ty::get(typ).sty {
ty_nil => "()".to_strbuf(),
ty_bot => "!".to_strbuf(),
ty_bool => "bool".to_strbuf(),
ty_char => "char".to_strbuf(),
ty_nil => "()".to_string(),
ty_bot => "!".to_string(),
ty_bool => "bool".to_string(),
ty_char => "char".to_string(),
ty_int(t) => ast_util::int_ty_to_str(t, None,
ast_util::AutoSuffix).to_strbuf(),
ast_util::AutoSuffix).to_string(),
ty_uint(t) => ast_util::uint_ty_to_str(t, None,
ast_util::AutoSuffix).to_strbuf(),
ty_float(t) => ast_util::float_ty_to_str(t).to_strbuf(),
ast_util::AutoSuffix).to_string(),
ty_float(t) => ast_util::float_ty_to_str(t).to_string(),
ty_box(typ) => format_strbuf!("@{}", ty_to_str(cx, typ)),
ty_uniq(typ) => format_strbuf!("~{}", ty_to_str(cx, typ)),
ty_ptr(ref tm) => format_strbuf!("*{}", mt_to_str(cx, tm)),
@ -378,11 +378,11 @@ pub fn ty_to_str(cx: &ctxt, typ: t) -> String {
ty_bare_fn(ref f) => {
bare_fn_to_str(cx, f.fn_style, f.abi, None, &f.sig)
}
ty_infer(infer_ty) => infer_ty.to_str().to_strbuf(),
ty_err => "[type error]".to_strbuf(),
ty_infer(infer_ty) => infer_ty.to_str().to_string(),
ty_err => "[type error]".to_string(),
ty_param(param_ty {idx: id, def_id: did}) => {
let ident = match cx.ty_param_defs.borrow().find(&did.node) {
Some(def) => token::get_ident(def.ident).get().to_strbuf(),
Some(def) => token::get_ident(def.ident).get().to_string(),
// This can only happen when a type mismatch error happens and
// the actual type has more type parameters than the expected one.
None => format_strbuf!("<generic \\#{}>", id)
@ -393,7 +393,7 @@ pub fn ty_to_str(cx: &ctxt, typ: t) -> String {
format_strbuf!("{}:{:?}", ident, did)
}
}
ty_self(..) => "Self".to_strbuf(),
ty_self(..) => "Self".to_string(),
ty_enum(did, ref substs) | ty_struct(did, ref substs) => {
let base = ty::item_path_str(cx, did);
parameterized(cx,
@ -417,7 +417,7 @@ pub fn ty_to_str(cx: &ctxt, typ: t) -> String {
bound_sep,
bound_str)
}
ty_str => "str".to_strbuf(),
ty_str => "str".to_string(),
ty_vec(ref mt, sz) => {
match sz {
Some(n) => {
@ -483,9 +483,9 @@ pub fn parameterized(cx: &ctxt,
}
pub fn ty_to_short_str(cx: &ctxt, typ: t) -> String {
let mut s = typ.repr(cx).to_strbuf();
let mut s = typ.repr(cx).to_string();
if s.len() >= 32u {
s = s.as_slice().slice(0u, 32u).to_strbuf();
s = s.as_slice().slice(0u, 32u).to_string();
}
return s;
}
@ -493,7 +493,7 @@ pub fn ty_to_short_str(cx: &ctxt, typ: t) -> String {
impl<T:Repr> Repr for Option<T> {
fn repr(&self, tcx: &ctxt) -> String {
match self {
&None => "None".to_strbuf(),
&None => "None".to_string(),
&Some(ref t) => t.repr(tcx),
}
}
@ -510,7 +510,7 @@ impl<T:Repr,U:Repr> Repr for Result<T,U> {
impl Repr for () {
fn repr(&self, _tcx: &ctxt) -> String {
"()".to_strbuf()
"()".to_string()
}
}
@ -596,7 +596,7 @@ impl Repr for ty::ItemSubsts {
impl Repr for ty::RegionSubsts {
fn repr(&self, tcx: &ctxt) -> String {
match *self {
ty::ErasedRegions => "erased".to_strbuf(),
ty::ErasedRegions => "erased".to_string(),
ty::NonerasedRegions(ref regions) => regions.repr(tcx)
}
}
@ -607,17 +607,17 @@ impl Repr for ty::ParamBounds {
let mut res = Vec::new();
for b in self.builtin_bounds.iter() {
res.push(match b {
ty::BoundStatic => "'static".to_strbuf(),
ty::BoundSend => "Send".to_strbuf(),
ty::BoundSized => "Sized".to_strbuf(),
ty::BoundCopy => "Copy".to_strbuf(),
ty::BoundShare => "Share".to_strbuf(),
ty::BoundStatic => "'static".to_string(),
ty::BoundSend => "Send".to_string(),
ty::BoundSized => "Sized".to_string(),
ty::BoundCopy => "Copy".to_string(),
ty::BoundShare => "Share".to_string(),
});
}
for t in self.trait_bounds.iter() {
res.push(t.repr(tcx));
}
res.connect("+").to_strbuf()
res.connect("+").to_string()
}
}
@ -694,7 +694,7 @@ impl Repr for ty::Region {
}
ty::ReStatic => {
"ReStatic".to_strbuf()
"ReStatic".to_string()
}
ty::ReInfer(ReVar(ref vid)) => {
@ -708,7 +708,7 @@ impl Repr for ty::Region {
}
ty::ReEmpty => {
"ReEmpty".to_strbuf()
"ReEmpty".to_string()
}
}
}
@ -771,7 +771,7 @@ impl Repr for ty::ItemVariances {
impl Repr for ty::Variance {
fn repr(&self, _: &ctxt) -> String {
self.to_str().to_strbuf()
self.to_str().to_string()
}
}
@ -790,13 +790,13 @@ impl Repr for ty::Method {
impl Repr for ast::Name {
fn repr(&self, _tcx: &ctxt) -> String {
token::get_name(*self).get().to_strbuf()
token::get_name(*self).get().to_string()
}
}
impl Repr for ast::Ident {
fn repr(&self, _tcx: &ctxt) -> String {
token::get_ident(*self).get().to_strbuf()
token::get_ident(*self).get().to_string()
}
}
@ -893,11 +893,11 @@ impl Repr for ty::BuiltinBound {
impl UserString for ty::BuiltinBound {
fn user_string(&self, _tcx: &ctxt) -> String {
match *self {
ty::BoundStatic => "'static".to_strbuf(),
ty::BoundSend => "Send".to_strbuf(),
ty::BoundSized => "Sized".to_strbuf(),
ty::BoundCopy => "Copy".to_strbuf(),
ty::BoundShare => "Share".to_strbuf(),
ty::BoundStatic => "'static".to_string(),
ty::BoundSend => "Send".to_string(),
ty::BoundSized => "Sized".to_string(),
ty::BoundCopy => "Copy".to_string(),
ty::BoundShare => "Share".to_string(),
}
}
}
@ -910,7 +910,7 @@ impl Repr for ty::BuiltinBounds {
impl Repr for Span {
fn repr(&self, tcx: &ctxt) -> String {
tcx.sess.codemap().span_to_str(*self).to_strbuf()
tcx.sess.codemap().span_to_str(*self).to_string()
}
}
@ -927,7 +927,7 @@ impl UserString for ty::BuiltinBounds {
.map(|bb| bb.user_string(tcx))
.collect::<Vec<String>>()
.connect("+")
.to_strbuf()
.to_string()
}
}
@ -962,19 +962,19 @@ impl UserString for ty::t {
impl UserString for ast::Ident {
fn user_string(&self, _tcx: &ctxt) -> String {
token::get_name(self.name).get().to_strbuf()
token::get_name(self.name).get().to_string()
}
}
impl Repr for abi::Abi {
fn repr(&self, _tcx: &ctxt) -> String {
self.to_str().to_strbuf()
self.to_str().to_string()
}
}
impl UserString for abi::Abi {
fn user_string(&self, _tcx: &ctxt) -> String {
self.to_str().to_strbuf()
self.to_str().to_string()
}
}

View File

@ -259,7 +259,7 @@ pub trait Digest {
/// Convenience function that retrieves the result of a digest as a
/// String in hexadecimal format.
fn result_str(&mut self) -> String {
self.result_bytes().as_slice().to_hex().to_strbuf()
self.result_bytes().as_slice().to_hex().to_string()
}
}
@ -578,21 +578,21 @@ mod tests {
// Examples from wikipedia
let wikipedia_tests = vec!(
Test {
input: "".to_strbuf(),
input: "".to_string(),
output_str: "e3b0c44298fc1c149afb\
f4c8996fb92427ae41e4649b934ca495991b7852b855".to_strbuf()
f4c8996fb92427ae41e4649b934ca495991b7852b855".to_string()
},
Test {
input: "The quick brown fox jumps over the lazy \
dog".to_strbuf(),
dog".to_string(),
output_str: "d7a8fbb307d7809469ca\
9abcb0082e4f8d5651e46d3cdb762d02d0bf37c9e592".to_strbuf()
9abcb0082e4f8d5651e46d3cdb762d02d0bf37c9e592".to_string()
},
Test {
input: "The quick brown fox jumps over the lazy \
dog.".to_strbuf(),
dog.".to_string(),
output_str: "ef537f25c895bfa78252\
6529a9b63d97aa631564d5d789c2b765448c8635fb6c".to_strbuf()
6529a9b63d97aa631564d5d789c2b765448c8635fb6c".to_string()
});
let tests = wikipedia_tests;

View File

@ -87,7 +87,7 @@ fn try_inline_def(cx: &core::DocContext,
let fqn = csearch::get_item_path(tcx, did);
ret.push(clean::Item {
source: clean::Span::empty(),
name: Some(fqn.last().unwrap().to_str().to_strbuf()),
name: Some(fqn.last().unwrap().to_str().to_string()),
attrs: load_attrs(tcx, did),
inner: inner,
visibility: Some(ast::Public),
@ -123,7 +123,7 @@ pub fn record_extern_fqn(cx: &core::DocContext,
match cx.maybe_typed {
core::Typed(ref tcx) => {
let fqn = csearch::get_item_path(tcx, did);
let fqn = fqn.move_iter().map(|i| i.to_str().to_strbuf()).collect();
let fqn = fqn.move_iter().map(|i| i.to_str().to_string()).collect();
cx.external_paths.borrow_mut().get_mut_ref().insert(did, (fqn, kind));
}
core::NotTyped(..) => {}

View File

@ -101,7 +101,7 @@ impl<'a> Clean<Crate> for visit_ast::RustdocVisitor<'a> {
let id = link::find_crate_id(self.attrs.as_slice(),
t_outputs.out_filestem.as_slice());
Crate {
name: id.name.to_strbuf(),
name: id.name.to_string(),
module: Some(self.module.clean()),
externs: externs,
}
@ -117,7 +117,7 @@ pub struct ExternalCrate {
impl Clean<ExternalCrate> for cstore::crate_metadata {
fn clean(&self) -> ExternalCrate {
ExternalCrate {
name: self.name.to_strbuf(),
name: self.name.to_string(),
attrs: decoder::get_crate_attributes(self.data()).clean()
.move_iter()
.collect(),
@ -240,7 +240,7 @@ impl Clean<Item> for doctree::Module {
let name = if self.name.is_some() {
self.name.unwrap().clean()
} else {
"".to_strbuf()
"".to_string()
};
let mut foreigns = Vec::new();
for subforeigns in self.foreigns.clean().move_iter() {
@ -305,12 +305,12 @@ pub enum Attribute {
impl Clean<Attribute> for ast::MetaItem {
fn clean(&self) -> Attribute {
match self.node {
ast::MetaWord(ref s) => Word(s.get().to_strbuf()),
ast::MetaWord(ref s) => Word(s.get().to_string()),
ast::MetaList(ref s, ref l) => {
List(s.get().to_strbuf(), l.clean().move_iter().collect())
List(s.get().to_string(), l.clean().move_iter().collect())
}
ast::MetaNameValue(ref s, ref v) => {
NameValue(s.get().to_strbuf(), lit_to_str(v))
NameValue(s.get().to_string(), lit_to_str(v))
}
}
}
@ -404,7 +404,7 @@ fn external_path(name: &str) -> Path {
Path {
global: false,
segments: vec![PathSegment {
name: name.to_strbuf(),
name: name.to_string(),
lifetimes: Vec::new(),
types: Vec::new(),
}]
@ -430,7 +430,7 @@ impl Clean<TyParamBound> for ty::BuiltinBound {
(tcx.lang_items.share_trait().unwrap(), external_path("Share")),
};
let fqn = csearch::get_item_path(tcx, did);
let fqn = fqn.move_iter().map(|i| i.to_str().to_strbuf()).collect();
let fqn = fqn.move_iter().map(|i| i.to_str().to_string()).collect();
cx.external_paths.borrow_mut().get_mut_ref().insert(did,
(fqn, TypeTrait));
TraitBound(ResolvedPath {
@ -449,7 +449,7 @@ impl Clean<TyParamBound> for ty::TraitRef {
core::NotTyped(_) => return RegionBound,
};
let fqn = csearch::get_item_path(tcx, self.def_id);
let fqn = fqn.move_iter().map(|i| i.to_str().to_strbuf())
let fqn = fqn.move_iter().map(|i| i.to_str().to_string())
.collect::<Vec<String>>();
let path = external_path(fqn.last().unwrap().as_slice());
cx.external_paths.borrow_mut().get_mut_ref().insert(self.def_id,
@ -503,22 +503,22 @@ impl Lifetime {
impl Clean<Lifetime> for ast::Lifetime {
fn clean(&self) -> Lifetime {
Lifetime(token::get_name(self.name).get().to_strbuf())
Lifetime(token::get_name(self.name).get().to_string())
}
}
impl Clean<Lifetime> for ty::RegionParameterDef {
fn clean(&self) -> Lifetime {
Lifetime(token::get_name(self.name).get().to_strbuf())
Lifetime(token::get_name(self.name).get().to_string())
}
}
impl Clean<Option<Lifetime>> for ty::Region {
fn clean(&self) -> Option<Lifetime> {
match *self {
ty::ReStatic => Some(Lifetime("static".to_strbuf())),
ty::ReStatic => Some(Lifetime("static".to_string())),
ty::ReLateBound(_, ty::BrNamed(_, name)) =>
Some(Lifetime(token::get_name(name).get().to_strbuf())),
Some(Lifetime(token::get_name(name).get().to_string())),
ty::ReLateBound(..) |
ty::ReEarlyBound(..) |
@ -749,7 +749,7 @@ impl<'a> Clean<FnDecl> for (ast::DefId, &'a ty::FnSig) {
Argument {
type_: t.clean(),
id: 0,
name: names.next().unwrap_or("".to_strbuf()),
name: names.next().unwrap_or("".to_string()),
}
}).collect(),
},
@ -1013,7 +1013,7 @@ impl Clean<Type> for ty::t {
lifetimes: Vec::new(), type_params: Vec::new()
},
decl: (ast_util::local_def(0), &fty.sig).clean(),
abi: fty.abi.to_str().to_strbuf(),
abi: fty.abi.to_str(),
}),
ty::ty_closure(ref fty) => {
let decl = box ClosureDecl {
@ -1038,7 +1038,7 @@ impl Clean<Type> for ty::t {
};
let fqn = csearch::get_item_path(tcx, did);
let fqn: Vec<String> = fqn.move_iter().map(|i| {
i.to_str().to_strbuf()
i.to_str().to_string()
}).collect();
let mut path = external_path(fqn.last()
.unwrap()
@ -1302,7 +1302,7 @@ pub struct Span {
impl Span {
fn empty() -> Span {
Span {
filename: "".to_strbuf(),
filename: "".to_string(),
loline: 0, locol: 0,
hiline: 0, hicol: 0,
}
@ -1317,7 +1317,7 @@ impl Clean<Span> for syntax::codemap::Span {
let lo = cm.lookup_char_pos(self.lo);
let hi = cm.lookup_char_pos(self.hi);
Span {
filename: filename.to_strbuf(),
filename: filename.to_string(),
loline: lo.line,
locol: lo.col.to_uint(),
hiline: hi.line,
@ -1376,13 +1376,13 @@ fn path_to_str(p: &ast::Path) -> String {
impl Clean<String> for ast::Ident {
fn clean(&self) -> String {
token::get_ident(*self).get().to_strbuf()
token::get_ident(*self).get().to_string()
}
}
impl Clean<String> for ast::Name {
fn clean(&self) -> String {
token::get_name(*self).get().to_strbuf()
token::get_name(*self).get().to_string()
}
}
@ -1425,7 +1425,7 @@ impl Clean<BareFunctionDecl> for ast::BareFnTy {
type_params: Vec::new(),
},
decl: self.decl.clean(),
abi: self.abi.to_str().to_strbuf(),
abi: self.abi.to_str().to_string(),
}
}
}
@ -1582,7 +1582,7 @@ impl Clean<ViewItemInner> for ast::ViewItem_ {
&ast::ViewItemExternCrate(ref i, ref p, ref id) => {
let string = match *p {
None => None,
Some((ref x, _)) => Some(x.get().to_strbuf()),
Some((ref x, _)) => Some(x.get().to_string()),
};
ExternCrate(i.clean(), string, *id)
}
@ -1659,7 +1659,7 @@ impl Clean<Item> for ast::ForeignItem {
ForeignStaticItem(Static {
type_: ty.clean(),
mutability: if mutbl {Mutable} else {Immutable},
expr: "".to_strbuf(),
expr: "".to_string(),
})
}
};
@ -1686,8 +1686,8 @@ impl ToSource for syntax::codemap::Span {
let ctxt = super::ctxtkey.get().unwrap();
let cm = ctxt.sess().codemap().clone();
let sn = match cm.span_to_snippet(*self) {
Some(x) => x.to_strbuf(),
None => "".to_strbuf()
Some(x) => x.to_string(),
None => "".to_string()
};
debug!("got snippet {}", sn);
sn
@ -1696,16 +1696,16 @@ impl ToSource for syntax::codemap::Span {
fn lit_to_str(lit: &ast::Lit) -> String {
match lit.node {
ast::LitStr(ref st, _) => st.get().to_strbuf(),
ast::LitStr(ref st, _) => st.get().to_string(),
ast::LitBinary(ref data) => format_strbuf!("{:?}", data.as_slice()),
ast::LitChar(c) => format_strbuf!("'{}'", c),
ast::LitInt(i, _t) => i.to_str().to_strbuf(),
ast::LitUint(u, _t) => u.to_str().to_strbuf(),
ast::LitIntUnsuffixed(i) => i.to_str().to_strbuf(),
ast::LitFloat(ref f, _t) => f.get().to_strbuf(),
ast::LitFloatUnsuffixed(ref f) => f.get().to_strbuf(),
ast::LitBool(b) => b.to_str().to_strbuf(),
ast::LitNil => "".to_strbuf(),
ast::LitInt(i, _t) => i.to_str().to_string(),
ast::LitUint(u, _t) => u.to_str().to_string(),
ast::LitIntUnsuffixed(i) => i.to_str().to_string(),
ast::LitFloat(ref f, _t) => f.get().to_string(),
ast::LitFloatUnsuffixed(ref f) => f.get().to_string(),
ast::LitBool(b) => b.to_str().to_string(),
ast::LitNil => "".to_string(),
}
}
@ -1714,19 +1714,19 @@ fn name_from_pat(p: &ast::Pat) -> String {
debug!("Trying to get a name from pattern: {:?}", p);
match p.node {
PatWild => "_".to_strbuf(),
PatWildMulti => "..".to_strbuf(),
PatWild => "_".to_string(),
PatWildMulti => "..".to_string(),
PatIdent(_, ref p, _) => path_to_str(p),
PatEnum(ref p, _) => path_to_str(p),
PatStruct(..) => fail!("tried to get argument name from pat_struct, \
which is not allowed in function arguments"),
PatTup(..) => "(tuple arg NYI)".to_strbuf(),
PatTup(..) => "(tuple arg NYI)".to_string(),
PatUniq(p) => name_from_pat(p),
PatRegion(p) => name_from_pat(p),
PatLit(..) => {
warn!("tried to get argument name from PatLit, \
which is silly in function arguments");
"()".to_strbuf()
"()".to_string()
},
PatRange(..) => fail!("tried to get argument name from PatRange, \
which is not allowed in function arguments"),

View File

@ -151,12 +151,12 @@ fn resolved_path(w: &mut fmt::Formatter, did: ast::DefId, p: &clean::Path,
path(w, p, print_all,
|cache, loc| {
if ast_util::is_local(did) || cache.paths.contains_key(&did) {
Some(("../".repeat(loc.len())).to_strbuf())
Some(("../".repeat(loc.len())).to_string())
} else {
match *cache.extern_locations.get(&did.krate) {
render::Remote(ref s) => Some(s.to_strbuf()),
render::Remote(ref s) => Some(s.to_string()),
render::Local => {
Some(("../".repeat(loc.len())).to_strbuf())
Some(("../".repeat(loc.len())).to_string())
}
render::Unknown => None,
}
@ -198,7 +198,7 @@ fn path(w: &mut fmt::Formatter, path: &clean::Path, print_all: bool,
let cache = cache_key.get().unwrap();
let abs_root = root(&**cache, loc.as_slice());
let rel_root = match path.segments.get(0).name.as_slice() {
"self" => Some("./".to_owned()),
"self" => Some("./".to_string()),
_ => None,
};
@ -323,7 +323,7 @@ impl fmt::Show for clean::Type {
{arrow, select, yes{ -&gt; {ret}} other{}}",
style = FnStyleSpace(decl.fn_style),
lifetimes = if decl.lifetimes.len() == 0 {
"".to_strbuf()
"".to_string()
} else {
format!("&lt;{:#}&gt;", decl.lifetimes)
},
@ -364,17 +364,17 @@ impl fmt::Show for clean::Type {
{arrow, select, yes{ -&gt; {ret}} other{}}",
style = FnStyleSpace(decl.fn_style),
lifetimes = if decl.lifetimes.len() == 0 {
"".to_strbuf()
"".to_string()
} else {
format_strbuf!("&lt;{:#}&gt;", decl.lifetimes)
},
args = decl.decl.inputs,
bounds = if decl.bounds.len() == 0 {
"".to_strbuf()
"".to_string()
} else {
let mut m = decl.bounds
.iter()
.map(|s| s.to_str().to_strbuf());
.map(|s| s.to_str().to_string());
format_strbuf!(
": {}",
m.collect::<Vec<String>>().connect(" + "))
@ -386,8 +386,8 @@ impl fmt::Show for clean::Type {
write!(f, "{}{}fn{}{}",
FnStyleSpace(decl.fn_style),
match decl.abi.as_slice() {
"" => " extern ".to_strbuf(),
"\"Rust\"" => "".to_strbuf(),
"" => " extern ".to_string(),
"\"Rust\"" => "".to_string(),
s => format_strbuf!(" extern {} ", s)
},
decl.generics,
@ -423,7 +423,7 @@ impl fmt::Show for clean::Type {
clean::BorrowedRef{ lifetime: ref l, mutability, type_: ref ty} => {
let lt = match *l {
Some(ref l) => format!("{} ", *l),
_ => "".to_strbuf(),
_ => "".to_string(),
};
write!(f, "&amp;{}{}{}",
lt,

View File

@ -29,15 +29,15 @@ pub fn highlight(src: &str, class: Option<&str>) -> String {
debug!("highlighting: ================\n{}\n==============", src);
let sess = parse::new_parse_sess();
let fm = parse::string_to_filemap(&sess,
src.to_strbuf(),
"<stdin>".to_strbuf());
src.to_string(),
"<stdin>".to_string());
let mut out = io::MemWriter::new();
doit(&sess,
lexer::new_string_reader(&sess.span_diagnostic, fm),
class,
&mut out).unwrap();
str::from_utf8_lossy(out.unwrap().as_slice()).to_strbuf()
str::from_utf8_lossy(out.unwrap().as_slice()).to_string()
}
/// Exhausts the `lexer` writing the output into `out`.

View File

@ -197,7 +197,7 @@ pub fn render(w: &mut fmt::Formatter, s: &str, print_toc: bool) -> fmt::Result {
// Extract the text provided
let s = if text.is_null() {
"".to_owned()
"".to_string()
} else {
unsafe {
str::raw::from_buf_len((*text).data, (*text).size as uint)
@ -207,14 +207,14 @@ pub fn render(w: &mut fmt::Formatter, s: &str, print_toc: bool) -> fmt::Result {
// Transform the contents of the header into a hyphenated string
let id = (s.as_slice().words().map(|s| {
match s.to_ascii_opt() {
Some(s) => s.to_lower().into_str().to_strbuf(),
None => s.to_strbuf()
Some(s) => s.to_lower().into_str().to_string(),
None => s.to_string()
}
}).collect::<Vec<String>>().connect("-")).to_strbuf();
}).collect::<Vec<String>>().connect("-")).to_string();
// This is a terrible hack working around how hoedown gives us rendered
// html for text rather than the raw text.
let id = id.replace("<code>", "").replace("</code>", "").to_strbuf();
let id = id.replace("<code>", "").replace("</code>", "").to_string();
let opaque = opaque as *mut hoedown_html_renderer_state;
let opaque = unsafe { &mut *((*opaque).opaque as *mut MyOpaque) };
@ -229,7 +229,7 @@ pub fn render(w: &mut fmt::Formatter, s: &str, print_toc: bool) -> fmt::Result {
let sec = match opaque.toc_builder {
Some(ref mut builder) => {
builder.push(level as u32, s.to_strbuf(), id.clone())
builder.push(level as u32, s.to_string(), id.clone())
}
None => {""}
};
@ -302,7 +302,7 @@ pub fn find_testable_code(doc: &str, tests: &mut ::test::Collector) {
stripped_filtered_line(l).unwrap_or(l)
});
let text = lines.collect::<Vec<&str>>().connect("\n");
tests.add_test(text.to_strbuf(), should_fail, no_run, ignore);
tests.add_test(text.to_string(), should_fail, no_run, ignore);
})
}
}

View File

@ -218,8 +218,8 @@ pub fn run(mut krate: clean::Crate, dst: Path) -> io::IoResult<()> {
root_path: String::new(),
sidebar: HashMap::new(),
layout: layout::Layout {
logo: "".to_strbuf(),
favicon: "".to_strbuf(),
logo: "".to_string(),
favicon: "".to_string(),
krate: krate.name.clone(),
},
include_sources: true,
@ -232,11 +232,11 @@ pub fn run(mut krate: clean::Crate, dst: Path) -> io::IoResult<()> {
match *attr {
clean::NameValue(ref x, ref s)
if "html_favicon_url" == x.as_slice() => {
cx.layout.favicon = s.to_strbuf();
cx.layout.favicon = s.to_string();
}
clean::NameValue(ref x, ref s)
if "html_logo_url" == x.as_slice() => {
cx.layout.logo = s.to_strbuf();
cx.layout.logo = s.to_string();
}
clean::Word(ref x)
if "html_no_source" == x.as_slice() => {
@ -307,8 +307,8 @@ pub fn run(mut krate: clean::Crate, dst: Path) -> io::IoResult<()> {
ty: shortty(item),
name: item.name.clone().unwrap(),
path: fqp.slice_to(fqp.len() - 1).connect("::")
.to_strbuf(),
desc: shorter(item.doc_value()).to_strbuf(),
.to_string(),
desc: shorter(item.doc_value()).to_string(),
parent: Some(did),
});
},
@ -338,14 +338,14 @@ pub fn run(mut krate: clean::Crate, dst: Path) -> io::IoResult<()> {
let mut w = MemWriter::new();
try!(write!(&mut w, r#"searchIndex['{}'] = \{"items":["#, krate.name));
let mut lastpath = "".to_strbuf();
let mut lastpath = "".to_string();
for (i, item) in cache.search_index.iter().enumerate() {
// Omit the path if it is same to that of the prior item.
let path;
if lastpath.as_slice() == item.path.as_slice() {
path = "";
} else {
lastpath = item.path.to_strbuf();
lastpath = item.path.to_string();
path = item.path.as_slice();
};
@ -378,7 +378,7 @@ pub fn run(mut krate: clean::Crate, dst: Path) -> io::IoResult<()> {
try!(write!(&mut w, r"]\};"));
str::from_utf8(w.unwrap().as_slice()).unwrap().to_owned()
str::from_utf8(w.unwrap().as_slice()).unwrap().to_string()
};
// Write out the shared files. Note that these are shared among all rustdoc
@ -420,7 +420,7 @@ pub fn run(mut krate: clean::Crate, dst: Path) -> io::IoResult<()> {
format!("{}['{}']", key, krate).as_slice()) {
continue
}
ret.push(line.to_strbuf());
ret.push(line.to_string());
}
}
return Ok(ret);
@ -504,14 +504,14 @@ pub fn run(mut krate: clean::Crate, dst: Path) -> io::IoResult<()> {
cx: &mut cx,
};
// skip all invalid spans
folder.seen.insert("".to_strbuf());
folder.seen.insert("".to_string());
krate = folder.fold_crate(krate);
}
for &(n, ref e) in krate.externs.iter() {
cache.extern_locations.insert(n, extern_location(e, &cx.dst));
let did = ast::DefId { krate: n, node: ast::CRATE_NODE_ID };
cache.paths.insert(did, (vec![e.name.to_strbuf()], item_type::Module));
cache.paths.insert(did, (vec![e.name.to_string()], item_type::Module));
}
// And finally render the whole crate's documentation
@ -570,7 +570,7 @@ fn extern_location(e: &clean::ExternalCrate, dst: &Path) -> ExternalLocation {
clean::NameValue(ref x, ref s)
if "html_root_url" == x.as_slice() => {
if s.as_slice().ends_with("/") {
return Remote(s.to_strbuf());
return Remote(s.to_string());
}
return Remote(format_strbuf!("{}/", s));
}
@ -766,9 +766,9 @@ impl DocFolder for Cache {
(parent, Some(path)) if !self.privmod => {
self.search_index.push(IndexItem {
ty: shortty(&item),
name: s.to_strbuf(),
path: path.connect("::").to_strbuf(),
desc: shorter(item.doc_value()).to_strbuf(),
name: s.to_string(),
path: path.connect("::").to_string(),
desc: shorter(item.doc_value()).to_string(),
parent: parent,
});
}
@ -789,7 +789,7 @@ impl DocFolder for Cache {
let pushed = if item.name.is_some() {
let n = item.name.get_ref();
if n.len() > 0 {
self.stack.push(n.to_strbuf());
self.stack.push(n.to_string());
true
} else { false }
} else { false };
@ -1001,7 +1001,7 @@ impl Context {
// modules are special because they add a namespace. We also need to
// recurse into the items of the module as well.
clean::ModuleItem(..) => {
let name = item.name.get_ref().to_strbuf();
let name = item.name.get_ref().to_string();
let mut item = Some(item);
self.recurse(name, |this| {
let item = item.take_unwrap();
@ -1057,7 +1057,7 @@ impl<'a> Item<'a> {
if ast_util::is_local(self.item.def_id) {
let mut path = Vec::new();
clean_srcpath(self.item.source.filename.as_bytes(), |component| {
path.push(component.to_owned());
path.push(component.to_string());
});
let href = if self.item.source.loline == self.item.source.hiline {
format!("{}", self.item.source.loline)
@ -1087,7 +1087,7 @@ impl<'a> Item<'a> {
let cache = cache_key.get().unwrap();
let path = cache.external_paths.get(&self.item.def_id);
let root = match *cache.extern_locations.get(&self.item.def_id.krate) {
Remote(ref s) => s.to_strbuf(),
Remote(ref s) => s.to_string(),
Local => format!("{}/..", self.cx.root_path),
Unknown => return None,
};
@ -1908,9 +1908,9 @@ fn build_sidebar(m: &clean::Module) -> HashMap<String, Vec<String>> {
let short = shortty(item).to_static_str();
let myname = match item.name {
None => continue,
Some(ref s) => s.to_strbuf(),
Some(ref s) => s.to_string(),
};
let v = map.find_or_insert_with(short.to_strbuf(), |_| Vec::new());
let v = map.find_or_insert_with(short.to_string(), |_| Vec::new());
v.push(myname);
}

View File

@ -202,8 +202,8 @@ mod test {
macro_rules! push {
($level: expr, $name: expr) => {
assert_eq!(builder.push($level,
$name.to_strbuf(),
"".to_strbuf()),
$name.to_string(),
"".to_string()),
$name);
}
}
@ -242,9 +242,9 @@ mod test {
$(
TocEntry {
level: $level,
name: $name.to_strbuf(),
sec_number: $name.to_strbuf(),
id: "".to_strbuf(),
name: $name.to_string(),
sec_number: $name.to_string(),
id: "".to_string(),
children: toc!($($sub),*)
}
),*

View File

@ -86,7 +86,7 @@ type Output = (clean::Crate, Vec<plugins::PluginJson> );
pub fn main() {
std::os::set_exit_status(main_args(std::os::args().iter()
.map(|x| x.to_strbuf())
.map(|x| x.to_string())
.collect::<Vec<_>>()
.as_slice()));
}
@ -166,7 +166,7 @@ pub fn main_args(args: &[String]) -> int {
let test_args = matches.opt_strs("test-args");
let test_args: Vec<String> = test_args.iter()
.flat_map(|s| s.as_slice().words())
.map(|s| s.to_strbuf())
.map(|s| s.to_string())
.collect();
let should_test = matches.opt_present("test");
@ -184,7 +184,7 @@ pub fn main_args(args: &[String]) -> int {
(true, false) => {
return test::run(input,
cfgs.move_iter()
.map(|x| x.to_strbuf())
.map(|x| x.to_string())
.collect(),
libs,
test_args)
@ -194,7 +194,7 @@ pub fn main_args(args: &[String]) -> int {
(false, false) => {}
}
if matches.opt_strs("passes").as_slice() == &["list".to_strbuf()] {
if matches.opt_strs("passes").as_slice() == &["list".to_string()] {
println!("Available passes for running rustdoc:");
for &(name, _, description) in PASSES.iter() {
println!("{:>20s} - {}", name, description);
@ -268,7 +268,7 @@ fn rust_input(cratefile: &str, matches: &getopts::Matches) -> Output {
let mut passes = matches.opt_strs("passes");
let mut plugins = matches.opt_strs("plugins")
.move_iter()
.map(|x| x.to_strbuf())
.map(|x| x.to_string())
.collect::<Vec<_>>();
// First, parse the crate and extract all relevant information.
@ -282,7 +282,7 @@ fn rust_input(cratefile: &str, matches: &getopts::Matches) -> Output {
let (krate, analysis) = std::task::try(proc() {
let cr = cr;
core::run_core(libs.move_iter().map(|x| x.clone()).collect(),
cfgs.move_iter().map(|x| x.to_strbuf()).collect(),
cfgs.move_iter().map(|x| x.to_string()).collect(),
&cr)
}).map_err(|boxed_any|format!("{:?}", boxed_any)).unwrap();
info!("finished with rustc");
@ -301,13 +301,13 @@ fn rust_input(cratefile: &str, matches: &getopts::Matches) -> Output {
clean::NameValue(ref x, ref value)
if "passes" == x.as_slice() => {
for pass in value.as_slice().words() {
passes.push(pass.to_strbuf());
passes.push(pass.to_string());
}
}
clean::NameValue(ref x, ref value)
if "plugins" == x.as_slice() => {
for p in value.as_slice().words() {
plugins.push(p.to_strbuf());
plugins.push(p.to_string());
}
}
_ => {}
@ -318,13 +318,13 @@ fn rust_input(cratefile: &str, matches: &getopts::Matches) -> Output {
}
if default_passes {
for name in DEFAULT_PASSES.iter().rev() {
passes.unshift(name.to_strbuf());
passes.unshift(name.to_string());
}
}
// Load all plugins/passes into a PluginManager
let path = matches.opt_str("plugin-path")
.unwrap_or("/tmp/rustdoc/plugins".to_strbuf());
.unwrap_or("/tmp/rustdoc/plugins".to_string());
let mut pm = plugins::PluginManager::new(Path::new(path));
for pass in passes.iter() {
let plugin = match PASSES.iter()
@ -359,11 +359,11 @@ fn json_input(input: &str) -> Result<Output, String> {
}
};
match json::from_reader(&mut input) {
Err(s) => Err(s.to_str().to_strbuf()),
Err(s) => Err(s.to_str().to_string()),
Ok(json::Object(obj)) => {
let mut obj = obj;
// Make sure the schema is what we expect
match obj.pop(&"schema".to_strbuf()) {
match obj.pop(&"schema".to_string()) {
Some(json::String(version)) => {
if version.as_slice() != SCHEMA_VERSION {
return Err(format_strbuf!(
@ -371,15 +371,15 @@ fn json_input(input: &str) -> Result<Output, String> {
SCHEMA_VERSION))
}
}
Some(..) => return Err("malformed json".to_strbuf()),
None => return Err("expected a schema version".to_strbuf()),
Some(..) => return Err("malformed json".to_string()),
None => return Err("expected a schema version".to_string()),
}
let krate = match obj.pop(&"crate".to_strbuf()) {
let krate = match obj.pop(&"crate".to_string()) {
Some(json) => {
let mut d = json::Decoder::new(json);
Decodable::decode(&mut d).unwrap()
}
None => return Err("malformed json".to_strbuf()),
None => return Err("malformed json".to_string()),
};
// FIXME: this should read from the "plugins" field, but currently
// Json doesn't implement decodable...
@ -388,7 +388,7 @@ fn json_input(input: &str) -> Result<Output, String> {
}
Ok(..) => {
Err("malformed json input: expected an object at the \
top".to_strbuf())
top".to_string())
}
}
}
@ -403,14 +403,14 @@ fn json_output(krate: clean::Crate, res: Vec<plugins::PluginJson> ,
// "plugins": { output of plugins ... }
// }
let mut json = box collections::TreeMap::new();
json.insert("schema".to_strbuf(),
json::String(SCHEMA_VERSION.to_strbuf()));
json.insert("schema".to_string(),
json::String(SCHEMA_VERSION.to_string()));
let plugins_json = box res.move_iter()
.filter_map(|opt| {
match opt {
None => None,
Some((string, json)) => {
Some((string.to_strbuf(), json))
Some((string.to_string(), json))
}
}
}).collect();
@ -423,15 +423,15 @@ fn json_output(krate: clean::Crate, res: Vec<plugins::PluginJson> ,
let mut encoder = json::Encoder::new(&mut w as &mut io::Writer);
krate.encode(&mut encoder).unwrap();
}
str::from_utf8(w.unwrap().as_slice()).unwrap().to_strbuf()
str::from_utf8(w.unwrap().as_slice()).unwrap().to_string()
};
let crate_json = match json::from_str(crate_json_str.as_slice()) {
Ok(j) => j,
Err(e) => fail!("Rust generated JSON is invalid: {:?}", e)
};
json.insert("crate".to_strbuf(), crate_json);
json.insert("plugins".to_strbuf(), json::Object(plugins_json));
json.insert("crate".to_string(), crate_json);
json.insert("plugins".to_string(), json::Object(plugins_json));
let mut file = try!(File::create(&dst));
try!(json::Object(json).to_writer(&mut file));

Some files were not shown because too many files have changed in this diff
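
The hunks above all apply the same mechanical rename: call sites that produced an owned string via the old `.to_strbuf()` (and, in a few spots, `.to_owned()`) now spell the conversion `.to_string()`. Below is a minimal standalone sketch of the pattern, assuming the post-rename `&str`-to-`String` API; the function name is illustrative and not taken from this commit.

    // Converting a borrowed &str into an owned String.
    // Before this commit these call sites were written `s.to_strbuf()`;
    // after it they are written `s.to_string()`, with the same result.
    fn to_owned_string(s: &str) -> String {
        s.to_string()
    }

    fn main() {
        let owned = to_owned_string("hello");
        assert_eq!(owned, "hello");
        println!("{}", owned);
    }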