Mirror of https://github.com/rust-lang/rust.git (synced 2024-10-30 22:12:15 +00:00)
Change some instances of .connect() to .join()
parent 29c0c956bf
commit 93ddee6cee
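
This is a mechanical rename: the slice/str `connect` method was deprecated in favour of `join`, which takes the same separator and produces the same result. A minimal sketch of the new spelling (illustrative values only, not part of this diff):

    fn main() {
        // Joining string slices with a separator (previously `parts.connect("-")`).
        let parts = vec!["a", "b", "c"];
        assert_eq!(parts.join("-"), "a-b-c");

        // Joining nested collections with a separator element
        // (previously `nested.connect(&0)`).
        let nested = vec![vec![1], vec![2, 3]];
        assert_eq!(nested.join(&0), vec![1, 0, 2, 3]);
    }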
@@ -344,7 +344,7 @@ fn run_debuginfo_gdb_test(config: &Config, props: &TestProps, testfile: &Path) {
 check_lines,
 breakpoint_lines
 } = parse_debugger_commands(testfile, "gdb");
-let mut cmds = commands.connect("\n");
+let mut cmds = commands.join("\n");

 // compile test file (it should have 'compile-flags:-g' in the header)
 let compiler_run_result = compile_test(config, props, testfile);
@@ -799,7 +799,7 @@ fn cleanup_debug_info_options(options: &Option<String>) -> Option<String> {
 split_maybe_args(options).into_iter()
 .filter(|x| !options_to_remove.contains(x))
 .collect::<Vec<String>>()
-.connect(" ");
+.join(" ");
 Some(new_options)
 }

@@ -1412,7 +1412,7 @@ fn make_cmdline(libpath: &str, prog: &str, args: &[String]) -> String {

 // Linux and mac don't require adjusting the library search path
 if cfg!(unix) {
-format!("{} {}", prog, args.connect(" "))
+format!("{} {}", prog, args.join(" "))
 } else {
 // Build the LD_LIBRARY_PATH variable as it would be seen on the command line
 // for diagnostic purposes
@@ -1420,7 +1420,7 @@ fn make_cmdline(libpath: &str, prog: &str, args: &[String]) -> String {
 format!("{}=\"{}\"", util::lib_path_env_var(), util::make_new_path(path))
 }

-format!("{} {} {}", lib_path_cmd_prefix(libpath), prog, args.connect(" "))
+format!("{} {} {}", lib_path_cmd_prefix(libpath), prog, args.join(" "))
 }
 }

@@ -606,22 +606,22 @@ fn test_concat() {
 assert_eq!(d, [1, 2, 3]);

 let v: &[&[_]] = &[&[1], &[2, 3]];
-assert_eq!(v.connect(&0), [1, 0, 2, 3]);
+assert_eq!(v.join(&0), [1, 0, 2, 3]);
 let v: &[&[_]] = &[&[1], &[2], &[3]];
-assert_eq!(v.connect(&0), [1, 0, 2, 0, 3]);
+assert_eq!(v.join(&0), [1, 0, 2, 0, 3]);
 }

 #[test]
-fn test_connect() {
+fn test_join() {
 let v: [Vec<i32>; 0] = [];
-assert_eq!(v.connect(&0), []);
-assert_eq!([vec![1], vec![2, 3]].connect(&0), [1, 0, 2, 3]);
-assert_eq!([vec![1], vec![2], vec![3]].connect(&0), [1, 0, 2, 0, 3]);
+assert_eq!(v.join(&0), []);
+assert_eq!([vec![1], vec![2, 3]].join(&0), [1, 0, 2, 3]);
+assert_eq!([vec![1], vec![2], vec![3]].join(&0), [1, 0, 2, 0, 3]);

 let v: [&[_]; 2] = [&[1], &[2, 3]];
-assert_eq!(v.connect(&0), [1, 0, 2, 3]);
+assert_eq!(v.join(&0), [1, 0, 2, 3]);
 let v: [&[_]; 3] = [&[1], &[2], &[3]];
-assert_eq!(v.connect(&0), [1, 0, 2, 0, 3]);
+assert_eq!(v.join(&0), [1, 0, 2, 0, 3]);
 }

 #[test]
@@ -1339,11 +1339,11 @@ mod bench {
 }

 #[bench]
-fn connect(b: &mut Bencher) {
+fn join(b: &mut Bencher) {
 let xss: Vec<Vec<i32>> =
 (0..100).map(|i| (0..i).collect()).collect();
 b.iter(|| {
-xss.connect(&0)
+xss.join(&0)
 });
 }

@@ -158,32 +158,32 @@ fn test_concat_for_different_lengths() {
 test_concat!("abc", ["", "a", "bc"]);
 }

-macro_rules! test_connect {
+macro_rules! test_join {
 ($expected: expr, $string: expr, $delim: expr) => {
 {
-let s = $string.connect($delim);
+let s = $string.join($delim);
 assert_eq!($expected, s);
 }
 }
 }

 #[test]
-fn test_connect_for_different_types() {
-test_connect!("a-b", ["a", "b"], "-");
+fn test_join_for_different_types() {
+test_join!("a-b", ["a", "b"], "-");
 let hyphen = "-".to_string();
-test_connect!("a-b", [s("a"), s("b")], &*hyphen);
-test_connect!("a-b", vec!["a", "b"], &*hyphen);
-test_connect!("a-b", &*vec!["a", "b"], "-");
-test_connect!("a-b", vec![s("a"), s("b")], "-");
+test_join!("a-b", [s("a"), s("b")], &*hyphen);
+test_join!("a-b", vec!["a", "b"], &*hyphen);
+test_join!("a-b", &*vec!["a", "b"], "-");
+test_join!("a-b", vec![s("a"), s("b")], "-");
 }

 #[test]
-fn test_connect_for_different_lengths() {
+fn test_join_for_different_lengths() {
 let empty: &[&str] = &[];
-test_connect!("", empty, "-");
-test_connect!("a", ["a"], "-");
-test_connect!("a-b", ["a", "b"], "-");
-test_connect!("-a-bc", ["", "a", "bc"], "-");
+test_join!("", empty, "-");
+test_join!("a", ["a"], "-");
+test_join!("a-b", ["a", "b"], "-");
+test_join!("-a-bc", ["", "a", "bc"], "-");
 }

 #[test]
@@ -2081,12 +2081,12 @@ mod bench {
 }

 #[bench]
-fn bench_connect(b: &mut Bencher) {
+fn bench_join(b: &mut Bencher) {
 let s = "ศไทย中华Việt Nam; Mary had a little lamb, Little lamb";
 let sep = "→";
 let v = vec![s, s, s, s, s, s, s, s, s, s];
 b.iter(|| {
-assert_eq!(v.connect(sep).len(), s.len() * 10 + sep.len() * 9);
+assert_eq!(v.join(sep).len(), s.len() * 10 + sep.len() * 9);
 })
 }

@@ -784,13 +784,13 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String {

 // FIXME: #5516 should be graphemes not codepoints
 // wrapped description
-row.push_str(&desc_rows.connect(&desc_sep[..]));
+row.push_str(&desc_rows.join(&desc_sep[..]));

 row
 });

 format!("{}\n\nOptions:\n{}\n", brief,
-rows.collect::<Vec<String>>().connect("\n"))
+rows.collect::<Vec<String>>().join("\n"))
 }

 fn format_option(opt: &OptGroup) -> String {
@@ -836,7 +836,7 @@ pub fn short_usage(program_name: &str, opts: &[OptGroup]) -> String {
 line.push_str(&opts.iter()
 .map(format_option)
 .collect::<Vec<String>>()
-.connect(" ")[..]);
+.join(" ")[..]);
 line
 }

@@ -2028,7 +2028,7 @@ fn encode_dylib_dependency_formats(rbml_w: &mut Encoder, ecx: &EncodeContext) {
 cstore::RequireStatic => "s",
 })).to_string())
 }).collect::<Vec<String>>();
-rbml_w.wr_tagged_str(tag, &s.connect(","));
+rbml_w.wr_tagged_str(tag, &s.join(","));
 }
 None => {
 rbml_w.wr_tagged_str(tag, "");
@@ -1098,7 +1098,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {

 pub fn tys_to_string(&self, ts: &[Ty<'tcx>]) -> String {
 let tstrs: Vec<String> = ts.iter().map(|t| self.ty_to_string(*t)).collect();
-format!("({})", tstrs.connect(", "))
+format!("({})", tstrs.join(", "))
 }

 pub fn trait_ref_to_string(&self, t: &ty::TraitRef<'tcx>) -> String {
@@ -843,7 +843,7 @@ fn write_out_deps(sess: &Session,
 let mut file = try!(fs::File::create(&deps_filename));
 for path in &out_filenames {
 try!(write!(&mut file,
-"{}: {}\n\n", path.display(), files.connect(" ")));
+"{}: {}\n\n", path.display(), files.join(" ")));
 }
 Ok(())
 })();
@@ -606,7 +606,7 @@ Available lint options:
 for (name, to) in lints {
 let name = name.to_lowercase().replace("_", "-");
 let desc = to.into_iter().map(|x| x.as_str().replace("_", "-"))
-.collect::<Vec<String>>().connect(", ");
+.collect::<Vec<String>>().join(", ");
 println!(" {} {}",
 padded(&name[..]), desc);
 }
@@ -378,7 +378,7 @@ impl UserIdentifiedItem {
 fn reconstructed_input(&self) -> String {
 match *self {
 ItemViaNode(node_id) => node_id.to_string(),
-ItemViaPath(ref parts) => parts.connect("::"),
+ItemViaPath(ref parts) => parts.join("::"),
 }
 }

@@ -178,7 +178,7 @@ impl<'a, 'tcx> Env<'a, 'tcx> {
 return match search_mod(self, &self.infcx.tcx.map.krate().module, 0, names) {
 Some(id) => id,
 None => {
-panic!("no item found: `{}`", names.connect("::"));
+panic!("no item found: `{}`", names.join("::"));
 }
 };

@@ -923,7 +923,7 @@ impl NonSnakeCase {
 }
 words.push(buf);
 }
-words.connect("_")
+words.join("_")
 }

 fn check_snake_case(&self, cx: &Context, sort: &str, name: &str, span: Option<Span>) {
@@ -936,7 +936,7 @@ fn compile_guard<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
 bcx.to_str(),
 guard_expr,
 m,
-vals.iter().map(|v| bcx.val_to_string(*v)).collect::<Vec<_>>().connect(", "));
+vals.iter().map(|v| bcx.val_to_string(*v)).collect::<Vec<_>>().join(", "));
 let _indenter = indenter();

 let mut bcx = insert_lllocals(bcx, &data.bindings_map, None);
@@ -981,7 +981,7 @@ fn compile_submatch<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
 debug!("compile_submatch(bcx={}, m={:?}, vals=[{}])",
 bcx.to_str(),
 m,
-vals.iter().map(|v| bcx.val_to_string(*v)).collect::<Vec<_>>().connect(", "));
+vals.iter().map(|v| bcx.val_to_string(*v)).collect::<Vec<_>>().join(", "));
 let _indenter = indenter();
 let _icx = push_ctxt("match::compile_submatch");
 let mut bcx = bcx;
@@ -92,7 +92,7 @@ pub fn trans_inline_asm<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ia: &ast::InlineAsm)
 .chain(arch_clobbers.iter()
 .map(|s| s.to_string()))
 .collect::<Vec<String>>()
-.connect(",");
+.join(",");

 debug!("Asm Constraints: {}", &all_constraints[..]);

@@ -148,7 +148,7 @@ pub fn Invoke(cx: Block,
 terminate(cx, "Invoke");
 debug!("Invoke({} with arguments ({}))",
 cx.val_to_string(fn_),
-args.iter().map(|a| cx.val_to_string(*a)).collect::<Vec<String>>().connect(", "));
+args.iter().map(|a| cx.val_to_string(*a)).collect::<Vec<String>>().join(", "));
 debug_loc.apply(cx.fcx);
 B(cx).invoke(fn_, args, then, catch, attributes)
 }
@@ -167,7 +167,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
 args.iter()
 .map(|&v| self.ccx.tn().val_to_string(v))
 .collect::<Vec<String>>()
-.connect(", "));
+.join(", "));

 unsafe {
 let v = llvm::LLVMBuildInvoke(self.llbuilder,
@@ -809,7 +809,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
 args.iter()
 .map(|&v| self.ccx.tn().val_to_string(v))
 .collect::<Vec<String>>()
-.connect(", "));
+.join(", "));

 unsafe {
 let v = llvm::LLVMBuildCall(self.llbuilder, llfn, args.as_ptr(),
@@ -1443,7 +1443,7 @@ impl<'tcx> EnumMemberDescriptionFactory<'tcx> {
 let discrfield = discrfield.iter()
 .skip(1)
 .map(|x| x.to_string())
-.collect::<Vec<_>>().connect("$");
+.collect::<Vec<_>>().join("$");
 let union_member_name = format!("RUST$ENCODED$ENUM${}${}",
 discrfield,
 null_variant_name);
@@ -19,7 +19,7 @@ pub trait LlvmRepr {
 impl<T:LlvmRepr> LlvmRepr for [T] {
 fn llrepr(&self, ccx: &CrateContext) -> String {
 let reprs: Vec<String> = self.iter().map(|t| t.llrepr(ccx)).collect();
-format!("[{}]", reprs.connect(","))
+format!("[{}]", reprs.join(","))
 }
 }

@@ -322,7 +322,7 @@ impl TypeNames {

 pub fn types_to_str(&self, tys: &[Type]) -> String {
 let strs: Vec<String> = tys.iter().map(|t| self.type_to_string(*t)).collect();
-format!("[{}]", strs.connect(","))
+format!("[{}]", strs.join(","))
 }

 pub fn val_to_string(&self, val: ValueRef) -> String {
@@ -457,7 +457,7 @@ fn llvm_type_name<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
 let tstr = if strings.is_empty() {
 base
 } else {
-format!("{}<{}>", base, strings.connect(", "))
+format!("{}<{}>", base, strings.join(", "))
 };

 if did.krate == 0 {
@@ -134,7 +134,7 @@ pub fn report_error<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
 p.self_ty(),
 p))
 .collect::<Vec<_>>()
-.connect(", ");
+.join(", ");
 cx.sess.fileline_note(
 span,
 &format!("the method `{}` exists but the \
@@ -1002,7 +1002,7 @@ fn check_impl_items_against_trait<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
 "not all trait items implemented, missing: `{}`",
 missing_items.iter()
 .map(<ast::Name>::as_str)
-.collect::<Vec<_>>().connect("`, `"))
+.collect::<Vec<_>>().join("`, `"))
 }

 if !invalidated_items.is_empty() {
@@ -1013,7 +1013,7 @@ fn check_impl_items_against_trait<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
 invalidator.ident.as_str(),
 invalidated_items.iter()
 .map(<ast::Name>::as_str)
-.collect::<Vec<_>>().connect("`, `"))
+.collect::<Vec<_>>().join("`, `"))
 }
 }

@@ -2868,7 +2868,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>,
 span_err!(tcx.sess, span, E0063,
 "missing field{}: {}",
 if missing_fields.len() == 1 {""} else {"s"},
-missing_fields.connect(", "));
+missing_fields.join(", "));
 }
 }

@@ -514,7 +514,7 @@ impl<'a, 'tcx> CoherenceChecker<'a, 'tcx> {
 } else {
 name.to_string()
 }, a, b)
-}).collect::<Vec<_>>().connect(", "));
+}).collect::<Vec<_>>().join(", "));
 return;
 }

@@ -1536,7 +1536,7 @@ fn convert_typed_item<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
 d => format!("{:?}", d),
 })
 .collect::<Vec<String>>()
-.connect(",");
+.join(",");

 tcx.sess.span_err(it.span, &object_lifetime_default_reprs);
 }
@@ -2536,12 +2536,12 @@ fn name_from_pat(p: &ast::Pat) -> String {
 format!("{} {{ {}{} }}", path_to_string(name),
 fields.iter().map(|&Spanned { node: ref fp, .. }|
 format!("{}: {}", fp.ident.as_str(), name_from_pat(&*fp.pat)))
-.collect::<Vec<String>>().connect(", "),
+.collect::<Vec<String>>().join(", "),
 if etc { ", ..." } else { "" }
 )
 },
 PatTup(ref elts) => format!("({})", elts.iter().map(|p| name_from_pat(&**p))
-.collect::<Vec<String>>().connect(", ")),
+.collect::<Vec<String>>().join(", ")),
 PatBox(ref p) => name_from_pat(&**p),
 PatRegion(ref p, _) => name_from_pat(&**p),
 PatLit(..) => {
@@ -2555,7 +2555,7 @@ fn name_from_pat(p: &ast::Pat) -> String {
 let begin = begin.iter().map(|p| name_from_pat(&**p));
 let mid = mid.as_ref().map(|p| format!("..{}", name_from_pat(&**p))).into_iter();
 let end = end.iter().map(|p| name_from_pat(&**p));
-format!("[{}]", begin.chain(mid).chain(end).collect::<Vec<_>>().connect(", "))
+format!("[{}]", begin.chain(mid).chain(end).collect::<Vec<_>>().join(", "))
 },
 PatMac(..) => {
 warn!("can't document the name of a function argument \
@@ -360,7 +360,7 @@ fn resolved_path(w: &mut fmt::Formatter, did: ast::DefId, path: &clean::Path,
 match href(did) {
 Some((url, shortty, fqp)) => {
 try!(write!(w, "<a class='{}' href='{}' title='{}'>{}</a>",
-shortty, url, fqp.connect("::"), last.name));
+shortty, url, fqp.join("::"), last.name));
 }
 _ => try!(write!(w, "{}", last.name)),
 }
@@ -199,7 +199,7 @@ fn stripped_filtered_line<'a>(s: &'a str) -> Option<&'a str> {
 fn collapse_whitespace(s: &str) -> String {
 s.split(|c: char| c.is_whitespace()).filter(|s| {
 !s.is_empty()
-}).collect::<Vec<_>>().connect(" ")
+}).collect::<Vec<_>>().join(" ")
 }

 thread_local!(static USED_HEADER_MAP: RefCell<HashMap<String, usize>> = {
@@ -238,14 +238,14 @@ pub fn render(w: &mut fmt::Formatter, s: &str, print_toc: bool) -> fmt::Result {
 let lines = origtext.lines().filter(|l| {
 stripped_filtered_line(*l).is_none()
 });
-let text = lines.collect::<Vec<&str>>().connect("\n");
+let text = lines.collect::<Vec<&str>>().join("\n");
 if rendered { return }
 PLAYGROUND_KRATE.with(|krate| {
 let mut s = String::new();
 krate.borrow().as_ref().map(|krate| {
 let test = origtext.lines().map(|l| {
 stripped_filtered_line(l).unwrap_or(l)
-}).collect::<Vec<&str>>().connect("\n");
+}).collect::<Vec<&str>>().join("\n");
 let krate = krate.as_ref().map(|s| &**s);
 let test = test::maketest(&test, krate, false,
 &Default::default());
@@ -275,7 +275,7 @@ pub fn render(w: &mut fmt::Formatter, s: &str, print_toc: bool) -> fmt::Result {

 // Transform the contents of the header into a hyphenated string
 let id = s.split_whitespace().map(|s| s.to_ascii_lowercase())
-.collect::<Vec<String>>().connect("-");
+.collect::<Vec<String>>().join("-");

 // This is a terrible hack working around how hoedown gives us rendered
 // html for text rather than the raw text.
@@ -387,7 +387,7 @@ pub fn find_testable_code(doc: &str, tests: &mut ::test::Collector) {
 let lines = text.lines().map(|l| {
 stripped_filtered_line(l).unwrap_or(l)
 });
-let text = lines.collect::<Vec<&str>>().connect("\n");
+let text = lines.collect::<Vec<&str>>().join("\n");
 tests.add_test(text.to_string(),
 block_info.should_panic, block_info.no_run,
 block_info.ignore, block_info.test_harness);
@@ -285,7 +285,7 @@ impl fmt::Display for IndexItemFunctionType {
 let inputs: Vec<String> = self.inputs.iter().map(|ref t| {
 format!("{}", t)
 }).collect();
-try!(write!(f, "{{\"inputs\":[{}],\"output\":", inputs.connect(",")));
+try!(write!(f, "{{\"inputs\":[{}],\"output\":", inputs.join(",")));

 match self.output {
 Some(ref t) => try!(write!(f, "{}", t)),
@@ -461,7 +461,7 @@ fn build_index(krate: &clean::Crate, cache: &mut Cache) -> io::Result<String> {
 search_index.push(IndexItem {
 ty: shortty(item),
 name: item.name.clone().unwrap(),
-path: fqp[..fqp.len() - 1].connect("::"),
+path: fqp[..fqp.len() - 1].join("::"),
 desc: shorter(item.doc_value()),
 parent: Some(did),
 search_type: get_index_search_type(&item, parent_basename),
@@ -957,7 +957,7 @@ impl DocFolder for Cache {
 self.search_index.push(IndexItem {
 ty: shortty(&item),
 name: s.to_string(),
-path: path.connect("::").to_string(),
+path: path.join("::").to_string(),
 desc: shorter(item.doc_value()),
 parent: parent,
 search_type: get_index_search_type(&item, parent_basename),
@@ -1187,7 +1187,7 @@ impl Context {
 *slot.borrow_mut() = cx.current.clone();
 });

-let mut title = cx.current.connect("::");
+let mut title = cx.current.join("::");
 if pushname {
 if !title.is_empty() {
 title.push_str("::");
@@ -1393,7 +1393,7 @@ impl<'a> Item<'a> {
 Some(format!("{root}src/{krate}/{path}.html#{href}",
 root = self.cx.root_path,
 krate = self.cx.layout.krate,
-path = path.connect("/"),
+path = path.join("/"),
 href = href))

 // If this item is not part of the local crate, then things get a little
@@ -1417,7 +1417,7 @@ impl<'a> Item<'a> {
 };
 Some(format!("{root}{path}/{file}?gotosrc={goto}",
 root = root,
-path = path[..path.len() - 1].connect("/"),
+path = path[..path.len() - 1].join("/"),
 file = item_path(self.item),
 goto = self.item.def_id.node))
 }
@@ -1523,7 +1523,7 @@ fn item_path(item: &clean::Item) -> String {
 }

 fn full_path(cx: &Context, item: &clean::Item) -> String {
-let mut s = cx.current.connect("::");
+let mut s = cx.current.join("::");
 s.push_str("::");
 s.push_str(item.name.as_ref().unwrap());
 return s
@@ -1535,7 +1535,7 @@ fn shorter<'a>(s: Option<&'a str>) -> String {
 (*line).chars().any(|chr|{
 !chr.is_whitespace()
 })
-}).collect::<Vec<_>>().connect("\n"),
+}).collect::<Vec<_>>().join("\n"),
 None => "".to_string()
 }
 }
@@ -1920,12 +1920,12 @@ fn item_trait(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item,
 try!(write!(w, r#"<script type="text/javascript" async
 src="{root_path}/implementors/{path}/{ty}.{name}.js">
 </script>"#,
-root_path = vec![".."; cx.current.len()].connect("/"),
+root_path = vec![".."; cx.current.len()].join("/"),
 path = if ast_util::is_local(it.def_id) {
-cx.current.connect("/")
+cx.current.join("/")
 } else {
 let path = &cache.external_paths[&it.def_id];
-path[..path.len() - 1].connect("/")
+path[..path.len() - 1].join("/")
 },
 ty = shortty(it).to_static_str(),
 name = *it.name.as_ref().unwrap()));
@@ -361,7 +361,7 @@ pub fn unindent(s: &str) -> String {
 line[min_indent..].to_string()
 }
 }).collect::<Vec<_>>());
-unindented.connect("\n")
+unindented.join("\n")
 } else {
 s.to_string()
 }
@@ -360,7 +360,7 @@ impl Collector {
 let s = self.current_header.as_ref().map(|s| &**s).unwrap_or("");
 format!("{}_{}", s, self.cnt)
 } else {
-format!("{}_{}", self.names.connect("::"), self.cnt)
+format!("{}_{}", self.names.join("::"), self.cnt)
 };
 self.cnt += 1;
 let libs = self.libs.clone();
@@ -442,7 +442,7 @@ impl fmt::Display for Ipv6Addr {
 .iter()
 .map(|&seg| format!("{:x}", seg))
 .collect::<Vec<String>>()
-.connect(":")
+.join(":")
 }

 write!(fmt, "{}::{}",
@@ -27,7 +27,7 @@ pub fn path_name_i(idents: &[Ident]) -> String {
 // FIXME: Bad copies (#2543 -- same for everything else that says "bad")
 idents.iter().map(|i| {
 token::get_ident(*i).to_string()
-}).collect::<Vec<String>>().connect("::")
+}).collect::<Vec<String>>().join("::")
 }

 pub fn local_def(id: NodeId) -> DefId {
@@ -78,7 +78,7 @@ pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
 .iter()
 .map(|x| token::get_ident(*x).to_string())
 .collect::<Vec<String>>()
-.connect("::");
+.join("::");
 base::MacEager::expr(cx.expr_str(
 sp,
 token::intern_and_get_ident(&string[..])))
@@ -465,7 +465,7 @@ pub fn parse(sess: &ParseSess,
 token::get_ident(bind))).to_string()
 }
 _ => panic!()
-} }).collect::<Vec<String>>().connect(" or ");
+} }).collect::<Vec<String>>().join(" or ");
 return Error(sp, format!(
 "local ambiguity: multiple parsing options: \
 built-in NTs {} or {} other options.",
@@ -139,7 +139,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String {
 let lines = vertical_trim(lines);
 let lines = horizontal_trim(lines);

-return lines.connect("\n");
+return lines.join("\n");
 }

 panic!("not a doc-comment: {}", comment);
@@ -5215,7 +5215,7 @@ impl<'a> Parser<'a> {
 last_span,
 &format!("illegal ABI: expected one of [{}], \
 found `{}`",
-abi::all_names().connect(", "),
+abi::all_names().join(", "),
 the_string));
 Ok(None)
 }
@@ -1080,7 +1080,7 @@ impl MetricMap {
 .map(|(k,v)| format!("{}: {} (+/- {})", *k,
 v.value, v.noise))
 .collect();
-v.connect(", ")
+v.join(", ")
 }
 }

@@ -36,7 +36,7 @@ impl TTMacroExpander for Expander {
 sp: Span,
 _: &[ast::TokenTree]) -> Box<MacResult+'cx> {
 let args = self.args.iter().map(|i| pprust::meta_item_to_string(&*i))
-.collect::<Vec<_>>().connect(", ");
+.collect::<Vec<_>>().join(", ");
 let interned = token::intern_and_get_ident(&args[..]);
 MacEager::expr(ecx.expr_str(sp, interned))
 }
@@ -108,7 +108,7 @@ impl fmt::Display for AsciiArt {
 .collect::<Vec<String>>();

 // Concatenate the lines together using a new-line.
-write!(f, "{}", lines.connect("\n"))
+write!(f, "{}", lines.join("\n"))
 }
 }

@@ -24,7 +24,7 @@ impl<T:to_str> to_str for Vec<T> {
 self.iter()
 .map(|e| e.to_string_())
 .collect::<Vec<String>>()
-.connect(", "))
+.join(", "))
 }
 }