Auto merge of #52738 - ljedrz:push_to_extend, r=eddyb

Replace push loops with extend() where possible

Where `extend()` could not be used, I set the vector's capacity up front instead.

According to my [simple benchmark](https://gist.github.com/ljedrz/568e97621b749849684c1da71c27dceb) `extend`ing a vector can be over **10 times** faster than `push`ing to it in a loop:

10 elements (6.1 times faster):
```
test bench_extension ... bench:          75 ns/iter (+/- 23)
test bench_push_loop ... bench:         458 ns/iter (+/- 142)
```

100 elements (11.12 times faster):
```
test bench_extension ... bench:          87 ns/iter (+/- 26)
test bench_push_loop ... bench:         968 ns/iter (+/- 3,528)
```

1000 elements (11.04 times faster):
```
test bench_extension ... bench:         311 ns/iter (+/- 9)
test bench_push_loop ... bench:       3,436 ns/iter (+/- 233)
```

Seems like a good idea to use `extend` as much as possible.
This commit is contained in:
bors 2018-07-29 21:37:47 +00:00
commit 866a713258
28 changed files with 101 additions and 150 deletions

View File

@ -92,10 +92,7 @@ pub fn push_exe_path(mut buf: PathBuf, components: &[&str]) -> PathBuf {
file.push_str(".exe"); file.push_str(".exe");
} }
for c in components { buf.extend(components);
buf.push(c);
}
buf.push(file); buf.push(file);
buf buf

View File

@ -567,12 +567,12 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
fn add_returning_edge(&mut self, fn add_returning_edge(&mut self,
_from_expr: &hir::Expr, _from_expr: &hir::Expr,
from_index: CFGIndex) { from_index: CFGIndex) {
let mut data = CFGEdgeData { let data = CFGEdgeData {
exiting_scopes: vec![], exiting_scopes: self.loop_scopes.iter()
.rev()
.map(|&LoopScope { loop_id: id, .. }| id)
.collect()
}; };
for &LoopScope { loop_id: id, .. } in self.loop_scopes.iter().rev() {
data.exiting_scopes.push(id);
}
self.graph.add_edge(from_index, self.fn_exit, data); self.graph.add_edge(from_index, self.fn_exit, data);
} }

View File

@ -151,13 +151,12 @@ impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> {
debug!("process_registered_region_obligations()"); debug!("process_registered_region_obligations()");
// pull out the region obligations with the given `body_id` (leaving the rest) // pull out the region obligations with the given `body_id` (leaving the rest)
let mut my_region_obligations = Vec::with_capacity(self.region_obligations.borrow().len()); let my_region_obligations = {
{
let mut r_o = self.region_obligations.borrow_mut(); let mut r_o = self.region_obligations.borrow_mut();
for (_, obligation) in r_o.drain_filter(|(ro_body_id, _)| *ro_body_id == body_id) { let my_r_o = r_o.drain_filter(|(ro_body_id, _)| *ro_body_id == body_id)
my_region_obligations.push(obligation); .map(|(_, obligation)| obligation).collect::<Vec<_>>();
} my_r_o
} };
let outlives = &mut TypeOutlives::new( let outlives = &mut TypeOutlives::new(
self, self,

View File

@ -367,9 +367,7 @@ impl<'a, 'tcx: 'a> ItemLikeVisitor<'tcx> for CollectPrivateImplItemsVisitor<'a,
// We need only trait impls here, not inherent impls, and only non-exported ones // We need only trait impls here, not inherent impls, and only non-exported ones
if let hir::ItemKind::Impl(.., Some(ref trait_ref), _, ref impl_item_refs) = item.node { if let hir::ItemKind::Impl(.., Some(ref trait_ref), _, ref impl_item_refs) = item.node {
if !self.access_levels.is_reachable(item.id) { if !self.access_levels.is_reachable(item.id) {
for impl_item_ref in impl_item_refs { self.worklist.extend(impl_item_refs.iter().map(|r| r.id.node_id));
self.worklist.push(impl_item_ref.id.node_id);
}
let trait_def_id = match trait_ref.path.def { let trait_def_id = match trait_ref.path.def {
Def::Trait(def_id) => def_id, Def::Trait(def_id) => def_id,
@ -426,9 +424,7 @@ fn reachable_set<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, crate_num: CrateNum) ->
// If other crates link to us, they're going to expect to be able to // If other crates link to us, they're going to expect to be able to
// use the lang items, so we need to be sure to mark them as // use the lang items, so we need to be sure to mark them as
// exported. // exported.
for (id, _) in &access_levels.map { reachable_context.worklist.extend(access_levels.map.iter().map(|(id, _)| *id));
reachable_context.worklist.push(*id);
}
for item in tcx.lang_items().items().iter() { for item in tcx.lang_items().items().iter() {
if let Some(did) = *item { if let Some(did) = *item {
if let Some(node_id) = tcx.hir.as_local_node_id(did) { if let Some(node_id) = tcx.hir.as_local_node_id(did) {

View File

@ -64,9 +64,7 @@ impl<'a, 'tcx> Iterator for Preorder<'a, 'tcx> {
let data = &self.mir[idx]; let data = &self.mir[idx];
if let Some(ref term) = data.terminator { if let Some(ref term) = data.terminator {
for &succ in term.successors() { self.worklist.extend(term.successors());
self.worklist.push(succ);
}
} }
return Some((idx, data)); return Some((idx, data));

View File

@ -899,9 +899,7 @@ macro_rules! options {
-> bool { -> bool {
match v { match v {
Some(s) => { Some(s) => {
for s in s.split_whitespace() { slot.extend(s.split_whitespace().map(|s| s.to_string()));
slot.push(s.to_string());
}
true true
}, },
None => false, None => false,

View File

@ -438,9 +438,9 @@ fn to_pretty_impl_header(tcx: TyCtxt, impl_def_id: DefId) -> Option<String> {
} }
pretty_predicates.push(p.to_string()); pretty_predicates.push(p.to_string());
} }
for ty in types_without_default_bounds { pretty_predicates.extend(
pretty_predicates.push(format!("{}: ?Sized", ty)); types_without_default_bounds.iter().map(|ty| format!("{}: ?Sized", ty))
} );
if !pretty_predicates.is_empty() { if !pretty_predicates.is_empty() {
write!(w, "\n where {}", pretty_predicates.join(", ")).unwrap(); write!(w, "\n where {}", pretty_predicates.join(", ")).unwrap();
} }

View File

@ -152,9 +152,7 @@ fn path_relative_from(path: &Path, base: &Path) -> Option<PathBuf> {
(Some(_), Some(b)) if b == Component::ParentDir => return None, (Some(_), Some(b)) if b == Component::ParentDir => return None,
(Some(a), Some(_)) => { (Some(a), Some(_)) => {
comps.push(Component::ParentDir); comps.push(Component::ParentDir);
for _ in itb { comps.extend(itb.map(|_| Component::ParentDir));
comps.push(Component::ParentDir);
}
comps.push(a); comps.push(a);
comps.extend(ita.by_ref()); comps.extend(ita.by_ref());
break; break;

View File

@ -39,6 +39,7 @@ use rustc::util::common::path2cstr;
use libc::{c_uint, c_longlong}; use libc::{c_uint, c_longlong};
use std::ffi::CString; use std::ffi::CString;
use std::fmt::Write; use std::fmt::Write;
use std::iter;
use std::ptr; use std::ptr;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use syntax::ast; use syntax::ast;
@ -364,18 +365,16 @@ fn subroutine_type_metadata<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>,
&signature, &signature,
); );
let mut signature_metadata: Vec<DIType> = Vec::with_capacity(signature.inputs().len() + 1); let signature_metadata: Vec<DIType> = iter::once(
// return type
// return type match signature.output().sty {
signature_metadata.push(match signature.output().sty { ty::TyTuple(ref tys) if tys.is_empty() => ptr::null_mut(),
ty::TyTuple(ref tys) if tys.is_empty() => ptr::null_mut(), _ => type_metadata(cx, signature.output(), span)
_ => type_metadata(cx, signature.output(), span) }
}); ).chain(
// regular arguments
// regular arguments signature.inputs().iter().map(|argument_type| type_metadata(cx, argument_type, span))
for &argument_type in signature.inputs() { ).collect();
signature_metadata.push(type_metadata(cx, argument_type, span));
}
return_if_metadata_created_in_meantime!(cx, unique_type_id); return_if_metadata_created_in_meantime!(cx, unique_type_id);

View File

@ -352,9 +352,10 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>,
if sig.abi == Abi::RustCall && !sig.inputs().is_empty() { if sig.abi == Abi::RustCall && !sig.inputs().is_empty() {
if let ty::TyTuple(args) = sig.inputs()[sig.inputs().len() - 1].sty { if let ty::TyTuple(args) = sig.inputs()[sig.inputs().len() - 1].sty {
for &argument_type in args { signature.extend(
signature.push(type_metadata(cx, argument_type, syntax_pos::DUMMY_SP)); args.iter().map(|argument_type|
} type_metadata(cx, argument_type, syntax_pos::DUMMY_SP))
);
} }
} }

View File

@ -1588,10 +1588,7 @@ pub fn in_rustc_thread<F, R>(f: F) -> Result<R, Box<dyn Any + Send>>
/// debugging, since some ICEs only happens with non-default compiler flags /// debugging, since some ICEs only happens with non-default compiler flags
/// (and the users don't always report them). /// (and the users don't always report them).
fn extra_compiler_flags() -> Option<(Vec<String>, bool)> { fn extra_compiler_flags() -> Option<(Vec<String>, bool)> {
let mut args = Vec::new(); let args = env::args_os().map(|arg| arg.to_string_lossy().to_string()).collect::<Vec<_>>();
for arg in env::args_os() {
args.push(arg.to_string_lossy().to_string());
}
// Avoid printing help because of empty args. This can suggest the compiler // Avoid printing help because of empty args. This can suggest the compiler
// itself is not the program root (consider RLS). // itself is not the program root (consider RLS).

View File

@ -204,10 +204,9 @@ pub fn write_counts(count_file: &mut File, counts: &mut HashMap<String,QueryMetr
use rustc::util::common::duration_to_secs_str; use rustc::util::common::duration_to_secs_str;
use std::cmp::Reverse; use std::cmp::Reverse;
let mut data = vec![]; let mut data = counts.iter().map(|(ref cons, ref qm)|
for (ref cons, ref qm) in counts.iter() { (cons.clone(), qm.count.clone(), qm.dur_total.clone(), qm.dur_self.clone())
data.push((cons.clone(), qm.count.clone(), qm.dur_total.clone(), qm.dur_self.clone())); ).collect::<Vec<_>>();
};
data.sort_by_key(|k| Reverse(k.3)); data.sort_by_key(|k| Reverse(k.3));
for (cons, count, dur_total, dur_self) in data { for (cons, count, dur_total, dur_self) in data {
write!(count_file, "{}, {}, {}, {}\n", write!(count_file, "{}, {}, {}, {}\n",

View File

@ -275,12 +275,8 @@ pub fn get_param(llfn: ValueRef, index: c_uint) -> ValueRef {
fn get_params(llfn: ValueRef) -> Vec<ValueRef> { fn get_params(llfn: ValueRef) -> Vec<ValueRef> {
unsafe { unsafe {
let num_params = LLVMCountParams(llfn); let num_params = LLVMCountParams(llfn);
let mut params = Vec::with_capacity(num_params as usize);
for idx in 0..num_params {
params.push(LLVMGetParam(llfn, idx));
}
params (0..num_params).map(|idx| LLVMGetParam(llfn, idx)).collect()
} }
} }

View File

@ -406,10 +406,9 @@ impl<'a, 'tcx> Inliner<'a, 'tcx> {
local_map.push(idx); local_map.push(idx);
} }
for p in callee_mir.promoted.iter().cloned() { promoted_map.extend(
let idx = caller_mir.promoted.push(p); callee_mir.promoted.iter().cloned().map(|p| caller_mir.promoted.push(p))
promoted_map.push(idx); );
}
// If the call is something like `a[*i] = f(i)`, where // If the call is something like `a[*i] = f(i)`, where
// `i : &mut usize`, then just duplicating the `a[*i]` // `i : &mut usize`, then just duplicating the `a[*i]`

View File

@ -3831,9 +3831,9 @@ impl<'a> Resolver<'a> {
} }
// Add primitive types to the mix // Add primitive types to the mix
if filter_fn(Def::PrimTy(TyBool)) { if filter_fn(Def::PrimTy(TyBool)) {
for (name, _) in &self.primitive_type_table.primitive_types { names.extend(
names.push(*name); self.primitive_type_table.primitive_types.iter().map(|(name, _)| name)
} )
} }
} else { } else {
// Search in module. // Search in module.

View File

@ -1318,14 +1318,13 @@ impl<'l, 'tcx: 'l, 'll, O: DumpOutput + 'll> DumpVisitor<'l, 'tcx, 'll, O> {
}; };
// Make a comma-separated list of names of imported modules. // Make a comma-separated list of names of imported modules.
let mut names = vec![];
let glob_map = &self.save_ctxt.analysis.glob_map; let glob_map = &self.save_ctxt.analysis.glob_map;
let glob_map = glob_map.as_ref().unwrap(); let glob_map = glob_map.as_ref().unwrap();
if glob_map.contains_key(&id) { let names = if glob_map.contains_key(&id) {
for n in glob_map.get(&id).unwrap() { glob_map.get(&id).unwrap().iter().map(|n| n.to_string()).collect()
names.push(n.to_string()); } else {
} Vec::new()
} };
let sub_span = self.span.sub_span_of_token(use_tree.span, let sub_span = self.span.sub_span_of_token(use_tree.span,
token::BinOp(token::Star)); token::BinOp(token::Star));

View File

@ -962,19 +962,21 @@ fn generics_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
&["<closure_kind>", "<closure_signature>"][..] &["<closure_kind>", "<closure_signature>"][..]
}; };
for (i, &arg) in dummy_args.iter().enumerate() { params.extend(
params.push(ty::GenericParamDef { dummy_args.iter().enumerate().map(|(i, &arg)|
index: type_start + i as u32, ty::GenericParamDef {
name: Symbol::intern(arg).as_interned_str(), index: type_start + i as u32,
def_id, name: Symbol::intern(arg).as_interned_str(),
pure_wrt_drop: false, def_id,
kind: ty::GenericParamDefKind::Type { pure_wrt_drop: false,
has_default: false, kind: ty::GenericParamDefKind::Type {
object_lifetime_default: rl::Set1::Empty, has_default: false,
synthetic: None, object_lifetime_default: rl::Set1::Empty,
}, synthetic: None,
}); },
} }
)
);
tcx.with_freevars(node_id, |fv| { tcx.with_freevars(node_id, |fv| {
params.extend(fv.iter().zip((dummy_args.len() as u32)..).map(|(_, i)| { params.extend(fv.iter().zip((dummy_args.len() as u32)..).map(|(_, i)| {
@ -1651,10 +1653,7 @@ fn explicit_predicates_of<'a, 'tcx>(
&mut projections); &mut projections);
predicates.push(trait_ref.to_predicate()); predicates.push(trait_ref.to_predicate());
predicates.extend(projections.iter().map(|p| p.to_predicate()));
for projection in &projections {
predicates.push(projection.to_predicate());
}
} }
&hir::GenericBound::Outlives(ref lifetime) => { &hir::GenericBound::Outlives(ref lifetime) => {

View File

@ -203,9 +203,7 @@ pub fn run_core(search_paths: SearchPaths,
intra_link_resolution_failure_name.to_owned(), intra_link_resolution_failure_name.to_owned(),
missing_docs.to_owned()]; missing_docs.to_owned()];
for (lint, _) in &cmd_lints { whitelisted_lints.extend(cmd_lints.iter().map(|(lint, _)| lint).cloned());
whitelisted_lints.push(lint.clone());
}
let lints = lint::builtin::HardwiredLints.get_lints() let lints = lint::builtin::HardwiredLints.get_lints()
.into_iter() .into_iter()

View File

@ -722,9 +722,7 @@ where R: 'static + Send,
}, },
_ => continue, _ => continue,
}; };
for p in value.as_str().split_whitespace() { sink.extend(value.as_str().split_whitespace().map(|p| p.to_string()));
sink.push(p.to_string());
}
} }
if attr.is_word() && name == Some("document_private_items") { if attr.is_word() && name == Some("document_private_items") {

View File

@ -1364,9 +1364,7 @@ impl Stack {
// Used by Parser to insert StackElement::Key elements at the top of the stack. // Used by Parser to insert StackElement::Key elements at the top of the stack.
fn push_key(&mut self, key: string::String) { fn push_key(&mut self, key: string::String) {
self.stack.push(InternalKey(self.str_buffer.len() as u16, key.len() as u16)); self.stack.push(InternalKey(self.str_buffer.len() as u16, key.len() as u16));
for c in key.as_bytes() { self.str_buffer.extend(key.as_bytes());
self.str_buffer.push(*c);
}
} }
// Used by Parser to insert StackElement::Index elements at the top of the stack. // Used by Parser to insert StackElement::Index elements at the top of the stack.
@ -2212,9 +2210,7 @@ impl ::Decoder for Decoder {
}; };
match o.remove(&"fields".to_string()) { match o.remove(&"fields".to_string()) {
Some(Json::Array(l)) => { Some(Json::Array(l)) => {
for field in l.into_iter().rev() { self.stack.extend(l.into_iter().rev());
self.stack.push(field);
}
}, },
Some(val) => { Some(val) => {
return Err(ExpectedError("Array".to_owned(), val.to_string())) return Err(ExpectedError("Array".to_owned(), val.to_string()))
@ -2346,9 +2342,7 @@ impl ::Decoder for Decoder {
{ {
let array = expect!(self.pop(), Array)?; let array = expect!(self.pop(), Array)?;
let len = array.len(); let len = array.len();
for v in array.into_iter().rev() { self.stack.extend(array.into_iter().rev());
self.stack.push(v);
}
f(self, len) f(self, len)
} }

View File

@ -13,6 +13,7 @@ use ffi::OsStr;
use os::unix::ffi::OsStrExt; use os::unix::ffi::OsStrExt;
use fmt; use fmt;
use io::{self, Error, ErrorKind}; use io::{self, Error, ErrorKind};
use iter;
use libc::{EXIT_SUCCESS, EXIT_FAILURE}; use libc::{EXIT_SUCCESS, EXIT_FAILURE};
use path::{Path, PathBuf}; use path::{Path, PathBuf};
use sys::fd::FileDesc; use sys::fd::FileDesc;
@ -296,11 +297,11 @@ impl Command {
t!(callback()); t!(callback());
} }
let mut args: Vec<[usize; 2]> = Vec::new(); let args: Vec<[usize; 2]> = iter::once(
args.push([self.program.as_ptr() as usize, self.program.len()]); [self.program.as_ptr() as usize, self.program.len()]
for arg in self.args.iter() { ).chain(
args.push([arg.as_ptr() as usize, arg.len()]); self.args.iter().map(|arg| [arg.as_ptr() as usize, arg.len()])
} ).collect();
self.env.apply(); self.env.apply();

View File

@ -487,9 +487,7 @@ fn make_command_line(prog: &OsStr, args: &[OsString]) -> io::Result<Vec<u16>> {
} else { } else {
if x == '"' as u16 { if x == '"' as u16 {
// Add n+1 backslashes to total 2n+1 before internal '"'. // Add n+1 backslashes to total 2n+1 before internal '"'.
for _ in 0..(backslashes+1) { cmd.extend((0..(backslashes + 1)).map(|_| '\\' as u16));
cmd.push('\\' as u16);
}
} }
backslashes = 0; backslashes = 0;
} }
@ -498,9 +496,7 @@ fn make_command_line(prog: &OsStr, args: &[OsString]) -> io::Result<Vec<u16>> {
if quote { if quote {
// Add n backslashes to total 2n before ending '"'. // Add n backslashes to total 2n before ending '"'.
for _ in 0..backslashes { cmd.extend((0..backslashes).map(|_| '\\' as u16));
cmd.push('\\' as u16);
}
cmd.push('"' as u16); cmd.push('"' as u16);
} }
Ok(()) Ok(())

View File

@ -500,10 +500,7 @@ impl Pat {
PatKind::Slice(pats, None, _) if pats.len() == 1 => PatKind::Slice(pats, None, _) if pats.len() == 1 =>
pats[0].to_ty().map(TyKind::Slice)?, pats[0].to_ty().map(TyKind::Slice)?,
PatKind::Tuple(pats, None) => { PatKind::Tuple(pats, None) => {
let mut tys = Vec::new(); let tys = pats.iter().map(|pat| pat.to_ty()).collect::<Option<Vec<_>>>()?;
for pat in pats {
tys.push(pat.to_ty()?);
}
TyKind::Tup(tys) TyKind::Tup(tys)
} }
_ => return None, _ => return None,
@ -949,10 +946,7 @@ impl Expr {
ExprKind::Array(exprs) if exprs.len() == 1 => ExprKind::Array(exprs) if exprs.len() == 1 =>
exprs[0].to_ty().map(TyKind::Slice)?, exprs[0].to_ty().map(TyKind::Slice)?,
ExprKind::Tup(exprs) => { ExprKind::Tup(exprs) => {
let mut tys = Vec::new(); let tys = exprs.iter().map(|expr| expr.to_ty()).collect::<Option<Vec<_>>>()?;
for expr in exprs {
tys.push(expr.to_ty()?);
}
TyKind::Tup(tys) TyKind::Tup(tys)
} }
ExprKind::Binary(binop, lhs, rhs) if binop.node == BinOpKind::Add => ExprKind::Binary(binop, lhs, rhs) if binop.node == BinOpKind::Add =>

View File

@ -265,9 +265,7 @@ impl<'a> StringReader<'a> {
m.push(c); m.push(c);
} }
_ => { _ => {
for c in c.escape_default() { m.extend(c.escape_default());
m.push(c);
}
} }
} }
} }

View File

@ -131,8 +131,8 @@ fn decodable_substructure(cx: &mut ExtCtxt,
StaticEnum(_, ref fields) => { StaticEnum(_, ref fields) => {
let variant = cx.ident_of("i"); let variant = cx.ident_of("i");
let mut arms = Vec::new(); let mut arms = Vec::with_capacity(fields.len() + 1);
let mut variants = Vec::new(); let mut variants = Vec::with_capacity(fields.len());
let rvariant_arg = cx.ident_of("read_enum_variant_arg"); let rvariant_arg = cx.ident_of("read_enum_variant_arg");
for (i, &(ident, v_span, ref parts)) in fields.iter().enumerate() { for (i, &(ident, v_span, ref parts)) in fields.iter().enumerate() {

View File

@ -188,6 +188,7 @@ pub use self::StaticFields::*;
pub use self::SubstructureFields::*; pub use self::SubstructureFields::*;
use std::cell::RefCell; use std::cell::RefCell;
use std::iter;
use std::vec; use std::vec;
use rustc_target::spec::abi::Abi; use rustc_target::spec::abi::Abi;
@ -558,15 +559,13 @@ impl<'a> TraitDef<'a> {
// type being derived upon // type being derived upon
self.additional_bounds.iter().map(|p| { self.additional_bounds.iter().map(|p| {
cx.trait_bound(p.to_path(cx, self.span, type_ident, generics)) cx.trait_bound(p.to_path(cx, self.span, type_ident, generics))
}).collect(); }).chain(
// require the current trait
// require the current trait iter::once(cx.trait_bound(trait_path.clone()))
bounds.push(cx.trait_bound(trait_path.clone())); ).chain(
// also add in any bounds from the declaration
// also add in any bounds from the declaration param.bounds.iter().cloned()
for declared_bound in &param.bounds { ).collect();
bounds.push((*declared_bound).clone());
}
cx.typaram(self.span, param.ident, vec![], bounds, None) cx.typaram(self.span, param.ident, vec![], bounds, None)
} }

View File

@ -95,9 +95,8 @@ fn hash_substructure(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure)
_ => cx.span_bug(trait_span, "impossible substructure in `derive(Hash)`"), _ => cx.span_bug(trait_span, "impossible substructure in `derive(Hash)`"),
}; };
for &FieldInfo { ref self_, span, .. } in fields { stmts.extend(fields.iter().map(|FieldInfo { ref self_, span, .. }|
stmts.push(call_hash(span, self_.clone())); call_hash(*span, self_.clone())));
}
cx.expr_block(cx.block(trait_span, stmts)) cx.expr_block(cx.block(trait_span, stmts))
} }

View File

@ -406,10 +406,7 @@ impl<'a, 'b> Context<'a, 'b> {
// Map the arguments // Map the arguments
for i in 0..args_len { for i in 0..args_len {
let ref arg_types = self.arg_types[i]; let ref arg_types = self.arg_types[i];
let mut arg_offsets = Vec::with_capacity(arg_types.len()); let arg_offsets = arg_types.iter().map(|offset| sofar + *offset).collect::<Vec<_>>();
for offset in arg_types {
arg_offsets.push(sofar + *offset);
}
self.arg_index_map.push(arg_offsets); self.arg_index_map.push(arg_offsets);
sofar += self.arg_unique_types[i].len(); sofar += self.arg_unique_types[i].len();
} }
@ -581,10 +578,12 @@ impl<'a, 'b> Context<'a, 'b> {
/// Actually builds the expression which the format_args! block will be /// Actually builds the expression which the format_args! block will be
/// expanded to /// expanded to
fn into_expr(self) -> P<ast::Expr> { fn into_expr(self) -> P<ast::Expr> {
let mut locals = Vec::new(); let mut locals = Vec::with_capacity(
let mut counts = Vec::new(); (0..self.args.len()).map(|i| self.arg_unique_types[i].len()).sum()
let mut pats = Vec::new(); );
let mut heads = Vec::new(); let mut counts = Vec::with_capacity(self.count_args.len());
let mut pats = Vec::with_capacity(self.args.len());
let mut heads = Vec::with_capacity(self.args.len());
let names_pos: Vec<_> = (0..self.args.len()) let names_pos: Vec<_> = (0..self.args.len())
.map(|i| self.ecx.ident_of(&format!("arg{}", i)).gensym()) .map(|i| self.ecx.ident_of(&format!("arg{}", i)).gensym())