auto merge of #7263 : thestinger/rust/iterator, r=graydon

bors 2013-06-21 01:49:50 -07:00
commit b0e3ffd380
107 changed files with 353 additions and 671 deletions


@ -283,7 +283,7 @@ let ports = do vec::from_fn(3) |init_val| {
};
// Wait on each port, accumulating the results
let result = ports.foldl(0, |accum, port| *accum + port.recv() );
let result = ports.iter().fold(0, |accum, port| accum + port.recv() );
# fn some_expensive_computation(_i: uint) -> int { 42 }
~~~
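
The tutorial hunk above swaps the old `foldl` vector method for the `iter().fold` adaptor, which threads the accumulator by value rather than by reference (hence the old `*accum`). A minimal sketch of the same pattern in present-day Rust, with a plain vector standing in for the ports and their `recv()` results (an illustrative substitution, not part of the commit):

~~~
fn main() {
    // Stand-in for the per-task results that `port.recv()` would produce.
    let results = vec![40, 1, 1];

    // Old: `ports.foldl(0, |accum, port| *accum + port.recv())` — accumulator
    // arrived by reference, hence the `*accum`.
    // New: `ports.iter().fold(0, |accum, port| accum + port.recv())` — the
    // accumulator moves by value.
    let total = results.iter().fold(0, |accum, &r| accum + r);

    assert_eq!(total, 42);
    println!("total = {}", total);
}
~~~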


@ -358,7 +358,7 @@ fn check_expected_errors(expected_errors: ~[errors::ExpectedError],
// is the ending point, and * represents ANSI color codes.
for ProcRes.stderr.line_iter().advance |line| {
let mut was_expected = false;
for vec::eachi(expected_errors) |i, ee| {
for expected_errors.iter().enumerate().advance |(i, ee)| {
if !found_flags[i] {
debug!("prefix=%s ee.kind=%s ee.msg=%s line=%s",
prefixes[i], ee.kind, ee.msg, line);
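
This hunk shows the most common rewrite in the commit: indexed internal iteration with `vec::eachi`/`.eachi` becomes `.iter().enumerate().advance`, where `advance` was the pre-1.0 hook that let a `for` loop drive an external iterator. A hedged sketch of the same loop in current Rust, where the `for` loop consumes the iterator directly (the sample strings are invented for illustration):

~~~
fn main() {
    let expected_errors = ["error: mismatched types", "warning: unused variable"];

    // Pre-1.0:      `for vec::eachi(expected_errors) |i, ee| { ... }`
    // This commit:  `for expected_errors.iter().enumerate().advance |(i, ee)| { ... }`
    // Current Rust: the `for` loop drives the iterator itself.
    for (i, ee) in expected_errors.iter().enumerate() {
        println!("expected error #{}: {}", i, ee);
    }
}
~~~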


@ -639,7 +639,7 @@ impl BitvSet {
if self.capacity() < other.capacity() {
self.bitv.storage.grow(other.capacity() / uint::bits, &0);
}
for other.bitv.storage.eachi |i, &w| {
for other.bitv.storage.iter().enumerate().advance |(i, &w)| {
let old = self.bitv.storage[i];
let new = f(old, w);
self.bitv.storage[i] = new;
@ -672,7 +672,7 @@ impl BaseIter<uint> for BitvSet {
fn size_hint(&self) -> Option<uint> { Some(self.len()) }
fn each(&self, blk: &fn(v: &uint) -> bool) -> bool {
for self.bitv.storage.eachi |i, &w| {
for self.bitv.storage.iter().enumerate().advance |(i, &w)| {
if !iterate_bits(i * uint::bits, w, |b| blk(&b)) {
return false;
}
@ -826,7 +826,7 @@ impl BitvSet {
f: &fn(uint, uint, uint) -> bool) -> bool {
let min = uint::min(self.bitv.storage.len(),
other.bitv.storage.len());
self.bitv.storage.slice(0, min).eachi(|i, &w| {
self.bitv.storage.slice(0, min).iter().enumerate().advance(|(i, &w)| {
f(i * uint::bits, w, other.bitv.storage[i])
})
}
@ -845,12 +845,12 @@ impl BitvSet {
let min = uint::min(len1, len2);
/* only one of these loops will execute and that's the point */
for self.bitv.storage.slice(min, len1).eachi |i, &w| {
for self.bitv.storage.slice(min, len1).iter().enumerate().advance |(i, &w)| {
if !f(true, (i + min) * uint::bits, w) {
return false;
}
}
for other.bitv.storage.slice(min, len2).eachi |i, &w| {
for other.bitv.storage.slice(min, len2).iter().enumerate().advance |(i, &w)| {
if !f(false, (i + min) * uint::bits, w) {
return false;
}


@ -448,7 +448,7 @@ mod test {
|i| fmt!("tmp/lib-fileinput-test-fileinput-read-byte-%u.tmp", i)), true);
// 3 files containing 0\n, 1\n, and 2\n respectively
for filenames.eachi |i, &filename| {
for filenames.iter().enumerate().advance |(i, &filename)| {
make_file(filename.get_ref(), [fmt!("%u", i)]);
}
@ -478,7 +478,7 @@ mod test {
|i| fmt!("tmp/lib-fileinput-test-fileinput-read-%u.tmp", i)), true);
// 3 files containing 1\n, 2\n, and 3\n respectively
for filenames.eachi |i, &filename| {
for filenames.iter().enumerate().advance |(i, &filename)| {
make_file(filename.get_ref(), [fmt!("%u", i)]);
}
@ -498,7 +498,7 @@ mod test {
3,
|i| fmt!("tmp/lib-fileinput-test-input-vec-%u.tmp", i)), true);
for filenames.eachi |i, &filename| {
for filenames.iter().enumerate().advance |(i, &filename)| {
let contents =
vec::from_fn(3, |j| fmt!("%u %u", i, j));
make_file(filename.get_ref(), contents);
@ -518,7 +518,7 @@ mod test {
3,
|i| fmt!("tmp/lib-fileinput-test-input-vec-state-%u.tmp", i)),true);
for filenames.eachi |i, &filename| {
for filenames.iter().enumerate().advance |(i, &filename)| {
let contents =
vec::from_fn(3, |j| fmt!("%u %u", i, j + 1));
make_file(filename.get_ref(), contents);
@ -582,7 +582,7 @@ mod test {
3,
|i| fmt!("tmp/lib-fileinput-test-next-file-%u.tmp", i)),true);
for filenames.eachi |i, &filename| {
for filenames.iter().enumerate().advance |(i, &filename)| {
let contents =
vec::from_fn(3, |j| fmt!("%u %u", i, j + 1));
make_file(&filename.get(), contents);


@ -307,11 +307,11 @@ impl<T,U:Unflattener<T>,P:BytePort> GenericPort<T> for FlatPort<T, U, P> {
impl<T,F:Flattener<T>,C:ByteChan> GenericChan<T> for FlatChan<T, F, C> {
fn send(&self, val: T) {
self.byte_chan.send(CONTINUE.to_vec());
self.byte_chan.send(CONTINUE.to_owned());
let bytes = self.flattener.flatten(val);
let len = bytes.len() as u64;
do io::u64_to_be_bytes(len, size_of::<u64>()) |len_bytes| {
self.byte_chan.send(len_bytes.to_vec());
self.byte_chan.send(len_bytes.to_owned());
}
self.byte_chan.send(bytes);
}
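
Here `.to_vec()` on byte slices becomes `.to_owned()`; both copy the borrowed bytes into an owned vector. A small sketch in current Rust reusing the `CONTINUE` control word from the hunk (on today's slices `.to_vec()` is once again the usual spelling, so the exact method name is a historical detail):

~~~
fn main() {
    // Control word from the flatpipes test above.
    const CONTINUE: [u8; 4] = [0xAA, 0xBB, 0xCC, 0xDD];

    // 2013: `CONTINUE.to_vec()` became `CONTINUE.to_owned()`; both copy the
    // borrowed bytes into an owned, growable vector.
    let mut bytes: Vec<u8> = CONTINUE.to_vec();
    bytes.push(0); // the "control word followed by garbage" case

    assert_eq!(bytes.len(), 5);
    println!("{:?}", bytes);
}
~~~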
@ -937,7 +937,7 @@ mod test {
fn test_try_recv_none3<P:BytePort>(loader: PortLoader<P>) {
static CONTINUE: [u8, ..4] = [0xAA, 0xBB, 0xCC, 0xDD];
// The control word is followed by garbage
let bytes = CONTINUE.to_vec() + [0];
let bytes = CONTINUE.to_owned() + [0];
let port = loader(bytes);
let res: Option<int> = port.try_recv();
assert!(res.is_none());
@ -959,9 +959,9 @@ mod test {
// then undeserializable garbage
let len_bytes = do io::u64_to_be_bytes(
1, sys::size_of::<u64>()) |len_bytes| {
len_bytes.to_vec()
len_bytes.to_owned()
};
let bytes = CONTINUE.to_vec() + len_bytes + [0, 0, 0, 0];
let bytes = CONTINUE.to_owned() + len_bytes + [0, 0, 0, 0];
let port = loader(bytes);


@ -177,7 +177,7 @@ fn name_str(nm: &Name) -> ~str {
}
fn find_opt(opts: &[Opt], nm: Name) -> Option<uint> {
vec::position(opts, |opt| opt.name == nm)
opts.iter().position_(|opt| opt.name == nm)
}
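
The free function `vec::position(opts, f)` is replaced by the iterator adaptor `position_`; the trailing underscore avoided a clash with the old vector method and was later dropped. A minimal sketch of `find_opt` against today's `Iterator::position`, with a stripped-down `Opt` type assumed for illustration:

~~~
struct Opt {
    name: String,
}

fn find_opt(opts: &[Opt], nm: &str) -> Option<usize> {
    // Old: `vec::position(opts, |opt| opt.name == nm)`
    // New: `opts.iter().position_(|opt| opt.name == nm)`
    // Today the adaptor is simply `position`.
    opts.iter().position(|opt| opt.name == nm)
}

fn main() {
    let opts = vec![
        Opt { name: "help".to_string() },
        Opt { name: "verbose".to_string() },
    ];
    assert_eq!(find_opt(&opts, "verbose"), Some(1));
    assert_eq!(find_opt(&opts, "quiet"), None);
}
~~~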
/**


@ -950,7 +950,7 @@ impl serialize::Decoder for Decoder {
}
ref json => fail!("invalid variant: %?", *json),
};
let idx = match vec::position(names, |n| str::eq_slice(*n, name)) {
let idx = match names.iter().position_(|n| str::eq_slice(*n, name)) {
Some(idx) => idx,
None => fail!("Unknown variant name: %?", name),
};


@ -205,7 +205,7 @@ pub mod v4 {
}).collect();
if parts.len() != 4 {
Err(fmt!("'%s' doesn't have 4 parts", ip))
} else if parts.contains(&256) {
} else if parts.iter().any_(|x| *x == 256u) {
Err(fmt!("invalid octal in addr '%s'", ip))
} else {
Ok(Ipv4Rep {
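
`parts.contains(&256)` becomes `parts.iter().any_(|x| *x == 256u)`: an exact-element lookup generalised into a short-circuiting predicate. A sketch using current Rust's `any` (the address values are invented for the example):

~~~
fn main() {
    // Parsed octets of a (deliberately invalid) IPv4 address.
    let parts: Vec<u32> = vec![192, 168, 0, 256];

    // Old: `parts.contains(&256)` — an exact-element lookup.
    // New: `parts.iter().any_(|x| *x == 256u)` — a predicate; in current Rust
    // the adaptor is plain `any`, and it stops at the first match.
    if parts.iter().any(|&x| x == 256) {
        println!("invalid octet in addr");
    }
}
~~~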


@ -978,7 +978,7 @@ impl io::Writer for TcpSocketBuf {
let w_result = write_common_impl(socket_data_ptr,
vec::slice(data,
0,
data.len()).to_vec());
data.len()).to_owned());
if w_result.is_err() {
let err_data = w_result.get_err();
debug!(


@ -519,8 +519,9 @@ fn get_authority(rawurl: &str) ->
let end = end; // make end immutable so it can be captured
let host_is_end_plus_one: &fn() -> bool = || {
let xs = ['?', '#', '/'];
end+1 == len
&& !['?', '#', '/'].contains(&(rawurl[end] as char))
&& !xs.iter().any_(|x| *x == (rawurl[end] as char))
};
// finish up


@ -1175,8 +1175,8 @@ mod biguint_tests {
fn test_cmp() {
let data = [ &[], &[1], &[2], &[-1], &[0, 1], &[2, 1], &[1, 1, 1] ]
.map(|v| BigUint::from_slice(*v));
for data.eachi |i, ni| {
for vec::slice(data, i, data.len()).eachi |j0, nj| {
for data.iter().enumerate().advance |(i, ni)| {
for vec::slice(data, i, data.len()).iter().enumerate().advance |(j0, nj)| {
let j = j0 + i;
if i == j {
assert_eq!(ni.cmp(nj), Equal);
@ -1651,8 +1651,8 @@ mod bigint_tests {
nums.push(Zero::zero());
nums.push_all_move(vs.map(|s| BigInt::from_slice(Plus, *s)));
for nums.eachi |i, ni| {
for vec::slice(nums, i, nums.len()).eachi |j0, nj| {
for nums.iter().enumerate().advance |(i, ni)| {
for vec::slice(nums, i, nums.len()).iter().enumerate().advance |(j0, nj)| {
let j = i + j0;
if i == j {
assert_eq!(ni.cmp(nj), Equal);


@ -432,7 +432,7 @@ impl<D:Decoder,T:Decodable<D>> Decodable<D> for @T {
impl<'self, S:Encoder,T:Encodable<S>> Encodable<S> for &'self [T] {
fn encode(&self, s: &mut S) {
do s.emit_seq(self.len()) |s| {
for self.eachi |i, e| {
for self.iter().enumerate().advance |(i, e)| {
s.emit_seq_elt(i, |s| e.encode(s))
}
}
@ -442,7 +442,7 @@ impl<'self, S:Encoder,T:Encodable<S>> Encodable<S> for &'self [T] {
impl<S:Encoder,T:Encodable<S>> Encodable<S> for ~[T] {
fn encode(&self, s: &mut S) {
do s.emit_seq(self.len()) |s| {
for self.eachi |i, e| {
for self.iter().enumerate().advance |(i, e)| {
s.emit_seq_elt(i, |s| e.encode(s))
}
}
@ -462,7 +462,7 @@ impl<D:Decoder,T:Decodable<D>> Decodable<D> for ~[T] {
impl<S:Encoder,T:Encodable<S>> Encodable<S> for @[T] {
fn encode(&self, s: &mut S) {
do s.emit_seq(self.len()) |s| {
for self.eachi |i, e| {
for self.iter().enumerate().advance |(i, e)| {
s.emit_seq_elt(i, |s| e.encode(s))
}
}
@ -901,7 +901,7 @@ pub trait EncoderHelpers {
impl<S:Encoder> EncoderHelpers for S {
fn emit_from_vec<T>(&mut self, v: &[T], f: &fn(&mut S, &T)) {
do self.emit_seq(v.len()) |this| {
for v.eachi |i, e| {
for v.iter().enumerate().advance |(i, e)| {
do this.emit_seq_elt(i) |this| {
f(this, e)
}


@ -36,7 +36,7 @@ use core::vec;
/// The SHA-1 interface
trait Sha1 {
/// Provide message input as bytes
fn input(&mut self, &const [u8]);
fn input(&mut self, &[u8]);
/// Provide message input as string
fn input_str(&mut self, &str);
/**
@ -74,9 +74,9 @@ pub fn sha1() -> @Sha1 {
computed: bool,
work_buf: @mut ~[u32]};
fn add_input(st: &mut Sha1State, msg: &const [u8]) {
fn add_input(st: &mut Sha1State, msg: &[u8]) {
assert!((!st.computed));
for vec::each_const(msg) |element| {
for msg.iter().advance |element| {
st.msg_block[st.msg_block_idx] = *element;
st.msg_block_idx += 1u;
st.len_low += 8u32;
@ -242,7 +242,7 @@ pub fn sha1() -> @Sha1 {
self.h[4] = 0xC3D2E1F0u32;
self.computed = false;
}
fn input(&mut self, msg: &const [u8]) { add_input(self, msg); }
fn input(&mut self, msg: &[u8]) { add_input(self, msg); }
fn input_str(&mut self, msg: &str) {
add_input(self, msg.as_bytes());
}
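
Alongside the iterator change, the SHA-1 hunk drops the obsolete `&const [u8]` borrow in favour of a plain `&[u8]`, and the internal iteration `for vec::each_const(msg) |element|` becomes `for msg.iter().advance |element|`. A self-contained sketch in current Rust, where the `for` loop consumes the iterator directly (a bare `Vec<u8>` stands in for the SHA-1 message block, an assumption for illustration):

~~~
fn add_input(msg_block: &mut Vec<u8>, msg: &[u8]) {
    // Old: `fn add_input(st: &mut Sha1State, msg: &const [u8])` iterated with
    // `for vec::each_const(msg) |element| { ... }`.
    // New: `msg: &[u8]` and `for msg.iter().advance |element| { ... }`.
    // Today the `for` loop takes the iterator directly.
    for element in msg.iter() {
        msg_block.push(*element);
    }
}

fn main() {
    let mut block = Vec::new();
    add_input(&mut block, b"abc");
    assert_eq!(block, b"abc".to_vec());
}
~~~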


@ -731,7 +731,7 @@ fn copy_vec<T:Copy>(dest: &mut [T],
from: &[T]) {
assert!(s1+from.len() <= dest.len());
for from.eachi |i, v| {
for from.iter().enumerate().advance |(i, v)| {
dest[s1+i] = copy *v;
}
}
@ -743,7 +743,7 @@ fn shift_vec<T:Copy>(dest: &mut [T],
len: uint) {
assert!(s1+len <= dest.len());
let tmp = dest.slice(s2, s2+len).to_vec();
let tmp = dest.slice(s2, s2+len).to_owned();
copy_vec(dest, s1, tmp);
}


@ -271,7 +271,7 @@ pub fn parse(file: @Reader, longnames: bool) -> Result<~TermInfo, ~str> {
return Err(~"error: hit EOF before end of string table");
}
for string_offsets.eachi |i, v| {
for string_offsets.iter().enumerate().advance |(i, v)| {
let offset = *v;
if offset == 0xFFFF { // non-entry
loop;


@ -853,7 +853,7 @@ mod test_treemap {
for 90.times {
let k = rng.gen();
let v = rng.gen();
if !ctrl.contains(&(k, v)) {
if !ctrl.iter().any_(|x| x == &(k, v)) {
assert!(map.insert(k, v));
ctrl.push((k, v));
check_structure(&map);


@ -135,9 +135,9 @@ fn rustc_help() {
}
fn find_cmd(command_string: &str) -> Option<Command> {
do commands.find |command| {
do commands.iter().find_ |command| {
command.cmd == command_string
}
}.map_consume(|x| copy *x)
}
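
`commands.find(|c| ...)` becomes `commands.iter().find_(|c| ...)`, which yields an `Option<&Command>` that the code then copies out with `map_consume(|x| copy *x)`. A sketch of the same lookup with today's `find` and `copied()` (the `Command` fields shown are assumptions for the example):

~~~
#[derive(Clone, Copy)]
struct Command {
    cmd: &'static str,
    usage: &'static str,
}

const COMMANDS: &[Command] = &[
    Command { cmd: "build", usage: "compile the current package" },
    Command { cmd: "test", usage: "run the tests" },
];

fn find_cmd(command_string: &str) -> Option<Command> {
    // Old: `do commands.find |command| { ... }`
    // New: `do commands.iter().find_ |command| { ... }.map_consume(|x| copy *x)`
    // Today `find` yields `Option<&Command>` and `copied()` replaces the copy.
    COMMANDS
        .iter()
        .find(|command| command.cmd == command_string)
        .copied()
}

fn main() {
    assert_eq!(find_cmd("test").map(|c| c.usage), Some("run the tests"));
    assert!(find_cmd("frobnicate").is_none());
}
~~~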
fn cmd_help(args: &[~str]) -> ValidUsage {


@ -197,13 +197,13 @@ pub fn metas_in_cfg(cfg: ast::crate_cfg,
// which the item is valid
let cfg_metas = vec::filter_map(cfg_metas, |i| attr::get_meta_item_list(i));
if cfg_metas.all(|c| c.is_empty()) { return true; }
if cfg_metas.iter().all(|c| c.is_empty()) { return true; }
cfg_metas.any(|cfg_meta| {
cfg_meta.all(|cfg_mi| {
cfg_metas.iter().any_(|cfg_meta| {
cfg_meta.iter().all(|cfg_mi| {
match cfg_mi.node {
ast::meta_list(s, ref it) if "not" == s
=> it.all(|mi| !attr::contains(cfg, *mi)),
=> it.iter().all(|mi| !attr::contains(cfg, *mi)),
_ => attr::contains(cfg, *cfg_mi)
}
})


@ -2157,7 +2157,7 @@ pub fn type_to_str_inner(names: @TypeNames, outer0: &[TypeRef], ty: TypeRef)
_ => {}
}
let outer = vec::append_one(outer0.to_vec(), ty);
let outer = vec::append_one(outer0.to_owned(), ty);
let kind = llvm::LLVMGetTypeKind(ty);


@ -85,8 +85,7 @@ fn warn_if_multiple_versions(e: @mut Env,
*crate_cache[crate_cache.len() - 1].metas
);
let (matches, non_matches) =
partition(crate_cache.map_to_vec(|&entry| {
let vec: ~[Either<cache_entry, cache_entry>] = crate_cache.iter().transform(|&entry| {
let othername = loader::crate_name_from_metas(
copy *entry.metas);
if name == othername {
@ -94,7 +93,8 @@ fn warn_if_multiple_versions(e: @mut Env,
} else {
Right(entry)
}
}));
}).collect();
let (matches, non_matches) = partition(vec);
assert!(!matches.is_empty());
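
`map_to_vec` plus `partition` becomes an explicit `iter().transform(...).collect()` into a temporary vector followed by `partition(vec)`; `transform` was the pre-1.0 name of today's `map`. A compact sketch in current Rust that reaches the same split with `Iterator::partition` instead of the `Either` encoding (crate names are invented for the example):

~~~
fn main() {
    let crate_names = ["std", "extra", "std"];
    let name = "std";

    // Old: `partition(crate_cache.map_to_vec(|&entry| ...))` built the tagged
    // vector eagerly. New: `crate_cache.iter().transform(...).collect()` plus
    // a separate `partition(vec)`. `transform` is today's `map`; here
    // `Iterator::partition` does the mapping and the split in one pass.
    let (matches, non_matches): (Vec<&str>, Vec<&str>) = crate_names
        .iter()
        .copied()
        .partition(|&other| other == name);

    assert_eq!(matches, vec!["std", "std"]);
    assert_eq!(non_matches, vec!["extra"]);
}
~~~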


@ -104,7 +104,7 @@ pub fn get_used_crate_files(cstore: &CStore) -> ~[Path] {
pub fn add_used_library(cstore: &mut CStore, lib: @str) -> bool {
assert!(!lib.is_empty());
if cstore.used_libraries.contains(&lib) { return false; }
if cstore.used_libraries.iter().any_(|x| x == &lib) { return false; }
cstore.used_libraries.push(lib);
true
}


@ -1014,7 +1014,8 @@ fn encode_info_for_item(ecx: &EncodeContext,
ebml_w.end_tag();
// Now output the method info for each method.
for ty::trait_method_def_ids(tcx, local_def(item.id)).eachi |i, &method_def_id| {
let r = ty::trait_method_def_ids(tcx, local_def(item.id));
for r.iter().enumerate().advance |(i, &method_def_id)| {
assert_eq!(method_def_id.crate, ast::local_crate);
let method_ty = ty::method(tcx, method_def_id);


@ -132,7 +132,7 @@ fn borrowck_fn(fk: &visit::fn_kind,
LoanDataFlowOperator,
id_range,
all_loans.len());
for all_loans.eachi |loan_idx, loan| {
for all_loans.iter().enumerate().advance |(loan_idx, loan)| {
loan_dfcx.add_gen(loan.gen_scope, loan_idx);
loan_dfcx.add_kill(loan.kill_scope, loan_idx);
}


@ -348,11 +348,11 @@ impl MoveData {
* killed by scoping. See `doc.rs` for more details.
*/
for self.moves.eachi |i, move| {
for self.moves.iter().enumerate().advance |(i, move)| {
dfcx_moves.add_gen(move.id, i);
}
for self.var_assignments.eachi |i, assignment| {
for self.var_assignments.iter().enumerate().advance |(i, assignment)| {
dfcx_assign.add_gen(assignment.id, i);
self.kill_moves(assignment.path, assignment.id, dfcx_moves);
}
@ -375,7 +375,7 @@ impl MoveData {
}
// Kill all assignments when the variable goes out of scope:
for self.var_assignments.eachi |assignment_index, assignment| {
for self.var_assignments.iter().enumerate().advance |(assignment_index, assignment)| {
match *self.path(assignment.path).loan_path {
LpVar(id) => {
let kill_id = tcx.region_maps.encl_scope(id);


@ -225,7 +225,7 @@ pub fn check_item_recursion(sess: Session,
(visitor.visit_item)(it, (env, visitor));
fn visit_item(it: @item, (env, v): (env, visit::vt<env>)) {
if env.idstack.contains(&(it.id)) {
if env.idstack.iter().any_(|x| x == &(it.id)) {
env.sess.span_fatal(env.root_it.span, "recursive constant");
}
env.idstack.push(it.id);


@ -155,7 +155,7 @@ pub fn check_exhaustive(cx: @MatchCheckCtxt, sp: span, pats: ~[@pat]) {
};
let variants = ty::enum_variants(cx.tcx, id);
match variants.find(|v| v.id == vid) {
match variants.iter().find_(|v| v.id == vid) {
Some(v) => Some(cx.tcx.sess.str_of(v.name)),
None => {
fail!("check_exhaustive: bad variant in ctor")
@ -208,7 +208,7 @@ pub enum ctor {
pub fn is_useful(cx: @MatchCheckCtxt, m: &matrix, v: &[@pat]) -> useful {
if m.len() == 0u { return useful_; }
if m[0].len() == 0u { return not_useful; }
let real_pat = match m.find(|r| r[0].id != 0) {
let real_pat = match m.iter().find_(|r| r[0].id != 0) {
Some(r) => r[0], None => v[0]
};
let left_ty = if real_pat.id == 0 { ty::mk_nil() }
@ -372,7 +372,7 @@ pub fn missing_ctor(cx: @MatchCheckCtxt,
let variants = ty::enum_variants(cx.tcx, eid);
if found.len() != (*variants).len() {
for (*variants).each |v| {
if !found.contains(&(variant(v.id))) {
if !found.iter().any_(|x| x == &(variant(v.id))) {
return Some(variant(v.id));
}
}
@ -456,7 +456,7 @@ pub fn ctor_arity(cx: @MatchCheckCtxt, ctor: &ctor, ty: ty::t) -> uint {
ty::ty_enum(eid, _) => {
let id = match *ctor { variant(id) => id,
_ => fail!("impossible case") };
match vec::find(*ty::enum_variants(cx.tcx, eid), |v| v.id == id ) {
match ty::enum_variants(cx.tcx, eid).iter().find_(|v| v.id == id ) {
Some(v) => v.args.len(),
None => fail!("impossible case")
}
@ -613,7 +613,7 @@ pub fn specialize(cx: @MatchCheckCtxt,
if variant(variant_id) == *ctor_id {
// FIXME #4731: Is this right? --pcw
let args = flds.map(|ty_field| {
match flds.find(|f|
match flds.iter().find_(|f|
f.ident == ty_field.ident) {
Some(f) => f.pat,
_ => wild()
@ -644,7 +644,7 @@ pub fn specialize(cx: @MatchCheckCtxt,
}
}
let args = vec::map(class_fields, |class_field| {
match flds.find(|f|
match flds.iter().find_(|f|
f.ident == class_field.ident) {
Some(f) => f.pat,
_ => wild()
@ -806,13 +806,13 @@ pub fn is_refutable(cx: @MatchCheckCtxt, pat: &pat) -> bool {
}
pat_lit(_) | pat_range(_, _) => { true }
pat_struct(_, ref fields, _) => {
fields.any(|f| is_refutable(cx, f.pat))
fields.iter().any_(|f| is_refutable(cx, f.pat))
}
pat_tup(ref elts) => {
elts.any(|elt| is_refutable(cx, *elt))
elts.iter().any_(|elt| is_refutable(cx, *elt))
}
pat_enum(_, Some(ref args)) => {
args.any(|a| is_refutable(cx, *a))
args.iter().any_(|a| is_refutable(cx, *a))
}
pat_enum(_,_) => { false }
pat_vec(*) => { true }


@ -266,7 +266,7 @@ impl<O:DataFlowOperator> DataFlowContext<O> {
f: &fn(uint) -> bool) -> bool {
//! Helper for iterating over the bits in a bit set.
for words.eachi |word_index, &word| {
for words.iter().enumerate().advance |(word_index, &word)| {
if word != 0 {
let base_index = word_index * uint::bits;
for uint::range(0, uint::bits) |offset| {
@ -342,14 +342,14 @@ impl<O:DataFlowOperator+Copy+'static> DataFlowContext<O> {
let entry_str = bits_to_str(on_entry);
let gens = vec::slice(self.gens, start, end);
let gens_str = if gens.any(|&u| u != 0) {
let gens_str = if gens.iter().any_(|&u| u != 0) {
fmt!(" gen: %s", bits_to_str(gens))
} else {
~""
};
let kills = vec::slice(self.kills, start, end);
let kills_str = if kills.any(|&u| u != 0) {
let kills_str = if kills.iter().any_(|&u| u != 0) {
fmt!(" kill: %s", bits_to_str(kills))
} else {
~""
@ -503,12 +503,12 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
// func_bits represents the state when the function
// returns
let mut func_bits = reslice(in_out).to_vec();
let mut func_bits = reslice(in_out).to_owned();
loop_scopes.push(LoopScope {
loop_id: expr.id,
loop_kind: ForLoop,
break_bits: reslice(in_out).to_vec()
break_bits: reslice(in_out).to_owned()
});
for decl.inputs.each |input| {
self.walk_pat(input.pat, func_bits, loop_scopes);
@ -547,7 +547,7 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
//
self.walk_expr(cond, in_out, loop_scopes);
let mut then_bits = reslice(in_out).to_vec();
let mut then_bits = reslice(in_out).to_owned();
self.walk_block(then, then_bits, loop_scopes);
self.walk_opt_expr(els, in_out, loop_scopes);
@ -569,11 +569,11 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
self.walk_expr(cond, in_out, loop_scopes);
let mut body_bits = reslice(in_out).to_vec();
let mut body_bits = reslice(in_out).to_owned();
loop_scopes.push(LoopScope {
loop_id: expr.id,
loop_kind: TrueLoop,
break_bits: reslice(in_out).to_vec()
break_bits: reslice(in_out).to_owned()
});
self.walk_block(blk, body_bits, loop_scopes);
self.add_to_entry_set(expr.id, body_bits);
@ -591,12 +591,12 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
// <--+ (break)
//
let mut body_bits = reslice(in_out).to_vec();
let mut body_bits = reslice(in_out).to_owned();
self.reset(in_out);
loop_scopes.push(LoopScope {
loop_id: expr.id,
loop_kind: TrueLoop,
break_bits: reslice(in_out).to_vec()
break_bits: reslice(in_out).to_owned()
});
self.walk_block(blk, body_bits, loop_scopes);
self.add_to_entry_set(expr.id, body_bits);
@ -620,7 +620,7 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
//
self.walk_expr(discr, in_out, loop_scopes);
let mut guards = reslice(in_out).to_vec();
let mut guards = reslice(in_out).to_owned();
// We know that exactly one arm will be taken, so we
// can start out with a blank slate and just union
@ -633,7 +633,7 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
// determine the bits for the body and then union
// them into `in_out`, which reflects all bodies to date
let mut body = reslice(guards).to_vec();
let mut body = reslice(guards).to_owned();
self.walk_pat_alternatives(arm.pats, body, loop_scopes);
self.walk_block(&arm.body, body, loop_scopes);
join_bits(&self.dfcx.oper, body, in_out);
@ -644,7 +644,7 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
self.walk_opt_expr(o_e, in_out, loop_scopes);
// is this a return from a `for`-loop closure?
match loop_scopes.position(|s| s.loop_kind == ForLoop) {
match loop_scopes.iter().position_(|s| s.loop_kind == ForLoop) {
Some(i) => {
// if so, add the in_out bits to the state
// upon exit. Remember that we cannot count
@ -735,7 +735,7 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
ast::expr_binary(_, op, l, r) if ast_util::lazy_binop(op) => {
self.walk_expr(l, in_out, loop_scopes);
let temp = reslice(in_out).to_vec();
let temp = reslice(in_out).to_owned();
self.walk_expr(r, in_out, loop_scopes);
join_bits(&self.dfcx.oper, temp, in_out);
}
@ -896,7 +896,7 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
// In the general case, the patterns in `pats` are
// alternatives, so we must treat this like an N-way select
// statement.
let initial_state = reslice(in_out).to_vec();
let initial_state = reslice(in_out).to_owned();
for pats.each |&pat| {
let mut temp = copy initial_state;
self.walk_pat(pat, temp, loop_scopes);
@ -917,7 +917,7 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
Some(_) => {
match self.tcx().def_map.find(&expr.id) {
Some(&ast::def_label(loop_id)) => {
match loop_scopes.position(|l| l.loop_id == loop_id) {
match loop_scopes.iter().position_(|l| l.loop_id == loop_id) {
Some(i) => i,
None => {
self.tcx().sess.span_bug(


@ -477,7 +477,7 @@ pub fn check_cast_for_escaping_regions(
// Check, based on the region associated with the trait, whether it can
// possibly escape the enclosing fn item (note that all type parameters
// must have been declared on the enclosing fn item).
if target_regions.any(|r| is_re_scope(*r)) {
if target_regions.iter().any_(|r| is_re_scope(*r)) {
return; /* case (1) */
}
@ -492,7 +492,7 @@ pub fn check_cast_for_escaping_regions(
|_r| {
// FIXME(#5723) --- turn this check on once &Objects are usable
//
// if !target_regions.any(|t_r| is_subregion_of(cx, *t_r, r)) {
// if !target_regions.iter().any_(|t_r| is_subregion_of(cx, *t_r, r)) {
// cx.tcx.sess.span_err(
// source.span,
// fmt!("source contains borrowed pointer with lifetime \
@ -506,7 +506,7 @@ pub fn check_cast_for_escaping_regions(
|ty| {
match ty::get(ty).sty {
ty::ty_param(source_param) => {
if target_params.contains(&source_param) {
if target_params.iter().any_(|x| x == &source_param) {
/* case (2) */
} else {
check_durable(cx.tcx, ty, source.span); /* case (3) */


@ -90,7 +90,7 @@ impl LanguageItems {
}
pub fn each_item(&self, f: &fn(def_id: def_id, i: uint) -> bool) -> bool {
self.items.eachi(|i, &item| f(item.get(), i))
self.items.iter().enumerate().advance(|(i, &item)| f(item.get(), i))
}
pub fn item_name(index: uint) -> &'static str {


@ -1003,7 +1003,7 @@ fn lint_missing_doc() -> visit::vt<@mut Context> {
// If we have doc(hidden), nothing to do
if cx.doc_hidden { return }
// If we're documented, nothing to do
if attrs.any(|a| a.node.is_sugared_doc) { return }
if attrs.iter().any_(|a| a.node.is_sugared_doc) { return }
// otherwise, warn!
cx.span_lint(missing_doc, sp, msg);


@ -872,7 +872,7 @@ impl mem_categorization_ctxt {
}
};
for subpats.eachi |i, &subpat| {
for subpats.iter().enumerate().advance |(i, &subpat)| {
let subpat_ty = self.pat_ty(subpat); // see (*)
let subcmt =
@ -885,7 +885,7 @@ impl mem_categorization_ctxt {
}
Some(&ast::def_fn(*)) |
Some(&ast::def_struct(*)) => {
for subpats.eachi |i, &subpat| {
for subpats.iter().enumerate().advance |(i, &subpat)| {
let subpat_ty = self.pat_ty(subpat); // see (*)
let cmt_field =
self.cat_imm_interior(
@ -926,7 +926,7 @@ impl mem_categorization_ctxt {
ast::pat_tup(ref subpats) => {
// (p1, ..., pN)
for subpats.eachi |i, &subpat| {
for subpats.iter().enumerate().advance |(i, &subpat)| {
let subpat_ty = self.pat_ty(subpat); // see (*)
let subcmt =
self.cat_imm_interior(


@ -370,8 +370,8 @@ impl VisitContext {
// any fields which (1) were not explicitly
// specified and (2) have a type that
// moves-by-default:
let consume_with = with_fields.any(|tf| {
!fields.any(|f| f.node.ident == tf.ident) &&
let consume_with = with_fields.iter().any_(|tf| {
!fields.iter().any_(|f| f.node.ident == tf.ident) &&
ty::type_moves_by_default(self.tcx, tf.mt.ty)
});


@ -252,7 +252,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
method_id.node);
if is_private &&
(container_id.crate != local_crate ||
!privileged_items.contains(&(container_id.node))) {
!privileged_items.iter().any_(|x| x == &(container_id.node))) {
tcx.sess.span_err(span,
fmt!("method `%s` is private",
token::ident_to_str(name)));
@ -280,7 +280,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
def_fn(def_id, _) => {
if def_id.crate == local_crate {
if local_item_is_private(span, def_id.node) &&
!privileged_items.contains(&def_id.node) {
!privileged_items.iter().any_(|x| x == &def_id.node) {
tcx.sess.span_err(span,
fmt!("function `%s` is private",
token::ident_to_str(path.idents.last())));
@ -324,8 +324,8 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
match (*methods)[method_num] {
provided(method)
if method.vis == private &&
!privileged_items
.contains(&(trait_id.node)) => {
!privileged_items.iter()
.any_(|x| x == &(trait_id.node)) => {
tcx.sess.span_err(span,
fmt!("method `%s` is private",
token::ident_to_str(&method
@ -409,7 +409,8 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
match ty::get(ty::type_autoderef(tcx, ty::expr_ty(tcx,
base))).sty {
ty_struct(id, _)
if id.crate != local_crate || !privileged_items.contains(&(id.node)) => {
if id.crate != local_crate || !privileged_items.iter()
.any_(|x| x == &(id.node)) => {
debug!("(privacy checking) checking field access");
check_field(expr.span, id, ident);
}
@ -422,7 +423,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
base))).sty {
ty_struct(id, _)
if id.crate != local_crate ||
!privileged_items.contains(&(id.node)) => {
!privileged_items.iter().any_(|x| x == &(id.node)) => {
match method_map.find(&expr.id) {
None => {
tcx.sess.span_bug(expr.span,
@ -448,7 +449,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
match ty::get(ty::expr_ty(tcx, expr)).sty {
ty_struct(id, _) => {
if id.crate != local_crate ||
!privileged_items.contains(&(id.node)) {
!privileged_items.iter().any_(|x| x == &(id.node)) {
for (*fields).each |field| {
debug!("(privacy checking) checking \
field in struct literal");
@ -459,7 +460,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
}
ty_enum(id, _) => {
if id.crate != local_crate ||
!privileged_items.contains(&(id.node)) {
!privileged_items.iter().any_(|x| x == &(id.node)) {
match tcx.def_map.get_copy(&expr.id) {
def_variant(_, variant_id) => {
for (*fields).each |field| {
@ -496,7 +497,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
match ty::get(ty::expr_ty(tcx, operand)).sty {
ty_enum(id, _) => {
if id.crate != local_crate ||
!privileged_items.contains(&(id.node)) {
!privileged_items.iter().any_(|x| x == &(id.node)) {
check_variant(expr.span, id);
}
}
@ -514,7 +515,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
match ty::get(ty::pat_ty(tcx, pattern)).sty {
ty_struct(id, _) => {
if id.crate != local_crate ||
!privileged_items.contains(&(id.node)) {
!privileged_items.iter().any_(|x| x == &(id.node)) {
for fields.each |field| {
debug!("(privacy checking) checking \
struct pattern");
@ -525,8 +526,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
}
ty_enum(enum_id, _) => {
if enum_id.crate != local_crate ||
!privileged_items.contains(
&enum_id.node) {
!privileged_items.iter().any_(|x| x == &enum_id.node) {
match tcx.def_map.find(&pattern.id) {
Some(&def_variant(_, variant_id)) => {
for fields.each |field| {


@ -78,7 +78,7 @@ impl RegionMaps {
pub fn relate_free_regions(&mut self, sub: FreeRegion, sup: FreeRegion) {
match self.free_region_map.find_mut(&sub) {
Some(sups) => {
if !sups.contains(&sup) {
if !sups.iter().any_(|x| x == &sup) {
sups.push(sup);
}
return;
@ -202,7 +202,7 @@ impl RegionMaps {
return true;
}
if !queue.contains(parent) {
if !queue.iter().any_(|x| x == parent) {
queue.push(*parent);
}
}
@ -632,7 +632,7 @@ impl DetermineRpCtxt {
ambient_variance: self.ambient_variance,
id: self.item_id
};
if !vec.contains(&dep) { vec.push(dep); }
if !vec.iter().any_(|x| x == &dep) { vec.push(dep); }
}
// Determines whether a reference to a region that appears in the


@ -1463,7 +1463,7 @@ impl Resolver {
let path_len = full_path.idents.len();
assert!(path_len != 0);
for full_path.idents.eachi |i, ident| {
for full_path.idents.iter().enumerate().advance |(i, ident)| {
if i != path_len - 1 {
module_path.push(*ident);
}
@ -4033,7 +4033,7 @@ impl Resolver {
pub fn check_consistent_bindings(@mut self, arm: &arm) {
if arm.pats.len() == 0 { return; }
let map_0 = self.binding_mode_map(arm.pats[0]);
for arm.pats.eachi() |i, p| {
for arm.pats.iter().enumerate().advance |(i, p)| {
let map_i = self.binding_mode_map(*p);
for map_0.each |&key, &binding_0| {
@ -4605,7 +4605,7 @@ impl Resolver {
pub fn intern_module_part_of_path(@mut self, path: @Path) -> ~[ident] {
let mut module_path_idents = ~[];
for path.idents.eachi |index, ident| {
for path.idents.iter().enumerate().advance |(index, ident)| {
if index == path.idents.len() - 1 {
break;
}
@ -4843,8 +4843,7 @@ impl Resolver {
}
let mut smallest = 0;
for maybes.eachi |i, &other| {
for maybes.iter().enumerate().advance |(i, &other)| {
values[i] = name.lev_distance(other);
if values[i] <= values[smallest] {


@ -388,7 +388,7 @@ pub fn expand_nested_bindings<'r>(bcx: block,
match br.pats[col].node {
ast::pat_ident(_, path, Some(inner)) => {
let pats = vec::append(
vec::slice(br.pats, 0u, col).to_vec(),
vec::slice(br.pats, 0u, col).to_owned(),
vec::append(~[inner],
vec::slice(br.pats, col + 1u,
br.pats.len())));
@ -581,8 +581,7 @@ pub fn enter_opt<'r>(bcx: block,
let mut reordered_patterns = ~[];
for ty::lookup_struct_fields(tcx, struct_id).each
|field| {
match field_pats.find(|p|
p.ident == field.ident) {
match field_pats.iter().find_(|p| p.ident == field.ident) {
None => reordered_patterns.push(dummy),
Some(fp) => reordered_patterns.push(fp.pat)
}
@ -642,7 +641,7 @@ pub fn enter_rec_or_struct<'r>(bcx: block,
ast::pat_struct(_, ref fpats, _) => {
let mut pats = ~[];
for fields.each |fname| {
match fpats.find(|p| p.ident == *fname) {
match fpats.iter().find_(|p| p.ident == *fname) {
None => pats.push(dummy),
Some(pat) => pats.push(pat.pat)
}
@ -1308,11 +1307,11 @@ pub fn compile_submatch(bcx: block,
if has_nested_bindings(m, col) {
expand_nested_bindings(bcx, m, col, val)
} else {
m.to_vec()
m.to_owned()
}
};
let vals_left = vec::append(vec::slice(vals, 0u, col).to_vec(),
let vals_left = vec::append(vec::slice(vals, 0u, col).to_owned(),
vec::slice(vals, col + 1u, vals.len()));
let ccx = bcx.fcx.ccx;
let mut pat_id = 0;
@ -1808,7 +1807,7 @@ pub fn bind_irrefutable_pat(bcx: block,
vinfo.disr_val,
val);
for sub_pats.iter().advance |sub_pat| {
for args.vals.eachi |i, argval| {
for args.vals.iter().enumerate().advance |(i, argval)| {
bcx = bind_irrefutable_pat(bcx,
sub_pat[i],
*argval,
@ -1826,7 +1825,7 @@ pub fn bind_irrefutable_pat(bcx: block,
Some(ref elems) => {
// This is the tuple struct case.
let repr = adt::represent_node(bcx, pat.id);
for elems.eachi |i, elem| {
for elems.iter().enumerate().advance |(i, elem)| {
let fldptr = adt::trans_field_ptr(bcx, repr,
val, 0, i);
bcx = bind_irrefutable_pat(bcx,
@ -1865,7 +1864,7 @@ pub fn bind_irrefutable_pat(bcx: block,
}
ast::pat_tup(ref elems) => {
let repr = adt::represent_node(bcx, pat.id);
for elems.eachi |i, elem| {
for elems.iter().enumerate().advance |(i, elem)| {
let fldptr = adt::trans_field_ptr(bcx, repr, val, 0, i);
bcx = bind_irrefutable_pat(bcx,
*elem,


@ -145,7 +145,7 @@ fn represent_type_uncached(cx: &mut CrateContext, t: ty::t) -> Repr {
mk_struct(cx, self.tys, false).size == 0
}
fn find_ptr(&self) -> Option<uint> {
self.tys.position(|&ty| mono_data_classify(ty) == MonoNonNull)
self.tys.iter().position_(|&ty| mono_data_classify(ty) == MonoNonNull)
}
}
@ -161,7 +161,7 @@ fn represent_type_uncached(cx: &mut CrateContext, t: ty::t) -> Repr {
return Univariant(mk_struct(cx, [], false), false);
}
if cases.all(|c| c.tys.len() == 0) {
if cases.iter().all(|c| c.tys.len() == 0) {
// All bodies empty -> intlike
let discrs = cases.map(|c| c.discr);
return CEnum(*discrs.iter().min().unwrap(), *discrs.iter().max().unwrap());
@ -545,7 +545,7 @@ fn build_const_struct(ccx: &mut CrateContext, st: &Struct, vals: &[ValueRef])
let mut offset = 0;
let mut cfields = ~[];
for st.fields.eachi |i, &ty| {
for st.fields.iter().enumerate().advance |(i, &ty)| {
let llty = type_of::sizing_type_of(ccx, ty);
let type_align = machine::llalign_of_min(ccx, llty)
/*bad*/as u64;


@ -133,7 +133,7 @@ pub fn trans_inline_asm(bcx: block, ia: &ast::inline_asm) -> block {
let op = PointerCast(bcx, aoutputs[0], T_ptr(val_ty(outputs[0])));
Store(bcx, r, op);
} else {
for aoutputs.eachi |i, o| {
for aoutputs.iter().enumerate().advance |(i, o)| {
let v = ExtractValue(bcx, r, i);
let op = PointerCast(bcx, *o, T_ptr(val_ty(outputs[i])));
Store(bcx, v, op);


@ -670,7 +670,7 @@ pub fn iter_structural_ty(cx: block, av: ValueRef, t: ty::t,
let tcx = cx.tcx();
let mut cx = cx;
for variant.args.eachi |i, &arg| {
for variant.args.iter().enumerate().advance |(i, &arg)| {
cx = f(cx,
adt::trans_field_ptr(cx, repr, av, variant.disr_val, i),
ty::subst_tps(tcx, tps, None, arg));
@ -683,7 +683,7 @@ pub fn iter_structural_ty(cx: block, av: ValueRef, t: ty::t,
ty::ty_struct(*) => {
let repr = adt::represent_type(cx.ccx(), t);
do expr::with_field_tys(cx.tcx(), t, None) |discr, field_tys| {
for field_tys.eachi |i, field_ty| {
for field_tys.iter().enumerate().advance |(i, field_ty)| {
let llfld_a = adt::trans_field_ptr(cx, repr, av, discr, i);
cx = f(cx, llfld_a, field_ty.mt.ty);
}
@ -696,7 +696,7 @@ pub fn iter_structural_ty(cx: block, av: ValueRef, t: ty::t,
}
ty::ty_tup(ref args) => {
let repr = adt::represent_type(cx.ccx(), t);
for args.eachi |i, arg| {
for args.iter().enumerate().advance |(i, arg)| {
let llfld_a = adt::trans_field_ptr(cx, repr, av, 0, i);
cx = f(cx, llfld_a, *arg);
}
@ -2017,7 +2017,7 @@ pub fn trans_enum_variant(ccx: @mut CrateContext,
repr, ty_to_str(ccx.tcx, enum_ty));
adt::trans_start_init(bcx, repr, fcx.llretptr.get(), disr);
for args.eachi |i, va| {
for args.iter().enumerate().advance |(i, va)| {
let lldestptr = adt::trans_field_ptr(bcx,
repr,
fcx.llretptr.get(),
@ -2092,7 +2092,7 @@ pub fn trans_tuple_struct(ccx: @mut CrateContext,
let repr = adt::represent_type(ccx, tup_ty);
adt::trans_start_init(bcx, repr, fcx.llretptr.get(), 0);
for fields.eachi |i, field| {
for fields.iter().enumerate().advance |(i, field)| {
let lldestptr = adt::trans_field_ptr(bcx,
repr,
fcx.llretptr.get(),


@ -43,7 +43,7 @@ impl FnType {
let fnty = T_fn(atys, rty);
let llfn = decl(fnty);
for vec::eachi(self.attrs) |i, a| {
for self.attrs.iter().enumerate().advance |(i, a)| {
match *a {
option::Some(attr) => {
unsafe {
@ -100,7 +100,7 @@ impl FnType {
ret_def: bool,
llargbundle: ValueRef,
llretval: ValueRef) {
for vec::eachi(self.attrs) |i, a| {
for self.attrs.iter().enumerate().advance |(i, a)| {
match *a {
option::Some(attr) => {
unsafe {


@ -226,7 +226,7 @@ pub fn trans_fn_ref_with_vtables(
type_params.repr(bcx.tcx()),
vtables.repr(bcx.tcx()));
assert!(type_params.all(|t| !ty::type_needs_infer(*t)));
assert!(type_params.iter().all(|t| !ty::type_needs_infer(*t)));
// Polytype of the function item (may have type params)
let fn_tpt = ty::lookup_item_type(tcx, def_id);
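
`type_params.all(|t| ...)` becomes `type_params.iter().all(|t| ...)`. Like the old vector method, the adaptor short-circuits and is vacuously true for an empty sequence. A small sketch, with a hypothetical `needs_infer` standing in for `ty::type_needs_infer`:

~~~
fn needs_infer(_t: &str) -> bool {
    // Hypothetical stand-in for `ty::type_needs_infer`.
    false
}

fn main() {
    let type_params = ["T", "U"];

    // Old: `assert!(type_params.all(|t| !ty::type_needs_infer(*t)))` — the
    // vector method. New: `type_params.iter().all(|t| ...)` — the adaptor.
    // `all` is vacuously true for an empty sequence.
    assert!(type_params.iter().all(|t| !needs_infer(t)));

    let empty: [&str; 0] = [];
    assert!(empty.iter().all(|t| !needs_infer(t)));
}
~~~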
@ -681,7 +681,7 @@ pub fn trans_args(cx: block,
match args {
ArgExprs(arg_exprs) => {
let last = arg_exprs.len() - 1u;
for arg_exprs.eachi |i, arg_expr| {
for arg_exprs.iter().enumerate().advance |(i, arg_expr)| {
let arg_val = unpack_result!(bcx, {
trans_arg_expr(bcx,
arg_tys[i],


@ -225,7 +225,7 @@ pub fn store_environment(bcx: block,
// Copy expr values into boxed bindings.
let mut bcx = bcx;
for vec::eachi(bound_values) |i, bv| {
for bound_values.iter().enumerate().advance |(i, bv)| {
debug!("Copy %s into closure", bv.to_str(ccx));
if ccx.sess.asm_comments() {


@ -468,15 +468,14 @@ pub fn add_clean_free(cx: block, ptr: ValueRef, heap: heap) {
pub fn revoke_clean(cx: block, val: ValueRef) {
do in_scope_cx(cx) |scope_info| {
let scope_info = &mut *scope_info; // FIXME(#5074) workaround borrowck
let cleanup_pos = vec::position(
scope_info.cleanups,
let cleanup_pos = scope_info.cleanups.iter().position_(
|cu| match *cu {
clean_temp(v, _, _) if v == val => true,
_ => false
});
for cleanup_pos.iter().advance |i| {
scope_info.cleanups =
vec::append(vec::slice(scope_info.cleanups, 0u, *i).to_vec(),
vec::append(vec::slice(scope_info.cleanups, 0u, *i).to_owned(),
vec::slice(scope_info.cleanups,
*i + 1u,
scope_info.cleanups.len()));
@ -1397,7 +1396,7 @@ pub fn node_id_type_params(bcx: block, id: ast::node_id) -> ~[ty::t] {
let tcx = bcx.tcx();
let params = ty::node_id_to_type_params(tcx, id);
if !params.all(|t| !ty::type_needs_infer(*t)) {
if !params.iter().all(|t| !ty::type_needs_infer(*t)) {
bcx.sess().bug(
fmt!("Type parameters for node %d include inference types: %s",
id, params.map(|t| bcx.ty_to_str(*t)).connect(",")));


@ -91,7 +91,7 @@ pub fn const_vec(cx: @mut CrateContext, e: @ast::expr, es: &[@ast::expr])
let sz = llvm::LLVMConstMul(C_uint(cx, es.len()), unit_sz);
let vs = es.map(|e| const_expr(cx, *e));
// If the vector contains enums, an LLVM array won't work.
let v = if vs.any(|vi| val_ty(*vi) != llunitty) {
let v = if vs.iter().any_(|vi| val_ty(*vi) != llunitty) {
C_struct(vs)
} else {
C_array(llunitty, vs)
@ -487,8 +487,8 @@ fn const_expr_unadjusted(cx: @mut CrateContext, e: @ast::expr) -> ValueRef {
do expr::with_field_tys(tcx, ety, Some(e.id))
|discr, field_tys| {
let cs = field_tys.map(|field_ty| {
match fs.find(|f| field_ty.ident == f.node.ident) {
Some(ref f) => const_expr(cx, (*f).node.expr),
match fs.iter().find_(|f| field_ty.ident == f.node.ident) {
Some(f) => const_expr(cx, (*f).node.expr),
None => {
cx.tcx.sess.span_bug(e.span, "missing struct field");
}


@ -1152,8 +1152,7 @@ fn trans_rec_or_struct(bcx: block,
let mut need_base = vec::from_elem(field_tys.len(), true);
let numbered_fields = do fields.map |field| {
let opt_pos = vec::position(field_tys, |field_ty|
field_ty.ident == field.node.ident);
let opt_pos = field_tys.iter().position_(|field_ty| field_ty.ident == field.node.ident);
match opt_pos {
Some(i) => {
need_base[i] = false;
@ -1168,7 +1167,7 @@ fn trans_rec_or_struct(bcx: block,
let optbase = match base {
Some(base_expr) => {
let mut leftovers = ~[];
for need_base.eachi |i, b| {
for need_base.iter().enumerate().advance |(i, b)| {
if *b {
leftovers.push((i, field_tys[i].mt.ty))
}
@ -1177,7 +1176,7 @@ fn trans_rec_or_struct(bcx: block,
fields: leftovers })
}
None => {
if need_base.any(|b| *b) {
if need_base.iter().any_(|b| *b) {
tcx.sess.span_bug(expr_span, "missing fields and no base expr")
}
None


@ -462,7 +462,7 @@ pub fn trans_struct_drop(bcx: block,
// Drop the fields
let field_tys = ty::struct_fields(bcx.tcx(), class_did, substs);
for vec::eachi(field_tys) |i, fld| {
for field_tys.iter().enumerate().advance |(i, fld)| {
let llfld_a = adt::trans_field_ptr(bcx, repr, v0, 0, i);
bcx = drop_ty(bcx, llfld_a, fld.mt.ty);
}


@ -345,7 +345,7 @@ pub fn trans_static_method_callee(bcx: block,
match vtbls[bound_index] {
typeck::vtable_static(impl_did, ref rcvr_substs, rcvr_origins) => {
assert!(rcvr_substs.all(|t| !ty::type_needs_infer(*t)));
assert!(rcvr_substs.iter().all(|t| !ty::type_needs_infer(*t)));
let mth_id = method_with_name_or_default(bcx.ccx(),
impl_did,
@ -375,7 +375,7 @@ pub fn trans_static_method_callee(bcx: block,
pub fn method_from_methods(ms: &[@ast::method], name: ast::ident)
-> Option<ast::def_id> {
ms.find(|m| m.ident == name).map(|m| ast_util::local_def(m.id))
ms.iter().find_(|m| m.ident == name).map(|m| ast_util::local_def(m.id))
}
pub fn method_with_name_or_default(ccx: @mut CrateContext,
@ -512,7 +512,7 @@ pub fn combine_impl_and_methods_tps(bcx: block,
let node_substs = node_id_type_params(bcx, callee_id);
debug!("rcvr_substs=%?", rcvr_substs.map(|t| bcx.ty_to_str(*t)));
let ty_substs
= vec::append(rcvr_substs.to_vec(),
= vec::append(rcvr_substs.to_owned(),
vec::tailn(node_substs,
node_substs.len() - n_m_tps));
debug!("n_m_tps=%?", n_m_tps);


@ -60,7 +60,7 @@ pub fn monomorphic_fn(ccx: @mut CrateContext,
impl_did_opt.repr(ccx.tcx),
ref_id);
assert!(real_substs.tps.all(|t| !ty::type_needs_infer(*t)));
assert!(real_substs.tps.iter().all(|t| !ty::type_needs_infer(*t)));
let _icx = ccx.insn_ctxt("monomorphic_fn");
let mut must_cast = false;
let substs = vec::map(real_substs.tps, |t| {
@ -209,7 +209,7 @@ pub fn monomorphic_fn(ccx: @mut CrateContext,
}
ast_map::node_variant(ref v, enum_item, _) => {
let tvs = ty::enum_variants(ccx.tcx, local_def(enum_item.id));
let this_tv = vec::find(*tvs, |tv| { tv.id.node == fn_id.node}).get();
let this_tv = *tvs.iter().find_(|tv| { tv.id.node == fn_id.node}).get();
let d = mk_lldecl();
set_inline_hint(d);
match v.node.kind {


@ -95,7 +95,7 @@ impl Reflector {
let v = self.visitor_val;
debug!("passing %u args:", args.len());
let bcx = self.bcx;
for args.eachi |i, a| {
for args.iter().enumerate().advance |(i, a)| {
debug!("arg %u: %s", i, val_str(bcx.ccx().tn, *a));
}
let bool_ty = ty::mk_bool();
@ -207,7 +207,7 @@ impl Reflector {
let extra = ~[self.c_uint(tys.len())]
+ self.c_size_and_align(t);
do self.bracketed(~"tup", extra) |this| {
for tys.eachi |i, t| {
for tys.iter().enumerate().advance |(i, t)| {
let extra = ~[this.c_uint(i), this.c_tydesc(*t)];
this.visit(~"tup_field", extra);
}
@ -252,7 +252,7 @@ impl Reflector {
let extra = ~[self.c_uint(fields.len())]
+ self.c_size_and_align(t);
do self.bracketed(~"class", extra) |this| {
for fields.eachi |i, field| {
for fields.iter().enumerate().advance |(i, field)| {
let extra = ~[this.c_uint(i),
this.c_slice(
bcx.ccx().sess.str_of(field.ident))]
@ -310,14 +310,14 @@ impl Reflector {
let enum_args = ~[self.c_uint(variants.len()), make_get_disr()]
+ self.c_size_and_align(t);
do self.bracketed(~"enum", enum_args) |this| {
for variants.eachi |i, v| {
for variants.iter().enumerate().advance |(i, v)| {
let name = ccx.sess.str_of(v.name);
let variant_args = ~[this.c_uint(i),
this.c_int(v.disr_val),
this.c_uint(v.args.len()),
this.c_slice(name)];
do this.bracketed(~"enum_variant", variant_args) |this| {
for v.args.eachi |j, a| {
for v.args.iter().enumerate().advance |(j, a)| {
let bcx = this.bcx;
let null = C_null(llptrty);
let ptr = adt::trans_field_ptr(bcx, repr, null, v.disr_val, j);
@ -352,7 +352,7 @@ impl Reflector {
}
pub fn visit_sig(&mut self, retval: uint, sig: &ty::FnSig) {
for sig.inputs.eachi |i, arg| {
for sig.inputs.iter().enumerate().advance |(i, arg)| {
let modeval = 5u; // "by copy"
let extra = ~[self.c_uint(i),
self.c_uint(modeval),


@ -386,7 +386,7 @@ pub fn write_content(bcx: block,
SaveIn(lldest) => {
let mut temp_cleanups = ~[];
for elements.eachi |i, element| {
for elements.iter().enumerate().advance |(i, element)| {
let lleltptr = GEPi(bcx, lldest, [i]);
debug!("writing index %? with lleltptr=%?",
i, bcx.val_str(lleltptr));


@ -2116,7 +2116,7 @@ pub fn type_contents(cx: ctxt, ty: t) -> TypeContents {
ty_struct(did, ref substs) => {
let flds = struct_fields(cx, did, substs);
let mut res = flds.foldl(
let mut res = flds.iter().fold(
TC_NONE,
|tc, f| tc + tc_mt(cx, f.mt, cache));
if ty::has_dtor(cx, did) {
@ -2126,7 +2126,7 @@ pub fn type_contents(cx: ctxt, ty: t) -> TypeContents {
}
ty_tup(ref tys) => {
tys.foldl(TC_NONE, |tc, ty| *tc + tc_ty(cx, *ty, cache))
tys.iter().fold(TC_NONE, |tc, ty| tc + tc_ty(cx, *ty, cache))
}
ty_enum(did, ref substs) => {
@ -2136,10 +2136,9 @@ pub fn type_contents(cx: ctxt, ty: t) -> TypeContents {
// are non-copyable
TC_EMPTY_ENUM
} else {
variants.foldl(TC_NONE, |tc, variant| {
variant.args.foldl(
*tc,
|tc, arg_ty| *tc + tc_ty(cx, *arg_ty, cache))
variants.iter().fold(TC_NONE, |tc, variant| {
variant.args.iter().fold(tc,
|tc, arg_ty| tc + tc_ty(cx, *arg_ty, cache))
})
};
apply_tc_attr(cx, did, res)
@ -2365,7 +2364,7 @@ pub fn is_instantiable(cx: ctxt, r_ty: t) -> bool {
}
ty_tup(ref ts) => {
ts.any(|t| type_requires(cx, seen, r_ty, *t))
ts.iter().any_(|t| type_requires(cx, seen, r_ty, *t))
}
ty_enum(ref did, _) if vec::contains(*seen, did) => {
@ -3373,7 +3372,7 @@ pub fn field_idx_strict(tcx: ty::ctxt, id: ast::ident, fields: &[field])
}
pub fn method_idx(id: ast::ident, meths: &[@Method]) -> Option<uint> {
vec::position(meths, |m| m.ident == id)
meths.iter().position_(|m| m.ident == id)
}
/// Returns a vector containing the indices of all type parameters that appear
@ -4129,9 +4128,10 @@ pub fn lookup_struct_field(cx: ctxt,
parent: ast::def_id,
field_id: ast::def_id)
-> field_ty {
match vec::find(lookup_struct_fields(cx, parent),
let r = lookup_struct_fields(cx, parent);
match r.iter().find_(
|f| f.id.node == field_id.node) {
Some(t) => t,
Some(t) => *t,
None => cx.sess.bug("struct ID not found in parent's fields")
}
}


@ -534,7 +534,7 @@ pub fn bound_lifetimes<AC:AstConv>(
let special_idents = [special_idents::statik, special_idents::self_];
let mut bound_lifetime_names = opt_vec::Empty;
ast_lifetimes.map_to_vec(|ast_lifetime| {
if special_idents.any(|&i| i == ast_lifetime.ident) {
if special_idents.iter().any_(|&i| i == ast_lifetime.ident) {
this.tcx().sess.span_err(
ast_lifetime.span,
fmt!("illegal lifetime parameter name: `%s`",


@ -274,7 +274,7 @@ pub fn check_struct_pat_fields(pcx: &pat_ctxt,
// Index the class fields.
let mut field_map = HashMap::new();
for class_fields.eachi |i, class_field| {
for class_fields.iter().enumerate().advance |(i, class_field)| {
field_map.insert(class_field.ident, i);
}
@ -303,7 +303,7 @@ pub fn check_struct_pat_fields(pcx: &pat_ctxt,
// Report an error if not all the fields were specified.
if !etc {
for class_fields.eachi |i, field| {
for class_fields.iter().enumerate().advance |(i, field)| {
if found_fields.contains(&i) {
loop;
}
@ -510,7 +510,7 @@ pub fn check_pat(pcx: &pat_ctxt, pat: @ast::pat, expected: ty::t) {
let e_count = elts.len();
match s {
ty::ty_tup(ref ex_elts) if e_count == ex_elts.len() => {
for elts.eachi |i, elt| {
for elts.iter().enumerate().advance |(i, elt)| {
check_pat(pcx, *elt, ex_elts[i]);
}
fcx.write_ty(pat.id, expected);


@ -254,7 +254,7 @@ impl<'self> LookupContext<'self> {
ty_enum(did, _) => {
// Watch out for newtype'd enums like "enum t = @T".
// See discussion in typeck::check::do_autoderef().
if enum_dids.contains(&did) {
if enum_dids.iter().any_(|x| x == &did) {
return None;
}
enum_dids.push(did);
@ -376,7 +376,7 @@ impl<'self> LookupContext<'self> {
let trait_methods = ty::trait_methods(tcx, bound_trait_ref.def_id);
let pos = {
match trait_methods.position(|m| {
match trait_methods.iter().position_(|m| {
m.explicit_self != ast::sty_static &&
m.ident == self.m_name })
{
@ -420,7 +420,7 @@ impl<'self> LookupContext<'self> {
let tcx = self.tcx();
let ms = ty::trait_methods(tcx, did);
let index = match vec::position(*ms, |m| m.ident == self.m_name) {
let index = match ms.iter().position_(|m| m.ident == self.m_name) {
Some(i) => i,
None => { return; } // no method with the right name
};
@ -474,7 +474,7 @@ impl<'self> LookupContext<'self> {
// First, try self methods
let mut method_info: Option<MethodInfo> = None;
let methods = ty::trait_methods(tcx, did);
match vec::position(*methods, |m| m.ident == self.m_name) {
match methods.iter().position_(|m| m.ident == self.m_name) {
Some(i) => {
method_info = Some(MethodInfo {
method_ty: methods[i],
@ -489,8 +489,7 @@ impl<'self> LookupContext<'self> {
for ty::trait_supertraits(tcx, did).each() |trait_ref| {
let supertrait_methods =
ty::trait_methods(tcx, trait_ref.def_id);
match vec::position(*supertrait_methods,
|m| m.ident == self.m_name) {
match supertrait_methods.iter().position_(|m| m.ident == self.m_name) {
Some(i) => {
method_info = Some(MethodInfo {
method_ty: supertrait_methods[i],
@ -547,7 +546,7 @@ impl<'self> LookupContext<'self> {
impl_info.methods.map(|m| m.ident).repr(self.tcx()));
let idx = {
match impl_info.methods.position(|m| m.ident == self.m_name) {
match impl_info.methods.iter().position_(|m| m.ident == self.m_name) {
Some(idx) => idx,
None => { return; } // No method with the right name.
}
@ -817,8 +816,9 @@ impl<'self> LookupContext<'self> {
rcvr_ty: ty::t,
candidates: &mut ~[Candidate])
-> Option<method_map_entry> {
let relevant_candidates =
candidates.filter_to_vec(|c| self.is_relevant(rcvr_ty, c));
let relevant_candidates: ~[Candidate] =
candidates.iter().transform(|c| copy *c).
filter(|c| self.is_relevant(rcvr_ty, c)).collect();
let relevant_candidates = self.merge_candidates(relevant_candidates);
@ -946,7 +946,7 @@ impl<'self> LookupContext<'self> {
parameters given for this method");
self.fcx.infcx().next_ty_vars(num_method_tps)
} else {
self.supplied_tps.to_vec()
self.supplied_tps.to_owned()
}
};


@ -1107,7 +1107,7 @@ pub fn lookup_field_ty(tcx: ty::ctxt,
fieldname: ast::ident,
substs: &ty::substs) -> Option<ty::t> {
let o_field = vec::find(items, |f| f.ident == fieldname);
let o_field = items.iter().find_(|f| f.ident == fieldname);
do o_field.map() |f| {
ty::lookup_field_type(tcx, class_id, f.id, substs)
}
@ -1236,7 +1236,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
vtable::early_resolve_expr(callee_expr, fcx, true);
}
for args.eachi |i, arg| {
for args.iter().enumerate().advance |(i, arg)| {
let is_block = match arg.node {
ast::expr_fn_block(*) | ast::expr_loop_body(*) |
ast::expr_do_body(*) => true,
@ -2492,13 +2492,13 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
let discrim_ty = fcx.expr_ty(discrim);
let arm_tys = arms.map(|a| fcx.node_ty(a.body.node.id));
if ty::type_is_error(discrim_ty) ||
arm_tys.any(|t| ty::type_is_error(*t)) {
arm_tys.iter().any_(|t| ty::type_is_error(*t)) {
fcx.write_error(id);
}
// keep in mind that `all` returns true in the empty vec case,
// which is what we want
else if ty::type_is_bot(discrim_ty) ||
arm_tys.all(|t| ty::type_is_bot(*t)) {
arm_tys.iter().all(|t| ty::type_is_bot(*t)) {
fcx.write_bot(id);
}
else {
@ -2561,8 +2561,8 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
ast::expr_call(f, ref args, sugar) => {
check_call(fcx, expr.id, expr, f, *args, sugar);
let f_ty = fcx.expr_ty(f);
let (args_bot, args_err) = args.foldl((false, false),
|&(rest_bot, rest_err), a| {
let (args_bot, args_err) = args.iter().fold((false, false),
|(rest_bot, rest_err), a| {
// is this not working?
let a_ty = fcx.expr_ty(*a);
(rest_bot || ty::type_is_bot(a_ty),
@ -2578,8 +2578,8 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
check_method_call(fcx, callee_id, expr, rcvr, ident, *args, *tps, sugar);
let f_ty = fcx.expr_ty(rcvr);
let arg_tys = args.map(|a| fcx.expr_ty(*a));
let (args_bot, args_err) = arg_tys.foldl((false, false),
|&(rest_bot, rest_err), a| {
let (args_bot, args_err) = arg_tys.iter().fold((false, false),
|(rest_bot, rest_err), a| {
(rest_bot || ty::type_is_bot(*a),
rest_err || ty::type_is_error(*a))});
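
The tuple-accumulator folds change shape slightly: `foldl` handed the accumulator to the closure by reference, so the old closures matched `|&(rest_bot, rest_err), a|`, while `iter().fold` passes it by value and the `&` disappears. A sketch of the pattern with invented stand-ins for the type predicates:

~~~
// Hypothetical stand-ins for `ty::type_is_bot` / `ty::type_is_error`.
fn is_bot(t: &str) -> bool { t == "!" }
fn is_error(t: &str) -> bool { t == "err" }

fn main() {
    let arg_tys = ["int", "!", "str"];

    // Old: `arg_tys.foldl((false, false), |&(rest_bot, rest_err), a| ...)` —
    // the accumulator arrived by reference, hence the `&(..)` pattern.
    // New: `iter().fold` passes it by value, so the `&` disappears.
    let (args_bot, args_err) = arg_tys
        .iter()
        .fold((false, false), |(rest_bot, rest_err), a| {
            (rest_bot || is_bot(a), rest_err || is_error(a))
        });

    assert!(args_bot);
    assert!(!args_err);
}
~~~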
if ty::type_is_error(f_ty) || args_err {
@ -3419,7 +3419,7 @@ pub fn check_bounds_are_used(ccx: @mut CrateCtxt,
true
});
for tps_used.eachi |i, b| {
for tps_used.iter().enumerate().advance |(i, b)| {
if !*b {
ccx.tcx.sess.span_err(
span, fmt!("type parameter `%s` is unused",


@ -103,8 +103,8 @@ pub fn replace_bound_regions_in_fn_sig(
}
// For each type `ty` in `tys`...
do tys.foldl(isr) |isr, ty| {
let mut isr = *isr;
do tys.iter().fold(isr) |isr, ty| {
let mut isr = isr;
// Using fold_regions is inefficient, because it
// constructs new types, but it avoids code duplication in


@ -68,7 +68,7 @@ impl VtableContext {
}
fn has_trait_bounds(type_param_defs: &[ty::TypeParameterDef]) -> bool {
type_param_defs.any(
type_param_defs.iter().any_(
|type_param_def| !type_param_def.bounds.trait_bounds.is_empty())
}


@ -421,7 +421,7 @@ pub fn ensure_supertraits(ccx: &CrateCtxt,
generics, self_ty);
// FIXME(#5527) Could have same trait multiple times
if ty_trait_refs.any(|other_trait| other_trait.def_id == trait_ref.def_id) {
if ty_trait_refs.iter().any_(|other_trait| other_trait.def_id == trait_ref.def_id) {
// This means a trait inherited from the same supertrait more
// than once.
tcx.sess.span_err(sp, "Duplicate supertrait in trait declaration");
@ -515,7 +515,7 @@ pub fn compare_impl_method(tcx: ty::ctxt,
return;
}
for trait_m.generics.type_param_defs.eachi |i, trait_param_def| {
for trait_m.generics.type_param_defs.iter().enumerate().advance |(i, trait_param_def)| {
// For each of the corresponding impl ty param's bounds...
let impl_param_def = &impl_m.generics.type_param_defs[i];
@ -687,11 +687,11 @@ pub fn check_methods_against_trait(ccx: &CrateCtxt,
// we'll catch it in coherence
let trait_ms = ty::trait_methods(tcx, trait_ref.def_id);
for impl_ms.each |impl_m| {
match trait_ms.find(|trait_m| trait_m.ident == impl_m.mty.ident) {
match trait_ms.iter().find_(|trait_m| trait_m.ident == impl_m.mty.ident) {
Some(trait_m) => {
let num_impl_tps = generics.ty_params.len();
compare_impl_method(
ccx.tcx, num_impl_tps, impl_m, trait_m,
ccx.tcx, num_impl_tps, impl_m, *trait_m,
&trait_ref.substs, selfty);
}
None => {


@ -265,7 +265,7 @@ pub fn super_tps<C:Combine>(
if vec::same_length(as_, bs) {
iter_vec2(as_, bs, |a, b| {
eq_tys(this, *a, *b)
}).then(|| Ok(as_.to_vec()) )
}).then(|| Ok(as_.to_owned()) )
} else {
Err(ty::terr_ty_param_size(
expected_found(this, as_.len(), bs.len())))


@ -503,7 +503,7 @@ pub fn var_ids<T:Combine>(this: &T, isr: isr_alist) -> ~[RegionVid] {
pub fn is_var_in_set(new_vars: &[RegionVid], r: ty::Region) -> bool {
match r {
ty::re_infer(ty::ReVar(ref v)) => new_vars.contains(v),
ty::re_infer(ty::ReVar(ref v)) => new_vars.iter().any_(|x| x == v),
_ => false
}
}


@ -175,7 +175,7 @@ impl Combine for Lub {
// Variables created during LUB computation which are
// *related* to regions that pre-date the LUB computation
// stay as they are.
if !tainted.all(|r| is_var_in_set(new_vars, *r)) {
if !tainted.iter().all(|r| is_var_in_set(new_vars, *r)) {
debug!("generalize_region(r0=%?): \
non-new-variables found in %?",
r0, tainted);
@ -189,7 +189,7 @@ impl Combine for Lub {
// with.
for list::each(a_isr) |pair| {
let (a_br, a_r) = *pair;
if tainted.contains(&a_r) {
if tainted.iter().any_(|x| x == &a_r) {
debug!("generalize_region(r0=%?): \
replacing with %?, tainted=%?",
r0, a_br, tainted);


@ -980,7 +980,7 @@ impl RegionVarBindings {
{
let mut result_set = result_set;
if *r == *r1 { // Clearly, this is potentially inefficient.
if !result_set.contains(r2) {
if !result_set.iter().any_(|x| x == r2) {
result_set.push(*r2);
}
}


@ -203,7 +203,7 @@ impl Combine for Sub {
// or new variables:
match *tainted_region {
ty::re_infer(ty::ReVar(ref vid)) => {
if new_vars.contains(vid) { loop; }
if new_vars.iter().any_(|x| x == vid) { loop; }
}
_ => {
if *tainted_region == skol { loop; }


@ -198,7 +198,7 @@ pub fn write_substs_to_tcx(tcx: ty::ctxt,
if substs.len() > 0u {
debug!("write_substs_to_tcx(%d, %?)", node_id,
substs.map(|t| ppaux::ty_to_str(tcx, *t)));
assert!(substs.all(|t| !ty::type_needs_infer(*t)));
assert!(substs.iter().all(|t| !ty::type_needs_infer(*t)));
tcx.node_type_substs.insert(node_id, substs);
}
}


@ -216,7 +216,7 @@ pub fn run_compiler(args: &~[~str], demitter: diagnostic::Emitter) {
let lint_flags = vec::append(getopts::opt_strs(matches, "W"),
getopts::opt_strs(matches, "warn"));
let show_lint_options = lint_flags.contains(&~"help") ||
let show_lint_options = lint_flags.iter().any_(|x| x == &~"help") ||
(opt_present(matches, "W") && lint_flags.is_empty());
if show_lint_options {
@ -224,7 +224,8 @@ pub fn run_compiler(args: &~[~str], demitter: diagnostic::Emitter) {
return;
}
if getopts::opt_strs(matches, "Z").contains(&~"help") {
let r = getopts::opt_strs(matches, "Z");
if r.iter().any_(|x| x == &~"help") {
describe_debug_flags();
return;
}


@ -57,7 +57,8 @@ pub fn parse_desc(attrs: ~[ast::attribute]) -> Option<~str> {
}
pub fn parse_hidden(attrs: ~[ast::attribute]) -> bool {
do doc_metas(attrs).find |meta| {
let r = doc_metas(attrs);
do r.iter().any_ |meta| {
match attr::get_meta_item_list(*meta) {
Some(metas) => {
let hiddens = attr::find_meta_items_by_name(metas, "hidden");
@ -65,7 +66,7 @@ pub fn parse_hidden(attrs: ~[ast::attribute]) -> bool {
}
None => false
}
}.is_some()
}
}
#[cfg(test)]


@ -135,7 +135,7 @@ fn fold_enum(
node: ast::item_enum(ref enum_definition, _), _
}, _) => {
let ast_variant =
vec::find(enum_definition.variants, |v| {
copy *enum_definition.variants.iter().find_(|v| {
to_str(v.node.name) == variant.name
}).get();

View File

@ -230,16 +230,15 @@ pub fn maybe_find_pandoc(
}
};
let pandoc = do vec::find(possible_pandocs) |pandoc| {
let pandoc = do possible_pandocs.iter().find_ |&pandoc| {
let output = process_output(*pandoc, [~"--version"]);
debug!("testing pandoc cmd %s: %?", *pandoc, output);
output.status == 0
};
if pandoc.is_some() {
result::Ok(pandoc)
} else {
result::Err(~"couldn't find pandoc")
match pandoc {
Some(x) => Ok(Some(copy *x)), // ugly, shouldn't be doubly wrapped
None => Err(~"couldn't find pandoc")
}
}
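
As the comment in the hunk notes, `find_` yields `Option<&T>` rather than an owned element, so call sites either dereference and copy the match or restructure around the borrow. A small sketch of working with the borrowed result, under the same import assumption as above:

    use std::iterator::IteratorUtil; // assumed path

    fn main() {
        let names = ~[~"pandoc", ~"markdown", ~"rst2html"];
        // old: vec::find(names, ..) returned Option<~str> by copy
        let found = names.iter().find_(|n| **n == ~"markdown");
        assert!(found.is_some());
        assert!(names.iter().find_(|n| **n == ~"nroff").is_none());
    }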

View File

@ -190,7 +190,7 @@ pub fn header_name(doc: doc::ItemTag) -> ~str {
};
let self_ty = doc.self_ty.get_ref();
let mut trait_part = ~"";
for doc.trait_types.eachi |i, trait_type| {
for doc.trait_types.iter().enumerate().advance |(i, trait_type)| {
if i == 0 {
trait_part += " of ";
} else {

View File

@ -82,7 +82,7 @@ fn strip_priv_methods(
item_vis: ast::visibility
) -> doc::ImplDoc {
let methods = do (&doc.methods).filtered |method| {
let ast_method = do methods.find |m| {
let ast_method = do methods.iter().find_ |m| {
extract::to_str(m.ident) == method.name
};
assert!(ast_method.is_some());

View File

@ -78,7 +78,7 @@ mod std {
pub fn main() {
let args = os::args();
if args.contains(&~"-h") || args.contains(&~"--help") {
if args.iter().any_(|x| "-h" == *x) || args.iter().any_(|x| "--help" == *x) {
config::usage();
return;
}

View File

@ -124,7 +124,7 @@ fn fold_enum(
node: ast::item_enum(ref enum_definition, _), _
}, _) => {
let ast_variant =
do vec::find(enum_definition.variants) |v| {
copy *do enum_definition.variants.iter().find_ |v| {
to_str(v.node.name) == variant.name
}.get();
@ -178,14 +178,14 @@ fn get_method_sig(
ast_map::node_item(@ast::item {
node: ast::item_trait(_, _, ref methods), _
}, _) => {
match vec::find(*methods, |method| {
match methods.iter().find_(|&method| {
match copy *method {
ast::required(ty_m) => to_str(ty_m.ident) == method_name,
ast::provided(m) => to_str(m.ident) == method_name,
}
}) {
Some(method) => {
match method {
match copy *method {
ast::required(ty_m) => {
Some(pprust::fun_to_str(
&ty_m.decl,
@ -214,7 +214,7 @@ fn get_method_sig(
ast_map::node_item(@ast::item {
node: ast::item_impl(_, _, _, ref methods), _
}, _) => {
match vec::find(*methods, |method| {
match methods.iter().find_(|method| {
to_str(method.ident) == method_name
}) {
Some(method) => {

View File

@ -287,7 +287,7 @@ fn compile_crate(src_filename: ~str, binary: ~str) -> Option<bool> {
// file, skip compilation and return None.
let mut should_compile = true;
let dir = os::list_dir_path(&Path(outputs.out_filename.dirname()));
let maybe_lib_path = do dir.find |file| {
let maybe_lib_path = do dir.iter().find_ |file| {
// The actual file's name has a hash value and version
// number in it which is unknown at this time, so looking
// for a file that matches out_filename won't work,
@ -381,7 +381,7 @@ fn run_cmd(repl: &mut Repl, _in: @io::Reader, _out: @io::Writer,
let crate_path = Path(*crate);
let crate_dir = crate_path.dirname();
repl.program.record_extern(fmt!("extern mod %s;", *crate));
if !repl.lib_search_paths.contains(&crate_dir) {
if !repl.lib_search_paths.iter().any_(|x| x == &crate_dir) {
repl.lib_search_paths.push(crate_dir);
}
}
@ -430,7 +430,7 @@ pub fn run_line(repl: &mut Repl, in: @io::Reader, out: @io::Writer, line: ~str,
if !cmd.is_empty() {
let args = if len > 1 {
vec::slice(split, 1, len).to_vec()
vec::slice(split, 1, len).to_owned()
} else { ~[] };
match run_cmd(repl, in, out, cmd, args, use_rl) {

View File

@ -60,9 +60,9 @@ impl PkgSrc {
let dir;
let dirs = pkgid_src_in_workspace(&self.id, &self.root);
debug!("Checking dirs: %?", dirs);
let path = dirs.find(|d| os::path_exists(d));
let path = dirs.iter().find_(|&d| os::path_exists(d));
match path {
Some(d) => dir = d,
Some(d) => dir = copy *d,
None => dir = match self.fetch_git() {
None => cond.raise((copy self.id, ~"supplied path for package dir does not \
exist, and couldn't interpret it as a URL fragment")),

View File

@ -570,14 +570,14 @@ fn install_remove() {
command_line_test([~"install", ~"bar"], &dir);
command_line_test([~"install", ~"quux"], &dir);
let list_output = command_line_test_output([~"list"]);
assert!(list_output.contains(&~"foo"));
assert!(list_output.contains(&~"bar"));
assert!(list_output.contains(&~"quux"));
assert!(list_output.iter().any_(|x| x == &~"foo"));
assert!(list_output.iter().any_(|x| x == &~"bar"));
assert!(list_output.iter().any_(|x| x == &~"quux"));
command_line_test([~"remove", ~"foo"], &dir);
let list_output = command_line_test_output([~"list"]);
assert!(!list_output.contains(&~"foo"));
assert!(list_output.contains(&~"bar"));
assert!(list_output.contains(&~"quux"));
assert!(!list_output.iter().any_(|x| x == &~"foo"));
assert!(list_output.iter().any_(|x| x == &~"bar"));
assert!(list_output.iter().any_(|x| x == &~"quux"));
}
#[test]
@ -643,7 +643,7 @@ fn test_versions() {
command_line_test([~"install", ~"foo#0.1"], &workspace);
let output = command_line_test_output([~"list"]);
// make sure output includes versions
assert!(!output.contains(&~"foo#0.2"));
assert!(!output.iter().any_(|x| x == &~"foo#0.2"));
}
#[test]

View File

@ -56,7 +56,7 @@ pub fn root() -> Path {
}
pub fn is_cmd(cmd: &str) -> bool {
Commands.any(|&c| c == cmd)
Commands.iter().any_(|&c| c == cmd)
}
struct ListenerFn {

View File

@ -155,7 +155,7 @@ fn try_parsing_version(s: &str) -> Option<Version> {
/// Just an approximation
fn is_url_like(p: &RemotePath) -> bool {
let str = p.to_str();
str.split_iter('/').count() > 2
str.split_iter('/').len_() > 2
}
/// If s is of the form foo#bar, where bar is a valid version
@ -170,7 +170,7 @@ pub fn split_version_general<'a>(s: &'a str, sep: char) -> Option<(&'a str, Vers
for s.split_iter(sep).advance |st| {
debug!("whole = %s part = %s", s, st);
}
if s.split_iter(sep).count() > 2 {
if s.split_iter(sep).len_() > 2 {
return None;
}
match s.rfind(sep) {

View File

@ -65,7 +65,7 @@ use str::StrSlice;
use to_str::ToStr;
use uint;
use vec;
use vec::{OwnedVector, OwnedCopyableVector};
use vec::{OwnedVector, OwnedCopyableVector, CopyableVector};
#[allow(non_camel_case_types)] // not sure what to do about this
pub type fd_t = c_int;
@ -698,7 +698,7 @@ impl<T:Reader> ReaderUtil for T {
// over-read by reading 1-byte per char needed
nbread = if ncreq > nbreq { ncreq } else { nbreq };
if nbread > 0 {
bytes = vec::slice(bytes, offset, bytes.len()).to_vec();
bytes = vec::slice(bytes, offset, bytes.len()).to_owned();
}
}
chars
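
`to_vec` on a slice becomes `to_owned`, which allocates a fresh `~[T]`; the widened import list in this hunk suggests the method is supplied by `CopyableVector`, though that attribution is inferred from the import rather than stated in the diff. A sketch:

    use std::vec::CopyableVector; // assumed to supply to_owned for slices

    fn main() {
        let bytes = ~[1u8, 2u8, 3u8, 4u8];
        let tail: ~[u8] = bytes.slice(1, bytes.len()).to_owned();
        assert_eq!(tail, ~[2u8, 3u8, 4u8]);
    }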

View File

@ -273,6 +273,7 @@ pub trait IteratorUtil<A> {
/// ~~~
fn fold<B>(&mut self, start: B, f: &fn(B, A) -> B) -> B;
// FIXME: #5898: should be called len
/// Counts the number of elements in this iterator.
///
/// # Example
@ -280,10 +281,10 @@ pub trait IteratorUtil<A> {
/// ~~~ {.rust}
/// let a = [1, 2, 3, 4, 5];
/// let mut it = a.iter();
/// assert!(it.count() == 5);
/// assert!(it.count() == 0);
/// assert!(it.len_() == 5);
/// assert!(it.len_() == 0);
/// ~~~
fn count(&mut self) -> uint;
fn len_(&mut self) -> uint;
/// Tests whether the predicate holds true for all elements in the iterator.
///
@ -314,6 +315,9 @@ pub trait IteratorUtil<A> {
/// Return the index of the first element satisfying the specified predicate
fn position_(&mut self, predicate: &fn(A) -> bool) -> Option<uint>;
/// Count the number of elements satisfying the specified predicate
fn count(&mut self, predicate: &fn(A) -> bool) -> uint;
}
/// Iterator adaptors provided for every `Iterator` implementation. The adaptor objects are also
@ -432,7 +436,7 @@ impl<A, T: Iterator<A>> IteratorUtil<A> for T {
/// Count the number of items yielded by an iterator
#[inline]
fn count(&mut self) -> uint { self.fold(0, |cnt, _x| cnt + 1) }
fn len_(&mut self) -> uint { self.fold(0, |cnt, _x| cnt + 1) }
#[inline]
fn all(&mut self, f: &fn(A) -> bool) -> bool {
@ -467,6 +471,15 @@ impl<A, T: Iterator<A>> IteratorUtil<A> for T {
}
None
}
#[inline]
fn count(&mut self, predicate: &fn(A) -> bool) -> uint {
let mut i = 0;
for self.advance |x| {
if predicate(x) { i += 1 }
}
i
}
}
/// A trait for iterators over elements which can be added together
@ -1020,11 +1033,11 @@ mod tests {
}
#[test]
fn test_iterator_count() {
fn test_iterator_len() {
let v = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
assert_eq!(v.slice(0, 4).iter().count(), 4);
assert_eq!(v.slice(0, 10).iter().count(), 10);
assert_eq!(v.slice(0, 0).iter().count(), 0);
assert_eq!(v.slice(0, 4).iter().len_(), 4);
assert_eq!(v.slice(0, 10).iter().len_(), 10);
assert_eq!(v.slice(0, 0).iter().len_(), 0);
}
#[test]
@ -1099,4 +1112,12 @@ mod tests {
assert_eq!(v.iter().position_(|x| *x % 3 == 0).unwrap(), 1);
assert!(v.iter().position_(|x| *x % 12 == 0).is_none());
}
#[test]
fn test_count() {
let xs = &[1, 2, 2, 1, 5, 9, 0, 2];
assert_eq!(xs.iter().count(|x| *x == 2), 3);
assert_eq!(xs.iter().count(|x| *x == 5), 1);
assert_eq!(xs.iter().count(|x| *x == 95), 0);
}
}
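
This is the central API change of the commit: the argument-less `count` is renamed to `len_` (the FIXME notes it should eventually just be `len`), and `count` is reintroduced as a predicate-counting method. The doc example's second assertion reads 0 because `len_` drains the iterator. A sketch combining both, under the same import assumption as the earlier examples:

    use std::iterator::IteratorUtil; // assumed path

    fn main() {
        let a = ~[1, 2, 3, 4, 5];
        let mut it = a.iter();
        assert_eq!(it.len_(), 5);  // consumes every element
        assert_eq!(it.len_(), 0);  // nothing left to yield
        // the new count takes a predicate instead of no arguments
        assert_eq!(a.iter().count(|x| *x % 2 == 0), 2);
    }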

View File

@ -16,7 +16,7 @@
#[allow(missing_doc)];
use cmp::{Eq};
use cmp::Eq;
use kinds::Copy;
use option::{None, Option, Some};
use vec;

View File

@ -60,7 +60,7 @@ pub fn from_bytes(vv: &[u8]) -> ~str {
use str::not_utf8::cond;
if !is_utf8(vv) {
let first_bad_byte = vec::find(vv, |b| !is_utf8([*b])).get();
let first_bad_byte = *vv.iter().find_(|&b| !is_utf8([*b])).get();
cond.raise(fmt!("from_bytes: input is not UTF-8; first bad byte is %u",
first_bad_byte as uint))
}
@ -1317,7 +1317,7 @@ impl<'self> StrSlice<'self> for &'self str {
}
/// Returns the number of characters that a string holds
#[inline]
fn char_len(&self) -> uint { self.iter().count() }
fn char_len(&self) -> uint { self.iter().len_() }
/**
* Returns a slice of the given string from the byte range

View File

@ -142,7 +142,7 @@ unsafe fn local_data_lookup<T: 'static>(
-> Option<(uint, *libc::c_void)> {
let key_value = key_to_key_value(key);
let map_pos = (*map).position(|entry|
let map_pos = (*map).iter().position_(|entry|
match *entry {
Some((k,_,_)) => k == key_value,
None => false
@ -215,7 +215,7 @@ pub unsafe fn local_set<T: 'static>(
}
None => {
// Find an empty slot. If not, grow the vector.
match (*map).position(|x| x.is_none()) {
match (*map).iter().position_(|x| x.is_none()) {
Some(empty_index) => { map[empty_index] = new_entry; }
None => { map.push(new_entry); }
}
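
`position` becomes `position_`, the trailing underscore presumably avoiding a clash with the old_iter method of the same name while both still coexist; it returns the index of the first element satisfying the predicate. A sketch mirroring the empty-slot search above:

    use std::iterator::IteratorUtil; // assumed path

    fn main() {
        let slots = ~[Some(1), None, Some(3)];
        let empty_index = slots.iter().position_(|x| x.is_none());
        assert_eq!(empty_index, Some(1u));
    }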

View File

@ -350,7 +350,7 @@ pub mod ct {
#[test]
fn test_parse_flags() {
fn pack(fs: &[Flag]) -> uint {
fs.foldl(0, |&p, &f| p | (1 << f as uint))
fs.iter().fold(0, |p, &f| p | (1 << f as uint))
}
fn test(s: &str, flags: &[Flag], next: uint) {
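
`foldl` passed the accumulator by reference, so the old closure wrote `|&p, &f|` and other call sites dereferenced it; `iter().fold` passes the accumulator by value. A sketch of the flag-packing fold above in that style, imports assumed as before:

    use std::iterator::IteratorUtil; // assumed path

    fn main() {
        let flags = ~[0u, 1u, 3u];
        // old: flags.foldl(0, |&p, &f| p | (1 << f))
        let packed = flags.iter().fold(0u, |p, &f| p | (1u << f));
        assert_eq!(packed, 11u);  // bits 0, 1 and 3 set
    }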

View File

@ -19,12 +19,11 @@ use cmp::{Eq, Ord, TotalEq, TotalOrd, Ordering, Less, Equal, Greater};
use clone::Clone;
use old_iter::BaseIter;
use old_iter;
use iterator::{Iterator};
use iterator::{Iterator, IteratorUtil};
use iter::FromIter;
use kinds::Copy;
use libc;
use num::Zero;
use old_iter::CopyableIter;
use option::{None, Option, Some};
use ptr::to_unsafe_ptr;
use ptr;
@ -324,12 +323,12 @@ pub fn split<T:Copy>(v: &[T], f: &fn(t: &T) -> bool) -> ~[~[T]] {
match position_between(v, start, ln, f) {
None => break,
Some(i) => {
result.push(slice(v, start, i).to_vec());
result.push(slice(v, start, i).to_owned());
start = i + 1u;
}
}
}
result.push(slice(v, start, ln).to_vec());
result.push(slice(v, start, ln).to_owned());
result
}
@ -348,14 +347,14 @@ pub fn splitn<T:Copy>(v: &[T], n: uint, f: &fn(t: &T) -> bool) -> ~[~[T]] {
match position_between(v, start, ln, f) {
None => break,
Some(i) => {
result.push(slice(v, start, i).to_vec());
result.push(slice(v, start, i).to_owned());
// Make sure to skip the separator.
start = i + 1u;
count -= 1u;
}
}
}
result.push(slice(v, start, ln).to_vec());
result.push(slice(v, start, ln).to_owned());
result
}
@ -373,12 +372,12 @@ pub fn rsplit<T:Copy>(v: &[T], f: &fn(t: &T) -> bool) -> ~[~[T]] {
match rposition_between(v, 0, end, f) {
None => break,
Some(i) => {
result.push(slice(v, i + 1, end).to_vec());
result.push(slice(v, i + 1, end).to_owned());
end = i;
}
}
}
result.push(slice(v, 0u, end).to_vec());
result.push(slice(v, 0u, end).to_owned());
reverse(result);
result
}
@ -398,14 +397,14 @@ pub fn rsplitn<T:Copy>(v: &[T], n: uint, f: &fn(t: &T) -> bool) -> ~[~[T]] {
match rposition_between(v, 0u, end, f) {
None => break,
Some(i) => {
result.push(slice(v, i + 1u, end).to_vec());
result.push(slice(v, i + 1u, end).to_owned());
// Make sure to skip the separator.
end = i;
count -= 1u;
}
}
}
result.push(slice(v, 0u, end).to_vec());
result.push(slice(v, 0u, end).to_owned());
reverse(result);
result
}
@ -1057,17 +1056,6 @@ pub fn contains<T:Eq>(v: &[T], x: &T) -> bool {
false
}
/**
* Search for the first element that matches a given predicate
*
* Apply function `f` to each element of `v`, starting from the first.
* When function `f` returns true then an option containing the element
* is returned. If `f` matches no elements then none is returned.
*/
pub fn find<T:Copy>(v: &[T], f: &fn(t: &T) -> bool) -> Option<T> {
find_between(v, 0u, v.len(), f)
}
/**
* Search for the first element that matches a given predicate within a range
*
@ -1108,18 +1096,7 @@ pub fn rfind_between<T:Copy>(v: &[T],
/// Find the first index containing a matching value
pub fn position_elem<T:Eq>(v: &[T], x: &T) -> Option<uint> {
position(v, |y| *x == *y)
}
/**
* Find the first index matching some predicate
*
* Apply function `f` to each element of `v`. When function `f` returns true
* then an option containing the index is returned. If `f` matches no elements
* then none is returned.
*/
pub fn position<T>(v: &[T], f: &fn(t: &T) -> bool) -> Option<uint> {
position_between(v, 0u, v.len(), f)
v.iter().position_(|y| *x == *y)
}
/**
@ -1427,36 +1404,6 @@ pub fn each<'r,T>(v: &'r [T], f: &fn(&'r T) -> bool) -> bool {
return !broke;
}
/// Like `each()`, but for the case where you have a vector that *may or may
/// not* have mutable contents.
#[inline]
pub fn each_const<T>(v: &const [T], f: &fn(elem: &const T) -> bool) -> bool {
let mut i = 0;
let n = v.len();
while i < n {
if !f(&const v[i]) {
return false;
}
i += 1;
}
return true;
}
/**
* Iterates over a vector's elements and indices
*
* Return true to continue, false to break.
*/
#[inline]
pub fn eachi<'r,T>(v: &'r [T], f: &fn(uint, v: &'r T) -> bool) -> bool {
let mut i = 0;
for each(v) |p| {
if !f(i, p) { return false; }
i += 1;
}
return true;
}
/**
* Iterate over all permutations of vector `v`.
*
@ -1822,7 +1769,6 @@ pub trait ImmutableVector<'self, T> {
fn initn(&self, n: uint) -> &'self [T];
fn last(&self) -> &'self T;
fn last_opt(&self) -> Option<&'self T>;
fn position(&self, f: &fn(t: &T) -> bool) -> Option<uint>;
fn rposition(&self, f: &fn(t: &T) -> bool) -> Option<uint>;
fn map<U>(&self, f: &fn(t: &T) -> U) -> ~[U];
fn mapi<U>(&self, f: &fn(uint, t: &T) -> U) -> ~[U];
@ -1890,18 +1836,6 @@ impl<'self,T> ImmutableVector<'self, T> for &'self [T] {
#[inline]
fn last_opt(&self) -> Option<&'self T> { last_opt(*self) }
/**
* Find the first index matching some predicate
*
* Apply function `f` to each element of `v`. When function `f` returns
* true then an option containing the index is returned. If `f` matches no
* elements then none is returned.
*/
#[inline]
fn position(&self, f: &fn(t: &T) -> bool) -> Option<uint> {
position(*self, f)
}
/**
* Find the last index matching some predicate
*
@ -2464,9 +2398,6 @@ pub mod bytes {
}
}
// ___________________________________________________________________________
// ITERATION TRAIT METHODS
impl<'self,A> old_iter::BaseIter<A> for &'self [A] {
#[inline]
fn each<'a>(&'a self, blk: &fn(v: &'a A) -> bool) -> bool {
@ -2476,152 +2407,6 @@ impl<'self,A> old_iter::BaseIter<A> for &'self [A] {
fn size_hint(&self) -> Option<uint> { Some(self.len()) }
}
// FIXME(#4148): This should be redundant
impl<A> old_iter::BaseIter<A> for ~[A] {
#[inline]
fn each<'a>(&'a self, blk: &fn(v: &'a A) -> bool) -> bool {
each(*self, blk)
}
#[inline]
fn size_hint(&self) -> Option<uint> { Some(self.len()) }
}
// FIXME(#4148): This should be redundant
impl<A> old_iter::BaseIter<A> for @[A] {
#[inline]
fn each<'a>(&'a self, blk: &fn(v: &'a A) -> bool) -> bool {
each(*self, blk)
}
#[inline]
fn size_hint(&self) -> Option<uint> { Some(self.len()) }
}
impl<'self,A> old_iter::ExtendedIter<A> for &'self [A] {
pub fn eachi(&self, blk: &fn(uint, v: &A) -> bool) -> bool {
old_iter::eachi(self, blk)
}
pub fn all(&self, blk: &fn(&A) -> bool) -> bool {
old_iter::all(self, blk)
}
pub fn any(&self, blk: &fn(&A) -> bool) -> bool {
old_iter::any(self, blk)
}
pub fn foldl<B>(&self, b0: B, blk: &fn(&B, &A) -> B) -> B {
old_iter::foldl(self, b0, blk)
}
pub fn position(&self, f: &fn(&A) -> bool) -> Option<uint> {
old_iter::position(self, f)
}
fn map_to_vec<B>(&self, op: &fn(&A) -> B) -> ~[B] {
old_iter::map_to_vec(self, op)
}
fn flat_map_to_vec<B,IB:BaseIter<B>>(&self, op: &fn(&A) -> IB)
-> ~[B] {
old_iter::flat_map_to_vec(self, op)
}
}
// FIXME(#4148): This should be redundant
impl<A> old_iter::ExtendedIter<A> for ~[A] {
pub fn eachi(&self, blk: &fn(uint, v: &A) -> bool) -> bool {
old_iter::eachi(self, blk)
}
pub fn all(&self, blk: &fn(&A) -> bool) -> bool {
old_iter::all(self, blk)
}
pub fn any(&self, blk: &fn(&A) -> bool) -> bool {
old_iter::any(self, blk)
}
pub fn foldl<B>(&self, b0: B, blk: &fn(&B, &A) -> B) -> B {
old_iter::foldl(self, b0, blk)
}
pub fn position(&self, f: &fn(&A) -> bool) -> Option<uint> {
old_iter::position(self, f)
}
fn map_to_vec<B>(&self, op: &fn(&A) -> B) -> ~[B] {
old_iter::map_to_vec(self, op)
}
fn flat_map_to_vec<B,IB:BaseIter<B>>(&self, op: &fn(&A) -> IB)
-> ~[B] {
old_iter::flat_map_to_vec(self, op)
}
}
// FIXME(#4148): This should be redundant
impl<A> old_iter::ExtendedIter<A> for @[A] {
pub fn eachi(&self, blk: &fn(uint, v: &A) -> bool) -> bool {
old_iter::eachi(self, blk)
}
pub fn all(&self, blk: &fn(&A) -> bool) -> bool {
old_iter::all(self, blk)
}
pub fn any(&self, blk: &fn(&A) -> bool) -> bool {
old_iter::any(self, blk)
}
pub fn foldl<B>(&self, b0: B, blk: &fn(&B, &A) -> B) -> B {
old_iter::foldl(self, b0, blk)
}
pub fn position(&self, f: &fn(&A) -> bool) -> Option<uint> {
old_iter::position(self, f)
}
fn map_to_vec<B>(&self, op: &fn(&A) -> B) -> ~[B] {
old_iter::map_to_vec(self, op)
}
fn flat_map_to_vec<B,IB:BaseIter<B>>(&self, op: &fn(&A) -> IB)
-> ~[B] {
old_iter::flat_map_to_vec(self, op)
}
}
impl<'self,A:Eq> old_iter::EqIter<A> for &'self [A] {
pub fn contains(&self, x: &A) -> bool { old_iter::contains(self, x) }
pub fn count(&self, x: &A) -> uint { old_iter::count(self, x) }
}
// FIXME(#4148): This should be redundant
impl<A:Eq> old_iter::EqIter<A> for ~[A] {
pub fn contains(&self, x: &A) -> bool { old_iter::contains(self, x) }
pub fn count(&self, x: &A) -> uint { old_iter::count(self, x) }
}
// FIXME(#4148): This should be redundant
impl<A:Eq> old_iter::EqIter<A> for @[A] {
pub fn contains(&self, x: &A) -> bool { old_iter::contains(self, x) }
pub fn count(&self, x: &A) -> uint { old_iter::count(self, x) }
}
impl<'self,A:Copy> old_iter::CopyableIter<A> for &'self [A] {
fn filter_to_vec(&self, pred: &fn(&A) -> bool) -> ~[A] {
old_iter::filter_to_vec(self, pred)
}
fn to_vec(&self) -> ~[A] { old_iter::to_vec(self) }
pub fn find(&self, f: &fn(&A) -> bool) -> Option<A> {
old_iter::find(self, f)
}
}
// FIXME(#4148): This should be redundant
impl<A:Copy> old_iter::CopyableIter<A> for ~[A] {
fn filter_to_vec(&self, pred: &fn(&A) -> bool) -> ~[A] {
old_iter::filter_to_vec(self, pred)
}
fn to_vec(&self) -> ~[A] { old_iter::to_vec(self) }
pub fn find(&self, f: &fn(&A) -> bool) -> Option<A> {
old_iter::find(self, f)
}
}
// FIXME(#4148): This should be redundant
impl<A:Copy> old_iter::CopyableIter<A> for @[A] {
fn filter_to_vec(&self, pred: &fn(&A) -> bool) -> ~[A] {
old_iter::filter_to_vec(self, pred)
}
fn to_vec(&self) -> ~[A] { old_iter::to_vec(self) }
pub fn find(&self, f: &fn(&A) -> bool) -> Option<A> {
old_iter::find(self, f)
}
}
impl<A:Clone> Clone for ~[A] {
#[inline]
fn clone(&self) -> ~[A] {
@ -2946,7 +2731,7 @@ mod tests {
fn test_slice() {
// Test fixed length vector.
let vec_fixed = [1, 2, 3, 4];
let v_a = slice(vec_fixed, 1u, vec_fixed.len()).to_vec();
let v_a = slice(vec_fixed, 1u, vec_fixed.len()).to_owned();
assert_eq!(v_a.len(), 3u);
assert_eq!(v_a[0], 2);
assert_eq!(v_a[1], 3);
@ -2954,14 +2739,14 @@ mod tests {
// Test on stack.
let vec_stack = &[1, 2, 3];
let v_b = slice(vec_stack, 1u, 3u).to_vec();
let v_b = slice(vec_stack, 1u, 3u).to_owned();
assert_eq!(v_b.len(), 2u);
assert_eq!(v_b[0], 2);
assert_eq!(v_b[1], 3);
// Test on managed heap.
let vec_managed = @[1, 2, 3, 4, 5];
let v_c = slice(vec_managed, 0u, 3u).to_vec();
let v_c = slice(vec_managed, 0u, 3u).to_owned();
assert_eq!(v_c.len(), 3u);
assert_eq!(v_c[0], 1);
assert_eq!(v_c[1], 2);
@ -2969,7 +2754,7 @@ mod tests {
// Test on exchange heap.
let vec_unique = ~[1, 2, 3, 4, 5, 6];
let v_d = slice(vec_unique, 1u, 6u).to_vec();
let v_d = slice(vec_unique, 1u, 6u).to_owned();
assert_eq!(v_d.len(), 5u);
assert_eq!(v_d[0], 2);
assert_eq!(v_d[1], 3);
@ -3274,17 +3059,6 @@ mod tests {
assert_eq!(i, 6);
}
#[test]
fn test_eachi() {
let mut i = 0;
for eachi([1, 2, 3]) |j, v| {
if i == 0 { assert!(*v == 1); }
assert_eq!(j + 1u, *v as uint);
i += *v;
}
assert_eq!(i, 6);
}
#[test]
fn test_each_ret_len0() {
let a0 : [int, .. 0] = [];
@ -3350,18 +3124,6 @@ mod tests {
assert!(position_elem(v1, &4).is_none());
}
#[test]
fn test_position() {
fn less_than_three(i: &int) -> bool { *i < 3 }
fn is_eighteen(i: &int) -> bool { *i == 18 }
assert!(position([], less_than_three).is_none());
let v1 = ~[5, 4, 3, 2, 1];
assert_eq!(position(v1, less_than_three), Some(3u));
assert!(position(v1, is_eighteen).is_none());
}
#[test]
fn test_position_between() {
assert!(position_between([], 0u, 0u, f).is_none());
@ -3390,18 +3152,6 @@ mod tests {
assert!(position_between(v, 4u, 4u, f).is_none());
}
#[test]
fn test_find() {
assert!(find([], f).is_none());
fn f(xy: &(int, char)) -> bool { let (_x, y) = *xy; y == 'b' }
fn g(xy: &(int, char)) -> bool { let (_x, y) = *xy; y == 'd' }
let v = ~[(0, 'a'), (1, 'b'), (2, 'c'), (3, 'b')];
assert_eq!(find(v, f), Some((1, 'b')));
assert!(find(v, g).is_none());
}
#[test]
fn test_find_between() {
assert!(find_between([], 0u, 0u, f).is_none());
@ -3432,14 +3182,12 @@ mod tests {
#[test]
fn test_rposition() {
assert!(find([], f).is_none());
fn f(xy: &(int, char)) -> bool { let (_x, y) = *xy; y == 'b' }
fn g(xy: &(int, char)) -> bool { let (_x, y) = *xy; y == 'd' }
let v = ~[(0, 'a'), (1, 'b'), (2, 'c'), (3, 'b')];
assert_eq!(position(v, f), Some(1u));
assert!(position(v, g).is_none());
assert_eq!(rposition(v, f), Some(3u));
assert!(rposition(v, g).is_none());
}
#[test]
@ -4065,37 +3813,6 @@ mod tests {
};
}
#[test]
#[ignore(windows)]
#[should_fail]
#[allow(non_implicitly_copyable_typarams)]
fn test_find_fail() {
let v = [(~0, @0), (~0, @0), (~0, @0), (~0, @0)];
let mut i = 0;
do find(v) |_elt| {
if i == 2 {
fail!()
}
i += 0;
false
};
}
#[test]
#[ignore(windows)]
#[should_fail]
fn test_position_fail() {
let v = [(~0, @0), (~0, @0), (~0, @0), (~0, @0)];
let mut i = 0;
do position(v) |_elt| {
if i == 2 {
fail!()
}
i += 0;
false
};
}
#[test]
#[ignore(windows)]
#[should_fail]
@ -4126,21 +3843,6 @@ mod tests {
};
}
#[test]
#[ignore(windows)]
#[should_fail]
fn test_eachi_fail() {
let v = [(~0, @0), (~0, @0), (~0, @0), (~0, @0)];
let mut i = 0;
do eachi(v) |_i, _elt| {
if i == 2 {
fail!()
}
i += 0;
false
};
}
#[test]
#[ignore(windows)]
#[should_fail]

View File

@ -211,7 +211,7 @@ impl AbiSet {
let mut abis = ~[];
for self.each |abi| { abis.push(abi); }
for abis.eachi |i, abi| {
for abis.iter().enumerate().advance |(i, abi)| {
let data = abi.data();
for abis.slice(0, i).each |other_abi| {
let other_data = other_abi.data();
@ -374,7 +374,7 @@ fn abi_to_str_rust() {
#[test]
fn indices_are_correct() {
for AbiDatas.eachi |i, abi_data| {
for AbiDatas.iter().enumerate().advance |(i, abi_data)| {
assert!(i == abi_data.abi.index());
}
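
Indexed iteration follows the same scheme everywhere in this commit: `eachi` becomes `iter().enumerate()` driven through `advance`, with the index and element arriving as a tuple. A standalone sketch, same import assumption:

    use std::iterator::IteratorUtil; // assumed path

    fn main() {
        let abis = ~[~"cdecl", ~"stdcall", ~"fastcall"];
        // old: for abis.eachi |i, abi| { .. }
        for abis.iter().enumerate().advance |(i, abi)| {
            println(fmt!("%u: %s", i, *abi));
        }
    }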

View File

@ -194,7 +194,7 @@ fn eq(a: @ast::meta_item, b: @ast::meta_item) -> bool {
ast::meta_list(ref nb, ref misb) => {
if na != nb { return false; }
for misa.each |mi| {
if !misb.contains(mi) { return false; }
if !misb.iter().any_(|x| x == mi) { return false; }
}
true
}

View File

@ -246,7 +246,7 @@ fn highlight_lines(cm: @codemap::CodeMap,
let mut elided = false;
let mut display_lines = /* FIXME (#2543) */ copy lines.lines;
if display_lines.len() > max_lines {
display_lines = vec::slice(display_lines, 0u, max_lines).to_vec();
display_lines = vec::slice(display_lines, 0u, max_lines).to_owned();
elided = true;
}
// Print the offending lines

View File

@ -20,7 +20,7 @@ use parse::token::{str_to_ident};
pub fn expand_syntax_ext(cx: @ExtCtxt, sp: span, tts: &[ast::token_tree])
-> base::MacResult {
let mut res_str = ~"";
for tts.eachi |i, e| {
for tts.iter().enumerate().advance |(i, e)| {
if i & 1 == 1 {
match *e {
ast::tt_tok(_, token::COMMA) => (),

View File

@ -111,7 +111,7 @@ fn decodable_substructure(cx: @ExtCtxt, span: span,
let mut variants = ~[];
let rvariant_arg = cx.ident_of("read_enum_variant_arg");
for fields.eachi |i, f| {
for fields.iter().enumerate().advance |(i, f)| {
let (name, parts) = match *f { (i, ref p) => (i, p) };
variants.push(cx.expr_str(span, cx.str_of(name)));

View File

@ -124,7 +124,7 @@ fn encodable_substructure(cx: @ExtCtxt, span: span,
Struct(ref fields) => {
let emit_struct_field = cx.ident_of("emit_struct_field");
let mut stmts = ~[];
for fields.eachi |i, f| {
for fields.iter().enumerate().advance |(i, f)| {
let (name, val) = match *f {
(Some(id), e, _) => (cx.str_of(id), e),
(None, e, _) => (fmt!("_field%u", i).to_managed(), e)
@ -155,7 +155,7 @@ fn encodable_substructure(cx: @ExtCtxt, span: span,
let encoder = cx.expr_ident(span, blkarg);
let emit_variant_arg = cx.ident_of("emit_enum_variant_arg");
let mut stmts = ~[];
for fields.eachi |i, f| {
for fields.iter().enumerate().advance |(i, f)| {
let val = match *f { (_, e, _) => e };
let enc = cx.expr_method_call(span, val, encode, ~[blkencoder]);
let lambda = cx.lambda_expr_1(span, enc, blkarg);

View File

@ -487,7 +487,7 @@ impl<'self> MethodDef<'self> {
None => respan(span, ast::sty_static),
};
for self.args.eachi |i, ty| {
for self.args.iter().enumerate().advance |(i, ty)| {
let ast_ty = ty.to_ty(cx, span, type_ident, generics);
let ident = cx.ident_of(fmt!("__arg_%u", i));
arg_tys.push((ident, ast_ty));
@ -741,7 +741,7 @@ impl<'self> MethodDef<'self> {
let mut enum_matching_fields = vec::from_elem(self_vec.len(), ~[]);
for matches_so_far.tail().each |&(_, _, other_fields)| {
for other_fields.eachi |i, &(_, other_field)| {
for other_fields.iter().enumerate().advance |(i, &(_, other_field))| {
enum_matching_fields[i].push(other_field);
}
}
@ -809,7 +809,7 @@ impl<'self> MethodDef<'self> {
}
} else {
// create an arm matching on each variant
for enum_def.variants.eachi |index, variant| {
for enum_def.variants.iter().enumerate().advance |(index, variant)| {
let (pattern, idents) = create_enum_variant_pattern(cx, span,
variant,
current_match_str,
@ -923,7 +923,7 @@ fn create_struct_pattern(cx: @ExtCtxt,
let mut ident_expr = ~[];
let mut struct_type = Unknown;
for struct_def.fields.eachi |i, struct_field| {
for struct_def.fields.iter().enumerate().advance |(i, struct_field)| {
let opt_id = match struct_field.node.kind {
ast::named_field(ident, _) if (struct_type == Unknown ||
struct_type == Record) => {

View File

@ -200,7 +200,7 @@ pub fn expand_item(extsbox: @mut SyntaxEnv,
// does this attribute list contain "macro_escape" ?
pub fn contains_macro_escape (attrs: &[ast::attribute]) -> bool {
attrs.any(|attr| "macro_escape" == attr::get_attr_name(attr))
attrs.iter().any_(|attr| "macro_escape" == attr::get_attr_name(attr))
}
// Support for item-position macro invocations, exactly the same
@ -425,8 +425,8 @@ fn renames_to_fold(renames : @mut ~[(ast::ident,ast::Name)]) -> @ast_fold {
fold_ident: |id,_| {
// the individual elements are memoized... it would
// also be possible to memoize on the whole list at once.
let new_ctxt = renames.foldl(id.ctxt,|ctxt,&(from,to)| {
new_rename(from,to,*ctxt)
let new_ctxt = renames.iter().fold(id.ctxt,|ctxt,&(from,to)| {
new_rename(from,to,ctxt)
});
ast::ident{name:id.name,ctxt:new_ctxt}
},

View File

@ -47,13 +47,13 @@ use extra::bitv::Bitv;
pub fn analyze(proto: @mut protocol_, _cx: @ExtCtxt) {
debug!("initializing colive analysis");
let num_states = proto.num_states();
let mut colive = do (copy proto.states).map_to_vec |state| {
let mut colive: ~[~Bitv] = do (copy proto.states).iter().transform() |state| {
let mut bv = ~Bitv::new(num_states, false);
for state.reachable |s| {
bv.set(s.id, true);
}
bv
};
}.collect();
let mut i = 0;
let mut changed = true;
@ -61,7 +61,7 @@ pub fn analyze(proto: @mut protocol_, _cx: @ExtCtxt) {
changed = false;
debug!("colive iteration %?", i);
let mut new_colive = ~[];
for colive.eachi |i, this_colive| {
for colive.iter().enumerate().advance |(i, this_colive)| {
let mut result = this_colive.clone();
let this = proto.get_state_by_id(i);
for this_colive.ones |j| {
@ -80,7 +80,7 @@ pub fn analyze(proto: @mut protocol_, _cx: @ExtCtxt) {
// Determine if we're bounded
let mut self_live = ~[];
for colive.eachi |i, bv| {
for colive.iter().enumerate().advance |(i, bv)| {
if bv.get(i) {
self_live.push(proto.get_state_by_id(i))
}
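
`map_to_vec` is replaced by the lazy `transform` adaptor followed by `collect`; because `collect` can build more than one container type, the bindings above gain explicit annotations such as `~[~Bitv]`. A sketch, with the import again assumed and collect's exact trait bound not shown in this diff:

    use std::iterator::IteratorUtil; // assumed path

    fn main() {
        let states = ~[1u, 2u, 3u];
        // old: states.map_to_vec(|&s| s * 2)
        let doubled: ~[uint] = states.iter().transform(|&s| s * 2u).collect();
        assert_eq!(doubled, ~[2u, 4u, 6u]);
    }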

View File

@ -333,14 +333,14 @@ impl gen_init for protocol {
dummy_sp(),
path(~[ext_cx.ident_of("__Buffer")],
dummy_sp()),
self.states.map_to_vec(|s| {
self.states.iter().transform(|s| {
let fty = s.to_ty(ext_cx);
ext_cx.field_imm(dummy_sp(),
ext_cx.ident_of(s.name),
quote_expr!(
::std::pipes::mk_packet::<$fty>()
))
}))
}).collect())
}
fn gen_init_bounded(&self, ext_cx: @ExtCtxt) -> @ast::expr {
@ -354,10 +354,10 @@ impl gen_init for protocol {
let entangle_body = ext_cx.expr_blk(
ext_cx.blk(
dummy_sp(),
self.states.map_to_vec(
self.states.iter().transform(
|s| ext_cx.parse_stmt(
fmt!("data.%s.set_buffer(buffer)",
s.name).to_managed())),
s.name).to_managed())).collect(),
Some(ext_cx.parse_expr(fmt!(
"::std::ptr::to_mut_unsafe_ptr(&mut (data.%s))",
self.states[0].name).to_managed()))));
@ -390,7 +390,7 @@ impl gen_init for protocol {
fn gen_buffer_type(&self, cx: @ExtCtxt) -> @ast::item {
let ext_cx = cx;
let mut params: OptVec<ast::TyParam> = opt_vec::Empty;
let fields = do (copy self.states).map_to_vec |s| {
let fields = do (copy self.states).iter().transform |s| {
for s.generics.ty_params.each |tp| {
match params.find(|tpp| tp.ident == tpp.ident) {
None => params.push(*tp),
@ -411,7 +411,7 @@ impl gen_init for protocol {
},
span: dummy_sp()
}
};
}.collect();
let generics = Generics {
lifetimes: opt_vec::Empty,

View File

@ -146,13 +146,13 @@ pub struct protocol_ {
impl protocol_ {
/// Get a state.
pub fn get_state(&self, name: &str) -> state {
self.states.find(|i| name == i.name).get()
*self.states.iter().find_(|i| name == i.name).get()
}
pub fn get_state_by_id(&self, id: uint) -> state { self.states[id] }
pub fn has_state(&self, name: &str) -> bool {
self.states.find(|i| name == i.name).is_some()
self.states.iter().find_(|i| name == i.name).is_some()
}
pub fn filename(&self) -> ~str {
@ -216,12 +216,12 @@ pub fn visit<Tproto, Tstate, Tmessage, V: visitor<Tproto, Tstate, Tmessage>>(
proto: protocol, visitor: V) -> Tproto {
// the copy keywords prevent recursive use of dvec
let states = do (copy proto.states).map_to_vec |&s| {
let messages = do (copy s.messages).map_to_vec |&m| {
let states: ~[Tstate] = do (copy proto.states).iter().transform |&s| {
let messages: ~[Tmessage] = do (copy s.messages).iter().transform |&m| {
let message(name, span, tys, this, next) = m;
visitor.visit_message(name, span, tys, this, next)
};
}.collect();
visitor.visit_state(s, messages)
};
}.collect();
visitor.visit_proto(proto, states)
}

View File

@ -94,7 +94,7 @@ pub fn add_new_extension(cx: @ExtCtxt,
let s_d = cx.parse_sess().span_diagnostic;
for lhses.eachi |i, lhs| { // try each arm's matchers
for lhses.iter().enumerate().advance |(i, lhs)| { // try each arm's matchers
match *lhs {
@matched_nonterminal(nt_matchers(ref mtcs)) => {
// `none` is because we're not interpolating

View File

@ -592,7 +592,7 @@ pub fn print_item(s: @ps, item: @ast::item) {
print_generics(s, generics);
if traits.len() != 0u {
word(s.s, ":");
for traits.eachi |i, trait_| {
for traits.iter().enumerate().advance |(i, trait_)| {
nbsp(s);
if i != 0 {
word_space(s, "+");
@ -758,7 +758,7 @@ pub fn print_tt(s: @ps, tt: &ast::token_tree) {
pub fn print_tts(s: @ps, tts: &[ast::token_tree]) {
ibox(s, 0);
for tts.eachi |i, tt| {
for tts.iter().enumerate().advance |(i, tt)| {
if i != 0 {
space(s.s);
}
@ -1229,7 +1229,7 @@ pub fn print_expr(s: @ps, expr: @ast::expr) {
space(s.s);
bopen(s);
let len = arms.len();
for arms.eachi |i, arm| {
for arms.iter().enumerate().advance |(i, arm)| {
space(s.s);
cbox(s, indent_unit);
ibox(s, 0u);

View File

@ -44,7 +44,7 @@ fn maybe_run_test(argv: &[~str], name: ~str, test: &fn()) {
if os::getenv(~"RUST_BENCH").is_some() {
run_test = true
} else if argv.len() > 0 {
run_test = argv.contains(&~"all") || argv.contains(&name)
run_test = argv.iter().any_(|x| x == &~"all") || argv.iter().any_(|x| x == &name)
}
if !run_test {

View File

@ -378,7 +378,7 @@ fn validate(edges: ~[(node_id, node_id)],
info!(~"Verifying graph edges...");
let status = do edges.all() |e| {
let status = do edges.iter().all |e| {
let (u, v) = *e;
abs(level[u] - level[v]) <= 1
@ -402,7 +402,7 @@ fn validate(edges: ~[(node_id, node_id)],
if *v == -1i64 || u == root {
true
} else {
edges.contains(&(u, *v)) || edges.contains(&(*v, u))
edges.iter().any_(|x| x == &(u, *v)) || edges.iter().any_(|x| x == &(*v, u))
}
};
result

View File

@ -23,10 +23,10 @@ use std::uint;
use std::vec;
fn print_complements() {
let all = ~[Blue, Red, Yellow];
for vec::each(all) |aa| {
for vec::each(all) |bb| {
io::println(show_color(*aa) + " + " + show_color(*bb) +
let all = [Blue, Red, Yellow];
for all.iter().advance |aa| {
for all.iter().advance |bb| {
println(show_color(*aa) + " + " + show_color(*bb) +
" -> " + show_color(transform(*aa, *bb)));
}
}
@ -49,7 +49,7 @@ fn show_color(cc: color) -> ~str {
fn show_color_list(set: ~[color]) -> ~str {
let mut out = ~"";
for vec::eachi(set) |_ii, col| {
for set.iter().advance |col| {
out += " ";
out += show_color(*col);
}
@ -182,7 +182,7 @@ fn rendezvous(nn: uint, set: ~[color]) {
}
// tell each creature to stop
for vec::eachi(to_creature) |_ii, to_one| {
for to_creature.iter().advance |to_one| {
to_one.send(None);
}

Some files were not shown because too many files have changed in this diff.