Mirror of https://github.com/rust-lang/rust.git (synced 2025-02-12 06:53:05 +00:00)
commit fad7857c7b (parent f0fc9c92ff)

The diff below renames the consuming-iterator APIs across the tree: `consume_iter`, `consume_rev_iter`, and `consume` become `move_iter` and `move_rev_iter`; the iterator types (`ConsumeIterator`, `ConsumeRevIterator`, `HashMapConsumeIterator`, `HashSetConsumeIterator`, `TreeMapConsumeIterator`) become their `Move*` counterparts; and related helpers such as `to_managed_consume` and `map_consume` become `to_managed_move` and `map_move`.
@@ -63,7 +63,7 @@ pub struct MutDListIterator<'self, T> {
 
 /// DList consuming iterator
 #[deriving(Clone)]
-pub struct ConsumeIterator<T> {
+pub struct MoveIterator<T> {
     priv list: DList<T>
 }
 
@@ -391,14 +391,14 @@ impl<T> DList<T> {
 
     /// Consume the list into an iterator yielding elements by value
    #[inline]
-    pub fn consume_iter(self) -> ConsumeIterator<T> {
-        ConsumeIterator{list: self}
+    pub fn move_iter(self) -> MoveIterator<T> {
+        MoveIterator{list: self}
     }
 
     /// Consume the list into an iterator yielding elements by value, in reverse
     #[inline]
-    pub fn consume_rev_iter(self) -> Invert<ConsumeIterator<T>> {
-        self.consume_iter().invert()
+    pub fn move_rev_iter(self) -> Invert<MoveIterator<T>> {
+        self.move_iter().invert()
     }
 }
 
@@ -557,7 +557,7 @@ impl<'self, A> ListInsertion<A> for MutDListIterator<'self, A> {
     }
 }
 
-impl<A> Iterator<A> for ConsumeIterator<A> {
+impl<A> Iterator<A> for MoveIterator<A> {
     #[inline]
     fn next(&mut self) -> Option<A> { self.list.pop_front() }
 
@@ -567,7 +567,7 @@ impl<A> Iterator<A> for ConsumeIterator<A> {
     }
 }
 
-impl<A> DoubleEndedIterator<A> for ConsumeIterator<A> {
+impl<A> DoubleEndedIterator<A> for MoveIterator<A> {
     #[inline]
     fn next_back(&mut self) -> Option<A> { self.list.pop_back() }
 }
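For reference, a minimal usage sketch of the renamed `DList` API, in the pre-1.0 dialect this tree uses. `DList::new()` is assumed from the surrounding module (it does not appear in these hunks); the other calls mirror ones shown in the diff. Illustrative only; it will not compile on modern Rust.

```rust
// Illustrative sketch only (pre-1.0 Rust); DList::new() is assumed.
let mut m = DList::new();
m.push_back(1);
m.push_back(2);
m.push_back(3);

// move_iter() consumes the list, yielding elements by value front-to-back.
let forward = m.move_iter().collect::<~[int]>();
assert_eq!(forward, ~[1, 2, 3]);

let mut n = DList::new();
n.push_back(4);
n.push_back(5);

// move_rev_iter() is the inverted form: it yields back-to-front.
let backward = n.move_rev_iter().collect::<~[int]>();
assert_eq!(backward, ~[5, 4]);
```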
@@ -721,7 +721,7 @@ mod tests {
         check_links(&m);
         let sum = v + u;
         assert_eq!(sum.len(), m.len());
-        for elt in sum.consume_iter() {
+        for elt in sum.move_iter() {
             assert_eq!(m.pop_front(), Some(elt))
         }
     }
@@ -745,7 +745,7 @@ mod tests {
         check_links(&m);
         let sum = u + v;
         assert_eq!(sum.len(), m.len());
-        for elt in sum.consume_iter() {
+        for elt in sum.move_iter() {
             assert_eq!(m.pop_front(), Some(elt))
         }
     }
@@ -770,7 +770,7 @@ mod tests {
         m.rotate_backward(); check_links(&m);
         m.push_front(9); check_links(&m);
         m.rotate_forward(); check_links(&m);
-        assert_eq!(~[3,9,5,1,2], m.consume_iter().collect());
+        assert_eq!(~[3,9,5,1,2], m.move_iter().collect());
     }
 
     #[test]
@@ -900,7 +900,7 @@ mod tests {
         }
         check_links(&m);
         assert_eq!(m.len(), 3 + len * 2);
-        assert_eq!(m.consume_iter().collect::<~[int]>(), ~[-2,0,1,2,3,4,5,6,7,8,9,0,1]);
+        assert_eq!(m.move_iter().collect::<~[int]>(), ~[-2,0,1,2,3,4,5,6,7,8,9,0,1]);
     }
 
     #[test]
@@ -911,7 +911,7 @@ mod tests {
         m.merge(n, |a, b| a <= b);
         assert_eq!(m.len(), len);
         check_links(&m);
-        let res = m.consume_iter().collect::<~[int]>();
+        let res = m.move_iter().collect::<~[int]>();
         assert_eq!(res, ~[-1, 0, 0, 0, 1, 3, 5, 6, 7, 2, 7, 7, 9]);
     }
 
@@ -927,7 +927,7 @@ mod tests {
         m.push_back(4);
         m.insert_ordered(3);
         check_links(&m);
-        assert_eq!(~[2,3,4], m.consume_iter().collect::<~[int]>());
+        assert_eq!(~[2,3,4], m.move_iter().collect::<~[int]>());
     }
 
     #[test]
@@ -1003,7 +1003,7 @@ mod tests {
         check_links(&m);
 
         let mut i = 0u;
-        for (a, &b) in m.consume_iter().zip(v.iter()) {
+        for (a, &b) in m.move_iter().zip(v.iter()) {
             i += 1;
             assert_eq!(a, b);
         }
@@ -948,7 +948,7 @@ impl serialize::Decoder for Decoder {
         let name = match self.stack.pop() {
             String(s) => s,
             List(list) => {
-                for v in list.consume_rev_iter() {
+                for v in list.move_rev_iter() {
                     self.stack.push(v);
                 }
                 match self.stack.pop() {
@@ -1066,7 +1066,7 @@ impl serialize::Decoder for Decoder {
         let len = match self.stack.pop() {
             List(list) => {
                 let len = list.len();
-                for v in list.consume_rev_iter() {
+                for v in list.move_rev_iter() {
                     self.stack.push(v);
                 }
                 len
@@ -1086,7 +1086,7 @@ impl serialize::Decoder for Decoder {
         let len = match self.stack.pop() {
             Object(obj) => {
                 let len = obj.len();
-                for (key, value) in obj.consume_iter() {
+                for (key, value) in obj.move_iter() {
                     self.stack.push(value);
                     self.stack.push(String(key));
                 }
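The decoder hunks above all rely on the same trick: the children of a `List` (or `Object`) are pushed onto the decoder's stack with the reversed, consuming iterator so that subsequent pops return them in their original order. A minimal sketch of that pattern on a plain owned vector, in the same pre-1.0 dialect, illustrative only:

```rust
// Push elements in reverse so that pop() later yields them front-to-back.
let list = ~[1, 2, 3];
let mut stack = ~[];
for v in list.move_rev_iter() {
    stack.push(v);              // stack ends up as [3, 2, 1]
}
assert_eq!(stack.pop(), 1);
assert_eq!(stack.pop(), 2);
assert_eq!(stack.pop(), 3);
```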
@@ -77,7 +77,7 @@ fn map_slices<A:Clone + Send,B:Clone + Send>(
         info!("num_tasks: %?", (num_tasks, futures.len()));
         assert_eq!(num_tasks, futures.len());
 
-        do futures.consume_iter().transform |ys| {
+        do futures.move_iter().transform |ys| {
             let mut ys = ys;
             ys.get()
         }.collect()
@@ -152,12 +152,12 @@ impl<V> SmallIntMap<V> {
     }
 
     /// Empties the hash map, moving all values into the specified closure
-    pub fn consume(&mut self)
+    pub fn move_iter(&mut self)
         -> FilterMap<(uint, Option<V>), (uint, V),
-                Enumerate<vec::ConsumeIterator<Option<V>>>>
+                Enumerate<vec::MoveIterator<Option<V>>>>
     {
         let values = replace(&mut self.v, ~[]);
-        values.consume_iter().enumerate().filter_map(|(i, v)| {
+        values.move_iter().enumerate().filter_map(|(i, v)| {
             v.map_move(|v| (i, v))
         })
     }
@@ -452,11 +452,11 @@ mod test_map {
     }
 
     #[test]
-    fn test_consume() {
+    fn test_move_iter() {
         let mut m = SmallIntMap::new();
         m.insert(1, ~2);
         let mut called = false;
-        for (k, v) in m.consume() {
+        for (k, v) in m.move_iter() {
             assert!(!called);
             called = true;
             assert_eq!(k, 1);
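A short usage sketch of the renamed `SmallIntMap::move_iter` shown above. Note that, unlike the other `move_iter` methods in this commit, it takes `&mut self` and empties the map in place by swapping out the backing vector. Pre-1.0 dialect, illustrative only.

```rust
// Illustrative sketch only (pre-1.0 Rust).
let mut m = SmallIntMap::new();
m.insert(1, ~2);
m.insert(3, ~4);

// move_iter() empties the map in place and yields the occupied
// (key, value) slots, skipping the holes.
let pairs = m.move_iter().collect::<~[(uint, ~int)]>();
assert_eq!(pairs.len(), 2);
```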
@@ -893,7 +893,7 @@ mod tests {
     fn ile(x: &(&'static str), y: &(&'static str)) -> bool
     {
         // FIXME: #4318 Instead of to_ascii and to_str_ascii, could use
-        // to_ascii_consume and to_str_consume to not do a unnecessary clone.
+        // to_ascii_move and to_str_move to not do a unnecessary clone.
         // (Actually, could just remove the to_str_* call, but needs an deriving(Ord) on
         // Ascii)
         let x = x.to_ascii().to_lower().to_str_ascii();
@@ -698,7 +698,7 @@ fn run_tests(opts: &TestOpts,
 
     // All benchmarks run at the end, in serial.
     // (this includes metric fns)
-    for b in filtered_benchs_and_metrics.consume_iter() {
+    for b in filtered_benchs_and_metrics.move_iter() {
         callback(TeWait(b.desc.clone()));
         run_test(!opts.run_benchmarks, b, ch.clone());
         let (test, result) = p.recv();
@@ -744,7 +744,7 @@ pub fn filter_tests(
             }
         }
 
-        filtered.consume_iter().filter_map(|x| filter_fn(x, filter_str)).collect()
+        filtered.move_iter().filter_map(|x| filter_fn(x, filter_str)).collect()
     };
 
     // Maybe pull out the ignored test and unignore them
@@ -762,7 +762,7 @@ pub fn filter_tests(
                 None
             }
         };
-        filtered.consume_iter().filter_map(|x| filter(x)).collect()
+        filtered.move_iter().filter_map(|x| filter(x)).collect()
     };
 
     // Sort the tests alphabetically
@@ -213,13 +213,13 @@ impl<K: TotalOrd, V> TreeMap<K, V> {
     }
 
     /// Get a lazy iterator that consumes the treemap.
-    pub fn consume_iter(self) -> TreeMapConsumeIterator<K, V> {
+    pub fn move_iter(self) -> TreeMapMoveIterator<K, V> {
         let TreeMap { root: root, length: length } = self;
         let stk = match root {
             None => ~[],
             Some(~tn) => ~[tn]
         };
-        TreeMapConsumeIterator {
+        TreeMapMoveIterator {
             stack: stk,
             remaining: length
         }
@@ -331,12 +331,12 @@ fn iter_traverse_complete<'a, K, V>(it: &mut TreeMapIterator<'a, K, V>) {
 }
 
 /// Lazy forward iterator over a map that consumes the map while iterating
-pub struct TreeMapConsumeIterator<K, V> {
+pub struct TreeMapMoveIterator<K, V> {
     priv stack: ~[TreeNode<K, V>],
     priv remaining: uint
 }
 
-impl<K, V> Iterator<(K, V)> for TreeMapConsumeIterator<K,V> {
+impl<K, V> Iterator<(K, V)> for TreeMapMoveIterator<K,V> {
     #[inline]
     fn next(&mut self) -> Option<(K, V)> {
         while !self.stack.is_empty() {
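A small sketch of the renamed `TreeMap::move_iter` shown above. `TreeMap::new()` and `insert` are assumed from the surrounding module; only the consuming-iterator call itself comes from this diff. Pre-1.0 dialect, illustrative only.

```rust
// Illustrative sketch only (pre-1.0 Rust); TreeMap::new()/insert are assumed.
let mut map = TreeMap::new();
map.insert(~"a", 1);
map.insert(~"b", 2);

// move_iter() consumes the map and yields owned (key, value) pairs,
// so the ~str keys come out by value rather than by reference.
let pairs = map.move_iter().collect::<~[(~str, int)]>();
assert_eq!(pairs.len(), 2);
```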
@@ -935,7 +935,7 @@ pub fn link_args(sess: Session,
     // Add all the link args for external crates.
     do cstore::iter_crate_data(cstore) |crate_num, _| {
         let link_args = csearch::get_link_args_for_crate(cstore, crate_num);
-        for link_arg in link_args.consume_iter() {
+        for link_arg in link_args.move_iter() {
             args.push(link_arg);
         }
     }
@@ -120,7 +120,7 @@ pub fn build_configuration(sess: Session, argv0: @str, input: &input) ->
 // Convert strings provided as --cfg [cfgspec] into a crate_cfg
 fn parse_cfgspecs(cfgspecs: ~[~str],
                   demitter: diagnostic::Emitter) -> ast::CrateConfig {
-    do cfgspecs.consume_iter().transform |s| {
+    do cfgspecs.move_iter().transform |s| {
         let sess = parse::new_parse_sess(Some(demitter));
         parse::parse_meta_from_source_str(@"cfgspec", s.to_managed(), ~[], sess)
     }.collect::<ast::CrateConfig>()
@@ -631,7 +631,7 @@ pub fn build_session_options(binary: @str,
         let level_name = lint::level_to_str(*level);
 
         // FIXME: #4318 Instead of to_ascii and to_str_ascii, could use
-        // to_ascii_consume and to_str_consume to not do a unnecessary copy.
+        // to_ascii_move and to_str_move to not do a unnecessary copy.
         let level_short = level_name.slice_chars(0, 1);
         let level_short = level_short.to_ascii().to_upper().to_str_ascii();
         let flags = vec::append(getopts::opt_strs(matches, level_short),
@@ -1204,7 +1204,7 @@ fn decode_side_tables(xcx: @ExtendedDecodeContext,
                 }
                 c::tag_table_capture_map => {
                     let cvars =
-                        at_vec::to_managed_consume(
+                        at_vec::to_managed_move(
                             val_dsr.read_to_vec(
                                 |val_dsr| val_dsr.read_capture_var(xcx)));
                     dcx.maps.capture_map.insert(id, cvars);
@@ -994,7 +994,7 @@ fn lint_session(cx: @mut Context) -> @visit::Visitor<()> {
         match cx.tcx.sess.lints.pop(&id) {
             None => {},
             Some(l) => {
-                for (lint, span, msg) in l.consume_iter() {
+                for (lint, span, msg) in l.move_iter() {
                     cx.span_lint(lint, span, msg)
                 }
             }
@@ -5366,7 +5366,7 @@ impl Resolver {
         if idents.len() == 0 {
             return ~"???";
         }
-        return self.idents_to_str(idents.consume_rev_iter().collect::<~[ast::ident]>());
+        return self.idents_to_str(idents.move_rev_iter().collect::<~[ast::ident]>());
     }
 
     pub fn dump_module(@mut self, module_: @mut Module) {
@@ -156,7 +156,7 @@ Available lint options:
 ");
 
     let lint_dict = lint::get_lint_dict();
-    let mut lint_dict = lint_dict.consume()
+    let mut lint_dict = lint_dict.move_iter()
                                  .transform(|(k, v)| (v, k))
                                  .collect::<~[(lint::LintSpec, &'static str)]>();
     lint_dict.qsort();
@@ -173,7 +173,7 @@ Available lint options:
              padded(max_key, "name"), "default", "meaning");
    printfln!(" %s %7.7s %s\n",
              padded(max_key, "----"), "-------", "-------");
-    for (spec, name) in lint_dict.consume_iter() {
+    for (spec, name) in lint_dict.move_iter() {
        let name = name.replace("_", "-");
        printfln!(" %s %7.7s %s",
                  padded(max_key, name),
@@ -41,7 +41,7 @@ pub fn parse_crate(attrs: ~[ast::Attribute]) -> CrateAttrs {
 }
 
 pub fn parse_desc(attrs: ~[ast::Attribute]) -> Option<~str> {
-    let doc_strs = do doc_metas(attrs).consume_iter().filter_map |meta| {
+    let doc_strs = do doc_metas(attrs).move_iter().filter_map |meta| {
         meta.value_str()
     }.collect::<~[@str]>();
     if doc_strs.is_empty() {
@@ -167,7 +167,7 @@ impl Program {
         }
 
         let newvars = util::replace(&mut self.newvars, HashMap::new());
-        for (name, var) in newvars.consume() {
+        for (name, var) in newvars.move_iter() {
            self.local_vars.insert(name, var);
        }
 
@@ -233,7 +233,7 @@ impl Program {
     pub fn consume_cache(&mut self) {
         let map = local_data::pop(tls_key).expect("tls is empty");
         let cons_map = util::replace(map, HashMap::new());
-        for (name, value) in cons_map.consume() {
+        for (name, value) in cons_map.move_iter() {
             match self.local_vars.find_mut(&name) {
                 Some(v) => { v.data = (*value).clone(); }
                 None => { fail!("unknown variable %s", name) }
@@ -345,7 +345,7 @@ impl Program {
 
         // I'm not an @ pointer, so this has to be done outside.
         let cons_newvars = util::replace(newvars, HashMap::new());
-        for (k, v) in cons_newvars.consume() {
+        for (k, v) in cons_newvars.move_iter() {
             self.newvars.insert(k, v);
         }
 
@@ -37,7 +37,7 @@ pub fn each_pkg_parent_workspace(pkgid: &PkgId, action: &fn(&Path) -> bool) -> b
 }
 
 pub fn pkg_parent_workspaces(pkgid: &PkgId) -> ~[Path] {
-    rust_path().consume_iter()
+    rust_path().move_iter()
                .filter(|ws| workspace_contains_package_id(pkgid, ws))
                .collect()
 }
@@ -141,11 +141,11 @@ pub fn from_elem<T:Clone>(n_elts: uint, t: T) -> @[T] {
  * Creates and initializes an immutable managed vector by moving all the
  * elements from an owned vector.
  */
-pub fn to_managed_consume<T>(v: ~[T]) -> @[T] {
+pub fn to_managed_move<T>(v: ~[T]) -> @[T] {
     let mut av = @[];
     unsafe {
         raw::reserve(&mut av, v.len());
-        for x in v.consume_iter() {
+        for x in v.move_iter() {
             raw::push(&mut av, x);
         }
         av
@@ -331,12 +331,12 @@ mod test {
     }
 
     #[test]
-    fn test_to_managed_consume() {
-        assert_eq!(to_managed_consume::<int>(~[]), @[]);
-        assert_eq!(to_managed_consume(~[true]), @[true]);
-        assert_eq!(to_managed_consume(~[1, 2, 3, 4, 5]), @[1, 2, 3, 4, 5]);
-        assert_eq!(to_managed_consume(~[~"abc", ~"123"]), @[~"abc", ~"123"]);
-        assert_eq!(to_managed_consume(~[~[42]]), @[~[42]]);
+    fn test_to_managed_move() {
+        assert_eq!(to_managed_move::<int>(~[]), @[]);
+        assert_eq!(to_managed_move(~[true]), @[true]);
+        assert_eq!(to_managed_move(~[1, 2, 3, 4, 5]), @[1, 2, 3, 4, 5]);
+        assert_eq!(to_managed_move(~[~"abc", ~"123"]), @[~"abc", ~"123"]);
+        assert_eq!(to_managed_move(~[~[42]]), @[~[42]]);
     }
 
     #[test]
@@ -150,7 +150,7 @@ pub fn rights<L, R: Clone>(eithers: &[Either<L, R>]) -> ~[R] {
 pub fn partition<L, R>(eithers: ~[Either<L, R>]) -> (~[L], ~[R]) {
     let mut lefts: ~[L] = ~[];
     let mut rights: ~[R] = ~[];
-    for elt in eithers.consume_iter() {
+    for elt in eithers.move_iter() {
         match elt {
             Left(l) => lefts.push(l),
             Right(r) => rights.push(r)
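The `either::partition` hunk above consumes its input vector with the renamed `move_iter`. A tiny usage sketch, using only the `Left`/`Right` constructors and `partition` signature shown in the hunk (pre-1.0 dialect, illustrative only):

```rust
// Illustrative sketch only (pre-1.0 Rust).
let eithers = ~[Left(1), Right(~"oops"), Left(2)];

// partition() moves every element out of the vector and splits it
// into the Left payloads and the Right payloads.
let (lefts, rights) = partition(eithers);
assert_eq!(lefts, ~[1, 2]);
assert_eq!(rights, ~[~"oops"]);
```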
@@ -159,8 +159,8 @@ impl<K:Hash + Eq,V> HashMap<K, V> {
                           vec::from_fn(new_capacity, |_| None));
 
         self.size = 0;
-        // consume_rev_iter is more efficient
-        for bucket in old_buckets.consume_rev_iter() {
+        // move_rev_iter is more efficient
+        for bucket in old_buckets.move_rev_iter() {
             self.insert_opt_bucket(bucket);
         }
     }
@@ -470,9 +470,9 @@ impl<K: Hash + Eq, V> HashMap<K, V> {
     /// Creates a consuming iterator, that is, one that moves each key-value
     /// pair out of the map in arbitrary order. The map cannot be used after
     /// calling this.
-    pub fn consume(self) -> HashMapConsumeIterator<K, V> {
-        // `consume_rev_iter` is more efficient than `consume_iter` for vectors
-        HashMapConsumeIterator {iter: self.buckets.consume_rev_iter()}
+    pub fn move_iter(self) -> HashMapMoveIterator<K, V> {
+        // `move_rev_iter` is more efficient than `move_iter` for vectors
+        HashMapMoveIterator {iter: self.buckets.move_rev_iter()}
     }
 }
 
@@ -524,9 +524,9 @@ pub struct HashMapMutIterator<'self, K, V> {
     priv iter: vec::VecMutIterator<'self, Option<Bucket<K, V>>>,
 }
 
-/// HashMap consume iterator
-pub struct HashMapConsumeIterator<K, V> {
-    priv iter: vec::ConsumeRevIterator<Option<Bucket<K, V>>>,
+/// HashMap move iterator
+pub struct HashMapMoveIterator<K, V> {
+    priv iter: vec::MoveRevIterator<Option<Bucket<K, V>>>,
 }
 
 /// HashSet iterator
@@ -535,9 +535,9 @@ pub struct HashSetIterator<'self, K> {
     priv iter: vec::VecIterator<'self, Option<Bucket<K, ()>>>,
 }
 
-/// HashSet consume iterator
-pub struct HashSetConsumeIterator<K> {
-    priv iter: vec::ConsumeRevIterator<Option<Bucket<K, ()>>>,
+/// HashSet move iterator
+pub struct HashSetMoveIterator<K> {
+    priv iter: vec::MoveRevIterator<Option<Bucket<K, ()>>>,
 }
 
 impl<'self, K, V> Iterator<(&'self K, &'self V)> for HashMapIterator<'self, K, V> {
@@ -566,7 +566,7 @@ impl<'self, K, V> Iterator<(&'self K, &'self mut V)> for HashMapMutIterator<'sel
     }
 }
 
-impl<K, V> Iterator<(K, V)> for HashMapConsumeIterator<K, V> {
+impl<K, V> Iterator<(K, V)> for HashMapMoveIterator<K, V> {
     #[inline]
     fn next(&mut self) -> Option<(K, V)> {
         for elt in self.iter {
@@ -592,7 +592,7 @@ impl<'self, K> Iterator<&'self K> for HashSetIterator<'self, K> {
     }
 }
 
-impl<K> Iterator<K> for HashSetConsumeIterator<K> {
+impl<K> Iterator<K> for HashSetMoveIterator<K> {
     #[inline]
     fn next(&mut self) -> Option<K> {
         for elt in self.iter {
@@ -707,9 +707,9 @@ impl<T:Hash + Eq> HashSet<T> {
     /// Creates a consuming iterator, that is, one that moves each value out
     /// of the set in arbitrary order. The set cannot be used after calling
     /// this.
-    pub fn consume(self) -> HashSetConsumeIterator<T> {
-        // `consume_rev_iter` is more efficient than `consume_iter` for vectors
-        HashSetConsumeIterator {iter: self.map.buckets.consume_rev_iter()}
+    pub fn move_iter(self) -> HashSetMoveIterator<T> {
+        // `move_rev_iter` is more efficient than `move_iter` for vectors
+        HashSetMoveIterator {iter: self.map.buckets.move_rev_iter()}
     }
 
     /// Visit the values representing the difference
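A usage sketch of the renamed `HashMap`/`HashSet` consuming iterators, mirroring the tests that follow in this diff. Pre-1.0 dialect, illustrative only.

```rust
// Illustrative sketch only (pre-1.0 Rust), mirroring the tests below.
let mut hm = HashMap::new();
hm.insert('a', 1);
hm.insert('b', 2);

// move_iter() takes the map by value; the (char, int) pairs come out
// by value, in arbitrary order.
let pairs = hm.move_iter().collect::<~[(char, int)]>();
assert_eq!(pairs.len(), 2);

let mut hs = HashSet::new();
hs.insert(~"x");

// The same pattern for HashSet: elements are moved out of the set.
let elems = hs.move_iter().collect::<~[~str]>();
assert_eq!(elems, ~[~"x"]);
```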
@@ -881,7 +881,7 @@ mod test_map {
     }
 
     #[test]
-    fn test_consume() {
+    fn test_move_iter() {
         let hm = {
             let mut hm = HashMap::new();
 
@@ -891,7 +891,7 @@ mod test_map {
             hm
         };
 
-        let v = hm.consume().collect::<~[(char, int)]>();
+        let v = hm.move_iter().collect::<~[(char, int)]>();
         assert!([('a', 1), ('b', 2)] == v || [('b', 2), ('a', 1)] == v);
     }
 
@@ -1177,7 +1177,7 @@ mod test_set {
     }
 
     #[test]
-    fn test_consume() {
+    fn test_move_iter() {
         let hs = {
             let mut hs = HashSet::new();
 
@@ -1187,7 +1187,7 @@ mod test_set {
             hs
         };
 
-        let v = hs.consume().collect::<~[char]>();
+        let v = hs.move_iter().collect::<~[char]>();
         assert!(['a', 'b'] == v || ['b', 'a'] == v);
     }
 }
@@ -134,7 +134,7 @@ impl<T> Option<T> {
 
     /// Return a consuming iterator over the possibly contained value
     #[inline]
-    pub fn consume(self) -> OptionIterator<T> {
+    pub fn move_iter(self) -> OptionIterator<T> {
         OptionIterator{opt: self}
     }
 
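The `Option` hunk above renames its consuming iterator constructor as well. A minimal sketch of the renamed call (pre-1.0 dialect, illustrative only):

```rust
// Illustrative sketch only (pre-1.0 Rust).
let opt = Some(~"payload");

// move_iter() turns the Option into an iterator of zero or one items,
// yielding the contained value by value rather than by reference.
for s in opt.move_iter() {
    assert_eq!(s, ~"payload");   // s is a ~str, moved out of the Option
}
```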
@@ -765,7 +765,7 @@ pub fn list_dir(p: &Path) -> ~[~str] {
             strings
         }
     }
-    do get_list(p).consume_iter().filter |filename| {
+    do get_list(p).move_iter().filter |filename| {
         "." != *filename && ".." != *filename
     }.collect()
 }
@@ -961,7 +961,7 @@ impl GenericPath for WindowsPath {
         match self.filestem() {
             Some(stem) => {
                 // FIXME: #4318 Instead of to_ascii and to_str_ascii, could use
-                // to_ascii_consume and to_str_consume to not do a unnecessary copy.
+                // to_ascii_move and to_str_move to not do a unnecessary copy.
                 match stem.to_ascii().to_lower().to_str_ascii() {
                     ~"con" | ~"aux" | ~"com1" | ~"com2" | ~"com3" | ~"com4" |
                     ~"lpt1" | ~"lpt2" | ~"lpt3" | ~"prn" | ~"nul" => true,
@@ -1020,7 +1020,7 @@ impl GenericPath for WindowsPath {
                 None => None,
 
                 // FIXME: #4318 Instead of to_ascii and to_str_ascii, could use
-                // to_ascii_consume and to_str_consume to not do a unnecessary copy.
+                // to_ascii_move and to_str_move to not do a unnecessary copy.
                 Some(ref device) => Some(device.to_ascii().to_upper().to_str_ascii())
             },
             is_absolute: self.is_absolute,
@@ -94,7 +94,7 @@ impl<T, E: ToStr> Result<T, E> {
         match *self {
             Ok(ref t) => Some(t),
             Err(*) => None,
-        }.consume()
+        }.move_iter()
     }
 
     /// Call a method based on a previous result
@@ -108,7 +108,7 @@ impl<T, E: ToStr> Result<T, E> {
         match *self {
             Ok(*) => None,
             Err(ref t) => Some(t),
-        }.consume()
+        }.move_iter()
     }
 
     /// Unwraps a result, yielding the content of an `Ok`.
@@ -239,7 +239,7 @@ impl BlockedTask {
         };
         // Even if the task was unkillable before, we use 'Killable' because
         // multiple pipes will have handles. It does not really mean killable.
-        handles.consume_iter().transform(|x| Killable(x)).collect()
+        handles.move_iter().transform(|x| Killable(x)).collect()
     }
 
     // This assertion has two flavours because the wake involves an atomic op.
@@ -391,7 +391,7 @@ fn run_(main: ~fn(), use_main_sched: bool) -> int {
     rtdebug!("waiting for threads");
 
     // Wait for schedulers
-    for thread in threads.consume_iter() {
+    for thread in threads.move_iter() {
         thread.join();
     }
 
@@ -54,7 +54,7 @@ pub fn select<A: Select>(ports: &mut [A]) -> uint {
     let task_handles = task.make_selectable(ports.len());
 
     for (index, (port, task_handle)) in
-            ports.mut_iter().zip(task_handles.consume_iter()).enumerate() {
+            ports.mut_iter().zip(task_handles.move_iter()).enumerate() {
         // If one of the ports has data by now, it will wake the handle.
         if port.block_on(sched, task_handle) {
             ready_index = index;
@@ -128,7 +128,7 @@ mod test {
             let (ports, chans) = unzip(from_fn(num_ports, |_| oneshot::<()>()));
             let mut dead_chans = ~[];
             let mut ports = ports;
-            for (i, chan) in chans.consume_iter().enumerate() {
+            for (i, chan) in chans.move_iter().enumerate() {
                 if send_on_chans.contains(&i) {
                     chan.send(());
                 } else {
@@ -145,7 +145,7 @@ mod test {
            let (ports, chans) = unzip(from_fn(num_ports, |_| stream::<()>()));
            let mut dead_chans = ~[];
            let mut ports = ports;
-            for (i, chan) in chans.consume_iter().enumerate() {
+            for (i, chan) in chans.move_iter().enumerate() {
                if send_on_chans.contains(&i) {
                    chan.send(());
                } else {
@@ -232,7 +232,7 @@ pub fn run_in_mt_newsched_task(f: ~fn()) {
        }
 
        // Wait for schedulers
-        for thread in threads.consume_iter() {
+        for thread in threads.move_iter() {
            thread.join();
        }
    }
@@ -79,7 +79,7 @@ use cast;
 use cell::Cell;
 use container::MutableMap;
 use comm::{Chan, GenericChan, oneshot};
-use hashmap::{HashSet, HashSetConsumeIterator};
+use hashmap::{HashSet, HashSetMoveIterator};
 use local_data;
 use task::{Failure, SingleThreaded};
 use task::{Success, TaskOpts, TaskResult};
@@ -141,8 +141,8 @@ impl TaskSet {
         assert!(was_present);
     }
     #[inline]
-    fn consume(self) -> HashSetConsumeIterator<TaskHandle> {
-        (*self).consume()
+    fn move_iter(self) -> HashSetMoveIterator<TaskHandle> {
+        (*self).move_iter()
     }
 }
 
@@ -460,13 +460,13 @@ fn kill_taskgroup(state: TaskGroupInner, me: &TaskHandle, is_main: bool) {
     if newstate.is_some() {
         let TaskGroupData { members: members, descendants: descendants } =
             newstate.unwrap();
-        for sibling in members.consume() {
+        for sibling in members.move_iter() {
             // Skip self - killing ourself won't do much good.
             if &sibling != me {
                 RuntimeGlue::kill_task(sibling);
             }
         }
-        for child in descendants.consume() {
+        for child in descendants.move_iter() {
             assert!(&child != me);
             RuntimeGlue::kill_task(child);
         }
@@ -526,7 +526,7 @@ pub mod rt {
         TyHexLower => uint_to_str_prec(u, 16, prec),
 
         // FIXME: #4318 Instead of to_ascii and to_str_ascii, could use
-        // to_ascii_consume and to_str_consume to not do a unnecessary copy.
+        // to_ascii_move and to_str_move to not do a unnecessary copy.
         TyHexUpper => {
            let s = uint_to_str_prec(u, 16, prec);
            s.to_ascii().to_upper().to_str_ascii()
@@ -382,7 +382,7 @@ pub fn unzip_slice<T:Clone,U:Clone>(v: &[(T, U)]) -> (~[T], ~[U]) {
 pub fn unzip<T,U>(v: ~[(T, U)]) -> (~[T], ~[U]) {
     let mut ts = ~[];
     let mut us = ~[];
-    for p in v.consume_iter() {
+    for p in v.move_iter() {
         let (t, u) = p;
         ts.push(t);
         us.push(u);
@@ -1174,8 +1174,8 @@ impl<'self,T:Clone> ImmutableCopyableVector<T> for &'self [T] {
 
 #[allow(missing_doc)]
 pub trait OwnedVector<T> {
-    fn consume_iter(self) -> ConsumeIterator<T>;
-    fn consume_rev_iter(self) -> ConsumeRevIterator<T>;
+    fn move_iter(self) -> MoveIterator<T>;
+    fn move_rev_iter(self) -> MoveRevIterator<T>;
 
     fn reserve(&mut self, n: uint);
     fn reserve_at_least(&mut self, n: uint);
@@ -1204,26 +1204,26 @@ impl<T> OwnedVector<T> for ~[T] {
     /// value out of the vector (from start to end). The vector cannot
     /// be used after calling this.
     ///
-    /// Note that this performs O(n) swaps, and so `consume_rev_iter`
+    /// Note that this performs O(n) swaps, and so `move_rev_iter`
     /// (which just calls `pop` repeatedly) is more efficient.
     ///
     /// # Examples
     ///
     /// ~~~ {.rust}
     /// let v = ~[~"a", ~"b"];
-    /// for s in v.consume_iter() {
+    /// for s in v.move_iter() {
     ///     // s has type ~str, not &~str
     ///     println(s);
     /// }
     /// ~~~
-    fn consume_iter(self) -> ConsumeIterator<T> {
-        ConsumeIterator { v: self, idx: 0 }
+    fn move_iter(self) -> MoveIterator<T> {
+        MoveIterator { v: self, idx: 0 }
     }
     /// Creates a consuming iterator that moves out of the vector in
-    /// reverse order. Also see `consume_iter`, however note that this
+    /// reverse order. Also see `move_iter`, however note that this
     /// is more efficient.
-    fn consume_rev_iter(self) -> ConsumeRevIterator<T> {
-        ConsumeRevIterator { v: self }
+    fn move_rev_iter(self) -> MoveRevIterator<T> {
+        MoveRevIterator { v: self }
     }
 
     /**
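The doc comment above explains the trade-off between the two consuming iterators on `~[T]`: `move_iter` keeps the front-to-back order at the cost of O(n) swaps, while `move_rev_iter` just pops from the back. A short sketch of both, built only from calls that appear in this diff (pre-1.0 dialect, illustrative only):

```rust
// Illustrative sketch only (pre-1.0 Rust).
let v = ~[~"a", ~"b", ~"c"];

// move_iter() yields front-to-back but performs O(n) swaps overall,
// as the doc comment above notes.
for s in v.move_iter() {
    println(s);          // s is an owned ~str, moved out of the vector
}

// move_rev_iter() just pops from the back, so it is cheaper when the
// order does not matter (the HashMap rehashing hunk above exploits this).
let w = ~[1, 2, 3];
let reversed = w.move_rev_iter().collect::<~[int]>();
assert_eq!(reversed, ~[3, 2, 1]);
```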
@@ -1540,7 +1540,7 @@ impl<T> OwnedVector<T> for ~[T] {
         let mut lefts = ~[];
         let mut rights = ~[];
 
-        for elt in self.consume_iter() {
+        for elt in self.move_iter() {
             if f(&elt) {
                 lefts.push(elt);
             } else {
@@ -2281,12 +2281,12 @@ pub type MutRevIterator<'self, T> = Invert<VecMutIterator<'self, T>>;
 
 /// An iterator that moves out of a vector.
 #[deriving(Clone)]
-pub struct ConsumeIterator<T> {
+pub struct MoveIterator<T> {
     priv v: ~[T],
     priv idx: uint,
 }
 
-impl<T> Iterator<T> for ConsumeIterator<T> {
+impl<T> Iterator<T> for MoveIterator<T> {
     fn next(&mut self) -> Option<T> {
         // this is peculiar, but is required for safety with respect
         // to dtors. It traverses the first half of the vec, and
@@ -2308,11 +2308,11 @@ impl<T> Iterator<T> for ConsumeIterator<T> {
 
 /// An iterator that moves out of a vector in reverse order.
 #[deriving(Clone)]
-pub struct ConsumeRevIterator<T> {
+pub struct MoveRevIterator<T> {
     priv v: ~[T]
 }
 
-impl<T> Iterator<T> for ConsumeRevIterator<T> {
+impl<T> Iterator<T> for MoveRevIterator<T> {
     fn next(&mut self) -> Option<T> {
         self.v.pop_opt()
     }
@@ -3323,17 +3323,17 @@ mod tests {
     }
 
     #[test]
-    fn test_consume_iterator() {
+    fn test_move_iterator() {
         use iterator::*;
         let xs = ~[1u,2,3,4,5];
-        assert_eq!(xs.consume_iter().fold(0, |a: uint, b: uint| 10*a + b), 12345);
+        assert_eq!(xs.move_iter().fold(0, |a: uint, b: uint| 10*a + b), 12345);
     }
 
     #[test]
-    fn test_consume_rev_iterator() {
+    fn test_move_rev_iterator() {
         use iterator::*;
         let xs = ~[1u,2,3,4,5];
-        assert_eq!(xs.consume_rev_iter().fold(0, |a: uint, b: uint| 10*a + b), 54321);
+        assert_eq!(xs.move_rev_iter().fold(0, |a: uint, b: uint| 10*a + b), 54321);
     }
 
     #[test]
@@ -3608,7 +3608,7 @@ mod tests {
         }
         assert_eq!(cnt, 8);
 
-        for f in v.consume_iter() {
+        for f in v.move_iter() {
             assert!(f == Foo);
             cnt += 1;
         }
@@ -247,7 +247,7 @@ pub fn unguarded_pat(a: &arm) -> Option<~[@pat]> {
 }
 
 pub fn public_methods(ms: ~[@method]) -> ~[@method] {
-    do ms.consume_iter().filter |m| {
+    do ms.move_iter().filter |m| {
         match m.vis {
             public => true,
             _ => false
@@ -209,7 +209,7 @@ pub fn sort_meta_items(items: &[@MetaItem]) -> ~[@MetaItem] {
     }
 
     // There doesn't seem to be a more optimal way to do this
-    do v.consume_iter().transform |(_, m)| {
+    do v.move_iter().transform |(_, m)| {
         match m.node {
             MetaList(n, ref mis) => {
                 @spanned {
@@ -702,7 +702,7 @@ impl AstBuilder for @ExtCtxt {
     }
 
     fn variant(&self, span: span, name: ident, tys: ~[ast::Ty]) -> ast::variant {
-        let args = tys.consume_iter().transform(|ty| {
+        let args = tys.move_iter().transform(|ty| {
             ast::variant_arg { ty: ty, id: self.next_id() }
         }).collect();
 
@@ -269,7 +269,7 @@ fn pieces_to_expr(cx: @ExtCtxt, sp: span,
    corresponding function in std::unstable::extfmt. Each function takes a
    buffer to insert data into along with the data being formatted. */
     let npieces = pieces.len();
-    for (i, pc) in pieces.consume_iter().enumerate() {
+    for (i, pc) in pieces.move_iter().enumerate() {
         match pc {
             /* Raw strings get appended via str::push_str */
             PieceString(s) => {
@@ -575,8 +575,8 @@ impl Context {
                 Some(self.format_arg(e.span, Right(name), lname));
         }
 
-        let args = names.consume_iter().transform(|a| a.unwrap());
-        let mut args = locals.consume_iter().chain_(args);
+        let args = names.move_iter().transform(|a| a.unwrap());
+        let mut args = locals.move_iter().chain_(args);
 
         // Next, build up the actual call to the sprintf function.
         let result = self.ecx.expr_call_global(self.fmtsp, ~[
@@ -176,7 +176,7 @@ pub fn fold_ty_param(tp: TyParam,
 pub fn fold_ty_params(tps: &OptVec<TyParam>,
                       fld: @ast_fold) -> OptVec<TyParam> {
     let tps = /*bad*/ (*tps).clone();
-    tps.map_consume(|tp| fold_ty_param(tp, fld))
+    tps.map_move(|tp| fold_ty_param(tp, fld))
 }
 
 pub fn fold_lifetime(l: &Lifetime,
@@ -57,10 +57,10 @@ impl<T> OptVec<T> {
         }
     }
 
-    fn map_consume<U>(self, op: &fn(T) -> U) -> OptVec<U> {
+    fn map_move<U>(self, op: &fn(T) -> U) -> OptVec<U> {
         match self {
             Empty => Empty,
-            Vec(v) => Vec(v.consume_iter().transform(op).collect())
+            Vec(v) => Vec(v.move_iter().transform(op).collect())
         }
     }
 
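A sketch of the renamed `OptVec::map_move`, using only the `Empty` and `Vec` variants visible in the hunk above. It is written as if inside the defining module, since `map_move` is not public. Pre-1.0 dialect, illustrative only.

```rust
// Illustrative sketch only (pre-1.0 Rust), inside the OptVec module.
let tys = Vec(~[1, 2, 3]);

// map_move() consumes the OptVec and rebuilds it by moving each
// element through the closure (move_iter + transform underneath).
let doubled = tys.map_move(|x| x * 2);
match doubled {
    Vec(v) => assert_eq!(v, ~[2, 4, 6]),
    Empty => fail!("expected a non-empty OptVec")
}
```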
@@ -4310,7 +4310,7 @@ impl Parser {
             seq_sep_trailing_disallowed(token::COMMA),
             |p| p.parse_ty(false)
         );
-        for ty in arg_tys.consume_iter() {
+        for ty in arg_tys.move_iter() {
             args.push(ast::variant_arg {
                 ty: ty,
                 id: self.get_id(),
@@ -96,9 +96,9 @@ fn make_graph(N: uint, edges: ~[(node_id, node_id)]) -> graph {
         }
     }
 
-    do graph.consume_iter().transform |v| {
+    do graph.move_iter().transform |v| {
         let mut vec = ~[];
-        for i in v.consume() {
+        for i in v.move_iter() {
             vec.push(i);
         }
         vec
@@ -119,7 +119,7 @@ fn gen_search_keys(graph: &[~[node_id]], n: uint) -> ~[node_id] {
         }
     }
     let mut vec = ~[];
-    for i in keys.consume() {
+    for i in keys.move_iter() {
        vec.push(i);
    }
    return vec;
@@ -75,7 +75,7 @@ fn sort_and_fmt(mm: &HashMap<~[u8], uint>, total: uint) -> ~str {
        unsafe {
            let b = str::raw::from_bytes(k);
            // FIXME: #4318 Instead of to_ascii and to_str_ascii, could use
-            // to_ascii_consume and to_str_consume to not do a unnecessary copy.
+            // to_ascii_move and to_str_move to not do a unnecessary copy.
            buffer.push_str(fmt!("%s %0.3f\n", b.to_ascii().to_upper().to_str_ascii(), v));
        }
    }
@@ -86,7 +86,7 @@ fn sort_and_fmt(mm: &HashMap<~[u8], uint>, total: uint) -> ~str {
 // given a map, search for the frequency of a pattern
 fn find(mm: &HashMap<~[u8], uint>, key: ~str) -> uint {
     // FIXME: #4318 Instead of to_ascii and to_str_ascii, could use
-    // to_ascii_consume and to_str_consume to not do a unnecessary copy.
+    // to_ascii_move and to_str_move to not do a unnecessary copy.
     let key = key.to_ascii().to_lower().to_str_ascii();
     match mm.find_equiv(&key.as_bytes()) {
         option::None => { return 0u; }
@@ -28,20 +28,20 @@ fn calc(children: uint, parent_wait_chan: &Chan<Chan<Chan<int>>>) {
     };
 
     let child_start_chans: ~[Chan<Chan<int>>] =
-        wait_ports.consume_iter().transform(|port| port.recv()).collect();
+        wait_ports.move_iter().transform(|port| port.recv()).collect();
 
     let (start_port, start_chan) = stream::<Chan<int>>();
     parent_wait_chan.send(start_chan);
     let parent_result_chan: Chan<int> = start_port.recv();
 
     let child_sum_ports: ~[Port<int>] =
-        do child_start_chans.consume_iter().transform |child_start_chan| {
+        do child_start_chans.move_iter().transform |child_start_chan| {
             let (child_sum_port, child_sum_chan) = stream::<int>();
             child_start_chan.send(child_sum_chan);
             child_sum_port
         }.collect();
 
-    let sum = child_sum_ports.consume_iter().fold(0, |sum, sum_port| sum + sum_port.recv() );
+    let sum = child_sum_ports.move_iter().fold(0, |sum, sum_port| sum + sum_port.recv() );
 
     parent_result_chan.send(sum + 1);
 }