Auto merge of #21997 - Manishearth:rollup, r=alexcrichton

None
bors 2015-02-06 23:30:17 +00:00
commit d3732a12e8
110 changed files with 2051 additions and 2029 deletions

View File

@ -118,7 +118,10 @@ pub fn parse_config(args: Vec<String> ) -> Config {
}
fn opt_path(m: &getopts::Matches, nm: &str) -> Path {
Path::new(m.opt_str(nm).unwrap())
match m.opt_str(nm) {
Some(s) => Path::new(s),
None => panic!("no option (=path) found for {}", nm),
}
}
let filter = if !matches.free.is_empty() {
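The hunk above swaps a bare `unwrap()` for a `match` so the panic names the missing option. A minimal standalone sketch of the same pattern (the signature is simplified here and is not the real compiletest API):

```rust
use std::path::PathBuf;

// Simplified stand-in for compiletest's opt_path: take the option value
// directly instead of a getopts::Matches, but keep the same error behaviour.
fn opt_path(value: Option<String>, name: &str) -> PathBuf {
    match value {
        Some(s) => PathBuf::from(s),
        // The explicit match lets the panic message say *which* option is
        // missing, instead of the generic message from Option::unwrap().
        None => panic!("no option (=path) found for {}", name),
    }
}

fn main() {
    let p = opt_path(Some("src/test/run-pass".to_string()), "src-base");
    println!("{}", p.display());
}
```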

View File

@ -1813,7 +1813,6 @@ default visibility with the `priv` keyword. When an item is declared as `pub`,
it can be thought of as being accessible to the outside world. For example:
```
# #![allow(missing_copy_implementations)]
# fn main() {}
// Declare a private struct
struct Foo;

View File

@ -523,7 +523,7 @@ fn print<'a>(s: &'a str); // expanded
fn debug(lvl: u32, s: &str); // elided
fn debug<'a>(lvl: u32, s: &'a str); // expanded
// In the preceeding example, `lvl` doesn't need a lifetime because it's not a
// In the preceding example, `lvl` doesn't need a lifetime because it's not a
// reference (`&`). Only things relating to references (such as a `struct`
// which contains a reference) need lifetimes.
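To make the elision example above concrete, here is a small compilable version (an illustrative addition, not part of the diff): the non-reference parameter `lvl` never needs a lifetime, and the elided and expanded forms of `debug` are equivalent.

```rust
fn debug(lvl: u32, s: &str) {                 // elided
    println!("[{}] {}", lvl, s);
}

fn debug_expanded<'a>(lvl: u32, s: &'a str) { // expanded
    println!("[{}] {}", lvl, s);
}

fn main() {
    debug(1, "hello");
    debug_expanded(2, "world");
}
```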

View File

@ -194,9 +194,9 @@ for name in lib_feature_stats:
if not name in joint_features:
print "error: feature '" + name + "' is both a lang and lib feature but not whitelisted"
errors = True
lang_status = lang_feature_stats[name][3]
lang_status = language_feature_stats[name][3]
lib_status = lib_feature_stats[name][3]
lang_stable_since = lang_feature_stats[name][4]
lang_stable_since = language_feature_stats[name][4]
lib_stable_since = lib_feature_stats[name][4]
if lang_status != lib_status and lib_status != "deprecated":

View File

@ -1195,7 +1195,7 @@ maybe_stmts
//
// There are also two other expr subtypes: first, nonparen_expr
// disallows exprs surrounded by parens (including tuple expressions),
// this is neccesary for BOX (place) expressions, so a parens expr
// this is necessary for BOX (place) expressions, so a parens expr
// following the BOX is always parsed as the place. There is also
// expr_norange used in index_expr, which disallows '..' in
// expressions as that has special meaning inside of brackets.

View File

@ -311,7 +311,7 @@ impl<T: Sync + Send> Drop for Arc<T> {
///
/// // stuff
///
/// drop(five); // explict drop
/// drop(five); // explicit drop
/// }
/// {
/// let five = Arc::new(5);
@ -441,7 +441,7 @@ impl<T: Sync + Send> Drop for Weak<T> {
///
/// // stuff
///
/// drop(weak_five); // explict drop
/// drop(weak_five); // explicit drop
/// }
/// {
/// let five = Arc::new(5);
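A small runnable sketch of the `drop` pattern these docs describe (an added example using today's `Arc::strong_count`, which is not shown in the diff): explicitly dropping one handle releases its reference count immediately.

```rust
use std::sync::Arc;

fn main() {
    let five = Arc::new(5);
    let also_five = five.clone();
    assert_eq!(Arc::strong_count(&five), 2);

    drop(also_five); // explicit drop releases this handle's count right away
    assert_eq!(Arc::strong_count(&five), 1);
}
```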

View File

@ -73,7 +73,9 @@
#![feature(unboxed_closures)]
#![feature(core)]
#![feature(hash)]
#![feature(libc)]
#![cfg_attr(all(not(feature = "external_funcs"), not(feature = "external_crate")),
feature(libc))]
#[macro_use]
extern crate core;
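The change above wraps `feature(libc)` in `cfg_attr` so the attribute is applied only when neither `external_funcs` nor `external_crate` is enabled. A hedged, self-contained illustration of how `cfg_attr` gates an attribute (the feature name here is made up):

```rust
// `allow(dead_code)` is applied to this crate only when the (hypothetical)
// cargo feature "my_flag" is enabled; otherwise the attribute is absent.
#![cfg_attr(feature = "my_flag", allow(dead_code))]

fn unused_without_the_flag() {}

fn main() {
    println!("cfg_attr conditionally applies attributes");
}
```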

View File

@ -383,7 +383,7 @@ impl<T> Drop for Rc<T> {
///
/// // stuff
///
/// drop(five); // explict drop
/// drop(five); // explicit drop
/// }
/// {
/// let five = Rc::new(5);
@ -688,7 +688,7 @@ impl<T> Drop for Weak<T> {
///
/// // stuff
///
/// drop(weak_five); // explict drop
/// drop(weak_five); // explicit drop
/// }
/// {
/// let five = Rc::new(5);

View File

@ -13,40 +13,40 @@ use std::rand;
use std::rand::Rng;
use test::{Bencher, black_box};
pub fn insert_rand_n<M, I, R>(n: uint,
pub fn insert_rand_n<M, I, R>(n: usize,
map: &mut M,
b: &mut Bencher,
mut insert: I,
mut remove: R) where
I: FnMut(&mut M, uint),
R: FnMut(&mut M, uint),
I: FnMut(&mut M, usize),
R: FnMut(&mut M, usize),
{
// setup
let mut rng = rand::weak_rng();
for _ in 0..n {
insert(map, rng.gen::<uint>() % n);
insert(map, rng.gen::<usize>() % n);
}
// measure
b.iter(|| {
let k = rng.gen::<uint>() % n;
let k = rng.gen::<usize>() % n;
insert(map, k);
remove(map, k);
});
black_box(map);
}
pub fn insert_seq_n<M, I, R>(n: uint,
pub fn insert_seq_n<M, I, R>(n: usize,
map: &mut M,
b: &mut Bencher,
mut insert: I,
mut remove: R) where
I: FnMut(&mut M, uint),
R: FnMut(&mut M, uint),
I: FnMut(&mut M, usize),
R: FnMut(&mut M, usize),
{
// setup
for i in 0u..n {
for i in 0..n {
insert(map, i * 2);
}
@ -60,18 +60,17 @@ pub fn insert_seq_n<M, I, R>(n: uint,
black_box(map);
}
pub fn find_rand_n<M, T, I, F>(n: uint,
pub fn find_rand_n<M, T, I, F>(n: usize,
map: &mut M,
b: &mut Bencher,
mut insert: I,
mut find: F) where
I: FnMut(&mut M, uint),
F: FnMut(&M, uint) -> T,
I: FnMut(&mut M, usize),
F: FnMut(&M, usize) -> T,
{
// setup
let mut rng = rand::weak_rng();
let mut keys = (0..n).map(|_| rng.gen::<uint>() % n)
.collect::<Vec<_>>();
let mut keys: Vec<_> = (0..n).map(|_| rng.gen::<usize>() % n).collect();
for k in &keys {
insert(map, *k);
@ -88,16 +87,16 @@ pub fn find_rand_n<M, T, I, F>(n: uint,
})
}
pub fn find_seq_n<M, T, I, F>(n: uint,
pub fn find_seq_n<M, T, I, F>(n: usize,
map: &mut M,
b: &mut Bencher,
mut insert: I,
mut find: F) where
I: FnMut(&mut M, uint),
F: FnMut(&M, uint) -> T,
I: FnMut(&mut M, usize),
F: FnMut(&M, usize) -> T,
{
// setup
for i in 0u..n {
for i in 0..n {
insert(map, i);
}
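The helpers above are generic over the map type and take closures for insert/remove, so one benchmark body can drive several map implementations. A simplified sketch of that shape with `usize` keys (the names and the missing `Bencher` plumbing are assumptions, not the real test crate):

```rust
use std::collections::BTreeMap;

// Generic over the map type: the caller supplies closures that know how to
// insert and remove, so one helper works for any map-like container.
fn insert_seq_n<M, I, R>(n: usize, map: &mut M, mut insert: I, mut remove: R)
where
    I: FnMut(&mut M, usize),
    R: FnMut(&mut M, usize),
{
    // setup: pre-fill with even keys
    for i in 0..n {
        insert(map, i * 2);
    }
    // the "measured" part, minus the Bencher: churn one key in the middle
    let k = n / 2;
    insert(map, k);
    remove(map, k);
}

fn main() {
    let mut m = BTreeMap::new();
    insert_seq_n(100, &mut m,
                 |m, i| { m.insert(i, 1); },
                 |m, i| { m.remove(&i); });
    println!("{} entries after the run", m.len());
}
```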

View File

@ -28,12 +28,12 @@
//! ```
//! use std::cmp::Ordering;
//! use std::collections::BinaryHeap;
//! use std::uint;
//! use std::usize;
//!
//! #[derive(Copy, Eq, PartialEq)]
//! struct State {
//! cost: uint,
//! position: uint,
//! cost: usize,
//! position: usize,
//! }
//!
//! // The priority queue depends on `Ord`.
@ -53,21 +53,21 @@
//! }
//! }
//!
//! // Each node is represented as an `uint`, for a shorter implementation.
//! // Each node is represented as an `usize`, for a shorter implementation.
//! struct Edge {
//! node: uint,
//! cost: uint,
//! node: usize,
//! cost: usize,
//! }
//!
//! // Dijkstra's shortest path algorithm.
//!
//! // Start at `start` and use `dist` to track the current shortest distance
//! // to each node. This implementation isn't memory-efficient as it may leave duplicate
//! // nodes in the queue. It also uses `uint::MAX` as a sentinel value,
//! // nodes in the queue. It also uses `usize::MAX` as a sentinel value,
//! // for a simpler implementation.
//! fn shortest_path(adj_list: &Vec<Vec<Edge>>, start: uint, goal: uint) -> uint {
//! fn shortest_path(adj_list: &Vec<Vec<Edge>>, start: usize, goal: usize) -> usize {
//! // dist[node] = current shortest distance from `start` to `node`
//! let mut dist: Vec<_> = (0..adj_list.len()).map(|_| uint::MAX).collect();
//! let mut dist: Vec<_> = (0..adj_list.len()).map(|_| usize::MAX).collect();
//!
//! let mut heap = BinaryHeap::new();
//!
@ -98,7 +98,7 @@
//! }
//!
//! // Goal not reachable
//! uint::MAX
//! usize::MAX
//! }
//!
//! fn main() {
@ -143,7 +143,7 @@
//! assert_eq!(shortest_path(&graph, 0, 3), 3);
//! assert_eq!(shortest_path(&graph, 3, 0), 7);
//! assert_eq!(shortest_path(&graph, 0, 4), 5);
//! assert_eq!(shortest_path(&graph, 4, 0), uint::MAX);
//! assert_eq!(shortest_path(&graph, 4, 0), usize::MAX);
//! }
//! ```
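Separate from the Dijkstra walkthrough above, a minimal usage sketch of `BinaryHeap` itself (an added example, not from the diff): it is a max-heap by default, and wrapping values in `Reverse` flips it into a min-heap.

```rust
use std::cmp::Reverse;
use std::collections::BinaryHeap;

fn main() {
    let mut heap = BinaryHeap::new();
    heap.push(4);
    heap.push(1);
    heap.push(7);
    assert_eq!(heap.pop(), Some(7)); // largest element first

    let mut min_heap = BinaryHeap::new();
    min_heap.push(Reverse(4));
    min_heap.push(Reverse(1));
    assert_eq!(min_heap.pop(), Some(Reverse(1))); // smallest element first
}
```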
@ -183,7 +183,7 @@ impl<T: Ord> BinaryHeap<T> {
/// ```
/// use std::collections::BinaryHeap;
/// let mut heap = BinaryHeap::new();
/// heap.push(4u);
/// heap.push(4);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn new() -> BinaryHeap<T> { BinaryHeap { data: vec![] } }
@ -198,10 +198,10 @@ impl<T: Ord> BinaryHeap<T> {
/// ```
/// use std::collections::BinaryHeap;
/// let mut heap = BinaryHeap::with_capacity(10);
/// heap.push(4u);
/// heap.push(4);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn with_capacity(capacity: uint) -> BinaryHeap<T> {
pub fn with_capacity(capacity: usize) -> BinaryHeap<T> {
BinaryHeap { data: Vec::with_capacity(capacity) }
}
@ -292,10 +292,10 @@ impl<T: Ord> BinaryHeap<T> {
/// use std::collections::BinaryHeap;
/// let mut heap = BinaryHeap::with_capacity(100);
/// assert!(heap.capacity() >= 100);
/// heap.push(4u);
/// heap.push(4);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn capacity(&self) -> uint { self.data.capacity() }
pub fn capacity(&self) -> usize { self.data.capacity() }
/// Reserves the minimum capacity for exactly `additional` more elements to be inserted in the
/// given `BinaryHeap`. Does nothing if the capacity is already sufficient.
@ -306,7 +306,7 @@ impl<T: Ord> BinaryHeap<T> {
///
/// # Panics
///
/// Panics if the new capacity overflows `uint`.
/// Panics if the new capacity overflows `usize`.
///
/// # Examples
///
@ -315,10 +315,10 @@ impl<T: Ord> BinaryHeap<T> {
/// let mut heap = BinaryHeap::new();
/// heap.reserve_exact(100);
/// assert!(heap.capacity() >= 100);
/// heap.push(4u);
/// heap.push(4);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn reserve_exact(&mut self, additional: uint) {
pub fn reserve_exact(&mut self, additional: usize) {
self.data.reserve_exact(additional);
}
@ -327,7 +327,7 @@ impl<T: Ord> BinaryHeap<T> {
///
/// # Panics
///
/// Panics if the new capacity overflows `uint`.
/// Panics if the new capacity overflows `usize`.
///
/// # Examples
///
@ -336,10 +336,10 @@ impl<T: Ord> BinaryHeap<T> {
/// let mut heap = BinaryHeap::new();
/// heap.reserve(100);
/// assert!(heap.capacity() >= 100);
/// heap.push(4u);
/// heap.push(4);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn reserve(&mut self, additional: uint) {
pub fn reserve(&mut self, additional: usize) {
self.data.reserve(additional);
}
@ -497,7 +497,7 @@ impl<T: Ord> BinaryHeap<T> {
// zeroed element), shift along the others and move it back into the
// vector over the junk element. This reduces the constant factor
// compared to using swaps, which involves twice as many moves.
fn sift_up(&mut self, start: uint, mut pos: uint) {
fn sift_up(&mut self, start: usize, mut pos: usize) {
unsafe {
let new = replace(&mut self.data[pos], zeroed());
@ -514,7 +514,7 @@ impl<T: Ord> BinaryHeap<T> {
}
}
fn sift_down_range(&mut self, mut pos: uint, end: uint) {
fn sift_down_range(&mut self, mut pos: usize, end: usize) {
unsafe {
let start = pos;
let new = replace(&mut self.data[pos], zeroed());
@ -536,14 +536,14 @@ impl<T: Ord> BinaryHeap<T> {
}
}
fn sift_down(&mut self, pos: uint) {
fn sift_down(&mut self, pos: usize) {
let len = self.len();
self.sift_down_range(pos, len);
}
/// Returns the length of the binary heap.
#[stable(feature = "rust1", since = "1.0.0")]
pub fn len(&self) -> uint { self.data.len() }
pub fn len(&self) -> usize { self.data.len() }
/// Checks if the binary heap is empty.
#[stable(feature = "rust1", since = "1.0.0")]
@ -584,7 +584,7 @@ impl<'a, T> Iterator for Iter<'a, T> {
fn next(&mut self) -> Option<&'a T> { self.iter.next() }
#[inline]
fn size_hint(&self) -> (uint, Option<uint>) { self.iter.size_hint() }
fn size_hint(&self) -> (usize, Option<usize>) { self.iter.size_hint() }
}
#[stable(feature = "rust1", since = "1.0.0")]
@ -610,7 +610,7 @@ impl<T> Iterator for IntoIter<T> {
fn next(&mut self) -> Option<T> { self.iter.next() }
#[inline]
fn size_hint(&self) -> (uint, Option<uint>) { self.iter.size_hint() }
fn size_hint(&self) -> (usize, Option<usize>) { self.iter.size_hint() }
}
#[stable(feature = "rust1", since = "1.0.0")]
@ -636,7 +636,7 @@ impl<'a, T: 'a> Iterator for Drain<'a, T> {
fn next(&mut self) -> Option<T> { self.iter.next() }
#[inline]
fn size_hint(&self) -> (uint, Option<uint>) { self.iter.size_hint() }
fn size_hint(&self) -> (usize, Option<usize>) { self.iter.size_hint() }
}
#[stable(feature = "rust1", since = "1.0.0")]
@ -692,7 +692,7 @@ mod tests {
#[test]
fn test_iterator() {
let data = vec!(5, 9, 3);
let data = vec![5, 9, 3];
let iterout = [9, 5, 3];
let heap = BinaryHeap::from_vec(data);
let mut i = 0;
@ -704,27 +704,27 @@ mod tests {
#[test]
fn test_iterator_reverse() {
let data = vec!(5, 9, 3);
let iterout = vec!(3, 5, 9);
let data = vec![5, 9, 3];
let iterout = vec![3, 5, 9];
let pq = BinaryHeap::from_vec(data);
let v: Vec<int> = pq.iter().rev().map(|&x| x).collect();
let v: Vec<_> = pq.iter().rev().cloned().collect();
assert_eq!(v, iterout);
}
#[test]
fn test_move_iter() {
let data = vec!(5, 9, 3);
let iterout = vec!(9, 5, 3);
let data = vec![5, 9, 3];
let iterout = vec![9, 5, 3];
let pq = BinaryHeap::from_vec(data);
let v: Vec<int> = pq.into_iter().collect();
let v: Vec<_> = pq.into_iter().collect();
assert_eq!(v, iterout);
}
#[test]
fn test_move_iter_size_hint() {
let data = vec!(5, 9);
let data = vec![5, 9];
let pq = BinaryHeap::from_vec(data);
let mut it = pq.into_iter();
@ -741,17 +741,17 @@ mod tests {
#[test]
fn test_move_iter_reverse() {
let data = vec!(5, 9, 3);
let iterout = vec!(3, 5, 9);
let data = vec![5, 9, 3];
let iterout = vec![3, 5, 9];
let pq = BinaryHeap::from_vec(data);
let v: Vec<int> = pq.into_iter().rev().collect();
let v: Vec<_> = pq.into_iter().rev().collect();
assert_eq!(v, iterout);
}
#[test]
fn test_peek_and_pop() {
let data = vec!(2u, 4, 6, 2, 1, 8, 10, 3, 5, 7, 0, 9, 1);
let data = vec![2, 4, 6, 2, 1, 8, 10, 3, 5, 7, 0, 9, 1];
let mut sorted = data.clone();
sorted.sort();
let mut heap = BinaryHeap::from_vec(data);
@ -763,7 +763,7 @@ mod tests {
#[test]
fn test_push() {
let mut heap = BinaryHeap::from_vec(vec!(2, 4, 9));
let mut heap = BinaryHeap::from_vec(vec![2, 4, 9]);
assert_eq!(heap.len(), 3);
assert!(*heap.peek().unwrap() == 9);
heap.push(11);
@ -785,7 +785,7 @@ mod tests {
#[test]
fn test_push_unique() {
let mut heap = BinaryHeap::from_vec(vec!(box 2, box 4, box 9));
let mut heap = BinaryHeap::from_vec(vec![box 2, box 4, box 9]);
assert_eq!(heap.len(), 3);
assert!(*heap.peek().unwrap() == box 9);
heap.push(box 11);
@ -807,7 +807,7 @@ mod tests {
#[test]
fn test_push_pop() {
let mut heap = BinaryHeap::from_vec(vec!(5, 5, 2, 1, 3));
let mut heap = BinaryHeap::from_vec(vec![5, 5, 2, 1, 3]);
assert_eq!(heap.len(), 5);
assert_eq!(heap.push_pop(6), 6);
assert_eq!(heap.len(), 5);
@ -821,7 +821,7 @@ mod tests {
#[test]
fn test_replace() {
let mut heap = BinaryHeap::from_vec(vec!(5, 5, 2, 1, 3));
let mut heap = BinaryHeap::from_vec(vec![5, 5, 2, 1, 3]);
assert_eq!(heap.len(), 5);
assert_eq!(heap.replace(6).unwrap(), 5);
assert_eq!(heap.len(), 5);
@ -833,7 +833,7 @@ mod tests {
assert_eq!(heap.len(), 5);
}
fn check_to_vec(mut data: Vec<int>) {
fn check_to_vec(mut data: Vec<i32>) {
let heap = BinaryHeap::from_vec(data.clone());
let mut v = heap.clone().into_vec();
v.sort();
@ -845,44 +845,44 @@ mod tests {
#[test]
fn test_to_vec() {
check_to_vec(vec!());
check_to_vec(vec!(5));
check_to_vec(vec!(3, 2));
check_to_vec(vec!(2, 3));
check_to_vec(vec!(5, 1, 2));
check_to_vec(vec!(1, 100, 2, 3));
check_to_vec(vec!(1, 3, 5, 7, 9, 2, 4, 6, 8, 0));
check_to_vec(vec!(2, 4, 6, 2, 1, 8, 10, 3, 5, 7, 0, 9, 1));
check_to_vec(vec!(9, 11, 9, 9, 9, 9, 11, 2, 3, 4, 11, 9, 0, 0, 0, 0));
check_to_vec(vec!(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10));
check_to_vec(vec!(10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0));
check_to_vec(vec!(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 0, 0, 1, 2));
check_to_vec(vec!(5, 4, 3, 2, 1, 5, 4, 3, 2, 1, 5, 4, 3, 2, 1));
check_to_vec(vec![]);
check_to_vec(vec![5]);
check_to_vec(vec![3, 2]);
check_to_vec(vec![2, 3]);
check_to_vec(vec![5, 1, 2]);
check_to_vec(vec![1, 100, 2, 3]);
check_to_vec(vec![1, 3, 5, 7, 9, 2, 4, 6, 8, 0]);
check_to_vec(vec![2, 4, 6, 2, 1, 8, 10, 3, 5, 7, 0, 9, 1]);
check_to_vec(vec![9, 11, 9, 9, 9, 9, 11, 2, 3, 4, 11, 9, 0, 0, 0, 0]);
check_to_vec(vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]);
check_to_vec(vec![10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0]);
check_to_vec(vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 0, 0, 1, 2]);
check_to_vec(vec![5, 4, 3, 2, 1, 5, 4, 3, 2, 1, 5, 4, 3, 2, 1]);
}
#[test]
fn test_empty_pop() {
let mut heap = BinaryHeap::<int>::new();
let mut heap = BinaryHeap::<i32>::new();
assert!(heap.pop().is_none());
}
#[test]
fn test_empty_peek() {
let empty = BinaryHeap::<int>::new();
let empty = BinaryHeap::<i32>::new();
assert!(empty.peek().is_none());
}
#[test]
fn test_empty_replace() {
let mut heap = BinaryHeap::<int>::new();
let mut heap = BinaryHeap::new();
assert!(heap.replace(5).is_none());
}
#[test]
fn test_from_iter() {
let xs = vec!(9u, 8, 7, 6, 5, 4, 3, 2, 1);
let xs = vec![9, 8, 7, 6, 5, 4, 3, 2, 1];
let mut q: BinaryHeap<uint> = xs.iter().rev().map(|&x| x).collect();
let mut q: BinaryHeap<_> = xs.iter().rev().cloned().collect();
for &x in &xs {
assert_eq!(q.pop().unwrap(), x);
@ -891,8 +891,7 @@ mod tests {
#[test]
fn test_drain() {
let mut q: BinaryHeap<_> =
[9u, 8, 7, 6, 5, 4, 3, 2, 1].iter().cloned().collect();
let mut q: BinaryHeap<_> = [9, 8, 7, 6, 5, 4, 3, 2, 1].iter().cloned().collect();
assert_eq!(q.drain().take(5).count(), 5);

File diff suppressed because it is too large


View File

@ -15,7 +15,7 @@
// writing (August 2014) freely licensed under the following Creative Commons Attribution
// License: [CC BY 2.5 CA](http://creativecommons.org/licenses/by/2.5/ca/).
pub use self::Entry::*;
use self::Entry::*;
use core::prelude::*;
@ -63,36 +63,20 @@ use super::node::{self, Node, Found, GoDown};
/// would like to further explore choosing the optimal search strategy based on the choice of B,
/// and possibly other factors. Using linear search, searching for a random element is expected
/// to take O(B log<sub>B</sub>n) comparisons, which is generally worse than a BST. In practice,
/// however, performance is excellent. `BTreeMap` is able to readily outperform `TreeMap` under
/// many workloads, and is competitive where it doesn't. BTreeMap also generally *scales* better
/// than TreeMap, making it more appropriate for large datasets.
///
/// However, `TreeMap` may still be more appropriate to use in many contexts. If elements are very
/// large or expensive to compare, `TreeMap` may be more appropriate. It won't allocate any
/// more space than is needed, and will perform the minimal number of comparisons necessary.
/// `TreeMap` also provides much better performance stability guarantees. Generally, very few
/// changes need to be made to update a BST, and two updates are expected to take about the same
/// amount of time on roughly equal sized BSTs. However a B-Tree's performance is much more
/// amortized. If a node is overfull, it must be split into two nodes. If a node is underfull, it
/// may be merged with another. Both of these operations are relatively expensive to perform, and
/// it's possible to force one to occur at every single level of the tree in a single insertion or
/// deletion. In fact, a malicious or otherwise unlucky sequence of insertions and deletions can
/// force this degenerate behaviour to occur on every operation. While the total amount of work
/// done on each operation isn't *catastrophic*, and *is* still bounded by O(B log<sub>B</sub>n),
/// it is certainly much slower when it does.
/// however, performance is excellent.
#[derive(Clone)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct BTreeMap<K, V> {
root: Node<K, V>,
length: uint,
depth: uint,
b: uint,
length: usize,
depth: usize,
b: usize,
}
/// An abstract base over-which all other BTree iterators are built.
struct AbsIter<T> {
traversals: RingBuf<T>,
size: uint,
size: usize,
}
/// An iterator over a BTreeMap's entries.
@ -171,7 +155,7 @@ impl<K: Ord, V> BTreeMap<K, V> {
/// Makes a new empty BTreeMap with the given B.
///
/// B cannot be less than 2.
pub fn with_b(b: uint) -> BTreeMap<K, V> {
pub fn with_b(b: usize) -> BTreeMap<K, V> {
assert!(b > 1, "B must be greater than 1");
BTreeMap {
length: 0,
@ -189,7 +173,7 @@ impl<K: Ord, V> BTreeMap<K, V> {
/// use std::collections::BTreeMap;
///
/// let mut a = BTreeMap::new();
/// a.insert(1u, "a");
/// a.insert(1, "a");
/// a.clear();
/// assert!(a.is_empty());
/// ```
@ -219,7 +203,7 @@ impl<K: Ord, V> BTreeMap<K, V> {
/// use std::collections::BTreeMap;
///
/// let mut map = BTreeMap::new();
/// map.insert(1u, "a");
/// map.insert(1, "a");
/// assert_eq!(map.get(&1), Some(&"a"));
/// assert_eq!(map.get(&2), None);
/// ```
@ -251,7 +235,7 @@ impl<K: Ord, V> BTreeMap<K, V> {
/// use std::collections::BTreeMap;
///
/// let mut map = BTreeMap::new();
/// map.insert(1u, "a");
/// map.insert(1, "a");
/// assert_eq!(map.contains_key(&1), true);
/// assert_eq!(map.contains_key(&2), false);
/// ```
@ -271,7 +255,7 @@ impl<K: Ord, V> BTreeMap<K, V> {
/// use std::collections::BTreeMap;
///
/// let mut map = BTreeMap::new();
/// map.insert(1u, "a");
/// map.insert(1, "a");
/// match map.get_mut(&1) {
/// Some(x) => *x = "b",
/// None => (),
@ -333,7 +317,7 @@ impl<K: Ord, V> BTreeMap<K, V> {
/// use std::collections::BTreeMap;
///
/// let mut map = BTreeMap::new();
/// assert_eq!(map.insert(37u, "a"), None);
/// assert_eq!(map.insert(37, "a"), None);
/// assert_eq!(map.is_empty(), false);
///
/// map.insert(37, "b");
@ -445,7 +429,7 @@ impl<K: Ord, V> BTreeMap<K, V> {
/// use std::collections::BTreeMap;
///
/// let mut map = BTreeMap::new();
/// map.insert(1u, "a");
/// map.insert(1, "a");
/// assert_eq!(map.remove(&1), Some("a"));
/// assert_eq!(map.remove(&1), None);
/// ```
@ -1001,7 +985,7 @@ impl<K, V, E, T> Iterator for AbsIter<T> where
}
}
fn size_hint(&self) -> (uint, Option<uint>) {
fn size_hint(&self) -> (usize, Option<usize>) {
(self.size, Some(self.size))
}
}
@ -1038,7 +1022,7 @@ impl<'a, K, V> Iterator for Iter<'a, K, V> {
type Item = (&'a K, &'a V);
fn next(&mut self) -> Option<(&'a K, &'a V)> { self.inner.next() }
fn size_hint(&self) -> (uint, Option<uint>) { self.inner.size_hint() }
fn size_hint(&self) -> (usize, Option<usize>) { self.inner.size_hint() }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, K, V> DoubleEndedIterator for Iter<'a, K, V> {
@ -1052,7 +1036,7 @@ impl<'a, K, V> Iterator for IterMut<'a, K, V> {
type Item = (&'a K, &'a mut V);
fn next(&mut self) -> Option<(&'a K, &'a mut V)> { self.inner.next() }
fn size_hint(&self) -> (uint, Option<uint>) { self.inner.size_hint() }
fn size_hint(&self) -> (usize, Option<usize>) { self.inner.size_hint() }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, K, V> DoubleEndedIterator for IterMut<'a, K, V> {
@ -1066,7 +1050,7 @@ impl<K, V> Iterator for IntoIter<K, V> {
type Item = (K, V);
fn next(&mut self) -> Option<(K, V)> { self.inner.next() }
fn size_hint(&self) -> (uint, Option<uint>) { self.inner.size_hint() }
fn size_hint(&self) -> (usize, Option<usize>) { self.inner.size_hint() }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<K, V> DoubleEndedIterator for IntoIter<K, V> {
@ -1080,7 +1064,7 @@ impl<'a, K, V> Iterator for Keys<'a, K, V> {
type Item = &'a K;
fn next(&mut self) -> Option<(&'a K)> { self.inner.next() }
fn size_hint(&self) -> (uint, Option<uint>) { self.inner.size_hint() }
fn size_hint(&self) -> (usize, Option<usize>) { self.inner.size_hint() }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, K, V> DoubleEndedIterator for Keys<'a, K, V> {
@ -1095,7 +1079,7 @@ impl<'a, K, V> Iterator for Values<'a, K, V> {
type Item = &'a V;
fn next(&mut self) -> Option<(&'a V)> { self.inner.next() }
fn size_hint(&self) -> (uint, Option<uint>) { self.inner.size_hint() }
fn size_hint(&self) -> (usize, Option<usize>) { self.inner.size_hint() }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, K, V> DoubleEndedIterator for Values<'a, K, V> {
@ -1137,8 +1121,7 @@ impl<'a, K: Ord, V> Entry<'a, K, V> {
impl<'a, K: Ord, V> VacantEntry<'a, K, V> {
/// Sets the value of the entry with the VacantEntry's key,
/// and returns a mutable reference to it.
#[unstable(feature = "collections",
reason = "matches collection reform v2 specification, waiting for dust to settle")]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn insert(self, value: V) -> &'a mut V {
self.stack.insert(self.key, value)
}
@ -1146,38 +1129,33 @@ impl<'a, K: Ord, V> VacantEntry<'a, K, V> {
impl<'a, K: Ord, V> OccupiedEntry<'a, K, V> {
/// Gets a reference to the value in the entry.
#[unstable(feature = "collections",
reason = "matches collection reform v2 specification, waiting for dust to settle")]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn get(&self) -> &V {
self.stack.peek()
}
/// Gets a mutable reference to the value in the entry.
#[unstable(feature = "collections",
reason = "matches collection reform v2 specification, waiting for dust to settle")]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn get_mut(&mut self) -> &mut V {
self.stack.peek_mut()
}
/// Converts the entry into a mutable reference to its value.
#[unstable(feature = "collections",
reason = "matches collection reform v2 specification, waiting for dust to settle")]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn into_mut(self) -> &'a mut V {
self.stack.into_top()
}
/// Sets the value of the entry with the OccupiedEntry's key,
/// and returns the entry's old value.
#[unstable(feature = "collections",
reason = "matches collection reform v2 specification, waiting for dust to settle")]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn insert(&mut self, mut value: V) -> V {
mem::swap(self.stack.peek_mut(), &mut value);
value
}
/// Takes the value of the entry out of the map, and returns it.
#[unstable(feature = "collections",
reason = "matches collection reform v2 specification, waiting for dust to settle")]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn remove(self) -> V {
self.stack.remove()
}
@ -1192,16 +1170,16 @@ impl<K, V> BTreeMap<K, V> {
/// use std::collections::BTreeMap;
///
/// let mut map = BTreeMap::new();
/// map.insert(1u, "a");
/// map.insert(2u, "b");
/// map.insert(3u, "c");
/// map.insert(1, "a");
/// map.insert(2, "b");
/// map.insert(3, "c");
///
/// for (key, value) in map.iter() {
/// println!("{}: {}", key, value);
/// }
///
/// let (first_key, first_value) = map.iter().next().unwrap();
/// assert_eq!((*first_key, *first_value), (1u, "a"));
/// assert_eq!((*first_key, *first_value), (1, "a"));
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn iter(&self) -> Iter<K, V> {
@ -1225,9 +1203,9 @@ impl<K, V> BTreeMap<K, V> {
/// use std::collections::BTreeMap;
///
/// let mut map = BTreeMap::new();
/// map.insert("a", 1u);
/// map.insert("b", 2u);
/// map.insert("c", 3u);
/// map.insert("a", 1);
/// map.insert("b", 2);
/// map.insert("c", 3);
///
/// // add 10 to the value if the key isn't "a"
/// for (key, value) in map.iter_mut() {
@ -1257,9 +1235,9 @@ impl<K, V> BTreeMap<K, V> {
/// use std::collections::BTreeMap;
///
/// let mut map = BTreeMap::new();
/// map.insert(1u, "a");
/// map.insert(2u, "b");
/// map.insert(3u, "c");
/// map.insert(1, "a");
/// map.insert(2, "b");
/// map.insert(3, "c");
///
/// for (key, value) in map.into_iter() {
/// println!("{}: {}", key, value);
@ -1286,11 +1264,11 @@ impl<K, V> BTreeMap<K, V> {
/// use std::collections::BTreeMap;
///
/// let mut a = BTreeMap::new();
/// a.insert(1u, "a");
/// a.insert(2u, "b");
/// a.insert(1, "a");
/// a.insert(2, "b");
///
/// let keys: Vec<uint> = a.keys().cloned().collect();
/// assert_eq!(keys, vec![1u,2,]);
/// let keys: Vec<usize> = a.keys().cloned().collect();
/// assert_eq!(keys, vec![1,2,]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn keys<'a>(&'a self) -> Keys<'a, K, V> {
@ -1308,8 +1286,8 @@ impl<K, V> BTreeMap<K, V> {
/// use std::collections::BTreeMap;
///
/// let mut a = BTreeMap::new();
/// a.insert(1u, "a");
/// a.insert(2u, "b");
/// a.insert(1, "a");
/// a.insert(2, "b");
///
/// let values: Vec<&str> = a.values().cloned().collect();
/// assert_eq!(values, vec!["a","b"]);
@ -1331,11 +1309,11 @@ impl<K, V> BTreeMap<K, V> {
///
/// let mut a = BTreeMap::new();
/// assert_eq!(a.len(), 0);
/// a.insert(1u, "a");
/// a.insert(1, "a");
/// assert_eq!(a.len(), 1);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn len(&self) -> uint { self.length }
pub fn len(&self) -> usize { self.length }
/// Return true if the map contains no elements.
///
@ -1346,7 +1324,7 @@ impl<K, V> BTreeMap<K, V> {
///
/// let mut a = BTreeMap::new();
/// assert!(a.is_empty());
/// a.insert(1u, "a");
/// a.insert(1, "a");
/// assert!(!a.is_empty());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
@ -1496,13 +1474,13 @@ impl<K: Ord, V> BTreeMap<K, V> {
/// use std::collections::Bound::{Included, Unbounded};
///
/// let mut map = BTreeMap::new();
/// map.insert(3u, "a");
/// map.insert(5u, "b");
/// map.insert(8u, "c");
/// map.insert(3, "a");
/// map.insert(5, "b");
/// map.insert(8, "c");
/// for (&key, &value) in map.range(Included(&4), Included(&8)) {
/// println!("{}: {}", key, value);
/// }
/// assert_eq!(Some((&5u, &"b")), map.range(Included(&4), Unbounded).next());
/// assert_eq!(Some((&5, &"b")), map.range(Included(&4), Unbounded).next());
/// ```
#[unstable(feature = "collections",
reason = "matches collection reform specification, waiting for dust to settle")]
@ -1546,7 +1524,7 @@ impl<K: Ord, V> BTreeMap<K, V> {
/// use std::collections::BTreeMap;
/// use std::collections::btree_map::Entry;
///
/// let mut count: BTreeMap<&str, uint> = BTreeMap::new();
/// let mut count: BTreeMap<&str, usize> = BTreeMap::new();
///
/// // count the number of occurrences of letters in the vec
/// for x in vec!["a","b","a","c","a","b"].iter() {
@ -1561,12 +1539,10 @@ impl<K: Ord, V> BTreeMap<K, V> {
/// }
/// }
///
/// assert_eq!(count["a"], 3u);
/// assert_eq!(count["a"], 3);
/// ```
/// The key must have the same ordering before or after `.to_owned()` is called.
#[unstable(feature = "collections",
reason = "precise API still under development")]
pub fn entry<'a>(&'a mut self, mut key: K) -> Entry<'a, K, V> {
#[stable(feature = "rust1", since = "1.0.0")]
pub fn entry(&mut self, mut key: K) -> Entry<K, V> {
// same basic logic of `swap` and `pop`, blended together
let mut stack = stack::PartialSearchStack::new(self);
loop {
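The occurrence-counting example above matches on `Vacant`/`Occupied` by hand. The same counting loop with the later `or_insert` convenience (a hedged modern equivalent, not part of this diff):

```rust
use std::collections::BTreeMap;

fn main() {
    let mut count: BTreeMap<&str, usize> = BTreeMap::new();

    // count the number of occurrences of each letter
    for x in ["a", "b", "a", "c", "a", "b"] {
        *count.entry(x).or_insert(0) += 1;
    }

    assert_eq!(count["a"], 3);
    assert_eq!(count["b"], 2);
    assert_eq!(count["c"], 1);
}
```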
@ -1616,13 +1592,14 @@ mod test {
use prelude::*;
use std::iter::range_inclusive;
use super::{BTreeMap, Occupied, Vacant};
use super::BTreeMap;
use super::Entry::{Occupied, Vacant};
use Bound::{self, Included, Excluded, Unbounded};
#[test]
fn test_basic_large() {
let mut map = BTreeMap::new();
let size = 10000u;
let size = 10000;
assert_eq!(map.len(), 0);
for i in 0..size {
@ -1669,7 +1646,7 @@ mod test {
let mut map = BTreeMap::new();
assert_eq!(map.remove(&1), None);
assert_eq!(map.get(&1), None);
assert_eq!(map.insert(1u, 1u), None);
assert_eq!(map.insert(1, 1), None);
assert_eq!(map.get(&1), Some(&1));
assert_eq!(map.insert(1, 2), Some(1));
assert_eq!(map.get(&1), Some(&2));
@ -1682,12 +1659,12 @@ mod test {
#[test]
fn test_iter() {
let size = 10000u;
let size = 10000;
// Forwards
let mut map: BTreeMap<uint, uint> = (0..size).map(|i| (i, i)).collect();
let mut map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();
fn test<T>(size: uint, mut iter: T) where T: Iterator<Item=(uint, uint)> {
fn test<T>(size: usize, mut iter: T) where T: Iterator<Item=(usize, usize)> {
for i in 0..size {
assert_eq!(iter.size_hint(), (size - i, Some(size - i)));
assert_eq!(iter.next().unwrap(), (i, i));
@ -1702,12 +1679,12 @@ mod test {
#[test]
fn test_iter_rev() {
let size = 10000u;
let size = 10000;
// Forwards
let mut map: BTreeMap<uint, uint> = (0..size).map(|i| (i, i)).collect();
let mut map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();
fn test<T>(size: uint, mut iter: T) where T: Iterator<Item=(uint, uint)> {
fn test<T>(size: usize, mut iter: T) where T: Iterator<Item=(usize, usize)> {
for i in 0..size {
assert_eq!(iter.size_hint(), (size - i, Some(size - i)));
assert_eq!(iter.next().unwrap(), (size - i - 1, size - i - 1));
@ -1722,13 +1699,13 @@ mod test {
#[test]
fn test_iter_mixed() {
let size = 10000u;
let size = 10000;
// Forwards
let mut map: BTreeMap<uint, uint> = (0..size).map(|i| (i, i)).collect();
let mut map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();
fn test<T>(size: uint, mut iter: T)
where T: Iterator<Item=(uint, uint)> + DoubleEndedIterator {
fn test<T>(size: usize, mut iter: T)
where T: Iterator<Item=(usize, usize)> + DoubleEndedIterator {
for i in 0..size / 4 {
assert_eq!(iter.size_hint(), (size - i * 2, Some(size - i * 2)));
assert_eq!(iter.next().unwrap(), (i, i));
@ -1748,13 +1725,13 @@ mod test {
#[test]
fn test_range_small() {
let size = 5u;
let size = 5;
// Forwards
let map: BTreeMap<uint, uint> = (0..size).map(|i| (i, i)).collect();
let map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();
let mut j = 0u;
for ((&k, &v), i) in map.range(Included(&2), Unbounded).zip(2u..size) {
let mut j = 0;
for ((&k, &v), i) in map.range(Included(&2), Unbounded).zip(2..size) {
assert_eq!(k, i);
assert_eq!(v, i);
j += 1;
@ -1764,10 +1741,10 @@ mod test {
#[test]
fn test_range_1000() {
let size = 1000u;
let map: BTreeMap<uint, uint> = (0..size).map(|i| (i, i)).collect();
let size = 1000;
let map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();
fn test(map: &BTreeMap<uint, uint>, size: uint, min: Bound<&uint>, max: Bound<&uint>) {
fn test(map: &BTreeMap<u32, u32>, size: u32, min: Bound<&u32>, max: Bound<&u32>) {
let mut kvs = map.range(min, max).map(|(&k, &v)| (k, v));
let mut pairs = (0..size).map(|i| (i, i));
@ -1787,8 +1764,8 @@ mod test {
#[test]
fn test_range() {
let size = 200u;
let map: BTreeMap<uint, uint> = (0..size).map(|i| (i, i)).collect();
let size = 200;
let map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();
for i in 0..size {
for j in i..size {
@ -1808,7 +1785,7 @@ mod test {
fn test_entry(){
let xs = [(1, 10), (2, 20), (3, 30), (4, 40), (5, 50), (6, 60)];
let mut map: BTreeMap<int, int> = xs.iter().map(|&x| x).collect();
let mut map: BTreeMap<_, _> = xs.iter().map(|&x| x).collect();
// Existing key (insert)
match map.entry(1) {
@ -1872,7 +1849,7 @@ mod bench {
#[bench]
pub fn insert_rand_100(b: &mut Bencher) {
let mut m : BTreeMap<uint,uint> = BTreeMap::new();
let mut m = BTreeMap::new();
insert_rand_n(100, &mut m, b,
|m, i| { m.insert(i, 1); },
|m, i| { m.remove(&i); });
@ -1880,7 +1857,7 @@ mod bench {
#[bench]
pub fn insert_rand_10_000(b: &mut Bencher) {
let mut m : BTreeMap<uint,uint> = BTreeMap::new();
let mut m = BTreeMap::new();
insert_rand_n(10_000, &mut m, b,
|m, i| { m.insert(i, 1); },
|m, i| { m.remove(&i); });
@ -1889,7 +1866,7 @@ mod bench {
// Insert seq
#[bench]
pub fn insert_seq_100(b: &mut Bencher) {
let mut m : BTreeMap<uint,uint> = BTreeMap::new();
let mut m = BTreeMap::new();
insert_seq_n(100, &mut m, b,
|m, i| { m.insert(i, 1); },
|m, i| { m.remove(&i); });
@ -1897,7 +1874,7 @@ mod bench {
#[bench]
pub fn insert_seq_10_000(b: &mut Bencher) {
let mut m : BTreeMap<uint,uint> = BTreeMap::new();
let mut m = BTreeMap::new();
insert_seq_n(10_000, &mut m, b,
|m, i| { m.insert(i, 1); },
|m, i| { m.remove(&i); });
@ -1906,7 +1883,7 @@ mod bench {
// Find rand
#[bench]
pub fn find_rand_100(b: &mut Bencher) {
let mut m : BTreeMap<uint,uint> = BTreeMap::new();
let mut m = BTreeMap::new();
find_rand_n(100, &mut m, b,
|m, i| { m.insert(i, 1); },
|m, i| { m.get(&i); });
@ -1914,7 +1891,7 @@ mod bench {
#[bench]
pub fn find_rand_10_000(b: &mut Bencher) {
let mut m : BTreeMap<uint,uint> = BTreeMap::new();
let mut m = BTreeMap::new();
find_rand_n(10_000, &mut m, b,
|m, i| { m.insert(i, 1); },
|m, i| { m.get(&i); });
@ -1923,7 +1900,7 @@ mod bench {
// Find seq
#[bench]
pub fn find_seq_100(b: &mut Bencher) {
let mut m : BTreeMap<uint,uint> = BTreeMap::new();
let mut m = BTreeMap::new();
find_seq_n(100, &mut m, b,
|m, i| { m.insert(i, 1); },
|m, i| { m.get(&i); });
@ -1931,14 +1908,14 @@ mod bench {
#[bench]
pub fn find_seq_10_000(b: &mut Bencher) {
let mut m : BTreeMap<uint,uint> = BTreeMap::new();
let mut m = BTreeMap::new();
find_seq_n(10_000, &mut m, b,
|m, i| { m.insert(i, 1); },
|m, i| { m.get(&i); });
}
fn bench_iter(b: &mut Bencher, size: uint) {
let mut map = BTreeMap::<uint, uint>::new();
fn bench_iter(b: &mut Bencher, size: i32) {
let mut map = BTreeMap::<i32, i32>::new();
let mut rng = weak_rng();
for _ in 0..size {

View File

@ -65,7 +65,7 @@ pub struct Node<K, V> {
//
// Note: instead of accessing this field directly, please call the `len()` method, which should
// be more stable in the face of representation changes.
_len: uint,
_len: usize,
// FIXME(gereeter) It shouldn't be necessary to store the capacity in every node, as it should
// be constant throughout the tree. Once a solution to this is found, it might be possible to
@ -74,7 +74,7 @@ pub struct Node<K, V> {
//
// Note: instead of accessing this field directly, please call the `capacity()` method, which
// should be more stable in the face of representation changes.
_capacity: uint,
_capacity: usize,
}
struct NodeSlice<'a, K: 'a, V: 'a> {
@ -102,7 +102,7 @@ struct MutNodeSlice<'a, K: 'a, V: 'a> {
///
/// Fails if `target_alignment` is not a power of two.
#[inline]
fn round_up_to_next(unrounded: uint, target_alignment: uint) -> uint {
fn round_up_to_next(unrounded: usize, target_alignment: usize) -> usize {
assert!(num::UnsignedInt::is_power_of_two(target_alignment));
(unrounded + target_alignment - 1) & !(target_alignment - 1)
}
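The alignment helper above relies on the usual power-of-two mask trick. A tiny standalone check of that formula (added for illustration):

```rust
fn round_up_to_next(unrounded: usize, target_alignment: usize) -> usize {
    // Only valid when the alignment is a power of two: adding `align - 1`
    // bumps the value just below the next multiple, and the mask clears
    // the low bits back down to that multiple.
    assert!(target_alignment.is_power_of_two());
    (unrounded + target_alignment - 1) & !(target_alignment - 1)
}

fn main() {
    assert_eq!(round_up_to_next(0, 4), 0);
    assert_eq!(round_up_to_next(1, 4), 4);
    assert_eq!(round_up_to_next(6, 8), 8);
    assert_eq!(round_up_to_next(48, 8), 48);
}
```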
@ -120,10 +120,10 @@ fn test_rounding() {
// Returns a tuple of (val_offset, edge_offset),
// from the start of a mallocated array.
#[inline]
fn calculate_offsets(keys_size: uint,
vals_size: uint, vals_align: uint,
edges_align: uint)
-> (uint, uint) {
fn calculate_offsets(keys_size: usize,
vals_size: usize, vals_align: usize,
edges_align: usize)
-> (usize, usize) {
let vals_offset = round_up_to_next(keys_size, vals_align);
let end_of_vals = vals_offset + vals_size;
@ -135,10 +135,10 @@ fn calculate_offsets(keys_size: uint,
// Returns a tuple of (minimum required alignment, array_size),
// from the start of a mallocated array.
#[inline]
fn calculate_allocation(keys_size: uint, keys_align: uint,
vals_size: uint, vals_align: uint,
edges_size: uint, edges_align: uint)
-> (uint, uint) {
fn calculate_allocation(keys_size: usize, keys_align: usize,
vals_size: usize, vals_align: usize,
edges_size: usize, edges_align: usize)
-> (usize, usize) {
let (_, edges_offset) = calculate_offsets(keys_size,
vals_size, vals_align,
edges_align);
@ -159,7 +159,7 @@ fn test_offset_calculation() {
assert_eq!(calculate_offsets(6, 12, 4, 8), (8, 24));
}
fn calculate_allocation_generic<K, V>(capacity: uint, is_leaf: bool) -> (uint, uint) {
fn calculate_allocation_generic<K, V>(capacity: usize, is_leaf: bool) -> (usize, usize) {
let (keys_size, keys_align) = (capacity * mem::size_of::<K>(), mem::min_align_of::<K>());
let (vals_size, vals_align) = (capacity * mem::size_of::<V>(), mem::min_align_of::<V>());
let (edges_size, edges_align) = if is_leaf {
@ -175,7 +175,7 @@ fn calculate_allocation_generic<K, V>(capacity: uint, is_leaf: bool) -> (uint, u
)
}
fn calculate_offsets_generic<K, V>(capacity: uint, is_leaf: bool) -> (uint, uint) {
fn calculate_offsets_generic<K, V>(capacity: usize, is_leaf: bool) -> (usize, usize) {
let keys_size = capacity * mem::size_of::<K>();
let vals_size = capacity * mem::size_of::<V>();
let vals_align = mem::min_align_of::<V>();
@ -203,16 +203,16 @@ impl<T> RawItems<T> {
RawItems::from_parts(slice.as_ptr(), slice.len())
}
unsafe fn from_parts(ptr: *const T, len: uint) -> RawItems<T> {
unsafe fn from_parts(ptr: *const T, len: usize) -> RawItems<T> {
if mem::size_of::<T>() == 0 {
RawItems {
head: ptr,
tail: (ptr as uint + len) as *const T,
tail: (ptr as usize + len) as *const T,
}
} else {
RawItems {
head: ptr,
tail: ptr.offset(len as int),
tail: ptr.offset(len as isize),
}
}
}
@ -221,7 +221,7 @@ impl<T> RawItems<T> {
ptr::write(self.tail as *mut T, val);
if mem::size_of::<T>() == 0 {
self.tail = (self.tail as uint + 1) as *const T;
self.tail = (self.tail as usize + 1) as *const T;
} else {
self.tail = self.tail.offset(1);
}
@ -239,7 +239,7 @@ impl<T> Iterator for RawItems<T> {
let ret = Some(ptr::read(self.head));
if mem::size_of::<T>() == 0 {
self.head = (self.head as uint + 1) as *const T;
self.head = (self.head as usize + 1) as *const T;
} else {
self.head = self.head.offset(1);
}
@ -257,7 +257,7 @@ impl<T> DoubleEndedIterator for RawItems<T> {
} else {
unsafe {
if mem::size_of::<T>() == 0 {
self.tail = (self.tail as uint - 1) as *const T;
self.tail = (self.tail as usize - 1) as *const T;
} else {
self.tail = self.tail.offset(-1);
}
@ -299,7 +299,7 @@ impl<K, V> Drop for Node<K, V> {
impl<K, V> Node<K, V> {
/// Make a new internal node. The caller must initialize the result to fix the invariant that
/// there are `len() + 1` edges.
unsafe fn new_internal(capacity: uint) -> Node<K, V> {
unsafe fn new_internal(capacity: usize) -> Node<K, V> {
let (alignment, size) = calculate_allocation_generic::<K, V>(capacity, false);
let buffer = heap::allocate(size, alignment);
@ -309,15 +309,15 @@ impl<K, V> Node<K, V> {
Node {
keys: Unique(buffer as *mut K),
vals: Unique(buffer.offset(vals_offset as int) as *mut V),
edges: Unique(buffer.offset(edges_offset as int) as *mut Node<K, V>),
vals: Unique(buffer.offset(vals_offset as isize) as *mut V),
edges: Unique(buffer.offset(edges_offset as isize) as *mut Node<K, V>),
_len: 0,
_capacity: capacity,
}
}
/// Make a new leaf node
fn new_leaf(capacity: uint) -> Node<K, V> {
fn new_leaf(capacity: usize) -> Node<K, V> {
let (alignment, size) = calculate_allocation_generic::<K, V>(capacity, true);
let buffer = unsafe { heap::allocate(size, alignment) };
@ -327,7 +327,7 @@ impl<K, V> Node<K, V> {
Node {
keys: Unique(buffer as *mut K),
vals: Unique(unsafe { buffer.offset(vals_offset as int) as *mut V }),
vals: Unique(unsafe { buffer.offset(vals_offset as isize) as *mut V }),
edges: Unique(ptr::null_mut()),
_len: 0,
_capacity: capacity,
@ -479,15 +479,15 @@ impl<K: Clone, V: Clone> Clone for Node<K, V> {
///
/// ```rust,ignore
/// struct Nasty<'a> {
/// first: &'a Node<uint, uint>,
/// second: &'a Node<uint, uint>,
/// first: &'a Node<usize, usize>,
/// second: &'a Node<usize, usize>,
/// flag: &'a Cell<bool>,
/// }
///
/// impl<'a> Deref for Nasty<'a> {
/// type Target = Node<uint, uint>;
/// type Target = Node<usize, usize>;
///
/// fn deref(&self) -> &Node<uint, uint> {
/// fn deref(&self) -> &Node<usize, usize> {
/// if self.flag.get() {
/// &*self.second
/// } else {
@ -524,7 +524,7 @@ impl<K: Clone, V: Clone> Clone for Node<K, V> {
#[derive(Copy)]
pub struct Handle<NodeRef, Type, NodeType> {
node: NodeRef,
index: uint
index: usize
}
pub mod handle {
@ -546,7 +546,7 @@ impl<K: Ord, V> Node<K, V> {
-> SearchResult<NodeRef> where Q: BorrowFrom<K> + Ord {
// FIXME(Gankro): Tune when to search linear or binary based on B (and maybe K/V).
// For the B configured as of this writing (B = 6), binary search was *significantly*
// worse for uints.
// worse for usizes.
match node.as_slices_internal().search_linear(key) {
(index, true) => Found(Handle { node: node, index: index }),
(index, false) => GoDown(Handle { node: node, index: index }),
@ -557,12 +557,12 @@ impl<K: Ord, V> Node<K, V> {
// Public interface
impl <K, V> Node<K, V> {
/// Make a leaf root from scratch
pub fn make_leaf_root(b: uint) -> Node<K, V> {
pub fn make_leaf_root(b: usize) -> Node<K, V> {
Node::new_leaf(capacity_from_b(b))
}
/// Make an internal root and swap it with an old root
pub fn make_internal_root(left_and_out: &mut Node<K,V>, b: uint, key: K, value: V,
pub fn make_internal_root(left_and_out: &mut Node<K,V>, b: usize, key: K, value: V,
right: Node<K,V>) {
let node = mem::replace(left_and_out, unsafe { Node::new_internal(capacity_from_b(b)) });
left_and_out._len = 1;
@ -575,12 +575,12 @@ impl <K, V> Node<K, V> {
}
/// How many key-value pairs the node contains
pub fn len(&self) -> uint {
pub fn len(&self) -> usize {
self._len
}
/// How many key-value pairs the node can fit
pub fn capacity(&self) -> uint {
pub fn capacity(&self) -> usize {
self._capacity
}
@ -669,7 +669,7 @@ impl<'a, K: 'a, V: 'a> Handle<&'a mut Node<K, V>, handle::Edge, handle::Internal
impl<K, V, NodeRef: Deref<Target=Node<K, V>>> Handle<NodeRef, handle::Edge, handle::Internal> {
// This doesn't exist because there are no uses for it,
// but is fine to add, analagous to edge_mut.
// but is fine to add, analogous to edge_mut.
//
// /// Returns a reference to the edge pointed-to by this handle. This should not be
// /// confused with `node`, which references the parent node of what is returned here.
@ -1038,7 +1038,7 @@ impl<K, V> Node<K, V> {
/// # Panics (in debug build)
///
/// Panics if the given index is out of bounds.
pub fn kv_handle(&mut self, index: uint) -> Handle<&mut Node<K, V>, handle::KV,
pub fn kv_handle(&mut self, index: usize) -> Handle<&mut Node<K, V>, handle::KV,
handle::LeafOrInternal> {
// Necessary for correctness, but in a private module
debug_assert!(index < self.len(), "kv_handle index out of bounds");
@ -1114,15 +1114,15 @@ impl<K, V> Node<K, V> {
// This must be followed by insert_edge on an internal node.
#[inline]
unsafe fn insert_kv(&mut self, index: uint, key: K, val: V) -> &mut V {
unsafe fn insert_kv(&mut self, index: usize, key: K, val: V) -> &mut V {
ptr::copy_memory(
self.keys_mut().as_mut_ptr().offset(index as int + 1),
self.keys().as_ptr().offset(index as int),
self.keys_mut().as_mut_ptr().offset(index as isize + 1),
self.keys().as_ptr().offset(index as isize),
self.len() - index
);
ptr::copy_memory(
self.vals_mut().as_mut_ptr().offset(index as int + 1),
self.vals().as_ptr().offset(index as int),
self.vals_mut().as_mut_ptr().offset(index as isize + 1),
self.vals().as_ptr().offset(index as isize),
self.len() - index
);
@ -1136,10 +1136,10 @@ impl<K, V> Node<K, V> {
// This can only be called immediately after a call to insert_kv.
#[inline]
unsafe fn insert_edge(&mut self, index: uint, edge: Node<K, V>) {
unsafe fn insert_edge(&mut self, index: usize, edge: Node<K, V>) {
ptr::copy_memory(
self.edges_mut().as_mut_ptr().offset(index as int + 1),
self.edges().as_ptr().offset(index as int),
self.edges_mut().as_mut_ptr().offset(index as isize + 1),
self.edges().as_ptr().offset(index as isize),
self.len() - index
);
ptr::write(self.edges_mut().get_unchecked_mut(index), edge);
@ -1166,18 +1166,18 @@ impl<K, V> Node<K, V> {
// This must be followed by remove_edge on an internal node.
#[inline]
unsafe fn remove_kv(&mut self, index: uint) -> (K, V) {
unsafe fn remove_kv(&mut self, index: usize) -> (K, V) {
let key = ptr::read(self.keys().get_unchecked(index));
let val = ptr::read(self.vals().get_unchecked(index));
ptr::copy_memory(
self.keys_mut().as_mut_ptr().offset(index as int),
self.keys().as_ptr().offset(index as int + 1),
self.keys_mut().as_mut_ptr().offset(index as isize),
self.keys().as_ptr().offset(index as isize + 1),
self.len() - index - 1
);
ptr::copy_memory(
self.vals_mut().as_mut_ptr().offset(index as int),
self.vals().as_ptr().offset(index as int + 1),
self.vals_mut().as_mut_ptr().offset(index as isize),
self.vals().as_ptr().offset(index as isize + 1),
self.len() - index - 1
);
@ -1188,12 +1188,12 @@ impl<K, V> Node<K, V> {
// This can only be called immediately after a call to remove_kv.
#[inline]
unsafe fn remove_edge(&mut self, index: uint) -> Node<K, V> {
unsafe fn remove_edge(&mut self, index: usize) -> Node<K, V> {
let edge = ptr::read(self.edges().get_unchecked(index));
ptr::copy_memory(
self.edges_mut().as_mut_ptr().offset(index as int),
self.edges().as_ptr().offset(index as int + 1),
self.edges_mut().as_mut_ptr().offset(index as isize),
self.edges().as_ptr().offset(index as isize + 1),
self.len() - index + 1
);
@ -1220,18 +1220,18 @@ impl<K, V> Node<K, V> {
let right_offset = self.len() - right.len();
ptr::copy_nonoverlapping_memory(
right.keys_mut().as_mut_ptr(),
self.keys().as_ptr().offset(right_offset as int),
self.keys().as_ptr().offset(right_offset as isize),
right.len()
);
ptr::copy_nonoverlapping_memory(
right.vals_mut().as_mut_ptr(),
self.vals().as_ptr().offset(right_offset as int),
self.vals().as_ptr().offset(right_offset as isize),
right.len()
);
if !self.is_leaf() {
ptr::copy_nonoverlapping_memory(
right.edges_mut().as_mut_ptr(),
self.edges().as_ptr().offset(right_offset as int),
self.edges().as_ptr().offset(right_offset as isize),
right.len() + 1
);
}
@ -1260,18 +1260,18 @@ impl<K, V> Node<K, V> {
ptr::write(self.vals_mut().get_unchecked_mut(old_len), val);
ptr::copy_nonoverlapping_memory(
self.keys_mut().as_mut_ptr().offset(old_len as int + 1),
self.keys_mut().as_mut_ptr().offset(old_len as isize + 1),
right.keys().as_ptr(),
right.len()
);
ptr::copy_nonoverlapping_memory(
self.vals_mut().as_mut_ptr().offset(old_len as int + 1),
self.vals_mut().as_mut_ptr().offset(old_len as isize + 1),
right.vals().as_ptr(),
right.len()
);
if !self.is_leaf() {
ptr::copy_nonoverlapping_memory(
self.edges_mut().as_mut_ptr().offset(old_len as int + 1),
self.edges_mut().as_mut_ptr().offset(old_len as isize + 1),
right.edges().as_ptr(),
right.len() + 1
);
@ -1284,12 +1284,12 @@ impl<K, V> Node<K, V> {
}
/// Get the capacity of a node from the order of the parent B-Tree
fn capacity_from_b(b: uint) -> uint {
fn capacity_from_b(b: usize) -> usize {
2 * b - 1
}
/// Get the minimum load of a node from its capacity
fn min_load_from_capacity(cap: uint) -> uint {
fn min_load_from_capacity(cap: usize) -> usize {
// B - 1
cap / 2
}
@ -1334,7 +1334,7 @@ struct MoveTraversalImpl<K, V> {
// For deallocation when we are done iterating.
ptr: *mut u8,
capacity: uint,
capacity: usize,
is_leaf: bool
}
@ -1490,7 +1490,7 @@ macro_rules! node_slice_impl {
$as_slices_internal:ident, $index:ident, $iter:ident) => {
impl<'a, K: Ord + 'a, V: 'a> $NodeSlice<'a, K, V> {
/// Performs linear search in a slice. Returns a tuple of (index, is_exact_match).
fn search_linear<Q: ?Sized>(&self, key: &Q) -> (uint, bool)
fn search_linear<Q: ?Sized>(&self, key: &Q) -> (usize, bool)
where Q: BorrowFrom<K> + Ord {
for (i, k) in self.keys.iter().enumerate() {
match key.cmp(BorrowFrom::borrow_from(k)) {

View File

@ -101,7 +101,7 @@ impl<T: Ord> BTreeSet<T> {
/// B cannot be less than 2.
#[unstable(feature = "collections",
reason = "probably want this to be on the type, eventually")]
pub fn with_b(b: uint) -> BTreeSet<T> {
pub fn with_b(b: usize) -> BTreeSet<T> {
BTreeSet { map: BTreeMap::with_b(b) }
}
}
@ -114,14 +114,14 @@ impl<T> BTreeSet<T> {
/// ```
/// use std::collections::BTreeSet;
///
/// let set: BTreeSet<uint> = [1u, 2, 3, 4].iter().map(|&x| x).collect();
/// let set: BTreeSet<usize> = [1, 2, 3, 4].iter().map(|&x| x).collect();
///
/// for x in set.iter() {
/// println!("{}", x);
/// }
///
/// let v: Vec<uint> = set.iter().map(|&x| x).collect();
/// assert_eq!(v, vec![1u,2,3,4]);
/// let v: Vec<usize> = set.iter().map(|&x| x).collect();
/// assert_eq!(v, vec![1,2,3,4]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn iter(&self) -> Iter<T> {
@ -135,10 +135,10 @@ impl<T> BTreeSet<T> {
/// ```
/// use std::collections::BTreeSet;
///
/// let set: BTreeSet<uint> = [1u, 2, 3, 4].iter().map(|&x| x).collect();
/// let set: BTreeSet<usize> = [1, 2, 3, 4].iter().map(|&x| x).collect();
///
/// let v: Vec<uint> = set.into_iter().collect();
/// assert_eq!(v, vec![1u,2,3,4]);
/// let v: Vec<usize> = set.into_iter().collect();
/// assert_eq!(v, vec![1,2,3,4]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn into_iter(self) -> IntoIter<T> {
@ -162,13 +162,13 @@ impl<T: Ord> BTreeSet<T> {
/// use std::collections::Bound::{Included, Unbounded};
///
/// let mut set = BTreeSet::new();
/// set.insert(3u);
/// set.insert(5u);
/// set.insert(8u);
/// set.insert(3);
/// set.insert(5);
/// set.insert(8);
/// for &elem in set.range(Included(&4), Included(&8)) {
/// println!("{}", elem);
/// }
/// assert_eq!(Some(&5u), set.range(Included(&4), Unbounded).next());
/// assert_eq!(Some(&5), set.range(Included(&4), Unbounded).next());
/// ```
#[unstable(feature = "collections",
reason = "matches collection reform specification, waiting for dust to settle")]
@ -189,15 +189,15 @@ impl<T: Ord> BTreeSet<T> {
/// use std::collections::BTreeSet;
///
/// let mut a = BTreeSet::new();
/// a.insert(1u);
/// a.insert(2u);
/// a.insert(1);
/// a.insert(2);
///
/// let mut b = BTreeSet::new();
/// b.insert(2u);
/// b.insert(3u);
/// b.insert(2);
/// b.insert(3);
///
/// let diff: Vec<uint> = a.difference(&b).cloned().collect();
/// assert_eq!(diff, vec![1u]);
/// let diff: Vec<usize> = a.difference(&b).cloned().collect();
/// assert_eq!(diff, vec![1]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn difference<'a>(&'a self, other: &'a BTreeSet<T>) -> Difference<'a, T> {
@ -212,15 +212,15 @@ impl<T: Ord> BTreeSet<T> {
/// use std::collections::BTreeSet;
///
/// let mut a = BTreeSet::new();
/// a.insert(1u);
/// a.insert(2u);
/// a.insert(1);
/// a.insert(2);
///
/// let mut b = BTreeSet::new();
/// b.insert(2u);
/// b.insert(3u);
/// b.insert(2);
/// b.insert(3);
///
/// let sym_diff: Vec<uint> = a.symmetric_difference(&b).cloned().collect();
/// assert_eq!(sym_diff, vec![1u,3]);
/// let sym_diff: Vec<usize> = a.symmetric_difference(&b).cloned().collect();
/// assert_eq!(sym_diff, vec![1,3]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn symmetric_difference<'a>(&'a self, other: &'a BTreeSet<T>)
@ -236,15 +236,15 @@ impl<T: Ord> BTreeSet<T> {
/// use std::collections::BTreeSet;
///
/// let mut a = BTreeSet::new();
/// a.insert(1u);
/// a.insert(2u);
/// a.insert(1);
/// a.insert(2);
///
/// let mut b = BTreeSet::new();
/// b.insert(2u);
/// b.insert(3u);
/// b.insert(2);
/// b.insert(3);
///
/// let intersection: Vec<uint> = a.intersection(&b).cloned().collect();
/// assert_eq!(intersection, vec![2u]);
/// let intersection: Vec<usize> = a.intersection(&b).cloned().collect();
/// assert_eq!(intersection, vec![2]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn intersection<'a>(&'a self, other: &'a BTreeSet<T>)
@ -260,13 +260,13 @@ impl<T: Ord> BTreeSet<T> {
/// use std::collections::BTreeSet;
///
/// let mut a = BTreeSet::new();
/// a.insert(1u);
/// a.insert(1);
///
/// let mut b = BTreeSet::new();
/// b.insert(2u);
/// b.insert(2);
///
/// let union: Vec<uint> = a.union(&b).cloned().collect();
/// assert_eq!(union, vec![1u,2]);
/// let union: Vec<usize> = a.union(&b).cloned().collect();
/// assert_eq!(union, vec![1,2]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn union<'a>(&'a self, other: &'a BTreeSet<T>) -> Union<'a, T> {
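The four set-operation doc examples above (difference, symmetric_difference, intersection, union) can be exercised together in one short program; this is an added illustration, not text from the diff.

```rust
use std::collections::BTreeSet;

fn main() {
    let a: BTreeSet<i32> = [1, 2, 3].into_iter().collect();
    let b: BTreeSet<i32> = [2, 3, 4].into_iter().collect();

    let difference: Vec<_> = a.difference(&b).cloned().collect();
    let symmetric: Vec<_> = a.symmetric_difference(&b).cloned().collect();
    let intersection: Vec<_> = a.intersection(&b).cloned().collect();
    let union: Vec<_> = a.union(&b).cloned().collect();

    assert_eq!(difference, vec![1]);
    assert_eq!(symmetric, vec![1, 4]);
    assert_eq!(intersection, vec![2, 3]);
    assert_eq!(union, vec![1, 2, 3, 4]);
}
```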
@ -286,7 +286,7 @@ impl<T: Ord> BTreeSet<T> {
/// assert_eq!(v.len(), 1);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn len(&self) -> uint { self.map.len() }
pub fn len(&self) -> usize { self.map.len() }
/// Returns true if the set contains no elements
///
@ -625,7 +625,7 @@ impl<'a, T> Iterator for Iter<'a, T> {
type Item = &'a T;
fn next(&mut self) -> Option<&'a T> { self.iter.next() }
fn size_hint(&self) -> (uint, Option<uint>) { self.iter.size_hint() }
fn size_hint(&self) -> (usize, Option<usize>) { self.iter.size_hint() }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
@ -640,7 +640,7 @@ impl<T> Iterator for IntoIter<T> {
type Item = T;
fn next(&mut self) -> Option<T> { self.iter.next() }
fn size_hint(&self) -> (uint, Option<uint>) { self.iter.size_hint() }
fn size_hint(&self) -> (usize, Option<usize>) { self.iter.size_hint() }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> DoubleEndedIterator for IntoIter<T> {
@ -770,23 +770,23 @@ mod test {
}
struct Counter<'a, 'b> {
i: &'a mut uint,
expected: &'b [int],
i: &'a mut usize,
expected: &'b [i32],
}
impl<'a, 'b, 'c> FnMut<(&'c int,)> for Counter<'a, 'b> {
impl<'a, 'b, 'c> FnMut<(&'c i32,)> for Counter<'a, 'b> {
type Output = bool;
extern "rust-call" fn call_mut(&mut self, (&x,): (&'c int,)) -> bool {
extern "rust-call" fn call_mut(&mut self, (&x,): (&'c i32,)) -> bool {
assert_eq!(x, self.expected[*self.i]);
*self.i += 1;
true
}
}
fn check<F>(a: &[int], b: &[int], expected: &[int], f: F) where
fn check<F>(a: &[i32], b: &[i32], expected: &[i32], f: F) where
// FIXME Replace Counter with `Box<FnMut(_) -> _>`
F: FnOnce(&BTreeSet<int>, &BTreeSet<int>, Counter) -> bool,
F: FnOnce(&BTreeSet<i32>, &BTreeSet<i32>, Counter) -> bool,
{
let mut set_a = BTreeSet::new();
let mut set_b = BTreeSet::new();
@ -801,7 +801,7 @@ mod test {
#[test]
fn test_intersection() {
fn check_intersection(a: &[int], b: &[int], expected: &[int]) {
fn check_intersection(a: &[i32], b: &[i32], expected: &[i32]) {
check(a, b, expected, |x, y, f| x.intersection(y).all(f))
}
@ -817,7 +817,7 @@ mod test {
#[test]
fn test_difference() {
fn check_difference(a: &[int], b: &[int], expected: &[int]) {
fn check_difference(a: &[i32], b: &[i32], expected: &[i32]) {
check(a, b, expected, |x, y, f| x.difference(y).all(f))
}
@ -834,8 +834,7 @@ mod test {
#[test]
fn test_symmetric_difference() {
fn check_symmetric_difference(a: &[int], b: &[int],
expected: &[int]) {
fn check_symmetric_difference(a: &[i32], b: &[i32], expected: &[i32]) {
check(a, b, expected, |x, y, f| x.symmetric_difference(y).all(f))
}
@ -849,8 +848,7 @@ mod test {
#[test]
fn test_union() {
fn check_union(a: &[int], b: &[int],
expected: &[int]) {
fn check_union(a: &[i32], b: &[i32], expected: &[i32]) {
check(a, b, expected, |x, y, f| x.union(y).all(f))
}
@ -865,9 +863,9 @@ mod test {
#[test]
fn test_zip() {
let mut x = BTreeSet::new();
x.insert(5u);
x.insert(12u);
x.insert(11u);
x.insert(5);
x.insert(12);
x.insert(11);
let mut y = BTreeSet::new();
y.insert("foo");
@ -878,13 +876,13 @@ mod test {
let mut z = x.iter().zip(y.iter());
// FIXME: #5801: this needs a type hint to compile...
let result: Option<(&uint, & &'static str)> = z.next();
assert_eq!(result.unwrap(), (&5u, &("bar")));
let result: Option<(&usize, & &'static str)> = z.next();
assert_eq!(result.unwrap(), (&5, &("bar")));
let result: Option<(&uint, & &'static str)> = z.next();
assert_eq!(result.unwrap(), (&11u, &("foo")));
let result: Option<(&usize, & &'static str)> = z.next();
assert_eq!(result.unwrap(), (&11, &("foo")));
let result: Option<(&uint, & &'static str)> = z.next();
let result: Option<(&usize, & &'static str)> = z.next();
assert!(result.is_none());
}
@ -892,7 +890,7 @@ mod test {
fn test_from_iter() {
let xs = [1, 2, 3, 4, 5, 6, 7, 8, 9];
let set: BTreeSet<int> = xs.iter().map(|&x| x).collect();
let set: BTreeSet<_> = xs.iter().cloned().collect();
for x in &xs {
assert!(set.contains(x));
@ -901,8 +899,8 @@ mod test {
#[test]
fn test_show() {
let mut set: BTreeSet<int> = BTreeSet::new();
let empty: BTreeSet<int> = BTreeSet::new();
let mut set = BTreeSet::new();
let empty = BTreeSet::<i32>::new();
set.insert(1);
set.insert(2);

View File

@ -35,7 +35,7 @@ use core::ptr;
/// A doubly-linked list.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct DList<T> {
length: uint,
length: usize,
list_head: Link<T>,
list_tail: Rawlink<Node<T>>,
}
@ -61,7 +61,7 @@ struct Node<T> {
pub struct Iter<'a, T:'a> {
head: &'a Link<T>,
tail: Rawlink<Node<T>>,
nelem: uint,
nelem: usize,
}
// FIXME #19839: deriving is too aggressive on the bounds (T doesn't need to be Clone).
@ -82,7 +82,7 @@ pub struct IterMut<'a, T:'a> {
list: &'a mut DList<T>,
head: Rawlink<Node<T>>,
tail: Rawlink<Node<T>>,
nelem: uint,
nelem: usize,
}
/// An iterator over mutable references to the items of a `DList`.
@ -345,7 +345,7 @@ impl<T> DList<T> {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn len(&self) -> uint {
pub fn len(&self) -> usize {
self.length
}
@ -578,7 +578,7 @@ impl<T> DList<T> {
/// assert_eq!(splitted.pop_front(), None);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn split_off(&mut self, at: uint) -> DList<T> {
pub fn split_off(&mut self, at: usize) -> DList<T> {
let len = self.len();
assert!(at < len, "Cannot split off at a nonexistent index");
if at == 0 {
@ -659,7 +659,7 @@ impl<'a, A> Iterator for Iter<'a, A> {
}
#[inline]
fn size_hint(&self) -> (uint, Option<uint>) {
fn size_hint(&self) -> (usize, Option<usize>) {
(self.nelem, Some(self.nelem))
}
}
@ -701,7 +701,7 @@ impl<'a, A> Iterator for IterMut<'a, A> {
}
#[inline]
fn size_hint(&self) -> (uint, Option<uint>) {
fn size_hint(&self) -> (usize, Option<usize>) {
(self.nelem, Some(self.nelem))
}
}
@ -810,7 +810,7 @@ impl<A> Iterator for IntoIter<A> {
fn next(&mut self) -> Option<A> { self.list.pop_front() }
#[inline]
fn size_hint(&self) -> (uint, Option<uint>) {
fn size_hint(&self) -> (usize, Option<usize>) {
(self.list.length, Some(self.list.length))
}
}
@ -935,11 +935,11 @@ mod tests {
use super::{DList, Node};
pub fn check_links<T>(list: &DList<T>) {
let mut len = 0u;
let mut len = 0;
let mut last_ptr: Option<&Node<T>> = None;
let mut node_ptr: &Node<T>;
match list.list_head {
None => { assert_eq!(0u, list.length); return }
None => { assert_eq!(0, list.length); return }
Some(ref node) => node_ptr = &**node,
}
loop {
@ -968,7 +968,7 @@ mod tests {
#[test]
fn test_basic() {
let mut m: DList<Box<int>> = DList::new();
let mut m = DList::new();
assert_eq!(m.pop_front(), None);
assert_eq!(m.pop_back(), None);
assert_eq!(m.pop_front(), None);
@ -1007,7 +1007,7 @@ mod tests {
}
#[cfg(test)]
fn generate_test() -> DList<int> {
fn generate_test() -> DList<i32> {
list_from(&[0,1,2,3,4,5,6])
}
@ -1020,7 +1020,7 @@ mod tests {
fn test_append() {
// Empty to empty
{
let mut m: DList<int> = DList::new();
let mut m = DList::<i32>::new();
let mut n = DList::new();
m.append(&mut n);
check_links(&m);
@ -1122,7 +1122,7 @@ mod tests {
fn test_iterator() {
let m = generate_test();
for (i, elt) in m.iter().enumerate() {
assert_eq!(i as int, *elt);
assert_eq!(i as i32, *elt);
}
let mut n = DList::new();
assert_eq!(n.iter().next(), None);
@ -1170,7 +1170,7 @@ mod tests {
fn test_rev_iter() {
let m = generate_test();
for (i, elt) in m.iter().rev().enumerate() {
assert_eq!((6 - i) as int, *elt);
assert_eq!((6 - i) as i32, *elt);
}
let mut n = DList::new();
assert_eq!(n.iter().rev().next(), None);
@ -1187,7 +1187,7 @@ mod tests {
let mut m = generate_test();
let mut len = m.len();
for (i, elt) in m.iter_mut().enumerate() {
assert_eq!(i as int, *elt);
assert_eq!(i as i32, *elt);
len -= 1;
}
assert_eq!(len, 0);
@ -1245,14 +1245,14 @@ mod tests {
}
check_links(&m);
assert_eq!(m.len(), 3 + len * 2);
assert_eq!(m.into_iter().collect::<Vec<int>>(), vec![-2,0,1,2,3,4,5,6,7,8,9,0,1]);
assert_eq!(m.into_iter().collect::<Vec<_>>(), vec![-2,0,1,2,3,4,5,6,7,8,9,0,1]);
}
#[test]
fn test_mut_rev_iter() {
let mut m = generate_test();
for (i, elt) in m.iter_mut().rev().enumerate() {
assert_eq!((6-i) as int, *elt);
assert_eq!((6 - i) as i32, *elt);
}
let mut n = DList::new();
assert!(n.iter_mut().rev().next().is_none());
@ -1268,13 +1268,13 @@ mod tests {
Thread::scoped(move || {
check_links(&n);
let a: &[_] = &[&1,&2,&3];
assert_eq!(a, n.iter().collect::<Vec<&int>>());
assert_eq!(a, n.iter().collect::<Vec<_>>());
}).join().ok().unwrap();
}
#[test]
fn test_eq() {
let mut n: DList<u8> = list_from(&[]);
let mut n = list_from(&[]);
let mut m = list_from(&[]);
assert!(n == m);
n.push_front(1);
@ -1307,7 +1307,7 @@ mod tests {
#[test]
fn test_ord() {
let n: DList<int> = list_from(&[]);
let n = list_from(&[]);
let m = list_from(&[1,2,3]);
assert!(n < m);
assert!(m > n);
@ -1349,7 +1349,7 @@ mod tests {
#[test]
fn test_fuzz() {
for _ in 0u..25 {
for _ in 0..25 {
fuzz_test(3);
fuzz_test(16);
fuzz_test(189);
@ -1358,18 +1358,16 @@ mod tests {
#[test]
fn test_show() {
let list: DList<i32> = (0..10).collect();
let list: DList<_> = (0..10).collect();
assert_eq!(format!("{:?}", list), "DList [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]");
let list: DList<&str> = vec!["just", "one", "test", "more"].iter()
.map(|&s| s)
.collect();
let list: DList<_> = vec!["just", "one", "test", "more"].iter().cloned().collect();
assert_eq!(format!("{:?}", list), "DList [\"just\", \"one\", \"test\", \"more\"]");
}
#[cfg(test)]
fn fuzz_test(sz: int) {
let mut m: DList<int> = DList::new();
fn fuzz_test(sz: i32) {
let mut m: DList<_> = DList::new();
let mut v = vec![];
for i in 0..sz {
check_links(&m);
@ -1398,7 +1396,7 @@ mod tests {
check_links(&m);
let mut i = 0u;
let mut i = 0;
for (a, &b) in m.into_iter().zip(v.iter()) {
i += 1;
assert_eq!(a, b);
@ -1410,13 +1408,13 @@ mod tests {
fn bench_collect_into(b: &mut test::Bencher) {
let v = &[0; 64];
b.iter(|| {
let _: DList<int> = v.iter().map(|x| *x).collect();
let _: DList<_> = v.iter().cloned().collect();
})
}
#[bench]
fn bench_push_front(b: &mut test::Bencher) {
let mut m: DList<int> = DList::new();
let mut m: DList<_> = DList::new();
b.iter(|| {
m.push_front(0);
})
@ -1424,7 +1422,7 @@ mod tests {
#[bench]
fn bench_push_back(b: &mut test::Bencher) {
let mut m: DList<int> = DList::new();
let mut m: DList<_> = DList::new();
b.iter(|| {
m.push_back(0);
})
@ -1432,7 +1430,7 @@ mod tests {
#[bench]
fn bench_push_back_pop_back(b: &mut test::Bencher) {
let mut m: DList<int> = DList::new();
let mut m: DList<_> = DList::new();
b.iter(|| {
m.push_back(0);
m.pop_back();
@ -1441,7 +1439,7 @@ mod tests {
#[bench]
fn bench_push_front_pop_front(b: &mut test::Bencher) {
let mut m: DList<int> = DList::new();
let mut m: DList<_> = DList::new();
b.iter(|| {
m.push_front(0);
m.pop_front();
@ -1451,7 +1449,7 @@ mod tests {
#[bench]
fn bench_iter(b: &mut test::Bencher) {
let v = &[0; 128];
let m: DList<int> = v.iter().map(|&x|x).collect();
let m: DList<_> = v.iter().cloned().collect();
b.iter(|| {
assert!(m.iter().count() == 128);
})
@ -1459,7 +1457,7 @@ mod tests {
#[bench]
fn bench_iter_mut(b: &mut test::Bencher) {
let v = &[0; 128];
let mut m: DList<int> = v.iter().map(|&x|x).collect();
let mut m: DList<_> = v.iter().cloned().collect();
b.iter(|| {
assert!(m.iter_mut().count() == 128);
})
@ -1467,7 +1465,7 @@ mod tests {
#[bench]
fn bench_iter_rev(b: &mut test::Bencher) {
let v = &[0; 128];
let m: DList<int> = v.iter().map(|&x|x).collect();
let m: DList<_> = v.iter().cloned().collect();
b.iter(|| {
assert!(m.iter().rev().count() == 128);
})
@ -1475,7 +1473,7 @@ mod tests {
#[bench]
fn bench_iter_mut_rev(b: &mut test::Bencher) {
let v = &[0; 128];
let mut m: DList<int> = v.iter().map(|&x|x).collect();
let mut m: DList<_> = v.iter().cloned().collect();
b.iter(|| {
assert!(m.iter_mut().rev().count() == 128);
})

View File

@ -26,7 +26,7 @@ use core::ops::{Sub, BitOr, BitAnd, BitXor};
pub struct EnumSet<E> {
// We must maintain the invariant that no bits are set
// for which no variant exists
bits: uint
bits: usize
}
impl<E> Copy for EnumSet<E> {}
@ -47,37 +47,37 @@ impl<E:CLike + fmt::Debug> fmt::Debug for EnumSet<E> {
}
}
/// An interface for casting C-like enum to uint and back.
/// An interface for casting C-like enum to usize and back.
/// A typical implementation is shown below.
///
/// ```{rust,ignore}
/// #[repr(uint)]
/// #[repr(usize)]
/// enum Foo {
/// A, B, C
/// }
///
/// impl CLike for Foo {
/// fn to_uint(&self) -> uint {
/// *self as uint
/// fn to_usize(&self) -> usize {
/// *self as usize
/// }
///
/// fn from_uint(v: uint) -> Foo {
/// fn from_usize(v: usize) -> Foo {
/// unsafe { mem::transmute(v) }
/// }
/// }
/// ```
pub trait CLike {
/// Converts a C-like enum to a `uint`.
fn to_uint(&self) -> uint;
/// Converts a `uint` to a C-like enum.
fn from_uint(uint) -> Self;
/// Converts a C-like enum to a `usize`.
fn to_usize(&self) -> usize;
/// Converts a `usize` to a C-like enum.
fn from_usize(usize) -> Self;
}
fn bit<E:CLike>(e: &E) -> uint {
use core::uint;
let value = e.to_uint();
assert!(value < uint::BITS,
"EnumSet only supports up to {} variants.", uint::BITS - 1);
fn bit<E:CLike>(e: &E) -> usize {
use core::usize;
let value = e.to_usize();
assert!(value < usize::BITS,
"EnumSet only supports up to {} variants.", usize::BITS - 1);
1 << value
}
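For illustration, a self-contained sketch of the renamed interface (local stand-in definitions, not the library's own items) showing what `bit` computes for a small enum; the BITS overflow assert is omitted here:

use std::mem;

// Stand-ins for the renamed trait and helper, for illustration only.
trait CLike {
    fn to_usize(&self) -> usize;
    fn from_usize(v: usize) -> Self;
}

fn bit<E: CLike>(e: &E) -> usize {
    // Each variant occupies one bit of the set's usize storage.
    1usize << e.to_usize()
}

#[repr(usize)]
#[derive(Copy, Clone, PartialEq, Debug)]
enum Foo { A, B, C }

impl CLike for Foo {
    fn to_usize(&self) -> usize { *self as usize }
    fn from_usize(v: usize) -> Foo { unsafe { mem::transmute(v) } }
}

fn main() {
    assert_eq!(bit(&Foo::A), 0b001);
    assert_eq!(bit(&Foo::C), 0b100);
    assert_eq!(Foo::from_usize(1), Foo::B);
}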
@ -92,7 +92,7 @@ impl<E:CLike> EnumSet<E> {
/// Returns the number of elements in the given `EnumSet`.
#[unstable(feature = "collections",
reason = "matches collection reform specification, waiting for dust to settle")]
pub fn len(&self) -> uint {
pub fn len(&self) -> usize {
self.bits.count_ones()
}
@ -205,8 +205,8 @@ impl<E:CLike> BitXor for EnumSet<E> {
/// An iterator over an EnumSet
pub struct Iter<E> {
index: uint,
bits: uint,
index: usize,
bits: usize,
}
// FIXME(#19839) Remove in favor of `#[derive(Clone)]`
@ -220,7 +220,7 @@ impl<E> Clone for Iter<E> {
}
impl<E:CLike> Iter<E> {
fn new(bits: uint) -> Iter<E> {
fn new(bits: usize) -> Iter<E> {
Iter { index: 0, bits: bits }
}
}
@ -237,13 +237,13 @@ impl<E:CLike> Iterator for Iter<E> {
self.index += 1;
self.bits >>= 1;
}
let elem = CLike::from_uint(self.index);
let elem = CLike::from_usize(self.index);
self.index += 1;
self.bits >>= 1;
Some(elem)
}
fn size_hint(&self) -> (uint, Option<uint>) {
fn size_hint(&self) -> (usize, Option<usize>) {
let exact = self.bits.count_ones();
(exact, Some(exact))
}
@ -282,17 +282,17 @@ mod test {
use super::{EnumSet, CLike};
#[derive(Copy, PartialEq, Debug)]
#[repr(uint)]
#[repr(usize)]
enum Foo {
A, B, C
}
impl CLike for Foo {
fn to_uint(&self) -> uint {
*self as uint
fn to_usize(&self) -> usize {
*self as usize
}
fn from_uint(v: uint) -> Foo {
fn from_usize(v: usize) -> Foo {
unsafe { mem::transmute(v) }
}
}
@ -486,7 +486,7 @@ mod test {
fn test_overflow() {
#[allow(dead_code)]
#[derive(Copy)]
#[repr(uint)]
#[repr(usize)]
enum Bar {
V00, V01, V02, V03, V04, V05, V06, V07, V08, V09,
V10, V11, V12, V13, V14, V15, V16, V17, V18, V19,
@ -498,11 +498,11 @@ mod test {
}
impl CLike for Bar {
fn to_uint(&self) -> uint {
*self as uint
fn to_usize(&self) -> usize {
*self as usize
}
fn from_uint(v: uint) -> Bar {
fn from_usize(v: usize) -> Bar {
unsafe { mem::transmute(v) }
}
}

View File

@ -26,7 +26,6 @@
#![feature(box_syntax)]
#![feature(core)]
#![feature(hash)]
#![feature(int_uint)]
#![feature(staged_api)]
#![feature(unboxed_closures)]
#![feature(unicode)]

View File

@ -32,8 +32,8 @@ use std::cmp;
use alloc::heap;
static INITIAL_CAPACITY: uint = 7u; // 2^3 - 1
static MINIMUM_CAPACITY: uint = 1u; // 2 - 1
static INITIAL_CAPACITY: usize = 7; // 2^3 - 1
static MINIMUM_CAPACITY: usize = 1; // 2 - 1
/// `RingBuf` is a circular buffer, which can be used as a double-ended queue efficiently.
#[stable(feature = "rust1", since = "1.0.0")]
@ -44,9 +44,9 @@ pub struct RingBuf<T> {
// If tail == head the buffer is empty. The length of the ringbuf
// is defined as the distance between the two.
tail: uint,
head: uint,
cap: uint,
tail: usize,
head: usize,
cap: usize,
ptr: *mut T
}
@ -59,7 +59,7 @@ unsafe impl<T: Sync> Sync for RingBuf<T> {}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Clone> Clone for RingBuf<T> {
fn clone(&self) -> RingBuf<T> {
self.iter().map(|t| t.clone()).collect()
self.iter().cloned().collect()
}
}
@ -99,14 +99,14 @@ impl<T> RingBuf<T> {
/// Moves an element out of the buffer
#[inline]
unsafe fn buffer_read(&mut self, off: uint) -> T {
ptr::read(self.ptr.offset(off as int))
unsafe fn buffer_read(&mut self, off: usize) -> T {
ptr::read(self.ptr.offset(off as isize))
}
/// Writes an element into the buffer, moving it.
#[inline]
unsafe fn buffer_write(&mut self, off: uint, t: T) {
ptr::write(self.ptr.offset(off as int), t);
unsafe fn buffer_write(&mut self, off: usize, t: T) {
ptr::write(self.ptr.offset(off as isize), t);
}
/// Returns true iff the buffer is at capacity
@ -115,31 +115,31 @@ impl<T> RingBuf<T> {
/// Returns the index in the underlying buffer for a given logical element index.
#[inline]
fn wrap_index(&self, idx: uint) -> uint { wrap_index(idx, self.cap) }
fn wrap_index(&self, idx: usize) -> usize { wrap_index(idx, self.cap) }
/// Copies a contiguous block of memory len long from src to dst
#[inline]
unsafe fn copy(&self, dst: uint, src: uint, len: uint) {
unsafe fn copy(&self, dst: usize, src: usize, len: usize) {
debug_assert!(dst + len <= self.cap, "dst={} src={} len={} cap={}", dst, src, len,
self.cap);
debug_assert!(src + len <= self.cap, "dst={} src={} len={} cap={}", dst, src, len,
self.cap);
ptr::copy_memory(
self.ptr.offset(dst as int),
self.ptr.offset(src as int),
self.ptr.offset(dst as isize),
self.ptr.offset(src as isize),
len);
}
/// Copies a contiguous block of memory len long from src to dst
#[inline]
unsafe fn copy_nonoverlapping(&self, dst: uint, src: uint, len: uint) {
unsafe fn copy_nonoverlapping(&self, dst: usize, src: usize, len: usize) {
debug_assert!(dst + len <= self.cap, "dst={} src={} len={} cap={}", dst, src, len,
self.cap);
debug_assert!(src + len <= self.cap, "dst={} src={} len={} cap={}", dst, src, len,
self.cap);
ptr::copy_nonoverlapping_memory(
self.ptr.offset(dst as int),
self.ptr.offset(src as int),
self.ptr.offset(dst as isize),
self.ptr.offset(src as isize),
len);
}
}
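The `as isize` casts in these helpers exist because raw-pointer `offset` is defined over `isize`, while the buffer indices are now tracked as `usize`. A tiny standalone illustration of the same pattern (the helper name is invented for this sketch):

// offset takes an isize, hence the cast from the usize index.
unsafe fn nth_mut<T>(base: *mut T, i: usize) -> *mut T {
    base.offset(i as isize)
}

fn main() {
    let mut xs = [10i32, 20, 30];
    let p = xs.as_mut_ptr();
    unsafe { *nth_mut(p, 2) = 99; }
    assert_eq!(xs[2], 99);
}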
@ -153,7 +153,7 @@ impl<T> RingBuf<T> {
/// Creates an empty `RingBuf` with space for at least `n` elements.
#[stable(feature = "rust1", since = "1.0.0")]
pub fn with_capacity(n: uint) -> RingBuf<T> {
pub fn with_capacity(n: usize) -> RingBuf<T> {
// +1 since the ringbuffer always leaves one space empty
let cap = cmp::max(n + 1, MINIMUM_CAPACITY + 1).next_power_of_two();
assert!(cap > n, "capacity overflow");
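The `+ 1` and `next_power_of_two` encode the rule in the comment: one slot always stays empty, and the allocation is rounded up to a power of two so the wrap-around masking elsewhere in the file stays a cheap AND. A quick arithmetic check of that rule (plain usize math, not library code):

fn main() {
    let n: usize = 10;                      // requested capacity
    let cap = (n + 1).next_power_of_two();  // what with_capacity allocates
    assert_eq!(cap, 16);
    assert!(cap.is_power_of_two());
    assert!(cap - 1 >= n);                  // usable capacity (cap - 1) covers the request
}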
@ -192,10 +192,10 @@ impl<T> RingBuf<T> {
/// assert_eq!(buf.get(1).unwrap(), &4);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn get(&self, i: uint) -> Option<&T> {
pub fn get(&self, i: usize) -> Option<&T> {
if i < self.len() {
let idx = self.wrap_index(self.tail + i);
unsafe { Some(&*self.ptr.offset(idx as int)) }
unsafe { Some(&*self.ptr.offset(idx as isize)) }
} else {
None
}
@ -222,10 +222,10 @@ impl<T> RingBuf<T> {
/// assert_eq!(buf[1], 7);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn get_mut(&mut self, i: uint) -> Option<&mut T> {
pub fn get_mut(&mut self, i: usize) -> Option<&mut T> {
if i < self.len() {
let idx = self.wrap_index(self.tail + i);
unsafe { Some(&mut *self.ptr.offset(idx as int)) }
unsafe { Some(&mut *self.ptr.offset(idx as isize)) }
} else {
None
}
@ -251,13 +251,13 @@ impl<T> RingBuf<T> {
/// assert_eq!(buf[2], 3);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn swap(&mut self, i: uint, j: uint) {
pub fn swap(&mut self, i: usize, j: usize) {
assert!(i < self.len());
assert!(j < self.len());
let ri = self.wrap_index(self.tail + i);
let rj = self.wrap_index(self.tail + j);
unsafe {
ptr::swap(self.ptr.offset(ri as int), self.ptr.offset(rj as int))
ptr::swap(self.ptr.offset(ri as isize), self.ptr.offset(rj as isize))
}
}
@ -274,7 +274,7 @@ impl<T> RingBuf<T> {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn capacity(&self) -> uint { self.cap - 1 }
pub fn capacity(&self) -> usize { self.cap - 1 }
/// Reserves the minimum capacity for exactly `additional` more elements to be inserted in the
/// given `RingBuf`. Does nothing if the capacity is already sufficient.
@ -285,7 +285,7 @@ impl<T> RingBuf<T> {
///
/// # Panics
///
/// Panics if the new capacity overflows `uint`.
/// Panics if the new capacity overflows `usize`.
///
/// # Examples
///
@ -297,7 +297,7 @@ impl<T> RingBuf<T> {
/// assert!(buf.capacity() >= 11);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn reserve_exact(&mut self, additional: uint) {
pub fn reserve_exact(&mut self, additional: usize) {
self.reserve(additional);
}
@ -306,7 +306,7 @@ impl<T> RingBuf<T> {
///
/// # Panics
///
/// Panics if the new capacity overflows `uint`.
/// Panics if the new capacity overflows `usize`.
///
/// # Examples
///
@ -318,7 +318,7 @@ impl<T> RingBuf<T> {
/// assert!(buf.capacity() >= 11);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn reserve(&mut self, additional: uint) {
pub fn reserve(&mut self, additional: usize) {
let new_len = self.len() + additional;
assert!(new_len + 1 > self.len(), "capacity overflow");
if new_len > self.capacity() {
@ -388,7 +388,7 @@ impl<T> RingBuf<T> {
/// use std::collections::RingBuf;
///
/// let mut buf = RingBuf::with_capacity(15);
/// buf.extend(0u..4);
/// buf.extend(0..4);
/// assert_eq!(buf.capacity(), 15);
/// buf.shrink_to_fit();
/// assert!(buf.capacity() >= 4);
@ -482,7 +482,7 @@ impl<T> RingBuf<T> {
/// ```
#[unstable(feature = "collections",
reason = "matches collection reform specification; waiting on panic semantics")]
pub fn truncate(&mut self, len: uint) {
pub fn truncate(&mut self, len: usize) {
for _ in len..self.len() {
self.pop_back();
}
@ -529,13 +529,13 @@ impl<T> RingBuf<T> {
/// assert_eq!(&buf.iter_mut().collect::<Vec<&mut i32>>()[], b);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn iter_mut<'a>(&'a mut self) -> IterMut<'a, T> {
pub fn iter_mut(&mut self) -> IterMut<T> {
IterMut {
tail: self.tail,
head: self.head,
cap: self.cap,
ptr: self.ptr,
marker: marker::ContravariantLifetime::<'a>,
marker: marker::ContravariantLifetime,
}
}
@ -552,7 +552,7 @@ impl<T> RingBuf<T> {
#[inline]
#[unstable(feature = "collections",
reason = "matches collection reform specification, waiting for dust to settle")]
pub fn as_slices<'a>(&'a self) -> (&'a [T], &'a [T]) {
pub fn as_slices(&self) -> (&[T], &[T]) {
unsafe {
let contiguous = self.is_contiguous();
let buf = self.buffer_as_slice();
@ -572,7 +572,7 @@ impl<T> RingBuf<T> {
#[inline]
#[unstable(feature = "collections",
reason = "matches collection reform specification, waiting for dust to settle")]
pub fn as_mut_slices<'a>(&'a mut self) -> (&'a mut [T], &'a mut [T]) {
pub fn as_mut_slices(&mut self) -> (&mut [T], &mut [T]) {
unsafe {
let contiguous = self.is_contiguous();
let head = self.head;
@ -604,7 +604,7 @@ impl<T> RingBuf<T> {
/// assert_eq!(v.len(), 1);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn len(&self) -> uint { count(self.tail, self.head, self.cap) }
pub fn len(&self) -> usize { count(self.tail, self.head, self.cap) }
/// Returns true if the buffer contains no elements
///
@ -878,7 +878,7 @@ impl<T> RingBuf<T> {
/// ```
#[unstable(feature = "collections",
reason = "the naming of this function may be altered")]
pub fn swap_back_remove(&mut self, index: uint) -> Option<T> {
pub fn swap_back_remove(&mut self, index: usize) -> Option<T> {
let length = self.len();
if length > 0 && index < length - 1 {
self.swap(index, length - 1);
@ -911,7 +911,7 @@ impl<T> RingBuf<T> {
/// ```
#[unstable(feature = "collections",
reason = "the naming of this function may be altered")]
pub fn swap_front_remove(&mut self, index: uint) -> Option<T> {
pub fn swap_front_remove(&mut self, index: usize) -> Option<T> {
let length = self.len();
if length > 0 && index < length && index != 0 {
self.swap(index, 0);
@ -939,7 +939,7 @@ impl<T> RingBuf<T> {
/// buf.insert(1,11);
/// assert_eq!(Some(&11), buf.get(1));
/// ```
pub fn insert(&mut self, i: uint, t: T) {
pub fn insert(&mut self, i: usize, t: T) {
assert!(i <= self.len(), "index out of bounds");
if self.is_full() {
self.reserve(1);
@ -1144,7 +1144,7 @@ impl<T> RingBuf<T> {
/// assert_eq!(Some(&15), buf.get(2));
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn remove(&mut self, i: uint) -> Option<T> {
pub fn remove(&mut self, i: usize) -> Option<T> {
if self.is_empty() || self.len() <= i {
return None;
}
@ -1312,7 +1312,7 @@ impl<T: Clone> RingBuf<T> {
/// ```
#[unstable(feature = "collections",
reason = "matches collection reform specification; waiting on panic semantics")]
pub fn resize(&mut self, new_len: uint, value: T) {
pub fn resize(&mut self, new_len: usize, value: T) {
let len = self.len();
if new_len > len {
@ -1325,14 +1325,14 @@ impl<T: Clone> RingBuf<T> {
/// Returns the index in the underlying buffer for a given logical element index.
#[inline]
fn wrap_index(index: uint, size: uint) -> uint {
fn wrap_index(index: usize, size: usize) -> usize {
// size is always a power of 2
index & (size - 1)
}
/// Calculate the number of elements left to be read in the buffer
#[inline]
fn count(tail: uint, head: uint, size: uint) -> uint {
fn count(tail: usize, head: usize, size: usize) -> usize {
// size is always a power of 2
(head - tail) & (size - 1)
}
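Because the capacity is kept a power of two, the bit-mask in `wrap_index` and `count` is exactly a modulo, and the subtraction in `count` works out even when `head` has wrapped around below `tail`. A hedged standalone check (wrapping_sub is spelled out here to make the wrapped case explicit):

fn wrap_index(index: usize, size: usize) -> usize {
    index & (size - 1)              // size is a power of two, so this is index % size
}

fn count(tail: usize, head: usize, size: usize) -> usize {
    head.wrapping_sub(tail) & (size - 1)
}

fn main() {
    assert_eq!(wrap_index(9, 8), 9 % 8);    // 1
    assert_eq!(count(2, 6, 8), 4);          // head ahead of tail
    assert_eq!(count(6, 2, 8), 4);          // head wrapped past the end: still 4 elements
}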
@ -1341,8 +1341,8 @@ fn count(tail: uint, head: uint, size: uint) -> uint {
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Iter<'a, T:'a> {
ring: &'a [T],
tail: uint,
head: uint
tail: usize,
head: usize
}
// FIXME(#19839) Remove in favor of `#[derive(Clone)]`
@ -1371,7 +1371,7 @@ impl<'a, T> Iterator for Iter<'a, T> {
}
#[inline]
fn size_hint(&self) -> (uint, Option<uint>) {
fn size_hint(&self) -> (usize, Option<usize>) {
let len = count(self.tail, self.head, self.ring.len());
(len, Some(len))
}
@ -1395,13 +1395,13 @@ impl<'a, T> ExactSizeIterator for Iter<'a, T> {}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> RandomAccessIterator for Iter<'a, T> {
#[inline]
fn indexable(&self) -> uint {
fn indexable(&self) -> usize {
let (len, _) = self.size_hint();
len
}
#[inline]
fn idx(&mut self, j: uint) -> Option<&'a T> {
fn idx(&mut self, j: usize) -> Option<&'a T> {
if j >= self.indexable() {
None
} else {
@ -1418,9 +1418,9 @@ impl<'a, T> RandomAccessIterator for Iter<'a, T> {
#[stable(feature = "rust1", since = "1.0.0")]
pub struct IterMut<'a, T:'a> {
ptr: *mut T,
tail: uint,
head: uint,
cap: uint,
tail: usize,
head: usize,
cap: usize,
marker: marker::ContravariantLifetime<'a>,
}
@ -1437,12 +1437,12 @@ impl<'a, T> Iterator for IterMut<'a, T> {
self.tail = wrap_index(self.tail + 1, self.cap);
unsafe {
Some(&mut *self.ptr.offset(tail as int))
Some(&mut *self.ptr.offset(tail as isize))
}
}
#[inline]
fn size_hint(&self) -> (uint, Option<uint>) {
fn size_hint(&self) -> (usize, Option<usize>) {
let len = count(self.tail, self.head, self.cap);
(len, Some(len))
}
@ -1458,7 +1458,7 @@ impl<'a, T> DoubleEndedIterator for IterMut<'a, T> {
self.head = wrap_index(self.head - 1, self.cap);
unsafe {
Some(&mut *self.ptr.offset(self.head as int))
Some(&mut *self.ptr.offset(self.head as isize))
}
}
}
@ -1482,7 +1482,7 @@ impl<T> Iterator for IntoIter<T> {
}
#[inline]
fn size_hint(&self) -> (uint, Option<uint>) {
fn size_hint(&self) -> (usize, Option<usize>) {
let len = self.inner.len();
(len, Some(len))
}
@ -1526,7 +1526,7 @@ impl<'a, T: 'a> Iterator for Drain<'a, T> {
}
#[inline]
fn size_hint(&self) -> (uint, Option<uint>) {
fn size_hint(&self) -> (usize, Option<usize>) {
let len = self.inner.len();
(len, Some(len))
}
@ -1580,21 +1580,21 @@ impl<S: Writer + Hasher, A: Hash<S>> Hash<S> for RingBuf<A> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<A> Index<uint> for RingBuf<A> {
impl<A> Index<usize> for RingBuf<A> {
type Output = A;
#[inline]
fn index<'a>(&'a self, i: &uint) -> &'a A {
fn index(&self, i: &usize) -> &A {
self.get(*i).expect("Out of bounds access")
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<A> IndexMut<uint> for RingBuf<A> {
impl<A> IndexMut<usize> for RingBuf<A> {
type Output = A;
#[inline]
fn index_mut<'a>(&'a mut self, i: &uint) -> &'a mut A {
fn index_mut(&mut self, i: &usize) -> &mut A {
self.get_mut(*i).expect("Out of bounds access")
}
}
@ -1673,13 +1673,13 @@ mod tests {
#[allow(deprecated)]
fn test_simple() {
let mut d = RingBuf::new();
assert_eq!(d.len(), 0u);
assert_eq!(d.len(), 0);
d.push_front(17);
d.push_front(42);
d.push_back(137);
assert_eq!(d.len(), 3u);
assert_eq!(d.len(), 3);
d.push_back(137);
assert_eq!(d.len(), 4u);
assert_eq!(d.len(), 4);
assert_eq!(*d.front().unwrap(), 42);
assert_eq!(*d.back().unwrap(), 137);
let mut i = d.pop_front();
@ -1690,15 +1690,15 @@ mod tests {
assert_eq!(i, Some(137));
i = d.pop_back();
assert_eq!(i, Some(17));
assert_eq!(d.len(), 0u);
assert_eq!(d.len(), 0);
d.push_back(3);
assert_eq!(d.len(), 1u);
assert_eq!(d.len(), 1);
d.push_front(2);
assert_eq!(d.len(), 2u);
assert_eq!(d.len(), 2);
d.push_back(4);
assert_eq!(d.len(), 3u);
assert_eq!(d.len(), 3);
d.push_front(1);
assert_eq!(d.len(), 4u);
assert_eq!(d.len(), 4);
debug!("{}", d[0]);
debug!("{}", d[1]);
debug!("{}", d[2]);
@ -1743,21 +1743,21 @@ mod tests {
#[test]
fn test_push_front_grow() {
let mut deq = RingBuf::new();
for i in 0u..66 {
for i in 0..66 {
deq.push_front(i);
}
assert_eq!(deq.len(), 66);
for i in 0u..66 {
for i in 0..66 {
assert_eq!(deq[i], 65 - i);
}
let mut deq = RingBuf::new();
for i in 0u..66 {
for i in 0..66 {
deq.push_back(i);
}
for i in 0u..66 {
for i in 0..66 {
assert_eq!(deq[i], i);
}
}
@ -1765,7 +1765,7 @@ mod tests {
#[test]
fn test_index() {
let mut deq = RingBuf::new();
for i in 1u..4 {
for i in 1..4 {
deq.push_front(i);
}
assert_eq!(deq[1], 2);
@ -1775,7 +1775,7 @@ mod tests {
#[should_fail]
fn test_index_out_of_bounds() {
let mut deq = RingBuf::new();
for i in 1u..4 {
for i in 1..4 {
deq.push_front(i);
}
deq[3];
@ -1784,7 +1784,7 @@ mod tests {
#[bench]
fn bench_new(b: &mut test::Bencher) {
b.iter(|| {
let ring: RingBuf<u64> = RingBuf::new();
let ring: RingBuf<i32> = RingBuf::new();
test::black_box(ring);
})
}
@ -1815,7 +1815,7 @@ mod tests {
#[bench]
fn bench_pop_back_100(b: &mut test::Bencher) {
let mut deq: RingBuf<i32> = RingBuf::with_capacity(101);
let mut deq = RingBuf::<i32>::with_capacity(101);
b.iter(|| {
deq.head = 100;
@ -1828,7 +1828,7 @@ mod tests {
#[bench]
fn bench_pop_front_100(b: &mut test::Bencher) {
let mut deq: RingBuf<i32> = RingBuf::with_capacity(101);
let mut deq = RingBuf::<i32>::with_capacity(101);
b.iter(|| {
deq.head = 100;
@ -1852,7 +1852,7 @@ mod tests {
#[bench]
fn bench_iter_1000(b: &mut test::Bencher) {
let ring: RingBuf<i32> = (0..1000).collect();
let ring: RingBuf<_> = (0..1000).collect();
b.iter(|| {
let mut sum = 0;
@ -1865,7 +1865,7 @@ mod tests {
#[bench]
fn bench_mut_iter_1000(b: &mut test::Bencher) {
let mut ring: RingBuf<i32> = (0..1000).collect();
let mut ring: RingBuf<_> = (0..1000).collect();
b.iter(|| {
let mut sum = 0;
@ -1978,11 +1978,7 @@ mod tests {
#[test]
fn test_reserve_exact() {
let mut d = RingBuf::new();
d.push_back(0u64);
d.reserve_exact(50);
assert!(d.capacity() >= 51);
let mut d = RingBuf::new();
d.push_back(0u32);
d.push_back(0);
d.reserve_exact(50);
assert!(d.capacity() >= 51);
}
@ -1990,21 +1986,17 @@ mod tests {
#[test]
fn test_reserve() {
let mut d = RingBuf::new();
d.push_back(0u64);
d.reserve(50);
assert!(d.capacity() >= 51);
let mut d = RingBuf::new();
d.push_back(0u32);
d.push_back(0);
d.reserve(50);
assert!(d.capacity() >= 51);
}
#[test]
fn test_swap() {
let mut d: RingBuf<i32> = (0..5).collect();
let mut d: RingBuf<_> = (0..5).collect();
d.pop_front();
d.swap(0, 3);
assert_eq!(d.iter().map(|&x|x).collect::<Vec<i32>>(), vec!(4, 2, 3, 1));
assert_eq!(d.iter().cloned().collect::<Vec<_>>(), vec!(4, 2, 3, 1));
}
#[test]
@ -2018,7 +2010,7 @@ mod tests {
}
{
let b: &[_] = &[&0,&1,&2,&3,&4];
assert_eq!(d.iter().collect::<Vec<&i32>>(), b);
assert_eq!(d.iter().collect::<Vec<_>>(), b);
}
for i in 6..9 {
@ -2026,7 +2018,7 @@ mod tests {
}
{
let b: &[_] = &[&8,&7,&6,&0,&1,&2,&3,&4];
assert_eq!(d.iter().collect::<Vec<&i32>>(), b);
assert_eq!(d.iter().collect::<Vec<_>>(), b);
}
let mut it = d.iter();
@ -2049,14 +2041,14 @@ mod tests {
}
{
let b: &[_] = &[&4,&3,&2,&1,&0];
assert_eq!(d.iter().rev().collect::<Vec<&i32>>(), b);
assert_eq!(d.iter().rev().collect::<Vec<_>>(), b);
}
for i in 6..9 {
d.push_front(i);
}
let b: &[_] = &[&4,&3,&2,&1,&0,&6,&7,&8];
assert_eq!(d.iter().rev().collect::<Vec<&i32>>(), b);
assert_eq!(d.iter().rev().collect::<Vec<_>>(), b);
}
#[test]
@ -2070,8 +2062,8 @@ mod tests {
assert_eq!(d.pop_front(), Some(1));
d.push_back(4);
assert_eq!(d.iter_mut().rev().map(|x| *x).collect::<Vec<i32>>(),
vec!(4, 3, 2));
assert_eq!(d.iter_mut().rev().cloned().collect::<Vec<_>>(),
vec![4, 3, 2]);
}
#[test]
@ -2079,7 +2071,7 @@ mod tests {
let mut d = RingBuf::new();
assert!(d.iter_mut().next().is_none());
for i in 0u..3 {
for i in 0..3 {
d.push_front(i);
}
@ -2102,7 +2094,7 @@ mod tests {
let mut d = RingBuf::new();
assert!(d.iter_mut().rev().next().is_none());
for i in 0u..3 {
for i in 0..3 {
d.push_front(i);
}
@ -2141,7 +2133,7 @@ mod tests {
}
let b = vec![0,1,2,3,4];
assert_eq!(d.into_iter().collect::<Vec<i32>>(), b);
assert_eq!(d.into_iter().collect::<Vec<_>>(), b);
}
// wrapped iter
@ -2155,7 +2147,7 @@ mod tests {
}
let b = vec![8,7,6,0,1,2,3,4];
assert_eq!(d.into_iter().collect::<Vec<i32>>(), b);
assert_eq!(d.into_iter().collect::<Vec<_>>(), b);
}
// partially used
@ -2224,7 +2216,7 @@ mod tests {
// partially used
{
let mut d: RingBuf<i32> = RingBuf::new();
let mut d: RingBuf<_> = RingBuf::new();
for i in 0..5 {
d.push_back(i);
}
@ -2250,12 +2242,12 @@ mod tests {
fn test_from_iter() {
use core::iter;
let v = vec!(1,2,3,4,5,6,7);
let deq: RingBuf<i32> = v.iter().map(|&x| x).collect();
let u: Vec<i32> = deq.iter().map(|&x| x).collect();
let deq: RingBuf<_> = v.iter().cloned().collect();
let u: Vec<_> = deq.iter().cloned().collect();
assert_eq!(u, v);
let seq = iter::count(0u, 2).take(256);
let deq: RingBuf<uint> = seq.collect();
let seq = iter::count(0, 2).take(256);
let deq: RingBuf<_> = seq.collect();
for (i, &x) in deq.iter().enumerate() {
assert_eq!(2*i, x);
}
@ -2269,14 +2261,14 @@ mod tests {
d.push_front(42);
d.push_back(137);
d.push_back(137);
assert_eq!(d.len(), 4u);
assert_eq!(d.len(), 4);
let mut e = d.clone();
assert_eq!(e.len(), 4u);
assert_eq!(e.len(), 4);
while !d.is_empty() {
assert_eq!(d.pop_back(), e.pop_back());
}
assert_eq!(d.len(), 0u);
assert_eq!(e.len(), 0u);
assert_eq!(d.len(), 0);
assert_eq!(e.len(), 0);
}
#[test]
@ -2333,18 +2325,18 @@ mod tests {
#[test]
fn test_show() {
let ringbuf: RingBuf<i32> = (0..10).collect();
let ringbuf: RingBuf<_> = (0..10).collect();
assert_eq!(format!("{:?}", ringbuf), "RingBuf [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]");
let ringbuf: RingBuf<&str> = vec!["just", "one", "test", "more"].iter()
.map(|&s| s)
let ringbuf: RingBuf<_> = vec!["just", "one", "test", "more"].iter()
.cloned()
.collect();
assert_eq!(format!("{:?}", ringbuf), "RingBuf [\"just\", \"one\", \"test\", \"more\"]");
}
#[test]
fn test_drop() {
static mut drops: uint = 0;
static mut drops: i32 = 0;
struct Elem;
impl Drop for Elem {
fn drop(&mut self) {
@ -2364,7 +2356,7 @@ mod tests {
#[test]
fn test_drop_with_pop() {
static mut drops: uint = 0;
static mut drops: i32 = 0;
struct Elem;
impl Drop for Elem {
fn drop(&mut self) {
@ -2388,7 +2380,7 @@ mod tests {
#[test]
fn test_drop_clear() {
static mut drops: uint = 0;
static mut drops: i32 = 0;
struct Elem;
impl Drop for Elem {
fn drop(&mut self) {

File diff suppressed because it is too large

View File

@ -241,7 +241,7 @@ impl<'a> Iterator for Decompositions<'a> {
}
}
fn size_hint(&self) -> (uint, Option<uint>) {
fn size_hint(&self) -> (usize, Option<usize>) {
let (lower, _) = self.iter.size_hint();
(lower, None)
}
@ -367,7 +367,7 @@ impl<'a> Iterator for Utf16Units<'a> {
fn next(&mut self) -> Option<u16> { self.encoder.next() }
#[inline]
fn size_hint(&self) -> (uint, Option<uint>) { self.encoder.size_hint() }
fn size_hint(&self) -> (usize, Option<usize>) { self.encoder.size_hint() }
}
/*
@ -464,7 +464,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
#[inline]
#[unstable(feature = "collections",
reason = "this functionality may be moved to libunicode")]
fn nfd_chars<'a>(&'a self) -> Decompositions<'a> {
fn nfd_chars(&self) -> Decompositions {
Decompositions {
iter: self[].chars(),
buffer: Vec::new(),
@ -478,7 +478,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
#[inline]
#[unstable(feature = "collections",
reason = "this functionality may be moved to libunicode")]
fn nfkd_chars<'a>(&'a self) -> Decompositions<'a> {
fn nfkd_chars(&self) -> Decompositions {
Decompositions {
iter: self[].chars(),
buffer: Vec::new(),
@ -492,7 +492,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
#[inline]
#[unstable(feature = "collections",
reason = "this functionality may be moved to libunicode")]
fn nfc_chars<'a>(&'a self) -> Recompositions<'a> {
fn nfc_chars(&self) -> Recompositions {
Recompositions {
iter: self.nfd_chars(),
state: Composing,
@ -507,7 +507,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
#[inline]
#[unstable(feature = "collections",
reason = "this functionality may be moved to libunicode")]
fn nfkc_chars<'a>(&'a self) -> Recompositions<'a> {
fn nfkc_chars(&self) -> Recompositions {
Recompositions {
iter: self.nfkd_chars(),
state: Composing,
@ -629,7 +629,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
/// assert_eq!(v, vec![""]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn splitn<P: CharEq>(&self, count: uint, pat: P) -> SplitN<P> {
fn splitn<P: CharEq>(&self, count: usize, pat: P) -> SplitN<P> {
core_str::StrExt::splitn(&self[], count, pat)
}
@ -679,7 +679,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
/// assert_eq!(v, vec!["leopard", "tiger", "lionX"]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn rsplitn<P: CharEq>(&self, count: uint, pat: P) -> RSplitN<P> {
fn rsplitn<P: CharEq>(&self, count: usize, pat: P) -> RSplitN<P> {
core_str::StrExt::rsplitn(&self[], count, pat)
}
@ -694,13 +694,13 @@ pub trait StrExt: Index<RangeFull, Output = str> {
/// # Example
///
/// ```rust
/// let v: Vec<(uint, uint)> = "abcXXXabcYYYabc".match_indices("abc").collect();
/// let v: Vec<(usize, usize)> = "abcXXXabcYYYabc".match_indices("abc").collect();
/// assert_eq!(v, vec![(0,3), (6,9), (12,15)]);
///
/// let v: Vec<(uint, uint)> = "1abcabc2".match_indices("abc").collect();
/// let v: Vec<(usize, usize)> = "1abcabc2".match_indices("abc").collect();
/// assert_eq!(v, vec![(1,4), (4,7)]);
///
/// let v: Vec<(uint, uint)> = "ababa".match_indices("aba").collect();
/// let v: Vec<(usize, usize)> = "ababa".match_indices("aba").collect();
/// assert_eq!(v, vec![(0, 3)]); // only the first `aba`
/// ```
#[unstable(feature = "collections",
@ -762,19 +762,19 @@ pub trait StrExt: Index<RangeFull, Output = str> {
#[unstable(feature = "collections",
reason = "use slice notation [a..b] instead")]
#[deprecated(since = "1.0.0", reason = "use slice notation [a..b] instead")]
fn slice(&self, begin: uint, end: uint) -> &str;
fn slice(&self, begin: usize, end: usize) -> &str;
/// Deprecated: use `s[a..]` instead.
#[unstable(feature = "collections",
reason = "use slice notation [a..b] instead")]
#[deprecated(since = "1.0.0", reason = "use slice notation [a..] instead")]
fn slice_from(&self, begin: uint) -> &str;
fn slice_from(&self, begin: usize) -> &str;
/// Deprecated: use `s[..a]` instead.
#[unstable(feature = "collections",
reason = "use slice notation [a..b] instead")]
#[deprecated(since = "1.0.0", reason = "use slice notation [..a] instead")]
fn slice_to(&self, end: uint) -> &str;
fn slice_to(&self, end: usize) -> &str;
/// Returns a slice of the string from the character range
/// [`begin`..`end`).
@ -801,7 +801,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
/// ```
#[unstable(feature = "collections",
reason = "may have yet to prove its worth")]
fn slice_chars(&self, begin: uint, end: uint) -> &str {
fn slice_chars(&self, begin: usize, end: usize) -> &str {
core_str::StrExt::slice_chars(&self[], begin, end)
}
@ -812,7 +812,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
/// Caller must check both UTF-8 character boundaries and the boundaries of
/// the entire slice as well.
#[stable(feature = "rust1", since = "1.0.0")]
unsafe fn slice_unchecked(&self, begin: uint, end: uint) -> &str {
unsafe fn slice_unchecked(&self, begin: usize, end: usize) -> &str {
core_str::StrExt::slice_unchecked(&self[], begin, end)
}
@ -925,7 +925,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
/// ```
#[unstable(feature = "collections",
reason = "naming is uncertain with container conventions")]
fn is_char_boundary(&self, index: uint) -> bool {
fn is_char_boundary(&self, index: usize) -> bool {
core_str::StrExt::is_char_boundary(&self[], index)
}
@ -945,7 +945,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
/// use std::str::CharRange;
///
/// let s = "中华Việt Nam";
/// let mut i = 0u;
/// let mut i = 0;
/// while i < s.len() {
/// let CharRange {ch, next} = s.char_range_at(i);
/// println!("{}: {}", i, ch);
@ -975,7 +975,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
///
/// # Return value
///
/// A record {ch: char, next: uint} containing the char value and the byte
/// A record {ch: char, next: usize} containing the char value and the byte
/// index of the next Unicode character.
///
/// # Panics
@ -984,7 +984,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
/// If `i` is not the index of the beginning of a valid UTF-8 character.
#[unstable(feature = "collections",
reason = "naming is uncertain with container conventions")]
fn char_range_at(&self, start: uint) -> CharRange {
fn char_range_at(&self, start: usize) -> CharRange {
core_str::StrExt::char_range_at(&self[], start)
}
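The doc loop above advances by byte index via `char_range_at`; an equivalent, self-contained formulation using `char_indices`, shown only for comparison:

fn main() {
    let s = "中华Việt Nam";
    // char_indices yields the byte offset of each char directly.
    for (i, ch) in s.char_indices() {
        println!("{}: {}", i, ch);
    }
}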
@ -1000,7 +1000,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
/// If `i` is not an index following a valid UTF-8 character.
#[unstable(feature = "collections",
reason = "naming is uncertain with container conventions")]
fn char_range_at_reverse(&self, start: uint) -> CharRange {
fn char_range_at_reverse(&self, start: usize) -> CharRange {
core_str::StrExt::char_range_at_reverse(&self[], start)
}
@ -1021,7 +1021,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
/// If `i` is not the index of the beginning of a valid UTF-8 character.
#[unstable(feature = "collections",
reason = "naming is uncertain with container conventions")]
fn char_at(&self, i: uint) -> char {
fn char_at(&self, i: usize) -> char {
core_str::StrExt::char_at(&self[], i)
}
@ -1033,7 +1033,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
/// If `i` is not an index following a valid UTF-8 character.
#[unstable(feature = "collections",
reason = "naming is uncertain with container conventions")]
fn char_at_reverse(&self, i: uint) -> char {
fn char_at_reverse(&self, i: usize) -> char {
core_str::StrExt::char_at_reverse(&self[], i)
}
@ -1073,7 +1073,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
/// assert_eq!(s.find(x), None);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn find<P: CharEq>(&self, pat: P) -> Option<uint> {
fn find<P: CharEq>(&self, pat: P) -> Option<usize> {
core_str::StrExt::find(&self[], pat)
}
@ -1101,7 +1101,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
/// assert_eq!(s.rfind(x), None);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn rfind<P: CharEq>(&self, pat: P) -> Option<uint> {
fn rfind<P: CharEq>(&self, pat: P) -> Option<usize> {
core_str::StrExt::rfind(&self[], pat)
}
@ -1126,7 +1126,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
/// ```
#[unstable(feature = "collections",
reason = "might get removed in favor of a more generic find in the future")]
fn find_str(&self, needle: &str) -> Option<uint> {
fn find_str(&self, needle: &str) -> Option<usize> {
core_str::StrExt::find_str(&self[], needle)
}
@ -1170,7 +1170,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
/// ```
#[unstable(feature = "collections",
reason = "awaiting convention about comparability of arbitrary slices")]
fn subslice_offset(&self, inner: &str) -> uint {
fn subslice_offset(&self, inner: &str) -> usize {
core_str::StrExt::subslice_offset(&self[], inner)
}
@ -1202,7 +1202,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
fn len(&self) -> uint {
fn len(&self) -> usize {
core_str::StrExt::len(&self[])
}
@ -1264,8 +1264,8 @@ pub trait StrExt: Index<RangeFull, Output = str> {
/// # Example
///
/// ```rust
/// let gr_inds = "a̐éö̲\r\n".grapheme_indices(true).collect::<Vec<(uint, &str)>>();
/// let b: &[_] = &[(0u, "a̐"), (3, "é"), (6, "ö̲"), (11, "\r\n")];
/// let gr_inds = "a̐éö̲\r\n".grapheme_indices(true).collect::<Vec<(usize, &str)>>();
/// let b: &[_] = &[(0, "a̐"), (3, "é"), (6, "ö̲"), (11, "\r\n")];
/// assert_eq!(gr_inds.as_slice(), b);
/// ```
#[unstable(feature = "collections",
@ -1301,7 +1301,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
/// `is_cjk` = `false`) if the locale is unknown.
#[unstable(feature = "collections",
reason = "this functionality may only be provided by libunicode")]
fn width(&self, is_cjk: bool) -> uint {
fn width(&self, is_cjk: bool) -> usize {
UnicodeStr::width(&self[], is_cjk)
}
@ -1326,15 +1326,15 @@ pub trait StrExt: Index<RangeFull, Output = str> {
#[stable(feature = "rust1", since = "1.0.0")]
impl StrExt for str {
fn slice(&self, begin: uint, end: uint) -> &str {
fn slice(&self, begin: usize, end: usize) -> &str {
&self[begin..end]
}
fn slice_from(&self, begin: uint) -> &str {
fn slice_from(&self, begin: usize) -> &str {
&self[begin..]
}
fn slice_to(&self, end: uint) -> &str {
fn slice_to(&self, end: usize) -> &str {
&self[..end]
}
}
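These impls only forward the deprecated methods to slice notation, which the deprecation messages above recommend directly; the correspondence, as a short check:

fn main() {
    let s = "hello world";
    assert_eq!(&s[0..5], "hello");   // replaces s.slice(0, 5)
    assert_eq!(&s[6..],  "world");   // replaces s.slice_from(6)
    assert_eq!(&s[..5],  "hello");   // replaces s.slice_to(5)
}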
@ -1357,51 +1357,51 @@ mod tests {
#[test]
fn test_len() {
assert_eq!("".len(), 0u);
assert_eq!("hello world".len(), 11u);
assert_eq!("\x63".len(), 1u);
assert_eq!("\u{a2}".len(), 2u);
assert_eq!("\u{3c0}".len(), 2u);
assert_eq!("\u{2620}".len(), 3u);
assert_eq!("\u{1d11e}".len(), 4u);
assert_eq!("".len(), 0);
assert_eq!("hello world".len(), 11);
assert_eq!("\x63".len(), 1);
assert_eq!("\u{a2}".len(), 2);
assert_eq!("\u{3c0}".len(), 2);
assert_eq!("\u{2620}".len(), 3);
assert_eq!("\u{1d11e}".len(), 4);
assert_eq!("".chars().count(), 0u);
assert_eq!("hello world".chars().count(), 11u);
assert_eq!("\x63".chars().count(), 1u);
assert_eq!("\u{a2}".chars().count(), 1u);
assert_eq!("\u{3c0}".chars().count(), 1u);
assert_eq!("\u{2620}".chars().count(), 1u);
assert_eq!("\u{1d11e}".chars().count(), 1u);
assert_eq!("ประเทศไทย中华Việt Nam".chars().count(), 19u);
assert_eq!("".chars().count(), 0);
assert_eq!("hello world".chars().count(), 11);
assert_eq!("\x63".chars().count(), 1);
assert_eq!("\u{a2}".chars().count(), 1);
assert_eq!("\u{3c0}".chars().count(), 1);
assert_eq!("\u{2620}".chars().count(), 1);
assert_eq!("\u{1d11e}".chars().count(), 1);
assert_eq!("ประเทศไทย中华Việt Nam".chars().count(), 19);
assert_eq!("".width(false), 10u);
assert_eq!("".width(true), 10u);
assert_eq!("\0\0\0\0\0".width(false), 0u);
assert_eq!("\0\0\0\0\0".width(true), 0u);
assert_eq!("".width(false), 0u);
assert_eq!("".width(true), 0u);
assert_eq!("\u{2081}\u{2082}\u{2083}\u{2084}".width(false), 4u);
assert_eq!("\u{2081}\u{2082}\u{2083}\u{2084}".width(true), 8u);
assert_eq!("".width(false), 10);
assert_eq!("".width(true), 10);
assert_eq!("\0\0\0\0\0".width(false), 0);
assert_eq!("\0\0\0\0\0".width(true), 0);
assert_eq!("".width(false), 0);
assert_eq!("".width(true), 0);
assert_eq!("\u{2081}\u{2082}\u{2083}\u{2084}".width(false), 4);
assert_eq!("\u{2081}\u{2082}\u{2083}\u{2084}".width(true), 8);
}
#[test]
fn test_find() {
assert_eq!("hello".find('l'), Some(2u));
assert_eq!("hello".find(|c:char| c == 'o'), Some(4u));
assert_eq!("hello".find('l'), Some(2));
assert_eq!("hello".find(|c:char| c == 'o'), Some(4));
assert!("hello".find('x').is_none());
assert!("hello".find(|c:char| c == 'x').is_none());
assert_eq!("ประเทศไทย中华Việt Nam".find('华'), Some(30u));
assert_eq!("ประเทศไทย中华Việt Nam".find(|c: char| c == '华'), Some(30u));
assert_eq!("ประเทศไทย中华Việt Nam".find('华'), Some(30));
assert_eq!("ประเทศไทย中华Việt Nam".find(|c: char| c == '华'), Some(30));
}
#[test]
fn test_rfind() {
assert_eq!("hello".rfind('l'), Some(3u));
assert_eq!("hello".rfind(|c:char| c == 'o'), Some(4u));
assert_eq!("hello".rfind('l'), Some(3));
assert_eq!("hello".rfind(|c:char| c == 'o'), Some(4));
assert!("hello".rfind('x').is_none());
assert!("hello".rfind(|c:char| c == 'x').is_none());
assert_eq!("ประเทศไทย中华Việt Nam".rfind('华'), Some(30u));
assert_eq!("ประเทศไทย中华Việt Nam".rfind(|c: char| c == '华'), Some(30u));
assert_eq!("ประเทศไทย中华Việt Nam".rfind('华'), Some(30));
assert_eq!("ประเทศไทย中华Việt Nam".rfind(|c: char| c == '华'), Some(30));
}
#[test]
@ -1424,37 +1424,37 @@ mod tests {
#[test]
fn test_find_str() {
// byte positions
assert_eq!("".find_str(""), Some(0u));
assert_eq!("".find_str(""), Some(0));
assert!("banana".find_str("apple pie").is_none());
let data = "abcabc";
assert_eq!(data[0u..6u].find_str("ab"), Some(0u));
assert_eq!(data[2u..6u].find_str("ab"), Some(3u - 2u));
assert!(data[2u..4u].find_str("ab").is_none());
assert_eq!(data[0..6].find_str("ab"), Some(0));
assert_eq!(data[2..6].find_str("ab"), Some(3 - 2));
assert!(data[2..4].find_str("ab").is_none());
let string = "ประเทศไทย中华Việt Nam";
let mut data = String::from_str(string);
data.push_str(string);
assert!(data.find_str("ไท华").is_none());
assert_eq!(data[0u..43u].find_str(""), Some(0u));
assert_eq!(data[6u..43u].find_str(""), Some(6u - 6u));
assert_eq!(data[0..43].find_str(""), Some(0));
assert_eq!(data[6..43].find_str(""), Some(6 - 6));
assert_eq!(data[0u..43u].find_str("ประ"), Some( 0u));
assert_eq!(data[0u..43u].find_str("ทศไ"), Some(12u));
assert_eq!(data[0u..43u].find_str("ย中"), Some(24u));
assert_eq!(data[0u..43u].find_str("iệt"), Some(34u));
assert_eq!(data[0u..43u].find_str("Nam"), Some(40u));
assert_eq!(data[0..43].find_str("ประ"), Some( 0));
assert_eq!(data[0..43].find_str("ทศไ"), Some(12));
assert_eq!(data[0..43].find_str("ย中"), Some(24));
assert_eq!(data[0..43].find_str("iệt"), Some(34));
assert_eq!(data[0..43].find_str("Nam"), Some(40));
assert_eq!(data[43u..86u].find_str("ประ"), Some(43u - 43u));
assert_eq!(data[43u..86u].find_str("ทศไ"), Some(55u - 43u));
assert_eq!(data[43u..86u].find_str("ย中"), Some(67u - 43u));
assert_eq!(data[43u..86u].find_str("iệt"), Some(77u - 43u));
assert_eq!(data[43u..86u].find_str("Nam"), Some(83u - 43u));
assert_eq!(data[43..86].find_str("ประ"), Some(43 - 43));
assert_eq!(data[43..86].find_str("ทศไ"), Some(55 - 43));
assert_eq!(data[43..86].find_str("ย中"), Some(67 - 43));
assert_eq!(data[43..86].find_str("iệt"), Some(77 - 43));
assert_eq!(data[43..86].find_str("Nam"), Some(83 - 43));
}
#[test]
fn test_slice_chars() {
fn t(a: &str, b: &str, start: uint) {
fn t(a: &str, b: &str, start: usize) {
assert_eq!(a.slice_chars(start, start + b.chars().count()), b);
}
t("", "", 0);
@ -1527,7 +1527,7 @@ mod tests {
assert_eq!("bc", unsafe {"abc".slice_unchecked(1, 3)});
assert_eq!("", unsafe {"abc".slice_unchecked(1, 1)});
fn a_million_letter_a() -> String {
let mut i = 0u;
let mut i = 0;
let mut rs = String::new();
while i < 100000 {
rs.push_str("aaaaaaaaaa");
@ -1536,7 +1536,7 @@ mod tests {
rs
}
fn half_a_million_letter_a() -> String {
let mut i = 0u;
let mut i = 0;
let mut rs = String::new();
while i < 100000 {
rs.push_str("aaaaa");
@ -1547,7 +1547,7 @@ mod tests {
let letters = a_million_letter_a();
assert!(half_a_million_letter_a() ==
unsafe {String::from_str(letters.slice_unchecked(
0u,
0,
500000))});
}
@ -1644,7 +1644,7 @@ mod tests {
assert_eq!("", data.slice(30, 33));
fn a_million_letter_x() -> String {
let mut i = 0u;
let mut i = 0;
let mut rs = String::new();
while i < 100000 {
rs.push_str("华华华华华华华华华华");
@ -1653,7 +1653,7 @@ mod tests {
rs
}
fn half_a_million_letter_x() -> String {
let mut i = 0u;
let mut i = 0;
let mut rs = String::new();
while i < 100000 {
rs.push_str("华华华华华");
@ -1663,23 +1663,23 @@ mod tests {
}
let letters = a_million_letter_x();
assert!(half_a_million_letter_x() ==
String::from_str(letters.slice(0u, 3u * 500000u)));
String::from_str(letters.slice(0, 3 * 500000)));
}
#[test]
fn test_slice_2() {
let ss = "中华Việt Nam";
assert_eq!("", ss.slice(3u, 6u));
assert_eq!("Việt Nam", ss.slice(6u, 16u));
assert_eq!("", ss.slice(3, 6));
assert_eq!("Việt Nam", ss.slice(6, 16));
assert_eq!("ab", "abc".slice(0u, 2u));
assert_eq!("bc", "abc".slice(1u, 3u));
assert_eq!("", "abc".slice(1u, 1u));
assert_eq!("ab", "abc".slice(0, 2));
assert_eq!("bc", "abc".slice(1, 3));
assert_eq!("", "abc".slice(1, 1));
assert_eq!("", ss.slice(0u, 3u));
assert_eq!("华V", ss.slice(3u, 7u));
assert_eq!("", ss.slice(3u, 3u));
assert_eq!("", ss.slice(0, 3));
assert_eq!("华V", ss.slice(3, 7));
assert_eq!("", ss.slice(3, 3));
/*0: 中
3: 华
6: V
@ -1695,7 +1695,7 @@ mod tests {
#[test]
#[should_fail]
fn test_slice_fail() {
"中华Việt Nam".slice(0u, 2u);
"中华Việt Nam".slice(0, 2);
}
#[test]
@ -1961,9 +1961,9 @@ mod tests {
let v: Vec<u8> = s1.as_bytes().to_vec();
let s2: String = String::from_str(from_utf8(&v).unwrap());
let mut i: uint = 0u;
let n1: uint = s1.len();
let n2: uint = v.len();
let mut i = 0;
let n1 = s1.len();
let n2 = v.len();
assert_eq!(n1, n2);
while i < n1 {
let a: u8 = s1.as_bytes()[i];
@ -1971,7 +1971,7 @@ mod tests {
debug!("{}", a);
debug!("{}", b);
assert_eq!(a, b);
i += 1u;
i += 1;
}
}
@ -2093,7 +2093,7 @@ mod tests {
let v = ['ศ','ไ','ท','ย','中','华','V','i','ệ','t',' ','N','a','m'];
let mut pos = 0;
let mut it = s.chars();
let it = s.chars();
for c in it {
assert_eq!(c, v[pos]);
@ -2108,7 +2108,7 @@ mod tests {
let v = ['m', 'a', 'N', ' ', 't', 'ệ','i','V','华','中','ย','ท','ไ','ศ'];
let mut pos = 0;
let mut it = s.chars().rev();
let it = s.chars().rev();
for c in it {
assert_eq!(c, v[pos]);
@ -2188,7 +2188,7 @@ mod tests {
let v = ['ศ','ไ','ท','ย','中','华','V','i','ệ','t',' ','N','a','m'];
let mut pos = 0;
let mut it = s.char_indices();
let it = s.char_indices();
for c in it {
assert_eq!(c, (p[pos], v[pos]));
@ -2205,7 +2205,7 @@ mod tests {
let v = ['m', 'a', 'N', ' ', 't', 'ệ','i','V','华','中','ย','ท','ไ','ศ'];
let mut pos = 0;
let mut it = s.char_indices().rev();
let it = s.char_indices().rev();
for c in it {
assert_eq!(c, (p[pos], v[pos]));
@ -2725,11 +2725,11 @@ mod tests {
// test the indices iterators
let s = "a̐éö̲\r\n";
let gr_inds = s.grapheme_indices(true).collect::<Vec<(uint, &str)>>();
let b: &[_] = &[(0u, "a̐"), (3, "é"), (6, "ö̲"), (11, "\r\n")];
let gr_inds = s.grapheme_indices(true).collect::<Vec<(usize, &str)>>();
let b: &[_] = &[(0, "a̐"), (3, "é"), (6, "ö̲"), (11, "\r\n")];
assert_eq!(gr_inds, b);
let gr_inds = s.grapheme_indices(true).rev().collect::<Vec<(uint, &str)>>();
let b: &[_] = &[(11, "\r\n"), (6, "ö̲"), (3, "é"), (0u, "a̐")];
let gr_inds = s.grapheme_indices(true).rev().collect::<Vec<(usize, &str)>>();
let b: &[_] = &[(11, "\r\n"), (6, "ö̲"), (3, "é"), (0, "a̐")];
assert_eq!(gr_inds, b);
let mut gr_inds_iter = s.grapheme_indices(true);
{
@ -2785,7 +2785,7 @@ mod tests {
#[test]
fn test_str_container() {
fn sum_len(v: &[&str]) -> uint {
fn sum_len(v: &[&str]) -> usize {
v.iter().map(|x| x.len()).sum()
}

View File

@ -49,7 +49,6 @@ pub struct FromUtf8Error {
/// A possible error value from the `String::from_utf16` function.
#[stable(feature = "rust1", since = "1.0.0")]
#[allow(missing_copy_implementations)]
#[derive(Debug)]
pub struct FromUtf16Error(());
@ -80,7 +79,7 @@ impl String {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn with_capacity(capacity: uint) -> String {
pub fn with_capacity(capacity: usize) -> String {
String {
vec: Vec::with_capacity(capacity),
}
@ -157,10 +156,10 @@ impl String {
static TAG_CONT_U8: u8 = 128u8;
static REPLACEMENT: &'static [u8] = b"\xEF\xBF\xBD"; // U+FFFD in UTF-8
let total = v.len();
fn unsafe_get(xs: &[u8], i: uint) -> u8 {
fn unsafe_get(xs: &[u8], i: usize) -> u8 {
unsafe { *xs.get_unchecked(i) }
}
fn safe_get(xs: &[u8], i: uint, total: uint) -> u8 {
fn safe_get(xs: &[u8], i: usize, total: usize) -> u8 {
if i >= total {
0
} else {
@ -319,7 +318,7 @@ impl String {
/// * We assume that the `Vec` contains valid UTF-8.
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub unsafe fn from_raw_parts(buf: *mut u8, length: uint, capacity: uint) -> String {
pub unsafe fn from_raw_parts(buf: *mut u8, length: usize, capacity: usize) -> String {
String {
vec: Vec::from_raw_parts(buf, length, capacity),
}
@ -375,7 +374,7 @@ impl String {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn capacity(&self) -> uint {
pub fn capacity(&self) -> usize {
self.vec.capacity()
}
@ -385,7 +384,7 @@ impl String {
///
/// # Panics
///
/// Panics if the new capacity overflows `uint`.
/// Panics if the new capacity overflows `usize`.
///
/// # Examples
///
@ -396,7 +395,7 @@ impl String {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn reserve(&mut self, additional: uint) {
pub fn reserve(&mut self, additional: usize) {
self.vec.reserve(additional)
}
@ -410,7 +409,7 @@ impl String {
///
/// # Panics
///
/// Panics if the new capacity overflows `uint`.
/// Panics if the new capacity overflows `usize`.
///
/// # Examples
///
@ -421,7 +420,7 @@ impl String {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn reserve_exact(&mut self, additional: uint) {
pub fn reserve_exact(&mut self, additional: usize) {
self.vec.reserve_exact(additional)
}
@ -469,7 +468,7 @@ impl String {
// Attempt to not use an intermediate buffer by just pushing bytes
// directly onto this string.
let slice = RawSlice {
data: self.vec.as_ptr().offset(cur_len as int),
data: self.vec.as_ptr().offset(cur_len as isize),
len: 4,
};
let used = ch.encode_utf8(mem::transmute(slice)).unwrap_or(0);
@ -488,7 +487,7 @@ impl String {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn as_bytes<'a>(&'a self) -> &'a [u8] {
pub fn as_bytes(&self) -> &[u8] {
&self.vec
}
@ -508,7 +507,7 @@ impl String {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn truncate(&mut self, new_len: uint) {
pub fn truncate(&mut self, new_len: usize) {
assert!(self.is_char_boundary(new_len));
self.vec.truncate(new_len)
}
@ -563,14 +562,14 @@ impl String {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn remove(&mut self, idx: uint) -> char {
pub fn remove(&mut self, idx: usize) -> char {
let len = self.len();
assert!(idx <= len);
let CharRange { ch, next } = self.char_range_at(idx);
unsafe {
ptr::copy_memory(self.vec.as_mut_ptr().offset(idx as int),
self.vec.as_ptr().offset(next as int),
ptr::copy_memory(self.vec.as_mut_ptr().offset(idx as isize),
self.vec.as_ptr().offset(next as isize),
len - next);
self.vec.set_len(len - (next - idx));
}
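`remove` shifts the remaining bytes down by `next - idx`, the UTF-8 width of the removed character, so multi-byte characters are handled the same way as ASCII. A small usage check (plain std string API, nothing specific to this change):

fn main() {
    let mut s = "中a华".to_string();
    assert_eq!(s.remove(0), '中');   // removes the 3 UTF-8 bytes at index 0
    assert_eq!(s, "a华");
    assert_eq!(s.remove(0), 'a');    // a 1-byte char works the same way
    assert_eq!(s, "华");
}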
@ -590,7 +589,7 @@ impl String {
/// this function will panic.
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn insert(&mut self, idx: uint, ch: char) {
pub fn insert(&mut self, idx: usize, ch: char) {
let len = self.len();
assert!(idx <= len);
assert!(self.is_char_boundary(idx));
@ -599,10 +598,10 @@ impl String {
let amt = ch.encode_utf8(&mut bits).unwrap();
unsafe {
ptr::copy_memory(self.vec.as_mut_ptr().offset((idx + amt) as int),
self.vec.as_ptr().offset(idx as int),
ptr::copy_memory(self.vec.as_mut_ptr().offset((idx + amt) as isize),
self.vec.as_ptr().offset(idx as isize),
len - idx);
ptr::copy_memory(self.vec.as_mut_ptr().offset(idx as int),
ptr::copy_memory(self.vec.as_mut_ptr().offset(idx as isize),
bits.as_ptr(),
amt);
self.vec.set_len(len + amt);
@ -627,7 +626,7 @@ impl String {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub unsafe fn as_mut_vec<'a>(&'a mut self) -> &'a mut Vec<u8> {
pub unsafe fn as_mut_vec(&mut self) -> &mut Vec<u8> {
&mut self.vec
}
@ -641,7 +640,7 @@ impl String {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn len(&self) -> uint { self.vec.len() }
pub fn len(&self) -> usize { self.vec.len() }
/// Returns true if the string contains no bytes
///
@ -803,7 +802,7 @@ impl<'a, 'b> PartialEq<CowString<'a>> for &'b str {
impl Str for String {
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
fn as_slice<'a>(&'a self) -> &'a str {
fn as_slice(&self) -> &str {
unsafe { mem::transmute(&*self.vec) }
}
}
@ -854,26 +853,26 @@ impl<'a> Add<&'a str> for String {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl ops::Index<ops::Range<uint>> for String {
impl ops::Index<ops::Range<usize>> for String {
type Output = str;
#[inline]
fn index(&self, index: &ops::Range<uint>) -> &str {
fn index(&self, index: &ops::Range<usize>) -> &str {
&self[][*index]
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl ops::Index<ops::RangeTo<uint>> for String {
impl ops::Index<ops::RangeTo<usize>> for String {
type Output = str;
#[inline]
fn index(&self, index: &ops::RangeTo<uint>) -> &str {
fn index(&self, index: &ops::RangeTo<usize>) -> &str {
&self[][*index]
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl ops::Index<ops::RangeFrom<uint>> for String {
impl ops::Index<ops::RangeFrom<usize>> for String {
type Output = str;
#[inline]
fn index(&self, index: &ops::RangeFrom<uint>) -> &str {
fn index(&self, index: &ops::RangeFrom<usize>) -> &str {
&self[][*index]
}
}
@ -891,7 +890,7 @@ impl ops::Deref for String {
type Target = str;
#[inline]
fn deref<'a>(&'a self) -> &'a str {
fn deref(&self) -> &str {
unsafe { mem::transmute(&self.vec[]) }
}
}
@ -1298,7 +1297,7 @@ mod tests {
fn test_simple_types() {
assert_eq!(1.to_string(), "1");
assert_eq!((-1).to_string(), "-1");
assert_eq!(200u.to_string(), "200");
assert_eq!(200.to_string(), "200");
assert_eq!(2u8.to_string(), "2");
assert_eq!(true.to_string(), "true");
assert_eq!(false.to_string(), "false");
@ -1307,7 +1306,7 @@ mod tests {
#[test]
fn test_vectors() {
let x: Vec<int> = vec![];
let x: Vec<i32> = vec![];
assert_eq!(format!("{:?}", x), "[]");
assert_eq!(format!("{:?}", vec![1]), "[1]");
assert_eq!(format!("{:?}", vec![1, 2, 3]), "[1, 2, 3]");

View File

@ -66,7 +66,7 @@ use core::ops;
use core::ptr;
use core::raw::Slice as RawSlice;
use core::slice;
use core::uint;
use core::usize;
/// A growable list type, written `Vec<T>` but pronounced 'vector.'
///
@ -138,8 +138,8 @@ use core::uint;
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Vec<T> {
ptr: NonZero<*mut T>,
len: uint,
cap: uint,
len: usize,
cap: usize,
}
unsafe impl<T: Send> Send for Vec<T> { }
@ -196,9 +196,9 @@ impl<T> Vec<T> {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn with_capacity(capacity: uint) -> Vec<T> {
pub fn with_capacity(capacity: usize) -> Vec<T> {
if mem::size_of::<T>() == 0 {
Vec { ptr: unsafe { NonZero::new(EMPTY as *mut T) }, len: 0, cap: uint::MAX }
Vec { ptr: unsafe { NonZero::new(EMPTY as *mut T) }, len: 0, cap: usize::MAX }
} else if capacity == 0 {
Vec::new()
} else {
@ -234,7 +234,7 @@ impl<T> Vec<T> {
/// mem::forget(v);
///
/// // Overwrite memory with 4, 5, 6
/// for i in 0..len as int {
/// for i in 0..len as isize {
/// ptr::write(p.offset(i), 4 + i);
/// }
///
@ -245,8 +245,8 @@ impl<T> Vec<T> {
/// }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub unsafe fn from_raw_parts(ptr: *mut T, length: uint,
capacity: uint) -> Vec<T> {
pub unsafe fn from_raw_parts(ptr: *mut T, length: usize,
capacity: usize) -> Vec<T> {
Vec { ptr: NonZero::new(ptr), len: length, cap: capacity }
}
@ -258,7 +258,7 @@ impl<T> Vec<T> {
#[inline]
#[unstable(feature = "collections",
reason = "may be better expressed via composition")]
pub unsafe fn from_raw_buf(ptr: *const T, elts: uint) -> Vec<T> {
pub unsafe fn from_raw_buf(ptr: *const T, elts: usize) -> Vec<T> {
let mut dst = Vec::with_capacity(elts);
dst.set_len(elts);
ptr::copy_nonoverlapping_memory(dst.as_mut_ptr(), ptr, elts);
@ -276,7 +276,7 @@ impl<T> Vec<T> {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn capacity(&self) -> uint {
pub fn capacity(&self) -> usize {
self.cap
}
@ -285,7 +285,7 @@ impl<T> Vec<T> {
///
/// # Panics
///
/// Panics if the new capacity overflows `uint`.
/// Panics if the new capacity overflows `usize`.
///
/// # Examples
///
@ -295,9 +295,9 @@ impl<T> Vec<T> {
/// assert!(vec.capacity() >= 11);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn reserve(&mut self, additional: uint) {
pub fn reserve(&mut self, additional: usize) {
if self.cap - self.len < additional {
let err_msg = "Vec::reserve: `uint` overflow";
let err_msg = "Vec::reserve: `usize` overflow";
let new_cap = self.len.checked_add(additional).expect(err_msg)
.checked_next_power_of_two().expect(err_msg);
self.grow_capacity(new_cap);
@ -314,7 +314,7 @@ impl<T> Vec<T> {
///
/// # Panics
///
/// Panics if the new capacity overflows `uint`.
/// Panics if the new capacity overflows `usize`.
///
/// # Examples
///
@ -324,10 +324,10 @@ impl<T> Vec<T> {
/// assert!(vec.capacity() >= 11);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn reserve_exact(&mut self, additional: uint) {
pub fn reserve_exact(&mut self, additional: usize) {
if self.cap - self.len < additional {
match self.len.checked_add(additional) {
None => panic!("Vec::reserve: `uint` overflow"),
None => panic!("Vec::reserve: `usize` overflow"),
Some(new_cap) => self.grow_capacity(new_cap)
}
}
@ -401,7 +401,7 @@ impl<T> Vec<T> {
/// assert_eq!(vec, vec![1, 2]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn truncate(&mut self, len: uint) {
pub fn truncate(&mut self, len: usize) {
unsafe {
// drop any extra elements
while len < self.len {
@ -425,7 +425,7 @@ impl<T> Vec<T> {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn as_mut_slice<'a>(&'a mut self) -> &'a mut [T] {
pub fn as_mut_slice(&mut self) -> &mut [T] {
unsafe {
mem::transmute(RawSlice {
data: *self.ptr,
@ -455,9 +455,9 @@ impl<T> Vec<T> {
let cap = self.cap;
let begin = ptr as *const T;
let end = if mem::size_of::<T>() == 0 {
(ptr as uint + self.len()) as *const T
(ptr as usize + self.len()) as *const T
} else {
ptr.offset(self.len() as int) as *const T
ptr.offset(self.len() as isize) as *const T
};
mem::forget(self);
IntoIter { allocation: ptr, cap: cap, ptr: begin, end: end }
@ -473,14 +473,14 @@ impl<T> Vec<T> {
/// # Examples
///
/// ```
/// let mut v = vec![1u, 2, 3, 4];
/// let mut v = vec![1, 2, 3, 4];
/// unsafe {
/// v.set_len(1);
/// }
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub unsafe fn set_len(&mut self, len: uint) {
pub unsafe fn set_len(&mut self, len: usize) {
self.len = len;
}
@ -506,7 +506,7 @@ impl<T> Vec<T> {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn swap_remove(&mut self, index: uint) -> T {
pub fn swap_remove(&mut self, index: usize) -> T {
let length = self.len();
self.swap(index, length - 1);
self.pop().unwrap()
@ -530,7 +530,7 @@ impl<T> Vec<T> {
/// assert_eq!(vec, vec![1, 4, 2, 3, 5]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn insert(&mut self, index: uint, element: T) {
pub fn insert(&mut self, index: usize, element: T) {
let len = self.len();
assert!(index <= len);
// space for the new element
@ -539,7 +539,7 @@ impl<T> Vec<T> {
unsafe { // infallible
// The spot to put the new value
{
let p = self.as_mut_ptr().offset(index as int);
let p = self.as_mut_ptr().offset(index as isize);
// Shift everything over to make space. (Duplicating the
// `index`th element into two consecutive places.)
ptr::copy_memory(p.offset(1), &*p, len - index);
@ -566,14 +566,14 @@ impl<T> Vec<T> {
/// assert_eq!(v, vec![1, 3]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn remove(&mut self, index: uint) -> T {
pub fn remove(&mut self, index: usize) -> T {
let len = self.len();
assert!(index < len);
unsafe { // infallible
let ret;
{
// the place we are taking from.
let ptr = self.as_mut_ptr().offset(index as int);
let ptr = self.as_mut_ptr().offset(index as isize);
// copy it out, unsafely having a copy of the value on
// the stack and in the vector at the same time.
ret = ptr::read(ptr);
@ -602,11 +602,11 @@ impl<T> Vec<T> {
#[stable(feature = "rust1", since = "1.0.0")]
pub fn retain<F>(&mut self, mut f: F) where F: FnMut(&T) -> bool {
let len = self.len();
let mut del = 0u;
let mut del = 0;
{
let v = &mut **self;
for i in 0u..len {
for i in 0..len {
if !f(&v[i]) {
del += 1;
} else if del > 0 {
@ -623,7 +623,7 @@ impl<T> Vec<T> {
///
/// # Panics
///
/// Panics if the number of elements in the vector overflows a `uint`.
/// Panics if the number of elements in the vector overflows a `usize`.
///
/// # Examples
///
@ -655,7 +655,7 @@ impl<T> Vec<T> {
}
unsafe {
let end = (*self.ptr).offset(self.len as int);
let end = (*self.ptr).offset(self.len as isize);
ptr::write(&mut *end, value);
self.len += 1;
}
@ -687,7 +687,7 @@ impl<T> Vec<T> {
///
/// # Panics
///
/// Panics if the number of elements in the vector overflows a `uint`.
/// Panics if the number of elements in the vector overflows a `usize`.
///
/// # Examples
/// ```rust
@ -737,13 +737,13 @@ impl<T> Vec<T> {
#[inline]
#[unstable(feature = "collections",
reason = "matches collection reform specification, waiting for dust to settle")]
pub fn drain<'a>(&'a mut self) -> Drain<'a, T> {
pub fn drain(&mut self) -> Drain<T> {
unsafe {
let begin = *self.ptr as *const T;
let end = if mem::size_of::<T>() == 0 {
(*self.ptr as uint + self.len()) as *const T
(*self.ptr as usize + self.len()) as *const T
} else {
(*self.ptr).offset(self.len() as int) as *const T
(*self.ptr).offset(self.len() as isize) as *const T
};
self.set_len(0);
Drain {
@ -781,7 +781,7 @@ impl<T> Vec<T> {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn len(&self) -> uint { self.len }
pub fn len(&self) -> usize { self.len }
/// Returns `true` if the vector contains no elements.
///
@ -808,7 +808,7 @@ impl<T> Vec<T> {
/// # Examples
///
/// ```
/// let v = vec![0u, 1, 2];
/// let v = vec![0, 1, 2];
/// let w = v.map_in_place(|i| i + 3);
/// assert_eq!(w.as_slice(), [3, 4, 5].as_slice());
///
@ -835,7 +835,7 @@ impl<T> Vec<T> {
// types are passed to the allocator by `Vec`.
assert!(mem::min_align_of::<T>() == mem::min_align_of::<U>());
// This `as int` cast is safe, because the size of the elements of the
// This `as isize` cast is safe, because the size of the elements of the
// vector is not 0, and:
//
// 1) If the size of the elements in the vector is 1, the `int` may
@ -850,9 +850,9 @@ impl<T> Vec<T> {
// After `array.offset(offset)`: 0x9.
// (0x1 + 0x8 = 0x1 - 0x8)
//
// 2) If the size of the elements in the vector is >1, the `uint` ->
// 2) If the size of the elements in the vector is >1, the `usize` ->
// `int` conversion can't overflow.
let offset = vec.len() as int;
let offset = vec.len() as isize;
let start = vec.as_mut_ptr();
let mut pv = PartialVecNonZeroSized {
@ -977,8 +977,8 @@ impl<T> Vec<T> {
let u = f(t);
// Forget the `U` and increment `num_u`. This increment
// cannot overflow the `uint` as we only do this for a
// number of times that fits into a `uint` (and start with
// cannot overflow the `usize` as we only do this for a
// number of times that fits into a `usize` (and start with
// `0`). Again, we should not panic between these steps.
mem::forget(u);
pv.num_u += 1;
@ -1052,7 +1052,7 @@ impl<T: Clone> Vec<T> {
/// ```
#[unstable(feature = "collections",
reason = "matches collection reform specification; waiting for dust to settle")]
pub fn resize(&mut self, new_len: uint, value: T) {
pub fn resize(&mut self, new_len: usize, value: T) {
let len = self.len();
if new_len > len {
@ -1179,8 +1179,8 @@ impl<T: PartialEq> Vec<T> {
let mut w = 1;
while r < ln {
let p_r = p.offset(r as int);
let p_wm1 = p.offset((w - 1) as int);
let p_r = p.offset(r as isize);
let p_wm1 = p.offset((w - 1) as isize);
if *p_r != *p_wm1 {
if r != w {
let p_w = p_wm1.offset(1);
@ -1205,7 +1205,7 @@ impl<T> Vec<T> {
///
/// If the capacity for `self` is already equal to or greater than the
/// requested capacity, then no action is taken.
fn grow_capacity(&mut self, capacity: uint) {
fn grow_capacity(&mut self, capacity: usize) {
if mem::size_of::<T>() == 0 { return }
if capacity > self.cap {
@ -1223,7 +1223,7 @@ impl<T> Vec<T> {
// FIXME: #13996: need a way to mark the return value as `noalias`
#[inline(never)]
unsafe fn alloc_or_realloc<T>(ptr: *mut T, old_size: uint, size: uint) -> *mut T {
unsafe fn alloc_or_realloc<T>(ptr: *mut T, old_size: usize, size: usize) -> *mut T {
if old_size == 0 {
allocate(size, mem::min_align_of::<T>()) as *mut T
} else {
@ -1232,7 +1232,7 @@ unsafe fn alloc_or_realloc<T>(ptr: *mut T, old_size: uint, size: uint) -> *mut T
}
#[inline]
unsafe fn dealloc<T>(ptr: *mut T, len: uint) {
unsafe fn dealloc<T>(ptr: *mut T, len: usize) {
if mem::size_of::<T>() != 0 {
deallocate(ptr as *mut u8,
len * mem::size_of::<T>(),
@ -1274,22 +1274,22 @@ impl<S: hash::Writer + hash::Hasher, T: Hash<S>> Hash<S> for Vec<T> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Index<uint> for Vec<T> {
impl<T> Index<usize> for Vec<T> {
type Output = T;
#[inline]
fn index<'a>(&'a self, index: &uint) -> &'a T {
fn index(&self, index: &usize) -> &T {
// NB built-in indexing via `&[T]`
&(**self)[*index]
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> IndexMut<uint> for Vec<T> {
impl<T> IndexMut<usize> for Vec<T> {
type Output = T;
#[inline]
fn index_mut<'a>(&'a mut self, index: &uint) -> &'a mut T {
fn index_mut(&mut self, index: &usize) -> &mut T {
// NB built-in indexing via `&mut [T]`
&mut (**self)[*index]
}
@ -1297,26 +1297,26 @@ impl<T> IndexMut<uint> for Vec<T> {
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ops::Index<ops::Range<uint>> for Vec<T> {
impl<T> ops::Index<ops::Range<usize>> for Vec<T> {
type Output = [T];
#[inline]
fn index(&self, index: &ops::Range<uint>) -> &[T] {
fn index(&self, index: &ops::Range<usize>) -> &[T] {
Index::index(&**self, index)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ops::Index<ops::RangeTo<uint>> for Vec<T> {
impl<T> ops::Index<ops::RangeTo<usize>> for Vec<T> {
type Output = [T];
#[inline]
fn index(&self, index: &ops::RangeTo<uint>) -> &[T] {
fn index(&self, index: &ops::RangeTo<usize>) -> &[T] {
Index::index(&**self, index)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ops::Index<ops::RangeFrom<uint>> for Vec<T> {
impl<T> ops::Index<ops::RangeFrom<usize>> for Vec<T> {
type Output = [T];
#[inline]
fn index(&self, index: &ops::RangeFrom<uint>) -> &[T] {
fn index(&self, index: &ops::RangeFrom<usize>) -> &[T] {
Index::index(&**self, index)
}
}
@ -1330,26 +1330,26 @@ impl<T> ops::Index<ops::RangeFull> for Vec<T> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ops::IndexMut<ops::Range<uint>> for Vec<T> {
impl<T> ops::IndexMut<ops::Range<usize>> for Vec<T> {
type Output = [T];
#[inline]
fn index_mut(&mut self, index: &ops::Range<uint>) -> &mut [T] {
fn index_mut(&mut self, index: &ops::Range<usize>) -> &mut [T] {
IndexMut::index_mut(&mut **self, index)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ops::IndexMut<ops::RangeTo<uint>> for Vec<T> {
impl<T> ops::IndexMut<ops::RangeTo<usize>> for Vec<T> {
type Output = [T];
#[inline]
fn index_mut(&mut self, index: &ops::RangeTo<uint>) -> &mut [T] {
fn index_mut(&mut self, index: &ops::RangeTo<usize>) -> &mut [T] {
IndexMut::index_mut(&mut **self, index)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ops::IndexMut<ops::RangeFrom<uint>> for Vec<T> {
impl<T> ops::IndexMut<ops::RangeFrom<usize>> for Vec<T> {
type Output = [T];
#[inline]
fn index_mut(&mut self, index: &ops::RangeFrom<uint>) -> &mut [T] {
fn index_mut(&mut self, index: &ops::RangeFrom<usize>) -> &mut [T] {
IndexMut::index_mut(&mut **self, index)
}
}
@ -1366,12 +1366,12 @@ impl<T> ops::IndexMut<ops::RangeFull> for Vec<T> {
impl<T> ops::Deref for Vec<T> {
type Target = [T];
fn deref<'a>(&'a self) -> &'a [T] { self.as_slice() }
fn deref(&self) -> &[T] { self.as_slice() }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ops::DerefMut for Vec<T> {
fn deref_mut<'a>(&'a mut self) -> &'a mut [T] { self.as_mut_slice() }
fn deref_mut(&mut self) -> &mut [T] { self.as_mut_slice() }
}
#[stable(feature = "rust1", since = "1.0.0")]
@ -1519,7 +1519,7 @@ impl<T> AsSlice<T> for Vec<T> {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
fn as_slice<'a>(&'a self) -> &'a [T] {
fn as_slice(&self) -> &[T] {
unsafe {
mem::transmute(RawSlice {
data: *self.ptr,
@ -1609,7 +1609,7 @@ impl<'a, T> IntoCow<'a, Vec<T>, [T]> for &'a [T] where T: Clone {
#[stable(feature = "rust1", since = "1.0.0")]
pub struct IntoIter<T> {
allocation: *mut T, // the block of memory allocated for the vector
cap: uint, // the capacity of the vector
cap: usize, // the capacity of the vector
ptr: *const T,
end: *const T
}
@ -1636,7 +1636,7 @@ impl<T> Iterator for IntoIter<T> {
type Item = T;
#[inline]
fn next<'a>(&'a mut self) -> Option<T> {
fn next(&mut self) -> Option<T> {
unsafe {
if self.ptr == self.end {
None
@ -1645,10 +1645,10 @@ impl<T> Iterator for IntoIter<T> {
// purposefully don't use 'ptr.offset' because for
// vectors with 0-size elements this would return the
// same pointer.
self.ptr = mem::transmute(self.ptr as uint + 1);
self.ptr = mem::transmute(self.ptr as usize + 1);
// Use a non-null pointer value
Some(ptr::read(mem::transmute(1u)))
Some(ptr::read(EMPTY as *mut T))
} else {
let old = self.ptr;
self.ptr = self.ptr.offset(1);
@ -1660,8 +1660,8 @@ impl<T> Iterator for IntoIter<T> {
}
#[inline]
fn size_hint(&self) -> (uint, Option<uint>) {
let diff = (self.end as uint) - (self.ptr as uint);
fn size_hint(&self) -> (usize, Option<usize>) {
let diff = (self.end as usize) - (self.ptr as usize);
let size = mem::size_of::<T>();
let exact = diff / (if size == 0 {1} else {size});
(exact, Some(exact))
@ -1671,17 +1671,17 @@ impl<T> Iterator for IntoIter<T> {
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> DoubleEndedIterator for IntoIter<T> {
#[inline]
fn next_back<'a>(&'a mut self) -> Option<T> {
fn next_back(&mut self) -> Option<T> {
unsafe {
if self.end == self.ptr {
None
} else {
if mem::size_of::<T>() == 0 {
// See above for why 'ptr.offset' isn't used
self.end = mem::transmute(self.end as uint - 1);
self.end = mem::transmute(self.end as usize - 1);
// Use a non-null pointer value
Some(ptr::read(mem::transmute(1u)))
Some(ptr::read(EMPTY as *mut T))
} else {
self.end = self.end.offset(-1);
@ -1733,10 +1733,10 @@ impl<'a, T> Iterator for Drain<'a, T> {
// purposefully don't use 'ptr.offset' because for
// vectors with 0-size elements this would return the
// same pointer.
self.ptr = mem::transmute(self.ptr as uint + 1);
self.ptr = mem::transmute(self.ptr as usize + 1);
// Use a non-null pointer value
Some(ptr::read(mem::transmute(1u)))
Some(ptr::read(EMPTY as *mut T))
} else {
let old = self.ptr;
self.ptr = self.ptr.offset(1);
@ -1748,8 +1748,8 @@ impl<'a, T> Iterator for Drain<'a, T> {
}
#[inline]
fn size_hint(&self) -> (uint, Option<uint>) {
let diff = (self.end as uint) - (self.ptr as uint);
fn size_hint(&self) -> (usize, Option<usize>) {
let diff = (self.end as usize) - (self.ptr as usize);
let size = mem::size_of::<T>();
let exact = diff / (if size == 0 {1} else {size});
(exact, Some(exact))
@ -1766,10 +1766,10 @@ impl<'a, T> DoubleEndedIterator for Drain<'a, T> {
} else {
if mem::size_of::<T>() == 0 {
// See above for why 'ptr.offset' isn't used
self.end = mem::transmute(self.end as uint - 1);
self.end = mem::transmute(self.end as usize - 1);
// Use a non-null pointer value
Some(ptr::read(mem::transmute(1u)))
Some(ptr::read(EMPTY as *mut T))
} else {
self.end = self.end.offset(-1);
@ -1862,8 +1862,8 @@ struct PartialVecNonZeroSized<T,U> {
/// When the destructor of this struct runs, all `num_t` `T`s and `num_u` `U`s
/// are destructed.
struct PartialVecZeroSized<T,U> {
num_t: uint,
num_u: uint,
num_t: usize,
num_u: usize,
marker_t: InvariantType<T>,
marker_u: InvariantType<U>,
}
@ -1920,7 +1920,7 @@ mod tests {
use super::as_vec;
struct DropCounter<'a> {
count: &'a mut int
count: &'a mut u32
}
#[unsafe_destructor]
@ -1949,7 +1949,7 @@ mod tests {
#[test]
fn test_small_vec_struct() {
assert!(size_of::<Vec<u8>>() == size_of::<uint>() * 3);
assert!(size_of::<Vec<u8>>() == size_of::<usize>() * 3);
}
#[test]
@ -2020,7 +2020,7 @@ mod tests {
#[test]
fn test_slice_from_mut() {
let mut values = vec![1u8,2,3,4,5];
let mut values = vec![1, 2, 3, 4, 5];
{
let slice = &mut values[2 ..];
assert!(slice == [3, 4, 5]);
@ -2034,7 +2034,7 @@ mod tests {
#[test]
fn test_slice_to_mut() {
let mut values = vec![1u8,2,3,4,5];
let mut values = vec![1, 2, 3, 4, 5];
{
let slice = &mut values[.. 2];
assert!(slice == [1, 2]);
@ -2048,7 +2048,7 @@ mod tests {
#[test]
fn test_split_at_mut() {
let mut values = vec![1u8,2,3,4,5];
let mut values = vec![1, 2, 3, 4, 5];
{
let (left, right) = values.split_at_mut(2);
{
@ -2068,12 +2068,12 @@ mod tests {
}
}
assert!(values == vec![2u8, 3, 5, 6, 7]);
assert!(values == vec![2, 3, 5, 6, 7]);
}
#[test]
fn test_clone() {
let v: Vec<int> = vec!();
let v: Vec<i32> = vec![];
let w = vec!(1, 2, 3);
assert_eq!(v, v.clone());
@ -2108,9 +2108,9 @@ mod tests {
#[test]
fn test_retain() {
let mut vec = vec![1u, 2, 3, 4];
let mut vec = vec![1, 2, 3, 4];
vec.retain(|&x| x % 2 == 0);
assert!(vec == vec![2u, 4]);
assert!(vec == vec![2, 4]);
}
#[test]
@ -2146,10 +2146,10 @@ mod tests {
#[test]
fn test_partition() {
assert_eq!(vec![].into_iter().partition(|x: &int| *x < 3), (vec![], vec![]));
assert_eq!(vec![1, 2, 3].into_iter().partition(|x: &int| *x < 4), (vec![1, 2, 3], vec![]));
assert_eq!(vec![1, 2, 3].into_iter().partition(|x: &int| *x < 2), (vec![1], vec![2, 3]));
assert_eq!(vec![1, 2, 3].into_iter().partition(|x: &int| *x < 0), (vec![], vec![1, 2, 3]));
assert_eq!(vec![].into_iter().partition(|x: &i32| *x < 3), (vec![], vec![]));
assert_eq!(vec![1, 2, 3].into_iter().partition(|x| *x < 4), (vec![1, 2, 3], vec![]));
assert_eq!(vec![1, 2, 3].into_iter().partition(|x| *x < 2), (vec![1], vec![2, 3]));
assert_eq!(vec![1, 2, 3].into_iter().partition(|x| *x < 0), (vec![], vec![1, 2, 3]));
}
#[test]
@ -2169,21 +2169,21 @@ mod tests {
// Test on-stack copy-from-buf.
let a = [1, 2, 3];
let ptr = a.as_ptr();
let b = Vec::from_raw_buf(ptr, 3u);
let b = Vec::from_raw_buf(ptr, 3);
assert_eq!(b, vec![1, 2, 3]);
// Test on-heap copy-from-buf.
let c = vec![1, 2, 3, 4, 5];
let ptr = c.as_ptr();
let d = Vec::from_raw_buf(ptr, 5u);
let d = Vec::from_raw_buf(ptr, 5);
assert_eq!(d, vec![1, 2, 3, 4, 5]);
}
}
#[test]
fn test_vec_truncate_drop() {
static mut drops: uint = 0;
struct Elem(int);
static mut drops: u32 = 0;
struct Elem(i32);
impl Drop for Elem {
fn drop(&mut self) {
unsafe { drops += 1; }
@ -2201,7 +2201,7 @@ mod tests {
#[test]
#[should_fail]
fn test_vec_truncate_fail() {
struct BadElem(int);
struct BadElem(i32);
impl Drop for BadElem {
fn drop(&mut self) {
let BadElem(ref mut x) = *self;
@ -2217,62 +2217,62 @@ mod tests {
#[test]
fn test_index() {
let vec = vec!(1, 2, 3);
let vec = vec![1, 2, 3];
assert!(vec[1] == 2);
}
#[test]
#[should_fail]
fn test_index_out_of_bounds() {
let vec = vec!(1, 2, 3);
let vec = vec![1, 2, 3];
let _ = vec[3];
}
#[test]
#[should_fail]
fn test_slice_out_of_bounds_1() {
let x: Vec<int> = vec![1, 2, 3, 4, 5];
let x = vec![1, 2, 3, 4, 5];
&x[-1..];
}
#[test]
#[should_fail]
fn test_slice_out_of_bounds_2() {
let x: Vec<int> = vec![1, 2, 3, 4, 5];
let x = vec![1, 2, 3, 4, 5];
&x[..6];
}
#[test]
#[should_fail]
fn test_slice_out_of_bounds_3() {
let x: Vec<int> = vec![1, 2, 3, 4, 5];
let x = vec![1, 2, 3, 4, 5];
&x[-1..4];
}
#[test]
#[should_fail]
fn test_slice_out_of_bounds_4() {
let x: Vec<int> = vec![1, 2, 3, 4, 5];
let x = vec![1, 2, 3, 4, 5];
&x[1..6];
}
#[test]
#[should_fail]
fn test_slice_out_of_bounds_5() {
let x: Vec<int> = vec![1, 2, 3, 4, 5];
let x = vec![1, 2, 3, 4, 5];
&x[3..2];
}
#[test]
#[should_fail]
fn test_swap_remove_empty() {
let mut vec: Vec<uint> = vec!();
let mut vec = Vec::<i32>::new();
vec.swap_remove(0);
}
#[test]
fn test_move_iter_unwrap() {
let mut vec: Vec<uint> = Vec::with_capacity(7);
let mut vec = Vec::with_capacity(7);
vec.push(1);
vec.push(2);
let ptr = vec.as_ptr();
@ -2285,14 +2285,14 @@ mod tests {
#[test]
#[should_fail]
fn test_map_in_place_incompatible_types_fail() {
let v = vec![0u, 1, 2];
let v = vec![0, 1, 2];
v.map_in_place(|_| ());
}
#[test]
fn test_map_in_place() {
let v = vec![0u, 1, 2];
assert_eq!(v.map_in_place(|i: uint| i as int - 1), [-1, 0, 1]);
let v = vec![0, 1, 2];
assert_eq!(v.map_in_place(|i: u32| i as i32 - 1), [-1, 0, 1]);
}
#[test]
@ -2318,7 +2318,7 @@ mod tests {
DROP_COUNTER.fetch_add(1, Ordering::Relaxed);
}
}
const NUM_ELEMENTS: uint = 2;
const NUM_ELEMENTS: usize = 2;
static DROP_COUNTER: AtomicUsize = ATOMIC_USIZE_INIT;
let v = repeat(Nothing).take(NUM_ELEMENTS).collect::<Vec<_>>();
@ -2334,7 +2334,7 @@ mod tests {
#[test]
fn test_move_items() {
let vec = vec![1, 2, 3];
let mut vec2 : Vec<i32> = vec![];
let mut vec2 = vec![];
for i in vec {
vec2.push(i);
}
@ -2344,7 +2344,7 @@ mod tests {
#[test]
fn test_move_items_reverse() {
let vec = vec![1, 2, 3];
let mut vec2 : Vec<i32> = vec![];
let mut vec2 = vec![];
for i in vec.into_iter().rev() {
vec2.push(i);
}
@ -2354,7 +2354,7 @@ mod tests {
#[test]
fn test_move_items_zero_sized() {
let vec = vec![(), (), ()];
let mut vec2 : Vec<()> = vec![];
let mut vec2 = vec![];
for i in vec {
vec2.push(i);
}
@ -2364,7 +2364,7 @@ mod tests {
#[test]
fn test_drain_items() {
let mut vec = vec![1, 2, 3];
let mut vec2: Vec<i32> = vec![];
let mut vec2 = vec![];
for i in vec.drain() {
vec2.push(i);
}
@ -2375,18 +2375,18 @@ mod tests {
#[test]
fn test_drain_items_reverse() {
let mut vec = vec![1, 2, 3];
let mut vec2: Vec<i32> = vec![];
let mut vec2 = vec![];
for i in vec.drain().rev() {
vec2.push(i);
}
assert_eq!(vec, []);
assert_eq!(vec2, [ 3, 2, 1 ]);
assert_eq!(vec2, [3, 2, 1]);
}
#[test]
fn test_drain_items_zero_sized() {
let mut vec = vec![(), (), ()];
let mut vec2: Vec<()> = vec![];
let mut vec2 = vec![];
for i in vec.drain() {
vec2.push(i);
}
@ -2396,9 +2396,9 @@ mod tests {
#[test]
fn test_into_boxed_slice() {
let xs = vec![1u, 2, 3];
let xs = vec![1, 2, 3];
let ys = xs.into_boxed_slice();
assert_eq!(ys, [1u, 2, 3]);
assert_eq!(ys, [1, 2, 3]);
}
#[test]
@ -2421,17 +2421,17 @@ mod tests {
#[bench]
fn bench_new(b: &mut Bencher) {
b.iter(|| {
let v: Vec<uint> = Vec::new();
let v: Vec<u32> = Vec::new();
assert_eq!(v.len(), 0);
assert_eq!(v.capacity(), 0);
})
}
fn do_bench_with_capacity(b: &mut Bencher, src_len: uint) {
fn do_bench_with_capacity(b: &mut Bencher, src_len: usize) {
b.bytes = src_len as u64;
b.iter(|| {
let v: Vec<uint> = Vec::with_capacity(src_len);
let v: Vec<u32> = Vec::with_capacity(src_len);
assert_eq!(v.len(), 0);
assert_eq!(v.capacity(), src_len);
})
@ -2457,7 +2457,7 @@ mod tests {
do_bench_with_capacity(b, 1000)
}
fn do_bench_from_fn(b: &mut Bencher, src_len: uint) {
fn do_bench_from_fn(b: &mut Bencher, src_len: usize) {
b.bytes = src_len as u64;
b.iter(|| {
@ -2487,11 +2487,11 @@ mod tests {
do_bench_from_fn(b, 1000)
}
fn do_bench_from_elem(b: &mut Bencher, src_len: uint) {
fn do_bench_from_elem(b: &mut Bencher, src_len: usize) {
b.bytes = src_len as u64;
b.iter(|| {
let dst: Vec<uint> = repeat(5).take(src_len).collect();
let dst: Vec<usize> = repeat(5).take(src_len).collect();
assert_eq!(dst.len(), src_len);
assert!(dst.iter().all(|x| *x == 5));
})
@ -2517,8 +2517,8 @@ mod tests {
do_bench_from_elem(b, 1000)
}
fn do_bench_from_slice(b: &mut Bencher, src_len: uint) {
let src: Vec<uint> = FromIterator::from_iter(0..src_len);
fn do_bench_from_slice(b: &mut Bencher, src_len: usize) {
let src: Vec<_> = FromIterator::from_iter(0..src_len);
b.bytes = src_len as u64;
@ -2549,13 +2549,13 @@ mod tests {
do_bench_from_slice(b, 1000)
}
fn do_bench_from_iter(b: &mut Bencher, src_len: uint) {
let src: Vec<uint> = FromIterator::from_iter(0..src_len);
fn do_bench_from_iter(b: &mut Bencher, src_len: usize) {
let src: Vec<_> = FromIterator::from_iter(0..src_len);
b.bytes = src_len as u64;
b.iter(|| {
let dst: Vec<uint> = FromIterator::from_iter(src.clone().into_iter());
let dst: Vec<_> = FromIterator::from_iter(src.clone().into_iter());
assert_eq!(dst.len(), src_len);
assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
});
@ -2581,9 +2581,9 @@ mod tests {
do_bench_from_iter(b, 1000)
}
fn do_bench_extend(b: &mut Bencher, dst_len: uint, src_len: uint) {
let dst: Vec<uint> = FromIterator::from_iter(0..dst_len);
let src: Vec<uint> = FromIterator::from_iter(dst_len..dst_len + src_len);
fn do_bench_extend(b: &mut Bencher, dst_len: usize, src_len: usize) {
let dst: Vec<_> = FromIterator::from_iter(0..dst_len);
let src: Vec<_> = FromIterator::from_iter(dst_len..dst_len + src_len);
b.bytes = src_len as u64;
@ -2630,9 +2630,9 @@ mod tests {
do_bench_extend(b, 1000, 1000)
}
fn do_bench_push_all(b: &mut Bencher, dst_len: uint, src_len: uint) {
let dst: Vec<uint> = FromIterator::from_iter(0..dst_len);
let src: Vec<uint> = FromIterator::from_iter(dst_len..dst_len + src_len);
fn do_bench_push_all(b: &mut Bencher, dst_len: usize, src_len: usize) {
let dst: Vec<_> = FromIterator::from_iter(0..dst_len);
let src: Vec<_> = FromIterator::from_iter(dst_len..dst_len + src_len);
b.bytes = src_len as u64;
@ -2679,9 +2679,9 @@ mod tests {
do_bench_push_all(b, 1000, 1000)
}
fn do_bench_push_all_move(b: &mut Bencher, dst_len: uint, src_len: uint) {
let dst: Vec<uint> = FromIterator::from_iter(0u..dst_len);
let src: Vec<uint> = FromIterator::from_iter(dst_len..dst_len + src_len);
fn do_bench_push_all_move(b: &mut Bencher, dst_len: usize, src_len: usize) {
let dst: Vec<_> = FromIterator::from_iter(0..dst_len);
let src: Vec<_> = FromIterator::from_iter(dst_len..dst_len + src_len);
b.bytes = src_len as u64;
@ -2728,8 +2728,8 @@ mod tests {
do_bench_push_all_move(b, 1000, 1000)
}
fn do_bench_clone(b: &mut Bencher, src_len: uint) {
let src: Vec<uint> = FromIterator::from_iter(0..src_len);
fn do_bench_clone(b: &mut Bencher, src_len: usize) {
let src: Vec<usize> = FromIterator::from_iter(0..src_len);
b.bytes = src_len as u64;
@ -2760,9 +2760,9 @@ mod tests {
do_bench_clone(b, 1000)
}
fn do_bench_clone_from(b: &mut Bencher, times: uint, dst_len: uint, src_len: uint) {
let dst: Vec<uint> = FromIterator::from_iter(0..src_len);
let src: Vec<uint> = FromIterator::from_iter(dst_len..dst_len + src_len);
fn do_bench_clone_from(b: &mut Bencher, times: usize, dst_len: usize, src_len: usize) {
let dst: Vec<_> = FromIterator::from_iter(0..src_len);
let src: Vec<_> = FromIterator::from_iter(dst_len..dst_len + src_len);
b.bytes = (times * src_len) as u64;
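
The vec.rs changes follow the same pattern: every length, capacity and index becomes `usize`, while raw-pointer offsets use `isize`. A minimal caller-side sketch (illustrative, not from the patch):

// Sketch only: the migrated Vec API as seen by ordinary code.
let mut v: Vec<i32> = Vec::with_capacity(4);  // capacity: usize
v.push(1);
v.push(2);
v.push(3);
assert_eq!(v.len(), 3);                       // len() -> usize
assert_eq!(v[1], 2);                          // Index<usize>
let tail = &v[1..];                           // Index<RangeFrom<usize>>
assert_eq!(tail.len(), 2);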

View File

@ -13,7 +13,7 @@
#![allow(missing_docs)]
pub use self::Entry::*;
use self::Entry::*;
use core::prelude::*;
@ -29,8 +29,6 @@ use core::ops::{Index, IndexMut};
use {vec, slice};
use vec::Vec;
// FIXME(conventions): capacity management???
/// A map optimized for small integer keys.
///
/// # Examples
@ -117,7 +115,7 @@ impl<S: Writer + Hasher, V: Hash<S>> Hash<S> for VecMap<V> {
fn hash(&self, state: &mut S) {
// In order to not traverse the `VecMap` twice, count the elements
// during iteration.
let mut count: uint = 0;
let mut count: usize = 0;
for elt in self {
elt.hash(state);
count += 1;
@ -148,7 +146,7 @@ impl<V> VecMap<V> {
/// let mut map: VecMap<&str> = VecMap::with_capacity(10);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn with_capacity(capacity: uint) -> VecMap<V> {
pub fn with_capacity(capacity: usize) -> VecMap<V> {
VecMap { v: Vec::with_capacity(capacity) }
}
@ -164,7 +162,7 @@ impl<V> VecMap<V> {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn capacity(&self) -> uint {
pub fn capacity(&self) -> usize {
self.v.capacity()
}
@ -183,7 +181,7 @@ impl<V> VecMap<V> {
/// assert!(map.capacity() >= 10);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn reserve_len(&mut self, len: uint) {
pub fn reserve_len(&mut self, len: usize) {
let cur_len = self.v.len();
if len >= cur_len {
self.v.reserve(len - cur_len);
@ -207,7 +205,7 @@ impl<V> VecMap<V> {
/// assert!(map.capacity() >= 10);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn reserve_len_exact(&mut self, len: uint) {
pub fn reserve_len_exact(&mut self, len: usize) {
let cur_len = self.v.len();
if len >= cur_len {
self.v.reserve_exact(len - cur_len);
@ -215,11 +213,11 @@ impl<V> VecMap<V> {
}
/// Returns an iterator visiting all keys in ascending order of the keys.
/// The iterator's element type is `uint`.
/// The iterator's element type is `usize`.
#[stable(feature = "rust1", since = "1.0.0")]
pub fn keys<'r>(&'r self) -> Keys<'r, V> {
fn first<A, B>((a, _): (A, B)) -> A { a }
let first: fn((uint, &'r V)) -> uint = first; // coerce to fn pointer
let first: fn((usize, &'r V)) -> usize = first; // coerce to fn pointer
Keys { iter: self.iter().map(first) }
}
@ -229,13 +227,13 @@ impl<V> VecMap<V> {
#[stable(feature = "rust1", since = "1.0.0")]
pub fn values<'r>(&'r self) -> Values<'r, V> {
fn second<A, B>((_, b): (A, B)) -> B { b }
let second: fn((uint, &'r V)) -> &'r V = second; // coerce to fn pointer
let second: fn((usize, &'r V)) -> &'r V = second; // coerce to fn pointer
Values { iter: self.iter().map(second) }
}
/// Returns an iterator visiting all key-value pairs in ascending order of the keys.
/// The iterator's element type is `(uint, &'r V)`.
/// The iterator's element type is `(usize, &'r V)`.
///
/// # Examples
///
@ -263,7 +261,7 @@ impl<V> VecMap<V> {
/// Returns an iterator visiting all key-value pairs in ascending order of the keys,
/// with mutable references to the values.
/// The iterator's element type is `(uint, &'r mut V)`.
/// The iterator's element type is `(usize, &'r mut V)`.
///
/// # Examples
///
@ -294,7 +292,7 @@ impl<V> VecMap<V> {
/// Returns an iterator visiting all key-value pairs in ascending order of
/// the keys, consuming the original `VecMap`.
/// The iterator's element type is `(uint, &'r V)`.
/// The iterator's element type is `(usize, &'r V)`.
///
/// # Examples
///
@ -306,23 +304,23 @@ impl<V> VecMap<V> {
/// map.insert(3, "c");
/// map.insert(2, "b");
///
/// let vec: Vec<(uint, &str)> = map.into_iter().collect();
/// let vec: Vec<(usize, &str)> = map.into_iter().collect();
///
/// assert_eq!(vec, vec![(1, "a"), (2, "b"), (3, "c")]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn into_iter(self) -> IntoIter<V> {
fn filter<A>((i, v): (uint, Option<A>)) -> Option<(uint, A)> {
fn filter<A>((i, v): (usize, Option<A>)) -> Option<(usize, A)> {
v.map(|v| (i, v))
}
let filter: fn((uint, Option<V>)) -> Option<(uint, V)> = filter; // coerce to fn ptr
let filter: fn((usize, Option<V>)) -> Option<(usize, V)> = filter; // coerce to fn ptr
IntoIter { iter: self.v.into_iter().enumerate().filter_map(filter) }
}
/// Returns an iterator visiting all key-value pairs in ascending order of
/// the keys, emptying (but not consuming) the original `VecMap`.
/// The iterator's element type is `(uint, &'r V)`. Keeps the allocated memory for reuse.
/// The iterator's element type is `(usize, &'r V)`. Keeps the allocated memory for reuse.
///
/// # Examples
///
@ -334,17 +332,17 @@ impl<V> VecMap<V> {
/// map.insert(3, "c");
/// map.insert(2, "b");
///
/// let vec: Vec<(uint, &str)> = map.drain().collect();
/// let vec: Vec<(usize, &str)> = map.drain().collect();
///
/// assert_eq!(vec, vec![(1, "a"), (2, "b"), (3, "c")]);
/// ```
#[unstable(feature = "collections",
reason = "matches collection reform specification, waiting for dust to settle")]
pub fn drain<'a>(&'a mut self) -> Drain<'a, V> {
fn filter<A>((i, v): (uint, Option<A>)) -> Option<(uint, A)> {
fn filter<A>((i, v): (usize, Option<A>)) -> Option<(usize, A)> {
v.map(|v| (i, v))
}
let filter: fn((uint, Option<V>)) -> Option<(uint, V)> = filter; // coerce to fn ptr
let filter: fn((usize, Option<V>)) -> Option<(usize, V)> = filter; // coerce to fn ptr
Drain { iter: self.v.drain().enumerate().filter_map(filter) }
}
@ -362,7 +360,7 @@ impl<V> VecMap<V> {
/// assert_eq!(a.len(), 1);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn len(&self) -> uint {
pub fn len(&self) -> usize {
self.v.iter().filter(|elt| elt.is_some()).count()
}
@ -411,7 +409,7 @@ impl<V> VecMap<V> {
/// assert_eq!(map.get(&2), None);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn get(&self, key: &uint) -> Option<&V> {
pub fn get(&self, key: &usize) -> Option<&V> {
if *key < self.v.len() {
match self.v[*key] {
Some(ref value) => Some(value),
@ -436,7 +434,7 @@ impl<V> VecMap<V> {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn contains_key(&self, key: &uint) -> bool {
pub fn contains_key(&self, key: &usize) -> bool {
self.get(key).is_some()
}
@ -456,7 +454,7 @@ impl<V> VecMap<V> {
/// assert_eq!(map[1], "b");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn get_mut(&mut self, key: &uint) -> Option<&mut V> {
pub fn get_mut(&mut self, key: &usize) -> Option<&mut V> {
if *key < self.v.len() {
match *(&mut self.v[*key]) {
Some(ref mut value) => Some(value),
@ -484,7 +482,7 @@ impl<V> VecMap<V> {
/// assert_eq!(map[37], "c");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn insert(&mut self, key: uint, value: V) -> Option<V> {
pub fn insert(&mut self, key: usize, value: V) -> Option<V> {
let len = self.v.len();
if len <= key {
self.v.extend((0..key - len + 1).map(|_| None));
@ -506,7 +504,7 @@ impl<V> VecMap<V> {
/// assert_eq!(map.remove(&1), None);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn remove(&mut self, key: &uint) -> Option<V> {
pub fn remove(&mut self, key: &usize) -> Option<V> {
if *key >= self.v.len() {
return None;
}
@ -539,8 +537,7 @@ impl<V> VecMap<V> {
///
/// assert_eq!(count[1], 3);
/// ```
#[unstable(feature = "collections",
reason = "precise API still under development")]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn entry(&mut self, key: usize) -> Entry<V> {
// FIXME(Gankro): this is basically the dumbest implementation of
// entry possible, because weird non-lexical borrows issues make it
@ -576,8 +573,7 @@ impl<'a, V> Entry<'a, V> {
impl<'a, V> VacantEntry<'a, V> {
/// Sets the value of the entry with the VacantEntry's key,
/// and returns a mutable reference to it.
#[unstable(feature = "collections",
reason = "matches collection reform v2 specification, waiting for dust to settle")]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn insert(self, value: V) -> &'a mut V {
let index = self.index;
self.map.insert(index, value);
@ -587,24 +583,21 @@ impl<'a, V> VacantEntry<'a, V> {
impl<'a, V> OccupiedEntry<'a, V> {
/// Gets a reference to the value in the entry.
#[unstable(feature = "collections",
reason = "matches collection reform v2 specification, waiting for dust to settle")]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn get(&self) -> &V {
let index = self.index;
&self.map[index]
}
/// Gets a mutable reference to the value in the entry.
#[unstable(feature = "collections",
reason = "matches collection reform v2 specification, waiting for dust to settle")]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn get_mut(&mut self) -> &mut V {
let index = self.index;
&mut self.map[index]
}
/// Converts the entry into a mutable reference to its value.
#[unstable(feature = "collections",
reason = "matches collection reform v2 specification, waiting for dust to settle")]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn into_mut(self) -> &'a mut V {
let index = self.index;
&mut self.map[index]
@ -612,16 +605,14 @@ impl<'a, V> OccupiedEntry<'a, V> {
/// Sets the value of the entry with the OccupiedEntry's key,
/// and returns the entry's old value.
#[unstable(feature = "collections",
reason = "matches collection reform v2 specification, waiting for dust to settle")]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn insert(&mut self, value: V) -> V {
let index = self.index;
self.map.insert(index, value).unwrap()
}
/// Takes the value of the entry out of the map, and returns it.
#[unstable(feature = "collections",
reason = "matches collection reform v2 specification, waiting for dust to settle")]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn remove(self) -> V {
let index = self.index;
self.map.remove(&index).unwrap()
@ -669,8 +660,8 @@ impl<V: fmt::Debug> fmt::Debug for VecMap<V> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<V> FromIterator<(uint, V)> for VecMap<V> {
fn from_iter<Iter: Iterator<Item=(uint, V)>>(iter: Iter) -> VecMap<V> {
impl<V> FromIterator<(usize, V)> for VecMap<V> {
fn from_iter<Iter: Iterator<Item=(usize, V)>>(iter: Iter) -> VecMap<V> {
let mut map = VecMap::new();
map.extend(iter);
map
@ -702,29 +693,29 @@ impl<'a, T> IntoIterator for &'a mut VecMap<T> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<V> Extend<(uint, V)> for VecMap<V> {
fn extend<Iter: Iterator<Item=(uint, V)>>(&mut self, iter: Iter) {
impl<V> Extend<(usize, V)> for VecMap<V> {
fn extend<Iter: Iterator<Item=(usize, V)>>(&mut self, iter: Iter) {
for (k, v) in iter {
self.insert(k, v);
}
}
}
impl<V> Index<uint> for VecMap<V> {
impl<V> Index<usize> for VecMap<V> {
type Output = V;
#[inline]
fn index<'a>(&'a self, i: &uint) -> &'a V {
fn index<'a>(&'a self, i: &usize) -> &'a V {
self.get(i).expect("key not present")
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<V> IndexMut<uint> for VecMap<V> {
impl<V> IndexMut<usize> for VecMap<V> {
type Output = V;
#[inline]
fn index_mut<'a>(&'a mut self, i: &uint) -> &'a mut V {
fn index_mut<'a>(&'a mut self, i: &usize) -> &'a mut V {
self.get_mut(i).expect("key not present")
}
}
@ -757,7 +748,7 @@ macro_rules! iterator {
}
#[inline]
fn size_hint(&self) -> (uint, Option<uint>) {
fn size_hint(&self) -> (usize, Option<usize>) {
(0, Some(self.back - self.front))
}
}
@ -794,8 +785,8 @@ macro_rules! double_ended_iterator {
/// An iterator over the key-value pairs of a map.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Iter<'a, V:'a> {
front: uint,
back: uint,
front: usize,
back: usize,
iter: slice::Iter<'a, Option<V>>
}
@ -810,25 +801,25 @@ impl<'a, V> Clone for Iter<'a, V> {
}
}
iterator! { impl Iter -> (uint, &'a V), as_ref }
double_ended_iterator! { impl Iter -> (uint, &'a V), as_ref }
iterator! { impl Iter -> (usize, &'a V), as_ref }
double_ended_iterator! { impl Iter -> (usize, &'a V), as_ref }
/// An iterator over the key-value pairs of a map, with the
/// values being mutable.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct IterMut<'a, V:'a> {
front: uint,
back: uint,
front: usize,
back: usize,
iter: slice::IterMut<'a, Option<V>>
}
iterator! { impl IterMut -> (uint, &'a mut V), as_mut }
double_ended_iterator! { impl IterMut -> (uint, &'a mut V), as_mut }
iterator! { impl IterMut -> (usize, &'a mut V), as_mut }
double_ended_iterator! { impl IterMut -> (usize, &'a mut V), as_mut }
/// An iterator over the keys of a map.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Keys<'a, V: 'a> {
iter: Map<Iter<'a, V>, fn((uint, &'a V)) -> uint>
iter: Map<Iter<'a, V>, fn((usize, &'a V)) -> usize>
}
// FIXME(#19839) Remove in favor of `#[derive(Clone)]`
@ -843,7 +834,7 @@ impl<'a, V> Clone for Keys<'a, V> {
/// An iterator over the values of a map.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Values<'a, V: 'a> {
iter: Map<Iter<'a, V>, fn((uint, &'a V)) -> &'a V>
iter: Map<Iter<'a, V>, fn((usize, &'a V)) -> &'a V>
}
// FIXME(#19839) Remove in favor of `#[derive(Clone)]`
@ -860,39 +851,39 @@ impl<'a, V> Clone for Values<'a, V> {
pub struct IntoIter<V> {
iter: FilterMap<
Enumerate<vec::IntoIter<Option<V>>>,
fn((uint, Option<V>)) -> Option<(uint, V)>>
fn((usize, Option<V>)) -> Option<(usize, V)>>
}
#[unstable(feature = "collections")]
pub struct Drain<'a, V> {
iter: FilterMap<
Enumerate<vec::Drain<'a, Option<V>>>,
fn((uint, Option<V>)) -> Option<(uint, V)>>
fn((usize, Option<V>)) -> Option<(usize, V)>>
}
#[unstable(feature = "collections")]
impl<'a, V> Iterator for Drain<'a, V> {
type Item = (uint, V);
type Item = (usize, V);
fn next(&mut self) -> Option<(uint, V)> { self.iter.next() }
fn size_hint(&self) -> (uint, Option<uint>) { self.iter.size_hint() }
fn next(&mut self) -> Option<(usize, V)> { self.iter.next() }
fn size_hint(&self) -> (usize, Option<usize>) { self.iter.size_hint() }
}
#[unstable(feature = "collections")]
impl<'a, V> DoubleEndedIterator for Drain<'a, V> {
fn next_back(&mut self) -> Option<(uint, V)> { self.iter.next_back() }
fn next_back(&mut self) -> Option<(usize, V)> { self.iter.next_back() }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, V> Iterator for Keys<'a, V> {
type Item = uint;
type Item = usize;
fn next(&mut self) -> Option<uint> { self.iter.next() }
fn size_hint(&self) -> (uint, Option<uint>) { self.iter.size_hint() }
fn next(&mut self) -> Option<usize> { self.iter.next() }
fn size_hint(&self) -> (usize, Option<usize>) { self.iter.size_hint() }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, V> DoubleEndedIterator for Keys<'a, V> {
fn next_back(&mut self) -> Option<uint> { self.iter.next_back() }
fn next_back(&mut self) -> Option<usize> { self.iter.next_back() }
}
#[stable(feature = "rust1", since = "1.0.0")]
@ -900,7 +891,7 @@ impl<'a, V> Iterator for Values<'a, V> {
type Item = &'a V;
fn next(&mut self) -> Option<(&'a V)> { self.iter.next() }
fn size_hint(&self) -> (uint, Option<uint>) { self.iter.size_hint() }
fn size_hint(&self) -> (usize, Option<usize>) { self.iter.size_hint() }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, V> DoubleEndedIterator for Values<'a, V> {
@ -909,14 +900,14 @@ impl<'a, V> DoubleEndedIterator for Values<'a, V> {
#[stable(feature = "rust1", since = "1.0.0")]
impl<V> Iterator for IntoIter<V> {
type Item = (uint, V);
type Item = (usize, V);
fn next(&mut self) -> Option<(uint, V)> { self.iter.next() }
fn size_hint(&self) -> (uint, Option<uint>) { self.iter.size_hint() }
fn next(&mut self) -> Option<(usize, V)> { self.iter.next() }
fn size_hint(&self) -> (usize, Option<usize>) { self.iter.size_hint() }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<V> DoubleEndedIterator for IntoIter<V> {
fn next_back(&mut self) -> Option<(uint, V)> { self.iter.next_back() }
fn next_back(&mut self) -> Option<(usize, V)> { self.iter.next_back() }
}
#[cfg(test)]
@ -924,7 +915,8 @@ mod test_map {
use prelude::*;
use core::hash::{hash, SipHasher};
use super::{VecMap, Occupied, Vacant};
use super::VecMap;
use super::Entry::{Occupied, Vacant};
#[test]
fn test_get_mut() {
@ -990,7 +982,7 @@ mod test_map {
map.insert(1, 'a');
map.insert(2, 'b');
map.insert(3, 'c');
let keys = map.keys().collect::<Vec<uint>>();
let keys: Vec<_> = map.keys().collect();
assert_eq!(keys.len(), 3);
assert!(keys.contains(&1));
assert!(keys.contains(&2));
@ -1003,7 +995,7 @@ mod test_map {
map.insert(1, 'a');
map.insert(2, 'b');
map.insert(3, 'c');
let values = map.values().map(|&v| v).collect::<Vec<char>>();
let values: Vec<_> = map.values().cloned().collect();
assert_eq!(values.len(), 3);
assert!(values.contains(&'a'));
assert!(values.contains(&'b'));
@ -1062,7 +1054,7 @@ mod test_map {
assert!(m.insert(10, 11).is_none());
for (k, v) in &mut m {
*v += k as int;
*v += k as isize;
}
let mut it = m.iter();
@ -1104,7 +1096,7 @@ mod test_map {
assert!(m.insert(10, 11).is_none());
for (k, v) in m.iter_mut().rev() {
*v += k as int;
*v += k as isize;
}
let mut it = m.iter();
@ -1137,7 +1129,7 @@ mod test_map {
map.insert(3, "c");
map.insert(2, "b");
let vec: Vec<(usize, &str)> = map.drain().collect();
let vec: Vec<_> = map.drain().collect();
assert_eq!(vec, vec![(1, "a"), (2, "b"), (3, "c")]);
assert_eq!(map.len(), 0);
@ -1146,7 +1138,7 @@ mod test_map {
#[test]
fn test_show() {
let mut map = VecMap::new();
let empty = VecMap::<int>::new();
let empty = VecMap::<i32>::new();
map.insert(1, 2);
map.insert(3, 4);
@ -1195,7 +1187,7 @@ mod test_map {
let mut b = VecMap::new();
assert!(!(a < b) && !(b < a));
assert!(b.insert(2u, 5).is_none());
assert!(b.insert(2, 5).is_none());
assert!(a < b);
assert!(a.insert(2, 7).is_none());
assert!(!(a < b) && b < a);
@ -1213,7 +1205,7 @@ mod test_map {
let mut b = VecMap::new();
assert!(a <= b && a >= b);
assert!(a.insert(1u, 1).is_none());
assert!(a.insert(1, 1).is_none());
assert!(a > b && a >= b);
assert!(b < a && b <= a);
assert!(b.insert(2, 2).is_none());
@ -1245,9 +1237,9 @@ mod test_map {
#[test]
fn test_from_iter() {
let xs: Vec<(uint, char)> = vec![(1u, 'a'), (2, 'b'), (3, 'c'), (4, 'd'), (5, 'e')];
let xs = vec![(1, 'a'), (2, 'b'), (3, 'c'), (4, 'd'), (5, 'e')];
let map: VecMap<char> = xs.iter().map(|&x| x).collect();
let map: VecMap<_> = xs.iter().cloned().collect();
for &(k, v) in &xs {
assert_eq!(map.get(&k), Some(&v));
@ -1256,7 +1248,7 @@ mod test_map {
#[test]
fn test_index() {
let mut map: VecMap<int> = VecMap::new();
let mut map = VecMap::new();
map.insert(1, 2);
map.insert(2, 1);
@ -1268,7 +1260,7 @@ mod test_map {
#[test]
#[should_fail]
fn test_index_nonexistent() {
let mut map: VecMap<int> = VecMap::new();
let mut map = VecMap::new();
map.insert(1, 2);
map.insert(2, 1);
@ -1281,7 +1273,7 @@ mod test_map {
fn test_entry(){
let xs = [(1, 10), (2, 20), (3, 30), (4, 40), (5, 50), (6, 60)];
let mut map: VecMap<i32> = xs.iter().map(|&x| x).collect();
let mut map: VecMap<_> = xs.iter().cloned().collect();
// Existing key (insert)
match map.entry(1) {
@ -1337,7 +1329,7 @@ mod bench {
#[bench]
pub fn insert_rand_100(b: &mut Bencher) {
let mut m : VecMap<uint> = VecMap::new();
let mut m = VecMap::new();
insert_rand_n(100, &mut m, b,
|m, i| { m.insert(i, 1); },
|m, i| { m.remove(&i); });
@ -1345,7 +1337,7 @@ mod bench {
#[bench]
pub fn insert_rand_10_000(b: &mut Bencher) {
let mut m : VecMap<uint> = VecMap::new();
let mut m = VecMap::new();
insert_rand_n(10_000, &mut m, b,
|m, i| { m.insert(i, 1); },
|m, i| { m.remove(&i); });
@ -1354,7 +1346,7 @@ mod bench {
// Insert seq
#[bench]
pub fn insert_seq_100(b: &mut Bencher) {
let mut m : VecMap<uint> = VecMap::new();
let mut m = VecMap::new();
insert_seq_n(100, &mut m, b,
|m, i| { m.insert(i, 1); },
|m, i| { m.remove(&i); });
@ -1362,7 +1354,7 @@ mod bench {
#[bench]
pub fn insert_seq_10_000(b: &mut Bencher) {
let mut m : VecMap<uint> = VecMap::new();
let mut m = VecMap::new();
insert_seq_n(10_000, &mut m, b,
|m, i| { m.insert(i, 1); },
|m, i| { m.remove(&i); });
@ -1371,7 +1363,7 @@ mod bench {
// Find rand
#[bench]
pub fn find_rand_100(b: &mut Bencher) {
let mut m : VecMap<uint> = VecMap::new();
let mut m = VecMap::new();
find_rand_n(100, &mut m, b,
|m, i| { m.insert(i, 1); },
|m, i| { m.get(&i); });
@ -1379,7 +1371,7 @@ mod bench {
#[bench]
pub fn find_rand_10_000(b: &mut Bencher) {
let mut m : VecMap<uint> = VecMap::new();
let mut m = VecMap::new();
find_rand_n(10_000, &mut m, b,
|m, i| { m.insert(i, 1); },
|m, i| { m.get(&i); });
@ -1388,7 +1380,7 @@ mod bench {
// Find seq
#[bench]
pub fn find_seq_100(b: &mut Bencher) {
let mut m : VecMap<uint> = VecMap::new();
let mut m = VecMap::new();
find_seq_n(100, &mut m, b,
|m, i| { m.insert(i, 1); },
|m, i| { m.get(&i); });
@ -1396,7 +1388,7 @@ mod bench {
#[bench]
pub fn find_seq_10_000(b: &mut Bencher) {
let mut m : VecMap<uint> = VecMap::new();
let mut m = VecMap::new();
find_seq_n(10_000, &mut m, b,
|m, i| { m.insert(i, 1); },
|m, i| { m.get(&i); });

View File

@ -30,7 +30,6 @@ use super::{Hasher, Writer};
/// strong, this implementation has not been reviewed for such purposes.
/// As such, all cryptographic uses of this implementation are strongly
/// discouraged.
#[allow(missing_copy_implementations)]
pub struct SipHasher {
k0: u64,
k1: u64,

View File

@ -260,7 +260,7 @@ pub trait IteratorExt: Iterator + Sized {
}
/// Creates an iterator that applies the predicate to each element returned
/// by this iterator. The only elements that will be yieled are those that
/// by this iterator. The only elements that will be yielded are those that
/// make the predicate evaluate to `true`.
///
/// # Examples

View File

@ -396,7 +396,6 @@ pub struct InvariantLifetime<'a>;
reason = "likely to change with new variance strategy")]
#[lang="no_copy_bound"]
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord)]
#[allow(missing_copy_implementations)]
pub struct NoCopy;
/// A type which is considered managed by the GC. This is typically
@ -405,5 +404,4 @@ pub struct NoCopy;
reason = "likely to change with new variance strategy")]
#[lang="managed_bound"]
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord)]
#[allow(missing_copy_implementations)]
pub struct Managed;

View File

@ -331,8 +331,9 @@ pub unsafe fn copy_lifetime<'a, S: ?Sized, T: ?Sized + 'a>(_ptr: &'a S,
#[unstable(feature = "core",
reason = "this function may be removed in the future due to its \
questionable utility")]
pub unsafe fn copy_mut_lifetime<'a, S: ?Sized, T: ?Sized + 'a>(_ptr: &'a mut S,
ptr: &mut T)
-> &'a mut T {
pub unsafe fn copy_mut_lifetime<'a, S: ?Sized, T: ?Sized + 'a>(_ptr: &'a S,
ptr: &mut T)
-> &'a mut T
{
transmute(ptr)
}

View File

@ -190,7 +190,7 @@ pub unsafe fn replace<T>(dest: *mut T, mut src: T) -> T {
src
}
/// Reads the value from `src` without dropping it. This leaves the
/// Reads the value from `src` without moving it. This leaves the
/// memory in `src` unchanged.
///
/// # Safety

View File

@ -149,7 +149,6 @@ impl FromStr for bool {
/// An error returned when parsing a `bool` from a string fails.
#[derive(Debug, Clone, PartialEq)]
#[allow(missing_copy_implementations)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct ParseBoolError { _priv: () }
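
The attribute removal above leaves `ParseBoolError` as a plain stable error type. For reference, a small sketch of the surrounding `FromStr` impl in use (hedged: written against the era's Result-returning `FromStr`, which this error type belongs to):

use std::str::FromStr;

// Sketch only: parsing booleans through the trait impl shown above.
let ok: Result<bool, _> = FromStr::from_str("true");
assert_eq!(ok, Ok(true));                  // ParseBoolError derives Debug/PartialEq
let bad: Result<bool, _> = FromStr::from_str("not a bool");
assert!(bad.is_err());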

View File

@ -278,7 +278,6 @@
#![feature(collections)]
#![feature(core)]
#![feature(io)]
#![feature(path)]
use self::LabelText::*;
@ -287,8 +286,6 @@ use std::old_io;
use std::string::CowString;
use std::vec::CowVec;
pub mod maybe_owned_vec;
/// The text for a graphviz label on a node or edge.
pub enum LabelText<'a> {
/// This kind of label preserves the text directly as is.

View File

@ -1,165 +0,0 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![unstable(feature = "rustc_private")]
#![deprecated(since = "1.0.0", reason = "use std::vec::CowVec")]
pub use self::MaybeOwnedVector::*;
use std::cmp::Ordering;
use std::default::Default;
use std::fmt;
use std::iter::FromIterator;
use std::old_path::BytesContainer;
use std::slice;
// Note 1: It is not clear whether the flexibility of providing both
// the `Growable` and `FixedLen` variants is sufficiently useful.
// Consider restricting to just a two variant enum.
// Note 2: Once Dynamically Sized Types (DST) lands, it might be
// reasonable to replace this with something like `enum MaybeOwned<'a,
// U: ?Sized>{ Owned(Box<U>), Borrowed(&'a U) }`; and then `U` could be
// instantiated with `[T]` or `str`, etc. Of course, that would imply
// removing the `Growable` variant, which relates to note 1 above.
// Alternatively, we might add `MaybeOwned` for the general case but
// keep some form of `MaybeOwnedVector` to avoid unnecessary copying
// of the contents of `Vec<T>`, since we anticipate that to be a
// frequent way to dynamically construct a vector.
/// MaybeOwnedVector<'a,T> abstracts over `Vec<T>`, `&'a [T]`.
///
/// Some clients will have a pre-allocated vector ready to hand off in
/// a slice; others will want to create the set on the fly and hand
/// off ownership, via `Growable`.
pub enum MaybeOwnedVector<'a,T:'a> {
Growable(Vec<T>),
Borrowed(&'a [T]),
}
/// Trait for moving into a `MaybeOwnedVector`
pub trait IntoMaybeOwnedVector<'a,T> {
/// Moves self into a `MaybeOwnedVector`
fn into_maybe_owned(self) -> MaybeOwnedVector<'a,T>;
}
#[allow(deprecated)]
impl<'a,T:'a> IntoMaybeOwnedVector<'a,T> for Vec<T> {
#[allow(deprecated)]
#[inline]
fn into_maybe_owned(self) -> MaybeOwnedVector<'a,T> { Growable(self) }
}
#[allow(deprecated)]
impl<'a,T> IntoMaybeOwnedVector<'a,T> for &'a [T] {
#[allow(deprecated)]
#[inline]
fn into_maybe_owned(self) -> MaybeOwnedVector<'a,T> { Borrowed(self) }
}
impl<'a,T> MaybeOwnedVector<'a,T> {
pub fn iter(&'a self) -> slice::Iter<'a,T> {
match self {
&Growable(ref v) => v.as_slice().iter(),
&Borrowed(ref v) => v.iter(),
}
}
pub fn len(&self) -> uint { self.as_slice().len() }
#[allow(deprecated)]
pub fn is_empty(&self) -> bool { self.len() == 0 }
}
impl<'a, T: PartialEq> PartialEq for MaybeOwnedVector<'a, T> {
fn eq(&self, other: &MaybeOwnedVector<T>) -> bool {
self.as_slice() == other.as_slice()
}
}
impl<'a, T: Eq> Eq for MaybeOwnedVector<'a, T> {}
impl<'a, T: PartialOrd> PartialOrd for MaybeOwnedVector<'a, T> {
fn partial_cmp(&self, other: &MaybeOwnedVector<T>) -> Option<Ordering> {
self.as_slice().partial_cmp(other.as_slice())
}
}
impl<'a, T: Ord> Ord for MaybeOwnedVector<'a, T> {
fn cmp(&self, other: &MaybeOwnedVector<T>) -> Ordering {
self.as_slice().cmp(other.as_slice())
}
}
// The `Vector` trait is provided in the prelude and is implemented on
// both `&'a [T]` and `Vec<T>`, so it makes sense to try to support it
// seamlessly. The other vector related traits from the prelude do
// not appear to be implemented on both `&'a [T]` and `Vec<T>`. (It
// is possible that this is an oversight in some cases.)
//
// In any case, with `Vector` in place, the client can just use
// `as_slice` if they prefer that over `match`.
impl<'b,T> AsSlice<T> for MaybeOwnedVector<'b,T> {
fn as_slice<'a>(&'a self) -> &'a [T] {
match self {
&Growable(ref v) => v.as_slice(),
&Borrowed(ref v) => v.as_slice(),
}
}
}
impl<'a,T> FromIterator<T> for MaybeOwnedVector<'a,T> {
#[allow(deprecated)]
fn from_iter<I:Iterator<Item=T>>(iterator: I) -> MaybeOwnedVector<'a,T> {
// If we are building from scratch, might as well build the
// most flexible variant.
Growable(iterator.collect())
}
}
impl<'a,T:fmt::Debug> fmt::Debug for MaybeOwnedVector<'a,T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.as_slice().fmt(f)
}
}
impl<'a, T: Clone> Clone for MaybeOwnedVector<'a, T> {
#[allow(deprecated)]
fn clone(&self) -> MaybeOwnedVector<'a, T> {
match *self {
Growable(ref v) => Growable(v.clone()),
Borrowed(v) => Borrowed(v)
}
}
}
impl<'a, T> Default for MaybeOwnedVector<'a, T> {
#[allow(deprecated)]
fn default() -> MaybeOwnedVector<'a, T> {
Growable(Vec::new())
}
}
impl<'a> BytesContainer for MaybeOwnedVector<'a, u8> {
fn container_as_bytes(&self) -> &[u8] {
self.as_slice()
}
}
impl<'a,T:Clone> MaybeOwnedVector<'a,T> {
/// Convert `self` into a growable `Vec`, not making a copy if possible.
pub fn into_vec(self) -> Vec<T> {
match self {
Growable(v) => v,
Borrowed(v) => v.to_vec(),
}
}
}
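
For readers skimming the deletion above: the removed type abstracted over an owned `Vec<T>` and a borrowed slice, copying only when ownership was actually needed. A minimal self-contained sketch of that idea in today's Rust (illustrative names, not the original API; `std::borrow::Cow<[T]>` now covers the same ground):

```rust
// Sketch of the owned-or-borrowed pattern provided by the removed
// MaybeOwnedVector type. Names and details are illustrative only.
enum MaybeOwned<'a, T: 'a> {
    Growable(Vec<T>),
    Borrowed(&'a [T]),
}

impl<'a, T: Clone> MaybeOwned<'a, T> {
    fn as_slice(&self) -> &[T] {
        match self {
            MaybeOwned::Growable(v) => v,
            MaybeOwned::Borrowed(s) => s,
        }
    }

    // Only the borrowed variant pays for a copy here.
    fn into_vec(self) -> Vec<T> {
        match self {
            MaybeOwned::Growable(v) => v,
            MaybeOwned::Borrowed(s) => s.to_vec(),
        }
    }
}

fn main() {
    let owned = MaybeOwned::Growable(vec![1, 2, 3]);
    let data = [4, 5, 6];
    let borrowed = MaybeOwned::Borrowed(&data);
    assert_eq!(owned.as_slice(), &[1, 2, 3][..]);
    assert_eq!(borrowed.into_vec(), vec![4, 5, 6]);
}
```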

View File

@ -332,15 +332,12 @@ pub mod types {
/// variants, because the compiler complains about the repr attribute
/// otherwise.
#[repr(u8)]
#[allow(missing_copy_implementations)]
pub enum c_void {
__variant1,
__variant2,
}
#[allow(missing_copy_implementations)]
pub enum FILE {}
#[allow(missing_copy_implementations)]
pub enum fpos_t {}
}
pub mod c99 {
@ -354,9 +351,7 @@ pub mod types {
pub type uint64_t = u64;
}
pub mod posix88 {
#[allow(missing_copy_implementations)]
pub enum DIR {}
#[allow(missing_copy_implementations)]
pub enum dirent_t {}
}
pub mod posix01 {}

View File

@ -387,7 +387,6 @@ pub trait SeedableRng<Seed>: Rng {
/// [1]: Marsaglia, George (July 2003). ["Xorshift
/// RNGs"](http://www.jstatsoft.org/v08/i14/paper). *Journal of
/// Statistical Software*. Vol. 8 (Issue 14).
#[allow(missing_copy_implementations)]
#[derive(Clone)]
pub struct XorShiftRng {
x: u32,

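The paper cited in the doc comment describes the xorshift family; for orientation, here is a standalone sketch of the classic 128-bit xorshift step (the well-known Marsaglia recurrence, shown as an illustration rather than a copy of this crate's implementation, with arbitrary nonzero seed values):

```rust
// Illustrative xorshift128 step after Marsaglia (2003).
// A real generator must never be seeded with all zeros.
struct XorShift128 {
    x: u32,
    y: u32,
    z: u32,
    w: u32,
}

impl XorShift128 {
    fn next_u32(&mut self) -> u32 {
        let t = self.x ^ (self.x << 11);
        self.x = self.y;
        self.y = self.z;
        self.z = self.w;
        self.w = (self.w ^ (self.w >> 19)) ^ (t ^ (t >> 8));
        self.w
    }
}

fn main() {
    let mut rng = XorShift128 { x: 0x193a_6754, y: 0xa8a7_d469, z: 0x9783_0e05, w: 0x113b_a7bb };
    // Successive calls walk the generator's 2^128 - 1 cycle.
    for _ in 0..5 {
        println!("{:08x}", rng.next_u32());
    }
}
```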
View File

@ -14,9 +14,9 @@ register_long_diagnostics! {
E0001: r##"
This error suggests that the expression arm corresponding to the noted pattern
will never be reached as for all possible values of the expression being matched,
one of the preceeding patterns will match.
one of the preceding patterns will match.
This means that perhaps some of the preceeding patterns are too general, this
This means that perhaps some of the preceding patterns are too general, this
one is too specific or the ordering is incorrect.
"##,

View File

@ -2018,6 +2018,12 @@ declare_lint! {
"unused or unknown features found in crate-level #[feature] directives"
}
declare_lint! {
pub STABLE_FEATURES,
Warn,
"stable features found in #[feature] directive"
}
declare_lint! {
pub UNKNOWN_CRATE_TYPES,
Deny,
@ -2038,7 +2044,7 @@ declare_lint! {
declare_lint! {
pub MISSING_COPY_IMPLEMENTATIONS,
Warn,
Allow,
"detects potentially-forgotten implementations of `Copy`"
}
@ -2060,6 +2066,7 @@ impl LintPass for HardwiredLints {
UNREACHABLE_CODE,
WARNINGS,
UNUSED_FEATURES,
STABLE_FEATURES,
UNKNOWN_CRATE_TYPES,
VARIANT_SIZE_DIFFERENCES,
FAT_PTR_TRANSMUTES

View File

@ -224,7 +224,6 @@ pub struct RegionVarBindings<'a, 'tcx: 'a> {
}
#[derive(Debug)]
#[allow(missing_copy_implementations)]
pub struct RegionSnapshot {
length: uint,
skolemization_count: u32,

View File

@ -201,8 +201,9 @@ impl Index {
/// Cross-references the feature names of unstable APIs with enabled
/// features and possibly prints errors. Returns a list of all
/// features used.
pub fn check_unstable_api_usage(tcx: &ty::ctxt) -> FnvHashSet<InternedString> {
let ref active_lib_features = tcx.sess.features.borrow().lib_features;
pub fn check_unstable_api_usage(tcx: &ty::ctxt)
-> FnvHashMap<InternedString, attr::StabilityLevel> {
let ref active_lib_features = tcx.sess.features.borrow().declared_lib_features;
// Put the active features into a map for quick lookup
let active_features = active_lib_features.iter().map(|&(ref s, _)| s.clone()).collect();
@ -210,7 +211,7 @@ pub fn check_unstable_api_usage(tcx: &ty::ctxt) -> FnvHashSet<InternedString> {
let mut checker = Checker {
tcx: tcx,
active_features: active_features,
used_features: FnvHashSet()
used_features: FnvHashMap()
};
let krate = tcx.map.krate();
@ -223,7 +224,7 @@ pub fn check_unstable_api_usage(tcx: &ty::ctxt) -> FnvHashSet<InternedString> {
struct Checker<'a, 'tcx: 'a> {
tcx: &'a ty::ctxt<'tcx>,
active_features: FnvHashSet<InternedString>,
used_features: FnvHashSet<InternedString>
used_features: FnvHashMap<InternedString, attr::StabilityLevel>
}
impl<'a, 'tcx> Checker<'a, 'tcx> {
@ -234,7 +235,7 @@ impl<'a, 'tcx> Checker<'a, 'tcx> {
match *stab {
Some(Stability { level: attr::Unstable, ref feature, ref reason, .. }) => {
self.used_features.insert(feature.clone());
self.used_features.insert(feature.clone(), attr::Unstable);
if !self.active_features.contains(feature) {
let msg = match *reason {
@ -247,7 +248,9 @@ impl<'a, 'tcx> Checker<'a, 'tcx> {
feature.get(), span, &msg[]);
}
}
Some(..) => {
Some(Stability { level, ref feature, .. }) => {
self.used_features.insert(feature.clone(), level);
// Stable APIs are always ok to call and deprecated APIs are
// handled by a lint.
}
@ -433,17 +436,37 @@ pub fn lookup(tcx: &ty::ctxt, id: DefId) -> Option<Stability> {
/// Given the list of enabled features that were not language features (i.e. that
/// were expected to be library features), and the list of features used from
/// libraries, identify activated features that don't exist and error about them.
pub fn check_unused_features(sess: &Session,
used_lib_features: &FnvHashSet<InternedString>) {
let ref lib_features = sess.features.borrow().lib_features;
let mut active_lib_features: FnvHashMap<InternedString, Span>
= lib_features.clone().into_iter().collect();
pub fn check_unused_or_stable_features(sess: &Session,
lib_features_used: &FnvHashMap<InternedString,
attr::StabilityLevel>) {
let ref declared_lib_features = sess.features.borrow().declared_lib_features;
let mut remaining_lib_features: FnvHashMap<InternedString, Span>
= declared_lib_features.clone().into_iter().collect();
for used_feature in used_lib_features {
active_lib_features.remove(used_feature);
let stable_msg = "this feature is stable. attribute no longer needed";
for &span in sess.features.borrow().declared_stable_lang_features.iter() {
sess.add_lint(lint::builtin::STABLE_FEATURES,
ast::CRATE_NODE_ID,
span,
stable_msg.to_string());
}
for (_, &span) in &active_lib_features {
for (used_lib_feature, level) in lib_features_used.iter() {
match remaining_lib_features.remove(used_lib_feature) {
Some(span) => {
if *level == attr::Stable {
sess.add_lint(lint::builtin::STABLE_FEATURES,
ast::CRATE_NODE_ID,
span,
stable_msg.to_string());
}
}
None => ( /* used but undeclared, handled during the previous ast visit */ )
}
}
for (_, &span) in remaining_lib_features.iter() {
sess.add_lint(lint::builtin::UNUSED_FEATURES,
ast::CRATE_NODE_ID,
span,

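In outline, the rewritten check keeps the declared library features in a map keyed by feature name, removes each feature the crate actually used, lints a used feature whose APIs have all gone stable, and finally lints whatever remains as unused. A rough standalone sketch of that bookkeeping with plain std collections (the `Span` and `StabilityLevel` types here are invented placeholders, not the compiler's):

```rust
use std::collections::HashMap;

// Hypothetical stand-ins for compiler types; only the bookkeeping is real.
#[derive(Clone, Copy, Debug, PartialEq)]
enum StabilityLevel { Stable, Unstable }

#[derive(Clone, Copy, Debug)]
struct Span(u32);

fn check_features(
    declared: &HashMap<String, Span>,
    used: &HashMap<String, StabilityLevel>,
) -> Vec<String> {
    let mut remaining = declared.clone();
    let mut warnings = Vec::new();

    for (feature, level) in used {
        match remaining.remove(feature) {
            // Declared and used: only warn if the API went stable,
            // so the attribute is no longer needed.
            Some(span) if *level == StabilityLevel::Stable => {
                warnings.push(format!("{feature} at {span:?}: feature is stable, attribute no longer needed"));
            }
            Some(_) => {}
            // Used but never declared: reported elsewhere (earlier AST walk).
            None => {}
        }
    }

    // Whatever was declared but never used is an unused feature.
    for (feature, span) in &remaining {
        warnings.push(format!("{feature} at {span:?}: unused feature"));
    }
    warnings
}

fn main() {
    let mut declared = HashMap::new();
    declared.insert("hash".to_string(), Span(10));
    declared.insert("std_misc".to_string(), Span(20));
    let mut used = HashMap::new();
    used.insert("hash".to_string(), StabilityLevel::Stable);
    used.insert("core".to_string(), StabilityLevel::Unstable);

    for w in check_features(&declared, &used) {
        println!("warning: {w}");
    }
}
```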
View File

@ -59,7 +59,6 @@ pub fn impl_can_satisfy(infcx: &InferCtxt,
|o| selcx.evaluate_obligation(o))
}
#[allow(missing_copy_implementations)]
pub enum OrphanCheckErr<'tcx> {
NoLocalInputType,
UncoveredTy(Ty<'tcx>),

View File

@ -775,7 +775,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
-> bool
{
// In general, it's a good idea to cache results, even
// ambigious ones, to save us some trouble later. But we have
// ambiguous ones, to save us some trouble later. But we have
// to be careful not to cache results that could be
// invalidated later by advances in inference. Normally, this
// is not an issue, because any inference variables whose
@ -1273,7 +1273,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
///
/// - The impl is conditional, in which case we may not have winnowed it out
/// because we don't know if the conditions apply, but the where clause is basically
/// telling us taht there is some impl, though not necessarily the one we see.
/// telling us that there is some impl, though not necessarily the one we see.
///
/// In both cases we prefer to take the where clause, which is
/// essentially harmless. See issue #18453 for more details of
@ -1335,25 +1335,6 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
// the where clauses are in scope.
true
}
(&ParamCandidate(ref bound1), &ParamCandidate(ref bound2)) => {
self.infcx.probe(|_| {
let bound1 =
project::normalize_with_depth(self,
stack.obligation.cause.clone(),
stack.obligation.recursion_depth+1,
bound1);
let bound2 =
project::normalize_with_depth(self,
stack.obligation.cause.clone(),
stack.obligation.recursion_depth+1,
bound2);
let origin =
infer::RelateOutputImplTypes(stack.obligation.cause.span);
self.infcx
.sub_poly_trait_refs(false, origin, bound1.value, bound2.value)
.is_ok()
})
}
_ => {
false
}

View File

@ -11,17 +11,58 @@
use middle::subst::{Substs, VecPerParamSpace};
use middle::infer::InferCtxt;
use middle::ty::{self, Ty, AsPredicate, ToPolyTraitRef};
use std::collections::HashSet;
use std::fmt;
use std::rc::Rc;
use syntax::ast;
use syntax::codemap::Span;
use util::common::ErrorReported;
use util::nodemap::FnvHashSet;
use util::ppaux::Repr;
use super::{Obligation, ObligationCause, PredicateObligation,
VtableImpl, VtableParam, VtableImplData};
struct PredicateSet<'a,'tcx:'a> {
tcx: &'a ty::ctxt<'tcx>,
set: FnvHashSet<ty::Predicate<'tcx>>,
}
impl<'a,'tcx> PredicateSet<'a,'tcx> {
fn new(tcx: &'a ty::ctxt<'tcx>) -> PredicateSet<'a,'tcx> {
PredicateSet { tcx: tcx, set: FnvHashSet() }
}
fn insert(&mut self, pred: &ty::Predicate<'tcx>) -> bool {
// We have to be careful here because we want
//
// for<'a> Foo<&'a int>
//
// and
//
// for<'b> Foo<&'b int>
//
// to be considered equivalent. So normalize all late-bound
// regions before we throw things into the underlying set.
let normalized_pred = match *pred {
ty::Predicate::Trait(ref data) =>
ty::Predicate::Trait(ty::anonymize_late_bound_regions(self.tcx, data)),
ty::Predicate::Equate(ref data) =>
ty::Predicate::Equate(ty::anonymize_late_bound_regions(self.tcx, data)),
ty::Predicate::RegionOutlives(ref data) =>
ty::Predicate::RegionOutlives(ty::anonymize_late_bound_regions(self.tcx, data)),
ty::Predicate::TypeOutlives(ref data) =>
ty::Predicate::TypeOutlives(ty::anonymize_late_bound_regions(self.tcx, data)),
ty::Predicate::Projection(ref data) =>
ty::Predicate::Projection(ty::anonymize_late_bound_regions(self.tcx, data)),
};
self.set.insert(normalized_pred)
}
}
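
The purpose of the normalization above is that two predicates differing only in the names of their late-bound regions land on the same set entry. The retain-while-inserting idiom used by `elaborate_predicates` below can be shown in miniature with ordinary strings, where "normalization" just anonymizes single-letter lifetime names; a toy analogy, not the compiler's logic:

```rust
use std::collections::HashSet;

// Toy analogue of PredicateSet: entries are considered equal once
// single-letter lifetime names are anonymized to '_ .
fn normalize(s: &str) -> String {
    let mut out = String::new();
    let mut chars = s.chars().peekable();
    while let Some(c) = chars.next() {
        if c == '\'' && chars.peek().map_or(false, |n| n.is_ascii_alphabetic()) {
            chars.next();        // drop the lifetime name itself
            out.push_str("'_");  // anonymized placeholder
        } else {
            out.push(c);
        }
    }
    out
}

fn main() {
    let mut predicates = vec![
        "for<'a> Foo<&'a i32>".to_string(),
        "for<'b> Foo<&'b i32>".to_string(), // same predicate, different bound name
        "Bar: Send".to_string(),
    ];

    let mut visited: HashSet<String> = HashSet::new();
    // Keep only the first occurrence of each normalized predicate,
    // mirroring `predicates.retain(|pred| visited.insert(pred))`.
    predicates.retain(|p| visited.insert(normalize(p)));

    assert_eq!(predicates.len(), 2);
}
```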
///////////////////////////////////////////////////////////////////////////
// `Elaboration` iterator
///////////////////////////////////////////////////////////////////////////
@ -36,7 +77,7 @@ use super::{Obligation, ObligationCause, PredicateObligation,
pub struct Elaborator<'cx, 'tcx:'cx> {
tcx: &'cx ty::ctxt<'tcx>,
stack: Vec<StackEntry<'tcx>>,
visited: HashSet<ty::Predicate<'tcx>>,
visited: PredicateSet<'cx,'tcx>,
}
struct StackEntry<'tcx> {
@ -65,14 +106,11 @@ pub fn elaborate_trait_refs<'cx, 'tcx>(
pub fn elaborate_predicates<'cx, 'tcx>(
tcx: &'cx ty::ctxt<'tcx>,
predicates: Vec<ty::Predicate<'tcx>>)
mut predicates: Vec<ty::Predicate<'tcx>>)
-> Elaborator<'cx, 'tcx>
{
let visited: HashSet<ty::Predicate<'tcx>> =
predicates.iter()
.map(|b| (*b).clone())
.collect();
let mut visited = PredicateSet::new(tcx);
predicates.retain(|pred| visited.insert(pred));
let entry = StackEntry { position: 0, predicates: predicates };
Elaborator { tcx: tcx, stack: vec![entry], visited: visited }
}
@ -94,7 +132,7 @@ impl<'cx, 'tcx> Elaborator<'cx, 'tcx> {
// recursion in some cases. One common case is when
// people define `trait Sized: Sized { }` rather than `trait
// Sized { }`.
predicates.retain(|r| self.visited.insert(r.clone()));
predicates.retain(|r| self.visited.insert(r));
self.stack.push(StackEntry { position: 0,
predicates: predicates });

View File

@ -1591,10 +1591,10 @@ pub fn region_existential_bound<'tcx>(r: ty::Region) -> ExistentialBounds<'tcx>
}
impl CLike for BuiltinBound {
fn to_uint(&self) -> uint {
fn to_usize(&self) -> uint {
*self as uint
}
fn from_uint(v: uint) -> BuiltinBound {
fn from_usize(v: uint) -> BuiltinBound {
unsafe { mem::transmute(v) }
}
}
@ -2520,7 +2520,7 @@ impl FlagComputation {
fn add_bound_computation(&mut self, computation: &FlagComputation) {
self.add_flags(computation.flags);
// The types that contributed to `computation` occured within
// The types that contributed to `computation` occurred within
// a region binder, so subtract one from the region depth
// within when adding the depth to `self`.
let depth = computation.depth;

View File

@ -132,7 +132,6 @@ pub enum UnstableFeatures {
}
#[derive(Clone, PartialEq, Eq)]
#[allow(missing_copy_implementations)]
pub enum PrintRequest {
FileNames,
Sysroot,
@ -290,7 +289,6 @@ macro_rules! options {
$($opt:ident : $t:ty = ($init:expr, $parse:ident, $desc:expr)),* ,) =>
(
#[derive(Clone)]
#[allow(missing_copy_implementations)]
pub struct $struct_name { $(pub $opt: $t),* }
pub fn $defaultfn() -> $struct_name {

View File

@ -46,7 +46,6 @@ pub fn DefIdSet() -> DefIdSet { FnvHashSet() }
///
/// This uses FNV hashing, as described here:
/// http://en.wikipedia.org/wiki/Fowler%E2%80%93Noll%E2%80%93Vo_hash_function
#[allow(missing_copy_implementations)]
pub struct FnvHasher(u64);
impl Default for FnvHasher {

View File

@ -46,7 +46,6 @@ pub struct SnapshotVec<D:SnapshotVecDelegate> {
}
// Snapshots are tokens that should be created/consumed linearly.
#[allow(missing_copy_implementations)]
pub struct Snapshot {
// Length of the undo log at the time the snapshot was taken.
length: uint,

View File

@ -142,7 +142,7 @@
//! which contains an empty set of actions, still has a purpose---it
//! prevents moves from `LV`. I chose not to make `MOVE` a fourth kind of
//! action because that would imply that sometimes moves are permitted
//! from restrictived values, which is not the case.
//! from restricted values, which is not the case.
//!
//! #### Example
//!

View File

@ -464,7 +464,6 @@ pub fn opt_loan_path<'tcx>(cmt: &mc::cmt<'tcx>) -> Option<Rc<LoanPath<'tcx>>> {
// Errors that can occur
#[derive(PartialEq)]
#[allow(missing_copy_implementations)]
pub enum bckerr_code {
err_mutbl,
err_out_of_scope(ty::Region, ty::Region), // superscope, subscope

View File

@ -171,7 +171,7 @@ pub fn source_name(input: &Input) -> String {
/// CompileController is used to customise compilation, it allows compilation to
/// be stopped and/or to call arbitrary code at various points in compilation.
/// It also allows for various flags to be set to influence what information gets
/// colelcted during compilation.
/// collected during compilation.
///
/// This is a somewhat higher level controller than a Session - the Session
/// controls what happens in each phase, whereas the CompileController controls
@ -668,8 +668,8 @@ pub fn phase_3_run_analysis_passes<'tcx>(sess: Session,
time(time_passes, "stability checking", (), |_|
stability::check_unstable_api_usage(&ty_cx));
time(time_passes, "unused feature checking", (), |_|
stability::check_unused_features(
time(time_passes, "unused lib feature checking", (), |_|
stability::check_unused_or_stable_features(
&ty_cx.sess, lib_features_used));
time(time_passes, "lint checking", (), |_|

View File

@ -436,73 +436,50 @@ pub enum DiagnosticKind {
}
// Opaque pointer types
#[allow(missing_copy_implementations)]
pub enum Module_opaque {}
pub type ModuleRef = *mut Module_opaque;
#[allow(missing_copy_implementations)]
pub enum Context_opaque {}
pub type ContextRef = *mut Context_opaque;
#[allow(missing_copy_implementations)]
pub enum Type_opaque {}
pub type TypeRef = *mut Type_opaque;
#[allow(missing_copy_implementations)]
pub enum Value_opaque {}
pub type ValueRef = *mut Value_opaque;
#[allow(missing_copy_implementations)]
pub enum Metadata_opaque {}
pub type MetadataRef = *mut Metadata_opaque;
#[allow(missing_copy_implementations)]
pub enum BasicBlock_opaque {}
pub type BasicBlockRef = *mut BasicBlock_opaque;
#[allow(missing_copy_implementations)]
pub enum Builder_opaque {}
pub type BuilderRef = *mut Builder_opaque;
#[allow(missing_copy_implementations)]
pub enum ExecutionEngine_opaque {}
pub type ExecutionEngineRef = *mut ExecutionEngine_opaque;
#[allow(missing_copy_implementations)]
pub enum RustJITMemoryManager_opaque {}
pub type RustJITMemoryManagerRef = *mut RustJITMemoryManager_opaque;
#[allow(missing_copy_implementations)]
pub enum MemoryBuffer_opaque {}
pub type MemoryBufferRef = *mut MemoryBuffer_opaque;
#[allow(missing_copy_implementations)]
pub enum PassManager_opaque {}
pub type PassManagerRef = *mut PassManager_opaque;
#[allow(missing_copy_implementations)]
pub enum PassManagerBuilder_opaque {}
pub type PassManagerBuilderRef = *mut PassManagerBuilder_opaque;
#[allow(missing_copy_implementations)]
pub enum Use_opaque {}
pub type UseRef = *mut Use_opaque;
#[allow(missing_copy_implementations)]
pub enum TargetData_opaque {}
pub type TargetDataRef = *mut TargetData_opaque;
#[allow(missing_copy_implementations)]
pub enum ObjectFile_opaque {}
pub type ObjectFileRef = *mut ObjectFile_opaque;
#[allow(missing_copy_implementations)]
pub enum SectionIterator_opaque {}
pub type SectionIteratorRef = *mut SectionIterator_opaque;
#[allow(missing_copy_implementations)]
pub enum Pass_opaque {}
pub type PassRef = *mut Pass_opaque;
#[allow(missing_copy_implementations)]
pub enum TargetMachine_opaque {}
pub type TargetMachineRef = *mut TargetMachine_opaque;
#[allow(missing_copy_implementations)]
pub enum Archive_opaque {}
pub type ArchiveRef = *mut Archive_opaque;
#[allow(missing_copy_implementations)]
pub enum Twine_opaque {}
pub type TwineRef = *mut Twine_opaque;
#[allow(missing_copy_implementations)]
pub enum DiagnosticInfo_opaque {}
pub type DiagnosticInfoRef = *mut DiagnosticInfo_opaque;
#[allow(missing_copy_implementations)]
pub enum DebugLoc_opaque {}
pub type DebugLocRef = *mut DebugLoc_opaque;
#[allow(missing_copy_implementations)]
pub enum SMDiagnostic_opaque {}
pub type SMDiagnosticRef = *mut SMDiagnostic_opaque;
@ -513,7 +490,6 @@ pub mod debuginfo {
pub use self::DIDescriptorFlags::*;
use super::{MetadataRef};
#[allow(missing_copy_implementations)]
pub enum DIBuilder_opaque {}
pub type DIBuilderRef = *mut DIBuilder_opaque;
@ -2215,7 +2191,6 @@ pub fn get_param(llfn: ValueRef, index: c_uint) -> ValueRef {
}
}
#[allow(missing_copy_implementations)]
pub enum RustString_opaque {}
pub type RustStringRef = *mut RustString_opaque;
type RustStringRepr = *mut RefCell<Vec<u8>>;

View File

@ -20,7 +20,7 @@ register_diagnostics! {
E0254, // import conflicts with imported crate in this module
E0255, // import conflicts with value in this module
E0256, // import conflicts with type in this module
E0257, // inherent implementations are only allowen on types defined in the current module
E0257, // inherent implementations are only allowed on types defined in the current module
E0258, // import conflicts with existing submodule
E0259, // an extern crate has already been imported into this module
E0260 // name conflicts with an external crate that has been imported into this module

View File

@ -69,8 +69,8 @@ impl<'a> SpanUtils<'a> {
pub fn snippet(&self, span: Span) -> String {
match self.sess.codemap().span_to_snippet(span) {
Some(s) => s,
None => String::new(),
Ok(s) => s,
Err(_) => String::new(),
}
}

View File

@ -889,11 +889,13 @@ fn compile_guard<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
}
}
for (_, &binding_info) in &data.bindings_map {
bcx.fcx.lllocals.borrow_mut().remove(&binding_info.id);
}
with_cond(bcx, Not(bcx, val, guard_expr.debug_loc()), |bcx| {
// Guard does not match: remove all bindings from the lllocals table
for (_, &binding_info) in &data.bindings_map {
call_lifetime_end(bcx, binding_info.llmatch);
bcx.fcx.lllocals.borrow_mut().remove(&binding_info.id);
}
match chk {
// If the default arm is the only one left, move on to the next

View File

@ -1081,6 +1081,12 @@ pub fn with_cond<'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>,
F: FnOnce(Block<'blk, 'tcx>) -> Block<'blk, 'tcx>,
{
let _icx = push_ctxt("with_cond");
if bcx.unreachable.get() ||
(common::is_const(val) && common::const_to_uint(val) == 0) {
return bcx;
}
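
The early return added here is a small constant-folding shortcut: when the enclosing block is already unreachable, or the branch condition is a compile-time constant zero, there is nothing to guard, so no "cond"/"next" blocks are emitted. A hypothetical miniature of the same shortcut, with invented types standing in for the translation context:

```rust
// Miniature of the shortcut: an IR builder that skips emitting a
// conditional region when the condition is statically known false.
enum Cond {
    Const(bool),
    Dynamic, // value only known at runtime
}

struct Builder {
    emitted_blocks: usize,
}

impl Builder {
    fn with_cond<F: FnOnce(&mut Builder)>(&mut self, cond: Cond, body: F) {
        // Mirrors `if is_const(val) && const_to_uint(val) == 0 { return bcx; }`
        if let Cond::Const(false) = cond {
            return; // statically dead branch: emit nothing
        }
        self.emitted_blocks += 2; // the "cond" and "next" blocks
        body(self);
    }
}

fn main() {
    let mut b = Builder { emitted_blocks: 0 };
    b.with_cond(Cond::Const(false), |_| unreachable!());
    assert_eq!(b.emitted_blocks, 0);
    b.with_cond(Cond::Dynamic, |_| {});
    assert_eq!(b.emitted_blocks, 2);
}
```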
let fcx = bcx.fcx;
let next_cx = fcx.new_temp_block("next");
let cond_cx = fcx.new_temp_block("cond");

View File

@ -241,8 +241,10 @@ pub fn const_expr<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, e: &ast::Expr)
ty::ty_vec(unit_ty, Some(len)) => {
let llunitty = type_of::type_of(cx, unit_ty);
let llptr = ptrcast(llconst, llunitty.ptr_to());
assert!(cx.const_globals().borrow_mut()
.insert(llptr as int, llconst).is_none());
let prev_const = cx.const_globals().borrow_mut()
.insert(llptr as int, llconst);
assert!(prev_const.is_none() ||
prev_const == Some(llconst));
assert_eq!(abi::FAT_PTR_ADDR, 0);
assert_eq!(abi::FAT_PTR_EXTRA, 1);
llconst = C_struct(cx, &[

View File

@ -1094,7 +1094,7 @@ pub fn get_cleanup_debug_loc_for_ast_node<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
// bodies), in which case we also just want to return the span of the
// whole expression.
let code_snippet = cx.sess().codemap().span_to_snippet(node_span);
if let Some(code_snippet) = code_snippet {
if let Ok(code_snippet) = code_snippet {
let bytes = code_snippet.as_bytes();
if bytes.len() > 0 && &bytes[bytes.len()-1..] == b"}" {

View File

@ -365,7 +365,7 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
(_, "init") => {
let tp_ty = *substs.types.get(FnSpace, 0);
if !return_type_is_void(ccx, tp_ty) {
// Just zero out the stack slot. (See comment on base::memzero for explaination)
// Just zero out the stack slot. (See comment on base::memzero for explanation)
zero_mem(bcx, llresult, tp_ty);
}
C_nil(ccx)

View File

@ -150,7 +150,6 @@ impl Use {
}
/// Iterator for the users of a value
#[allow(missing_copy_implementations)]
pub struct Users {
next: Option<Use>
}

View File

@ -159,11 +159,11 @@ pub fn compare_impl_method<'tcx>(tcx: &ty::ctxt<'tcx>,
// vs 'b). However, the normal subtyping rules on fn types handle
// this kind of equivalency just fine.
//
// We now use these subsititions to ensure that all declared bounds are
// We now use these substitutions to ensure that all declared bounds are
// satisfied by the implementation's method.
//
// We do this by creating a parameter environment which contains a
// substition corresponding to impl_to_skol_substs. We then build
// substitution corresponding to impl_to_skol_substs. We then build
// trait_to_skol_substs and use it to convert the predicates contained
// in the trait_m.generics to the skolemized form.
//

View File

@ -288,7 +288,7 @@ pub fn select_all_fcx_obligations_and_apply_defaults(fcx: &FnCtxt) {
pub fn select_all_fcx_obligations_or_error(fcx: &FnCtxt) {
debug!("select_all_fcx_obligations_or_error");
// upvar inference should have ensured that all deferrred call
// upvar inference should have ensured that all deferred call
// resolutions are handled by now.
assert!(fcx.inh.deferred_call_resolutions.borrow().is_empty());

View File

@ -108,7 +108,7 @@ register_diagnostics! {
E0189, // can only cast a boxed pointer to a boxed object
E0190, // can only cast a &-pointer to an &-object
E0191, // value of the associated type must be specified
E0192, // negative imples are allowed just fo `Send` and `Sync`
E0192, // negative imples are allowed just for `Send` and `Sync`
E0193, // cannot bound type where clause bounds may only be attached to types
// involving type parameters
E0194,
@ -119,7 +119,7 @@ register_diagnostics! {
E0199, // implementing trait is not unsafe
E0200, // trait requires an `unsafe impl` declaration
E0201, // duplicate method in trait impl
E0202, // associated items are not allowed in inherint impls
E0202, // associated items are not allowed in inherent impls
E0203, // type parameter has more than one relaxed default bound,
// and only one is supported
E0204, // trait `Copy` may not be implemented for this type; field

View File

@ -83,7 +83,6 @@ This API is completely unstable and subject to change.
#![feature(rustc_private)]
#![feature(slicing_syntax, unsafe_destructor)]
#![feature(staged_api)]
#![feature(std_misc)]
#[macro_use] extern crate log;
#[macro_use] extern crate syntax;

View File

@ -79,7 +79,6 @@ impl RegionScope for UnelidableRscope {
// A scope in which any omitted region defaults to `default`. This is
// used after the `->` in function signatures, but also for backwards
// compatibility with object types. The latter use may go away.
#[allow(missing_copy_implementations)]
pub struct SpecificRscope {
default: ty::Region
}

View File

@ -192,7 +192,7 @@ use self::VarianceTerm::*;
use self::ParamKind::*;
use arena;
use arena::Arena;
use arena::TypedArena;
use middle::resolve_lifetime as rl;
use middle::subst;
use middle::subst::{ParamSpace, FnSpace, TypeSpace, SelfSpace, VecPerParamSpace};
@ -210,7 +210,7 @@ use util::ppaux::Repr;
pub fn infer_variance(tcx: &ty::ctxt) {
let krate = tcx.map.krate();
let mut arena = arena::Arena::new();
let mut arena = arena::TypedArena::new();
let terms_cx = determine_parameters_to_be_inferred(tcx, &mut arena, krate);
let constraints_cx = add_constraints_from_crate(terms_cx, krate);
solve_constraints(constraints_cx);
@ -254,7 +254,7 @@ impl<'a> fmt::Debug for VarianceTerm<'a> {
struct TermsContext<'a, 'tcx: 'a> {
tcx: &'a ty::ctxt<'tcx>,
arena: &'a Arena,
arena: &'a TypedArena<VarianceTerm<'a>>,
empty_variances: Rc<ty::ItemVariances>,
@ -282,7 +282,7 @@ struct InferredInfo<'a> {
}
fn determine_parameters_to_be_inferred<'a, 'tcx>(tcx: &'a ty::ctxt<'tcx>,
arena: &'a mut Arena,
arena: &'a mut TypedArena<VarianceTerm<'a>>,
krate: &ast::Crate)
-> TermsContext<'a, 'tcx> {
let mut terms_cx = TermsContext {
@ -312,7 +312,7 @@ impl<'a, 'tcx> TermsContext<'a, 'tcx> {
index: uint,
param_id: ast::NodeId) {
let inf_index = InferredIndex(self.inferred_infos.len());
let term = self.arena.alloc(|| InferredTerm(inf_index));
let term = self.arena.alloc(InferredTerm(inf_index));
self.inferred_infos.push(InferredInfo { item_id: item_id,
kind: kind,
space: space,
@ -455,10 +455,10 @@ fn add_constraints_from_crate<'a, 'tcx>(terms_cx: TermsContext<'a, 'tcx>,
let unsafe_lang_item = terms_cx.tcx.lang_items.unsafe_type();
let covariant = terms_cx.arena.alloc(|| ConstantTerm(ty::Covariant));
let contravariant = terms_cx.arena.alloc(|| ConstantTerm(ty::Contravariant));
let invariant = terms_cx.arena.alloc(|| ConstantTerm(ty::Invariant));
let bivariant = terms_cx.arena.alloc(|| ConstantTerm(ty::Bivariant));
let covariant = terms_cx.arena.alloc(ConstantTerm(ty::Covariant));
let contravariant = terms_cx.arena.alloc(ConstantTerm(ty::Contravariant));
let invariant = terms_cx.arena.alloc(ConstantTerm(ty::Invariant));
let bivariant = terms_cx.arena.alloc(ConstantTerm(ty::Bivariant));
let mut constraint_cx = ConstraintContext {
terms_cx: terms_cx,
@ -719,7 +719,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> {
}
_ => {
&*self.terms_cx.arena.alloc(|| TransformTerm(v1, v2))
&*self.terms_cx.arena.alloc(TransformTerm(v1, v2))
}
}
}

View File

@ -2301,8 +2301,8 @@ impl ToSource for syntax::codemap::Span {
fn to_src(&self, cx: &DocContext) -> String {
debug!("converting span {:?} to snippet", self.clean(cx));
let sn = match cx.sess().codemap().span_to_snippet(*self) {
Some(x) => x.to_string(),
None => "".to_string()
Ok(x) => x.to_string(),
Err(_) => "".to_string()
};
debug!("got snippet {}", sn);
sn

View File

@ -136,7 +136,7 @@ impl<
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
let mut bits = 0;
for item in self {
bits |= item.to_uint();
bits |= item.to_usize();
}
s.emit_uint(bits)
}
@ -150,7 +150,7 @@ impl<
let mut set = EnumSet::new();
for bit in 0..uint::BITS {
if bits & (1 << bit) != 0 {
set.insert(CLike::from_uint(1 << bit));
set.insert(CLike::from_usize(1 << bit));
}
}
Ok(set)

View File

@ -21,7 +21,7 @@ fn new_drop(b : &mut Bencher) {
use super::map::HashMap;
b.iter(|| {
let m : HashMap<int, int> = HashMap::new();
let m : HashMap<i32, i32> = HashMap::new();
assert_eq!(m.len(), 0);
})
}

View File

@ -45,9 +45,9 @@ use super::table::BucketState::{
};
use super::state::HashState;
const INITIAL_LOG2_CAP: uint = 5;
const INITIAL_LOG2_CAP: usize = 5;
#[unstable(feature = "std_misc")]
pub const INITIAL_CAPACITY: uint = 1 << INITIAL_LOG2_CAP; // 2^5
pub const INITIAL_CAPACITY: usize = 1 << INITIAL_LOG2_CAP; // 2^5
/// The default behavior of HashMap implements a load factor of 90.9%.
/// This behavior is characterized by the following condition:
@ -62,7 +62,7 @@ impl DefaultResizePolicy {
}
#[inline]
fn min_capacity(&self, usable_size: uint) -> uint {
fn min_capacity(&self, usable_size: usize) -> usize {
// Here, we are rephrasing the logic by specifying the lower limit
// on capacity:
//
@ -72,7 +72,7 @@ impl DefaultResizePolicy {
/// An inverse of `min_capacity`, approximately.
#[inline]
fn usable_capacity(&self, cap: uint) -> uint {
fn usable_capacity(&self, cap: usize) -> usize {
// As the number of entries approaches usable capacity,
// min_capacity(size) must be smaller than the internal capacity,
// so that the map is not resized:
@ -90,7 +90,7 @@ impl DefaultResizePolicy {
fn test_resize_policy() {
use prelude::v1::*;
let rp = DefaultResizePolicy;
for n in 0u..1000 {
for n in 0..1000 {
assert!(rp.min_capacity(rp.usable_capacity(n)) <= n);
assert!(rp.usable_capacity(rp.min_capacity(n)) <= n);
}
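
The resize policy keeps the table at most roughly 90.9% (10/11) full. As a rough self-contained sketch, a pair of functions with that ratio satisfies both round-trip assertions exercised by the test above; the exact integer formulas in the standard library may differ:

```rust
// Illustrative 10/11 (~90.9%) load-factor policy; rounding chosen only to
// satisfy the two assertions from test_resize_policy.
fn min_capacity(usable_size: usize) -> usize {
    // Smallest internal capacity that holds `usable_size` entries
    // without exceeding the load factor: ceil(usable_size * 11 / 10).
    (usable_size * 11 + 9) / 10
}

fn usable_capacity(cap: usize) -> usize {
    // Entries that fit before a resize is forced: floor(cap * 10 / 11).
    cap * 10 / 11
}

fn main() {
    for n in 0..1000 {
        assert!(min_capacity(usable_capacity(n)) <= n);
        assert!(usable_capacity(min_capacity(n)) <= n);
    }
    // Example: a table with internal capacity 32 holds up to 29 entries.
    assert_eq!(usable_capacity(32), 29);
}
```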
@ -287,9 +287,9 @@ fn test_resize_policy() {
/// // Use a HashMap to store the vikings' health points.
/// let mut vikings = HashMap::new();
///
/// vikings.insert(Viking::new("Einar", "Norway"), 25u);
/// vikings.insert(Viking::new("Olaf", "Denmark"), 24u);
/// vikings.insert(Viking::new("Harald", "Iceland"), 12u);
/// vikings.insert(Viking::new("Einar", "Norway"), 25);
/// vikings.insert(Viking::new("Olaf", "Denmark"), 24);
/// vikings.insert(Viking::new("Harald", "Iceland"), 12);
///
/// // Use derived implementation to print the status of the vikings.
/// for (viking, health) in vikings.iter() {
@ -369,7 +369,7 @@ fn pop_internal<K, V>(starting_bucket: FullBucketMut<K, V>) -> (K, V) {
///
/// `hash`, `k`, and `v` are the elements to "robin hood" into the hashtable.
fn robin_hood<'a, K: 'a, V: 'a>(mut bucket: FullBucketMut<'a, K, V>,
mut ib: uint,
mut ib: usize,
mut hash: SafeHash,
mut k: K,
mut v: V)
@ -515,7 +515,7 @@ impl<K: Hash<Hasher> + Eq, V> HashMap<K, V, RandomState> {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn with_capacity(capacity: uint) -> HashMap<K, V, RandomState> {
pub fn with_capacity(capacity: usize) -> HashMap<K, V, RandomState> {
HashMap::with_capacity_and_hash_state(capacity, Default::default())
}
}
@ -537,7 +537,7 @@ impl<K, V, S, H> HashMap<K, V, S>
///
/// let s = RandomState::new();
/// let mut map = HashMap::with_hash_state(s);
/// map.insert(1, 2u);
/// map.insert(1, 2);
/// ```
#[inline]
#[unstable(feature = "std_misc", reason = "hasher stuff is unclear")]
@ -565,11 +565,11 @@ impl<K, V, S, H> HashMap<K, V, S>
///
/// let s = RandomState::new();
/// let mut map = HashMap::with_capacity_and_hash_state(10, s);
/// map.insert(1, 2u);
/// map.insert(1, 2);
/// ```
#[inline]
#[unstable(feature = "std_misc", reason = "hasher stuff is unclear")]
pub fn with_capacity_and_hash_state(capacity: uint, hash_state: S)
pub fn with_capacity_and_hash_state(capacity: usize, hash_state: S)
-> HashMap<K, V, S> {
let resize_policy = DefaultResizePolicy::new();
let min_cap = max(INITIAL_CAPACITY, resize_policy.min_capacity(capacity));
@ -593,7 +593,7 @@ impl<K, V, S, H> HashMap<K, V, S>
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn capacity(&self) -> uint {
pub fn capacity(&self) -> usize {
self.resize_policy.usable_capacity(self.table.capacity())
}
@ -603,7 +603,7 @@ impl<K, V, S, H> HashMap<K, V, S>
///
/// # Panics
///
/// Panics if the new allocation size overflows `uint`.
/// Panics if the new allocation size overflows `usize`.
///
/// # Example
///
@ -613,7 +613,7 @@ impl<K, V, S, H> HashMap<K, V, S>
/// map.reserve(10);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn reserve(&mut self, additional: uint) {
pub fn reserve(&mut self, additional: usize) {
let new_size = self.len().checked_add(additional).expect("capacity overflow");
let min_cap = self.resize_policy.min_capacity(new_size);
@ -631,7 +631,7 @@ impl<K, V, S, H> HashMap<K, V, S>
/// 1) Make sure the new capacity is enough for all the elements, accounting
/// for the load factor.
/// 2) Ensure new_capacity is a power of two or zero.
fn resize(&mut self, new_capacity: uint) {
fn resize(&mut self, new_capacity: usize) {
assert!(self.table.size() <= new_capacity);
assert!(new_capacity.is_power_of_two() || new_capacity == 0);
@ -793,7 +793,7 @@ impl<K, V, S, H> HashMap<K, V, S>
if (ib as int) < robin_ib {
// Found a luckier bucket than me. Better steal his spot.
return robin_hood(bucket, robin_ib as uint, hash, k, v);
return robin_hood(bucket, robin_ib as usize, hash, k, v);
}
probe = bucket.next();
@ -929,10 +929,8 @@ impl<K, V, S, H> HashMap<K, V, S>
}
/// Gets the given key's corresponding entry in the map for in-place manipulation.
#[unstable(feature = "std_misc",
reason = "precise API still being fleshed out")]
pub fn entry<'a>(&'a mut self, key: K) -> Entry<'a, K, V>
{
#[stable(feature = "rust1", since = "1.0.0")]
pub fn entry(&mut self, key: K) -> Entry<K, V> {
// Gotta resize now.
self.reserve(1);
@ -949,11 +947,11 @@ impl<K, V, S, H> HashMap<K, V, S>
///
/// let mut a = HashMap::new();
/// assert_eq!(a.len(), 0);
/// a.insert(1u, "a");
/// a.insert(1, "a");
/// assert_eq!(a.len(), 1);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn len(&self) -> uint { self.table.size() }
pub fn len(&self) -> usize { self.table.size() }
/// Returns true if the map contains no elements.
///
@ -964,7 +962,7 @@ impl<K, V, S, H> HashMap<K, V, S>
///
/// let mut a = HashMap::new();
/// assert!(a.is_empty());
/// a.insert(1u, "a");
/// a.insert(1, "a");
/// assert!(!a.is_empty());
/// ```
#[inline]
@ -980,8 +978,8 @@ impl<K, V, S, H> HashMap<K, V, S>
/// use std::collections::HashMap;
///
/// let mut a = HashMap::new();
/// a.insert(1u, "a");
/// a.insert(2u, "b");
/// a.insert(1, "a");
/// a.insert(2, "b");
///
/// for (k, v) in a.drain().take(1) {
/// assert!(k == 1 || k == 2);
@ -1011,7 +1009,7 @@ impl<K, V, S, H> HashMap<K, V, S>
/// use std::collections::HashMap;
///
/// let mut a = HashMap::new();
/// a.insert(1u, "a");
/// a.insert(1, "a");
/// a.clear();
/// assert!(a.is_empty());
/// ```
@ -1033,7 +1031,7 @@ impl<K, V, S, H> HashMap<K, V, S>
/// use std::collections::HashMap;
///
/// let mut map = HashMap::new();
/// map.insert(1u, "a");
/// map.insert(1, "a");
/// assert_eq!(map.get(&1), Some(&"a"));
/// assert_eq!(map.get(&2), None);
/// ```
@ -1056,7 +1054,7 @@ impl<K, V, S, H> HashMap<K, V, S>
/// use std::collections::HashMap;
///
/// let mut map = HashMap::new();
/// map.insert(1u, "a");
/// map.insert(1, "a");
/// assert_eq!(map.contains_key(&1), true);
/// assert_eq!(map.contains_key(&2), false);
/// ```
@ -1079,7 +1077,7 @@ impl<K, V, S, H> HashMap<K, V, S>
/// use std::collections::HashMap;
///
/// let mut map = HashMap::new();
/// map.insert(1u, "a");
/// map.insert(1, "a");
/// match map.get_mut(&1) {
/// Some(x) => *x = "b",
/// None => (),
@ -1102,7 +1100,7 @@ impl<K, V, S, H> HashMap<K, V, S>
/// use std::collections::HashMap;
///
/// let mut map = HashMap::new();
/// assert_eq!(map.insert(37u, "a"), None);
/// assert_eq!(map.insert(37, "a"), None);
/// assert_eq!(map.is_empty(), false);
///
/// map.insert(37, "b");
@ -1134,7 +1132,7 @@ impl<K, V, S, H> HashMap<K, V, S>
/// use std::collections::HashMap;
///
/// let mut map = HashMap::new();
/// map.insert(1u, "a");
/// map.insert(1, "a");
/// assert_eq!(map.remove(&1), Some("a"));
/// assert_eq!(map.remove(&1), None);
/// ```
@ -1188,7 +1186,7 @@ fn search_entry_hashed<'a, K: Eq, V>(table: &'a mut RawTable<K,V>, hash: SafeHas
return Vacant(VacantEntry {
hash: hash,
key: k,
elem: NeqElem(bucket, robin_ib as uint),
elem: NeqElem(bucket, robin_ib as usize),
});
}
@ -1371,7 +1369,7 @@ pub enum Entry<'a, K: 'a, V: 'a> {
enum VacantEntryState<K, V, M> {
/// The index is occupied, but the key to insert has precedence,
/// and will kick the current one out on insertion.
NeqElem(FullBucket<K, V, M>, uint),
NeqElem(FullBucket<K, V, M>, usize),
/// The index is genuinely vacant.
NoElem(EmptyBucket<K, V, M>),
}
@ -1496,26 +1494,28 @@ impl<'a, K, V> Entry<'a, K, V> {
}
}
#[unstable(feature = "std_misc",
reason = "matches collection reform v2 specification, waiting for dust to settle")]
impl<'a, K, V> OccupiedEntry<'a, K, V> {
/// Gets a reference to the value in the entry.
#[stable(feature = "rust1", since = "1.0.0")]
pub fn get(&self) -> &V {
self.elem.read().1
}
/// Gets a mutable reference to the value in the entry.
#[stable(feature = "rust1", since = "1.0.0")]
pub fn get_mut(&mut self) -> &mut V {
self.elem.read_mut().1
}
/// Converts the OccupiedEntry into a mutable reference to the value in the entry
/// with a lifetime bound to the map itself
#[stable(feature = "rust1", since = "1.0.0")]
pub fn into_mut(self) -> &'a mut V {
self.elem.into_mut_refs().1
}
/// Sets the value of the entry, and returns the entry's old value
#[stable(feature = "rust1", since = "1.0.0")]
pub fn insert(&mut self, mut value: V) -> V {
let old_value = self.get_mut();
mem::swap(&mut value, old_value);
@ -1523,16 +1523,16 @@ impl<'a, K, V> OccupiedEntry<'a, K, V> {
}
/// Takes the value out of the entry, and returns it
#[stable(feature = "rust1", since = "1.0.0")]
pub fn remove(self) -> V {
pop_internal(self.elem).1
}
}
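
With `entry`, `get`, `get_mut`, `into_mut`, `insert` and `remove` all marked stable in this change, the basic usage pattern is a match on the two entry variants. A small example that compiles on stable Rust today:

```rust
use std::collections::hash_map::{Entry, HashMap};

fn main() {
    let mut counts: HashMap<&str, u32> = HashMap::new();

    for word in ["a", "b", "a"] {
        match counts.entry(word) {
            // Key already present: mutate the stored value in place.
            Entry::Occupied(mut e) => {
                *e.get_mut() += 1;
            }
            // Key absent: insert an initial value.
            Entry::Vacant(e) => {
                e.insert(1);
            }
        }
    }

    assert_eq!(counts["a"], 2);
    assert_eq!(counts["b"], 1);
}
```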
#[unstable(feature = "std_misc",
reason = "matches collection reform v2 specification, waiting for dust to settle")]
impl<'a, K: 'a, V: 'a> VacantEntry<'a, K, V> {
/// Sets the value of the entry with the VacantEntry's key,
/// and returns a mutable reference to it
#[stable(feature = "rust1", since = "1.0.0")]
pub fn insert(self, value: V) -> &'a mut V {
match self.elem {
NeqElem(bucket, ib) => {
@ -1580,7 +1580,6 @@ impl<K, V, S, H> Extend<(K, V)> for HashMap<K, V, S>
/// `Hasher`, but the hashers created by two different `RandomState`
/// instances are unlikely to produce the same result for the same values.
#[derive(Clone)]
#[allow(missing_copy_implementations)]
#[unstable(feature = "std_misc",
reason = "hashing an hash maps may be altered")]
pub struct RandomState {
@ -1623,7 +1622,6 @@ impl Default for RandomState {
/// This is the default hasher used in a `HashMap` to hash keys. Types do not
/// typically declare an ability to explicitly hash into this particular type,
/// but rather in a `H: hash::Writer` type parameter.
#[allow(missing_copy_implementations)]
#[unstable(feature = "std_misc",
reason = "hashing an hash maps may be altered")]
pub struct Hasher { inner: SipHasher }
@ -1674,11 +1672,11 @@ mod test_map {
#[derive(Hash, PartialEq, Eq)]
struct Dropable {
k: uint
k: usize
}
impl Dropable {
fn new(k: uint) -> Dropable {
fn new(k: usize) -> Dropable {
DROP_VECTOR.with(|slot| {
slot.borrow_mut()[k] += 1;
});
@ -1711,24 +1709,24 @@ mod test_map {
let mut m = HashMap::new();
DROP_VECTOR.with(|v| {
for i in 0u..200 {
for i in 0..200 {
assert_eq!(v.borrow()[i], 0);
}
});
for i in 0u..100 {
for i in 0..100 {
let d1 = Dropable::new(i);
let d2 = Dropable::new(i+100);
m.insert(d1, d2);
}
DROP_VECTOR.with(|v| {
for i in 0u..200 {
for i in 0..200 {
assert_eq!(v.borrow()[i], 1);
}
});
for i in 0u..50 {
for i in 0..50 {
let k = Dropable::new(i);
let v = m.remove(&k);
@ -1741,12 +1739,12 @@ mod test_map {
}
DROP_VECTOR.with(|v| {
for i in 0u..50 {
for i in 0..50 {
assert_eq!(v.borrow()[i], 0);
assert_eq!(v.borrow()[i+100], 0);
}
for i in 50u..100 {
for i in 50..100 {
assert_eq!(v.borrow()[i], 1);
assert_eq!(v.borrow()[i+100], 1);
}
@ -1754,7 +1752,7 @@ mod test_map {
}
DROP_VECTOR.with(|v| {
for i in 0u..200 {
for i in 0..200 {
assert_eq!(v.borrow()[i], 0);
}
});
@ -1770,19 +1768,19 @@ mod test_map {
let mut hm = HashMap::new();
DROP_VECTOR.with(|v| {
for i in 0u..200 {
for i in 0..200 {
assert_eq!(v.borrow()[i], 0);
}
});
for i in 0u..100 {
for i in 0..100 {
let d1 = Dropable::new(i);
let d2 = Dropable::new(i+100);
hm.insert(d1, d2);
}
DROP_VECTOR.with(|v| {
for i in 0u..200 {
for i in 0..200 {
assert_eq!(v.borrow()[i], 1);
}
});
@ -1797,7 +1795,7 @@ mod test_map {
let mut half = hm.into_iter().take(50);
DROP_VECTOR.with(|v| {
for i in 0u..200 {
for i in 0..200 {
assert_eq!(v.borrow()[i], 1);
}
});
@ -1805,11 +1803,11 @@ mod test_map {
for _ in half.by_ref() {}
DROP_VECTOR.with(|v| {
let nk = (0u..100).filter(|&i| {
let nk = (0..100).filter(|&i| {
v.borrow()[i] == 1
}).count();
let nv = (0u..100).filter(|&i| {
let nv = (0..100).filter(|&i| {
v.borrow()[i+100] == 1
}).count();
@ -1819,7 +1817,7 @@ mod test_map {
};
DROP_VECTOR.with(|v| {
for i in 0u..200 {
for i in 0..200 {
assert_eq!(v.borrow()[i], 0);
}
});
@ -1964,7 +1962,7 @@ mod test_map {
#[test]
fn test_iterate() {
let mut m = HashMap::with_capacity(4);
for i in 0u..32 {
for i in 0..32 {
assert!(m.insert(i, i*2).is_none());
}
assert_eq!(m.len(), 32);
@ -1981,8 +1979,8 @@ mod test_map {
#[test]
fn test_keys() {
let vec = vec![(1, 'a'), (2, 'b'), (3, 'c')];
let map = vec.into_iter().collect::<HashMap<int, char>>();
let keys = map.keys().map(|&k| k).collect::<Vec<int>>();
let map: HashMap<_, _> = vec.into_iter().collect();
let keys: Vec<_> = map.keys().cloned().collect();
assert_eq!(keys.len(), 3);
assert!(keys.contains(&1));
assert!(keys.contains(&2));
@ -1992,8 +1990,8 @@ mod test_map {
#[test]
fn test_values() {
let vec = vec![(1, 'a'), (2, 'b'), (3, 'c')];
let map = vec.into_iter().collect::<HashMap<int, char>>();
let values = map.values().map(|&v| v).collect::<Vec<char>>();
let map: HashMap<_, _> = vec.into_iter().collect();
let values: Vec<_> = map.values().cloned().collect();
assert_eq!(values.len(), 3);
assert!(values.contains(&'a'));
assert!(values.contains(&'b'));
@ -2031,8 +2029,8 @@ mod test_map {
#[test]
fn test_show() {
let mut map: HashMap<int, int> = HashMap::new();
let empty: HashMap<int, int> = HashMap::new();
let mut map = HashMap::new();
let empty: HashMap<i32, i32> = HashMap::new();
map.insert(1, 2);
map.insert(3, 4);
@ -2051,7 +2049,7 @@ mod test_map {
assert_eq!(m.len(), 0);
assert!(m.is_empty());
let mut i = 0u;
let mut i = 0;
let old_cap = m.table.capacity();
while old_cap == m.table.capacity() {
m.insert(i, i);
@ -2079,7 +2077,7 @@ mod test_map {
assert_eq!(cap, initial_cap * 2);
let mut i = 0u;
let mut i = 0;
for _ in 0..cap * 3 / 4 {
m.insert(i, i);
i += 1;
@ -2121,21 +2119,21 @@ mod test_map {
#[test]
fn test_reserve_shrink_to_fit() {
let mut m = HashMap::new();
m.insert(0u, 0u);
m.insert(0, 0);
m.remove(&0);
assert!(m.capacity() >= m.len());
for i in 0us..128 {
for i in 0..128 {
m.insert(i, i);
}
m.reserve(256);
let usable_cap = m.capacity();
for i in 128us..128+256 {
for i in 128..(128 + 256) {
m.insert(i, i);
assert_eq!(m.capacity(), usable_cap);
}
for i in 100us..128+256 {
for i in 100..(128 + 256) {
assert_eq!(m.remove(&i), Some(i));
}
m.shrink_to_fit();
@ -2144,7 +2142,7 @@ mod test_map {
assert!(!m.is_empty());
assert!(m.capacity() >= m.len());
for i in 0us..100 {
for i in 0..100 {
assert_eq!(m.remove(&i), Some(i));
}
m.shrink_to_fit();
@ -2159,7 +2157,7 @@ mod test_map {
fn test_from_iter() {
let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
let map: HashMap<int, int> = xs.iter().map(|&x| x).collect();
let map: HashMap<_, _> = xs.iter().cloned().collect();
for &(k, v) in &xs {
assert_eq!(map.get(&k), Some(&v));
@ -2170,7 +2168,7 @@ mod test_map {
fn test_size_hint() {
let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
let map: HashMap<int, int> = xs.iter().map(|&x| x).collect();
let map: HashMap<_, _> = xs.iter().cloned().collect();
let mut iter = map.iter();
@ -2183,7 +2181,7 @@ mod test_map {
fn test_iter_len() {
let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
let map: HashMap<int, int> = xs.iter().map(|&x| x).collect();
let map: HashMap<_, _> = xs.iter().cloned().collect();
let mut iter = map.iter();
@ -2196,7 +2194,7 @@ mod test_map {
fn test_mut_size_hint() {
let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
let mut map: HashMap<int, int> = xs.iter().map(|&x| x).collect();
let mut map: HashMap<_, _> = xs.iter().cloned().collect();
let mut iter = map.iter_mut();
@ -2209,7 +2207,7 @@ mod test_map {
fn test_iter_mut_len() {
let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
let mut map: HashMap<int, int> = xs.iter().map(|&x| x).collect();
let mut map: HashMap<_, _> = xs.iter().cloned().collect();
let mut iter = map.iter_mut();
@ -2220,7 +2218,7 @@ mod test_map {
#[test]
fn test_index() {
let mut map: HashMap<int, int> = HashMap::new();
let mut map = HashMap::new();
map.insert(1, 2);
map.insert(2, 1);
@ -2232,7 +2230,7 @@ mod test_map {
#[test]
#[should_fail]
fn test_index_nonexistent() {
let mut map: HashMap<int, int> = HashMap::new();
let mut map = HashMap::new();
map.insert(1, 2);
map.insert(2, 1);
@ -2245,7 +2243,7 @@ mod test_map {
fn test_entry(){
let xs = [(1, 10), (2, 20), (3, 30), (4, 40), (5, 50), (6, 60)];
let mut map: HashMap<int, int> = xs.iter().map(|&x| x).collect();
let mut map: HashMap<_, _> = xs.iter().cloned().collect();
// Existing key (insert)
match map.entry(1) {
@ -2296,7 +2294,7 @@ mod test_map {
#[test]
fn test_entry_take_doesnt_corrupt() {
// Test for #19292
fn check(m: &HashMap<int, ()>) {
fn check(m: &HashMap<isize, ()>) {
for k in m.keys() {
assert!(m.contains_key(k),
"{} is in keys() but not in the map?", k);
@ -2307,12 +2305,12 @@ mod test_map {
let mut rng = weak_rng();
// Populate the map with some items.
for _ in 0u..50 {
for _ in 0..50 {
let x = rng.gen_range(-10, 10);
m.insert(x, ());
}
for i in 0u..1000 {
for i in 0..1000 {
let x = rng.gen_range(-10, 10);
match m.entry(x) {
Vacant(_) => {},

View File

@ -76,15 +76,15 @@ use super::state::HashState;
/// #[derive(Hash, Eq, PartialEq, Debug)]
/// struct Viking<'a> {
/// name: &'a str,
/// power: uint,
/// power: usize,
/// }
///
/// let mut vikings = HashSet::new();
///
/// vikings.insert(Viking { name: "Einar", power: 9u });
/// vikings.insert(Viking { name: "Einar", power: 9u });
/// vikings.insert(Viking { name: "Olaf", power: 4u });
/// vikings.insert(Viking { name: "Harald", power: 8u });
/// vikings.insert(Viking { name: "Einar", power: 9 });
/// vikings.insert(Viking { name: "Einar", power: 9 });
/// vikings.insert(Viking { name: "Olaf", power: 4 });
/// vikings.insert(Viking { name: "Harald", power: 8 });
///
/// // Use derived implementation to print the vikings.
/// for x in vikings.iter() {
@ -123,7 +123,7 @@ impl<T: Hash<Hasher> + Eq> HashSet<T, RandomState> {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn with_capacity(capacity: uint) -> HashSet<T, RandomState> {
pub fn with_capacity(capacity: usize) -> HashSet<T, RandomState> {
HashSet { map: HashMap::with_capacity(capacity) }
}
}
@ -146,7 +146,7 @@ impl<T, S, H> HashSet<T, S>
///
/// let s = RandomState::new();
/// let mut set = HashSet::with_hash_state(s);
/// set.insert(2u);
/// set.insert(2);
/// ```
#[inline]
#[unstable(feature = "std_misc", reason = "hasher stuff is unclear")]
@ -169,12 +169,12 @@ impl<T, S, H> HashSet<T, S>
/// use std::collections::hash_map::RandomState;
///
/// let s = RandomState::new();
/// let mut set = HashSet::with_capacity_and_hash_state(10u, s);
/// let mut set = HashSet::with_capacity_and_hash_state(10, s);
/// set.insert(1);
/// ```
#[inline]
#[unstable(feature = "std_misc", reason = "hasher stuff is unclear")]
pub fn with_capacity_and_hash_state(capacity: uint, hash_state: S)
pub fn with_capacity_and_hash_state(capacity: usize, hash_state: S)
-> HashSet<T, S> {
HashSet {
map: HashMap::with_capacity_and_hash_state(capacity, hash_state),
@ -192,7 +192,7 @@ impl<T, S, H> HashSet<T, S>
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn capacity(&self) -> uint {
pub fn capacity(&self) -> usize {
self.map.capacity()
}
@ -202,7 +202,7 @@ impl<T, S, H> HashSet<T, S>
///
/// # Panics
///
/// Panics if the new allocation size overflows `uint`.
/// Panics if the new allocation size overflows `usize`.
///
/// # Example
///
@ -212,7 +212,7 @@ impl<T, S, H> HashSet<T, S>
/// set.reserve(10);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn reserve(&mut self, additional: uint) {
pub fn reserve(&mut self, additional: usize) {
self.map.reserve(additional)
}
@ -398,11 +398,11 @@ impl<T, S, H> HashSet<T, S>
///
/// let mut v = HashSet::new();
/// assert_eq!(v.len(), 0);
/// v.insert(1u);
/// v.insert(1);
/// assert_eq!(v.len(), 1);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn len(&self) -> uint { self.map.len() }
pub fn len(&self) -> usize { self.map.len() }
/// Returns true if the set contains no elements
///
@ -413,7 +413,7 @@ impl<T, S, H> HashSet<T, S>
///
/// let mut v = HashSet::new();
/// assert!(v.is_empty());
/// v.insert(1u);
/// v.insert(1);
/// assert!(!v.is_empty());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
@ -438,7 +438,7 @@ impl<T, S, H> HashSet<T, S>
/// use std::collections::HashSet;
///
/// let mut v = HashSet::new();
/// v.insert(1u);
/// v.insert(1);
/// v.clear();
/// assert!(v.is_empty());
/// ```
@ -456,7 +456,7 @@ impl<T, S, H> HashSet<T, S>
/// ```
/// use std::collections::HashSet;
///
/// let set: HashSet<uint> = [1, 2, 3].iter().map(|&x| x).collect();
/// let set: HashSet<_> = [1, 2, 3].iter().cloned().collect();
/// assert_eq!(set.contains(&1), true);
/// assert_eq!(set.contains(&4), false);
/// ```
@ -475,8 +475,8 @@ impl<T, S, H> HashSet<T, S>
/// ```
/// use std::collections::HashSet;
///
/// let a: HashSet<uint> = [1, 2, 3].iter().map(|&x| x).collect();
/// let mut b: HashSet<uint> = HashSet::new();
/// let a: HashSet<_> = [1, 2, 3].iter().cloned().collect();
/// let mut b = HashSet::new();
///
/// assert_eq!(a.is_disjoint(&b), true);
/// b.insert(4);
@ -496,8 +496,8 @@ impl<T, S, H> HashSet<T, S>
/// ```
/// use std::collections::HashSet;
///
/// let sup: HashSet<uint> = [1, 2, 3].iter().map(|&x| x).collect();
/// let mut set: HashSet<uint> = HashSet::new();
/// let sup: HashSet<_> = [1, 2, 3].iter().cloned().collect();
/// let mut set = HashSet::new();
///
/// assert_eq!(set.is_subset(&sup), true);
/// set.insert(2);
@ -517,8 +517,8 @@ impl<T, S, H> HashSet<T, S>
/// ```
/// use std::collections::HashSet;
///
/// let sub: HashSet<uint> = [1, 2].iter().map(|&x| x).collect();
/// let mut set: HashSet<uint> = HashSet::new();
/// let sub: HashSet<_> = [1, 2].iter().cloned().collect();
/// let mut set = HashSet::new();
///
/// assert_eq!(set.is_superset(&sub), false);
///
@ -545,7 +545,7 @@ impl<T, S, H> HashSet<T, S>
///
/// let mut set = HashSet::new();
///
/// assert_eq!(set.insert(2u), true);
/// assert_eq!(set.insert(2), true);
/// assert_eq!(set.insert(2), false);
/// assert_eq!(set.len(), 1);
/// ```
@ -566,7 +566,7 @@ impl<T, S, H> HashSet<T, S>
///
/// let mut set = HashSet::new();
///
/// set.insert(2u);
/// set.insert(2);
/// assert_eq!(set.remove(&2), true);
/// assert_eq!(set.remove(&2), false);
/// ```
@ -670,10 +670,10 @@ impl<'a, 'b, T, S, H> BitOr<&'b HashSet<T, S>> for &'a HashSet<T, S>
/// ```
/// use std::collections::HashSet;
///
/// let a: HashSet<int> = vec![1, 2, 3].into_iter().collect();
/// let b: HashSet<int> = vec![3, 4, 5].into_iter().collect();
/// let a: HashSet<_> = vec![1, 2, 3].into_iter().collect();
/// let b: HashSet<_> = vec![3, 4, 5].into_iter().collect();
///
/// let set: HashSet<int> = &a | &b;
/// let set = &a | &b;
///
/// let mut i = 0;
/// let expected = [1, 2, 3, 4, 5];
@ -703,10 +703,10 @@ impl<'a, 'b, T, S, H> BitAnd<&'b HashSet<T, S>> for &'a HashSet<T, S>
/// ```
/// use std::collections::HashSet;
///
/// let a: HashSet<int> = vec![1, 2, 3].into_iter().collect();
/// let b: HashSet<int> = vec![2, 3, 4].into_iter().collect();
/// let a: HashSet<_> = vec![1, 2, 3].into_iter().collect();
/// let b: HashSet<_> = vec![2, 3, 4].into_iter().collect();
///
/// let set: HashSet<int> = &a & &b;
/// let set = &a & &b;
///
/// let mut i = 0;
/// let expected = [2, 3];
@ -736,10 +736,10 @@ impl<'a, 'b, T, S, H> BitXor<&'b HashSet<T, S>> for &'a HashSet<T, S>
/// ```
/// use std::collections::HashSet;
///
/// let a: HashSet<int> = vec![1, 2, 3].into_iter().collect();
/// let b: HashSet<int> = vec![3, 4, 5].into_iter().collect();
/// let a: HashSet<_> = vec![1, 2, 3].into_iter().collect();
/// let b: HashSet<_> = vec![3, 4, 5].into_iter().collect();
///
/// let set: HashSet<int> = &a ^ &b;
/// let set = &a ^ &b;
///
/// let mut i = 0;
/// let expected = [1, 2, 4, 5];
@ -769,10 +769,10 @@ impl<'a, 'b, T, S, H> Sub<&'b HashSet<T, S>> for &'a HashSet<T, S>
/// ```
/// use std::collections::HashSet;
///
/// let a: HashSet<int> = vec![1, 2, 3].into_iter().collect();
/// let b: HashSet<int> = vec![3, 4, 5].into_iter().collect();
/// let a: HashSet<_> = vec![1, 2, 3].into_iter().collect();
/// let b: HashSet<_> = vec![3, 4, 5].into_iter().collect();
///
/// let set: HashSet<int> = &a - &b;
/// let set = &a - &b;
///
/// let mut i = 0;
/// let expected = [1, 2];
@ -1029,7 +1029,7 @@ mod test_set {
#[test]
fn test_iterate() {
let mut a = HashSet::new();
for i in 0u..32 {
for i in 0..32 {
assert!(a.insert(i));
}
let mut observed: u32 = 0;
@ -1152,7 +1152,7 @@ mod test_set {
fn test_from_iter() {
let xs = [1, 2, 3, 4, 5, 6, 7, 8, 9];
let set: HashSet<int> = xs.iter().map(|&x| x).collect();
let set: HashSet<_> = xs.iter().cloned().collect();
for x in &xs {
assert!(set.contains(x));
@ -1198,8 +1198,8 @@ mod test_set {
#[test]
fn test_show() {
let mut set: HashSet<int> = HashSet::new();
let empty: HashSet<int> = HashSet::new();
let mut set = HashSet::new();
let empty = HashSet::<i32>::new();
set.insert(1);
set.insert(2);
@ -1212,19 +1212,19 @@ mod test_set {
#[test]
fn test_trivial_drain() {
let mut s = HashSet::<int>::new();
let mut s = HashSet::<i32>::new();
for _ in s.drain() {}
assert!(s.is_empty());
drop(s);
let mut s = HashSet::<int>::new();
let mut s = HashSet::<i32>::new();
drop(s.drain());
assert!(s.is_empty());
}
#[test]
fn test_drain() {
let mut s: HashSet<i32> = (1..100).collect();
let mut s: HashSet<_> = (1..100).collect();
// try this a bunch of times to make sure we don't screw up internal state.
for _ in 0..20 {

View File

@ -67,8 +67,8 @@ const EMPTY_BUCKET: u64 = 0u64;
/// but in general is just a tricked out `Vec<Option<u64, K, V>>`.
#[unsafe_no_drop_flag]
pub struct RawTable<K, V> {
capacity: uint,
size: uint,
capacity: usize,
size: usize,
hashes: *mut u64,
// Because K/V do not appear directly in any of the types in the struct,
// inform rustc that in fact instances of K and V are reachable from here.
@ -88,7 +88,7 @@ impl<K,V> Copy for RawBucket<K,V> {}
pub struct Bucket<K, V, M> {
raw: RawBucket<K, V>,
idx: uint,
idx: usize,
table: M
}
@ -96,13 +96,13 @@ impl<K,V,M:Copy> Copy for Bucket<K,V,M> {}
pub struct EmptyBucket<K, V, M> {
raw: RawBucket<K, V>,
idx: uint,
idx: usize,
table: M
}
pub struct FullBucket<K, V, M> {
raw: RawBucket<K, V>,
idx: uint,
idx: usize,
table: M
}
@ -190,7 +190,7 @@ impl<K, V, M> FullBucket<K, V, M> {
self.table
}
/// Get the raw index.
pub fn index(&self) -> uint {
pub fn index(&self) -> usize {
self.idx
}
}
@ -212,21 +212,21 @@ impl<K, V, M> Bucket<K, V, M> {
self.table
}
/// Get the raw index.
pub fn index(&self) -> uint {
pub fn index(&self) -> usize {
self.idx
}
}
impl<K, V, M: Deref<Target=RawTable<K, V>>> Bucket<K, V, M> {
pub fn new(table: M, hash: SafeHash) -> Bucket<K, V, M> {
Bucket::at_index(table, hash.inspect() as uint)
Bucket::at_index(table, hash.inspect() as usize)
}
pub fn at_index(table: M, ib_index: uint) -> Bucket<K, V, M> {
pub fn at_index(table: M, ib_index: usize) -> Bucket<K, V, M> {
let ib_index = ib_index & (table.capacity() - 1);
Bucket {
raw: unsafe {
table.first_bucket_raw().offset(ib_index as int)
table.first_bucket_raw().offset(ib_index as isize)
},
idx: ib_index,
table: table
@ -276,7 +276,7 @@ impl<K, V, M: Deref<Target=RawTable<K, V>>> Bucket<K, V, M> {
// ... and it's zero at all other times.
let maybe_wraparound_dist = (self.idx ^ (self.idx + 1)) & self.table.capacity();
// Finally, we obtain the offset 1 or the offset -cap + 1.
let dist = 1 - (maybe_wraparound_dist as int);
let dist = 1 - (maybe_wraparound_dist as isize);
self.idx += 1;
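For readers following the Robin Hood table code, the wraparound arithmetic above is easier to see with concrete numbers: because the capacity is a power of two, `idx ^ (idx + 1)` has the capacity bit set exactly when `idx` is the last slot, so the computed step is either `+1` or `-(capacity - 1)`. A minimal sketch, not part of this diff; the capacity and indices are illustrative:

fn next_offset(idx: usize, capacity: usize) -> isize {
    // `maybe_wraparound_dist` equals `capacity` only when `idx` is the last
    // slot, and zero otherwise, mirroring the comment in the hunk above.
    let maybe_wraparound_dist = (idx ^ (idx + 1)) & capacity;
    1 - (maybe_wraparound_dist as isize)
}

fn main() {
    assert_eq!(next_offset(3, 8), 1);  // ordinary step: slot 3 -> slot 4
    assert_eq!(next_offset(7, 8), -7); // last slot wraps back to slot 0
}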
@ -366,11 +366,11 @@ impl<K, V, M: Deref<Target=RawTable<K, V>>> FullBucket<K, V, M> {
///
/// In the cited blog posts above, this is called the "distance to
/// initial bucket", or DIB. Also known as "probe count".
pub fn distance(&self) -> uint {
pub fn distance(&self) -> usize {
// Calculates the distance one has to travel when going from
// `hash mod capacity` onwards to `idx mod capacity`, wrapping around
// if the destination is not reached before the end of the table.
(self.idx - self.hash().inspect() as uint) & (self.table.capacity() - 1)
(self.idx - self.hash().inspect() as usize) & (self.table.capacity() - 1)
}
#[inline]
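The probe count computed above also rewards a concrete example: an element's ideal slot is `hash mod capacity`, and masking with `capacity - 1` makes the subtraction wrap correctly when the element was pushed past the end of the table. A self-contained sketch; the hash and index values are made up, and `wrapping_sub` stands in for the wrapping arithmetic the pre-1.0 code relied on:

fn distance(idx: usize, hash: u64, capacity: usize) -> usize {
    // Distance from the ideal slot (`hash mod capacity`) to where the element
    // actually sits, wrapping around the power-of-two table.
    idx.wrapping_sub(hash as usize) & (capacity - 1)
}

fn main() {
    // Ideal slot 5 in a table of 8, element actually in slot 7: probe count 2.
    assert_eq!(distance(7, 5, 8), 2);
    // Ideal slot 6, element wrapped around to slot 1: probe count 3.
    assert_eq!(distance(1, 6, 8), 3);
}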
@ -503,7 +503,7 @@ impl<K, V, M: Deref<Target=RawTable<K, V>>> GapThenFull<K, V, M> {
/// # Panics
///
/// Panics if `target_alignment` is not a power of two.
fn round_up_to_next(unrounded: uint, target_alignment: uint) -> uint {
fn round_up_to_next(unrounded: usize, target_alignment: usize) -> usize {
assert!(target_alignment.is_power_of_two());
(unrounded + target_alignment - 1) & !(target_alignment - 1)
}
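The rounding helper above is the usual power-of-two trick: add `alignment - 1`, then mask off the low bits. A runnable restatement of the body shown, with a few concrete values:

fn round_up_to_next(unrounded: usize, target_alignment: usize) -> usize {
    assert!(target_alignment.is_power_of_two());
    (unrounded + target_alignment - 1) & !(target_alignment - 1)
}

fn main() {
    assert_eq!(round_up_to_next(0, 4), 0);
    assert_eq!(round_up_to_next(1, 4), 4);
    assert_eq!(round_up_to_next(5, 4), 8);
    assert_eq!(round_up_to_next(8, 4), 8); // already-aligned values are unchanged
}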
@ -520,10 +520,10 @@ fn test_rounding() {
// Returns a tuple of (key_offset, val_offset),
// from the start of a mallocated array.
fn calculate_offsets(hashes_size: uint,
keys_size: uint, keys_align: uint,
vals_align: uint)
-> (uint, uint) {
fn calculate_offsets(hashes_size: usize,
keys_size: usize, keys_align: usize,
vals_align: usize)
-> (usize, usize) {
let keys_offset = round_up_to_next(hashes_size, keys_align);
let end_of_keys = keys_offset + keys_size;
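Those offsets describe a single allocation holding the hash array, then the key array, then the value array, with each start rounded up to its alignment. The hunk cuts off before the value offset is computed, so the sketch below fills in that last step under the assumption that it is `round_up_to_next(end_of_keys, vals_align)`; the sizes in `main` are hypothetical:

fn round_up_to_next(unrounded: usize, align: usize) -> usize {
    (unrounded + align - 1) & !(align - 1)
}

// Sketch of the layout computation: (key_offset, val_offset) from the start of
// one malloc'd buffer. The vals_offset line is an assumption based on the
// visible pattern, not copied from the diff.
fn calculate_offsets(hashes_size: usize,
                     keys_size: usize, keys_align: usize,
                     vals_align: usize) -> (usize, usize) {
    let keys_offset = round_up_to_next(hashes_size, keys_align);
    let end_of_keys = keys_offset + keys_size;
    let vals_offset = round_up_to_next(end_of_keys, vals_align);
    (keys_offset, vals_offset)
}

fn main() {
    // Three u64 hashes (24 bytes), three 4-byte keys, values aligned to 8.
    assert_eq!(calculate_offsets(24, 12, 4, 8), (24, 40));
}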
@ -534,10 +534,10 @@ fn calculate_offsets(hashes_size: uint,
// Returns a tuple of (minimum required malloc alignment, hash_offset,
// array_size), from the start of a mallocated array.
fn calculate_allocation(hash_size: uint, hash_align: uint,
keys_size: uint, keys_align: uint,
vals_size: uint, vals_align: uint)
-> (uint, uint, uint) {
fn calculate_allocation(hash_size: usize, hash_align: usize,
keys_size: usize, keys_align: usize,
vals_size: usize, vals_align: usize)
-> (usize, usize, usize) {
let hash_offset = 0;
let (_, vals_offset) = calculate_offsets(hash_size,
keys_size, keys_align,
@ -562,7 +562,7 @@ fn test_offset_calculation() {
impl<K, V> RawTable<K, V> {
/// Does not initialize the buckets. The caller should ensure they,
/// at the very least, set every hash to EMPTY_BUCKET.
unsafe fn new_uninitialized(capacity: uint) -> RawTable<K, V> {
unsafe fn new_uninitialized(capacity: usize) -> RawTable<K, V> {
if capacity == 0 {
return RawTable {
size: 0,
@ -601,7 +601,7 @@ impl<K, V> RawTable<K, V> {
let buffer = allocate(size, malloc_alignment);
if buffer.is_null() { ::alloc::oom() }
let hashes = buffer.offset(hash_offset as int) as *mut u64;
let hashes = buffer.offset(hash_offset as isize) as *mut u64;
RawTable {
capacity: capacity,
@ -623,15 +623,15 @@ impl<K, V> RawTable<K, V> {
unsafe {
RawBucket {
hash: self.hashes,
key: buffer.offset(keys_offset as int) as *mut K,
val: buffer.offset(vals_offset as int) as *mut V
key: buffer.offset(keys_offset as isize) as *mut K,
val: buffer.offset(vals_offset as isize) as *mut V
}
}
}
/// Creates a new raw table from a given capacity. All buckets are
/// initially empty.
pub fn new(capacity: uint) -> RawTable<K, V> {
pub fn new(capacity: usize) -> RawTable<K, V> {
unsafe {
let ret = RawTable::new_uninitialized(capacity);
zero_memory(ret.hashes, capacity);
@ -640,13 +640,13 @@ impl<K, V> RawTable<K, V> {
}
/// The hashtable's capacity, similar to a vector's.
pub fn capacity(&self) -> uint {
pub fn capacity(&self) -> usize {
self.capacity
}
/// The number of elements ever `put` in the hashtable, minus the number
/// of elements ever `take`n.
pub fn size(&self) -> uint {
pub fn size(&self) -> usize {
self.size
}
@ -654,7 +654,7 @@ impl<K, V> RawTable<K, V> {
RawBuckets {
raw: self.first_bucket_raw(),
hashes_end: unsafe {
self.hashes.offset(self.capacity as int)
self.hashes.offset(self.capacity as isize)
},
marker: marker::ContravariantLifetime,
}
@ -705,7 +705,7 @@ impl<K, V> RawTable<K, V> {
unsafe fn rev_move_buckets(&mut self) -> RevMoveBuckets<K, V> {
let raw_bucket = self.first_bucket_raw();
RevMoveBuckets {
raw: raw_bucket.offset(self.capacity as int),
raw: raw_bucket.offset(self.capacity as isize),
hashes_end: raw_bucket.hash,
elems_left: self.size,
marker: marker::ContravariantLifetime,
@ -758,7 +758,7 @@ impl<'a, K, V> Iterator for RawBuckets<'a, K, V> {
struct RevMoveBuckets<'a, K, V> {
raw: RawBucket<K, V>,
hashes_end: *mut u64,
elems_left: uint,
elems_left: usize,
marker: marker::ContravariantLifetime<'a>,
}
@ -791,7 +791,7 @@ impl<'a, K, V> Iterator for RevMoveBuckets<'a, K, V> {
/// Iterator over shared references to entries in a table.
pub struct Iter<'a, K: 'a, V: 'a> {
iter: RawBuckets<'a, K, V>,
elems_left: uint,
elems_left: usize,
}
// FIXME(#19839) Remove in favor of `#[derive(Clone)]`
@ -808,7 +808,7 @@ impl<'a, K, V> Clone for Iter<'a, K, V> {
/// Iterator over mutable references to entries in a table.
pub struct IterMut<'a, K: 'a, V: 'a> {
iter: RawBuckets<'a, K, V>,
elems_left: uint,
elems_left: usize,
}
/// Iterator over the entries in a table, consuming the table.

View File

@ -209,7 +209,7 @@
//! all the contents of the collection.
//!
//! ```
//! let vec = vec![1u, 2, 3, 4];
//! let vec = vec![1, 2, 3, 4];
//! for x in vec.iter() {
//! println!("vec contained {}", x);
//! }
@ -219,7 +219,7 @@
//! This is great for mutating all the contents of the collection.
//!
//! ```
//! let mut vec = vec![1u, 2, 3, 4];
//! let mut vec = vec![1, 2, 3, 4];
//! for x in vec.iter_mut() {
//! *x += 1;
//! }
@ -234,15 +234,15 @@
//! previous section to do this as efficiently as possible.
//!
//! ```
//! let mut vec1 = vec![1u, 2, 3, 4];
//! let vec2 = vec![10u, 20, 30, 40];
//! let mut vec1 = vec![1, 2, 3, 4];
//! let vec2 = vec![10, 20, 30, 40];
//! vec1.extend(vec2.into_iter());
//! ```
//!
//! ```
//! use std::collections::RingBuf;
//!
//! let vec = vec![1u, 2, 3, 4];
//! let vec = vec![1, 2, 3, 4];
//! let buf: RingBuf<uint> = vec.into_iter().collect();
//! ```
//!
@ -253,7 +253,7 @@
//! iterators as the way to iterate over them in reverse order.
//!
//! ```
//! let vec = vec![1u, 2, 3, 4];
//! let vec = vec![1, 2, 3, 4];
//! for x in vec.iter().rev() {
//! println!("vec contained {}", x);
//! }
@ -299,21 +299,21 @@
//! #### Counting the number of times each character in a string occurs
//!
//! ```
//! use std::collections::btree_map::{BTreeMap, Occupied, Vacant};
//! use std::collections::btree_map::{BTreeMap, Entry};
//!
//! let mut count = BTreeMap::new();
//! let message = "she sells sea shells by the sea shore";
//!
//! for c in message.chars() {
//! match count.entry(c) {
//! Vacant(entry) => { entry.insert(1u); },
//! Occupied(mut entry) => *entry.get_mut() += 1,
//! Entry::Vacant(entry) => { entry.insert(1); },
//! Entry::Occupied(mut entry) => *entry.get_mut() += 1,
//! }
//! }
//!
//! assert_eq!(count.get(&'s'), Some(&8));
//!
//! println!("Number of occurences of each character");
//! println!("Number of occurrences of each character");
//! for (char, count) in count.iter() {
//! println!("{}: {}", char, count);
//! }
@ -326,7 +326,7 @@
//! #### Tracking the inebriation of customers at a bar
//!
//! ```
//! use std::collections::btree_map::{BTreeMap, Occupied, Vacant};
//! use std::collections::btree_map::{BTreeMap, Entry};
//!
//! // A client of the bar. They have an id and a blood alcohol level.
//! struct Person { id: u32, blood_alcohol: f32 };
@ -341,8 +341,8 @@
//! // If this is the first time we've seen this customer, initialize them
//! // with no blood alcohol. Otherwise, just retrieve them.
//! let person = match blood_alcohol.entry(id) {
//! Vacant(entry) => entry.insert(Person{id: id, blood_alcohol: 0.0}),
//! Occupied(entry) => entry.into_mut(),
//! Entry::Vacant(entry) => entry.insert(Person{id: id, blood_alcohol: 0.0}),
//! Entry::Occupied(entry) => entry.into_mut(),
//! };
//!
//! // Reduce their blood alcohol level. It takes time to order and drink a beer!

View File

@ -22,7 +22,6 @@ use mem;
use env;
use str;
#[allow(missing_copy_implementations)]
pub struct DynamicLibrary {
handle: *mut u8
}

View File

@ -337,7 +337,7 @@ pub fn temp_dir() -> Path {
///
/// # Errors
///
/// Acquring the path to the current executable is a platform-specific operation
/// Acquiring the path to the current executable is a platform-specific operation
/// that can fail for a good number of reasons. Some errors can include, but not
/// be limited to filesystem operations failing or general syscall failures.
///
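For orientation, the stabilized form of this API in later Rust is `std::env::current_exe()`, which returns `io::Result<PathBuf>`; a minimal usage sketch under that assumption:

use std::env;

fn main() {
    // current_exe() is fallible for the platform-specific reasons described in
    // the doc comment above, so the error path must be handled.
    match env::current_exe() {
        Ok(path) => println!("running from {}", path.display()),
        Err(e) => println!("could not determine executable path: {}", e),
    }
}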
@ -562,6 +562,38 @@ pub mod consts {
pub const EXE_EXTENSION: &'static str = "";
}
/// Constants associated with the current target
#[cfg(target_os = "openbsd")]
pub mod consts {
pub use super::arch_consts::ARCH;
pub const FAMILY: &'static str = "unix";
/// A string describing the specific operating system in use: in this
/// case, `openbsd`.
pub const OS: &'static str = "openbsd";
/// Specifies the filename prefix used for shared libraries on this
/// platform: in this case, `lib`.
pub const DLL_PREFIX: &'static str = "lib";
/// Specifies the filename suffix used for shared libraries on this
/// platform: in this case, `.so`.
pub const DLL_SUFFIX: &'static str = ".so";
/// Specifies the file extension used for shared libraries on this
/// platform that goes after the dot: in this case, `so`.
pub const DLL_EXTENSION: &'static str = "so";
/// Specifies the filename suffix used for executable binaries on this
/// platform: in this case, the empty string.
pub const EXE_SUFFIX: &'static str = "";
/// Specifies the file extension, if any, used for executable binaries
/// on this platform: in this case, the empty string.
pub const EXE_EXTENSION: &'static str = "";
}
/// Constants associated with the current target
#[cfg(target_os = "android")]
pub mod consts {

View File

@ -96,7 +96,7 @@ fn with_end_to_cap<F>(v: &mut Vec<u8>, f: F) -> Result<usize>
//
// To this end, we use an RAII guard (to protect against panics) which updates
// the length of the string when it is dropped. This guard initially truncates
// the string to the prior length and only afer we've validated that the
// the string to the prior length and only after we've validated that the
// new contents are valid UTF-8 do we allow it to set a longer length.
//
// The unsafety in this function is twofold:
@ -663,7 +663,7 @@ impl<T> Take<T> {
///
/// # Note
///
/// This instance may reach EOF after reading fewer bytes than indiccated by
/// This instance may reach EOF after reading fewer bytes than indicated by
/// this method if the underlying `Read` instance reaches EOF.
pub fn limit(&self) -> u64 { self.limit }
}

View File

@ -759,7 +759,6 @@ pub fn page_size() -> uint {
///
/// The memory map is released (unmapped) when the destructor is run, so don't
/// let it leave scope by accident if you want it to stick around.
#[allow(missing_copy_implementations)]
pub struct MemoryMap {
data: *mut u8,
len: uint,
@ -1289,6 +1288,8 @@ pub mod consts {
}
#[cfg(target_os = "openbsd")]
#[deprecated(since = "1.0.0", reason = "renamed to env::consts")]
#[unstable(feature = "os")]
pub mod consts {
pub use os::arch_consts::ARCH;

View File

@ -922,7 +922,7 @@ impl PathBuf {
///
/// If `self.file_name()` is `None`, does nothing and returns `false`.
///
/// Otherwise, returns `tru`; if `self.exension()` is `None`, the extension
/// Otherwise, returns `true`; if `self.extension()` is `None`, the extension
/// is added; otherwise it is replaced.
pub fn set_extension<S: ?Sized + AsOsStr>(&mut self, extension: &S) -> bool {
if self.file_name().is_none() { return false; }
@ -1062,7 +1062,7 @@ impl Path {
PathBuf::new(self)
}
/// A path is *absolute* if it is indepedent of the current directory.
/// A path is *absolute* if it is independent of the current directory.
///
/// * On Unix, a path is absolute if it starts with the root, so
/// `is_absolute` and `has_root` are equivalent.

View File

@ -206,7 +206,6 @@ mod imp {
/// - iOS: calls SecRandomCopyBytes as /dev/(u)random is sandboxed.
///
/// This does not block.
#[allow(missing_copy_implementations)]
pub struct OsRng {
// dummy field to ensure that this struct cannot be constructed outside of this module
_dummy: (),

View File

@ -98,8 +98,8 @@ thread_local! { static PANICKING: Cell<bool> = Cell::new(false) }
/// Invoke a closure, capturing the cause of panic if one occurs.
///
/// This function will return `None` if the closure did not panic, and will
/// return `Some(cause)` if the closure panics. The `cause` returned is the
/// This function will return `Ok(())` if the closure did not panic, and will
/// return `Err(cause)` if the closure panics. The `cause` returned is the
/// object with which panic was originally invoked.
///
/// This function also is unsafe for a variety of reasons:
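The doc change above switches the described contract from `Some`/`None` to `Ok(())`/`Err(cause)`. In today's standard library the analogous stable entry point is `std::panic::catch_unwind`, which the sketch below uses; it illustrates the contract and is not the function touched by this diff:

use std::panic;

fn main() {
    // Ok(value) when the closure runs to completion...
    let fine = panic::catch_unwind(|| 1 + 1);
    assert_eq!(fine.ok(), Some(2));

    // ...and Err(cause) when it panics; the cause is the panic payload.
    let broken = panic::catch_unwind(|| panic!("boom"));
    assert!(broken.is_err());
}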
@ -390,13 +390,10 @@ pub mod eabi {
use libc::{c_void, c_int};
#[repr(C)]
#[allow(missing_copy_implementations)]
pub struct EXCEPTION_RECORD;
#[repr(C)]
#[allow(missing_copy_implementations)]
pub struct CONTEXT;
#[repr(C)]
#[allow(missing_copy_implementations)]
pub struct DISPATCHER_CONTEXT;
#[repr(C)]

View File

@ -88,7 +88,6 @@ pub fn default_sched_threads() -> uint {
pub const ENFORCE_SANITY: bool = true || !cfg!(rtopt) || cfg!(rtdebug) ||
cfg!(rtassert);
#[allow(missing_copy_implementations)]
pub struct Stdio(libc::c_int);
#[allow(non_upper_case_globals)]

View File

@ -46,7 +46,6 @@ struct BarrierState {
///
/// Currently this opaque structure only has one method, `.is_leader()`. Only
/// one thread will receive a result that will return `true` from this function.
#[allow(missing_copy_implementations)]
pub struct BarrierWaitResult(bool);
impl Barrier {

View File

@ -8,7 +8,7 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Multi-producer, single-consumer communication primitives threads
//! Multi-producer, single-consumer FIFO queue communication primitives.
//!
//! This module provides message-based communication over channels, concretely
//! defined among three types:

View File

@ -45,7 +45,7 @@ use core::mem;
use sync::atomic::{AtomicUsize, Ordering};
// Various states you can find a port in.
const EMPTY: uint = 0; // initial state: no data, no blocked reciever
const EMPTY: uint = 0; // initial state: no data, no blocked receiver
const DATA: uint = 1; // data ready for receiver to take
const DISCONNECTED: uint = 2; // channel is disconnected OR upgraded
// Any other value represents a pointer to a SignalToken value. The

View File

@ -42,7 +42,6 @@ impl Flag {
}
}
#[allow(missing_copy_implementations)]
pub struct Guard {
panicking: bool,
}

View File

@ -84,7 +84,7 @@ impl CodePoint {
/// Create a new `CodePoint` from a `char`.
///
/// Since all Unicode scalar values are code points, this always succeds.
/// Since all Unicode scalar values are code points, this always succeeds.
#[inline]
pub fn from_char(value: char) -> CodePoint {
CodePoint { value: value as u32 }

View File

@ -74,6 +74,8 @@ pub const _SC_GETPW_R_SIZE_MAX: libc::c_int = 70;
#[cfg(any(target_os = "macos",
target_os = "freebsd"))]
pub const _SC_GETPW_R_SIZE_MAX: libc::c_int = 71;
#[cfg(target_os = "openbsd")]
pub const _SC_GETPW_R_SIZE_MAX: libc::c_int = 101;
#[cfg(target_os = "android")]
pub const _SC_GETPW_R_SIZE_MAX: libc::c_int = 0x0048;
@ -91,7 +93,8 @@ pub struct passwd {
#[repr(C)]
#[cfg(any(target_os = "macos",
target_os = "freebsd"))]
target_os = "freebsd",
target_os = "openbsd"))]
pub struct passwd {
pub pw_name: *mut libc::c_char,
pub pw_passwd: *mut libc::c_char,

View File

@ -47,13 +47,9 @@ pub fn errno() -> i32 {
}
#[cfg(target_os = "openbsd")]
fn errno_location() -> *const c_int {
extern {
fn __errno() -> *const c_int;
}
unsafe {
__errno()
}
unsafe fn errno_location() -> *const c_int {
extern { fn __errno() -> *const c_int; }
__errno()
}
#[cfg(any(target_os = "linux", target_os = "android"))]
@ -197,23 +193,23 @@ pub fn current_exe() -> IoResult<Path> {
}
#[cfg(target_os = "openbsd")]
pub fn load_self() -> Option<Vec<u8>> {
pub fn current_exe() -> IoResult<Path> {
use sync::{StaticMutex, MUTEX_INIT};
static LOCK: StaticMutex = MUTEX_INIT;
extern {
fn rust_load_self() -> *const c_char;
fn rust_current_exe() -> *const c_char;
}
let _guard = LOCK.lock();
unsafe {
let v = rust_load_self();
let v = rust_current_exe();
if v.is_null() {
None
Err(IoError::last_error())
} else {
Some(ffi::c_str_to_bytes(&v).to_vec())
Ok(Path::new(ffi::c_str_to_bytes(&v).to_vec()))
}
}
}
@ -333,7 +329,8 @@ pub fn args() -> Args {
#[cfg(any(target_os = "linux",
target_os = "android",
target_os = "freebsd",
target_os = "dragonfly"))]
target_os = "dragonfly",
target_os = "openbsd"))]
pub fn args() -> Args {
use rt;
let bytes = rt::args::clone().unwrap_or(Vec::new());

View File

@ -191,7 +191,7 @@ unsafe fn unregister_dtor(key: Key) -> bool {
// # What's up with this callback?
//
// The callback specified receives a number of parameters from... someone!
// (the kernel? the runtime? I'm not qute sure!) There are a few events that
// (the kernel? the runtime? I'm not quite sure!) There are a few events that
// this gets invoked for, but we're currently only interested on when a
// thread or a process "detaches" (exits). The process part happens for the
// last thread and the thread part happens for any normal thread.

View File

@ -232,7 +232,7 @@ impl Duration {
secs_part.checked_add(nanos_part as i64)
}
/// Add two durations, returning `None` if overflow occured.
/// Add two durations, returning `None` if overflow occurred.
#[unstable(feature = "std_misc")]
pub fn checked_add(&self, rhs: &Duration) -> Option<Duration> {
let mut secs = try_opt!(self.secs.checked_add(rhs.secs));
@ -247,7 +247,7 @@ impl Duration {
if d < MIN || d > MAX { None } else { Some(d) }
}
/// Subtract two durations, returning `None` if overflow occured.
/// Subtract two durations, returning `None` if overflow occurred.
#[unstable(feature = "std_misc")]
pub fn checked_sub(&self, rhs: &Duration) -> Option<Duration> {
let mut secs = try_opt!(self.secs.checked_sub(rhs.secs));
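The same `checked_add`/`checked_sub` pattern survives in today's `std::time::Duration` (which is unsigned, unlike the signed pre-1.0 Duration being edited here), so the overflow contract can be exercised directly:

use std::time::Duration;

fn main() {
    let a = Duration::new(1, 500_000_000); // 1.5 s
    let b = Duration::new(2, 700_000_000); // 2.7 s
    assert_eq!(a.checked_add(b), Some(Duration::new(4, 200_000_000)));

    // A result that would go negative (or overflow) yields None instead of panicking.
    assert_eq!(a.checked_sub(b), None);
    assert_eq!(Duration::MAX.checked_add(a), None);
}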

View File

@ -437,18 +437,35 @@ impl CodeMap {
FileLines {file: lo.file, lines: lines}
}
pub fn span_to_snippet(&self, sp: Span) -> Option<String> {
pub fn span_to_snippet(&self, sp: Span) -> Result<String, SpanSnippetError> {
if sp.lo > sp.hi {
return Err(SpanSnippetError::IllFormedSpan(sp));
}
let begin = self.lookup_byte_offset(sp.lo);
let end = self.lookup_byte_offset(sp.hi);
// FIXME #8256: this used to be an assert but whatever precondition
// it's testing isn't true for all spans in the AST, so to allow the
// caller to not have to panic (and it can't catch it since the CodeMap
// isn't sendable), return None
if begin.fm.start_pos != end.fm.start_pos {
None
return Err(SpanSnippetError::DistinctSources(DistinctSources {
begin: (begin.fm.name.clone(),
begin.fm.start_pos),
end: (end.fm.name.clone(),
end.fm.start_pos)
}));
} else {
Some((&begin.fm.src[begin.pos.to_usize()..end.pos.to_usize()]).to_string())
let start = begin.pos.to_usize();
let limit = end.pos.to_usize();
if start > limit || limit > begin.fm.src.len() {
return Err(SpanSnippetError::MalformedForCodemap(
MalformedCodemapPositions {
name: begin.fm.name.clone(),
source_len: begin.fm.src.len(),
begin_pos: begin.pos,
end_pos: end.pos,
}));
}
return Ok((&begin.fm.src[start..limit]).to_string())
}
}
@ -622,6 +639,27 @@ impl CodeMap {
}
}
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum SpanSnippetError {
IllFormedSpan(Span),
DistinctSources(DistinctSources),
MalformedForCodemap(MalformedCodemapPositions),
}
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct DistinctSources {
begin: (String, BytePos),
end: (String, BytePos)
}
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct MalformedCodemapPositions {
name: String,
source_len: usize,
begin_pos: BytePos,
end_pos: BytePos
}
#[cfg(test)]
mod test {
use super::*;
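With the new signature, call sites match on `Ok`/`Err` rather than `Some`/`None` (the updated parser test later in this diff shows the mechanical change). Below is a toy, self-contained model of the new shape; the real `CodeMap`, `Span`, and the `DistinctSources` variant live in libsyntax and carry more detail:

#[derive(Debug)]
enum SpanSnippetError {
    IllFormedSpan,
    MalformedForCodemap,
    // The real enum also has DistinctSources for spans crossing files.
}

fn span_to_snippet(src: &str, lo: usize, hi: usize) -> Result<String, SpanSnippetError> {
    if lo > hi {
        return Err(SpanSnippetError::IllFormedSpan);
    }
    if hi > src.len() {
        return Err(SpanSnippetError::MalformedForCodemap);
    }
    Ok(src[lo..hi].to_string())
}

fn main() {
    let src = "fn main() { body }";
    assert_eq!(span_to_snippet(src, 12, 16).unwrap(), "body");
    assert!(span_to_snippet(src, 16, 12).is_err());
    assert!(span_to_snippet(src, 0, 999).is_err());
}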
@ -773,7 +811,7 @@ mod test {
let span = Span {lo: BytePos(12), hi: BytePos(23), expn_id: NO_EXPANSION};
let snippet = cm.span_to_snippet(span);
assert_eq!(snippet, Some("second line".to_string()));
assert_eq!(snippet, Ok("second line".to_string()));
}
#[test]

View File

@ -109,7 +109,7 @@ static KNOWN_FEATURES: &'static [(&'static str, &'static str, Status)] = &[
// int and uint are now deprecated
("int_uint", "1.0.0", Active),
// macro reexport needs more discusion and stabilization
// macro reexport needs more discussion and stabilization
("macro_reexport", "1.0.0", Active),
// These are used to test this portion of the compiler, they don't actually
@ -149,7 +149,10 @@ pub struct Features {
pub old_orphan_check: bool,
pub simd_ffi: bool,
pub unmarked_api: bool,
pub lib_features: Vec<(InternedString, Span)>
/// Spans of #![feature] attrs for stable language features, for error reporting
pub declared_stable_lang_features: Vec<Span>,
/// #![feature] attrs for non-language (library) features
pub declared_lib_features: Vec<(InternedString, Span)>
}
impl Features {
@ -162,7 +165,8 @@ impl Features {
old_orphan_check: false,
simd_ffi: false,
unmarked_api: false,
lib_features: Vec::new()
declared_stable_lang_features: Vec::new(),
declared_lib_features: Vec::new()
}
}
}
@ -511,6 +515,7 @@ fn check_crate_inner<F>(cm: &CodeMap, span_handler: &SpanHandler, krate: &ast::C
cm: cm,
};
let mut accepted_features = Vec::new();
let mut unknown_features = Vec::new();
for attr in &krate.attrs {
@ -550,8 +555,7 @@ fn check_crate_inner<F>(cm: &CodeMap, span_handler: &SpanHandler, krate: &ast::C
span_handler.span_err(mi.span, "feature has been removed");
}
Some(&(_, _, Accepted)) => {
span_handler.span_warn(mi.span, "feature has been added to Rust, \
directive not necessary");
accepted_features.push(mi.span);
}
None => {
unknown_features.push((name, mi.span));
@ -572,7 +576,8 @@ fn check_crate_inner<F>(cm: &CodeMap, span_handler: &SpanHandler, krate: &ast::C
old_orphan_check: cx.has_feature("old_orphan_check"),
simd_ffi: cx.has_feature("simd_ffi"),
unmarked_api: cx.has_feature("unmarked_api"),
lib_features: unknown_features
declared_stable_lang_features: accepted_features,
declared_lib_features: unknown_features
}
}

View File

@ -560,7 +560,7 @@ fn filtered_float_lit(data: token::InternedString, suffix: Option<&str>,
}
pub fn float_lit(s: &str, suffix: Option<&str>, sd: &SpanHandler, sp: Span) -> ast::Lit_ {
debug!("float_lit: {:?}, {:?}", s, suffix);
// FIXME #2252: bounds checking float literals is defered until trans
// FIXME #2252: bounds checking float literals is deferred until trans
let s = s.chars().filter(|&c| c != '_').collect::<String>();
let data = token::intern_and_get_ident(&*s);
filtered_float_lit(data, suffix, sd, sp)
@ -1233,8 +1233,8 @@ mod test {
let span = tts.iter().rev().next().unwrap().get_span();
match sess.span_diagnostic.cm.span_to_snippet(span) {
Some(s) => assert_eq!(&s[], "{ body }"),
None => panic!("could not get snippet"),
Ok(s) => assert_eq!(&s[], "{ body }"),
Err(_) => panic!("could not get snippet"),
}
}
}

View File

@ -63,15 +63,15 @@ impl<'a> ParserObsoleteMethods for parser::Parser<'a> {
"use a `move ||` expression instead",
),
ObsoleteSyntax::ClosureType => (
"`|usize| -> bool` closure type syntax",
"`|usize| -> bool` closure type",
"use unboxed closures instead, no type annotation needed"
),
ObsoleteSyntax::ClosureKind => (
"`:`, `&mut:`, or `&:` syntax",
"`:`, `&mut:`, or `&:`",
"rely on inference instead"
),
ObsoleteSyntax::Sized => (
"`Sized? T` syntax for removing the `Sized` bound",
"`Sized? T` for removing the `Sized` bound",
"write `T: ?Sized` instead"
),
};

View File

@ -205,7 +205,7 @@ int *__dfly_error(void) { return __error(); }
#include <sys/sysctl.h>
#include <limits.h>
const char * rust_load_self() {
const char * rust_current_exe() {
static char *self = NULL;
if (self == NULL) {

View File

@ -43,7 +43,7 @@ document.addEventListener("DOMContentLoaded", function(event) {
// of each of the sections.
// It works by extracting the current page based on the url and iterates over
// the menu links until it finds the menu item for the current page. We then
// create a copy of the preceeding and following menu links and add the
// create a copy of the preceding and following menu links and add the
// correct css class and insert them into the bottom of the page.
var toc = document.getElementById('toc').getElementsByTagName('a');
var href = document.location.pathname.split('/').pop();

View File

@ -90,12 +90,12 @@ impl Tables {
}
}
/// Retreives the complement for `i`.
/// Retrieves the complement for `i`.
fn cpl8(&self, i: u8) -> u8 {
self.table8[i as uint]
}
/// Retreives the complement for `i`.
/// Retrieves the complement for `i`.
fn cpl16(&self, i: u16) -> u16 {
self.table16[i as uint]
}

View File

@ -8,7 +8,7 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test associated types are forbidden in inherant impls.
// Test associated types are forbidden in inherent impls.
struct Foo;

View File

@ -20,4 +20,3 @@
#![feature = "foo"] //~ ERROR: malformed feature
#![feature(test_removed_feature)] //~ ERROR: feature has been removed
#![feature(test_accepted_feature)] //~ WARNING: feature has been added

View File

@ -0,0 +1,28 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that (for now) we report an ambiguity error here, because
// specific trait relationships are ignored for the purposes of trait
// matching. This behavior should likely be improved such that this
// test passes. See #21974 for more details.
trait Foo {
fn foo(self);
}
fn foo<'a,'b,T>(x: &'a T, y: &'b T)
where &'a T : Foo,
&'b T : Foo
{
x.foo(); //~ ERROR type annotations required
y.foo();
}
fn main() { }

View File

@ -12,7 +12,7 @@
use std::cell::RefCell;
// Regresion test for issue 7364
// Regression test for issue 7364
static boxed: Box<RefCell<isize>> = box RefCell::new(0);
//~^ ERROR statics are not allowed to have custom pointers
//~| ERROR: the trait `core::marker::Sync` is not implemented for the type

View File

@ -8,7 +8,7 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Regresion test for issue 9243
// Regression test for issue 9243
struct Test {
mem: isize,

View File

@ -12,7 +12,6 @@
#![allow(unused_variables)]
#![allow(non_camel_case_types)]
#![allow(non_upper_case_globals)]
#![allow(missing_copy_implementations)]
#![deny(dead_code)]
#![feature(core)]

View File

@ -12,7 +12,6 @@
// injected intrinsics by the compiler.
#![deny(missing_docs)]
#![allow(dead_code)]
#![allow(missing_copy_implementations)]
//! Some garbage docs for the crate here
#![doc="More garbage"]

Some files were not shown because too many files have changed in this diff