Auto merge of #21613 - alfie:suffix-small, r=alexcrichton

bors 2015-02-03 07:59:04 +00:00
commit 336c8d2e9c
35 changed files with 182 additions and 182 deletions
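
The change is mechanical throughout: integer literals that previously carried an explicit 'u' (uint) suffix now rely on literal type inference, and a few sites are switched to the 'us' (usize) suffix where the type still has to be spelled out (an array length and some to_json() test values below). A minimal before/after sketch of the pattern, using a hypothetical counter loop rather than code taken from this diff:

    // Before: explicit uint suffixes on literals (pre-1.0 syntax).
    //     let mut count = 0u;
    //     for i in 0u..10 { count += i; }

    // After: the suffixes are dropped and the integer type is inferred.
    fn main() {
        let mut count = 0;
        for i in 0..10 {
            count += i;
        }
        println!("{}", count);
    }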

@@ -300,8 +300,8 @@ fn parse_exec_env(line: &str) -> Option<(String, String)> {
 .collect();
 match strs.len() {
-1u => (strs.pop().unwrap(), "".to_string()),
-2u => {
+1 => (strs.pop().unwrap(), "".to_string()),
+2 => {
 let end = strs.pop().unwrap();
 (strs.pop().unwrap(), end)
 }

@@ -230,9 +230,9 @@ fn run_pretty_test(config: &Config, props: &TestProps, testfile: &Path) {
 let s = File::open(&filepath).read_to_end().unwrap();
 String::from_utf8(s).unwrap()
 }
-None => { srcs[srcs.len() - 2u].clone() }
+None => { srcs[srcs.len() - 2].clone() }
 };
-let mut actual = srcs[srcs.len() - 1u].clone();
+let mut actual = srcs[srcs.len() - 1].clone();
 if props.pp_exact.is_some() {
 // Now we have to care about line endings
@@ -842,7 +842,7 @@ fn check_debugger_output(debugger_run_result: &ProcRes, check_lines: &[String])
 }).collect();
 // check if each line in props.check_lines appears in the
 // output (in order)
-let mut i = 0u;
+let mut i = 0;
 for line in debugger_run_result.stdout.lines() {
 let mut rest = line.trim();
 let mut first = true;
@@ -869,7 +869,7 @@ fn check_debugger_output(debugger_run_result: &ProcRes, check_lines: &[String])
 first = false;
 }
 if !failed && rest.len() == 0 {
-i += 1u;
+i += 1;
 }
 if i == num_check_lines {
 // all lines checked
@@ -892,13 +892,13 @@ fn check_error_patterns(props: &TestProps,
 fatal(format!("no error pattern specified in {:?}",
 testfile.display()).as_slice());
 }
-let mut next_err_idx = 0u;
+let mut next_err_idx = 0;
 let mut next_err_pat = &props.error_patterns[next_err_idx];
 let mut done = false;
 for line in output_to_check.lines() {
 if line.contains(next_err_pat.as_slice()) {
 debug!("found error pattern {}", next_err_pat);
-next_err_idx += 1u;
+next_err_idx += 1;
 if next_err_idx == props.error_patterns.len() {
 debug!("found all error patterns");
 done = true;
@@ -910,7 +910,7 @@ fn check_error_patterns(props: &TestProps,
 if done { return; }
 let missing_patterns = &props.error_patterns[next_err_idx..];
-if missing_patterns.len() == 1u {
+if missing_patterns.len() == 1 {
 fatal_proc_rec(format!("error pattern '{}' not found!",
 missing_patterns[0]).as_slice(),
 proc_res);
@@ -1025,7 +1025,7 @@ fn check_expected_errors(expected_errors: Vec<errors::ExpectedError> ,
 }
 fn is_compiler_error_or_warning(line: &str) -> bool {
-let mut i = 0u;
+let mut i = 0;
 return
 scan_until_char(line, ':', &mut i) &&
 scan_char(line, ':', &mut i) &&
@@ -1084,7 +1084,7 @@ fn scan_integer(haystack: &str, idx: &mut uint) -> bool {
 fn scan_string(haystack: &str, needle: &str, idx: &mut uint) -> bool {
 let mut haystack_i = *idx;
-let mut needle_i = 0u;
+let mut needle_i = 0;
 while needle_i < needle.len() {
 if haystack_i >= haystack.len() {
 return false;

@@ -101,7 +101,7 @@ pub struct Arena {
 impl Arena {
 /// Allocates a new Arena with 32 bytes preallocated.
 pub fn new() -> Arena {
-Arena::new_with_size(32u)
+Arena::new_with_size(32)
 }
 /// Allocates a new Arena with `initial_size` bytes preallocated.
@@ -117,7 +117,7 @@ impl Arena {
 fn chunk(size: uint, is_copy: bool) -> Chunk {
 Chunk {
 data: Rc::new(RefCell::new(Vec::with_capacity(size))),
-fill: Cell::new(0u),
+fill: Cell::new(0),
 is_copy: Cell::new(is_copy),
 }
 }
@@ -193,7 +193,7 @@ impl Arena {
 self.chunks.borrow_mut().push(self.copy_head.borrow().clone());
 *self.copy_head.borrow_mut() =
-chunk((new_min_chunk_size + 1u).next_power_of_two(), true);
+chunk((new_min_chunk_size + 1).next_power_of_two(), true);
 return self.alloc_copy_inner(n_bytes, align);
 }
@@ -234,7 +234,7 @@ impl Arena {
 self.chunks.borrow_mut().push(self.head.borrow().clone());
 *self.head.borrow_mut() =
-chunk((new_min_chunk_size + 1u).next_power_of_two(), false);
+chunk((new_min_chunk_size + 1).next_power_of_two(), false);
 return self.alloc_noncopy_inner(n_bytes, align);
 }
@@ -308,7 +308,7 @@ impl Arena {
 #[test]
 fn test_arena_destructors() {
 let arena = Arena::new();
-for i in 0u..10 {
+for i in 0..10 {
 // Arena allocate something with drop glue to make sure it
 // doesn't leak.
 arena.alloc(|| Rc::new(i));
@@ -337,7 +337,7 @@ fn test_arena_alloc_nested() {
 fn test_arena_destructors_fail() {
 let arena = Arena::new();
 // Put some stuff in the arena.
-for i in 0u..10 {
+for i in 0..10 {
 // Arena allocate something with drop glue to make sure it
 // doesn't leak.
 arena.alloc(|| { Rc::new(i) });
@@ -527,7 +527,7 @@ mod tests {
 #[test]
 pub fn test_copy() {
 let arena = TypedArena::new();
-for _ in 0u..100000 {
+for _ in 0..100000 {
 arena.alloc(Point {
 x: 1,
 y: 2,
@@ -582,7 +582,7 @@ mod tests {
 #[test]
 pub fn test_noncopy() {
 let arena = TypedArena::new();
-for _ in 0u..100000 {
+for _ in 0..100000 {
 arena.alloc(Noncopy {
 string: "hello world".to_string(),
 array: vec!( 1, 2, 3, 4, 5 ),

@@ -138,14 +138,14 @@ mod tests {
 fn test_flate_round_trip() {
 let mut r = rand::thread_rng();
 let mut words = vec!();
-for _ in 0u..20 {
-let range = r.gen_range(1u, 10);
+for _ in 0..20 {
+let range = r.gen_range(1, 10);
 let v = r.gen_iter::<u8>().take(range).collect::<Vec<u8>>();
 words.push(v);
 }
-for _ in 0u..20 {
+for _ in 0..20 {
 let mut input = vec![];
-for _ in 0u..2000 {
+for _ in 0..2000 {
 input.push_all(r.choose(words.as_slice()).unwrap().as_slice());
 }
 debug!("de/inflate of {} bytes of random word-sequences",

@@ -227,8 +227,8 @@ pub type Result = result::Result<Matches, Fail>;
 impl Name {
 fn from_str(nm: &str) -> Name {
-if nm.len() == 1u {
-Short(nm.char_at(0u))
+if nm.len() == 1 {
+Short(nm.char_at(0))
 } else {
 Long(nm.to_string())
 }
@@ -694,7 +694,7 @@ pub fn getopts(args: &[String], optgrps: &[OptGroup]) -> Result {
 }
 i += 1;
 }
-for i in 0u..n_opts {
+for i in 0..n_opts {
 let n = vals[i].len();
 let occ = opts[i].occur;
 if occ == Req && n == 0 {

@@ -715,7 +715,7 @@ mod tests {
 impl<'a> GraphWalk<'a, Node, &'a Edge> for LabelledGraph {
 fn nodes(&'a self) -> Nodes<'a,Node> {
-(0u..self.node_labels.len()).collect()
+(0..self.node_labels.len()).collect()
 }
 fn edges(&'a self) -> Edges<'a,&'a Edge> {
 self.edges.iter().collect()

@@ -1935,7 +1935,7 @@ pub mod types {
 pub iSecurityScheme: c_int,
 pub dwMessageSize: DWORD,
 pub dwProviderReserved: DWORD,
-pub szProtocol: [u8; (WSAPROTOCOL_LEN as uint) + 1u],
+pub szProtocol: [u8; (WSAPROTOCOL_LEN as uint) + 1us],
 }
 pub type LPWSAPROTOCOL_INFO = *mut WSAPROTOCOL_INFO;

@@ -24,7 +24,7 @@
 /// fn main() {
 /// log!(log::WARN, "this is a warning {}", "message");
 /// log!(log::DEBUG, "this is a debug message");
-/// log!(6, "this is a custom logging level: {level}", level=6u);
+/// log!(6, "this is a custom logging level: {level}", level=6);
 /// }
 /// ```
 ///
@@ -70,7 +70,7 @@ macro_rules! log {
 /// #[macro_use] extern crate log;
 ///
 /// fn main() {
-/// let error = 3u;
+/// let error = 3;
 /// error!("the build has failed with error code: {}", error);
 /// }
 /// ```
@@ -95,7 +95,7 @@ macro_rules! error {
 /// #[macro_use] extern crate log;
 ///
 /// fn main() {
-/// let code = 3u;
+/// let code = 3;
 /// warn!("you may like to know that a process exited with: {}", code);
 /// }
 /// ```

@@ -268,9 +268,9 @@ mod test {
 // Store the 17*i-th 32-bit word,
 // i.e., the i-th word of the i-th 16-word block
 let mut v : Vec<u32> = Vec::new();
-for _ in 0u..16 {
+for _ in 0..16 {
 v.push(ra.next_u32());
-for _ in 0u..16 {
+for _ in 0..16 {
 ra.next_u32();
 }
 }
@@ -287,7 +287,7 @@ mod test {
 let seed : &[_] = &[0u32; 8];
 let mut rng: ChaChaRng = SeedableRng::from_seed(seed);
 let mut clone = rng.clone();
-for _ in 0u..16 {
+for _ in 0..16 {
 assert_eq!(rng.next_u64(), clone.next_u64());
 }
 }

@@ -103,7 +103,7 @@ mod test {
 fn test_exp() {
 let mut exp = Exp::new(10.0);
 let mut rng = ::test::rng();
-for _ in 0u..1000 {
+for _ in 0..1000 {
 assert!(exp.sample(&mut rng) >= 0.0);
 assert!(exp.ind_sample(&mut rng) >= 0.0);
 }

@@ -332,7 +332,7 @@ mod test {
 fn test_chi_squared_one() {
 let mut chi = ChiSquared::new(1.0);
 let mut rng = ::test::rng();
-for _ in 0u..1000 {
+for _ in 0..1000 {
 chi.sample(&mut rng);
 chi.ind_sample(&mut rng);
 }
@@ -341,7 +341,7 @@ mod test {
 fn test_chi_squared_small() {
 let mut chi = ChiSquared::new(0.5);
 let mut rng = ::test::rng();
-for _ in 0u..1000 {
+for _ in 0..1000 {
 chi.sample(&mut rng);
 chi.ind_sample(&mut rng);
 }
@@ -350,7 +350,7 @@ mod test {
 fn test_chi_squared_large() {
 let mut chi = ChiSquared::new(30.0);
 let mut rng = ::test::rng();
-for _ in 0u..1000 {
+for _ in 0..1000 {
 chi.sample(&mut rng);
 chi.ind_sample(&mut rng);
 }
@@ -365,7 +365,7 @@ mod test {
 fn test_f() {
 let mut f = FisherF::new(2.0, 32.0);
 let mut rng = ::test::rng();
-for _ in 0u..1000 {
+for _ in 0..1000 {
 f.sample(&mut rng);
 f.ind_sample(&mut rng);
 }
@@ -375,7 +375,7 @@ mod test {
 fn test_t() {
 let mut t = StudentT::new(11.0);
 let mut rng = ::test::rng();
-for _ in 0u..1000 {
+for _ in 0..1000 {
 t.sample(&mut rng);
 t.ind_sample(&mut rng);
 }

@@ -97,7 +97,7 @@ pub struct Weighted<T> {
 /// Weighted { weight: 1, item: 'c' });
 /// let wc = WeightedChoice::new(items.as_mut_slice());
 /// let mut rng = rand::thread_rng();
-/// for _ in 0u..16 {
+/// for _ in 0..16 {
 /// // on average prints 'a' 4 times, 'b' 8 and 'c' twice.
 /// println!("{}", wc.ind_sample(&mut rng));
 /// }
@@ -118,7 +118,7 @@ impl<'a, T: Clone> WeightedChoice<'a, T> {
 // strictly speaking, this is subsumed by the total weight == 0 case
 assert!(!items.is_empty(), "WeightedChoice::new called with no items");
-let mut running_total = 0u;
+let mut running_total = 0;
 // we convert the list from individual weights to cumulative
 // weights so we can binary search. This *could* drop elements

@@ -169,7 +169,7 @@ mod tests {
 fn test_normal() {
 let mut norm = Normal::new(10.0, 10.0);
 let mut rng = ::test::rng();
-for _ in 0u..1000 {
+for _ in 0..1000 {
 norm.sample(&mut rng);
 norm.ind_sample(&mut rng);
 }
@@ -185,7 +185,7 @@ mod tests {
 fn test_log_normal() {
 let mut lnorm = LogNormal::new(10.0, 10.0);
 let mut rng = ::test::rng();
-for _ in 0u..1000 {
+for _ in 0..1000 {
 lnorm.sample(&mut rng);
 lnorm.ind_sample(&mut rng);
 }

@@ -38,10 +38,10 @@ use distributions::{Sample, IndependentSample};
 /// use std::rand::distributions::{IndependentSample, Range};
 ///
 /// fn main() {
-/// let between = Range::new(10u, 10000u);
+/// let between = Range::new(10, 10000);
 /// let mut rng = std::rand::thread_rng();
 /// let mut sum = 0;
-/// for _ in 0u..1000 {
+/// for _ in 0..1000 {
 /// sum += between.ind_sample(&mut rng);
 /// }
 /// println!("{}", sum);
@@ -190,7 +190,7 @@ mod tests {
 (Int::min_value(), Int::max_value())];
 for &(low, high) in v {
 let mut sampler: Range<$ty> = Range::new(low, high);
-for _ in 0u..1000 {
+for _ in 0..1000 {
 let v = sampler.sample(&mut rng);
 assert!(low <= v && v < high);
 let v = sampler.ind_sample(&mut rng);
@@ -216,7 +216,7 @@ mod tests {
 (-1e35, 1e35)];
 for &(low, high) in v {
 let mut sampler: Range<$ty> = Range::new(low, high);
-for _ in 0u..1000 {
+for _ in 0..1000 {
 let v = sampler.sample(&mut rng);
 assert!(low <= v && v < high);
 let v = sampler.ind_sample(&mut rng);

@@ -82,7 +82,7 @@ impl IsaacRng {
 }}
 }
-for _ in 0u..4 {
+for _ in 0..4 {
 mix!();
 }
@@ -166,7 +166,7 @@ impl IsaacRng {
 }}
 }
-for i in range_step(0u, MIDPOINT, 4) {
+for i in range_step(0, MIDPOINT, 4) {
 rngstepp!(i + 0, 13);
 rngstepn!(i + 1, 6);
 rngstepp!(i + 2, 2);
@@ -323,7 +323,7 @@ impl Isaac64Rng {
 }}
 }
-for _ in 0u..4 {
+for _ in 0..4 {
 mix!();
 }
@@ -412,10 +412,10 @@ impl Isaac64Rng {
 }}
 }
-rngstepp!(0u, 21);
-rngstepn!(1u, 5);
-rngstepp!(2u, 12);
-rngstepn!(3u, 33);
+rngstepp!(0, 21);
+rngstepn!(1, 5);
+rngstepp!(2, 12);
+rngstepn!(3, 33);
 }
 }
@@ -581,7 +581,7 @@ mod test {
 let seed: &[_] = &[12345, 67890, 54321, 9876];
 let mut rb: IsaacRng = SeedableRng::from_seed(seed);
 // skip forward to the 10000th number
-for _ in 0u..10000 { rb.next_u32(); }
+for _ in 0..10000 { rb.next_u32(); }
 let v = (0..10).map(|_| rb.next_u32()).collect::<Vec<_>>();
 assert_eq!(v,
@@ -603,7 +603,7 @@ mod test {
 let seed: &[_] = &[12345, 67890, 54321, 9876];
 let mut rb: Isaac64Rng = SeedableRng::from_seed(seed);
 // skip forward to the 10000th number
-for _ in 0u..10000 { rb.next_u64(); }
+for _ in 0..10000 { rb.next_u64(); }
 let v = (0..10).map(|_| rb.next_u64()).collect::<Vec<_>>();
 assert_eq!(v,
@@ -618,7 +618,7 @@ mod test {
 let seed: &[_] = &[1, 23, 456, 7890, 12345];
 let mut rng: Isaac64Rng = SeedableRng::from_seed(seed);
 let mut clone = rng.clone();
-for _ in 0u..16 {
+for _ in 0..16 {
 assert_eq!(rng.next_u64(), clone.next_u64());
 }
 }

@@ -222,7 +222,7 @@ pub trait Rng : Sized {
 /// use std::rand::{thread_rng, Rng};
 ///
 /// let mut rng = thread_rng();
-/// let n: uint = rng.gen_range(0u, 10);
+/// let n: uint = rng.gen_range(0, 10);
 /// println!("{}", n);
 /// let m: f64 = rng.gen_range(-40.0f64, 1.3e5f64);
 /// println!("{}", m);
@@ -278,7 +278,7 @@ pub trait Rng : Sized {
 if values.is_empty() {
 None
 } else {
-Some(&values[self.gen_range(0u, values.len())])
+Some(&values[self.gen_range(0, values.len())])
 }
 }
@@ -298,11 +298,11 @@ pub trait Rng : Sized {
 /// ```
 fn shuffle<T>(&mut self, values: &mut [T]) {
 let mut i = values.len();
-while i >= 2u {
+while i >= 2 {
 // invariant: elements with index >= i have been locked in place.
-i -= 1u;
+i -= 1;
 // lock element i in place.
-values.swap(i, self.gen_range(0u, i + 1u));
+values.swap(i, self.gen_range(0, i + 1));
 }
 }
 }

@@ -241,7 +241,7 @@ mod tests {
 // this is unlikely to catch an incorrect implementation that
 // generates exactly 0 or 1, but it keeps it sane.
 let mut rng = thread_rng();
-for _ in 0u..1_000 {
+for _ in 0..1_000 {
 // strict inequalities
 let Open01(f) = rng.gen::<Open01<f64>>();
 assert!(0.0 < f && f < 1.0);
@@ -254,7 +254,7 @@ mod tests {
 #[test]
 fn rand_closed() {
 let mut rng = thread_rng();
-for _ in 0u..1_000 {
+for _ in 0..1_000 {
 // strict inequalities
 let Closed01(f) = rng.gen::<Closed01<f64>>();
 assert!(0.0 <= f && f <= 1.0);

@@ -187,7 +187,7 @@ mod test {
 let mut rs = ReseedingRng::new(Counter {i:0}, 400, ReseedWithDefault);
 let mut i = 0;
-for _ in 0u..1000 {
+for _ in 0..1000 {
 assert_eq!(rs.next_u32(), i % 100);
 i += 1;
 }

@@ -56,7 +56,7 @@ pub struct Doc<'a> {
 impl<'doc> Doc<'doc> {
 pub fn new(data: &'doc [u8]) -> Doc<'doc> {
-Doc { data: data, start: 0u, end: data.len() }
+Doc { data: data, start: 0, end: data.len() }
 }
 pub fn get<'a>(&'a self, tag: uint) -> Doc<'a> {
@@ -170,25 +170,25 @@ pub mod reader {
 fn vuint_at_slow(data: &[u8], start: uint) -> DecodeResult<Res> {
 let a = data[start];
 if a & 0x80u8 != 0u8 {
-return Ok(Res {val: (a & 0x7fu8) as uint, next: start + 1u});
+return Ok(Res {val: (a & 0x7fu8) as uint, next: start + 1});
 }
 if a & 0x40u8 != 0u8 {
-return Ok(Res {val: ((a & 0x3fu8) as uint) << 8u |
-(data[start + 1u] as uint),
-next: start + 2u});
+return Ok(Res {val: ((a & 0x3fu8) as uint) << 8 |
+(data[start + 1] as uint),
+next: start + 2});
 }
 if a & 0x20u8 != 0u8 {
-return Ok(Res {val: ((a & 0x1fu8) as uint) << 16u |
-(data[start + 1u] as uint) << 8u |
-(data[start + 2u] as uint),
-next: start + 3u});
+return Ok(Res {val: ((a & 0x1fu8) as uint) << 16 |
+(data[start + 1] as uint) << 8 |
+(data[start + 2] as uint),
+next: start + 3});
 }
 if a & 0x10u8 != 0u8 {
-return Ok(Res {val: ((a & 0x0fu8) as uint) << 24u |
-(data[start + 1u] as uint) << 16u |
-(data[start + 2u] as uint) << 8u |
-(data[start + 3u] as uint),
-next: start + 4u});
+return Ok(Res {val: ((a & 0x0fu8) as uint) << 24 |
+(data[start + 1] as uint) << 16 |
+(data[start + 2] as uint) << 8 |
+(data[start + 3] as uint),
+next: start + 4});
 }
 Err(IntTooBig(a as uint))
 }
@@ -225,7 +225,7 @@ pub mod reader {
 let ptr = data.as_ptr().offset(start as int) as *const u32;
 let val = Int::from_be(*ptr);
-let i = (val >> 28u) as uint;
+let i = (val >> 28) as uint;
 let (shift, mask) = SHIFT_MASK_TABLE[i];
 Ok(Res {
 val: ((val >> shift) & mask) as uint,
@@ -311,23 +311,23 @@ pub mod reader {
 pub fn doc_as_u8(d: Doc) -> u8 {
-assert_eq!(d.end, d.start + 1u);
+assert_eq!(d.end, d.start + 1);
 d.data[d.start]
 }
 pub fn doc_as_u16(d: Doc) -> u16 {
-assert_eq!(d.end, d.start + 2u);
-u64_from_be_bytes(d.data, d.start, 2u) as u16
+assert_eq!(d.end, d.start + 2);
+u64_from_be_bytes(d.data, d.start, 2) as u16
 }
 pub fn doc_as_u32(d: Doc) -> u32 {
-assert_eq!(d.end, d.start + 4u);
-u64_from_be_bytes(d.data, d.start, 4u) as u32
+assert_eq!(d.end, d.start + 4);
+u64_from_be_bytes(d.data, d.start, 4) as u32
 }
 pub fn doc_as_u64(d: Doc) -> u64 {
-assert_eq!(d.end, d.start + 8u);
-u64_from_be_bytes(d.data, d.start, 8u)
+assert_eq!(d.end, d.start + 8);
+u64_from_be_bytes(d.data, d.start, 8)
 }
 pub fn doc_as_i8(d: Doc) -> i8 { doc_as_u8(d) as i8 }
@@ -712,11 +712,11 @@ pub mod writer {
 fn write_sized_vuint<W: Writer>(w: &mut W, n: uint, size: uint) -> EncodeResult {
 match size {
-1u => w.write_all(&[0x80u8 | (n as u8)]),
-2u => w.write_all(&[0x40u8 | ((n >> 8_u) as u8), n as u8]),
-3u => w.write_all(&[0x20u8 | ((n >> 16_u) as u8), (n >> 8_u) as u8,
+1 => w.write_all(&[0x80u8 | (n as u8)]),
+2 => w.write_all(&[0x40u8 | ((n >> 8) as u8), n as u8]),
+3 => w.write_all(&[0x20u8 | ((n >> 16) as u8), (n >> 8_u) as u8,
 n as u8]),
-4u => w.write_all(&[0x10u8 | ((n >> 24_u) as u8), (n >> 16_u) as u8,
+4 => w.write_all(&[0x10u8 | ((n >> 24) as u8), (n >> 16_u) as u8,
 (n >> 8_u) as u8, n as u8]),
 _ => Err(old_io::IoError {
 kind: old_io::OtherIoError,
@@ -727,10 +727,10 @@ pub mod writer {
 }
 fn write_vuint<W: Writer>(w: &mut W, n: uint) -> EncodeResult {
-if n < 0x7f_u { return write_sized_vuint(w, n, 1u); }
-if n < 0x4000_u { return write_sized_vuint(w, n, 2u); }
-if n < 0x200000_u { return write_sized_vuint(w, n, 3u); }
-if n < 0x10000000_u { return write_sized_vuint(w, n, 4u); }
+if n < 0x7f { return write_sized_vuint(w, n, 1); }
+if n < 0x4000 { return write_sized_vuint(w, n, 2); }
+if n < 0x200000 { return write_sized_vuint(w, n, 3); }
+if n < 0x10000000 { return write_sized_vuint(w, n, 4); }
 Err(old_io::IoError {
 kind: old_io::OtherIoError,
 desc: "int too big",
@@ -772,7 +772,7 @@ pub mod writer {
 let cur_pos = try!(self.writer.tell());
 try!(self.writer.seek(last_size_pos as i64, old_io::SeekSet));
 let size = cur_pos as uint - last_size_pos - 4;
-try!(write_sized_vuint(self.writer, size, 4u));
+try!(write_sized_vuint(self.writer, size, 4));
 let r = try!(self.writer.seek(cur_pos as i64, old_io::SeekSet));
 debug!("End tag (size = {:?})", size);
@@ -794,19 +794,19 @@ pub mod writer {
 }
 pub fn wr_tagged_u64(&mut self, tag_id: uint, v: u64) -> EncodeResult {
-u64_to_be_bytes(v, 8u, |v| {
+u64_to_be_bytes(v, 8, |v| {
 self.wr_tagged_bytes(tag_id, v)
 })
 }
 pub fn wr_tagged_u32(&mut self, tag_id: uint, v: u32) -> EncodeResult{
-u64_to_be_bytes(v as u64, 4u, |v| {
+u64_to_be_bytes(v as u64, 4, |v| {
 self.wr_tagged_bytes(tag_id, v)
 })
 }
 pub fn wr_tagged_u16(&mut self, tag_id: uint, v: u16) -> EncodeResult {
-u64_to_be_bytes(v as u64, 2u, |v| {
+u64_to_be_bytes(v as u64, 2, |v| {
 self.wr_tagged_bytes(tag_id, v)
 })
 }
@@ -816,19 +816,19 @@ pub mod writer {
 }
 pub fn wr_tagged_i64(&mut self, tag_id: uint, v: i64) -> EncodeResult {
-u64_to_be_bytes(v as u64, 8u, |v| {
+u64_to_be_bytes(v as u64, 8, |v| {
 self.wr_tagged_bytes(tag_id, v)
 })
 }
 pub fn wr_tagged_i32(&mut self, tag_id: uint, v: i32) -> EncodeResult {
-u64_to_be_bytes(v as u64, 4u, |v| {
+u64_to_be_bytes(v as u64, 4, |v| {
 self.wr_tagged_bytes(tag_id, v)
 })
 }
 pub fn wr_tagged_i16(&mut self, tag_id: uint, v: i16) -> EncodeResult {
-u64_to_be_bytes(v as u64, 2u, |v| {
+u64_to_be_bytes(v as u64, 2, |v| {
 self.wr_tagged_bytes(tag_id, v)
 })
 }
@@ -1190,7 +1190,7 @@ mod bench {
 _ => i as u8,
 }
 }).collect::<Vec<_>>();
-let mut sum = 0u;
+let mut sum = 0;
 b.iter(|| {
 let mut i = 0;
 while i < data.len() {
@@ -1208,7 +1208,7 @@ mod bench {
 _ => i as u8
 }
 }).collect::<Vec<_>>();
-let mut sum = 0u;
+let mut sum = 0;
 b.iter(|| {
 let mut i = 1;
 while i < data.len() {
@@ -1227,7 +1227,7 @@ mod bench {
 _ => 0u8
 }
 }).collect::<Vec<_>>();
-let mut sum = 0u;
+let mut sum = 0;
 b.iter(|| {
 let mut i = 0;
 while i < data.len() {
@@ -1246,7 +1246,7 @@ mod bench {
 _ => 0u8
 }
 }).collect::<Vec<_>>();
-let mut sum = 0u;
+let mut sum = 0;
 b.iter(|| {
 let mut i = 1;
 while i < data.len() {

@@ -8,8 +8,8 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
-pub const BOX_FIELD_DROP_GLUE: uint = 1u;
-pub const BOX_FIELD_BODY: uint = 4u;
+pub const BOX_FIELD_DROP_GLUE: uint = 1;
+pub const BOX_FIELD_BODY: uint = 4;
 /// The first half of a fat pointer.
 /// - For a closure, this is the code address.
@@ -21,4 +21,4 @@ pub const FAT_PTR_ADDR: uint = 0;
 /// - For a closure, this is the address of the environment.
 /// - For an object or trait instance, this is the address of the vtable.
 /// - For a slice, this is the length.
-pub const FAT_PTR_EXTRA: uint = 1u;
+pub const FAT_PTR_EXTRA: uint = 1;

@@ -40,7 +40,7 @@ fn read_u32_be(input: &[u8]) -> u32 {
 /// Read a vector of bytes into a vector of u32s. The values are read in big-endian format.
 fn read_u32v_be(dst: &mut[u32], input: &[u8]) {
 assert!(dst.len() * 4 == input.len());
-let mut pos = 0u;
+let mut pos = 0;
 for chunk in input.chunks(4) {
 dst[pos] = read_u32_be(chunk);
 pos += 1;
@@ -366,7 +366,7 @@ impl Engine256State {
 // Putting the message schedule inside the same loop as the round calculations allows for
 // the compiler to generate better code.
-for t in range_step(0u, 48, 8) {
+for t in range_step(0, 48, 8) {
 schedule_round!(t + 16);
 schedule_round!(t + 17);
 schedule_round!(t + 18);
@@ -386,7 +386,7 @@ impl Engine256State {
 sha2_round!(b, c, d, e, f, g, h, a, K32, t + 7);
 }
-for t in range_step(48u, 64, 8) {
+for t in range_step(48, 64, 8) {
 sha2_round!(a, b, c, d, e, f, g, h, K32, t);
 sha2_round!(h, a, b, c, d, e, f, g, K32, t + 1);
 sha2_round!(g, h, a, b, c, d, e, f, K32, t + 2);
@@ -569,8 +569,8 @@ mod tests {
 sh.reset();
 let len = t.input.len();
 let mut left = len;
-while left > 0u {
-let take = (left + 1u) / 2u;
+while left > 0 {
+let take = (left + 1) / 2;
 sh.input_str(&t.input[len - left..take + len - left]);
 left = left - take;
 }

@@ -103,7 +103,7 @@ impl Svh {
 let hash = state.finish();
 return Svh {
-hash: range_step(0u, 64u, 4u).map(|i| hex(hash >> i)).collect()
+hash: range_step(0, 64, 4).map(|i| hex(hash >> i)).collect()
 };
 fn hex(b: u64) -> char {

@@ -126,7 +126,7 @@ fn run_compiler(args: &[String]) {
 let odir = matches.opt_str("out-dir").map(|o| Path::new(o));
 let ofile = matches.opt_str("o").map(|o| Path::new(o));
 let (input, input_file_path) = match matches.free.len() {
-0u => {
+0 => {
 if sopts.describe_lints {
 let mut ls = lint::LintStore::new();
 ls.register_builtin(None);
@@ -139,7 +139,7 @@ fn run_compiler(args: &[String]) {
 }
 early_error("no input filename given");
 }
-1u => {
+1 => {
 let ifile = &matches.free[0][];
 if ifile == "-" {
 let contents = old_io::stdin().read_to_end().unwrap();

@@ -399,7 +399,7 @@ impl UserIdentifiedItem {
 };
 let mut saw_node = ast::DUMMY_NODE_ID;
-let mut seen = 0u;
+let mut seen = 0;
 for node in self.all_matching_node_ids(map) {
 saw_node = node;
 seen += 1;

@@ -997,7 +997,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
 /// Resolves all imports for the crate. This method performs the fixed-
 /// point iteration.
 fn resolve_imports(&mut self) {
-let mut i = 0u;
+let mut i = 0;
 let mut prev_unresolved_imports = 0;
 loop {
 debug!("(resolving imports) iteration {}, {} imports left",

@@ -347,9 +347,9 @@ impl<'a,'tcx> ConfirmContext<'a,'tcx> {
 let num_supplied_types = supplied_method_types.len();
 let num_method_types = pick.method_ty.generics.types.len(subst::FnSpace);
 let method_types = {
-if num_supplied_types == 0u {
+if num_supplied_types == 0 {
 self.fcx.infcx().next_ty_vars(num_method_types)
-} else if num_method_types == 0u {
+} else if num_method_types == 0 {
 span_err!(self.tcx().sess, self.span, E0035,
 "does not take type parameters");
 self.fcx.infcx().next_ty_vars(num_method_types)

@@ -127,7 +127,7 @@ pub fn report_error<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
 span_note!(fcx.sess(), method_span,
 "candidate #{} is defined in an impl{} for the type `{}`",
-idx + 1u,
+idx + 1,
 insertion,
 impl_ty.user_string(fcx.tcx()));
 }
@@ -136,7 +136,7 @@ pub fn report_error<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
 let method_span = fcx.tcx().map.def_id_span(method.def_id, span);
 span_note!(fcx.sess(), method_span,
 "candidate #{} is defined in the trait `{}`",
-idx + 1u,
+idx + 1,
 ty::item_path_str(fcx.tcx(), trait_did));
 }
 }

@@ -5193,7 +5193,7 @@ pub fn check_bounds_are_used<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
 tps.len(), ppaux::ty_to_string(ccx.tcx, ty));
 // make a vector of booleans initially false, set to true when used
-if tps.len() == 0u { return; }
+if tps.len() == 0 { return; }
 let mut tps_used: Vec<_> = repeat(false).take(tps.len()).collect();
 ty::walk_ty(ty, |t| {
@@ -5259,13 +5259,13 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &ast::ForeignItem) {
 let (n_tps, inputs, output) = match name.get() {
 "breakpoint" => (0, Vec::new(), ty::mk_nil(tcx)),
 "size_of" |
-"pref_align_of" | "min_align_of" => (1u, Vec::new(), ccx.tcx.types.uint),
-"init" => (1u, Vec::new(), param(ccx, 0)),
-"uninit" => (1u, Vec::new(), param(ccx, 0)),
-"forget" => (1u, vec!( param(ccx, 0) ), ty::mk_nil(tcx)),
+"pref_align_of" | "min_align_of" => (1, Vec::new(), ccx.tcx.types.uint),
+"init" => (1, Vec::new(), param(ccx, 0)),
+"uninit" => (1, Vec::new(), param(ccx, 0)),
+"forget" => (1, vec!( param(ccx, 0) ), ty::mk_nil(tcx)),
 "transmute" => (2, vec!( param(ccx, 0) ), param(ccx, 1)),
 "move_val_init" => {
-(1u,
+(1,
 vec!(
 ty::mk_mut_rptr(tcx,
 tcx.mk_region(ty::ReLateBound(ty::DebruijnIndex::new(1),
@@ -5275,8 +5275,8 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &ast::ForeignItem) {
 ),
 ty::mk_nil(tcx))
 }
-"needs_drop" => (1u, Vec::new(), ccx.tcx.types.bool),
-"owns_managed" => (1u, Vec::new(), ccx.tcx.types.bool),
+"needs_drop" => (1, Vec::new(), ccx.tcx.types.bool),
+"owns_managed" => (1, Vec::new(), ccx.tcx.types.bool),
 "get_tydesc" => {
 let tydesc_ty = match ty::get_tydesc_ty(ccx.tcx) {
@@ -5287,9 +5287,9 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &ast::ForeignItem) {
 ty: tydesc_ty,
 mutbl: ast::MutImmutable
 });
-(1u, Vec::new(), td_ptr)
+(1, Vec::new(), td_ptr)
 }
-"type_id" => (1u, Vec::new(), ccx.tcx.types.u64),
+"type_id" => (1, Vec::new(), ccx.tcx.types.u64),
 "offset" => {
 (1,
 vec!(

@@ -921,7 +921,7 @@ fn constrain_autoderefs<'a, 'tcx>(rcx: &mut Rcx<'a, 'tcx>,
 derefd_ty.repr(rcx.tcx()));
 let r_deref_expr = ty::ReScope(CodeExtent::from_node_id(deref_expr.id));
-for i in 0u..derefs {
+for i in 0..derefs {
 let method_call = MethodCall::autoderef(deref_expr.id, i);
 debug!("constrain_autoderefs: method_call={:?} (of {:?} total)", method_call, derefs);

@@ -36,7 +36,7 @@ impl<T:Decodable> Decodable for DList<T> {
 fn decode<D: Decoder>(d: &mut D) -> Result<DList<T>, D::Error> {
 d.read_seq(|d, len| {
 let mut list = DList::new();
-for i in 0u..len {
+for i in 0..len {
 list.push_back(try!(d.read_seq_elt(i, |d| Decodable::decode(d))));
 }
 Ok(list)
@@ -59,7 +59,7 @@ impl<T:Decodable> Decodable for RingBuf<T> {
 fn decode<D: Decoder>(d: &mut D) -> Result<RingBuf<T>, D::Error> {
 d.read_seq(|d, len| {
 let mut deque: RingBuf<T> = RingBuf::new();
-for i in 0u..len {
+for i in 0..len {
 deque.push_back(try!(d.read_seq_elt(i, |d| Decodable::decode(d))));
 }
 Ok(deque)
@@ -91,7 +91,7 @@ impl<
 fn decode<D: Decoder>(d: &mut D) -> Result<BTreeMap<K, V>, D::Error> {
 d.read_map(|d, len| {
 let mut map = BTreeMap::new();
-for i in 0u..len {
+for i in 0..len {
 let key = try!(d.read_map_elt_key(i, |d| Decodable::decode(d)));
 let val = try!(d.read_map_elt_val(i, |d| Decodable::decode(d)));
 map.insert(key, val);
@@ -122,7 +122,7 @@ impl<
 fn decode<D: Decoder>(d: &mut D) -> Result<BTreeSet<T>, D::Error> {
 d.read_seq(|d, len| {
 let mut set = BTreeSet::new();
-for i in 0u..len {
+for i in 0..len {
 set.insert(try!(d.read_seq_elt(i, |d| Decodable::decode(d))));
 }
 Ok(set)
@@ -186,7 +186,7 @@ impl<K, V, S> Decodable for HashMap<K, V, S>
 d.read_map(|d, len| {
 let state = Default::default();
 let mut map = HashMap::with_capacity_and_hash_state(len, state);
-for i in 0u..len {
+for i in 0..len {
 let key = try!(d.read_map_elt_key(i, |d| Decodable::decode(d)));
 let val = try!(d.read_map_elt_val(i, |d| Decodable::decode(d)));
 map.insert(key, val);
@@ -222,7 +222,7 @@ impl<T, S> Decodable for HashSet<T, S>
 d.read_seq(|d, len| {
 let state = Default::default();
 let mut set = HashSet::with_capacity_and_hash_state(len, state);
-for i in 0u..len {
+for i in 0..len {
 set.insert(try!(d.read_seq_elt(i, |d| Decodable::decode(d))));
 }
 Ok(set)
@@ -246,7 +246,7 @@ impl<V: Decodable> Decodable for VecMap<V> {
 fn decode<D: Decoder>(d: &mut D) -> Result<VecMap<V>, D::Error> {
 d.read_map(|d, len| {
 let mut map = VecMap::new();
-for i in 0u..len {
+for i in 0..len {
 let key = try!(d.read_map_elt_key(i, |d| Decodable::decode(d)));
 let val = try!(d.read_map_elt_val(i, |d| Decodable::decode(d)));
 map.insert(key, val);

@@ -185,14 +185,14 @@ mod tests {
 #[test]
 pub fn test_to_hex_all_bytes() {
-for i in 0u..256 {
+for i in 0..256 {
 assert_eq!([i as u8].to_hex(), format!("{:02x}", i as uint));
 }
 }
 #[test]
 pub fn test_from_hex_all_bytes() {
-for i in 0u..256 {
+for i in 0..256 {
 let ii: &[u8] = &[i as u8];
 assert_eq!(format!("{:02x}", i as uint).from_hex()
 .unwrap(),

@@ -457,8 +457,8 @@ fn spaces(wr: &mut fmt::Writer, mut n: uint) -> EncodeResult {
 fn fmt_number_or_null(v: f64) -> string::String {
 match v.classify() {
 Fp::Nan | Fp::Infinite => string::String::from_str("null"),
-_ if v.fract() != 0f64 => f64::to_str_digits(v, 6u),
-_ => f64::to_str_digits(v, 6u) + ".0",
+_ if v.fract() != 0f64 => f64::to_str_digits(v, 6),
+_ => f64::to_str_digits(v, 6) + ".0",
 }
 }
@@ -1474,10 +1474,10 @@ impl<T: Iterator<Item=char>> Parser<T> {
 self.ch = self.rdr.next();
 if self.ch_is('\n') {
-self.line += 1u;
-self.col = 1u;
+self.line += 1;
+self.col = 1;
 } else {
-self.col += 1u;
+self.col += 1;
 }
 }
@@ -1614,7 +1614,7 @@ impl<T: Iterator<Item=char>> Parser<T> {
 fn parse_exponent(&mut self, mut res: f64) -> Result<f64, ParserError> {
 self.bump();
-let mut exp = 0u;
+let mut exp = 0;
 let mut neg_exp = false;
 if self.ch_is('+') {
@@ -1652,7 +1652,7 @@ impl<T: Iterator<Item=char>> Parser<T> {
 }
 fn decode_hex_escape(&mut self) -> Result<u16, ParserError> {
-let mut i = 0u;
+let mut i = 0;
 let mut n = 0u16;
 while i < 4 && !self.eof() {
 self.bump();
@@ -1667,7 +1667,7 @@ impl<T: Iterator<Item=char>> Parser<T> {
 _ => return self.error(InvalidEscape)
 };
-i += 1u;
+i += 1;
 }
 // Error out if we didn't parse 4 digits.
@@ -2638,7 +2638,7 @@ mod tests {
 fn test_decode_option_some() {
 let s = "{ \"opt\": 10 }";
 let obj: OptionData = super::decode(s).unwrap();
-assert_eq!(obj, OptionData { opt: Some(10u) });
+assert_eq!(obj, OptionData { opt: Some(10) });
 }
 #[test]
@@ -3092,10 +3092,10 @@ mod tests {
 #[test]
 fn test_decode_tuple() {
 let t: (uint, uint, uint) = super::decode("[1, 2, 3]").unwrap();
-assert_eq!(t, (1u, 2, 3));
+assert_eq!(t, (1, 2, 3));
 let t: (uint, string::String) = super::decode("[1, \"two\"]").unwrap();
-assert_eq!(t, (1u, "two".to_string()));
+assert_eq!(t, (1, "two".to_string()));
 }
 #[test]
@@ -3228,7 +3228,7 @@ mod tests {
 #[test]
 fn test_multiline_errors() {
 assert_eq!(from_str("{\n \"foo\":\n \"bar\""),
-Err(SyntaxError(EOFWhileParsingObject, 3u, 8u)));
+Err(SyntaxError(EOFWhileParsingObject, 3, 8)));
 }
 #[derive(RustcDecodable)]
@@ -3512,7 +3512,7 @@ mod tests {
 }
 // Test up to 4 spaces of indents (more?)
-for i in 0..4u {
+for i in 0..4 {
 let mut writer = Vec::new();
 write!(&mut writer, "{}",
 super::as_pretty_json(&json).indent(i)).unwrap();
@@ -3924,22 +3924,22 @@ mod tests {
 assert_eq!(false.to_json(), Boolean(false));
 assert_eq!("abc".to_json(), String("abc".to_string()));
 assert_eq!("abc".to_string().to_json(), String("abc".to_string()));
-assert_eq!((1u, 2u).to_json(), array2);
-assert_eq!((1u, 2u, 3u).to_json(), array3);
-assert_eq!([1u, 2].to_json(), array2);
-assert_eq!((&[1u, 2, 3]).to_json(), array3);
-assert_eq!((vec![1u, 2]).to_json(), array2);
-assert_eq!(vec!(1u, 2, 3).to_json(), array3);
+assert_eq!((1us, 2us).to_json(), array2);
+assert_eq!((1us, 2us, 3us).to_json(), array3);
+assert_eq!([1us, 2us].to_json(), array2);
+assert_eq!((&[1us, 2us, 3us]).to_json(), array3);
+assert_eq!((vec![1us, 2us]).to_json(), array2);
+assert_eq!(vec!(1us, 2us, 3us).to_json(), array3);
 let mut tree_map = BTreeMap::new();
-tree_map.insert("a".to_string(), 1u);
+tree_map.insert("a".to_string(), 1us);
 tree_map.insert("b".to_string(), 2);
 assert_eq!(tree_map.to_json(), object);
 let mut hash_map = HashMap::new();
-hash_map.insert("a".to_string(), 1u);
+hash_map.insert("a".to_string(), 1us);
 hash_map.insert("b".to_string(), 2);
 assert_eq!(hash_map.to_json(), object);
 assert_eq!(Some(15).to_json(), I64(15));
-assert_eq!(Some(15u).to_json(), U64(15));
+assert_eq!(Some(15us).to_json(), U64(15));
 assert_eq!(None::<int>.to_json(), Null);
 }

@@ -498,7 +498,7 @@ macro_rules! peel {
 /// Evaluates to the number of identifiers passed to it, for example: `count_idents!(a, b, c) == 3
 macro_rules! count_idents {
-() => { 0u };
+() => { 0 };
 ($_i:ident, $($rest:ident,)*) => { 1 + count_idents!($($rest,)*) }
 }

@@ -464,14 +464,14 @@ impl<T: Writer> ConsoleTestState<T> {
 out: out,
 log_out: log_out,
 use_color: use_color(opts),
-total: 0u,
-passed: 0u,
-failed: 0u,
-ignored: 0u,
-measured: 0u,
+total: 0,
+passed: 0,
+failed: 0,
+ignored: 0,
+measured: 0,
 metrics: MetricMap::new(),
 failures: Vec::new(),
-max_name_len: 0u,
+max_name_len: 0,
 })
 }
@@ -601,7 +601,7 @@ impl<T: Writer> ConsoleTestState<T> {
 pub fn write_run_finish(&mut self) -> old_io::IoResult<bool> {
 assert!(self.passed + self.failed + self.ignored + self.measured == self.total);
-let success = self.failed == 0u;
+let success = self.failed == 0;
 if !success {
 try!(self.write_failures());
 }
@@ -679,7 +679,7 @@ pub fn run_tests_console(opts: &TestOpts, tests: Vec<TestDescAndFn> ) -> old_io:
 let mut st = try!(ConsoleTestState::new(opts, None::<StdWriter>));
 fn len_if_padded(t: &TestDescAndFn) -> uint {
 match t.testfn.padding() {
-PadNone => 0u,
+PadNone => 0,
 PadOnLeft | PadOnRight => t.desc.name.as_slice().len(),
 }
 }
@@ -712,12 +712,12 @@ fn should_sort_failures_before_printing_them() {
 log_out: None,
 out: Raw(Vec::new()),
 use_color: false,
-total: 0u,
-passed: 0u,
-failed: 0u,
-ignored: 0u,
-measured: 0u,
-max_name_len: 10u,
+total: 0,
+passed: 0,
+failed: 0,
+ignored: 0,
+measured: 0,
+max_name_len: 10,
 metrics: MetricMap::new(),
 failures: vec!((test_b, Vec::new()), (test_a, Vec::new()))
 };

@@ -153,7 +153,7 @@ impl<'a> Iterator for Graphemes<'a> {
 #[inline]
 fn size_hint(&self) -> (uint, Option<uint>) {
 let slen = self.string.len();
-(cmp::min(slen, 1u), Some(slen))
+(cmp::min(slen, 1), Some(slen))
 }
 #[inline]