diff --git a/src/compiletest/runtest.rs b/src/compiletest/runtest.rs index 658c0cb3f4e..047a9e38d52 100644 --- a/src/compiletest/runtest.rs +++ b/src/compiletest/runtest.rs @@ -688,7 +688,7 @@ fn run_debuginfo_lldb_test(config: &Config, props: &TestProps, testfile: &Path) .unwrap() .to_string(); - script_str.push_str(&format!("command script import {}\n", &rust_pp_module_abs_path[])[]); + script_str.push_str(&format!("command script import {}\n", &rust_pp_module_abs_path[..])[]); script_str.push_str("type summary add --no-value "); script_str.push_str("--python-function lldb_rust_formatters.print_val "); script_str.push_str("-x \".*\" --category Rust\n"); diff --git a/src/doc/reference.md b/src/doc/reference.md index 00ed5d4562b..ad8377a59e4 100644 --- a/src/doc/reference.md +++ b/src/doc/reference.md @@ -3587,7 +3587,7 @@ An example of each kind: ```{rust} let vec: Vec = vec![1, 2, 3]; let arr: [i32; 3] = [1, 2, 3]; -let s: &[i32] = &vec[]; +let s: &[i32] = &vec[..]; ``` As you can see, the `vec!` macro allows you to create a `Vec` easily. The diff --git a/src/doc/trpl/patterns.md b/src/doc/trpl/patterns.md index 122cffe3697..9e82e48fd18 100644 --- a/src/doc/trpl/patterns.md +++ b/src/doc/trpl/patterns.md @@ -180,7 +180,7 @@ If you want to match against a slice or array, you can use `&`: fn main() { let v = vec!["match_this", "1"]; - match &v[] { + match &v[..] { ["match_this", second] => println!("The second element is {}", second), _ => {}, } diff --git a/src/libcollections/slice.rs b/src/libcollections/slice.rs index 06ae8127c00..2147b647146 100644 --- a/src/libcollections/slice.rs +++ b/src/libcollections/slice.rs @@ -1177,12 +1177,12 @@ impl ElementSwaps { #[unstable(feature = "collections", reason = "trait is unstable")] impl BorrowFrom> for [T] { - fn borrow_from(owned: &Vec) -> &[T] { &owned[] } + fn borrow_from(owned: &Vec) -> &[T] { &owned[..] } } #[unstable(feature = "collections", reason = "trait is unstable")] impl BorrowFromMut> for [T] { - fn borrow_from_mut(owned: &mut Vec) -> &mut [T] { &mut owned[] } + fn borrow_from_mut(owned: &mut Vec) -> &mut [T] { &mut owned[..] } } #[unstable(feature = "collections", reason = "trait is unstable")] @@ -1743,7 +1743,7 @@ mod tests { #[test] fn test_slice_from() { let vec: &[_] = &[1, 2, 3, 4]; - assert_eq!(&vec[], vec); + assert_eq!(&vec[..], vec); let b: &[_] = &[3, 4]; assert_eq!(&vec[2..], b); let b: &[_] = &[]; @@ -1996,9 +1996,9 @@ mod tests { #[test] fn test_lexicographic_permutations_empty_and_short() { - let empty : &mut[i32] = &mut[]; + let empty : &mut[i32] = &mut[..]; assert!(empty.next_permutation() == false); - let b: &mut[i32] = &mut[]; + let b: &mut[i32] = &mut[..]; assert!(empty == b); assert!(empty.prev_permutation() == false); assert!(empty == b); @@ -2264,15 +2264,15 @@ mod tests { #[test] fn test_total_ord() { let c = &[1, 2, 3]; - [1, 2, 3, 4][].cmp(c) == Greater; + [1, 2, 3, 4][..].cmp(c) == Greater; let c = &[1, 2, 3, 4]; - [1, 2, 3][].cmp(c) == Less; + [1, 2, 3][..].cmp(c) == Less; let c = &[1, 2, 3, 6]; - [1, 2, 3, 4][].cmp(c) == Equal; + [1, 2, 3, 4][..].cmp(c) == Equal; let c = &[1, 2, 3, 4, 5, 6]; - [1, 2, 3, 4, 5, 5, 5, 5][].cmp(c) == Less; + [1, 2, 3, 4, 5, 5, 5, 5][..].cmp(c) == Less; let c = &[1, 2, 3, 4]; - [2, 2][].cmp(c) == Greater; + [2, 2][..].cmp(c) == Greater; } #[test] diff --git a/src/libcollections/str.rs b/src/libcollections/str.rs index 2d4dc2bcf30..6ba5db0fb9d 100644 --- a/src/libcollections/str.rs +++ b/src/libcollections/str.rs @@ -388,7 +388,7 @@ macro_rules! 
utf8_acc_cont_byte { #[unstable(feature = "collections", reason = "trait is unstable")] impl BorrowFrom for str { - fn borrow_from(owned: &String) -> &str { &owned[] } + fn borrow_from(owned: &String) -> &str { &owned[..] } } #[unstable(feature = "collections", reason = "trait is unstable")] @@ -466,7 +466,7 @@ pub trait StrExt: Index { reason = "this functionality may be moved to libunicode")] fn nfd_chars(&self) -> Decompositions { Decompositions { - iter: self[].chars(), + iter: self[..].chars(), buffer: Vec::new(), sorted: false, kind: Canonical @@ -480,7 +480,7 @@ pub trait StrExt: Index { reason = "this functionality may be moved to libunicode")] fn nfkd_chars(&self) -> Decompositions { Decompositions { - iter: self[].chars(), + iter: self[..].chars(), buffer: Vec::new(), sorted: false, kind: Compatible @@ -530,7 +530,7 @@ pub trait StrExt: Index { /// ``` #[stable(feature = "rust1", since = "1.0.0")] fn contains(&self, pat: &str) -> bool { - core_str::StrExt::contains(&self[], pat) + core_str::StrExt::contains(&self[..], pat) } /// Returns true if a string contains a char pattern. @@ -547,7 +547,7 @@ pub trait StrExt: Index { #[unstable(feature = "collections", reason = "might get removed in favour of a more generic contains()")] fn contains_char(&self, pat: P) -> bool { - core_str::StrExt::contains_char(&self[], pat) + core_str::StrExt::contains_char(&self[..], pat) } /// An iterator over the characters of `self`. Note, this iterates @@ -561,7 +561,7 @@ pub trait StrExt: Index { /// ``` #[stable(feature = "rust1", since = "1.0.0")] fn chars(&self) -> Chars { - core_str::StrExt::chars(&self[]) + core_str::StrExt::chars(&self[..]) } /// An iterator over the bytes of `self` @@ -574,13 +574,13 @@ pub trait StrExt: Index { /// ``` #[stable(feature = "rust1", since = "1.0.0")] fn bytes(&self) -> Bytes { - core_str::StrExt::bytes(&self[]) + core_str::StrExt::bytes(&self[..]) } /// An iterator over the characters of `self` and their byte offsets. #[stable(feature = "rust1", since = "1.0.0")] fn char_indices(&self) -> CharIndices { - core_str::StrExt::char_indices(&self[]) + core_str::StrExt::char_indices(&self[..]) } /// An iterator over substrings of `self`, separated by characters @@ -603,7 +603,7 @@ pub trait StrExt: Index { /// ``` #[stable(feature = "rust1", since = "1.0.0")] fn split(&self, pat: P) -> Split

{ - core_str::StrExt::split(&self[], pat) + core_str::StrExt::split(&self[..], pat) } /// An iterator over substrings of `self`, separated by characters @@ -630,7 +630,7 @@ pub trait StrExt: Index { /// ``` #[stable(feature = "rust1", since = "1.0.0")] fn splitn(&self, count: usize, pat: P) -> SplitN

{ - core_str::StrExt::splitn(&self[], count, pat) + core_str::StrExt::splitn(&self[..], count, pat) } /// An iterator over substrings of `self`, separated by characters @@ -659,7 +659,7 @@ pub trait StrExt: Index { /// ``` #[unstable(feature = "collections", reason = "might get removed")] fn split_terminator(&self, pat: P) -> SplitTerminator

{ - core_str::StrExt::split_terminator(&self[], pat) + core_str::StrExt::split_terminator(&self[..], pat) } /// An iterator over substrings of `self`, separated by characters @@ -680,7 +680,7 @@ pub trait StrExt: Index { /// ``` #[stable(feature = "rust1", since = "1.0.0")] fn rsplitn(&self, count: usize, pat: P) -> RSplitN

{ - core_str::StrExt::rsplitn(&self[], count, pat) + core_str::StrExt::rsplitn(&self[..], count, pat) } /// An iterator over the start and end indices of the disjoint @@ -706,7 +706,7 @@ pub trait StrExt: Index { #[unstable(feature = "collections", reason = "might have its iterator type changed")] fn match_indices<'a>(&'a self, pat: &'a str) -> MatchIndices<'a> { - core_str::StrExt::match_indices(&self[], pat) + core_str::StrExt::match_indices(&self[..], pat) } /// An iterator over the substrings of `self` separated by the pattern `sep`. @@ -723,7 +723,7 @@ pub trait StrExt: Index { #[unstable(feature = "collections", reason = "might get removed in the future in favor of a more generic split()")] fn split_str<'a>(&'a self, pat: &'a str) -> SplitStr<'a> { - core_str::StrExt::split_str(&self[], pat) + core_str::StrExt::split_str(&self[..], pat) } /// An iterator over the lines of a string (subsequences separated @@ -739,7 +739,7 @@ pub trait StrExt: Index { /// ``` #[stable(feature = "rust1", since = "1.0.0")] fn lines(&self) -> Lines { - core_str::StrExt::lines(&self[]) + core_str::StrExt::lines(&self[..]) } /// An iterator over the lines of a string, separated by either @@ -755,7 +755,7 @@ pub trait StrExt: Index { /// ``` #[stable(feature = "rust1", since = "1.0.0")] fn lines_any(&self) -> LinesAny { - core_str::StrExt::lines_any(&self[]) + core_str::StrExt::lines_any(&self[..]) } /// Deprecated: use `s[a .. b]` instead. @@ -802,7 +802,7 @@ pub trait StrExt: Index { #[unstable(feature = "collections", reason = "may have yet to prove its worth")] fn slice_chars(&self, begin: usize, end: usize) -> &str { - core_str::StrExt::slice_chars(&self[], begin, end) + core_str::StrExt::slice_chars(&self[..], begin, end) } /// Takes a bytewise (not UTF-8) slice from a string. @@ -813,7 +813,7 @@ pub trait StrExt: Index { /// the entire slice as well. #[stable(feature = "rust1", since = "1.0.0")] unsafe fn slice_unchecked(&self, begin: usize, end: usize) -> &str { - core_str::StrExt::slice_unchecked(&self[], begin, end) + core_str::StrExt::slice_unchecked(&self[..], begin, end) } /// Returns true if the pattern `pat` is a prefix of the string. @@ -825,7 +825,7 @@ pub trait StrExt: Index { /// ``` #[stable(feature = "rust1", since = "1.0.0")] fn starts_with(&self, pat: &str) -> bool { - core_str::StrExt::starts_with(&self[], pat) + core_str::StrExt::starts_with(&self[..], pat) } /// Returns true if the pattern `pat` is a suffix of the string. 
@@ -837,7 +837,7 @@ pub trait StrExt: Index { /// ``` #[stable(feature = "rust1", since = "1.0.0")] fn ends_with(&self, pat: &str) -> bool { - core_str::StrExt::ends_with(&self[], pat) + core_str::StrExt::ends_with(&self[..], pat) } /// Returns a string with all pre- and suffixes that match @@ -857,7 +857,7 @@ pub trait StrExt: Index { /// ``` #[stable(feature = "rust1", since = "1.0.0")] fn trim_matches(&self, pat: P) -> &str { - core_str::StrExt::trim_matches(&self[], pat) + core_str::StrExt::trim_matches(&self[..], pat) } /// Returns a string with all prefixes that match @@ -877,7 +877,7 @@ pub trait StrExt: Index { /// ``` #[stable(feature = "rust1", since = "1.0.0")] fn trim_left_matches(&self, pat: P) -> &str { - core_str::StrExt::trim_left_matches(&self[], pat) + core_str::StrExt::trim_left_matches(&self[..], pat) } /// Returns a string with all suffixes that match @@ -897,7 +897,7 @@ pub trait StrExt: Index { /// ``` #[stable(feature = "rust1", since = "1.0.0")] fn trim_right_matches(&self, pat: P) -> &str { - core_str::StrExt::trim_right_matches(&self[], pat) + core_str::StrExt::trim_right_matches(&self[..], pat) } /// Check that `index`-th byte lies at the start and/or end of a @@ -926,7 +926,7 @@ pub trait StrExt: Index { #[unstable(feature = "collections", reason = "naming is uncertain with container conventions")] fn is_char_boundary(&self, index: usize) -> bool { - core_str::StrExt::is_char_boundary(&self[], index) + core_str::StrExt::is_char_boundary(&self[..], index) } /// Pluck a character out of a string and return the index of the next @@ -985,7 +985,7 @@ pub trait StrExt: Index { #[unstable(feature = "collections", reason = "naming is uncertain with container conventions")] fn char_range_at(&self, start: usize) -> CharRange { - core_str::StrExt::char_range_at(&self[], start) + core_str::StrExt::char_range_at(&self[..], start) } /// Given a byte position and a str, return the previous char and its position. @@ -1001,7 +1001,7 @@ pub trait StrExt: Index { #[unstable(feature = "collections", reason = "naming is uncertain with container conventions")] fn char_range_at_reverse(&self, start: usize) -> CharRange { - core_str::StrExt::char_range_at_reverse(&self[], start) + core_str::StrExt::char_range_at_reverse(&self[..], start) } /// Plucks the character starting at the `i`th byte of a string. @@ -1022,7 +1022,7 @@ pub trait StrExt: Index { #[unstable(feature = "collections", reason = "naming is uncertain with container conventions")] fn char_at(&self, i: usize) -> char { - core_str::StrExt::char_at(&self[], i) + core_str::StrExt::char_at(&self[..], i) } /// Plucks the character ending at the `i`th byte of a string. @@ -1034,7 +1034,7 @@ pub trait StrExt: Index { #[unstable(feature = "collections", reason = "naming is uncertain with container conventions")] fn char_at_reverse(&self, i: usize) -> char { - core_str::StrExt::char_at_reverse(&self[], i) + core_str::StrExt::char_at_reverse(&self[..], i) } /// Work with the byte buffer of a string as a byte slice. 
@@ -1046,7 +1046,7 @@ pub trait StrExt: Index { /// ``` #[stable(feature = "rust1", since = "1.0.0")] fn as_bytes(&self) -> &[u8] { - core_str::StrExt::as_bytes(&self[]) + core_str::StrExt::as_bytes(&self[..]) } /// Returns the byte index of the first character of `self` that @@ -1074,7 +1074,7 @@ pub trait StrExt: Index { /// ``` #[stable(feature = "rust1", since = "1.0.0")] fn find(&self, pat: P) -> Option { - core_str::StrExt::find(&self[], pat) + core_str::StrExt::find(&self[..], pat) } /// Returns the byte index of the last character of `self` that @@ -1102,7 +1102,7 @@ pub trait StrExt: Index { /// ``` #[stable(feature = "rust1", since = "1.0.0")] fn rfind(&self, pat: P) -> Option { - core_str::StrExt::rfind(&self[], pat) + core_str::StrExt::rfind(&self[..], pat) } /// Returns the byte index of the first matching substring @@ -1127,7 +1127,7 @@ pub trait StrExt: Index { #[unstable(feature = "collections", reason = "might get removed in favor of a more generic find in the future")] fn find_str(&self, needle: &str) -> Option { - core_str::StrExt::find_str(&self[], needle) + core_str::StrExt::find_str(&self[..], needle) } /// Retrieves the first character from a string slice and returns @@ -1151,7 +1151,7 @@ pub trait StrExt: Index { #[unstable(feature = "collections", reason = "awaiting conventions about shifting and slices")] fn slice_shift_char(&self) -> Option<(char, &str)> { - core_str::StrExt::slice_shift_char(&self[]) + core_str::StrExt::slice_shift_char(&self[..]) } /// Returns the byte offset of an inner slice relative to an enclosing outer slice. @@ -1171,7 +1171,7 @@ pub trait StrExt: Index { #[unstable(feature = "collections", reason = "awaiting convention about comparability of arbitrary slices")] fn subslice_offset(&self, inner: &str) -> usize { - core_str::StrExt::subslice_offset(&self[], inner) + core_str::StrExt::subslice_offset(&self[..], inner) } /// Return an unsafe pointer to the strings buffer. @@ -1182,14 +1182,14 @@ pub trait StrExt: Index { #[stable(feature = "rust1", since = "1.0.0")] #[inline] fn as_ptr(&self) -> *const u8 { - core_str::StrExt::as_ptr(&self[]) + core_str::StrExt::as_ptr(&self[..]) } /// Return an iterator of `u16` over the string encoded as UTF-16. #[unstable(feature = "collections", reason = "this functionality may only be provided by libunicode")] fn utf16_units(&self) -> Utf16Units { - Utf16Units { encoder: Utf16Encoder::new(self[].chars()) } + Utf16Units { encoder: Utf16Encoder::new(self[..].chars()) } } /// Return the number of bytes in this string @@ -1203,7 +1203,7 @@ pub trait StrExt: Index { #[stable(feature = "rust1", since = "1.0.0")] #[inline] fn len(&self) -> usize { - core_str::StrExt::len(&self[]) + core_str::StrExt::len(&self[..]) } /// Returns true if this slice contains no bytes @@ -1216,7 +1216,7 @@ pub trait StrExt: Index { #[inline] #[stable(feature = "rust1", since = "1.0.0")] fn is_empty(&self) -> bool { - core_str::StrExt::is_empty(&self[]) + core_str::StrExt::is_empty(&self[..]) } /// Parse this string into the specified type. 
@@ -1230,7 +1230,7 @@ pub trait StrExt: Index { #[inline] #[stable(feature = "rust1", since = "1.0.0")] fn parse(&self) -> Result { - core_str::StrExt::parse(&self[]) + core_str::StrExt::parse(&self[..]) } /// Returns an iterator over the @@ -1255,7 +1255,7 @@ pub trait StrExt: Index { #[unstable(feature = "collections", reason = "this functionality may only be provided by libunicode")] fn graphemes(&self, is_extended: bool) -> Graphemes { - UnicodeStr::graphemes(&self[], is_extended) + UnicodeStr::graphemes(&self[..], is_extended) } /// Returns an iterator over the grapheme clusters of self and their byte offsets. @@ -1271,7 +1271,7 @@ pub trait StrExt: Index { #[unstable(feature = "collections", reason = "this functionality may only be provided by libunicode")] fn grapheme_indices(&self, is_extended: bool) -> GraphemeIndices { - UnicodeStr::grapheme_indices(&self[], is_extended) + UnicodeStr::grapheme_indices(&self[..], is_extended) } /// An iterator over the words of a string (subsequences separated @@ -1288,7 +1288,7 @@ pub trait StrExt: Index { #[unstable(feature = "str_words", reason = "the precise algorithm to use is unclear")] fn words(&self) -> Words { - UnicodeStr::words(&self[]) + UnicodeStr::words(&self[..]) } /// Returns a string's displayed width in columns, treating control @@ -1303,25 +1303,25 @@ pub trait StrExt: Index { #[unstable(feature = "collections", reason = "this functionality may only be provided by libunicode")] fn width(&self, is_cjk: bool) -> usize { - UnicodeStr::width(&self[], is_cjk) + UnicodeStr::width(&self[..], is_cjk) } /// Returns a string with leading and trailing whitespace removed. #[stable(feature = "rust1", since = "1.0.0")] fn trim(&self) -> &str { - UnicodeStr::trim(&self[]) + UnicodeStr::trim(&self[..]) } /// Returns a string with leading whitespace removed. #[stable(feature = "rust1", since = "1.0.0")] fn trim_left(&self) -> &str { - UnicodeStr::trim_left(&self[]) + UnicodeStr::trim_left(&self[..]) } /// Returns a string with trailing whitespace removed. #[stable(feature = "rust1", since = "1.0.0")] fn trim_right(&self) -> &str { - UnicodeStr::trim_right(&self[]) + UnicodeStr::trim_right(&self[..]) } } @@ -2704,7 +2704,7 @@ mod tests { &["\u{378}\u{308}\u{903}"], &["\u{378}\u{308}", "\u{903}"]), ]; - for &(s, g) in &test_same[] { + for &(s, g) in &test_same[..] 
{ // test forward iterator assert!(order::equals(s.graphemes(true), g.iter().cloned())); assert!(order::equals(s.graphemes(false), g.iter().cloned())); diff --git a/src/libcollections/string.rs b/src/libcollections/string.rs index 69fd28d1723..cb3dca31122 100644 --- a/src/libcollections/string.rs +++ b/src/libcollections/string.rs @@ -857,7 +857,7 @@ impl ops::Index> for String { type Output = str; #[inline] fn index(&self, index: &ops::Range) -> &str { - &self[][*index] + &self[..][*index] } } #[stable(feature = "rust1", since = "1.0.0")] @@ -865,7 +865,7 @@ impl ops::Index> for String { type Output = str; #[inline] fn index(&self, index: &ops::RangeTo) -> &str { - &self[][*index] + &self[..][*index] } } #[stable(feature = "rust1", since = "1.0.0")] @@ -873,7 +873,7 @@ impl ops::Index> for String { type Output = str; #[inline] fn index(&self, index: &ops::RangeFrom) -> &str { - &self[][*index] + &self[..][*index] } } #[stable(feature = "rust1", since = "1.0.0")] @@ -891,7 +891,7 @@ impl ops::Deref for String { #[inline] fn deref(&self) -> &str { - unsafe { mem::transmute(&self.vec[]) } + unsafe { mem::transmute(&self.vec[..]) } } } @@ -1287,7 +1287,7 @@ mod tests { #[test] fn test_slicing() { let s = "foobar".to_string(); - assert_eq!("foobar", &s[]); + assert_eq!("foobar", &s[..]); assert_eq!("foo", &s[..3]); assert_eq!("bar", &s[3..]); assert_eq!("oob", &s[1..4]); diff --git a/src/libcollections/vec.rs b/src/libcollections/vec.rs index bde733644b5..e593e45172c 100644 --- a/src/libcollections/vec.rs +++ b/src/libcollections/vec.rs @@ -2589,7 +2589,7 @@ mod tests { b.bytes = src_len as u64; b.iter(|| { - let dst = src.clone()[].to_vec(); + let dst = src.clone()[..].to_vec(); assert_eq!(dst.len(), src_len); assert!(dst.iter().enumerate().all(|(i, x)| i == *x)); }); diff --git a/src/libcore/array.rs b/src/libcore/array.rs index 838ca4e478b..45c0f65197a 100644 --- a/src/libcore/array.rs +++ b/src/libcore/array.rs @@ -37,14 +37,14 @@ macro_rules! array_impls { impl> Hash for [T; $N] { fn hash(&self, state: &mut S) { - Hash::hash(&self[], state) + Hash::hash(&self[..], state) } } #[stable(feature = "rust1", since = "1.0.0")] impl fmt::Debug for [T; $N] { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Debug::fmt(&&self[], f) + fmt::Debug::fmt(&&self[..], f) } } @@ -72,11 +72,11 @@ macro_rules! array_impls { impl PartialEq<[B; $N]> for [A; $N] where A: PartialEq { #[inline] fn eq(&self, other: &[B; $N]) -> bool { - &self[] == &other[] + &self[..] == &other[..] } #[inline] fn ne(&self, other: &[B; $N]) -> bool { - &self[] != &other[] + &self[..] != &other[..] } } @@ -87,11 +87,11 @@ macro_rules! array_impls { { #[inline(always)] fn eq(&self, other: &Rhs) -> bool { - PartialEq::eq(&self[], &**other) + PartialEq::eq(&self[..], &**other) } #[inline(always)] fn ne(&self, other: &Rhs) -> bool { - PartialEq::ne(&self[], &**other) + PartialEq::ne(&self[..], &**other) } } @@ -102,11 +102,11 @@ macro_rules! array_impls { { #[inline(always)] fn eq(&self, other: &[B; $N]) -> bool { - PartialEq::eq(&**self, &other[]) + PartialEq::eq(&**self, &other[..]) } #[inline(always)] fn ne(&self, other: &[B; $N]) -> bool { - PartialEq::ne(&**self, &other[]) + PartialEq::ne(&**self, &other[..]) } } @@ -117,23 +117,23 @@ macro_rules! 
array_impls { impl PartialOrd for [T; $N] { #[inline] fn partial_cmp(&self, other: &[T; $N]) -> Option { - PartialOrd::partial_cmp(&&self[], &&other[]) + PartialOrd::partial_cmp(&&self[..], &&other[..]) } #[inline] fn lt(&self, other: &[T; $N]) -> bool { - PartialOrd::lt(&&self[], &&other[]) + PartialOrd::lt(&&self[..], &&other[..]) } #[inline] fn le(&self, other: &[T; $N]) -> bool { - PartialOrd::le(&&self[], &&other[]) + PartialOrd::le(&&self[..], &&other[..]) } #[inline] fn ge(&self, other: &[T; $N]) -> bool { - PartialOrd::ge(&&self[], &&other[]) + PartialOrd::ge(&&self[..], &&other[..]) } #[inline] fn gt(&self, other: &[T; $N]) -> bool { - PartialOrd::gt(&&self[], &&other[]) + PartialOrd::gt(&&self[..], &&other[..]) } } @@ -141,7 +141,7 @@ macro_rules! array_impls { impl Ord for [T; $N] { #[inline] fn cmp(&self, other: &[T; $N]) -> Ordering { - Ord::cmp(&&self[], &&other[]) + Ord::cmp(&&self[..], &&other[..]) } } )+ diff --git a/src/libcore/str/mod.rs b/src/libcore/str/mod.rs index ce26abe606d..eec997b9f10 100644 --- a/src/libcore/str/mod.rs +++ b/src/libcore/str/mod.rs @@ -1280,7 +1280,7 @@ mod traits { /// Any string that can be represented as a slice #[unstable(feature = "core", reason = "Instead of taking this bound generically, this trait will be \ - replaced with one of slicing syntax (&foo[]), deref coercions, or \ + replaced with one of slicing syntax (&foo[..]), deref coercions, or \ a more generic conversion trait")] pub trait Str { /// Work with `self` as a slice. diff --git a/src/libcoretest/iter.rs b/src/libcoretest/iter.rs index 7eb0fb97bed..4653a30a45d 100644 --- a/src/libcoretest/iter.rs +++ b/src/libcoretest/iter.rs @@ -264,7 +264,7 @@ fn test_inspect() { .collect::>(); assert_eq!(n, xs.len()); - assert_eq!(&xs[], &ys[]); + assert_eq!(&xs[..], &ys[..]); } #[test] diff --git a/src/libcoretest/slice.rs b/src/libcoretest/slice.rs index 6d5cc38ef0a..46c7730cc64 100644 --- a/src/libcoretest/slice.rs +++ b/src/libcoretest/slice.rs @@ -43,13 +43,13 @@ fn iterator_to_slice() { { let mut iter = data.iter(); - assert_eq!(&iter[], &other_data[]); + assert_eq!(&iter[..], &other_data[..]); iter.next(); - assert_eq!(&iter[], &other_data[1..]); + assert_eq!(&iter[..], &other_data[1..]); iter.next_back(); - assert_eq!(&iter[], &other_data[1..2]); + assert_eq!(&iter[..], &other_data[1..2]); let s = iter.as_slice(); iter.next(); @@ -57,17 +57,17 @@ fn iterator_to_slice() { } { let mut iter = data.iter_mut(); - assert_eq!(&iter[], &other_data[]); + assert_eq!(&iter[..], &other_data[..]); // mutability: assert!(&mut iter[] == other_data); iter.next(); - assert_eq!(&iter[], &other_data[1..]); + assert_eq!(&iter[..], &other_data[1..]); assert!(&mut iter[] == &mut other_data[1..]); iter.next_back(); - assert_eq!(&iter[], &other_data[1..2]); + assert_eq!(&iter[..], &other_data[1..2]); assert!(&mut iter[] == &mut other_data[1..2]); let s = iter.into_slice(); diff --git a/src/libfmt_macros/lib.rs b/src/libfmt_macros/lib.rs index 1c7e97d784c..be77622ac1d 100644 --- a/src/libfmt_macros/lib.rs +++ b/src/libfmt_macros/lib.rs @@ -215,11 +215,11 @@ impl<'a> Parser<'a> { } Some((_, other)) => { self.err(&format!("expected `{:?}`, found `{:?}`", c, - other)[]); + other)); } None => { self.err(&format!("expected `{:?}` but string was terminated", - c)[]); + c)); } } } diff --git a/src/libgetopts/lib.rs b/src/libgetopts/lib.rs index c743119f409..fdd7f7395c2 100644 --- a/src/libgetopts/lib.rs +++ b/src/libgetopts/lib.rs @@ -287,7 +287,7 @@ impl OptGroup { impl Matches { fn opt_vals(&self, nm: &str) -> 
Vec { - match find_opt(&self.opts[], Name::from_str(nm)) { + match find_opt(&self.opts[..], Name::from_str(nm)) { Some(id) => self.vals[id].clone(), None => panic!("No option '{}' defined", nm) } @@ -326,7 +326,7 @@ impl Matches { /// Returns the string argument supplied to one of several matching options or `None`. pub fn opts_str(&self, names: &[String]) -> Option { for nm in names { - match self.opt_val(&nm[]) { + match self.opt_val(&nm[..]) { Some(Val(ref s)) => return Some(s.clone()), _ => () } @@ -593,7 +593,7 @@ pub fn getopts(args: &[String], optgrps: &[OptGroup]) -> Result { while i < l { let cur = args[i].clone(); let curlen = cur.len(); - if !is_arg(&cur[]) { + if !is_arg(&cur[..]) { free.push(cur); } else if cur == "--" { let mut j = i + 1; @@ -667,7 +667,7 @@ pub fn getopts(args: &[String], optgrps: &[OptGroup]) -> Result { v.push(Val((i_arg.clone()) .unwrap())); } else if name_pos < names.len() || i + 1 == l || - is_arg(&args[i + 1][]) { + is_arg(&args[i + 1][..]) { let v = &mut vals[optid]; v.push(Given); } else { @@ -730,7 +730,7 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String { 0 => {} 1 => { row.push('-'); - row.push_str(&short_name[]); + row.push_str(&short_name[..]); row.push(' '); } _ => panic!("the short name should only be 1 ascii char long"), @@ -741,7 +741,7 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String { 0 => {} _ => { row.push_str("--"); - row.push_str(&long_name[]); + row.push_str(&long_name[..]); row.push(' '); } } @@ -749,10 +749,10 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String { // arg match hasarg { No => {} - Yes => row.push_str(&hint[]), + Yes => row.push_str(&hint[..]), Maybe => { row.push('['); - row.push_str(&hint[]); + row.push_str(&hint[..]); row.push(']'); } } @@ -765,7 +765,7 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String { row.push(' '); } } else { - row.push_str(&desc_sep[]); + row.push_str(&desc_sep[..]); } // Normalize desc to contain words separated by one space character @@ -777,14 +777,14 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String { // FIXME: #5516 should be graphemes not codepoints let mut desc_rows = Vec::new(); - each_split_within(&desc_normalized_whitespace[], 54, |substr| { + each_split_within(&desc_normalized_whitespace[..], 54, |substr| { desc_rows.push(substr.to_string()); true }); // FIXME: #5516 should be graphemes not codepoints // wrapped description - row.push_str(&desc_rows.connect(&desc_sep[])[]); + row.push_str(&desc_rows.connect(&desc_sep[..])[]); row }); @@ -803,10 +803,10 @@ fn format_option(opt: &OptGroup) -> String { // Use short_name is possible, but fallback to long_name. 
if opt.short_name.len() > 0 { line.push('-'); - line.push_str(&opt.short_name[]); + line.push_str(&opt.short_name[..]); } else { line.push_str("--"); - line.push_str(&opt.long_name[]); + line.push_str(&opt.long_name[..]); } if opt.hasarg != No { @@ -814,7 +814,7 @@ fn format_option(opt: &OptGroup) -> String { if opt.hasarg == Maybe { line.push('['); } - line.push_str(&opt.hint[]); + line.push_str(&opt.hint[..]); if opt.hasarg == Maybe { line.push(']'); } @@ -836,7 +836,7 @@ pub fn short_usage(program_name: &str, opts: &[OptGroup]) -> String { line.push_str(&opts.iter() .map(format_option) .collect::>() - .connect(" ")[]); + .connect(" ")[..]); line } diff --git a/src/libgraphviz/lib.rs b/src/libgraphviz/lib.rs index 230deabee00..02914afed26 100644 --- a/src/libgraphviz/lib.rs +++ b/src/libgraphviz/lib.rs @@ -455,7 +455,7 @@ impl<'a> LabelText<'a> { pub fn escape(&self) -> String { match self { &LabelStr(ref s) => s.escape_default(), - &EscStr(ref s) => LabelText::escape_str(&s[]), + &EscStr(ref s) => LabelText::escape_str(&s[..]), } } @@ -484,7 +484,7 @@ impl<'a> LabelText<'a> { let mut prefix = self.pre_escaped_content().into_owned(); let suffix = suffix.pre_escaped_content(); prefix.push_str(r"\n\n"); - prefix.push_str(&suffix[]); + prefix.push_str(&suffix[..]); EscStr(prefix.into_cow()) } } @@ -678,7 +678,7 @@ mod tests { impl<'a> Labeller<'a, Node, &'a Edge> for LabelledGraph { fn graph_id(&'a self) -> Id<'a> { - Id::new(&self.name[]).unwrap() + Id::new(&self.name[..]).unwrap() } fn node_id(&'a self, n: &Node) -> Id<'a> { id_name(n) diff --git a/src/liblog/lib.rs b/src/liblog/lib.rs index 4dab07acfd2..c2c7f20ce9c 100644 --- a/src/liblog/lib.rs +++ b/src/liblog/lib.rs @@ -287,7 +287,7 @@ pub fn log(level: u32, loc: &'static LogLocation, args: fmt::Arguments) { // Test the literal string from args against the current filter, if there // is one. match unsafe { FILTER.as_ref() } { - Some(filter) if !args.to_string().contains(&filter[]) => return, + Some(filter) if !args.to_string().contains(&filter[..]) => return, _ => {} } @@ -382,7 +382,7 @@ fn enabled(level: u32, // Search for the longest match, the vector is assumed to be pre-sorted. for directive in iter.rev() { match directive.name { - Some(ref name) if !module.starts_with(&name[]) => {}, + Some(ref name) if !module.starts_with(&name[..]) => {}, Some(..) | None => { return level <= directive.level } @@ -397,7 +397,7 @@ fn enabled(level: u32, /// `Once` primitive (and this function is called from that primitive). fn init() { let (mut directives, filter) = match env::var("RUST_LOG") { - Ok(spec) => directive::parse_logging_spec(&spec[]), + Ok(spec) => directive::parse_logging_spec(&spec[..]), Err(..) 
=> (Vec::new(), None), }; diff --git a/src/librustc/lint/builtin.rs b/src/librustc/lint/builtin.rs index ba108b5488e..6a329b7c72b 100644 --- a/src/librustc/lint/builtin.rs +++ b/src/librustc/lint/builtin.rs @@ -509,7 +509,7 @@ impl BoxPointers { if n_uniq > 0 { let s = ty_to_string(cx.tcx, ty); let m = format!("type uses owned (Box type) pointers: {}", s); - cx.span_lint(BOX_POINTERS, span, &m[]); + cx.span_lint(BOX_POINTERS, span, &m[..]); } } } @@ -737,7 +737,7 @@ impl LintPass for UnusedResults { } } else { let attrs = csearch::get_item_attrs(&cx.sess().cstore, did); - warned |= check_must_use(cx, &attrs[], s.span); + warned |= check_must_use(cx, &attrs[..], s.span); } } _ => {} @@ -804,7 +804,7 @@ impl NonCamelCaseTypes { } else { format!("{} `{}` should have a camel case name such as `{}`", sort, s, c) }; - cx.span_lint(NON_CAMEL_CASE_TYPES, span, &m[]); + cx.span_lint(NON_CAMEL_CASE_TYPES, span, &m[..]); } } } @@ -951,7 +951,7 @@ impl NonSnakeCase { if !is_snake_case(ident) { let sc = NonSnakeCase::to_snake_case(&s); - if sc != &s[] { + if sc != &s[..] { cx.span_lint(NON_SNAKE_CASE, span, &*format!("{} `{}` should have a snake case name such as `{}`", sort, s, sc)); @@ -1034,7 +1034,7 @@ impl NonUpperCaseGlobals { if s.chars().any(|c| c.is_lowercase()) { let uc: String = NonSnakeCase::to_snake_case(&s).chars() .map(|c| c.to_uppercase()).collect(); - if uc != &s[] { + if uc != &s[..] { cx.span_lint(NON_UPPER_CASE_GLOBALS, span, &format!("{} `{}` should have an upper case name such as `{}`", sort, s, uc)); @@ -1197,7 +1197,7 @@ impl LintPass for UnusedImportBraces { let m = format!("braces around {} is unnecessary", &token::get_ident(*name)); cx.span_lint(UNUSED_IMPORT_BRACES, item.span, - &m[]); + &m[..]); }, _ => () } @@ -1475,7 +1475,7 @@ impl LintPass for MissingDoc { let doc_hidden = self.doc_hidden() || attrs.iter().any(|attr| { attr.check_name("doc") && match attr.meta_item_list() { None => false, - Some(l) => attr::contains_name(&l[], "hidden"), + Some(l) => attr::contains_name(&l[..], "hidden"), } }); self.doc_hidden_stack.push(doc_hidden); @@ -1703,7 +1703,7 @@ impl Stability { _ => format!("use of {} item", label) }; - cx.span_lint(lint, span, &msg[]); + cx.span_lint(lint, span, &msg[..]); } } } diff --git a/src/librustc/lint/context.rs b/src/librustc/lint/context.rs index 42a6861f452..068c179d343 100644 --- a/src/librustc/lint/context.rs +++ b/src/librustc/lint/context.rs @@ -125,11 +125,11 @@ impl LintStore { match (sess, from_plugin) { // We load builtin lints first, so a duplicate is a compiler bug. // Use early_error when handling -W help with no crate. - (None, _) => early_error(&msg[]), - (Some(sess), false) => sess.bug(&msg[]), + (None, _) => early_error(&msg[..]), + (Some(sess), false) => sess.bug(&msg[..]), // A duplicate name from a plugin is a user error. - (Some(sess), true) => sess.err(&msg[]), + (Some(sess), true) => sess.err(&msg[..]), } } @@ -150,11 +150,11 @@ impl LintStore { match (sess, from_plugin) { // We load builtin lints first, so a duplicate is a compiler bug. // Use early_error when handling -W help with no crate. - (None, _) => early_error(&msg[]), - (Some(sess), false) => sess.bug(&msg[]), + (None, _) => early_error(&msg[..]), + (Some(sess), false) => sess.bug(&msg[..]), // A duplicate name from a plugin is a user error. 
- (Some(sess), true) => sess.err(&msg[]), + (Some(sess), true) => sess.err(&msg[..]), } } } @@ -251,8 +251,8 @@ impl LintStore { let warning = format!("lint {} has been renamed to {}", lint_name, new_name); match span { - Some(span) => sess.span_warn(span, &warning[]), - None => sess.warn(&warning[]), + Some(span) => sess.span_warn(span, &warning[..]), + None => sess.warn(&warning[..]), }; Some(lint_id) } @@ -262,13 +262,13 @@ impl LintStore { pub fn process_command_line(&mut self, sess: &Session) { for &(ref lint_name, level) in &sess.opts.lint_opts { - match self.find_lint(&lint_name[], sess, None) { + match self.find_lint(&lint_name[..], sess, None) { Some(lint_id) => self.set_level(lint_id, (level, CommandLine)), None => { match self.lint_groups.iter().map(|(&x, pair)| (x, pair.0.clone())) .collect::>>() - .get(&lint_name[]) { + .get(&lint_name[..]) { Some(v) => { v.iter() .map(|lint_id: &LintId| @@ -411,15 +411,15 @@ pub fn raw_emit_lint(sess: &Session, lint: &'static Lint, if level == Forbid { level = Deny; } match (level, span) { - (Warn, Some(sp)) => sess.span_warn(sp, &msg[]), - (Warn, None) => sess.warn(&msg[]), - (Deny, Some(sp)) => sess.span_err(sp, &msg[]), - (Deny, None) => sess.err(&msg[]), + (Warn, Some(sp)) => sess.span_warn(sp, &msg[..]), + (Warn, None) => sess.warn(&msg[..]), + (Deny, Some(sp)) => sess.span_err(sp, &msg[..]), + (Deny, None) => sess.err(&msg[..]), _ => sess.bug("impossible level in raw_emit_lint"), } if let Some(note) = note { - sess.note(¬e[]); + sess.note(¬e[..]); } if let Some(span) = def { @@ -503,7 +503,7 @@ impl<'a, 'tcx> Context<'a, 'tcx> { match self.lints.find_lint(&lint_name, &self.tcx.sess, Some(span)) { Some(lint_id) => vec![(lint_id, level, span)], None => { - match self.lints.lint_groups.get(&lint_name[]) { + match self.lints.lint_groups.get(&lint_name[..]) { Some(&(ref v, _)) => v.iter() .map(|lint_id: &LintId| (*lint_id, level, span)) @@ -729,7 +729,7 @@ impl<'a, 'tcx> IdVisitingOperation for Context<'a, 'tcx> { None => {} Some(lints) => { for (lint_id, span, msg) in lints { - self.span_lint(lint_id.lint, span, &msg[]) + self.span_lint(lint_id.lint, span, &msg[..]) } } } diff --git a/src/librustc/metadata/creader.rs b/src/librustc/metadata/creader.rs index 0871c36d892..d48a404176a 100644 --- a/src/librustc/metadata/creader.rs +++ b/src/librustc/metadata/creader.rs @@ -183,7 +183,7 @@ impl<'a> CrateReader<'a> { let name = match *path_opt { Some((ref path_str, _)) => { let name = path_str.to_string(); - validate_crate_name(Some(self.sess), &name[], + validate_crate_name(Some(self.sess), &name[..], Some(i.span)); name } @@ -321,7 +321,7 @@ impl<'a> CrateReader<'a> { let source = self.sess.cstore.get_used_crate_source(cnum).unwrap(); if let Some(locs) = self.sess.opts.externs.get(name) { let found = locs.iter().any(|l| { - let l = fs::realpath(&Path::new(&l[])).ok(); + let l = fs::realpath(&Path::new(&l[..])).ok(); source.dylib.as_ref().map(|p| &p.0) == l.as_ref() || source.rlib.as_ref().map(|p| &p.0) == l.as_ref() }); @@ -459,8 +459,8 @@ impl<'a> CrateReader<'a> { let mut load_ctxt = loader::Context { sess: self.sess, span: span, - ident: &ident[], - crate_name: &name[], + ident: &ident[..], + crate_name: &name[..], hash: None, filesearch: self.sess.host_filesearch(PathKind::Crate), target: &self.sess.host, @@ -562,7 +562,7 @@ impl<'a> CrateReader<'a> { name, config::host_triple(), self.sess.opts.target_triple); - self.sess.span_err(span, &message[]); + self.sess.span_err(span, &message[..]); self.sess.abort_if_errors(); } @@ -575,7 +575,7 @@ 
impl<'a> CrateReader<'a> { let message = format!("plugin `{}` only found in rlib format, \ but must be available in dylib format", name); - self.sess.span_err(span, &message[]); + self.sess.span_err(span, &message[..]); // No need to abort because the loading code will just ignore this // empty dylib. None diff --git a/src/librustc/metadata/encoder.rs b/src/librustc/metadata/encoder.rs index 3123fa31abd..5c18371e788 100644 --- a/src/librustc/metadata/encoder.rs +++ b/src/librustc/metadata/encoder.rs @@ -163,7 +163,7 @@ fn encode_variant_id(rbml_w: &mut Encoder, vid: DefId) { rbml_w.end_tag(); rbml_w.start_tag(tag_mod_child); - rbml_w.wr_str(&s[]); + rbml_w.wr_str(&s[..]); rbml_w.end_tag(); } @@ -353,9 +353,9 @@ fn encode_enum_variant_info(ecx: &EncodeContext, let fields = ty::lookup_struct_fields(ecx.tcx, def_id); let idx = encode_info_for_struct(ecx, rbml_w, - &fields[], + &fields[..], index); - encode_struct_fields(rbml_w, &fields[], def_id); + encode_struct_fields(rbml_w, &fields[..], def_id); encode_index(rbml_w, idx, write_i64); } } @@ -1158,7 +1158,7 @@ fn encode_info_for_item(ecx: &EncodeContext, class itself */ let idx = encode_info_for_struct(ecx, rbml_w, - &fields[], + &fields[..], index); /* Index the class*/ @@ -1181,7 +1181,7 @@ fn encode_info_for_item(ecx: &EncodeContext, /* Encode def_ids for each field and method for methods, write all the stuff get_trait_method needs to know*/ - encode_struct_fields(rbml_w, &fields[], def_id); + encode_struct_fields(rbml_w, &fields[..], def_id); encode_inlined_item(ecx, rbml_w, IIItemRef(item)); diff --git a/src/librustc/metadata/loader.rs b/src/librustc/metadata/loader.rs index 3158ccd0765..c743cb46c24 100644 --- a/src/librustc/metadata/loader.rs +++ b/src/librustc/metadata/loader.rs @@ -322,7 +322,7 @@ impl<'a> Context<'a> { &Some(ref r) => format!("{} which `{}` depends on", message, r.ident) }; - self.sess.span_err(self.span, &message[]); + self.sess.span_err(self.span, &message[..]); if self.rejected_via_triple.len() > 0 { let mismatches = self.rejected_via_triple.iter(); @@ -404,7 +404,7 @@ impl<'a> Context<'a> { None => return FileDoesntMatch, Some(file) => file, }; - let (hash, rlib) = if file.starts_with(&rlib_prefix[]) && + let (hash, rlib) = if file.starts_with(&rlib_prefix[..]) && file.ends_with(".rlib") { (&file[(rlib_prefix.len()) .. (file.len() - ".rlib".len())], true) @@ -413,7 +413,7 @@ impl<'a> Context<'a> { (&file[(dylib_prefix.len()) .. 
(file.len() - dypair.1.len())], false) } else { - if file.starts_with(&staticlib_prefix[]) && + if file.starts_with(&staticlib_prefix[..]) && file.ends_with(".a") { staticlibs.push(CrateMismatch { path: path.clone(), @@ -627,7 +627,7 @@ impl<'a> Context<'a> { let mut rlibs = HashMap::new(); let mut dylibs = HashMap::new(); { - let locs = locs.iter().map(|l| Path::new(&l[])).filter(|loc| { + let locs = locs.iter().map(|l| Path::new(&l[..])).filter(|loc| { if !loc.exists() { sess.err(&format!("extern location for {} does not exist: {}", self.crate_name, loc.display())[]); @@ -645,8 +645,8 @@ impl<'a> Context<'a> { return true } else { let (ref prefix, ref suffix) = dylibname; - if file.starts_with(&prefix[]) && - file.ends_with(&suffix[]) { + if file.starts_with(&prefix[..]) && + file.ends_with(&suffix[..]) { return true } } diff --git a/src/librustc/metadata/tydecode.rs b/src/librustc/metadata/tydecode.rs index 94654b84922..5805725a8fc 100644 --- a/src/librustc/metadata/tydecode.rs +++ b/src/librustc/metadata/tydecode.rs @@ -641,7 +641,7 @@ fn parse_abi_set(st: &mut PState) -> abi::Abi { assert_eq!(next(st), '['); scan(st, |c| c == ']', |bytes| { let abi_str = str::from_utf8(bytes).unwrap(); - abi::lookup(&abi_str[]).expect(abi_str) + abi::lookup(&abi_str[..]).expect(abi_str) }) } diff --git a/src/librustc/middle/astencode.rs b/src/librustc/middle/astencode.rs index c3302debdfa..ae10eb686b0 100644 --- a/src/librustc/middle/astencode.rs +++ b/src/librustc/middle/astencode.rs @@ -134,7 +134,7 @@ pub fn decode_inlined_item<'tcx>(cdata: &cstore::crate_metadata, // Do an Option dance to use the path after it is moved below. let s = ast_map::path_to_string(path.iter().cloned()); path_as_str = Some(s); - path_as_str.as_ref().map(|x| &x[]) + path_as_str.as_ref().map(|x| &x[..]) }); let mut ast_dsr = reader::Decoder::new(ast_doc); let from_id_range = Decodable::decode(&mut ast_dsr).unwrap(); diff --git a/src/librustc/middle/cfg/graphviz.rs b/src/librustc/middle/cfg/graphviz.rs index 1f0fe4f1aca..46b4a51c9d6 100644 --- a/src/librustc/middle/cfg/graphviz.rs +++ b/src/librustc/middle/cfg/graphviz.rs @@ -92,7 +92,7 @@ impl<'a, 'ast> dot::Labeller<'a, Node<'a>, Edge<'a>> for LabelledCFG<'a, 'ast> { let s = replace_newline_with_backslash_l(s); label.push_str(&format!("exiting scope_{} {}", i, - &s[])[]); + &s[..])[]); } dot::LabelText::EscStr(label.into_cow()) } diff --git a/src/librustc/middle/check_match.rs b/src/librustc/middle/check_match.rs index 03456f85290..60af99b36b8 100644 --- a/src/librustc/middle/check_match.rs +++ b/src/librustc/middle/check_match.rs @@ -200,7 +200,7 @@ fn check_expr(cx: &mut MatchCheckCtxt, ex: &ast::Expr) { } // Fourth, check for unreachable arms. - check_arms(cx, &inlined_arms[], source); + check_arms(cx, &inlined_arms[..], source); // Finally, check if the whole match expression is exhaustive. // Check for empty enum, because is_useful only works on inhabited types. @@ -291,7 +291,7 @@ fn check_arms(cx: &MatchCheckCtxt, for pat in pats { let v = vec![&**pat]; - match is_useful(cx, &seen, &v[], LeaveOutWitness) { + match is_useful(cx, &seen, &v[..], LeaveOutWitness) { NotUseful => { match source { ast::MatchSource::IfLetDesugar { .. } => { @@ -351,7 +351,7 @@ fn raw_pat<'a>(p: &'a Pat) -> &'a Pat { fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, matrix: &Matrix, source: ast::MatchSource) { match is_useful(cx, matrix, &[DUMMY_WILD_PAT], ConstructWitness) { UsefulWithWitness(pats) => { - let witness = match &pats[] { + let witness = match &pats[..] 
{ [ref witness] => &**witness, [] => DUMMY_WILD_PAT, _ => unreachable!() @@ -360,7 +360,7 @@ fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, matrix: &Matrix, source: ast: ast::MatchSource::ForLoopDesugar => { // `witness` has the form `Some()`, peel off the `Some` let witness = match witness.node { - ast::PatEnum(_, Some(ref pats)) => match &pats[] { + ast::PatEnum(_, Some(ref pats)) => match &pats[..] { [ref pat] => &**pat, _ => unreachable!(), }, @@ -664,7 +664,7 @@ fn is_useful(cx: &MatchCheckCtxt, UsefulWithWitness(pats) => UsefulWithWitness({ let arity = constructor_arity(cx, &c, left_ty); let mut result = { - let pat_slice = &pats[]; + let pat_slice = &pats[..]; let subpats: Vec<_> = (0..arity).map(|i| { pat_slice.get(i).map_or(DUMMY_WILD_PAT, |p| &**p) }).collect(); @@ -711,10 +711,10 @@ fn is_useful_specialized(cx: &MatchCheckCtxt, &Matrix(ref m): &Matrix, witness: WitnessPreference) -> Usefulness { let arity = constructor_arity(cx, &ctor, lty); let matrix = Matrix(m.iter().filter_map(|r| { - specialize(cx, &r[], &ctor, 0, arity) + specialize(cx, &r[..], &ctor, 0, arity) }).collect()); match specialize(cx, v, &ctor, 0, arity) { - Some(v) => is_useful(cx, &matrix, &v[], witness), + Some(v) => is_useful(cx, &matrix, &v[..], witness), None => NotUseful } } diff --git a/src/librustc/middle/const_eval.rs b/src/librustc/middle/const_eval.rs index 3d03cd946c4..6dfd781fa08 100644 --- a/src/librustc/middle/const_eval.rs +++ b/src/librustc/middle/const_eval.rs @@ -62,7 +62,7 @@ fn lookup_variant_by_id<'a>(tcx: &'a ty::ctxt, None => None, Some(ast_map::NodeItem(it)) => match it.node { ast::ItemEnum(ast::EnumDef { ref variants }, _) => { - variant_expr(&variants[], variant_def.node) + variant_expr(&variants[..], variant_def.node) } _ => None }, @@ -83,7 +83,7 @@ fn lookup_variant_by_id<'a>(tcx: &'a ty::ctxt, // NOTE this doesn't do the right thing, it compares inlined // NodeId's to the original variant_def's NodeId, but they // come from different crates, so they will likely never match. - variant_expr(&variants[], variant_def.node).map(|e| e.id) + variant_expr(&variants[..], variant_def.node).map(|e| e.id) } _ => None }, @@ -209,7 +209,7 @@ pub fn const_expr_to_pat(tcx: &ty::ctxt, expr: &Expr, span: Span) -> P pub fn eval_const_expr(tcx: &ty::ctxt, e: &Expr) -> const_val { match eval_const_expr_partial(tcx, e, None) { Ok(r) => r, - Err(s) => tcx.sess.span_fatal(e.span, &s[]) + Err(s) => tcx.sess.span_fatal(e.span, &s[..]) } } @@ -552,14 +552,14 @@ pub fn compare_lit_exprs<'tcx>(tcx: &ty::ctxt<'tcx>, let a = match eval_const_expr_partial(tcx, a, ty_hint) { Ok(a) => a, Err(s) => { - tcx.sess.span_err(a.span, &s[]); + tcx.sess.span_err(a.span, &s[..]); return None; } }; let b = match eval_const_expr_partial(tcx, b, ty_hint) { Ok(b) => b, Err(s) => { - tcx.sess.span_err(b.span, &s[]); + tcx.sess.span_err(b.span, &s[..]); return None; } }; diff --git a/src/librustc/middle/dataflow.rs b/src/librustc/middle/dataflow.rs index b792a44d4d8..37cd348e418 100644 --- a/src/librustc/middle/dataflow.rs +++ b/src/librustc/middle/dataflow.rs @@ -312,7 +312,7 @@ impl<'a, 'tcx, O:DataFlowOperator> DataFlowContext<'a, 'tcx, O> { let mut t = on_entry.to_vec(); self.apply_gen_kill(cfgidx, &mut t); temp_bits = t; - &temp_bits[] + &temp_bits[..] } }; debug!("{} each_bit_for_node({:?}, cfgidx={:?}) bits={}", @@ -421,7 +421,7 @@ impl<'a, 'tcx, O:DataFlowOperator> DataFlowContext<'a, 'tcx, O> { let bits = &mut self.kills[start.. 
end]; debug!("{} add_kills_from_flow_exits flow_exit={:?} bits={} [before]", self.analysis_name, flow_exit, mut_bits_to_string(bits)); - bits.clone_from_slice(&orig_kills[]); + bits.clone_from_slice(&orig_kills[..]); debug!("{} add_kills_from_flow_exits flow_exit={:?} bits={} [after]", self.analysis_name, flow_exit, mut_bits_to_string(bits)); } diff --git a/src/librustc/middle/dead.rs b/src/librustc/middle/dead.rs index b2335f91ad9..ff78deb8d12 100644 --- a/src/librustc/middle/dead.rs +++ b/src/librustc/middle/dead.rs @@ -321,7 +321,7 @@ fn has_allow_dead_code_or_lang_attr(attrs: &[ast::Attribute]) -> bool { for attr in lint::gather_attrs(attrs) { match attr { Ok((ref name, lint::Allow, _)) - if &name[] == dead_code => return true, + if &name[..] == dead_code => return true, _ => (), } } diff --git a/src/librustc/middle/expr_use_visitor.rs b/src/librustc/middle/expr_use_visitor.rs index 5cc7502b512..39a27cb95f1 100644 --- a/src/librustc/middle/expr_use_visitor.rs +++ b/src/librustc/middle/expr_use_visitor.rs @@ -1166,7 +1166,7 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,'tcx,TYPER> { let msg = format!("Pattern has unexpected def: {:?} and type {}", def, cmt_pat.ty.repr(tcx)); - tcx.sess.span_bug(pat.span, &msg[]) + tcx.sess.span_bug(pat.span, &msg[..]) } } } diff --git a/src/librustc/middle/infer/combine.rs b/src/librustc/middle/infer/combine.rs index daa820f43b5..70b444d81d8 100644 --- a/src/librustc/middle/infer/combine.rs +++ b/src/librustc/middle/infer/combine.rs @@ -142,7 +142,7 @@ pub trait Combine<'tcx> : Sized { for _ in a_regions { invariance.push(ty::Invariant); } - &invariance[] + &invariance[..] } }; diff --git a/src/librustc/middle/infer/error_reporting.rs b/src/librustc/middle/infer/error_reporting.rs index 72b33613c66..f4b9f20988d 100644 --- a/src/librustc/middle/infer/error_reporting.rs +++ b/src/librustc/middle/infer/error_reporting.rs @@ -200,9 +200,9 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { ref trace_origins, ref same_regions) => { if !same_regions.is_empty() { - self.report_processed_errors(&var_origins[], - &trace_origins[], - &same_regions[]); + self.report_processed_errors(&var_origins[..], + &trace_origins[..], + &same_regions[..]); } } } @@ -824,7 +824,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { let parent = self.tcx.map.get_parent(scope_id); let parent_node = self.tcx.map.find(parent); let taken = lifetimes_in_scope(self.tcx, scope_id); - let life_giver = LifeGiver::with_taken(&taken[]); + let life_giver = LifeGiver::with_taken(&taken[..]); let node_inner = match parent_node { Some(ref node) => match *node { ast_map::NodeItem(ref item) => { @@ -942,7 +942,7 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> { } expl_self_opt = self.rebuild_expl_self(expl_self_opt, lifetime, &anon_nums, ®ion_names); - inputs = self.rebuild_args_ty(&inputs[], lifetime, + inputs = self.rebuild_args_ty(&inputs[..], lifetime, &anon_nums, ®ion_names); output = self.rebuild_output(&output, lifetime, &anon_nums, ®ion_names); ty_params = self.rebuild_ty_params(ty_params, lifetime, @@ -1426,7 +1426,7 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> { opt_explicit_self, generics); let msg = format!("consider using an explicit lifetime \ parameter as shown: {}", suggested_fn); - self.tcx.sess.span_help(span, &msg[]); + self.tcx.sess.span_help(span, &msg[..]); } fn report_inference_failure(&self, @@ -1771,7 +1771,7 @@ impl LifeGiver { s.push_str(&num_to_string(self.counter.get())[]); if !self.taken.contains(&s) { 
lifetime = name_to_dummy_lifetime( - token::str_to_ident(&s[]).name); + token::str_to_ident(&s[..]).name); self.generated.borrow_mut().push(lifetime); break; } diff --git a/src/librustc/middle/infer/region_inference/mod.rs b/src/librustc/middle/infer/region_inference/mod.rs index 5cdfdcc7c9b..b4fd34f206f 100644 --- a/src/librustc/middle/infer/region_inference/mod.rs +++ b/src/librustc/middle/infer/region_inference/mod.rs @@ -977,7 +977,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { self.expansion(&mut var_data); self.contraction(&mut var_data); let values = - self.extract_values_and_collect_conflicts(&var_data[], + self.extract_values_and_collect_conflicts(&var_data[..], errors); self.collect_concrete_region_errors(&values, errors); values diff --git a/src/librustc/middle/lang_items.rs b/src/librustc/middle/lang_items.rs index e13a5672778..51342e08274 100644 --- a/src/librustc/middle/lang_items.rs +++ b/src/librustc/middle/lang_items.rs @@ -149,7 +149,7 @@ impl<'a, 'v> Visitor<'v> for LanguageItemCollector<'a> { fn visit_item(&mut self, item: &ast::Item) { match extract(&item.attrs) { Some(value) => { - let item_index = self.item_refs.get(&value[]).map(|x| *x); + let item_index = self.item_refs.get(&value[..]).map(|x| *x); match item_index { Some(item_index) => { diff --git a/src/librustc/middle/liveness.rs b/src/librustc/middle/liveness.rs index d4fe0979313..e58136fb3f4 100644 --- a/src/librustc/middle/liveness.rs +++ b/src/librustc/middle/liveness.rs @@ -1119,7 +1119,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { // Uninteresting cases: just propagate in rev exec order ast::ExprVec(ref exprs) => { - self.propagate_through_exprs(&exprs[], succ) + self.propagate_through_exprs(&exprs[..], succ) } ast::ExprRepeat(ref element, ref count) => { @@ -1143,7 +1143,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { } else { succ }; - let succ = self.propagate_through_exprs(&args[], succ); + let succ = self.propagate_through_exprs(&args[..], succ); self.propagate_through_expr(&**f, succ) } @@ -1156,11 +1156,11 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { } else { succ }; - self.propagate_through_exprs(&args[], succ) + self.propagate_through_exprs(&args[..], succ) } ast::ExprTup(ref exprs) => { - self.propagate_through_exprs(&exprs[], succ) + self.propagate_through_exprs(&exprs[..], succ) } ast::ExprBinary(op, ref l, ref r) if ast_util::lazy_binop(op.node) => { diff --git a/src/librustc/middle/traits/select.rs b/src/librustc/middle/traits/select.rs index 061557eb7dc..baa1f5dc5a5 100644 --- a/src/librustc/middle/traits/select.rs +++ b/src/librustc/middle/traits/select.rs @@ -998,7 +998,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { let all_bounds = util::transitive_bounds( - self.tcx(), &caller_trait_refs[]); + self.tcx(), &caller_trait_refs[..]); let matching_bounds = all_bounds.filter( diff --git a/src/librustc/middle/ty.rs b/src/librustc/middle/ty.rs index 8618bde95fe..9742accbf07 100644 --- a/src/librustc/middle/ty.rs +++ b/src/librustc/middle/ty.rs @@ -2331,7 +2331,7 @@ impl ClosureKind { }; match result { Ok(trait_did) => trait_did, - Err(err) => cx.sess.fatal(&err[]), + Err(err) => cx.sess.fatal(&err[..]), } } } @@ -2661,7 +2661,7 @@ impl FlagComputation { } &ty_tup(ref ts) => { - self.add_tys(&ts[]); + self.add_tys(&ts[..]); } &ty_bare_fn(_, ref f) => { @@ -3447,7 +3447,7 @@ pub fn type_contents<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> TypeContents { ty_struct(did, substs) => { let flds = struct_fields(cx, did, substs); let mut res = - TypeContents::union(&flds[], + TypeContents::union(&flds[..], |f| 
tc_mt(cx, f.mt, cache)); if !lookup_repr_hints(cx, did).contains(&attr::ReprExtern) { @@ -3470,14 +3470,14 @@ pub fn type_contents<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> TypeContents { } ty_tup(ref tys) => { - TypeContents::union(&tys[], + TypeContents::union(&tys[..], |ty| tc_ty(cx, *ty, cache)) } ty_enum(did, substs) => { let variants = substd_enum_variants(cx, did, substs); let mut res = - TypeContents::union(&variants[], |variant| { + TypeContents::union(&variants[..], |variant| { TypeContents::union(&variant.args[], |arg_ty| { tc_ty(cx, *arg_ty, cache) @@ -4940,7 +4940,7 @@ pub fn provided_trait_methods<'tcx>(cx: &ctxt<'tcx>, id: ast::DefId) match item.node { ItemTrait(_, _, _, ref ms) => { let (_, p) = - ast_util::split_trait_methods(&ms[]); + ast_util::split_trait_methods(&ms[..]); p.iter() .map(|m| { match impl_or_trait_item( @@ -6625,7 +6625,7 @@ pub fn with_freevars(tcx: &ty::ctxt, fid: ast::NodeId, f: F) -> T where { match tcx.freevars.borrow().get(&fid) { None => f(&[]), - Some(d) => f(&d[]) + Some(d) => f(&d[..]) } } diff --git a/src/librustc/middle/weak_lang_items.rs b/src/librustc/middle/weak_lang_items.rs index ee3fd681a00..60a9ffc7d2e 100644 --- a/src/librustc/middle/weak_lang_items.rs +++ b/src/librustc/middle/weak_lang_items.rs @@ -55,7 +55,7 @@ pub fn check_crate(krate: &ast::Crate, pub fn link_name(attrs: &[ast::Attribute]) -> Option { lang_items::extract(attrs).and_then(|name| { - $(if &name[] == stringify!($name) { + $(if &name[..] == stringify!($name) { Some(InternedString::new(stringify!($sym))) } else)* { None diff --git a/src/librustc/plugin/load.rs b/src/librustc/plugin/load.rs index 1895cbcb542..b3bc898748f 100644 --- a/src/librustc/plugin/load.rs +++ b/src/librustc/plugin/load.rs @@ -111,19 +111,19 @@ impl<'a> PluginLoader<'a> { // inside this crate, so continue would spew "macro undefined" // errors Err(err) => { - self.sess.span_fatal(span, &err[]) + self.sess.span_fatal(span, &err[..]) } }; unsafe { let registrar = - match lib.symbol(&symbol[]) { + match lib.symbol(&symbol[..]) { Ok(registrar) => { mem::transmute::<*mut u8,PluginRegistrarFun>(registrar) } // again fatal if we can't register macros Err(err) => { - self.sess.span_fatal(span, &err[]) + self.sess.span_fatal(span, &err[..]) } }; diff --git a/src/librustc/session/config.rs b/src/librustc/session/config.rs index 5768539b2cd..93a25de0491 100644 --- a/src/librustc/session/config.rs +++ b/src/librustc/session/config.rs @@ -629,7 +629,7 @@ pub fn build_configuration(sess: &Session) -> ast::CrateConfig { append_configuration(&mut user_cfg, InternedString::new("test")) } let mut v = user_cfg.into_iter().collect::>(); - v.push_all(&default_cfg[]); + v.push_all(&default_cfg[..]); v } @@ -824,7 +824,7 @@ pub fn parse_cfgspecs(cfgspecs: Vec ) -> ast::CrateConfig { pub fn build_session_options(matches: &getopts::Matches) -> Options { let unparsed_crate_types = matches.opt_strs("crate-type"); let crate_types = parse_crate_types_from_list(unparsed_crate_types) - .unwrap_or_else(|e| early_error(&e[])); + .unwrap_or_else(|e| early_error(&e[..])); let mut lint_opts = vec!(); let mut describe_lints = false; @@ -923,7 +923,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { let mut search_paths = SearchPaths::new(); for s in &matches.opt_strs("L") { - search_paths.add_path(&s[]); + search_paths.add_path(&s[..]); } let libs = matches.opt_strs("l").into_iter().map(|s| { @@ -981,7 +981,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { --debuginfo"); } - let color = 
match matches.opt_str("color").as_ref().map(|s| &s[]) { + let color = match matches.opt_str("color").as_ref().map(|s| &s[..]) { Some("auto") => Auto, Some("always") => Always, Some("never") => Never, @@ -1119,7 +1119,7 @@ mod test { let sessopts = build_session_options(matches); let sess = build_session(sessopts, None, registry); let cfg = build_configuration(&sess); - assert!((attr::contains_name(&cfg[], "test"))); + assert!((attr::contains_name(&cfg[..], "test"))); } // When the user supplies --test and --cfg test, don't implicitly add diff --git a/src/librustc/session/mod.rs b/src/librustc/session/mod.rs index bd44dbe78f5..c1c55188875 100644 --- a/src/librustc/session/mod.rs +++ b/src/librustc/session/mod.rs @@ -75,13 +75,13 @@ impl Session { } pub fn span_err(&self, sp: Span, msg: &str) { match split_msg_into_multilines(msg) { - Some(msg) => self.diagnostic().span_err(sp, &msg[]), + Some(msg) => self.diagnostic().span_err(sp, &msg[..]), None => self.diagnostic().span_err(sp, msg) } } pub fn span_err_with_code(&self, sp: Span, msg: &str, code: &str) { match split_msg_into_multilines(msg) { - Some(msg) => self.diagnostic().span_err_with_code(sp, &msg[], code), + Some(msg) => self.diagnostic().span_err_with_code(sp, &msg[..], code), None => self.diagnostic().span_err_with_code(sp, msg, code) } } diff --git a/src/librustc/util/lev_distance.rs b/src/librustc/util/lev_distance.rs index ca1bb7d7a94..10a7b2abea8 100644 --- a/src/librustc/util/lev_distance.rs +++ b/src/librustc/util/lev_distance.rs @@ -48,7 +48,7 @@ fn test_lev_distance() { for c in (0u32..MAX as u32) .filter_map(|i| from_u32(i)) .map(|i| i.to_string()) { - assert_eq!(lev_distance(&c[], &c[]), 0); + assert_eq!(lev_distance(&c[..], &c[..]), 0); } let a = "\nMäry häd ä little lämb\n\nLittle lämb\n"; diff --git a/src/librustc/util/ppaux.rs b/src/librustc/util/ppaux.rs index 426101e858a..7dd25e9dc19 100644 --- a/src/librustc/util/ppaux.rs +++ b/src/librustc/util/ppaux.rs @@ -292,7 +292,7 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String { Some(def_id) => { s.push_str(" {"); let path_str = ty::item_path_str(cx, def_id); - s.push_str(&path_str[]); + s.push_str(&path_str[..]); s.push_str("}"); } None => { } @@ -376,7 +376,7 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String { .iter() .map(|elem| ty_to_string(cx, *elem)) .collect::>(); - match &strs[] { + match &strs[..] 
{ [ref string] => format!("({},)", string), strs => format!("({})", strs.connect(", ")) } @@ -625,7 +625,7 @@ impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for [T] { impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for OwnedSlice { fn repr(&self, tcx: &ctxt<'tcx>) -> String { - repr_vec(tcx, &self[]) + repr_vec(tcx, &self[..]) } } @@ -633,7 +633,7 @@ impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for OwnedSlice { // autoderef cannot convert the &[T] handler impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for Vec { fn repr(&self, tcx: &ctxt<'tcx>) -> String { - repr_vec(tcx, &self[]) + repr_vec(tcx, &self[..]) } } @@ -673,7 +673,7 @@ impl<'tcx> UserString<'tcx> for TraitAndProjections<'tcx> { &base, trait_ref.substs, trait_ref.def_id, - &projection_bounds[], + &projection_bounds[..], || ty::lookup_trait_def(tcx, trait_ref.def_id).generics.clone()) } } @@ -1259,7 +1259,7 @@ impl<'tcx, T> UserString<'tcx> for ty::Binder } }) }); - let names: Vec<_> = names.iter().map(|s| &s[]).collect(); + let names: Vec<_> = names.iter().map(|s| &s[..]).collect(); let value_str = unbound_value.user_string(tcx); if names.len() == 0 { diff --git a/src/librustc_back/archive.rs b/src/librustc_back/archive.rs index b779963a219..c45ee258342 100644 --- a/src/librustc_back/archive.rs +++ b/src/librustc_back/archive.rs @@ -53,7 +53,7 @@ fn run_ar(handler: &ErrorHandler, maybe_ar_prog: &Option, args: &str, cwd: Option<&Path>, paths: &[&Path]) -> ProcessOutput { let ar = match *maybe_ar_prog { - Some(ref ar) => &ar[], + Some(ref ar) => &ar[..], None => "ar" }; let mut cmd = Command::new(ar); @@ -84,7 +84,7 @@ fn run_ar(handler: &ErrorHandler, maybe_ar_prog: &Option, o }, Err(e) => { - handler.err(&format!("could not exec `{}`: {}", &ar[], + handler.err(&format!("could not exec `{}`: {}", &ar[..], e)[]); handler.abort_if_errors(); panic!("rustc::back::archive::run_ar() should not reach this point"); @@ -101,10 +101,10 @@ pub fn find_library(name: &str, osprefix: &str, ossuffix: &str, for path in search_paths { debug!("looking for {} inside {:?}", name, path.display()); - let test = path.join(&oslibname[]); + let test = path.join(&oslibname[..]); if test.exists() { return test } if oslibname != unixlibname { - let test = path.join(&unixlibname[]); + let test = path.join(&unixlibname[..]); if test.exists() { return test } } } @@ -192,12 +192,12 @@ impl<'a> ArchiveBuilder<'a> { // as simple comparison is not enough - there // might be also an extra name suffix let obj_start = format!("{}", name); - let obj_start = &obj_start[]; + let obj_start = &obj_start[..]; // Ignoring all bytecode files, no matter of // name let bc_ext = ".bytecode.deflate"; - self.add_archive(rlib, &name[], |fname: &str| { + self.add_archive(rlib, &name[..], |fname: &str| { let skip_obj = lto && fname.starts_with(obj_start) && fname.ends_with(".o"); skip_obj || fname.ends_with(bc_ext) || fname == METADATA_FILENAME @@ -234,7 +234,7 @@ impl<'a> ArchiveBuilder<'a> { // allow running `ar s file.a` to update symbols only. if self.should_update_symbols { run_ar(self.archive.handler, &self.archive.maybe_ar_prog, - "s", Some(self.work_dir.path()), &args[]); + "s", Some(self.work_dir.path()), &args[..]); } return self.archive; } @@ -254,7 +254,7 @@ impl<'a> ArchiveBuilder<'a> { // Add the archive members seen so far, without updating the // symbol table (`S`). 
run_ar(self.archive.handler, &self.archive.maybe_ar_prog, - "cruS", Some(self.work_dir.path()), &args[]); + "cruS", Some(self.work_dir.path()), &args[..]); args.clear(); args.push(&abs_dst); @@ -269,7 +269,7 @@ impl<'a> ArchiveBuilder<'a> { // necessary. let flags = if self.should_update_symbols { "crus" } else { "cruS" }; run_ar(self.archive.handler, &self.archive.maybe_ar_prog, - flags, Some(self.work_dir.path()), &args[]); + flags, Some(self.work_dir.path()), &args[..]); self.archive } @@ -312,7 +312,7 @@ impl<'a> ArchiveBuilder<'a> { } else { filename }; - let new_filename = self.work_dir.path().join(&filename[]); + let new_filename = self.work_dir.path().join(&filename[..]); try!(fs::rename(file, &new_filename)); self.members.push(Path::new(filename)); } diff --git a/src/librustc_back/rpath.rs b/src/librustc_back/rpath.rs index 36bbd4b9872..21e4f55ffa7 100644 --- a/src/librustc_back/rpath.rs +++ b/src/librustc_back/rpath.rs @@ -44,8 +44,8 @@ pub fn get_rpath_flags(config: RPathConfig) -> Vec where l.map(|p| p.clone()) }).collect::>(); - let rpaths = get_rpaths(config, &libs[]); - flags.push_all(&rpaths_to_flags(&rpaths[])[]); + let rpaths = get_rpaths(config, &libs[..]); + flags.push_all(&rpaths_to_flags(&rpaths[..])[]); flags } @@ -82,14 +82,14 @@ fn get_rpaths(mut config: RPathConfig, libs: &[Path]) -> Vec } } - log_rpaths("relative", &rel_rpaths[]); - log_rpaths("fallback", &fallback_rpaths[]); + log_rpaths("relative", &rel_rpaths[..]); + log_rpaths("fallback", &fallback_rpaths[..]); let mut rpaths = rel_rpaths; - rpaths.push_all(&fallback_rpaths[]); + rpaths.push_all(&fallback_rpaths[..]); // Remove duplicates - let rpaths = minimize_rpaths(&rpaths[]); + let rpaths = minimize_rpaths(&rpaths[..]); return rpaths; } @@ -139,7 +139,7 @@ fn minimize_rpaths(rpaths: &[String]) -> Vec { let mut set = HashSet::new(); let mut minimized = Vec::new(); for rpath in rpaths { - if set.insert(&rpath[]) { + if set.insert(&rpath[..]) { minimized.push(rpath.clone()); } } diff --git a/src/librustc_back/target/mod.rs b/src/librustc_back/target/mod.rs index 692e6b474fd..01a5f0d6e20 100644 --- a/src/librustc_back/target/mod.rs +++ b/src/librustc_back/target/mod.rs @@ -254,18 +254,18 @@ impl Target { macro_rules! 
key { ($key_name:ident) => ( { let name = (stringify!($key_name)).replace("_", "-"); - obj.find(&name[]).map(|o| o.as_string() + obj.find(&name[..]).map(|o| o.as_string() .map(|s| base.options.$key_name = s.to_string())); } ); ($key_name:ident, bool) => ( { let name = (stringify!($key_name)).replace("_", "-"); - obj.find(&name[]) + obj.find(&name[..]) .map(|o| o.as_boolean() .map(|s| base.options.$key_name = s)); } ); ($key_name:ident, list) => ( { let name = (stringify!($key_name)).replace("_", "-"); - obj.find(&name[]).map(|o| o.as_array() + obj.find(&name[..]).map(|o| o.as_array() .map(|v| base.options.$key_name = v.iter() .map(|a| a.as_string().unwrap().to_string()).collect() ) diff --git a/src/librustc_borrowck/borrowck/check_loans.rs b/src/librustc_borrowck/borrowck/check_loans.rs index a18e8b16e8b..abe01d193b4 100644 --- a/src/librustc_borrowck/borrowck/check_loans.rs +++ b/src/librustc_borrowck/borrowck/check_loans.rs @@ -656,7 +656,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { &self.bccx.loan_path_to_string(move_path)[]) }; - self.bccx.span_err(span, &err_message[]); + self.bccx.span_err(span, &err_message[..]); self.bccx.span_note( loan_span, &format!("borrow of `{}` occurs here", diff --git a/src/librustc_borrowck/borrowck/fragments.rs b/src/librustc_borrowck/borrowck/fragments.rs index bee1ada28e3..c873831cb0f 100644 --- a/src/librustc_borrowck/borrowck/fragments.rs +++ b/src/librustc_borrowck/borrowck/fragments.rs @@ -38,7 +38,7 @@ enum Fragment { // This represents the collection of all but one of the elements // from an array at the path described by the move path index. // Note that attached MovePathIndex should have mem_categorization - // of InteriorElement (i.e. array dereference `&foo[]`). + // of InteriorElement (i.e. array dereference `&foo[..]`). AllButOneFrom(MovePathIndex), } @@ -198,11 +198,11 @@ pub fn fixup_fragment_sets<'tcx>(this: &MoveData<'tcx>, tcx: &ty::ctxt<'tcx>) { // First, filter out duplicates moved.sort(); moved.dedup(); - debug!("fragments 1 moved: {:?}", path_lps(&moved[])); + debug!("fragments 1 moved: {:?}", path_lps(&moved[..])); assigned.sort(); assigned.dedup(); - debug!("fragments 1 assigned: {:?}", path_lps(&assigned[])); + debug!("fragments 1 assigned: {:?}", path_lps(&assigned[..])); // Second, build parents from the moved and assigned. for m in &moved { @@ -222,14 +222,14 @@ pub fn fixup_fragment_sets<'tcx>(this: &MoveData<'tcx>, tcx: &ty::ctxt<'tcx>) { parents.sort(); parents.dedup(); - debug!("fragments 2 parents: {:?}", path_lps(&parents[])); + debug!("fragments 2 parents: {:?}", path_lps(&parents[..])); // Third, filter the moved and assigned fragments down to just the non-parents - moved.retain(|f| non_member(*f, &parents[])); - debug!("fragments 3 moved: {:?}", path_lps(&moved[])); + moved.retain(|f| non_member(*f, &parents[..])); + debug!("fragments 3 moved: {:?}", path_lps(&moved[..])); - assigned.retain(|f| non_member(*f, &parents[])); - debug!("fragments 3 assigned: {:?}", path_lps(&assigned[])); + assigned.retain(|f| non_member(*f, &parents[..])); + debug!("fragments 3 assigned: {:?}", path_lps(&assigned[..])); // Fourth, build the leftover from the moved, assigned, and parents. for m in &moved { @@ -247,16 +247,16 @@ pub fn fixup_fragment_sets<'tcx>(this: &MoveData<'tcx>, tcx: &ty::ctxt<'tcx>) { unmoved.sort(); unmoved.dedup(); - debug!("fragments 4 unmoved: {:?}", frag_lps(&unmoved[])); + debug!("fragments 4 unmoved: {:?}", frag_lps(&unmoved[..])); // Fifth, filter the leftover fragments down to its core. 
unmoved.retain(|f| match *f { AllButOneFrom(_) => true, - Just(mpi) => non_member(mpi, &parents[]) && - non_member(mpi, &moved[]) && - non_member(mpi, &assigned[]) + Just(mpi) => non_member(mpi, &parents[..]) && + non_member(mpi, &moved[..]) && + non_member(mpi, &assigned[..]) }); - debug!("fragments 5 unmoved: {:?}", frag_lps(&unmoved[])); + debug!("fragments 5 unmoved: {:?}", frag_lps(&unmoved[..])); // Swap contents back in. fragments.unmoved_fragments = unmoved; @@ -437,7 +437,7 @@ fn add_fragment_siblings_for_extension<'tcx>(this: &MoveData<'tcx>, let msg = format!("type {} ({:?}) is not fragmentable", parent_ty.repr(tcx), sty_and_variant_info); let opt_span = origin_id.and_then(|id|tcx.map.opt_span(id)); - tcx.sess.opt_span_bug(opt_span, &msg[]) + tcx.sess.opt_span_bug(opt_span, &msg[..]) } } } diff --git a/src/librustc_borrowck/borrowck/mod.rs b/src/librustc_borrowck/borrowck/mod.rs index 93d97a054a4..518e4bc472c 100644 --- a/src/librustc_borrowck/borrowck/mod.rs +++ b/src/librustc_borrowck/borrowck/mod.rs @@ -137,7 +137,7 @@ fn borrowck_fn(this: &mut BorrowckCtxt, check_loans::check_loans(this, &loan_dfcx, flowed_moves, - &all_loans[], + &all_loans[..], id, decl, body); diff --git a/src/librustc_borrowck/graphviz.rs b/src/librustc_borrowck/graphviz.rs index 56bf3ae7fd5..39c9d9ba6ad 100644 --- a/src/librustc_borrowck/graphviz.rs +++ b/src/librustc_borrowck/graphviz.rs @@ -89,7 +89,7 @@ impl<'a, 'tcx> DataflowLabeller<'a, 'tcx> { set.push_str(", "); } let loan_str = self.borrowck_ctxt.loan_path_to_string(&*lp); - set.push_str(&loan_str[]); + set.push_str(&loan_str[..]); saw_some = true; true }); diff --git a/src/librustc_driver/driver.rs b/src/librustc_driver/driver.rs index 728ff647599..a260997f605 100644 --- a/src/librustc_driver/driver.rs +++ b/src/librustc_driver/driver.rs @@ -85,7 +85,7 @@ pub fn compile_input(sess: Session, let expanded_crate = match phase_2_configure_and_expand(&sess, krate, - &id[], + &id[..], addl_plugins) { None => return, Some(k) => k @@ -99,20 +99,20 @@ pub fn compile_input(sess: Session, &sess, outdir, &expanded_crate, - &id[])); + &id[..])); let mut forest = ast_map::Forest::new(expanded_crate); let arenas = ty::CtxtArenas::new(); let ast_map = assign_node_ids_and_map(&sess, &mut forest); - write_out_deps(&sess, input, &outputs, &id[]); + write_out_deps(&sess, input, &outputs, &id[..]); controller_entry_point!(after_write_deps, CompileState::state_after_write_deps(input, &sess, outdir, &ast_map, - &id[])); + &id[..])); let analysis = phase_3_run_analysis_passes(sess, ast_map, diff --git a/src/librustc_driver/lib.rs b/src/librustc_driver/lib.rs index ac91a0098ea..2550432c810 100644 --- a/src/librustc_driver/lib.rs +++ b/src/librustc_driver/lib.rs @@ -272,7 +272,7 @@ impl<'a> CompilerCalls<'a> for RustcDefaultCalls { -> Compilation { match matches.opt_str("explain") { Some(ref code) => { - match descriptions.find_description(&code[]) { + match descriptions.find_description(&code[..]) { Some(ref description) => { println!("{}", description); } @@ -582,7 +582,7 @@ Available lint options: for lint in lints { let name = lint.name_lower().replace("_", "-"); println!(" {} {:7.7} {}", - padded(&name[]), lint.default_level.as_str(), lint.desc); + padded(&name[..]), lint.default_level.as_str(), lint.desc); } println!("\n"); }; @@ -612,7 +612,7 @@ Available lint options: let desc = to.into_iter().map(|x| x.as_str().replace("_", "-")) .collect::>().connect(", "); println!(" {} {}", - padded(&name[]), desc); + padded(&name[..]), desc); } println!("\n"); }; @@ -678,7 
+678,7 @@ pub fn handle_options(mut args: Vec) -> Option { } let matches = - match getopts::getopts(&args[], &config::optgroups()[]) { + match getopts::getopts(&args[..], &config::optgroups()[]) { Ok(m) => m, Err(f_stable_attempt) => { // redo option parsing, including unstable options this time, @@ -803,7 +803,7 @@ pub fn monitor(f: F) { "run with `RUST_BACKTRACE=1` for a backtrace".to_string(), ]; for note in &xs { - emitter.emit(None, ¬e[], None, diagnostic::Note) + emitter.emit(None, ¬e[..], None, diagnostic::Note) } match r.read_to_string() { diff --git a/src/librustc_driver/pretty.rs b/src/librustc_driver/pretty.rs index 5dfef6c775e..0fbfa5fd89d 100644 --- a/src/librustc_driver/pretty.rs +++ b/src/librustc_driver/pretty.rs @@ -383,7 +383,7 @@ impl UserIdentifiedItem { ItemViaNode(node_id) => NodesMatchingDirect(Some(node_id).into_iter()), ItemViaPath(ref parts) => - NodesMatchingSuffix(map.nodes_matching_suffix(&parts[])), + NodesMatchingSuffix(map.nodes_matching_suffix(&parts[..])), } } @@ -395,7 +395,7 @@ impl UserIdentifiedItem { user_option, self.reconstructed_input(), is_wrong_because); - sess.fatal(&message[]) + sess.fatal(&message[..]) }; let mut saw_node = ast::DUMMY_NODE_ID; @@ -522,7 +522,7 @@ pub fn pretty_print_input(sess: Session, let is_expanded = needs_expansion(&ppm); let compute_ast_map = needs_ast_map(&ppm, &opt_uii); let krate = if compute_ast_map { - match driver::phase_2_configure_and_expand(&sess, krate, &id[], None) { + match driver::phase_2_configure_and_expand(&sess, krate, &id[..], None) { None => return, Some(k) => k } @@ -541,7 +541,7 @@ pub fn pretty_print_input(sess: Session, }; let src_name = driver::source_name(input); - let src = sess.codemap().get_filemap(&src_name[]) + let src = sess.codemap().get_filemap(&src_name[..]) .src.as_bytes().to_vec(); let mut rdr = MemReader::new(src); @@ -632,8 +632,8 @@ pub fn pretty_print_input(sess: Session, // point to what was found, if there's an // accessible span. match ast_map.opt_span(nodeid) { - Some(sp) => sess.span_fatal(sp, &message[]), - None => sess.fatal(&message[]) + Some(sp) => sess.span_fatal(sp, &message[..]), + None => sess.fatal(&message[..]) } } } diff --git a/src/librustc_driver/test.rs b/src/librustc_driver/test.rs index 7105a6cc488..97ecb0b97e9 100644 --- a/src/librustc_driver/test.rs +++ b/src/librustc_driver/test.rs @@ -278,7 +278,7 @@ impl<'a, 'tcx> Env<'a, 'tcx> { pub fn t_param(&self, space: subst::ParamSpace, index: u32) -> Ty<'tcx> { let name = format!("T{}", index); - ty::mk_param(self.infcx.tcx, space, index, token::intern(&name[])) + ty::mk_param(self.infcx.tcx, space, index, token::intern(&name[..])) } pub fn re_early_bound(&self, diff --git a/src/librustc_privacy/lib.rs b/src/librustc_privacy/lib.rs index 96e146fc894..5662a74a53d 100644 --- a/src/librustc_privacy/lib.rs +++ b/src/librustc_privacy/lib.rs @@ -585,10 +585,10 @@ impl<'a, 'tcx> PrivacyVisitor<'a, 'tcx> { match result { None => true, Some((span, msg, note)) => { - self.tcx.sess.span_err(span, &msg[]); + self.tcx.sess.span_err(span, &msg[..]); match note { Some((span, msg)) => { - self.tcx.sess.span_note(span, &msg[]) + self.tcx.sess.span_note(span, &msg[..]) } None => {}, } @@ -690,7 +690,7 @@ impl<'a, 'tcx> PrivacyVisitor<'a, 'tcx> { UnnamedField(idx) => format!("field #{} of {} is private", idx + 1, struct_desc), }; - self.tcx.sess.span_err(span, &msg[]); + self.tcx.sess.span_err(span, &msg[..]); } // Given the ID of a method, checks to ensure it's in scope. 
diff --git a/src/librustc_resolve/lib.rs b/src/librustc_resolve/lib.rs index 874c8f2a940..eba00ad484e 100644 --- a/src/librustc_resolve/lib.rs +++ b/src/librustc_resolve/lib.rs @@ -1072,7 +1072,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { &import_directive.module_path[], import_directive.subclass), help); - self.resolve_error(span, &msg[]); + self.resolve_error(span, &msg[..]); } Indeterminate => break, // Bail out. We'll come around next time. Success(()) => () // Good. Continue. @@ -1102,7 +1102,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { .iter() .map(|seg| seg.identifier.name) .collect(); - self.names_to_string(&names[]) + self.names_to_string(&names[..]) } fn import_directive_subclass_to_string(&mut self, @@ -1166,7 +1166,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { let module_path = &import_directive.module_path; debug!("(resolving import for module) resolving import `{}::...` in `{}`", - self.names_to_string(&module_path[]), + self.names_to_string(&module_path[..]), self.module_to_string(&*module_)); // First, resolve the module path for the directive, if necessary. @@ -1175,7 +1175,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { Some((self.graph_root.get_module(), LastMod(AllPublic))) } else { match self.resolve_module_path(module_.clone(), - &module_path[], + &module_path[..], DontUseLexicalScope, import_directive.span, ImportSearch) { @@ -1768,7 +1768,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { ValueNS => "value", }, &token::get_name(name)); - span_err!(self.session, import_span, E0252, "{}", &msg[]); + span_err!(self.session, import_span, E0252, "{}", &msg[..]); } Some(_) | None => {} } @@ -1783,7 +1783,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { if !name_bindings.defined_in_namespace_with(namespace, IMPORTABLE) { let msg = format!("`{}` is not directly importable", token::get_name(name)); - span_err!(self.session, import_span, E0253, "{}", &msg[]); + span_err!(self.session, import_span, E0253, "{}", &msg[..]); } } @@ -1804,7 +1804,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { crate in this module \ (maybe you meant `use {0}::*`?)", &token::get_name(name)); - span_err!(self.session, import_span, E0254, "{}", &msg[]); + span_err!(self.session, import_span, E0254, "{}", &msg[..]); } Some(_) | None => {} } @@ -1826,7 +1826,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { let msg = format!("import `{}` conflicts with value \ in this module", &token::get_name(name)); - span_err!(self.session, import_span, E0255, "{}", &msg[]); + span_err!(self.session, import_span, E0255, "{}", &msg[..]); if let Some(span) = value.value_span { self.session.span_note(span, "conflicting value here"); @@ -1844,7 +1844,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { let msg = format!("import `{}` conflicts with type in \ this module", &token::get_name(name)); - span_err!(self.session, import_span, E0256, "{}", &msg[]); + span_err!(self.session, import_span, E0256, "{}", &msg[..]); if let Some(span) = ty.type_span { self.session.span_note(span, "note conflicting type here") @@ -1857,7 +1857,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { let msg = format!("inherent implementations \ are only allowed on types \ defined in the current module"); - span_err!(self.session, span, E0257, "{}", &msg[]); + span_err!(self.session, span, E0257, "{}", &msg[..]); self.session.span_note(import_span, "import from other module here") } @@ -1866,7 +1866,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { let msg = format!("import `{}` conflicts with existing \ submodule", &token::get_name(name)); - span_err!(self.session, import_span, E0258, "{}", &msg[]); + span_err!(self.session, 
import_span, E0258, "{}", &msg[..]); if let Some(span) = ty.type_span { self.session.span_note(span, "note conflicting module here") @@ -1953,7 +1953,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { let segment_name = token::get_name(name); let module_name = self.module_to_string(&*search_module); let mut span = span; - let msg = if "???" == &module_name[] { + let msg = if "???" == &module_name[..] { span.hi = span.lo + Pos::from_usize(segment_name.len()); match search_parent_externals(name, @@ -2066,7 +2066,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { match module_prefix_result { Failed(None) => { let mpath = self.names_to_string(module_path); - let mpath = &mpath[]; + let mpath = &mpath[..]; match mpath.rfind(':') { Some(idx) => { let msg = format!("Could not find `{}` in `{}`", @@ -2369,11 +2369,11 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { let mut containing_module; let mut i; let first_module_path_string = token::get_name(module_path[0]); - if "self" == &first_module_path_string[] { + if "self" == &first_module_path_string[..] { containing_module = self.get_nearest_normal_module_parent_or_self(module_); i = 1; - } else if "super" == &first_module_path_string[] { + } else if "super" == &first_module_path_string[..] { containing_module = self.get_nearest_normal_module_parent_or_self(module_); i = 0; // We'll handle `super` below. @@ -2384,7 +2384,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { // Now loop through all the `super`s we find. while i < module_path.len() { let string = token::get_name(module_path[i]); - if "super" != &string[] { + if "super" != &string[..] { break } debug!("(resolving module prefix) resolving `super` at {}", @@ -2515,7 +2515,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { } else { let err = format!("unresolved import (maybe you meant `{}::*`?)", sn); - self.resolve_error((*imports)[index].span, &err[]); + self.resolve_error((*imports)[index].span, &err[..]); } } @@ -2853,7 +2853,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { generics, implemented_traits, &**self_type, - &impl_items[]); + &impl_items[..]); } ItemTrait(_, ref generics, ref bounds, ref trait_items) => { @@ -3196,7 +3196,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { }; let msg = format!("attempt to {} a nonexistent trait `{}`", usage_str, path_str); - self.resolve_error(trait_reference.path.span, &msg[]); + self.resolve_error(trait_reference.path.span, &msg[..]); } Some(def) => { match def { @@ -3624,7 +3624,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { None => { let msg = format!("use of undeclared type name `{}`", self.path_names_to_string(path)); - self.resolve_error(ty.span, &msg[]); + self.resolve_error(ty.span, &msg[..]); } } } @@ -3825,7 +3825,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { def: {:?}", result); let msg = format!("`{}` does not name a structure", self.path_names_to_string(path)); - self.resolve_error(path.span, &msg[]); + self.resolve_error(path.span, &msg[..]); } } } @@ -4082,7 +4082,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { let last_private; let module = self.current_module.clone(); match self.resolve_module_path(module, - &module_path[], + &module_path[..], UseLexicalScope, path.span, PathSearch) { @@ -4140,7 +4140,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { let containing_module; let last_private; match self.resolve_module_path_from_root(root_module, - &module_path[], + &module_path[..], 0, path.span, PathSearch, @@ -4150,7 +4150,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { Some((span, msg)) => (span, msg), None => { let msg = format!("Use of undeclared module `::{}`", - self.names_to_string(&module_path[])); + 
self.names_to_string(&module_path[..])); (path.span, msg) } }; @@ -4309,7 +4309,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { } } else { match this.resolve_module_path(root, - &name_path[], + &name_path[..], UseLexicalScope, span, PathSearch) { @@ -4347,7 +4347,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { let name_path = path.segments.iter().map(|seg| seg.identifier.name).collect::>(); // Look for a method in the current self type's impl module. - match get_module(self, path.span, &name_path[]) { + match get_module(self, path.span, &name_path[..]) { Some(module) => match module.children.borrow().get(&name) { Some(binding) => { let p_str = self.path_names_to_string(&path); @@ -4568,7 +4568,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { def: {:?}", result); let msg = format!("`{}` does not name a structure", self.path_names_to_string(path)); - self.resolve_error(path.span, &msg[]); + self.resolve_error(path.span, &msg[..]); } } diff --git a/src/librustc_trans/back/link.rs b/src/librustc_trans/back/link.rs index 9f26e9182ab..ef849bb3dca 100644 --- a/src/librustc_trans/back/link.rs +++ b/src/librustc_trans/back/link.rs @@ -127,7 +127,7 @@ pub fn find_crate_name(sess: Option<&Session>, attrs: &[ast::Attribute], input: &Input) -> String { let validate = |s: String, span: Option| { - creader::validate_crate_name(sess, &s[], span); + creader::validate_crate_name(sess, &s[..], span); s }; @@ -141,11 +141,11 @@ pub fn find_crate_name(sess: Option<&Session>, if let Some(sess) = sess { if let Some(ref s) = sess.opts.crate_name { if let Some((attr, ref name)) = attr_crate_name { - if *s != &name[] { + if *s != &name[..] { let msg = format!("--crate-name and #[crate_name] are \ required to match, but `{}` != `{}`", s, name); - sess.span_err(attr.span, &msg[]); + sess.span_err(attr.span, &msg[..]); } } return validate(s.clone(), None); @@ -195,7 +195,7 @@ fn symbol_hash<'tcx>(tcx: &ty::ctxt<'tcx>, symbol_hasher.input_str("-"); symbol_hasher.input_str(link_meta.crate_hash.as_str()); for meta in &*tcx.sess.crate_metadata.borrow() { - symbol_hasher.input_str(&meta[]); + symbol_hasher.input_str(&meta[..]); } symbol_hasher.input_str("-"); symbol_hasher.input_str(&encoder::encoded_ty(tcx, t)[]); @@ -262,7 +262,7 @@ pub fn sanitize(s: &str) -> String { if result.len() > 0 && result.as_bytes()[0] != '_' as u8 && ! 
(result.as_bytes()[0] as char).is_xid_start() { - return format!("_{}", &result[]); + return format!("_{}", &result[..]); } return result; @@ -331,17 +331,17 @@ pub fn mangle_exported_name<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, path: PathEl hash.push(EXTRA_CHARS.as_bytes()[extra2] as char); hash.push(EXTRA_CHARS.as_bytes()[extra3] as char); - exported_name(path, &hash[]) + exported_name(path, &hash[..]) } pub fn mangle_internal_name_by_type_and_seq<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>, name: &str) -> String { let s = ppaux::ty_to_string(ccx.tcx(), t); - let path = [PathName(token::intern(&s[])), + let path = [PathName(token::intern(&s[..])), gensym_name(name)]; let hash = get_symbol_hash(ccx, t); - mangle(path.iter().cloned(), Some(&hash[])) + mangle(path.iter().cloned(), Some(&hash[..])) } pub fn mangle_internal_name_by_path_and_seq(path: PathElems, flav: &str) -> String { @@ -541,7 +541,7 @@ fn link_rlib<'a>(sess: &'a Session, for &(ref l, kind) in &*sess.cstore.get_used_libraries().borrow() { match kind { cstore::NativeStatic => { - ab.add_native_library(&l[]).unwrap(); + ab.add_native_library(&l[..]).unwrap(); } cstore::NativeFramework | cstore::NativeUnknown => {} } @@ -619,7 +619,7 @@ fn link_rlib<'a>(sess: &'a Session, e)[]) }; - let bc_data_deflated = match flate::deflate_bytes(&bc_data[]) { + let bc_data_deflated = match flate::deflate_bytes(&bc_data[..]) { Some(compressed) => compressed, None => sess.fatal(&format!("failed to compress bytecode from {}", bc_filename.display())[]) @@ -678,7 +678,7 @@ fn write_rlib_bytecode_object_v1(writer: &mut T, try! { writer.write_all(RLIB_BYTECODE_OBJECT_MAGIC) }; try! { writer.write_le_u32(1) }; try! { writer.write_le_u64(bc_data_deflated_size) }; - try! { writer.write_all(&bc_data_deflated[]) }; + try! 
{ writer.write_all(&bc_data_deflated[..]) }; let number_of_bytes_written_so_far = RLIB_BYTECODE_OBJECT_MAGIC.len() + // magic id @@ -733,7 +733,7 @@ fn link_staticlib(sess: &Session, obj_filename: &Path, out_filename: &Path) { continue } }; - ab.add_rlib(&p, &name[], sess.lto()).unwrap(); + ab.add_rlib(&p, &name[..], sess.lto()).unwrap(); let native_libs = csearch::get_native_libraries(&sess.cstore, cnum); all_native_libs.extend(native_libs.into_iter()); @@ -769,7 +769,7 @@ fn link_natively(sess: &Session, trans: &CrateTranslation, dylib: bool, // The invocations of cc share some flags across platforms let pname = get_cc_prog(sess); - let mut cmd = Command::new(&pname[]); + let mut cmd = Command::new(&pname[..]); cmd.args(&sess.target.target.options.pre_link_args[]); link_args(&mut cmd, sess, dylib, tmpdir.path(), @@ -798,7 +798,7 @@ fn link_natively(sess: &Session, trans: &CrateTranslation, dylib: bool, sess.note(&format!("{:?}", &cmd)[]); let mut output = prog.error.clone(); output.push_all(&prog.output[]); - sess.note(str::from_utf8(&output[]).unwrap()); + sess.note(str::from_utf8(&output[..]).unwrap()); sess.abort_if_errors(); } debug!("linker stderr:\n{}", String::from_utf8(prog.error).unwrap()); @@ -868,7 +868,7 @@ fn link_args(cmd: &mut Command, let mut v = b"-Wl,-force_load,".to_vec(); v.push_all(morestack.as_vec()); - cmd.arg(&v[]); + cmd.arg(&v[..]); } else { cmd.args(&["-Wl,--whole-archive", "-lmorestack", "-Wl,--no-whole-archive"]); } @@ -993,7 +993,7 @@ fn link_args(cmd: &mut Command, if sess.opts.cg.rpath { let mut v = "-Wl,-install_name,@rpath/".as_bytes().to_vec(); v.push_all(out_filename.filename().unwrap()); - cmd.arg(&v[]); + cmd.arg(&v[..]); } } else { cmd.arg("-shared"); @@ -1029,7 +1029,7 @@ fn link_args(cmd: &mut Command, // with any #[link_args] attributes found inside the crate let empty = Vec::new(); cmd.args(&sess.opts.cg.link_args.as_ref().unwrap_or(&empty)[]); - cmd.args(&used_link_args[]); + cmd.args(&used_link_args[..]); } // # Native library linking @@ -1086,14 +1086,14 @@ fn add_local_native_libraries(cmd: &mut Command, sess: &Session) { } else { // -force_load is the OSX equivalent of --whole-archive, but it // involves passing the full path to the library to link. 
- let lib = archive::find_library(&l[], + let lib = archive::find_library(&l[..], &sess.target.target.options.staticlib_prefix, &sess.target.target.options.staticlib_suffix, - &search_path[], + &search_path[..], &sess.diagnostic().handler); let mut v = b"-Wl,-force_load,".to_vec(); v.push_all(lib.as_vec()); - cmd.arg(&v[]); + cmd.arg(&v[..]); } } if takes_hints { @@ -1106,7 +1106,7 @@ fn add_local_native_libraries(cmd: &mut Command, sess: &Session) { cmd.arg(format!("-l{}", l)); } cstore::NativeFramework => { - cmd.arg("-framework").arg(&l[]); + cmd.arg("-framework").arg(&l[..]); } cstore::NativeStatic => unreachable!(), } @@ -1248,7 +1248,7 @@ fn add_upstream_rust_crates(cmd: &mut Command, sess: &Session, let mut v = "-l".as_bytes().to_vec(); v.push_all(unlib(&sess.target, cratepath.filestem().unwrap())); - cmd.arg(&v[]); + cmd.arg(&v[..]); } } @@ -1290,7 +1290,7 @@ fn add_upstream_native_libraries(cmd: &mut Command, sess: &Session) { } cstore::NativeFramework => { cmd.arg("-framework"); - cmd.arg(&lib[]); + cmd.arg(&lib[..]); } cstore::NativeStatic => { sess.bug("statics shouldn't be propagated"); diff --git a/src/librustc_trans/back/lto.rs b/src/librustc_trans/back/lto.rs index c88e76f4270..7c34fdd3ff8 100644 --- a/src/librustc_trans/back/lto.rs +++ b/src/librustc_trans/back/lto.rs @@ -132,7 +132,7 @@ pub fn run(sess: &session::Session, llmod: ModuleRef, bc_decoded.len() as libc::size_t) { write::llvm_err(sess.diagnostic().handler(), format!("failed to load bc of `{}`", - &name[])); + &name[..])); } }); } diff --git a/src/librustc_trans/back/write.rs b/src/librustc_trans/back/write.rs index 9934d9993d6..c7d7942f774 100644 --- a/src/librustc_trans/back/write.rs +++ b/src/librustc_trans/back/write.rs @@ -47,14 +47,14 @@ pub fn llvm_err(handler: &diagnostic::Handler, msg: String) -> ! 
{ unsafe { let cstr = llvm::LLVMRustGetLastError(); if cstr == ptr::null() { - handler.fatal(&msg[]); + handler.fatal(&msg[..]); } else { let err = ffi::c_str_to_bytes(&cstr); let err = String::from_utf8_lossy(err).to_string(); libc::free(cstr as *mut _); handler.fatal(&format!("{}: {}", - &msg[], - &err[])[]); + &msg[..], + &err[..])[]); } } } @@ -105,7 +105,7 @@ impl SharedEmitter { Some(ref code) => { handler.emit_with_code(None, &diag.msg[], - &code[], + &code[..], diag.lvl); }, None => { @@ -165,7 +165,7 @@ fn get_llvm_opt_level(optimize: config::OptLevel) -> llvm::CodeGenOptLevel { fn create_target_machine(sess: &Session) -> TargetMachineRef { let reloc_model_arg = match sess.opts.cg.relocation_model { - Some(ref s) => &s[], + Some(ref s) => &s[..], None => &sess.target.target.options.relocation_model[] }; let reloc_model = match reloc_model_arg { @@ -198,7 +198,7 @@ fn create_target_machine(sess: &Session) -> TargetMachineRef { let fdata_sections = ffunction_sections; let code_model_arg = match sess.opts.cg.code_model { - Some(ref s) => &s[], + Some(ref s) => &s[..], None => &sess.target.target.options.code_model[] }; @@ -365,7 +365,7 @@ unsafe extern "C" fn inline_asm_handler(diag: SMDiagnosticRef, let msg = llvm::build_string(|s| llvm::LLVMWriteSMDiagnosticToString(diag, s)) .expect("non-UTF8 SMDiagnostic"); - report_inline_asm(cgcx, &msg[], cookie); + report_inline_asm(cgcx, &msg[..], cookie); } unsafe extern "C" fn diagnostic_handler(info: DiagnosticInfoRef, user: *mut c_void) { @@ -711,7 +711,7 @@ pub fn run_passes(sess: &Session, }; let pname = get_cc_prog(sess); - let mut cmd = Command::new(&pname[]); + let mut cmd = Command::new(&pname[..]); cmd.args(&sess.target.target.options.pre_link_args[]); cmd.arg("-nostdlib"); @@ -829,12 +829,12 @@ pub fn run_passes(sess: &Session, for i in 0..trans.modules.len() { if modules_config.emit_obj { let ext = format!("{}.o", i); - remove(sess, &crate_output.with_extension(&ext[])); + remove(sess, &crate_output.with_extension(&ext[..])); } if modules_config.emit_bc && !keep_numbered_bitcode { let ext = format!("{}.bc", i); - remove(sess, &crate_output.with_extension(&ext[])); + remove(sess, &crate_output.with_extension(&ext[..])); } } @@ -960,7 +960,7 @@ fn run_work_multithreaded(sess: &Session, pub fn run_assembler(sess: &Session, outputs: &OutputFilenames) { let pname = get_cc_prog(sess); - let mut cmd = Command::new(&pname[]); + let mut cmd = Command::new(&pname[..]); cmd.arg("-c").arg("-o").arg(outputs.path(config::OutputTypeObject)) .arg(outputs.temp_path(config::OutputTypeAssembly)); @@ -975,7 +975,7 @@ pub fn run_assembler(sess: &Session, outputs: &OutputFilenames) { sess.note(&format!("{:?}", &cmd)[]); let mut note = prog.error.clone(); note.push_all(&prog.output[]); - sess.note(str::from_utf8(¬e[]).unwrap()); + sess.note(str::from_utf8(¬e[..]).unwrap()); sess.abort_if_errors(); } }, diff --git a/src/librustc_trans/save/mod.rs b/src/librustc_trans/save/mod.rs index cdcd917ee5e..8d2a2d51ee4 100644 --- a/src/librustc_trans/save/mod.rs +++ b/src/librustc_trans/save/mod.rs @@ -155,7 +155,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { }; self.fmt.sub_mod_ref_str(path.span, *span, - &qualname[], + &qualname[..], self.cur_scope); } } @@ -178,7 +178,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { }; self.fmt.sub_mod_ref_str(path.span, *span, - &qualname[], + &qualname[..], self.cur_scope); } } @@ -197,7 +197,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { let (ref span, ref qualname) = sub_paths[len-2]; self.fmt.sub_type_ref_str(path.span, *span, - 
&qualname[]); + &qualname[..]); // write the other sub-paths if len <= 2 { @@ -207,7 +207,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { for &(ref span, ref qualname) in sub_paths { self.fmt.sub_mod_ref_str(path.span, *span, - &qualname[], + &qualname[..], self.cur_scope); } } @@ -280,7 +280,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { id, qualname, &path_to_string(p)[], - &typ[]); + &typ[..]); } self.collected_paths.clear(); } @@ -356,7 +356,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { }; let qualname = format!("{}::{}", qualname, &get_ident(method.pe_ident())); - let qualname = &qualname[]; + let qualname = &qualname[..]; // record the decl for this def (if it has one) let decl_id = ty::trait_item_of_item(&self.analysis.ty_cx, @@ -436,9 +436,9 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { Some(sub_span) => self.fmt.field_str(field.span, Some(sub_span), field.node.id, - &name[], - &qualname[], - &typ[], + &name[..], + &qualname[..], + &typ[..], scope_id), None => self.sess.span_bug(field.span, &format!("Could not find sub-span for field {}", @@ -470,7 +470,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { self.fmt.typedef_str(full_span, Some(*param_ss), param.id, - &name[], + &name[..], ""); } self.visit_generics(generics); @@ -487,10 +487,10 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { self.fmt.fn_str(item.span, sub_span, item.id, - &qualname[], + &qualname[..], self.cur_scope); - self.process_formals(&decl.inputs, &qualname[]); + self.process_formals(&decl.inputs, &qualname[..]); // walk arg and return types for arg in &decl.inputs { @@ -504,7 +504,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { // walk the body self.nest(item.id, |v| v.visit_block(&*body)); - self.process_generic_params(ty_params, item.span, &qualname[], item.id); + self.process_generic_params(ty_params, item.span, &qualname[..], item.id); } fn process_static(&mut self, @@ -526,8 +526,8 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { sub_span, item.id, &get_ident(item.ident), - &qualname[], - &value[], + &qualname[..], + &value[..], &ty_to_string(&*typ)[], self.cur_scope); @@ -549,7 +549,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { sub_span, item.id, &get_ident(item.ident), - &qualname[], + &qualname[..], "", &ty_to_string(&*typ)[], self.cur_scope); @@ -575,17 +575,17 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { sub_span, item.id, ctor_id, - &qualname[], + &qualname[..], self.cur_scope, - &val[]); + &val[..]); // fields for field in &def.fields { - self.process_struct_field_def(field, &qualname[], item.id); + self.process_struct_field_def(field, &qualname[..], item.id); self.visit_ty(&*field.node.ty); } - self.process_generic_params(ty_params, item.span, &qualname[], item.id); + self.process_generic_params(ty_params, item.span, &qualname[..], item.id); } fn process_enum(&mut self, @@ -598,9 +598,9 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { Some(sub_span) => self.fmt.enum_str(item.span, Some(sub_span), item.id, - &enum_name[], + &enum_name[..], self.cur_scope, - &val[]), + &val[..]), None => self.sess.span_bug(item.span, &format!("Could not find subspan for enum {}", enum_name)[]), @@ -619,9 +619,9 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { self.span.span_for_first_ident(variant.span), variant.node.id, name, - &qualname[], - &enum_name[], - &val[], + &qualname[..], + &enum_name[..], + &val[..], item.id); for arg in args { self.visit_ty(&*arg.ty); @@ -637,9 +637,9 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { self.span.span_for_first_ident(variant.span), variant.node.id, ctor_id, - &qualname[], - &enum_name[], - &val[], + &qualname[..], + &enum_name[..], + 
&val[..], item.id); for field in &struct_def.fields { @@ -650,7 +650,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { } } - self.process_generic_params(ty_params, item.span, &enum_name[], item.id); + self.process_generic_params(ty_params, item.span, &enum_name[..], item.id); } fn process_impl(&mut self, @@ -724,9 +724,9 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { self.fmt.trait_str(item.span, sub_span, item.id, - &qualname[], + &qualname[..], self.cur_scope, - &val[]); + &val[..]); // super-traits for super_bound in &**trait_refs { @@ -758,7 +758,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { } // walk generics and methods - self.process_generic_params(generics, item.span, &qualname[], item.id); + self.process_generic_params(generics, item.span, &qualname[..], item.id); for method in methods { self.visit_trait_item(method) } @@ -776,9 +776,9 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { self.fmt.mod_str(item.span, sub_span, item.id, - &qualname[], + &qualname[..], self.cur_scope, - &filename[]); + &filename[..]); self.nest(item.id, |v| visit::walk_mod(v, m)); } @@ -990,7 +990,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { self.cur_scope); // walk receiver and args - visit::walk_exprs(self, &args[]); + visit::walk_exprs(self, &args[..]); } fn process_pat(&mut self, p:&ast::Pat) { @@ -1164,7 +1164,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> { item.id, cnum, name, - &location[], + &location[..], self.cur_scope); } ast::ItemFn(ref decl, _, _, ref ty_params, ref body) => @@ -1196,8 +1196,8 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> { self.fmt.typedef_str(item.span, sub_span, item.id, - &qualname[], - &value[]); + &qualname[..], + &value[..]); self.visit_ty(&**ty); self.process_generic_params(ty_params, item.span, &qualname, item.id); @@ -1260,7 +1260,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> { }; qualname.push_str(&get_ident(method_type.ident)); - let qualname = &qualname[]; + let qualname = &qualname[..]; let sub_span = self.span.sub_span_after_keyword(method_type.span, keywords::Fn); self.fmt.method_decl_str(method_type.span, @@ -1401,7 +1401,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> { let mut id = String::from_str("$"); id.push_str(&ex.id.to_string()[]); - self.process_formals(&decl.inputs, &id[]); + self.process_formals(&decl.inputs, &id[..]); // walk arg and return types for arg in &decl.inputs { @@ -1464,7 +1464,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> { Some(p.span), id, &path_to_string(p)[], - &value[], + &value[..], "") } def::DefVariant(..) | def::DefTy(..) | def::DefStruct(..) 
=> { @@ -1520,8 +1520,8 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> { sub_span, id, &path_to_string(p)[], - &value[], - &typ[]); + &value[..], + &typ[..]); } self.collected_paths.clear(); @@ -1603,7 +1603,7 @@ pub fn process_crate(sess: &Session, cur_scope: 0 }; - visitor.dump_crate_info(&cratename[], krate); + visitor.dump_crate_info(&cratename[..], krate); visit::walk_crate(&mut visitor, krate); } diff --git a/src/librustc_trans/save/recorder.rs b/src/librustc_trans/save/recorder.rs index 3bd04ed29d4..08e36bb1d85 100644 --- a/src/librustc_trans/save/recorder.rs +++ b/src/librustc_trans/save/recorder.rs @@ -43,7 +43,7 @@ impl Recorder { assert!(self.dump_spans); let result = format!("span,kind,{},{},text,\"{}\"\n", kind, su.extent_str(span), escape(su.snippet(span))); - self.record(&result[]); + self.record(&result[..]); } } @@ -170,14 +170,14 @@ impl<'a> FmtStrs<'a> { if s.len() > 1020 { &s[..1020] } else { - &s[] + &s[..] } }); let pairs = fields.iter().zip(values); let strs = pairs.map(|(f, v)| format!(",{},\"{}\"", f, escape(String::from_str(v)))); Some(strs.fold(String::new(), |mut s, ss| { - s.push_str(&ss[]); + s.push_str(&ss[..]); s })) } @@ -205,9 +205,9 @@ impl<'a> FmtStrs<'a> { }; let mut result = String::from_str(label); - result.push_str(&values_str[]); + result.push_str(&values_str[..]); result.push_str("\n"); - self.recorder.record(&result[]); + self.recorder.record(&result[..]); } pub fn record_with_span(&mut self, @@ -238,7 +238,7 @@ impl<'a> FmtStrs<'a> { None => return, }; let result = format!("{},{}{}\n", label, self.span.extent_str(sub_span), values_str); - self.recorder.record(&result[]); + self.recorder.record(&result[..]); } pub fn check_and_record(&mut self, diff --git a/src/librustc_trans/trans/_match.rs b/src/librustc_trans/trans/_match.rs index b0ed6f9e727..2826afb71a2 100644 --- a/src/librustc_trans/trans/_match.rs +++ b/src/librustc_trans/trans/_match.rs @@ -566,7 +566,7 @@ fn enter_opt<'a, 'p, 'blk, 'tcx>( param_env: param_env, }; enter_match(bcx, dm, m, col, val, |pats| - check_match::specialize(&mcx, &pats[], &ctor, col, variant_size) + check_match::specialize(&mcx, &pats[..], &ctor, col, variant_size) ) } @@ -987,7 +987,7 @@ fn compile_submatch<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>, if has_nested_bindings(m, col) { let expanded = expand_nested_bindings(bcx, m, col, val); compile_submatch_continue(bcx, - &expanded[], + &expanded[..], vals, chk, col, @@ -1233,10 +1233,10 @@ fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, } let opt_ms = enter_opt(opt_cx, pat_id, dm, m, opt, col, size, val); let mut opt_vals = unpacked; - opt_vals.push_all(&vals_left[]); + opt_vals.push_all(&vals_left[..]); compile_submatch(opt_cx, - &opt_ms[], - &opt_vals[], + &opt_ms[..], + &opt_vals[..], branch_chk.as_ref().unwrap_or(chk), has_genuine_default); } @@ -1255,8 +1255,8 @@ fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, } _ => { compile_submatch(else_cx, - &defaults[], - &vals_left[], + &defaults[..], + &vals_left[..], chk, has_genuine_default); } @@ -1468,7 +1468,7 @@ fn trans_match_inner<'blk, 'tcx>(scope_cx: Block<'blk, 'tcx>, && arm.pats.last().unwrap().node == ast::PatWild(ast::PatWildSingle) }); - compile_submatch(bcx, &matches[], &[discr_datum.val], &chk, has_default); + compile_submatch(bcx, &matches[..], &[discr_datum.val], &chk, has_default); let mut arm_cxs = Vec::new(); for arm_data in &arm_datas { @@ -1482,7 +1482,7 @@ fn trans_match_inner<'blk, 'tcx>(scope_cx: Block<'blk, 'tcx>, 
arm_cxs.push(bcx); } - bcx = scope_cx.fcx.join_blocks(match_id, &arm_cxs[]); + bcx = scope_cx.fcx.join_blocks(match_id, &arm_cxs[..]); return bcx; } diff --git a/src/librustc_trans/trans/adt.rs b/src/librustc_trans/trans/adt.rs index ddd720f1e84..eaf6eaa2f08 100644 --- a/src/librustc_trans/trans/adt.rs +++ b/src/librustc_trans/trans/adt.rs @@ -155,7 +155,7 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Repr<'tcx> { match t.sty { ty::ty_tup(ref elems) => { - Univariant(mk_struct(cx, &elems[], false, t), false) + Univariant(mk_struct(cx, &elems[..], false, t), false) } ty::ty_struct(def_id, substs) => { let fields = ty::lookup_struct_fields(cx.tcx(), def_id); @@ -167,13 +167,13 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let dtor = ty::ty_dtor(cx.tcx(), def_id).has_drop_flag(); if dtor { ftys.push(cx.tcx().types.bool); } - Univariant(mk_struct(cx, &ftys[], packed, t), dtor) + Univariant(mk_struct(cx, &ftys[..], packed, t), dtor) } ty::ty_closure(def_id, _, substs) => { let typer = NormalizingClosureTyper::new(cx.tcx()); let upvars = typer.closure_upvars(def_id, substs).unwrap(); let upvar_types = upvars.iter().map(|u| u.ty).collect::>(); - Univariant(mk_struct(cx, &upvar_types[], false, t), false) + Univariant(mk_struct(cx, &upvar_types[..], false, t), false) } ty::ty_enum(def_id, substs) => { let cases = get_cases(cx.tcx(), def_id, substs); @@ -187,7 +187,7 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, // (Typechecking will reject discriminant-sizing attrs.) assert_eq!(hint, attr::ReprAny); let ftys = if dtor { vec!(cx.tcx().types.bool) } else { vec!() }; - return Univariant(mk_struct(cx, &ftys[], false, t), + return Univariant(mk_struct(cx, &ftys[..], false, t), dtor); } @@ -219,7 +219,7 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, assert_eq!(hint, attr::ReprAny); let mut ftys = cases[0].tys.clone(); if dtor { ftys.push(cx.tcx().types.bool); } - return Univariant(mk_struct(cx, &ftys[], false, t), + return Univariant(mk_struct(cx, &ftys[..], false, t), dtor); } @@ -320,10 +320,10 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let mut ftys = vec!(ty_of_inttype(cx.tcx(), ity)); ftys.push_all(&c.tys[]); if dtor { ftys.push(cx.tcx().types.bool); } - mk_struct(cx, &ftys[], false, t) + mk_struct(cx, &ftys[..], false, t) }).collect(); - ensure_enum_fits_in_address_space(cx, &fields[], t); + ensure_enum_fits_in_address_space(cx, &fields[..], t); General(ity, fields, dtor) } @@ -453,9 +453,9 @@ fn mk_struct<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, .map(|&ty| type_of::sizing_type_of(cx, ty)).collect() }; - ensure_struct_fits_in_address_space(cx, &lltys[], packed, scapegoat); + ensure_struct_fits_in_address_space(cx, &lltys[..], packed, scapegoat); - let llty_rec = Type::struct_(cx, &lltys[], packed); + let llty_rec = Type::struct_(cx, &lltys[..], packed); Struct { size: machine::llsize_of_alloc(cx, llty_rec), align: machine::llalign_of_min(cx, llty_rec), @@ -659,7 +659,7 @@ fn generic_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, // of the size. // // FIXME #10604: this breaks when vector types are present. 
- let (size, align) = union_size_and_align(&sts[]); + let (size, align) = union_size_and_align(&sts[..]); let align_s = align as u64; assert_eq!(size % align_s, 0); let align_units = size / align_s - 1; @@ -682,10 +682,10 @@ fn generic_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, Type::array(&discr_ty, align_s / discr_size - 1), fill_ty]; match name { - None => Type::struct_(cx, &fields[], false), + None => Type::struct_(cx, &fields[..], false), Some(name) => { let mut llty = Type::named_struct(cx, name); - llty.set_struct_body(&fields[], false); + llty.set_struct_body(&fields[..], false); llty } } @@ -763,7 +763,7 @@ pub fn trans_get_discr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, r: &Repr<'tcx>, fn struct_wrapped_nullable_bitdiscr(bcx: Block, nndiscr: Disr, discrfield: &DiscrField, scrutinee: ValueRef) -> ValueRef { - let llptrptr = GEPi(bcx, scrutinee, &discrfield[]); + let llptrptr = GEPi(bcx, scrutinee, &discrfield[..]); let llptr = Load(bcx, llptrptr); let cmp = if nndiscr == 0 { IntEQ } else { IntNE }; ICmp(bcx, cmp, llptr, C_null(val_ty(llptr)), DebugLoc::None) @@ -851,7 +851,7 @@ pub fn trans_set_discr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, r: &Repr<'tcx>, } StructWrappedNullablePointer { nndiscr, ref discrfield, .. } => { if discr != nndiscr { - let llptrptr = GEPi(bcx, val, &discrfield[]); + let llptrptr = GEPi(bcx, val, &discrfield[..]); let llptrty = val_ty(llptrptr).element_type(); Store(bcx, C_null(llptrty), llptrptr) } @@ -933,7 +933,7 @@ pub fn struct_field_ptr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, st: &Struct<'tcx>, v let val = if needs_cast { let ccx = bcx.ccx(); let fields = st.fields.iter().map(|&ty| type_of::type_of(ccx, ty)).collect::>(); - let real_ty = Type::struct_(ccx, &fields[], st.packed); + let real_ty = Type::struct_(ccx, &fields[..], st.packed); PointerCast(bcx, val, real_ty.ptr_to()) } else { val @@ -972,7 +972,7 @@ pub fn fold_variants<'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>, let fields = case.fields.iter().map(|&ty| type_of::type_of(bcx.ccx(), ty)).collect::>(); - let real_ty = Type::struct_(ccx, &fields[], case.packed); + let real_ty = Type::struct_(ccx, &fields[..], case.packed); let variant_value = PointerCast(variant_cx, value, real_ty.ptr_to()); variant_cx = f(variant_cx, case, variant_value); @@ -1045,18 +1045,18 @@ pub fn trans_const<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, r: &Repr<'tcx>, discr } General(ity, ref cases, _) => { let case = &cases[discr as uint]; - let (max_sz, _) = union_size_and_align(&cases[]); + let (max_sz, _) = union_size_and_align(&cases[..]); let lldiscr = C_integral(ll_inttype(ccx, ity), discr as u64, true); let mut f = vec![lldiscr]; f.push_all(vals); - let mut contents = build_const_struct(ccx, case, &f[]); + let mut contents = build_const_struct(ccx, case, &f[..]); contents.push_all(&[padding(ccx, max_sz - case.size)]); - C_struct(ccx, &contents[], false) + C_struct(ccx, &contents[..], false) } Univariant(ref st, _dro) => { assert!(discr == 0); let contents = build_const_struct(ccx, st, vals); - C_struct(ccx, &contents[], st.packed) + C_struct(ccx, &contents[..], st.packed) } RawNullablePointer { nndiscr, nnty, .. 
} => { if discr == nndiscr { @@ -1080,7 +1080,7 @@ pub fn trans_const<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, r: &Repr<'tcx>, discr }).collect::>(); C_struct(ccx, &build_const_struct(ccx, nonnull, - &vals[])[], + &vals[..])[], false) } } diff --git a/src/librustc_trans/trans/asm.rs b/src/librustc_trans/trans/asm.rs index e419be65fc4..71cb1d3066c 100644 --- a/src/librustc_trans/trans/asm.rs +++ b/src/librustc_trans/trans/asm.rs @@ -71,7 +71,7 @@ pub fn trans_inline_asm<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ia: &ast::InlineAsm) callee::DontAutorefArg) }) }).collect::>(); - inputs.push_all(&ext_inputs[]); + inputs.push_all(&ext_inputs[..]); // no failure occurred preparing operands, no need to cleanup fcx.pop_custom_cleanup_scope(temp_scope); @@ -91,18 +91,18 @@ pub fn trans_inline_asm<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ia: &ast::InlineAsm) if !clobbers.is_empty() { clobbers.push(','); } - clobbers.push_str(&more_clobbers[]); + clobbers.push_str(&more_clobbers[..]); } // Add the clobbers to our constraints list if clobbers.len() != 0 && constraints.len() != 0 { constraints.push(','); - constraints.push_str(&clobbers[]); + constraints.push_str(&clobbers[..]); } else { - constraints.push_str(&clobbers[]); + constraints.push_str(&clobbers[..]); } - debug!("Asm Constraints: {}", &constraints[]); + debug!("Asm Constraints: {}", &constraints[..]); let num_outputs = outputs.len(); @@ -112,7 +112,7 @@ pub fn trans_inline_asm<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ia: &ast::InlineAsm) } else if num_outputs == 1 { output_types[0] } else { - Type::struct_(bcx.ccx(), &output_types[], false) + Type::struct_(bcx.ccx(), &output_types[..], false) }; let dialect = match ia.dialect { diff --git a/src/librustc_trans/trans/base.rs b/src/librustc_trans/trans/base.rs index 7f7b5cd8006..5cfea3c2677 100644 --- a/src/librustc_trans/trans/base.rs +++ b/src/librustc_trans/trans/base.rs @@ -247,7 +247,7 @@ fn get_extern_rust_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, fn_ty: Ty<'tcx>, let f = decl_rust_fn(ccx, fn_ty, name); let attrs = csearch::get_item_attrs(&ccx.sess().cstore, did); - set_llvm_fn_attrs(ccx, &attrs[], f); + set_llvm_fn_attrs(ccx, &attrs[..], f); ccx.externs().borrow_mut().insert(name.to_string(), f); f @@ -523,7 +523,7 @@ pub fn get_res_dtor<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ty::mk_nil(ccx.tcx())); get_extern_fn(ccx, &mut *ccx.externs().borrow_mut(), - &name[], + &name[..], llvm::CCallConv, llty, dtor_ty) @@ -898,14 +898,14 @@ pub fn trans_external_path<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ty::ty_bare_fn(_, ref fn_ty) => { match ccx.sess().target.target.adjust_abi(fn_ty.abi) { Rust | RustCall => { - get_extern_rust_fn(ccx, t, &name[], did) + get_extern_rust_fn(ccx, t, &name[..], did) } RustIntrinsic => { ccx.sess().bug("unexpected intrinsic in trans_external_path") } _ => { foreign::register_foreign_item_fn(ccx, fn_ty.abi, t, - &name[]) + &name[..]) } } } @@ -947,7 +947,7 @@ pub fn invoke<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let llresult = Invoke(bcx, llfn, - &llargs[], + &llargs[..], normal_bcx.llbb, landing_pad, Some(attributes), @@ -961,7 +961,7 @@ pub fn invoke<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let llresult = Call(bcx, llfn, - &llargs[], + &llargs[..], Some(attributes), debug_loc); return (llresult, bcx); @@ -1646,7 +1646,7 @@ fn copy_closure_args_to_allocas<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, "argtuple", arg_scope_id)); let untupled_arg_types = match monomorphized_arg_types[0].sty { - ty::ty_tup(ref types) => &types[], + ty::ty_tup(ref types) => &types[..], _ => { 
bcx.tcx().sess.span_bug(args[0].pat.span, "first arg to `rust-call` ABI function \ @@ -1834,12 +1834,12 @@ pub fn trans_closure<'a, 'b, 'tcx>(ccx: &CrateContext<'a, 'tcx>, let arg_datums = if abi != RustCall { create_datums_for_fn_args(&fcx, - &monomorphized_arg_types[]) + &monomorphized_arg_types[..]) } else { create_datums_for_fn_args_under_call_abi( bcx, arg_scope, - &monomorphized_arg_types[]) + &monomorphized_arg_types[..]) }; bcx = match closure_env { @@ -1855,7 +1855,7 @@ pub fn trans_closure<'a, 'b, 'tcx>(ccx: &CrateContext<'a, 'tcx>, arg_scope, &decl.inputs[], arg_datums, - &monomorphized_arg_types[]) + &monomorphized_arg_types[..]) } }; @@ -2000,7 +2000,7 @@ pub fn trans_named_tuple_constructor<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, bcx = expr::trans_adt(bcx, result_ty, disr, - &fields[], + &fields[..], None, expr::SaveIn(llresult), debug_loc); @@ -2070,7 +2070,7 @@ fn trans_enum_variant_or_tuple_like_struct<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx ty::erase_late_bound_regions( ccx.tcx(), &ty::ty_fn_args(ctor_ty)); - let arg_datums = create_datums_for_fn_args(&fcx, &arg_tys[]); + let arg_datums = create_datums_for_fn_args(&fcx, &arg_tys[..]); if !type_is_zero_size(fcx.ccx, result_ty.unwrap()) { let dest = fcx.get_ret_slot(bcx, result_ty, "eret_slot"); @@ -2315,7 +2315,7 @@ pub fn trans_item(ccx: &CrateContext, item: &ast::Item) { ast::ItemImpl(_, _, ref generics, _, _, ref impl_items) => { meth::trans_impl(ccx, item.ident, - &impl_items[], + &impl_items[..], generics, item.id); } @@ -2430,7 +2430,7 @@ fn register_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, _ => panic!("expected bare rust fn") }; - let llfn = decl_rust_fn(ccx, node_type, &sym[]); + let llfn = decl_rust_fn(ccx, node_type, &sym[..]); finish_register_fn(ccx, sp, sym, node_id, llfn); llfn } @@ -2475,7 +2475,7 @@ pub fn get_fn_llvm_attributes<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, fn_ty: Ty< match fn_sig.inputs[1].sty { ty::ty_tup(ref t_in) => { - inputs.push_all(&t_in[]); + inputs.push_all(&t_in[..]); inputs } _ => ccx.sess().bug("expected tuple'd inputs") @@ -2611,7 +2611,7 @@ pub fn register_fn_llvmty(ccx: &CrateContext, debug!("register_fn_llvmty id={} sym={}", node_id, sym); let llfn = decl_fn(ccx, - &sym[], + &sym[..], cc, llfty, ty::FnConverging(ty::mk_nil(ccx.tcx()))); @@ -2667,7 +2667,7 @@ pub fn create_entry_wrapper(ccx: &CrateContext, let (start_fn, args) = if use_start_lang_item { let start_def_id = match ccx.tcx().lang_items.require(StartFnLangItem) { Ok(id) => id, - Err(s) => { ccx.sess().fatal(&s[]); } + Err(s) => { ccx.sess().fatal(&s[..]); } }; let start_fn = if start_def_id.krate == ast::LOCAL_CRATE { get_item_val(ccx, start_def_id.node) @@ -2783,7 +2783,7 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef { } else { llvm::LLVMTypeOf(v) }; - if contains_null(&sym[]) { + if contains_null(&sym[..]) { ccx.sess().fatal( &format!("Illegal null byte in export_name \ value: `{}`", sym)[]); @@ -2988,7 +2988,7 @@ pub fn write_metadata(cx: &SharedCrateContext, krate: &ast::Crate) -> Vec { Some(compressed) => compressed, None => cx.sess().fatal("failed to compress metadata"), }); - let llmeta = C_bytes_in_context(cx.metadata_llcx(), &compressed[]); + let llmeta = C_bytes_in_context(cx.metadata_llcx(), &compressed[..]); let llconst = C_struct_in_context(cx.metadata_llcx(), &[llmeta], false); let name = format!("rust_metadata_{}_{}", cx.link_meta().crate_name, diff --git a/src/librustc_trans/trans/builder.rs b/src/librustc_trans/trans/builder.rs index e268c2f0d5c..9198fce0200 100644 --- 
a/src/librustc_trans/trans/builder.rs +++ b/src/librustc_trans/trans/builder.rs @@ -567,7 +567,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } else { let v = ixs.iter().map(|i| C_i32(self.ccx, *i as i32)).collect::>(); self.count_insn("gepi"); - self.inbounds_gep(base, &v[]) + self.inbounds_gep(base, &v[..]) } } @@ -775,8 +775,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let s = format!("{} ({})", text, self.ccx.sess().codemap().span_to_string(sp)); - debug!("{}", &s[]); - self.add_comment(&s[]); + debug!("{}", &s[..]); + self.add_comment(&s[..]); } } @@ -813,7 +813,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { }).collect::>(); debug!("Asm Output Type: {}", self.ccx.tn().type_to_string(output)); - let fty = Type::func(&argtys[], &output); + let fty = Type::func(&argtys[..], &output); unsafe { let v = llvm::LLVMInlineAsm( fty.to_ref(), asm, cons, volatile, alignstack, dia as c_uint); diff --git a/src/librustc_trans/trans/callee.rs b/src/librustc_trans/trans/callee.rs index bda8b8938b7..3d3e35cd776 100644 --- a/src/librustc_trans/trans/callee.rs +++ b/src/librustc_trans/trans/callee.rs @@ -323,7 +323,7 @@ pub fn trans_fn_pointer_shim<'a, 'tcx>( let llfn = decl_internal_rust_fn(ccx, tuple_fn_ty, - &function_name[]); + &function_name[..]); // let empty_substs = tcx.mk_substs(Substs::trans_empty()); @@ -359,7 +359,7 @@ pub fn trans_fn_pointer_shim<'a, 'tcx>( DebugLoc::None, bare_fn_ty, |bcx, _| Callee { bcx: bcx, data: Fn(llfnpointer) }, - ArgVals(&llargs[]), + ArgVals(&llargs[..]), dest).bcx; finish_fn(&fcx, bcx, sig.output, DebugLoc::None); @@ -792,7 +792,7 @@ pub fn trans_call_inner<'a, 'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>, // Invoke the actual rust fn and update bcx/llresult. let (llret, b) = base::invoke(bcx, llfn, - &llargs[], + &llargs[..], callee_ty, debug_loc); bcx = b; @@ -833,7 +833,7 @@ pub fn trans_call_inner<'a, 'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>, callee_ty, llfn, opt_llretslot.unwrap(), - &llargs[], + &llargs[..], arg_tys, debug_loc); } diff --git a/src/librustc_trans/trans/cleanup.rs b/src/librustc_trans/trans/cleanup.rs index 1c831090e3e..85e53618f6d 100644 --- a/src/librustc_trans/trans/cleanup.rs +++ b/src/librustc_trans/trans/cleanup.rs @@ -764,7 +764,7 @@ impl<'blk, 'tcx> CleanupHelperMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx let name = scope.block_name("clean"); debug!("generating cleanups for {}", name); let bcx_in = self.new_block(label.is_unwind(), - &name[], + &name[..], None); let mut bcx_out = bcx_in; for cleanup in scope.cleanups.iter().rev() { @@ -811,7 +811,7 @@ impl<'blk, 'tcx> CleanupHelperMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx Some(llbb) => { return llbb; } None => { let name = last_scope.block_name("unwind"); - pad_bcx = self.new_block(true, &name[], None); + pad_bcx = self.new_block(true, &name[..], None); last_scope.cached_landing_pad = Some(pad_bcx.llbb); } } diff --git a/src/librustc_trans/trans/closure.rs b/src/librustc_trans/trans/closure.rs index f92df999e60..c7907ea83c0 100644 --- a/src/librustc_trans/trans/closure.rs +++ b/src/librustc_trans/trans/closure.rs @@ -158,7 +158,7 @@ pub fn get_or_create_declaration_if_closure<'a, 'tcx>(ccx: &CrateContext<'a, 'tc mangle_internal_name_by_path_and_seq(path, "closure") }); - let llfn = decl_internal_rust_fn(ccx, function_type, &symbol[]); + let llfn = decl_internal_rust_fn(ccx, function_type, &symbol[..]); // set an inline hint for all closures set_inline_hint(llfn); @@ -221,7 +221,7 @@ pub fn trans_closure_expr<'a, 'tcx>(dest: Dest<'a, 'tcx>, &[], sig.output, function_type.abi, - 
ClosureEnv::Closure(&freevars[])); + ClosureEnv::Closure(&freevars[..])); // Don't hoist this to the top of the function. It's perfectly legitimate // to have a zero-size closure (in which case dest will be `Ignore`) and diff --git a/src/librustc_trans/trans/common.rs b/src/librustc_trans/trans/common.rs index d658003702d..21f8fb53644 100644 --- a/src/librustc_trans/trans/common.rs +++ b/src/librustc_trans/trans/common.rs @@ -1165,8 +1165,8 @@ pub fn langcall(bcx: Block, Err(s) => { let msg = format!("{} {}", msg, s); match span { - Some(span) => bcx.tcx().sess.span_fatal(span, &msg[]), - None => bcx.tcx().sess.fatal(&msg[]), + Some(span) => bcx.tcx().sess.span_fatal(span, &msg[..]), + None => bcx.tcx().sess.fatal(&msg[..]), } } } diff --git a/src/librustc_trans/trans/consts.rs b/src/librustc_trans/trans/consts.rs index 86f5589556a..7705b53ee38 100644 --- a/src/librustc_trans/trans/consts.rs +++ b/src/librustc_trans/trans/consts.rs @@ -75,7 +75,7 @@ pub fn const_lit(cx: &CrateContext, e: &ast::Expr, lit: &ast::Lit) ast::LitBool(b) => C_bool(cx, b), ast::LitStr(ref s, _) => C_str_slice(cx, (*s).clone()), ast::LitBinary(ref data) => { - let g = addr_of(cx, C_bytes(cx, &data[]), "binary", e.id); + let g = addr_of(cx, C_bytes(cx, &data[..]), "binary", e.id); let base = ptrcast(g, Type::i8p(cx)); let prev_const = cx.const_unsized().borrow_mut() .insert(base, g); @@ -611,8 +611,8 @@ fn const_expr_unadjusted<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, } ast::ExprTup(ref es) => { let repr = adt::represent_type(cx, ety); - let vals = map_list(&es[]); - adt::trans_const(cx, &*repr, 0, &vals[]) + let vals = map_list(&es[..]); + adt::trans_const(cx, &*repr, 0, &vals[..]) } ast::ExprStruct(_, ref fs, ref base_opt) => { let repr = adt::represent_type(cx, ety); @@ -642,9 +642,9 @@ fn const_expr_unadjusted<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, } }).collect::>(); if ty::type_is_simd(cx.tcx(), ety) { - C_vector(&cs[]) + C_vector(&cs[..]) } else { - adt::trans_const(cx, &*repr, discr, &cs[]) + adt::trans_const(cx, &*repr, discr, &cs[..]) } }) } @@ -655,9 +655,9 @@ fn const_expr_unadjusted<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, .collect::>(); // If the vector contains enums, an LLVM array won't work. 
if vs.iter().any(|vi| val_ty(*vi) != llunitty) { - C_struct(cx, &vs[], false) + C_struct(cx, &vs[..], false) } else { - C_array(llunitty, &vs[]) + C_array(llunitty, &vs[..]) } } ast::ExprRepeat(ref elem, ref count) => { @@ -671,9 +671,9 @@ fn const_expr_unadjusted<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let unit_val = const_expr(cx, &**elem, param_substs).0; let vs: Vec<_> = repeat(unit_val).take(n).collect(); if val_ty(unit_val) != llunitty { - C_struct(cx, &vs[], false) + C_struct(cx, &vs[..], false) } else { - C_array(llunitty, &vs[]) + C_array(llunitty, &vs[..]) } } ast::ExprPath(_) | ast::ExprQPath(_) => { @@ -715,14 +715,14 @@ fn const_expr_unadjusted<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, } ast::ExprCall(ref callee, ref args) => { let opt_def = cx.tcx().def_map.borrow().get(&callee.id).cloned(); - let arg_vals = map_list(&args[]); + let arg_vals = map_list(&args[..]); match opt_def { Some(def::DefStruct(_)) => { if ty::type_is_simd(cx.tcx(), ety) { - C_vector(&arg_vals[]) + C_vector(&arg_vals[..]) } else { let repr = adt::represent_type(cx, ety); - adt::trans_const(cx, &*repr, 0, &arg_vals[]) + adt::trans_const(cx, &*repr, 0, &arg_vals[..]) } } Some(def::DefVariant(enum_did, variant_did, _)) => { @@ -733,7 +733,7 @@ fn const_expr_unadjusted<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, adt::trans_const(cx, &*repr, vinfo.disr_val, - &arg_vals[]) + &arg_vals[..]) } _ => cx.sess().span_bug(e.span, "expected a struct or variant def") } diff --git a/src/librustc_trans/trans/context.rs b/src/librustc_trans/trans/context.rs index 96506291b5a..bfcd757cf24 100644 --- a/src/librustc_trans/trans/context.rs +++ b/src/librustc_trans/trans/context.rs @@ -288,7 +288,7 @@ impl<'tcx> SharedCrateContext<'tcx> { // such as a function name in the module. // 1. http://llvm.org/bugs/show_bug.cgi?id=11479 let llmod_id = format!("{}.{}.rs", crate_name, i); - let local_ccx = LocalCrateContext::new(&shared_ccx, &llmod_id[]); + let local_ccx = LocalCrateContext::new(&shared_ccx, &llmod_id[..]); shared_ccx.local_ccxs.push(local_ccx); } diff --git a/src/librustc_trans/trans/controlflow.rs b/src/librustc_trans/trans/controlflow.rs index e6cd44676ce..26e12a1af40 100644 --- a/src/librustc_trans/trans/controlflow.rs +++ b/src/librustc_trans/trans/controlflow.rs @@ -177,7 +177,7 @@ pub fn trans_if<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, } let name = format!("then-block-{}-", thn.id); - let then_bcx_in = bcx.fcx.new_id_block(&name[], thn.id); + let then_bcx_in = bcx.fcx.new_id_block(&name[..], thn.id); let then_bcx_out = trans_block(then_bcx_in, &*thn, dest); trans::debuginfo::clear_source_location(bcx.fcx); @@ -378,7 +378,7 @@ pub fn trans_fail<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let did = langcall(bcx, Some(call_info.span), "", PanicFnLangItem); let bcx = callee::trans_lang_call(bcx, did, - &args[], + &args[..], Some(expr::Ignore), call_info.debug_loc()).bcx; Unreachable(bcx); @@ -407,7 +407,7 @@ pub fn trans_fail_bounds_check<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let did = langcall(bcx, Some(call_info.span), "", PanicBoundsCheckFnLangItem); let bcx = callee::trans_lang_call(bcx, did, - &args[], + &args[..], Some(expr::Ignore), call_info.debug_loc()).bcx; Unreachable(bcx); diff --git a/src/librustc_trans/trans/debuginfo.rs b/src/librustc_trans/trans/debuginfo.rs index 23498089c58..95e598aa21a 100644 --- a/src/librustc_trans/trans/debuginfo.rs +++ b/src/librustc_trans/trans/debuginfo.rs @@ -299,7 +299,7 @@ impl<'tcx> TypeMap<'tcx> { if self.unique_id_to_metadata.insert(unique_type_id, metadata).is_some() { let unique_type_id_str 
= self.get_unique_type_id_as_string(unique_type_id); cx.sess().bug(&format!("Type metadata for unique id '{}' is already in the TypeMap!", - &unique_type_id_str[])[]); + &unique_type_id_str[..])[]); } } @@ -380,14 +380,14 @@ impl<'tcx> TypeMap<'tcx> { self.get_unique_type_id_of_type(cx, component_type); let component_type_id = self.get_unique_type_id_as_string(component_type_id); - unique_type_id.push_str(&component_type_id[]); + unique_type_id.push_str(&component_type_id[..]); } }, ty::ty_uniq(inner_type) => { unique_type_id.push('~'); let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type); let inner_type_id = self.get_unique_type_id_as_string(inner_type_id); - unique_type_id.push_str(&inner_type_id[]); + unique_type_id.push_str(&inner_type_id[..]); }, ty::ty_ptr(ty::mt { ty: inner_type, mutbl } ) => { unique_type_id.push('*'); @@ -397,7 +397,7 @@ impl<'tcx> TypeMap<'tcx> { let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type); let inner_type_id = self.get_unique_type_id_as_string(inner_type_id); - unique_type_id.push_str(&inner_type_id[]); + unique_type_id.push_str(&inner_type_id[..]); }, ty::ty_rptr(_, ty::mt { ty: inner_type, mutbl }) => { unique_type_id.push('&'); @@ -407,7 +407,7 @@ impl<'tcx> TypeMap<'tcx> { let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type); let inner_type_id = self.get_unique_type_id_as_string(inner_type_id); - unique_type_id.push_str(&inner_type_id[]); + unique_type_id.push_str(&inner_type_id[..]); }, ty::ty_vec(inner_type, optional_length) => { match optional_length { @@ -421,7 +421,7 @@ impl<'tcx> TypeMap<'tcx> { let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type); let inner_type_id = self.get_unique_type_id_as_string(inner_type_id); - unique_type_id.push_str(&inner_type_id[]); + unique_type_id.push_str(&inner_type_id[..]); }, ty::ty_trait(ref trait_data) => { unique_type_id.push_str("trait "); @@ -452,7 +452,7 @@ impl<'tcx> TypeMap<'tcx> { self.get_unique_type_id_of_type(cx, parameter_type); let parameter_type_id = self.get_unique_type_id_as_string(parameter_type_id); - unique_type_id.push_str(&parameter_type_id[]); + unique_type_id.push_str(&parameter_type_id[..]); unique_type_id.push(','); } @@ -465,7 +465,7 @@ impl<'tcx> TypeMap<'tcx> { ty::FnConverging(ret_ty) => { let return_type_id = self.get_unique_type_id_of_type(cx, ret_ty); let return_type_id = self.get_unique_type_id_as_string(return_type_id); - unique_type_id.push_str(&return_type_id[]); + unique_type_id.push_str(&return_type_id[..]); } ty::FnDiverging => { unique_type_id.push_str("!"); @@ -538,7 +538,7 @@ impl<'tcx> TypeMap<'tcx> { type_map.get_unique_type_id_of_type(cx, type_parameter); let param_type_id = type_map.get_unique_type_id_as_string(param_type_id); - output.push_str(&param_type_id[]); + output.push_str(&param_type_id[..]); output.push(','); } @@ -568,7 +568,7 @@ impl<'tcx> TypeMap<'tcx> { self.get_unique_type_id_of_type(cx, parameter_type); let parameter_type_id = self.get_unique_type_id_as_string(parameter_type_id); - unique_type_id.push_str(&parameter_type_id[]); + unique_type_id.push_str(&parameter_type_id[..]); unique_type_id.push(','); } @@ -582,7 +582,7 @@ impl<'tcx> TypeMap<'tcx> { ty::FnConverging(ret_ty) => { let return_type_id = self.get_unique_type_id_of_type(cx, ret_ty); let return_type_id = self.get_unique_type_id_as_string(return_type_id); - unique_type_id.push_str(&return_type_id[]); + unique_type_id.push_str(&return_type_id[..]); } ty::FnDiverging => { unique_type_id.push_str("!"); @@ -806,7 +806,7 @@ pub fn 
create_global_var_metadata(cx: &CrateContext, let namespace_node = namespace_for_item(cx, ast_util::local_def(node_id)); let var_name = token::get_ident(ident).to_string(); let linkage_name = - namespace_node.mangled_name_of_contained_item(&var_name[]); + namespace_node.mangled_name_of_contained_item(&var_name[..]); let var_scope = namespace_node.scope; let var_name = CString::from_slice(var_name.as_bytes()); @@ -1287,7 +1287,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, match expr.node { ast::ExprClosure(_, ref fn_decl, ref top_level_block) => { let name = format!("fn{}", token::gensym("fn")); - let name = token::str_to_ident(&name[]); + let name = token::str_to_ident(&name[..]); (name, &**fn_decl, // This is not quite right. It should actually inherit // the generics of the enclosing function. @@ -1366,7 +1366,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let (linkage_name, containing_scope) = if has_path { let namespace_node = namespace_for_item(cx, ast_util::local_def(fn_ast_id)); let linkage_name = namespace_node.mangled_name_of_contained_item( - &function_name[]); + &function_name[..]); let containing_scope = namespace_node.scope; (linkage_name, containing_scope) } else { @@ -1451,7 +1451,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, signature.push(type_metadata(cx, arg_type, codemap::DUMMY_SP)); } - return create_DIArray(DIB(cx), &signature[]); + return create_DIArray(DIB(cx), &signature[..]); } fn get_template_parameters<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, @@ -1486,7 +1486,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, actual_self_type, true); - name_to_append_suffix_to.push_str(&actual_self_type_name[]); + name_to_append_suffix_to.push_str(&actual_self_type_name[..]); if generics.is_type_parameterized() { name_to_append_suffix_to.push_str(","); @@ -1525,7 +1525,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let actual_type_name = compute_debuginfo_type_name(cx, actual_type, true); - name_to_append_suffix_to.push_str(&actual_type_name[]); + name_to_append_suffix_to.push_str(&actual_type_name[..]); if index != generics.ty_params.len() - 1 { name_to_append_suffix_to.push_str(","); @@ -1552,7 +1552,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, name_to_append_suffix_to.push('>'); - return create_DIArray(DIB(cx), &template_params[]); + return create_DIArray(DIB(cx), &template_params[..]); } } @@ -1646,7 +1646,7 @@ fn declare_local<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let cx: &CrateContext = bcx.ccx(); let filename = span_start(cx, span).file.name.clone(); - let file_metadata = file_metadata(cx, &filename[]); + let file_metadata = file_metadata(cx, &filename[..]); let name = token::get_ident(variable_ident); let loc = span_start(cx, span); @@ -1959,7 +1959,7 @@ impl<'tcx> RecursiveTypeDescription<'tcx> { set_members_of_composite_type(cx, metadata_stub, llvm_type, - &member_descriptions[]); + &member_descriptions[..]); return MetadataCreationResult::new(metadata_stub, true); } } @@ -2031,7 +2031,7 @@ fn prepare_struct_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let struct_metadata_stub = create_struct_stub(cx, struct_llvm_type, - &struct_name[], + &struct_name[..], unique_type_id, containing_scope); @@ -2098,7 +2098,7 @@ fn prepare_tuple_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, unique_type_id, create_struct_stub(cx, tuple_llvm_type, - &tuple_name[], + &tuple_name[..], 
unique_type_id, UNKNOWN_SCOPE_METADATA), tuple_llvm_type, @@ -2158,7 +2158,7 @@ impl<'tcx> EnumMemberDescriptionFactory<'tcx> { set_members_of_composite_type(cx, variant_type_metadata, variant_llvm_type, - &member_descriptions[]); + &member_descriptions[..]); MemberDescription { name: "".to_string(), llvm_type: variant_llvm_type, @@ -2191,7 +2191,7 @@ impl<'tcx> EnumMemberDescriptionFactory<'tcx> { set_members_of_composite_type(cx, variant_type_metadata, variant_llvm_type, - &member_descriptions[]); + &member_descriptions[..]); vec![ MemberDescription { name: "".to_string(), @@ -2291,7 +2291,7 @@ impl<'tcx> EnumMemberDescriptionFactory<'tcx> { set_members_of_composite_type(cx, variant_type_metadata, variant_llvm_type, - &variant_member_descriptions[]); + &variant_member_descriptions[..]); // Encode the information about the null variant in the union // member's name. @@ -2662,7 +2662,7 @@ fn set_members_of_composite_type(cx: &CrateContext, .collect(); unsafe { - let type_array = create_DIArray(DIB(cx), &member_metadata[]); + let type_array = create_DIArray(DIB(cx), &member_metadata[..]); llvm::LLVMDICompositeTypeSetTypeArray(DIB(cx), composite_type_metadata, type_array); } } @@ -2763,7 +2763,7 @@ fn vec_slice_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let member_llvm_types = slice_llvm_type.field_types(); assert!(slice_layout_is_correct(cx, - &member_llvm_types[], + &member_llvm_types[..], element_type)); let member_descriptions = [ MemberDescription { @@ -2789,7 +2789,7 @@ fn vec_slice_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let metadata = composite_type_metadata(cx, slice_llvm_type, - &slice_type_name[], + &slice_type_name[..], unique_type_id, &member_descriptions, UNKNOWN_SCOPE_METADATA, @@ -2838,7 +2838,7 @@ fn subroutine_type_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, llvm::LLVMDIBuilderCreateSubroutineType( DIB(cx), UNKNOWN_FILE_METADATA, - create_DIArray(DIB(cx), &signature_metadata[])) + create_DIArray(DIB(cx), &signature_metadata[..])) }, false); } @@ -2864,7 +2864,7 @@ fn trait_pointer_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let pp_type_name = ppaux::ty_to_string(cx.tcx(), trait_type); cx.sess().bug(&format!("debuginfo: Unexpected trait-object type in \ trait_pointer_metadata(): {}", - &pp_type_name[])[]); + &pp_type_name[..])[]); } }; @@ -2878,7 +2878,7 @@ fn trait_pointer_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, composite_type_metadata(cx, trait_llvm_type, - &trait_type_name[], + &trait_type_name[..], unique_type_id, &[], containing_scope, @@ -2998,7 +2998,7 @@ fn type_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, ty::ty_tup(ref elements) => { prepare_tuple_metadata(cx, t, - &elements[], + &elements[..], unique_type_id, usage_site_span).finalize(cx) } @@ -3022,9 +3022,9 @@ fn type_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, type id '{}' to already be in \ the debuginfo::TypeMap but it \ was not. (Ty = {})", - &unique_type_id_str[], + &unique_type_id_str[..], ppaux::ty_to_string(cx.tcx(), t)); - cx.sess().span_bug(usage_site_span, &error_message[]); + cx.sess().span_bug(usage_site_span, &error_message[..]); } }; @@ -3037,9 +3037,9 @@ fn type_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, UniqueTypeId maps in \ debuginfo::TypeMap. 
\ UniqueTypeId={}, Ty={}", - &unique_type_id_str[], + &unique_type_id_str[..], ppaux::ty_to_string(cx.tcx(), t)); - cx.sess().span_bug(usage_site_span, &error_message[]); + cx.sess().span_bug(usage_site_span, &error_message[..]); } } None => { @@ -3128,7 +3128,7 @@ fn contains_nodebug_attribute(attributes: &[ast::Attribute]) -> bool { attributes.iter().any(|attr| { let meta_item: &ast::MetaItem = &*attr.node.value; match meta_item.node { - ast::MetaWord(ref value) => &value[] == "no_debug", + ast::MetaWord(ref value) => &value[..] == "no_debug", _ => false } }) diff --git a/src/librustc_trans/trans/expr.rs b/src/librustc_trans/trans/expr.rs index 480679f43cb..a31fc3346bd 100644 --- a/src/librustc_trans/trans/expr.rs +++ b/src/librustc_trans/trans/expr.rs @@ -1046,14 +1046,14 @@ fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, controlflow::trans_if(bcx, expr.id, &**cond, &**thn, els.as_ref().map(|e| &**e), dest) } ast::ExprMatch(ref discr, ref arms, _) => { - _match::trans_match(bcx, expr, &**discr, &arms[], dest) + _match::trans_match(bcx, expr, &**discr, &arms[..], dest) } ast::ExprBlock(ref blk) => { controlflow::trans_block(bcx, &**blk, dest) } ast::ExprStruct(_, ref fields, ref base) => { trans_struct(bcx, - &fields[], + &fields[..], base.as_ref().map(|e| &**e), expr.span, expr.id, @@ -1118,7 +1118,7 @@ fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, trans_adt(bcx, expr_ty(bcx, expr), 0, - &numbered_fields[], + &numbered_fields[..], None, dest, expr.debug_loc()) @@ -1153,13 +1153,13 @@ fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, trans_overloaded_call(bcx, expr, &**f, - &args[], + &args[..], Some(dest)) } else { callee::trans_call(bcx, expr, &**f, - callee::ArgExprs(&args[]), + callee::ArgExprs(&args[..]), dest) } } @@ -1167,7 +1167,7 @@ fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, callee::trans_method_call(bcx, expr, &*args[0], - callee::ArgExprs(&args[]), + callee::ArgExprs(&args[..]), dest) } ast::ExprBinary(op, ref lhs, ref rhs) => { @@ -1354,11 +1354,11 @@ pub fn with_field_tys<'tcx, R, F>(tcx: &ty::ctxt<'tcx>, ty::ty_struct(did, substs) => { let fields = struct_fields(tcx, did, substs); let fields = monomorphize::normalize_associated_type(tcx, &fields); - op(0, &fields[]) + op(0, &fields[..]) } ty::ty_tup(ref v) => { - op(0, &tup_fields(&v[])[]) + op(0, &tup_fields(&v[..])[]) } ty::ty_enum(_, substs) => { @@ -1378,7 +1378,7 @@ pub fn with_field_tys<'tcx, R, F>(tcx: &ty::ctxt<'tcx>, tcx, enum_id, variant_id); let fields = struct_fields(tcx, variant_id, substs); let fields = monomorphize::normalize_associated_type(tcx, &fields); - op(variant_info.disr_val, &fields[]) + op(variant_info.disr_val, &fields[..]) } _ => { tcx.sess.bug("resolve didn't map this expr to a \ diff --git a/src/librustc_trans/trans/foreign.rs b/src/librustc_trans/trans/foreign.rs index 8f0e4e647b5..10abb90b4bf 100644 --- a/src/librustc_trans/trans/foreign.rs +++ b/src/librustc_trans/trans/foreign.rs @@ -238,7 +238,7 @@ pub fn trans_native_call<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, _ => ccx.sess().bug("trans_native_call called on non-function type") }; let fn_sig = ty::erase_late_bound_regions(ccx.tcx(), fn_sig); - let llsig = foreign_signature(ccx, &fn_sig, &passed_arg_tys[]); + let llsig = foreign_signature(ccx, &fn_sig, &passed_arg_tys[..]); let fn_type = cabi::compute_abi_info(ccx, &llsig.llarg_tys[], llsig.llret_ty, @@ -370,7 +370,7 @@ pub fn trans_native_call<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let llforeign_retval = 
CallWithConv(bcx, llfn, - &llargs_foreign[], + &llargs_foreign[..], cc, Some(attrs), call_debug_loc); @@ -611,7 +611,7 @@ pub fn trans_rust_fn_with_foreign_abi<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ccx.tcx().map.path_to_string(id), id, t.repr(tcx)); - let llfn = base::decl_internal_rust_fn(ccx, t, &ps[]); + let llfn = base::decl_internal_rust_fn(ccx, t, &ps[..]); base::set_llvm_fn_attrs(ccx, attrs, llfn); base::trans_fn(ccx, decl, body, llfn, param_substs, id, &[]); llfn @@ -974,7 +974,7 @@ fn lltype_for_fn_from_foreign_types(ccx: &CrateContext, tys: &ForeignTypes) -> T if tys.fn_sig.variadic { Type::variadic_func(&llargument_tys, &llreturn_ty) } else { - Type::func(&llargument_tys[], &llreturn_ty) + Type::func(&llargument_tys[..], &llreturn_ty) } } diff --git a/src/librustc_trans/trans/glue.rs b/src/librustc_trans/trans/glue.rs index af90e1ec5c5..92040296f4a 100644 --- a/src/librustc_trans/trans/glue.rs +++ b/src/librustc_trans/trans/glue.rs @@ -170,7 +170,7 @@ pub fn get_drop_glue<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Val let (glue, new_sym) = match ccx.available_drop_glues().borrow().get(&t) { Some(old_sym) => { - let glue = decl_cdecl_fn(ccx, &old_sym[], llfnty, ty::mk_nil(ccx.tcx())); + let glue = decl_cdecl_fn(ccx, &old_sym[..], llfnty, ty::mk_nil(ccx.tcx())); (glue, None) }, None => { @@ -304,7 +304,7 @@ fn trans_struct_drop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, class_did, &[get_drop_glue_type(bcx.ccx(), t)], ty::mk_nil(bcx.tcx())); - let (_, variant_cx) = invoke(variant_cx, dtor_addr, &args[], dtor_ty, DebugLoc::None); + let (_, variant_cx) = invoke(variant_cx, dtor_addr, &args[..], dtor_ty, DebugLoc::None); variant_cx.fcx.pop_and_trans_custom_cleanup_scope(variant_cx, field_scope); variant_cx @@ -541,7 +541,7 @@ fn declare_generic_glue<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>, ccx, t, &format!("glue_{}", name)[]); - let llfn = decl_cdecl_fn(ccx, &fn_nm[], llfnty, ty::mk_nil(ccx.tcx())); + let llfn = decl_cdecl_fn(ccx, &fn_nm[..], llfnty, ty::mk_nil(ccx.tcx())); note_unique_llvm_symbol(ccx, fn_nm.clone()); return (fn_nm, llfn); } diff --git a/src/librustc_trans/trans/intrinsic.rs b/src/librustc_trans/trans/intrinsic.rs index 5687247561e..a1b66ed94f0 100644 --- a/src/librustc_trans/trans/intrinsic.rs +++ b/src/librustc_trans/trans/intrinsic.rs @@ -166,7 +166,7 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, let name = token::get_ident(foreign_item.ident); // For `transmute` we can just trans the input expr directly into dest - if &name[] == "transmute" { + if &name[..] == "transmute" { let llret_ty = type_of::type_of(ccx, ret_ty.unwrap()); match args { callee::ArgExprs(arg_exprs) => { @@ -274,13 +274,13 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, let call_debug_location = DebugLoc::At(call_info.id, call_info.span); // These are the only intrinsic functions that diverge. - if &name[] == "abort" { + if &name[..] == "abort" { let llfn = ccx.get_intrinsic(&("llvm.trap")); Call(bcx, llfn, &[], None, call_debug_location); fcx.pop_and_trans_custom_cleanup_scope(bcx, cleanup_scope); Unreachable(bcx); return Result::new(bcx, C_undef(Type::nil(ccx).ptr_to())); - } else if &name[] == "unreachable" { + } else if &name[..] 
== "unreachable" { fcx.pop_and_trans_custom_cleanup_scope(bcx, cleanup_scope); Unreachable(bcx); return Result::new(bcx, C_nil(ccx)); @@ -307,7 +307,7 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, }; let simple = get_simple_intrinsic(ccx, &*foreign_item); - let llval = match (simple, &name[]) { + let llval = match (simple, &name[..]) { (Some(llfn), _) => { Call(bcx, llfn, &llargs, None, call_debug_location) } diff --git a/src/librustc_trans/trans/monomorphize.rs b/src/librustc_trans/trans/monomorphize.rs index 30797344da8..ec48ab0d34a 100644 --- a/src/librustc_trans/trans/monomorphize.rs +++ b/src/librustc_trans/trans/monomorphize.rs @@ -131,7 +131,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, hash = format!("h{}", state.finish()); ccx.tcx().map.with_path(fn_id.node, |path| { - exported_name(path, &hash[]) + exported_name(path, &hash[..]) }) }; @@ -141,9 +141,9 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, let mut hash_id = Some(hash_id); let mut mk_lldecl = |abi: abi::Abi| { let lldecl = if abi != abi::Rust { - foreign::decl_rust_fn_with_foreign_abi(ccx, mono_ty, &s[]) + foreign::decl_rust_fn_with_foreign_abi(ccx, mono_ty, &s[..]) } else { - decl_internal_rust_fn(ccx, mono_ty, &s[]) + decl_internal_rust_fn(ccx, mono_ty, &s[..]) }; ccx.monomorphized().borrow_mut().insert(hash_id.take().unwrap(), lldecl); @@ -182,7 +182,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, if abi != abi::Rust { foreign::trans_rust_fn_with_foreign_abi( ccx, &**decl, &**body, &[], d, psubsts, fn_id.node, - Some(&hash[])); + Some(&hash[..])); } else { trans_fn(ccx, &**decl, &**body, d, psubsts, fn_id.node, &[]); } @@ -206,7 +206,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, trans_enum_variant(ccx, parent, &*v, - &args[], + &args[..], this_tv.disr_val, psubsts, d); diff --git a/src/librustc_trans/trans/type_of.rs b/src/librustc_trans/trans/type_of.rs index 9d1c0fadefc..e4d3699f5c0 100644 --- a/src/librustc_trans/trans/type_of.rs +++ b/src/librustc_trans/trans/type_of.rs @@ -144,7 +144,7 @@ pub fn type_of_rust_fn<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let input_tys = inputs.iter().map(|&arg_ty| type_of_explicit_arg(cx, arg_ty)); atys.extend(input_tys); - Type::func(&atys[], &lloutputtype) + Type::func(&atys[..], &lloutputtype) } // Given a function type and a count of ty params, construct an llvm type @@ -332,7 +332,7 @@ pub fn type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Type { let repr = adt::represent_type(cx, t); let tps = substs.types.get_slice(subst::TypeSpace); let name = llvm_type_name(cx, an_enum, did, tps); - adt::incomplete_type_of(cx, &*repr, &name[]) + adt::incomplete_type_of(cx, &*repr, &name[..]) } ty::ty_closure(did, _, ref substs) => { // Only create the named struct, but don't fill it in. 
We @@ -343,7 +343,7 @@ pub fn type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Type { // contents of the VecPerParamSpace to to construct the llvm // name let name = llvm_type_name(cx, a_closure, did, substs.types.as_slice()); - adt::incomplete_type_of(cx, &*repr, &name[]) + adt::incomplete_type_of(cx, &*repr, &name[..]) } ty::ty_uniq(ty) | ty::ty_rptr(_, ty::mt{ty, ..}) | ty::ty_ptr(ty::mt{ty, ..}) => { @@ -399,7 +399,7 @@ pub fn type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Type { let repr = adt::represent_type(cx, t); let tps = substs.types.get_slice(subst::TypeSpace); let name = llvm_type_name(cx, a_struct, did, tps); - adt::incomplete_type_of(cx, &*repr, &name[]) + adt::incomplete_type_of(cx, &*repr, &name[..]) } } diff --git a/src/librustc_typeck/astconv.rs b/src/librustc_typeck/astconv.rs index 0183b3474a5..f224d87ae5c 100644 --- a/src/librustc_typeck/astconv.rs +++ b/src/librustc_typeck/astconv.rs @@ -1139,14 +1139,14 @@ pub fn ast_ty_to_ty<'tcx>(this: &AstConv<'tcx>, ty::mk_vec(tcx, ast_ty_to_ty(this, rscope, &**ty), None) } ast::TyObjectSum(ref ty, ref bounds) => { - match ast_ty_to_trait_ref(this, rscope, &**ty, &bounds[]) { + match ast_ty_to_trait_ref(this, rscope, &**ty, &bounds[..]) { Ok((trait_ref, projection_bounds)) => { trait_ref_to_object_type(this, rscope, ast_ty.span, trait_ref, projection_bounds, - &bounds[]) + &bounds[..]) } Err(ErrorReported) => { this.tcx().types.err @@ -1185,7 +1185,7 @@ pub fn ast_ty_to_ty<'tcx>(this: &AstConv<'tcx>, ty::mk_bare_fn(tcx, None, tcx.mk_bare_fn(bare_fn)) } ast::TyPolyTraitRef(ref bounds) => { - conv_ty_poly_trait_ref(this, rscope, ast_ty.span, &bounds[]) + conv_ty_poly_trait_ref(this, rscope, ast_ty.span, &bounds[..]) } ast::TyPath(ref path, id) => { let a_def = match tcx.def_map.borrow().get(&id) { @@ -1424,7 +1424,7 @@ fn ty_of_method_or_bare_fn<'a, 'tcx>(this: &AstConv<'tcx>, // Skip the first argument if `self` is present. &self_and_input_tys[1..] } else { - &self_and_input_tys[] + &self_and_input_tys[..] 
}; let (ior, lfp) = find_implied_output_region(input_tys, input_pats); @@ -1623,7 +1623,7 @@ fn conv_ty_poly_trait_ref<'tcx>( ast_bounds: &[ast::TyParamBound]) -> Ty<'tcx> { - let mut partitioned_bounds = partition_bounds(this.tcx(), span, &ast_bounds[]); + let mut partitioned_bounds = partition_bounds(this.tcx(), span, &ast_bounds[..]); let mut projection_bounds = Vec::new(); let main_trait_bound = if !partitioned_bounds.trait_bounds.is_empty() { diff --git a/src/librustc_typeck/check/_match.rs b/src/librustc_typeck/check/_match.rs index 81868f3695c..dfd7816b824 100644 --- a/src/librustc_typeck/check/_match.rs +++ b/src/librustc_typeck/check/_match.rs @@ -162,7 +162,7 @@ pub fn check_pat<'a, 'tcx>(pcx: &pat_ctxt<'a, 'tcx>, check_pat_enum(pcx, pat, &path, Some(&[]), expected); } ast::PatEnum(ref path, ref subpats) => { - let subpats = subpats.as_ref().map(|v| &v[]); + let subpats = subpats.as_ref().map(|v| &v[..]); check_pat_enum(pcx, pat, path, subpats, expected); } ast::PatStruct(ref path, ref fields, etc) => { diff --git a/src/librustc_typeck/check/callee.rs b/src/librustc_typeck/check/callee.rs index 6a9d34d7637..84f9e953cc6 100644 --- a/src/librustc_typeck/check/callee.rs +++ b/src/librustc_typeck/check/callee.rs @@ -256,7 +256,7 @@ fn confirm_builtin_call<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, check_argument_types(fcx, call_expr.span, &fn_sig.inputs, - &expected_arg_tys[], + &expected_arg_tys[..], arg_exprs, AutorefArgs::No, fn_sig.variadic, diff --git a/src/librustc_typeck/check/method/probe.rs b/src/librustc_typeck/check/method/probe.rs index 82bd4ae87ff..978fbbbcffc 100644 --- a/src/librustc_typeck/check/method/probe.rs +++ b/src/librustc_typeck/check/method/probe.rs @@ -901,7 +901,7 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { debug!("applicable_candidates: {}", applicable_candidates.repr(self.tcx())); if applicable_candidates.len() > 1 { - match self.collapse_candidates_to_trait_pick(&applicable_candidates[]) { + match self.collapse_candidates_to_trait_pick(&applicable_candidates[..]) { Some(pick) => { return Some(Ok(pick)); } None => { } } diff --git a/src/librustc_typeck/check/method/suggest.rs b/src/librustc_typeck/check/method/suggest.rs index 063300a1d72..0494e1f7686 100644 --- a/src/librustc_typeck/check/method/suggest.rs +++ b/src/librustc_typeck/check/method/suggest.rs @@ -172,7 +172,7 @@ fn suggest_traits_to_import<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, traits_are = if candidates.len() == 1 {"trait is"} else {"traits are"}, one_of_them = if candidates.len() == 1 {"it"} else {"one of them"}); - fcx.sess().fileline_help(span, &msg[]); + fcx.sess().fileline_help(span, &msg[..]); for (i, trait_did) in candidates.iter().enumerate() { fcx.sess().fileline_help(span, @@ -218,7 +218,7 @@ fn suggest_traits_to_import<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, one_of_them = if candidates.len() == 1 {"it"} else {"one of them"}, name = method_ustring); - fcx.sess().fileline_help(span, &msg[]); + fcx.sess().fileline_help(span, &msg[..]); for (i, trait_info) in candidates.iter().enumerate() { fcx.sess().fileline_help(span, diff --git a/src/librustc_typeck/check/mod.rs b/src/librustc_typeck/check/mod.rs index 30896c1607a..12553a4c21e 100644 --- a/src/librustc_typeck/check/mod.rs +++ b/src/librustc_typeck/check/mod.rs @@ -2209,7 +2209,7 @@ fn check_method_argument_types<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, check_argument_types(fcx, sp, - &err_inputs[], + &err_inputs[..], &[], args_no_rcvr, autoref_args, @@ -2228,7 +2228,7 @@ fn check_method_argument_types<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, check_argument_types(fcx, 
sp, &fty.sig.0.inputs[1..], - &expected_arg_tys[], + &expected_arg_tys[..], args_no_rcvr, autoref_args, fty.sig.0.variadic, @@ -3055,7 +3055,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, ty::ty_struct(base_id, substs) => { debug!("struct named {}", ppaux::ty_to_string(tcx, base_t)); let fields = ty::lookup_struct_fields(tcx, base_id); - fcx.lookup_field_ty(expr.span, base_id, &fields[], + fcx.lookup_field_ty(expr.span, base_id, &fields[..], field.node.name, &(*substs)) } _ => None @@ -3155,7 +3155,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, if tuple_like { debug!("tuple struct named {}", ppaux::ty_to_string(tcx, base_t)); let fields = ty::lookup_struct_fields(tcx, base_id); - fcx.lookup_tup_field_ty(expr.span, base_id, &fields[], + fcx.lookup_tup_field_ty(expr.span, base_id, &fields[..], idx.node, &(*substs)) } else { None @@ -3328,7 +3328,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, class_id, id, fcx.ccx.tcx.mk_substs(struct_substs), - &class_fields[], + &class_fields[..], fields, base_expr.is_none(), None); @@ -3371,7 +3371,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, variant_id, id, fcx.ccx.tcx.mk_substs(substitutions), - &variant_fields[], + &variant_fields[..], fields, true, Some(enum_id)); @@ -3732,10 +3732,10 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, fcx.write_ty(id, fcx.node_ty(b.id)); } ast::ExprCall(ref callee, ref args) => { - callee::check_call(fcx, expr, &**callee, &args[], expected); + callee::check_call(fcx, expr, &**callee, &args[..], expected); } ast::ExprMethodCall(ident, ref tps, ref args) => { - check_method_call(fcx, expr, ident, &args[], &tps[], expected, lvalue_pref); + check_method_call(fcx, expr, ident, &args[..], &tps[..], expected, lvalue_pref); let arg_tys = args.iter().map(|a| fcx.expr_ty(&**a)); let args_err = arg_tys.fold(false, |rest_err, a| { @@ -3822,7 +3822,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, ast::ExprTup(ref elts) => { let flds = expected.only_has_type(fcx).and_then(|ty| { match ty.sty { - ty::ty_tup(ref flds) => Some(&flds[]), + ty::ty_tup(ref flds) => Some(&flds[..]), _ => None } }); @@ -3856,7 +3856,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, let struct_id = match def { Some(def::DefVariant(enum_id, variant_id, true)) => { check_struct_enum_variant(fcx, id, expr.span, enum_id, - variant_id, &fields[]); + variant_id, &fields[..]); enum_id } Some(def::DefTrait(def_id)) => { @@ -3865,7 +3865,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, pprust::path_to_string(path)); check_struct_fields_on_error(fcx, id, - &fields[], + &fields[..], base_expr); def_id }, @@ -3878,7 +3878,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, id, expr.span, struct_did, - &fields[], + &fields[..], base_expr.as_ref().map(|e| &**e)); } _ => { @@ -3887,7 +3887,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, pprust::path_to_string(path)); check_struct_fields_on_error(fcx, id, - &fields[], + &fields[..], base_expr); } } @@ -5232,10 +5232,10 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &ast::ForeignItem) { } }; (n_tps, inputs, ty::FnConverging(output)) - } else if &name[] == "abort" || &name[] == "unreachable" { + } else if &name[..] == "abort" || &name[..] == "unreachable" { (0, Vec::new(), ty::FnDiverging) } else { - let (n_tps, inputs, output) = match &name[] { + let (n_tps, inputs, output) = match &name[..] 
{ "breakpoint" => (0, Vec::new(), ty::mk_nil(tcx)), "size_of" | "pref_align_of" | "min_align_of" => (1, Vec::new(), ccx.tcx.types.uint), @@ -5260,7 +5260,7 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &ast::ForeignItem) { "get_tydesc" => { let tydesc_ty = match ty::get_tydesc_ty(ccx.tcx) { Ok(t) => t, - Err(s) => { span_fatal!(tcx.sess, it.span, E0240, "{}", &s[]); } + Err(s) => { span_fatal!(tcx.sess, it.span, E0240, "{}", &s[..]); } }; let td_ptr = ty::mk_ptr(ccx.tcx, ty::mt { ty: tydesc_ty, diff --git a/src/librustc_typeck/check/regionck.rs b/src/librustc_typeck/check/regionck.rs index 17c259e674e..8be7bc06c50 100644 --- a/src/librustc_typeck/check/regionck.rs +++ b/src/librustc_typeck/check/regionck.rs @@ -283,7 +283,7 @@ impl<'a, 'tcx> Rcx<'a, 'tcx> { }; let len = self.region_bound_pairs.len(); - self.relate_free_regions(&fn_sig[], body.id); + self.relate_free_regions(&fn_sig[..], body.id); link_fn_args(self, CodeExtent::from_node_id(body.id), &fn_decl.inputs[]); self.visit_block(body); self.visit_region_obligations(body.id); @@ -674,7 +674,7 @@ fn visit_expr(rcx: &mut Rcx, expr: &ast::Expr) { } ast::ExprMatch(ref discr, ref arms, _) => { - link_match(rcx, &**discr, &arms[]); + link_match(rcx, &**discr, &arms[..]); visit::walk_expr(rcx, expr); } diff --git a/src/librustc_typeck/collect.rs b/src/librustc_typeck/collect.rs index 44e850a0738..833daf083a7 100644 --- a/src/librustc_typeck/collect.rs +++ b/src/librustc_typeck/collect.rs @@ -268,7 +268,7 @@ fn get_enum_variant_types<'a, 'tcx>(ccx: &CollectCtxt<'a, 'tcx>, ast::TupleVariantKind(ref args) if args.len() > 0 => { let rs = ExplicitRscope; let input_tys: Vec<_> = args.iter().map(|va| ccx.to_ty(&rs, &*va.ty)).collect(); - ty::mk_ctor_fn(tcx, variant_def_id, &input_tys[], enum_scheme.ty) + ty::mk_ctor_fn(tcx, variant_def_id, &input_tys[..], enum_scheme.ty) } ast::TupleVariantKind(_) => { @@ -313,7 +313,7 @@ fn collect_trait_methods<'a, 'tcx>(ccx: &CollectCtxt<'a, 'tcx>, trait_id, &trait_def.generics, &trait_predicates, - &trait_items[], + &trait_items[..], &m.id, &m.ident.name, &m.explicit_self, @@ -328,7 +328,7 @@ fn collect_trait_methods<'a, 'tcx>(ccx: &CollectCtxt<'a, 'tcx>, trait_id, &trait_def.generics, &trait_predicates, - &trait_items[], + &trait_items[..], &m.id, &m.pe_ident().name, m.pe_explicit_self(), @@ -871,7 +871,7 @@ fn convert_struct<'a, 'tcx>(ccx: &CollectCtxt<'a, 'tcx>, local_def(field.node.id)].ty).collect(); let ctor_fn_ty = ty::mk_ctor_fn(tcx, local_def(ctor_id), - &inputs[], + &inputs[..], selfty); write_ty_to_tcx(tcx, ctor_id, ctor_fn_ty); tcx.tcache.borrow_mut().insert(local_def(ctor_id), @@ -1358,7 +1358,7 @@ fn ty_generics_for_fn_or_method<'a,'tcx>(ccx: &CollectCtxt<'a,'tcx>, let early_lifetimes = resolve_lifetime::early_bound_lifetimes(generics); ty_generics(ccx, subst::FnSpace, - &early_lifetimes[], + &early_lifetimes[..], &generics.ty_params[], &generics.where_clause, base_generics) diff --git a/src/librustc_typeck/variance.rs b/src/librustc_typeck/variance.rs index d5883d8bf86..bf29bcd5123 100644 --- a/src/librustc_typeck/variance.rs +++ b/src/librustc_typeck/variance.rs @@ -1065,7 +1065,7 @@ impl<'a, 'tcx> SolveContext<'a, 'tcx> { // attribute and report an error with various results if found. 
if ty::has_attr(tcx, item_def_id, "rustc_variance") { let found = item_variances.repr(tcx); - span_err!(tcx.sess, tcx.map.span(item_id), E0208, "{}", &found[]); + span_err!(tcx.sess, tcx.map.span(item_id), E0208, "{}", &found[..]); } let newly_added = tcx.item_variance_map.borrow_mut() diff --git a/src/librustdoc/html/highlight.rs b/src/librustdoc/html/highlight.rs index 6acd1537946..44c0acda66f 100644 --- a/src/librustdoc/html/highlight.rs +++ b/src/librustdoc/html/highlight.rs @@ -34,7 +34,7 @@ pub fn highlight(src: &str, class: Option<&str>, id: Option<&str>) -> String { class, id, &mut out).unwrap(); - String::from_utf8_lossy(&out[]).into_owned() + String::from_utf8_lossy(&out[..]).into_owned() } /// Exhausts the `lexer` writing the output into `out`. diff --git a/src/libserialize/json.rs b/src/libserialize/json.rs index d79a3171855..c25e2096007 100644 --- a/src/libserialize/json.rs +++ b/src/libserialize/json.rs @@ -1120,7 +1120,7 @@ impl Json { /// Returns None otherwise. pub fn as_string<'a>(&'a self) -> Option<&'a str> { match *self { - Json::String(ref s) => Some(&s[]), + Json::String(ref s) => Some(&s[..]), _ => None } } @@ -2237,7 +2237,7 @@ impl ::Decoder for Decoder { return Err(ExpectedError("String or Object".to_string(), format!("{}", json))) } }; - let idx = match names.iter().position(|n| *n == &name[]) { + let idx = match names.iter().position(|n| *n == &name[..]) { Some(idx) => idx, None => return Err(UnknownVariantError(name)) }; @@ -3461,7 +3461,7 @@ mod tests { hm.insert(1, true); let mut mem_buf = Vec::new(); write!(&mut mem_buf, "{}", super::as_pretty_json(&hm)).unwrap(); - let json_str = from_utf8(&mem_buf[]).unwrap(); + let json_str = from_utf8(&mem_buf[..]).unwrap(); match from_str(json_str) { Err(_) => panic!("Unable to parse json_str: {:?}", json_str), _ => {} // it parsed and we are good to go @@ -3477,7 +3477,7 @@ mod tests { hm.insert(1, true); let mut mem_buf = Vec::new(); write!(&mut mem_buf, "{}", super::as_pretty_json(&hm)).unwrap(); - let json_str = from_utf8(&mem_buf[]).unwrap(); + let json_str = from_utf8(&mem_buf[..]).unwrap(); match from_str(json_str) { Err(_) => panic!("Unable to parse json_str: {:?}", json_str), _ => {} // it parsed and we are good to go @@ -3517,7 +3517,7 @@ mod tests { write!(&mut writer, "{}", super::as_pretty_json(&json).indent(i)).unwrap(); - let printed = from_utf8(&writer[]).unwrap(); + let printed = from_utf8(&writer[..]).unwrap(); // Check for indents at each line let lines: Vec<&str> = printed.lines().collect(); @@ -3549,7 +3549,7 @@ mod tests { let mut map = HashMap::new(); map.insert(Enum::Foo, 0); let result = json::encode(&map).unwrap(); - assert_eq!(&result[], r#"{"Foo":0}"#); + assert_eq!(&result[..], r#"{"Foo":0}"#); let decoded: HashMap = json::decode(&result).unwrap(); assert_eq!(map, decoded); } diff --git a/src/libserialize/serialize.rs b/src/libserialize/serialize.rs index 517907bcf58..70f0ba4bb23 100644 --- a/src/libserialize/serialize.rs +++ b/src/libserialize/serialize.rs @@ -326,7 +326,7 @@ impl Encodable for str { impl Encodable for String { fn encode(&self, s: &mut S) -> Result<(), S::Error> { - s.emit_str(&self[]) + s.emit_str(&self[..]) } } diff --git a/src/libstd/ffi/os_str.rs b/src/libstd/ffi/os_str.rs index 1d14b141778..88ebe99e981 100644 --- a/src/libstd/ffi/os_str.rs +++ b/src/libstd/ffi/os_str.rs @@ -103,7 +103,7 @@ impl ops::Deref for OsString { #[inline] fn deref(&self) -> &OsStr { - &self[] + &self[..] 
} } @@ -267,7 +267,7 @@ impl Debug for OsStr { } impl BorrowFrom for OsStr { - fn borrow_from(owned: &OsString) -> &OsStr { &owned[] } + fn borrow_from(owned: &OsString) -> &OsStr { &owned[..] } } impl ToOwned for OsStr { @@ -288,7 +288,7 @@ impl AsOsStr for OsStr { impl AsOsStr for OsString { fn as_os_str(&self) -> &OsStr { - &self[] + &self[..] } } @@ -300,7 +300,7 @@ impl AsOsStr for str { impl AsOsStr for String { fn as_os_str(&self) -> &OsStr { - OsStr::from_str(&self[]) + OsStr::from_str(&self[..]) } } diff --git a/src/libstd/io/buffered.rs b/src/libstd/io/buffered.rs index 2fd6631ecc4..e9a8dbb4098 100644 --- a/src/libstd/io/buffered.rs +++ b/src/libstd/io/buffered.rs @@ -618,14 +618,14 @@ mod tests { #[test] fn read_char_buffered() { let buf = [195u8, 159u8]; - let mut reader = BufReader::with_capacity(1, &buf[]); + let mut reader = BufReader::with_capacity(1, &buf[..]); assert_eq!(reader.chars().next(), Some(Ok('ß'))); } #[test] fn test_chars() { let buf = [195u8, 159u8, b'a']; - let mut reader = BufReader::with_capacity(1, &buf[]); + let mut reader = BufReader::with_capacity(1, &buf[..]); let mut it = reader.chars(); assert_eq!(it.next(), Some(Ok('ß'))); assert_eq!(it.next(), Some(Ok('a'))); diff --git a/src/libstd/io/cursor.rs b/src/libstd/io/cursor.rs index 9f3655de20f..f6cb4a8c9f3 100644 --- a/src/libstd/io/cursor.rs +++ b/src/libstd/io/cursor.rs @@ -180,7 +180,7 @@ mod tests { fn test_buf_writer() { let mut buf = [0 as u8; 9]; { - let mut writer = Cursor::new(&mut buf[]); + let mut writer = Cursor::new(&mut buf[..]); assert_eq!(writer.position(), 0); assert_eq!(writer.write(&[0]), Ok(1)); assert_eq!(writer.position(), 1); @@ -201,7 +201,7 @@ mod tests { fn test_buf_writer_seek() { let mut buf = [0 as u8; 8]; { - let mut writer = Cursor::new(&mut buf[]); + let mut writer = Cursor::new(&mut buf[..]); assert_eq!(writer.position(), 0); assert_eq!(writer.write(&[1]), Ok(1)); assert_eq!(writer.position(), 1); @@ -229,7 +229,7 @@ mod tests { #[test] fn test_buf_writer_error() { let mut buf = [0 as u8; 2]; - let mut writer = Cursor::new(&mut buf[]); + let mut writer = Cursor::new(&mut buf[..]); assert_eq!(writer.write(&[0]), Ok(1)); assert_eq!(writer.write(&[0, 0]), Ok(1)); assert_eq!(writer.write(&[0, 0]), Ok(0)); @@ -331,7 +331,7 @@ mod tests { #[test] fn seek_past_end() { let buf = [0xff]; - let mut r = Cursor::new(&buf[]); + let mut r = Cursor::new(&buf[..]); assert_eq!(r.seek(SeekFrom::Start(10)), Ok(10)); assert_eq!(r.read(&mut [0]), Ok(0)); @@ -340,7 +340,7 @@ mod tests { assert_eq!(r.read(&mut [0]), Ok(0)); let mut buf = [0]; - let mut r = Cursor::new(&mut buf[]); + let mut r = Cursor::new(&mut buf[..]); assert_eq!(r.seek(SeekFrom::Start(10)), Ok(10)); assert_eq!(r.write(&[3]), Ok(0)); } @@ -348,14 +348,14 @@ mod tests { #[test] fn seek_before_0() { let buf = [0xff_u8]; - let mut r = Cursor::new(&buf[]); + let mut r = Cursor::new(&buf[..]); assert!(r.seek(SeekFrom::End(-2)).is_err()); let mut r = Cursor::new(vec!(10u8)); assert!(r.seek(SeekFrom::End(-2)).is_err()); let mut buf = [0]; - let mut r = Cursor::new(&mut buf[]); + let mut r = Cursor::new(&mut buf[..]); assert!(r.seek(SeekFrom::End(-2)).is_err()); } diff --git a/src/libstd/old_io/buffered.rs b/src/libstd/old_io/buffered.rs index 59a437ad916..2d2d0d8b33a 100644 --- a/src/libstd/old_io/buffered.rs +++ b/src/libstd/old_io/buffered.rs @@ -546,7 +546,7 @@ mod test { assert_eq!(a, &w.get_ref()[]); let w = w.into_inner(); let a: &[_] = &[0, 1]; - assert_eq!(a, &w[]); + assert_eq!(a, &w[..]); } // This is just here to make 
sure that we don't infinite loop in the @@ -643,14 +643,14 @@ mod test { #[test] fn read_char_buffered() { let buf = [195u8, 159u8]; - let mut reader = BufferedReader::with_capacity(1, &buf[]); + let mut reader = BufferedReader::with_capacity(1, &buf[..]); assert_eq!(reader.read_char(), Ok('ß')); } #[test] fn test_chars() { let buf = [195u8, 159u8, b'a']; - let mut reader = BufferedReader::with_capacity(1, &buf[]); + let mut reader = BufferedReader::with_capacity(1, &buf[..]); let mut it = reader.chars(); assert_eq!(it.next(), Some(Ok('ß'))); assert_eq!(it.next(), Some(Ok('a'))); diff --git a/src/libstd/old_path/mod.rs b/src/libstd/old_path/mod.rs index 37de2993c4d..e9005aa22bc 100644 --- a/src/libstd/old_path/mod.rs +++ b/src/libstd/old_path/mod.rs @@ -877,7 +877,7 @@ impl BytesContainer for String { } #[inline] fn container_as_str(&self) -> Option<&str> { - Some(&self[]) + Some(&self[..]) } #[inline] fn is_str(_: Option<&String>) -> bool { true } @@ -893,7 +893,7 @@ impl BytesContainer for [u8] { impl BytesContainer for Vec { #[inline] fn container_as_bytes(&self) -> &[u8] { - &self[] + &self[..] } } diff --git a/src/libstd/old_path/windows.rs b/src/libstd/old_path/windows.rs index 07c5e10992b..3cf15435dd0 100644 --- a/src/libstd/old_path/windows.rs +++ b/src/libstd/old_path/windows.rs @@ -182,7 +182,7 @@ impl GenericPathUnsafe for Path { s.push_str(".."); s.push(SEP); s.push_str(filename); - self.update_normalized(&s[]); + self.update_normalized(&s[..]); } None => { self.update_normalized(filename); @@ -192,20 +192,20 @@ impl GenericPathUnsafe for Path { s.push_str(&self.repr[..end]); s.push(SEP); s.push_str(filename); - self.update_normalized(&s[]); + self.update_normalized(&s[..]); } Some((idxb,idxa,_)) if self.prefix == Some(DiskPrefix) && idxa == self.prefix_len() => { let mut s = String::with_capacity(idxb + filename.len()); s.push_str(&self.repr[..idxb]); s.push_str(filename); - self.update_normalized(&s[]); + self.update_normalized(&s[..]); } Some((idxb,_,_)) => { let mut s = String::with_capacity(idxb + 1 + filename.len()); s.push_str(&self.repr[..idxb]); s.push(SEP); s.push_str(filename); - self.update_normalized(&s[]); + self.update_normalized(&s[..]); } } } @@ -229,7 +229,7 @@ impl GenericPathUnsafe for Path { } fn shares_volume(me: &Path, path: &str) -> bool { // path is assumed to have a prefix of Some(DiskPrefix) - let repr = &me.repr[]; + let repr = &me.repr[..]; match me.prefix { Some(DiskPrefix) => { repr.as_bytes()[0] == path.as_bytes()[0].to_ascii_uppercase() @@ -261,7 +261,7 @@ impl GenericPathUnsafe for Path { else { None }; let pathlen = path_.as_ref().map_or(path.len(), |p| p.len()); let mut s = String::with_capacity(me.repr.len() + 1 + pathlen); - s.push_str(&me.repr[]); + s.push_str(&me.repr[..]); let plen = me.prefix_len(); // if me is "C:" we don't want to add a path separator match me.prefix { @@ -273,9 +273,9 @@ impl GenericPathUnsafe for Path { } match path_ { None => s.push_str(path), - Some(p) => s.push_str(&p[]), + Some(p) => s.push_str(&p[..]), }; - me.update_normalized(&s[]) + me.update_normalized(&s[..]) } if !path.is_empty() { @@ -329,7 +329,7 @@ impl GenericPath for Path { /// Always returns a `Some` value. #[inline] fn as_str<'a>(&'a self) -> Option<&'a str> { - Some(&self.repr[]) + Some(&self.repr[..]) } #[inline] @@ -351,13 +351,13 @@ impl GenericPath for Path { /// Always returns a `Some` value. fn dirname_str<'a>(&'a self) -> Option<&'a str> { Some(match self.sepidx_or_prefix_len() { - None if ".." == self.repr => &self.repr[], + None if ".." 
== self.repr => &self.repr[..], None => ".", Some((_,idxa,end)) if &self.repr[idxa..end] == ".." => { - &self.repr[] + &self.repr[..] } Some((idxb,_,end)) if &self.repr[idxb..end] == "\\" => { - &self.repr[] + &self.repr[..] } Some((0,idxa,_)) => &self.repr[..idxa], Some((idxb,idxa,_)) => { @@ -379,7 +379,7 @@ impl GenericPath for Path { /// See `GenericPath::filename_str` for info. /// Always returns a `Some` value if `filename` returns a `Some` value. fn filename_str<'a>(&'a self) -> Option<&'a str> { - let repr = &self.repr[]; + let repr = &self.repr[..]; match self.sepidx_or_prefix_len() { None if "." == repr || ".." == repr => None, None => Some(repr), @@ -639,7 +639,7 @@ impl Path { /// Does not distinguish between absolute and cwd-relative paths, e.g. /// C:\foo and C:foo. pub fn str_components<'a>(&'a self) -> StrComponents<'a> { - let repr = &self.repr[]; + let repr = &self.repr[..]; let s = match self.prefix { Some(_) => { let plen = self.prefix_len(); @@ -667,8 +667,8 @@ impl Path { } fn equiv_prefix(&self, other: &Path) -> bool { - let s_repr = &self.repr[]; - let o_repr = &other.repr[]; + let s_repr = &self.repr[..]; + let o_repr = &other.repr[..]; match (self.prefix, other.prefix) { (Some(DiskPrefix), Some(VerbatimDiskPrefix)) => { self.is_absolute() && @@ -823,7 +823,7 @@ impl Path { fn update_sepidx(&mut self) { let s = if self.has_nonsemantic_trailing_slash() { &self.repr[..self.repr.len()-1] - } else { &self.repr[] }; + } else { &self.repr[..] }; let sep_test: fn(char) -> bool = if !prefix_is_verbatim(self.prefix) { is_sep } else { @@ -902,7 +902,7 @@ pub fn is_verbatim(path: &Path) -> bool { /// non-verbatim, the non-verbatim version is returned. /// Otherwise, None is returned. pub fn make_non_verbatim(path: &Path) -> Option { - let repr = &path.repr[]; + let repr = &path.repr[..]; let new_path = match path.prefix { Some(VerbatimPrefix(_)) | Some(DeviceNSPrefix(_)) => return None, Some(UNCPrefix(_,_)) | Some(DiskPrefix) | None => return Some(path.clone()), diff --git a/src/libstd/panicking.rs b/src/libstd/panicking.rs index 35221a7e647..2e05f6d974e 100644 --- a/src/libstd/panicking.rs +++ b/src/libstd/panicking.rs @@ -37,7 +37,7 @@ pub fn on_panic(obj: &(Any+Send), file: &'static str, line: uint) { let msg = match obj.downcast_ref::<&'static str>() { Some(s) => *s, None => match obj.downcast_ref::() { - Some(s) => &s[], + Some(s) => &s[..], None => "Box", } }; diff --git a/src/libstd/path.rs b/src/libstd/path.rs index 1d992668900..3cdf68818ab 100755 --- a/src/libstd/path.rs +++ b/src/libstd/path.rs @@ -978,7 +978,7 @@ impl ops::Deref for PathBuf { type Target = Path; fn deref(&self) -> &Path { - unsafe { mem::transmute(&self.inner[]) } + unsafe { mem::transmute(&self.inner[..]) } } } @@ -1010,7 +1010,7 @@ impl cmp::Ord for PathBuf { impl AsOsStr for PathBuf { fn as_os_str(&self) -> &OsStr { - &self.inner[] + &self.inner[..] } } diff --git a/src/libstd/sys/windows/mod.rs b/src/libstd/sys/windows/mod.rs index 4d6d033deee..a756fb29f81 100644 --- a/src/libstd/sys/windows/mod.rs +++ b/src/libstd/sys/windows/mod.rs @@ -265,12 +265,12 @@ fn fill_utf16_buf_base(mut f1: F1, f2: F2) -> Result let mut n = stack_buf.len(); loop { let buf = if n <= stack_buf.len() { - &mut stack_buf[] + &mut stack_buf[..] } else { let extra = n - heap_buf.len(); heap_buf.reserve(extra); heap_buf.set_len(n); - &mut heap_buf[] + &mut heap_buf[..] 
}; // This function is typically called on windows API functions which diff --git a/src/libstd/sys/windows/os.rs b/src/libstd/sys/windows/os.rs index 502d70d4e1a..6520d30487c 100644 --- a/src/libstd/sys/windows/os.rs +++ b/src/libstd/sys/windows/os.rs @@ -114,7 +114,7 @@ impl Iterator for Env { let (k, v) = match s.iter().position(|&b| b == '=' as u16) { Some(n) => (&s[..n], &s[n+1..]), - None => (s, &[][]), + None => (s, &[][..]), }; Some((OsStringExt::from_wide(k), OsStringExt::from_wide(v))) } @@ -186,7 +186,7 @@ impl<'a> Iterator for SplitPaths<'a> { if !must_yield && in_progress.is_empty() { None } else { - Some(super::os2path(&in_progress[])) + Some(super::os2path(&in_progress[..])) } } } @@ -208,14 +208,14 @@ pub fn join_paths(paths: I) -> Result return Err(JoinPathsError) } else if v.contains(&sep) { joined.push(b'"' as u16); - joined.push_all(&v[]); + joined.push_all(&v[..]); joined.push(b'"' as u16); } else { - joined.push_all(&v[]); + joined.push_all(&v[..]); } } - Ok(OsStringExt::from_wide(&joined[])) + Ok(OsStringExt::from_wide(&joined[..])) } impl fmt::Display for JoinPathsError { diff --git a/src/libstd/sys/windows/process2.rs b/src/libstd/sys/windows/process2.rs index 19e38196d19..4e36ed2f17f 100644 --- a/src/libstd/sys/windows/process2.rs +++ b/src/libstd/sys/windows/process2.rs @@ -472,7 +472,7 @@ mod tests { "echo \"a b c\"" ); assert_eq!( test_wrapper("\u{03c0}\u{042f}\u{97f3}\u{00e6}\u{221e}", &[]), "\u{03c0}\u{042f}\u{97f3}\u{00e6}\u{221e}" ); } diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index d6778be553e..140e21b5d04 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -198,7 +198,7 @@ impl Encodable for Ident { impl Decodable for Ident { fn decode(d: &mut D) -> Result { - Ok(str_to_ident(&try!(d.read_str())[])) + Ok(str_to_ident(&try!(d.read_str())[..])) } } diff --git a/src/libsyntax/ast_map/mod.rs b/src/libsyntax/ast_map/mod.rs index 5535e5911e0..62291cafbc0 100644 --- a/src/libsyntax/ast_map/mod.rs +++ b/src/libsyntax/ast_map/mod.rs @@ -86,7 +86,7 @@ pub fn path_to_string>(path: PI) -> String { if !s.is_empty() { s.push_str("::"); } - s.push_str(&e[]); + s.push_str(&e[..]); s }) } @@ -463,20 +463,20 @@ impl<'ast> Map<'ast> { F: FnOnce(Option<&[Attribute]>) -> T, { let attrs = match self.get(id) { - NodeItem(i) => Some(&i.attrs[]), - NodeForeignItem(fi) => Some(&fi.attrs[]), + NodeItem(i) => Some(&i.attrs[..]), + NodeForeignItem(fi) => Some(&fi.attrs[..]), NodeTraitItem(ref tm) => match **tm { - RequiredMethod(ref type_m) => Some(&type_m.attrs[]), - ProvidedMethod(ref m) => Some(&m.attrs[]), - TypeTraitItem(ref typ) => Some(&typ.attrs[]), + RequiredMethod(ref type_m) => Some(&type_m.attrs[..]), + ProvidedMethod(ref m) => Some(&m.attrs[..]), + TypeTraitItem(ref typ) => Some(&typ.attrs[..]), }, NodeImplItem(ref ii) => { match **ii { - MethodImplItem(ref m) => Some(&m.attrs[]), - TypeImplItem(ref t) => Some(&t.attrs[]), + MethodImplItem(ref m) => Some(&m.attrs[..]), + TypeImplItem(ref t) => Some(&t.attrs[..]), } } - NodeVariant(ref v) => Some(&v.node.attrs[]), + NodeVariant(ref v) => Some(&v.node.attrs[..]), // unit/tuple structs take the attributes straight from // the struct definition. // FIXME(eddyb) make this work again (requires access to the map). @@ -577,7 +577,7 @@ impl<'a, 'ast> NodesMatchingSuffix<'a, 'ast> { None => return false, Some((node_id, name)) => (node_id, name), }; - if &part[] != mod_name.as_str() { + if &part[..]
!= mod_name.as_str() { return false; } cursor = self.map.get_parent(mod_id); @@ -615,7 +615,7 @@ impl<'a, 'ast> NodesMatchingSuffix<'a, 'ast> { // We are looking at some node `n` with a given name and parent // id; do their names match what I am seeking? fn matches_names(&self, parent_of_n: NodeId, name: Name) -> bool { - name.as_str() == &self.item_name[] && + name.as_str() == &self.item_name[..] && self.suffix_matches(parent_of_n) } } @@ -1026,7 +1026,7 @@ impl<'a> NodePrinter for pprust::State<'a> { fn node_id_to_string(map: &Map, id: NodeId, include_id: bool) -> String { let id_str = format!(" (id={})", id); - let id_str = if include_id { &id_str[] } else { "" }; + let id_str = if include_id { &id_str[..] } else { "" }; match map.find(id) { Some(NodeItem(item)) => { diff --git a/src/libsyntax/ast_util.rs b/src/libsyntax/ast_util.rs index b8d4c90f745..1f0ff2a0728 100644 --- a/src/libsyntax/ast_util.rs +++ b/src/libsyntax/ast_util.rs @@ -263,11 +263,11 @@ pub fn impl_pretty_name(trait_ref: &Option, ty: &Ty) -> Ident { match *trait_ref { Some(ref trait_ref) => { pretty.push('.'); - pretty.push_str(&pprust::path_to_string(&trait_ref.path)[]); + pretty.push_str(&pprust::path_to_string(&trait_ref.path)); } None => {} } - token::gensym_ident(&pretty[]) + token::gensym_ident(&pretty[..]) } pub fn trait_method_to_ty_method(method: &Method) -> TypeMethod { @@ -679,7 +679,7 @@ pub fn pat_is_ident(pat: P) -> bool { pub fn path_name_eq(a : &ast::Path, b : &ast::Path) -> bool { (a.span == b.span) && (a.global == b.global) - && (segments_name_eq(&a.segments[], &b.segments[])) + && (segments_name_eq(&a.segments[..], &b.segments[..])) } // are two arrays of segments equal when compared unhygienically? diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs index a3afe5780d0..373f250679a 100644 --- a/src/libsyntax/attr.rs +++ b/src/libsyntax/attr.rs @@ -44,7 +44,7 @@ pub fn is_used(attr: &Attribute) -> bool { pub trait AttrMetaMethods { fn check_name(&self, name: &str) -> bool { - name == &self.name()[] + name == &self.name()[..] } /// Retrieve the name of the meta item, e.g. 
`foo` in `#[foo]`, @@ -62,7 +62,7 @@ pub trait AttrMetaMethods { impl AttrMetaMethods for Attribute { fn check_name(&self, name: &str) -> bool { - let matches = name == &self.name()[]; + let matches = name == &self.name()[..]; if matches { mark_used(self); } @@ -101,7 +101,7 @@ impl AttrMetaMethods for MetaItem { fn meta_item_list<'a>(&'a self) -> Option<&'a [P]> { match self.node { - MetaList(_, ref l) => Some(&l[]), + MetaList(_, ref l) => Some(&l[..]), _ => None } } @@ -142,7 +142,7 @@ impl AttributeMethods for Attribute { let meta = mk_name_value_item_str( InternedString::new("doc"), token::intern_and_get_ident(&strip_doc_comment_decoration( - &comment)[])); + &comment))); if self.node.style == ast::AttrOuter { f(&mk_attr_outer(self.node.id, meta)) } else { @@ -302,9 +302,9 @@ pub fn find_inline_attr(attrs: &[Attribute]) -> InlineAttr { } MetaList(ref n, ref items) if *n == "inline" => { mark_used(attr); - if contains_name(&items[], "always") { + if contains_name(&items[..], "always") { InlineAlways - } else if contains_name(&items[], "never") { + } else if contains_name(&items[..], "never") { InlineNever } else { InlineHint @@ -326,11 +326,11 @@ pub fn requests_inline(attrs: &[Attribute]) -> bool { /// Tests if a cfg-pattern matches the cfg set pub fn cfg_matches(diagnostic: &SpanHandler, cfgs: &[P], cfg: &ast::MetaItem) -> bool { match cfg.node { - ast::MetaList(ref pred, ref mis) if &pred[] == "any" => + ast::MetaList(ref pred, ref mis) if &pred[..] == "any" => mis.iter().any(|mi| cfg_matches(diagnostic, cfgs, &**mi)), - ast::MetaList(ref pred, ref mis) if &pred[] == "all" => + ast::MetaList(ref pred, ref mis) if &pred[..] == "all" => mis.iter().all(|mi| cfg_matches(diagnostic, cfgs, &**mi)), - ast::MetaList(ref pred, ref mis) if &pred[] == "not" => { + ast::MetaList(ref pred, ref mis) if &pred[..] == "not" => { if mis.len() != 1 { diagnostic.span_err(cfg.span, "expected 1 cfg-pattern"); return false; @@ -382,7 +382,7 @@ fn find_stability_generic<'a, 'outer: for attr in attrs { let tag = attr.name(); - let tag = &tag[]; + let tag = &tag[..]; if tag != "deprecated" && tag != "unstable" && tag != "stable" { continue // not a stability level } @@ -404,7 +404,7 @@ fn find_stability_generic<'a, } } } - if &meta.name()[] == "since" { + if &meta.name()[..] == "since" { match meta.value_str() { Some(v) => since = Some(v), None => { @@ -413,7 +413,7 @@ fn find_stability_generic<'a, } } } - if &meta.name()[] == "reason" { + if &meta.name()[..] == "reason" { match meta.value_str() { Some(v) => reason = Some(v), None => { @@ -501,7 +501,7 @@ pub fn require_unique_names(diagnostic: &SpanHandler, metas: &[P]) { if !set.insert(name.clone()) { diagnostic.span_fatal(meta.span, - &format!("duplicate meta item `{}`", name)[]); + &format!("duplicate meta item `{}`", name)); } } } @@ -521,7 +521,7 @@ pub fn find_repr_attrs(diagnostic: &SpanHandler, attr: &Attribute) -> Vec { - let hint = match &word[] { + let hint = match &word[..] { // Can't use "extern" because it's not a lexical identifier. "C" => Some(ReprExtern), "packed" => Some(ReprPacked), diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs index 3231342cb50..099f6462942 100644 --- a/src/libsyntax/codemap.rs +++ b/src/libsyntax/codemap.rs @@ -360,7 +360,7 @@ impl CodeMap { let mut src = if src.starts_with("\u{feff}") { String::from_str(&src[3..]) } else { - String::from_str(&src[]) + String::from_str(&src[..]) }; // Append '\n' in case it's not already there. 
diff --git a/src/libsyntax/config.rs b/src/libsyntax/config.rs index 7ca0591be50..dfe3477bddc 100644 --- a/src/libsyntax/config.rs +++ b/src/libsyntax/config.rs @@ -311,7 +311,7 @@ impl<'a> fold::Folder for CfgAttrFolder<'a> { } }; - if attr::cfg_matches(self.diag, &self.config[], &cfg) { + if attr::cfg_matches(self.diag, &self.config[..], &cfg) { Some(respan(mi.span, ast::Attribute_ { id: attr::mk_attr_id(), style: attr.node.style, diff --git a/src/libsyntax/diagnostic.rs b/src/libsyntax/diagnostic.rs index 83a4d938bb5..fe409f7030f 100644 --- a/src/libsyntax/diagnostic.rs +++ b/src/libsyntax/diagnostic.rs @@ -129,7 +129,7 @@ impl SpanHandler { panic!(ExplicitBug); } pub fn span_unimpl(&self, sp: Span, msg: &str) -> ! { - self.span_bug(sp, &format!("unimplemented {}", msg)[]); + self.span_bug(sp, &format!("unimplemented {}", msg)); } pub fn handler<'a>(&'a self) -> &'a Handler { &self.handler @@ -173,7 +173,7 @@ impl Handler { self.err_count.get()); } } - self.fatal(&s[]); + self.fatal(&s[..]); } pub fn warn(&self, msg: &str) { self.emit.borrow_mut().emit(None, msg, None, Warning); @@ -189,7 +189,7 @@ impl Handler { panic!(ExplicitBug); } pub fn unimpl(&self, msg: &str) -> ! { - self.bug(&format!("unimplemented {}", msg)[]); + self.bug(&format!("unimplemented {}", msg)); } pub fn emit(&self, cmsp: Option<(&codemap::CodeMap, Span)>, @@ -419,12 +419,12 @@ fn emit(dst: &mut EmitterWriter, cm: &codemap::CodeMap, rsp: RenderSpan, // the span) let span_end = Span { lo: sp.hi, hi: sp.hi, expn_id: sp.expn_id}; let ses = cm.span_to_string(span_end); - try!(print_diagnostic(dst, &ses[], lvl, msg, code)); + try!(print_diagnostic(dst, &ses[..], lvl, msg, code)); if rsp.is_full_span() { try!(custom_highlight_lines(dst, cm, sp, lvl, cm.span_to_lines(sp))); } } else { - try!(print_diagnostic(dst, &ss[], lvl, msg, code)); + try!(print_diagnostic(dst, &ss[..], lvl, msg, code)); if rsp.is_full_span() { try!(highlight_lines(dst, cm, sp, lvl, cm.span_to_lines(sp))); } @@ -436,7 +436,7 @@ fn emit(dst: &mut EmitterWriter, cm: &codemap::CodeMap, rsp: RenderSpan, Some(code) => match dst.registry.as_ref().and_then(|registry| registry.find_description(code)) { Some(_) => { - try!(print_diagnostic(dst, &ss[], Help, + try!(print_diagnostic(dst, &ss[..], Help, &format!("pass `--explain {}` to see a detailed \ explanation", code)[], None)); } @@ -455,7 +455,7 @@ fn highlight_lines(err: &mut EmitterWriter, let fm = &*lines.file; let mut elided = false; - let mut display_lines = &lines.lines[]; + let mut display_lines = &lines.lines[..]; if display_lines.len() > MAX_LINES { display_lines = &display_lines[0..MAX_LINES]; elided = true; @@ -563,7 +563,7 @@ fn custom_highlight_lines(w: &mut EmitterWriter, -> old_io::IoResult<()> { let fm = &*lines.file; - let lines = &lines.lines[]; + let lines = &lines.lines[..]; if lines.len() > MAX_LINES { if let Some(line) = fm.get_line(lines[0]) { try!(write!(&mut w.dst, "{}:{} {}\n", fm.name, @@ -610,7 +610,7 @@ fn custom_highlight_lines(w: &mut EmitterWriter, s.push('^'); s.push('\n'); print_maybe_styled(w, - &s[], + &s[..], term::attr::ForegroundColor(lvl.color())) } @@ -625,12 +625,12 @@ fn print_macro_backtrace(w: &mut EmitterWriter, codemap::MacroAttribute => ("#[", "]"), codemap::MacroBang => ("", "!") }; - try!(print_diagnostic(w, &ss[], Note, + try!(print_diagnostic(w, &ss[..], Note, &format!("in expansion of {}{}{}", pre, ei.callee.name, post)[], None)); let ss = cm.span_to_string(ei.call_site); - try!(print_diagnostic(w, &ss[], Note, "expansion site", None)); + 
try!(print_diagnostic(w, &ss[..], Note, "expansion site", None)); Ok(Some(ei.call_site)) } None => Ok(None) @@ -643,6 +643,6 @@ pub fn expect(diag: &SpanHandler, opt: Option, msg: M) -> T where { match opt { Some(t) => t, - None => diag.handler().bug(&msg()[]), + None => diag.handler().bug(&msg()), } } diff --git a/src/libsyntax/diagnostics/plugin.rs b/src/libsyntax/diagnostics/plugin.rs index 833a6d52acb..b3afc3fc4dd 100644 --- a/src/libsyntax/diagnostics/plugin.rs +++ b/src/libsyntax/diagnostics/plugin.rs @@ -59,7 +59,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt, Some(previous_span) => { ecx.span_warn(span, &format!( "diagnostic code {} already used", &token::get_ident(code) - )[]); + )); ecx.span_note(previous_span, "previous invocation"); }, None => () @@ -70,7 +70,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt, if !diagnostics.contains_key(&code.name) { ecx.span_err(span, &format!( "used diagnostic code {} not registered", &token::get_ident(code) - )[]); + )); } }); MacExpr::new(quote_expr!(ecx, ())) @@ -95,12 +95,12 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt, if diagnostics.insert(code.name, description).is_some() { ecx.span_err(span, &format!( "diagnostic code {} already registered", &token::get_ident(*code) - )[]); + )); } }); let sym = Ident::new(token::gensym(&( "__register_diagnostic_".to_string() + &token::get_ident(*code) - )[])); + ))); MacItems::new(vec![quote_item!(ecx, mod $sym {}).unwrap()].into_iter()) } diff --git a/src/libsyntax/ext/concat.rs b/src/libsyntax/ext/concat.rs index 80d128959ea..38098e50dee 100644 --- a/src/libsyntax/ext/concat.rs +++ b/src/libsyntax/ext/concat.rs @@ -62,5 +62,5 @@ pub fn expand_syntax_ext(cx: &mut base::ExtCtxt, } base::MacExpr::new(cx.expr_str( sp, - token::intern_and_get_ident(&accumulator[]))) + token::intern_and_get_ident(&accumulator[..]))) } diff --git a/src/libsyntax/ext/concat_idents.rs b/src/libsyntax/ext/concat_idents.rs index 63a8bd9ddf1..9410a51e7a5 100644 --- a/src/libsyntax/ext/concat_idents.rs +++ b/src/libsyntax/ext/concat_idents.rs @@ -49,7 +49,7 @@ pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree] } } } - let res = str_to_ident(&res_str[]); + let res = str_to_ident(&res_str[..]); let e = P(ast::Expr { id: ast::DUMMY_NODE_ID, diff --git a/src/libsyntax/ext/deriving/bounds.rs b/src/libsyntax/ext/deriving/bounds.rs index 879718a6399..93098484ae0 100644 --- a/src/libsyntax/ext/deriving/bounds.rs +++ b/src/libsyntax/ext/deriving/bounds.rs @@ -24,7 +24,7 @@ pub fn expand_deriving_bound(cx: &mut ExtCtxt, { let name = match mitem.node { MetaWord(ref tname) => { - match &tname[] { + match &tname[..] 
{ "Copy" => "Copy", "Send" | "Sync" => { return cx.span_err(span, diff --git a/src/libsyntax/ext/deriving/generic/mod.rs b/src/libsyntax/ext/deriving/generic/mod.rs index f878cb5ca8b..29e51024d53 100644 --- a/src/libsyntax/ext/deriving/generic/mod.rs +++ b/src/libsyntax/ext/deriving/generic/mod.rs @@ -536,15 +536,15 @@ impl<'a> TraitDef<'a> { self, struct_def, type_ident, - &self_args[], - &nonself_args[]) + &self_args[..], + &nonself_args[..]) } else { method_def.expand_struct_method_body(cx, self, struct_def, type_ident, - &self_args[], - &nonself_args[]) + &self_args[..], + &nonself_args[..]) }; method_def.create_method(cx, @@ -576,15 +576,15 @@ impl<'a> TraitDef<'a> { self, enum_def, type_ident, - &self_args[], - &nonself_args[]) + &self_args[..], + &nonself_args[..]) } else { method_def.expand_enum_method_body(cx, self, enum_def, type_ident, self_args, - &nonself_args[]) + &nonself_args[..]) }; method_def.create_method(cx, @@ -934,22 +934,22 @@ impl<'a> MethodDef<'a> { .collect::>(); let self_arg_idents = self_arg_names.iter() - .map(|name|cx.ident_of(&name[])) + .map(|name|cx.ident_of(&name[..])) .collect::>(); // The `vi_idents` will be bound, solely in the catch-all, to // a series of let statements mapping each self_arg to a usize // corresponding to its variant index. let vi_idents: Vec = self_arg_names.iter() - .map(|name| { let vi_suffix = format!("{}_vi", &name[]); - cx.ident_of(&vi_suffix[]) }) + .map(|name| { let vi_suffix = format!("{}_vi", &name[..]); + cx.ident_of(&vi_suffix[..]) }) .collect::>(); // Builds, via callback to call_substructure_method, the // delegated expression that handles the catch-all case, // using `__variants_tuple` to drive logic if necessary. let catch_all_substructure = EnumNonMatchingCollapsed( - self_arg_idents, &variants[], &vi_idents[]); + self_arg_idents, &variants[..], &vi_idents[..]); // These arms are of the form: // (Variant1, Variant1, ...) => Body1 @@ -976,7 +976,7 @@ impl<'a> MethodDef<'a> { idents }; for self_arg_name in self_arg_names.tail() { - let (p, idents) = mk_self_pat(cx, &self_arg_name[]); + let (p, idents) = mk_self_pat(cx, &self_arg_name[..]); subpats.push(p); self_pats_idents.push(idents); } @@ -1032,7 +1032,7 @@ impl<'a> MethodDef<'a> { &**variant, field_tuples); let arm_expr = self.call_substructure_method( - cx, trait_, type_ident, &self_args[], nonself_args, + cx, trait_, type_ident, &self_args[..], nonself_args, &substructure); cx.arm(sp, vec![single_pat], arm_expr) @@ -1085,7 +1085,7 @@ impl<'a> MethodDef<'a> { } let arm_expr = self.call_substructure_method( - cx, trait_, type_ident, &self_args[], nonself_args, + cx, trait_, type_ident, &self_args[..], nonself_args, &catch_all_substructure); // Builds the expression: @@ -1391,7 +1391,7 @@ pub fn cs_fold(use_foldl: bool, } }, EnumNonMatchingCollapsed(ref all_args, _, tuple) => - enum_nonmatch_f(cx, trait_span, (&all_args[], tuple), + enum_nonmatch_f(cx, trait_span, (&all_args[..], tuple), substructure.nonself_args), StaticEnum(..) | StaticStruct(..) => { cx.span_bug(trait_span, "static function in `derive`") @@ -1431,7 +1431,7 @@ pub fn cs_same_method(f: F, f(cx, trait_span, called) }, EnumNonMatchingCollapsed(ref all_self_args, _, tuple) => - enum_nonmatch_f(cx, trait_span, (&all_self_args[], tuple), + enum_nonmatch_f(cx, trait_span, (&all_self_args[..], tuple), substructure.nonself_args), StaticEnum(..) | StaticStruct(..) 
=> { cx.span_bug(trait_span, "static function in `derive`") diff --git a/src/libsyntax/ext/deriving/mod.rs b/src/libsyntax/ext/deriving/mod.rs index 0ed9e85e576..f8bc331bfcf 100644 --- a/src/libsyntax/ext/deriving/mod.rs +++ b/src/libsyntax/ext/deriving/mod.rs @@ -102,7 +102,7 @@ pub fn expand_meta_derive(cx: &mut ExtCtxt, |i| push(i))) } - match &tname[] { + match &tname[..] { "Clone" => expand!(clone::expand_deriving_clone), "Hash" => expand!(hash::expand_deriving_hash), diff --git a/src/libsyntax/ext/deriving/show.rs b/src/libsyntax/ext/deriving/show.rs index 3f5947672e0..281f23f9e61 100644 --- a/src/libsyntax/ext/deriving/show.rs +++ b/src/libsyntax/ext/deriving/show.rs @@ -128,7 +128,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, let formatter = substr.nonself_args[0].clone(); let meth = cx.ident_of("write_fmt"); - let s = token::intern_and_get_ident(&format_string[]); + let s = token::intern_and_get_ident(&format_string[..]); let format_string = cx.expr_str(span, s); // phew, not our responsibility any more! diff --git a/src/libsyntax/ext/env.rs b/src/libsyntax/ext/env.rs index 5d56707c87a..9c04d1e9282 100644 --- a/src/libsyntax/ext/env.rs +++ b/src/libsyntax/ext/env.rs @@ -30,7 +30,7 @@ pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenT Some(v) => v }; - let e = match env::var(&var[]) { + let e = match env::var(&var[..]) { Err(..) => { cx.expr_path(cx.path_all(sp, true, @@ -56,7 +56,7 @@ pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenT cx.ident_of("Some")), vec!(cx.expr_str(sp, token::intern_and_get_ident( - &s[])))) + &s[..])))) } }; MacExpr::new(e) @@ -101,7 +101,7 @@ pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) } } - let e = match env::var(&var[]) { + let e = match env::var(&var[..]) { Err(_) => { cx.span_err(sp, &msg); cx.expr_usize(sp, 0) diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 6b7cecee815..d4dda7390a5 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -405,7 +405,7 @@ fn expand_mac_invoc(mac: ast::Mac, span: codemap::Span, }, }); let fm = fresh_mark(); - let marked_before = mark_tts(&tts[], fm); + let marked_before = mark_tts(&tts[..], fm); // The span that we pass to the expanders we want to // be the root of the call stack. That's the most @@ -416,7 +416,7 @@ fn expand_mac_invoc(mac: ast::Mac, span: codemap::Span, let opt_parsed = { let expanded = expandfun.expand(fld.cx, mac_span, - &marked_before[]); + &marked_before[..]); parse_thunk(expanded) }; let parsed = match opt_parsed { @@ -425,7 +425,7 @@ fn expand_mac_invoc(mac: ast::Mac, span: codemap::Span, fld.cx.span_err( pth.span, &format!("non-expression macro in expression position: {}", - &extnamestr[] + &extnamestr[..] 
)[]); return None; } @@ -633,8 +633,8 @@ pub fn expand_item_mac(it: P, } }); // mark before expansion: - let marked_before = mark_tts(&tts[], fm); - expander.expand(fld.cx, it.span, &marked_before[]) + let marked_before = mark_tts(&tts[..], fm); + expander.expand(fld.cx, it.span, &marked_before[..]) } IdentTT(ref expander, span) => { if it.ident.name == parse::token::special_idents::invalid.name { @@ -652,7 +652,7 @@ pub fn expand_item_mac(it: P, } }); // mark before expansion: - let marked_tts = mark_tts(&tts[], fm); + let marked_tts = mark_tts(&tts[..], fm); expander.expand(fld.cx, it.span, it.ident, marked_tts) } MacroRulesTT => { @@ -971,11 +971,11 @@ fn expand_pat(p: P, fld: &mut MacroExpander) -> P { }); let fm = fresh_mark(); - let marked_before = mark_tts(&tts[], fm); + let marked_before = mark_tts(&tts[..], fm); let mac_span = fld.cx.original_span(); let expanded = match expander.expand(fld.cx, mac_span, - &marked_before[]).make_pat() { + &marked_before[..]).make_pat() { Some(e) => e, None => { fld.cx.span_err( @@ -1128,7 +1128,7 @@ fn expand_annotatable(a: Annotatable, if valid_ident { fld.cx.mod_push(it.ident); } - let macro_use = contains_macro_use(fld, &new_attrs[]); + let macro_use = contains_macro_use(fld, &new_attrs[..]); let result = with_exts_frame!(fld.cx.syntax_env, macro_use, noop_fold_item(it, fld)); @@ -1508,7 +1508,7 @@ impl Folder for Marker { node: match node { MacInvocTT(path, tts, ctxt) => { MacInvocTT(self.fold_path(path), - self.fold_tts(&tts[]), + self.fold_tts(&tts[..]), mtwt::apply_mark(self.mark, ctxt)) } }, @@ -1914,7 +1914,7 @@ mod test { .collect(); println!("varref #{}: {:?}, resolves to {}",idx, varref_idents, varref_name); let string = token::get_ident(final_varref_ident); - println!("varref's first segment's string: \"{}\"", &string[]); + println!("varref's first segment's string: \"{}\"", &string[..]); println!("binding #{}: {}, resolves to {}", binding_idx, bindings[binding_idx], binding_name); mtwt::with_sctable(|x| mtwt::display_sctable(x)); @@ -1967,10 +1967,10 @@ foo_module!(); let cxbinds: Vec<&ast::Ident> = bindings.iter().filter(|b| { let ident = token::get_ident(**b); - let string = &ident[]; + let string = &ident[..]; "xx" == string }).collect(); - let cxbinds: &[&ast::Ident] = &cxbinds[]; + let cxbinds: &[&ast::Ident] = &cxbinds[..]; let cxbind = match cxbinds { [b] => b, _ => panic!("expected just one binding for ext_cx") diff --git a/src/libsyntax/ext/format.rs b/src/libsyntax/ext/format.rs index 170a455a913..e17329d7d33 100644 --- a/src/libsyntax/ext/format.rs +++ b/src/libsyntax/ext/format.rs @@ -118,7 +118,7 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) } }; let interned_name = token::get_ident(ident); - let name = &interned_name[]; + let name = &interned_name[..]; p.expect(&token::Eq); let e = p.parse_expr(); @@ -218,7 +218,7 @@ impl<'a, 'b> Context<'a, 'b> { let msg = format!("invalid reference to argument `{}` ({})", arg, self.describe_num_args()); - self.ecx.span_err(self.fmtsp, &msg[]); + self.ecx.span_err(self.fmtsp, &msg[..]); return; } { @@ -238,7 +238,7 @@ impl<'a, 'b> Context<'a, 'b> { Some(e) => e.span, None => { let msg = format!("there is no argument named `{}`", name); - self.ecx.span_err(self.fmtsp, &msg[]); + self.ecx.span_err(self.fmtsp, &msg[..]); return; } }; @@ -587,7 +587,7 @@ impl<'a, 'b> Context<'a, 'b> { -> P { let trait_ = match *ty { Known(ref tyname) => { - match &tyname[] { + match &tyname[..] { "" => "Display", "?" 
=> "Debug", "e" => "LowerExp", diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index 67990895d07..2c7bf713aad 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -668,7 +668,7 @@ fn mk_tt(cx: &ExtCtxt, tt: &ast::TokenTree) -> Vec> { for i in 0..tt.len() { seq.push(tt.get_tt(i)); } - mk_tts(cx, &seq[]) + mk_tts(cx, &seq[..]) } ast::TtToken(sp, ref tok) => { let e_sp = cx.expr_ident(sp, id_ext("_sp")); @@ -757,7 +757,7 @@ fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree]) let stmt_let_tt = cx.stmt_let(sp, true, id_ext("tt"), cx.expr_vec_ng(sp)); let mut vector = vec!(stmt_let_sp, stmt_let_tt); - vector.extend(mk_tts(cx, &tts[]).into_iter()); + vector.extend(mk_tts(cx, &tts[..]).into_iter()); let block = cx.expr_block( cx.block_all(sp, vector, diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs index 7a3a3562bdf..2312f6b633d 100644 --- a/src/libsyntax/ext/source_util.rs +++ b/src/libsyntax/ext/source_util.rs @@ -65,7 +65,7 @@ pub fn expand_stringify(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> Box { let s = pprust::tts_to_string(tts); base::MacExpr::new(cx.expr_str(sp, - token::intern_and_get_ident(&s[]))) + token::intern_and_get_ident(&s[..]))) } pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) @@ -78,7 +78,7 @@ pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) .connect("::"); base::MacExpr::new(cx.expr_str( sp, - token::intern_and_get_ident(&string[]))) + token::intern_and_get_ident(&string[..]))) } /// include! : parse the given file as an expr @@ -151,7 +151,7 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) // Add this input file to the code map to make it available as // dependency information let filename = format!("{}", file.display()); - let interned = token::intern_and_get_ident(&src[]); + let interned = token::intern_and_get_ident(&src[..]); cx.codemap().new_filemap(filename, src); base::MacExpr::new(cx.expr_str(sp, interned)) diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index d752e34c112..d5fa791b32b 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -165,7 +165,7 @@ pub fn count_names(ms: &[TokenTree]) -> usize { pub fn initial_matcher_pos(ms: Rc>, sep: Option, lo: BytePos) -> Box { - let match_idx_hi = count_names(&ms[]); + let match_idx_hi = count_names(&ms[..]); let matches: Vec<_> = (0..match_idx_hi).map(|_| Vec::new()).collect(); box MatcherPos { stack: vec![], @@ -254,13 +254,13 @@ pub fn parse_or_else(sess: &ParseSess, rdr: TtReader, ms: Vec ) -> HashMap> { - match parse(sess, cfg, rdr, &ms[]) { + match parse(sess, cfg, rdr, &ms[..]) { Success(m) => m, Failure(sp, str) => { - sess.span_diagnostic.span_fatal(sp, &str[]) + sess.span_diagnostic.span_fatal(sp, &str[..]) } Error(sp, str) => { - sess.span_diagnostic.span_fatal(sp, &str[]) + sess.span_diagnostic.span_fatal(sp, &str[..]) } } } @@ -447,7 +447,7 @@ pub fn parse(sess: &ParseSess, for dv in &mut (&mut eof_eis[0]).matches { v.push(dv.pop().unwrap()); } - return Success(nameize(sess, ms, &v[])); + return Success(nameize(sess, ms, &v[..])); } else if eof_eis.len() > 1 { return Error(sp, "ambiguity: multiple successful parses".to_string()); } else { @@ -533,7 +533,7 @@ pub fn parse_nt(p: &mut Parser, sp: Span, name: &str) -> Nonterminal { _ => { let token_str = pprust::token_to_string(&p.token); p.fatal(&format!("expected ident, found {}", - &token_str[])[]) + 
&token_str[..])[]) } }, "path" => { diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index f322cf8bad0..8d9a9d9d406 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -50,7 +50,7 @@ impl<'a> ParserAnyMacro<'a> { following", token_str); let span = parser.span; - parser.span_err(span, &msg[]); + parser.span_err(span, &msg[..]); } } } @@ -192,13 +192,13 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, best_fail_spot = sp; best_fail_msg = (*msg).clone(); }, - Error(sp, ref msg) => cx.span_fatal(sp, &msg[]) + Error(sp, ref msg) => cx.span_fatal(sp, &msg[..]) } } _ => cx.bug("non-matcher found in parsed lhses") } } - cx.span_fatal(best_fail_spot, &best_fail_msg[]); + cx.span_fatal(best_fail_spot, &best_fail_msg[..]); } // Note that macro-by-example's input is also matched against a token tree: diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index 83234e3b7a5..17016f3ac11 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -255,7 +255,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { } LisContradiction(ref msg) => { // FIXME #2887 blame macro invoker instead - r.sp_diag.span_fatal(sp.clone(), &msg[]); + r.sp_diag.span_fatal(sp.clone(), &msg[..]); } LisConstraint(len, _) => { if len == 0 { diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs index 3bebba15a57..a57d675facc 100644 --- a/src/libsyntax/feature_gate.rs +++ b/src/libsyntax/feature_gate.rs @@ -356,7 +356,7 @@ pub fn emit_feature_err(diag: &SpanHandler, feature: &str, span: Span, explain: diag.span_err(span, explain); diag.span_help(span, &format!("add #![feature({})] to the \ crate attributes to enable", - feature)[]); + feature)); } pub fn emit_feature_warn(diag: &SpanHandler, feature: &str, span: Span, explain: &str) { @@ -364,7 +364,7 @@ pub fn emit_feature_warn(diag: &SpanHandler, feature: &str, span: Span, explain: if diag.handler.can_emit_warnings { diag.span_help(span, &format!("add #![feature({})] to the \ crate attributes to silence this warning", - feature)[]); + feature)); } } @@ -438,7 +438,7 @@ impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> { fn visit_item(&mut self, i: &ast::Item) { match i.node { ast::ItemExternCrate(_) => { - if attr::contains_name(&i.attrs[], "macro_reexport") { + if attr::contains_name(&i.attrs[..], "macro_reexport") { self.gate_feature("macro_reexport", i.span, "macros reexports are experimental \ and possibly buggy"); @@ -446,7 +446,7 @@ impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> { } ast::ItemForeignMod(ref foreign_module) => { - if attr::contains_name(&i.attrs[], "link_args") { + if attr::contains_name(&i.attrs[..], "link_args") { self.gate_feature("link_args", i.span, "the `link_args` attribute is not portable \ across platforms, it is recommended to \ @@ -460,17 +460,17 @@ impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> { } ast::ItemFn(..) 
=> { - if attr::contains_name(&i.attrs[], "plugin_registrar") { + if attr::contains_name(&i.attrs[..], "plugin_registrar") { self.gate_feature("plugin_registrar", i.span, "compiler plugins are experimental and possibly buggy"); } - if attr::contains_name(&i.attrs[], "start") { + if attr::contains_name(&i.attrs[..], "start") { self.gate_feature("start", i.span, "a #[start] function is an experimental \ feature whose signature may change \ over time"); } - if attr::contains_name(&i.attrs[], "main") { + if attr::contains_name(&i.attrs[..], "main") { self.gate_feature("main", i.span, "declaration of a nonstandard #[main] \ function may change over time, for now \ @@ -479,7 +479,7 @@ impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> { } ast::ItemStruct(..) => { - if attr::contains_name(&i.attrs[], "simd") { + if attr::contains_name(&i.attrs[..], "simd") { self.gate_feature("simd", i.span, "SIMD types are experimental and possibly buggy"); } @@ -505,7 +505,7 @@ impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> { removed in the future"); } - if attr::contains_name(&i.attrs[], + if attr::contains_name(&i.attrs[..], "old_orphan_check") { self.gate_feature( "old_orphan_check", @@ -513,7 +513,7 @@ impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> { "the new orphan check rules will eventually be strictly enforced"); } - if attr::contains_name(&i.attrs[], + if attr::contains_name(&i.attrs[..], "old_impl_check") { self.gate_feature("old_impl_check", i.span, @@ -528,7 +528,7 @@ impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> { } fn visit_foreign_item(&mut self, i: &ast::ForeignItem) { - if attr::contains_name(&i.attrs[], "linkage") { + if attr::contains_name(&i.attrs, "linkage") { self.gate_feature("linkage", i.span, "the `linkage` attribute is experimental \ and not portable across platforms") diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs index b17fc7fe82e..88f7b33ad24 100644 --- a/src/libsyntax/parse/lexer/comments.rs +++ b/src/libsyntax/parse/lexer/comments.rs @@ -187,7 +187,7 @@ fn read_line_comments(rdr: &mut StringReader, code_to_the_left: bool, let line = rdr.read_one_line_comment(); debug!("{}", line); // Doc comments are not put in comments. 
- if is_doc_comment(&line[]) { + if is_doc_comment(&line[..]) { break; } lines.push(line); @@ -224,7 +224,7 @@ fn all_whitespace(s: &str, col: CharPos) -> Option { fn trim_whitespace_prefix_and_push_line(lines: &mut Vec , s: String, col: CharPos) { let len = s.len(); - let s1 = match all_whitespace(&s[], col) { + let s1 = match all_whitespace(&s[..], col) { Some(col) => { if col < len { (&s[col..len]).to_string() @@ -261,7 +261,7 @@ fn read_block_comment(rdr: &mut StringReader, rdr.bump(); rdr.bump(); } - if is_block_doc_comment(&curr_line[]) { + if is_block_doc_comment(&curr_line[..]) { return } assert!(!curr_line.contains_char('\n')); diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index 38ba0b38df5..cca641a7852 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -196,7 +196,7 @@ impl<'a> StringReader<'a> { let mut m = m.to_string(); m.push_str(": "); for c in c.escape_default() { m.push(c) } - self.fatal_span_(from_pos, to_pos, &m[]); + self.fatal_span_(from_pos, to_pos, &m[..]); } /// Report a lexical error spanning [`from_pos`, `to_pos`), appending an @@ -205,7 +205,7 @@ impl<'a> StringReader<'a> { let mut m = m.to_string(); m.push_str(": "); for c in c.escape_default() { m.push(c) } - self.err_span_(from_pos, to_pos, &m[]); + self.err_span_(from_pos, to_pos, &m[..]); } /// Report a lexical error spanning [`from_pos`, `to_pos`), appending the @@ -215,7 +215,7 @@ impl<'a> StringReader<'a> { let from = self.byte_offset(from_pos).to_usize(); let to = self.byte_offset(to_pos).to_usize(); m.push_str(&self.filemap.src[from..to]); - self.fatal_span_(from_pos, to_pos, &m[]); + self.fatal_span_(from_pos, to_pos, &m[..]); } /// Advance peek_tok and peek_span to refer to the next token, and @@ -556,7 +556,7 @@ impl<'a> StringReader<'a> { self.translate_crlf(start_bpos, string, "bare CR not allowed in block doc-comment") } else { string.into_cow() }; - token::DocComment(token::intern(&string[])) + token::DocComment(token::intern(&string[..])) } else { token::Comment }; diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 5f4cf9af5ee..b5a2b0425c6 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -258,7 +258,7 @@ pub fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option) unreachable!() } }; - match str::from_utf8(&bytes[]).ok() { + match str::from_utf8(&bytes[..]).ok() { Some(s) => { return string_to_filemap(sess, s.to_string(), path.as_str().unwrap().to_string()) @@ -398,7 +398,7 @@ pub fn char_lit(lit: &str) -> (char, isize) { } let msg = format!("lexer should have rejected a bad character escape {}", lit); - let msg2 = &msg[]; + let msg2 = &msg[..]; fn esc(len: usize, lit: &str) -> Option<(char, isize)> { num::from_str_radix(&lit[2..len], 16).ok() @@ -662,7 +662,7 @@ pub fn integer_lit(s: &str, suffix: Option<&str>, sd: &SpanHandler, sp: Span) -> // s can only be ascii, byte indexing is fine let s2 = s.chars().filter(|&c| c != '_').collect::(); - let mut s = &s2[]; + let mut s = &s2[..]; debug!("integer_lit: {}, {:?}", s, suffix); @@ -817,7 +817,7 @@ mod test { #[test] fn string_to_tts_macro () { let tts = string_to_tts("macro_rules! 
zip (($a)=>($a))".to_string()); - let tts: &[ast::TokenTree] = &tts[]; + let tts: &[ast::TokenTree] = &tts[..]; match tts { [ast::TtToken(_, token::Ident(name_macro_rules, token::Plain)), ast::TtToken(_, token::Not), @@ -1112,24 +1112,24 @@ mod test { let use_s = "use foo::bar::baz;"; let vitem = string_to_item(use_s.to_string()).unwrap(); let vitem_s = item_to_string(&*vitem); - assert_eq!(&vitem_s[], use_s); + assert_eq!(&vitem_s[..], use_s); let use_s = "use foo::bar as baz;"; let vitem = string_to_item(use_s.to_string()).unwrap(); let vitem_s = item_to_string(&*vitem); - assert_eq!(&vitem_s[], use_s); + assert_eq!(&vitem_s[..], use_s); } #[test] fn parse_extern_crate() { let ex_s = "extern crate foo;"; let vitem = string_to_item(ex_s.to_string()).unwrap(); let vitem_s = item_to_string(&*vitem); - assert_eq!(&vitem_s[], ex_s); + assert_eq!(&vitem_s[..], ex_s); let ex_s = "extern crate \"foo\" as bar;"; let vitem = string_to_item(ex_s.to_string()).unwrap(); let vitem_s = item_to_string(&*vitem); - assert_eq!(&vitem_s[], ex_s); + assert_eq!(&vitem_s[..], ex_s); } fn get_spans_of_pat_idents(src: &str) -> Vec { @@ -1201,19 +1201,19 @@ mod test { let source = "/// doc comment\r\nfn foo() {}".to_string(); let item = parse_item_from_source_str(name.clone(), source, Vec::new(), &sess).unwrap(); let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap(); - assert_eq!(&doc[], "/// doc comment"); + assert_eq!(&doc[..], "/// doc comment"); let source = "/// doc comment\r\n/// line 2\r\nfn foo() {}".to_string(); let item = parse_item_from_source_str(name.clone(), source, Vec::new(), &sess).unwrap(); let docs = item.attrs.iter().filter(|a| &a.name()[] == "doc") .map(|a| a.value_str().unwrap().to_string()).collect::>(); let b: &[_] = &["/// doc comment".to_string(), "/// line 2".to_string()]; - assert_eq!(&docs[], b); + assert_eq!(&docs[..], b); let source = "/** doc comment\r\n * with CRLF */\r\nfn foo() {}".to_string(); let item = parse_item_from_source_str(name, source, Vec::new(), &sess).unwrap(); let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap(); - assert_eq!(&doc[], "/** doc comment\n * with CRLF */"); + assert_eq!(&doc[..], "/** doc comment\n * with CRLF */"); } #[test] @@ -1233,7 +1233,7 @@ mod test { let span = tts.iter().rev().next().unwrap().get_span(); match sess.span_diagnostic.cm.span_to_snippet(span) { - Ok(s) => assert_eq!(&s[], "{ body }"), + Ok(s) => assert_eq!(&s[..], "{ body }"), Err(_) => panic!("could not get snippet"), } } diff --git a/src/libsyntax/parse/obsolete.rs b/src/libsyntax/parse/obsolete.rs index c6d852627c6..8480772ce6c 100644 --- a/src/libsyntax/parse/obsolete.rs +++ b/src/libsyntax/parse/obsolete.rs @@ -41,7 +41,8 @@ pub trait ParserObsoleteMethods { sp: Span, kind: ObsoleteSyntax, kind_str: &str, - desc: &str); + desc: &str, + error: bool); fn is_obsolete_ident(&mut self, ident: &str) -> bool; fn eat_obsolete_ident(&mut self, ident: &str) -> bool; } @@ -68,17 +69,17 @@ impl<'a> ParserObsoleteMethods for parser::Parser<'a> { ), ObsoleteSyntax::ClosureType => ( "`|usize| -> bool` closure type", - "use unboxed closures instead, no type annotation needed" + "use unboxed closures instead, no type annotation needed", true, ), ObsoleteSyntax::ClosureKind => ( "`:`, `&mut:`, or `&:`", - "rely on inference instead" + "rely on inference instead", true, ), ObsoleteSyntax::Sized => ( "`Sized? 
T` for removing the `Sized` bound", - "write `T: ?Sized` instead" + "write `T: ?Sized` instead", true, ), ObsoleteSyntax::EmptyIndex => ( diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 7d3a7d60101..6be16bbf688 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -362,7 +362,7 @@ impl<'a> Parser<'a> { let token_str = Parser::token_to_string(t); let last_span = self.last_span; self.span_fatal(last_span, &format!("unexpected token: `{}`", - token_str)[]); + token_str)); } pub fn unexpected(&mut self) -> ! { @@ -381,7 +381,7 @@ impl<'a> Parser<'a> { let this_token_str = self.this_token_to_string(); self.fatal(&format!("expected `{}`, found `{}`", token_str, - this_token_str)[]) + this_token_str)) } } else { self.expect_one_of(slice::ref_slice(t), &[]); @@ -422,7 +422,7 @@ impl<'a> Parser<'a> { expected.push_all(&*self.expected_tokens); expected.sort_by(|a, b| a.to_string().cmp(&b.to_string())); expected.dedup(); - let expect = tokens_to_string(&expected[]); + let expect = tokens_to_string(&expected[..]); let actual = self.this_token_to_string(); self.fatal( &(if expected.len() > 1 { @@ -436,7 +436,7 @@ impl<'a> Parser<'a> { (format!("expected {}, found `{}`", expect, actual)) - }[]) + })[..] ) } } @@ -469,7 +469,7 @@ impl<'a> Parser<'a> { // might be unit-struct construction; check for recoverableinput error. let mut expected = edible.iter().map(|x| x.clone()).collect::>(); expected.push_all(inedible); - self.check_for_erroneous_unit_struct_expecting(&expected[]); + self.check_for_erroneous_unit_struct_expecting(&expected[..]); } self.expect_one_of(edible, inedible) } @@ -486,9 +486,9 @@ impl<'a> Parser<'a> { .as_ref() .map_or(false, |t| t.is_ident() || t.is_path()) { let mut expected = edible.iter().map(|x| x.clone()).collect::>(); - expected.push_all(&inedible[]); + expected.push_all(&inedible[..]); self.check_for_erroneous_unit_struct_expecting( - &expected[]); + &expected[..]); } self.expect_one_of(edible, inedible) } @@ -511,7 +511,7 @@ impl<'a> Parser<'a> { _ => { let token_str = self.this_token_to_string(); self.fatal(&format!("expected ident, found `{}`", - token_str)[]) + token_str)) } } } @@ -599,7 +599,7 @@ impl<'a> Parser<'a> { let span = self.span; self.span_err(span, &format!("expected identifier, found keyword `{}`", - token_str)[]); + token_str)); } } @@ -608,7 +608,7 @@ impl<'a> Parser<'a> { if self.token.is_reserved_keyword() { let token_str = self.this_token_to_string(); self.fatal(&format!("`{}` is a reserved keyword", - token_str)[]) + token_str)) } } @@ -734,7 +734,7 @@ impl<'a> Parser<'a> { let this_token_str = self.this_token_to_string(); self.fatal(&format!("expected `{}`, found `{}`", gt_str, - this_token_str)[]) + this_token_str)) } } } @@ -1364,7 +1364,7 @@ impl<'a> Parser<'a> { let (inner_attrs, body) = p.parse_inner_attrs_and_block(); let mut attrs = attrs; - attrs.push_all(&inner_attrs[]); + attrs.push_all(&inner_attrs[..]); ProvidedMethod(P(ast::Method { attrs: attrs, id: ast::DUMMY_NODE_ID, @@ -1383,7 +1383,7 @@ impl<'a> Parser<'a> { _ => { let token_str = p.this_token_to_string(); p.fatal(&format!("expected `;` or `{{`, found `{}`", - token_str)[]) + token_str)[..]) } } } @@ -1551,7 +1551,7 @@ impl<'a> Parser<'a> { } else { let this_token_str = self.this_token_to_string(); let msg = format!("expected type, found `{}`", this_token_str); - self.fatal(&msg[]); + self.fatal(&msg[..]); }; let sp = mk_sp(lo, self.last_span.hi); @@ -1699,7 +1699,7 @@ impl<'a> Parser<'a> { token::StrRaw(s, n) => { (true, 
LitStr( - token::intern_and_get_ident(&parse::raw_str_lit(s.as_str())[]), + token::intern_and_get_ident(&parse::raw_str_lit(s.as_str())), ast::RawStr(n))) } token::Binary(i) => @@ -1944,7 +1944,7 @@ impl<'a> Parser<'a> { }; } _ => { - self.fatal(&format!("expected a lifetime name")[]); + self.fatal(&format!("expected a lifetime name")); } } } @@ -1982,7 +1982,7 @@ impl<'a> Parser<'a> { let msg = format!("expected `,` or `>` after lifetime \ name, found `{}`", this_token_str); - self.fatal(&msg[]); + self.fatal(&msg[..]); } } } @@ -2497,7 +2497,7 @@ impl<'a> Parser<'a> { let last_span = self.last_span; let fstr = n.as_str(); self.span_err(last_span, - &format!("unexpected token: `{}`", n.as_str())[]); + &format!("unexpected token: `{}`", n.as_str())); if fstr.chars().all(|x| "0123456789.".contains_char(x)) { let float = match fstr.parse::().ok() { Some(f) => f, @@ -2506,7 +2506,7 @@ impl<'a> Parser<'a> { self.span_help(last_span, &format!("try parenthesizing the first index; e.g., `(foo.{}){}`", float.trunc() as usize, - &float.fract().to_string()[1..])[]); + &float.fract().to_string()[1..])); } self.abort_if_errors(); @@ -2638,7 +2638,7 @@ impl<'a> Parser<'a> { match self.token { token::SubstNt(name, _) => self.fatal(&format!("unknown macro variable `{}`", - token::get_ident(name))[]), + token::get_ident(name))), _ => {} } } @@ -2700,7 +2700,7 @@ impl<'a> Parser<'a> { }; let token_str = p.this_token_to_string(); p.fatal(&format!("incorrect close delimiter: `{}`", - token_str)[]) + token_str)) }, /* we ought to allow different depths of unquotation */ token::Dollar | token::SubstNt(..) if p.quote_depth > 0 => { @@ -2821,7 +2821,7 @@ impl<'a> Parser<'a> { let this_token_to_string = self.this_token_to_string(); self.span_err(span, &format!("expected expression, found `{}`", - this_token_to_string)[]); + this_token_to_string)); let box_span = mk_sp(lo, self.last_span.hi); self.span_help(box_span, "perhaps you meant `box() (foo)` instead?"); @@ -3274,7 +3274,7 @@ impl<'a> Parser<'a> { if self.token != token::CloseDelim(token::Brace) { let token_str = self.this_token_to_string(); self.fatal(&format!("expected `{}`, found `{}`", "}", - token_str)[]) + token_str)) } etc = true; break; @@ -3575,7 +3575,7 @@ impl<'a> Parser<'a> { let span = self.span; let tok_str = self.this_token_to_string(); self.span_fatal(span, - &format!("expected identifier, found `{}`", tok_str)[]); + &format!("expected identifier, found `{}`", tok_str)); } let ident = self.parse_ident(); let last_span = self.last_span; @@ -3672,7 +3672,7 @@ impl<'a> Parser<'a> { let lo = self.span.lo; if self.check_keyword(keywords::Let) { - check_expected_item(self, &item_attrs[]); + check_expected_item(self, &item_attrs[..]); self.expect_keyword(keywords::Let); let decl = self.parse_let(); P(spanned(lo, decl.span.hi, StmtDecl(decl, ast::DUMMY_NODE_ID))) @@ -3681,7 +3681,7 @@ impl<'a> Parser<'a> { && self.look_ahead(1, |t| *t == token::Not) { // it's a macro invocation: - check_expected_item(self, &item_attrs[]); + check_expected_item(self, &item_attrs[..]); // Potential trouble: if we allow macros with paths instead of // idents, we'd need to look ahead past the whole path here... 
@@ -3709,7 +3709,7 @@ impl<'a> Parser<'a> { let tok_str = self.this_token_to_string(); self.fatal(&format!("expected {}`(` or `{{`, found `{}`", ident_str, - tok_str)[]) + tok_str)) }, }; @@ -3757,7 +3757,7 @@ impl<'a> Parser<'a> { } } else { let found_attrs = !item_attrs.is_empty(); - let item_err = Parser::expected_item_err(&item_attrs[]); + let item_err = Parser::expected_item_err(&item_attrs[..]); match self.parse_item_(item_attrs, false) { Ok(i) => { let hi = i.span.hi; @@ -3794,7 +3794,7 @@ impl<'a> Parser<'a> { let sp = self.span; let tok = self.this_token_to_string(); self.span_fatal_help(sp, - &format!("expected `{{`, found `{}`", tok)[], + &format!("expected `{{`, found `{}`", tok), "place this code inside a block"); } @@ -3829,13 +3829,13 @@ impl<'a> Parser<'a> { while self.token != token::CloseDelim(token::Brace) { // parsing items even when they're not allowed lets us give // better error messages and recover more gracefully. - attributes_box.push_all(&self.parse_outer_attributes()[]); + attributes_box.push_all(&self.parse_outer_attributes()); match self.token { token::Semi => { if !attributes_box.is_empty() { let last_span = self.last_span; self.span_err(last_span, - Parser::expected_item_err(&attributes_box[])); + Parser::expected_item_err(&attributes_box[..])); attributes_box = Vec::new(); } self.bump(); // empty @@ -3927,7 +3927,7 @@ impl<'a> Parser<'a> { if !attributes_box.is_empty() { let last_span = self.last_span; self.span_err(last_span, - Parser::expected_item_err(&attributes_box[])); + Parser::expected_item_err(&attributes_box[..])); } let hi = self.span.hi; @@ -4382,7 +4382,7 @@ impl<'a> Parser<'a> { _ => { let token_str = self.this_token_to_string(); self.fatal(&format!("expected `self`, found `{}`", - token_str)[]) + token_str)) } } } @@ -4711,7 +4711,7 @@ impl<'a> Parser<'a> { let (inner_attrs, body) = self.parse_inner_attrs_and_block(); let body_span = body.span; let mut new_attrs = attrs; - new_attrs.push_all(&inner_attrs[]); + new_attrs.push_all(&inner_attrs[..]); (ast::MethDecl(ident, generics, abi, @@ -5123,7 +5123,7 @@ impl<'a> Parser<'a> { // We parsed attributes for the first item but didn't find it let last_span = self.last_span; self.span_err(last_span, - Parser::expected_item_err(&attrs[])); + Parser::expected_item_err(&attrs[..])); } ast::Mod { @@ -5202,8 +5202,8 @@ impl<'a> Parser<'a> { let mod_name = mod_string.to_string(); let default_path_str = format!("{}.rs", mod_name); let secondary_path_str = format!("{}/mod.rs", mod_name); - let default_path = dir_path.join(&default_path_str[]); - let secondary_path = dir_path.join(&secondary_path_str[]); + let default_path = dir_path.join(&default_path_str[..]); + let secondary_path = dir_path.join(&secondary_path_str[..]); let default_exists = default_path.exists(); let secondary_exists = secondary_path.exists(); @@ -5275,7 +5275,7 @@ impl<'a> Parser<'a> { err.push_str(" -> "); } err.push_str(&path.display().as_cow()[]); - self.span_fatal(id_sp, &err[]); + self.span_fatal(id_sp, &err[..]); } None => () } @@ -5771,7 +5771,7 @@ impl<'a> Parser<'a> { if self.eat_keyword(keywords::Mod) { // MODULE ITEM let (ident, item_, extra_attrs) = - self.parse_item_mod(&attrs[]); + self.parse_item_mod(&attrs[..]); let last_span = self.last_span; let item = self.mk_item(lo, last_span.hi, @@ -6077,7 +6077,7 @@ impl<'a> Parser<'a> { if !attrs.is_empty() { let last_span = self.last_span; self.span_err(last_span, - Parser::expected_item_err(&attrs[])); + Parser::expected_item_err(&attrs[..])); } foreign_items diff --git 
a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 0747a97fa37..433c013591c 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -482,7 +482,7 @@ macro_rules! declare_special_idents_and_keywords {( $(init_vec.push($si_str);)* $(init_vec.push($sk_str);)* $(init_vec.push($rk_str);)* - interner::StrInterner::prefill(&init_vec[]) + interner::StrInterner::prefill(&init_vec[..]) } }} @@ -644,7 +644,7 @@ impl BytesContainer for InternedString { // of `BytesContainer`, which is itself a workaround for the lack of // DST. unsafe { - let this = &self[]; + let this = &self[..]; mem::transmute::<&[u8],&[u8]>(this.container_as_bytes()) } } diff --git a/src/libsyntax/print/pp.rs b/src/libsyntax/print/pp.rs index 707b3c72ecd..c72038935d8 100644 --- a/src/libsyntax/print/pp.rs +++ b/src/libsyntax/print/pp.rs @@ -607,7 +607,7 @@ impl Printer { assert_eq!(l, len); // assert!(l <= space); self.space -= len; - self.print_str(&s[]) + self.print_str(&s[..]) } Token::Eof => { // Eof should never get here. diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 4b021f2434f..9683d448607 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -602,7 +602,7 @@ impl<'a> State<'a> { pub fn synth_comment(&mut self, text: String) -> IoResult<()> { try!(word(&mut self.s, "/*")); try!(space(&mut self.s)); - try!(word(&mut self.s, &text[])); + try!(word(&mut self.s, &text[..])); try!(space(&mut self.s)); word(&mut self.s, "*/") } @@ -701,7 +701,7 @@ impl<'a> State<'a> { } ast::TyTup(ref elts) => { try!(self.popen()); - try!(self.commasep(Inconsistent, &elts[], + try!(self.commasep(Inconsistent, &elts[..], |s, ty| s.print_type(&**ty))); if elts.len() == 1 { try!(word(&mut self.s, ",")); @@ -734,10 +734,10 @@ impl<'a> State<'a> { } ast::TyObjectSum(ref ty, ref bounds) => { try!(self.print_type(&**ty)); - try!(self.print_bounds("+", &bounds[])); + try!(self.print_bounds("+", &bounds[..])); } ast::TyPolyTraitRef(ref bounds) => { - try!(self.print_bounds("", &bounds[])); + try!(self.print_bounds("", &bounds[..])); } ast::TyQPath(ref qpath) => { try!(self.print_qpath(&**qpath, false)) @@ -994,7 +994,7 @@ impl<'a> State<'a> { real_bounds.push(b); } } - try!(self.print_bounds(":", &real_bounds[])); + try!(self.print_bounds(":", &real_bounds[..])); try!(self.print_where_clause(generics)); try!(word(&mut self.s, " ")); try!(self.bopen()); @@ -1012,7 +1012,7 @@ impl<'a> State<'a> { try!(self.print_ident(item.ident)); try!(self.cbox(indent_unit)); try!(self.popen()); - try!(self.print_tts(&tts[])); + try!(self.print_tts(&tts[..])); try!(self.pclose()); try!(word(&mut self.s, ";")); try!(self.end()); @@ -1210,7 +1210,7 @@ impl<'a> State<'a> { if !args.is_empty() { try!(self.popen()); try!(self.commasep(Consistent, - &args[], + &args[..], |s, arg| s.print_type(&*arg.ty))); try!(self.pclose()); } @@ -1290,7 +1290,7 @@ impl<'a> State<'a> { try!(word(&mut self.s, "! 
")); try!(self.cbox(indent_unit)); try!(self.popen()); - try!(self.print_tts(&tts[])); + try!(self.print_tts(&tts[..])); try!(self.pclose()); try!(word(&mut self.s, ";")); self.end() @@ -1552,7 +1552,7 @@ impl<'a> State<'a> { fn print_expr_vec(&mut self, exprs: &[P]) -> IoResult<()> { try!(self.ibox(indent_unit)); try!(word(&mut self.s, "[")); - try!(self.commasep_exprs(Inconsistent, &exprs[])); + try!(self.commasep_exprs(Inconsistent, &exprs[..])); try!(word(&mut self.s, "]")); self.end() } @@ -1578,7 +1578,7 @@ impl<'a> State<'a> { try!(word(&mut self.s, "{")); try!(self.commasep_cmnt( Consistent, - &fields[], + &fields[..], |s, field| { try!(s.ibox(indent_unit)); try!(s.print_ident(field.ident.node)); @@ -1607,7 +1607,7 @@ impl<'a> State<'a> { fn print_expr_tup(&mut self, exprs: &[P]) -> IoResult<()> { try!(self.popen()); - try!(self.commasep_exprs(Inconsistent, &exprs[])); + try!(self.commasep_exprs(Inconsistent, &exprs[..])); if exprs.len() == 1 { try!(word(&mut self.s, ",")); } @@ -1672,22 +1672,22 @@ impl<'a> State<'a> { try!(self.print_expr_box(place, &**expr)); } ast::ExprVec(ref exprs) => { - try!(self.print_expr_vec(&exprs[])); + try!(self.print_expr_vec(&exprs[..])); } ast::ExprRepeat(ref element, ref count) => { try!(self.print_expr_repeat(&**element, &**count)); } ast::ExprStruct(ref path, ref fields, ref wth) => { - try!(self.print_expr_struct(path, &fields[], wth)); + try!(self.print_expr_struct(path, &fields[..], wth)); } ast::ExprTup(ref exprs) => { - try!(self.print_expr_tup(&exprs[])); + try!(self.print_expr_tup(&exprs[..])); } ast::ExprCall(ref func, ref args) => { - try!(self.print_expr_call(&**func, &args[])); + try!(self.print_expr_call(&**func, &args[..])); } ast::ExprMethodCall(ident, ref tys, ref args) => { - try!(self.print_expr_method_call(ident, &tys[], &args[])); + try!(self.print_expr_method_call(ident, &tys[..], &args[..])); } ast::ExprBinary(op, ref lhs, ref rhs) => { try!(self.print_expr_binary(op, &**lhs, &**rhs)); @@ -1977,7 +1977,7 @@ impl<'a> State<'a> { pub fn print_ident(&mut self, ident: ast::Ident) -> IoResult<()> { if self.encode_idents_with_hygiene { let encoded = ident.encode_with_hygiene(); - try!(word(&mut self.s, &encoded[])) + try!(word(&mut self.s, &encoded[..])) } else { try!(word(&mut self.s, &token::get_ident(ident))) } @@ -2151,7 +2151,7 @@ impl<'a> State<'a> { Some(ref args) => { if !args.is_empty() { try!(self.popen()); - try!(self.commasep(Inconsistent, &args[], + try!(self.commasep(Inconsistent, &args[..], |s, p| s.print_pat(&**p))); try!(self.pclose()); } @@ -2163,7 +2163,7 @@ impl<'a> State<'a> { try!(self.nbsp()); try!(self.word_space("{")); try!(self.commasep_cmnt( - Consistent, &fields[], + Consistent, &fields[..], |s, f| { try!(s.cbox(indent_unit)); if !f.node.is_shorthand { @@ -2184,7 +2184,7 @@ impl<'a> State<'a> { ast::PatTup(ref elts) => { try!(self.popen()); try!(self.commasep(Inconsistent, - &elts[], + &elts[..], |s, p| s.print_pat(&**p))); if elts.len() == 1 { try!(word(&mut self.s, ",")); @@ -2212,7 +2212,7 @@ impl<'a> State<'a> { ast::PatVec(ref before, ref slice, ref after) => { try!(word(&mut self.s, "[")); try!(self.commasep(Inconsistent, - &before[], + &before[..], |s, p| s.print_pat(&**p))); if let Some(ref p) = *slice { if !before.is_empty() { try!(self.word_space(",")); } @@ -2226,7 +2226,7 @@ impl<'a> State<'a> { if !after.is_empty() { try!(self.word_space(",")); } } try!(self.commasep(Inconsistent, - &after[], + &after[..], |s, p| s.print_pat(&**p))); try!(word(&mut self.s, "]")); } @@ -2475,7 +2475,7 @@ 
impl<'a> State<'a> { ints.push(i); } - try!(self.commasep(Inconsistent, &ints[], |s, &idx| { + try!(self.commasep(Inconsistent, &ints[..], |s, &idx| { if idx < generics.lifetimes.len() { let lifetime = &generics.lifetimes[idx]; s.print_lifetime_def(lifetime) @@ -2562,7 +2562,7 @@ impl<'a> State<'a> { try!(word(&mut self.s, &name)); } ast::MetaNameValue(ref name, ref value) => { - try!(self.word_space(&name[])); + try!(self.word_space(&name[..])); try!(self.word_space("=")); try!(self.print_literal(value)); } @@ -2570,7 +2570,7 @@ impl<'a> State<'a> { try!(word(&mut self.s, &name)); try!(self.popen()); try!(self.commasep(Consistent, - &items[], + &items[..], |s, i| s.print_meta_item(&**i))); try!(self.pclose()); } @@ -2606,7 +2606,7 @@ impl<'a> State<'a> { try!(self.print_path(path, false)); try!(word(&mut self.s, "::{")); } - try!(self.commasep(Inconsistent, &idents[], |s, w| { + try!(self.commasep(Inconsistent, &idents[..], |s, w| { match w.node { ast::PathListIdent { name, .. } => { s.print_ident(name) @@ -2763,13 +2763,13 @@ impl<'a> State<'a> { let mut res = String::from_str("b'"); res.extend(ascii::escape_default(byte).map(|c| c as char)); res.push('\''); - word(&mut self.s, &res[]) + word(&mut self.s, &res[..]) } ast::LitChar(ch) => { let mut res = String::from_str("'"); res.extend(ch.escape_default()); res.push('\''); - word(&mut self.s, &res[]) + word(&mut self.s, &res[..]) } ast::LitInt(i, t) => { match t { @@ -2800,7 +2800,7 @@ impl<'a> State<'a> { &f, &ast_util::float_ty_to_string(t)[])[]) } - ast::LitFloatUnsuffixed(ref f) => word(&mut self.s, &f[]), + ast::LitFloatUnsuffixed(ref f) => word(&mut self.s, &f[..]), ast::LitBool(val) => { if val { word(&mut self.s, "true") } else { word(&mut self.s, "false") } } @@ -2860,7 +2860,7 @@ impl<'a> State<'a> { // Don't print empty lines because they will end up as trailing // whitespace if !line.is_empty() { - try!(word(&mut self.s, &line[])); + try!(word(&mut self.s, &line[..])); } try!(hardbreak(&mut self.s)); } @@ -2875,7 +2875,7 @@ impl<'a> State<'a> { try!(self.ibox(0)); for line in &cmnt.lines { if !line.is_empty() { - try!(word(&mut self.s, &line[])); + try!(word(&mut self.s, &line[..])); } try!(hardbreak(&mut self.s)); } @@ -2908,7 +2908,7 @@ impl<'a> State<'a> { string=st)) } }; - word(&mut self.s, &st[]) + word(&mut self.s, &st[..]) } pub fn next_comment(&mut self) -> Option { diff --git a/src/libsyntax/std_inject.rs b/src/libsyntax/std_inject.rs index 98c193c7e6b..8c2b9edfb22 100644 --- a/src/libsyntax/std_inject.rs +++ b/src/libsyntax/std_inject.rs @@ -54,7 +54,7 @@ impl<'a> fold::Folder for StandardLibraryInjector<'a> { // The name to use in `extern crate "name" as std;` let actual_crate_name = match self.alt_std_name { - Some(ref s) => token::intern_and_get_ident(&s[]), + Some(ref s) => token::intern_and_get_ident(&s[..]), None => token::intern_and_get_ident("std"), }; diff --git a/src/libsyntax/test.rs b/src/libsyntax/test.rs index 6511dffa6bf..31b264eb76d 100644 --- a/src/libsyntax/test.rs +++ b/src/libsyntax/test.rs @@ -274,8 +274,8 @@ fn strip_test_functions(krate: ast::Crate) -> ast::Crate { // When not compiling with --test we should not compile the // #[test] functions config::strip_items(krate, |attrs| { - !attr::contains_name(&attrs[], "test") && - !attr::contains_name(&attrs[], "bench") + !attr::contains_name(&attrs[..], "test") && + !attr::contains_name(&attrs[..], "bench") }) } @@ -563,7 +563,7 @@ fn mk_tests(cx: &TestCtxt) -> P { fn is_test_crate(krate: &ast::Crate) -> bool { match 
attr::find_crate_name(&krate.attrs[]) { - Some(ref s) if "test" == &s[] => true, + Some(ref s) if "test" == &s[..] => true, _ => false } } @@ -603,11 +603,11 @@ fn mk_test_desc_and_fn_rec(cx: &TestCtxt, test: &Test) -> P { // creates $name: $expr let field = |name, expr| ecx.field_imm(span, ecx.ident_of(name), expr); - debug!("encoding {}", ast_util::path_name_i(&path[])); + debug!("encoding {}", ast_util::path_name_i(&path[..])); // path to the #[test] function: "foo::bar::baz" - let path_string = ast_util::path_name_i(&path[]); - let name_expr = ecx.expr_str(span, token::intern_and_get_ident(&path_string[])); + let path_string = ast_util::path_name_i(&path[..]); + let name_expr = ecx.expr_str(span, token::intern_and_get_ident(&path_string[..])); // self::test::StaticTestName($name_expr) let name_expr = ecx.expr_call(span, diff --git a/src/libsyntax/util/interner.rs b/src/libsyntax/util/interner.rs index 51144267519..c286ff9d65c 100644 --- a/src/libsyntax/util/interner.rs +++ b/src/libsyntax/util/interner.rs @@ -110,34 +110,34 @@ impl Eq for RcStr {} impl Ord for RcStr { fn cmp(&self, other: &RcStr) -> Ordering { - self[].cmp(&other[]) + self[..].cmp(&other[..]) } } impl fmt::Debug for RcStr { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { use std::fmt::Debug; - self[].fmt(f) + self[..].fmt(f) } } impl fmt::Display for RcStr { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { use std::fmt::Display; - self[].fmt(f) + self[..].fmt(f) } } impl BorrowFrom for str { fn borrow_from(owned: &RcStr) -> &str { - &owned.string[] + &owned.string[..] } } impl Deref for RcStr { type Target = str; - fn deref(&self) -> &str { &self.string[] } + fn deref(&self) -> &str { &self.string[..] } } /// A StrInterner differs from Interner in that it accepts diff --git a/src/libterm/terminfo/mod.rs b/src/libterm/terminfo/mod.rs index b978d2d8054..be1c623c859 100644 --- a/src/libterm/terminfo/mod.rs +++ b/src/libterm/terminfo/mod.rs @@ -180,7 +180,7 @@ impl TerminfoTerminal { } }; - let entry = open(&term[]); + let entry = open(&term[..]); if entry.is_err() { if env::var("MSYSCON").ok().map_or(false, |s| { "mintty.exe" == s diff --git a/src/libterm/terminfo/searcher.rs b/src/libterm/terminfo/searcher.rs index fd6e6a843e1..c40a5534efb 100644 --- a/src/libterm/terminfo/searcher.rs +++ b/src/libterm/terminfo/searcher.rs @@ -60,13 +60,13 @@ pub fn get_dbpath_for_term(term: &str) -> Option> { for p in &dirs_to_search { if p.exists() { let f = first_char.to_string(); - let newp = p.join_many(&[&f[], term]); + let newp = p.join_many(&[&f[..], term]); if newp.exists() { return Some(box newp); } // on some installations the dir is named after the hex of the char (e.g. 
OS X) let f = format!("{:x}", first_char as uint); - let newp = p.join_many(&[&f[], term]); + let newp = p.join_many(&[&f[..], term]); if newp.exists() { return Some(box newp); } diff --git a/src/libtest/lib.rs b/src/libtest/lib.rs index 2cb30ad9804..43b454f2a37 100644 --- a/src/libtest/lib.rs +++ b/src/libtest/lib.rs @@ -721,7 +721,7 @@ fn should_sort_failures_before_printing_them() { st.write_failures().unwrap(); let s = match st.out { - Raw(ref m) => String::from_utf8_lossy(&m[]), + Raw(ref m) => String::from_utf8_lossy(&m[..]), Pretty(_) => unreachable!() }; @@ -834,7 +834,7 @@ pub fn filter_tests(opts: &TestOpts, tests: Vec) -> Vec filtered, Some(ref filter) => { filtered.into_iter().filter(|test| { - test.desc.name.as_slice().contains(&filter[]) + test.desc.name.as_slice().contains(&filter[..]) }).collect() } }; diff --git a/src/rustbook/build.rs b/src/rustbook/build.rs index 6f5fc5c1969..224f1ef1a8b 100644 --- a/src/rustbook/build.rs +++ b/src/rustbook/build.rs @@ -92,7 +92,7 @@ fn render(book: &Book, tgt: &Path) -> CliResult<()> { { let urls = markdown_data.replace(".md)", ".html)"); try!(File::create(&preprocessed_path) - .write_str(&urls[])); + .write_str(&urls[..])); } // write the prelude to a temporary HTML file for rustdoc inclusion diff --git a/src/rustbook/error.rs b/src/rustbook/error.rs index 1c10a270acc..43c882c7d5b 100644 --- a/src/rustbook/error.rs +++ b/src/rustbook/error.rs @@ -52,7 +52,7 @@ impl<'a> Error for &'a str { impl Error for String { fn description<'a>(&'a self) -> &'a str { - &self[] + &self[..] } } @@ -75,7 +75,7 @@ impl Error for IoError { self.desc } fn detail(&self) -> Option<&str> { - self.detail.as_ref().map(|s| &s[]) + self.detail.as_ref().map(|s| &s[..]) } } diff --git a/src/rustbook/test.rs b/src/rustbook/test.rs index d3cb8a7316e..c5d4875423a 100644 --- a/src/rustbook/test.rs +++ b/src/rustbook/test.rs @@ -65,7 +65,7 @@ impl Subcommand for Test { } Err(errors) => { for err in errors { - term.err(&err[]); + term.err(&err[..]); } return Err(box "There was an error." as Box); } diff --git a/src/test/auxiliary/lint_group_plugin_test.rs b/src/test/auxiliary/lint_group_plugin_test.rs index 36b3091852b..e9d98889ff8 100644 --- a/src/test/auxiliary/lint_group_plugin_test.rs +++ b/src/test/auxiliary/lint_group_plugin_test.rs @@ -37,9 +37,9 @@ impl LintPass for Pass { fn check_item(&mut self, cx: &Context, it: &ast::Item) { let name = token::get_ident(it.ident); - if &name[] == "lintme" { + if &name[..] == "lintme" { cx.span_lint(TEST_LINT, it.span, "item is named 'lintme'"); - } else if &name[] == "pleaselintme" { + } else if &name[..] == "pleaselintme" { cx.span_lint(PLEASE_LINT, it.span, "item is named 'pleaselintme'"); } } diff --git a/src/test/auxiliary/lint_plugin_test.rs b/src/test/auxiliary/lint_plugin_test.rs index 9020bb7b0fb..ffb234f70c8 100644 --- a/src/test/auxiliary/lint_plugin_test.rs +++ b/src/test/auxiliary/lint_plugin_test.rs @@ -35,7 +35,7 @@ impl LintPass for Pass { fn check_item(&mut self, cx: &Context, it: &ast::Item) { let name = token::get_ident(it.ident); - if &name[] == "lintme" { + if &name[..] 
== "lintme" { cx.span_lint(TEST_LINT, it.span, "item is named 'lintme'"); } } diff --git a/src/test/auxiliary/plugin_args.rs b/src/test/auxiliary/plugin_args.rs index 907d80b50db..d0ab944813a 100644 --- a/src/test/auxiliary/plugin_args.rs +++ b/src/test/auxiliary/plugin_args.rs @@ -37,7 +37,7 @@ impl TTMacroExpander for Expander { _: &[ast::TokenTree]) -> Box { let args = self.args.iter().map(|i| pprust::meta_item_to_string(&*i)) .collect::>().connect(", "); - let interned = token::intern_and_get_ident(&args[]); + let interned = token::intern_and_get_ident(&args[..]); MacExpr::new(ecx.expr_str(sp, interned)) } } diff --git a/src/test/run-pass/regions-refcell.rs b/src/test/run-pass/regions-refcell.rs index 019db2a977e..20b64ecc071 100644 --- a/src/test/run-pass/regions-refcell.rs +++ b/src/test/run-pass/regions-refcell.rs @@ -19,7 +19,7 @@ use std::cell::RefCell; #[cfg(cannot_use_this_yet)] fn foo<'a>(map: RefCell>) { let one = [1u]; - assert_eq!(map.borrow().get("one"), Some(&one[])); + assert_eq!(map.borrow().get("one"), Some(&one[..])); } #[cfg(cannot_use_this_yet_either)] @@ -45,9 +45,9 @@ fn main() { let one = [1u8]; let two = [2u8]; let mut map = HashMap::new(); - map.insert("zero", &zer[]); - map.insert("one", &one[]); - map.insert("two", &two[]); + map.insert("zero", &zer[..]); + map.insert("one", &one[..]); + map.insert("two", &two[..]); let map = RefCell::new(map); foo(map); }