Mirror of https://github.com/rust-lang/rust.git (synced 2024-11-22 23:04:33 +00:00)

Commit 9ea84aeed4 (parent 64cd30e0ca):
Replace all uses of `&foo[]` with `&foo[..]` en masse.
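The change is purely syntactic: the old empty-bracket index `&foo[]` (full-range slicing sugar that was removed before Rust 1.0) becomes an index with the full range `..` (`RangeFull`). Below is a minimal sketch of the two spellings, with illustrative variable names that are not taken from the diff:

```rust
fn main() {
    let owned: Vec<i32> = vec![1, 2, 3];
    let name: String = String::from("hello");

    // Pre-1.0 form removed by this commit (kept as comments; it no longer parses):
    // let nums: &[i32] = &owned[];
    // let text: &str = &name[];

    // Replacement: index with `..` (std::ops::RangeFull) to borrow the whole
    // collection as a slice.
    let nums: &[i32] = &owned[..];
    let text: &str = &name[..];

    assert_eq!(nums, [1, 2, 3]);
    assert_eq!(text, "hello");
}
```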
@@ -688,7 +688,7 @@ fn run_debuginfo_lldb_test(config: &Config, props: &TestProps, testfile: &Path)
 .unwrap()
 .to_string();

-script_str.push_str(&format!("command script import {}\n", &rust_pp_module_abs_path[])[]);
+script_str.push_str(&format!("command script import {}\n", &rust_pp_module_abs_path[..])[]);
 script_str.push_str("type summary add --no-value ");
 script_str.push_str("--python-function lldb_rust_formatters.print_val ");
 script_str.push_str("-x \".*\" --category Rust\n");
@@ -3587,7 +3587,7 @@ An example of each kind:
 ```{rust}
 let vec: Vec<i32> = vec![1, 2, 3];
 let arr: [i32; 3] = [1, 2, 3];
-let s: &[i32] = &vec[];
+let s: &[i32] = &vec[..];
 ```

 As you can see, the `vec!` macro allows you to create a `Vec<T>` easily. The
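The book snippet above only shows the full-range form; for contrast, the same indexing syntax accepts bounded ranges as well. This is an illustrative aside, not part of the commit:

```rust
fn main() {
    let vec: Vec<i32> = vec![1, 2, 3];

    let all: &[i32] = &vec[..];   // everything
    let tail: &[i32] = &vec[1..]; // from index 1 to the end
    let head: &[i32] = &vec[..2]; // up to (but not including) index 2

    assert_eq!(all, [1, 2, 3]);
    assert_eq!(tail, [2, 3]);
    assert_eq!(head, [1, 2]);
}
```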
@@ -180,7 +180,7 @@ If you want to match against a slice or array, you can use `&`:
 fn main() {
 let v = vec!["match_this", "1"];

-match &v[] {
+match &v[..] {
 ["match_this", second] => println!("The second element is {}", second),
 _ => {},
 }
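Indexing with `..` is what turns the `Vec` into a slice that the patterns can see. A modern restatement of the same idea follows; it is a sketch, and the wildcard stands in for the string literal used in the book example:

```rust
fn main() {
    let v = vec!["match_this", "1"];

    // `&v[..]` borrows the Vec as a slice so slice patterns apply.
    match &v[..] {
        [_, second] => println!("The second element is {}", second),
        _ => {}
    }
}
```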
@@ -1177,12 +1177,12 @@ impl ElementSwaps {

 #[unstable(feature = "collections", reason = "trait is unstable")]
 impl<T> BorrowFrom<Vec<T>> for [T] {
-fn borrow_from(owned: &Vec<T>) -> &[T] { &owned[] }
+fn borrow_from(owned: &Vec<T>) -> &[T] { &owned[..] }
 }

 #[unstable(feature = "collections", reason = "trait is unstable")]
 impl<T> BorrowFromMut<Vec<T>> for [T] {
-fn borrow_from_mut(owned: &mut Vec<T>) -> &mut [T] { &mut owned[] }
+fn borrow_from_mut(owned: &mut Vec<T>) -> &mut [T] { &mut owned[..] }
 }

 #[unstable(feature = "collections", reason = "trait is unstable")]
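`BorrowFrom`/`BorrowFromMut` were pre-1.0 unstable traits (their role is played by `std::borrow::Borrow` today); what matters for this commit is only the expression they return. A small sketch of what `&owned[..]` evaluates to, using illustrative names:

```rust
// A reader that only needs the data can take a plain slice.
fn sum(values: &[i32]) -> i32 {
    values.iter().sum()
}

fn main() {
    let owned = vec![1, 2, 3];

    // `&owned[..]` is the full-range borrow the `borrow_from` impl returns.
    assert_eq!(sum(&owned[..]), 6);

    // Deref coercion reaches the same slice without the explicit index.
    assert_eq!(sum(&owned), 6);
}
```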
@@ -1743,7 +1743,7 @@ mod tests {
 #[test]
 fn test_slice_from() {
 let vec: &[_] = &[1, 2, 3, 4];
-assert_eq!(&vec[], vec);
+assert_eq!(&vec[..], vec);
 let b: &[_] = &[3, 4];
 assert_eq!(&vec[2..], b);
 let b: &[_] = &[];
@@ -1996,9 +1996,9 @@ mod tests {

 #[test]
 fn test_lexicographic_permutations_empty_and_short() {
-let empty : &mut[i32] = &mut[];
+let empty : &mut[i32] = &mut[..];
 assert!(empty.next_permutation() == false);
-let b: &mut[i32] = &mut[];
+let b: &mut[i32] = &mut[..];
 assert!(empty == b);
 assert!(empty.prev_permutation() == false);
 assert!(empty == b);
@@ -2264,15 +2264,15 @@ mod tests {
 #[test]
 fn test_total_ord() {
 let c = &[1, 2, 3];
-[1, 2, 3, 4][].cmp(c) == Greater;
+[1, 2, 3, 4][..].cmp(c) == Greater;
 let c = &[1, 2, 3, 4];
-[1, 2, 3][].cmp(c) == Less;
+[1, 2, 3][..].cmp(c) == Less;
 let c = &[1, 2, 3, 6];
-[1, 2, 3, 4][].cmp(c) == Equal;
+[1, 2, 3, 4][..].cmp(c) == Equal;
 let c = &[1, 2, 3, 4, 5, 6];
-[1, 2, 3, 4, 5, 5, 5, 5][].cmp(c) == Less;
+[1, 2, 3, 4, 5, 5, 5, 5][..].cmp(c) == Less;
 let c = &[1, 2, 3, 4];
-[2, 2][].cmp(c) == Greater;
+[2, 2][..].cmp(c) == Greater;
 }

 #[test]
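As in the test above, indexing a fixed-size array with `..` produces a slice, so the slice `Ord` impl applies. A self-contained check of the same comparisons (illustrative, not part of the commit):

```rust
use std::cmp::Ordering;

fn main() {
    // Each `[..]` turns the array literal into a `[i32]` slice before `cmp`.
    assert_eq!([1, 2, 3, 4][..].cmp(&[1, 2, 3][..]), Ordering::Greater);
    assert_eq!([1, 2, 3][..].cmp(&[1, 2, 3, 4][..]), Ordering::Less);
    assert_eq!([2, 2][..].cmp(&[1, 2, 3, 4][..]), Ordering::Greater);
}
```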
@@ -388,7 +388,7 @@ macro_rules! utf8_acc_cont_byte {

 #[unstable(feature = "collections", reason = "trait is unstable")]
 impl BorrowFrom<String> for str {
-fn borrow_from(owned: &String) -> &str { &owned[] }
+fn borrow_from(owned: &String) -> &str { &owned[..] }
 }

 #[unstable(feature = "collections", reason = "trait is unstable")]
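For `String` the same expression produces a `&str`; a one-line illustration (not part of the commit):

```rust
fn main() {
    let owned = String::from("hello");
    // Full-range indexing borrows the whole String as a string slice.
    let s: &str = &owned[..];
    assert_eq!(s, "hello");
}
```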
@ -466,7 +466,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
|
||||
reason = "this functionality may be moved to libunicode")]
|
||||
fn nfd_chars(&self) -> Decompositions {
|
||||
Decompositions {
|
||||
iter: self[].chars(),
|
||||
iter: self[..].chars(),
|
||||
buffer: Vec::new(),
|
||||
sorted: false,
|
||||
kind: Canonical
|
||||
@ -480,7 +480,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
|
||||
reason = "this functionality may be moved to libunicode")]
|
||||
fn nfkd_chars(&self) -> Decompositions {
|
||||
Decompositions {
|
||||
iter: self[].chars(),
|
||||
iter: self[..].chars(),
|
||||
buffer: Vec::new(),
|
||||
sorted: false,
|
||||
kind: Compatible
|
||||
@ -530,7 +530,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
|
||||
/// ```
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
fn contains(&self, pat: &str) -> bool {
|
||||
core_str::StrExt::contains(&self[], pat)
|
||||
core_str::StrExt::contains(&self[..], pat)
|
||||
}
|
||||
|
||||
/// Returns true if a string contains a char pattern.
|
||||
@ -547,7 +547,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
|
||||
#[unstable(feature = "collections",
|
||||
reason = "might get removed in favour of a more generic contains()")]
|
||||
fn contains_char<P: CharEq>(&self, pat: P) -> bool {
|
||||
core_str::StrExt::contains_char(&self[], pat)
|
||||
core_str::StrExt::contains_char(&self[..], pat)
|
||||
}
|
||||
|
||||
/// An iterator over the characters of `self`. Note, this iterates
|
||||
@ -561,7 +561,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
|
||||
/// ```
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
fn chars(&self) -> Chars {
|
||||
core_str::StrExt::chars(&self[])
|
||||
core_str::StrExt::chars(&self[..])
|
||||
}
|
||||
|
||||
/// An iterator over the bytes of `self`
|
||||
@ -574,13 +574,13 @@ pub trait StrExt: Index<RangeFull, Output = str> {
|
||||
/// ```
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
fn bytes(&self) -> Bytes {
|
||||
core_str::StrExt::bytes(&self[])
|
||||
core_str::StrExt::bytes(&self[..])
|
||||
}
|
||||
|
||||
/// An iterator over the characters of `self` and their byte offsets.
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
fn char_indices(&self) -> CharIndices {
|
||||
core_str::StrExt::char_indices(&self[])
|
||||
core_str::StrExt::char_indices(&self[..])
|
||||
}
|
||||
|
||||
/// An iterator over substrings of `self`, separated by characters
|
||||
@ -603,7 +603,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
|
||||
/// ```
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
fn split<P: CharEq>(&self, pat: P) -> Split<P> {
|
||||
core_str::StrExt::split(&self[], pat)
|
||||
core_str::StrExt::split(&self[..], pat)
|
||||
}
|
||||
|
||||
/// An iterator over substrings of `self`, separated by characters
|
||||
@ -630,7 +630,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
|
||||
/// ```
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
fn splitn<P: CharEq>(&self, count: usize, pat: P) -> SplitN<P> {
|
||||
core_str::StrExt::splitn(&self[], count, pat)
|
||||
core_str::StrExt::splitn(&self[..], count, pat)
|
||||
}
|
||||
|
||||
/// An iterator over substrings of `self`, separated by characters
|
||||
@ -659,7 +659,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
|
||||
/// ```
|
||||
#[unstable(feature = "collections", reason = "might get removed")]
|
||||
fn split_terminator<P: CharEq>(&self, pat: P) -> SplitTerminator<P> {
|
||||
core_str::StrExt::split_terminator(&self[], pat)
|
||||
core_str::StrExt::split_terminator(&self[..], pat)
|
||||
}
|
||||
|
||||
/// An iterator over substrings of `self`, separated by characters
|
||||
@ -680,7 +680,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
|
||||
/// ```
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
fn rsplitn<P: CharEq>(&self, count: usize, pat: P) -> RSplitN<P> {
|
||||
core_str::StrExt::rsplitn(&self[], count, pat)
|
||||
core_str::StrExt::rsplitn(&self[..], count, pat)
|
||||
}
|
||||
|
||||
/// An iterator over the start and end indices of the disjoint
|
||||
@ -706,7 +706,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
|
||||
#[unstable(feature = "collections",
|
||||
reason = "might have its iterator type changed")]
|
||||
fn match_indices<'a>(&'a self, pat: &'a str) -> MatchIndices<'a> {
|
||||
core_str::StrExt::match_indices(&self[], pat)
|
||||
core_str::StrExt::match_indices(&self[..], pat)
|
||||
}
|
||||
|
||||
/// An iterator over the substrings of `self` separated by the pattern `sep`.
|
||||
@ -723,7 +723,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
|
||||
#[unstable(feature = "collections",
|
||||
reason = "might get removed in the future in favor of a more generic split()")]
|
||||
fn split_str<'a>(&'a self, pat: &'a str) -> SplitStr<'a> {
|
||||
core_str::StrExt::split_str(&self[], pat)
|
||||
core_str::StrExt::split_str(&self[..], pat)
|
||||
}
|
||||
|
||||
/// An iterator over the lines of a string (subsequences separated
|
||||
@ -739,7 +739,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
|
||||
/// ```
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
fn lines(&self) -> Lines {
|
||||
core_str::StrExt::lines(&self[])
|
||||
core_str::StrExt::lines(&self[..])
|
||||
}
|
||||
|
||||
/// An iterator over the lines of a string, separated by either
|
||||
@ -755,7 +755,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
|
||||
/// ```
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
fn lines_any(&self) -> LinesAny {
|
||||
core_str::StrExt::lines_any(&self[])
|
||||
core_str::StrExt::lines_any(&self[..])
|
||||
}
|
||||
|
||||
/// Deprecated: use `s[a .. b]` instead.
|
||||
@ -802,7 +802,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
|
||||
#[unstable(feature = "collections",
|
||||
reason = "may have yet to prove its worth")]
|
||||
fn slice_chars(&self, begin: usize, end: usize) -> &str {
|
||||
core_str::StrExt::slice_chars(&self[], begin, end)
|
||||
core_str::StrExt::slice_chars(&self[..], begin, end)
|
||||
}
|
||||
|
||||
/// Takes a bytewise (not UTF-8) slice from a string.
|
||||
@ -813,7 +813,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
|
||||
/// the entire slice as well.
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
unsafe fn slice_unchecked(&self, begin: usize, end: usize) -> &str {
|
||||
core_str::StrExt::slice_unchecked(&self[], begin, end)
|
||||
core_str::StrExt::slice_unchecked(&self[..], begin, end)
|
||||
}
|
||||
|
||||
/// Returns true if the pattern `pat` is a prefix of the string.
|
||||
@ -825,7 +825,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
|
||||
/// ```
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
fn starts_with(&self, pat: &str) -> bool {
|
||||
core_str::StrExt::starts_with(&self[], pat)
|
||||
core_str::StrExt::starts_with(&self[..], pat)
|
||||
}
|
||||
|
||||
/// Returns true if the pattern `pat` is a suffix of the string.
|
||||
@ -837,7 +837,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
|
||||
/// ```
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
fn ends_with(&self, pat: &str) -> bool {
|
||||
core_str::StrExt::ends_with(&self[], pat)
|
||||
core_str::StrExt::ends_with(&self[..], pat)
|
||||
}
|
||||
|
||||
/// Returns a string with all pre- and suffixes that match
|
||||
@ -857,7 +857,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
|
||||
/// ```
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
fn trim_matches<P: CharEq>(&self, pat: P) -> &str {
|
||||
core_str::StrExt::trim_matches(&self[], pat)
|
||||
core_str::StrExt::trim_matches(&self[..], pat)
|
||||
}
|
||||
|
||||
/// Returns a string with all prefixes that match
|
||||
@ -877,7 +877,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
|
||||
/// ```
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
fn trim_left_matches<P: CharEq>(&self, pat: P) -> &str {
|
||||
core_str::StrExt::trim_left_matches(&self[], pat)
|
||||
core_str::StrExt::trim_left_matches(&self[..], pat)
|
||||
}
|
||||
|
||||
/// Returns a string with all suffixes that match
|
||||
@ -897,7 +897,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
|
||||
/// ```
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
fn trim_right_matches<P: CharEq>(&self, pat: P) -> &str {
|
||||
core_str::StrExt::trim_right_matches(&self[], pat)
|
||||
core_str::StrExt::trim_right_matches(&self[..], pat)
|
||||
}
|
||||
|
||||
/// Check that `index`-th byte lies at the start and/or end of a
|
||||
@ -926,7 +926,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
|
||||
#[unstable(feature = "collections",
|
||||
reason = "naming is uncertain with container conventions")]
|
||||
fn is_char_boundary(&self, index: usize) -> bool {
|
||||
core_str::StrExt::is_char_boundary(&self[], index)
|
||||
core_str::StrExt::is_char_boundary(&self[..], index)
|
||||
}
|
||||
|
||||
/// Pluck a character out of a string and return the index of the next
|
||||
@ -985,7 +985,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
|
||||
#[unstable(feature = "collections",
|
||||
reason = "naming is uncertain with container conventions")]
|
||||
fn char_range_at(&self, start: usize) -> CharRange {
|
||||
core_str::StrExt::char_range_at(&self[], start)
|
||||
core_str::StrExt::char_range_at(&self[..], start)
|
||||
}
|
||||
|
||||
/// Given a byte position and a str, return the previous char and its position.
|
||||
@ -1001,7 +1001,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
|
||||
#[unstable(feature = "collections",
|
||||
reason = "naming is uncertain with container conventions")]
|
||||
fn char_range_at_reverse(&self, start: usize) -> CharRange {
|
||||
core_str::StrExt::char_range_at_reverse(&self[], start)
|
||||
core_str::StrExt::char_range_at_reverse(&self[..], start)
|
||||
}
|
||||
|
||||
/// Plucks the character starting at the `i`th byte of a string.
|
||||
@ -1022,7 +1022,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
|
||||
#[unstable(feature = "collections",
|
||||
reason = "naming is uncertain with container conventions")]
|
||||
fn char_at(&self, i: usize) -> char {
|
||||
core_str::StrExt::char_at(&self[], i)
|
||||
core_str::StrExt::char_at(&self[..], i)
|
||||
}
|
||||
|
||||
/// Plucks the character ending at the `i`th byte of a string.
|
||||
@ -1034,7 +1034,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
|
||||
#[unstable(feature = "collections",
|
||||
reason = "naming is uncertain with container conventions")]
|
||||
fn char_at_reverse(&self, i: usize) -> char {
|
||||
core_str::StrExt::char_at_reverse(&self[], i)
|
||||
core_str::StrExt::char_at_reverse(&self[..], i)
|
||||
}
|
||||
|
||||
/// Work with the byte buffer of a string as a byte slice.
|
||||
@ -1046,7 +1046,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
|
||||
/// ```
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
fn as_bytes(&self) -> &[u8] {
|
||||
core_str::StrExt::as_bytes(&self[])
|
||||
core_str::StrExt::as_bytes(&self[..])
|
||||
}
|
||||
|
||||
/// Returns the byte index of the first character of `self` that
|
||||
@ -1074,7 +1074,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
|
||||
/// ```
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
fn find<P: CharEq>(&self, pat: P) -> Option<usize> {
|
||||
core_str::StrExt::find(&self[], pat)
|
||||
core_str::StrExt::find(&self[..], pat)
|
||||
}
|
||||
|
||||
/// Returns the byte index of the last character of `self` that
|
||||
@ -1102,7 +1102,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
|
||||
/// ```
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
fn rfind<P: CharEq>(&self, pat: P) -> Option<usize> {
|
||||
core_str::StrExt::rfind(&self[], pat)
|
||||
core_str::StrExt::rfind(&self[..], pat)
|
||||
}
|
||||
|
||||
/// Returns the byte index of the first matching substring
|
||||
@ -1127,7 +1127,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
|
||||
#[unstable(feature = "collections",
|
||||
reason = "might get removed in favor of a more generic find in the future")]
|
||||
fn find_str(&self, needle: &str) -> Option<usize> {
|
||||
core_str::StrExt::find_str(&self[], needle)
|
||||
core_str::StrExt::find_str(&self[..], needle)
|
||||
}
|
||||
|
||||
/// Retrieves the first character from a string slice and returns
|
||||
@ -1151,7 +1151,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
|
||||
#[unstable(feature = "collections",
|
||||
reason = "awaiting conventions about shifting and slices")]
|
||||
fn slice_shift_char(&self) -> Option<(char, &str)> {
|
||||
core_str::StrExt::slice_shift_char(&self[])
|
||||
core_str::StrExt::slice_shift_char(&self[..])
|
||||
}
|
||||
|
||||
/// Returns the byte offset of an inner slice relative to an enclosing outer slice.
|
||||
@ -1171,7 +1171,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
|
||||
#[unstable(feature = "collections",
|
||||
reason = "awaiting convention about comparability of arbitrary slices")]
|
||||
fn subslice_offset(&self, inner: &str) -> usize {
|
||||
core_str::StrExt::subslice_offset(&self[], inner)
|
||||
core_str::StrExt::subslice_offset(&self[..], inner)
|
||||
}
|
||||
|
||||
/// Return an unsafe pointer to the strings buffer.
|
||||
@ -1182,14 +1182,14 @@ pub trait StrExt: Index<RangeFull, Output = str> {
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
#[inline]
|
||||
fn as_ptr(&self) -> *const u8 {
|
||||
core_str::StrExt::as_ptr(&self[])
|
||||
core_str::StrExt::as_ptr(&self[..])
|
||||
}
|
||||
|
||||
/// Return an iterator of `u16` over the string encoded as UTF-16.
|
||||
#[unstable(feature = "collections",
|
||||
reason = "this functionality may only be provided by libunicode")]
|
||||
fn utf16_units(&self) -> Utf16Units {
|
||||
Utf16Units { encoder: Utf16Encoder::new(self[].chars()) }
|
||||
Utf16Units { encoder: Utf16Encoder::new(self[..].chars()) }
|
||||
}
|
||||
|
||||
/// Return the number of bytes in this string
|
||||
@ -1203,7 +1203,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
#[inline]
|
||||
fn len(&self) -> usize {
|
||||
core_str::StrExt::len(&self[])
|
||||
core_str::StrExt::len(&self[..])
|
||||
}
|
||||
|
||||
/// Returns true if this slice contains no bytes
|
||||
@ -1216,7 +1216,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
|
||||
#[inline]
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
fn is_empty(&self) -> bool {
|
||||
core_str::StrExt::is_empty(&self[])
|
||||
core_str::StrExt::is_empty(&self[..])
|
||||
}
|
||||
|
||||
/// Parse this string into the specified type.
|
||||
@ -1230,7 +1230,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
|
||||
#[inline]
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
fn parse<F: FromStr>(&self) -> Result<F, F::Err> {
|
||||
core_str::StrExt::parse(&self[])
|
||||
core_str::StrExt::parse(&self[..])
|
||||
}
|
||||
|
||||
/// Returns an iterator over the
|
||||
@ -1255,7 +1255,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
|
||||
#[unstable(feature = "collections",
|
||||
reason = "this functionality may only be provided by libunicode")]
|
||||
fn graphemes(&self, is_extended: bool) -> Graphemes {
|
||||
UnicodeStr::graphemes(&self[], is_extended)
|
||||
UnicodeStr::graphemes(&self[..], is_extended)
|
||||
}
|
||||
|
||||
/// Returns an iterator over the grapheme clusters of self and their byte offsets.
|
||||
@ -1271,7 +1271,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
|
||||
#[unstable(feature = "collections",
|
||||
reason = "this functionality may only be provided by libunicode")]
|
||||
fn grapheme_indices(&self, is_extended: bool) -> GraphemeIndices {
|
||||
UnicodeStr::grapheme_indices(&self[], is_extended)
|
||||
UnicodeStr::grapheme_indices(&self[..], is_extended)
|
||||
}
|
||||
|
||||
/// An iterator over the words of a string (subsequences separated
|
||||
@ -1288,7 +1288,7 @@ pub trait StrExt: Index<RangeFull, Output = str> {
|
||||
#[unstable(feature = "str_words",
|
||||
reason = "the precise algorithm to use is unclear")]
|
||||
fn words(&self) -> Words {
|
||||
UnicodeStr::words(&self[])
|
||||
UnicodeStr::words(&self[..])
|
||||
}
|
||||
|
||||
/// Returns a string's displayed width in columns, treating control
|
||||
@ -1303,25 +1303,25 @@ pub trait StrExt: Index<RangeFull, Output = str> {
|
||||
#[unstable(feature = "collections",
|
||||
reason = "this functionality may only be provided by libunicode")]
|
||||
fn width(&self, is_cjk: bool) -> usize {
|
||||
UnicodeStr::width(&self[], is_cjk)
|
||||
UnicodeStr::width(&self[..], is_cjk)
|
||||
}
|
||||
|
||||
/// Returns a string with leading and trailing whitespace removed.
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
fn trim(&self) -> &str {
|
||||
UnicodeStr::trim(&self[])
|
||||
UnicodeStr::trim(&self[..])
|
||||
}
|
||||
|
||||
/// Returns a string with leading whitespace removed.
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
fn trim_left(&self) -> &str {
|
||||
UnicodeStr::trim_left(&self[])
|
||||
UnicodeStr::trim_left(&self[..])
|
||||
}
|
||||
|
||||
/// Returns a string with trailing whitespace removed.
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
fn trim_right(&self) -> &str {
|
||||
UnicodeStr::trim_right(&self[])
|
||||
UnicodeStr::trim_right(&self[..])
|
||||
}
|
||||
}
|
||||
|
||||
@ -2704,7 +2704,7 @@ mod tests {
|
||||
&["\u{378}\u{308}\u{903}"], &["\u{378}\u{308}", "\u{903}"]),
|
||||
];
|
||||
|
||||
for &(s, g) in &test_same[] {
|
||||
for &(s, g) in &test_same[..] {
|
||||
// test forward iterator
|
||||
assert!(order::equals(s.graphemes(true), g.iter().cloned()));
|
||||
assert!(order::equals(s.graphemes(false), g.iter().cloned()));
|
||||
|
@ -857,7 +857,7 @@ impl ops::Index<ops::Range<usize>> for String {
|
||||
type Output = str;
|
||||
#[inline]
|
||||
fn index(&self, index: &ops::Range<usize>) -> &str {
|
||||
&self[][*index]
|
||||
&self[..][*index]
|
||||
}
|
||||
}
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
@ -865,7 +865,7 @@ impl ops::Index<ops::RangeTo<usize>> for String {
|
||||
type Output = str;
|
||||
#[inline]
|
||||
fn index(&self, index: &ops::RangeTo<usize>) -> &str {
|
||||
&self[][*index]
|
||||
&self[..][*index]
|
||||
}
|
||||
}
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
@ -873,7 +873,7 @@ impl ops::Index<ops::RangeFrom<usize>> for String {
|
||||
type Output = str;
|
||||
#[inline]
|
||||
fn index(&self, index: &ops::RangeFrom<usize>) -> &str {
|
||||
&self[][*index]
|
||||
&self[..][*index]
|
||||
}
|
||||
}
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
@ -891,7 +891,7 @@ impl ops::Deref for String {
|
||||
|
||||
#[inline]
|
||||
fn deref(&self) -> &str {
|
||||
unsafe { mem::transmute(&self.vec[]) }
|
||||
unsafe { mem::transmute(&self.vec[..]) }
|
||||
}
|
||||
}
|
||||
|
||||
@ -1287,7 +1287,7 @@ mod tests {
|
||||
#[test]
|
||||
fn test_slicing() {
|
||||
let s = "foobar".to_string();
|
||||
assert_eq!("foobar", &s[]);
|
||||
assert_eq!("foobar", &s[..]);
|
||||
assert_eq!("foo", &s[..3]);
|
||||
assert_eq!("bar", &s[3..]);
|
||||
assert_eq!("oob", &s[1..4]);
|
||||
|
@ -2589,7 +2589,7 @@ mod tests {
|
||||
b.bytes = src_len as u64;
|
||||
|
||||
b.iter(|| {
|
||||
let dst = src.clone()[].to_vec();
|
||||
let dst = src.clone()[..].to_vec();
|
||||
assert_eq!(dst.len(), src_len);
|
||||
assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
|
||||
});
|
||||
|
@ -37,14 +37,14 @@ macro_rules! array_impls {
|
||||
|
||||
impl<S: hash::Writer + Hasher, T: Hash<S>> Hash<S> for [T; $N] {
|
||||
fn hash(&self, state: &mut S) {
|
||||
Hash::hash(&self[], state)
|
||||
Hash::hash(&self[..], state)
|
||||
}
|
||||
}
|
||||
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
impl<T: fmt::Debug> fmt::Debug for [T; $N] {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
fmt::Debug::fmt(&&self[], f)
|
||||
fmt::Debug::fmt(&&self[..], f)
|
||||
}
|
||||
}
|
||||
|
||||
@ -72,11 +72,11 @@ macro_rules! array_impls {
|
||||
impl<A, B> PartialEq<[B; $N]> for [A; $N] where A: PartialEq<B> {
|
||||
#[inline]
|
||||
fn eq(&self, other: &[B; $N]) -> bool {
|
||||
&self[] == &other[]
|
||||
&self[..] == &other[..]
|
||||
}
|
||||
#[inline]
|
||||
fn ne(&self, other: &[B; $N]) -> bool {
|
||||
&self[] != &other[]
|
||||
&self[..] != &other[..]
|
||||
}
|
||||
}
|
||||
|
||||
@ -87,11 +87,11 @@ macro_rules! array_impls {
|
||||
{
|
||||
#[inline(always)]
|
||||
fn eq(&self, other: &Rhs) -> bool {
|
||||
PartialEq::eq(&self[], &**other)
|
||||
PartialEq::eq(&self[..], &**other)
|
||||
}
|
||||
#[inline(always)]
|
||||
fn ne(&self, other: &Rhs) -> bool {
|
||||
PartialEq::ne(&self[], &**other)
|
||||
PartialEq::ne(&self[..], &**other)
|
||||
}
|
||||
}
|
||||
|
||||
@ -102,11 +102,11 @@ macro_rules! array_impls {
|
||||
{
|
||||
#[inline(always)]
|
||||
fn eq(&self, other: &[B; $N]) -> bool {
|
||||
PartialEq::eq(&**self, &other[])
|
||||
PartialEq::eq(&**self, &other[..])
|
||||
}
|
||||
#[inline(always)]
|
||||
fn ne(&self, other: &[B; $N]) -> bool {
|
||||
PartialEq::ne(&**self, &other[])
|
||||
PartialEq::ne(&**self, &other[..])
|
||||
}
|
||||
}
|
||||
|
||||
@ -117,23 +117,23 @@ macro_rules! array_impls {
|
||||
impl<T:PartialOrd> PartialOrd for [T; $N] {
|
||||
#[inline]
|
||||
fn partial_cmp(&self, other: &[T; $N]) -> Option<Ordering> {
|
||||
PartialOrd::partial_cmp(&&self[], &&other[])
|
||||
PartialOrd::partial_cmp(&&self[..], &&other[..])
|
||||
}
|
||||
#[inline]
|
||||
fn lt(&self, other: &[T; $N]) -> bool {
|
||||
PartialOrd::lt(&&self[], &&other[])
|
||||
PartialOrd::lt(&&self[..], &&other[..])
|
||||
}
|
||||
#[inline]
|
||||
fn le(&self, other: &[T; $N]) -> bool {
|
||||
PartialOrd::le(&&self[], &&other[])
|
||||
PartialOrd::le(&&self[..], &&other[..])
|
||||
}
|
||||
#[inline]
|
||||
fn ge(&self, other: &[T; $N]) -> bool {
|
||||
PartialOrd::ge(&&self[], &&other[])
|
||||
PartialOrd::ge(&&self[..], &&other[..])
|
||||
}
|
||||
#[inline]
|
||||
fn gt(&self, other: &[T; $N]) -> bool {
|
||||
PartialOrd::gt(&&self[], &&other[])
|
||||
PartialOrd::gt(&&self[..], &&other[..])
|
||||
}
|
||||
}
|
||||
|
||||
@ -141,7 +141,7 @@ macro_rules! array_impls {
|
||||
impl<T:Ord> Ord for [T; $N] {
|
||||
#[inline]
|
||||
fn cmp(&self, other: &[T; $N]) -> Ordering {
|
||||
Ord::cmp(&&self[], &&other[])
|
||||
Ord::cmp(&&self[..], &&other[..])
|
||||
}
|
||||
}
|
||||
)+
|
||||
|
@@ -1280,7 +1280,7 @@ mod traits {
 /// Any string that can be represented as a slice
 #[unstable(feature = "core",
 reason = "Instead of taking this bound generically, this trait will be \
-replaced with one of slicing syntax (&foo[]), deref coercions, or \
+replaced with one of slicing syntax (&foo[..]), deref coercions, or \
 a more generic conversion trait")]
 pub trait Str {
 /// Work with `self` as a slice.
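The doc comment lists the alternatives that eventually replaced this trait; all three reach a `&str` today. A hedged sketch follows, with `String::as_str` standing in as one concrete example of "a more generic conversion":

```rust
fn takes_str(s: &str) -> usize {
    s.len()
}

fn main() {
    let owned = String::from("hello");

    let a = takes_str(&owned[..]);     // explicit full-range slicing syntax
    let b = takes_str(&owned);         // deref coercion from &String to &str
    let c = takes_str(owned.as_str()); // an explicit conversion method

    assert_eq!((a, b, c), (5, 5, 5));
}
```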
@@ -264,7 +264,7 @@ fn test_inspect() {
 .collect::<Vec<uint>>();

 assert_eq!(n, xs.len());
-assert_eq!(&xs[], &ys[]);
+assert_eq!(&xs[..], &ys[..]);
 }

 #[test]
@ -43,13 +43,13 @@ fn iterator_to_slice() {
|
||||
|
||||
{
|
||||
let mut iter = data.iter();
|
||||
assert_eq!(&iter[], &other_data[]);
|
||||
assert_eq!(&iter[..], &other_data[..]);
|
||||
|
||||
iter.next();
|
||||
assert_eq!(&iter[], &other_data[1..]);
|
||||
assert_eq!(&iter[..], &other_data[1..]);
|
||||
|
||||
iter.next_back();
|
||||
assert_eq!(&iter[], &other_data[1..2]);
|
||||
assert_eq!(&iter[..], &other_data[1..2]);
|
||||
|
||||
let s = iter.as_slice();
|
||||
iter.next();
|
||||
@ -57,17 +57,17 @@ fn iterator_to_slice() {
|
||||
}
|
||||
{
|
||||
let mut iter = data.iter_mut();
|
||||
assert_eq!(&iter[], &other_data[]);
|
||||
assert_eq!(&iter[..], &other_data[..]);
|
||||
// mutability:
|
||||
assert!(&mut iter[] == other_data);
|
||||
|
||||
iter.next();
|
||||
assert_eq!(&iter[], &other_data[1..]);
|
||||
assert_eq!(&iter[..], &other_data[1..]);
|
||||
assert!(&mut iter[] == &mut other_data[1..]);
|
||||
|
||||
iter.next_back();
|
||||
|
||||
assert_eq!(&iter[], &other_data[1..2]);
|
||||
assert_eq!(&iter[..], &other_data[1..2]);
|
||||
assert!(&mut iter[] == &mut other_data[1..2]);
|
||||
|
||||
let s = iter.into_slice();
|
||||
|
@ -215,11 +215,11 @@ impl<'a> Parser<'a> {
|
||||
}
|
||||
Some((_, other)) => {
|
||||
self.err(&format!("expected `{:?}`, found `{:?}`", c,
|
||||
other)[]);
|
||||
other));
|
||||
}
|
||||
None => {
|
||||
self.err(&format!("expected `{:?}` but string was terminated",
|
||||
c)[]);
|
||||
c));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -287,7 +287,7 @@ impl OptGroup {
|
||||
|
||||
impl Matches {
|
||||
fn opt_vals(&self, nm: &str) -> Vec<Optval> {
|
||||
match find_opt(&self.opts[], Name::from_str(nm)) {
|
||||
match find_opt(&self.opts[..], Name::from_str(nm)) {
|
||||
Some(id) => self.vals[id].clone(),
|
||||
None => panic!("No option '{}' defined", nm)
|
||||
}
|
||||
@ -326,7 +326,7 @@ impl Matches {
|
||||
/// Returns the string argument supplied to one of several matching options or `None`.
|
||||
pub fn opts_str(&self, names: &[String]) -> Option<String> {
|
||||
for nm in names {
|
||||
match self.opt_val(&nm[]) {
|
||||
match self.opt_val(&nm[..]) {
|
||||
Some(Val(ref s)) => return Some(s.clone()),
|
||||
_ => ()
|
||||
}
|
||||
@ -593,7 +593,7 @@ pub fn getopts(args: &[String], optgrps: &[OptGroup]) -> Result {
|
||||
while i < l {
|
||||
let cur = args[i].clone();
|
||||
let curlen = cur.len();
|
||||
if !is_arg(&cur[]) {
|
||||
if !is_arg(&cur[..]) {
|
||||
free.push(cur);
|
||||
} else if cur == "--" {
|
||||
let mut j = i + 1;
|
||||
@ -667,7 +667,7 @@ pub fn getopts(args: &[String], optgrps: &[OptGroup]) -> Result {
|
||||
v.push(Val((i_arg.clone())
|
||||
.unwrap()));
|
||||
} else if name_pos < names.len() || i + 1 == l ||
|
||||
is_arg(&args[i + 1][]) {
|
||||
is_arg(&args[i + 1][..]) {
|
||||
let v = &mut vals[optid];
|
||||
v.push(Given);
|
||||
} else {
|
||||
@ -730,7 +730,7 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String {
|
||||
0 => {}
|
||||
1 => {
|
||||
row.push('-');
|
||||
row.push_str(&short_name[]);
|
||||
row.push_str(&short_name[..]);
|
||||
row.push(' ');
|
||||
}
|
||||
_ => panic!("the short name should only be 1 ascii char long"),
|
||||
@ -741,7 +741,7 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String {
|
||||
0 => {}
|
||||
_ => {
|
||||
row.push_str("--");
|
||||
row.push_str(&long_name[]);
|
||||
row.push_str(&long_name[..]);
|
||||
row.push(' ');
|
||||
}
|
||||
}
|
||||
@ -749,10 +749,10 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String {
|
||||
// arg
|
||||
match hasarg {
|
||||
No => {}
|
||||
Yes => row.push_str(&hint[]),
|
||||
Yes => row.push_str(&hint[..]),
|
||||
Maybe => {
|
||||
row.push('[');
|
||||
row.push_str(&hint[]);
|
||||
row.push_str(&hint[..]);
|
||||
row.push(']');
|
||||
}
|
||||
}
|
||||
@ -765,7 +765,7 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String {
|
||||
row.push(' ');
|
||||
}
|
||||
} else {
|
||||
row.push_str(&desc_sep[]);
|
||||
row.push_str(&desc_sep[..]);
|
||||
}
|
||||
|
||||
// Normalize desc to contain words separated by one space character
|
||||
@ -777,14 +777,14 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String {
|
||||
|
||||
// FIXME: #5516 should be graphemes not codepoints
|
||||
let mut desc_rows = Vec::new();
|
||||
each_split_within(&desc_normalized_whitespace[], 54, |substr| {
|
||||
each_split_within(&desc_normalized_whitespace[..], 54, |substr| {
|
||||
desc_rows.push(substr.to_string());
|
||||
true
|
||||
});
|
||||
|
||||
// FIXME: #5516 should be graphemes not codepoints
|
||||
// wrapped description
|
||||
row.push_str(&desc_rows.connect(&desc_sep[])[]);
|
||||
row.push_str(&desc_rows.connect(&desc_sep[..])[]);
|
||||
|
||||
row
|
||||
});
|
||||
@ -803,10 +803,10 @@ fn format_option(opt: &OptGroup) -> String {
|
||||
// Use short_name is possible, but fallback to long_name.
|
||||
if opt.short_name.len() > 0 {
|
||||
line.push('-');
|
||||
line.push_str(&opt.short_name[]);
|
||||
line.push_str(&opt.short_name[..]);
|
||||
} else {
|
||||
line.push_str("--");
|
||||
line.push_str(&opt.long_name[]);
|
||||
line.push_str(&opt.long_name[..]);
|
||||
}
|
||||
|
||||
if opt.hasarg != No {
|
||||
@ -814,7 +814,7 @@ fn format_option(opt: &OptGroup) -> String {
|
||||
if opt.hasarg == Maybe {
|
||||
line.push('[');
|
||||
}
|
||||
line.push_str(&opt.hint[]);
|
||||
line.push_str(&opt.hint[..]);
|
||||
if opt.hasarg == Maybe {
|
||||
line.push(']');
|
||||
}
|
||||
@ -836,7 +836,7 @@ pub fn short_usage(program_name: &str, opts: &[OptGroup]) -> String {
|
||||
line.push_str(&opts.iter()
|
||||
.map(format_option)
|
||||
.collect::<Vec<String>>()
|
||||
.connect(" ")[]);
|
||||
.connect(" ")[..]);
|
||||
line
|
||||
}
|
||||
|
||||
|
@ -455,7 +455,7 @@ impl<'a> LabelText<'a> {
|
||||
pub fn escape(&self) -> String {
|
||||
match self {
|
||||
&LabelStr(ref s) => s.escape_default(),
|
||||
&EscStr(ref s) => LabelText::escape_str(&s[]),
|
||||
&EscStr(ref s) => LabelText::escape_str(&s[..]),
|
||||
}
|
||||
}
|
||||
|
||||
@ -484,7 +484,7 @@ impl<'a> LabelText<'a> {
|
||||
let mut prefix = self.pre_escaped_content().into_owned();
|
||||
let suffix = suffix.pre_escaped_content();
|
||||
prefix.push_str(r"\n\n");
|
||||
prefix.push_str(&suffix[]);
|
||||
prefix.push_str(&suffix[..]);
|
||||
EscStr(prefix.into_cow())
|
||||
}
|
||||
}
|
||||
@ -678,7 +678,7 @@ mod tests {
|
||||
|
||||
impl<'a> Labeller<'a, Node, &'a Edge> for LabelledGraph {
|
||||
fn graph_id(&'a self) -> Id<'a> {
|
||||
Id::new(&self.name[]).unwrap()
|
||||
Id::new(&self.name[..]).unwrap()
|
||||
}
|
||||
fn node_id(&'a self, n: &Node) -> Id<'a> {
|
||||
id_name(n)
|
||||
|
@ -287,7 +287,7 @@ pub fn log(level: u32, loc: &'static LogLocation, args: fmt::Arguments) {
|
||||
// Test the literal string from args against the current filter, if there
|
||||
// is one.
|
||||
match unsafe { FILTER.as_ref() } {
|
||||
Some(filter) if !args.to_string().contains(&filter[]) => return,
|
||||
Some(filter) if !args.to_string().contains(&filter[..]) => return,
|
||||
_ => {}
|
||||
}
|
||||
|
||||
@ -382,7 +382,7 @@ fn enabled(level: u32,
|
||||
// Search for the longest match, the vector is assumed to be pre-sorted.
|
||||
for directive in iter.rev() {
|
||||
match directive.name {
|
||||
Some(ref name) if !module.starts_with(&name[]) => {},
|
||||
Some(ref name) if !module.starts_with(&name[..]) => {},
|
||||
Some(..) | None => {
|
||||
return level <= directive.level
|
||||
}
|
||||
@ -397,7 +397,7 @@ fn enabled(level: u32,
|
||||
/// `Once` primitive (and this function is called from that primitive).
|
||||
fn init() {
|
||||
let (mut directives, filter) = match env::var("RUST_LOG") {
|
||||
Ok(spec) => directive::parse_logging_spec(&spec[]),
|
||||
Ok(spec) => directive::parse_logging_spec(&spec[..]),
|
||||
Err(..) => (Vec::new(), None),
|
||||
};
|
||||
|
||||
|
@ -509,7 +509,7 @@ impl BoxPointers {
|
||||
if n_uniq > 0 {
|
||||
let s = ty_to_string(cx.tcx, ty);
|
||||
let m = format!("type uses owned (Box type) pointers: {}", s);
|
||||
cx.span_lint(BOX_POINTERS, span, &m[]);
|
||||
cx.span_lint(BOX_POINTERS, span, &m[..]);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -737,7 +737,7 @@ impl LintPass for UnusedResults {
|
||||
}
|
||||
} else {
|
||||
let attrs = csearch::get_item_attrs(&cx.sess().cstore, did);
|
||||
warned |= check_must_use(cx, &attrs[], s.span);
|
||||
warned |= check_must_use(cx, &attrs[..], s.span);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
@ -804,7 +804,7 @@ impl NonCamelCaseTypes {
|
||||
} else {
|
||||
format!("{} `{}` should have a camel case name such as `{}`", sort, s, c)
|
||||
};
|
||||
cx.span_lint(NON_CAMEL_CASE_TYPES, span, &m[]);
|
||||
cx.span_lint(NON_CAMEL_CASE_TYPES, span, &m[..]);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -951,7 +951,7 @@ impl NonSnakeCase {
|
||||
|
||||
if !is_snake_case(ident) {
|
||||
let sc = NonSnakeCase::to_snake_case(&s);
|
||||
if sc != &s[] {
|
||||
if sc != &s[..] {
|
||||
cx.span_lint(NON_SNAKE_CASE, span,
|
||||
&*format!("{} `{}` should have a snake case name such as `{}`",
|
||||
sort, s, sc));
|
||||
@ -1034,7 +1034,7 @@ impl NonUpperCaseGlobals {
|
||||
if s.chars().any(|c| c.is_lowercase()) {
|
||||
let uc: String = NonSnakeCase::to_snake_case(&s).chars()
|
||||
.map(|c| c.to_uppercase()).collect();
|
||||
if uc != &s[] {
|
||||
if uc != &s[..] {
|
||||
cx.span_lint(NON_UPPER_CASE_GLOBALS, span,
|
||||
&format!("{} `{}` should have an upper case name such as `{}`",
|
||||
sort, s, uc));
|
||||
@ -1197,7 +1197,7 @@ impl LintPass for UnusedImportBraces {
|
||||
let m = format!("braces around {} is unnecessary",
|
||||
&token::get_ident(*name));
|
||||
cx.span_lint(UNUSED_IMPORT_BRACES, item.span,
|
||||
&m[]);
|
||||
&m[..]);
|
||||
},
|
||||
_ => ()
|
||||
}
|
||||
@ -1475,7 +1475,7 @@ impl LintPass for MissingDoc {
|
||||
let doc_hidden = self.doc_hidden() || attrs.iter().any(|attr| {
|
||||
attr.check_name("doc") && match attr.meta_item_list() {
|
||||
None => false,
|
||||
Some(l) => attr::contains_name(&l[], "hidden"),
|
||||
Some(l) => attr::contains_name(&l[..], "hidden"),
|
||||
}
|
||||
});
|
||||
self.doc_hidden_stack.push(doc_hidden);
|
||||
@ -1703,7 +1703,7 @@ impl Stability {
|
||||
_ => format!("use of {} item", label)
|
||||
};
|
||||
|
||||
cx.span_lint(lint, span, &msg[]);
|
||||
cx.span_lint(lint, span, &msg[..]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -125,11 +125,11 @@ impl LintStore {
|
||||
match (sess, from_plugin) {
|
||||
// We load builtin lints first, so a duplicate is a compiler bug.
|
||||
// Use early_error when handling -W help with no crate.
|
||||
(None, _) => early_error(&msg[]),
|
||||
(Some(sess), false) => sess.bug(&msg[]),
|
||||
(None, _) => early_error(&msg[..]),
|
||||
(Some(sess), false) => sess.bug(&msg[..]),
|
||||
|
||||
// A duplicate name from a plugin is a user error.
|
||||
(Some(sess), true) => sess.err(&msg[]),
|
||||
(Some(sess), true) => sess.err(&msg[..]),
|
||||
}
|
||||
}
|
||||
|
||||
@ -150,11 +150,11 @@ impl LintStore {
|
||||
match (sess, from_plugin) {
|
||||
// We load builtin lints first, so a duplicate is a compiler bug.
|
||||
// Use early_error when handling -W help with no crate.
|
||||
(None, _) => early_error(&msg[]),
|
||||
(Some(sess), false) => sess.bug(&msg[]),
|
||||
(None, _) => early_error(&msg[..]),
|
||||
(Some(sess), false) => sess.bug(&msg[..]),
|
||||
|
||||
// A duplicate name from a plugin is a user error.
|
||||
(Some(sess), true) => sess.err(&msg[]),
|
||||
(Some(sess), true) => sess.err(&msg[..]),
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -251,8 +251,8 @@ impl LintStore {
|
||||
let warning = format!("lint {} has been renamed to {}",
|
||||
lint_name, new_name);
|
||||
match span {
|
||||
Some(span) => sess.span_warn(span, &warning[]),
|
||||
None => sess.warn(&warning[]),
|
||||
Some(span) => sess.span_warn(span, &warning[..]),
|
||||
None => sess.warn(&warning[..]),
|
||||
};
|
||||
Some(lint_id)
|
||||
}
|
||||
@ -262,13 +262,13 @@ impl LintStore {
|
||||
|
||||
pub fn process_command_line(&mut self, sess: &Session) {
|
||||
for &(ref lint_name, level) in &sess.opts.lint_opts {
|
||||
match self.find_lint(&lint_name[], sess, None) {
|
||||
match self.find_lint(&lint_name[..], sess, None) {
|
||||
Some(lint_id) => self.set_level(lint_id, (level, CommandLine)),
|
||||
None => {
|
||||
match self.lint_groups.iter().map(|(&x, pair)| (x, pair.0.clone()))
|
||||
.collect::<FnvHashMap<&'static str,
|
||||
Vec<LintId>>>()
|
||||
.get(&lint_name[]) {
|
||||
.get(&lint_name[..]) {
|
||||
Some(v) => {
|
||||
v.iter()
|
||||
.map(|lint_id: &LintId|
|
||||
@ -411,15 +411,15 @@ pub fn raw_emit_lint(sess: &Session, lint: &'static Lint,
|
||||
if level == Forbid { level = Deny; }
|
||||
|
||||
match (level, span) {
|
||||
(Warn, Some(sp)) => sess.span_warn(sp, &msg[]),
|
||||
(Warn, None) => sess.warn(&msg[]),
|
||||
(Deny, Some(sp)) => sess.span_err(sp, &msg[]),
|
||||
(Deny, None) => sess.err(&msg[]),
|
||||
(Warn, Some(sp)) => sess.span_warn(sp, &msg[..]),
|
||||
(Warn, None) => sess.warn(&msg[..]),
|
||||
(Deny, Some(sp)) => sess.span_err(sp, &msg[..]),
|
||||
(Deny, None) => sess.err(&msg[..]),
|
||||
_ => sess.bug("impossible level in raw_emit_lint"),
|
||||
}
|
||||
|
||||
if let Some(note) = note {
|
||||
sess.note(¬e[]);
|
||||
sess.note(¬e[..]);
|
||||
}
|
||||
|
||||
if let Some(span) = def {
|
||||
@ -503,7 +503,7 @@ impl<'a, 'tcx> Context<'a, 'tcx> {
|
||||
match self.lints.find_lint(&lint_name, &self.tcx.sess, Some(span)) {
|
||||
Some(lint_id) => vec![(lint_id, level, span)],
|
||||
None => {
|
||||
match self.lints.lint_groups.get(&lint_name[]) {
|
||||
match self.lints.lint_groups.get(&lint_name[..]) {
|
||||
Some(&(ref v, _)) => v.iter()
|
||||
.map(|lint_id: &LintId|
|
||||
(*lint_id, level, span))
|
||||
@ -729,7 +729,7 @@ impl<'a, 'tcx> IdVisitingOperation for Context<'a, 'tcx> {
|
||||
None => {}
|
||||
Some(lints) => {
|
||||
for (lint_id, span, msg) in lints {
|
||||
self.span_lint(lint_id.lint, span, &msg[])
|
||||
self.span_lint(lint_id.lint, span, &msg[..])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -183,7 +183,7 @@ impl<'a> CrateReader<'a> {
|
||||
let name = match *path_opt {
|
||||
Some((ref path_str, _)) => {
|
||||
let name = path_str.to_string();
|
||||
validate_crate_name(Some(self.sess), &name[],
|
||||
validate_crate_name(Some(self.sess), &name[..],
|
||||
Some(i.span));
|
||||
name
|
||||
}
|
||||
@ -321,7 +321,7 @@ impl<'a> CrateReader<'a> {
|
||||
let source = self.sess.cstore.get_used_crate_source(cnum).unwrap();
|
||||
if let Some(locs) = self.sess.opts.externs.get(name) {
|
||||
let found = locs.iter().any(|l| {
|
||||
let l = fs::realpath(&Path::new(&l[])).ok();
|
||||
let l = fs::realpath(&Path::new(&l[..])).ok();
|
||||
source.dylib.as_ref().map(|p| &p.0) == l.as_ref() ||
|
||||
source.rlib.as_ref().map(|p| &p.0) == l.as_ref()
|
||||
});
|
||||
@ -459,8 +459,8 @@ impl<'a> CrateReader<'a> {
|
||||
let mut load_ctxt = loader::Context {
|
||||
sess: self.sess,
|
||||
span: span,
|
||||
ident: &ident[],
|
||||
crate_name: &name[],
|
||||
ident: &ident[..],
|
||||
crate_name: &name[..],
|
||||
hash: None,
|
||||
filesearch: self.sess.host_filesearch(PathKind::Crate),
|
||||
target: &self.sess.host,
|
||||
@ -562,7 +562,7 @@ impl<'a> CrateReader<'a> {
|
||||
name,
|
||||
config::host_triple(),
|
||||
self.sess.opts.target_triple);
|
||||
self.sess.span_err(span, &message[]);
|
||||
self.sess.span_err(span, &message[..]);
|
||||
self.sess.abort_if_errors();
|
||||
}
|
||||
|
||||
@ -575,7 +575,7 @@ impl<'a> CrateReader<'a> {
|
||||
let message = format!("plugin `{}` only found in rlib format, \
|
||||
but must be available in dylib format",
|
||||
name);
|
||||
self.sess.span_err(span, &message[]);
|
||||
self.sess.span_err(span, &message[..]);
|
||||
// No need to abort because the loading code will just ignore this
|
||||
// empty dylib.
|
||||
None
|
||||
|
@ -163,7 +163,7 @@ fn encode_variant_id(rbml_w: &mut Encoder, vid: DefId) {
|
||||
rbml_w.end_tag();
|
||||
|
||||
rbml_w.start_tag(tag_mod_child);
|
||||
rbml_w.wr_str(&s[]);
|
||||
rbml_w.wr_str(&s[..]);
|
||||
rbml_w.end_tag();
|
||||
}
|
||||
|
||||
@ -353,9 +353,9 @@ fn encode_enum_variant_info(ecx: &EncodeContext,
|
||||
let fields = ty::lookup_struct_fields(ecx.tcx, def_id);
|
||||
let idx = encode_info_for_struct(ecx,
|
||||
rbml_w,
|
||||
&fields[],
|
||||
&fields[..],
|
||||
index);
|
||||
encode_struct_fields(rbml_w, &fields[], def_id);
|
||||
encode_struct_fields(rbml_w, &fields[..], def_id);
|
||||
encode_index(rbml_w, idx, write_i64);
|
||||
}
|
||||
}
|
||||
@ -1158,7 +1158,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
|
||||
class itself */
|
||||
let idx = encode_info_for_struct(ecx,
|
||||
rbml_w,
|
||||
&fields[],
|
||||
&fields[..],
|
||||
index);
|
||||
|
||||
/* Index the class*/
|
||||
@ -1181,7 +1181,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
|
||||
/* Encode def_ids for each field and method
|
||||
for methods, write all the stuff get_trait_method
|
||||
needs to know*/
|
||||
encode_struct_fields(rbml_w, &fields[], def_id);
|
||||
encode_struct_fields(rbml_w, &fields[..], def_id);
|
||||
|
||||
encode_inlined_item(ecx, rbml_w, IIItemRef(item));
|
||||
|
||||
|
@ -322,7 +322,7 @@ impl<'a> Context<'a> {
|
||||
&Some(ref r) => format!("{} which `{}` depends on",
|
||||
message, r.ident)
|
||||
};
|
||||
self.sess.span_err(self.span, &message[]);
|
||||
self.sess.span_err(self.span, &message[..]);
|
||||
|
||||
if self.rejected_via_triple.len() > 0 {
|
||||
let mismatches = self.rejected_via_triple.iter();
|
||||
@ -404,7 +404,7 @@ impl<'a> Context<'a> {
|
||||
None => return FileDoesntMatch,
|
||||
Some(file) => file,
|
||||
};
|
||||
let (hash, rlib) = if file.starts_with(&rlib_prefix[]) &&
|
||||
let (hash, rlib) = if file.starts_with(&rlib_prefix[..]) &&
|
||||
file.ends_with(".rlib") {
|
||||
(&file[(rlib_prefix.len()) .. (file.len() - ".rlib".len())],
|
||||
true)
|
||||
@ -413,7 +413,7 @@ impl<'a> Context<'a> {
|
||||
(&file[(dylib_prefix.len()) .. (file.len() - dypair.1.len())],
|
||||
false)
|
||||
} else {
|
||||
if file.starts_with(&staticlib_prefix[]) &&
|
||||
if file.starts_with(&staticlib_prefix[..]) &&
|
||||
file.ends_with(".a") {
|
||||
staticlibs.push(CrateMismatch {
|
||||
path: path.clone(),
|
||||
@ -627,7 +627,7 @@ impl<'a> Context<'a> {
|
||||
let mut rlibs = HashMap::new();
|
||||
let mut dylibs = HashMap::new();
|
||||
{
|
||||
let locs = locs.iter().map(|l| Path::new(&l[])).filter(|loc| {
|
||||
let locs = locs.iter().map(|l| Path::new(&l[..])).filter(|loc| {
|
||||
if !loc.exists() {
|
||||
sess.err(&format!("extern location for {} does not exist: {}",
|
||||
self.crate_name, loc.display())[]);
|
||||
@ -645,8 +645,8 @@ impl<'a> Context<'a> {
|
||||
return true
|
||||
} else {
|
||||
let (ref prefix, ref suffix) = dylibname;
|
||||
if file.starts_with(&prefix[]) &&
|
||||
file.ends_with(&suffix[]) {
|
||||
if file.starts_with(&prefix[..]) &&
|
||||
file.ends_with(&suffix[..]) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
@ -641,7 +641,7 @@ fn parse_abi_set(st: &mut PState) -> abi::Abi {
|
||||
assert_eq!(next(st), '[');
|
||||
scan(st, |c| c == ']', |bytes| {
|
||||
let abi_str = str::from_utf8(bytes).unwrap();
|
||||
abi::lookup(&abi_str[]).expect(abi_str)
|
||||
abi::lookup(&abi_str[..]).expect(abi_str)
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -134,7 +134,7 @@ pub fn decode_inlined_item<'tcx>(cdata: &cstore::crate_metadata,
|
||||
// Do an Option dance to use the path after it is moved below.
|
||||
let s = ast_map::path_to_string(path.iter().cloned());
|
||||
path_as_str = Some(s);
|
||||
path_as_str.as_ref().map(|x| &x[])
|
||||
path_as_str.as_ref().map(|x| &x[..])
|
||||
});
|
||||
let mut ast_dsr = reader::Decoder::new(ast_doc);
|
||||
let from_id_range = Decodable::decode(&mut ast_dsr).unwrap();
|
||||
|
@ -92,7 +92,7 @@ impl<'a, 'ast> dot::Labeller<'a, Node<'a>, Edge<'a>> for LabelledCFG<'a, 'ast> {
|
||||
let s = replace_newline_with_backslash_l(s);
|
||||
label.push_str(&format!("exiting scope_{} {}",
|
||||
i,
|
||||
&s[])[]);
|
||||
&s[..])[]);
|
||||
}
|
||||
dot::LabelText::EscStr(label.into_cow())
|
||||
}
|
||||
|
@ -200,7 +200,7 @@ fn check_expr(cx: &mut MatchCheckCtxt, ex: &ast::Expr) {
|
||||
}
|
||||
|
||||
// Fourth, check for unreachable arms.
|
||||
check_arms(cx, &inlined_arms[], source);
|
||||
check_arms(cx, &inlined_arms[..], source);
|
||||
|
||||
// Finally, check if the whole match expression is exhaustive.
|
||||
// Check for empty enum, because is_useful only works on inhabited types.
|
||||
@ -291,7 +291,7 @@ fn check_arms(cx: &MatchCheckCtxt,
|
||||
for pat in pats {
|
||||
let v = vec![&**pat];
|
||||
|
||||
match is_useful(cx, &seen, &v[], LeaveOutWitness) {
|
||||
match is_useful(cx, &seen, &v[..], LeaveOutWitness) {
|
||||
NotUseful => {
|
||||
match source {
|
||||
ast::MatchSource::IfLetDesugar { .. } => {
|
||||
@ -351,7 +351,7 @@ fn raw_pat<'a>(p: &'a Pat) -> &'a Pat {
|
||||
fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, matrix: &Matrix, source: ast::MatchSource) {
|
||||
match is_useful(cx, matrix, &[DUMMY_WILD_PAT], ConstructWitness) {
|
||||
UsefulWithWitness(pats) => {
|
||||
let witness = match &pats[] {
|
||||
let witness = match &pats[..] {
|
||||
[ref witness] => &**witness,
|
||||
[] => DUMMY_WILD_PAT,
|
||||
_ => unreachable!()
|
||||
@ -360,7 +360,7 @@ fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, matrix: &Matrix, source: ast:
|
||||
ast::MatchSource::ForLoopDesugar => {
|
||||
// `witness` has the form `Some(<head>)`, peel off the `Some`
|
||||
let witness = match witness.node {
|
||||
ast::PatEnum(_, Some(ref pats)) => match &pats[] {
|
||||
ast::PatEnum(_, Some(ref pats)) => match &pats[..] {
|
||||
[ref pat] => &**pat,
|
||||
_ => unreachable!(),
|
||||
},
|
||||
@ -664,7 +664,7 @@ fn is_useful(cx: &MatchCheckCtxt,
|
||||
UsefulWithWitness(pats) => UsefulWithWitness({
|
||||
let arity = constructor_arity(cx, &c, left_ty);
|
||||
let mut result = {
|
||||
let pat_slice = &pats[];
|
||||
let pat_slice = &pats[..];
|
||||
let subpats: Vec<_> = (0..arity).map(|i| {
|
||||
pat_slice.get(i).map_or(DUMMY_WILD_PAT, |p| &**p)
|
||||
}).collect();
|
||||
@ -711,10 +711,10 @@ fn is_useful_specialized(cx: &MatchCheckCtxt, &Matrix(ref m): &Matrix,
|
||||
witness: WitnessPreference) -> Usefulness {
|
||||
let arity = constructor_arity(cx, &ctor, lty);
|
||||
let matrix = Matrix(m.iter().filter_map(|r| {
|
||||
specialize(cx, &r[], &ctor, 0, arity)
|
||||
specialize(cx, &r[..], &ctor, 0, arity)
|
||||
}).collect());
|
||||
match specialize(cx, v, &ctor, 0, arity) {
|
||||
Some(v) => is_useful(cx, &matrix, &v[], witness),
|
||||
Some(v) => is_useful(cx, &matrix, &v[..], witness),
|
||||
None => NotUseful
|
||||
}
|
||||
}
|
||||
|
@ -62,7 +62,7 @@ fn lookup_variant_by_id<'a>(tcx: &'a ty::ctxt,
|
||||
None => None,
|
||||
Some(ast_map::NodeItem(it)) => match it.node {
|
||||
ast::ItemEnum(ast::EnumDef { ref variants }, _) => {
|
||||
variant_expr(&variants[], variant_def.node)
|
||||
variant_expr(&variants[..], variant_def.node)
|
||||
}
|
||||
_ => None
|
||||
},
|
||||
@ -83,7 +83,7 @@ fn lookup_variant_by_id<'a>(tcx: &'a ty::ctxt,
|
||||
// NOTE this doesn't do the right thing, it compares inlined
|
||||
// NodeId's to the original variant_def's NodeId, but they
|
||||
// come from different crates, so they will likely never match.
|
||||
variant_expr(&variants[], variant_def.node).map(|e| e.id)
|
||||
variant_expr(&variants[..], variant_def.node).map(|e| e.id)
|
||||
}
|
||||
_ => None
|
||||
},
|
||||
@ -209,7 +209,7 @@ pub fn const_expr_to_pat(tcx: &ty::ctxt, expr: &Expr, span: Span) -> P<ast::Pat>
|
||||
pub fn eval_const_expr(tcx: &ty::ctxt, e: &Expr) -> const_val {
|
||||
match eval_const_expr_partial(tcx, e, None) {
|
||||
Ok(r) => r,
|
||||
Err(s) => tcx.sess.span_fatal(e.span, &s[])
|
||||
Err(s) => tcx.sess.span_fatal(e.span, &s[..])
|
||||
}
|
||||
}
|
||||
|
||||
@ -552,14 +552,14 @@ pub fn compare_lit_exprs<'tcx>(tcx: &ty::ctxt<'tcx>,
|
||||
let a = match eval_const_expr_partial(tcx, a, ty_hint) {
|
||||
Ok(a) => a,
|
||||
Err(s) => {
|
||||
tcx.sess.span_err(a.span, &s[]);
|
||||
tcx.sess.span_err(a.span, &s[..]);
|
||||
return None;
|
||||
}
|
||||
};
|
||||
let b = match eval_const_expr_partial(tcx, b, ty_hint) {
|
||||
Ok(b) => b,
|
||||
Err(s) => {
|
||||
tcx.sess.span_err(b.span, &s[]);
|
||||
tcx.sess.span_err(b.span, &s[..]);
|
||||
return None;
|
||||
}
|
||||
};
|
||||
|
@ -312,7 +312,7 @@ impl<'a, 'tcx, O:DataFlowOperator> DataFlowContext<'a, 'tcx, O> {
|
||||
let mut t = on_entry.to_vec();
|
||||
self.apply_gen_kill(cfgidx, &mut t);
|
||||
temp_bits = t;
|
||||
&temp_bits[]
|
||||
&temp_bits[..]
|
||||
}
|
||||
};
|
||||
debug!("{} each_bit_for_node({:?}, cfgidx={:?}) bits={}",
|
||||
@ -421,7 +421,7 @@ impl<'a, 'tcx, O:DataFlowOperator> DataFlowContext<'a, 'tcx, O> {
|
||||
let bits = &mut self.kills[start.. end];
|
||||
debug!("{} add_kills_from_flow_exits flow_exit={:?} bits={} [before]",
|
||||
self.analysis_name, flow_exit, mut_bits_to_string(bits));
|
||||
bits.clone_from_slice(&orig_kills[]);
|
||||
bits.clone_from_slice(&orig_kills[..]);
|
||||
debug!("{} add_kills_from_flow_exits flow_exit={:?} bits={} [after]",
|
||||
self.analysis_name, flow_exit, mut_bits_to_string(bits));
|
||||
}
|
||||
|
@ -321,7 +321,7 @@ fn has_allow_dead_code_or_lang_attr(attrs: &[ast::Attribute]) -> bool {
|
||||
for attr in lint::gather_attrs(attrs) {
|
||||
match attr {
|
||||
Ok((ref name, lint::Allow, _))
|
||||
if &name[] == dead_code => return true,
|
||||
if &name[..] == dead_code => return true,
|
||||
_ => (),
|
||||
}
|
||||
}
|
||||
|
@ -1166,7 +1166,7 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,'tcx,TYPER> {
|
||||
let msg = format!("Pattern has unexpected def: {:?} and type {}",
|
||||
def,
|
||||
cmt_pat.ty.repr(tcx));
|
||||
tcx.sess.span_bug(pat.span, &msg[])
|
||||
tcx.sess.span_bug(pat.span, &msg[..])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -142,7 +142,7 @@ pub trait Combine<'tcx> : Sized {
|
||||
for _ in a_regions {
|
||||
invariance.push(ty::Invariant);
|
||||
}
|
||||
&invariance[]
|
||||
&invariance[..]
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -200,9 +200,9 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> {
|
||||
ref trace_origins,
|
||||
ref same_regions) => {
|
||||
if !same_regions.is_empty() {
|
||||
self.report_processed_errors(&var_origins[],
|
||||
&trace_origins[],
|
||||
&same_regions[]);
|
||||
self.report_processed_errors(&var_origins[..],
|
||||
&trace_origins[..],
|
||||
&same_regions[..]);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -824,7 +824,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> {
|
||||
let parent = self.tcx.map.get_parent(scope_id);
|
||||
let parent_node = self.tcx.map.find(parent);
|
||||
let taken = lifetimes_in_scope(self.tcx, scope_id);
|
||||
let life_giver = LifeGiver::with_taken(&taken[]);
|
||||
let life_giver = LifeGiver::with_taken(&taken[..]);
|
||||
let node_inner = match parent_node {
|
||||
Some(ref node) => match *node {
|
||||
ast_map::NodeItem(ref item) => {
|
||||
@ -942,7 +942,7 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> {
|
||||
}
|
||||
expl_self_opt = self.rebuild_expl_self(expl_self_opt, lifetime,
|
||||
&anon_nums, ®ion_names);
|
||||
inputs = self.rebuild_args_ty(&inputs[], lifetime,
|
||||
inputs = self.rebuild_args_ty(&inputs[..], lifetime,
|
||||
&anon_nums, ®ion_names);
|
||||
output = self.rebuild_output(&output, lifetime, &anon_nums, ®ion_names);
|
||||
ty_params = self.rebuild_ty_params(ty_params, lifetime,
|
||||
@ -1426,7 +1426,7 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> {
|
||||
opt_explicit_self, generics);
|
||||
let msg = format!("consider using an explicit lifetime \
|
||||
parameter as shown: {}", suggested_fn);
|
||||
self.tcx.sess.span_help(span, &msg[]);
|
||||
self.tcx.sess.span_help(span, &msg[..]);
|
||||
}
|
||||
|
||||
fn report_inference_failure(&self,
|
||||
@ -1771,7 +1771,7 @@ impl LifeGiver {
|
||||
s.push_str(&num_to_string(self.counter.get())[]);
|
||||
if !self.taken.contains(&s) {
|
||||
lifetime = name_to_dummy_lifetime(
|
||||
token::str_to_ident(&s[]).name);
|
||||
token::str_to_ident(&s[..]).name);
|
||||
self.generated.borrow_mut().push(lifetime);
|
||||
break;
|
||||
}
|
||||
|
@ -977,7 +977,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> {
self.expansion(&mut var_data);
self.contraction(&mut var_data);
let values =
self.extract_values_and_collect_conflicts(&var_data[],
self.extract_values_and_collect_conflicts(&var_data[..],
errors);
self.collect_concrete_region_errors(&values, errors);
values
|
@ -149,7 +149,7 @@ impl<'a, 'v> Visitor<'v> for LanguageItemCollector<'a> {
fn visit_item(&mut self, item: &ast::Item) {
match extract(&item.attrs) {
Some(value) => {
let item_index = self.item_refs.get(&value[]).map(|x| *x);
let item_index = self.item_refs.get(&value[..]).map(|x| *x);
|
match item_index {
Some(item_index) => {
|
@ -1119,7 +1119,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
// Uninteresting cases: just propagate in rev exec order
|
ast::ExprVec(ref exprs) => {
self.propagate_through_exprs(&exprs[], succ)
self.propagate_through_exprs(&exprs[..], succ)
}
|
ast::ExprRepeat(ref element, ref count) => {
@ -1143,7 +1143,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
} else {
succ
};
let succ = self.propagate_through_exprs(&args[], succ);
let succ = self.propagate_through_exprs(&args[..], succ);
self.propagate_through_expr(&**f, succ)
}
|
@ -1156,11 +1156,11 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
} else {
succ
};
self.propagate_through_exprs(&args[], succ)
self.propagate_through_exprs(&args[..], succ)
}
|
ast::ExprTup(ref exprs) => {
self.propagate_through_exprs(&exprs[], succ)
self.propagate_through_exprs(&exprs[..], succ)
}
|
ast::ExprBinary(op, ref l, ref r) if ast_util::lazy_binop(op.node) => {
|
@ -998,7 +998,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
|
let all_bounds =
util::transitive_bounds(
self.tcx(), &caller_trait_refs[]);
self.tcx(), &caller_trait_refs[..]);
|
let matching_bounds =
all_bounds.filter(
|
@ -2331,7 +2331,7 @@ impl ClosureKind {
};
match result {
Ok(trait_did) => trait_did,
Err(err) => cx.sess.fatal(&err[]),
Err(err) => cx.sess.fatal(&err[..]),
}
}
}
@ -2661,7 +2661,7 @@ impl FlagComputation {
}
|
&ty_tup(ref ts) => {
self.add_tys(&ts[]);
self.add_tys(&ts[..]);
}
|
&ty_bare_fn(_, ref f) => {
@ -3447,7 +3447,7 @@ pub fn type_contents<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> TypeContents {
ty_struct(did, substs) => {
let flds = struct_fields(cx, did, substs);
let mut res =
TypeContents::union(&flds[],
TypeContents::union(&flds[..],
|f| tc_mt(cx, f.mt, cache));
|
if !lookup_repr_hints(cx, did).contains(&attr::ReprExtern) {
@ -3470,14 +3470,14 @@ pub fn type_contents<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> TypeContents {
}
|
ty_tup(ref tys) => {
TypeContents::union(&tys[],
TypeContents::union(&tys[..],
|ty| tc_ty(cx, *ty, cache))
}
|
ty_enum(did, substs) => {
let variants = substd_enum_variants(cx, did, substs);
let mut res =
TypeContents::union(&variants[], |variant| {
TypeContents::union(&variants[..], |variant| {
TypeContents::union(&variant.args[],
|arg_ty| {
tc_ty(cx, *arg_ty, cache)
@ -4940,7 +4940,7 @@ pub fn provided_trait_methods<'tcx>(cx: &ctxt<'tcx>, id: ast::DefId)
match item.node {
ItemTrait(_, _, _, ref ms) => {
let (_, p) =
ast_util::split_trait_methods(&ms[]);
ast_util::split_trait_methods(&ms[..]);
p.iter()
.map(|m| {
match impl_or_trait_item(
@ -6625,7 +6625,7 @@ pub fn with_freevars<T, F>(tcx: &ty::ctxt, fid: ast::NodeId, f: F) -> T where
{
match tcx.freevars.borrow().get(&fid) {
None => f(&[]),
Some(d) => f(&d[])
Some(d) => f(&d[..])
}
}
|
@ -55,7 +55,7 @@ pub fn check_crate(krate: &ast::Crate,
|
pub fn link_name(attrs: &[ast::Attribute]) -> Option<InternedString> {
lang_items::extract(attrs).and_then(|name| {
$(if &name[] == stringify!($name) {
$(if &name[..] == stringify!($name) {
Some(InternedString::new(stringify!($sym)))
} else)* {
None
|
@ -111,19 +111,19 @@ impl<'a> PluginLoader<'a> {
// inside this crate, so continue would spew "macro undefined"
// errors
Err(err) => {
self.sess.span_fatal(span, &err[])
self.sess.span_fatal(span, &err[..])
}
};
|
unsafe {
let registrar =
match lib.symbol(&symbol[]) {
match lib.symbol(&symbol[..]) {
Ok(registrar) => {
mem::transmute::<*mut u8,PluginRegistrarFun>(registrar)
}
// again fatal if we can't register macros
Err(err) => {
self.sess.span_fatal(span, &err[])
self.sess.span_fatal(span, &err[..])
}
};
|
@ -629,7 +629,7 @@ pub fn build_configuration(sess: &Session) -> ast::CrateConfig {
append_configuration(&mut user_cfg, InternedString::new("test"))
}
let mut v = user_cfg.into_iter().collect::<Vec<_>>();
v.push_all(&default_cfg[]);
v.push_all(&default_cfg[..]);
v
}
|
@ -824,7 +824,7 @@ pub fn parse_cfgspecs(cfgspecs: Vec<String> ) -> ast::CrateConfig {
pub fn build_session_options(matches: &getopts::Matches) -> Options {
let unparsed_crate_types = matches.opt_strs("crate-type");
let crate_types = parse_crate_types_from_list(unparsed_crate_types)
.unwrap_or_else(|e| early_error(&e[]));
.unwrap_or_else(|e| early_error(&e[..]));
|
let mut lint_opts = vec!();
let mut describe_lints = false;
@ -923,7 +923,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
|
let mut search_paths = SearchPaths::new();
for s in &matches.opt_strs("L") {
search_paths.add_path(&s[]);
search_paths.add_path(&s[..]);
}
|
let libs = matches.opt_strs("l").into_iter().map(|s| {
@ -981,7 +981,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options {
--debuginfo");
}
|
let color = match matches.opt_str("color").as_ref().map(|s| &s[]) {
let color = match matches.opt_str("color").as_ref().map(|s| &s[..]) {
Some("auto") => Auto,
Some("always") => Always,
Some("never") => Never,
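The `opt_str("color")` hunk above relies on an idiom this commit uses throughout: turn an `Option<String>` into an `Option<&str>` with `.as_ref().map(|s| &s[..])` so the match arms can compare against plain string literals. A small self-contained sketch of that idiom, not taken from the patch (the `ColorConfig` enum and the accepted values here are invented for illustration):

```rust
#[derive(Debug, PartialEq)]
enum ColorConfig { Auto, Always, Never }

// Borrow the optional owned String as an Option<&str> so the match arms
// can be written against string literals.
fn parse_color(flag: Option<String>) -> ColorConfig {
    match flag.as_ref().map(|s| &s[..]) {
        Some("always") => ColorConfig::Always,
        Some("never") => ColorConfig::Never,
        Some("auto") | None => ColorConfig::Auto,
        Some(other) => panic!("unknown color setting: {}", other),
    }
}

fn main() {
    assert_eq!(parse_color(Some("never".to_string())), ColorConfig::Never);
    assert_eq!(parse_color(None), ColorConfig::Auto);
}
```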
@ -1119,7 +1119,7 @@ mod test {
let sessopts = build_session_options(matches);
let sess = build_session(sessopts, None, registry);
let cfg = build_configuration(&sess);
assert!((attr::contains_name(&cfg[], "test")));
assert!((attr::contains_name(&cfg[..], "test")));
}
|
// When the user supplies --test and --cfg test, don't implicitly add
|
@ -75,13 +75,13 @@ impl Session {
}
pub fn span_err(&self, sp: Span, msg: &str) {
match split_msg_into_multilines(msg) {
Some(msg) => self.diagnostic().span_err(sp, &msg[]),
Some(msg) => self.diagnostic().span_err(sp, &msg[..]),
None => self.diagnostic().span_err(sp, msg)
}
}
pub fn span_err_with_code(&self, sp: Span, msg: &str, code: &str) {
match split_msg_into_multilines(msg) {
Some(msg) => self.diagnostic().span_err_with_code(sp, &msg[], code),
Some(msg) => self.diagnostic().span_err_with_code(sp, &msg[..], code),
None => self.diagnostic().span_err_with_code(sp, msg, code)
}
}
|
@ -48,7 +48,7 @@ fn test_lev_distance() {
for c in (0u32..MAX as u32)
.filter_map(|i| from_u32(i))
.map(|i| i.to_string()) {
assert_eq!(lev_distance(&c[], &c[]), 0);
assert_eq!(lev_distance(&c[..], &c[..]), 0);
}
|
let a = "\nMäry häd ä little lämb\n\nLittle lämb\n";
|
@ -292,7 +292,7 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String {
|
||||
Some(def_id) => {
|
||||
s.push_str(" {");
|
||||
let path_str = ty::item_path_str(cx, def_id);
|
||||
s.push_str(&path_str[]);
|
||||
s.push_str(&path_str[..]);
|
||||
s.push_str("}");
|
||||
}
|
||||
None => { }
|
||||
@ -376,7 +376,7 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String {
|
||||
.iter()
|
||||
.map(|elem| ty_to_string(cx, *elem))
|
||||
.collect::<Vec<_>>();
|
||||
match &strs[] {
|
||||
match &strs[..] {
|
||||
[ref string] => format!("({},)", string),
|
||||
strs => format!("({})", strs.connect(", "))
|
||||
}
|
||||
@ -625,7 +625,7 @@ impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for [T] {
|
||||
|
||||
impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for OwnedSlice<T> {
|
||||
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
|
||||
repr_vec(tcx, &self[])
|
||||
repr_vec(tcx, &self[..])
|
||||
}
|
||||
}
|
||||
|
||||
@ -633,7 +633,7 @@ impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for OwnedSlice<T> {
|
||||
// autoderef cannot convert the &[T] handler
|
||||
impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for Vec<T> {
|
||||
fn repr(&self, tcx: &ctxt<'tcx>) -> String {
|
||||
repr_vec(tcx, &self[])
|
||||
repr_vec(tcx, &self[..])
|
||||
}
|
||||
}
|
||||
|
||||
@ -673,7 +673,7 @@ impl<'tcx> UserString<'tcx> for TraitAndProjections<'tcx> {
|
||||
&base,
|
||||
trait_ref.substs,
|
||||
trait_ref.def_id,
|
||||
&projection_bounds[],
|
||||
&projection_bounds[..],
|
||||
|| ty::lookup_trait_def(tcx, trait_ref.def_id).generics.clone())
|
||||
}
|
||||
}
|
||||
@ -1259,7 +1259,7 @@ impl<'tcx, T> UserString<'tcx> for ty::Binder<T>
|
||||
}
|
||||
})
|
||||
});
|
||||
let names: Vec<_> = names.iter().map(|s| &s[]).collect();
|
||||
let names: Vec<_> = names.iter().map(|s| &s[..]).collect();
|
||||
|
||||
let value_str = unbound_value.user_string(tcx);
|
||||
if names.len() == 0 {
|
||||
|
@ -53,7 +53,7 @@ fn run_ar(handler: &ErrorHandler, maybe_ar_prog: &Option<String>,
|
||||
args: &str, cwd: Option<&Path>,
|
||||
paths: &[&Path]) -> ProcessOutput {
|
||||
let ar = match *maybe_ar_prog {
|
||||
Some(ref ar) => &ar[],
|
||||
Some(ref ar) => &ar[..],
|
||||
None => "ar"
|
||||
};
|
||||
let mut cmd = Command::new(ar);
|
||||
@ -84,7 +84,7 @@ fn run_ar(handler: &ErrorHandler, maybe_ar_prog: &Option<String>,
|
||||
o
|
||||
},
|
||||
Err(e) => {
|
||||
handler.err(&format!("could not exec `{}`: {}", &ar[],
|
||||
handler.err(&format!("could not exec `{}`: {}", &ar[..],
|
||||
e)[]);
|
||||
handler.abort_if_errors();
|
||||
panic!("rustc::back::archive::run_ar() should not reach this point");
|
||||
@ -101,10 +101,10 @@ pub fn find_library(name: &str, osprefix: &str, ossuffix: &str,
|
||||
|
||||
for path in search_paths {
|
||||
debug!("looking for {} inside {:?}", name, path.display());
|
||||
let test = path.join(&oslibname[]);
|
||||
let test = path.join(&oslibname[..]);
|
||||
if test.exists() { return test }
|
||||
if oslibname != unixlibname {
|
||||
let test = path.join(&unixlibname[]);
|
||||
let test = path.join(&unixlibname[..]);
|
||||
if test.exists() { return test }
|
||||
}
|
||||
}
|
||||
@ -192,12 +192,12 @@ impl<'a> ArchiveBuilder<'a> {
|
||||
// as simple comparison is not enough - there
|
||||
// might be also an extra name suffix
|
||||
let obj_start = format!("{}", name);
|
||||
let obj_start = &obj_start[];
|
||||
let obj_start = &obj_start[..];
|
||||
// Ignoring all bytecode files, no matter of
|
||||
// name
|
||||
let bc_ext = ".bytecode.deflate";
|
||||
|
||||
self.add_archive(rlib, &name[], |fname: &str| {
|
||||
self.add_archive(rlib, &name[..], |fname: &str| {
|
||||
let skip_obj = lto && fname.starts_with(obj_start)
|
||||
&& fname.ends_with(".o");
|
||||
skip_obj || fname.ends_with(bc_ext) || fname == METADATA_FILENAME
|
||||
@ -234,7 +234,7 @@ impl<'a> ArchiveBuilder<'a> {
|
||||
// allow running `ar s file.a` to update symbols only.
|
||||
if self.should_update_symbols {
|
||||
run_ar(self.archive.handler, &self.archive.maybe_ar_prog,
|
||||
"s", Some(self.work_dir.path()), &args[]);
|
||||
"s", Some(self.work_dir.path()), &args[..]);
|
||||
}
|
||||
return self.archive;
|
||||
}
|
||||
@ -254,7 +254,7 @@ impl<'a> ArchiveBuilder<'a> {
|
||||
// Add the archive members seen so far, without updating the
|
||||
// symbol table (`S`).
|
||||
run_ar(self.archive.handler, &self.archive.maybe_ar_prog,
|
||||
"cruS", Some(self.work_dir.path()), &args[]);
|
||||
"cruS", Some(self.work_dir.path()), &args[..]);
|
||||
|
||||
args.clear();
|
||||
args.push(&abs_dst);
|
||||
@ -269,7 +269,7 @@ impl<'a> ArchiveBuilder<'a> {
|
||||
// necessary.
|
||||
let flags = if self.should_update_symbols { "crus" } else { "cruS" };
|
||||
run_ar(self.archive.handler, &self.archive.maybe_ar_prog,
|
||||
flags, Some(self.work_dir.path()), &args[]);
|
||||
flags, Some(self.work_dir.path()), &args[..]);
|
||||
|
||||
self.archive
|
||||
}
|
||||
@ -312,7 +312,7 @@ impl<'a> ArchiveBuilder<'a> {
|
||||
} else {
|
||||
filename
|
||||
};
|
||||
let new_filename = self.work_dir.path().join(&filename[]);
|
||||
let new_filename = self.work_dir.path().join(&filename[..]);
|
||||
try!(fs::rename(file, &new_filename));
|
||||
self.members.push(Path::new(filename));
|
||||
}
|
||||
|
@ -44,8 +44,8 @@ pub fn get_rpath_flags<F, G>(config: RPathConfig<F, G>) -> Vec<String> where
|
||||
l.map(|p| p.clone())
|
||||
}).collect::<Vec<_>>();
|
||||
|
||||
let rpaths = get_rpaths(config, &libs[]);
|
||||
flags.push_all(&rpaths_to_flags(&rpaths[])[]);
|
||||
let rpaths = get_rpaths(config, &libs[..]);
|
||||
flags.push_all(&rpaths_to_flags(&rpaths[..])[]);
|
||||
flags
|
||||
}
|
||||
|
||||
@ -82,14 +82,14 @@ fn get_rpaths<F, G>(mut config: RPathConfig<F, G>, libs: &[Path]) -> Vec<String>
|
||||
}
|
||||
}
|
||||
|
||||
log_rpaths("relative", &rel_rpaths[]);
|
||||
log_rpaths("fallback", &fallback_rpaths[]);
|
||||
log_rpaths("relative", &rel_rpaths[..]);
|
||||
log_rpaths("fallback", &fallback_rpaths[..]);
|
||||
|
||||
let mut rpaths = rel_rpaths;
|
||||
rpaths.push_all(&fallback_rpaths[]);
|
||||
rpaths.push_all(&fallback_rpaths[..]);
|
||||
|
||||
// Remove duplicates
|
||||
let rpaths = minimize_rpaths(&rpaths[]);
|
||||
let rpaths = minimize_rpaths(&rpaths[..]);
|
||||
return rpaths;
|
||||
}
|
||||
|
||||
@ -139,7 +139,7 @@ fn minimize_rpaths(rpaths: &[String]) -> Vec<String> {
|
||||
let mut set = HashSet::new();
|
||||
let mut minimized = Vec::new();
|
||||
for rpath in rpaths {
|
||||
if set.insert(&rpath[]) {
|
||||
if set.insert(&rpath[..]) {
|
||||
minimized.push(rpath.clone());
|
||||
}
|
||||
}
|
||||
|
@ -254,18 +254,18 @@ impl Target {
|
||||
macro_rules! key {
|
||||
($key_name:ident) => ( {
|
||||
let name = (stringify!($key_name)).replace("_", "-");
|
||||
obj.find(&name[]).map(|o| o.as_string()
|
||||
obj.find(&name[..]).map(|o| o.as_string()
|
||||
.map(|s| base.options.$key_name = s.to_string()));
|
||||
} );
|
||||
($key_name:ident, bool) => ( {
|
||||
let name = (stringify!($key_name)).replace("_", "-");
|
||||
obj.find(&name[])
|
||||
obj.find(&name[..])
|
||||
.map(|o| o.as_boolean()
|
||||
.map(|s| base.options.$key_name = s));
|
||||
} );
|
||||
($key_name:ident, list) => ( {
|
||||
let name = (stringify!($key_name)).replace("_", "-");
|
||||
obj.find(&name[]).map(|o| o.as_array()
|
||||
obj.find(&name[..]).map(|o| o.as_array()
|
||||
.map(|v| base.options.$key_name = v.iter()
|
||||
.map(|a| a.as_string().unwrap().to_string()).collect()
|
||||
)
|
||||
|
@ -656,7 +656,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> {
|
||||
&self.bccx.loan_path_to_string(move_path)[])
|
||||
};
|
||||
|
||||
self.bccx.span_err(span, &err_message[]);
|
||||
self.bccx.span_err(span, &err_message[..]);
|
||||
self.bccx.span_note(
|
||||
loan_span,
|
||||
&format!("borrow of `{}` occurs here",
|
||||
|
@ -38,7 +38,7 @@ enum Fragment {
// This represents the collection of all but one of the elements
// from an array at the path described by the move path index.
// Note that attached MovePathIndex should have mem_categorization
// of InteriorElement (i.e. array dereference `&foo[]`).
// of InteriorElement (i.e. array dereference `&foo[..]`).
AllButOneFrom(MovePathIndex),
}
|
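For context on the notation discussed in the comment above: `&foo[..]` indexes with the full range and borrows the whole contents as a slice, which is exactly what the removed `&foo[]` sugar used to express. A minimal sketch of the pattern, not part of this patch (the function and variable names are invented):

```rust
fn sum(values: &[i32]) -> i32 {
    values.iter().fold(0, |acc, &x| acc + x)
}

fn double_all(values: &mut [i32]) {
    for v in values.iter_mut() {
        *v *= 2;
    }
}

fn main() {
    let mut numbers = vec![1, 2, 3];
    // `&mut numbers[..]` borrows the whole Vec<i32> as a &mut [i32].
    double_all(&mut numbers[..]);
    // `&numbers[..]` borrows it immutably as a &[i32], where `&numbers[]` was written before.
    assert_eq!(sum(&numbers[..]), 12);
}
```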
@ -198,11 +198,11 @@ pub fn fixup_fragment_sets<'tcx>(this: &MoveData<'tcx>, tcx: &ty::ctxt<'tcx>) {
|
||||
// First, filter out duplicates
|
||||
moved.sort();
|
||||
moved.dedup();
|
||||
debug!("fragments 1 moved: {:?}", path_lps(&moved[]));
|
||||
debug!("fragments 1 moved: {:?}", path_lps(&moved[..]));
|
||||
|
||||
assigned.sort();
|
||||
assigned.dedup();
|
||||
debug!("fragments 1 assigned: {:?}", path_lps(&assigned[]));
|
||||
debug!("fragments 1 assigned: {:?}", path_lps(&assigned[..]));
|
||||
|
||||
// Second, build parents from the moved and assigned.
|
||||
for m in &moved {
|
||||
@ -222,14 +222,14 @@ pub fn fixup_fragment_sets<'tcx>(this: &MoveData<'tcx>, tcx: &ty::ctxt<'tcx>) {
|
||||
|
||||
parents.sort();
|
||||
parents.dedup();
|
||||
debug!("fragments 2 parents: {:?}", path_lps(&parents[]));
|
||||
debug!("fragments 2 parents: {:?}", path_lps(&parents[..]));
|
||||
|
||||
// Third, filter the moved and assigned fragments down to just the non-parents
|
||||
moved.retain(|f| non_member(*f, &parents[]));
|
||||
debug!("fragments 3 moved: {:?}", path_lps(&moved[]));
|
||||
moved.retain(|f| non_member(*f, &parents[..]));
|
||||
debug!("fragments 3 moved: {:?}", path_lps(&moved[..]));
|
||||
|
||||
assigned.retain(|f| non_member(*f, &parents[]));
|
||||
debug!("fragments 3 assigned: {:?}", path_lps(&assigned[]));
|
||||
assigned.retain(|f| non_member(*f, &parents[..]));
|
||||
debug!("fragments 3 assigned: {:?}", path_lps(&assigned[..]));
|
||||
|
||||
// Fourth, build the leftover from the moved, assigned, and parents.
|
||||
for m in &moved {
|
||||
@ -247,16 +247,16 @@ pub fn fixup_fragment_sets<'tcx>(this: &MoveData<'tcx>, tcx: &ty::ctxt<'tcx>) {
|
||||
|
||||
unmoved.sort();
|
||||
unmoved.dedup();
|
||||
debug!("fragments 4 unmoved: {:?}", frag_lps(&unmoved[]));
|
||||
debug!("fragments 4 unmoved: {:?}", frag_lps(&unmoved[..]));
|
||||
|
||||
// Fifth, filter the leftover fragments down to its core.
|
||||
unmoved.retain(|f| match *f {
|
||||
AllButOneFrom(_) => true,
|
||||
Just(mpi) => non_member(mpi, &parents[]) &&
|
||||
non_member(mpi, &moved[]) &&
|
||||
non_member(mpi, &assigned[])
|
||||
Just(mpi) => non_member(mpi, &parents[..]) &&
|
||||
non_member(mpi, &moved[..]) &&
|
||||
non_member(mpi, &assigned[..])
|
||||
});
|
||||
debug!("fragments 5 unmoved: {:?}", frag_lps(&unmoved[]));
|
||||
debug!("fragments 5 unmoved: {:?}", frag_lps(&unmoved[..]));
|
||||
|
||||
// Swap contents back in.
|
||||
fragments.unmoved_fragments = unmoved;
|
||||
@ -437,7 +437,7 @@ fn add_fragment_siblings_for_extension<'tcx>(this: &MoveData<'tcx>,
|
||||
let msg = format!("type {} ({:?}) is not fragmentable",
|
||||
parent_ty.repr(tcx), sty_and_variant_info);
|
||||
let opt_span = origin_id.and_then(|id|tcx.map.opt_span(id));
|
||||
tcx.sess.opt_span_bug(opt_span, &msg[])
|
||||
tcx.sess.opt_span_bug(opt_span, &msg[..])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -137,7 +137,7 @@ fn borrowck_fn(this: &mut BorrowckCtxt,
|
||||
check_loans::check_loans(this,
|
||||
&loan_dfcx,
|
||||
flowed_moves,
|
||||
&all_loans[],
|
||||
&all_loans[..],
|
||||
id,
|
||||
decl,
|
||||
body);
|
||||
|
@ -89,7 +89,7 @@ impl<'a, 'tcx> DataflowLabeller<'a, 'tcx> {
|
||||
set.push_str(", ");
|
||||
}
|
||||
let loan_str = self.borrowck_ctxt.loan_path_to_string(&*lp);
|
||||
set.push_str(&loan_str[]);
|
||||
set.push_str(&loan_str[..]);
|
||||
saw_some = true;
|
||||
true
|
||||
});
|
||||
|
@ -85,7 +85,7 @@ pub fn compile_input(sess: Session,
|
||||
let expanded_crate
|
||||
= match phase_2_configure_and_expand(&sess,
|
||||
krate,
|
||||
&id[],
|
||||
&id[..],
|
||||
addl_plugins) {
|
||||
None => return,
|
||||
Some(k) => k
|
||||
@ -99,20 +99,20 @@ pub fn compile_input(sess: Session,
|
||||
&sess,
|
||||
outdir,
|
||||
&expanded_crate,
|
||||
&id[]));
|
||||
&id[..]));
|
||||
|
||||
let mut forest = ast_map::Forest::new(expanded_crate);
|
||||
let arenas = ty::CtxtArenas::new();
|
||||
let ast_map = assign_node_ids_and_map(&sess, &mut forest);
|
||||
|
||||
write_out_deps(&sess, input, &outputs, &id[]);
|
||||
write_out_deps(&sess, input, &outputs, &id[..]);
|
||||
|
||||
controller_entry_point!(after_write_deps,
|
||||
CompileState::state_after_write_deps(input,
|
||||
&sess,
|
||||
outdir,
|
||||
&ast_map,
|
||||
&id[]));
|
||||
&id[..]));
|
||||
|
||||
let analysis = phase_3_run_analysis_passes(sess,
|
||||
ast_map,
|
||||
|
@ -272,7 +272,7 @@ impl<'a> CompilerCalls<'a> for RustcDefaultCalls {
|
||||
-> Compilation {
|
||||
match matches.opt_str("explain") {
|
||||
Some(ref code) => {
|
||||
match descriptions.find_description(&code[]) {
|
||||
match descriptions.find_description(&code[..]) {
|
||||
Some(ref description) => {
|
||||
println!("{}", description);
|
||||
}
|
||||
@ -582,7 +582,7 @@ Available lint options:
|
||||
for lint in lints {
|
||||
let name = lint.name_lower().replace("_", "-");
|
||||
println!(" {} {:7.7} {}",
|
||||
padded(&name[]), lint.default_level.as_str(), lint.desc);
|
||||
padded(&name[..]), lint.default_level.as_str(), lint.desc);
|
||||
}
|
||||
println!("\n");
|
||||
};
|
||||
@ -612,7 +612,7 @@ Available lint options:
|
||||
let desc = to.into_iter().map(|x| x.as_str().replace("_", "-"))
|
||||
.collect::<Vec<String>>().connect(", ");
|
||||
println!(" {} {}",
|
||||
padded(&name[]), desc);
|
||||
padded(&name[..]), desc);
|
||||
}
|
||||
println!("\n");
|
||||
};
|
||||
@ -678,7 +678,7 @@ pub fn handle_options(mut args: Vec<String>) -> Option<getopts::Matches> {
|
||||
}
|
||||
|
||||
let matches =
|
||||
match getopts::getopts(&args[], &config::optgroups()[]) {
|
||||
match getopts::getopts(&args[..], &config::optgroups()[]) {
|
||||
Ok(m) => m,
|
||||
Err(f_stable_attempt) => {
|
||||
// redo option parsing, including unstable options this time,
|
||||
@ -803,7 +803,7 @@ pub fn monitor<F:FnOnce()+Send+'static>(f: F) {
|
||||
"run with `RUST_BACKTRACE=1` for a backtrace".to_string(),
|
||||
];
|
||||
for note in &xs {
|
||||
emitter.emit(None, &note[], None, diagnostic::Note)
emitter.emit(None, &note[..], None, diagnostic::Note)
|
||||
}
|
||||
|
||||
match r.read_to_string() {
|
||||
|
@ -383,7 +383,7 @@ impl UserIdentifiedItem {
|
||||
ItemViaNode(node_id) =>
|
||||
NodesMatchingDirect(Some(node_id).into_iter()),
|
||||
ItemViaPath(ref parts) =>
|
||||
NodesMatchingSuffix(map.nodes_matching_suffix(&parts[])),
|
||||
NodesMatchingSuffix(map.nodes_matching_suffix(&parts[..])),
|
||||
}
|
||||
}
|
||||
|
||||
@ -395,7 +395,7 @@ impl UserIdentifiedItem {
|
||||
user_option,
|
||||
self.reconstructed_input(),
|
||||
is_wrong_because);
|
||||
sess.fatal(&message[])
|
||||
sess.fatal(&message[..])
|
||||
};
|
||||
|
||||
let mut saw_node = ast::DUMMY_NODE_ID;
|
||||
@ -522,7 +522,7 @@ pub fn pretty_print_input(sess: Session,
|
||||
let is_expanded = needs_expansion(&ppm);
|
||||
let compute_ast_map = needs_ast_map(&ppm, &opt_uii);
|
||||
let krate = if compute_ast_map {
|
||||
match driver::phase_2_configure_and_expand(&sess, krate, &id[], None) {
|
||||
match driver::phase_2_configure_and_expand(&sess, krate, &id[..], None) {
|
||||
None => return,
|
||||
Some(k) => k
|
||||
}
|
||||
@ -541,7 +541,7 @@ pub fn pretty_print_input(sess: Session,
|
||||
};
|
||||
|
||||
let src_name = driver::source_name(input);
|
||||
let src = sess.codemap().get_filemap(&src_name[])
|
||||
let src = sess.codemap().get_filemap(&src_name[..])
|
||||
.src.as_bytes().to_vec();
|
||||
let mut rdr = MemReader::new(src);
|
||||
|
||||
@ -632,8 +632,8 @@ pub fn pretty_print_input(sess: Session,
|
||||
// point to what was found, if there's an
|
||||
// accessible span.
|
||||
match ast_map.opt_span(nodeid) {
|
||||
Some(sp) => sess.span_fatal(sp, &message[]),
|
||||
None => sess.fatal(&message[])
|
||||
Some(sp) => sess.span_fatal(sp, &message[..]),
|
||||
None => sess.fatal(&message[..])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -278,7 +278,7 @@ impl<'a, 'tcx> Env<'a, 'tcx> {
|
||||
|
||||
pub fn t_param(&self, space: subst::ParamSpace, index: u32) -> Ty<'tcx> {
|
||||
let name = format!("T{}", index);
|
||||
ty::mk_param(self.infcx.tcx, space, index, token::intern(&name[]))
|
||||
ty::mk_param(self.infcx.tcx, space, index, token::intern(&name[..]))
|
||||
}
|
||||
|
||||
pub fn re_early_bound(&self,
|
||||
|
@ -585,10 +585,10 @@ impl<'a, 'tcx> PrivacyVisitor<'a, 'tcx> {
|
||||
match result {
|
||||
None => true,
|
||||
Some((span, msg, note)) => {
|
||||
self.tcx.sess.span_err(span, &msg[]);
|
||||
self.tcx.sess.span_err(span, &msg[..]);
|
||||
match note {
|
||||
Some((span, msg)) => {
|
||||
self.tcx.sess.span_note(span, &msg[])
|
||||
self.tcx.sess.span_note(span, &msg[..])
|
||||
}
|
||||
None => {},
|
||||
}
|
||||
@ -690,7 +690,7 @@ impl<'a, 'tcx> PrivacyVisitor<'a, 'tcx> {
|
||||
UnnamedField(idx) => format!("field #{} of {} is private",
|
||||
idx + 1, struct_desc),
|
||||
};
|
||||
self.tcx.sess.span_err(span, &msg[]);
|
||||
self.tcx.sess.span_err(span, &msg[..]);
|
||||
}
|
||||
|
||||
// Given the ID of a method, checks to ensure it's in scope.
|
||||
|
@ -1072,7 +1072,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
&import_directive.module_path[],
|
||||
import_directive.subclass),
|
||||
help);
|
||||
self.resolve_error(span, &msg[]);
|
||||
self.resolve_error(span, &msg[..]);
|
||||
}
|
||||
Indeterminate => break, // Bail out. We'll come around next time.
|
||||
Success(()) => () // Good. Continue.
|
||||
@ -1102,7 +1102,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
.iter()
|
||||
.map(|seg| seg.identifier.name)
|
||||
.collect();
|
||||
self.names_to_string(&names[])
|
||||
self.names_to_string(&names[..])
|
||||
}
|
||||
|
||||
fn import_directive_subclass_to_string(&mut self,
|
||||
@ -1166,7 +1166,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
let module_path = &import_directive.module_path;
|
||||
|
||||
debug!("(resolving import for module) resolving import `{}::...` in `{}`",
|
||||
self.names_to_string(&module_path[]),
|
||||
self.names_to_string(&module_path[..]),
|
||||
self.module_to_string(&*module_));
|
||||
|
||||
// First, resolve the module path for the directive, if necessary.
|
||||
@ -1175,7 +1175,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
Some((self.graph_root.get_module(), LastMod(AllPublic)))
|
||||
} else {
|
||||
match self.resolve_module_path(module_.clone(),
|
||||
&module_path[],
|
||||
&module_path[..],
|
||||
DontUseLexicalScope,
|
||||
import_directive.span,
|
||||
ImportSearch) {
|
||||
@ -1768,7 +1768,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
ValueNS => "value",
|
||||
},
|
||||
&token::get_name(name));
|
||||
span_err!(self.session, import_span, E0252, "{}", &msg[]);
|
||||
span_err!(self.session, import_span, E0252, "{}", &msg[..]);
|
||||
}
|
||||
Some(_) | None => {}
|
||||
}
|
||||
@ -1783,7 +1783,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
if !name_bindings.defined_in_namespace_with(namespace, IMPORTABLE) {
|
||||
let msg = format!("`{}` is not directly importable",
|
||||
token::get_name(name));
|
||||
span_err!(self.session, import_span, E0253, "{}", &msg[]);
|
||||
span_err!(self.session, import_span, E0253, "{}", &msg[..]);
|
||||
}
|
||||
}
|
||||
|
||||
@ -1804,7 +1804,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
crate in this module \
|
||||
(maybe you meant `use {0}::*`?)",
|
||||
&token::get_name(name));
|
||||
span_err!(self.session, import_span, E0254, "{}", &msg[]);
|
||||
span_err!(self.session, import_span, E0254, "{}", &msg[..]);
|
||||
}
|
||||
Some(_) | None => {}
|
||||
}
|
||||
@ -1826,7 +1826,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
let msg = format!("import `{}` conflicts with value \
|
||||
in this module",
|
||||
&token::get_name(name));
|
||||
span_err!(self.session, import_span, E0255, "{}", &msg[]);
|
||||
span_err!(self.session, import_span, E0255, "{}", &msg[..]);
|
||||
if let Some(span) = value.value_span {
|
||||
self.session.span_note(span,
|
||||
"conflicting value here");
|
||||
@ -1844,7 +1844,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
let msg = format!("import `{}` conflicts with type in \
|
||||
this module",
|
||||
&token::get_name(name));
|
||||
span_err!(self.session, import_span, E0256, "{}", &msg[]);
|
||||
span_err!(self.session, import_span, E0256, "{}", &msg[..]);
|
||||
if let Some(span) = ty.type_span {
|
||||
self.session.span_note(span,
|
||||
"note conflicting type here")
|
||||
@ -1857,7 +1857,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
let msg = format!("inherent implementations \
|
||||
are only allowed on types \
|
||||
defined in the current module");
|
||||
span_err!(self.session, span, E0257, "{}", &msg[]);
|
||||
span_err!(self.session, span, E0257, "{}", &msg[..]);
|
||||
self.session.span_note(import_span,
|
||||
"import from other module here")
|
||||
}
|
||||
@ -1866,7 +1866,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
let msg = format!("import `{}` conflicts with existing \
|
||||
submodule",
|
||||
&token::get_name(name));
|
||||
span_err!(self.session, import_span, E0258, "{}", &msg[]);
|
||||
span_err!(self.session, import_span, E0258, "{}", &msg[..]);
|
||||
if let Some(span) = ty.type_span {
|
||||
self.session.span_note(span,
|
||||
"note conflicting module here")
|
||||
@ -1953,7 +1953,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
let segment_name = token::get_name(name);
|
||||
let module_name = self.module_to_string(&*search_module);
|
||||
let mut span = span;
|
||||
let msg = if "???" == &module_name[] {
|
||||
let msg = if "???" == &module_name[..] {
|
||||
span.hi = span.lo + Pos::from_usize(segment_name.len());
|
||||
|
||||
match search_parent_externals(name,
|
||||
@ -2066,7 +2066,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
match module_prefix_result {
|
||||
Failed(None) => {
|
||||
let mpath = self.names_to_string(module_path);
|
||||
let mpath = &mpath[];
|
||||
let mpath = &mpath[..];
|
||||
match mpath.rfind(':') {
|
||||
Some(idx) => {
|
||||
let msg = format!("Could not find `{}` in `{}`",
|
||||
@ -2369,11 +2369,11 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
let mut containing_module;
|
||||
let mut i;
|
||||
let first_module_path_string = token::get_name(module_path[0]);
|
||||
if "self" == &first_module_path_string[] {
|
||||
if "self" == &first_module_path_string[..] {
|
||||
containing_module =
|
||||
self.get_nearest_normal_module_parent_or_self(module_);
|
||||
i = 1;
|
||||
} else if "super" == &first_module_path_string[] {
|
||||
} else if "super" == &first_module_path_string[..] {
|
||||
containing_module =
|
||||
self.get_nearest_normal_module_parent_or_self(module_);
|
||||
i = 0; // We'll handle `super` below.
|
||||
@ -2384,7 +2384,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
// Now loop through all the `super`s we find.
|
||||
while i < module_path.len() {
|
||||
let string = token::get_name(module_path[i]);
|
||||
if "super" != &string[] {
|
||||
if "super" != &string[..] {
|
||||
break
|
||||
}
|
||||
debug!("(resolving module prefix) resolving `super` at {}",
|
||||
@ -2515,7 +2515,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
} else {
|
||||
let err = format!("unresolved import (maybe you meant `{}::*`?)",
|
||||
sn);
|
||||
self.resolve_error((*imports)[index].span, &err[]);
|
||||
self.resolve_error((*imports)[index].span, &err[..]);
|
||||
}
|
||||
}
|
||||
|
||||
@ -2853,7 +2853,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
generics,
|
||||
implemented_traits,
|
||||
&**self_type,
|
||||
&impl_items[]);
|
||||
&impl_items[..]);
|
||||
}
|
||||
|
||||
ItemTrait(_, ref generics, ref bounds, ref trait_items) => {
|
||||
@ -3196,7 +3196,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
};
|
||||
|
||||
let msg = format!("attempt to {} a nonexistent trait `{}`", usage_str, path_str);
|
||||
self.resolve_error(trait_reference.path.span, &msg[]);
|
||||
self.resolve_error(trait_reference.path.span, &msg[..]);
|
||||
}
|
||||
Some(def) => {
|
||||
match def {
|
||||
@ -3624,7 +3624,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
None => {
|
||||
let msg = format!("use of undeclared type name `{}`",
|
||||
self.path_names_to_string(path));
|
||||
self.resolve_error(ty.span, &msg[]);
|
||||
self.resolve_error(ty.span, &msg[..]);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -3825,7 +3825,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
def: {:?}", result);
|
||||
let msg = format!("`{}` does not name a structure",
|
||||
self.path_names_to_string(path));
|
||||
self.resolve_error(path.span, &msg[]);
|
||||
self.resolve_error(path.span, &msg[..]);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -4082,7 +4082,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
let last_private;
|
||||
let module = self.current_module.clone();
|
||||
match self.resolve_module_path(module,
|
||||
&module_path[],
|
||||
&module_path[..],
|
||||
UseLexicalScope,
|
||||
path.span,
|
||||
PathSearch) {
|
||||
@ -4140,7 +4140,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
let containing_module;
|
||||
let last_private;
|
||||
match self.resolve_module_path_from_root(root_module,
|
||||
&module_path[],
|
||||
&module_path[..],
|
||||
0,
|
||||
path.span,
|
||||
PathSearch,
|
||||
@ -4150,7 +4150,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
Some((span, msg)) => (span, msg),
|
||||
None => {
|
||||
let msg = format!("Use of undeclared module `::{}`",
|
||||
self.names_to_string(&module_path[]));
|
||||
self.names_to_string(&module_path[..]));
|
||||
(path.span, msg)
|
||||
}
|
||||
};
|
||||
@ -4309,7 +4309,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
}
|
||||
} else {
|
||||
match this.resolve_module_path(root,
|
||||
&name_path[],
|
||||
&name_path[..],
|
||||
UseLexicalScope,
|
||||
span,
|
||||
PathSearch) {
|
||||
@ -4347,7 +4347,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
let name_path = path.segments.iter().map(|seg| seg.identifier.name).collect::<Vec<_>>();
|
||||
|
||||
// Look for a method in the current self type's impl module.
|
||||
match get_module(self, path.span, &name_path[]) {
|
||||
match get_module(self, path.span, &name_path[..]) {
|
||||
Some(module) => match module.children.borrow().get(&name) {
|
||||
Some(binding) => {
|
||||
let p_str = self.path_names_to_string(&path);
|
||||
@ -4568,7 +4568,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
|
||||
def: {:?}", result);
|
||||
let msg = format!("`{}` does not name a structure",
|
||||
self.path_names_to_string(path));
|
||||
self.resolve_error(path.span, &msg[]);
|
||||
self.resolve_error(path.span, &msg[..]);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -127,7 +127,7 @@ pub fn find_crate_name(sess: Option<&Session>,
|
||||
attrs: &[ast::Attribute],
|
||||
input: &Input) -> String {
|
||||
let validate = |s: String, span: Option<Span>| {
|
||||
creader::validate_crate_name(sess, &s[], span);
|
||||
creader::validate_crate_name(sess, &s[..], span);
|
||||
s
|
||||
};
|
||||
|
||||
@ -141,11 +141,11 @@ pub fn find_crate_name(sess: Option<&Session>,
|
||||
if let Some(sess) = sess {
|
||||
if let Some(ref s) = sess.opts.crate_name {
|
||||
if let Some((attr, ref name)) = attr_crate_name {
|
||||
if *s != &name[] {
|
||||
if *s != &name[..] {
|
||||
let msg = format!("--crate-name and #[crate_name] are \
|
||||
required to match, but `{}` != `{}`",
|
||||
s, name);
|
||||
sess.span_err(attr.span, &msg[]);
|
||||
sess.span_err(attr.span, &msg[..]);
|
||||
}
|
||||
}
|
||||
return validate(s.clone(), None);
|
||||
@ -195,7 +195,7 @@ fn symbol_hash<'tcx>(tcx: &ty::ctxt<'tcx>,
|
||||
symbol_hasher.input_str("-");
|
||||
symbol_hasher.input_str(link_meta.crate_hash.as_str());
|
||||
for meta in &*tcx.sess.crate_metadata.borrow() {
|
||||
symbol_hasher.input_str(&meta[]);
|
||||
symbol_hasher.input_str(&meta[..]);
|
||||
}
|
||||
symbol_hasher.input_str("-");
|
||||
symbol_hasher.input_str(&encoder::encoded_ty(tcx, t)[]);
|
||||
@ -262,7 +262,7 @@ pub fn sanitize(s: &str) -> String {
|
||||
if result.len() > 0 &&
|
||||
result.as_bytes()[0] != '_' as u8 &&
|
||||
! (result.as_bytes()[0] as char).is_xid_start() {
|
||||
return format!("_{}", &result[]);
|
||||
return format!("_{}", &result[..]);
|
||||
}
|
||||
|
||||
return result;
|
||||
@ -331,17 +331,17 @@ pub fn mangle_exported_name<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, path: PathEl
|
||||
hash.push(EXTRA_CHARS.as_bytes()[extra2] as char);
|
||||
hash.push(EXTRA_CHARS.as_bytes()[extra3] as char);
|
||||
|
||||
exported_name(path, &hash[])
|
||||
exported_name(path, &hash[..])
|
||||
}
|
||||
|
||||
pub fn mangle_internal_name_by_type_and_seq<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
|
||||
t: Ty<'tcx>,
|
||||
name: &str) -> String {
|
||||
let s = ppaux::ty_to_string(ccx.tcx(), t);
|
||||
let path = [PathName(token::intern(&s[])),
|
||||
let path = [PathName(token::intern(&s[..])),
|
||||
gensym_name(name)];
|
||||
let hash = get_symbol_hash(ccx, t);
|
||||
mangle(path.iter().cloned(), Some(&hash[]))
|
||||
mangle(path.iter().cloned(), Some(&hash[..]))
|
||||
}
|
||||
|
||||
pub fn mangle_internal_name_by_path_and_seq(path: PathElems, flav: &str) -> String {
|
||||
@ -541,7 +541,7 @@ fn link_rlib<'a>(sess: &'a Session,
|
||||
for &(ref l, kind) in &*sess.cstore.get_used_libraries().borrow() {
|
||||
match kind {
|
||||
cstore::NativeStatic => {
|
||||
ab.add_native_library(&l[]).unwrap();
|
||||
ab.add_native_library(&l[..]).unwrap();
|
||||
}
|
||||
cstore::NativeFramework | cstore::NativeUnknown => {}
|
||||
}
|
||||
@ -619,7 +619,7 @@ fn link_rlib<'a>(sess: &'a Session,
|
||||
e)[])
|
||||
};
|
||||
|
||||
let bc_data_deflated = match flate::deflate_bytes(&bc_data[]) {
|
||||
let bc_data_deflated = match flate::deflate_bytes(&bc_data[..]) {
|
||||
Some(compressed) => compressed,
|
||||
None => sess.fatal(&format!("failed to compress bytecode from {}",
|
||||
bc_filename.display())[])
|
||||
@ -678,7 +678,7 @@ fn write_rlib_bytecode_object_v1<T: Writer>(writer: &mut T,
|
||||
try! { writer.write_all(RLIB_BYTECODE_OBJECT_MAGIC) };
|
||||
try! { writer.write_le_u32(1) };
|
||||
try! { writer.write_le_u64(bc_data_deflated_size) };
|
||||
try! { writer.write_all(&bc_data_deflated[]) };
|
||||
try! { writer.write_all(&bc_data_deflated[..]) };
|
||||
|
||||
let number_of_bytes_written_so_far =
|
||||
RLIB_BYTECODE_OBJECT_MAGIC.len() + // magic id
|
||||
@ -733,7 +733,7 @@ fn link_staticlib(sess: &Session, obj_filename: &Path, out_filename: &Path) {
|
||||
continue
|
||||
}
|
||||
};
|
||||
ab.add_rlib(&p, &name[], sess.lto()).unwrap();
|
||||
ab.add_rlib(&p, &name[..], sess.lto()).unwrap();
|
||||
|
||||
let native_libs = csearch::get_native_libraries(&sess.cstore, cnum);
|
||||
all_native_libs.extend(native_libs.into_iter());
|
||||
@ -769,7 +769,7 @@ fn link_natively(sess: &Session, trans: &CrateTranslation, dylib: bool,
|
||||
|
||||
// The invocations of cc share some flags across platforms
|
||||
let pname = get_cc_prog(sess);
|
||||
let mut cmd = Command::new(&pname[]);
|
||||
let mut cmd = Command::new(&pname[..]);
|
||||
|
||||
cmd.args(&sess.target.target.options.pre_link_args[]);
|
||||
link_args(&mut cmd, sess, dylib, tmpdir.path(),
|
||||
@ -798,7 +798,7 @@ fn link_natively(sess: &Session, trans: &CrateTranslation, dylib: bool,
|
||||
sess.note(&format!("{:?}", &cmd)[]);
|
||||
let mut output = prog.error.clone();
|
||||
output.push_all(&prog.output[]);
|
||||
sess.note(str::from_utf8(&output[]).unwrap());
|
||||
sess.note(str::from_utf8(&output[..]).unwrap());
|
||||
sess.abort_if_errors();
|
||||
}
|
||||
debug!("linker stderr:\n{}", String::from_utf8(prog.error).unwrap());
|
||||
@ -868,7 +868,7 @@ fn link_args(cmd: &mut Command,
|
||||
|
||||
let mut v = b"-Wl,-force_load,".to_vec();
|
||||
v.push_all(morestack.as_vec());
|
||||
cmd.arg(&v[]);
|
||||
cmd.arg(&v[..]);
|
||||
} else {
|
||||
cmd.args(&["-Wl,--whole-archive", "-lmorestack", "-Wl,--no-whole-archive"]);
|
||||
}
|
||||
@ -993,7 +993,7 @@ fn link_args(cmd: &mut Command,
|
||||
if sess.opts.cg.rpath {
|
||||
let mut v = "-Wl,-install_name,@rpath/".as_bytes().to_vec();
|
||||
v.push_all(out_filename.filename().unwrap());
|
||||
cmd.arg(&v[]);
|
||||
cmd.arg(&v[..]);
|
||||
}
|
||||
} else {
|
||||
cmd.arg("-shared");
|
||||
@ -1029,7 +1029,7 @@ fn link_args(cmd: &mut Command,
|
||||
// with any #[link_args] attributes found inside the crate
|
||||
let empty = Vec::new();
|
||||
cmd.args(&sess.opts.cg.link_args.as_ref().unwrap_or(&empty)[]);
|
||||
cmd.args(&used_link_args[]);
|
||||
cmd.args(&used_link_args[..]);
|
||||
}
|
||||
|
||||
// # Native library linking
|
||||
@ -1086,14 +1086,14 @@ fn add_local_native_libraries(cmd: &mut Command, sess: &Session) {
|
||||
} else {
|
||||
// -force_load is the OSX equivalent of --whole-archive, but it
|
||||
// involves passing the full path to the library to link.
|
||||
let lib = archive::find_library(&l[],
|
||||
let lib = archive::find_library(&l[..],
|
||||
&sess.target.target.options.staticlib_prefix,
|
||||
&sess.target.target.options.staticlib_suffix,
|
||||
&search_path[],
|
||||
&search_path[..],
|
||||
&sess.diagnostic().handler);
|
||||
let mut v = b"-Wl,-force_load,".to_vec();
|
||||
v.push_all(lib.as_vec());
|
||||
cmd.arg(&v[]);
|
||||
cmd.arg(&v[..]);
|
||||
}
|
||||
}
|
||||
if takes_hints {
|
||||
@ -1106,7 +1106,7 @@ fn add_local_native_libraries(cmd: &mut Command, sess: &Session) {
|
||||
cmd.arg(format!("-l{}", l));
|
||||
}
|
||||
cstore::NativeFramework => {
|
||||
cmd.arg("-framework").arg(&l[]);
|
||||
cmd.arg("-framework").arg(&l[..]);
|
||||
}
|
||||
cstore::NativeStatic => unreachable!(),
|
||||
}
|
||||
@ -1248,7 +1248,7 @@ fn add_upstream_rust_crates(cmd: &mut Command, sess: &Session,
|
||||
|
||||
let mut v = "-l".as_bytes().to_vec();
|
||||
v.push_all(unlib(&sess.target, cratepath.filestem().unwrap()));
|
||||
cmd.arg(&v[]);
|
||||
cmd.arg(&v[..]);
|
||||
}
|
||||
}
|
||||
|
||||
@ -1290,7 +1290,7 @@ fn add_upstream_native_libraries(cmd: &mut Command, sess: &Session) {
|
||||
}
|
||||
cstore::NativeFramework => {
|
||||
cmd.arg("-framework");
|
||||
cmd.arg(&lib[]);
|
||||
cmd.arg(&lib[..]);
|
||||
}
|
||||
cstore::NativeStatic => {
|
||||
sess.bug("statics shouldn't be propagated");
|
||||
|
@ -132,7 +132,7 @@ pub fn run(sess: &session::Session, llmod: ModuleRef,
|
||||
bc_decoded.len() as libc::size_t) {
|
||||
write::llvm_err(sess.diagnostic().handler(),
|
||||
format!("failed to load bc of `{}`",
|
||||
&name[]));
|
||||
&name[..]));
|
||||
}
|
||||
});
|
||||
}
|
||||
|
@ -47,14 +47,14 @@ pub fn llvm_err(handler: &diagnostic::Handler, msg: String) -> ! {
|
||||
unsafe {
|
||||
let cstr = llvm::LLVMRustGetLastError();
|
||||
if cstr == ptr::null() {
|
||||
handler.fatal(&msg[]);
|
||||
handler.fatal(&msg[..]);
|
||||
} else {
|
||||
let err = ffi::c_str_to_bytes(&cstr);
|
||||
let err = String::from_utf8_lossy(err).to_string();
|
||||
libc::free(cstr as *mut _);
|
||||
handler.fatal(&format!("{}: {}",
|
||||
&msg[],
|
||||
&err[])[]);
|
||||
&msg[..],
|
||||
&err[..])[]);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -105,7 +105,7 @@ impl SharedEmitter {
|
||||
Some(ref code) => {
|
||||
handler.emit_with_code(None,
|
||||
&diag.msg[],
|
||||
&code[],
|
||||
&code[..],
|
||||
diag.lvl);
|
||||
},
|
||||
None => {
|
||||
@ -165,7 +165,7 @@ fn get_llvm_opt_level(optimize: config::OptLevel) -> llvm::CodeGenOptLevel {
|
||||
|
||||
fn create_target_machine(sess: &Session) -> TargetMachineRef {
|
||||
let reloc_model_arg = match sess.opts.cg.relocation_model {
|
||||
Some(ref s) => &s[],
|
||||
Some(ref s) => &s[..],
|
||||
None => &sess.target.target.options.relocation_model[]
|
||||
};
|
||||
let reloc_model = match reloc_model_arg {
|
||||
@ -198,7 +198,7 @@ fn create_target_machine(sess: &Session) -> TargetMachineRef {
|
||||
let fdata_sections = ffunction_sections;
|
||||
|
||||
let code_model_arg = match sess.opts.cg.code_model {
|
||||
Some(ref s) => &s[],
|
||||
Some(ref s) => &s[..],
|
||||
None => &sess.target.target.options.code_model[]
|
||||
};
|
||||
|
||||
@ -365,7 +365,7 @@ unsafe extern "C" fn inline_asm_handler(diag: SMDiagnosticRef,
|
||||
let msg = llvm::build_string(|s| llvm::LLVMWriteSMDiagnosticToString(diag, s))
|
||||
.expect("non-UTF8 SMDiagnostic");
|
||||
|
||||
report_inline_asm(cgcx, &msg[], cookie);
|
||||
report_inline_asm(cgcx, &msg[..], cookie);
|
||||
}
|
||||
|
||||
unsafe extern "C" fn diagnostic_handler(info: DiagnosticInfoRef, user: *mut c_void) {
|
||||
@ -711,7 +711,7 @@ pub fn run_passes(sess: &Session,
|
||||
};
|
||||
|
||||
let pname = get_cc_prog(sess);
|
||||
let mut cmd = Command::new(&pname[]);
|
||||
let mut cmd = Command::new(&pname[..]);
|
||||
|
||||
cmd.args(&sess.target.target.options.pre_link_args[]);
|
||||
cmd.arg("-nostdlib");
|
||||
@ -829,12 +829,12 @@ pub fn run_passes(sess: &Session,
|
||||
for i in 0..trans.modules.len() {
|
||||
if modules_config.emit_obj {
|
||||
let ext = format!("{}.o", i);
|
||||
remove(sess, &crate_output.with_extension(&ext[]));
|
||||
remove(sess, &crate_output.with_extension(&ext[..]));
|
||||
}
|
||||
|
||||
if modules_config.emit_bc && !keep_numbered_bitcode {
|
||||
let ext = format!("{}.bc", i);
|
||||
remove(sess, &crate_output.with_extension(&ext[]));
|
||||
remove(sess, &crate_output.with_extension(&ext[..]));
|
||||
}
|
||||
}
|
||||
|
||||
@ -960,7 +960,7 @@ fn run_work_multithreaded(sess: &Session,
|
||||
|
||||
pub fn run_assembler(sess: &Session, outputs: &OutputFilenames) {
|
||||
let pname = get_cc_prog(sess);
|
||||
let mut cmd = Command::new(&pname[]);
|
||||
let mut cmd = Command::new(&pname[..]);
|
||||
|
||||
cmd.arg("-c").arg("-o").arg(outputs.path(config::OutputTypeObject))
|
||||
.arg(outputs.temp_path(config::OutputTypeAssembly));
|
||||
@ -975,7 +975,7 @@ pub fn run_assembler(sess: &Session, outputs: &OutputFilenames) {
|
||||
sess.note(&format!("{:?}", &cmd)[]);
|
||||
let mut note = prog.error.clone();
|
||||
note.push_all(&prog.output[]);
|
||||
sess.note(str::from_utf8(&note[]).unwrap());
sess.note(str::from_utf8(&note[..]).unwrap());
|
||||
sess.abort_if_errors();
|
||||
}
|
||||
},
|
||||
|
@ -155,7 +155,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
|
||||
};
|
||||
self.fmt.sub_mod_ref_str(path.span,
|
||||
*span,
|
||||
&qualname[],
|
||||
&qualname[..],
|
||||
self.cur_scope);
|
||||
}
|
||||
}
|
||||
@ -178,7 +178,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
|
||||
};
|
||||
self.fmt.sub_mod_ref_str(path.span,
|
||||
*span,
|
||||
&qualname[],
|
||||
&qualname[..],
|
||||
self.cur_scope);
|
||||
}
|
||||
}
|
||||
@ -197,7 +197,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
|
||||
let (ref span, ref qualname) = sub_paths[len-2];
|
||||
self.fmt.sub_type_ref_str(path.span,
|
||||
*span,
|
||||
&qualname[]);
|
||||
&qualname[..]);
|
||||
|
||||
// write the other sub-paths
|
||||
if len <= 2 {
|
||||
@ -207,7 +207,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
|
||||
for &(ref span, ref qualname) in sub_paths {
|
||||
self.fmt.sub_mod_ref_str(path.span,
|
||||
*span,
|
||||
&qualname[],
|
||||
&qualname[..],
|
||||
self.cur_scope);
|
||||
}
|
||||
}
|
||||
@ -280,7 +280,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
|
||||
id,
|
||||
qualname,
|
||||
&path_to_string(p)[],
|
||||
&typ[]);
|
||||
&typ[..]);
|
||||
}
|
||||
self.collected_paths.clear();
|
||||
}
|
||||
@ -356,7 +356,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
|
||||
};
|
||||
|
||||
let qualname = format!("{}::{}", qualname, &get_ident(method.pe_ident()));
|
||||
let qualname = &qualname[];
|
||||
let qualname = &qualname[..];
|
||||
|
||||
// record the decl for this def (if it has one)
|
||||
let decl_id = ty::trait_item_of_item(&self.analysis.ty_cx,
|
||||
@ -436,9 +436,9 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
|
||||
Some(sub_span) => self.fmt.field_str(field.span,
|
||||
Some(sub_span),
|
||||
field.node.id,
|
||||
&name[],
|
||||
&qualname[],
|
||||
&typ[],
|
||||
&name[..],
|
||||
&qualname[..],
|
||||
&typ[..],
|
||||
scope_id),
|
||||
None => self.sess.span_bug(field.span,
|
||||
&format!("Could not find sub-span for field {}",
|
||||
@ -470,7 +470,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
|
||||
self.fmt.typedef_str(full_span,
|
||||
Some(*param_ss),
|
||||
param.id,
|
||||
&name[],
|
||||
&name[..],
|
||||
"");
|
||||
}
|
||||
self.visit_generics(generics);
|
||||
@ -487,10 +487,10 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
|
||||
self.fmt.fn_str(item.span,
|
||||
sub_span,
|
||||
item.id,
|
||||
&qualname[],
|
||||
&qualname[..],
|
||||
self.cur_scope);
|
||||
|
||||
self.process_formals(&decl.inputs, &qualname[]);
|
||||
self.process_formals(&decl.inputs, &qualname[..]);
|
||||
|
||||
// walk arg and return types
|
||||
for arg in &decl.inputs {
|
||||
@ -504,7 +504,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
|
||||
// walk the body
|
||||
self.nest(item.id, |v| v.visit_block(&*body));
|
||||
|
||||
self.process_generic_params(ty_params, item.span, &qualname[], item.id);
|
||||
self.process_generic_params(ty_params, item.span, &qualname[..], item.id);
|
||||
}
|
||||
|
||||
fn process_static(&mut self,
|
||||
@ -526,8 +526,8 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
|
||||
sub_span,
|
||||
item.id,
|
||||
&get_ident(item.ident),
|
||||
&qualname[],
|
||||
&value[],
|
||||
&qualname[..],
|
||||
&value[..],
|
||||
&ty_to_string(&*typ)[],
|
||||
self.cur_scope);
|
||||
|
||||
@ -549,7 +549,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
|
||||
sub_span,
|
||||
item.id,
|
||||
&get_ident(item.ident),
|
||||
&qualname[],
|
||||
&qualname[..],
|
||||
"",
|
||||
&ty_to_string(&*typ)[],
|
||||
self.cur_scope);
|
||||
@ -575,17 +575,17 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
|
||||
sub_span,
|
||||
item.id,
|
||||
ctor_id,
|
||||
&qualname[],
|
||||
&qualname[..],
|
||||
self.cur_scope,
|
||||
&val[]);
|
||||
&val[..]);
|
||||
|
||||
// fields
|
||||
for field in &def.fields {
|
||||
self.process_struct_field_def(field, &qualname[], item.id);
|
||||
self.process_struct_field_def(field, &qualname[..], item.id);
|
||||
self.visit_ty(&*field.node.ty);
|
||||
}
|
||||
|
||||
self.process_generic_params(ty_params, item.span, &qualname[], item.id);
|
||||
self.process_generic_params(ty_params, item.span, &qualname[..], item.id);
|
||||
}
|
||||
|
||||
fn process_enum(&mut self,
|
||||
@ -598,9 +598,9 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
|
||||
Some(sub_span) => self.fmt.enum_str(item.span,
|
||||
Some(sub_span),
|
||||
item.id,
|
||||
&enum_name[],
|
||||
&enum_name[..],
|
||||
self.cur_scope,
|
||||
&val[]),
|
||||
&val[..]),
|
||||
None => self.sess.span_bug(item.span,
|
||||
&format!("Could not find subspan for enum {}",
|
||||
enum_name)[]),
|
||||
@ -619,9 +619,9 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
|
||||
self.span.span_for_first_ident(variant.span),
|
||||
variant.node.id,
|
||||
name,
|
||||
&qualname[],
|
||||
&enum_name[],
|
||||
&val[],
|
||||
&qualname[..],
|
||||
&enum_name[..],
|
||||
&val[..],
|
||||
item.id);
|
||||
for arg in args {
|
||||
self.visit_ty(&*arg.ty);
|
||||
@ -637,9 +637,9 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
|
||||
self.span.span_for_first_ident(variant.span),
|
||||
variant.node.id,
|
||||
ctor_id,
|
||||
&qualname[],
|
||||
&enum_name[],
|
||||
&val[],
|
||||
&qualname[..],
|
||||
&enum_name[..],
|
||||
&val[..],
|
||||
item.id);
|
||||
|
||||
for field in &struct_def.fields {
|
||||
@ -650,7 +650,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
self.process_generic_params(ty_params, item.span, &enum_name[], item.id);
|
||||
self.process_generic_params(ty_params, item.span, &enum_name[..], item.id);
|
||||
}
|
||||
|
||||
fn process_impl(&mut self,
|
||||
@ -724,9 +724,9 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
|
||||
self.fmt.trait_str(item.span,
|
||||
sub_span,
|
||||
item.id,
|
||||
&qualname[],
|
||||
&qualname[..],
|
||||
self.cur_scope,
|
||||
&val[]);
|
||||
&val[..]);
|
||||
|
||||
// super-traits
|
||||
for super_bound in &**trait_refs {
|
||||
@ -758,7 +758,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
|
||||
}
|
||||
|
||||
// walk generics and methods
|
||||
self.process_generic_params(generics, item.span, &qualname[], item.id);
|
||||
self.process_generic_params(generics, item.span, &qualname[..], item.id);
|
||||
for method in methods {
|
||||
self.visit_trait_item(method)
|
||||
}
|
||||
@ -776,9 +776,9 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
|
||||
self.fmt.mod_str(item.span,
|
||||
sub_span,
|
||||
item.id,
|
||||
&qualname[],
|
||||
&qualname[..],
|
||||
self.cur_scope,
|
||||
&filename[]);
|
||||
&filename[..]);
|
||||
|
||||
self.nest(item.id, |v| visit::walk_mod(v, m));
|
||||
}
|
||||
@ -990,7 +990,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> {
|
||||
self.cur_scope);
|
||||
|
||||
// walk receiver and args
|
||||
visit::walk_exprs(self, &args[]);
|
||||
visit::walk_exprs(self, &args[..]);
|
||||
}
|
||||
|
||||
fn process_pat(&mut self, p:&ast::Pat) {
|
||||
@ -1164,7 +1164,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> {
|
||||
item.id,
|
||||
cnum,
|
||||
name,
|
||||
&location[],
|
||||
&location[..],
|
||||
self.cur_scope);
|
||||
}
|
||||
ast::ItemFn(ref decl, _, _, ref ty_params, ref body) =>
|
||||
@ -1196,8 +1196,8 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> {
|
||||
self.fmt.typedef_str(item.span,
|
||||
sub_span,
|
||||
item.id,
|
||||
&qualname[],
|
||||
&value[]);
|
||||
&qualname[..],
|
||||
&value[..]);
|
||||
|
||||
self.visit_ty(&**ty);
|
||||
self.process_generic_params(ty_params, item.span, &qualname, item.id);
|
||||
@ -1260,7 +1260,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> {
|
||||
};
|
||||
|
||||
qualname.push_str(&get_ident(method_type.ident));
|
||||
let qualname = &qualname[];
|
||||
let qualname = &qualname[..];
|
||||
|
||||
let sub_span = self.span.sub_span_after_keyword(method_type.span, keywords::Fn);
|
||||
self.fmt.method_decl_str(method_type.span,
|
||||
@ -1401,7 +1401,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> {
|
||||
|
||||
let mut id = String::from_str("$");
|
||||
id.push_str(&ex.id.to_string()[]);
|
||||
self.process_formals(&decl.inputs, &id[]);
|
||||
self.process_formals(&decl.inputs, &id[..]);
|
||||
|
||||
// walk arg and return types
|
||||
for arg in &decl.inputs {
|
||||
@ -1464,7 +1464,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> {
|
||||
Some(p.span),
|
||||
id,
|
||||
&path_to_string(p)[],
|
||||
&value[],
|
||||
&value[..],
|
||||
"")
|
||||
}
|
||||
def::DefVariant(..) | def::DefTy(..) | def::DefStruct(..) => {
|
||||
@ -1520,8 +1520,8 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> {
|
||||
sub_span,
|
||||
id,
|
||||
&path_to_string(p)[],
|
||||
&value[],
|
||||
&typ[]);
|
||||
&value[..],
|
||||
&typ[..]);
|
||||
}
|
||||
self.collected_paths.clear();
|
||||
|
||||
@ -1603,7 +1603,7 @@ pub fn process_crate(sess: &Session,
|
||||
cur_scope: 0
|
||||
};
|
||||
|
||||
visitor.dump_crate_info(&cratename[], krate);
|
||||
visitor.dump_crate_info(&cratename[..], krate);
|
||||
|
||||
visit::walk_crate(&mut visitor, krate);
|
||||
}
|
||||
|
@ -43,7 +43,7 @@ impl Recorder {
|
||||
assert!(self.dump_spans);
|
||||
let result = format!("span,kind,{},{},text,\"{}\"\n",
|
||||
kind, su.extent_str(span), escape(su.snippet(span)));
|
||||
self.record(&result[]);
|
||||
self.record(&result[..]);
|
||||
}
|
||||
}
|
||||
|
||||
@ -170,14 +170,14 @@ impl<'a> FmtStrs<'a> {
|
||||
if s.len() > 1020 {
|
||||
&s[..1020]
|
||||
} else {
|
||||
&s[]
|
||||
&s[..]
|
||||
}
|
||||
});
|
||||
|
||||
let pairs = fields.iter().zip(values);
|
||||
let strs = pairs.map(|(f, v)| format!(",{},\"{}\"", f, escape(String::from_str(v))));
|
||||
Some(strs.fold(String::new(), |mut s, ss| {
|
||||
s.push_str(&ss[]);
|
||||
s.push_str(&ss[..]);
|
||||
s
|
||||
}))
|
||||
}
|
||||
@ -205,9 +205,9 @@ impl<'a> FmtStrs<'a> {
|
||||
};
|
||||
|
||||
let mut result = String::from_str(label);
|
||||
result.push_str(&values_str[]);
|
||||
result.push_str(&values_str[..]);
|
||||
result.push_str("\n");
|
||||
self.recorder.record(&result[]);
|
||||
self.recorder.record(&result[..]);
|
||||
}
|
||||
|
||||
pub fn record_with_span(&mut self,
|
||||
@ -238,7 +238,7 @@ impl<'a> FmtStrs<'a> {
|
||||
None => return,
|
||||
};
|
||||
let result = format!("{},{}{}\n", label, self.span.extent_str(sub_span), values_str);
|
||||
self.recorder.record(&result[]);
|
||||
self.recorder.record(&result[..]);
|
||||
}
|
||||
|
||||
pub fn check_and_record(&mut self,
|
||||
|
@ -566,7 +566,7 @@ fn enter_opt<'a, 'p, 'blk, 'tcx>(
|
||||
param_env: param_env,
|
||||
};
|
||||
enter_match(bcx, dm, m, col, val, |pats|
|
||||
check_match::specialize(&mcx, &pats[], &ctor, col, variant_size)
|
||||
check_match::specialize(&mcx, &pats[..], &ctor, col, variant_size)
|
||||
)
|
||||
}
|
||||
|
||||
@ -987,7 +987,7 @@ fn compile_submatch<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
|
||||
if has_nested_bindings(m, col) {
|
||||
let expanded = expand_nested_bindings(bcx, m, col, val);
|
||||
compile_submatch_continue(bcx,
|
||||
&expanded[],
|
||||
&expanded[..],
|
||||
vals,
|
||||
chk,
|
||||
col,
|
||||
@ -1233,10 +1233,10 @@ fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
|
||||
}
|
||||
let opt_ms = enter_opt(opt_cx, pat_id, dm, m, opt, col, size, val);
|
||||
let mut opt_vals = unpacked;
|
||||
opt_vals.push_all(&vals_left[]);
|
||||
opt_vals.push_all(&vals_left[..]);
|
||||
compile_submatch(opt_cx,
|
||||
&opt_ms[],
|
||||
&opt_vals[],
|
||||
&opt_ms[..],
|
||||
&opt_vals[..],
|
||||
branch_chk.as_ref().unwrap_or(chk),
|
||||
has_genuine_default);
|
||||
}
|
||||
@ -1255,8 +1255,8 @@ fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
|
||||
}
|
||||
_ => {
|
||||
compile_submatch(else_cx,
|
||||
&defaults[],
|
||||
&vals_left[],
|
||||
&defaults[..],
|
||||
&vals_left[..],
|
||||
chk,
|
||||
has_genuine_default);
|
||||
}
|
||||
@ -1468,7 +1468,7 @@ fn trans_match_inner<'blk, 'tcx>(scope_cx: Block<'blk, 'tcx>,
|
||||
&& arm.pats.last().unwrap().node == ast::PatWild(ast::PatWildSingle)
|
||||
});
|
||||
|
||||
compile_submatch(bcx, &matches[], &[discr_datum.val], &chk, has_default);
|
||||
compile_submatch(bcx, &matches[..], &[discr_datum.val], &chk, has_default);
|
||||
|
||||
let mut arm_cxs = Vec::new();
|
||||
for arm_data in &arm_datas {
|
||||
@ -1482,7 +1482,7 @@ fn trans_match_inner<'blk, 'tcx>(scope_cx: Block<'blk, 'tcx>,
|
||||
arm_cxs.push(bcx);
|
||||
}
|
||||
|
||||
bcx = scope_cx.fcx.join_blocks(match_id, &arm_cxs[]);
|
||||
bcx = scope_cx.fcx.join_blocks(match_id, &arm_cxs[..]);
|
||||
return bcx;
|
||||
}
|
||||
|
||||
|
@ -155,7 +155,7 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
t: Ty<'tcx>) -> Repr<'tcx> {
|
||||
match t.sty {
|
||||
ty::ty_tup(ref elems) => {
|
||||
Univariant(mk_struct(cx, &elems[], false, t), false)
|
||||
Univariant(mk_struct(cx, &elems[..], false, t), false)
|
||||
}
|
||||
ty::ty_struct(def_id, substs) => {
|
||||
let fields = ty::lookup_struct_fields(cx.tcx(), def_id);
|
||||
@ -167,13 +167,13 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
let dtor = ty::ty_dtor(cx.tcx(), def_id).has_drop_flag();
|
||||
if dtor { ftys.push(cx.tcx().types.bool); }
|
||||
|
||||
Univariant(mk_struct(cx, &ftys[], packed, t), dtor)
|
||||
Univariant(mk_struct(cx, &ftys[..], packed, t), dtor)
|
||||
}
|
||||
ty::ty_closure(def_id, _, substs) => {
|
||||
let typer = NormalizingClosureTyper::new(cx.tcx());
|
||||
let upvars = typer.closure_upvars(def_id, substs).unwrap();
|
||||
let upvar_types = upvars.iter().map(|u| u.ty).collect::<Vec<_>>();
|
||||
Univariant(mk_struct(cx, &upvar_types[], false, t), false)
|
||||
Univariant(mk_struct(cx, &upvar_types[..], false, t), false)
|
||||
}
|
||||
ty::ty_enum(def_id, substs) => {
|
||||
let cases = get_cases(cx.tcx(), def_id, substs);
|
||||
@ -187,7 +187,7 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
// (Typechecking will reject discriminant-sizing attrs.)
|
||||
assert_eq!(hint, attr::ReprAny);
|
||||
let ftys = if dtor { vec!(cx.tcx().types.bool) } else { vec!() };
|
||||
return Univariant(mk_struct(cx, &ftys[], false, t),
|
||||
return Univariant(mk_struct(cx, &ftys[..], false, t),
|
||||
dtor);
|
||||
}
|
||||
|
||||
@ -219,7 +219,7 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
assert_eq!(hint, attr::ReprAny);
|
||||
let mut ftys = cases[0].tys.clone();
|
||||
if dtor { ftys.push(cx.tcx().types.bool); }
|
||||
return Univariant(mk_struct(cx, &ftys[], false, t),
|
||||
return Univariant(mk_struct(cx, &ftys[..], false, t),
|
||||
dtor);
|
||||
}
|
||||
|
||||
@ -320,10 +320,10 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
let mut ftys = vec!(ty_of_inttype(cx.tcx(), ity));
|
||||
ftys.push_all(&c.tys[]);
|
||||
if dtor { ftys.push(cx.tcx().types.bool); }
|
||||
mk_struct(cx, &ftys[], false, t)
|
||||
mk_struct(cx, &ftys[..], false, t)
|
||||
}).collect();
|
||||
|
||||
ensure_enum_fits_in_address_space(cx, &fields[], t);
|
||||
ensure_enum_fits_in_address_space(cx, &fields[..], t);
|
||||
|
||||
General(ity, fields, dtor)
|
||||
}
|
||||
@ -453,9 +453,9 @@ fn mk_struct<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
.map(|&ty| type_of::sizing_type_of(cx, ty)).collect()
|
||||
};
|
||||
|
||||
ensure_struct_fits_in_address_space(cx, &lltys[], packed, scapegoat);
|
||||
ensure_struct_fits_in_address_space(cx, &lltys[..], packed, scapegoat);
|
||||
|
||||
let llty_rec = Type::struct_(cx, &lltys[], packed);
|
||||
let llty_rec = Type::struct_(cx, &lltys[..], packed);
|
||||
Struct {
|
||||
size: machine::llsize_of_alloc(cx, llty_rec),
|
||||
align: machine::llalign_of_min(cx, llty_rec),
|
||||
@ -659,7 +659,7 @@ fn generic_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
// of the size.
|
||||
//
|
||||
// FIXME #10604: this breaks when vector types are present.
|
||||
let (size, align) = union_size_and_align(&sts[]);
|
||||
let (size, align) = union_size_and_align(&sts[..]);
|
||||
let align_s = align as u64;
|
||||
assert_eq!(size % align_s, 0);
|
||||
let align_units = size / align_s - 1;
|
||||
@ -682,10 +682,10 @@ fn generic_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
Type::array(&discr_ty, align_s / discr_size - 1),
|
||||
fill_ty];
|
||||
match name {
|
||||
None => Type::struct_(cx, &fields[], false),
|
||||
None => Type::struct_(cx, &fields[..], false),
|
||||
Some(name) => {
|
||||
let mut llty = Type::named_struct(cx, name);
|
||||
llty.set_struct_body(&fields[], false);
|
||||
llty.set_struct_body(&fields[..], false);
|
||||
llty
|
||||
}
|
||||
}
|
||||
@ -763,7 +763,7 @@ pub fn trans_get_discr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, r: &Repr<'tcx>,
|
||||
|
||||
fn struct_wrapped_nullable_bitdiscr(bcx: Block, nndiscr: Disr, discrfield: &DiscrField,
|
||||
scrutinee: ValueRef) -> ValueRef {
|
||||
let llptrptr = GEPi(bcx, scrutinee, &discrfield[]);
|
||||
let llptrptr = GEPi(bcx, scrutinee, &discrfield[..]);
|
||||
let llptr = Load(bcx, llptrptr);
|
||||
let cmp = if nndiscr == 0 { IntEQ } else { IntNE };
|
||||
ICmp(bcx, cmp, llptr, C_null(val_ty(llptr)), DebugLoc::None)
|
||||
@ -851,7 +851,7 @@ pub fn trans_set_discr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, r: &Repr<'tcx>,
|
||||
}
|
||||
StructWrappedNullablePointer { nndiscr, ref discrfield, .. } => {
|
||||
if discr != nndiscr {
|
||||
let llptrptr = GEPi(bcx, val, &discrfield[]);
|
||||
let llptrptr = GEPi(bcx, val, &discrfield[..]);
|
||||
let llptrty = val_ty(llptrptr).element_type();
|
||||
Store(bcx, C_null(llptrty), llptrptr)
|
||||
}
|
||||
@ -933,7 +933,7 @@ pub fn struct_field_ptr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, st: &Struct<'tcx>, v
|
||||
let val = if needs_cast {
|
||||
let ccx = bcx.ccx();
|
||||
let fields = st.fields.iter().map(|&ty| type_of::type_of(ccx, ty)).collect::<Vec<_>>();
|
||||
let real_ty = Type::struct_(ccx, &fields[], st.packed);
|
||||
let real_ty = Type::struct_(ccx, &fields[..], st.packed);
|
||||
PointerCast(bcx, val, real_ty.ptr_to())
|
||||
} else {
|
||||
val
|
||||
@ -972,7 +972,7 @@ pub fn fold_variants<'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>,
|
||||
|
||||
let fields = case.fields.iter().map(|&ty|
|
||||
type_of::type_of(bcx.ccx(), ty)).collect::<Vec<_>>();
|
||||
let real_ty = Type::struct_(ccx, &fields[], case.packed);
|
||||
let real_ty = Type::struct_(ccx, &fields[..], case.packed);
|
||||
let variant_value = PointerCast(variant_cx, value, real_ty.ptr_to());
|
||||
|
||||
variant_cx = f(variant_cx, case, variant_value);
|
||||
@ -1045,18 +1045,18 @@ pub fn trans_const<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, r: &Repr<'tcx>, discr
|
||||
}
|
||||
General(ity, ref cases, _) => {
|
||||
let case = &cases[discr as uint];
|
||||
let (max_sz, _) = union_size_and_align(&cases[]);
|
||||
let (max_sz, _) = union_size_and_align(&cases[..]);
|
||||
let lldiscr = C_integral(ll_inttype(ccx, ity), discr as u64, true);
|
||||
let mut f = vec![lldiscr];
|
||||
f.push_all(vals);
|
||||
let mut contents = build_const_struct(ccx, case, &f[]);
|
||||
let mut contents = build_const_struct(ccx, case, &f[..]);
|
||||
contents.push_all(&[padding(ccx, max_sz - case.size)]);
|
||||
C_struct(ccx, &contents[], false)
|
||||
C_struct(ccx, &contents[..], false)
|
||||
}
|
||||
Univariant(ref st, _dro) => {
|
||||
assert!(discr == 0);
|
||||
let contents = build_const_struct(ccx, st, vals);
|
||||
C_struct(ccx, &contents[], st.packed)
|
||||
C_struct(ccx, &contents[..], st.packed)
|
||||
}
|
||||
RawNullablePointer { nndiscr, nnty, .. } => {
|
||||
if discr == nndiscr {
|
||||
@ -1080,7 +1080,7 @@ pub fn trans_const<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, r: &Repr<'tcx>, discr
|
||||
}).collect::<Vec<ValueRef>>();
|
||||
C_struct(ccx, &build_const_struct(ccx,
|
||||
nonnull,
|
||||
&vals[])[],
|
||||
&vals[..])[],
|
||||
false)
|
||||
}
|
||||
}
|
||||
|
@ -71,7 +71,7 @@ pub fn trans_inline_asm<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ia: &ast::InlineAsm)
|
||||
callee::DontAutorefArg)
|
||||
})
|
||||
}).collect::<Vec<_>>();
|
||||
inputs.push_all(&ext_inputs[]);
|
||||
inputs.push_all(&ext_inputs[..]);
|
||||
|
||||
// no failure occurred preparing operands, no need to cleanup
|
||||
fcx.pop_custom_cleanup_scope(temp_scope);
|
||||
@ -91,18 +91,18 @@ pub fn trans_inline_asm<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ia: &ast::InlineAsm)
|
||||
if !clobbers.is_empty() {
|
||||
clobbers.push(',');
|
||||
}
|
||||
clobbers.push_str(&more_clobbers[]);
|
||||
clobbers.push_str(&more_clobbers[..]);
|
||||
}
|
||||
|
||||
// Add the clobbers to our constraints list
|
||||
if clobbers.len() != 0 && constraints.len() != 0 {
|
||||
constraints.push(',');
|
||||
constraints.push_str(&clobbers[]);
|
||||
constraints.push_str(&clobbers[..]);
|
||||
} else {
|
||||
constraints.push_str(&clobbers[]);
|
||||
constraints.push_str(&clobbers[..]);
|
||||
}
|
||||
|
||||
debug!("Asm Constraints: {}", &constraints[]);
|
||||
debug!("Asm Constraints: {}", &constraints[..]);
|
||||
|
||||
let num_outputs = outputs.len();
|
||||
|
||||
@ -112,7 +112,7 @@ pub fn trans_inline_asm<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ia: &ast::InlineAsm)
|
||||
} else if num_outputs == 1 {
|
||||
output_types[0]
|
||||
} else {
|
||||
Type::struct_(bcx.ccx(), &output_types[], false)
|
||||
Type::struct_(bcx.ccx(), &output_types[..], false)
|
||||
};
|
||||
|
||||
let dialect = match ia.dialect {
|
||||
|
@ -247,7 +247,7 @@ fn get_extern_rust_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, fn_ty: Ty<'tcx>,
|
||||
let f = decl_rust_fn(ccx, fn_ty, name);
|
||||
|
||||
let attrs = csearch::get_item_attrs(&ccx.sess().cstore, did);
|
||||
set_llvm_fn_attrs(ccx, &attrs[], f);
|
||||
set_llvm_fn_attrs(ccx, &attrs[..], f);
|
||||
|
||||
ccx.externs().borrow_mut().insert(name.to_string(), f);
|
||||
f
|
||||
@ -523,7 +523,7 @@ pub fn get_res_dtor<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
|
||||
ty::mk_nil(ccx.tcx()));
|
||||
get_extern_fn(ccx,
|
||||
&mut *ccx.externs().borrow_mut(),
|
||||
&name[],
|
||||
&name[..],
|
||||
llvm::CCallConv,
|
||||
llty,
|
||||
dtor_ty)
|
||||
@ -898,14 +898,14 @@ pub fn trans_external_path<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
|
||||
ty::ty_bare_fn(_, ref fn_ty) => {
|
||||
match ccx.sess().target.target.adjust_abi(fn_ty.abi) {
|
||||
Rust | RustCall => {
|
||||
get_extern_rust_fn(ccx, t, &name[], did)
|
||||
get_extern_rust_fn(ccx, t, &name[..], did)
|
||||
}
|
||||
RustIntrinsic => {
|
||||
ccx.sess().bug("unexpected intrinsic in trans_external_path")
|
||||
}
|
||||
_ => {
|
||||
foreign::register_foreign_item_fn(ccx, fn_ty.abi, t,
|
||||
&name[])
|
||||
&name[..])
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -947,7 +947,7 @@ pub fn invoke<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
|
||||
|
||||
let llresult = Invoke(bcx,
|
||||
llfn,
|
||||
&llargs[],
|
||||
&llargs[..],
|
||||
normal_bcx.llbb,
|
||||
landing_pad,
|
||||
Some(attributes),
|
||||
@ -961,7 +961,7 @@ pub fn invoke<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
|
||||
|
||||
let llresult = Call(bcx,
|
||||
llfn,
|
||||
&llargs[],
|
||||
&llargs[..],
|
||||
Some(attributes),
|
||||
debug_loc);
|
||||
return (llresult, bcx);
|
||||
@ -1646,7 +1646,7 @@ fn copy_closure_args_to_allocas<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
|
||||
"argtuple",
|
||||
arg_scope_id));
|
||||
let untupled_arg_types = match monomorphized_arg_types[0].sty {
|
||||
ty::ty_tup(ref types) => &types[],
|
||||
ty::ty_tup(ref types) => &types[..],
|
||||
_ => {
|
||||
bcx.tcx().sess.span_bug(args[0].pat.span,
|
||||
"first arg to `rust-call` ABI function \
|
||||
@ -1834,12 +1834,12 @@ pub fn trans_closure<'a, 'b, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
|
||||
|
||||
let arg_datums = if abi != RustCall {
|
||||
create_datums_for_fn_args(&fcx,
|
||||
&monomorphized_arg_types[])
|
||||
&monomorphized_arg_types[..])
|
||||
} else {
|
||||
create_datums_for_fn_args_under_call_abi(
|
||||
bcx,
|
||||
arg_scope,
|
||||
&monomorphized_arg_types[])
|
||||
&monomorphized_arg_types[..])
|
||||
};
|
||||
|
||||
bcx = match closure_env {
|
||||
@ -1855,7 +1855,7 @@ pub fn trans_closure<'a, 'b, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
|
||||
arg_scope,
|
||||
&decl.inputs[],
|
||||
arg_datums,
|
||||
&monomorphized_arg_types[])
|
||||
&monomorphized_arg_types[..])
|
||||
}
|
||||
};
|
||||
|
||||
@ -2000,7 +2000,7 @@ pub fn trans_named_tuple_constructor<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
|
||||
bcx = expr::trans_adt(bcx,
|
||||
result_ty,
|
||||
disr,
|
||||
&fields[],
|
||||
&fields[..],
|
||||
None,
|
||||
expr::SaveIn(llresult),
|
||||
debug_loc);
|
||||
@ -2070,7 +2070,7 @@ fn trans_enum_variant_or_tuple_like_struct<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx
|
||||
ty::erase_late_bound_regions(
|
||||
ccx.tcx(), &ty::ty_fn_args(ctor_ty));
|
||||
|
||||
let arg_datums = create_datums_for_fn_args(&fcx, &arg_tys[]);
|
||||
let arg_datums = create_datums_for_fn_args(&fcx, &arg_tys[..]);
|
||||
|
||||
if !type_is_zero_size(fcx.ccx, result_ty.unwrap()) {
|
||||
let dest = fcx.get_ret_slot(bcx, result_ty, "eret_slot");
|
||||
@ -2315,7 +2315,7 @@ pub fn trans_item(ccx: &CrateContext, item: &ast::Item) {
|
||||
ast::ItemImpl(_, _, ref generics, _, _, ref impl_items) => {
|
||||
meth::trans_impl(ccx,
|
||||
item.ident,
|
||||
&impl_items[],
|
||||
&impl_items[..],
|
||||
generics,
|
||||
item.id);
|
||||
}
|
||||
@ -2430,7 +2430,7 @@ fn register_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
|
||||
_ => panic!("expected bare rust fn")
|
||||
};
|
||||
|
||||
let llfn = decl_rust_fn(ccx, node_type, &sym[]);
|
||||
let llfn = decl_rust_fn(ccx, node_type, &sym[..]);
|
||||
finish_register_fn(ccx, sp, sym, node_id, llfn);
|
||||
llfn
|
||||
}
|
||||
@ -2475,7 +2475,7 @@ pub fn get_fn_llvm_attributes<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, fn_ty: Ty<
|
||||
|
||||
match fn_sig.inputs[1].sty {
|
||||
ty::ty_tup(ref t_in) => {
|
||||
inputs.push_all(&t_in[]);
|
||||
inputs.push_all(&t_in[..]);
|
||||
inputs
|
||||
}
|
||||
_ => ccx.sess().bug("expected tuple'd inputs")
|
||||
@ -2611,7 +2611,7 @@ pub fn register_fn_llvmty(ccx: &CrateContext,
|
||||
debug!("register_fn_llvmty id={} sym={}", node_id, sym);
|
||||
|
||||
let llfn = decl_fn(ccx,
|
||||
&sym[],
|
||||
&sym[..],
|
||||
cc,
|
||||
llfty,
|
||||
ty::FnConverging(ty::mk_nil(ccx.tcx())));
|
||||
@ -2667,7 +2667,7 @@ pub fn create_entry_wrapper(ccx: &CrateContext,
|
||||
let (start_fn, args) = if use_start_lang_item {
|
||||
let start_def_id = match ccx.tcx().lang_items.require(StartFnLangItem) {
|
||||
Ok(id) => id,
|
||||
Err(s) => { ccx.sess().fatal(&s[]); }
|
||||
Err(s) => { ccx.sess().fatal(&s[..]); }
|
||||
};
|
||||
let start_fn = if start_def_id.krate == ast::LOCAL_CRATE {
|
||||
get_item_val(ccx, start_def_id.node)
|
||||
@ -2783,7 +2783,7 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef {
|
||||
} else {
|
||||
llvm::LLVMTypeOf(v)
|
||||
};
|
||||
if contains_null(&sym[]) {
|
||||
if contains_null(&sym[..]) {
|
||||
ccx.sess().fatal(
|
||||
&format!("Illegal null byte in export_name \
|
||||
value: `{}`", sym)[]);
|
||||
@ -2988,7 +2988,7 @@ pub fn write_metadata(cx: &SharedCrateContext, krate: &ast::Crate) -> Vec<u8> {
|
||||
Some(compressed) => compressed,
|
||||
None => cx.sess().fatal("failed to compress metadata"),
|
||||
});
|
||||
let llmeta = C_bytes_in_context(cx.metadata_llcx(), &compressed[]);
|
||||
let llmeta = C_bytes_in_context(cx.metadata_llcx(), &compressed[..]);
|
||||
let llconst = C_struct_in_context(cx.metadata_llcx(), &[llmeta], false);
|
||||
let name = format!("rust_metadata_{}_{}",
|
||||
cx.link_meta().crate_name,
|
||||
|
@ -567,7 +567,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
|
||||
} else {
|
||||
let v = ixs.iter().map(|i| C_i32(self.ccx, *i as i32)).collect::<Vec<ValueRef>>();
|
||||
self.count_insn("gepi");
|
||||
self.inbounds_gep(base, &v[])
|
||||
self.inbounds_gep(base, &v[..])
|
||||
}
|
||||
}
|
||||
|
||||
@ -775,8 +775,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
|
||||
let s = format!("{} ({})",
|
||||
text,
|
||||
self.ccx.sess().codemap().span_to_string(sp));
|
||||
debug!("{}", &s[]);
|
||||
self.add_comment(&s[]);
|
||||
debug!("{}", &s[..]);
|
||||
self.add_comment(&s[..]);
|
||||
}
|
||||
}
|
||||
|
||||
@ -813,7 +813,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
|
||||
}).collect::<Vec<_>>();
|
||||
|
||||
debug!("Asm Output Type: {}", self.ccx.tn().type_to_string(output));
|
||||
let fty = Type::func(&argtys[], &output);
|
||||
let fty = Type::func(&argtys[..], &output);
|
||||
unsafe {
|
||||
let v = llvm::LLVMInlineAsm(
|
||||
fty.to_ref(), asm, cons, volatile, alignstack, dia as c_uint);
|
||||
|
@ -323,7 +323,7 @@ pub fn trans_fn_pointer_shim<'a, 'tcx>(
|
||||
let llfn =
|
||||
decl_internal_rust_fn(ccx,
|
||||
tuple_fn_ty,
|
||||
&function_name[]);
|
||||
&function_name[..]);
|
||||
|
||||
//
|
||||
let empty_substs = tcx.mk_substs(Substs::trans_empty());
|
||||
@ -359,7 +359,7 @@ pub fn trans_fn_pointer_shim<'a, 'tcx>(
|
||||
DebugLoc::None,
|
||||
bare_fn_ty,
|
||||
|bcx, _| Callee { bcx: bcx, data: Fn(llfnpointer) },
|
||||
ArgVals(&llargs[]),
|
||||
ArgVals(&llargs[..]),
|
||||
dest).bcx;
|
||||
|
||||
finish_fn(&fcx, bcx, sig.output, DebugLoc::None);
|
||||
@ -792,7 +792,7 @@ pub fn trans_call_inner<'a, 'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>,
|
||||
// Invoke the actual rust fn and update bcx/llresult.
|
||||
let (llret, b) = base::invoke(bcx,
|
||||
llfn,
|
||||
&llargs[],
|
||||
&llargs[..],
|
||||
callee_ty,
|
||||
debug_loc);
|
||||
bcx = b;
|
||||
@ -833,7 +833,7 @@ pub fn trans_call_inner<'a, 'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>,
|
||||
callee_ty,
|
||||
llfn,
|
||||
opt_llretslot.unwrap(),
|
||||
&llargs[],
|
||||
&llargs[..],
|
||||
arg_tys,
|
||||
debug_loc);
|
||||
}
|
||||
|
@ -764,7 +764,7 @@ impl<'blk, 'tcx> CleanupHelperMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx
|
||||
let name = scope.block_name("clean");
|
||||
debug!("generating cleanups for {}", name);
|
||||
let bcx_in = self.new_block(label.is_unwind(),
|
||||
&name[],
|
||||
&name[..],
|
||||
None);
|
||||
let mut bcx_out = bcx_in;
|
||||
for cleanup in scope.cleanups.iter().rev() {
|
||||
@ -811,7 +811,7 @@ impl<'blk, 'tcx> CleanupHelperMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx
|
||||
Some(llbb) => { return llbb; }
|
||||
None => {
|
||||
let name = last_scope.block_name("unwind");
|
||||
pad_bcx = self.new_block(true, &name[], None);
|
||||
pad_bcx = self.new_block(true, &name[..], None);
|
||||
last_scope.cached_landing_pad = Some(pad_bcx.llbb);
|
||||
}
|
||||
}
|
||||
|
@ -158,7 +158,7 @@ pub fn get_or_create_declaration_if_closure<'a, 'tcx>(ccx: &CrateContext<'a, 'tc
|
||||
mangle_internal_name_by_path_and_seq(path, "closure")
|
||||
});
|
||||
|
||||
let llfn = decl_internal_rust_fn(ccx, function_type, &symbol[]);
|
||||
let llfn = decl_internal_rust_fn(ccx, function_type, &symbol[..]);
|
||||
|
||||
// set an inline hint for all closures
|
||||
set_inline_hint(llfn);
|
||||
@ -221,7 +221,7 @@ pub fn trans_closure_expr<'a, 'tcx>(dest: Dest<'a, 'tcx>,
|
||||
&[],
|
||||
sig.output,
|
||||
function_type.abi,
|
||||
ClosureEnv::Closure(&freevars[]));
|
||||
ClosureEnv::Closure(&freevars[..]));
|
||||
|
||||
// Don't hoist this to the top of the function. It's perfectly legitimate
|
||||
// to have a zero-size closure (in which case dest will be `Ignore`) and
|
||||
|
@ -1165,8 +1165,8 @@ pub fn langcall(bcx: Block,
|
||||
Err(s) => {
|
||||
let msg = format!("{} {}", msg, s);
|
||||
match span {
|
||||
Some(span) => bcx.tcx().sess.span_fatal(span, &msg[]),
|
||||
None => bcx.tcx().sess.fatal(&msg[]),
|
||||
Some(span) => bcx.tcx().sess.span_fatal(span, &msg[..]),
|
||||
None => bcx.tcx().sess.fatal(&msg[..]),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -75,7 +75,7 @@ pub fn const_lit(cx: &CrateContext, e: &ast::Expr, lit: &ast::Lit)
|
||||
ast::LitBool(b) => C_bool(cx, b),
|
||||
ast::LitStr(ref s, _) => C_str_slice(cx, (*s).clone()),
|
||||
ast::LitBinary(ref data) => {
|
||||
let g = addr_of(cx, C_bytes(cx, &data[]), "binary", e.id);
|
||||
let g = addr_of(cx, C_bytes(cx, &data[..]), "binary", e.id);
|
||||
let base = ptrcast(g, Type::i8p(cx));
|
||||
let prev_const = cx.const_unsized().borrow_mut()
|
||||
.insert(base, g);
|
||||
@ -611,8 +611,8 @@ fn const_expr_unadjusted<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
}
|
||||
ast::ExprTup(ref es) => {
|
||||
let repr = adt::represent_type(cx, ety);
|
||||
let vals = map_list(&es[]);
|
||||
adt::trans_const(cx, &*repr, 0, &vals[])
|
||||
let vals = map_list(&es[..]);
|
||||
adt::trans_const(cx, &*repr, 0, &vals[..])
|
||||
}
|
||||
ast::ExprStruct(_, ref fs, ref base_opt) => {
|
||||
let repr = adt::represent_type(cx, ety);
|
||||
@ -642,9 +642,9 @@ fn const_expr_unadjusted<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
}
|
||||
}).collect::<Vec<_>>();
|
||||
if ty::type_is_simd(cx.tcx(), ety) {
|
||||
C_vector(&cs[])
|
||||
C_vector(&cs[..])
|
||||
} else {
|
||||
adt::trans_const(cx, &*repr, discr, &cs[])
|
||||
adt::trans_const(cx, &*repr, discr, &cs[..])
|
||||
}
|
||||
})
|
||||
}
|
||||
@ -655,9 +655,9 @@ fn const_expr_unadjusted<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
.collect::<Vec<_>>();
|
||||
// If the vector contains enums, an LLVM array won't work.
|
||||
if vs.iter().any(|vi| val_ty(*vi) != llunitty) {
|
||||
C_struct(cx, &vs[], false)
|
||||
C_struct(cx, &vs[..], false)
|
||||
} else {
|
||||
C_array(llunitty, &vs[])
|
||||
C_array(llunitty, &vs[..])
|
||||
}
|
||||
}
|
||||
ast::ExprRepeat(ref elem, ref count) => {
|
||||
@ -671,9 +671,9 @@ fn const_expr_unadjusted<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
let unit_val = const_expr(cx, &**elem, param_substs).0;
|
||||
let vs: Vec<_> = repeat(unit_val).take(n).collect();
|
||||
if val_ty(unit_val) != llunitty {
|
||||
C_struct(cx, &vs[], false)
|
||||
C_struct(cx, &vs[..], false)
|
||||
} else {
|
||||
C_array(llunitty, &vs[])
|
||||
C_array(llunitty, &vs[..])
|
||||
}
|
||||
}
|
||||
ast::ExprPath(_) | ast::ExprQPath(_) => {
|
||||
@ -715,14 +715,14 @@ fn const_expr_unadjusted<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
}
|
||||
ast::ExprCall(ref callee, ref args) => {
|
||||
let opt_def = cx.tcx().def_map.borrow().get(&callee.id).cloned();
|
||||
let arg_vals = map_list(&args[]);
|
||||
let arg_vals = map_list(&args[..]);
|
||||
match opt_def {
|
||||
Some(def::DefStruct(_)) => {
|
||||
if ty::type_is_simd(cx.tcx(), ety) {
|
||||
C_vector(&arg_vals[])
|
||||
C_vector(&arg_vals[..])
|
||||
} else {
|
||||
let repr = adt::represent_type(cx, ety);
|
||||
adt::trans_const(cx, &*repr, 0, &arg_vals[])
|
||||
adt::trans_const(cx, &*repr, 0, &arg_vals[..])
|
||||
}
|
||||
}
|
||||
Some(def::DefVariant(enum_did, variant_did, _)) => {
|
||||
@ -733,7 +733,7 @@ fn const_expr_unadjusted<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
adt::trans_const(cx,
|
||||
&*repr,
|
||||
vinfo.disr_val,
|
||||
&arg_vals[])
|
||||
&arg_vals[..])
|
||||
}
|
||||
_ => cx.sess().span_bug(e.span, "expected a struct or variant def")
|
||||
}
|
||||
|
@ -288,7 +288,7 @@ impl<'tcx> SharedCrateContext<'tcx> {
|
||||
// such as a function name in the module.
|
||||
// 1. http://llvm.org/bugs/show_bug.cgi?id=11479
|
||||
let llmod_id = format!("{}.{}.rs", crate_name, i);
|
||||
let local_ccx = LocalCrateContext::new(&shared_ccx, &llmod_id[]);
|
||||
let local_ccx = LocalCrateContext::new(&shared_ccx, &llmod_id[..]);
|
||||
shared_ccx.local_ccxs.push(local_ccx);
|
||||
}
|
||||
|
||||
|
@ -177,7 +177,7 @@ pub fn trans_if<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
|
||||
}
|
||||
|
||||
let name = format!("then-block-{}-", thn.id);
|
||||
let then_bcx_in = bcx.fcx.new_id_block(&name[], thn.id);
|
||||
let then_bcx_in = bcx.fcx.new_id_block(&name[..], thn.id);
|
||||
let then_bcx_out = trans_block(then_bcx_in, &*thn, dest);
|
||||
trans::debuginfo::clear_source_location(bcx.fcx);
|
||||
|
||||
@ -378,7 +378,7 @@ pub fn trans_fail<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
|
||||
let did = langcall(bcx, Some(call_info.span), "", PanicFnLangItem);
|
||||
let bcx = callee::trans_lang_call(bcx,
|
||||
did,
|
||||
&args[],
|
||||
&args[..],
|
||||
Some(expr::Ignore),
|
||||
call_info.debug_loc()).bcx;
|
||||
Unreachable(bcx);
|
||||
@ -407,7 +407,7 @@ pub fn trans_fail_bounds_check<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
|
||||
let did = langcall(bcx, Some(call_info.span), "", PanicBoundsCheckFnLangItem);
|
||||
let bcx = callee::trans_lang_call(bcx,
|
||||
did,
|
||||
&args[],
|
||||
&args[..],
|
||||
Some(expr::Ignore),
|
||||
call_info.debug_loc()).bcx;
|
||||
Unreachable(bcx);
|
||||
|
@ -299,7 +299,7 @@ impl<'tcx> TypeMap<'tcx> {
|
||||
if self.unique_id_to_metadata.insert(unique_type_id, metadata).is_some() {
|
||||
let unique_type_id_str = self.get_unique_type_id_as_string(unique_type_id);
|
||||
cx.sess().bug(&format!("Type metadata for unique id '{}' is already in the TypeMap!",
|
||||
&unique_type_id_str[])[]);
|
||||
&unique_type_id_str[..])[]);
|
||||
}
|
||||
}
|
||||
|
||||
@ -380,14 +380,14 @@ impl<'tcx> TypeMap<'tcx> {
|
||||
self.get_unique_type_id_of_type(cx, component_type);
|
||||
let component_type_id =
|
||||
self.get_unique_type_id_as_string(component_type_id);
|
||||
unique_type_id.push_str(&component_type_id[]);
|
||||
unique_type_id.push_str(&component_type_id[..]);
|
||||
}
|
||||
},
|
||||
ty::ty_uniq(inner_type) => {
|
||||
unique_type_id.push('~');
|
||||
let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type);
|
||||
let inner_type_id = self.get_unique_type_id_as_string(inner_type_id);
|
||||
unique_type_id.push_str(&inner_type_id[]);
|
||||
unique_type_id.push_str(&inner_type_id[..]);
|
||||
},
|
||||
ty::ty_ptr(ty::mt { ty: inner_type, mutbl } ) => {
|
||||
unique_type_id.push('*');
|
||||
@ -397,7 +397,7 @@ impl<'tcx> TypeMap<'tcx> {
|
||||
|
||||
let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type);
|
||||
let inner_type_id = self.get_unique_type_id_as_string(inner_type_id);
|
||||
unique_type_id.push_str(&inner_type_id[]);
|
||||
unique_type_id.push_str(&inner_type_id[..]);
|
||||
},
|
||||
ty::ty_rptr(_, ty::mt { ty: inner_type, mutbl }) => {
|
||||
unique_type_id.push('&');
|
||||
@ -407,7 +407,7 @@ impl<'tcx> TypeMap<'tcx> {
|
||||
|
||||
let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type);
|
||||
let inner_type_id = self.get_unique_type_id_as_string(inner_type_id);
|
||||
unique_type_id.push_str(&inner_type_id[]);
|
||||
unique_type_id.push_str(&inner_type_id[..]);
|
||||
},
|
||||
ty::ty_vec(inner_type, optional_length) => {
|
||||
match optional_length {
|
||||
@ -421,7 +421,7 @@ impl<'tcx> TypeMap<'tcx> {
|
||||
|
||||
let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type);
|
||||
let inner_type_id = self.get_unique_type_id_as_string(inner_type_id);
|
||||
unique_type_id.push_str(&inner_type_id[]);
|
||||
unique_type_id.push_str(&inner_type_id[..]);
|
||||
},
|
||||
ty::ty_trait(ref trait_data) => {
|
||||
unique_type_id.push_str("trait ");
|
||||
@ -452,7 +452,7 @@ impl<'tcx> TypeMap<'tcx> {
|
||||
self.get_unique_type_id_of_type(cx, parameter_type);
|
||||
let parameter_type_id =
|
||||
self.get_unique_type_id_as_string(parameter_type_id);
|
||||
unique_type_id.push_str(¶meter_type_id[]);
|
||||
unique_type_id.push_str(¶meter_type_id[..]);
|
||||
unique_type_id.push(',');
|
||||
}
|
||||
|
||||
@ -465,7 +465,7 @@ impl<'tcx> TypeMap<'tcx> {
|
||||
ty::FnConverging(ret_ty) => {
|
||||
let return_type_id = self.get_unique_type_id_of_type(cx, ret_ty);
|
||||
let return_type_id = self.get_unique_type_id_as_string(return_type_id);
|
||||
unique_type_id.push_str(&return_type_id[]);
|
||||
unique_type_id.push_str(&return_type_id[..]);
|
||||
}
|
||||
ty::FnDiverging => {
|
||||
unique_type_id.push_str("!");
|
||||
@ -538,7 +538,7 @@ impl<'tcx> TypeMap<'tcx> {
|
||||
type_map.get_unique_type_id_of_type(cx, type_parameter);
|
||||
let param_type_id =
|
||||
type_map.get_unique_type_id_as_string(param_type_id);
|
||||
output.push_str(¶m_type_id[]);
|
||||
output.push_str(¶m_type_id[..]);
|
||||
output.push(',');
|
||||
}
|
||||
|
||||
@ -568,7 +568,7 @@ impl<'tcx> TypeMap<'tcx> {
|
||||
self.get_unique_type_id_of_type(cx, parameter_type);
|
||||
let parameter_type_id =
|
||||
self.get_unique_type_id_as_string(parameter_type_id);
|
||||
unique_type_id.push_str(¶meter_type_id[]);
|
||||
unique_type_id.push_str(¶meter_type_id[..]);
|
||||
unique_type_id.push(',');
|
||||
}
|
||||
|
||||
@ -582,7 +582,7 @@ impl<'tcx> TypeMap<'tcx> {
|
||||
ty::FnConverging(ret_ty) => {
|
||||
let return_type_id = self.get_unique_type_id_of_type(cx, ret_ty);
|
||||
let return_type_id = self.get_unique_type_id_as_string(return_type_id);
|
||||
unique_type_id.push_str(&return_type_id[]);
|
||||
unique_type_id.push_str(&return_type_id[..]);
|
||||
}
|
||||
ty::FnDiverging => {
|
||||
unique_type_id.push_str("!");
|
||||
@ -806,7 +806,7 @@ pub fn create_global_var_metadata(cx: &CrateContext,
|
||||
let namespace_node = namespace_for_item(cx, ast_util::local_def(node_id));
|
||||
let var_name = token::get_ident(ident).to_string();
|
||||
let linkage_name =
|
||||
namespace_node.mangled_name_of_contained_item(&var_name[]);
|
||||
namespace_node.mangled_name_of_contained_item(&var_name[..]);
|
||||
let var_scope = namespace_node.scope;
|
||||
|
||||
let var_name = CString::from_slice(var_name.as_bytes());
|
||||
@ -1287,7 +1287,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
match expr.node {
|
||||
ast::ExprClosure(_, ref fn_decl, ref top_level_block) => {
|
||||
let name = format!("fn{}", token::gensym("fn"));
|
||||
let name = token::str_to_ident(&name[]);
|
||||
let name = token::str_to_ident(&name[..]);
|
||||
(name, &**fn_decl,
|
||||
// This is not quite right. It should actually inherit
|
||||
// the generics of the enclosing function.
|
||||
@ -1366,7 +1366,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
let (linkage_name, containing_scope) = if has_path {
|
||||
let namespace_node = namespace_for_item(cx, ast_util::local_def(fn_ast_id));
|
||||
let linkage_name = namespace_node.mangled_name_of_contained_item(
|
||||
&function_name[]);
|
||||
&function_name[..]);
|
||||
let containing_scope = namespace_node.scope;
|
||||
(linkage_name, containing_scope)
|
||||
} else {
|
||||
@ -1451,7 +1451,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
signature.push(type_metadata(cx, arg_type, codemap::DUMMY_SP));
|
||||
}
|
||||
|
||||
return create_DIArray(DIB(cx), &signature[]);
|
||||
return create_DIArray(DIB(cx), &signature[..]);
|
||||
}
|
||||
|
||||
fn get_template_parameters<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
@ -1486,7 +1486,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
actual_self_type,
|
||||
true);
|
||||
|
||||
name_to_append_suffix_to.push_str(&actual_self_type_name[]);
|
||||
name_to_append_suffix_to.push_str(&actual_self_type_name[..]);
|
||||
|
||||
if generics.is_type_parameterized() {
|
||||
name_to_append_suffix_to.push_str(",");
|
||||
@ -1525,7 +1525,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
let actual_type_name = compute_debuginfo_type_name(cx,
|
||||
actual_type,
|
||||
true);
|
||||
name_to_append_suffix_to.push_str(&actual_type_name[]);
|
||||
name_to_append_suffix_to.push_str(&actual_type_name[..]);
|
||||
|
||||
if index != generics.ty_params.len() - 1 {
|
||||
name_to_append_suffix_to.push_str(",");
|
||||
@ -1552,7 +1552,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
|
||||
name_to_append_suffix_to.push('>');
|
||||
|
||||
return create_DIArray(DIB(cx), &template_params[]);
|
||||
return create_DIArray(DIB(cx), &template_params[..]);
|
||||
}
|
||||
}
|
||||
|
||||
@ -1646,7 +1646,7 @@ fn declare_local<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
|
||||
let cx: &CrateContext = bcx.ccx();
|
||||
|
||||
let filename = span_start(cx, span).file.name.clone();
|
||||
let file_metadata = file_metadata(cx, &filename[]);
|
||||
let file_metadata = file_metadata(cx, &filename[..]);
|
||||
|
||||
let name = token::get_ident(variable_ident);
|
||||
let loc = span_start(cx, span);
|
||||
@ -1959,7 +1959,7 @@ impl<'tcx> RecursiveTypeDescription<'tcx> {
|
||||
set_members_of_composite_type(cx,
|
||||
metadata_stub,
|
||||
llvm_type,
|
||||
&member_descriptions[]);
|
||||
&member_descriptions[..]);
|
||||
return MetadataCreationResult::new(metadata_stub, true);
|
||||
}
|
||||
}
|
||||
@ -2031,7 +2031,7 @@ fn prepare_struct_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
|
||||
let struct_metadata_stub = create_struct_stub(cx,
|
||||
struct_llvm_type,
|
||||
&struct_name[],
|
||||
&struct_name[..],
|
||||
unique_type_id,
|
||||
containing_scope);
|
||||
|
||||
@ -2098,7 +2098,7 @@ fn prepare_tuple_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
unique_type_id,
|
||||
create_struct_stub(cx,
|
||||
tuple_llvm_type,
|
||||
&tuple_name[],
|
||||
&tuple_name[..],
|
||||
unique_type_id,
|
||||
UNKNOWN_SCOPE_METADATA),
|
||||
tuple_llvm_type,
|
||||
@ -2158,7 +2158,7 @@ impl<'tcx> EnumMemberDescriptionFactory<'tcx> {
|
||||
set_members_of_composite_type(cx,
|
||||
variant_type_metadata,
|
||||
variant_llvm_type,
|
||||
&member_descriptions[]);
|
||||
&member_descriptions[..]);
|
||||
MemberDescription {
|
||||
name: "".to_string(),
|
||||
llvm_type: variant_llvm_type,
|
||||
@ -2191,7 +2191,7 @@ impl<'tcx> EnumMemberDescriptionFactory<'tcx> {
|
||||
set_members_of_composite_type(cx,
|
||||
variant_type_metadata,
|
||||
variant_llvm_type,
|
||||
&member_descriptions[]);
|
||||
&member_descriptions[..]);
|
||||
vec![
|
||||
MemberDescription {
|
||||
name: "".to_string(),
|
||||
@ -2291,7 +2291,7 @@ impl<'tcx> EnumMemberDescriptionFactory<'tcx> {
|
||||
set_members_of_composite_type(cx,
|
||||
variant_type_metadata,
|
||||
variant_llvm_type,
|
||||
&variant_member_descriptions[]);
|
||||
&variant_member_descriptions[..]);
|
||||
|
||||
// Encode the information about the null variant in the union
|
||||
// member's name.
|
||||
@ -2662,7 +2662,7 @@ fn set_members_of_composite_type(cx: &CrateContext,
|
||||
.collect();
|
||||
|
||||
unsafe {
|
||||
let type_array = create_DIArray(DIB(cx), &member_metadata[]);
|
||||
let type_array = create_DIArray(DIB(cx), &member_metadata[..]);
|
||||
llvm::LLVMDICompositeTypeSetTypeArray(DIB(cx), composite_type_metadata, type_array);
|
||||
}
|
||||
}
|
||||
@ -2763,7 +2763,7 @@ fn vec_slice_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
|
||||
let member_llvm_types = slice_llvm_type.field_types();
|
||||
assert!(slice_layout_is_correct(cx,
|
||||
&member_llvm_types[],
|
||||
&member_llvm_types[..],
|
||||
element_type));
|
||||
let member_descriptions = [
|
||||
MemberDescription {
|
||||
@ -2789,7 +2789,7 @@ fn vec_slice_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
|
||||
let metadata = composite_type_metadata(cx,
|
||||
slice_llvm_type,
|
||||
&slice_type_name[],
|
||||
&slice_type_name[..],
|
||||
unique_type_id,
|
||||
&member_descriptions,
|
||||
UNKNOWN_SCOPE_METADATA,
|
||||
@ -2838,7 +2838,7 @@ fn subroutine_type_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
llvm::LLVMDIBuilderCreateSubroutineType(
|
||||
DIB(cx),
|
||||
UNKNOWN_FILE_METADATA,
|
||||
create_DIArray(DIB(cx), &signature_metadata[]))
|
||||
create_DIArray(DIB(cx), &signature_metadata[..]))
|
||||
},
|
||||
false);
|
||||
}
|
||||
@ -2864,7 +2864,7 @@ fn trait_pointer_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
let pp_type_name = ppaux::ty_to_string(cx.tcx(), trait_type);
|
||||
cx.sess().bug(&format!("debuginfo: Unexpected trait-object type in \
|
||||
trait_pointer_metadata(): {}",
|
||||
&pp_type_name[])[]);
|
||||
&pp_type_name[..])[]);
|
||||
}
|
||||
};
|
||||
|
||||
@ -2878,7 +2878,7 @@ fn trait_pointer_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
|
||||
composite_type_metadata(cx,
|
||||
trait_llvm_type,
|
||||
&trait_type_name[],
|
||||
&trait_type_name[..],
|
||||
unique_type_id,
|
||||
&[],
|
||||
containing_scope,
|
||||
@ -2998,7 +2998,7 @@ fn type_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
ty::ty_tup(ref elements) => {
|
||||
prepare_tuple_metadata(cx,
|
||||
t,
|
||||
&elements[],
|
||||
&elements[..],
|
||||
unique_type_id,
|
||||
usage_site_span).finalize(cx)
|
||||
}
|
||||
@ -3022,9 +3022,9 @@ fn type_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
type id '{}' to already be in \
|
||||
the debuginfo::TypeMap but it \
|
||||
was not. (Ty = {})",
|
||||
&unique_type_id_str[],
|
||||
&unique_type_id_str[..],
|
||||
ppaux::ty_to_string(cx.tcx(), t));
|
||||
cx.sess().span_bug(usage_site_span, &error_message[]);
|
||||
cx.sess().span_bug(usage_site_span, &error_message[..]);
|
||||
}
|
||||
};
|
||||
|
||||
@ -3037,9 +3037,9 @@ fn type_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
UniqueTypeId maps in \
|
||||
debuginfo::TypeMap. \
|
||||
UniqueTypeId={}, Ty={}",
|
||||
&unique_type_id_str[],
|
||||
&unique_type_id_str[..],
|
||||
ppaux::ty_to_string(cx.tcx(), t));
|
||||
cx.sess().span_bug(usage_site_span, &error_message[]);
|
||||
cx.sess().span_bug(usage_site_span, &error_message[..]);
|
||||
}
|
||||
}
|
||||
None => {
|
||||
@ -3128,7 +3128,7 @@ fn contains_nodebug_attribute(attributes: &[ast::Attribute]) -> bool {
|
||||
attributes.iter().any(|attr| {
|
||||
let meta_item: &ast::MetaItem = &*attr.node.value;
|
||||
match meta_item.node {
|
||||
ast::MetaWord(ref value) => &value[] == "no_debug",
|
||||
ast::MetaWord(ref value) => &value[..] == "no_debug",
|
||||
_ => false
|
||||
}
|
||||
})
|
||||
|
@ -1046,14 +1046,14 @@ fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
|
||||
controlflow::trans_if(bcx, expr.id, &**cond, &**thn, els.as_ref().map(|e| &**e), dest)
|
||||
}
|
||||
ast::ExprMatch(ref discr, ref arms, _) => {
|
||||
_match::trans_match(bcx, expr, &**discr, &arms[], dest)
|
||||
_match::trans_match(bcx, expr, &**discr, &arms[..], dest)
|
||||
}
|
||||
ast::ExprBlock(ref blk) => {
|
||||
controlflow::trans_block(bcx, &**blk, dest)
|
||||
}
|
||||
ast::ExprStruct(_, ref fields, ref base) => {
|
||||
trans_struct(bcx,
|
||||
&fields[],
|
||||
&fields[..],
|
||||
base.as_ref().map(|e| &**e),
|
||||
expr.span,
|
||||
expr.id,
|
||||
@ -1118,7 +1118,7 @@ fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
|
||||
trans_adt(bcx,
|
||||
expr_ty(bcx, expr),
|
||||
0,
|
||||
&numbered_fields[],
|
||||
&numbered_fields[..],
|
||||
None,
|
||||
dest,
|
||||
expr.debug_loc())
|
||||
@ -1153,13 +1153,13 @@ fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
|
||||
trans_overloaded_call(bcx,
|
||||
expr,
|
||||
&**f,
|
||||
&args[],
|
||||
&args[..],
|
||||
Some(dest))
|
||||
} else {
|
||||
callee::trans_call(bcx,
|
||||
expr,
|
||||
&**f,
|
||||
callee::ArgExprs(&args[]),
|
||||
callee::ArgExprs(&args[..]),
|
||||
dest)
|
||||
}
|
||||
}
|
||||
@ -1167,7 +1167,7 @@ fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
|
||||
callee::trans_method_call(bcx,
|
||||
expr,
|
||||
&*args[0],
|
||||
callee::ArgExprs(&args[]),
|
||||
callee::ArgExprs(&args[..]),
|
||||
dest)
|
||||
}
|
||||
ast::ExprBinary(op, ref lhs, ref rhs) => {
|
||||
@ -1354,11 +1354,11 @@ pub fn with_field_tys<'tcx, R, F>(tcx: &ty::ctxt<'tcx>,
|
||||
ty::ty_struct(did, substs) => {
|
||||
let fields = struct_fields(tcx, did, substs);
|
||||
let fields = monomorphize::normalize_associated_type(tcx, &fields);
|
||||
op(0, &fields[])
|
||||
op(0, &fields[..])
|
||||
}
|
||||
|
||||
ty::ty_tup(ref v) => {
|
||||
op(0, &tup_fields(&v[])[])
|
||||
op(0, &tup_fields(&v[..])[])
|
||||
}
|
||||
|
||||
ty::ty_enum(_, substs) => {
|
||||
@ -1378,7 +1378,7 @@ pub fn with_field_tys<'tcx, R, F>(tcx: &ty::ctxt<'tcx>,
|
||||
tcx, enum_id, variant_id);
|
||||
let fields = struct_fields(tcx, variant_id, substs);
|
||||
let fields = monomorphize::normalize_associated_type(tcx, &fields);
|
||||
op(variant_info.disr_val, &fields[])
|
||||
op(variant_info.disr_val, &fields[..])
|
||||
}
|
||||
_ => {
|
||||
tcx.sess.bug("resolve didn't map this expr to a \
|
||||
|
@ -238,7 +238,7 @@ pub fn trans_native_call<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
|
||||
_ => ccx.sess().bug("trans_native_call called on non-function type")
|
||||
};
|
||||
let fn_sig = ty::erase_late_bound_regions(ccx.tcx(), fn_sig);
|
||||
let llsig = foreign_signature(ccx, &fn_sig, &passed_arg_tys[]);
|
||||
let llsig = foreign_signature(ccx, &fn_sig, &passed_arg_tys[..]);
|
||||
let fn_type = cabi::compute_abi_info(ccx,
|
||||
&llsig.llarg_tys[],
|
||||
llsig.llret_ty,
|
||||
@ -370,7 +370,7 @@ pub fn trans_native_call<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
|
||||
|
||||
let llforeign_retval = CallWithConv(bcx,
|
||||
llfn,
|
||||
&llargs_foreign[],
|
||||
&llargs_foreign[..],
|
||||
cc,
|
||||
Some(attrs),
|
||||
call_debug_loc);
|
||||
@ -611,7 +611,7 @@ pub fn trans_rust_fn_with_foreign_abi<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
|
||||
ccx.tcx().map.path_to_string(id),
|
||||
id, t.repr(tcx));
|
||||
|
||||
let llfn = base::decl_internal_rust_fn(ccx, t, &ps[]);
|
||||
let llfn = base::decl_internal_rust_fn(ccx, t, &ps[..]);
|
||||
base::set_llvm_fn_attrs(ccx, attrs, llfn);
|
||||
base::trans_fn(ccx, decl, body, llfn, param_substs, id, &[]);
|
||||
llfn
|
||||
@ -974,7 +974,7 @@ fn lltype_for_fn_from_foreign_types(ccx: &CrateContext, tys: &ForeignTypes) -> T
|
||||
if tys.fn_sig.variadic {
|
||||
Type::variadic_func(&llargument_tys, &llreturn_ty)
|
||||
} else {
|
||||
Type::func(&llargument_tys[], &llreturn_ty)
|
||||
Type::func(&llargument_tys[..], &llreturn_ty)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -170,7 +170,7 @@ pub fn get_drop_glue<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Val
|
||||
|
||||
let (glue, new_sym) = match ccx.available_drop_glues().borrow().get(&t) {
|
||||
Some(old_sym) => {
|
||||
let glue = decl_cdecl_fn(ccx, &old_sym[], llfnty, ty::mk_nil(ccx.tcx()));
|
||||
let glue = decl_cdecl_fn(ccx, &old_sym[..], llfnty, ty::mk_nil(ccx.tcx()));
|
||||
(glue, None)
|
||||
},
|
||||
None => {
|
||||
@ -304,7 +304,7 @@ fn trans_struct_drop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
|
||||
class_did,
|
||||
&[get_drop_glue_type(bcx.ccx(), t)],
|
||||
ty::mk_nil(bcx.tcx()));
|
||||
let (_, variant_cx) = invoke(variant_cx, dtor_addr, &args[], dtor_ty, DebugLoc::None);
|
||||
let (_, variant_cx) = invoke(variant_cx, dtor_addr, &args[..], dtor_ty, DebugLoc::None);
|
||||
|
||||
variant_cx.fcx.pop_and_trans_custom_cleanup_scope(variant_cx, field_scope);
|
||||
variant_cx
|
||||
@ -541,7 +541,7 @@ fn declare_generic_glue<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>,
|
||||
ccx,
|
||||
t,
|
||||
&format!("glue_{}", name)[]);
|
||||
let llfn = decl_cdecl_fn(ccx, &fn_nm[], llfnty, ty::mk_nil(ccx.tcx()));
|
||||
let llfn = decl_cdecl_fn(ccx, &fn_nm[..], llfnty, ty::mk_nil(ccx.tcx()));
|
||||
note_unique_llvm_symbol(ccx, fn_nm.clone());
|
||||
return (fn_nm, llfn);
|
||||
}
|
||||
|
@ -166,7 +166,7 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
|
||||
let name = token::get_ident(foreign_item.ident);
|
||||
|
||||
// For `transmute` we can just trans the input expr directly into dest
|
||||
if &name[] == "transmute" {
|
||||
if &name[..] == "transmute" {
|
||||
let llret_ty = type_of::type_of(ccx, ret_ty.unwrap());
|
||||
match args {
|
||||
callee::ArgExprs(arg_exprs) => {
|
||||
@ -274,13 +274,13 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
|
||||
let call_debug_location = DebugLoc::At(call_info.id, call_info.span);
|
||||
|
||||
// These are the only intrinsic functions that diverge.
|
||||
if &name[] == "abort" {
|
||||
if &name[..] == "abort" {
|
||||
let llfn = ccx.get_intrinsic(&("llvm.trap"));
|
||||
Call(bcx, llfn, &[], None, call_debug_location);
|
||||
fcx.pop_and_trans_custom_cleanup_scope(bcx, cleanup_scope);
|
||||
Unreachable(bcx);
|
||||
return Result::new(bcx, C_undef(Type::nil(ccx).ptr_to()));
|
||||
} else if &name[] == "unreachable" {
|
||||
} else if &name[..] == "unreachable" {
|
||||
fcx.pop_and_trans_custom_cleanup_scope(bcx, cleanup_scope);
|
||||
Unreachable(bcx);
|
||||
return Result::new(bcx, C_nil(ccx));
|
||||
@ -307,7 +307,7 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
|
||||
};
|
||||
|
||||
let simple = get_simple_intrinsic(ccx, &*foreign_item);
|
||||
let llval = match (simple, &name[]) {
|
||||
let llval = match (simple, &name[..]) {
|
||||
(Some(llfn), _) => {
|
||||
Call(bcx, llfn, &llargs, None, call_debug_location)
|
||||
}
|
||||
|
@ -131,7 +131,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
|
||||
|
||||
hash = format!("h{}", state.finish());
|
||||
ccx.tcx().map.with_path(fn_id.node, |path| {
|
||||
exported_name(path, &hash[])
|
||||
exported_name(path, &hash[..])
|
||||
})
|
||||
};
|
||||
|
||||
@ -141,9 +141,9 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
|
||||
let mut hash_id = Some(hash_id);
|
||||
let mut mk_lldecl = |abi: abi::Abi| {
|
||||
let lldecl = if abi != abi::Rust {
|
||||
foreign::decl_rust_fn_with_foreign_abi(ccx, mono_ty, &s[])
|
||||
foreign::decl_rust_fn_with_foreign_abi(ccx, mono_ty, &s[..])
|
||||
} else {
|
||||
decl_internal_rust_fn(ccx, mono_ty, &s[])
|
||||
decl_internal_rust_fn(ccx, mono_ty, &s[..])
|
||||
};
|
||||
|
||||
ccx.monomorphized().borrow_mut().insert(hash_id.take().unwrap(), lldecl);
|
||||
@ -182,7 +182,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
|
||||
if abi != abi::Rust {
|
||||
foreign::trans_rust_fn_with_foreign_abi(
|
||||
ccx, &**decl, &**body, &[], d, psubsts, fn_id.node,
|
||||
Some(&hash[]));
|
||||
Some(&hash[..]));
|
||||
} else {
|
||||
trans_fn(ccx, &**decl, &**body, d, psubsts, fn_id.node, &[]);
|
||||
}
|
||||
@ -206,7 +206,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
|
||||
trans_enum_variant(ccx,
|
||||
parent,
|
||||
&*v,
|
||||
&args[],
|
||||
&args[..],
|
||||
this_tv.disr_val,
|
||||
psubsts,
|
||||
d);
|
||||
|
@ -144,7 +144,7 @@ pub fn type_of_rust_fn<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
let input_tys = inputs.iter().map(|&arg_ty| type_of_explicit_arg(cx, arg_ty));
|
||||
atys.extend(input_tys);
|
||||
|
||||
Type::func(&atys[], &lloutputtype)
|
||||
Type::func(&atys[..], &lloutputtype)
|
||||
}
|
||||
|
||||
// Given a function type and a count of ty params, construct an llvm type
|
||||
@ -332,7 +332,7 @@ pub fn type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Type {
|
||||
let repr = adt::represent_type(cx, t);
|
||||
let tps = substs.types.get_slice(subst::TypeSpace);
|
||||
let name = llvm_type_name(cx, an_enum, did, tps);
|
||||
adt::incomplete_type_of(cx, &*repr, &name[])
|
||||
adt::incomplete_type_of(cx, &*repr, &name[..])
|
||||
}
|
||||
ty::ty_closure(did, _, ref substs) => {
|
||||
// Only create the named struct, but don't fill it in. We
|
||||
@ -343,7 +343,7 @@ pub fn type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Type {
|
||||
// contents of the VecPerParamSpace to to construct the llvm
|
||||
// name
|
||||
let name = llvm_type_name(cx, a_closure, did, substs.types.as_slice());
|
||||
adt::incomplete_type_of(cx, &*repr, &name[])
|
||||
adt::incomplete_type_of(cx, &*repr, &name[..])
|
||||
}
|
||||
|
||||
ty::ty_uniq(ty) | ty::ty_rptr(_, ty::mt{ty, ..}) | ty::ty_ptr(ty::mt{ty, ..}) => {
|
||||
@ -399,7 +399,7 @@ pub fn type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Type {
|
||||
let repr = adt::represent_type(cx, t);
|
||||
let tps = substs.types.get_slice(subst::TypeSpace);
|
||||
let name = llvm_type_name(cx, a_struct, did, tps);
|
||||
adt::incomplete_type_of(cx, &*repr, &name[])
|
||||
adt::incomplete_type_of(cx, &*repr, &name[..])
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1139,14 +1139,14 @@ pub fn ast_ty_to_ty<'tcx>(this: &AstConv<'tcx>,
|
||||
ty::mk_vec(tcx, ast_ty_to_ty(this, rscope, &**ty), None)
|
||||
}
|
||||
ast::TyObjectSum(ref ty, ref bounds) => {
|
||||
match ast_ty_to_trait_ref(this, rscope, &**ty, &bounds[]) {
|
||||
match ast_ty_to_trait_ref(this, rscope, &**ty, &bounds[..]) {
|
||||
Ok((trait_ref, projection_bounds)) => {
|
||||
trait_ref_to_object_type(this,
|
||||
rscope,
|
||||
ast_ty.span,
|
||||
trait_ref,
|
||||
projection_bounds,
|
||||
&bounds[])
|
||||
&bounds[..])
|
||||
}
|
||||
Err(ErrorReported) => {
|
||||
this.tcx().types.err
|
||||
@ -1185,7 +1185,7 @@ pub fn ast_ty_to_ty<'tcx>(this: &AstConv<'tcx>,
|
||||
ty::mk_bare_fn(tcx, None, tcx.mk_bare_fn(bare_fn))
|
||||
}
|
||||
ast::TyPolyTraitRef(ref bounds) => {
|
||||
conv_ty_poly_trait_ref(this, rscope, ast_ty.span, &bounds[])
|
||||
conv_ty_poly_trait_ref(this, rscope, ast_ty.span, &bounds[..])
|
||||
}
|
||||
ast::TyPath(ref path, id) => {
|
||||
let a_def = match tcx.def_map.borrow().get(&id) {
|
||||
@ -1424,7 +1424,7 @@ fn ty_of_method_or_bare_fn<'a, 'tcx>(this: &AstConv<'tcx>,
|
||||
// Skip the first argument if `self` is present.
|
||||
&self_and_input_tys[1..]
|
||||
} else {
|
||||
&self_and_input_tys[]
|
||||
&self_and_input_tys[..]
|
||||
};
|
||||
|
||||
let (ior, lfp) = find_implied_output_region(input_tys, input_pats);
|
||||
@ -1623,7 +1623,7 @@ fn conv_ty_poly_trait_ref<'tcx>(
|
||||
ast_bounds: &[ast::TyParamBound])
|
||||
-> Ty<'tcx>
|
||||
{
|
||||
let mut partitioned_bounds = partition_bounds(this.tcx(), span, &ast_bounds[]);
|
||||
let mut partitioned_bounds = partition_bounds(this.tcx(), span, &ast_bounds[..]);
|
||||
|
||||
let mut projection_bounds = Vec::new();
|
||||
let main_trait_bound = if !partitioned_bounds.trait_bounds.is_empty() {
|
||||
|
@ -162,7 +162,7 @@ pub fn check_pat<'a, 'tcx>(pcx: &pat_ctxt<'a, 'tcx>,
|
||||
check_pat_enum(pcx, pat, &path, Some(&[]), expected);
|
||||
}
|
||||
ast::PatEnum(ref path, ref subpats) => {
|
||||
let subpats = subpats.as_ref().map(|v| &v[]);
|
||||
let subpats = subpats.as_ref().map(|v| &v[..]);
|
||||
check_pat_enum(pcx, pat, path, subpats, expected);
|
||||
}
|
||||
ast::PatStruct(ref path, ref fields, etc) => {
|
||||
|
@ -256,7 +256,7 @@ fn confirm_builtin_call<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>,
|
||||
check_argument_types(fcx,
|
||||
call_expr.span,
|
||||
&fn_sig.inputs,
|
||||
&expected_arg_tys[],
|
||||
&expected_arg_tys[..],
|
||||
arg_exprs,
|
||||
AutorefArgs::No,
|
||||
fn_sig.variadic,
|
||||
|
@ -901,7 +901,7 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> {
|
||||
debug!("applicable_candidates: {}", applicable_candidates.repr(self.tcx()));
|
||||
|
||||
if applicable_candidates.len() > 1 {
|
||||
match self.collapse_candidates_to_trait_pick(&applicable_candidates[]) {
|
||||
match self.collapse_candidates_to_trait_pick(&applicable_candidates[..]) {
|
||||
Some(pick) => { return Some(Ok(pick)); }
|
||||
None => { }
|
||||
}
|
||||
|
@ -172,7 +172,7 @@ fn suggest_traits_to_import<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
|
||||
traits_are = if candidates.len() == 1 {"trait is"} else {"traits are"},
|
||||
one_of_them = if candidates.len() == 1 {"it"} else {"one of them"});
|
||||
|
||||
fcx.sess().fileline_help(span, &msg[]);
|
||||
fcx.sess().fileline_help(span, &msg[..]);
|
||||
|
||||
for (i, trait_did) in candidates.iter().enumerate() {
|
||||
fcx.sess().fileline_help(span,
|
||||
@ -218,7 +218,7 @@ fn suggest_traits_to_import<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
|
||||
one_of_them = if candidates.len() == 1 {"it"} else {"one of them"},
|
||||
name = method_ustring);
|
||||
|
||||
fcx.sess().fileline_help(span, &msg[]);
|
||||
fcx.sess().fileline_help(span, &msg[..]);
|
||||
|
||||
for (i, trait_info) in candidates.iter().enumerate() {
|
||||
fcx.sess().fileline_help(span,
|
||||
|
@ -2209,7 +2209,7 @@ fn check_method_argument_types<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
|
||||
|
||||
check_argument_types(fcx,
|
||||
sp,
|
||||
&err_inputs[],
|
||||
&err_inputs[..],
|
||||
&[],
|
||||
args_no_rcvr,
|
||||
autoref_args,
|
||||
@ -2228,7 +2228,7 @@ fn check_method_argument_types<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
|
||||
check_argument_types(fcx,
|
||||
sp,
|
||||
&fty.sig.0.inputs[1..],
|
||||
&expected_arg_tys[],
|
||||
&expected_arg_tys[..],
|
||||
args_no_rcvr,
|
||||
autoref_args,
|
||||
fty.sig.0.variadic,
|
||||
@ -3055,7 +3055,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>,
|
||||
ty::ty_struct(base_id, substs) => {
|
||||
debug!("struct named {}", ppaux::ty_to_string(tcx, base_t));
|
||||
let fields = ty::lookup_struct_fields(tcx, base_id);
|
||||
fcx.lookup_field_ty(expr.span, base_id, &fields[],
|
||||
fcx.lookup_field_ty(expr.span, base_id, &fields[..],
|
||||
field.node.name, &(*substs))
|
||||
}
|
||||
_ => None
|
||||
@ -3155,7 +3155,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>,
|
||||
if tuple_like {
|
||||
debug!("tuple struct named {}", ppaux::ty_to_string(tcx, base_t));
|
||||
let fields = ty::lookup_struct_fields(tcx, base_id);
|
||||
fcx.lookup_tup_field_ty(expr.span, base_id, &fields[],
|
||||
fcx.lookup_tup_field_ty(expr.span, base_id, &fields[..],
|
||||
idx.node, &(*substs))
|
||||
} else {
|
||||
None
|
||||
@ -3328,7 +3328,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>,
|
||||
class_id,
|
||||
id,
|
||||
fcx.ccx.tcx.mk_substs(struct_substs),
|
||||
&class_fields[],
|
||||
&class_fields[..],
|
||||
fields,
|
||||
base_expr.is_none(),
|
||||
None);
|
||||
@ -3371,7 +3371,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>,
|
||||
variant_id,
|
||||
id,
|
||||
fcx.ccx.tcx.mk_substs(substitutions),
|
||||
&variant_fields[],
|
||||
&variant_fields[..],
|
||||
fields,
|
||||
true,
|
||||
Some(enum_id));
|
||||
@ -3732,10 +3732,10 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>,
fcx.write_ty(id, fcx.node_ty(b.id));
}
ast::ExprCall(ref callee, ref args) => {
callee::check_call(fcx, expr, &**callee, &args[], expected);
callee::check_call(fcx, expr, &**callee, &args[..], expected);
}
ast::ExprMethodCall(ident, ref tps, ref args) => {
check_method_call(fcx, expr, ident, &args[], &tps[], expected, lvalue_pref);
check_method_call(fcx, expr, ident, &args[..], &tps[..], expected, lvalue_pref);
let arg_tys = args.iter().map(|a| fcx.expr_ty(&**a));
let args_err = arg_tys.fold(false,
|rest_err, a| {
@ -3822,7 +3822,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>,
ast::ExprTup(ref elts) => {
let flds = expected.only_has_type(fcx).and_then(|ty| {
match ty.sty {
ty::ty_tup(ref flds) => Some(&flds[]),
ty::ty_tup(ref flds) => Some(&flds[..]),
_ => None
}
});
@ -3856,7 +3856,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>,
let struct_id = match def {
Some(def::DefVariant(enum_id, variant_id, true)) => {
check_struct_enum_variant(fcx, id, expr.span, enum_id,
variant_id, &fields[]);
variant_id, &fields[..]);
enum_id
}
Some(def::DefTrait(def_id)) => {
@ -3865,7 +3865,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>,
pprust::path_to_string(path));
check_struct_fields_on_error(fcx,
id,
&fields[],
&fields[..],
base_expr);
def_id
},
@ -3878,7 +3878,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>,
id,
expr.span,
struct_did,
&fields[],
&fields[..],
base_expr.as_ref().map(|e| &**e));
}
_ => {
@ -3887,7 +3887,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>,
pprust::path_to_string(path));
check_struct_fields_on_error(fcx,
id,
&fields[],
&fields[..],
base_expr);
}
}
@ -5232,10 +5232,10 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &ast::ForeignItem) {
}
};
(n_tps, inputs, ty::FnConverging(output))
} else if &name[] == "abort" || &name[] == "unreachable" {
} else if &name[..] == "abort" || &name[..] == "unreachable" {
(0, Vec::new(), ty::FnDiverging)
} else {
let (n_tps, inputs, output) = match &name[] {
let (n_tps, inputs, output) = match &name[..] {
"breakpoint" => (0, Vec::new(), ty::mk_nil(tcx)),
"size_of" |
"pref_align_of" | "min_align_of" => (1, Vec::new(), ccx.tcx.types.uint),
@ -5260,7 +5260,7 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &ast::ForeignItem) {
"get_tydesc" => {
let tydesc_ty = match ty::get_tydesc_ty(ccx.tcx) {
Ok(t) => t,
Err(s) => { span_fatal!(tcx.sess, it.span, E0240, "{}", &s[]); }
Err(s) => { span_fatal!(tcx.sess, it.span, E0240, "{}", &s[..]); }
};
let td_ptr = ty::mk_ptr(ccx.tcx, ty::mt {
ty: tydesc_ty,

@ -283,7 +283,7 @@ impl<'a, 'tcx> Rcx<'a, 'tcx> {
};

let len = self.region_bound_pairs.len();
self.relate_free_regions(&fn_sig[], body.id);
self.relate_free_regions(&fn_sig[..], body.id);
link_fn_args(self, CodeExtent::from_node_id(body.id), &fn_decl.inputs[]);
self.visit_block(body);
self.visit_region_obligations(body.id);
@ -674,7 +674,7 @@ fn visit_expr(rcx: &mut Rcx, expr: &ast::Expr) {
}

ast::ExprMatch(ref discr, ref arms, _) => {
link_match(rcx, &**discr, &arms[]);
link_match(rcx, &**discr, &arms[..]);

visit::walk_expr(rcx, expr);
}

@ -268,7 +268,7 @@ fn get_enum_variant_types<'a, 'tcx>(ccx: &CollectCtxt<'a, 'tcx>,
ast::TupleVariantKind(ref args) if args.len() > 0 => {
let rs = ExplicitRscope;
let input_tys: Vec<_> = args.iter().map(|va| ccx.to_ty(&rs, &*va.ty)).collect();
ty::mk_ctor_fn(tcx, variant_def_id, &input_tys[], enum_scheme.ty)
ty::mk_ctor_fn(tcx, variant_def_id, &input_tys[..], enum_scheme.ty)
}

ast::TupleVariantKind(_) => {
@ -313,7 +313,7 @@ fn collect_trait_methods<'a, 'tcx>(ccx: &CollectCtxt<'a, 'tcx>,
trait_id,
&trait_def.generics,
&trait_predicates,
&trait_items[],
&trait_items[..],
&m.id,
&m.ident.name,
&m.explicit_self,
@ -328,7 +328,7 @@ fn collect_trait_methods<'a, 'tcx>(ccx: &CollectCtxt<'a, 'tcx>,
trait_id,
&trait_def.generics,
&trait_predicates,
&trait_items[],
&trait_items[..],
&m.id,
&m.pe_ident().name,
m.pe_explicit_self(),
@ -871,7 +871,7 @@ fn convert_struct<'a, 'tcx>(ccx: &CollectCtxt<'a, 'tcx>,
local_def(field.node.id)].ty).collect();
let ctor_fn_ty = ty::mk_ctor_fn(tcx,
local_def(ctor_id),
&inputs[],
&inputs[..],
selfty);
write_ty_to_tcx(tcx, ctor_id, ctor_fn_ty);
tcx.tcache.borrow_mut().insert(local_def(ctor_id),
@ -1358,7 +1358,7 @@ fn ty_generics_for_fn_or_method<'a,'tcx>(ccx: &CollectCtxt<'a,'tcx>,
let early_lifetimes = resolve_lifetime::early_bound_lifetimes(generics);
ty_generics(ccx,
subst::FnSpace,
&early_lifetimes[],
&early_lifetimes[..],
&generics.ty_params[],
&generics.where_clause,
base_generics)

@ -1065,7 +1065,7 @@ impl<'a, 'tcx> SolveContext<'a, 'tcx> {
// attribute and report an error with various results if found.
if ty::has_attr(tcx, item_def_id, "rustc_variance") {
let found = item_variances.repr(tcx);
span_err!(tcx.sess, tcx.map.span(item_id), E0208, "{}", &found[]);
span_err!(tcx.sess, tcx.map.span(item_id), E0208, "{}", &found[..]);
}

let newly_added = tcx.item_variance_map.borrow_mut()

@ -34,7 +34,7 @@ pub fn highlight(src: &str, class: Option<&str>, id: Option<&str>) -> String {
class,
id,
&mut out).unwrap();
String::from_utf8_lossy(&out[]).into_owned()
String::from_utf8_lossy(&out[..]).into_owned()
}

/// Exhausts the `lexer` writing the output into `out`.

@ -1120,7 +1120,7 @@ impl Json {
/// Returns None otherwise.
pub fn as_string<'a>(&'a self) -> Option<&'a str> {
match *self {
Json::String(ref s) => Some(&s[]),
Json::String(ref s) => Some(&s[..]),
_ => None
}
}
@ -2237,7 +2237,7 @@ impl ::Decoder for Decoder {
return Err(ExpectedError("String or Object".to_string(), format!("{}", json)))
}
};
let idx = match names.iter().position(|n| *n == &name[]) {
let idx = match names.iter().position(|n| *n == &name[..]) {
Some(idx) => idx,
None => return Err(UnknownVariantError(name))
};
@ -3461,7 +3461,7 @@ mod tests {
hm.insert(1, true);
let mut mem_buf = Vec::new();
write!(&mut mem_buf, "{}", super::as_pretty_json(&hm)).unwrap();
let json_str = from_utf8(&mem_buf[]).unwrap();
let json_str = from_utf8(&mem_buf[..]).unwrap();
match from_str(json_str) {
Err(_) => panic!("Unable to parse json_str: {:?}", json_str),
_ => {} // it parsed and we are good to go
@ -3477,7 +3477,7 @@ mod tests {
hm.insert(1, true);
let mut mem_buf = Vec::new();
write!(&mut mem_buf, "{}", super::as_pretty_json(&hm)).unwrap();
let json_str = from_utf8(&mem_buf[]).unwrap();
let json_str = from_utf8(&mem_buf[..]).unwrap();
match from_str(json_str) {
Err(_) => panic!("Unable to parse json_str: {:?}", json_str),
_ => {} // it parsed and we are good to go
@ -3517,7 +3517,7 @@ mod tests {
write!(&mut writer, "{}",
super::as_pretty_json(&json).indent(i)).unwrap();

let printed = from_utf8(&writer[]).unwrap();
let printed = from_utf8(&writer[..]).unwrap();

// Check for indents at each line
let lines: Vec<&str> = printed.lines().collect();
@ -3549,7 +3549,7 @@ mod tests {
let mut map = HashMap::new();
map.insert(Enum::Foo, 0);
let result = json::encode(&map).unwrap();
assert_eq!(&result[], r#"{"Foo":0}"#);
assert_eq!(&result[..], r#"{"Foo":0}"#);
let decoded: HashMap<Enum, _> = json::decode(&result).unwrap();
assert_eq!(map, decoded);
}

@ -326,7 +326,7 @@ impl Encodable for str {

impl Encodable for String {
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
s.emit_str(&self[])
s.emit_str(&self[..])
}
}

@ -103,7 +103,7 @@ impl ops::Deref for OsString {

#[inline]
fn deref(&self) -> &OsStr {
&self[]
&self[..]
}
}

@ -267,7 +267,7 @@ impl Debug for OsStr {
}

impl BorrowFrom<OsString> for OsStr {
fn borrow_from(owned: &OsString) -> &OsStr { &owned[] }
fn borrow_from(owned: &OsString) -> &OsStr { &owned[..] }
}

impl ToOwned<OsString> for OsStr {
@ -288,7 +288,7 @@ impl AsOsStr for OsStr {

impl AsOsStr for OsString {
fn as_os_str(&self) -> &OsStr {
&self[]
&self[..]
}
}

@ -300,7 +300,7 @@ impl AsOsStr for str {

impl AsOsStr for String {
fn as_os_str(&self) -> &OsStr {
OsStr::from_str(&self[])
OsStr::from_str(&self[..])
}
}

@ -618,14 +618,14 @@ mod tests {
#[test]
fn read_char_buffered() {
let buf = [195u8, 159u8];
let mut reader = BufReader::with_capacity(1, &buf[]);
let mut reader = BufReader::with_capacity(1, &buf[..]);
assert_eq!(reader.chars().next(), Some(Ok('ß')));
}

#[test]
fn test_chars() {
let buf = [195u8, 159u8, b'a'];
let mut reader = BufReader::with_capacity(1, &buf[]);
let mut reader = BufReader::with_capacity(1, &buf[..]);
let mut it = reader.chars();
assert_eq!(it.next(), Some(Ok('ß')));
assert_eq!(it.next(), Some(Ok('a')));

@ -180,7 +180,7 @@ mod tests {
fn test_buf_writer() {
let mut buf = [0 as u8; 9];
{
let mut writer = Cursor::new(&mut buf[]);
let mut writer = Cursor::new(&mut buf[..]);
assert_eq!(writer.position(), 0);
assert_eq!(writer.write(&[0]), Ok(1));
assert_eq!(writer.position(), 1);
@ -201,7 +201,7 @@ mod tests {
fn test_buf_writer_seek() {
let mut buf = [0 as u8; 8];
{
let mut writer = Cursor::new(&mut buf[]);
let mut writer = Cursor::new(&mut buf[..]);
assert_eq!(writer.position(), 0);
assert_eq!(writer.write(&[1]), Ok(1));
assert_eq!(writer.position(), 1);
@ -229,7 +229,7 @@ mod tests {
#[test]
fn test_buf_writer_error() {
let mut buf = [0 as u8; 2];
let mut writer = Cursor::new(&mut buf[]);
let mut writer = Cursor::new(&mut buf[..]);
assert_eq!(writer.write(&[0]), Ok(1));
assert_eq!(writer.write(&[0, 0]), Ok(1));
assert_eq!(writer.write(&[0, 0]), Ok(0));
@ -331,7 +331,7 @@ mod tests {
#[test]
fn seek_past_end() {
let buf = [0xff];
let mut r = Cursor::new(&buf[]);
let mut r = Cursor::new(&buf[..]);
assert_eq!(r.seek(SeekFrom::Start(10)), Ok(10));
assert_eq!(r.read(&mut [0]), Ok(0));

@ -340,7 +340,7 @@ mod tests {
assert_eq!(r.read(&mut [0]), Ok(0));

let mut buf = [0];
let mut r = Cursor::new(&mut buf[]);
let mut r = Cursor::new(&mut buf[..]);
assert_eq!(r.seek(SeekFrom::Start(10)), Ok(10));
assert_eq!(r.write(&[3]), Ok(0));
}
@ -348,14 +348,14 @@ mod tests {
#[test]
fn seek_before_0() {
let buf = [0xff_u8];
let mut r = Cursor::new(&buf[]);
let mut r = Cursor::new(&buf[..]);
assert!(r.seek(SeekFrom::End(-2)).is_err());

let mut r = Cursor::new(vec!(10u8));
assert!(r.seek(SeekFrom::End(-2)).is_err());

let mut buf = [0];
let mut r = Cursor::new(&mut buf[]);
let mut r = Cursor::new(&mut buf[..]);
assert!(r.seek(SeekFrom::End(-2)).is_err());
}

@ -546,7 +546,7 @@ mod test {
assert_eq!(a, &w.get_ref()[]);
let w = w.into_inner();
let a: &[_] = &[0, 1];
assert_eq!(a, &w[]);
assert_eq!(a, &w[..]);
}

// This is just here to make sure that we don't infinite loop in the
@ -643,14 +643,14 @@ mod test {
#[test]
fn read_char_buffered() {
let buf = [195u8, 159u8];
let mut reader = BufferedReader::with_capacity(1, &buf[]);
let mut reader = BufferedReader::with_capacity(1, &buf[..]);
assert_eq!(reader.read_char(), Ok('ß'));
}

#[test]
fn test_chars() {
let buf = [195u8, 159u8, b'a'];
let mut reader = BufferedReader::with_capacity(1, &buf[]);
let mut reader = BufferedReader::with_capacity(1, &buf[..]);
let mut it = reader.chars();
assert_eq!(it.next(), Some(Ok('ß')));
assert_eq!(it.next(), Some(Ok('a')));

@ -877,7 +877,7 @@ impl BytesContainer for String {
}
#[inline]
fn container_as_str(&self) -> Option<&str> {
Some(&self[])
Some(&self[..])
}
#[inline]
fn is_str(_: Option<&String>) -> bool { true }
@ -893,7 +893,7 @@ impl BytesContainer for [u8] {
impl BytesContainer for Vec<u8> {
#[inline]
fn container_as_bytes(&self) -> &[u8] {
&self[]
&self[..]
}
}

@ -182,7 +182,7 @@ impl GenericPathUnsafe for Path {
s.push_str("..");
s.push(SEP);
s.push_str(filename);
self.update_normalized(&s[]);
self.update_normalized(&s[..]);
}
None => {
self.update_normalized(filename);
@ -192,20 +192,20 @@ impl GenericPathUnsafe for Path {
s.push_str(&self.repr[..end]);
s.push(SEP);
s.push_str(filename);
self.update_normalized(&s[]);
self.update_normalized(&s[..]);
}
Some((idxb,idxa,_)) if self.prefix == Some(DiskPrefix) && idxa == self.prefix_len() => {
let mut s = String::with_capacity(idxb + filename.len());
s.push_str(&self.repr[..idxb]);
s.push_str(filename);
self.update_normalized(&s[]);
self.update_normalized(&s[..]);
}
Some((idxb,_,_)) => {
let mut s = String::with_capacity(idxb + 1 + filename.len());
s.push_str(&self.repr[..idxb]);
s.push(SEP);
s.push_str(filename);
self.update_normalized(&s[]);
self.update_normalized(&s[..]);
}
}
}
@ -229,7 +229,7 @@ impl GenericPathUnsafe for Path {
}
fn shares_volume(me: &Path, path: &str) -> bool {
// path is assumed to have a prefix of Some(DiskPrefix)
let repr = &me.repr[];
let repr = &me.repr[..];
match me.prefix {
Some(DiskPrefix) => {
repr.as_bytes()[0] == path.as_bytes()[0].to_ascii_uppercase()
@ -261,7 +261,7 @@ impl GenericPathUnsafe for Path {
else { None };
let pathlen = path_.as_ref().map_or(path.len(), |p| p.len());
let mut s = String::with_capacity(me.repr.len() + 1 + pathlen);
s.push_str(&me.repr[]);
s.push_str(&me.repr[..]);
let plen = me.prefix_len();
// if me is "C:" we don't want to add a path separator
match me.prefix {
@ -273,9 +273,9 @@ impl GenericPathUnsafe for Path {
}
match path_ {
None => s.push_str(path),
Some(p) => s.push_str(&p[]),
Some(p) => s.push_str(&p[..]),
};
me.update_normalized(&s[])
me.update_normalized(&s[..])
}

if !path.is_empty() {
@ -329,7 +329,7 @@ impl GenericPath for Path {
/// Always returns a `Some` value.
#[inline]
fn as_str<'a>(&'a self) -> Option<&'a str> {
Some(&self.repr[])
Some(&self.repr[..])
}

#[inline]
@ -351,13 +351,13 @@ impl GenericPath for Path {
/// Always returns a `Some` value.
fn dirname_str<'a>(&'a self) -> Option<&'a str> {
Some(match self.sepidx_or_prefix_len() {
None if ".." == self.repr => &self.repr[],
None if ".." == self.repr => &self.repr[..],
None => ".",
Some((_,idxa,end)) if &self.repr[idxa..end] == ".." => {
&self.repr[]
&self.repr[..]
}
Some((idxb,_,end)) if &self.repr[idxb..end] == "\\" => {
&self.repr[]
&self.repr[..]
}
Some((0,idxa,_)) => &self.repr[..idxa],
Some((idxb,idxa,_)) => {
@ -379,7 +379,7 @@ impl GenericPath for Path {
/// See `GenericPath::filename_str` for info.
/// Always returns a `Some` value if `filename` returns a `Some` value.
fn filename_str<'a>(&'a self) -> Option<&'a str> {
let repr = &self.repr[];
let repr = &self.repr[..];
match self.sepidx_or_prefix_len() {
None if "." == repr || ".." == repr => None,
None => Some(repr),
@ -639,7 +639,7 @@ impl Path {
/// Does not distinguish between absolute and cwd-relative paths, e.g.
/// C:\foo and C:foo.
pub fn str_components<'a>(&'a self) -> StrComponents<'a> {
let repr = &self.repr[];
let repr = &self.repr[..];
let s = match self.prefix {
Some(_) => {
let plen = self.prefix_len();
@ -667,8 +667,8 @@ impl Path {
}

fn equiv_prefix(&self, other: &Path) -> bool {
let s_repr = &self.repr[];
let o_repr = &other.repr[];
let s_repr = &self.repr[..];
let o_repr = &other.repr[..];
match (self.prefix, other.prefix) {
(Some(DiskPrefix), Some(VerbatimDiskPrefix)) => {
self.is_absolute() &&
@ -823,7 +823,7 @@ impl Path {
fn update_sepidx(&mut self) {
let s = if self.has_nonsemantic_trailing_slash() {
&self.repr[..self.repr.len()-1]
} else { &self.repr[] };
} else { &self.repr[..] };
let sep_test: fn(char) -> bool = if !prefix_is_verbatim(self.prefix) {
is_sep
} else {
@ -902,7 +902,7 @@ pub fn is_verbatim(path: &Path) -> bool {
/// non-verbatim, the non-verbatim version is returned.
/// Otherwise, None is returned.
pub fn make_non_verbatim(path: &Path) -> Option<Path> {
let repr = &path.repr[];
let repr = &path.repr[..];
let new_path = match path.prefix {
Some(VerbatimPrefix(_)) | Some(DeviceNSPrefix(_)) => return None,
Some(UNCPrefix(_,_)) | Some(DiskPrefix) | None => return Some(path.clone()),

@ -37,7 +37,7 @@ pub fn on_panic(obj: &(Any+Send), file: &'static str, line: uint) {
let msg = match obj.downcast_ref::<&'static str>() {
Some(s) => *s,
None => match obj.downcast_ref::<String>() {
Some(s) => &s[],
Some(s) => &s[..],
None => "Box<Any>",
}
};

@ -978,7 +978,7 @@ impl ops::Deref for PathBuf {
type Target = Path;

fn deref(&self) -> &Path {
unsafe { mem::transmute(&self.inner[]) }
unsafe { mem::transmute(&self.inner[..]) }
}
}

@ -1010,7 +1010,7 @@ impl cmp::Ord for PathBuf {

impl AsOsStr for PathBuf {
fn as_os_str(&self) -> &OsStr {
&self.inner[]
&self.inner[..]
}
}

@ -265,12 +265,12 @@ fn fill_utf16_buf_base<F1, F2, T>(mut f1: F1, f2: F2) -> Result<T, ()>
let mut n = stack_buf.len();
loop {
let buf = if n <= stack_buf.len() {
&mut stack_buf[]
&mut stack_buf[..]
} else {
let extra = n - heap_buf.len();
heap_buf.reserve(extra);
heap_buf.set_len(n);
&mut heap_buf[]
&mut heap_buf[..]
};

// This function is typically called on windows API functions which

@ -114,7 +114,7 @@ impl Iterator for Env {

let (k, v) = match s.iter().position(|&b| b == '=' as u16) {
Some(n) => (&s[..n], &s[n+1..]),
None => (s, &[][]),
None => (s, &[][..]),
};
Some((OsStringExt::from_wide(k), OsStringExt::from_wide(v)))
}
@ -186,7 +186,7 @@ impl<'a> Iterator for SplitPaths<'a> {
if !must_yield && in_progress.is_empty() {
None
} else {
Some(super::os2path(&in_progress[]))
Some(super::os2path(&in_progress[..]))
}
}
}
@ -208,14 +208,14 @@ pub fn join_paths<I, T>(paths: I) -> Result<OsString, JoinPathsError>
return Err(JoinPathsError)
} else if v.contains(&sep) {
joined.push(b'"' as u16);
joined.push_all(&v[]);
joined.push_all(&v[..]);
joined.push(b'"' as u16);
} else {
joined.push_all(&v[]);
joined.push_all(&v[..]);
}
}

Ok(OsStringExt::from_wide(&joined[]))
Ok(OsStringExt::from_wide(&joined[..]))
}

impl fmt::Display for JoinPathsError {
Some files were not shown because too many files have changed in this diff.