diff --git a/src/compiletest/runtest.rs b/src/compiletest/runtest.rs index 875061e69b7a2..afcdcf6439c26 100644 --- a/src/compiletest/runtest.rs +++ b/src/compiletest/runtest.rs @@ -539,18 +539,17 @@ fn run_debuginfo_gdb_test(config: &Config, props: &TestProps, testfile: &Path) { script_str.push_str("set print pretty off\n"); // Add the pretty printer directory to GDB's source-file search path - script_str.push_str(format!("directory {}\n", rust_pp_module_abs_path)[]); + script_str.push_str(&format!("directory {}\n", rust_pp_module_abs_path)[]); // Load the target executable - script_str.push_str(format!("file {}\n", - exe_file.as_str().unwrap().replace("\\", "\\\\")) - .as_slice()); + script_str.push_str(&format!("file {}\n", + exe_file.as_str().unwrap().replace("\\", "\\\\"))[]); // Add line breakpoints for line in breakpoint_lines.iter() { - script_str.push_str(format!("break '{}':{}\n", - testfile.filename_display(), - *line)[]); + script_str.push_str(&format!("break '{}':{}\n", + testfile.filename_display(), + *line)[]); } script_str.push_str(cmds.as_slice()); @@ -676,7 +675,7 @@ fn run_debuginfo_lldb_test(config: &Config, props: &TestProps, testfile: &Path) .unwrap() .to_string(); - script_str.push_str(format!("command script import {}\n", rust_pp_module_abs_path[])[]); + script_str.push_str(&format!("command script import {}\n", &rust_pp_module_abs_path[])[]); script_str.push_str("type summary add --no-value "); script_str.push_str("--python-function lldb_rust_formatters.print_val "); script_str.push_str("-x \".*\" --category Rust\n"); @@ -910,7 +909,7 @@ fn check_error_patterns(props: &TestProps, if done { return; } let missing_patterns = - props.error_patterns[next_err_idx..]; + props.error_patterns.index(&(next_err_idx..)); if missing_patterns.len() == 1u { fatal_proc_rec(format!("error pattern '{}' not found!", missing_patterns[0]).as_slice(), diff --git a/src/libcollections/bit.rs b/src/libcollections/bit.rs index c092e000215d3..3d1779445e124 100644 --- a/src/libcollections/bit.rs +++ b/src/libcollections/bit.rs @@ -330,7 +330,7 @@ impl Bitv { if extra_bytes > 0 { let mut last_word = 0u32; - for (i, &byte) in bytes[complete_words*4..].iter().enumerate() { + for (i, &byte) in bytes.index(&((complete_words*4)..)).iter().enumerate() { last_word |= (reverse_bits(byte) as u32) << (i * 8); } bitv.storage.push(last_word); diff --git a/src/libcollections/ring_buf.rs b/src/libcollections/ring_buf.rs index 11775f62b1c54..98e9d6c16b823 100644 --- a/src/libcollections/ring_buf.rs +++ b/src/libcollections/ring_buf.rs @@ -525,7 +525,7 @@ impl RingBuf { /// *num = *num - 2; /// } /// let b: &[_] = &[&mut 3, &mut 1, &mut 2]; - /// assert_eq!(buf.iter_mut().collect::>()[], b); + /// assert_eq!(&buf.iter_mut().collect::>()[], b); /// ``` #[stable] pub fn iter_mut<'a>(&'a mut self) -> IterMut<'a, T> { @@ -556,7 +556,7 @@ impl RingBuf { let buf = self.buffer_as_slice(); if contiguous { let (empty, buf) = buf.split_at(0); - (buf[self.tail..self.head], empty) + (buf.index(&(self.tail..self.head)), empty) } else { let (mid, right) = buf.split_at(self.tail); let (left, _) = mid.split_at(self.head); diff --git a/src/libcollections/slice.rs b/src/libcollections/slice.rs index 9e5aa7d645ba0..e57574fdbcee3 100644 --- a/src/libcollections/slice.rs +++ b/src/libcollections/slice.rs @@ -55,7 +55,7 @@ //! #![feature(slicing_syntax)] //! fn main() { //! let numbers = [0i, 1i, 2i]; -//! let last_numbers = numbers[1..3]; +//! let last_numbers = numbers.index(&(1..3)); //! // last_numbers is now &[1i, 2i] //! 
} //! ``` @@ -98,7 +98,7 @@ use core::iter::{range, range_step, MultiplicativeIterator}; use core::kinds::Sized; use core::mem::size_of; use core::mem; -use core::ops::{FnMut, SliceMut}; +use core::ops::{FnMut, FullRange, Index, IndexMut}; use core::option::Option::{self, Some, None}; use core::ptr::PtrExt; use core::ptr; @@ -1065,12 +1065,12 @@ impl ElementSwaps { #[unstable = "trait is unstable"] impl BorrowFrom> for [T] { - fn borrow_from(owned: &Vec) -> &[T] { owned[] } + fn borrow_from(owned: &Vec) -> &[T] { owned.index(&FullRange) } } #[unstable = "trait is unstable"] impl BorrowFromMut> for [T] { - fn borrow_from_mut(owned: &mut Vec) -> &mut [T] { owned.as_mut_slice_() } + fn borrow_from_mut(owned: &mut Vec) -> &mut [T] { owned.index_mut(&FullRange) } } #[unstable = "trait is unstable"] @@ -1393,15 +1393,20 @@ fn merge_sort(v: &mut [T], mut compare: F) where F: FnMut(&T, &T) -> Order #[cfg(test)] mod tests { - use prelude::{Some, None, range, Vec, ToString, Clone, Greater, Less, Equal}; - use prelude::{SliceExt, Iterator, IteratorExt}; - use prelude::AsSlice; - use prelude::{RandomAccessIterator, Ord, SliceConcatExt}; + use core::cmp::Ordering::{Greater, Less, Equal}; + use core::prelude::{Some, None, range, Clone}; + use core::prelude::{Iterator, IteratorExt}; + use core::prelude::{AsSlice}; + use core::prelude::{Ord, FullRange}; use core::default::Default; use core::mem; + use core::ops::Index; + use std::iter::RandomAccessIterator; use std::rand::{Rng, thread_rng}; use std::rc::Rc; - use super::ElementSwaps; + use string::ToString; + use vec::Vec; + use super::{ElementSwaps, SliceConcatExt, SliceExt}; fn square(n: uint) -> uint { n * n } @@ -1606,7 +1611,7 @@ mod tests { // Test on stack. let vec_stack: &[_] = &[1i, 2, 3]; - let v_b = vec_stack[1u..3u].to_vec(); + let v_b = vec_stack.index(&(1u..3u)).to_vec(); assert_eq!(v_b.len(), 2u); let v_b = v_b.as_slice(); assert_eq!(v_b[0], 2); @@ -1614,7 +1619,7 @@ mod tests { // Test `Box<[T]>` let vec_unique = vec![1i, 2, 3, 4, 5, 6]; - let v_d = vec_unique[1u..6u].to_vec(); + let v_d = vec_unique.index(&(1u..6u)).to_vec(); assert_eq!(v_d.len(), 5u); let v_d = v_d.as_slice(); assert_eq!(v_d[0], 2); @@ -1627,21 +1632,21 @@ mod tests { #[test] fn test_slice_from() { let vec: &[int] = &[1, 2, 3, 4]; - assert_eq!(vec[0..], vec); + assert_eq!(vec.index(&(0..)), vec); let b: &[int] = &[3, 4]; - assert_eq!(vec[2..], b); + assert_eq!(vec.index(&(2..)), b); let b: &[int] = &[]; - assert_eq!(vec[4..], b); + assert_eq!(vec.index(&(4..)), b); } #[test] fn test_slice_to() { let vec: &[int] = &[1, 2, 3, 4]; - assert_eq!(vec[..4], vec); + assert_eq!(vec.index(&(0..4)), vec); let b: &[int] = &[1, 2]; - assert_eq!(vec[..2], b); + assert_eq!(vec.index(&(0..2)), b); let b: &[int] = &[]; - assert_eq!(vec[..0], b); + assert_eq!(vec.index(&(0..0)), b); } @@ -2567,7 +2572,7 @@ mod tests { } assert_eq!(cnt, 3); - for f in v[1..3].iter() { + for f in v.index(&(1..3)).iter() { assert!(*f == Foo); cnt += 1; } diff --git a/src/libcollections/str.rs b/src/libcollections/str.rs index c0482702ccdb6..5a278f95e976f 100644 --- a/src/libcollections/str.rs +++ b/src/libcollections/str.rs @@ -60,7 +60,7 @@ use core::char::CharExt; use core::clone::Clone; use core::iter::AdditiveIterator; use core::iter::{range, Iterator, IteratorExt}; -use core::ops; +use core::ops::{FullRange, Index}; use core::option::Option::{self, Some, None}; use core::slice::AsSlice; use core::str as core_str; @@ -386,7 +386,7 @@ macro_rules! 
utf8_acc_cont_byte { #[unstable = "trait is unstable"] impl BorrowFrom for str { - fn borrow_from(owned: &String) -> &str { owned[] } + fn borrow_from(owned: &String) -> &str { owned.index(&FullRange) } } #[unstable = "trait is unstable"] @@ -408,7 +408,7 @@ Section: Trait implementations /// Any string that can be represented as a slice. #[stable] -pub trait StrExt: ops::Slice { +pub trait StrExt: Index { /// Escapes each char in `s` with `char::escape_default`. #[unstable = "return type may change to be an iterator"] fn escape_default(&self) -> String { @@ -464,7 +464,7 @@ pub trait StrExt: ops::Slice { #[unstable = "this functionality may be moved to libunicode"] fn nfd_chars<'a>(&'a self) -> Decompositions<'a> { Decompositions { - iter: self[].chars(), + iter: self.index(&FullRange).chars(), buffer: Vec::new(), sorted: false, kind: Canonical @@ -477,7 +477,7 @@ pub trait StrExt: ops::Slice { #[unstable = "this functionality may be moved to libunicode"] fn nfkd_chars<'a>(&'a self) -> Decompositions<'a> { Decompositions { - iter: self[].chars(), + iter: self.index(&FullRange).chars(), buffer: Vec::new(), sorted: false, kind: Compatible @@ -525,7 +525,7 @@ pub trait StrExt: ops::Slice { /// ``` #[stable] fn contains(&self, pat: &str) -> bool { - core_str::StrExt::contains(self[], pat) + core_str::StrExt::contains(self.index(&FullRange), pat) } /// Returns true if a string contains a char pattern. @@ -541,7 +541,7 @@ pub trait StrExt: ops::Slice { /// ``` #[unstable = "might get removed in favour of a more generic contains()"] fn contains_char(&self, pat: P) -> bool { - core_str::StrExt::contains_char(self[], pat) + core_str::StrExt::contains_char(self.index(&FullRange), pat) } /// An iterator over the characters of `self`. Note, this iterates @@ -555,7 +555,7 @@ pub trait StrExt: ops::Slice { /// ``` #[stable] fn chars(&self) -> Chars { - core_str::StrExt::chars(self[]) + core_str::StrExt::chars(self.index(&FullRange)) } /// An iterator over the bytes of `self` @@ -568,13 +568,13 @@ pub trait StrExt: ops::Slice { /// ``` #[stable] fn bytes(&self) -> Bytes { - core_str::StrExt::bytes(self[]) + core_str::StrExt::bytes(self.index(&FullRange)) } /// An iterator over the characters of `self` and their byte offsets. #[stable] fn char_indices(&self) -> CharIndices { - core_str::StrExt::char_indices(self[]) + core_str::StrExt::char_indices(self.index(&FullRange)) } /// An iterator over substrings of `self`, separated by characters @@ -597,7 +597,7 @@ pub trait StrExt: ops::Slice { /// ``` #[stable] fn split(&self, pat: P) -> Split

{ - core_str::StrExt::split(self[], pat) + core_str::StrExt::split(self.index(&FullRange), pat) } /// An iterator over substrings of `self`, separated by characters @@ -624,7 +624,7 @@ pub trait StrExt: ops::Slice { /// ``` #[stable] fn splitn(&self, count: uint, pat: P) -> SplitN

{ - core_str::StrExt::splitn(self[], count, pat) + core_str::StrExt::splitn(self.index(&FullRange), count, pat) } /// An iterator over substrings of `self`, separated by characters @@ -653,7 +653,7 @@ pub trait StrExt: ops::Slice { /// ``` #[unstable = "might get removed"] fn split_terminator(&self, pat: P) -> SplitTerminator

{ - core_str::StrExt::split_terminator(self[], pat) + core_str::StrExt::split_terminator(self.index(&FullRange), pat) } /// An iterator over substrings of `self`, separated by characters @@ -674,7 +674,7 @@ pub trait StrExt: ops::Slice { /// ``` #[stable] fn rsplitn(&self, count: uint, pat: P) -> RSplitN

{ - core_str::StrExt::rsplitn(self[], count, pat) + core_str::StrExt::rsplitn(self.index(&FullRange), count, pat) } /// An iterator over the start and end indices of the disjoint @@ -699,7 +699,7 @@ pub trait StrExt: ops::Slice { /// ``` #[unstable = "might have its iterator type changed"] fn match_indices<'a>(&'a self, pat: &'a str) -> MatchIndices<'a> { - core_str::StrExt::match_indices(self[], pat) + core_str::StrExt::match_indices(self.index(&FullRange), pat) } /// An iterator over the substrings of `self` separated by the pattern `sep`. @@ -715,7 +715,7 @@ pub trait StrExt: ops::Slice { /// ``` #[unstable = "might get removed in the future in favor of a more generic split()"] fn split_str<'a>(&'a self, pat: &'a str) -> SplitStr<'a> { - core_str::StrExt::split_str(self[], pat) + core_str::StrExt::split_str(self.index(&FullRange), pat) } /// An iterator over the lines of a string (subsequences separated @@ -731,7 +731,7 @@ pub trait StrExt: ops::Slice { /// ``` #[stable] fn lines(&self) -> Lines { - core_str::StrExt::lines(self[]) + core_str::StrExt::lines(self.index(&FullRange)) } /// An iterator over the lines of a string, separated by either @@ -747,7 +747,7 @@ pub trait StrExt: ops::Slice { /// ``` #[stable] fn lines_any(&self) -> LinesAny { - core_str::StrExt::lines_any(self[]) + core_str::StrExt::lines_any(self.index(&FullRange)) } /// Returns a slice of the given string from the byte range @@ -782,7 +782,7 @@ pub trait StrExt: ops::Slice { /// ``` #[unstable = "use slice notation [a..b] instead"] fn slice(&self, begin: uint, end: uint) -> &str { - core_str::StrExt::slice(self[], begin, end) + core_str::StrExt::slice(self.index(&FullRange), begin, end) } /// Returns a slice of the string from `begin` to its end. @@ -795,7 +795,7 @@ pub trait StrExt: ops::Slice { /// See also `slice`, `slice_to` and `slice_chars`. #[unstable = "use slice notation [a..] instead"] fn slice_from(&self, begin: uint) -> &str { - core_str::StrExt::slice_from(self[], begin) + core_str::StrExt::slice_from(self.index(&FullRange), begin) } /// Returns a slice of the string from the beginning to byte @@ -809,7 +809,7 @@ pub trait StrExt: ops::Slice { /// See also `slice`, `slice_from` and `slice_chars`. #[unstable = "use slice notation [0..a] instead"] fn slice_to(&self, end: uint) -> &str { - core_str::StrExt::slice_to(self[], end) + core_str::StrExt::slice_to(self.index(&FullRange), end) } /// Returns a slice of the string from the character range @@ -837,7 +837,7 @@ pub trait StrExt: ops::Slice { /// ``` #[unstable = "may have yet to prove its worth"] fn slice_chars(&self, begin: uint, end: uint) -> &str { - core_str::StrExt::slice_chars(self[], begin, end) + core_str::StrExt::slice_chars(self.index(&FullRange), begin, end) } /// Takes a bytewise (not UTF-8) slice from a string. @@ -848,7 +848,7 @@ pub trait StrExt: ops::Slice { /// the entire slice as well. #[stable] unsafe fn slice_unchecked(&self, begin: uint, end: uint) -> &str { - core_str::StrExt::slice_unchecked(self[], begin, end) + core_str::StrExt::slice_unchecked(self.index(&FullRange), begin, end) } /// Returns true if the pattern `pat` is a prefix of the string. @@ -860,7 +860,7 @@ pub trait StrExt: ops::Slice { /// ``` #[stable] fn starts_with(&self, pat: &str) -> bool { - core_str::StrExt::starts_with(self[], pat) + core_str::StrExt::starts_with(self.index(&FullRange), pat) } /// Returns true if the pattern `pat` is a suffix of the string. 
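Throughout `libcollections/str.rs` this patch mechanically replaces the old full-range sugar `self[]` with an explicit `self.index(&FullRange)` call, and elsewhere replaces `v[a..b]` with `v.index(&(a..b))`. Below is a minimal sketch of what those explicit calls mean, written in present-day Rust, where the patch's `FullRange` was later renamed `RangeFull` and `Index::index` takes the range by value rather than by reference; it is an illustration of the substitution, not the patch's own pre-1.0 signatures.

```rust
use std::ops::Index;

// Calling `Index::index` explicitly is equivalent to the slicing sugar;
// this is the substitution made throughout the diff (modulo the older
// by-reference `index(&self, index: &Range<uint>)` signature it targets).
fn main() {
    let v = vec![1, 2, 3, 4, 5];
    assert_eq!(v.index(..), &v[..]);     // full range, the old `v[]`
    assert_eq!(v.index(1..3), &v[1..3]); // bounded range
    assert_eq!(v.index(3..), &[4, 5]);   // open-ended range

    let s = String::from("foobar");
    assert_eq!(s.index(..3), "foo");     // str output, like `&s[..3]`
}
```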
@@ -872,7 +872,7 @@ pub trait StrExt: ops::Slice { /// ``` #[stable] fn ends_with(&self, pat: &str) -> bool { - core_str::StrExt::ends_with(self[], pat) + core_str::StrExt::ends_with(self.index(&FullRange), pat) } /// Returns a string with all pre- and suffixes that match @@ -892,7 +892,7 @@ pub trait StrExt: ops::Slice { /// ``` #[stable] fn trim_matches(&self, pat: P) -> &str { - core_str::StrExt::trim_matches(self[], pat) + core_str::StrExt::trim_matches(self.index(&FullRange), pat) } /// Returns a string with all prefixes that match @@ -912,7 +912,7 @@ pub trait StrExt: ops::Slice { /// ``` #[stable] fn trim_left_matches(&self, pat: P) -> &str { - core_str::StrExt::trim_left_matches(self[], pat) + core_str::StrExt::trim_left_matches(self.index(&FullRange), pat) } /// Returns a string with all suffixes that match @@ -932,7 +932,7 @@ pub trait StrExt: ops::Slice { /// ``` #[stable] fn trim_right_matches(&self, pat: P) -> &str { - core_str::StrExt::trim_right_matches(self[], pat) + core_str::StrExt::trim_right_matches(self.index(&FullRange), pat) } /// Check that `index`-th byte lies at the start and/or end of a @@ -960,7 +960,7 @@ pub trait StrExt: ops::Slice { /// ``` #[unstable = "naming is uncertain with container conventions"] fn is_char_boundary(&self, index: uint) -> bool { - core_str::StrExt::is_char_boundary(self[], index) + core_str::StrExt::is_char_boundary(self.index(&FullRange), index) } /// Pluck a character out of a string and return the index of the next @@ -1018,7 +1018,7 @@ pub trait StrExt: ops::Slice { /// If `i` is not the index of the beginning of a valid UTF-8 character. #[unstable = "naming is uncertain with container conventions"] fn char_range_at(&self, start: uint) -> CharRange { - core_str::StrExt::char_range_at(self[], start) + core_str::StrExt::char_range_at(self.index(&FullRange), start) } /// Given a byte position and a str, return the previous char and its position. @@ -1033,7 +1033,7 @@ pub trait StrExt: ops::Slice { /// If `i` is not an index following a valid UTF-8 character. #[unstable = "naming is uncertain with container conventions"] fn char_range_at_reverse(&self, start: uint) -> CharRange { - core_str::StrExt::char_range_at_reverse(self[], start) + core_str::StrExt::char_range_at_reverse(self.index(&FullRange), start) } /// Plucks the character starting at the `i`th byte of a string. @@ -1053,7 +1053,7 @@ pub trait StrExt: ops::Slice { /// If `i` is not the index of the beginning of a valid UTF-8 character. #[unstable = "naming is uncertain with container conventions"] fn char_at(&self, i: uint) -> char { - core_str::StrExt::char_at(self[], i) + core_str::StrExt::char_at(self.index(&FullRange), i) } /// Plucks the character ending at the `i`th byte of a string. @@ -1064,7 +1064,7 @@ pub trait StrExt: ops::Slice { /// If `i` is not an index following a valid UTF-8 character. #[unstable = "naming is uncertain with container conventions"] fn char_at_reverse(&self, i: uint) -> char { - core_str::StrExt::char_at_reverse(self[], i) + core_str::StrExt::char_at_reverse(self.index(&FullRange), i) } /// Work with the byte buffer of a string as a byte slice. 
@@ -1076,7 +1076,7 @@ pub trait StrExt: ops::Slice { /// ``` #[stable] fn as_bytes(&self) -> &[u8] { - core_str::StrExt::as_bytes(self[]) + core_str::StrExt::as_bytes(self.index(&FullRange)) } /// Returns the byte index of the first character of `self` that @@ -1104,7 +1104,7 @@ pub trait StrExt: ops::Slice { /// ``` #[stable] fn find(&self, pat: P) -> Option { - core_str::StrExt::find(self[], pat) + core_str::StrExt::find(self.index(&FullRange), pat) } /// Returns the byte index of the last character of `self` that @@ -1132,7 +1132,7 @@ pub trait StrExt: ops::Slice { /// ``` #[stable] fn rfind(&self, pat: P) -> Option { - core_str::StrExt::rfind(self[], pat) + core_str::StrExt::rfind(self.index(&FullRange), pat) } /// Returns the byte index of the first matching substring @@ -1156,7 +1156,7 @@ pub trait StrExt: ops::Slice { /// ``` #[unstable = "might get removed in favor of a more generic find in the future"] fn find_str(&self, needle: &str) -> Option { - core_str::StrExt::find_str(self[], needle) + core_str::StrExt::find_str(self.index(&FullRange), needle) } /// Retrieves the first character from a string slice and returns @@ -1179,7 +1179,7 @@ pub trait StrExt: ops::Slice { /// ``` #[unstable = "awaiting conventions about shifting and slices"] fn slice_shift_char(&self) -> Option<(char, &str)> { - core_str::StrExt::slice_shift_char(self[]) + core_str::StrExt::slice_shift_char(self.index(&FullRange)) } /// Returns the byte offset of an inner slice relative to an enclosing outer slice. @@ -1198,7 +1198,7 @@ pub trait StrExt: ops::Slice { /// ``` #[unstable = "awaiting convention about comparability of arbitrary slices"] fn subslice_offset(&self, inner: &str) -> uint { - core_str::StrExt::subslice_offset(self[], inner) + core_str::StrExt::subslice_offset(self.index(&FullRange), inner) } /// Return an unsafe pointer to the strings buffer. @@ -1209,13 +1209,13 @@ pub trait StrExt: ops::Slice { #[stable] #[inline] fn as_ptr(&self) -> *const u8 { - core_str::StrExt::as_ptr(self[]) + core_str::StrExt::as_ptr(self.index(&FullRange)) } /// Return an iterator of `u16` over the string encoded as UTF-16. #[unstable = "this functionality may only be provided by libunicode"] fn utf16_units(&self) -> Utf16Units { - Utf16Units { encoder: Utf16Encoder::new(self[].chars()) } + Utf16Units { encoder: Utf16Encoder::new(self.index(&FullRange).chars()) } } /// Return the number of bytes in this string @@ -1229,7 +1229,7 @@ pub trait StrExt: ops::Slice { #[stable] #[inline] fn len(&self) -> uint { - core_str::StrExt::len(self[]) + core_str::StrExt::len(self.index(&FullRange)) } /// Returns true if this slice contains no bytes @@ -1242,7 +1242,7 @@ pub trait StrExt: ops::Slice { #[inline] #[stable] fn is_empty(&self) -> bool { - core_str::StrExt::is_empty(self[]) + core_str::StrExt::is_empty(self.index(&FullRange)) } /// Parse this string into the specified type. @@ -1256,7 +1256,7 @@ pub trait StrExt: ops::Slice { #[inline] #[unstable = "this method was just created"] fn parse(&self) -> Option { - core_str::StrExt::parse(self[]) + core_str::StrExt::parse(self.index(&FullRange)) } /// Returns an iterator over the @@ -1280,7 +1280,7 @@ pub trait StrExt: ops::Slice { /// ``` #[unstable = "this functionality may only be provided by libunicode"] fn graphemes(&self, is_extended: bool) -> Graphemes { - UnicodeStr::graphemes(self[], is_extended) + UnicodeStr::graphemes(self.index(&FullRange), is_extended) } /// Returns an iterator over the grapheme clusters of self and their byte offsets. 
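With the `Slice`/`SliceMut` lang items removed, each form of the slicing syntax resolves to one of the range-typed `Index` impls this diff adds for `str`, `String`, `[T]` and `Vec<T>`. A small usage sketch of the four forms, in modern syntax (the patch itself targets the pre-1.0 libraries):

```rust
// Each slicing form picks a different range-typed `Index` impl.
fn main() {
    let s = "foobar";
    assert_eq!(&s[..], "foobar"); // Index<RangeFull>        (the old `s[]`)
    assert_eq!(&s[..3], "foo");   // Index<RangeTo<usize>>   (`s[..b]`)
    assert_eq!(&s[3..], "bar");   // Index<RangeFrom<usize>> (`s[a..]`)
    assert_eq!(&s[1..4], "oob");  // Index<Range<usize>>     (`s[a..b]`)
}
```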
@@ -1295,7 +1295,7 @@ pub trait StrExt: ops::Slice { /// ``` #[unstable = "this functionality may only be provided by libunicode"] fn grapheme_indices(&self, is_extended: bool) -> GraphemeIndices { - UnicodeStr::grapheme_indices(self[], is_extended) + UnicodeStr::grapheme_indices(self.index(&FullRange), is_extended) } /// An iterator over the words of a string (subsequences separated @@ -1311,7 +1311,7 @@ pub trait StrExt: ops::Slice { /// ``` #[stable] fn words(&self) -> Words { - UnicodeStr::words(self[]) + UnicodeStr::words(self.index(&FullRange)) } /// Returns a string's displayed width in columns, treating control @@ -1325,25 +1325,25 @@ pub trait StrExt: ops::Slice { /// `is_cjk` = `false`) if the locale is unknown. #[unstable = "this functionality may only be provided by libunicode"] fn width(&self, is_cjk: bool) -> uint { - UnicodeStr::width(self[], is_cjk) + UnicodeStr::width(self.index(&FullRange), is_cjk) } /// Returns a string with leading and trailing whitespace removed. #[stable] fn trim(&self) -> &str { - UnicodeStr::trim(self[]) + UnicodeStr::trim(self.index(&FullRange)) } /// Returns a string with leading whitespace removed. #[stable] fn trim_left(&self) -> &str { - UnicodeStr::trim_left(self[]) + UnicodeStr::trim_left(self.index(&FullRange)) } /// Returns a string with trailing whitespace removed. #[stable] fn trim_right(&self) -> &str { - UnicodeStr::trim_right(self[]) + UnicodeStr::trim_right(self.index(&FullRange)) } } @@ -1355,8 +1355,7 @@ mod tests { use prelude::*; use core::iter::AdditiveIterator; - use super::from_utf8; - use super::Utf8Error; + use super::{from_utf8, Utf8Error}; #[test] fn test_le() { @@ -2133,7 +2132,7 @@ mod tests { let mut bytes = [0u8; 4]; for c in range(0u32, 0x110000).filter_map(|c| ::core::char::from_u32(c)) { let len = c.encode_utf8(&mut bytes).unwrap_or(0); - let s = ::core::str::from_utf8(bytes[..len]).unwrap(); + let s = ::core::str::from_utf8(&bytes[..len]).unwrap(); if Some(c) != s.chars().next() { panic!("character {:x}={} does not decode correctly", c as u32, c); } @@ -2145,7 +2144,7 @@ mod tests { let mut bytes = [0u8; 4]; for c in range(0u32, 0x110000).filter_map(|c| ::core::char::from_u32(c)) { let len = c.encode_utf8(&mut bytes).unwrap_or(0); - let s = ::core::str::from_utf8(bytes[..len]).unwrap(); + let s = ::core::str::from_utf8(&bytes[..len]).unwrap(); if Some(c) != s.chars().rev().next() { panic!("character {:x}={} does not decode correctly", c as u32, c); } diff --git a/src/libcollections/string.rs b/src/libcollections/string.rs index 0bf311e4d3f6e..20bc08416dc78 100644 --- a/src/libcollections/string.rs +++ b/src/libcollections/string.rs @@ -22,7 +22,7 @@ use core::fmt; use core::hash; use core::iter::FromIterator; use core::mem; -use core::ops::{self, Deref, Add}; +use core::ops::{self, Deref, Add, Index}; use core::ptr; use core::raw::Slice as RawSlice; use unicode::str as unicode_str; @@ -168,7 +168,7 @@ impl String { if i > 0 { unsafe { - res.as_mut_vec().push_all(v[..i]) + res.as_mut_vec().push_all(v.index(&(0..i))) }; } @@ -185,7 +185,7 @@ impl String { macro_rules! 
error { () => ({ unsafe { if subseqidx != i_ { - res.as_mut_vec().push_all(v[subseqidx..i_]); + res.as_mut_vec().push_all(v.index(&(subseqidx..i_))); } subseqidx = i; res.as_mut_vec().push_all(REPLACEMENT); @@ -254,7 +254,7 @@ impl String { } if subseqidx < total { unsafe { - res.as_mut_vec().push_all(v[subseqidx..total]) + res.as_mut_vec().push_all(v.index(&(subseqidx..total))) }; } Cow::Owned(res) @@ -818,25 +818,32 @@ impl<'a> Add<&'a str> for String { } } -impl ops::Slice for String { +impl ops::Index> for String { + type Output = str; #[inline] - fn as_slice_<'a>(&'a self) -> &'a str { - unsafe { mem::transmute(self.vec.as_slice()) } + fn index(&self, index: &ops::Range) -> &str { + &self.index(&FullRange)[*index] } - +} +impl ops::Index> for String { + type Output = str; #[inline] - fn slice_from_or_fail<'a>(&'a self, from: &uint) -> &'a str { - self[][*from..] + fn index(&self, index: &ops::RangeTo) -> &str { + &self.index(&FullRange)[*index] } - +} +impl ops::Index> for String { + type Output = str; #[inline] - fn slice_to_or_fail<'a>(&'a self, to: &uint) -> &'a str { - self[][..*to] + fn index(&self, index: &ops::RangeFrom) -> &str { + &self.index(&FullRange)[*index] } - +} +impl ops::Index for String { + type Output = str; #[inline] - fn slice_or_fail<'a>(&'a self, from: &uint, to: &uint) -> &'a str { - self[][*from..*to] + fn index(&self, _index: &ops::FullRange) -> &str { + unsafe { mem::transmute(self.vec.as_slice()) } } } @@ -845,7 +852,7 @@ impl ops::Deref for String { type Target = str; fn deref<'a>(&'a self) -> &'a str { - unsafe { mem::transmute(self.vec[]) } + unsafe { mem::transmute(self.vec.index(&FullRange)) } } } @@ -943,6 +950,7 @@ mod tests { use str::Utf8Error; use core::iter::repeat; use super::{as_string, CowString}; + use core::ops::FullRange; #[test] fn test_as_string() { @@ -1224,10 +1232,10 @@ mod tests { #[test] fn test_slicing() { let s = "foobar".to_string(); - assert_eq!("foobar", s[]); - assert_eq!("foo", s[..3]); - assert_eq!("bar", s[3..]); - assert_eq!("oob", s[1..4]); + assert_eq!("foobar", &s[]); + assert_eq!("foo", &s[..3]); + assert_eq!("bar", &s[3..]); + assert_eq!("oob", &s[1..4]); } #[test] diff --git a/src/libcollections/vec.rs b/src/libcollections/vec.rs index 99231e7253c3c..1cfbbdf6cb8ff 100644 --- a/src/libcollections/vec.rs +++ b/src/libcollections/vec.rs @@ -1178,7 +1178,7 @@ impl Clone for Vec { // self.len <= other.len due to the truncate above, so the // slice here is always in-bounds. 
- let slice = other[self.len()..]; + let slice = other.index(&(self.len()..)); self.push_all(slice); } } @@ -1209,48 +1209,66 @@ impl IndexMut for Vec { } } -impl ops::Slice for Vec { + +impl ops::Index> for Vec { + type Output = [T]; #[inline] - fn as_slice_<'a>(&'a self) -> &'a [T] { - self.as_slice() + fn index(&self, index: &ops::Range) -> &[T] { + self.as_slice().index(index) } - +} +impl ops::Index> for Vec { + type Output = [T]; #[inline] - fn slice_from_or_fail<'a>(&'a self, start: &uint) -> &'a [T] { - self.as_slice().slice_from_or_fail(start) + fn index(&self, index: &ops::RangeTo) -> &[T] { + self.as_slice().index(index) } - +} +impl ops::Index> for Vec { + type Output = [T]; #[inline] - fn slice_to_or_fail<'a>(&'a self, end: &uint) -> &'a [T] { - self.as_slice().slice_to_or_fail(end) + fn index(&self, index: &ops::RangeFrom) -> &[T] { + self.as_slice().index(index) } +} +impl ops::Index for Vec { + type Output = [T]; #[inline] - fn slice_or_fail<'a>(&'a self, start: &uint, end: &uint) -> &'a [T] { - self.as_slice().slice_or_fail(start, end) + fn index(&self, _index: &ops::FullRange) -> &[T] { + self.as_slice() } } -impl ops::SliceMut for Vec { +impl ops::IndexMut> for Vec { + type Output = [T]; #[inline] - fn as_mut_slice_<'a>(&'a mut self) -> &'a mut [T] { - self.as_mut_slice() + fn index_mut(&mut self, index: &ops::Range) -> &mut [T] { + self.as_mut_slice().index_mut(index) } - +} +impl ops::IndexMut> for Vec { + type Output = [T]; #[inline] - fn slice_from_or_fail_mut<'a>(&'a mut self, start: &uint) -> &'a mut [T] { - self.as_mut_slice().slice_from_or_fail_mut(start) + fn index_mut(&mut self, index: &ops::RangeTo) -> &mut [T] { + self.as_mut_slice().index_mut(index) } - +} +impl ops::IndexMut> for Vec { + type Output = [T]; #[inline] - fn slice_to_or_fail_mut<'a>(&'a mut self, end: &uint) -> &'a mut [T] { - self.as_mut_slice().slice_to_or_fail_mut(end) + fn index_mut(&mut self, index: &ops::RangeFrom) -> &mut [T] { + self.as_mut_slice().index_mut(index) } +} +impl ops::IndexMut for Vec { + type Output = [T]; #[inline] - fn slice_or_fail_mut<'a>(&'a mut self, start: &uint, end: &uint) -> &'a mut [T] { - self.as_mut_slice().slice_or_fail_mut(start, end) + fn index_mut(&mut self, _index: &ops::FullRange) -> &mut [T] { + self.as_mut_slice() } } + #[stable] impl ops::Deref for Vec { type Target = [T]; @@ -1781,6 +1799,7 @@ mod tests { use prelude::*; use core::mem::size_of; use core::iter::repeat; + use core::ops::FullRange; use test::Bencher; use super::as_vec; @@ -1918,7 +1937,7 @@ mod tests { let (left, right) = values.split_at_mut(2); { let left: &[_] = left; - assert!(left[0..left.len()] == [1, 2][]); + assert!(&left[..left.len()] == &[1, 2][]); } for p in left.iter_mut() { *p += 1; @@ -1926,7 +1945,7 @@ mod tests { { let right: &[_] = right; - assert!(right[0..right.len()] == [3, 4, 5][]); + assert!(&right[..right.len()] == &[3, 4, 5][]); } for p in right.iter_mut() { *p += 2; @@ -2097,35 +2116,35 @@ mod tests { #[should_fail] fn test_slice_out_of_bounds_1() { let x: Vec = vec![1, 2, 3, 4, 5]; - x[-1..]; + &x[(-1)..]; } #[test] #[should_fail] fn test_slice_out_of_bounds_2() { let x: Vec = vec![1, 2, 3, 4, 5]; - x[..6]; + &x[..6]; } #[test] #[should_fail] fn test_slice_out_of_bounds_3() { let x: Vec = vec![1, 2, 3, 4, 5]; - x[-1..4]; + &x[(-1)..4]; } #[test] #[should_fail] fn test_slice_out_of_bounds_4() { let x: Vec = vec![1, 2, 3, 4, 5]; - x[1..6]; + &x[1..6]; } #[test] #[should_fail] fn test_slice_out_of_bounds_5() { let x: Vec = vec![1, 2, 3, 4, 5]; - x[3..2]; + 
&x[3..2]; } #[test] @@ -2371,7 +2390,7 @@ mod tests { b.bytes = src_len as u64; b.iter(|| { - let dst = src.clone().as_slice().to_vec(); + let dst = src.clone()[].to_vec(); assert_eq!(dst.len(), src_len); assert!(dst.iter().enumerate().all(|(i, x)| i == *x)); }); diff --git a/src/libcollections/vec_map.rs b/src/libcollections/vec_map.rs index cc757b656238e..68469059fead0 100644 --- a/src/libcollections/vec_map.rs +++ b/src/libcollections/vec_map.rs @@ -455,7 +455,8 @@ impl VecMap { if *key >= self.v.len() { return None; } - self.v[*key].take() + let result = &mut self.v[*key]; + result.take() } } diff --git a/src/libcore/array.rs b/src/libcore/array.rs index ba7714ad9bc8c..a4b32f2b6a2cd 100644 --- a/src/libcore/array.rs +++ b/src/libcore/array.rs @@ -18,7 +18,7 @@ use clone::Clone; use cmp::{PartialEq, Eq, PartialOrd, Ord, Ordering}; use fmt; use kinds::Copy; -use ops::Deref; +use ops::{Deref, FullRange, Index}; use option::Option; // macro for implementing n-ary tuple functions and operations @@ -35,7 +35,7 @@ macro_rules! array_impls { #[unstable = "waiting for Show to stabilize"] impl fmt::Show for [T; $N] { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Show::fmt(&self[], f) + fmt::Show::fmt(&self.index(&FullRange), f) } } @@ -43,11 +43,11 @@ macro_rules! array_impls { impl PartialEq<[B; $N]> for [A; $N] where A: PartialEq { #[inline] fn eq(&self, other: &[B; $N]) -> bool { - self[] == other[] + self.index(&FullRange) == other.index(&FullRange) } #[inline] fn ne(&self, other: &[B; $N]) -> bool { - self[] != other[] + self.index(&FullRange) != other.index(&FullRange) } } @@ -57,9 +57,13 @@ macro_rules! array_impls { Rhs: Deref, { #[inline(always)] - fn eq(&self, other: &Rhs) -> bool { PartialEq::eq(self[], &**other) } + fn eq(&self, other: &Rhs) -> bool { + PartialEq::eq(self.index(&FullRange), &**other) + } #[inline(always)] - fn ne(&self, other: &Rhs) -> bool { PartialEq::ne(self[], &**other) } + fn ne(&self, other: &Rhs) -> bool { + PartialEq::ne(self.index(&FullRange), &**other) + } } #[stable] @@ -68,9 +72,13 @@ macro_rules! array_impls { Lhs: Deref { #[inline(always)] - fn eq(&self, other: &[B; $N]) -> bool { PartialEq::eq(&**self, other[]) } + fn eq(&self, other: &[B; $N]) -> bool { + PartialEq::eq(&**self, other.index(&FullRange)) + } #[inline(always)] - fn ne(&self, other: &[B; $N]) -> bool { PartialEq::ne(&**self, other[]) } + fn ne(&self, other: &[B; $N]) -> bool { + PartialEq::ne(&**self, other.index(&FullRange)) + } } #[stable] @@ -80,23 +88,23 @@ macro_rules! array_impls { impl PartialOrd for [T; $N] { #[inline] fn partial_cmp(&self, other: &[T; $N]) -> Option { - PartialOrd::partial_cmp(&self[], &other[]) + PartialOrd::partial_cmp(&self.index(&FullRange), &other.index(&FullRange)) } #[inline] fn lt(&self, other: &[T; $N]) -> bool { - PartialOrd::lt(&self[], &other[]) + PartialOrd::lt(&self.index(&FullRange), &other.index(&FullRange)) } #[inline] fn le(&self, other: &[T; $N]) -> bool { - PartialOrd::le(&self[], &other[]) + PartialOrd::le(&self.index(&FullRange), &other.index(&FullRange)) } #[inline] fn ge(&self, other: &[T; $N]) -> bool { - PartialOrd::ge(&self[], &other[]) + PartialOrd::ge(&self.index(&FullRange), &other.index(&FullRange)) } #[inline] fn gt(&self, other: &[T; $N]) -> bool { - PartialOrd::gt(&self[], &other[]) + PartialOrd::gt(&self.index(&FullRange), &other.index(&FullRange)) } } @@ -104,7 +112,7 @@ macro_rules! 
array_impls { impl Ord for [T; $N] { #[inline] fn cmp(&self, other: &[T; $N]) -> Ordering { - Ord::cmp(&self[], &other[]) + Ord::cmp(&self.index(&FullRange), &other.index(&FullRange)) } } )+ diff --git a/src/libcore/fmt/float.rs b/src/libcore/fmt/float.rs index f63242b4f859a..d833b8fed7779 100644 --- a/src/libcore/fmt/float.rs +++ b/src/libcore/fmt/float.rs @@ -20,7 +20,7 @@ use fmt; use iter::{IteratorExt, range}; use num::{cast, Float, ToPrimitive}; use num::FpCategory as Fp; -use ops::FnOnce; +use ops::{FnOnce, Index}; use result::Result::Ok; use slice::{self, SliceExt}; use str::{self, StrExt}; @@ -332,5 +332,5 @@ pub fn float_to_str_bytes_common( } } - f(unsafe { str::from_utf8_unchecked(buf[..end]) }) + f(unsafe { str::from_utf8_unchecked(buf.index(&(0..end))) }) } diff --git a/src/libcore/fmt/mod.rs b/src/libcore/fmt/mod.rs index 951f5c29f00e8..76c2671cfd033 100644 --- a/src/libcore/fmt/mod.rs +++ b/src/libcore/fmt/mod.rs @@ -19,8 +19,8 @@ use kinds::{Copy, Sized}; use mem; use option::Option; use option::Option::{Some, None}; -use ops::{Deref, FnOnce}; use result::Result::Ok; +use ops::{Deref, FnOnce, Index}; use result; use slice::SliceExt; use slice; @@ -413,7 +413,7 @@ impl<'a> Formatter<'a> { for c in sign.into_iter() { let mut b = [0; 4]; let n = c.encode_utf8(&mut b).unwrap_or(0); - let b = unsafe { str::from_utf8_unchecked(b[0..n]) }; + let b = unsafe { str::from_utf8_unchecked(b.index(&(0..n))) }; try!(f.buf.write_str(b)); } if prefixed { f.buf.write_str(prefix) } @@ -521,7 +521,7 @@ impl<'a> Formatter<'a> { let mut fill = [0u8; 4]; let len = self.fill.encode_utf8(&mut fill).unwrap_or(0); - let fill = unsafe { str::from_utf8_unchecked(fill[..len]) }; + let fill = unsafe { str::from_utf8_unchecked(fill.index(&(..len))) }; for _ in range(0, pre_pad) { try!(self.buf.write_str(fill)); @@ -620,7 +620,7 @@ impl Show for char { let mut utf8 = [0u8; 4]; let amt = self.encode_utf8(&mut utf8).unwrap_or(0); - let s: &str = unsafe { mem::transmute(utf8[..amt]) }; + let s: &str = unsafe { mem::transmute(utf8.index(&(0..amt))) }; Show::fmt(s, f) } } diff --git a/src/libcore/fmt/num.rs b/src/libcore/fmt/num.rs index e0724fc2da5f5..89337e0584b50 100644 --- a/src/libcore/fmt/num.rs +++ b/src/libcore/fmt/num.rs @@ -16,6 +16,7 @@ use fmt; use iter::IteratorExt; +use ops::Index; use num::{Int, cast}; use slice::SliceExt; use str; @@ -61,7 +62,7 @@ trait GenericRadix { if x == zero { break }; // No more digits left to accumulate. } } - let buf = unsafe { str::from_utf8_unchecked(buf[curr..]) }; + let buf = unsafe { str::from_utf8_unchecked(buf.index(&(curr..))) }; f.pad_integral(is_positive, self.prefix(), buf) } } diff --git a/src/libcore/num/mod.rs b/src/libcore/num/mod.rs index 192d6063f6bbc..3bcdd54463fc6 100644 --- a/src/libcore/num/mod.rs +++ b/src/libcore/num/mod.rs @@ -24,7 +24,7 @@ use iter::IteratorExt; use kinds::Copy; use mem::size_of; use ops::{Add, Sub, Mul, Div, Rem, Neg}; -use ops::{Not, BitAnd, BitOr, BitXor, Shl, Shr}; +use ops::{Not, BitAnd, BitOr, BitXor, Shl, Shr, Index}; use option::Option; use option::Option::{Some, None}; use str::{FromStr, StrExt}; @@ -1577,7 +1577,7 @@ macro_rules! 
from_str_radix_float_impl { }; // Parse the exponent as decimal integer - let src = src[offset..]; + let src = src.index(&(offset..)); let (is_positive, exp) = match src.slice_shift_char() { Some(('-', src)) => (false, src.parse::()), Some(('+', src)) => (true, src.parse::()), diff --git a/src/libcore/ops.rs b/src/libcore/ops.rs index 97d94e73bb33a..2a7e6eb47955d 100644 --- a/src/libcore/ops.rs +++ b/src/libcore/ops.rs @@ -846,105 +846,6 @@ pub trait IndexMut { fn index_mut<'a>(&'a mut self, index: &Index) -> &'a mut Self::Output; } -/// The `Slice` trait is used to specify the functionality of slicing operations -/// like `arr[from..to]` when used in an immutable context. -/// -/// # Example -/// -/// A trivial implementation of `Slice`. When `Foo[..Foo]` happens, it ends up -/// calling `slice_to`, and therefore, `main` prints `Slicing!`. -/// -/// ```ignore -/// use std::ops::Slice; -/// -/// #[derive(Copy)] -/// struct Foo; -/// -/// impl Slice for Foo { -/// fn as_slice_<'a>(&'a self) -> &'a Foo { -/// println!("Slicing!"); -/// self -/// } -/// fn slice_from_or_fail<'a>(&'a self, _from: &Foo) -> &'a Foo { -/// println!("Slicing!"); -/// self -/// } -/// fn slice_to_or_fail<'a>(&'a self, _to: &Foo) -> &'a Foo { -/// println!("Slicing!"); -/// self -/// } -/// fn slice_or_fail<'a>(&'a self, _from: &Foo, _to: &Foo) -> &'a Foo { -/// println!("Slicing!"); -/// self -/// } -/// } -/// -/// fn main() { -/// Foo[..Foo]; -/// } -/// ``` -#[lang="slice"] -pub trait Slice { - /// The method for the slicing operation foo[] - fn as_slice_<'a>(&'a self) -> &'a Result; - /// The method for the slicing operation foo[from..] - fn slice_from_or_fail<'a>(&'a self, from: &Idx) -> &'a Result; - /// The method for the slicing operation foo[..to] - fn slice_to_or_fail<'a>(&'a self, to: &Idx) -> &'a Result; - /// The method for the slicing operation foo[from..to] - fn slice_or_fail<'a>(&'a self, from: &Idx, to: &Idx) -> &'a Result; -} - -/// The `SliceMut` trait is used to specify the functionality of slicing -/// operations like `arr[from..to]`, when used in a mutable context. -/// -/// # Example -/// -/// A trivial implementation of `SliceMut`. When `Foo[Foo..]` happens, it ends up -/// calling `slice_from_mut`, and therefore, `main` prints `Slicing!`. -/// -/// ```ignore -/// use std::ops::SliceMut; -/// -/// #[derive(Copy)] -/// struct Foo; -/// -/// impl SliceMut for Foo { -/// fn as_mut_slice_<'a>(&'a mut self) -> &'a mut Foo { -/// println!("Slicing!"); -/// self -/// } -/// fn slice_from_or_fail_mut<'a>(&'a mut self, _from: &Foo) -> &'a mut Foo { -/// println!("Slicing!"); -/// self -/// } -/// fn slice_to_or_fail_mut<'a>(&'a mut self, _to: &Foo) -> &'a mut Foo { -/// println!("Slicing!"); -/// self -/// } -/// fn slice_or_fail_mut<'a>(&'a mut self, _from: &Foo, _to: &Foo) -> &'a mut Foo { -/// println!("Slicing!"); -/// self -/// } -/// } -/// -/// pub fn main() { -/// Foo[mut Foo..]; -/// } -/// ``` -#[lang="slice_mut"] -pub trait SliceMut { - /// The method for the slicing operation foo[] - fn as_mut_slice_<'a>(&'a mut self) -> &'a mut Result; - /// The method for the slicing operation foo[from..] - fn slice_from_or_fail_mut<'a>(&'a mut self, from: &Idx) -> &'a mut Result; - /// The method for the slicing operation foo[..to] - fn slice_to_or_fail_mut<'a>(&'a mut self, to: &Idx) -> &'a mut Result; - /// The method for the slicing operation foo[from..to] - fn slice_or_fail_mut<'a>(&'a mut self, from: &Idx, to: &Idx) -> &'a mut Result; -} - - /// An unbounded range. 
#[derive(Copy)] #[lang="full_range"] @@ -962,8 +863,6 @@ pub struct Range { pub end: Idx, } -// FIXME(#19391) needs a snapshot -//impl> Iterator for Range { #[unstable = "API still in development"] impl Iterator for Range { type Item = Idx; diff --git a/src/libcore/prelude.rs b/src/libcore/prelude.rs index e88cb73c8a9b7..a560b68db01fa 100644 --- a/src/libcore/prelude.rs +++ b/src/libcore/prelude.rs @@ -30,7 +30,7 @@ // Reexported core operators pub use kinds::{Copy, Send, Sized, Sync}; -pub use ops::{Drop, Fn, FnMut, FnOnce}; +pub use ops::{Drop, Fn, FnMut, FnOnce, FullRange}; // Reexported functions pub use iter::range; diff --git a/src/libcore/slice.rs b/src/libcore/slice.rs index 093ed0b242f5f..8ab927f701a1e 100644 --- a/src/libcore/slice.rs +++ b/src/libcore/slice.rs @@ -43,7 +43,7 @@ use default::Default; use iter::*; use kinds::Copy; use num::Int; -use ops::{FnMut, self}; +use ops::{FnMut, self, Index}; use option::Option; use option::Option::{None, Some}; use result::Result; @@ -159,7 +159,7 @@ impl SliceExt for [T] { #[inline] fn split_at(&self, mid: uint) -> (&[T], &[T]) { - (self[..mid], self[mid..]) + (self.index(&(0..mid)), self.index(&(mid..))) } #[inline] @@ -236,11 +236,11 @@ impl SliceExt for [T] { } #[inline] - fn tail(&self) -> &[T] { self[1..] } + fn tail(&self) -> &[T] { self.index(&(1..)) } #[inline] fn init(&self) -> &[T] { - self[..self.len() - 1] + self.index(&(0..(self.len() - 1))) } #[inline] @@ -292,17 +292,17 @@ impl SliceExt for [T] { fn as_mut_slice(&mut self) -> &mut [T] { self } fn slice_mut(&mut self, start: uint, end: uint) -> &mut [T] { - ops::SliceMut::slice_or_fail_mut(self, &start, &end) + ops::IndexMut::index_mut(self, &ops::Range { start: start, end: end } ) } #[inline] fn slice_from_mut(&mut self, start: uint) -> &mut [T] { - ops::SliceMut::slice_from_or_fail_mut(self, &start) + ops::IndexMut::index_mut(self, &ops::RangeFrom { start: start } ) } #[inline] fn slice_to_mut(&mut self, end: uint) -> &mut [T] { - ops::SliceMut::slice_to_or_fail_mut(self, &end) + ops::IndexMut::index_mut(self, &ops::RangeTo { end: end } ) } #[inline] @@ -310,8 +310,8 @@ impl SliceExt for [T] { unsafe { let self2: &mut [T] = mem::transmute_copy(&self); - (ops::SliceMut::slice_to_or_fail_mut(self, &mid), - ops::SliceMut::slice_from_or_fail_mut(self2, &mid)) + (ops::IndexMut::index_mut(self, &ops::RangeTo { end: mid } ), + ops::IndexMut::index_mut(self2, &ops::RangeFrom { start: mid } )) } } @@ -443,13 +443,13 @@ impl SliceExt for [T] { #[inline] fn starts_with(&self, needle: &[T]) -> bool where T: PartialEq { let n = needle.len(); - self.len() >= n && needle == self[..n] + self.len() >= n && needle == self.index(&(0..n)) } #[inline] fn ends_with(&self, needle: &[T]) -> bool where T: PartialEq { let (m, n) = (self.len(), needle.len()); - m >= n && needle == self[m-n..] 
+ m >= n && needle == self.index(&((m-n)..)) } #[unstable] @@ -551,62 +551,79 @@ impl ops::IndexMut for [T] { } } -impl ops::Slice for [T] { +impl ops::Index> for [T] { + type Output = [T]; #[inline] - fn as_slice_<'a>(&'a self) -> &'a [T] { - self - } - - #[inline] - fn slice_from_or_fail<'a>(&'a self, start: &uint) -> &'a [T] { - self.slice_or_fail(start, &self.len()) - } - - #[inline] - fn slice_to_or_fail<'a>(&'a self, end: &uint) -> &'a [T] { - self.slice_or_fail(&0, end) - } - #[inline] - fn slice_or_fail<'a>(&'a self, start: &uint, end: &uint) -> &'a [T] { - assert!(*start <= *end); - assert!(*end <= self.len()); + fn index(&self, index: &ops::Range) -> &[T] { + assert!(index.start <= index.end); + assert!(index.end <= self.len()); unsafe { transmute(RawSlice { - data: self.as_ptr().offset(*start as int), - len: (*end - *start) + data: self.as_ptr().offset(index.start as int), + len: index.end - index.start }) } } } - -impl ops::SliceMut for [T] { +impl ops::Index> for [T] { + type Output = [T]; #[inline] - fn as_mut_slice_<'a>(&'a mut self) -> &'a mut [T] { - self + fn index(&self, index: &ops::RangeTo) -> &[T] { + self.index(&ops::Range{ start: 0, end: index.end }) } - +} +impl ops::Index> for [T] { + type Output = [T]; #[inline] - fn slice_from_or_fail_mut<'a>(&'a mut self, start: &uint) -> &'a mut [T] { - let len = &self.len(); - self.slice_or_fail_mut(start, len) + fn index(&self, index: &ops::RangeFrom) -> &[T] { + self.index(&ops::Range{ start: index.start, end: self.len() }) } - +} +impl ops::Index for [T] { + type Output = [T]; #[inline] - fn slice_to_or_fail_mut<'a>(&'a mut self, end: &uint) -> &'a mut [T] { - self.slice_or_fail_mut(&0, end) + fn index(&self, _index: &ops::FullRange) -> &[T] { + self } +} + +impl ops::IndexMut> for [T] { + type Output = [T]; #[inline] - fn slice_or_fail_mut<'a>(&'a mut self, start: &uint, end: &uint) -> &'a mut [T] { - assert!(*start <= *end); - assert!(*end <= self.len()); + fn index_mut(&mut self, index: &ops::Range) -> &mut [T] { + assert!(index.start <= index.end); + assert!(index.end <= self.len()); unsafe { transmute(RawSlice { - data: self.as_ptr().offset(*start as int), - len: (*end - *start) + data: self.as_ptr().offset(index.start as int), + len: index.end - index.start }) } } } +impl ops::IndexMut> for [T] { + type Output = [T]; + #[inline] + fn index_mut(&mut self, index: &ops::RangeTo) -> &mut [T] { + self.index_mut(&ops::Range{ start: 0, end: index.end }) + } +} +impl ops::IndexMut> for [T] { + type Output = [T]; + #[inline] + fn index_mut(&mut self, index: &ops::RangeFrom) -> &mut [T] { + let len = self.len(); + self.index_mut(&ops::Range{ start: index.start, end: len }) + } +} +impl ops::IndexMut for [T] { + type Output = [T]; + #[inline] + fn index_mut(&mut self, _index: &ops::FullRange) -> &mut [T] { + self + } +} + //////////////////////////////////////////////////////////////////////////////// // Common traits @@ -738,21 +755,38 @@ pub struct Iter<'a, T: 'a> { } #[experimental] -impl<'a, T> ops::Slice for Iter<'a, T> { - fn as_slice_(&self) -> &[T] { - self.as_slice() +impl<'a, T> ops::Index> for Iter<'a, T> { + type Output = [T]; + #[inline] + fn index(&self, index: &ops::Range) -> &[T] { + self.as_slice().index(index) } - fn slice_from_or_fail<'b>(&'b self, from: &uint) -> &'b [T] { - use ops::Slice; - self.as_slice().slice_from_or_fail(from) +} + +#[experimental] +impl<'a, T> ops::Index> for Iter<'a, T> { + type Output = [T]; + #[inline] + fn index(&self, index: &ops::RangeTo) -> &[T] { + self.as_slice().index(index) 
} - fn slice_to_or_fail<'b>(&'b self, to: &uint) -> &'b [T] { - use ops::Slice; - self.as_slice().slice_to_or_fail(to) +} + +#[experimental] +impl<'a, T> ops::Index> for Iter<'a, T> { + type Output = [T]; + #[inline] + fn index(&self, index: &ops::RangeFrom) -> &[T] { + self.as_slice().index(index) } - fn slice_or_fail<'b>(&'b self, from: &uint, to: &uint) -> &'b [T] { - use ops::Slice; - self.as_slice().slice_or_fail(from, to) +} + +#[experimental] +impl<'a, T> ops::Index for Iter<'a, T> { + type Output = [T]; + #[inline] + fn index(&self, _index: &ops::FullRange) -> &[T] { + self.as_slice() } } @@ -812,44 +846,74 @@ pub struct IterMut<'a, T: 'a> { marker: marker::ContravariantLifetime<'a>, } + #[experimental] -impl<'a, T> ops::Slice for IterMut<'a, T> { - fn as_slice_<'b>(&'b self) -> &'b [T] { - make_slice!(T -> &'b [T]: self.ptr, self.end) +impl<'a, T> ops::Index> for IterMut<'a, T> { + type Output = [T]; + #[inline] + fn index(&self, index: &ops::Range) -> &[T] { + self.index(&ops::FullRange).index(index) } - fn slice_from_or_fail<'b>(&'b self, from: &uint) -> &'b [T] { - use ops::Slice; - self.as_slice_().slice_from_or_fail(from) +} +#[experimental] +impl<'a, T> ops::Index> for IterMut<'a, T> { + type Output = [T]; + #[inline] + fn index(&self, index: &ops::RangeTo) -> &[T] { + self.index(&ops::FullRange).index(index) } - fn slice_to_or_fail<'b>(&'b self, to: &uint) -> &'b [T] { - use ops::Slice; - self.as_slice_().slice_to_or_fail(to) +} +#[experimental] +impl<'a, T> ops::Index> for IterMut<'a, T> { + type Output = [T]; + #[inline] + fn index(&self, index: &ops::RangeFrom) -> &[T] { + self.index(&ops::FullRange).index(index) } - fn slice_or_fail<'b>(&'b self, from: &uint, to: &uint) -> &'b [T] { - use ops::Slice; - self.as_slice_().slice_or_fail(from, to) +} +#[experimental] +impl<'a, T> ops::Index for IterMut<'a, T> { + type Output = [T]; + #[inline] + fn index(&self, _index: &ops::FullRange) -> &[T] { + make_slice!(T -> &[T]: self.ptr, self.end) } } #[experimental] -impl<'a, T> ops::SliceMut for IterMut<'a, T> { - fn as_mut_slice_<'b>(&'b mut self) -> &'b mut [T] { - make_slice!(T -> &'b mut [T]: self.ptr, self.end) +impl<'a, T> ops::IndexMut> for IterMut<'a, T> { + type Output = [T]; + #[inline] + fn index_mut(&mut self, index: &ops::Range) -> &mut [T] { + self.index_mut(&ops::FullRange).index_mut(index) } - fn slice_from_or_fail_mut<'b>(&'b mut self, from: &uint) -> &'b mut [T] { - use ops::SliceMut; - self.as_mut_slice_().slice_from_or_fail_mut(from) +} +#[experimental] +impl<'a, T> ops::IndexMut> for IterMut<'a, T> { + type Output = [T]; + #[inline] + fn index_mut(&mut self, index: &ops::RangeTo) -> &mut [T] { + self.index_mut(&ops::FullRange).index_mut(index) } - fn slice_to_or_fail_mut<'b>(&'b mut self, to: &uint) -> &'b mut [T] { - use ops::SliceMut; - self.as_mut_slice_().slice_to_or_fail_mut(to) +} +#[experimental] +impl<'a, T> ops::IndexMut> for IterMut<'a, T> { + type Output = [T]; + #[inline] + fn index_mut(&mut self, index: &ops::RangeFrom) -> &mut [T] { + self.index_mut(&ops::FullRange).index_mut(index) } - fn slice_or_fail_mut<'b>(&'b mut self, from: &uint, to: &uint) -> &'b mut [T] { - use ops::SliceMut; - self.as_mut_slice_().slice_or_fail_mut(from, to) +} +#[experimental] +impl<'a, T> ops::IndexMut for IterMut<'a, T> { + type Output = [T]; + #[inline] + fn index_mut(&mut self, _index: &ops::FullRange) -> &mut [T] { + make_slice!(T -> &mut [T]: self.ptr, self.end) } } + impl<'a, T> IterMut<'a, T> { /// View the underlying data as a subslice of the original data. 
/// @@ -908,8 +972,8 @@ impl<'a, T, P> Iterator for Split<'a, T, P> where P: FnMut(&T) -> bool { match self.v.iter().position(|x| (self.pred)(x)) { None => self.finish(), Some(idx) => { - let ret = Some(self.v[..idx]); - self.v = self.v[idx + 1..]; + let ret = Some(self.v.index(&(0..idx))); + self.v = self.v.index(&((idx + 1)..)); ret } } @@ -934,8 +998,8 @@ impl<'a, T, P> DoubleEndedIterator for Split<'a, T, P> where P: FnMut(&T) -> boo match self.v.iter().rposition(|x| (self.pred)(x)) { None => self.finish(), Some(idx) => { - let ret = Some(self.v[idx + 1..]); - self.v = self.v[..idx]; + let ret = Some(self.v.index(&((idx + 1)..))); + self.v = self.v.index(&(0..idx)); ret } } @@ -1131,8 +1195,8 @@ impl<'a, T> Iterator for Windows<'a, T> { if self.size > self.v.len() { None } else { - let ret = Some(self.v[..self.size]); - self.v = self.v[1..]; + let ret = Some(self.v.index(&(0..self.size))); + self.v = self.v.index(&(1..)); ret } } @@ -1219,7 +1283,7 @@ impl<'a, T> RandomAccessIterator for Chunks<'a, T> { let mut hi = lo + self.size; if hi < lo || hi > self.v.len() { hi = self.v.len(); } - Some(self.v[lo..hi]) + Some(self.v.index(&(lo..hi))) } else { None } diff --git a/src/libcore/str/mod.rs b/src/libcore/str/mod.rs index a39787b8207b5..bc995a2af7299 100644 --- a/src/libcore/str/mod.rs +++ b/src/libcore/str/mod.rs @@ -26,7 +26,7 @@ use iter::{Map, Iterator, IteratorExt, DoubleEndedIterator}; use kinds::Sized; use mem; use num::Int; -use ops::{Fn, FnMut}; +use ops::{Fn, FnMut, Index}; use option::Option::{self, None, Some}; use ptr::PtrExt; use raw::{Repr, Slice}; @@ -581,7 +581,7 @@ impl NaiveSearcher { fn next(&mut self, haystack: &[u8], needle: &[u8]) -> Option<(uint, uint)> { while self.position + needle.len() <= haystack.len() { - if haystack[self.position .. self.position + needle.len()] == needle { + if haystack.index(&(self.position .. self.position + needle.len())) == needle { let match_pos = self.position; self.position += needle.len(); // add 1 for all matches return Some((match_pos, match_pos + needle.len())); @@ -702,10 +702,10 @@ impl TwoWaySearcher { // // What's going on is we have some critical factorization (u, v) of the // needle, and we want to determine whether u is a suffix of - // v[..period]. If it is, we use "Algorithm CP1". Otherwise we use + // v.index(&(0..period)). If it is, we use "Algorithm CP1". Otherwise we use // "Algorithm CP2", which is optimized for when the period of the needle // is large. - if needle[..crit_pos] == needle[period.. period + crit_pos] { + if needle.index(&(0..crit_pos)) == needle.index(&(period.. 
period + crit_pos)) { TwoWaySearcher { crit_pos: crit_pos, period: period, @@ -1119,25 +1119,32 @@ mod traits { } } - impl ops::Slice for str { + impl ops::Index> for str { + type Output = str; #[inline] - fn as_slice_<'a>(&'a self) -> &'a str { - self + fn index(&self, index: &ops::Range) -> &str { + self.slice(index.start, index.end) } - + } + impl ops::Index> for str { + type Output = str; #[inline] - fn slice_from_or_fail<'a>(&'a self, from: &uint) -> &'a str { - self.slice_from(*from) + fn index(&self, index: &ops::RangeTo) -> &str { + self.slice_to(index.end) } - + } + impl ops::Index> for str { + type Output = str; #[inline] - fn slice_to_or_fail<'a>(&'a self, to: &uint) -> &'a str { - self.slice_to(*to) + fn index(&self, index: &ops::RangeFrom) -> &str { + self.slice_from(index.start) } - + } + impl ops::Index for str { + type Output = str; #[inline] - fn slice_or_fail<'a>(&'a self, from: &uint, to: &uint) -> &'a str { - self.slice(*from, *to) + fn index(&self, _index: &ops::FullRange) -> &str { + self } } } @@ -1406,13 +1413,13 @@ impl StrExt for str { #[inline] fn starts_with(&self, needle: &str) -> bool { let n = needle.len(); - self.len() >= n && needle.as_bytes() == self.as_bytes()[..n] + self.len() >= n && needle.as_bytes() == self.as_bytes().index(&(0..n)) } #[inline] fn ends_with(&self, needle: &str) -> bool { let (m, n) = (self.len(), needle.len()); - m >= n && needle.as_bytes() == self.as_bytes()[m-n..] + m >= n && needle.as_bytes() == self.as_bytes().index(&((m-n)..)) } #[inline] diff --git a/src/libcoretest/char.rs b/src/libcoretest/char.rs index b581cdbd71093..f901e8001767d 100644 --- a/src/libcoretest/char.rs +++ b/src/libcoretest/char.rs @@ -167,7 +167,7 @@ fn test_encode_utf8() { fn check(input: char, expect: &[u8]) { let mut buf = [0u8; 4]; let n = input.encode_utf8(buf.as_mut_slice()).unwrap_or(0); - assert_eq!(buf[..n], expect); + assert_eq!(buf.index(&(0..n)), expect); } check('x', &[0x78]); @@ -181,7 +181,7 @@ fn test_encode_utf16() { fn check(input: char, expect: &[u16]) { let mut buf = [0u16; 2]; let n = input.encode_utf16(buf.as_mut_slice()).unwrap_or(0); - assert_eq!(buf[..n], expect); + assert_eq!(buf.index(&(0..n)), expect); } check('x', &[0x0078]); diff --git a/src/libcoretest/iter.rs b/src/libcoretest/iter.rs index 73db72d0313e6..61266a9264944 100644 --- a/src/libcoretest/iter.rs +++ b/src/libcoretest/iter.rs @@ -14,7 +14,6 @@ use core::iter::MinMaxResult::*; use core::num::SignedInt; use core::uint; use core::cmp; -use core::ops::Slice; use test::Bencher; @@ -230,7 +229,7 @@ fn test_inspect() { .collect::>(); assert_eq!(n, xs.len()); - assert_eq!(xs[], ys[]); + assert_eq!(&xs[], &ys[]); } #[test] @@ -281,21 +280,21 @@ fn test_iterator_nth() { fn test_iterator_last() { let v: &[_] = &[0i, 1, 2, 3, 4]; assert_eq!(v.iter().last().unwrap(), &4); - assert_eq!(v[0..1].iter().last().unwrap(), &0); + assert_eq!(v[..1].iter().last().unwrap(), &0); } #[test] fn test_iterator_len() { let v: &[_] = &[0i, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]; - assert_eq!(v[0..4].iter().count(), 4); - assert_eq!(v[0..10].iter().count(), 10); + assert_eq!(v[..4].iter().count(), 4); + assert_eq!(v[..10].iter().count(), 10); assert_eq!(v[0..0].iter().count(), 0); } #[test] fn test_iterator_sum() { let v: &[_] = &[0i, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]; - assert_eq!(v[0..4].iter().map(|&x| x).sum(), 6); + assert_eq!(v[..4].iter().map(|&x| x).sum(), 6); assert_eq!(v.iter().map(|&x| x).sum(), 55); assert_eq!(v[0..0].iter().map(|&x| x).sum(), 0); } @@ -374,7 +373,7 @@ fn test_all() { 
assert!(v.iter().all(|&x| x < 10)); assert!(!v.iter().all(|&x| x % 2 == 0)); assert!(!v.iter().all(|&x| x > 100)); - assert!(v.slice_or_fail(&0, &0).iter().all(|_| panic!())); + assert!(v[0..0].iter().all(|_| panic!())); } #[test] @@ -383,7 +382,7 @@ fn test_any() { assert!(v.iter().any(|&x| x < 10)); assert!(v.iter().any(|&x| x % 2 == 0)); assert!(!v.iter().any(|&x| x > 100)); - assert!(!v.slice_or_fail(&0, &0).iter().any(|_| panic!())); + assert!(!v[0..0].iter().any(|_| panic!())); } #[test] @@ -586,7 +585,7 @@ fn check_randacc_iter(a: T, len: uint) where fn test_double_ended_flat_map() { let u = [0u,1]; let v = [5u,6,7,8]; - let mut it = u.iter().flat_map(|x| v[*x..v.len()].iter()); + let mut it = u.iter().flat_map(|x| v[(*x)..v.len()].iter()); assert_eq!(it.next_back().unwrap(), &8); assert_eq!(it.next().unwrap(), &5); assert_eq!(it.next_back().unwrap(), &7); diff --git a/src/libcoretest/slice.rs b/src/libcoretest/slice.rs index 9ef7d6030593a..b714b6a4e417d 100644 --- a/src/libcoretest/slice.rs +++ b/src/libcoretest/slice.rs @@ -43,35 +43,35 @@ fn iterator_to_slice() { { let mut iter = data.iter(); - assert_eq!(iter[], other_data[]); + assert_eq!(&iter[], &other_data[]); iter.next(); - assert_eq!(iter[], other_data[1..]); + assert_eq!(&iter[], &other_data[1..]); iter.next_back(); - assert_eq!(iter[], other_data[1..2]); + assert_eq!(&iter[], &other_data[1..2]); let s = iter.as_slice(); iter.next(); - assert_eq!(s, other_data[1..2]); + assert_eq!(s, &other_data[1..2]); } { let mut iter = data.iter_mut(); - assert_eq!(iter[], other_data[]); + assert_eq!(iter.index(&FullRange), other_data.index(&FullRange)); // mutability: - assert!(iter[mut] == other_data); + assert!(&mut iter[] == other_data); iter.next(); - assert_eq!(iter[], other_data[1..]); - assert!(iter[mut] == other_data[mut 1..]); + assert_eq!(iter.index(&FullRange), other_data.index(&(1..))); + assert!(&mut iter[] == &mut other_data[1..]); iter.next_back(); - assert_eq!(iter[], other_data[1..2]); - assert!(iter[mut] == other_data[mut 1..2]); + assert_eq!(iter.index(&FullRange), other_data.index(&(1..2))); + assert!(&mut iter[] == &mut other_data[1..2]); let s = iter.into_slice(); - assert!(s == other_data[mut 1..2]); + assert!(s == &mut other_data[1..2]); } }} } diff --git a/src/libfmt_macros/lib.rs b/src/libfmt_macros/lib.rs index 917c6e99992f2..3829d98c7a122 100644 --- a/src/libfmt_macros/lib.rs +++ b/src/libfmt_macros/lib.rs @@ -212,11 +212,11 @@ impl<'a> Parser<'a> { self.cur.next(); } Some((_, other)) => { - self.err(format!("expected `{}`, found `{}`", c, other)[]); + self.err(format!("expected `{}`, found `{}`", c, other).index(&FullRange)); } None => { self.err(format!("expected `{}` but string was terminated", - c)[]); + c).index(&FullRange)); } } } @@ -239,12 +239,12 @@ impl<'a> Parser<'a> { // we may not consume the character, so clone the iterator match self.cur.clone().next() { Some((pos, '}')) | Some((pos, '{')) => { - return self.input[start..pos]; + return self.input.index(&(start..pos)); } Some(..) => { self.cur.next(); } None => { self.cur.next(); - return self.input[start..self.input.len()]; + return self.input.index(&(start..self.input.len())); } } } @@ -284,7 +284,7 @@ impl<'a> Parser<'a> { flags: 0, precision: CountImplied, width: CountImplied, - ty: self.input[0..0], + ty: self.input.index(&(0..0)), }; if !self.consume(':') { return spec } @@ -393,7 +393,7 @@ impl<'a> Parser<'a> { self.cur.next(); pos } - Some(..) | None => { return self.input[0..0]; } + Some(..) 
| None => { return self.input.index(&(0..0)); } }; let mut end; loop { @@ -405,7 +405,7 @@ impl<'a> Parser<'a> { None => { end = self.input.len(); break } } } - self.input[start..end] + self.input.index(&(start..end)) } /// Optionally parses an integer at the current position. This doesn't deal diff --git a/src/libgetopts/lib.rs b/src/libgetopts/lib.rs index 18077795e245f..dd9e5e5a038a7 100644 --- a/src/libgetopts/lib.rs +++ b/src/libgetopts/lib.rs @@ -281,7 +281,7 @@ impl OptGroup { impl Matches { fn opt_vals(&self, nm: &str) -> Vec { - match find_opt(self.opts[], Name::from_str(nm)) { + match find_opt(self.opts.index(&FullRange), Name::from_str(nm)) { Some(id) => self.vals[id].clone(), None => panic!("No option '{}' defined", nm) } @@ -309,7 +309,7 @@ impl Matches { /// Returns true if any of several options were matched. pub fn opts_present(&self, names: &[String]) -> bool { for nm in names.iter() { - match find_opt(self.opts.as_slice(), Name::from_str(nm[])) { + match find_opt(self.opts.as_slice(), Name::from_str(nm.index(&FullRange))) { Some(id) if !self.vals[id].is_empty() => return true, _ => (), }; @@ -320,7 +320,7 @@ impl Matches { /// Returns the string argument supplied to one of several matching options or `None`. pub fn opts_str(&self, names: &[String]) -> Option { for nm in names.iter() { - match self.opt_val(nm[]) { + match self.opt_val(nm.index(&FullRange)) { Some(Val(ref s)) => return Some(s.clone()), _ => () } @@ -585,7 +585,7 @@ pub fn getopts(args: &[String], optgrps: &[OptGroup]) -> Result { while i < l { let cur = args[i].clone(); let curlen = cur.len(); - if !is_arg(cur[]) { + if !is_arg(cur.index(&FullRange)) { free.push(cur); } else if cur == "--" { let mut j = i + 1; @@ -595,7 +595,7 @@ pub fn getopts(args: &[String], optgrps: &[OptGroup]) -> Result { let mut names; let mut i_arg = None; if cur.as_bytes()[1] == b'-' { - let tail = cur[2..curlen]; + let tail = cur.index(&(2..curlen)); let tail_eq: Vec<&str> = tail.split('=').collect(); if tail_eq.len() <= 1 { names = vec!(Long(tail.to_string())); @@ -631,7 +631,7 @@ pub fn getopts(args: &[String], optgrps: &[OptGroup]) -> Result { }; if arg_follows && range.next < curlen { - i_arg = Some(cur[range.next..curlen].to_string()); + i_arg = Some(cur.index(&(range.next..curlen)).to_string()); break; } @@ -650,29 +650,34 @@ pub fn getopts(args: &[String], optgrps: &[OptGroup]) -> Result { if name_pos == names.len() && !i_arg.is_none() { return Err(UnexpectedArgument(nm.to_string())); } - vals[optid].push(Given); + let v = &mut vals[optid]; + v.push(Given); } Maybe => { if !i_arg.is_none() { - vals[optid] - .push(Val((i_arg.clone()) + let v = &mut vals[optid]; + v.push(Val((i_arg.clone()) .unwrap())); } else if name_pos < names.len() || i + 1 == l || - is_arg(args[i + 1][]) { - vals[optid].push(Given); + is_arg(args[i + 1].index(&FullRange)) { + let v = &mut vals[optid]; + v.push(Given); } else { i += 1; - vals[optid].push(Val(args[i].clone())); + let v = &mut vals[optid]; + v.push(Val(args[i].clone())); } } Yes => { if !i_arg.is_none() { - vals[optid].push(Val(i_arg.clone().unwrap())); + let v = &mut vals[optid]; + v.push(Val(i_arg.clone().unwrap())); } else if i + 1 == l { return Err(ArgumentMissing(nm.to_string())); } else { i += 1; - vals[optid].push(Val(args[i].clone())); + let v = &mut vals[optid]; + v.push(Val(args[i].clone())); } } } @@ -717,7 +722,7 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String { 0 => {} 1 => { row.push('-'); - row.push_str(short_name[]); + row.push_str(short_name.index(&FullRange)); 
row.push(' '); } _ => panic!("the short name should only be 1 ascii char long"), @@ -728,7 +733,7 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String { 0 => {} _ => { row.push_str("--"); - row.push_str(long_name[]); + row.push_str(long_name.index(&FullRange)); row.push(' '); } } @@ -736,10 +741,10 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String { // arg match hasarg { No => {} - Yes => row.push_str(hint[]), + Yes => row.push_str(hint.index(&FullRange)), Maybe => { row.push('['); - row.push_str(hint[]); + row.push_str(hint.index(&FullRange)); row.push(']'); } } @@ -752,7 +757,7 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String { row.push(' '); } } else { - row.push_str(desc_sep[]); + row.push_str(desc_sep.index(&FullRange)); } // Normalize desc to contain words separated by one space character @@ -764,14 +769,14 @@ pub fn usage(brief: &str, opts: &[OptGroup]) -> String { // FIXME: #5516 should be graphemes not codepoints let mut desc_rows = Vec::new(); - each_split_within(desc_normalized_whitespace[], 54, |substr| { + each_split_within(desc_normalized_whitespace.index(&FullRange), 54, |substr| { desc_rows.push(substr.to_string()); true }); // FIXME: #5516 should be graphemes not codepoints // wrapped description - row.push_str(desc_rows.connect(desc_sep[])[]); + row.push_str(desc_rows.connect(desc_sep.index(&FullRange)).index(&FullRange)); row }); @@ -790,10 +795,10 @@ fn format_option(opt: &OptGroup) -> String { // Use short_name is possible, but fallback to long_name. if opt.short_name.len() > 0 { line.push('-'); - line.push_str(opt.short_name[]); + line.push_str(opt.short_name.index(&FullRange)); } else { line.push_str("--"); - line.push_str(opt.long_name[]); + line.push_str(opt.long_name.index(&FullRange)); } if opt.hasarg != No { @@ -801,7 +806,7 @@ fn format_option(opt: &OptGroup) -> String { if opt.hasarg == Maybe { line.push('['); } - line.push_str(opt.hint[]); + line.push_str(opt.hint.index(&FullRange)); if opt.hasarg == Maybe { line.push(']'); } @@ -823,7 +828,7 @@ pub fn short_usage(program_name: &str, opts: &[OptGroup]) -> String { line.push_str(opts.iter() .map(format_option) .collect::>() - .connect(" ")[]); + .connect(" ").index(&FullRange)); line } @@ -886,9 +891,9 @@ fn each_split_within(ss: &str, lim: uint, mut it: F) -> bool where (B, Cr, UnderLim) => { B } (B, Cr, OverLim) if (i - last_start + 1) > lim => panic!("word starting with {} longer than limit!", - ss[last_start..i + 1]), + ss.index(&(last_start..(i + 1)))), (B, Cr, OverLim) => { - *cont = it(ss[slice_start..last_end]); + *cont = it(ss.index(&(slice_start..last_end))); slice_start = last_start; B } @@ -898,7 +903,7 @@ fn each_split_within(ss: &str, lim: uint, mut it: F) -> bool where } (B, Ws, OverLim) => { last_end = i; - *cont = it(ss[slice_start..last_end]); + *cont = it(ss.index(&(slice_start..last_end))); A } @@ -907,14 +912,14 @@ fn each_split_within(ss: &str, lim: uint, mut it: F) -> bool where B } (C, Cr, OverLim) => { - *cont = it(ss[slice_start..last_end]); + *cont = it(ss.index(&(slice_start..last_end))); slice_start = i; last_start = i; last_end = i; B } (C, Ws, OverLim) => { - *cont = it(ss[slice_start..last_end]); + *cont = it(ss.index(&(slice_start..last_end))); A } (C, Ws, UnderLim) => { diff --git a/src/libgraphviz/lib.rs b/src/libgraphviz/lib.rs index 64cc490f4b163..7e8382b26c4a0 100644 --- a/src/libgraphviz/lib.rs +++ b/src/libgraphviz/lib.rs @@ -453,7 +453,7 @@ impl<'a> LabelText<'a> { pub fn escape(&self) -> String { match self { &LabelStr(ref s) => 
s.escape_default(), - &EscStr(ref s) => LabelText::escape_str(s[]), + &EscStr(ref s) => LabelText::escape_str(s.index(&FullRange)), } } @@ -482,7 +482,7 @@ impl<'a> LabelText<'a> { let mut prefix = self.pre_escaped_content().into_owned(); let suffix = suffix.pre_escaped_content(); prefix.push_str(r"\n\n"); - prefix.push_str(suffix[]); + prefix.push_str(suffix.index(&FullRange)); EscStr(prefix.into_cow()) } } @@ -676,7 +676,7 @@ mod tests { impl<'a> Labeller<'a, Node, &'a Edge> for LabelledGraph { fn graph_id(&'a self) -> Id<'a> { - Id::new(self.name[]).unwrap() + Id::new(self.name.index(&FullRange)).unwrap() } fn node_id(&'a self, n: &Node) -> Id<'a> { id_name(n) diff --git a/src/liblog/lib.rs b/src/liblog/lib.rs index df85e89efd17c..dd4291d6b51c8 100644 --- a/src/liblog/lib.rs +++ b/src/liblog/lib.rs @@ -282,7 +282,7 @@ pub fn log(level: u32, loc: &'static LogLocation, args: fmt::Arguments) { // Test the literal string from args against the current filter, if there // is one. match unsafe { FILTER.as_ref() } { - Some(filter) if !filter.is_match(args.to_string()[]) => return, + Some(filter) if !filter.is_match(args.to_string().index(&FullRange)) => return, _ => {} } @@ -377,7 +377,7 @@ fn enabled(level: u32, // Search for the longest match, the vector is assumed to be pre-sorted. for directive in iter.rev() { match directive.name { - Some(ref name) if !module.starts_with(name[]) => {}, + Some(ref name) if !module.starts_with(name.index(&FullRange)) => {}, Some(..) | None => { return level <= directive.level } @@ -392,7 +392,7 @@ fn enabled(level: u32, /// `Once` primitive (and this function is called from that primitive). fn init() { let (mut directives, filter) = match os::getenv("RUST_LOG") { - Some(spec) => directive::parse_logging_spec(spec[]), + Some(spec) => directive::parse_logging_spec(spec.index(&FullRange)), None => (Vec::new(), None), }; diff --git a/src/librand/distributions/range.rs b/src/librand/distributions/range.rs index 1038009522d67..1002d9693ba69 100644 --- a/src/librand/distributions/range.rs +++ b/src/librand/distributions/range.rs @@ -12,7 +12,7 @@ // this is surprisingly complicated to be both generic & correct -use core::prelude::*; +use core::prelude::{PartialOrd}; use core::num::Int; use Rng; @@ -166,7 +166,7 @@ mod tests { use std::num::Int; use std::prelude::v1::*; use distributions::{Sample, IndependentSample}; - use super::Range; + use super::Range as Range; #[should_fail] #[test] diff --git a/src/librand/lib.rs b/src/librand/lib.rs index c4dd08f9917e2..aeecd69e205ff 100644 --- a/src/librand/lib.rs +++ b/src/librand/lib.rs @@ -290,8 +290,8 @@ pub trait Rng : Sized { /// let choices = [1i, 2, 4, 8, 16, 32]; /// let mut rng = thread_rng(); /// println!("{}", rng.choose(&choices)); - /// # // replace with slicing syntax when it's stable! 
- /// assert_eq!(rng.choose(choices.slice_to(0)), None); + /// # // uncomment when slicing syntax is stable + /// //assert_eq!(rng.choose(choices.index(&(0..0))), None); /// ``` fn choose<'a, T>(&mut self, values: &'a [T]) -> Option<&'a T> { if values.is_empty() { diff --git a/src/librbml/io.rs b/src/librbml/io.rs index de06471e65ed9..5ebec32d73384 100644 --- a/src/librbml/io.rs +++ b/src/librbml/io.rs @@ -95,7 +95,7 @@ impl Writer for SeekableMemWriter { // there (left), and what will be appended on the end (right) let cap = self.buf.len() - self.pos; let (left, right) = if cap <= buf.len() { - (buf[..cap], buf[cap..]) + (buf.index(&(0..cap)), buf.index(&(cap..))) } else { let result: (_, &[_]) = (buf, &[]); result diff --git a/src/librbml/lib.rs b/src/librbml/lib.rs index e57542a6d14de..ec070269fe1ae 100644 --- a/src/librbml/lib.rs +++ b/src/librbml/lib.rs @@ -65,7 +65,7 @@ impl<'doc> Doc<'doc> { } pub fn as_str_slice<'a>(&'a self) -> &'a str { - str::from_utf8(self.data[self.start..self.end]).unwrap() + str::from_utf8(self.data.index(&(self.start..self.end))).unwrap() } pub fn as_str(&self) -> String { @@ -300,7 +300,7 @@ pub mod reader { pub fn with_doc_data(d: Doc, f: F) -> T where F: FnOnce(&[u8]) -> T, { - f(d.data[d.start..d.end]) + f(d.data.index(&(d.start..d.end))) } diff --git a/src/libregex/compile.rs b/src/libregex/compile.rs index d29a7a425c116..5803da1d3350d 100644 --- a/src/libregex/compile.rs +++ b/src/libregex/compile.rs @@ -105,7 +105,7 @@ impl Program { // This is a bit hacky since we have to skip over the initial // 'Save' instruction. let mut pre = String::with_capacity(5); - for inst in c.insts[1..].iter() { + for inst in c.insts.index(&(1..)).iter() { match *inst { OneChar(c, FLAG_EMPTY) => pre.push(c), _ => break diff --git a/src/libregex/parse.rs b/src/libregex/parse.rs index 07da86afcc971..2d46fa1143e26 100644 --- a/src/libregex/parse.rs +++ b/src/libregex/parse.rs @@ -18,6 +18,7 @@ use std::cmp; use std::fmt; use std::iter; use std::num; +use std::ops::Index; /// Static data containing Unicode ranges for general categories and scripts. 
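// Editorial note (illustrative, not part of the upstream patch): `.index(&(a..b))`
// is an ordinary trait method, so unlike the built-in `x[a..b]` slicing syntax it
// only resolves when the `Index` trait is in scope -- which is why this file, like
// the other modules touched below, gains a `use std::ops::Index;` import. A minimal
// sketch in the same pre-1.0 dialect:
fn first_two(xs: &[int]) -> &[int] {
    use std::ops::Index;        // brings the `index` method into scope
    xs.index(&(0u..2u))         // equivalent to the old xs[0..2]
}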
use unicode::regex::{UNICODE_CLASSES, PERLD, PERLS, PERLW}; @@ -285,7 +286,7 @@ impl<'a> Parser<'a> { true => Ok(()), false => { self.err(format!("Expected {} but got EOF.", - expected)[]) + expected).index(&FullRange)) } } } @@ -294,10 +295,10 @@ impl<'a> Parser<'a> { match self.next_char() { true if self.cur() == expected => Ok(()), true => self.err(format!("Expected '{}' but got '{}'.", - expected, self.cur())[]), + expected, self.cur()).index(&FullRange)), false => { self.err(format!("Expected '{}' but got EOF.", - expected)[]) + expected).index(&FullRange)) } } } @@ -443,14 +444,14 @@ impl<'a> Parser<'a> { Literal(c3, _) => c2 = c3, // allow literal escapes below ast => return self.err(format!("Expected a literal, but got {}.", - ast)[]), + ast).index(&FullRange)), } } if c2 < c { return self.err(format!("Invalid character class \ range '{}-{}'", c, - c2)[]) + c2).index(&FullRange)) } ranges.push((c, self.cur())) } else { @@ -488,7 +489,7 @@ impl<'a> Parser<'a> { FLAG_EMPTY }; let name = self.slice(name_start, closer - 1); - match find_class(ASCII_CLASSES, name[]) { + match find_class(ASCII_CLASSES, name.index(&FullRange)) { None => None, Some(ranges) => { self.chari = closer; @@ -513,18 +514,18 @@ impl<'a> Parser<'a> { return self.err(format!("No closing brace for counted \ repetition starting at position \ {}.", - start)[]) + start).index(&FullRange)) } }; self.chari = closer; let greed = try!(self.get_next_greedy()); - let inner = self.chars[start+1..closer].iter().cloned() + let inner = self.chars.index(&((start+1)..closer)).iter().cloned() .collect::(); // Parse the min and max values from the regex. let (mut min, mut max): (uint, Option); if !inner.contains(",") { - min = try!(self.parse_uint(inner[])); + min = try!(self.parse_uint(inner.index(&FullRange))); max = Some(min); } else { let pieces: Vec<&str> = inner.splitn(1, ',').collect(); @@ -546,19 +547,19 @@ impl<'a> Parser<'a> { if min > MAX_REPEAT { return self.err(format!( "{} exceeds maximum allowed repetitions ({})", - min, MAX_REPEAT)[]); + min, MAX_REPEAT).index(&FullRange)); } if max.is_some() { let m = max.unwrap(); if m > MAX_REPEAT { return self.err(format!( "{} exceeds maximum allowed repetitions ({})", - m, MAX_REPEAT)[]); + m, MAX_REPEAT).index(&FullRange)); } if m < min { return self.err(format!( "Max repetitions ({}) cannot be smaller than min \ - repetitions ({}).", m, min)[]); + repetitions ({}).", m, min).index(&FullRange)); } } @@ -622,7 +623,7 @@ impl<'a> Parser<'a> { Ok(AstClass(ranges, flags)) } _ => { - self.err(format!("Invalid escape sequence '\\\\{}'", c)[]) + self.err(format!("Invalid escape sequence '\\\\{}'", c).index(&FullRange)) } } } @@ -642,7 +643,7 @@ impl<'a> Parser<'a> { Some(i) => i, None => return self.err(format!( "Missing '}}' for unclosed '{{' at position {}", - self.chari)[]), + self.chari).index(&FullRange)), }; if closer - self.chari + 1 == 0 { return self.err("No Unicode class name found.") @@ -656,10 +657,10 @@ impl<'a> Parser<'a> { name = self.slice(self.chari + 1, self.chari + 2); self.chari += 1; } - match find_class(UNICODE_CLASSES, name[]) { + match find_class(UNICODE_CLASSES, name.index(&FullRange)) { None => { return self.err(format!("Could not find Unicode class '{}'", - name)[]) + name).index(&FullRange)) } Some(ranges) => { Ok(AstClass(ranges, negated | (self.flags & FLAG_NOCASE))) @@ -682,11 +683,11 @@ impl<'a> Parser<'a> { } } let s = self.slice(start, end); - match num::from_str_radix::(s[], 8) { + match num::from_str_radix::(s.index(&FullRange), 8) { Some(n) => 
Ok(Literal(try!(self.char_from_u32(n)), FLAG_EMPTY)), None => { self.err(format!("Could not parse '{}' as octal number.", - s)[]) + s).index(&FullRange)) } } } @@ -704,12 +705,12 @@ impl<'a> Parser<'a> { None => { return self.err(format!("Missing '}}' for unclosed \ '{{' at position {}", - start)[]) + start).index(&FullRange)) } Some(i) => i, }; self.chari = closer; - self.parse_hex_digits(self.slice(start, closer)[]) + self.parse_hex_digits(self.slice(start, closer).index(&FullRange)) } // Parses a two-digit hex number. @@ -729,7 +730,7 @@ impl<'a> Parser<'a> { match num::from_str_radix::(s, 16) { Some(n) => Ok(Literal(try!(self.char_from_u32(n)), FLAG_EMPTY)), None => { - self.err(format!("Could not parse '{}' as hex number.", s)[]) + self.err(format!("Could not parse '{}' as hex number.", s).index(&FullRange)) } } } @@ -755,7 +756,7 @@ impl<'a> Parser<'a> { } if self.names.contains(&name) { return self.err(format!("Duplicate capture group name '{}'.", - name)[]) + name).index(&FullRange)) } self.names.push(name.clone()); self.chari = closer; @@ -789,7 +790,7 @@ impl<'a> Parser<'a> { if sign < 0 { return self.err(format!( "Cannot negate flags twice in '{}'.", - self.slice(start, self.chari + 1))[]) + self.slice(start, self.chari + 1)).index(&FullRange)) } sign = -1; saw_flag = false; @@ -800,7 +801,7 @@ impl<'a> Parser<'a> { if !saw_flag { return self.err(format!( "A valid flag does not follow negation in '{}'", - self.slice(start, self.chari + 1))[]) + self.slice(start, self.chari + 1)).index(&FullRange)) } flags = flags ^ flags; } @@ -812,7 +813,7 @@ impl<'a> Parser<'a> { return Ok(()) } _ => return self.err(format!( - "Unrecognized flag '{}'.", self.cur())[]), + "Unrecognized flag '{}'.", self.cur()).index(&FullRange)), } } } @@ -910,7 +911,7 @@ impl<'a> Parser<'a> { Some(i) => Ok(i), None => { self.err(format!("Expected an unsigned integer but got '{}'.", - s)[]) + s).index(&FullRange)) } } } @@ -920,7 +921,7 @@ impl<'a> Parser<'a> { Some(c) => Ok(c), None => { self.err(format!("Could not decode '{}' to unicode \ - character.", n)[]) + character.", n).index(&FullRange)) } } } @@ -953,7 +954,7 @@ impl<'a> Parser<'a> { } fn slice(&self, start: uint, end: uint) -> String { - self.chars[start..end].iter().cloned().collect() + self.chars.index(&(start..end)).iter().cloned().collect() } } diff --git a/src/libregex/re.rs b/src/libregex/re.rs index 1840a3343e63c..cb2690ce80ca1 100644 --- a/src/libregex/re.rs +++ b/src/libregex/re.rs @@ -238,19 +238,19 @@ impl Regex { } let (s, e) = cap.pos(0).unwrap(); // captures only reports matches - new.push_str(text[last_match..s]); - new.push_str(rep.reg_replace(&cap)[]); + new.push_str(text.index(&(last_match..s))); + new.push_str(rep.reg_replace(&cap).index(&FullRange)); last_match = e; } - new.push_str(text[last_match..text.len()]); + new.push_str(text.index(&(last_match..text.len()))); return new; } /// Returns the original string of this regex. pub fn as_str<'a>(&'a self) -> &'a str { match *self { - Dynamic(ExDynamic { ref original, .. }) => original[], - Native(ExNative { ref original, .. }) => original[], + Dynamic(ExDynamic { ref original, .. }) => original.index(&FullRange), + Native(ExNative { ref original, .. 
}) => original.index(&FullRange), } } @@ -347,13 +347,13 @@ impl<'r, 't> Iterator for RegexSplits<'r, 't> { if self.last >= text.len() { None } else { - let s = text[self.last..text.len()]; + let s = text.index(&(self.last..text.len())); self.last = text.len(); Some(s) } } Some((s, e)) => { - let matched = text[self.last..s]; + let matched = text.index(&(self.last..s)); self.last = e; Some(matched) } @@ -384,7 +384,7 @@ impl<'r, 't> Iterator for RegexSplitsN<'r, 't> { } else { self.cur += 1; if self.cur >= self.limit { - Some(text[self.splits.last..text.len()]) + Some(text.index(&(self.splits.last..text.len()))) } else { self.splits.next() } @@ -517,7 +517,7 @@ impl<'t> Captures<'t> { }) }); let re = Regex::new(r"\$\$").unwrap(); - re.replace_all(text[], NoExpand("$")) + re.replace_all(text.index(&FullRange), NoExpand("$")) } /// Returns the number of captured groups. diff --git a/src/libregex/test/tests.rs b/src/libregex/test/tests.rs index d087814b7f401..4f38370d7a150 100644 --- a/src/libregex/test/tests.rs +++ b/src/libregex/test/tests.rs @@ -159,7 +159,7 @@ macro_rules! mat { // actual capture groups to match test set. let mut sgot = got.as_slice(); if sgot.len() > expected.len() { - sgot = sgot[0..expected.len()] + sgot = &sgot[..expected.len()] } if expected != sgot { panic!("For RE '{}' against '{}', expected '{}' but got '{}'", diff --git a/src/libregex/vm.rs b/src/libregex/vm.rs index 914167019d209..04c430da4d290 100644 --- a/src/libregex/vm.rs +++ b/src/libregex/vm.rs @@ -152,7 +152,7 @@ impl<'r, 't> Nfa<'r, 't> { // out early. if self.prog.prefix.len() > 0 && clist.size == 0 { let needle = self.prog.prefix.as_bytes(); - let haystack = self.input.as_bytes()[self.ic..]; + let haystack = self.input.as_bytes().index(&(self.ic..)); match find_prefix(needle, haystack) { None => break, Some(i) => { @@ -503,7 +503,8 @@ impl Threads { #[inline] fn groups<'r>(&'r mut self, i: uint) -> &'r mut [Option] { - self.queue[i].groups.as_mut_slice() + let q = &mut self.queue[i]; + q.groups.as_mut_slice() } } diff --git a/src/librustc/lint/builtin.rs b/src/librustc/lint/builtin.rs index 425e34cd9f042..1d446817c189d 100644 --- a/src/librustc/lint/builtin.rs +++ b/src/librustc/lint/builtin.rs @@ -506,7 +506,7 @@ impl BoxPointers { if n_uniq > 0 { let s = ty_to_string(cx.tcx, ty); let m = format!("type uses owned (Box type) pointers: {}", s); - cx.span_lint(BOX_POINTERS, span, m[]); + cx.span_lint(BOX_POINTERS, span, m.index(&FullRange)); } } } @@ -586,7 +586,7 @@ impl LintPass for RawPointerDerive { } fn check_item(&mut self, cx: &Context, item: &ast::Item) { - if !attr::contains_name(item.attrs[], "automatically_derived") { + if !attr::contains_name(item.attrs.index(&FullRange), "automatically_derived") { return } let did = match item.node { @@ -769,11 +769,11 @@ impl LintPass for UnusedResults { ty::ty_enum(did, _) => { if ast_util::is_local(did) { if let ast_map::NodeItem(it) = cx.tcx.map.get(did.node) { - warned |= check_must_use(cx, it.attrs[], s.span); + warned |= check_must_use(cx, it.attrs.index(&FullRange), s.span); } } else { csearch::get_item_attrs(&cx.sess().cstore, did, |attrs| { - warned |= check_must_use(cx, attrs[], s.span); + warned |= check_must_use(cx, attrs.index(&FullRange), s.span); }); } } @@ -795,7 +795,7 @@ impl LintPass for UnusedResults { msg.push_str(s.get()); } } - cx.span_lint(UNUSED_MUST_USE, sp, msg[]); + cx.span_lint(UNUSED_MUST_USE, sp, msg.index(&FullRange)); return true; } } @@ -841,7 +841,7 @@ impl NonCamelCaseTypes { } else { format!("{} `{}` should have a camel 
case name such as `{}`", sort, s, c) }; - cx.span_lint(NON_CAMEL_CASE_TYPES, span, m[]); + cx.span_lint(NON_CAMEL_CASE_TYPES, span, m.index(&FullRange)); } } } @@ -981,7 +981,7 @@ impl NonSnakeCase { if !is_snake_case(ident) { cx.span_lint(NON_SNAKE_CASE, span, format!("{} `{}` should have a snake case name such as `{}`", - sort, s, to_snake_case(s.get()))[]); + sort, s, to_snake_case(s.get())).index(&FullRange)); } } } @@ -1068,7 +1068,7 @@ impl LintPass for NonUpperCaseGlobals { format!("static constant `{}` should have an uppercase name \ such as `{}`", s.get(), s.get().chars().map(|c| c.to_uppercase()) - .collect::()[])[]); + .collect::().index(&FullRange)).index(&FullRange)); } } _ => {} @@ -1085,7 +1085,7 @@ impl LintPass for NonUpperCaseGlobals { format!("static constant in pattern `{}` should have an uppercase \ name such as `{}`", s.get(), s.get().chars().map(|c| c.to_uppercase()) - .collect::()[])[]); + .collect::().index(&FullRange)).index(&FullRange)); } } _ => {} @@ -1110,7 +1110,7 @@ impl UnusedParens { if !necessary { cx.span_lint(UNUSED_PARENS, value.span, format!("unnecessary parentheses around {}", - msg)[]) + msg).index(&FullRange)) } } @@ -1212,7 +1212,7 @@ impl LintPass for UnusedImportBraces { let m = format!("braces around {} is unnecessary", token::get_ident(*name).get()); cx.span_lint(UNUSED_IMPORT_BRACES, view_item.span, - m[]); + m.index(&FullRange)); }, _ => () } @@ -1251,7 +1251,7 @@ impl LintPass for NonShorthandFieldPatterns { if ident.node.as_str() == fieldpat.node.ident.as_str() { cx.span_lint(NON_SHORTHAND_FIELD_PATTERNS, fieldpat.span, format!("the `{}:` in this pattern is redundant and can \ - be removed", ident.node.as_str())[]) + be removed", ident.node.as_str()).index(&FullRange)) } } } @@ -1355,7 +1355,7 @@ impl LintPass for UnusedMut { fn check_expr(&mut self, cx: &Context, e: &ast::Expr) { if let ast::ExprMatch(_, ref arms, _) = e.node { for a in arms.iter() { - self.check_unused_mut_pat(cx, a.pats[]) + self.check_unused_mut_pat(cx, a.pats.index(&FullRange)) } } } @@ -1476,7 +1476,7 @@ impl MissingDoc { }); if !has_doc { cx.span_lint(MISSING_DOCS, sp, - format!("missing documentation for {}", desc)[]); + format!("missing documentation for {}", desc).index(&FullRange)); } } } @@ -1490,7 +1490,7 @@ impl LintPass for MissingDoc { let doc_hidden = self.doc_hidden() || attrs.iter().any(|attr| { attr.check_name("doc") && match attr.meta_item_list() { None => false, - Some(l) => attr::contains_name(l[], "hidden"), + Some(l) => attr::contains_name(l.index(&FullRange), "hidden"), } }); self.doc_hidden_stack.push(doc_hidden); @@ -1512,7 +1512,7 @@ impl LintPass for MissingDoc { } fn check_crate(&mut self, cx: &Context, krate: &ast::Crate) { - self.check_missing_docs_attrs(cx, None, krate.attrs[], + self.check_missing_docs_attrs(cx, None, krate.attrs.index(&FullRange), krate.span, "crate"); } @@ -1526,7 +1526,7 @@ impl LintPass for MissingDoc { ast::ItemTy(..) => "a type alias", _ => return }; - self.check_missing_docs_attrs(cx, Some(it.id), it.attrs[], + self.check_missing_docs_attrs(cx, Some(it.id), it.attrs.index(&FullRange), it.span, desc); } @@ -1539,13 +1539,13 @@ impl LintPass for MissingDoc { // Otherwise, doc according to privacy. This will also check // doc for default methods defined on traits. 
- self.check_missing_docs_attrs(cx, Some(m.id), m.attrs[], + self.check_missing_docs_attrs(cx, Some(m.id), m.attrs.index(&FullRange), m.span, "a method"); } } fn check_ty_method(&mut self, cx: &Context, tm: &ast::TypeMethod) { - self.check_missing_docs_attrs(cx, Some(tm.id), tm.attrs[], + self.check_missing_docs_attrs(cx, Some(tm.id), tm.attrs.index(&FullRange), tm.span, "a type method"); } @@ -1555,14 +1555,14 @@ impl LintPass for MissingDoc { let cur_struct_def = *self.struct_def_stack.last() .expect("empty struct_def_stack"); self.check_missing_docs_attrs(cx, Some(cur_struct_def), - sf.node.attrs[], sf.span, + sf.node.attrs.index(&FullRange), sf.span, "a struct field") } } } fn check_variant(&mut self, cx: &Context, v: &ast::Variant, _: &ast::Generics) { - self.check_missing_docs_attrs(cx, Some(v.node.id), v.node.attrs[], + self.check_missing_docs_attrs(cx, Some(v.node.id), v.node.attrs.index(&FullRange), v.span, "a variant"); assert!(!self.in_variant); self.in_variant = true; @@ -1674,7 +1674,7 @@ impl Stability { _ => format!("use of {} item", label) }; - cx.span_lint(lint, span, msg[]); + cx.span_lint(lint, span, msg.index(&FullRange)); } fn is_internal(&self, cx: &Context, span: Span) -> bool { diff --git a/src/librustc/lint/context.rs b/src/librustc/lint/context.rs index 5c0fd8944368f..51998bdbcf299 100644 --- a/src/librustc/lint/context.rs +++ b/src/librustc/lint/context.rs @@ -104,7 +104,7 @@ impl LintStore { } pub fn get_lints<'t>(&'t self) -> &'t [(&'static Lint, bool)] { - self.lints[] + self.lints.index(&FullRange) } pub fn get_lint_groups<'t>(&'t self) -> Vec<(&'static str, Vec, bool)> { @@ -124,11 +124,11 @@ impl LintStore { match (sess, from_plugin) { // We load builtin lints first, so a duplicate is a compiler bug. // Use early_error when handling -W help with no crate. - (None, _) => early_error(msg[]), - (Some(sess), false) => sess.bug(msg[]), + (None, _) => early_error(msg.index(&FullRange)), + (Some(sess), false) => sess.bug(msg.index(&FullRange)), // A duplicate name from a plugin is a user error. - (Some(sess), true) => sess.err(msg[]), + (Some(sess), true) => sess.err(msg.index(&FullRange)), } } @@ -149,11 +149,11 @@ impl LintStore { match (sess, from_plugin) { // We load builtin lints first, so a duplicate is a compiler bug. // Use early_error when handling -W help with no crate. - (None, _) => early_error(msg[]), - (Some(sess), false) => sess.bug(msg[]), + (None, _) => early_error(msg.index(&FullRange)), + (Some(sess), false) => sess.bug(msg.index(&FullRange)), // A duplicate name from a plugin is a user error. 
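// Editorial sketch (illustrative, not part of the upstream patch): the bulk of
// this change is mechanical -- `expr[]` becomes `expr.index(&FullRange)`, which
// borrows the whole contents: a `String` yields `&str` and a `Vec<T>` yields
// `&[T]`, just like the older `.as_slice()` calls. In the same pre-1.0 dialect:
#[test]
fn full_range_index_sketch() {
    use std::ops::{Index, FullRange};
    let msg: String = format!("unknown flag: {}", "-q");
    let s: &str = msg.index(&FullRange);       // was msg[] / msg.as_slice()
    assert_eq!(s, "unknown flag: -q");

    let attrs: Vec<uint> = vec![1u, 2u, 3u];
    let a: &[uint] = attrs.index(&FullRange);  // was attrs[] / attrs.as_slice()
    assert_eq!(a.len(), 3u);
}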
- (Some(sess), true) => sess.err(msg[]), + (Some(sess), true) => sess.err(msg.index(&FullRange)), } } } @@ -267,8 +267,8 @@ impl LintStore { let warning = format!("lint {} has been renamed to {}", lint_name, new_name); match span { - Some(span) => sess.span_warn(span, warning[]), - None => sess.warn(warning[]), + Some(span) => sess.span_warn(span, warning.index(&FullRange)), + None => sess.warn(warning.index(&FullRange)), }; Some(lint_id) } @@ -278,13 +278,13 @@ impl LintStore { pub fn process_command_line(&mut self, sess: &Session) { for &(ref lint_name, level) in sess.opts.lint_opts.iter() { - match self.find_lint(lint_name[], sess, None) { + match self.find_lint(lint_name.index(&FullRange), sess, None) { Some(lint_id) => self.set_level(lint_id, (level, CommandLine)), None => { match self.lint_groups.iter().map(|(&x, pair)| (x, pair.0.clone())) .collect::>>() - .get(lint_name[]) { + .get(lint_name.index(&FullRange)) { Some(v) => { v.iter() .map(|lint_id: &LintId| @@ -292,7 +292,7 @@ impl LintStore { .collect::>(); } None => sess.err(format!("unknown {} flag: {}", - level.as_str(), lint_name)[]), + level.as_str(), lint_name).index(&FullRange)), } } } @@ -403,10 +403,10 @@ pub fn raw_emit_lint(sess: &Session, lint: &'static Lint, if level == Forbid { level = Deny; } match (level, span) { - (Warn, Some(sp)) => sess.span_warn(sp, msg[]), - (Warn, None) => sess.warn(msg[]), - (Deny, Some(sp)) => sess.span_err(sp, msg[]), - (Deny, None) => sess.err(msg[]), + (Warn, Some(sp)) => sess.span_warn(sp, msg.index(&FullRange)), + (Warn, None) => sess.warn(msg.index(&FullRange)), + (Deny, Some(sp)) => sess.span_err(sp, msg.index(&FullRange)), + (Deny, None) => sess.err(msg.index(&FullRange)), _ => sess.bug("impossible level in raw_emit_lint"), } @@ -499,7 +499,7 @@ impl<'a, 'tcx> Context<'a, 'tcx> { None => { self.span_lint(builtin::UNKNOWN_LINTS, span, format!("unknown `{}` attribute: `{}`", - level.as_str(), lint_name)[]); + level.as_str(), lint_name).as_slice()); continue; } } @@ -515,7 +515,7 @@ impl<'a, 'tcx> Context<'a, 'tcx> { self.tcx.sess.span_err(span, format!("{}({}) overruled by outer forbid({})", level.as_str(), lint_name, - lint_name)[]); + lint_name).index(&FullRange)); } else if now != level { let src = self.lints.get_level_source(lint_id).1; self.level_stack.push((lint_id, (now, src))); @@ -550,7 +550,7 @@ impl<'a, 'tcx> Context<'a, 'tcx> { impl<'a, 'tcx, 'v> Visitor<'v> for Context<'a, 'tcx> { fn visit_item(&mut self, it: &ast::Item) { - self.with_lint_attrs(it.attrs[], |cx| { + self.with_lint_attrs(it.attrs.index(&FullRange), |cx| { run_lints!(cx, check_item, it); cx.visit_ids(|v| v.visit_item(it)); visit::walk_item(cx, it); @@ -558,14 +558,14 @@ impl<'a, 'tcx, 'v> Visitor<'v> for Context<'a, 'tcx> { } fn visit_foreign_item(&mut self, it: &ast::ForeignItem) { - self.with_lint_attrs(it.attrs[], |cx| { + self.with_lint_attrs(it.attrs.index(&FullRange), |cx| { run_lints!(cx, check_foreign_item, it); visit::walk_foreign_item(cx, it); }) } fn visit_view_item(&mut self, i: &ast::ViewItem) { - self.with_lint_attrs(i.attrs[], |cx| { + self.with_lint_attrs(i.attrs.index(&FullRange), |cx| { run_lints!(cx, check_view_item, i); cx.visit_ids(|v| v.visit_view_item(i)); visit::walk_view_item(cx, i); @@ -591,7 +591,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for Context<'a, 'tcx> { body: &'v ast::Block, span: Span, id: ast::NodeId) { match fk { visit::FkMethod(_, _, m) => { - self.with_lint_attrs(m.attrs[], |cx| { + self.with_lint_attrs(m.attrs.index(&FullRange), |cx| { run_lints!(cx, check_fn, fk, decl, 
body, span, id); cx.visit_ids(|v| { v.visit_fn(fk, decl, body, span, id); @@ -607,7 +607,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for Context<'a, 'tcx> { } fn visit_ty_method(&mut self, t: &ast::TypeMethod) { - self.with_lint_attrs(t.attrs[], |cx| { + self.with_lint_attrs(t.attrs.index(&FullRange), |cx| { run_lints!(cx, check_ty_method, t); visit::walk_ty_method(cx, t); }) @@ -624,14 +624,14 @@ impl<'a, 'tcx, 'v> Visitor<'v> for Context<'a, 'tcx> { } fn visit_struct_field(&mut self, s: &ast::StructField) { - self.with_lint_attrs(s.node.attrs[], |cx| { + self.with_lint_attrs(s.node.attrs.index(&FullRange), |cx| { run_lints!(cx, check_struct_field, s); visit::walk_struct_field(cx, s); }) } fn visit_variant(&mut self, v: &ast::Variant, g: &ast::Generics) { - self.with_lint_attrs(v.node.attrs[], |cx| { + self.with_lint_attrs(v.node.attrs.index(&FullRange), |cx| { run_lints!(cx, check_variant, v, g); visit::walk_variant(cx, v, g); run_lints!(cx, check_variant_post, v, g); @@ -725,7 +725,7 @@ impl<'a, 'tcx> IdVisitingOperation for Context<'a, 'tcx> { None => {} Some(lints) => { for (lint_id, span, msg) in lints.into_iter() { - self.span_lint(lint_id.lint, span, msg[]) + self.span_lint(lint_id.lint, span, msg.index(&FullRange)) } } } @@ -771,7 +771,7 @@ pub fn check_crate(tcx: &ty::ctxt, let mut cx = Context::new(tcx, krate, exported_items); // Visit the whole crate. - cx.with_lint_attrs(krate.attrs[], |cx| { + cx.with_lint_attrs(krate.attrs.index(&FullRange), |cx| { cx.visit_id(ast::CRATE_NODE_ID); cx.visit_ids(|v| { v.visited_outermost = true; @@ -791,7 +791,7 @@ pub fn check_crate(tcx: &ty::ctxt, for &(lint, span, ref msg) in v.iter() { tcx.sess.span_bug(span, format!("unprocessed lint {} at {}: {}", - lint.as_str(), tcx.map.node_to_string(*id), *msg)[]) + lint.as_str(), tcx.map.node_to_string(*id), *msg).as_slice()) } } diff --git a/src/librustc/metadata/creader.rs b/src/librustc/metadata/creader.rs index b44520e28527b..8961c3e728ca5 100644 --- a/src/librustc/metadata/creader.rs +++ b/src/librustc/metadata/creader.rs @@ -65,7 +65,8 @@ fn dump_crates(cstore: &CStore) { } fn should_link(i: &ast::ViewItem) -> bool { - !attr::contains_name(i.attrs[], "no_link") + !attr::contains_name(i.attrs.index(&FullRange), "no_link") + } struct CrateInfo { @@ -89,7 +90,7 @@ pub fn validate_crate_name(sess: Option<&Session>, s: &str, sp: Option) { for c in s.chars() { if c.is_alphanumeric() { continue } if c == '_' || c == '-' { continue } - err(format!("invalid character `{}` in crate name: `{}`", c, s)[]); + err(format!("invalid character `{}` in crate name: `{}`", c, s).index(&FullRange)); } match sess { Some(sess) => sess.abort_if_errors(), @@ -97,6 +98,7 @@ pub fn validate_crate_name(sess: Option<&Session>, s: &str, sp: Option) { } } + fn register_native_lib(sess: &Session, span: Option, name: String, @@ -187,8 +189,8 @@ impl<'a> CrateReader<'a> { match self.extract_crate_info(i) { Some(info) => { let (cnum, _, _) = self.resolve_crate(&None, - info.ident[], - info.name[], + info.ident.index(&FullRange), + info.name.index(&FullRange), None, i.span, PathKind::Crate); @@ -207,7 +209,7 @@ impl<'a> CrateReader<'a> { let name = match *path_opt { Some((ref path_str, _)) => { let name = path_str.get().to_string(); - validate_crate_name(Some(self.sess), name[], + validate_crate_name(Some(self.sess), name.index(&FullRange), Some(i.span)); name } @@ -274,7 +276,7 @@ impl<'a> CrateReader<'a> { } else { self.sess.span_err(m.span, format!("unknown kind: `{}`", - k)[]); + k).index(&FullRange)); cstore::NativeUnknown } } @@ 
-328,7 +330,7 @@ impl<'a> CrateReader<'a> { match self.sess.opts.externs.get(name) { Some(locs) => { let found = locs.iter().any(|l| { - let l = fs::realpath(&Path::new(l[])).ok(); + let l = fs::realpath(&Path::new(l.index(&FullRange))).ok(); l == source.dylib || l == source.rlib }); if found { @@ -407,7 +409,7 @@ impl<'a> CrateReader<'a> { crate_name: name, hash: hash.map(|a| &*a), filesearch: self.sess.target_filesearch(kind), - triple: self.sess.opts.target_triple[], + triple: self.sess.opts.target_triple.index(&FullRange), root: root, rejected_via_hash: vec!(), rejected_via_triple: vec!(), @@ -433,8 +435,8 @@ impl<'a> CrateReader<'a> { decoder::get_crate_deps(cdata).iter().map(|dep| { debug!("resolving dep crate {} hash: `{}`", dep.name, dep.hash); let (local_cnum, _, _) = self.resolve_crate(root, - dep.name[], - dep.name[], + dep.name.index(&FullRange), + dep.name.index(&FullRange), Some(&dep.hash), span, PathKind::Dependency); @@ -445,7 +447,7 @@ impl<'a> CrateReader<'a> { pub fn read_plugin_metadata<'b>(&'b mut self, vi: &'b ast::ViewItem) -> PluginMetadata<'b> { let info = self.extract_crate_info(vi).unwrap(); - let target_triple = self.sess.opts.target_triple[]; + let target_triple = self.sess.opts.target_triple.index(&FullRange); let is_cross = target_triple != config::host_triple(); let mut should_link = info.should_link && !is_cross; let mut target_only = false; @@ -454,8 +456,8 @@ impl<'a> CrateReader<'a> { let mut load_ctxt = loader::Context { sess: self.sess, span: vi.span, - ident: ident[], - crate_name: name[], + ident: ident.index(&FullRange), + crate_name: name.index(&FullRange), hash: None, filesearch: self.sess.host_filesearch(PathKind::Crate), triple: config::host_triple(), @@ -480,11 +482,11 @@ impl<'a> CrateReader<'a> { }; let dylib = library.dylib.clone(); - let register = should_link && self.existing_match(info.name[], None).is_none(); + let register = should_link && self.existing_match(info.name.as_slice(), None).is_none(); let metadata = if register { // Register crate now to avoid double-reading metadata - let (_, cmd, _) = self.register_crate(&None, info.ident[], - info.name[], vi.span, library); + let (_, cmd, _) = self.register_crate(&None, info.ident.index(&FullRange), + info.name.index(&FullRange), vi.span, library); PMDSource::Registered(cmd) } else { // Not registering the crate; just hold on to the metadata @@ -505,8 +507,8 @@ impl<'a> CrateReader<'a> { impl<'a> PluginMetadata<'a> { /// Read exported macros pub fn exported_macros(&self) -> Vec { - let imported_from = Some(token::intern(self.info.ident[]).ident()); - let source_name = format!("<{} macros>", self.info.ident[]); + let imported_from = Some(token::intern(self.info.ident.index(&FullRange)).ident()); + let source_name = format!("<{} macros>", self.info.ident.index(&FullRange)); let mut macros = vec![]; decoder::each_exported_macro(self.metadata.as_slice(), &*self.sess.cstore.intr, @@ -548,7 +550,7 @@ impl<'a> PluginMetadata<'a> { self.info.ident, config::host_triple(), self.sess.opts.target_triple); - self.sess.span_err(self.vi_span, message[]); + self.sess.span_err(self.vi_span, message.index(&FullRange)); self.sess.abort_if_errors(); } @@ -561,7 +563,7 @@ impl<'a> PluginMetadata<'a> { let message = format!("plugin crate `{}` only found in rlib format, \ but must be available in dylib format", self.info.ident); - self.sess.span_err(self.vi_span, message[]); + self.sess.span_err(self.vi_span, message.index(&FullRange)); // No need to abort because the loading code will just ignore this // 
empty dylib. None diff --git a/src/librustc/metadata/csearch.rs b/src/librustc/metadata/csearch.rs index 1401a7d4a1a6e..9249bce276edd 100644 --- a/src/librustc/metadata/csearch.rs +++ b/src/librustc/metadata/csearch.rs @@ -95,7 +95,7 @@ pub fn get_item_path(tcx: &ty::ctxt, def: ast::DefId) -> Vec // FIXME #1920: This path is not always correct if the crate is not linked // into the root namespace. - let mut r = vec![ast_map::PathMod(token::intern(cdata.name[]))]; + let mut r = vec![ast_map::PathMod(token::intern(cdata.name.index(&FullRange)))]; r.push_all(path.as_slice()); r } diff --git a/src/librustc/metadata/decoder.rs b/src/librustc/metadata/decoder.rs index ed0a1f6211b16..25ce8a6889d0e 100644 --- a/src/librustc/metadata/decoder.rs +++ b/src/librustc/metadata/decoder.rs @@ -75,7 +75,7 @@ fn lookup_hash<'a, F>(d: rbml::Doc<'a>, mut eq_fn: F, hash: u64) -> Option(item_id: ast::NodeId, items: rbml::Doc<'a>) -> Option> { fn eq_item(bytes: &[u8], item_id: ast::NodeId) -> bool { return u64_from_be_bytes( - bytes[0u..4u], 0u, 4u) as ast::NodeId + bytes.index(&(0u..4u)), 0u, 4u) as ast::NodeId == item_id; } lookup_hash(items, @@ -1191,7 +1191,7 @@ pub fn get_crate_deps(data: &[u8]) -> Vec { } reader::tagged_docs(depsdoc, tag_crate_dep, |depdoc| { let name = docstr(depdoc, tag_crate_dep_crate_name); - let hash = Svh::new(docstr(depdoc, tag_crate_dep_hash)[]); + let hash = Svh::new(docstr(depdoc, tag_crate_dep_hash).index(&FullRange)); deps.push(CrateDep { cnum: crate_num, name: name, diff --git a/src/librustc/metadata/encoder.rs b/src/librustc/metadata/encoder.rs index e4226ddde85b6..b19501229aa0b 100644 --- a/src/librustc/metadata/encoder.rs +++ b/src/librustc/metadata/encoder.rs @@ -95,7 +95,7 @@ fn encode_impl_type_basename(rbml_w: &mut Encoder, name: ast::Ident) { } pub fn encode_def_id(rbml_w: &mut Encoder, id: DefId) { - rbml_w.wr_tagged_str(tag_def_id, def_to_string(id)[]); + rbml_w.wr_tagged_str(tag_def_id, def_to_string(id).index(&FullRange)); } #[derive(Clone)] @@ -154,7 +154,7 @@ fn encode_variant_id(rbml_w: &mut Encoder, vid: DefId) { rbml_w.end_tag(); rbml_w.start_tag(tag_mod_child); - rbml_w.wr_str(s[]); + rbml_w.wr_str(s.index(&FullRange)); rbml_w.end_tag(); } @@ -264,7 +264,7 @@ fn encode_symbol(ecx: &EncodeContext, } None => { ecx.diag.handler().bug( - format!("encode_symbol: id not found {}", id)[]); + format!("encode_symbol: id not found {}", id).index(&FullRange)); } } rbml_w.end_tag(); @@ -332,8 +332,8 @@ fn encode_enum_variant_info(ecx: &EncodeContext, encode_name(rbml_w, variant.node.name.name); encode_parent_item(rbml_w, local_def(id)); encode_visibility(rbml_w, variant.node.vis); - encode_attributes(rbml_w, variant.node.attrs[]); - encode_repr_attrs(rbml_w, ecx, variant.node.attrs[]); + encode_attributes(rbml_w, variant.node.attrs.index(&FullRange)); + encode_repr_attrs(rbml_w, ecx, variant.node.attrs.index(&FullRange)); let stab = stability::lookup(ecx.tcx, ast_util::local_def(variant.node.id)); encode_stability(rbml_w, stab); @@ -344,9 +344,9 @@ fn encode_enum_variant_info(ecx: &EncodeContext, let fields = ty::lookup_struct_fields(ecx.tcx, def_id); let idx = encode_info_for_struct(ecx, rbml_w, - fields[], + fields.index(&FullRange), index); - encode_struct_fields(rbml_w, fields[], def_id); + encode_struct_fields(rbml_w, fields.index(&FullRange), def_id); encode_index(rbml_w, idx, write_i64); } } @@ -386,12 +386,12 @@ fn encode_reexported_static_method(rbml_w: &mut Encoder, exp.name, token::get_name(method_name)); rbml_w.start_tag(tag_items_data_item_reexport); 
rbml_w.start_tag(tag_items_data_item_reexport_def_id); - rbml_w.wr_str(def_to_string(method_def_id)[]); + rbml_w.wr_str(def_to_string(method_def_id).index(&FullRange)); rbml_w.end_tag(); rbml_w.start_tag(tag_items_data_item_reexport_name); rbml_w.wr_str(format!("{}::{}", exp.name, - token::get_name(method_name))[]); + token::get_name(method_name)).index(&FullRange)); rbml_w.end_tag(); rbml_w.end_tag(); } @@ -529,7 +529,7 @@ fn encode_reexports(ecx: &EncodeContext, id); rbml_w.start_tag(tag_items_data_item_reexport); rbml_w.start_tag(tag_items_data_item_reexport_def_id); - rbml_w.wr_str(def_to_string(exp.def_id)[]); + rbml_w.wr_str(def_to_string(exp.def_id).index(&FullRange)); rbml_w.end_tag(); rbml_w.start_tag(tag_items_data_item_reexport_name); rbml_w.wr_str(exp.name.as_str()); @@ -562,13 +562,13 @@ fn encode_info_for_mod(ecx: &EncodeContext, // Encode info about all the module children. for item in md.items.iter() { rbml_w.start_tag(tag_mod_child); - rbml_w.wr_str(def_to_string(local_def(item.id))[]); + rbml_w.wr_str(def_to_string(local_def(item.id)).index(&FullRange)); rbml_w.end_tag(); each_auxiliary_node_id(&**item, |auxiliary_node_id| { rbml_w.start_tag(tag_mod_child); rbml_w.wr_str(def_to_string(local_def( - auxiliary_node_id))[]); + auxiliary_node_id)).index(&FullRange)); rbml_w.end_tag(); true }); @@ -580,7 +580,7 @@ fn encode_info_for_mod(ecx: &EncodeContext, did, ecx.tcx.map.node_to_string(did)); rbml_w.start_tag(tag_mod_impl); - rbml_w.wr_str(def_to_string(local_def(did))[]); + rbml_w.wr_str(def_to_string(local_def(did)).index(&FullRange)); rbml_w.end_tag(); } } @@ -615,7 +615,7 @@ fn encode_visibility(rbml_w: &mut Encoder, visibility: ast::Visibility) { ast::Public => 'y', ast::Inherited => 'i', }; - rbml_w.wr_str(ch.to_string()[]); + rbml_w.wr_str(ch.to_string().index(&FullRange)); rbml_w.end_tag(); } @@ -627,7 +627,7 @@ fn encode_unboxed_closure_kind(rbml_w: &mut Encoder, ty::FnMutUnboxedClosureKind => 'm', ty::FnOnceUnboxedClosureKind => 'o', }; - rbml_w.wr_str(ch.to_string()[]); + rbml_w.wr_str(ch.to_string().index(&FullRange)); rbml_w.end_tag(); } @@ -788,7 +788,7 @@ fn encode_generics<'a, 'tcx>(rbml_w: &mut Encoder, rbml_w.end_tag(); rbml_w.wr_tagged_str(tag_region_param_def_def_id, - def_to_string(param.def_id)[]); + def_to_string(param.def_id).index(&FullRange)); rbml_w.wr_tagged_u64(tag_region_param_def_space, param.space.to_uint() as u64); @@ -864,9 +864,9 @@ fn encode_info_for_method<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>, encode_path(rbml_w, impl_path.chain(Some(elem).into_iter())); match ast_item_opt { Some(&ast::MethodImplItem(ref ast_method)) => { - encode_attributes(rbml_w, ast_method.attrs[]); + encode_attributes(rbml_w, ast_method.attrs.index(&FullRange)); let any_types = !pty.generics.types.is_empty(); - if any_types || is_default_impl || should_inline(ast_method.attrs[]) { + if any_types || is_default_impl || should_inline(ast_method.attrs.index(&FullRange)) { encode_inlined_item(ecx, rbml_w, IIImplItemRef(local_def(parent_id), ast_item_opt.unwrap())); } @@ -912,7 +912,7 @@ fn encode_info_for_associated_type(ecx: &EncodeContext, match typedef_opt { None => {} Some(typedef) => { - encode_attributes(rbml_w, typedef.attrs[]); + encode_attributes(rbml_w, typedef.attrs.index(&FullRange)); encode_type(ecx, rbml_w, ty::node_id_to_type(ecx.tcx, typedef.id)); } @@ -1046,7 +1046,7 @@ fn encode_info_for_item(ecx: &EncodeContext, encode_path(rbml_w, path); encode_visibility(rbml_w, vis); encode_stability(rbml_w, stab); - encode_attributes(rbml_w, item.attrs[]); + 
encode_attributes(rbml_w, item.attrs.index(&FullRange)); rbml_w.end_tag(); } ast::ItemConst(_, _) => { @@ -1072,8 +1072,8 @@ fn encode_info_for_item(ecx: &EncodeContext, encode_bounds_and_type(rbml_w, ecx, &lookup_item_type(tcx, def_id)); encode_name(rbml_w, item.ident.name); encode_path(rbml_w, path); - encode_attributes(rbml_w, item.attrs[]); - if tps_len > 0u || should_inline(item.attrs[]) { + encode_attributes(rbml_w, item.attrs.index(&FullRange)); + if tps_len > 0u || should_inline(item.attrs.index(&FullRange)) { encode_inlined_item(ecx, rbml_w, IIItemRef(item)); } if tps_len == 0 { @@ -1089,7 +1089,7 @@ fn encode_info_for_item(ecx: &EncodeContext, encode_info_for_mod(ecx, rbml_w, m, - item.attrs[], + item.attrs.index(&FullRange), item.id, path, item.ident, @@ -1106,7 +1106,7 @@ fn encode_info_for_item(ecx: &EncodeContext, // Encode all the items in this module. for foreign_item in fm.items.iter() { rbml_w.start_tag(tag_mod_child); - rbml_w.wr_str(def_to_string(local_def(foreign_item.id))[]); + rbml_w.wr_str(def_to_string(local_def(foreign_item.id)).index(&FullRange)); rbml_w.end_tag(); } encode_visibility(rbml_w, vis); @@ -1134,8 +1134,8 @@ fn encode_info_for_item(ecx: &EncodeContext, encode_item_variances(rbml_w, ecx, item.id); encode_bounds_and_type(rbml_w, ecx, &lookup_item_type(tcx, def_id)); encode_name(rbml_w, item.ident.name); - encode_attributes(rbml_w, item.attrs[]); - encode_repr_attrs(rbml_w, ecx, item.attrs[]); + encode_attributes(rbml_w, item.attrs.index(&FullRange)); + encode_repr_attrs(rbml_w, ecx, item.attrs.index(&FullRange)); for v in (*enum_definition).variants.iter() { encode_variant_id(rbml_w, local_def(v.node.id)); } @@ -1152,7 +1152,7 @@ fn encode_info_for_item(ecx: &EncodeContext, encode_enum_variant_info(ecx, rbml_w, item.id, - (*enum_definition).variants[], + (*enum_definition).variants.index(&FullRange), index); } ast::ItemStruct(ref struct_def, _) => { @@ -1164,7 +1164,7 @@ fn encode_info_for_item(ecx: &EncodeContext, class itself */ let idx = encode_info_for_struct(ecx, rbml_w, - fields[], + fields.index(&FullRange), index); /* Index the class*/ @@ -1178,16 +1178,16 @@ fn encode_info_for_item(ecx: &EncodeContext, encode_item_variances(rbml_w, ecx, item.id); encode_name(rbml_w, item.ident.name); - encode_attributes(rbml_w, item.attrs[]); + encode_attributes(rbml_w, item.attrs.index(&FullRange)); encode_path(rbml_w, path.clone()); encode_stability(rbml_w, stab); encode_visibility(rbml_w, vis); - encode_repr_attrs(rbml_w, ecx, item.attrs[]); + encode_repr_attrs(rbml_w, ecx, item.attrs.index(&FullRange)); /* Encode def_ids for each field and method for methods, write all the stuff get_trait_method needs to know*/ - encode_struct_fields(rbml_w, fields[], def_id); + encode_struct_fields(rbml_w, fields.index(&FullRange), def_id); encode_inlined_item(ecx, rbml_w, IIItemRef(item)); @@ -1219,7 +1219,7 @@ fn encode_info_for_item(ecx: &EncodeContext, encode_family(rbml_w, 'i'); encode_bounds_and_type(rbml_w, ecx, &lookup_item_type(tcx, def_id)); encode_name(rbml_w, item.ident.name); - encode_attributes(rbml_w, item.attrs[]); + encode_attributes(rbml_w, item.attrs.index(&FullRange)); encode_unsafety(rbml_w, unsafety); encode_polarity(rbml_w, polarity); match ty.node { @@ -1323,7 +1323,7 @@ fn encode_info_for_item(ecx: &EncodeContext, encode_generics(rbml_w, ecx, &trait_def.generics, tag_item_generics); encode_trait_ref(rbml_w, ecx, &*trait_def.trait_ref, tag_item_trait_ref); encode_name(rbml_w, item.ident.name); - encode_attributes(rbml_w, item.attrs[]); + 
encode_attributes(rbml_w, item.attrs.index(&FullRange)); encode_visibility(rbml_w, vis); encode_stability(rbml_w, stab); for &method_def_id in ty::trait_item_def_ids(tcx, def_id).iter() { @@ -1341,7 +1341,7 @@ fn encode_info_for_item(ecx: &EncodeContext, rbml_w.end_tag(); rbml_w.start_tag(tag_mod_child); - rbml_w.wr_str(def_to_string(method_def_id.def_id())[]); + rbml_w.wr_str(def_to_string(method_def_id.def_id()).index(&FullRange)); rbml_w.end_tag(); } encode_path(rbml_w, path.clone()); @@ -1433,14 +1433,14 @@ fn encode_info_for_item(ecx: &EncodeContext, }; match trait_item { &ast::RequiredMethod(ref m) => { - encode_attributes(rbml_w, m.attrs[]); + encode_attributes(rbml_w, m.attrs.index(&FullRange)); encode_trait_item(rbml_w); encode_item_sort(rbml_w, 'r'); encode_method_argument_names(rbml_w, &*m.decl); } &ast::ProvidedMethod(ref m) => { - encode_attributes(rbml_w, m.attrs[]); + encode_attributes(rbml_w, m.attrs.index(&FullRange)); encode_trait_item(rbml_w); encode_item_sort(rbml_w, 'p'); encode_inlined_item(ecx, rbml_w, IITraitItemRef(def_id, trait_item)); @@ -1449,7 +1449,7 @@ fn encode_info_for_item(ecx: &EncodeContext, &ast::TypeTraitItem(ref associated_type) => { encode_attributes(rbml_w, - associated_type.attrs[]); + associated_type.attrs.index(&FullRange)); encode_item_sort(rbml_w, 't'); } } @@ -1603,7 +1603,7 @@ fn encode_index(rbml_w: &mut Encoder, index: Vec>, mut write_fn: let mut buckets: Vec>> = range(0, 256u16).map(|_| Vec::new()).collect(); for elt in index.into_iter() { let h = hash::hash(&elt.val) as uint; - buckets[h % 256].push(elt); + (&mut buckets[h % 256]).push(elt); } rbml_w.start_tag(tag_index); @@ -1826,10 +1826,10 @@ fn encode_macro_defs(rbml_w: &mut Encoder, rbml_w.start_tag(tag_macro_def); encode_name(rbml_w, def.ident.name); - encode_attributes(rbml_w, def.attrs[]); + encode_attributes(rbml_w, def.attrs.index(&FullRange)); rbml_w.start_tag(tag_macro_def_body); - rbml_w.wr_str(pprust::tts_to_string(def.body[])[]); + rbml_w.wr_str(pprust::tts_to_string(def.body.index(&FullRange)).index(&FullRange)); rbml_w.end_tag(); rbml_w.end_tag(); @@ -1869,7 +1869,7 @@ fn encode_struct_field_attrs(rbml_w: &mut Encoder, krate: &ast::Crate) { fn visit_struct_field(&mut self, field: &ast::StructField) { self.rbml_w.start_tag(tag_struct_field); self.rbml_w.wr_tagged_u32(tag_struct_field_id, field.node.id); - encode_attributes(self.rbml_w, field.node.attrs[]); + encode_attributes(self.rbml_w, field.node.attrs.index(&FullRange)); self.rbml_w.end_tag(); } } @@ -1941,13 +1941,13 @@ fn encode_misc_info(ecx: &EncodeContext, rbml_w.start_tag(tag_misc_info_crate_items); for item in krate.module.items.iter() { rbml_w.start_tag(tag_mod_child); - rbml_w.wr_str(def_to_string(local_def(item.id))[]); + rbml_w.wr_str(def_to_string(local_def(item.id)).index(&FullRange)); rbml_w.end_tag(); each_auxiliary_node_id(&**item, |auxiliary_node_id| { rbml_w.start_tag(tag_mod_child); rbml_w.wr_str(def_to_string(local_def( - auxiliary_node_id))[]); + auxiliary_node_id)).index(&FullRange)); rbml_w.end_tag(); true }); @@ -2116,17 +2116,17 @@ fn encode_metadata_inner(wr: &mut SeekableMemWriter, let mut rbml_w = writer::Encoder::new(wr); - encode_crate_name(&mut rbml_w, ecx.link_meta.crate_name[]); + encode_crate_name(&mut rbml_w, ecx.link_meta.crate_name.index(&FullRange)); encode_crate_triple(&mut rbml_w, tcx.sess .opts .target_triple - []); + .index(&FullRange)); encode_hash(&mut rbml_w, &ecx.link_meta.crate_hash); encode_dylib_dependency_formats(&mut rbml_w, &ecx); let mut i = 
rbml_w.writer.tell().unwrap(); - encode_attributes(&mut rbml_w, krate.attrs[]); + encode_attributes(&mut rbml_w, krate.attrs.index(&FullRange)); stats.attr_bytes = rbml_w.writer.tell().unwrap() - i; i = rbml_w.writer.tell().unwrap(); diff --git a/src/librustc/metadata/loader.rs b/src/librustc/metadata/loader.rs index 7c0645b4ca204..4106d216bf9a2 100644 --- a/src/librustc/metadata/loader.rs +++ b/src/librustc/metadata/loader.rs @@ -315,14 +315,14 @@ impl<'a> Context<'a> { &Some(ref r) => format!("{} which `{}` depends on", message, r.ident) }; - self.sess.span_err(self.span, message[]); + self.sess.span_err(self.span, message.index(&FullRange)); if self.rejected_via_triple.len() > 0 { let mismatches = self.rejected_via_triple.iter(); for (i, &CrateMismatch{ ref path, ref got }) in mismatches.enumerate() { self.sess.fileline_note(self.span, format!("crate `{}`, path #{}, triple {}: {}", - self.ident, i+1, got, path.display())[]); + self.ident, i+1, got, path.display()).index(&FullRange)); } } if self.rejected_via_hash.len() > 0 { @@ -332,7 +332,7 @@ impl<'a> Context<'a> { for (i, &CrateMismatch{ ref path, .. }) in mismatches.enumerate() { self.sess.fileline_note(self.span, format!("crate `{}` path {}{}: {}", - self.ident, "#", i+1, path.display())[]); + self.ident, "#", i+1, path.display()).index(&FullRange)); } match self.root { &None => {} @@ -340,7 +340,7 @@ impl<'a> Context<'a> { for (i, path) in r.paths().iter().enumerate() { self.sess.fileline_note(self.span, format!("crate `{}` path #{}: {}", - r.ident, i+1, path.display())[]); + r.ident, i+1, path.display()).index(&FullRange)); } } } @@ -386,7 +386,7 @@ impl<'a> Context<'a> { None => return FileDoesntMatch, Some(file) => file, }; - let (hash, rlib) = if file.starts_with(rlib_prefix[]) && + let (hash, rlib) = if file.starts_with(rlib_prefix.index(&FullRange)) && file.ends_with(".rlib") { (file.slice(rlib_prefix.len(), file.len() - ".rlib".len()), true) @@ -446,26 +446,26 @@ impl<'a> Context<'a> { _ => { self.sess.span_err(self.span, format!("multiple matching crates for `{}`", - self.crate_name)[]); + self.crate_name).index(&FullRange)); self.sess.note("candidates:"); for lib in libraries.iter() { match lib.dylib { Some(ref p) => { self.sess.note(format!("path: {}", - p.display())[]); + p.display()).index(&FullRange)); } None => {} } match lib.rlib { Some(ref p) => { self.sess.note(format!("path: {}", - p.display())[]); + p.display()).index(&FullRange)); } None => {} } let data = lib.metadata.as_slice(); let name = decoder::get_crate_name(data); - note_crate_name(self.sess.diagnostic(), name[]); + note_crate_name(self.sess.diagnostic(), name.index(&FullRange)); } None } @@ -519,11 +519,11 @@ impl<'a> Context<'a> { format!("multiple {} candidates for `{}` \ found", flavor, - self.crate_name)[]); + self.crate_name).index(&FullRange)); self.sess.span_note(self.span, format!(r"candidate #1: {}", ret.as_ref().unwrap() - .display())[]); + .display()).index(&FullRange)); error = 1; ret = None; } @@ -531,7 +531,7 @@ impl<'a> Context<'a> { error += 1; self.sess.span_note(self.span, format!(r"candidate #{}: {}", error, - lib.display())[]); + lib.display()).index(&FullRange)); continue } *slot = Some(metadata); @@ -606,17 +606,17 @@ impl<'a> Context<'a> { let mut rlibs = HashSet::new(); let mut dylibs = HashSet::new(); { - let mut locs = locs.iter().map(|l| Path::new(l[])).filter(|loc| { + let mut locs = locs.iter().map(|l| Path::new(l.index(&FullRange))).filter(|loc| { if !loc.exists() { sess.err(format!("extern location for {} does not exist: 
{}", - self.crate_name, loc.display())[]); + self.crate_name, loc.display()).index(&FullRange)); return false; } let file = match loc.filename_str() { Some(file) => file, None => { sess.err(format!("extern location for {} is not a file: {}", - self.crate_name, loc.display())[]); + self.crate_name, loc.display()).index(&FullRange)); return false; } }; @@ -624,12 +624,13 @@ impl<'a> Context<'a> { return true } else { let (ref prefix, ref suffix) = dylibname; - if file.starts_with(prefix[]) && file.ends_with(suffix[]) { + if file.starts_with(prefix.index(&FullRange)) && + file.ends_with(suffix.index(&FullRange)) { return true } } sess.err(format!("extern location for {} is of an unknown type: {}", - self.crate_name, loc.display())[]); + self.crate_name, loc.display()).index(&FullRange)); false }); @@ -662,7 +663,7 @@ impl<'a> Context<'a> { } pub fn note_crate_name(diag: &SpanHandler, name: &str) { - diag.handler().note(format!("crate name: {}", name)[]); + diag.handler().note(format!("crate name: {}", name).index(&FullRange)); } impl ArchiveMetadata { diff --git a/src/librustc/metadata/tydecode.rs b/src/librustc/metadata/tydecode.rs index 07dc13ff0d48f..135cd8d8d081e 100644 --- a/src/librustc/metadata/tydecode.rs +++ b/src/librustc/metadata/tydecode.rs @@ -98,7 +98,7 @@ fn scan(st: &mut PState, mut is_last: F, op: G) -> R where } let end_pos = st.pos; st.pos += 1; - return op(st.data[start_pos..end_pos]); + return op(st.data.index(&(start_pos..end_pos))); } pub fn parse_ident(st: &mut PState, last: char) -> ast::Ident { @@ -251,7 +251,7 @@ fn parse_trait_store_(st: &mut PState, conv: &mut F) -> ty::TraitStore where '&' => ty::RegionTraitStore(parse_region_(st, conv), parse_mutability(st)), c => { st.tcx.sess.bug(format!("parse_trait_store(): bad input '{}'", - c)[]) + c).index(&FullRange)) } } } @@ -318,7 +318,7 @@ fn parse_bound_region_(st: &mut PState, conv: &mut F) -> ty::BoundRegion wher } '[' => { let def = parse_def_(st, RegionParameter, conv); - let ident = token::str_to_ident(parse_str(st, ']')[]); + let ident = token::str_to_ident(parse_str(st, ']').index(&FullRange)); ty::BrNamed(def, ident.name) } 'f' => { @@ -357,7 +357,7 @@ fn parse_region_(st: &mut PState, conv: &mut F) -> ty::Region where assert_eq!(next(st), '|'); let index = parse_u32(st); assert_eq!(next(st), '|'); - let nm = token::str_to_ident(parse_str(st, ']')[]); + let nm = token::str_to_ident(parse_str(st, ']').index(&FullRange)); ty::ReEarlyBound(node_id, space, index, nm.name) } 'f' => { @@ -481,7 +481,7 @@ fn parse_ty_<'a, 'tcx, F>(st: &mut PState<'a, 'tcx>, conv: &mut F) -> Ty<'tcx> w assert_eq!(next(st), '|'); let space = parse_param_space(st); assert_eq!(next(st), '|'); - let name = token::intern(parse_str(st, ']')[]); + let name = token::intern(parse_str(st, ']').index(&FullRange)); return ty::mk_param(tcx, space, index, name); } '~' => return ty::mk_uniq(tcx, parse_ty_(st, conv)), @@ -637,7 +637,7 @@ fn parse_abi_set(st: &mut PState) -> abi::Abi { assert_eq!(next(st), '['); scan(st, |c| c == ']', |bytes| { let abi_str = str::from_utf8(bytes).unwrap(); - abi::lookup(abi_str[]).expect(abi_str) + abi::lookup(abi_str.index(&FullRange)).expect(abi_str) }) } @@ -733,8 +733,8 @@ pub fn parse_def_id(buf: &[u8]) -> ast::DefId { panic!(); } - let crate_part = buf[0u..colon_idx]; - let def_part = buf[colon_idx + 1u..len]; + let crate_part = buf.index(&(0u..colon_idx)); + let def_part = buf.index(&((colon_idx + 1u)..len)); let crate_num = match str::from_utf8(crate_part).ok().and_then(|s| s.parse::()) { Some(cn) => cn as 
ast::CrateNum, diff --git a/src/librustc/middle/astconv_util.rs b/src/librustc/middle/astconv_util.rs index b2ad77467276f..955f522b8041b 100644 --- a/src/librustc/middle/astconv_util.rs +++ b/src/librustc/middle/astconv_util.rs @@ -48,7 +48,7 @@ pub fn ast_ty_to_prim_ty<'tcx>(tcx: &ty::ctxt<'tcx>, ast_ty: &ast::Ty) None => { tcx.sess.span_bug(ast_ty.span, format!("unbound path {}", - path.repr(tcx))[]) + path.repr(tcx)).index(&FullRange)) } Some(&d) => d }; diff --git a/src/librustc/middle/astencode.rs b/src/librustc/middle/astencode.rs index 93a19a01f668f..7f11317a4b96a 100644 --- a/src/librustc/middle/astencode.rs +++ b/src/librustc/middle/astencode.rs @@ -132,7 +132,7 @@ pub fn decode_inlined_item<'tcx>(cdata: &cstore::crate_metadata, // Do an Option dance to use the path after it is moved below. let s = ast_map::path_to_string(ast_map::Values(path.iter())); path_as_str = Some(s); - path_as_str.as_ref().map(|x| x[]) + path_as_str.as_ref().map(|x| x.index(&FullRange)) }); let mut ast_dsr = reader::Decoder::new(ast_doc); let from_id_range = Decodable::decode(&mut ast_dsr).unwrap(); @@ -1900,7 +1900,7 @@ fn decode_side_tables(dcx: &DecodeContext, None => { dcx.tcx.sess.bug( format!("unknown tag found in side tables: {:x}", - tag)[]); + tag).index(&FullRange)); } Some(value) => { let val_doc = entry_doc.get(c::tag_table_val as uint); @@ -1985,7 +1985,7 @@ fn decode_side_tables(dcx: &DecodeContext, _ => { dcx.tcx.sess.bug( format!("unknown tag found in side tables: {:x}", - tag)[]); + tag).index(&FullRange)); } } } diff --git a/src/librustc/middle/cfg/construct.rs b/src/librustc/middle/cfg/construct.rs index 3c672d0fdb6fa..2f0e3aeee9b52 100644 --- a/src/librustc/middle/cfg/construct.rs +++ b/src/librustc/middle/cfg/construct.rs @@ -362,7 +362,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { let mut cond_exit = discr_exit; for arm in arms.iter() { cond_exit = self.add_dummy_node(&[cond_exit]); // 2 - let pats_exit = self.pats_any(arm.pats[], + let pats_exit = self.pats_any(arm.pats.index(&FullRange), cond_exit); // 3 let guard_exit = self.opt_expr(&arm.guard, pats_exit); // 4 @@ -616,14 +616,14 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { self.tcx.sess.span_bug( expr.span, format!("no loop scope for id {}", - loop_id)[]); + loop_id).index(&FullRange)); } r => { self.tcx.sess.span_bug( expr.span, format!("bad entry `{}` in def_map for label", - r)[]); + r).index(&FullRange)); } } } diff --git a/src/librustc/middle/cfg/graphviz.rs b/src/librustc/middle/cfg/graphviz.rs index 8a2ecbca20d55..8b9a0d89b380c 100644 --- a/src/librustc/middle/cfg/graphviz.rs +++ b/src/librustc/middle/cfg/graphviz.rs @@ -52,7 +52,7 @@ fn replace_newline_with_backslash_l(s: String) -> String { } impl<'a, 'ast> dot::Labeller<'a, Node<'a>, Edge<'a>> for LabelledCFG<'a, 'ast> { - fn graph_id(&'a self) -> dot::Id<'a> { dot::Id::new(self.name[]).unwrap() } + fn graph_id(&'a self) -> dot::Id<'a> { dot::Id::new(self.name.index(&FullRange)).unwrap() } fn node_id(&'a self, &(i,_): &Node<'a>) -> dot::Id<'a> { dot::Id::new(format!("N{}", i.node_id())).unwrap() @@ -85,7 +85,9 @@ impl<'a, 'ast> dot::Labeller<'a, Node<'a>, Edge<'a>> for LabelledCFG<'a, 'ast> { let s = self.ast_map.node_to_string(node_id); // left-aligns the lines let s = replace_newline_with_backslash_l(s); - label.push_str(format!("exiting scope_{} {}", i, s[])[]); + label.push_str(format!("exiting scope_{} {}", + i, + s.index(&FullRange)).index(&FullRange)); } dot::LabelText::EscStr(label.into_cow()) } diff --git a/src/librustc/middle/check_loop.rs 
b/src/librustc/middle/check_loop.rs index e68a9fb50efd0..5024e5c4f77de 100644 --- a/src/librustc/middle/check_loop.rs +++ b/src/librustc/middle/check_loop.rs @@ -74,11 +74,11 @@ impl<'a> CheckLoopVisitor<'a> { Loop => {} Closure => { self.sess.span_err(span, - format!("`{}` inside of a closure", name)[]); + format!("`{}` inside of a closure", name).index(&FullRange)); } Normal => { self.sess.span_err(span, - format!("`{}` outside of loop", name)[]); + format!("`{}` outside of loop", name).index(&FullRange)); } } } diff --git a/src/librustc/middle/check_match.rs b/src/librustc/middle/check_match.rs index f2b9ecb5ec432..5712ce3792ea6 100644 --- a/src/librustc/middle/check_match.rs +++ b/src/librustc/middle/check_match.rs @@ -47,7 +47,7 @@ struct Matrix<'a>(Vec>); /// Pretty-printer for matrices of patterns, example: /// ++++++++++++++++++++++++++ /// + _ + [] + /// ++++++++++++++++++++++++++ /// + true + [First] + /// ++++++++++++++++++++++++++ @@ -161,7 +161,7 @@ fn check_expr(cx: &mut MatchCheckCtxt, ex: &ast::Expr) { // First, check legality of move bindings. check_legality_of_move_bindings(cx, arm.guard.is_some(), - arm.pats[]); + arm.pats.index(&FullRange)); // Second, if there is a guard on each arm, make sure it isn't // assigning or borrowing anything mutably. @@ -198,7 +198,7 @@ fn check_expr(cx: &mut MatchCheckCtxt, ex: &ast::Expr) { } // Fourth, check for unreachable arms. - check_arms(cx, inlined_arms[], source); + check_arms(cx, inlined_arms.index(&FullRange), source); // Finally, check if the whole match expression is exhaustive. // Check for empty enum, because is_useful only works on inhabited types. @@ -230,7 +230,7 @@ fn check_expr(cx: &mut MatchCheckCtxt, ex: &ast::Expr) { pat.span, format!("refutable pattern in `for` loop binding: \ `{}` not covered", - pat_to_string(uncovered_pat))[]); + pat_to_string(uncovered_pat)).index(&FullRange)); }); // Check legality of move bindings. @@ -303,7 +303,7 @@ fn check_arms(cx: &MatchCheckCtxt, for pat in pats.iter() { let v = vec![&**pat]; - match is_useful(cx, &seen, v[], LeaveOutWitness) { + match is_useful(cx, &seen, v.index(&FullRange), LeaveOutWitness) { NotUseful => { match source { ast::MatchSource::IfLetDesugar { ..
} => { @@ -355,7 +355,7 @@ fn raw_pat<'a>(p: &'a Pat) -> &'a Pat { fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, matrix: &Matrix) { match is_useful(cx, matrix, &[DUMMY_WILD_PAT], ConstructWitness) { UsefulWithWitness(pats) => { - let witness = match pats[] { + let witness = match pats.index(&FullRange) { [ref witness] => &**witness, [] => DUMMY_WILD_PAT, _ => unreachable!() @@ -609,7 +609,7 @@ fn is_useful(cx: &MatchCheckCtxt, UsefulWithWitness(pats) => UsefulWithWitness({ let arity = constructor_arity(cx, &c, left_ty); let mut result = { - let pat_slice = pats[]; + let pat_slice = pats.index(&FullRange); let subpats: Vec<_> = range(0, arity).map(|i| { pat_slice.get(i).map_or(DUMMY_WILD_PAT, |p| &**p) }).collect(); @@ -656,10 +656,10 @@ fn is_useful_specialized(cx: &MatchCheckCtxt, &Matrix(ref m): &Matrix, witness: WitnessPreference) -> Usefulness { let arity = constructor_arity(cx, &ctor, lty); let matrix = Matrix(m.iter().filter_map(|r| { - specialize(cx, r[], &ctor, 0u, arity) + specialize(cx, r.index(&FullRange), &ctor, 0u, arity) }).collect()); match specialize(cx, v, &ctor, 0u, arity) { - Some(v) => is_useful(cx, &matrix, v[], witness), + Some(v) => is_useful(cx, &matrix, v.index(&FullRange), witness), None => NotUseful } } @@ -729,7 +729,7 @@ fn pat_constructors(cx: &MatchCheckCtxt, p: &Pat, /// This computes the arity of a constructor. The arity of a constructor /// is how many subpattern patterns of that constructor should be expanded to. /// -/// For instance, a tuple pattern (_, 42u, Some([])) has the arity of 3. +/// For instance, a tuple pattern (_, 42u, Some(.index(&FullRange))) has the arity of 3. /// A struct pattern's arity is the number of fields it contains, etc. pub fn constructor_arity(cx: &MatchCheckCtxt, ctor: &Constructor, ty: Ty) -> uint { match ty.sty { @@ -926,8 +926,8 @@ pub fn specialize<'a>(cx: &MatchCheckCtxt, r: &[&'a Pat], } }; head.map(|mut head| { - head.push_all(r[..col]); - head.push_all(r[col + 1..]); + head.push_all(r.index(&(0..col))); + head.push_all(r.index(&((col + 1)..))); head }) } @@ -1044,7 +1044,7 @@ fn check_legality_of_move_bindings(cx: &MatchCheckCtxt, format!("binding pattern {} is not an \ identifier: {}", p.id, - p.node)[]); + p.node).index(&FullRange)); } } } diff --git a/src/librustc/middle/check_static.rs b/src/librustc/middle/check_static.rs index df51cb7e6bc4b..994a2b0dc8abc 100644 --- a/src/librustc/middle/check_static.rs +++ b/src/librustc/middle/check_static.rs @@ -112,7 +112,7 @@ impl<'a, 'tcx> CheckStaticVisitor<'a, 'tcx> { }; self.tcx.sess.span_err(e.span, format!("mutable statics are not allowed \ - to have {}", suffix)[]); + to have {}", suffix).index(&FullRange)); } fn check_static_type(&self, e: &ast::Expr) { @@ -170,7 +170,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for CheckStaticVisitor<'a, 'tcx> { ty::ty_enum(did, _) if ty::has_dtor(self.tcx, did) => { self.tcx.sess.span_err(e.span, format!("{} are not allowed to have \ - destructors", self.msg())[]) + destructors", self.msg()).index(&FullRange)) } _ => {} } @@ -234,7 +234,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for CheckStaticVisitor<'a, 'tcx> { let msg = "constants cannot refer to other statics, \ insert an intermediate constant \ instead"; - self.tcx.sess.span_err(e.span, msg[]); + self.tcx.sess.span_err(e.span, msg.index(&FullRange)); } _ => {} } diff --git a/src/librustc/middle/check_static_recursion.rs b/src/librustc/middle/check_static_recursion.rs index c36b4aa7f231e..75851f0a85333 100644 --- a/src/librustc/middle/check_static_recursion.rs +++ 
b/src/librustc/middle/check_static_recursion.rs @@ -105,7 +105,7 @@ impl<'a, 'ast, 'v> Visitor<'v> for CheckItemRecursionVisitor<'a, 'ast> { _ => { self.sess.span_err(e.span, format!("expected item, found {}", - self.ast_map.node_to_string(def_id.node))[]); + self.ast_map.node_to_string(def_id.node)).index(&FullRange)); return; }, } diff --git a/src/librustc/middle/const_eval.rs b/src/librustc/middle/const_eval.rs index 32482fce4daa8..213b4ac4736b6 100644 --- a/src/librustc/middle/const_eval.rs +++ b/src/librustc/middle/const_eval.rs @@ -48,7 +48,7 @@ use std::rc::Rc; // target uses". This _includes_ integer-constants, plus the following // constructors: // -// fixed-size vectors and strings: [] and ""/_ +// fixed-size vectors and strings: .index(&FullRange) and ""/_ // vector and string slices: &[] and &"" // tuples: (,) // enums: foo(...) @@ -117,7 +117,7 @@ fn lookup_variant_by_id<'a>(tcx: &'a ty::ctxt, None => None, Some(ast_map::NodeItem(it)) => match it.node { ast::ItemEnum(ast::EnumDef { ref variants }, _) => { - variant_expr(variants[], variant_def.node) + variant_expr(variants.index(&FullRange), variant_def.node) } _ => None }, @@ -138,7 +138,7 @@ fn lookup_variant_by_id<'a>(tcx: &'a ty::ctxt, // NOTE this doesn't do the right thing, it compares inlined // NodeId's to the original variant_def's NodeId, but they // come from different crates, so they will likely never match. - variant_expr(variants[], variant_def.node).map(|e| e.id) + variant_expr(variants.index(&FullRange), variant_def.node).map(|e| e.id) } _ => None }, @@ -364,7 +364,7 @@ pub fn const_expr_to_pat(tcx: &ty::ctxt, expr: &Expr) -> P { pub fn eval_const_expr(tcx: &ty::ctxt, e: &Expr) -> const_val { match eval_const_expr_partial(tcx, e) { Ok(r) => r, - Err(s) => tcx.sess.span_fatal(e.span, s[]) + Err(s) => tcx.sess.span_fatal(e.span, s.index(&FullRange)) } } diff --git a/src/librustc/middle/dataflow.rs b/src/librustc/middle/dataflow.rs index e78b8047f6958..49ae1bce193a0 100644 --- a/src/librustc/middle/dataflow.rs +++ b/src/librustc/middle/dataflow.rs @@ -312,7 +312,7 @@ impl<'a, 'tcx, O:DataFlowOperator> DataFlowContext<'a, 'tcx, O> { let mut t = on_entry.to_vec(); self.apply_gen_kill(cfgidx, t.as_mut_slice()); temp_bits = t; - temp_bits[] + temp_bits.index(&FullRange) } }; debug!("{} each_bit_for_node({}, cfgidx={}) bits={}", @@ -421,7 +421,7 @@ impl<'a, 'tcx, O:DataFlowOperator> DataFlowContext<'a, 'tcx, O> { let bits = self.kills.slice_mut(start, end); debug!("{} add_kills_from_flow_exits flow_exit={} bits={} [before]", self.analysis_name, flow_exit, mut_bits_to_string(bits)); - bits.clone_from_slice(orig_kills[]); + bits.clone_from_slice(orig_kills.index(&FullRange)); debug!("{} add_kills_from_flow_exits flow_exit={} bits={} [after]", self.analysis_name, flow_exit, mut_bits_to_string(bits)); } @@ -554,7 +554,7 @@ fn bits_to_string(words: &[uint]) -> String { let mut v = word; for _ in range(0u, uint::BYTES) { result.push(sep); - result.push_str(format!("{:02x}", v & 0xFF)[]); + result.push_str(format!("{:02x}", v & 0xFF).index(&FullRange)); v >>= 8; sep = '-'; } diff --git a/src/librustc/middle/dependency_format.rs b/src/librustc/middle/dependency_format.rs index 6b56ece28bdb2..8182dec2db064 100644 --- a/src/librustc/middle/dependency_format.rs +++ b/src/librustc/middle/dependency_format.rs @@ -118,7 +118,7 @@ fn calculate_type(sess: &session::Session, let src = sess.cstore.get_used_crate_source(cnum).unwrap(); if src.rlib.is_some() { return } sess.err(format!("dependency `{}` not found in rlib format", - 
data.name)[]); + data.name).index(&FullRange)); }); return Vec::new(); } @@ -197,7 +197,7 @@ fn calculate_type(sess: &session::Session, match kind { cstore::RequireStatic => "rlib", cstore::RequireDynamic => "dylib", - })[]); + }).index(&FullRange)); } } } @@ -222,7 +222,7 @@ fn add_library(sess: &session::Session, let data = sess.cstore.get_crate_data(cnum); sess.err(format!("cannot satisfy dependencies so `{}` only \ shows up once", - data.name)[]); + data.name).index(&FullRange)); sess.help("having upstream crates all available in one format \ will likely make this go away"); } diff --git a/src/librustc/middle/expr_use_visitor.rs b/src/librustc/middle/expr_use_visitor.rs index df2a4e4c2532a..1f89121488ea1 100644 --- a/src/librustc/middle/expr_use_visitor.rs +++ b/src/librustc/middle/expr_use_visitor.rs @@ -441,28 +441,12 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,'tcx,TYPER> { } ast::ExprIndex(ref lhs, ref rhs) => { // lhs[rhs] - match rhs.node { - ast::ExprRange(ref start, ref end) => { - // Hacked slicing syntax (KILLME). - let args = match (start, end) { - (&Some(ref e1), &Some(ref e2)) => vec![&**e1, &**e2], - (&Some(ref e), &None) => vec![&**e], - (&None, &Some(ref e)) => vec![&**e], - (&None, &None) => Vec::new() - }; - let overloaded = - self.walk_overloaded_operator(expr, &**lhs, args, PassArgs::ByRef); - assert!(overloaded); - } - _ => { - if !self.walk_overloaded_operator(expr, - &**lhs, - vec![&**rhs], - PassArgs::ByRef) { - self.select_from_expr(&**lhs); - self.consume_expr(&**rhs); - } - } + if !self.walk_overloaded_operator(expr, + &**lhs, + vec![&**rhs], + PassArgs::ByRef) { + self.select_from_expr(&**lhs); + self.consume_expr(&**rhs); } } @@ -869,7 +853,7 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,'tcx,TYPER> { ty::ty_rptr(r, ref m) => (m.mutbl, r), _ => self.tcx().sess.span_bug(expr.span, format!("bad overloaded deref type {}", - method_ty.repr(self.tcx()))[]) + method_ty.repr(self.tcx())).index(&FullRange)) }; let bk = ty::BorrowKind::from_mutbl(m); self.delegate.borrow(expr.id, expr.span, cmt, @@ -1174,7 +1158,7 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,'tcx,TYPER> { let msg = format!("Pattern has unexpected type: {} and type {}", def, cmt_pat.ty.repr(tcx)); - tcx.sess.span_bug(pat.span, msg[]) + tcx.sess.span_bug(pat.span, msg.as_slice()) } } @@ -1191,7 +1175,7 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,'tcx,TYPER> { let msg = format!("Pattern has unexpected def: {} and type {}", def, cmt_pat.ty.repr(tcx)); - tcx.sess.span_bug(pat.span, msg[]) + tcx.sess.span_bug(pat.span, msg.index(&FullRange)) } } } diff --git a/src/librustc/middle/infer/combine.rs b/src/librustc/middle/infer/combine.rs index dd711fcbf022e..9122359bb9ef3 100644 --- a/src/librustc/middle/infer/combine.rs +++ b/src/librustc/middle/infer/combine.rs @@ -142,7 +142,7 @@ pub trait Combine<'tcx> : Sized { for _ in a_regions.iter() { invariance.push(ty::Invariant); } - invariance[] + invariance.index(&FullRange) } }; @@ -480,7 +480,7 @@ pub fn super_tys<'tcx, C: Combine<'tcx>>(this: &C, format!("{}: bot and var types should have been handled ({},{})", this.tag(), a.repr(this.infcx().tcx), - b.repr(this.infcx().tcx))[]); + b.repr(this.infcx().tcx)).index(&FullRange)); } (&ty::ty_err, _) | (_, &ty::ty_err) => { @@ -856,7 +856,7 @@ impl<'cx, 'tcx> ty_fold::TypeFolder<'tcx> for Generalizer<'cx, 'tcx> { self.tcx().sess.span_bug( self.span, format!("Encountered early bound region when generalizing: {}", - r.repr(self.tcx()))[]); + 
r.repr(self.tcx())).index(&FullRange)); } // Always make a fresh region variable for skolemized regions; diff --git a/src/librustc/middle/infer/error_reporting.rs b/src/librustc/middle/infer/error_reporting.rs index e58ff53b00cb9..1566a7938d3dd 100644 --- a/src/librustc/middle/infer/error_reporting.rs +++ b/src/librustc/middle/infer/error_reporting.rs @@ -200,9 +200,9 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { ref trace_origins, ref same_regions) => { if !same_regions.is_empty() { - self.report_processed_errors(var_origins[], - trace_origins[], - same_regions[]); + self.report_processed_errors(var_origins.index(&FullRange), + trace_origins.index(&FullRange), + same_regions.index(&FullRange)); } } } @@ -376,7 +376,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { format!("{}: {} ({})", message_root_str, expected_found_str, - ty::type_err_to_str(self.tcx, terr))[]); + ty::type_err_to_str(self.tcx, terr)).index(&FullRange)); match trace.origin { infer::MatchExpressionArm(_, arm_span) => @@ -445,25 +445,25 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { // Does the required lifetime have a nice name we can print? self.tcx.sess.span_err( origin.span(), - format!("{} may not live long enough", labeled_user_string)[]); + format!("{} may not live long enough", labeled_user_string).index(&FullRange)); self.tcx.sess.span_help( origin.span(), format!( "consider adding an explicit lifetime bound `{}: {}`...", bound_kind.user_string(self.tcx), - sub.user_string(self.tcx))[]); + sub.user_string(self.tcx)).index(&FullRange)); } ty::ReStatic => { // Does the required lifetime have a nice name we can print? self.tcx.sess.span_err( origin.span(), - format!("{} may not live long enough", labeled_user_string)[]); + format!("{} may not live long enough", labeled_user_string).index(&FullRange)); self.tcx.sess.span_help( origin.span(), format!( "consider adding an explicit lifetime bound `{}: 'static`...", - bound_kind.user_string(self.tcx))[]); + bound_kind.user_string(self.tcx)).index(&FullRange)); } _ => { @@ -472,15 +472,15 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { origin.span(), format!( "{} may not live long enough", - labeled_user_string)[]); + labeled_user_string).index(&FullRange)); self.tcx.sess.span_help( origin.span(), format!( "consider adding an explicit lifetime bound for `{}`", - bound_kind.user_string(self.tcx))[]); + bound_kind.user_string(self.tcx)).index(&FullRange)); note_and_explain_region( self.tcx, - format!("{} must be valid for ", labeled_user_string)[], + format!("{} must be valid for ", labeled_user_string).index(&FullRange), sub, "..."); } @@ -522,7 +522,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { ty::local_var_name_str(self.tcx, upvar_id.var_id) .get() - .to_string())[]); + .to_string()).index(&FullRange)); note_and_explain_region( self.tcx, "...the borrowed pointer is valid for ", @@ -534,7 +534,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { ty::local_var_name_str(self.tcx, upvar_id.var_id) .get() - .to_string())[], + .to_string()).index(&FullRange), sup, ""); } @@ -580,7 +580,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { outlive the enclosing closure", ty::local_var_name_str(self.tcx, id).get() - .to_string())[]); + .to_string()).index(&FullRange)); note_and_explain_region( self.tcx, "captured variable is valid for ", @@ -622,7 +622,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { span, format!("the type `{}` does not 
fulfill the \ required lifetime", - self.ty_to_string(ty))[]); + self.ty_to_string(ty)).index(&FullRange)); note_and_explain_region(self.tcx, "type must outlive ", sub, @@ -648,7 +648,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { span, format!("the type `{}` (provided as the value of \ a type parameter) is not valid at this point", - self.ty_to_string(ty))[]); + self.ty_to_string(ty)).index(&FullRange)); note_and_explain_region(self.tcx, "type must outlive ", sub, @@ -714,7 +714,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { span, format!("type of expression contains references \ that are not valid during the expression: `{}`", - self.ty_to_string(t))[]); + self.ty_to_string(t)).index(&FullRange)); note_and_explain_region( self.tcx, "type is only valid for ", @@ -736,7 +736,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { span, format!("in type `{}`, reference has a longer lifetime \ than the data it references", - self.ty_to_string(ty))[]); + self.ty_to_string(ty)).index(&FullRange)); note_and_explain_region( self.tcx, "the pointer is valid for ", @@ -861,7 +861,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { let (fn_decl, generics, unsafety, ident, expl_self, span) = node_inner.expect("expect item fn"); let taken = lifetimes_in_scope(self.tcx, scope_id); - let life_giver = LifeGiver::with_taken(taken[]); + let life_giver = LifeGiver::with_taken(taken.index(&FullRange)); let rebuilder = Rebuilder::new(self.tcx, fn_decl, expl_self, generics, same_regions, &life_giver); let (fn_decl, expl_self, generics) = rebuilder.rebuild(); @@ -937,7 +937,7 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> { } expl_self_opt = self.rebuild_expl_self(expl_self_opt, lifetime, &anon_nums, ®ion_names); - inputs = self.rebuild_args_ty(inputs[], lifetime, + inputs = self.rebuild_args_ty(inputs.index(&FullRange), lifetime, &anon_nums, ®ion_names); output = self.rebuild_output(&output, lifetime, &anon_nums, ®ion_names); ty_params = self.rebuild_ty_params(ty_params, lifetime, @@ -972,7 +972,7 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> { names.push(lt_name); } names.sort(); - let name = token::str_to_ident(names[0][]).name; + let name = token::str_to_ident(names[0].index(&FullRange)).name; return (name_to_dummy_lifetime(name), Kept); } return (self.life_giver.give_lifetime(), Fresh); @@ -1222,7 +1222,7 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> { .sess .fatal(format!( "unbound path {}", - pprust::path_to_string(path))[]) + pprust::path_to_string(path)).index(&FullRange)) } Some(&d) => d }; @@ -1420,7 +1420,7 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> { opt_explicit_self, generics); let msg = format!("consider using an explicit lifetime \ parameter as shown: {}", suggested_fn); - self.tcx.sess.span_help(span, msg[]); + self.tcx.sess.span_help(span, msg.index(&FullRange)); } fn report_inference_failure(&self, @@ -1463,7 +1463,7 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> { var_origin.span(), format!("cannot infer an appropriate lifetime{} \ due to conflicting requirements", - var_description)[]); + var_description).index(&FullRange)); } fn note_region_origin(&self, origin: &SubregionOrigin<'tcx>) { @@ -1511,7 +1511,7 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> { self.tcx.sess.span_note( trace.origin.span(), format!("...so that {} ({})", - desc, values_str)[]); + desc, values_str).index(&FullRange)); } None => { // Really should avoid printing this error at @@ -1520,7 +1520,7 @@ 
impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> { // doing right now. - nmatsakis self.tcx.sess.span_note( trace.origin.span(), - format!("...so that {}", desc)[]); + format!("...so that {}", desc).index(&FullRange)); } } } @@ -1537,7 +1537,7 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> { "...so that closure can access `{}`", ty::local_var_name_str(self.tcx, upvar_id.var_id) .get() - .to_string())[]) + .to_string()).index(&FullRange)) } infer::InfStackClosure(span) => { self.tcx.sess.span_note( @@ -1562,7 +1562,7 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> { does not outlive the enclosing closure", ty::local_var_name_str( self.tcx, - id).get().to_string())[]); + id).get().to_string()).index(&FullRange)); } infer::IndexSlice(span) => { self.tcx.sess.span_note( @@ -1606,7 +1606,7 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> { span, format!("...so type `{}` of expression is valid during the \ expression", - self.ty_to_string(t))[]); + self.ty_to_string(t)).index(&FullRange)); } infer::BindingTypeIsNotValidAtDecl(span) => { self.tcx.sess.span_note( @@ -1618,14 +1618,14 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> { span, format!("...so that the reference type `{}` \ does not outlive the data it points at", - self.ty_to_string(ty))[]); + self.ty_to_string(ty)).index(&FullRange)); } infer::RelateParamBound(span, t) => { self.tcx.sess.span_note( span, format!("...so that the type `{}` \ will meet the declared lifetime bounds", - self.ty_to_string(t))[]); + self.ty_to_string(t)).index(&FullRange)); } infer::RelateDefaultParamBound(span, t) => { self.tcx.sess.span_note( @@ -1633,13 +1633,13 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> { format!("...so that type parameter \ instantiated with `{}`, \ will meet its declared lifetime bounds", - self.ty_to_string(t))[]); + self.ty_to_string(t)).index(&FullRange)); } infer::RelateRegionParamBound(span) => { self.tcx.sess.span_note( span, format!("...so that the declared lifetime parameter bounds \ - are satisfied")[]); + are satisfied").index(&FullRange)); } } } @@ -1691,7 +1691,7 @@ fn lifetimes_in_scope(tcx: &ty::ctxt, Some(node) => match node { ast_map::NodeItem(item) => match item.node { ast::ItemFn(_, _, _, ref gen, _) => { - taken.push_all(gen.lifetimes[]); + taken.push_all(gen.lifetimes.index(&FullRange)); None }, _ => None @@ -1699,7 +1699,7 @@ fn lifetimes_in_scope(tcx: &ty::ctxt, ast_map::NodeImplItem(ii) => { match *ii { ast::MethodImplItem(ref m) => { - taken.push_all(m.pe_generics().lifetimes[]); + taken.push_all(m.pe_generics().lifetimes.index(&FullRange)); Some(m.id) } ast::TypeImplItem(_) => None, @@ -1758,10 +1758,10 @@ impl LifeGiver { let mut lifetime; loop { let mut s = String::from_str("'"); - s.push_str(num_to_string(self.counter.get())[]); + s.push_str(num_to_string(self.counter.get()).index(&FullRange)); if !self.taken.contains(&s) { lifetime = name_to_dummy_lifetime( - token::str_to_ident(s[]).name); + token::str_to_ident(s.index(&FullRange)).name); self.generated.borrow_mut().push(lifetime); break; } diff --git a/src/librustc/middle/infer/higher_ranked/mod.rs b/src/librustc/middle/infer/higher_ranked/mod.rs index bf0a9cfbea66d..cd4705d4fb2db 100644 --- a/src/librustc/middle/infer/higher_ranked/mod.rs +++ b/src/librustc/middle/infer/higher_ranked/mod.rs @@ -189,7 +189,7 @@ impl<'tcx,C> HigherRankedRelations<'tcx> for C span, format!("region {} is not associated with \ any bound region 
from A!", - r0)[]) + r0).index(&FullRange)) } } @@ -322,7 +322,7 @@ impl<'tcx,C> HigherRankedRelations<'tcx> for C } infcx.tcx.sess.span_bug( span, - format!("could not find original bound region for {}", r)[]); + format!("could not find original bound region for {}", r).index(&FullRange)); } fn fresh_bound_variable(infcx: &InferCtxt, debruijn: ty::DebruijnIndex) -> ty::Region { @@ -339,7 +339,7 @@ fn var_ids<'tcx, T: Combine<'tcx>>(combiner: &T, r => { combiner.infcx().tcx.sess.span_bug( combiner.trace().origin.span(), - format!("found non-region-vid: {}", r)[]); + format!("found non-region-vid: {}", r).index(&FullRange)); } }).collect() } diff --git a/src/librustc/middle/infer/mod.rs b/src/librustc/middle/infer/mod.rs index c2db81d311483..a3d94e8b844b5 100644 --- a/src/librustc/middle/infer/mod.rs +++ b/src/librustc/middle/infer/mod.rs @@ -1002,7 +1002,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { self.tcx.sess.span_err(sp, format!("{}{}", mk_msg(resolved_expected.map(|t| self.ty_to_string(t)), actual_ty), - error_str)[]); + error_str).index(&FullRange)); for err in err.iter() { ty::note_and_explain_type_err(self.tcx, *err) diff --git a/src/librustc/middle/infer/region_inference/mod.rs b/src/librustc/middle/infer/region_inference/mod.rs index d30a6ff1cd9d5..ea5d8456117ba 100644 --- a/src/librustc/middle/infer/region_inference/mod.rs +++ b/src/librustc/middle/infer/region_inference/mod.rs @@ -475,7 +475,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { origin.span(), format!("cannot relate bound region: {} <= {}", sub.repr(self.tcx), - sup.repr(self.tcx))[]); + sup.repr(self.tcx)).index(&FullRange)); } (_, ReStatic) => { // all regions are subregions of static, so we can ignore this @@ -736,7 +736,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { self.tcx.sess.bug( format!("cannot relate bound region: LUB({}, {})", a.repr(self.tcx), - b.repr(self.tcx))[]); + b.repr(self.tcx)).index(&FullRange)); } (ReStatic, _) | (_, ReStatic) => { @@ -753,7 +753,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { format!("lub_concrete_regions invoked with \ non-concrete regions: {}, {}", a, - b)[]); + b).index(&FullRange)); } (ReFree(ref fr), ReScope(s_id)) | @@ -836,7 +836,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { self.tcx.sess.bug( format!("cannot relate bound region: GLB({}, {})", a.repr(self.tcx), - b.repr(self.tcx))[]); + b.repr(self.tcx)).index(&FullRange)); } (ReStatic, r) | (r, ReStatic) => { @@ -856,7 +856,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { format!("glb_concrete_regions invoked with \ non-concrete regions: {}, {}", a, - b)[]); + b).index(&FullRange)); } (ReFree(ref fr), ReScope(s_id)) | @@ -977,7 +977,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { self.expansion(var_data.as_mut_slice()); self.contraction(var_data.as_mut_slice()); let values = - self.extract_values_and_collect_conflicts(var_data[], + self.extract_values_and_collect_conflicts(var_data.index(&FullRange), errors); self.collect_concrete_region_errors(&values, errors); values @@ -1415,7 +1415,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { for var {}, lower_bounds={}, upper_bounds={}", node_idx, lower_bounds.repr(self.tcx), - upper_bounds.repr(self.tcx))[]); + upper_bounds.repr(self.tcx)).index(&FullRange)); } fn collect_error_for_contracting_node( @@ -1459,7 +1459,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { format!("collect_error_for_contracting_node() could not find error \ for var {}, upper_bounds={}", node_idx, - upper_bounds.repr(self.tcx))[]); + upper_bounds.repr(self.tcx)).index(&FullRange)); } fn 
collect_concrete_regions(&self, diff --git a/src/librustc/middle/infer/resolve.rs b/src/librustc/middle/infer/resolve.rs index 3ed866d4aba8f..9035d72e9a2fb 100644 --- a/src/librustc/middle/infer/resolve.rs +++ b/src/librustc/middle/infer/resolve.rs @@ -96,7 +96,7 @@ impl<'a, 'tcx> ty_fold::TypeFolder<'tcx> for FullTypeResolver<'a, 'tcx> { ty::ty_infer(_) => { self.infcx.tcx.sess.bug( format!("Unexpected type in full type resolver: {}", - t.repr(self.infcx.tcx))[]); + t.repr(self.infcx.tcx)).index(&FullRange)); } _ => { ty_fold::super_fold_ty(self, t) diff --git a/src/librustc/middle/lang_items.rs b/src/librustc/middle/lang_items.rs index bbb11b9f93bbd..2f81d8c0f701e 100644 --- a/src/librustc/middle/lang_items.rs +++ b/src/librustc/middle/lang_items.rs @@ -266,8 +266,6 @@ lets_do_this! { ShrTraitLangItem, "shr", shr_trait; IndexTraitLangItem, "index", index_trait; IndexMutTraitLangItem, "index_mut", index_mut_trait; - SliceTraitLangItem, "slice", slice_trait; - SliceMutTraitLangItem, "slice_mut", slice_mut_trait; RangeStructLangItem, "range", range_struct; RangeFromStructLangItem, "range_from", range_from_struct; RangeToStructLangItem, "range_to", range_to_struct; diff --git a/src/librustc/middle/liveness.rs b/src/librustc/middle/liveness.rs index 77875139be3a3..24cfe51123058 100644 --- a/src/librustc/middle/liveness.rs +++ b/src/librustc/middle/liveness.rs @@ -326,7 +326,7 @@ impl<'a, 'tcx> IrMaps<'a, 'tcx> { self.tcx .sess .span_bug(span, format!("no variable registered for id {}", - node_id)[]); + node_id).index(&FullRange)); } } } @@ -597,7 +597,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { self.ir.tcx.sess.span_bug( span, format!("no live node registered for node {}", - node_id)[]); + node_id).index(&FullRange)); } } } @@ -1132,7 +1132,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { // Uninteresting cases: just propagate in rev exec order ast::ExprVec(ref exprs) => { - self.propagate_through_exprs(exprs[], succ) + self.propagate_through_exprs(exprs.index(&FullRange), succ) } ast::ExprRepeat(ref element, ref count) => { @@ -1157,7 +1157,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { } else { succ }; - let succ = self.propagate_through_exprs(args[], succ); + let succ = self.propagate_through_exprs(args.index(&FullRange), succ); self.propagate_through_expr(&**f, succ) } @@ -1170,11 +1170,11 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { } else { succ }; - self.propagate_through_exprs(args[], succ) + self.propagate_through_exprs(args.index(&FullRange), succ) } ast::ExprTup(ref exprs) => { - self.propagate_through_exprs(exprs[], succ) + self.propagate_through_exprs(exprs.index(&FullRange), succ) } ast::ExprBinary(op, ref l, ref r) if ast_util::lazy_binop(op) => { diff --git a/src/librustc/middle/mem_categorization.rs b/src/librustc/middle/mem_categorization.rs index 2b8c9b532e593..81d4d66a88eb9 100644 --- a/src/librustc/middle/mem_categorization.rs +++ b/src/librustc/middle/mem_categorization.rs @@ -482,28 +482,20 @@ impl<'t,'tcx,TYPER:Typer<'tcx>> MemCategorizationContext<'t,TYPER> { Ok(self.cat_tup_field(expr, base_cmt, idx.node, expr_ty)) } - ast::ExprIndex(ref base, ref idx) => { - match idx.node { - ast::ExprRange(..) => { - // Slicing syntax special case (KILLME). - Ok(self.cat_rvalue_node(expr.id(), expr.span(), expr_ty)) + ast::ExprIndex(ref base, _) => { + let method_call = ty::MethodCall::expr(expr.id()); + match self.typer.node_method_ty(method_call) { + Some(method_ty) => { + // If this is an index implemented by a method call, then it will + // include an implicit deref of the result. 
+ let ret_ty = ty::ty_fn_ret(method_ty).unwrap(); + self.cat_deref(expr, + self.cat_rvalue_node(expr.id(), + expr.span(), + ret_ty), 1, true) } - _ => { - let method_call = ty::MethodCall::expr(expr.id()); - match self.typer.node_method_ty(method_call) { - Some(method_ty) => { - // If this is an index implemented by a method call, then it will - // include an implicit deref of the result. - let ret_ty = ty::ty_fn_ret(method_ty).unwrap(); - self.cat_deref(expr, - self.cat_rvalue_node(expr.id(), - expr.span(), - ret_ty), 1, true) - } - None => { - self.cat_index(expr, try!(self.cat_expr(&**base))) - } - } + None => { + self.cat_index(expr, try!(self.cat_expr(&**base))) } } } @@ -594,7 +586,7 @@ impl<'t,'tcx,TYPER:Typer<'tcx>> MemCategorizationContext<'t,TYPER> { span, format!("Upvar of non-closure {} - {}", fn_node_id, - ty.repr(self.tcx()))[]); + ty.repr(self.tcx())).index(&FullRange)); } } } diff --git a/src/librustc/middle/privacy.rs b/src/librustc/middle/privacy.rs index 50e328ef0e3c3..df545c7570a02 100644 --- a/src/librustc/middle/privacy.rs +++ b/src/librustc/middle/privacy.rs @@ -615,10 +615,10 @@ impl<'a, 'tcx> PrivacyVisitor<'a, 'tcx> { match result { None => true, Some((span, msg, note)) => { - self.tcx.sess.span_err(span, msg[]); + self.tcx.sess.span_err(span, msg.index(&FullRange)); match note { Some((span, msg)) => { - self.tcx.sess.span_note(span, msg[]) + self.tcx.sess.span_note(span, msg.index(&FullRange)) } None => {}, } @@ -720,7 +720,7 @@ impl<'a, 'tcx> PrivacyVisitor<'a, 'tcx> { UnnamedField(idx) => format!("field #{} of {} is private", idx + 1, struct_desc), }; - self.tcx.sess.span_err(span, msg[]); + self.tcx.sess.span_err(span, msg.index(&FullRange)); } // Given the ID of a method, checks to ensure it's in scope. @@ -742,7 +742,7 @@ impl<'a, 'tcx> PrivacyVisitor<'a, 'tcx> { method_id, None, format!("method `{}`", - string)[])); + string).index(&FullRange))); } // Checks that a path is in scope. @@ -756,7 +756,7 @@ impl<'a, 'tcx> PrivacyVisitor<'a, 'tcx> { self.ensure_public(span, def, Some(origdid), - format!("{} `{}`", tyname, name)[]) + format!("{} `{}`", tyname, name).index(&FullRange)) }; match self.last_private_map[path_id] { diff --git a/src/librustc/middle/reachable.rs b/src/librustc/middle/reachable.rs index 5736e3072862a..51602e88f9342 100644 --- a/src/librustc/middle/reachable.rs +++ b/src/librustc/middle/reachable.rs @@ -50,7 +50,7 @@ fn generics_require_inlining(generics: &ast::Generics) -> bool { // monomorphized or it was marked with `#[inline]`. This will only return // true for functions. 
fn item_might_be_inlined(item: &ast::Item) -> bool { - if attributes_specify_inlining(item.attrs[]) { + if attributes_specify_inlining(item.attrs.index(&FullRange)) { return true } @@ -65,7 +65,7 @@ fn item_might_be_inlined(item: &ast::Item) -> bool { fn method_might_be_inlined(tcx: &ty::ctxt, method: &ast::Method, impl_src: ast::DefId) -> bool { - if attributes_specify_inlining(method.attrs[]) || + if attributes_specify_inlining(method.attrs.index(&FullRange)) || generics_require_inlining(method.pe_generics()) { return true } @@ -202,7 +202,7 @@ impl<'a, 'tcx> ReachableContext<'a, 'tcx> { ast::MethodImplItem(ref method) => { if generics_require_inlining(method.pe_generics()) || attributes_specify_inlining( - method.attrs[]) { + method.attrs.index(&FullRange)) { true } else { let impl_did = self.tcx @@ -249,7 +249,7 @@ impl<'a, 'tcx> ReachableContext<'a, 'tcx> { None => { self.tcx.sess.bug(format!("found unmapped ID in worklist: \ {}", - search_item)[]) + search_item).index(&FullRange)) } } } @@ -341,7 +341,7 @@ impl<'a, 'tcx> ReachableContext<'a, 'tcx> { .bug(format!("found unexpected thingy in worklist: {}", self.tcx .map - .node_to_string(search_item))[]) + .node_to_string(search_item)).index(&FullRange)) } } } diff --git a/src/librustc/middle/region.rs b/src/librustc/middle/region.rs index 68e257bc0c5ec..5be092eb1bd3e 100644 --- a/src/librustc/middle/region.rs +++ b/src/librustc/middle/region.rs @@ -643,7 +643,7 @@ fn resolve_local(visitor: &mut RegionResolutionVisitor, local: &ast::Local) { // A, but the inner rvalues `a()` and `b()` have an extended lifetime // due to rule C. // - // FIXME(#6308) -- Note that `[]` patterns work more smoothly post-DST. + // FIXME(#6308) -- Note that `.index(&FullRange)` patterns work more smoothly post-DST. match local.init { Some(ref expr) => { diff --git a/src/librustc/middle/resolve_lifetime.rs b/src/librustc/middle/resolve_lifetime.rs index 68cb8ca39b492..6cdbd9872bc4e 100644 --- a/src/librustc/middle/resolve_lifetime.rs +++ b/src/librustc/middle/resolve_lifetime.rs @@ -399,7 +399,7 @@ impl<'a> LifetimeContext<'a> { self.sess.span_err( lifetime_ref.span, format!("use of undeclared lifetime name `{}`", - token::get_name(lifetime_ref.name))[]); + token::get_name(lifetime_ref.name)).index(&FullRange)); } fn check_lifetime_defs(&mut self, old_scope: Scope, lifetimes: &Vec) { @@ -413,7 +413,7 @@ impl<'a> LifetimeContext<'a> { lifetime.lifetime.span, format!("illegal lifetime parameter name: `{}`", token::get_name(lifetime.lifetime.name)) - []); + .index(&FullRange)); } } @@ -427,7 +427,7 @@ impl<'a> LifetimeContext<'a> { format!("lifetime name `{}` declared twice in \ the same scope", token::get_name(lifetime_j.lifetime.name)) - []); + .index(&FullRange)); } } diff --git a/src/librustc/middle/subst.rs b/src/librustc/middle/subst.rs index cd29ce28ac174..a3973d5854263 100644 --- a/src/librustc/middle/subst.rs +++ b/src/librustc/middle/subst.rs @@ -604,7 +604,7 @@ impl<'a, 'tcx> TypeFolder<'tcx> for SubstFolder<'a, 'tcx> { (space={}, index={})", region_name.as_str(), self.root_ty.repr(self.tcx()), - space, i)[]); + space, i).index(&FullRange)); } } } @@ -661,7 +661,7 @@ impl<'a,'tcx> SubstFolder<'a,'tcx> { p.space, p.idx, self.root_ty.repr(self.tcx()), - self.substs.repr(self.tcx()))[]); + self.substs.repr(self.tcx())).index(&FullRange)); } }; diff --git a/src/librustc/middle/traits/coherence.rs b/src/librustc/middle/traits/coherence.rs index 42b6e54420b40..49c7d6aafaa5e 100644 --- a/src/librustc/middle/traits/coherence.rs +++ 
b/src/librustc/middle/traits/coherence.rs @@ -137,7 +137,7 @@ fn ty_is_local_constructor<'tcx>(tcx: &ty::ctxt<'tcx>, ty: Ty<'tcx>) -> bool { ty::ty_err => { tcx.sess.bug( format!("ty_is_local invoked on unexpected type: {}", - ty.repr(tcx))[]) + ty.repr(tcx)).index(&FullRange)) } } } diff --git a/src/librustc/middle/traits/error_reporting.rs b/src/librustc/middle/traits/error_reporting.rs index 59322fcc632e8..fd6773afb765d 100644 --- a/src/librustc/middle/traits/error_reporting.rs +++ b/src/librustc/middle/traits/error_reporting.rs @@ -339,5 +339,5 @@ pub fn suggest_new_overflow_limit(tcx: &ty::ctxt, span: Span) { span, format!( "consider adding a `#![recursion_limit=\"{}\"]` attribute to your crate", - suggested_limit)[]); + suggested_limit).index(&FullRange)); } diff --git a/src/librustc/middle/traits/fulfill.rs b/src/librustc/middle/traits/fulfill.rs index c2327adece8e0..c5800f3b48caa 100644 --- a/src/librustc/middle/traits/fulfill.rs +++ b/src/librustc/middle/traits/fulfill.rs @@ -227,7 +227,7 @@ impl<'tcx> FulfillmentContext<'tcx> { } pub fn pending_obligations(&self) -> &[PredicateObligation<'tcx>] { - self.predicates[] + self.predicates.index(&FullRange) } /// Attempts to select obligations using `selcx`. If `only_new_obligations` is true, then it diff --git a/src/librustc/middle/traits/mod.rs b/src/librustc/middle/traits/mod.rs index ce926fd8d10de..3638d98ae8800 100644 --- a/src/librustc/middle/traits/mod.rs +++ b/src/librustc/middle/traits/mod.rs @@ -378,7 +378,7 @@ pub fn type_known_to_meet_builtin_bound<'a,'tcx>(infcx: &InferCtxt<'a,'tcx>, span, format!("overflow evaluating whether `{}` is `{}`", ty.user_string(infcx.tcx), - bound.user_string(infcx.tcx))[]); + bound.user_string(infcx.tcx)).as_slice()); suggest_new_overflow_limit(infcx.tcx, span); false } diff --git a/src/librustc/middle/traits/object_safety.rs b/src/librustc/middle/traits/object_safety.rs index 8880cb7ce733f..17792ef78ab53 100644 --- a/src/librustc/middle/traits/object_safety.rs +++ b/src/librustc/middle/traits/object_safety.rs @@ -178,7 +178,7 @@ fn object_safety_violations_for_method<'tcx>(tcx: &ty::ctxt<'tcx>, // The `Self` type is erased, so it should not appear in list of // arguments or return type apart from the receiver. 
let ref sig = method.fty.sig; - for &input_ty in sig.0.inputs[1..].iter() { + for &input_ty in sig.0.inputs.index(&(1..)).iter() { if contains_illegal_self_type_reference(tcx, trait_def_id, input_ty) { return Some(MethodViolationCode::ReferencesSelf); } diff --git a/src/librustc/middle/traits/project.rs b/src/librustc/middle/traits/project.rs index 65f7ad296db51..67a8508e60dd1 100644 --- a/src/librustc/middle/traits/project.rs +++ b/src/librustc/middle/traits/project.rs @@ -490,7 +490,7 @@ fn assemble_candidates_from_object_type<'cx,'tcx>( selcx.tcx().sess.span_bug( obligation.cause.span, format!("assemble_candidates_from_object_type called with non-object: {}", - object_ty.repr(selcx.tcx()))[]); + object_ty.repr(selcx.tcx())).as_slice()); } }; let projection_bounds = data.projection_bounds_with_self_ty(selcx.tcx(), object_ty); diff --git a/src/librustc/middle/traits/select.rs b/src/librustc/middle/traits/select.rs index 2393b7d733d00..4ffae39ea44f0 100644 --- a/src/librustc/middle/traits/select.rs +++ b/src/librustc/middle/traits/select.rs @@ -903,7 +903,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { let all_bounds = util::transitive_bounds( - self.tcx(), caller_trait_refs[]); + self.tcx(), caller_trait_refs.index(&FullRange)); let matching_bounds = all_bounds.filter( @@ -1467,7 +1467,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { self.tcx().sess.bug( format!( "asked to assemble builtin bounds of unexpected type: {}", - self_ty.repr(self.tcx()))[]); + self_ty.repr(self.tcx())).index(&FullRange)); } }; @@ -1637,7 +1637,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { self.tcx().sess.span_bug( obligation.cause.span, format!("builtin bound for {} was ambig", - obligation.repr(self.tcx()))[]); + obligation.repr(self.tcx())).index(&FullRange)); } } } @@ -1816,7 +1816,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { self.tcx().sess.span_bug( obligation.cause.span, format!("Fn pointer candidate for inappropriate self type: {}", - self_ty.repr(self.tcx()))[]); + self_ty.repr(self.tcx())).index(&FullRange)); } }; @@ -1946,7 +1946,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { self.tcx().sess.bug( format!("Impl {} was matchable against {} but now is not", impl_def_id.repr(self.tcx()), - obligation.repr(self.tcx()))[]); + obligation.repr(self.tcx())).index(&FullRange)); } } } diff --git a/src/librustc/middle/ty.rs b/src/librustc/middle/ty.rs index c359233eca173..e24fe46b34db7 100644 --- a/src/librustc/middle/ty.rs +++ b/src/librustc/middle/ty.rs @@ -1047,7 +1047,7 @@ pub struct ClosureTy<'tcx> { pub abi: abi::Abi, } -#[derive(Clone, Copy, PartialEq, Eq, Hash)] +#[derive(Clone, Copy, PartialEq, Eq, Hash, Show)] pub enum FnOutput<'tcx> { FnConverging(Ty<'tcx>), FnDiverging @@ -1699,8 +1699,7 @@ impl fmt::Show for RegionVid { impl<'tcx> fmt::Show for FnSig<'tcx> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - // grr, without tcx not much we can do. - write!(f, "(...)") + write!(f, "({}; variadic: {})->{}", self.inputs, self.variadic, self.output) } } @@ -2006,8 +2005,8 @@ impl<'tcx> Predicate<'tcx> { /// struct Foo> { ... } /// /// Here, the `Generics` for `Foo` would contain a list of bounds like -/// `[[], [U:Bar]]`. Now if there were some particular reference -/// like `Foo`, then the `GenericBounds` would be `[[], +/// `[.index(&FullRange), [U:Bar]]`. Now if there were some particular reference +/// like `Foo`, then the `GenericBounds` would be `[.index(&FullRange), /// [uint:Bar]]`. 
#[derive(Clone, Show)] pub struct GenericBounds<'tcx> { @@ -2184,7 +2183,7 @@ impl<'a, 'tcx> ParameterEnvironment<'a, 'tcx> { _ => { cx.sess.bug(format!("ParameterEnvironment::from_item(): \ `{}` is not an item", - cx.map.node_to_string(id))[]) + cx.map.node_to_string(id)).index(&FullRange)) } } } @@ -2269,7 +2268,7 @@ impl UnboxedClosureKind { }; match result { Ok(trait_did) => trait_did, - Err(err) => cx.sess.fatal(err[]), + Err(err) => cx.sess.fatal(err.index(&FullRange)), } } } @@ -2590,7 +2589,7 @@ impl FlagComputation { } &ty_tup(ref ts) => { - self.add_tys(ts[]); + self.add_tys(ts.index(&FullRange)); } &ty_bare_fn(_, ref f) => { @@ -2613,7 +2612,7 @@ impl FlagComputation { fn add_fn_sig(&mut self, fn_sig: &PolyFnSig) { let mut computation = FlagComputation::new(); - computation.add_tys(fn_sig.0.inputs[]); + computation.add_tys(fn_sig.0.inputs.index(&FullRange)); if let ty::FnConverging(output) = fn_sig.0.output { computation.add_ty(output); @@ -2782,7 +2781,7 @@ pub fn mk_trait<'tcx>(cx: &ctxt<'tcx>, fn bound_list_is_sorted(bounds: &[ty::PolyProjectionPredicate]) -> bool { bounds.len() == 0 || - bounds[1..].iter().enumerate().all( + bounds.index(&(1..)).iter().enumerate().all( |(index, bound)| bounds[index].sort_key() <= bound.sort_key()) } @@ -3037,7 +3036,7 @@ pub fn sequence_element_type<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> { ty_str => mk_mach_uint(cx, ast::TyU8), ty_open(ty) => sequence_element_type(cx, ty), _ => cx.sess.bug(format!("sequence_element_type called on non-sequence value: {}", - ty_to_string(cx, ty))[]), + ty_to_string(cx, ty)).index(&FullRange)), } } @@ -3371,7 +3370,7 @@ pub fn type_contents<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> TypeContents { ty_struct(did, substs) => { let flds = struct_fields(cx, did, substs); let mut res = - TypeContents::union(flds[], + TypeContents::union(flds.index(&FullRange), |f| tc_mt(cx, f.mt, cache)); if !lookup_repr_hints(cx, did).contains(&attr::ReprExtern) { @@ -3395,15 +3394,15 @@ pub fn type_contents<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> TypeContents { } ty_tup(ref tys) => { - TypeContents::union(tys[], + TypeContents::union(tys.index(&FullRange), |ty| tc_ty(cx, *ty, cache)) } ty_enum(did, substs) => { let variants = substd_enum_variants(cx, did, substs); let mut res = - TypeContents::union(variants[], |variant| { - TypeContents::union(variant.args[], + TypeContents::union(variants.index(&FullRange), |variant| { + TypeContents::union(variant.args.index(&FullRange), |arg_ty| { tc_ty(cx, *arg_ty, cache) }) @@ -3655,7 +3654,8 @@ pub fn is_instantiable<'tcx>(cx: &ctxt<'tcx>, r_ty: Ty<'tcx>) -> bool { ty_unboxed_closure(..) => { // this check is run on type definitions, so we don't expect to see // inference by-products or unboxed closure types - cx.sess.bug(format!("requires check invoked on inapplicable type: {}", ty)[]) + cx.sess.bug(format!("requires check invoked on inapplicable type: {}", + ty).as_slice()) } ty_tup(ref ts) => { @@ -3748,7 +3748,8 @@ pub fn is_type_representable<'tcx>(cx: &ctxt<'tcx>, sp: Span, ty: Ty<'tcx>) ty_unboxed_closure(..) 
=> { // this check is run on type definitions, so we don't expect to see // unboxed closure types - cx.sess.bug(format!("requires check invoked on inapplicable type: {}", ty)[]) + cx.sess.bug(format!("requires check invoked on inapplicable type: {}", + ty).as_slice()) } _ => Representable, } @@ -3986,7 +3987,7 @@ pub fn close_type<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> Ty<'tcx> { match ty.sty { ty_open(ty) => mk_rptr(cx, cx.mk_region(ReStatic), mt {ty: ty, mutbl:ast::MutImmutable}), _ => cx.sess.bug(format!("Trying to close a non-open type {}", - ty_to_string(cx, ty))[]) + ty_to_string(cx, ty)).index(&FullRange)) } } @@ -4087,7 +4088,7 @@ pub fn node_id_to_trait_ref<'tcx>(cx: &ctxt<'tcx>, id: ast::NodeId) Some(ty) => ty.clone(), None => cx.sess.bug( format!("node_id_to_trait_ref: no trait ref for node `{}`", - cx.map.node_to_string(id))[]) + cx.map.node_to_string(id)).index(&FullRange)) } } @@ -4100,7 +4101,7 @@ pub fn node_id_to_type<'tcx>(cx: &ctxt<'tcx>, id: ast::NodeId) -> Ty<'tcx> { Some(ty) => ty, None => cx.sess.bug( format!("node_id_to_type: no type for node `{}`", - cx.map.node_to_string(id))[]) + cx.map.node_to_string(id)).index(&FullRange)) } } @@ -4187,7 +4188,7 @@ pub fn ty_region(tcx: &ctxt, tcx.sess.span_bug( span, format!("ty_region() invoked on an inappropriate ty: {}", - s)[]); + s).index(&FullRange)); } } } @@ -4248,11 +4249,11 @@ pub fn expr_span(cx: &ctxt, id: NodeId) -> Span { Some(f) => { cx.sess.bug(format!("Node id {} is not an expr: {}", id, - f)[]); + f).index(&FullRange)); } None => { cx.sess.bug(format!("Node id {} is not present \ - in the node map", id)[]); + in the node map", id).index(&FullRange)); } } } @@ -4268,14 +4269,14 @@ pub fn local_var_name_str(cx: &ctxt, id: NodeId) -> InternedString { cx.sess.bug( format!("Variable id {} maps to {}, not local", id, - pat)[]); + pat).index(&FullRange)); } } } r => { cx.sess.bug(format!("Variable id {} maps to {}, not local", id, - r)[]); + r).index(&FullRange)); } } } @@ -4306,7 +4307,7 @@ pub fn adjust_ty<'tcx, F>(cx: &ctxt<'tcx>, cx.sess.bug( format!("AdjustReifyFnPointer adjustment on non-fn-item: \ {}", - b)[]); + b).index(&FullRange)); } } } @@ -4334,7 +4335,7 @@ pub fn adjust_ty<'tcx, F>(cx: &ctxt<'tcx>, {}", i, ty_to_string(cx, adjusted_ty)) - []); + .index(&FullRange)); } } } @@ -4397,7 +4398,7 @@ pub fn unsize_ty<'tcx>(cx: &ctxt<'tcx>, } _ => cx.sess.span_bug(span, format!("UnsizeLength with bad sty: {}", - ty_to_string(cx, ty))[]) + ty_to_string(cx, ty)).index(&FullRange)) }, &UnsizeStruct(box ref k, tp_index) => match ty.sty { ty_struct(did, substs) => { @@ -4409,7 +4410,7 @@ pub fn unsize_ty<'tcx>(cx: &ctxt<'tcx>, } _ => cx.sess.span_bug(span, format!("UnsizeStruct with bad sty: {}", - ty_to_string(cx, ty))[]) + ty_to_string(cx, ty)).index(&FullRange)) }, &UnsizeVtable(TyTrait { ref principal, ref bounds }, _) => { mk_trait(cx, principal.clone(), bounds.clone()) @@ -4422,7 +4423,7 @@ pub fn resolve_expr(tcx: &ctxt, expr: &ast::Expr) -> def::Def { Some(&def) => def, None => { tcx.sess.span_bug(expr.span, format!( - "no def-map entry for expr {}", expr.id)[]); + "no def-map entry for expr {}", expr.id).index(&FullRange)); } } } @@ -4517,7 +4518,7 @@ pub fn expr_kind(tcx: &ctxt, expr: &ast::Expr) -> ExprKind { expr.span, format!("uncategorized def for expr {}: {}", expr.id, - def)[]); + def).index(&FullRange)); } } } @@ -4642,7 +4643,7 @@ pub fn field_idx_strict(tcx: &ctxt, name: ast::Name, fields: &[field]) token::get_name(name), fields.iter() .map(|f| token::get_name(f.name).get().to_string()) - 
.collect::>())[]); + .collect::>()).index(&FullRange)); } pub fn impl_or_trait_item_idx(id: ast::Name, trait_items: &[ImplOrTraitItem]) @@ -4897,7 +4898,7 @@ pub fn provided_trait_methods<'tcx>(cx: &ctxt<'tcx>, id: ast::DefId) match item.node { ItemTrait(_, _, _, ref ms) => { let (_, p) = - ast_util::split_trait_methods(ms[]); + ast_util::split_trait_methods(ms.index(&FullRange)); p.iter() .map(|m| { match impl_or_trait_item( @@ -4916,14 +4917,14 @@ pub fn provided_trait_methods<'tcx>(cx: &ctxt<'tcx>, id: ast::DefId) _ => { cx.sess.bug(format!("provided_trait_methods: `{}` is \ not a trait", - id)[]) + id).index(&FullRange)) } } } _ => { cx.sess.bug(format!("provided_trait_methods: `{}` is not a \ trait", - id)[]) + id).index(&FullRange)) } } } else { @@ -5160,7 +5161,7 @@ impl<'tcx> VariantInfo<'tcx> { }, ast::StructVariantKind(ref struct_def) => { - let fields: &[StructField] = struct_def.fields[]; + let fields: &[StructField] = struct_def.fields.index(&FullRange); assert!(fields.len() > 0); @@ -5311,7 +5312,7 @@ pub fn enum_variants<'tcx>(cx: &ctxt<'tcx>, id: ast::DefId) cx.sess .span_err(e.span, format!("expected constant: {}", - *err)[]); + *err).index(&FullRange)); } }, None => {} @@ -5601,7 +5602,7 @@ pub fn lookup_struct_fields(cx: &ctxt, did: ast::DefId) -> Vec { _ => { cx.sess.bug( format!("ID not mapped to struct fields: {}", - cx.map.node_to_string(did.node))[]); + cx.map.node_to_string(did.node)).index(&FullRange)); } } } else { @@ -5634,7 +5635,7 @@ pub fn struct_fields<'tcx>(cx: &ctxt<'tcx>, did: ast::DefId, substs: &Substs<'tc pub fn tup_fields<'tcx>(v: &[Ty<'tcx>]) -> Vec> { v.iter().enumerate().map(|(i, &f)| { field { - name: token::intern(i.to_string()[]), + name: token::intern(i.to_string().index(&FullRange)), mt: mt { ty: f, mutbl: MutImmutable @@ -5845,7 +5846,7 @@ pub fn eval_repeat_count(tcx: &ctxt, count_expr: &ast::Expr) -> uint { }; tcx.sess.span_err(count_expr.span, format!( "expected positive integer for repeat count, found {}", - found)[]); + found).index(&FullRange)); } Err(_) => { let found = match count_expr.node { @@ -5860,7 +5861,7 @@ pub fn eval_repeat_count(tcx: &ctxt, count_expr: &ast::Expr) -> uint { }; tcx.sess.span_err(count_expr.span, format!( "expected constant integer for repeat count, found {}", - found)[]); + found).index(&FullRange)); } } 0 @@ -6644,7 +6645,7 @@ pub fn with_freevars(tcx: &ty::ctxt, fid: ast::NodeId, f: F) -> T where { match tcx.freevars.borrow().get(&fid) { None => f(&[]), - Some(d) => f(d[]) + Some(d) => f(d.index(&FullRange)) } } diff --git a/src/librustc/plugin/load.rs b/src/librustc/plugin/load.rs index 44a223954858a..a38298d52dd87 100644 --- a/src/librustc/plugin/load.rs +++ b/src/librustc/plugin/load.rs @@ -223,17 +223,17 @@ impl<'a> PluginLoader<'a> { // this is fatal: there are almost certainly macros we need // inside this crate, so continue would spew "macro undefined" // errors - Err(err) => self.sess.span_fatal(vi.span, err[]) + Err(err) => self.sess.span_fatal(vi.span, err.index(&FullRange)) }; unsafe { let registrar = - match lib.symbol(symbol[]) { + match lib.symbol(symbol.index(&FullRange)) { Ok(registrar) => { mem::transmute::<*mut u8,PluginRegistrarFun>(registrar) } // again fatal if we can't register macros - Err(err) => self.sess.span_fatal(vi.span, err[]) + Err(err) => self.sess.span_fatal(vi.span, err.index(&FullRange)) }; // Intentionally leak the dynamic library. 
We can't ever unload it diff --git a/src/librustc/session/config.rs b/src/librustc/session/config.rs index 138f648049c73..d3f8c8284e0dc 100644 --- a/src/librustc/session/config.rs +++ b/src/librustc/session/config.rs @@ -559,17 +559,17 @@ pub fn build_codegen_options(matches: &getopts::Matches) -> CodegenOptions match (value, opt_type_desc) { (Some(..), None) => { early_error(format!("codegen option `{}` takes no \ - value", key)[]) + value", key).index(&FullRange)) } (None, Some(type_desc)) => { early_error(format!("codegen option `{0}` requires \ {1} (-C {0}=)", - key, type_desc)[]) + key, type_desc).index(&FullRange)) } (Some(value), Some(type_desc)) => { early_error(format!("incorrect value `{}` for codegen \ option `{}` - {} was expected", - value, key, type_desc)[]) + value, key, type_desc).index(&FullRange)) } (None, None) => unreachable!() } @@ -579,7 +579,7 @@ pub fn build_codegen_options(matches: &getopts::Matches) -> CodegenOptions } if !found { early_error(format!("unknown codegen option: `{}`", - key)[]); + key).index(&FullRange)); } } return cg; @@ -592,10 +592,10 @@ pub fn default_lib_output() -> CrateType { pub fn default_configuration(sess: &Session) -> ast::CrateConfig { use syntax::parse::token::intern_and_get_ident as intern; - let end = sess.target.target.target_endian[]; - let arch = sess.target.target.arch[]; - let wordsz = sess.target.target.target_word_size[]; - let os = sess.target.target.target_os[]; + let end = sess.target.target.target_endian.index(&FullRange); + let arch = sess.target.target.arch.index(&FullRange); + let wordsz = sess.target.target.target_word_size.index(&FullRange); + let os = sess.target.target.target_os.index(&FullRange); let fam = match sess.target.target.options.is_like_windows { true => InternedString::new("windows"), @@ -631,23 +631,23 @@ pub fn build_configuration(sess: &Session) -> ast::CrateConfig { append_configuration(&mut user_cfg, InternedString::new("test")) } let mut v = user_cfg.into_iter().collect::>(); - v.push_all(default_cfg[]); + v.push_all(default_cfg.index(&FullRange)); v } pub fn build_target_config(opts: &Options, sp: &SpanHandler) -> Config { - let target = match Target::search(opts.target_triple[]) { + let target = match Target::search(opts.target_triple.index(&FullRange)) { Ok(t) => t, Err(e) => { - sp.handler().fatal((format!("Error loading target specification: {}", e))[]); + sp.handler().fatal((format!("Error loading target specification: {}", e)).as_slice()); } }; - let (int_type, uint_type) = match target.target_word_size[] { + let (int_type, uint_type) = match target.target_word_size.index(&FullRange) { "32" => (ast::TyI32, ast::TyU32), "64" => (ast::TyI64, ast::TyU64), w => sp.handler().fatal((format!("target specification was invalid: unrecognized \ - target-word-size {}", w))[]) + target-word-size {}", w)).index(&FullRange)) }; Config { @@ -845,7 +845,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { let unparsed_crate_types = matches.opt_strs("crate-type"); let crate_types = parse_crate_types_from_list(unparsed_crate_types) - .unwrap_or_else(|e| early_error(e[])); + .unwrap_or_else(|e| early_error(e.index(&FullRange))); let mut lint_opts = vec!(); let mut describe_lints = false; @@ -873,7 +873,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { } if this_bit == 0 { early_error(format!("unknown debug flag: {}", - *debug_flag)[]) + *debug_flag).index(&FullRange)) } debugging_opts |= this_bit; } @@ -918,7 +918,7 @@ pub fn build_session_options(matches: 
&getopts::Matches) -> Options { "dep-info" => OutputTypeDepInfo, _ => { early_error(format!("unknown emission type: `{}`", - part)[]) + part).index(&FullRange)) } }; output_types.push(output_type) @@ -957,7 +957,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { Some(arg) => { early_error(format!("optimization level needs to be \ between 0-3 (instead was `{}`)", - arg)[]); + arg).index(&FullRange)); } } } else { @@ -995,7 +995,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { Some(arg) => { early_error(format!("debug info level needs to be between \ 0-2 (instead was `{}`)", - arg)[]); + arg).index(&FullRange)); } } } else { @@ -1013,7 +1013,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { let mut search_paths = SearchPaths::new(); for s in matches.opt_strs("L").iter() { - search_paths.add_path(s[]); + search_paths.add_path(s.index(&FullRange)); } let libs = matches.opt_strs("l").into_iter().map(|s| { @@ -1027,7 +1027,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { s => { early_error(format!("unknown library kind `{}`, expected \ one of dylib, framework, or static", - s)[]); + s).as_slice()); } }; return (name.to_string(), kind) @@ -1045,7 +1045,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { (_, s) => { early_error(format!("unknown library kind `{}`, expected \ one of dylib, framework, or static", - s)[]); + s).index(&FullRange)); } }; (name.to_string(), kind) @@ -1089,7 +1089,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { --debuginfo"); } - let color = match matches.opt_str("color").as_ref().map(|s| s[]) { + let color = match matches.opt_str("color").as_ref().map(|s| s.index(&FullRange)) { Some("auto") => Auto, Some("always") => Always, Some("never") => Never, @@ -1099,7 +1099,7 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { Some(arg) => { early_error(format!("argument for --color must be auto, always \ or never (instead was `{}`)", - arg)[]) + arg).index(&FullRange)) } }; @@ -1201,7 +1201,7 @@ mod test { #[test] fn test_switch_implies_cfg_test() { let matches = - &match getopts(&["--test".to_string()], optgroups()[]) { + &match getopts(&["--test".to_string()], optgroups().index(&FullRange)) { Ok(m) => m, Err(f) => panic!("test_switch_implies_cfg_test: {}", f) }; @@ -1209,7 +1209,7 @@ mod test { let sessopts = build_session_options(matches); let sess = build_session(sessopts, None, registry); let cfg = build_configuration(&sess); - assert!((attr::contains_name(cfg[], "test"))); + assert!((attr::contains_name(cfg.index(&FullRange), "test"))); } // When the user supplies --test and --cfg test, don't implicitly add @@ -1218,7 +1218,7 @@ mod test { fn test_switch_implies_cfg_test_unless_cfg_test() { let matches = &match getopts(&["--test".to_string(), "--cfg=test".to_string()], - optgroups()[]) { + optgroups().index(&FullRange)) { Ok(m) => m, Err(f) => { panic!("test_switch_implies_cfg_test_unless_cfg_test: {}", f) @@ -1238,7 +1238,7 @@ mod test { { let matches = getopts(&[ "-Awarnings".to_string() - ], optgroups()[]).unwrap(); + ], optgroups().index(&FullRange)).unwrap(); let registry = diagnostics::registry::Registry::new(&[]); let sessopts = build_session_options(&matches); let sess = build_session(sessopts, None, registry); @@ -1249,7 +1249,7 @@ mod test { let matches = getopts(&[ "-Awarnings".to_string(), "-Dwarnings".to_string() - ], optgroups()[]).unwrap(); + ], optgroups().index(&FullRange)).unwrap(); let 
registry = diagnostics::registry::Registry::new(&[]); let sessopts = build_session_options(&matches); let sess = build_session(sessopts, None, registry); @@ -1259,7 +1259,7 @@ mod test { { let matches = getopts(&[ "-Adead_code".to_string() - ], optgroups()[]).unwrap(); + ], optgroups().index(&FullRange)).unwrap(); let registry = diagnostics::registry::Registry::new(&[]); let sessopts = build_session_options(&matches); let sess = build_session(sessopts, None, registry); diff --git a/src/librustc/session/mod.rs b/src/librustc/session/mod.rs index abb780615ae7b..94a6bca4e06e5 100644 --- a/src/librustc/session/mod.rs +++ b/src/librustc/session/mod.rs @@ -174,7 +174,7 @@ impl Session { // cases later on pub fn impossible_case(&self, sp: Span, msg: &str) -> ! { self.span_bug(sp, - format!("impossible case reached: {}", msg)[]); + format!("impossible case reached: {}", msg).index(&FullRange)); } pub fn verbose(&self) -> bool { self.debugging_opt(config::VERBOSE) } pub fn time_passes(&self) -> bool { self.debugging_opt(config::TIME_PASSES) } @@ -216,7 +216,7 @@ impl Session { } pub fn target_filesearch(&self, kind: PathKind) -> filesearch::FileSearch { filesearch::FileSearch::new(self.sysroot(), - self.opts.target_triple[], + self.opts.target_triple.index(&FullRange), &self.opts.search_paths, kind) } diff --git a/src/librustc/util/lev_distance.rs b/src/librustc/util/lev_distance.rs index 79bd0d4e306e4..8f5820d92c589 100644 --- a/src/librustc/util/lev_distance.rs +++ b/src/librustc/util/lev_distance.rs @@ -48,7 +48,7 @@ fn test_lev_distance() { for c in range(0u32, MAX as u32) .filter_map(|i| from_u32(i)) .map(|i| i.to_string()) { - assert_eq!(lev_distance(c[], c[]), 0); + assert_eq!(lev_distance(c.index(&FullRange), c.index(&FullRange)), 0); } let a = "\nMäry häd ä little lämb\n\nLittle lämb\n"; diff --git a/src/librustc/util/ppaux.rs b/src/librustc/util/ppaux.rs index 8c2a9993004d9..55bba3e4215d6 100644 --- a/src/librustc/util/ppaux.rs +++ b/src/librustc/util/ppaux.rs @@ -55,12 +55,12 @@ pub fn note_and_explain_region(cx: &ctxt, (ref str, Some(span)) => { cx.sess.span_note( span, - format!("{}{}{}", prefix, *str, suffix)[]); + format!("{}{}{}", prefix, *str, suffix).index(&FullRange)); Some(span) } (ref str, None) => { cx.sess.note( - format!("{}{}{}", prefix, *str, suffix)[]); + format!("{}{}{}", prefix, *str, suffix).index(&FullRange)); None } } @@ -271,7 +271,7 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String { }; if abi != abi::Rust { - s.push_str(format!("extern {} ", abi.to_string())[]); + s.push_str(format!("extern {} ", abi.to_string()).index(&FullRange)); }; s.push_str("fn"); @@ -290,7 +290,7 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String { Some(def_id) => { s.push_str(" {"); let path_str = ty::item_path_str(cx, def_id); - s.push_str(path_str[]); + s.push_str(path_str.index(&FullRange)); s.push_str("}"); } None => { } @@ -305,7 +305,7 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String { match cty.store { ty::UniqTraitStore => {} ty::RegionTraitStore(region, _) => { - s.push_str(region_to_string(cx, "", true, region)[]); + s.push_str(region_to_string(cx, "", true, region).index(&FullRange)); } } @@ -324,7 +324,7 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String { assert_eq!(cty.onceness, ast::Once); s.push_str("proc"); push_sig_to_string(cx, &mut s, '(', ')', &cty.sig, - bounds_str[]); + bounds_str.index(&FullRange)); } ty::RegionTraitStore(..) 
=> { match cty.onceness { @@ -332,7 +332,7 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String { ast::Once => s.push_str("once ") } push_sig_to_string(cx, &mut s, '|', '|', &cty.sig, - bounds_str[]); + bounds_str.index(&FullRange)); } } @@ -365,7 +365,7 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String { ty::FnConverging(t) => { if !ty::type_is_nil(t) { s.push_str(" -> "); - s.push_str(ty_to_string(cx, t)[]); + s.push_str(ty_to_string(cx, t).index(&FullRange)); } } ty::FnDiverging => { @@ -402,7 +402,7 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String { } ty_rptr(r, ref tm) => { let mut buf = region_ptr_to_string(cx, *r); - buf.push_str(mt_to_string(cx, tm)[]); + buf.push_str(mt_to_string(cx, tm).index(&FullRange)); buf } ty_open(typ) => @@ -412,7 +412,7 @@ pub fn ty_to_string<'tcx>(cx: &ctxt<'tcx>, typ: &ty::TyS<'tcx>) -> String { .iter() .map(|elem| ty_to_string(cx, *elem)) .collect::>(); - match strs[] { + match strs.index(&FullRange) { [ref string] => format!("({},)", string), strs => format!("({})", strs.connect(", ")) } @@ -541,7 +541,7 @@ pub fn parameterized<'tcx>(cx: &ctxt<'tcx>, 0 }; - for t in tps[..tps.len() - num_defaults].iter() { + for t in tps.index(&(0..(tps.len() - num_defaults))).iter() { strs.push(ty_to_string(cx, *t)) } @@ -549,11 +549,11 @@ pub fn parameterized<'tcx>(cx: &ctxt<'tcx>, format!("{}({}){}", base, if strs[0].starts_with("(") && strs[0].ends_with(",)") { - strs[0][1 .. strs[0].len() - 2] // Remove '(' and ',)' + strs[0].index(&(1 .. (strs[0].len() - 2))) // Remove '(' and ',)' } else if strs[0].starts_with("(") && strs[0].ends_with(")") { - strs[0][1 .. strs[0].len() - 1] // Remove '(' and ')' + strs[0].index(&(1 .. (strs[0].len() - 1))) // Remove '(' and ')' } else { - strs[0][] + strs[0].index(&FullRange) }, if &*strs[1] == "()" { String::new() } else { format!(" -> {}", strs[1]) }) } else if strs.len() > 0 { @@ -566,7 +566,7 @@ pub fn parameterized<'tcx>(cx: &ctxt<'tcx>, pub fn ty_to_short_str<'tcx>(cx: &ctxt<'tcx>, typ: Ty<'tcx>) -> String { let mut s = typ.repr(cx).to_string(); if s.len() >= 32u { - s = s[0u..32u].to_string(); + s = s.index(&(0u..32u)).to_string(); } return s; } @@ -631,7 +631,7 @@ impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for [T] { impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for OwnedSlice { fn repr(&self, tcx: &ctxt<'tcx>) -> String { - repr_vec(tcx, self[]) + repr_vec(tcx, self.index(&FullRange)) } } @@ -639,7 +639,7 @@ impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for OwnedSlice { // autoderef cannot convert the &[T] handler impl<'tcx, T:Repr<'tcx>> Repr<'tcx> for Vec { fn repr(&self, tcx: &ctxt<'tcx>) -> String { - repr_vec(tcx, self[]) + repr_vec(tcx, self.index(&FullRange)) } } diff --git a/src/librustc/util/snapshot_vec.rs b/src/librustc/util/snapshot_vec.rs index 11820c908eeba..6040f55ceeba0 100644 --- a/src/librustc/util/snapshot_vec.rs +++ b/src/librustc/util/snapshot_vec.rs @@ -116,7 +116,7 @@ impl> SnapshotVec { pub fn actions_since_snapshot(&self, snapshot: &Snapshot) -> &[UndoLog] { - self.undo_log[snapshot.length..] 
+ self.undo_log.index(&(snapshot.length..)) } fn assert_open_snapshot(&self, snapshot: &Snapshot) { diff --git a/src/librustc_back/archive.rs b/src/librustc_back/archive.rs index 0bd4265e487a6..5a5a310c56dee 100644 --- a/src/librustc_back/archive.rs +++ b/src/librustc_back/archive.rs @@ -53,7 +53,7 @@ fn run_ar(handler: &ErrorHandler, maybe_ar_prog: &Option, args: &str, cwd: Option<&Path>, paths: &[&Path]) -> ProcessOutput { let ar = match *maybe_ar_prog { - Some(ref ar) => ar[], + Some(ref ar) => ar.index(&FullRange), None => "ar" }; let mut cmd = Command::new(ar); @@ -75,22 +75,22 @@ fn run_ar(handler: &ErrorHandler, maybe_ar_prog: &Option, if !o.status.success() { handler.err(format!("{} failed with: {}", cmd, - o.status)[]); + o.status).index(&FullRange)); handler.note(format!("stdout ---\n{}", str::from_utf8(o.output - []).unwrap()) - []); + .index(&FullRange)).unwrap()) + .index(&FullRange)); handler.note(format!("stderr ---\n{}", str::from_utf8(o.error - []).unwrap()) - []); + .index(&FullRange)).unwrap()) + .index(&FullRange)); handler.abort_if_errors(); } o }, Err(e) => { - handler.err(format!("could not exec `{}`: {}", ar[], - e)[]); + handler.err(format!("could not exec `{}`: {}", ar.index(&FullRange), + e).index(&FullRange)); handler.abort_if_errors(); panic!("rustc::back::archive::run_ar() should not reach this point"); } @@ -106,16 +106,16 @@ pub fn find_library(name: &str, osprefix: &str, ossuffix: &str, for path in search_paths.iter() { debug!("looking for {} inside {}", name, path.display()); - let test = path.join(oslibname[]); + let test = path.join(oslibname.index(&FullRange)); if test.exists() { return test } if oslibname != unixlibname { - let test = path.join(unixlibname[]); + let test = path.join(unixlibname.index(&FullRange)); if test.exists() { return test } } } handler.fatal(format!("could not find native static library `{}`, \ perhaps an -L flag is missing?", - name)[]); + name).index(&FullRange)); } impl<'a> Archive<'a> { @@ -147,7 +147,7 @@ impl<'a> Archive<'a> { /// Lists all files in an archive pub fn files(&self) -> Vec { let output = run_ar(self.handler, &self.maybe_ar_prog, "t", None, &[&self.dst]); - let output = str::from_utf8(output.output[]).unwrap(); + let output = str::from_utf8(output.output.index(&FullRange)).unwrap(); // use lines_any because windows delimits output with `\r\n` instead of // just `\n` output.lines_any().map(|s| s.to_string()).collect() @@ -179,9 +179,9 @@ impl<'a> ArchiveBuilder<'a> { /// search in the relevant locations for a library named `name`. 
pub fn add_native_library(&mut self, name: &str) -> io::IoResult<()> { let location = find_library(name, - self.archive.slib_prefix[], - self.archive.slib_suffix[], - self.archive.lib_search_paths[], + self.archive.slib_prefix.index(&FullRange), + self.archive.slib_suffix.index(&FullRange), + self.archive.lib_search_paths.index(&FullRange), self.archive.handler); self.add_archive(&location, name, |_| false) } @@ -197,12 +197,12 @@ impl<'a> ArchiveBuilder<'a> { // as simple comparison is not enough - there // might be also an extra name suffix let obj_start = format!("{}", name); - let obj_start = obj_start[]; + let obj_start = obj_start.index(&FullRange); // Ignoring all bytecode files, no matter of // name let bc_ext = ".bytecode.deflate"; - self.add_archive(rlib, name[], |fname: &str| { + self.add_archive(rlib, name.index(&FullRange), |fname: &str| { let skip_obj = lto && fname.starts_with(obj_start) && fname.ends_with(".o"); skip_obj || fname.ends_with(bc_ext) || fname == METADATA_FILENAME @@ -239,7 +239,7 @@ impl<'a> ArchiveBuilder<'a> { // allow running `ar s file.a` to update symbols only. if self.should_update_symbols { run_ar(self.archive.handler, &self.archive.maybe_ar_prog, - "s", Some(self.work_dir.path()), args[]); + "s", Some(self.work_dir.path()), args.index(&FullRange)); } return self.archive; } @@ -259,7 +259,7 @@ impl<'a> ArchiveBuilder<'a> { // Add the archive members seen so far, without updating the // symbol table (`S`). run_ar(self.archive.handler, &self.archive.maybe_ar_prog, - "cruS", Some(self.work_dir.path()), args[]); + "cruS", Some(self.work_dir.path()), args.index(&FullRange)); args.clear(); args.push(&abs_dst); @@ -274,7 +274,7 @@ impl<'a> ArchiveBuilder<'a> { // necessary. let flags = if self.should_update_symbols { "crus" } else { "cruS" }; run_ar(self.archive.handler, &self.archive.maybe_ar_prog, - flags, Some(self.work_dir.path()), args[]); + flags, Some(self.work_dir.path()), args.index(&FullRange)); self.archive } @@ -316,7 +316,7 @@ impl<'a> ArchiveBuilder<'a> { } else { filename }; - let new_filename = self.work_dir.path().join(filename[]); + let new_filename = self.work_dir.path().join(filename.index(&FullRange)); try!(fs::rename(file, &new_filename)); self.members.push(Path::new(filename)); } diff --git a/src/librustc_back/rpath.rs b/src/librustc_back/rpath.rs index 955081a3af64c..b6f080094cfba 100644 --- a/src/librustc_back/rpath.rs +++ b/src/librustc_back/rpath.rs @@ -44,15 +44,15 @@ pub fn get_rpath_flags(config: RPathConfig) -> Vec where l.map(|p| p.clone()) }).collect::>(); - let rpaths = get_rpaths(config, libs[]); - flags.push_all(rpaths_to_flags(rpaths[])[]); + let rpaths = get_rpaths(config, libs.index(&FullRange)); + flags.push_all(rpaths_to_flags(rpaths.index(&FullRange)).index(&FullRange)); flags } fn rpaths_to_flags(rpaths: &[String]) -> Vec { let mut ret = Vec::new(); for rpath in rpaths.iter() { - ret.push(format!("-Wl,-rpath,{}", (*rpath)[])); + ret.push(format!("-Wl,-rpath,{}", (*rpath).index(&FullRange))); } return ret; } @@ -82,14 +82,14 @@ fn get_rpaths(mut config: RPathConfig, libs: &[Path]) -> Vec } } - log_rpaths("relative", rel_rpaths[]); - log_rpaths("fallback", fallback_rpaths[]); + log_rpaths("relative", rel_rpaths.index(&FullRange)); + log_rpaths("fallback", fallback_rpaths.index(&FullRange)); let mut rpaths = rel_rpaths; - rpaths.push_all(fallback_rpaths[]); + rpaths.push_all(fallback_rpaths.index(&FullRange)); // Remove duplicates - let rpaths = minimize_rpaths(rpaths[]); + let rpaths = 
minimize_rpaths(rpaths.index(&FullRange)); return rpaths; } @@ -140,7 +140,7 @@ fn minimize_rpaths(rpaths: &[String]) -> Vec { let mut set = HashSet::new(); let mut minimized = Vec::new(); for rpath in rpaths.iter() { - if set.insert(rpath[]) { + if set.insert(rpath.index(&FullRange)) { minimized.push(rpath.clone()); } } diff --git a/src/librustc_back/sha2.rs b/src/librustc_back/sha2.rs index d606c5158d0f7..f33971a6ac0c2 100644 --- a/src/librustc_back/sha2.rs +++ b/src/librustc_back/sha2.rs @@ -140,7 +140,7 @@ impl FixedBuffer for FixedBuffer64 { if input.len() >= buffer_remaining { copy_memory( self.buffer.slice_mut(self.buffer_idx, size), - input[..buffer_remaining]); + input.index(&(0..buffer_remaining))); self.buffer_idx = 0; func(&self.buffer); i += buffer_remaining; @@ -156,7 +156,7 @@ impl FixedBuffer for FixedBuffer64 { // While we have at least a full buffer size chunk's worth of data, process that data // without copying it into the buffer while input.len() - i >= size { - func(input[i..i + size]); + func(input.index(&(i..(i + size)))); i += size; } @@ -166,7 +166,7 @@ impl FixedBuffer for FixedBuffer64 { let input_remaining = input.len() - i; copy_memory( self.buffer.slice_to_mut(input_remaining), - input[i..]); + input.index(&(i..))); self.buffer_idx += input_remaining; } @@ -188,7 +188,7 @@ impl FixedBuffer for FixedBuffer64 { fn full_buffer<'s>(&'s mut self) -> &'s [u8] { assert!(self.buffer_idx == 64); self.buffer_idx = 0; - return self.buffer[..64]; + return self.buffer.index(&(0..64)); } fn position(&self) -> uint { self.buffer_idx } diff --git a/src/librustc_back/svh.rs b/src/librustc_back/svh.rs index 86bd74d3f85e5..00f1f2402c76e 100644 --- a/src/librustc_back/svh.rs +++ b/src/librustc_back/svh.rs @@ -65,7 +65,7 @@ impl Svh { } pub fn as_str<'a>(&'a self) -> &'a str { - self.hash[] + self.hash.index(&FullRange) } pub fn calculate(metadata: &Vec, krate: &ast::Crate) -> Svh { @@ -358,7 +358,7 @@ mod svh_visitor { fn macro_name(mac: &Mac) -> token::InternedString { match &mac.node { &MacInvocTT(ref path, ref _tts, ref _stx_ctxt) => { - let s = path.segments[]; + let s = path.segments.index(&FullRange); assert_eq!(s.len(), 1); content(s[0].identifier) } diff --git a/src/librustc_back/target/mod.rs b/src/librustc_back/target/mod.rs index f14583bb9aa81..02bb6aa4b0d38 100644 --- a/src/librustc_back/target/mod.rs +++ b/src/librustc_back/target/mod.rs @@ -225,7 +225,7 @@ impl Target { Some(val) => val, None => handler.fatal((format!("Field {} in target specification is required", name)) - []) + .index(&FullRange)) } }; @@ -242,16 +242,18 @@ impl Target { macro_rules! 
key { ($key_name:ident) => ( { let name = (stringify!($key_name)).replace("_", "-"); - obj.find(name[]).map(|o| o.as_string() + obj.find(name.index(&FullRange)).map(|o| o.as_string() .map(|s| base.options.$key_name = s.to_string())); } ); ($key_name:ident, bool) => ( { let name = (stringify!($key_name)).replace("_", "-"); - obj.find(name[]).map(|o| o.as_boolean().map(|s| base.options.$key_name = s)); + obj.find(name.index(&FullRange)) + .map(|o| o.as_boolean() + .map(|s| base.options.$key_name = s)); } ); ($key_name:ident, list) => ( { let name = (stringify!($key_name)).replace("_", "-"); - obj.find(name[]).map(|o| o.as_array() + obj.find(name.index(&FullRange)).map(|o| o.as_array() .map(|v| base.options.$key_name = v.iter() .map(|a| a.as_string().unwrap().to_string()).collect() ) @@ -367,7 +369,7 @@ impl Target { let target_path = os::getenv("RUST_TARGET_PATH").unwrap_or(String::new()); - let paths = os::split_paths(target_path[]); + let paths = os::split_paths(target_path.index(&FullRange)); // FIXME 16351: add a sane default search path? for dir in paths.iter() { diff --git a/src/librustc_borrowck/borrowck/check_loans.rs b/src/librustc_borrowck/borrowck/check_loans.rs index cb77519671cf5..837630c0d61ea 100644 --- a/src/librustc_borrowck/borrowck/check_loans.rs +++ b/src/librustc_borrowck/borrowck/check_loans.rs @@ -465,7 +465,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { new_loan.span, format!("cannot borrow `{}`{} as mutable \ more than once at a time", - nl, new_loan_msg)[]) + nl, new_loan_msg).index(&FullRange)) } (ty::UniqueImmBorrow, _) => { @@ -473,7 +473,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { new_loan.span, format!("closure requires unique access to `{}` \ but {} is already borrowed{}", - nl, ol_pronoun, old_loan_msg)[]); + nl, ol_pronoun, old_loan_msg).index(&FullRange)); } (_, ty::UniqueImmBorrow) => { @@ -481,7 +481,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { new_loan.span, format!("cannot borrow `{}`{} as {} because \ previous closure requires unique access", - nl, new_loan_msg, new_loan.kind.to_user_str())[]); + nl, new_loan_msg, new_loan.kind.to_user_str()).index(&FullRange)); } (_, _) => { @@ -494,7 +494,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { new_loan.kind.to_user_str(), ol_pronoun, old_loan.kind.to_user_str(), - old_loan_msg)[]); + old_loan_msg).index(&FullRange)); } } @@ -503,7 +503,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { self.bccx.span_note( span, format!("borrow occurs due to use of `{}` in closure", - nl)[]); + nl).index(&FullRange)); } _ => { } } @@ -552,7 +552,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { self.bccx.span_note( old_loan.span, - format!("{}; {}", borrow_summary, rule_summary)[]); + format!("{}; {}", borrow_summary, rule_summary).index(&FullRange)); let old_loan_span = self.tcx().map.span(old_loan.kill_scope.node_id()); self.bccx.span_end_note(old_loan_span, @@ -622,13 +622,13 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { self.bccx.span_err( span, format!("cannot use `{}` because it was mutably borrowed", - self.bccx.loan_path_to_string(copy_path)[]) - []); + self.bccx.loan_path_to_string(copy_path).index(&FullRange)) + .index(&FullRange)); self.bccx.span_note( loan_span, format!("borrow of `{}` occurs here", - self.bccx.loan_path_to_string(&*loan_path)[]) - []); + self.bccx.loan_path_to_string(&*loan_path).index(&FullRange)) + .index(&FullRange)); } } } @@ -647,20 +647,20 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { let err_message = match move_kind { move_data::Captured => format!("cannot move `{}` into closure because it is 
borrowed", - self.bccx.loan_path_to_string(move_path)[]), + self.bccx.loan_path_to_string(move_path).index(&FullRange)), move_data::Declared | move_data::MoveExpr | move_data::MovePat => format!("cannot move out of `{}` because it is borrowed", - self.bccx.loan_path_to_string(move_path)[]) + self.bccx.loan_path_to_string(move_path).index(&FullRange)) }; - self.bccx.span_err(span, err_message[]); + self.bccx.span_err(span, err_message.index(&FullRange)); self.bccx.span_note( loan_span, format!("borrow of `{}` occurs here", - self.bccx.loan_path_to_string(&*loan_path)[]) - []); + self.bccx.loan_path_to_string(&*loan_path).index(&FullRange)) + .index(&FullRange)); } } } @@ -810,7 +810,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { self.bccx.span_err( assignment_span, format!("cannot assign to {}", - self.bccx.cmt_to_string(&*assignee_cmt))[]); + self.bccx.cmt_to_string(&*assignee_cmt)).index(&FullRange)); self.bccx.span_help( self.tcx().map.span(upvar_id.closure_expr_id), "consider changing this closure to take self by mutable reference"); @@ -819,7 +819,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { assignment_span, format!("cannot assign to {} {}", assignee_cmt.mutbl.to_user_str(), - self.bccx.cmt_to_string(&*assignee_cmt))[]); + self.bccx.cmt_to_string(&*assignee_cmt)).index(&FullRange)); } } _ => match opt_loan_path(&assignee_cmt) { @@ -829,14 +829,14 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { format!("cannot assign to {} {} `{}`", assignee_cmt.mutbl.to_user_str(), self.bccx.cmt_to_string(&*assignee_cmt), - self.bccx.loan_path_to_string(&*lp))[]); + self.bccx.loan_path_to_string(&*lp)).index(&FullRange)); } None => { self.bccx.span_err( assignment_span, format!("cannot assign to {} {}", assignee_cmt.mutbl.to_user_str(), - self.bccx.cmt_to_string(&*assignee_cmt))[]); + self.bccx.cmt_to_string(&*assignee_cmt)).index(&FullRange)); } } } @@ -956,10 +956,10 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { self.bccx.span_err( span, format!("cannot assign to `{}` because it is borrowed", - self.bccx.loan_path_to_string(loan_path))[]); + self.bccx.loan_path_to_string(loan_path)).index(&FullRange)); self.bccx.span_note( loan.span, format!("borrow of `{}` occurs here", - self.bccx.loan_path_to_string(loan_path))[]); + self.bccx.loan_path_to_string(loan_path)).index(&FullRange)); } } diff --git a/src/librustc_borrowck/borrowck/doc.rs b/src/librustc_borrowck/borrowck/doc.rs index ac2ab56b2c5f8..2100d5a9bc307 100644 --- a/src/librustc_borrowck/borrowck/doc.rs +++ b/src/librustc_borrowck/borrowck/doc.rs @@ -660,7 +660,7 @@ //! necessary to add any restrictions at all to the final result. //! //! ```text -//! RESTRICTIONS(*LV, LT, []) = [] // R-Deref-Freeze-Borrowed +//! RESTRICTIONS(*LV, LT, []) = [] // R-Deref-Freeze-Borrowed //! TYPE(LV) = &const Ty //! ``` //! diff --git a/src/librustc_borrowck/borrowck/fragments.rs b/src/librustc_borrowck/borrowck/fragments.rs index 0d86811af9f49..8b4029f30d93f 100644 --- a/src/librustc_borrowck/borrowck/fragments.rs +++ b/src/librustc_borrowck/borrowck/fragments.rs @@ -38,7 +38,7 @@ enum Fragment { // This represents the collection of all but one of the elements // from an array at the path described by the move path index. // Note that attached MovePathIndex should have mem_categorization - // of InteriorElement (i.e. array dereference `[]`). + // of InteriorElement (i.e. array dereference `.index(&FullRange)`). 
AllButOneFrom(MovePathIndex), } @@ -123,12 +123,12 @@ pub fn instrument_move_fragments<'tcx>(this: &MoveData<'tcx>, let attrs : &[ast::Attribute]; attrs = match tcx.map.find(id) { Some(ast_map::NodeItem(ref item)) => - item.attrs[], + item.attrs.index(&FullRange), Some(ast_map::NodeImplItem(&ast::MethodImplItem(ref m))) => - m.attrs[], + m.attrs.index(&FullRange), Some(ast_map::NodeTraitItem(&ast::ProvidedMethod(ref m))) => - m.attrs[], - _ => [][], + m.attrs.index(&FullRange), + _ => [].index(&FullRange), }; let span_err = @@ -144,7 +144,7 @@ pub fn instrument_move_fragments<'tcx>(this: &MoveData<'tcx>, for (i, mpi) in vec_rc.iter().enumerate() { let render = |&:| this.path_loan_path(*mpi).user_string(tcx); if span_err { - tcx.sess.span_err(sp, format!("{}: `{}`", kind, render())[]); + tcx.sess.span_err(sp, format!("{}: `{}`", kind, render()).index(&FullRange)); } if print { println!("id:{} {}[{}] `{}`", id, kind, i, render()); @@ -156,7 +156,7 @@ pub fn instrument_move_fragments<'tcx>(this: &MoveData<'tcx>, for (i, f) in vec_rc.iter().enumerate() { let render = |&:| f.loan_path_user_string(this, tcx); if span_err { - tcx.sess.span_err(sp, format!("{}: `{}`", kind, render())[]); + tcx.sess.span_err(sp, format!("{}: `{}`", kind, render()).index(&FullRange)); } if print { println!("id:{} {}[{}] `{}`", id, kind, i, render()); @@ -198,11 +198,11 @@ pub fn fixup_fragment_sets<'tcx>(this: &MoveData<'tcx>, tcx: &ty::ctxt<'tcx>) { // First, filter out duplicates moved.sort(); moved.dedup(); - debug!("fragments 1 moved: {}", path_lps(moved[])); + debug!("fragments 1 moved: {}", path_lps(moved.index(&FullRange))); assigned.sort(); assigned.dedup(); - debug!("fragments 1 assigned: {}", path_lps(assigned[])); + debug!("fragments 1 assigned: {}", path_lps(assigned.index(&FullRange))); // Second, build parents from the moved and assigned. for m in moved.iter() { @@ -222,14 +222,14 @@ pub fn fixup_fragment_sets<'tcx>(this: &MoveData<'tcx>, tcx: &ty::ctxt<'tcx>) { parents.sort(); parents.dedup(); - debug!("fragments 2 parents: {}", path_lps(parents[])); + debug!("fragments 2 parents: {}", path_lps(parents.index(&FullRange))); // Third, filter the moved and assigned fragments down to just the non-parents - moved.retain(|f| non_member(*f, parents[])); - debug!("fragments 3 moved: {}", path_lps(moved[])); + moved.retain(|f| non_member(*f, parents.index(&FullRange))); + debug!("fragments 3 moved: {}", path_lps(moved.index(&FullRange))); - assigned.retain(|f| non_member(*f, parents[])); - debug!("fragments 3 assigned: {}", path_lps(assigned[])); + assigned.retain(|f| non_member(*f, parents.index(&FullRange))); + debug!("fragments 3 assigned: {}", path_lps(assigned.index(&FullRange))); // Fourth, build the leftover from the moved, assigned, and parents. for m in moved.iter() { @@ -247,16 +247,16 @@ pub fn fixup_fragment_sets<'tcx>(this: &MoveData<'tcx>, tcx: &ty::ctxt<'tcx>) { unmoved.sort(); unmoved.dedup(); - debug!("fragments 4 unmoved: {}", frag_lps(unmoved[])); + debug!("fragments 4 unmoved: {}", frag_lps(unmoved.index(&FullRange))); // Fifth, filter the leftover fragments down to its core. 
unmoved.retain(|f| match *f { AllButOneFrom(_) => true, - Just(mpi) => non_member(mpi, parents[]) && - non_member(mpi, moved[]) && - non_member(mpi, assigned[]) + Just(mpi) => non_member(mpi, parents.index(&FullRange)) && + non_member(mpi, moved.index(&FullRange)) && + non_member(mpi, assigned.index(&FullRange)) }); - debug!("fragments 5 unmoved: {}", frag_lps(unmoved[])); + debug!("fragments 5 unmoved: {}", frag_lps(unmoved.index(&FullRange))); // Swap contents back in. fragments.unmoved_fragments = unmoved; @@ -433,7 +433,7 @@ fn add_fragment_siblings_for_extension<'tcx>(this: &MoveData<'tcx>, let msg = format!("type {} ({}) is not fragmentable", parent_ty.repr(tcx), sty_and_variant_info); let opt_span = origin_id.and_then(|id|tcx.map.opt_span(id)); - tcx.sess.opt_span_bug(opt_span, msg[]) + tcx.sess.opt_span_bug(opt_span, msg.index(&FullRange)) } } } diff --git a/src/librustc_borrowck/borrowck/gather_loans/mod.rs b/src/librustc_borrowck/borrowck/gather_loans/mod.rs index 1e9e5b22aa0ed..86498af7d950c 100644 --- a/src/librustc_borrowck/borrowck/gather_loans/mod.rs +++ b/src/librustc_borrowck/borrowck/gather_loans/mod.rs @@ -307,7 +307,7 @@ impl<'a, 'tcx> GatherLoanCtxt<'a, 'tcx> { self.tcx().sess.span_bug( cmt.span, format!("invalid borrow lifetime: {}", - loan_region)[]); + loan_region).index(&FullRange)); } }; debug!("loan_scope = {}", loan_scope); diff --git a/src/librustc_borrowck/borrowck/gather_loans/move_error.rs b/src/librustc_borrowck/borrowck/gather_loans/move_error.rs index 95c5d9415a125..1bb143e1dc8d3 100644 --- a/src/librustc_borrowck/borrowck/gather_loans/move_error.rs +++ b/src/librustc_borrowck/borrowck/gather_loans/move_error.rs @@ -120,7 +120,7 @@ fn report_cannot_move_out_of<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>, bccx.span_err( move_from.span, format!("cannot move out of {}", - bccx.cmt_to_string(&*move_from))[]); + bccx.cmt_to_string(&*move_from)).index(&FullRange)); } mc::cat_downcast(ref b, _) | @@ -132,7 +132,7 @@ fn report_cannot_move_out_of<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>, move_from.span, format!("cannot move out of type `{}`, \ which defines the `Drop` trait", - b.ty.user_string(bccx.tcx))[]); + b.ty.user_string(bccx.tcx)).index(&FullRange)); }, _ => panic!("this path should not cause illegal move") } @@ -155,10 +155,10 @@ fn note_move_destination(bccx: &BorrowckCtxt, format!("to prevent the move, \ use `ref {0}` or `ref mut {0}` to capture value by \ reference", - pat_name)[]); + pat_name).index(&FullRange)); } else { bccx.span_note(move_to_span, format!("and here (use `ref {0}` or `ref mut {0}`)", - pat_name)[]); + pat_name).index(&FullRange)); } } diff --git a/src/librustc_borrowck/borrowck/mod.rs b/src/librustc_borrowck/borrowck/mod.rs index 20949151557cb..75dee49623498 100644 --- a/src/librustc_borrowck/borrowck/mod.rs +++ b/src/librustc_borrowck/borrowck/mod.rs @@ -137,7 +137,7 @@ fn borrowck_fn(this: &mut BorrowckCtxt, check_loans::check_loans(this, &loan_dfcx, flowed_moves, - all_loans[], + all_loans.index(&FullRange), id, decl, body); @@ -505,7 +505,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { pub fn report(&self, err: BckError<'tcx>) { self.span_err( err.span, - self.bckerr_to_string(&err)[]); + self.bckerr_to_string(&err).index(&FullRange)); self.note_and_explain_bckerr(err); } @@ -527,7 +527,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { use_span, format!("{} of possibly uninitialized variable: `{}`", verb, - self.loan_path_to_string(lp))[]); + self.loan_path_to_string(lp)).index(&FullRange)); (self.loan_path_to_string(moved_lp), 
String::new()) } @@ -569,7 +569,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { format!("{} of {}moved value: `{}`", verb, msg, - nl)[]); + nl).index(&FullRange)); (ol, moved_lp_msg) } }; @@ -588,7 +588,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { self.tcx.sess.bug(format!("MoveExpr({}) maps to \ {}, not Expr", the_move.id, - r)[]) + r).index(&FullRange)) } }; let (suggestion, _) = @@ -599,7 +599,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { ol, moved_lp_msg, expr_ty.user_string(self.tcx), - suggestion)[]); + suggestion).index(&FullRange)); } move_data::MovePat => { @@ -610,7 +610,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { which is moved by default", ol, moved_lp_msg, - pat_ty.user_string(self.tcx))[]); + pat_ty.user_string(self.tcx)).index(&FullRange)); self.tcx.sess.span_help(span, "use `ref` to override"); } @@ -626,7 +626,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { self.tcx.sess.bug(format!("Captured({}) maps to \ {}, not Expr", the_move.id, - r)[]) + r).index(&FullRange)) } }; let (suggestion, help) = @@ -642,7 +642,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { ol, moved_lp_msg, expr_ty.user_string(self.tcx), - suggestion)[]); + suggestion).index(&FullRange)); self.tcx.sess.span_help(expr_span, help); } } @@ -673,7 +673,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { self.tcx.sess.span_err( span, format!("re-assignment of immutable variable `{}`", - self.loan_path_to_string(lp))[]); + self.loan_path_to_string(lp)).index(&FullRange)); self.tcx.sess.span_note(assign.span, "prior assignment occurs here"); } @@ -799,12 +799,12 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { self.tcx.sess.span_err( span, format!("{} in an aliasable location", - prefix)[]); + prefix).index(&FullRange)); } mc::AliasableClosure(id) => { self.tcx.sess.span_err(span, format!("{} in a captured outer \ - variable in an `Fn` closure", prefix)[]); + variable in an `Fn` closure", prefix).as_slice()); span_help!(self.tcx.sess, self.tcx.map.span(id), "consider changing this closure to take self by mutable reference"); } @@ -812,12 +812,12 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { mc::AliasableStaticMut(..) 
=> { self.tcx.sess.span_err( span, - format!("{} in a static location", prefix)[]); + format!("{} in a static location", prefix).index(&FullRange)); } mc::AliasableBorrowed => { self.tcx.sess.span_err( span, - format!("{} in a `&` reference", prefix)[]); + format!("{} in a `&` reference", prefix).index(&FullRange)); } } @@ -885,12 +885,12 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { note_and_explain_region( self.tcx, format!("{} would have to be valid for ", - descr)[], + descr).index(&FullRange), loan_scope, "..."); note_and_explain_region( self.tcx, - format!("...but {} is only valid for ", descr)[], + format!("...but {} is only valid for ", descr).index(&FullRange), ptr_scope, ""); } @@ -910,7 +910,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { out.push('('); self.append_loan_path_to_string(&**lp_base, out); out.push_str(DOWNCAST_PRINTED_OPERATOR); - out.push_str(ty::item_path_str(self.tcx, variant_def_id)[]); + out.push_str(ty::item_path_str(self.tcx, variant_def_id).index(&FullRange)); out.push(')'); } @@ -924,7 +924,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { } mc::PositionalField(idx) => { out.push('.'); - out.push_str(idx.to_string()[]); + out.push_str(idx.to_string().index(&FullRange)); } } } @@ -956,7 +956,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { out.push('('); self.append_autoderefd_loan_path_to_string(&**lp_base, out); out.push(':'); - out.push_str(ty::item_path_str(self.tcx, variant_def_id)[]); + out.push_str(ty::item_path_str(self.tcx, variant_def_id).index(&FullRange)); out.push(')'); } diff --git a/src/librustc_borrowck/graphviz.rs b/src/librustc_borrowck/graphviz.rs index f2c35851d0d7c..648b389414a56 100644 --- a/src/librustc_borrowck/graphviz.rs +++ b/src/librustc_borrowck/graphviz.rs @@ -60,7 +60,7 @@ impl<'a, 'tcx> DataflowLabeller<'a, 'tcx> { if seen_one { sets.push_str(" "); } else { seen_one = true; } sets.push_str(variant.short_name()); sets.push_str(": "); - sets.push_str(self.dataflow_for_variant(e, n, variant)[]); + sets.push_str(self.dataflow_for_variant(e, n, variant).index(&FullRange)); } sets } @@ -89,7 +89,7 @@ impl<'a, 'tcx> DataflowLabeller<'a, 'tcx> { set.push_str(", "); } let loan_str = self.borrowck_ctxt.loan_path_to_string(&*lp); - set.push_str(loan_str[]); + set.push_str(loan_str.index(&FullRange)); saw_some = true; true }); @@ -101,7 +101,8 @@ impl<'a, 'tcx> DataflowLabeller<'a, 'tcx> { let dfcx = &self.analysis_data.loans; let loan_index_to_path = |&mut: loan_index| { let all_loans = &self.analysis_data.all_loans; - all_loans[loan_index].loan_path() + let l: &borrowck::Loan = &all_loans[loan_index]; + l.loan_path() }; self.build_set(e, cfgidx, dfcx, loan_index_to_path) } @@ -111,7 +112,7 @@ impl<'a, 'tcx> DataflowLabeller<'a, 'tcx> { let move_index_to_path = |&mut: move_index| { let move_data = &self.analysis_data.move_data.move_data; let moves = move_data.moves.borrow(); - let the_move = &(*moves)[move_index]; + let the_move: &borrowck::move_data::Move = &(*moves)[move_index]; move_data.path_loan_path(the_move.path) }; self.build_set(e, cfgidx, dfcx, move_index_to_path) @@ -122,7 +123,7 @@ impl<'a, 'tcx> DataflowLabeller<'a, 'tcx> { let assign_index_to_path = |&mut: assign_index| { let move_data = &self.analysis_data.move_data.move_data; let assignments = move_data.var_assignments.borrow(); - let assignment = &(*assignments)[assign_index]; + let assignment: &borrowck::move_data::Assignment = &(*assignments)[assign_index]; move_data.path_loan_path(assignment.path) }; self.build_set(e, cfgidx, dfcx, assign_index_to_path) diff --git 
a/src/librustc_driver/driver.rs b/src/librustc_driver/driver.rs index 74f81ae9d6d1e..baeca7c8ffdbb 100644 --- a/src/librustc_driver/driver.rs +++ b/src/librustc_driver/driver.rs @@ -58,12 +58,12 @@ pub fn compile_input(sess: Session, let outputs = build_output_filenames(input, outdir, output, - krate.attrs[], + krate.attrs.index(&FullRange), &sess); - let id = link::find_crate_name(Some(&sess), krate.attrs[], + let id = link::find_crate_name(Some(&sess), krate.attrs.index(&FullRange), input); let expanded_crate - = match phase_2_configure_and_expand(&sess, krate, id[], + = match phase_2_configure_and_expand(&sess, krate, id.index(&FullRange), addl_plugins) { None => return, Some(k) => k @@ -75,7 +75,7 @@ pub fn compile_input(sess: Session, let mut forest = ast_map::Forest::new(expanded_crate); let ast_map = assign_node_ids_and_map(&sess, &mut forest); - write_out_deps(&sess, input, &outputs, id[]); + write_out_deps(&sess, input, &outputs, id.index(&FullRange)); if stop_after_phase_2(&sess) { return; } @@ -171,9 +171,9 @@ pub fn phase_2_configure_and_expand(sess: &Session, let time_passes = sess.time_passes(); *sess.crate_types.borrow_mut() = - collect_crate_types(sess, krate.attrs[]); + collect_crate_types(sess, krate.attrs.index(&FullRange)); *sess.crate_metadata.borrow_mut() = - collect_crate_metadata(sess, krate.attrs[]); + collect_crate_metadata(sess, krate.attrs.index(&FullRange)); time(time_passes, "recursion limit", (), |_| { middle::recursion_limit::update_recursion_limit(sess, &krate); @@ -268,8 +268,8 @@ pub fn phase_2_configure_and_expand(sess: &Session, if cfg!(windows) { _old_path = os::getenv("PATH").unwrap_or(_old_path); let mut new_path = sess.host_filesearch(PathKind::All).get_dylib_search_paths(); - new_path.extend(os::split_paths(_old_path[]).into_iter()); - os::setenv("PATH", os::join_paths(new_path[]).unwrap()); + new_path.extend(os::split_paths(_old_path.index(&FullRange)).into_iter()); + os::setenv("PATH", os::join_paths(new_path.index(&FullRange)).unwrap()); } let cfg = syntax::ext::expand::ExpansionConfig { crate_name: crate_name.to_string(), @@ -533,7 +533,7 @@ pub fn phase_5_run_llvm_passes(sess: &Session, time(sess.time_passes(), "LLVM passes", (), |_| write::run_passes(sess, trans, - sess.opts.output_types[], + sess.opts.output_types.index(&FullRange), outputs)); } @@ -547,14 +547,14 @@ pub fn phase_6_link_output(sess: &Session, outputs: &OutputFilenames) { let old_path = os::getenv("PATH").unwrap_or_else(||String::new()); let mut new_path = sess.host_filesearch(PathKind::All).get_tools_search_paths(); - new_path.extend(os::split_paths(old_path[]).into_iter()); - os::setenv("PATH", os::join_paths(new_path[]).unwrap()); + new_path.extend(os::split_paths(old_path.index(&FullRange)).into_iter()); + os::setenv("PATH", os::join_paths(new_path.index(&FullRange)).unwrap()); time(sess.time_passes(), "linking", (), |_| link::link_binary(sess, trans, outputs, - trans.link.crate_name[])); + trans.link.crate_name.index(&FullRange))); os::setenv("PATH", old_path); } @@ -643,7 +643,7 @@ fn write_out_deps(sess: &Session, // write Makefile-compatible dependency rules let files: Vec = sess.codemap().files.borrow() .iter().filter(|fmap| fmap.is_real_file()) - .map(|fmap| escape_dep_filename(fmap.name[])) + .map(|fmap| escape_dep_filename(fmap.name.index(&FullRange))) .collect(); let mut file = try!(io::File::create(&deps_filename)); for path in out_filenames.iter() { @@ -657,7 +657,7 @@ fn write_out_deps(sess: &Session, Ok(()) => {} Err(e) => { sess.fatal(format!("error writing 
dependencies to `{}`: {}", - deps_filename.display(), e)[]); + deps_filename.display(), e).index(&FullRange)); } } } @@ -728,7 +728,7 @@ pub fn collect_crate_types(session: &Session, if !res { session.warn(format!("dropping unsupported crate type `{}` \ for target `{}`", - *crate_type, session.opts.target_triple)[]); + *crate_type, session.opts.target_triple).index(&FullRange)); } res diff --git a/src/librustc_driver/lib.rs b/src/librustc_driver/lib.rs index 89b2e0f257acd..3fd5198ee4c4e 100644 --- a/src/librustc_driver/lib.rs +++ b/src/librustc_driver/lib.rs @@ -105,12 +105,12 @@ fn run_compiler(args: &[String]) { let descriptions = diagnostics::registry::Registry::new(&DIAGNOSTICS); match matches.opt_str("explain") { Some(ref code) => { - match descriptions.find_description(code[]) { + match descriptions.find_description(code.index(&FullRange)) { Some(ref description) => { println!("{}", description); } None => { - early_error(format!("no extended information for {}", code)[]); + early_error(format!("no extended information for {}", code).index(&FullRange)); } } return; @@ -136,7 +136,7 @@ fn run_compiler(args: &[String]) { early_error("no input filename given"); } 1u => { - let ifile = matches.free[0][]; + let ifile = matches.free[0].index(&FullRange); if ifile == "-" { let contents = io::stdin().read_to_end().unwrap(); let src = String::from_utf8(contents).unwrap(); @@ -313,7 +313,7 @@ Available lint options: for lint in lints.into_iter() { let name = lint.name_lower().replace("_", "-"); println!(" {} {:7.7} {}", - padded(name[]), lint.default_level.as_str(), lint.desc); + padded(name.index(&FullRange)), lint.default_level.as_str(), lint.desc); } println!("\n"); }; @@ -343,7 +343,7 @@ Available lint options: let desc = to.into_iter().map(|x| x.as_str().replace("_", "-")) .collect::>().connect(", "); println!(" {} {}", - padded(name[]), desc); + padded(name.index(&FullRange)), desc); } println!("\n"); }; @@ -409,7 +409,7 @@ pub fn handle_options(mut args: Vec) -> Option { } let matches = - match getopts::getopts(args[], config::optgroups()[]) { + match getopts::getopts(args.index(&FullRange), config::optgroups().index(&FullRange)) { Ok(m) => m, Err(f_stable_attempt) => { // redo option parsing, including unstable options this time, @@ -583,7 +583,7 @@ pub fn monitor(f: F) { "run with `RUST_BACKTRACE=1` for a backtrace".to_string(), ]; for note in xs.iter() { - emitter.emit(None, note[], None, diagnostic::Note) + emitter.emit(None, note.index(&FullRange), None, diagnostic::Note) } match r.read_to_string() { @@ -591,7 +591,7 @@ pub fn monitor(f: F) { Err(e) => { emitter.emit(None, format!("failed to read internal \ - stderr: {}", e)[], + stderr: {}", e).index(&FullRange), None, diagnostic::Error) } diff --git a/src/librustc_driver/pretty.rs b/src/librustc_driver/pretty.rs index 61fd7d16ab7dd..06ef06a214f48 100644 --- a/src/librustc_driver/pretty.rs +++ b/src/librustc_driver/pretty.rs @@ -296,7 +296,7 @@ impl<'tcx> pprust::PpAnn for TypedAnnotation<'tcx> { try!(pp::word(&mut s.s, ppaux::ty_to_string( tcx, - ty::expr_ty(tcx, expr))[])); + ty::expr_ty(tcx, expr)).index(&FullRange))); s.pclose() } _ => Ok(()) @@ -370,7 +370,7 @@ impl UserIdentifiedItem { ItemViaNode(node_id) => NodesMatchingDirect(Some(node_id).into_iter()), ItemViaPath(ref parts) => - NodesMatchingSuffix(map.nodes_matching_suffix(parts[])), + NodesMatchingSuffix(map.nodes_matching_suffix(parts.index(&FullRange))), } } @@ -382,7 +382,7 @@ impl UserIdentifiedItem { user_option, self.reconstructed_input(), is_wrong_because); - 
sess.fatal(message[]) + sess.fatal(message.index(&FullRange)) }; let mut saw_node = ast::DUMMY_NODE_ID; @@ -509,7 +509,7 @@ pub fn pretty_print_input(sess: Session, let is_expanded = needs_expansion(&ppm); let compute_ast_map = needs_ast_map(&ppm, &opt_uii); let krate = if compute_ast_map { - match driver::phase_2_configure_and_expand(&sess, krate, id[], None) { + match driver::phase_2_configure_and_expand(&sess, krate, id.index(&FullRange), None) { None => return, Some(k) => k } @@ -528,7 +528,7 @@ pub fn pretty_print_input(sess: Session, }; let src_name = driver::source_name(input); - let src = sess.codemap().get_filemap(src_name[]) + let src = sess.codemap().get_filemap(src_name.index(&FullRange)) .src.as_bytes().to_vec(); let mut rdr = MemReader::new(src); @@ -589,7 +589,7 @@ pub fn pretty_print_input(sess: Session, debug!("pretty printing flow graph for {}", opt_uii); let uii = opt_uii.unwrap_or_else(|| { sess.fatal(format!("`pretty flowgraph=..` needs NodeId (int) or - unique path suffix (b::c::d)")[]) + unique path suffix (b::c::d)").index(&FullRange)) }); let ast_map = ast_map.expect("--pretty flowgraph missing ast_map"); @@ -597,7 +597,7 @@ pub fn pretty_print_input(sess: Session, let node = ast_map.find(nodeid).unwrap_or_else(|| { sess.fatal(format!("--pretty flowgraph couldn't find id: {}", - nodeid)[]) + nodeid).index(&FullRange)) }); let code = blocks::Code::from_node(node); @@ -615,8 +615,8 @@ pub fn pretty_print_input(sess: Session, // point to what was found, if there's an // accessible span. match ast_map.opt_span(nodeid) { - Some(sp) => sess.span_fatal(sp, message[]), - None => sess.fatal(message[]) + Some(sp) => sess.span_fatal(sp, message.index(&FullRange)), + None => sess.fatal(message.index(&FullRange)) } } } diff --git a/src/librustc_driver/test.rs b/src/librustc_driver/test.rs index b1e65dce6045a..3cacc1e232a8b 100644 --- a/src/librustc_driver/test.rs +++ b/src/librustc_driver/test.rs @@ -279,7 +279,7 @@ impl<'a, 'tcx> Env<'a, 'tcx> { pub fn t_param(&self, space: subst::ParamSpace, index: u32) -> Ty<'tcx> { let name = format!("T{}", index); - ty::mk_param(self.infcx.tcx, space, index, token::intern(name[])) + ty::mk_param(self.infcx.tcx, space, index, token::intern(name.index(&FullRange))) } pub fn re_early_bound(&self, diff --git a/src/librustc_resolve/build_reduced_graph.rs b/src/librustc_resolve/build_reduced_graph.rs index 10788f9f7cb83..0a64404a7828e 100644 --- a/src/librustc_resolve/build_reduced_graph.rs +++ b/src/librustc_resolve/build_reduced_graph.rs @@ -221,14 +221,14 @@ impl<'a, 'b:'a, 'tcx:'b> GraphBuilder<'a, 'b, 'tcx> { self.resolve_error(sp, format!("duplicate definition of {} `{}`", namespace_error_to_string(duplicate_type), - token::get_name(name))[]); + token::get_name(name)).index(&FullRange)); { let r = child.span_for_namespace(ns); for sp in r.iter() { self.session.span_note(*sp, format!("first definition of {} `{}` here", namespace_error_to_string(duplicate_type), - token::get_name(name))[]); + token::get_name(name)).index(&FullRange)); } } } @@ -1201,7 +1201,7 @@ impl<'a, 'b:'a, 'tcx:'b> GraphBuilder<'a, 'b, 'tcx> { debug!("(building import directive) building import \ directive: {}::{}", self.names_to_string(module_.imports.borrow().last().unwrap() - .module_path[]), + .module_path.index(&FullRange)), token::get_name(target)); let mut import_resolutions = module_.import_resolutions diff --git a/src/librustc_resolve/lib.rs b/src/librustc_resolve/lib.rs index 58102fe5629d9..debbf2767ac95 100644 --- a/src/librustc_resolve/lib.rs +++ 
b/src/librustc_resolve/lib.rs @@ -1071,10 +1071,10 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { let msg = format!("unresolved import `{}`{}", self.import_path_to_string( import_directive.module_path - [], + .index(&FullRange), import_directive.subclass), help); - self.resolve_error(span, msg[]); + self.resolve_error(span, msg.index(&FullRange)); } Indeterminate => break, // Bail out. We'll come around next time. Success(()) => () // Good. Continue. @@ -1104,7 +1104,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { .iter() .map(|seg| seg.identifier.name) .collect(); - self.names_to_string(names[]) + self.names_to_string(names.index(&FullRange)) } fn import_directive_subclass_to_string(&mut self, @@ -1168,7 +1168,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { let module_path = &import_directive.module_path; debug!("(resolving import for module) resolving import `{}::...` in `{}`", - self.names_to_string(module_path[]), + self.names_to_string(module_path.index(&FullRange)), self.module_to_string(&*module_)); // First, resolve the module path for the directive, if necessary. @@ -1177,7 +1177,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { Some((self.graph_root.get_module(), LastMod(AllPublic))) } else { match self.resolve_module_path(module_.clone(), - module_path[], + module_path.index(&FullRange), DontUseLexicalScope, import_directive.span, ImportSearch) { @@ -1774,7 +1774,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { ValueNS => "value", }, token::get_name(name).get()); - self.session.span_err(import_span, msg[]); + self.session.span_err(import_span, msg.index(&FullRange)); } Some(_) | None => {} } @@ -1789,7 +1789,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { if !name_bindings.defined_in_namespace_with(namespace, IMPORTABLE) { let msg = format!("`{}` is not directly importable", token::get_name(name)); - self.session.span_err(import_span, msg[]); + self.session.span_err(import_span, msg.index(&FullRange)); } } @@ -1814,7 +1814,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { crate in this module \ (maybe you meant `use {0}::*`?)", token::get_name(name).get()); - self.session.span_err(import_span, msg[]); + self.session.span_err(import_span, msg.index(&FullRange)); } Some(_) | None => {} } @@ -1836,7 +1836,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { let msg = format!("import `{}` conflicts with value \ in this module", token::get_name(name).get()); - self.session.span_err(import_span, msg[]); + self.session.span_err(import_span, msg.index(&FullRange)); if let Some(span) = value.value_span { self.session.span_note(span, "conflicting value here"); @@ -1854,7 +1854,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { let msg = format!("import `{}` conflicts with type in \ this module", token::get_name(name).get()); - self.session.span_err(import_span, msg[]); + self.session.span_err(import_span, msg.index(&FullRange)); if let Some(span) = ty.type_span { self.session.span_note(span, "note conflicting type here") @@ -1867,7 +1867,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { let msg = format!("inherent implementations \ are only allowed on types \ defined in the current module"); - self.session.span_err(span, msg[]); + self.session.span_err(span, msg.index(&FullRange)); self.session.span_note(import_span, "import from other module here") } @@ -1876,7 +1876,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { let msg = format!("import `{}` conflicts with existing \ submodule", token::get_name(name).get()); - self.session.span_err(import_span, msg[]); + self.session.span_err(import_span, msg.index(&FullRange)); if let Some(span) = ty.type_span { self.session.span_note(span, 
"note conflicting module here") @@ -1906,7 +1906,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { .span_err(span, format!("an external crate named `{}` has already \ been imported into this module", - token::get_name(name).get())[]); + token::get_name(name).get()).index(&FullRange)); } } @@ -1925,7 +1925,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { format!("the name `{}` conflicts with an external \ crate that has been imported into this \ module", - token::get_name(name).get())[]); + token::get_name(name).get()).index(&FullRange)); } } @@ -1973,7 +1973,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { let segment_name = token::get_name(name); let module_name = self.module_to_string(&*search_module); let mut span = span; - let msg = if "???" == module_name[] { + let msg = if "???" == module_name.index(&FullRange) { span.hi = span.lo + Pos::from_uint(segment_name.get().len()); match search_parent_externals(name, @@ -2086,14 +2086,14 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { match module_prefix_result { Failed(None) => { let mpath = self.names_to_string(module_path); - let mpath = mpath[]; + let mpath = mpath.index(&FullRange); match mpath.rfind(':') { Some(idx) => { let msg = format!("Could not find `{}` in `{}`", // idx +- 1 to account for the // colons on either side - mpath[idx + 1..], - mpath[0..idx - 1]); + mpath.index(&((idx + 1)..)), + mpath.index(&(0..(idx - 1)))); return Failed(Some((span, msg))); }, None => { @@ -2268,7 +2268,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { true) { Failed(Some((span, msg))) => self.resolve_error(span, format!("failed to resolve. {}", - msg)[]), + msg).index(&FullRange)), Failed(None) => (), // Continue up the search chain. Indeterminate => { // We couldn't see through the higher scope because of an @@ -2528,7 +2528,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { } else { let err = format!("unresolved import (maybe you meant `{}::*`?)", sn); - self.resolve_error((*imports)[index].span, err[]); + self.resolve_error((*imports)[index].span, err.index(&FullRange)); } } @@ -2620,7 +2620,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { match def_like { DlDef(d @ DefUpvar(..)) => { self.session.span_bug(span, - format!("unexpected {} in bindings", d)[]) + format!("unexpected {} in bindings", d).index(&FullRange)) } DlDef(d @ DefLocal(_)) => { let node_id = d.def_id().node; @@ -2766,7 +2766,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { for (i, rib) in ribs.iter().enumerate().rev() { match rib.bindings.get(&name).cloned() { Some(def_like) => { - return self.upvarify(ribs[i + 1..], def_like, span); + return self.upvarify(ribs.index(&((i + 1)..)), def_like, span); } None => { // Continue. 
@@ -2859,7 +2859,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { generics, implemented_traits, &**self_type, - impl_items[]); + impl_items.index(&FullRange)); } ItemTrait(_, ref generics, ref bounds, ref trait_items) => { @@ -2937,7 +2937,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { ItemStruct(ref struct_def, ref generics) => { self.resolve_struct(item.id, generics, - struct_def.fields[]); + struct_def.fields.index(&FullRange)); } ItemMod(ref module_) => { @@ -3010,7 +3010,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { parameter in this type \ parameter list", token::get_name( - name))[]) + name)).index(&FullRange)) } seen_bindings.insert(name); @@ -3182,7 +3182,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { }; let msg = format!("attempt to {} a nonexistent trait `{}`", usage_str, path_str); - self.resolve_error(trait_reference.path.span, msg[]); + self.resolve_error(trait_reference.path.span, msg.index(&FullRange)); } Some(def) => { match def { @@ -3194,14 +3194,14 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { self.resolve_error(trait_reference.path.span, format!("`{}` is not a trait", self.path_names_to_string( - &trait_reference.path))[]); + &trait_reference.path)).index(&FullRange)); // If it's a typedef, give a note if let DefTy(..) = def { self.session.span_note( trait_reference.path.span, format!("`type` aliases cannot be used for traits") - []); + .index(&FullRange)); } } } @@ -3398,7 +3398,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { self.resolve_error(span, format!("method `{}` is not a member of trait `{}`", token::get_name(name), - path_str)[]); + path_str).index(&FullRange)); } } } @@ -3467,7 +3467,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { format!("variable `{}` from pattern #1 is \ not bound in pattern #{}", token::get_name(key), - i + 1)[]); + i + 1).index(&FullRange)); } Some(binding_i) => { if binding_0.binding_mode != binding_i.binding_mode { @@ -3476,7 +3476,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { format!("variable `{}` is bound with different \ mode in pattern #{} than in pattern #1", token::get_name(key), - i + 1)[]); + i + 1).index(&FullRange)); } } } @@ -3489,7 +3489,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { format!("variable `{}` from pattern {}{} is \ not bound in pattern {}1", token::get_name(key), - "#", i + 1, "#")[]); + "#", i + 1, "#").index(&FullRange)); } } } @@ -3604,7 +3604,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { None => { let msg = format!("use of undeclared type name `{}`", self.path_names_to_string(path)); - self.resolve_error(ty.span, msg[]); + self.resolve_error(ty.span, msg.index(&FullRange)); } } } @@ -3676,7 +3676,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { format!("declaration of `{}` shadows an enum \ variant or unit-like struct in \ scope", - token::get_name(renamed))[]); + token::get_name(renamed)).index(&FullRange)); } FoundConst(ref def, lp) if mode == RefutableMode => { debug!("(resolving pattern) resolving `{}` to \ @@ -3728,7 +3728,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { list", token::get_ident( ident)) - []) + .index(&FullRange)) } else if bindings_list.get(&renamed) == Some(&pat_id) { // Then this is a duplicate variable in the @@ -3737,7 +3737,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { format!("identifier `{}` is bound \ more than once in the same \ pattern", - token::get_ident(ident))[]); + token::get_ident(ident)).index(&FullRange)); } // Else, not bound in the same pattern: do // nothing. 
@@ -3763,13 +3763,13 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { self.resolve_error(path.span, format!("`{}` is not an enum variant, struct or const", token::get_ident( - path.segments.last().unwrap().identifier))[]); + path.segments.last().unwrap().identifier)).as_slice()); } None => { self.resolve_error(path.span, format!("unresolved enum variant, struct or const `{}`", token::get_ident( - path.segments.last().unwrap().identifier))[]); + path.segments.last().unwrap().identifier)).as_slice()); } } @@ -3800,7 +3800,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { def: {}", result); let msg = format!("`{}` does not name a structure", self.path_names_to_string(path)); - self.resolve_error(path.span, msg[]); + self.resolve_error(path.span, msg.index(&FullRange)); } } } @@ -3862,7 +3862,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { match err { Some((span, msg)) => { self.resolve_error(span, format!("failed to resolve: {}", - msg)[]); + msg).index(&FullRange)); } None => () } @@ -4057,7 +4057,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { let last_private; let module = self.current_module.clone(); match self.resolve_module_path(module, - module_path[], + module_path.index(&FullRange), UseLexicalScope, path.span, PathSearch) { @@ -4072,7 +4072,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { }; self.resolve_error(span, format!("failed to resolve. {}", - msg)[]); + msg).index(&FullRange)); return None; } Indeterminate => panic!("indeterminate unexpected"), @@ -4115,7 +4115,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { let containing_module; let last_private; match self.resolve_module_path_from_root(root_module, - module_path[], + module_path.index(&FullRange), 0, path.span, PathSearch, @@ -4125,13 +4125,13 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { Some((span, msg)) => (span, msg), None => { let msg = format!("Use of undeclared module `::{}`", - self.names_to_string(module_path[])); + self.names_to_string(module_path.index(&FullRange))); (path.span, msg) } }; self.resolve_error(span, format!("failed to resolve. {}", - msg)[]); + msg).index(&FullRange)); return None; } @@ -4172,7 +4172,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { } TypeNS => { let name = ident.name; - self.search_ribs(self.type_ribs[], name, span) + self.search_ribs(self.type_ribs.index(&FullRange), name, span) } }; @@ -4227,7 +4227,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { match err { Some((span, msg)) => self.resolve_error(span, format!("failed to resolve. {}", - msg)[]), + msg).index(&FullRange)), None => () } @@ -4284,7 +4284,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { } } else { match this.resolve_module_path(root, - name_path[], + name_path.index(&FullRange), UseLexicalScope, span, PathSearch) { @@ -4322,7 +4322,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { let name_path = path.segments.iter().map(|seg| seg.identifier.name).collect::>(); // Look for a method in the current self type's impl module. 
- match get_module(self, path.span, name_path[]) { + match get_module(self, path.span, name_path.index(&FullRange)) { Some(module) => match module.children.borrow().get(&name) { Some(binding) => { let p_str = self.path_names_to_string(&path); @@ -4533,7 +4533,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { def: {}", result); let msg = format!("`{}` does not name a structure", self.path_names_to_string(path)); - self.resolve_error(path.span, msg[]); + self.resolve_error(path.span, msg.index(&FullRange)); } } @@ -4594,7 +4594,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { self.resolve_error( expr.span, format!("use of undeclared label `{}`", - token::get_ident(label))[]) + token::get_ident(label)).index(&FullRange)) } Some(DlDef(def @ DefLabel(_))) => { // Since this def is a label, it is never read. @@ -4733,7 +4733,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { then {}", node_id, *entry.get(), - def)[]); + def).index(&FullRange)); }, Vacant(entry) => { entry.insert(def); }, } @@ -4749,7 +4749,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { self.resolve_error(pat.span, format!("cannot use `ref` binding mode \ with {}", - descr)[]); + descr).index(&FullRange)); } } } @@ -4785,7 +4785,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { return "???".to_string(); } self.names_to_string(names.into_iter().rev() - .collect::>()[]) + .collect::>().index(&FullRange)) } #[allow(dead_code)] // useful for debugging diff --git a/src/librustc_trans/back/link.rs b/src/librustc_trans/back/link.rs index 8fbeadc55b387..aff5f00e64eb7 100644 --- a/src/librustc_trans/back/link.rs +++ b/src/librustc_trans/back/link.rs @@ -128,7 +128,7 @@ pub fn find_crate_name(sess: Option<&Session>, attrs: &[ast::Attribute], input: &Input) -> String { let validate = |&: s: String, span: Option| { - creader::validate_crate_name(sess, s[], span); + creader::validate_crate_name(sess, s.index(&FullRange), span); s }; @@ -146,7 +146,7 @@ pub fn find_crate_name(sess: Option<&Session>, let msg = format!("--crate-name and #[crate_name] are \ required to match, but `{}` != `{}`", s, name); - sess.span_err(attr.span, msg[]); + sess.span_err(attr.span, msg.index(&FullRange)); } } return validate(s.clone(), None); @@ -192,17 +192,17 @@ fn symbol_hash<'tcx>(tcx: &ty::ctxt<'tcx>, // to be independent of one another in the crate. symbol_hasher.reset(); - symbol_hasher.input_str(link_meta.crate_name[]); + symbol_hasher.input_str(link_meta.crate_name.index(&FullRange)); symbol_hasher.input_str("-"); symbol_hasher.input_str(link_meta.crate_hash.as_str()); for meta in tcx.sess.crate_metadata.borrow().iter() { - symbol_hasher.input_str(meta[]); + symbol_hasher.input_str(meta.index(&FullRange)); } symbol_hasher.input_str("-"); - symbol_hasher.input_str(encoder::encoded_ty(tcx, t)[]); + symbol_hasher.input_str(encoder::encoded_ty(tcx, t).index(&FullRange)); // Prefix with 'h' so that it never blends into adjacent digits let mut hash = String::from_str("h"); - hash.push_str(truncated_hash_result(symbol_hasher)[]); + hash.push_str(truncated_hash_result(symbol_hasher).index(&FullRange)); hash } @@ -251,7 +251,7 @@ pub fn sanitize(s: &str) -> String { let mut tstr = String::new(); for c in c.escape_unicode() { tstr.push(c) } result.push('$'); - result.push_str(tstr[1..]); + result.push_str(tstr.index(&(1..))); } } } @@ -260,7 +260,7 @@ pub fn sanitize(s: &str) -> String { if result.len() > 0u && result.as_bytes()[0] != '_' as u8 && ! 
(result.as_bytes()[0] as char).is_xid_start() { - return format!("_{}", result[]); + return format!("_{}", result.index(&FullRange)); } return result; @@ -286,12 +286,12 @@ pub fn mangle>(mut path: PI, fn push(n: &mut String, s: &str) { let sani = sanitize(s); - n.push_str(format!("{}{}", sani.len(), sani)[]); + n.push_str(format!("{}{}", sani.len(), sani).index(&FullRange)); } // First, connect each component with pairs. for e in path { - push(&mut n, token::get_name(e.name()).get()[]) + push(&mut n, token::get_name(e.name()).get().index(&FullRange)) } match hash { @@ -329,17 +329,17 @@ pub fn mangle_exported_name<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, path: PathEl hash.push(EXTRA_CHARS.as_bytes()[extra2] as char); hash.push(EXTRA_CHARS.as_bytes()[extra3] as char); - exported_name(path, hash[]) + exported_name(path, hash.index(&FullRange)) } pub fn mangle_internal_name_by_type_and_seq<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>, name: &str) -> String { let s = ppaux::ty_to_string(ccx.tcx(), t); - let path = [PathName(token::intern(s[])), + let path = [PathName(token::intern(s.index(&FullRange))), gensym_name(name)]; let hash = get_symbol_hash(ccx, t); - mangle(ast_map::Values(path.iter()), Some(hash[])) + mangle(ast_map::Values(path.iter()), Some(hash.index(&FullRange))) } pub fn mangle_internal_name_by_path_and_seq(path: PathElems, flav: &str) -> String { @@ -359,7 +359,7 @@ pub fn remove(sess: &Session, path: &Path) { Err(e) => { sess.err(format!("failed to remove {}: {}", path.display(), - e)[]); + e).index(&FullRange)); } } } @@ -374,7 +374,7 @@ pub fn link_binary(sess: &Session, for &crate_type in sess.crate_types.borrow().iter() { if invalid_output_for_target(sess, crate_type) { sess.bug(format!("invalid output type `{}` for target os `{}`", - crate_type, sess.opts.target_triple)[]); + crate_type, sess.opts.target_triple).index(&FullRange)); } let out_file = link_binary_output(sess, trans, crate_type, outputs, crate_name); @@ -439,8 +439,8 @@ pub fn filename_for_input(sess: &Session, out_filename.with_filename(format!("lib{}.rlib", libname)) } config::CrateTypeDylib => { - let (prefix, suffix) = (sess.target.target.options.dll_prefix[], - sess.target.target.options.dll_suffix[]); + let (prefix, suffix) = (sess.target.target.options.dll_prefix.index(&FullRange), + sess.target.target.options.dll_suffix.index(&FullRange)); out_filename.with_filename(format!("{}{}{}", prefix, libname, @@ -450,7 +450,7 @@ pub fn filename_for_input(sess: &Session, out_filename.with_filename(format!("lib{}.a", libname)) } config::CrateTypeExecutable => { - let suffix = sess.target.target.options.exe_suffix[]; + let suffix = sess.target.target.options.exe_suffix.index(&FullRange); out_filename.with_filename(format!("{}{}", libname, suffix)) } } @@ -479,12 +479,12 @@ fn link_binary_output(sess: &Session, if !out_is_writeable { sess.fatal(format!("output file {} is not writeable -- check its \ permissions.", - out_filename.display())[]); + out_filename.display()).index(&FullRange)); } else if !obj_is_writeable { sess.fatal(format!("object file {} is not writeable -- check its \ permissions.", - obj_filename.display())[]); + obj_filename.display()).index(&FullRange)); } match crate_type { @@ -539,7 +539,7 @@ fn link_rlib<'a>(sess: &'a Session, for &(ref l, kind) in sess.cstore.get_used_libraries().borrow().iter() { match kind { cstore::NativeStatic => { - ab.add_native_library(l[]).unwrap(); + ab.add_native_library(l.index(&FullRange)).unwrap(); } cstore::NativeFramework | cstore::NativeUnknown => {} 
} @@ -587,12 +587,12 @@ fn link_rlib<'a>(sess: &'a Session, let tmpdir = TempDir::new("rustc").ok().expect("needs a temp dir"); let metadata = tmpdir.path().join(METADATA_FILENAME); match fs::File::create(&metadata).write(trans.metadata - []) { + .index(&FullRange)) { Ok(..) => {} Err(e) => { sess.err(format!("failed to write {}: {}", metadata.display(), - e)[]); + e).index(&FullRange)); sess.abort_if_errors(); } } @@ -608,27 +608,27 @@ fn link_rlib<'a>(sess: &'a Session, // extension to it. This is to work around a bug in LLDB that // would cause it to crash if the name of a file in an archive // was exactly 16 bytes. - let bc_filename = obj_filename.with_extension(format!("{}.bc", i)[]); + let bc_filename = obj_filename.with_extension(format!("{}.bc", i).as_slice()); let bc_deflated_filename = obj_filename.with_extension( - format!("{}.bytecode.deflate", i)[]); + format!("{}.bytecode.deflate", i).index(&FullRange)); let bc_data = match fs::File::open(&bc_filename).read_to_end() { Ok(buffer) => buffer, Err(e) => sess.fatal(format!("failed to read bytecode: {}", - e)[]) + e).index(&FullRange)) }; - let bc_data_deflated = match flate::deflate_bytes(bc_data[]) { + let bc_data_deflated = match flate::deflate_bytes(bc_data.index(&FullRange)) { Some(compressed) => compressed, None => sess.fatal(format!("failed to compress bytecode from {}", - bc_filename.display())[]) + bc_filename.display()).index(&FullRange)) }; let mut bc_file_deflated = match fs::File::create(&bc_deflated_filename) { Ok(file) => file, Err(e) => { sess.fatal(format!("failed to create compressed bytecode \ - file: {}", e)[]) + file: {}", e).index(&FullRange)) } }; @@ -637,7 +637,7 @@ fn link_rlib<'a>(sess: &'a Session, Ok(()) => {} Err(e) => { sess.err(format!("failed to write compressed bytecode: \ - {}", e)[]); + {}", e).index(&FullRange)); sess.abort_if_errors() } }; @@ -677,7 +677,7 @@ fn write_rlib_bytecode_object_v1(writer: &mut T, try! { writer.write(RLIB_BYTECODE_OBJECT_MAGIC) }; try! { writer.write_le_u32(1) }; try! { writer.write_le_u64(bc_data_deflated_size) }; - try! { writer.write(bc_data_deflated[]) }; + try! 
{ writer.write(bc_data_deflated.index(&FullRange)) }; let number_of_bytes_written_so_far = RLIB_BYTECODE_OBJECT_MAGIC.len() + // magic id @@ -728,11 +728,11 @@ fn link_staticlib(sess: &Session, obj_filename: &Path, out_filename: &Path) { let p = match *path { Some(ref p) => p.clone(), None => { sess.err(format!("could not find rlib for: `{}`", - name)[]); + name).index(&FullRange)); continue } }; - ab.add_rlib(&p, name[], sess.lto()).unwrap(); + ab.add_rlib(&p, name.index(&FullRange), sess.lto()).unwrap(); let native_libs = csearch::get_native_libraries(&sess.cstore, cnum); all_native_libs.extend(native_libs.into_iter()); @@ -754,7 +754,7 @@ fn link_staticlib(sess: &Session, obj_filename: &Path, out_filename: &Path) { cstore::NativeUnknown => "library", cstore::NativeFramework => "framework", }; - sess.note(format!("{}: {}", name, *lib)[]); + sess.note(format!("{}: {}", name, *lib).index(&FullRange)); } } @@ -768,12 +768,12 @@ fn link_natively(sess: &Session, trans: &CrateTranslation, dylib: bool, // The invocations of cc share some flags across platforms let pname = get_cc_prog(sess); - let mut cmd = Command::new(pname[]); + let mut cmd = Command::new(pname.index(&FullRange)); - cmd.args(sess.target.target.options.pre_link_args[]); + cmd.args(sess.target.target.options.pre_link_args.index(&FullRange)); link_args(&mut cmd, sess, dylib, tmpdir.path(), trans, obj_filename, out_filename); - cmd.args(sess.target.target.options.post_link_args[]); + cmd.args(sess.target.target.options.post_link_args.index(&FullRange)); if !sess.target.target.options.no_compiler_rt { cmd.arg("-lcompiler-rt"); } @@ -793,11 +793,11 @@ fn link_natively(sess: &Session, trans: &CrateTranslation, dylib: bool, if !prog.status.success() { sess.err(format!("linking with `{}` failed: {}", pname, - prog.status)[]); - sess.note(format!("{}", &cmd)[]); + prog.status).index(&FullRange)); + sess.note(format!("{}", &cmd).index(&FullRange)); let mut output = prog.error.clone(); - output.push_all(prog.output[]); - sess.note(str::from_utf8(output[]).unwrap()); + output.push_all(prog.output.index(&FullRange)); + sess.note(str::from_utf8(output.index(&FullRange)).unwrap()); sess.abort_if_errors(); } debug!("linker stderr:\n{}", String::from_utf8(prog.error).unwrap()); @@ -806,7 +806,7 @@ fn link_natively(sess: &Session, trans: &CrateTranslation, dylib: bool, Err(e) => { sess.err(format!("could not exec the linker `{}`: {}", pname, - e)[]); + e).index(&FullRange)); sess.abort_if_errors(); } } @@ -818,7 +818,7 @@ fn link_natively(sess: &Session, trans: &CrateTranslation, dylib: bool, match Command::new("dsymutil").arg(out_filename).output() { Ok(..) 
=> {} Err(e) => { - sess.err(format!("failed to run dsymutil: {}", e)[]); + sess.err(format!("failed to run dsymutil: {}", e).index(&FullRange)); sess.abort_if_errors(); } } @@ -867,7 +867,7 @@ fn link_args(cmd: &mut Command, let mut v = b"-Wl,-force_load,".to_vec(); v.push_all(morestack.as_vec()); - cmd.arg(v[]); + cmd.arg(v.index(&FullRange)); } else { cmd.args(&["-Wl,--whole-archive", "-lmorestack", "-Wl,--no-whole-archive"]); } @@ -992,7 +992,7 @@ fn link_args(cmd: &mut Command, if sess.opts.cg.rpath { let mut v = "-Wl,-install_name,@rpath/".as_bytes().to_vec(); v.push_all(out_filename.filename().unwrap()); - cmd.arg(v[]); + cmd.arg(v.index(&FullRange)); } } else { cmd.arg("-shared"); @@ -1004,7 +1004,7 @@ fn link_args(cmd: &mut Command, // addl_lib_search_paths if sess.opts.cg.rpath { let sysroot = sess.sysroot(); - let target_triple = sess.opts.target_triple[]; + let target_triple = sess.opts.target_triple.index(&FullRange); let get_install_prefix_lib_path = |:| { let install_prefix = option_env!("CFG_PREFIX").expect("CFG_PREFIX"); let tlib = filesearch::relative_target_lib_path(sysroot, target_triple); @@ -1021,14 +1021,14 @@ fn link_args(cmd: &mut Command, get_install_prefix_lib_path: get_install_prefix_lib_path, realpath: ::util::fs::realpath }; - cmd.args(rpath::get_rpath_flags(rpath_config)[]); + cmd.args(rpath::get_rpath_flags(rpath_config).index(&FullRange)); } // Finally add all the linker arguments provided on the command line along // with any #[link_args] attributes found inside the crate let empty = Vec::new(); - cmd.args(sess.opts.cg.link_args.as_ref().unwrap_or(&empty)[]); - cmd.args(used_link_args[]); + cmd.args(sess.opts.cg.link_args.as_ref().unwrap_or(&empty).index(&FullRange)); + cmd.args(used_link_args.index(&FullRange)); } // # Native library linking @@ -1082,14 +1082,14 @@ fn add_local_native_libraries(cmd: &mut Command, sess: &Session) { } else { // -force_load is the OSX equivalent of --whole-archive, but it // involves passing the full path to the library to link. - let lib = archive::find_library(l[], - sess.target.target.options.staticlib_prefix[], - sess.target.target.options.staticlib_suffix[], - search_path[], + let lib = archive::find_library(l.index(&FullRange), + sess.target.target.options.staticlib_prefix.as_slice(), + sess.target.target.options.staticlib_suffix.as_slice(), + search_path.index(&FullRange), &sess.diagnostic().handler); let mut v = b"-Wl,-force_load,".to_vec(); v.push_all(lib.as_vec()); - cmd.arg(v[]); + cmd.arg(v.index(&FullRange)); } } if takes_hints { @@ -1102,7 +1102,7 @@ fn add_local_native_libraries(cmd: &mut Command, sess: &Session) { cmd.arg(format!("-l{}", l)); } cstore::NativeFramework => { - cmd.arg("-framework").arg(l[]); + cmd.arg("-framework").arg(l.index(&FullRange)); } cstore::NativeStatic => unreachable!(), } @@ -1158,7 +1158,7 @@ fn add_upstream_rust_crates(cmd: &mut Command, sess: &Session, // Converts a library file-stem into a cc -l argument fn unlib<'a>(config: &config::Config, stem: &'a [u8]) -> &'a [u8] { if stem.starts_with("lib".as_bytes()) && !config.target.options.is_like_windows { - stem[3..] + stem.index(&(3..)) } else { stem } @@ -1183,9 +1183,9 @@ fn add_upstream_rust_crates(cmd: &mut Command, sess: &Session, // against the archive. 
if sess.lto() { let name = cratepath.filename_str().unwrap(); - let name = name[3..name.len() - 5]; // chop off lib/.rlib + let name = name.index(&(3..(name.len() - 5))); // chop off lib/.rlib time(sess.time_passes(), - format!("altering {}.rlib", name)[], + format!("altering {}.rlib", name).index(&FullRange), (), |()| { let dst = tmpdir.join(cratepath.filename().unwrap()); match fs::copy(&cratepath, &dst) { @@ -1194,7 +1194,7 @@ fn add_upstream_rust_crates(cmd: &mut Command, sess: &Session, sess.err(format!("failed to copy {} to {}: {}", cratepath.display(), dst.display(), - e)[]); + e).index(&FullRange)); sess.abort_if_errors(); } } @@ -1206,7 +1206,7 @@ fn add_upstream_rust_crates(cmd: &mut Command, sess: &Session, Err(e) => { sess.err(format!("failed to chmod {} when preparing \ for LTO: {}", dst.display(), - e)[]); + e).index(&FullRange)); sess.abort_if_errors(); } } @@ -1220,9 +1220,9 @@ fn add_upstream_rust_crates(cmd: &mut Command, sess: &Session, maybe_ar_prog: sess.opts.cg.ar.clone() }; let mut archive = Archive::open(config); - archive.remove_file(format!("{}.o", name)[]); + archive.remove_file(format!("{}.o", name).index(&FullRange)); let files = archive.files(); - if files.iter().any(|s| s[].ends_with(".o")) { + if files.iter().any(|s| s.index(&FullRange).ends_with(".o")) { cmd.arg(dst); } }); @@ -1244,7 +1244,7 @@ fn add_upstream_rust_crates(cmd: &mut Command, sess: &Session, let mut v = "-l".as_bytes().to_vec(); v.push_all(unlib(&sess.target, cratepath.filestem().unwrap())); - cmd.arg(v[]); + cmd.arg(v.index(&FullRange)); } } @@ -1286,7 +1286,7 @@ fn add_upstream_native_libraries(cmd: &mut Command, sess: &Session) { } cstore::NativeFramework => { cmd.arg("-framework"); - cmd.arg(lib[]); + cmd.arg(lib.index(&FullRange)); } cstore::NativeStatic => { sess.bug("statics shouldn't be propagated"); diff --git a/src/librustc_trans/back/lto.rs b/src/librustc_trans/back/lto.rs index f3e90c43a8414..ecf2e9ed72425 100644 --- a/src/librustc_trans/back/lto.rs +++ b/src/librustc_trans/back/lto.rs @@ -54,21 +54,21 @@ pub fn run(sess: &session::Session, llmod: ModuleRef, Some(p) => p, None => { sess.fatal(format!("could not find rlib for: `{}`", - name)[]); + name).index(&FullRange)); } }; let archive = ArchiveRO::open(&path).expect("wanted an rlib"); let file = path.filename_str().unwrap(); - let file = file[3..file.len() - 5]; // chop off lib/.rlib + let file = file.index(&(3..(file.len() - 5))); // chop off lib/.rlib debug!("reading {}", file); for i in iter::count(0u, 1) { let bc_encoded = time(sess.time_passes(), - format!("check for {}.{}.bytecode.deflate", name, i)[], + format!("check for {}.{}.bytecode.deflate", name, i).as_slice(), (), |_| { archive.read(format!("{}.{}.bytecode.deflate", - file, i)[]) + file, i).index(&FullRange)) }); let bc_encoded = match bc_encoded { Some(data) => data, @@ -76,7 +76,7 @@ pub fn run(sess: &session::Session, llmod: ModuleRef, if i == 0 { // No bitcode was found at all. sess.fatal(format!("missing compressed bytecode in {}", - path.display())[]); + path.display()).index(&FullRange)); } // No more bitcode files to read. break; @@ -91,20 +91,20 @@ pub fn run(sess: &session::Session, llmod: ModuleRef, if version == 1 { // The only version existing so far let data_size = extract_compressed_bytecode_size_v1(bc_encoded); - let compressed_data = bc_encoded[ + let compressed_data = bc_encoded.index(&( link::RLIB_BYTECODE_OBJECT_V1_DATA_OFFSET.. 
- link::RLIB_BYTECODE_OBJECT_V1_DATA_OFFSET + data_size as uint]; + (link::RLIB_BYTECODE_OBJECT_V1_DATA_OFFSET + data_size as uint))); match flate::inflate_bytes(compressed_data) { Some(inflated) => inflated, None => { sess.fatal(format!("failed to decompress bc of `{}`", - name)[]) + name).index(&FullRange)) } } } else { sess.fatal(format!("Unsupported bytecode format version {}", - version)[]) + version).index(&FullRange)) } }) } else { @@ -115,7 +115,7 @@ pub fn run(sess: &session::Session, llmod: ModuleRef, Some(bc) => bc, None => { sess.fatal(format!("failed to decompress bc of `{}`", - name)[]) + name).index(&FullRange)) } } }) @@ -124,7 +124,7 @@ pub fn run(sess: &session::Session, llmod: ModuleRef, let ptr = bc_decoded.as_slice().as_ptr(); debug!("linking {}, part {}", name, i); time(sess.time_passes(), - format!("ll link {}.{}", name, i)[], + format!("ll link {}.{}", name, i).index(&FullRange), (), |()| unsafe { if !llvm::LLVMRustLinkInExternalBitcode(llmod, @@ -132,7 +132,7 @@ pub fn run(sess: &session::Session, llmod: ModuleRef, bc_decoded.len() as libc::size_t) { write::llvm_err(sess.diagnostic().handler(), format!("failed to load bc of `{}`", - name[])); + name.index(&FullRange))); } }); } @@ -186,7 +186,7 @@ pub fn run(sess: &session::Session, llmod: ModuleRef, fn is_versioned_bytecode_format(bc: &[u8]) -> bool { let magic_id_byte_count = link::RLIB_BYTECODE_OBJECT_MAGIC.len(); return bc.len() > magic_id_byte_count && - bc[..magic_id_byte_count] == link::RLIB_BYTECODE_OBJECT_MAGIC; + bc.index(&(0..magic_id_byte_count)) == link::RLIB_BYTECODE_OBJECT_MAGIC; } fn extract_bytecode_format_version(bc: &[u8]) -> u32 { @@ -198,8 +198,8 @@ fn extract_compressed_bytecode_size_v1(bc: &[u8]) -> u64 { } fn read_from_le_bytes(bytes: &[u8], position_in_bytes: uint) -> T { - let byte_data = bytes[position_in_bytes.. - position_in_bytes + mem::size_of::()]; + let byte_data = bytes.index(&(position_in_bytes.. + (position_in_bytes + mem::size_of::()))); let data = unsafe { *(byte_data.as_ptr() as *const T) }; diff --git a/src/librustc_trans/back/write.rs b/src/librustc_trans/back/write.rs index 98e2b4b9dddb5..56d7fb096627c 100644 --- a/src/librustc_trans/back/write.rs +++ b/src/librustc_trans/back/write.rs @@ -47,14 +47,14 @@ pub fn llvm_err(handler: &diagnostic::Handler, msg: String) -> ! 
{ unsafe { let cstr = llvm::LLVMRustGetLastError(); if cstr == ptr::null() { - handler.fatal(msg[]); + handler.fatal(msg.index(&FullRange)); } else { let err = ffi::c_str_to_bytes(&cstr); let err = String::from_utf8_lossy(err.as_slice()).to_string(); libc::free(cstr as *mut _); handler.fatal(format!("{}: {}", - msg[], - err[])[]); + msg.index(&FullRange), + err.index(&FullRange)).index(&FullRange)); } } } @@ -104,13 +104,13 @@ impl SharedEmitter { match diag.code { Some(ref code) => { handler.emit_with_code(None, - diag.msg[], - code[], + diag.msg.index(&FullRange), + code.index(&FullRange), diag.lvl); }, None => { handler.emit(None, - diag.msg[], + diag.msg.index(&FullRange), diag.lvl); }, } @@ -165,8 +165,8 @@ fn get_llvm_opt_level(optimize: config::OptLevel) -> llvm::CodeGenOptLevel { fn create_target_machine(sess: &Session) -> TargetMachineRef { let reloc_model_arg = match sess.opts.cg.relocation_model { - Some(ref s) => s[], - None => sess.target.target.options.relocation_model[] + Some(ref s) => s.index(&FullRange), + None => sess.target.target.options.relocation_model.index(&FullRange) }; let reloc_model = match reloc_model_arg { "pic" => llvm::RelocPIC, @@ -177,7 +177,7 @@ fn create_target_machine(sess: &Session) -> TargetMachineRef { sess.err(format!("{} is not a valid relocation mode", sess.opts .cg - .relocation_model)[]); + .relocation_model).index(&FullRange)); sess.abort_if_errors(); unreachable!(); } @@ -198,8 +198,8 @@ fn create_target_machine(sess: &Session) -> TargetMachineRef { let fdata_sections = ffunction_sections; let code_model_arg = match sess.opts.cg.code_model { - Some(ref s) => s[], - None => sess.target.target.options.code_model[] + Some(ref s) => s.index(&FullRange), + None => sess.target.target.options.code_model.index(&FullRange) }; let code_model = match code_model_arg { @@ -212,13 +212,13 @@ fn create_target_machine(sess: &Session) -> TargetMachineRef { sess.err(format!("{} is not a valid code model", sess.opts .cg - .code_model)[]); + .code_model).index(&FullRange)); sess.abort_if_errors(); unreachable!(); } }; - let triple = sess.target.target.llvm_target[]; + let triple = sess.target.target.llvm_target.index(&FullRange); let tm = unsafe { let triple = CString::from_slice(triple.as_bytes()); @@ -350,13 +350,13 @@ unsafe extern "C" fn inline_asm_handler(diag: SMDiagnosticRef, match cgcx.lto_ctxt { Some((sess, _)) => { sess.codemap().with_expn_info(ExpnId::from_llvm_cookie(cookie), |info| match info { - Some(ei) => sess.span_err(ei.call_site, msg[]), - None => sess.err(msg[]), + Some(ei) => sess.span_err(ei.call_site, msg.index(&FullRange)), + None => sess.err(msg.index(&FullRange)), }); } None => { - cgcx.handler.err(msg[]); + cgcx.handler.err(msg.index(&FullRange)); cgcx.handler.note("build without -C codegen-units for more exact errors"); } } @@ -381,8 +381,8 @@ unsafe extern "C" fn diagnostic_handler(info: DiagnosticInfoRef, user: *mut c_vo cgcx.handler.note(format!("optimization {} for {} at {}: {}", opt.kind.describe(), pass_name, - if loc.is_empty() { "[unknown]" } else { loc[] }, - llvm::twine_to_string(opt.message))[]); + if loc.is_empty() { "[unknown]" } else { loc.as_slice() }, + llvm::twine_to_string(opt.message)).as_slice()); } } @@ -518,14 +518,14 @@ unsafe fn optimize_and_codegen(cgcx: &CodegenContext, } if config.emit_asm { - let path = output_names.with_extension(format!("{}.s", name_extra)[]); + let path = output_names.with_extension(format!("{}.s", name_extra).index(&FullRange)); with_codegen(tm, llmod, config.no_builtins, |cpm| { 
write_output_file(cgcx.handler, tm, cpm, llmod, &path, llvm::AssemblyFileType); }); } if config.emit_obj { - let path = output_names.with_extension(format!("{}.o", name_extra)[]); + let path = output_names.with_extension(format!("{}.o", name_extra).index(&FullRange)); with_codegen(tm, llmod, config.no_builtins, |cpm| { write_output_file(cgcx.handler, tm, cpm, llmod, &path, llvm::ObjectFileType); }); @@ -639,7 +639,7 @@ pub fn run_passes(sess: &Session, // Process the work items, optionally using worker threads. if sess.opts.cg.codegen_units == 1 { - run_work_singlethreaded(sess, trans.reachable[], work_items); + run_work_singlethreaded(sess, trans.reachable.index(&FullRange), work_items); } else { run_work_multithreaded(sess, work_items, sess.opts.cg.codegen_units); } @@ -667,7 +667,7 @@ pub fn run_passes(sess: &Session, // 2) Multiple codegen units, with `-o some_name`. We have // no good solution for this case, so warn the user. sess.warn(format!("ignoring -o because multiple .{} files were produced", - ext)[]); + ext).index(&FullRange)); } else { // 3) Multiple codegen units, but no `-o some_name`. We // just leave the `foo.0.x` files in place. @@ -700,20 +700,20 @@ pub fn run_passes(sess: &Session, }; let pname = get_cc_prog(sess); - let mut cmd = Command::new(pname[]); + let mut cmd = Command::new(pname.index(&FullRange)); - cmd.args(sess.target.target.options.pre_link_args[]); + cmd.args(sess.target.target.options.pre_link_args.index(&FullRange)); cmd.arg("-nostdlib"); for index in range(0, trans.modules.len()) { - cmd.arg(crate_output.with_extension(format!("{}.o", index)[])); + cmd.arg(crate_output.with_extension(format!("{}.o", index).index(&FullRange))); } cmd.arg("-r") .arg("-o") .arg(windows_output_path.as_ref().unwrap_or(output_path)); - cmd.args(sess.target.target.options.post_link_args[]); + cmd.args(sess.target.target.options.post_link_args.index(&FullRange)); if (sess.opts.debugging_opts & config::PRINT_LINK_ARGS) != 0 { println!("{}", &cmd); @@ -726,14 +726,14 @@ pub fn run_passes(sess: &Session, Ok(status) => { if !status.success() { sess.err(format!("linking of {} with `{}` failed", - output_path.display(), cmd)[]); + output_path.display(), cmd).index(&FullRange)); sess.abort_if_errors(); } }, Err(e) => { sess.err(format!("could not exec the linker `{}`: {}", pname, - e)[]); + e).index(&FullRange)); sess.abort_if_errors(); }, } @@ -818,12 +818,12 @@ pub fn run_passes(sess: &Session, for i in range(0, trans.modules.len()) { if modules_config.emit_obj { let ext = format!("{}.o", i); - remove(sess, &crate_output.with_extension(ext[])); + remove(sess, &crate_output.with_extension(ext.index(&FullRange))); } if modules_config.emit_bc && !keep_numbered_bitcode { let ext = format!("{}.bc", i); - remove(sess, &crate_output.with_extension(ext[])); + remove(sess, &crate_output.with_extension(ext.index(&FullRange))); } } @@ -949,7 +949,7 @@ fn run_work_multithreaded(sess: &Session, pub fn run_assembler(sess: &Session, outputs: &OutputFilenames) { let pname = get_cc_prog(sess); - let mut cmd = Command::new(pname[]); + let mut cmd = Command::new(pname.index(&FullRange)); cmd.arg("-c").arg("-o").arg(outputs.path(config::OutputTypeObject)) .arg(outputs.temp_path(config::OutputTypeAssembly)); @@ -960,18 +960,18 @@ pub fn run_assembler(sess: &Session, outputs: &OutputFilenames) { if !prog.status.success() { sess.err(format!("linking with `{}` failed: {}", pname, - prog.status)[]); - sess.note(format!("{}", &cmd)[]); + prog.status).index(&FullRange)); + sess.note(format!("{}", 
&cmd).index(&FullRange)); let mut note = prog.error.clone(); - note.push_all(prog.output[]); - sess.note(str::from_utf8(note[]).unwrap()); + note.push_all(prog.output.index(&FullRange)); + sess.note(str::from_utf8(note.index(&FullRange)).unwrap()); sess.abort_if_errors(); } }, Err(e) => { sess.err(format!("could not exec the linker `{}`: {}", pname, - e)[]); + e).index(&FullRange)); sess.abort_if_errors(); } } @@ -1004,7 +1004,7 @@ unsafe fn configure_llvm(sess: &Session) { if sess.print_llvm_passes() { add("-debug-pass=Structure"); } for arg in sess.opts.cg.llvm_args.iter() { - add((*arg)[]); + add((*arg).index(&FullRange)); } } diff --git a/src/librustc_trans/save/mod.rs b/src/librustc_trans/save/mod.rs index 8e6276b61f949..4ac9f1c7c930f 100644 --- a/src/librustc_trans/save/mod.rs +++ b/src/librustc_trans/save/mod.rs @@ -94,7 +94,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { // dump info about all the external crates referenced from this crate self.sess.cstore.iter_crate_data(|n, cmd| { - self.fmt.external_crate_str(krate.span, cmd.name[], n); + self.fmt.external_crate_str(krate.span, cmd.name.index(&FullRange), n); }); self.fmt.recorder.record("end_external_crates\n"); } @@ -143,7 +143,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { for &(ref span, ref qualname) in sub_paths.iter() { self.fmt.sub_mod_ref_str(path.span, *span, - qualname[], + qualname.index(&FullRange), self.cur_scope); } } @@ -161,7 +161,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { for &(ref span, ref qualname) in sub_paths.iter() { self.fmt.sub_mod_ref_str(path.span, *span, - qualname[], + qualname.index(&FullRange), self.cur_scope); } } @@ -180,17 +180,17 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { let (ref span, ref qualname) = sub_paths[len-2]; self.fmt.sub_type_ref_str(path.span, *span, - qualname[]); + qualname.index(&FullRange)); // write the other sub-paths if len <= 2 { return; } - let sub_paths = sub_paths[..len-2]; + let sub_paths = sub_paths.index(&(0..(len-2))); for &(ref span, ref qualname) in sub_paths.iter() { self.fmt.sub_mod_ref_str(path.span, *span, - qualname[], + qualname.index(&FullRange), self.cur_scope); } } @@ -199,7 +199,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { fn lookup_type_ref(&self, ref_id: NodeId) -> Option { if !self.analysis.ty_cx.def_map.borrow().contains_key(&ref_id) { self.sess.bug(format!("def_map has no key for {} in lookup_type_ref", - ref_id)[]); + ref_id).index(&FullRange)); } let def = (*self.analysis.ty_cx.def_map.borrow())[ref_id]; match def { @@ -212,7 +212,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { let def_map = self.analysis.ty_cx.def_map.borrow(); if !def_map.contains_key(&ref_id) { self.sess.span_bug(span, format!("def_map has no key for {} in lookup_def_kind", - ref_id)[]); + ref_id).index(&FullRange)); } let def = (*def_map)[ref_id]; match def { @@ -241,7 +241,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { def::DefMethod(..) 
| def::DefPrimTy(_) => { self.sess.span_bug(span, format!("lookup_def_kind for unexpected item: {}", - def)[]); + def).index(&FullRange)); }, } } @@ -262,8 +262,8 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { span_utils.span_for_last_ident(p.span), id, qualname, - path_to_string(p)[], - typ[]); + path_to_string(p).index(&FullRange), + typ.index(&FullRange)); } self.collected_paths.clear(); } @@ -285,14 +285,14 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { match item.node { ast::ItemImpl(_, _, _, _, ref ty, _) => { let mut result = String::from_str("<"); - result.push_str(ty_to_string(&**ty)[]); + result.push_str(ty_to_string(&**ty).index(&FullRange)); match ty::trait_of_item(&self.analysis.ty_cx, ast_util::local_def(method.id)) { Some(def_id) => { result.push_str(" as "); result.push_str( - ty::item_path_str(&self.analysis.ty_cx, def_id)[]); + ty::item_path_str(&self.analysis.ty_cx, def_id).as_slice()); }, None => {} } @@ -302,7 +302,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { _ => { self.sess.span_bug(method.span, format!("Container {} for method {} not an impl?", - impl_id.node, method.id)[]); + impl_id.node, method.id).index(&FullRange)); }, } }, @@ -312,7 +312,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { impl_id.node, method.id, self.analysis.ty_cx.map.get(impl_id.node) - )[]); + ).index(&FullRange)); }, }, None => match ty::trait_of_item(&self.analysis.ty_cx, @@ -328,20 +328,20 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { _ => { self.sess.span_bug(method.span, format!("Could not find container {} for method {}", - def_id.node, method.id)[]); + def_id.node, method.id).index(&FullRange)); } } }, None => { self.sess.span_bug(method.span, format!("Could not find container for method {}", - method.id)[]); + method.id).index(&FullRange)); }, }, }; qualname.push_str(get_ident(method.pe_ident()).get()); - let qualname = qualname[]; + let qualname = qualname.index(&FullRange); // record the decl for this def (if it has one) let decl_id = ty::trait_item_of_item(&self.analysis.ty_cx, @@ -430,13 +430,13 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { Some(sub_span) => self.fmt.field_str(field.span, Some(sub_span), field.node.id, - name.get()[], - qualname[], - typ[], + name.get().index(&FullRange), + qualname.index(&FullRange), + typ.index(&FullRange), scope_id), None => self.sess.span_bug(field.span, format!("Could not find sub-span for field {}", - qualname)[]), + qualname).index(&FullRange)), } }, _ => (), @@ -463,7 +463,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { self.fmt.typedef_str(full_span, Some(*param_ss), param.id, - name[], + name.index(&FullRange), ""); } self.visit_generics(generics); @@ -480,10 +480,10 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { self.fmt.fn_str(item.span, sub_span, item.id, - qualname[], + qualname.index(&FullRange), self.cur_scope); - self.process_formals(&decl.inputs, qualname[]); + self.process_formals(&decl.inputs, qualname.index(&FullRange)); // walk arg and return types for arg in decl.inputs.iter() { @@ -497,7 +497,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { // walk the body self.nest(item.id, |v| v.visit_block(&*body)); - self.process_generic_params(ty_params, item.span, qualname[], item.id); + self.process_generic_params(ty_params, item.span, qualname.index(&FullRange), item.id); } fn process_static(&mut self, @@ -519,9 +519,9 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { sub_span, item.id, get_ident(item.ident).get(), - qualname[], - value[], - ty_to_string(&*typ)[], + qualname.index(&FullRange), + value.index(&FullRange), + ty_to_string(&*typ).index(&FullRange), self.cur_scope); 
// walk type and init value @@ -542,9 +542,9 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { sub_span, item.id, get_ident(item.ident).get(), - qualname[], + qualname.index(&FullRange), "", - ty_to_string(&*typ)[], + ty_to_string(&*typ).index(&FullRange), self.cur_scope); // walk type and init value @@ -568,17 +568,17 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { sub_span, item.id, ctor_id, - qualname[], + qualname.index(&FullRange), self.cur_scope, - val[]); + val.index(&FullRange)); // fields for field in def.fields.iter() { - self.process_struct_field_def(field, qualname[], item.id); + self.process_struct_field_def(field, qualname.index(&FullRange), item.id); self.visit_ty(&*field.node.ty); } - self.process_generic_params(ty_params, item.span, qualname[], item.id); + self.process_generic_params(ty_params, item.span, qualname.index(&FullRange), item.id); } fn process_enum(&mut self, @@ -591,12 +591,12 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { Some(sub_span) => self.fmt.enum_str(item.span, Some(sub_span), item.id, - enum_name[], + enum_name.index(&FullRange), self.cur_scope, - val[]), + val.index(&FullRange)), None => self.sess.span_bug(item.span, format!("Could not find subspan for enum {}", - enum_name)[]), + enum_name).index(&FullRange)), } for variant in enum_definition.variants.iter() { let name = get_ident(variant.node.name); @@ -612,9 +612,9 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { self.span.span_for_first_ident(variant.span), variant.node.id, name, - qualname[], - enum_name[], - val[], + qualname.index(&FullRange), + enum_name.index(&FullRange), + val.index(&FullRange), item.id); for arg in args.iter() { self.visit_ty(&*arg.ty); @@ -630,20 +630,20 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { self.span.span_for_first_ident(variant.span), variant.node.id, ctor_id, - qualname[], - enum_name[], - val[], + qualname.index(&FullRange), + enum_name.index(&FullRange), + val.index(&FullRange), item.id); for field in struct_def.fields.iter() { - self.process_struct_field_def(field, qualname[], variant.node.id); + self.process_struct_field_def(field, qualname.as_slice(), variant.node.id); self.visit_ty(&*field.node.ty); } } } } - self.process_generic_params(ty_params, item.span, enum_name[], item.id); + self.process_generic_params(ty_params, item.span, enum_name.index(&FullRange), item.id); } fn process_impl(&mut self, @@ -703,9 +703,9 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { self.fmt.trait_str(item.span, sub_span, item.id, - qualname[], + qualname.index(&FullRange), self.cur_scope, - val[]); + val.index(&FullRange)); // super-traits for super_bound in trait_refs.iter() { @@ -737,7 +737,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { } // walk generics and methods - self.process_generic_params(generics, item.span, qualname[], item.id); + self.process_generic_params(generics, item.span, qualname.index(&FullRange), item.id); for method in methods.iter() { self.visit_trait_item(method) } @@ -755,9 +755,9 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { self.fmt.mod_str(item.span, sub_span, item.id, - qualname[], + qualname.index(&FullRange), self.cur_scope, - filename[]); + filename.index(&FullRange)); self.nest(item.id, |v| visit::walk_mod(v, m)); } @@ -774,7 +774,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { let def_map = self.analysis.ty_cx.def_map.borrow(); if !def_map.contains_key(&id) { self.sess.span_bug(span, - format!("def_map has no key for {} in visit_expr", id)[]); + format!("def_map has no key for {} in visit_expr", id).as_slice()); } let def = &(*def_map)[id]; let sub_span = 
self.span.span_for_last_ident(span); @@ -841,7 +841,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { self.cur_scope), _ => self.sess.span_bug(span, format!("Unexpected def kind while looking up path in '{}'", - self.span.snippet(span))[]), + self.span.snippet(span)).index(&FullRange)), } // modules or types in the path prefix match *def { @@ -959,7 +959,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { self.cur_scope); // walk receiver and args - visit::walk_exprs(self, args[]); + visit::walk_exprs(self, args.index(&FullRange)); } fn process_pat(&mut self, p:&ast::Pat) { @@ -976,7 +976,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { None => { self.sess.span_bug(p.span, format!("Could not find struct_def for `{}`", - self.span.snippet(p.span))[]); + self.span.snippet(p.span)).index(&FullRange)); } }; for &Spanned { node: ref field, span } in fields.iter() { @@ -1061,11 +1061,11 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> { self.fmt.typedef_str(item.span, sub_span, item.id, - qualname[], - value[]); + qualname.index(&FullRange), + value.index(&FullRange)); self.visit_ty(&**ty); - self.process_generic_params(ty_params, item.span, qualname[], item.id); + self.process_generic_params(ty_params, item.span, qualname.as_slice(), item.id); }, ast::ItemMac(_) => (), _ => visit::walk_item(self, item), @@ -1122,12 +1122,12 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> { None => { self.sess.span_bug(method_type.span, format!("Could not find trait for method {}", - method_type.id)[]); + method_type.id).index(&FullRange)); }, }; qualname.push_str(get_ident(method_type.ident).get()); - let qualname = qualname[]; + let qualname = qualname.index(&FullRange); let sub_span = self.span.sub_span_after_keyword(method_type.span, keywords::Fn); self.fmt.method_decl_str(method_type.span, @@ -1262,7 +1262,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> { id, cnum, name, - s[], + s.index(&FullRange), self.cur_scope); }, } @@ -1371,8 +1371,8 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> { } let mut id = String::from_str("$"); - id.push_str(ex.id.to_string()[]); - self.process_formals(&decl.inputs, id[]); + id.push_str(ex.id.to_string().index(&FullRange)); + self.process_formals(&decl.inputs, id.index(&FullRange)); // walk arg and return types for arg in decl.inputs.iter() { @@ -1418,7 +1418,8 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> { let def_map = self.analysis.ty_cx.def_map.borrow(); if !def_map.contains_key(&id) { self.sess.span_bug(p.span, - format!("def_map has no key for {} in visit_arm", id)[]); + format!("def_map has no key for {} in visit_arm", + id).index(&FullRange)); } let def = &(*def_map)[id]; match *def { @@ -1433,8 +1434,8 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> { self.fmt.variable_str(p.span, Some(p.span), id, - path_to_string(p)[], - value[], + path_to_string(p).index(&FullRange), + value.index(&FullRange), "") } def::DefVariant(..) 
=> { @@ -1488,9 +1489,9 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> { self.fmt.variable_str(p.span, sub_span, id, - path_to_string(p)[], - value[], - typ[]); + path_to_string(p).index(&FullRange), + value.index(&FullRange), + typ.index(&FullRange)); } self.collected_paths.clear(); @@ -1509,7 +1510,7 @@ pub fn process_crate(sess: &Session, } assert!(analysis.glob_map.is_some()); - let cratename = match attr::find_crate_name(krate.attrs[]) { + let cratename = match attr::find_crate_name(krate.attrs.index(&FullRange)) { Some(name) => name.get().to_string(), None => { info!("Could not find crate name, using 'unknown_crate'"); @@ -1530,7 +1531,7 @@ pub fn process_crate(sess: &Session, match fs::mkdir_recursive(&root_path, io::USER_RWX) { Err(e) => sess.err(format!("Could not create directory {}: {}", - root_path.display(), e)[]), + root_path.display(), e).index(&FullRange)), _ => (), } @@ -1547,7 +1548,7 @@ pub fn process_crate(sess: &Session, Ok(f) => box f, Err(e) => { let disp = root_path.display(); - sess.fatal(format!("Could not open {}: {}", disp, e)[]); + sess.fatal(format!("Could not open {}: {}", disp, e).index(&FullRange)); } }; root_path.pop(); @@ -1573,7 +1574,7 @@ pub fn process_crate(sess: &Session, cur_scope: 0 }; - visitor.dump_crate_info(cratename[], krate); + visitor.dump_crate_info(cratename.index(&FullRange), krate); visit::walk_crate(&mut visitor, krate); } diff --git a/src/librustc_trans/save/recorder.rs b/src/librustc_trans/save/recorder.rs index 679a8d2d07bc8..bb0fb38700208 100644 --- a/src/librustc_trans/save/recorder.rs +++ b/src/librustc_trans/save/recorder.rs @@ -41,7 +41,7 @@ impl Recorder { assert!(self.dump_spans); let result = format!("span,kind,{},{},text,\"{}\"\n", kind, su.extent_str(span), escape(su.snippet(span))); - self.record(result[]); + self.record(result.index(&FullRange)); } } @@ -160,15 +160,15 @@ impl<'a> FmtStrs<'a> { if values.len() != fields.len() { self.span.sess.span_bug(span, format!( "Mismatch between length of fields for '{}', expected '{}', found '{}'", - kind, fields.len(), values.len())[]); + kind, fields.len(), values.len()).index(&FullRange)); } let values = values.iter().map(|s| { // Never take more than 1020 chars if s.len() > 1020 { - s[..1020] + s.index(&(0..1020)) } else { - s[] + s.index(&FullRange) } }); @@ -184,7 +184,7 @@ impl<'a> FmtStrs<'a> { } ))); Some(strs.fold(String::new(), |mut s, ss| { - s.push_str(ss[]); + s.push_str(ss.index(&FullRange)); s })) } @@ -198,7 +198,7 @@ impl<'a> FmtStrs<'a> { if needs_span { self.span.sess.span_bug(span, format!( "Called record_without_span for '{}' which does requires a span", - label)[]); + label).index(&FullRange)); } assert!(!dump_spans); @@ -212,9 +212,9 @@ impl<'a> FmtStrs<'a> { }; let mut result = String::from_str(label); - result.push_str(values_str[]); + result.push_str(values_str.index(&FullRange)); result.push_str("\n"); - self.recorder.record(result[]); + self.recorder.record(result.index(&FullRange)); } pub fn record_with_span(&mut self, @@ -237,7 +237,7 @@ impl<'a> FmtStrs<'a> { if !needs_span { self.span.sess.span_bug(span, format!("Called record_with_span for '{}' \ - which does not require a span", label)[]); + which does not require a span", label).as_slice()); } let values_str = match self.make_values_str(label, fields, values, span) { @@ -245,7 +245,7 @@ impl<'a> FmtStrs<'a> { None => return, }; let result = format!("{},{}{}\n", label, self.span.extent_str(sub_span), values_str); - self.recorder.record(result[]); + 
self.recorder.record(result.index(&FullRange)); } pub fn check_and_record(&mut self, @@ -275,7 +275,7 @@ impl<'a> FmtStrs<'a> { // variable def's node id let mut qualname = String::from_str(name); qualname.push_str("$"); - qualname.push_str(id.to_string()[]); + qualname.push_str(id.to_string().index(&FullRange)); self.check_and_record(Variable, span, sub_span, diff --git a/src/librustc_trans/save/span_utils.rs b/src/librustc_trans/save/span_utils.rs index 14c6475c87df9..8d249b8bfe903 100644 --- a/src/librustc_trans/save/span_utils.rs +++ b/src/librustc_trans/save/span_utils.rs @@ -218,7 +218,7 @@ impl<'a> SpanUtils<'a> { let loc = self.sess.codemap().lookup_char_pos(span.lo); self.sess.span_bug(span, format!("Mis-counted brackets when breaking path? Parsing '{}' in {}, line {}", - self.snippet(span), loc.file.name, loc.line)[]); + self.snippet(span), loc.file.name, loc.line).index(&FullRange)); } if result.is_none() && prev.tok.is_ident() && bracket_count == 0 { return self.make_sub_span(span, Some(prev.sp)); @@ -244,7 +244,7 @@ impl<'a> SpanUtils<'a> { let loc = self.sess.codemap().lookup_char_pos(span.lo); self.sess.span_bug(span, format!( "Mis-counted brackets when breaking path? Parsing '{}' in {}, line {}", - self.snippet(span), loc.file.name, loc.line)[]); + self.snippet(span), loc.file.name, loc.line).index(&FullRange)); } return result } diff --git a/src/librustc_trans/trans/_match.rs b/src/librustc_trans/trans/_match.rs index fed0931cab71d..438cfe0b6c710 100644 --- a/src/librustc_trans/trans/_match.rs +++ b/src/librustc_trans/trans/_match.rs @@ -427,7 +427,7 @@ fn enter_match<'a, 'b, 'p, 'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>, let _indenter = indenter(); m.iter().filter_map(|br| { - e(br.pats[]).map(|pats| { + e(br.pats.index(&FullRange)).map(|pats| { let this = br.pats[col]; let mut bound_ptrs = br.bound_ptrs.clone(); match this.node { @@ -471,8 +471,8 @@ fn enter_default<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>, // Collect all of the matches that can match against anything. 
enter_match(bcx, dm, m, col, val, |pats| { if pat_is_binding_or_wild(dm, &*pats[col]) { - let mut r = pats[..col].to_vec(); - r.push_all(pats[col + 1..]); + let mut r = pats.index(&(0..col)).to_vec(); + r.push_all(pats.index(&((col + 1)..))); Some(r) } else { None @@ -548,7 +548,7 @@ fn enter_opt<'a, 'p, 'blk, 'tcx>( param_env: param_env, }; enter_match(bcx, dm, m, col, val, |pats| - check_match::specialize(&mcx, pats[], &ctor, col, variant_size) + check_match::specialize(&mcx, pats.index(&FullRange), &ctor, col, variant_size) ) } @@ -790,7 +790,7 @@ fn compare_values<'blk, 'tcx>(cx: Block<'blk, 'tcx>, let did = langcall(cx, None, format!("comparison of `{}`", - cx.ty_to_string(rhs_t))[], + cx.ty_to_string(rhs_t)).index(&FullRange), StrEqFnLangItem); callee::trans_lang_call(cx, did, &[lhs, rhs], None) } @@ -945,7 +945,7 @@ fn compile_submatch<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>, if has_nested_bindings(m, col) { let expanded = expand_nested_bindings(bcx, m, col, val); compile_submatch_continue(bcx, - expanded[], + expanded.index(&FullRange), vals, chk, col, @@ -967,7 +967,7 @@ fn compile_submatch<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>, bcx = compile_guard(bcx, &**guard_expr, m[0].data, - m[1..m.len()], + m.index(&(1..m.len())), vals, chk, has_genuine_default); @@ -990,8 +990,8 @@ fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, let tcx = bcx.tcx(); let dm = &tcx.def_map; - let mut vals_left = vals[0u..col].to_vec(); - vals_left.push_all(vals[col + 1u..]); + let mut vals_left = vals.index(&(0u..col)).to_vec(); + vals_left.push_all(vals.index(&((col + 1u)..))); let ccx = bcx.fcx.ccx; // Find a real id (we're adding placeholder wildcard patterns, but @@ -1037,8 +1037,8 @@ fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, field_vals.len()) ); let mut vals = field_vals; - vals.push_all(vals_left[]); - compile_submatch(bcx, pats[], vals[], chk, has_genuine_default); + vals.push_all(vals_left.as_slice()); + compile_submatch(bcx, pats.as_slice(), vals.as_slice(), chk, has_genuine_default); return; } _ => () @@ -1191,10 +1191,10 @@ fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, } let opt_ms = enter_opt(opt_cx, pat_id, dm, m, opt, col, size, val); let mut opt_vals = unpacked; - opt_vals.push_all(vals_left[]); + opt_vals.push_all(vals_left.index(&FullRange)); compile_submatch(opt_cx, - opt_ms[], - opt_vals[], + opt_ms.index(&FullRange), + opt_vals.index(&FullRange), branch_chk.as_ref().unwrap_or(chk), has_genuine_default); } @@ -1213,8 +1213,8 @@ fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, } _ => { compile_submatch(else_cx, - defaults[], - vals_left[], + defaults.index(&FullRange), + vals_left.index(&FullRange), chk, has_genuine_default); } @@ -1333,7 +1333,7 @@ fn create_bindings_map<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, pat: &ast::Pat, "__llmatch"); trmode = TrByCopy(alloca_no_lifetime(bcx, llvariable_ty, - bcx.ident(ident)[])); + bcx.ident(ident).index(&FullRange))); } ast::BindByValue(_) => { // in this case, the final type of the variable will be T, @@ -1341,13 +1341,13 @@ fn create_bindings_map<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, pat: &ast::Pat, // above llmatch = alloca_no_lifetime(bcx, llvariable_ty.ptr_to(), - bcx.ident(ident)[]); + bcx.ident(ident).index(&FullRange)); trmode = TrByMove; } ast::BindByRef(_) => { llmatch = alloca_no_lifetime(bcx, llvariable_ty, - bcx.ident(ident)[]); + bcx.ident(ident).index(&FullRange)); trmode = TrByRef; } }; @@ -1415,7 +1415,7 @@ fn 
trans_match_inner<'blk, 'tcx>(scope_cx: Block<'blk, 'tcx>, && arm.pats.last().unwrap().node == ast::PatWild(ast::PatWildSingle) }); - compile_submatch(bcx, matches[], &[discr_datum.val], &chk, has_default); + compile_submatch(bcx, matches.index(&FullRange), &[discr_datum.val], &chk, has_default); let mut arm_cxs = Vec::new(); for arm_data in arm_datas.iter() { @@ -1429,7 +1429,7 @@ fn trans_match_inner<'blk, 'tcx>(scope_cx: Block<'blk, 'tcx>, arm_cxs.push(bcx); } - bcx = scope_cx.fcx.join_blocks(match_id, arm_cxs[]); + bcx = scope_cx.fcx.join_blocks(match_id, arm_cxs.index(&FullRange)); return bcx; } @@ -1582,7 +1582,7 @@ fn mk_binding_alloca<'blk, 'tcx, A, F>(bcx: Block<'blk, 'tcx>, let var_ty = node_id_type(bcx, p_id); // Allocate memory on stack for the binding. - let llval = alloc_ty(bcx, var_ty, bcx.ident(*ident)[]); + let llval = alloc_ty(bcx, var_ty, bcx.ident(*ident).index(&FullRange)); // Subtle: be sure that we *populate* the memory *before* // we schedule the cleanup. @@ -1620,7 +1620,7 @@ fn bind_irrefutable_pat<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, if bcx.sess().asm_comments() { add_comment(bcx, format!("bind_irrefutable_pat(pat={})", - pat.repr(bcx.tcx()))[]); + pat.repr(bcx.tcx())).index(&FullRange)); } let _indenter = indenter(); diff --git a/src/librustc_trans/trans/adt.rs b/src/librustc_trans/trans/adt.rs index 01b47b728b6ba..60fc29c7c831d 100644 --- a/src/librustc_trans/trans/adt.rs +++ b/src/librustc_trans/trans/adt.rs @@ -154,7 +154,7 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Repr<'tcx> { match t.sty { ty::ty_tup(ref elems) => { - Univariant(mk_struct(cx, elems[], false, t), false) + Univariant(mk_struct(cx, elems.index(&FullRange), false, t), false) } ty::ty_struct(def_id, substs) => { let fields = ty::lookup_struct_fields(cx.tcx(), def_id); @@ -165,17 +165,17 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let dtor = ty::ty_dtor(cx.tcx(), def_id).has_drop_flag(); if dtor { ftys.push(cx.tcx().types.bool); } - Univariant(mk_struct(cx, ftys[], packed, t), dtor) + Univariant(mk_struct(cx, ftys.index(&FullRange), packed, t), dtor) } ty::ty_unboxed_closure(def_id, _, substs) => { let typer = NormalizingUnboxedClosureTyper::new(cx.tcx()); let upvars = typer.unboxed_closure_upvars(def_id, substs).unwrap(); let upvar_types = upvars.iter().map(|u| u.ty).collect::>(); - Univariant(mk_struct(cx, upvar_types[], false, t), false) + Univariant(mk_struct(cx, upvar_types.index(&FullRange), false, t), false) } ty::ty_enum(def_id, substs) => { let cases = get_cases(cx.tcx(), def_id, substs); - let hint = *ty::lookup_repr_hints(cx.tcx(), def_id)[].get(0) + let hint = *ty::lookup_repr_hints(cx.tcx(), def_id).index(&FullRange).get(0) .unwrap_or(&attr::ReprAny); let dtor = ty::ty_dtor(cx.tcx(), def_id).has_drop_flag(); @@ -185,7 +185,7 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, // (Typechecking will reject discriminant-sizing attrs.) 
assert_eq!(hint, attr::ReprAny); let ftys = if dtor { vec!(cx.tcx().types.bool) } else { vec!() }; - return Univariant(mk_struct(cx, ftys[], false, t), + return Univariant(mk_struct(cx, ftys.index(&FullRange), false, t), dtor); } @@ -208,7 +208,7 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, cx.sess().bug(format!("non-C-like enum {} with specified \ discriminants", ty::item_path_str(cx.tcx(), - def_id))[]); + def_id)).index(&FullRange)); } if cases.len() == 1 { @@ -217,7 +217,7 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, assert_eq!(hint, attr::ReprAny); let mut ftys = cases[0].tys.clone(); if dtor { ftys.push(cx.tcx().types.bool); } - return Univariant(mk_struct(cx, ftys[], false, t), + return Univariant(mk_struct(cx, ftys.index(&FullRange), false, t), dtor); } @@ -226,7 +226,7 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let mut discr = 0; while discr < 2 { if cases[1 - discr].is_zerolen(cx, t) { - let st = mk_struct(cx, cases[discr].tys[], + let st = mk_struct(cx, cases[discr].tys.index(&FullRange), false, t); match cases[discr].find_ptr(cx) { Some(ref df) if df.len() == 1 && st.fields.len() == 1 => { @@ -316,17 +316,17 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let fields : Vec<_> = cases.iter().map(|c| { let mut ftys = vec!(ty_of_inttype(cx.tcx(), ity)); - ftys.push_all(c.tys[]); + ftys.push_all(c.tys.index(&FullRange)); if dtor { ftys.push(cx.tcx().types.bool); } - mk_struct(cx, ftys[], false, t) + mk_struct(cx, ftys.index(&FullRange), false, t) }).collect(); - ensure_enum_fits_in_address_space(cx, ity, fields[], t); + ensure_enum_fits_in_address_space(cx, ity, fields.index(&FullRange), t); General(ity, fields, dtor) } _ => cx.sess().bug(format!("adt::represent_type called on non-ADT type: {}", - ty_to_string(cx.tcx(), t))[]) + ty_to_string(cx.tcx(), t)).index(&FullRange)) } } @@ -412,7 +412,7 @@ fn find_discr_field_candidate<'tcx>(tcx: &ty::ctxt<'tcx>, impl<'tcx> Case<'tcx> { fn is_zerolen<'a>(&self, cx: &CrateContext<'a, 'tcx>, scapegoat: Ty<'tcx>) -> bool { - mk_struct(cx, self.tys[], false, scapegoat).size == 0 + mk_struct(cx, self.tys.index(&FullRange), false, scapegoat).size == 0 } fn find_ptr<'a>(&self, cx: &CrateContext<'a, 'tcx>) -> Option { @@ -451,9 +451,9 @@ fn mk_struct<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, .map(|&ty| type_of::sizing_type_of(cx, ty)).collect() }; - ensure_struct_fits_in_address_space(cx, lltys[], packed, scapegoat); + ensure_struct_fits_in_address_space(cx, lltys.index(&FullRange), packed, scapegoat); - let llty_rec = Type::struct_(cx, lltys[], packed); + let llty_rec = Type::struct_(cx, lltys.index(&FullRange), packed); Struct { size: machine::llsize_of_alloc(cx, llty_rec), align: machine::llalign_of_min(cx, llty_rec), @@ -502,7 +502,7 @@ fn range_to_inttype(cx: &CrateContext, hint: Hint, bounds: &IntBounds) -> IntTyp return ity; } attr::ReprExtern => { - attempts = match cx.sess().target.target.arch[] { + attempts = match cx.sess().target.target.arch.index(&FullRange) { // WARNING: the ARM EABI has two variants; the one corresponding to `at_least_32` // appears to be used on Linux and NetBSD, but some systems may use the variant // corresponding to `choose_shortest`. However, we don't run on those yet...? @@ -628,7 +628,7 @@ pub fn finish_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, match *r { CEnum(..) | General(..) | RawNullablePointer { .. } => { } Univariant(ref st, _) | StructWrappedNullablePointer { nonnull: ref st, .. 
} => - llty.set_struct_body(struct_llfields(cx, st, false, false)[], + llty.set_struct_body(struct_llfields(cx, st, false, false).index(&FullRange), st.packed) } } @@ -644,7 +644,7 @@ fn generic_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, Univariant(ref st, _) | StructWrappedNullablePointer { nonnull: ref st, .. } => { match name { None => { - Type::struct_(cx, struct_llfields(cx, st, sizing, dst)[], + Type::struct_(cx, struct_llfields(cx, st, sizing, dst).index(&FullRange), st.packed) } Some(name) => { assert_eq!(sizing, false); Type::named_struct(cx, name) } @@ -663,7 +663,7 @@ fn generic_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, // of the size. // // FIXME #10604: this breaks when vector types are present. - let (size, align) = union_size_and_align(sts[]); + let (size, align) = union_size_and_align(sts.index(&FullRange)); let align_s = align as u64; let discr_ty = ll_inttype(cx, ity); let discr_size = machine::llsize_of_alloc(cx, discr_ty); @@ -684,10 +684,10 @@ fn generic_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, Type::array(&discr_ty, align_s / discr_size - 1), fill_ty]; match name { - None => Type::struct_(cx, fields[], false), + None => Type::struct_(cx, fields.index(&FullRange), false), Some(name) => { let mut llty = Type::named_struct(cx, name); - llty.set_struct_body(fields[], false); + llty.set_struct_body(fields.index(&FullRange), false); llty } } @@ -765,7 +765,7 @@ pub fn trans_get_discr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, r: &Repr<'tcx>, fn struct_wrapped_nullable_bitdiscr(bcx: Block, nndiscr: Disr, discrfield: &DiscrField, scrutinee: ValueRef) -> ValueRef { - let llptrptr = GEPi(bcx, scrutinee, discrfield[]); + let llptrptr = GEPi(bcx, scrutinee, discrfield.index(&FullRange)); let llptr = Load(bcx, llptrptr); let cmp = if nndiscr == 0 { IntEQ } else { IntNE }; ICmp(bcx, cmp, llptr, C_null(val_ty(llptr))) @@ -853,7 +853,7 @@ pub fn trans_set_discr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, r: &Repr<'tcx>, } StructWrappedNullablePointer { nndiscr, ref discrfield, .. 
} => { if discr != nndiscr { - let llptrptr = GEPi(bcx, val, discrfield[]); + let llptrptr = GEPi(bcx, val, discrfield.index(&FullRange)); let llptrty = val_ty(llptrptr).element_type(); Store(bcx, C_null(llptrty), llptrptr) } @@ -935,7 +935,7 @@ pub fn struct_field_ptr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, st: &Struct<'tcx>, v let val = if needs_cast { let ccx = bcx.ccx(); let fields = st.fields.iter().map(|&ty| type_of::type_of(ccx, ty)).collect::>(); - let real_ty = Type::struct_(ccx, fields[], st.packed); + let real_ty = Type::struct_(ccx, fields.index(&FullRange), st.packed); PointerCast(bcx, val, real_ty.ptr_to()) } else { val @@ -967,14 +967,14 @@ pub fn fold_variants<'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>, for (discr, case) in cases.iter().enumerate() { let mut variant_cx = fcx.new_temp_block( - format!("enum-variant-iter-{}", discr.to_string())[] + format!("enum-variant-iter-{}", discr.to_string()).index(&FullRange) ); let rhs_val = C_integral(ll_inttype(ccx, ity), discr as u64, true); AddCase(llswitch, rhs_val, variant_cx.llbb); let fields = case.fields.iter().map(|&ty| type_of::type_of(bcx.ccx(), ty)).collect::>(); - let real_ty = Type::struct_(ccx, fields[], case.packed); + let real_ty = Type::struct_(ccx, fields.index(&FullRange), case.packed); let variant_value = PointerCast(variant_cx, value, real_ty.ptr_to()); variant_cx = f(variant_cx, case, variant_value); @@ -1051,14 +1051,14 @@ pub fn trans_const<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, r: &Repr<'tcx>, discr let lldiscr = C_integral(ll_inttype(ccx, ity), discr as u64, true); let mut f = vec![lldiscr]; f.push_all(vals); - let mut contents = build_const_struct(ccx, case, f[]); + let mut contents = build_const_struct(ccx, case, f.index(&FullRange)); contents.push_all(&[padding(ccx, max_sz - case.size)]); - C_struct(ccx, contents[], false) + C_struct(ccx, contents.index(&FullRange), false) } Univariant(ref st, _dro) => { assert!(discr == 0); let contents = build_const_struct(ccx, st, vals); - C_struct(ccx, contents[], st.packed) + C_struct(ccx, contents.index(&FullRange), st.packed) } RawNullablePointer { nndiscr, nnty, .. 
} => { if discr == nndiscr { @@ -1072,7 +1072,7 @@ pub fn trans_const<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, r: &Repr<'tcx>, discr if discr == nndiscr { C_struct(ccx, build_const_struct(ccx, nonnull, - vals)[], + vals).index(&FullRange), false) } else { let vals = nonnull.fields.iter().map(|&ty| { @@ -1082,7 +1082,7 @@ pub fn trans_const<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, r: &Repr<'tcx>, discr }).collect::>(); C_struct(ccx, build_const_struct(ccx, nonnull, - vals[])[], + vals.index(&FullRange)).index(&FullRange), false) } } diff --git a/src/librustc_trans/trans/asm.rs b/src/librustc_trans/trans/asm.rs index f18d483f70328..890f046be1b2e 100644 --- a/src/librustc_trans/trans/asm.rs +++ b/src/librustc_trans/trans/asm.rs @@ -71,7 +71,7 @@ pub fn trans_inline_asm<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ia: &ast::InlineAsm) callee::DontAutorefArg) }) }).collect::>(); - inputs.push_all(ext_inputs[]); + inputs.push_all(ext_inputs.index(&FullRange)); // no failure occurred preparing operands, no need to cleanup fcx.pop_custom_cleanup_scope(temp_scope); @@ -91,18 +91,18 @@ pub fn trans_inline_asm<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ia: &ast::InlineAsm) if !clobbers.is_empty() { clobbers.push(','); } - clobbers.push_str(more_clobbers[]); + clobbers.push_str(more_clobbers.index(&FullRange)); } // Add the clobbers to our constraints list if clobbers.len() != 0 && constraints.len() != 0 { constraints.push(','); - constraints.push_str(clobbers[]); + constraints.push_str(clobbers.index(&FullRange)); } else { - constraints.push_str(clobbers[]); + constraints.push_str(clobbers.index(&FullRange)); } - debug!("Asm Constraints: {}", constraints[]); + debug!("Asm Constraints: {}", constraints.index(&FullRange)); let num_outputs = outputs.len(); @@ -112,7 +112,7 @@ pub fn trans_inline_asm<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ia: &ast::InlineAsm) } else if num_outputs == 1 { output_types[0] } else { - Type::struct_(bcx.ccx(), output_types[], false) + Type::struct_(bcx.ccx(), output_types.index(&FullRange), false) }; let dialect = match ia.dialect { diff --git a/src/librustc_trans/trans/base.rs b/src/librustc_trans/trans/base.rs index edcfaae0f802d..458c1fa923ecb 100644 --- a/src/librustc_trans/trans/base.rs +++ b/src/librustc_trans/trans/base.rs @@ -249,7 +249,7 @@ fn get_extern_rust_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, fn_ty: Ty<'tcx>, let f = decl_rust_fn(ccx, fn_ty, name); csearch::get_item_attrs(&ccx.sess().cstore, did, |attrs| { - set_llvm_fn_attrs(ccx, attrs[], f) + set_llvm_fn_attrs(ccx, attrs.index(&FullRange), f) }); ccx.externs().borrow_mut().insert(name.to_string(), f); @@ -303,7 +303,7 @@ pub fn decl_rust_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, _ => panic!("expected closure or fn") }; - let llfty = type_of_rust_fn(ccx, env, inputs[], output, abi); + let llfty = type_of_rust_fn(ccx, env, inputs.index(&FullRange), output, abi); debug!("decl_rust_fn(input count={},type={})", inputs.len(), ccx.tn().type_to_string(llfty)); @@ -369,7 +369,7 @@ fn require_alloc_fn<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, Err(s) => { bcx.sess().fatal(format!("allocation of `{}` {}", bcx.ty_to_string(info_ty), - s)[]); + s).index(&FullRange)); } } } @@ -488,7 +488,7 @@ pub fn unset_split_stack(f: ValueRef) { // silently mangles such symbols, breaking our linkage model. 
pub fn note_unique_llvm_symbol(ccx: &CrateContext, sym: String) { if ccx.all_llvm_symbols().borrow().contains(&sym) { - ccx.sess().bug(format!("duplicate LLVM symbol: {}", sym)[]); + ccx.sess().bug(format!("duplicate LLVM symbol: {}", sym).index(&FullRange)); } ccx.all_llvm_symbols().borrow_mut().insert(sym); } @@ -525,7 +525,7 @@ pub fn get_res_dtor<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ty::mk_nil(ccx.tcx())); get_extern_fn(ccx, &mut *ccx.externs().borrow_mut(), - name[], + name.index(&FullRange), llvm::CCallConv, llty, dtor_ty) @@ -774,8 +774,8 @@ pub fn iter_structural_ty<'blk, 'tcx, F>(cx: Block<'blk, 'tcx>, let variant_cx = fcx.new_temp_block( format!("enum-iter-variant-{}", - variant.disr_val.to_string()[]) - []); + variant.disr_val.to_string().index(&FullRange)) + .index(&FullRange)); match adt::trans_case(cx, &*repr, variant.disr_val) { _match::SingleResult(r) => { AddCase(llswitch, r.val, variant_cx.llbb) @@ -800,7 +800,7 @@ pub fn iter_structural_ty<'blk, 'tcx, F>(cx: Block<'blk, 'tcx>, } _ => { cx.sess().unimpl(format!("type in iter_structural_ty: {}", - ty_to_string(cx.tcx(), t))[]) + ty_to_string(cx.tcx(), t)).index(&FullRange)) } } return cx; @@ -882,7 +882,7 @@ pub fn fail_if_zero_or_overflows<'blk, 'tcx>( } _ => { cx.sess().bug(format!("fail-if-zero on unexpected type: {}", - ty_to_string(cx.tcx(), rhs_t))[]); + ty_to_string(cx.tcx(), rhs_t)).index(&FullRange)); } }; let bcx = with_cond(cx, is_zero, |bcx| { @@ -936,14 +936,14 @@ pub fn trans_external_path<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ty::ty_bare_fn(_, ref fn_ty) => { match ccx.sess().target.target.adjust_abi(fn_ty.abi) { Rust | RustCall => { - get_extern_rust_fn(ccx, t, name[], did) + get_extern_rust_fn(ccx, t, name.index(&FullRange), did) } RustIntrinsic => { ccx.sess().bug("unexpected intrinsic in trans_external_path") } _ => { foreign::register_foreign_item_fn(ccx, fn_ty.abi, t, - name[]) + name.index(&FullRange)) } } } @@ -990,7 +990,7 @@ pub fn invoke<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let llresult = Invoke(bcx, llfn, - llargs[], + llargs.index(&FullRange), normal_bcx.llbb, landing_pad, Some(attributes)); @@ -1006,7 +1006,7 @@ pub fn invoke<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, None => debuginfo::clear_source_location(bcx.fcx) }; - let llresult = Call(bcx, llfn, llargs[], Some(attributes)); + let llresult = Call(bcx, llfn, llargs.index(&FullRange), Some(attributes)); return (llresult, bcx); } } @@ -1123,7 +1123,7 @@ pub fn call_lifetime_end(cx: Block, ptr: ValueRef) { pub fn call_memcpy(cx: Block, dst: ValueRef, src: ValueRef, n_bytes: ValueRef, align: u32) { let _icx = push_ctxt("call_memcpy"); let ccx = cx.ccx(); - let key = match ccx.sess().target.target.target_word_size[] { + let key = match ccx.sess().target.target.target_word_size.index(&FullRange) { "32" => "llvm.memcpy.p0i8.p0i8.i32", "64" => "llvm.memcpy.p0i8.p0i8.i64", tws => panic!("Unsupported target word size for memcpy: {}", tws), @@ -1170,7 +1170,7 @@ fn memzero<'a, 'tcx>(b: &Builder<'a, 'tcx>, llptr: ValueRef, ty: Ty<'tcx>) { let llty = type_of::type_of(ccx, ty); - let intrinsic_key = match ccx.sess().target.target.target_word_size[] { + let intrinsic_key = match ccx.sess().target.target.target_word_size.index(&FullRange) { "32" => "llvm.memset.p0i8.i32", "64" => "llvm.memset.p0i8.i64", tws => panic!("Unsupported target word size for memset: {}", tws), @@ -1658,7 +1658,7 @@ fn copy_unboxed_closure_args_to_allocas<'blk, 'tcx>( "argtuple", arg_scope_id)); let untupled_arg_types = match monomorphized_arg_types[0].sty { - ty::ty_tup(ref types) 
=> types[], + ty::ty_tup(ref types) => types.index(&FullRange), _ => { bcx.tcx().sess.span_bug(args[0].pat.span, "first arg to `rust-call` ABI function \ @@ -1846,12 +1846,12 @@ pub fn trans_closure<'a, 'b, 'tcx>(ccx: &CrateContext<'a, 'tcx>, let arg_datums = if abi != RustCall { create_datums_for_fn_args(&fcx, - monomorphized_arg_types[]) + monomorphized_arg_types.index(&FullRange)) } else { create_datums_for_fn_args_under_call_abi( bcx, arg_scope, - monomorphized_arg_types[]) + monomorphized_arg_types.index(&FullRange)) }; bcx = match closure_env.kind { @@ -1859,16 +1859,16 @@ pub fn trans_closure<'a, 'b, 'tcx>(ccx: &CrateContext<'a, 'tcx>, copy_args_to_allocas(&fcx, arg_scope, bcx, - decl.inputs[], + decl.inputs.index(&FullRange), arg_datums) } closure::UnboxedClosure(..) => { copy_unboxed_closure_args_to_allocas( bcx, arg_scope, - decl.inputs[], + decl.inputs.index(&FullRange), arg_datums, - monomorphized_arg_types[]) + monomorphized_arg_types.index(&FullRange)) } }; @@ -1985,7 +1985,7 @@ pub fn trans_named_tuple_constructor<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, _ => ccx.sess().bug( format!("trans_enum_variant_constructor: \ unexpected ctor return type {}", - ctor_ty.repr(tcx))[]) + ctor_ty.repr(tcx)).index(&FullRange)) }; // Get location to store the result. If the user does not care about @@ -2008,7 +2008,7 @@ pub fn trans_named_tuple_constructor<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, bcx = expr::trans_adt(bcx, result_ty, disr, - fields[], + fields.index(&FullRange), None, expr::SaveIn(llresult), call_info); @@ -2057,7 +2057,7 @@ fn trans_enum_variant_or_tuple_like_struct<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx _ => ccx.sess().bug( format!("trans_enum_variant_or_tuple_like_struct: \ unexpected ctor return type {}", - ty_to_string(ccx.tcx(), ctor_ty))[]) + ty_to_string(ccx.tcx(), ctor_ty)).index(&FullRange)) }; let arena = TypedArena::new(); @@ -2069,7 +2069,7 @@ fn trans_enum_variant_or_tuple_like_struct<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx let arg_tys = ty::ty_fn_args(ctor_ty); - let arg_datums = create_datums_for_fn_args(&fcx, arg_tys[]); + let arg_datums = create_datums_for_fn_args(&fcx, arg_tys.index(&FullRange)); if !type_is_zero_size(fcx.ccx, result_ty.unwrap()) { let dest = fcx.get_ret_slot(bcx, result_ty, "eret_slot"); @@ -2155,7 +2155,7 @@ fn enum_variant_size_lint(ccx: &CrateContext, enum_def: &ast::EnumDef, sp: Span, *lvlsrc.unwrap(), Some(sp), format!("enum variant is more than three times larger \ ({} bytes) than the next largest (ignoring padding)", - largest)[]); + largest).index(&FullRange)); ccx.sess().span_note(enum_def.variants[largest_index].span, "this variant is the largest"); @@ -2239,7 +2239,7 @@ pub fn update_linkage(ccx: &CrateContext, if let Some(id) = id { let item = ccx.tcx().map.get(id); if let ast_map::NodeItem(i) = item { - if let Some(name) = attr::first_attr_value_str_by_name(i.attrs[], "linkage") { + if let Some(name) = attr::first_attr_value_str_by_name(i.attrs.as_slice(), "linkage") { if let Some(linkage) = llvm_linkage_by_name(name.get()) { llvm::SetLinkage(llval, linkage); } else { @@ -2273,7 +2273,7 @@ pub fn trans_item(ccx: &CrateContext, item: &ast::Item) { match item.node { ast::ItemFn(ref decl, _fn_style, abi, ref generics, ref body) => { if !generics.is_type_parameterized() { - let trans_everywhere = attr::requests_inline(item.attrs[]); + let trans_everywhere = attr::requests_inline(item.attrs.index(&FullRange)); // Ignore `trans_everywhere` for cross-crate inlined items // (`from_external`). 
`trans_item` will be called once for each // compilation unit that references the item, so it will still get @@ -2284,7 +2284,7 @@ pub fn trans_item(ccx: &CrateContext, item: &ast::Item) { foreign::trans_rust_fn_with_foreign_abi(ccx, &**decl, &**body, - item.attrs[], + item.attrs.index(&FullRange), llfn, &Substs::trans_empty(), item.id, @@ -2296,7 +2296,7 @@ pub fn trans_item(ccx: &CrateContext, item: &ast::Item) { llfn, &Substs::trans_empty(), item.id, - item.attrs[]); + item.attrs.index(&FullRange)); } update_linkage(ccx, llfn, @@ -2313,7 +2313,7 @@ pub fn trans_item(ccx: &CrateContext, item: &ast::Item) { ast::ItemImpl(_, _, ref generics, _, _, ref impl_items) => { meth::trans_impl(ccx, item.ident, - impl_items[], + impl_items.index(&FullRange), generics, item.id); } @@ -2343,7 +2343,7 @@ pub fn trans_item(ccx: &CrateContext, item: &ast::Item) { // Do static_assert checking. It can't really be done much earlier // because we need to get the value of the bool out of LLVM - if attr::contains_name(item.attrs[], "static_assert") { + if attr::contains_name(item.attrs.index(&FullRange), "static_assert") { if m == ast::MutMutable { ccx.sess().span_fatal(expr.span, "cannot have static_assert on a mutable \ @@ -2420,7 +2420,7 @@ fn register_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, _ => panic!("expected bare rust fn") }; - let llfn = decl_rust_fn(ccx, node_type, sym[]); + let llfn = decl_rust_fn(ccx, node_type, sym.index(&FullRange)); finish_register_fn(ccx, sp, sym, node_id, llfn); llfn } @@ -2462,7 +2462,7 @@ pub fn get_fn_llvm_attributes<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, fn_ty: Ty< match fn_sig.0.inputs[1].sty { ty::ty_tup(ref t_in) => { - inputs.push_all(t_in[]); + inputs.push_all(t_in.index(&FullRange)); inputs } _ => ccx.sess().bug("expected tuple'd inputs") @@ -2597,7 +2597,11 @@ pub fn register_fn_llvmty(ccx: &CrateContext, llfty: Type) -> ValueRef { debug!("register_fn_llvmty id={} sym={}", node_id, sym); - let llfn = decl_fn(ccx, sym[], cc, llfty, ty::FnConverging(ty::mk_nil(ccx.tcx()))); + let llfn = decl_fn(ccx, + sym.index(&FullRange), + cc, + llfty, + ty::FnConverging(ty::mk_nil(ccx.tcx()))); finish_register_fn(ccx, sp, sym, node_id, llfn); llfn } @@ -2650,7 +2654,7 @@ pub fn create_entry_wrapper(ccx: &CrateContext, let (start_fn, args) = if use_start_lang_item { let start_def_id = match ccx.tcx().lang_items.require(StartFnLangItem) { Ok(id) => id, - Err(s) => { ccx.sess().fatal(s[]); } + Err(s) => { ccx.sess().fatal(s.index(&FullRange)); } }; let start_fn = if start_def_id.krate == ast::LOCAL_CRATE { get_item_val(ccx, start_def_id.node) @@ -2742,7 +2746,7 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef { let val = match item { ast_map::NodeItem(i) => { let ty = ty::node_id_to_type(ccx.tcx(), i.id); - let sym = |&:| exported_name(ccx, id, ty, i.attrs[]); + let sym = |&:| exported_name(ccx, id, ty, i.attrs.index(&FullRange)); let v = match i.node { ast::ItemStatic(_, _, ref expr) => { @@ -2765,16 +2769,16 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef { } else { llvm::LLVMTypeOf(v) }; - if contains_null(sym[]) { + if contains_null(sym.index(&FullRange)) { ccx.sess().fatal( format!("Illegal null byte in export_name \ - value: `{}`", sym)[]); + value: `{}`", sym).index(&FullRange)); } let buf = CString::from_slice(sym.as_bytes()); let g = llvm::LLVMAddGlobal(ccx.llmod(), llty, buf.as_ptr()); - if attr::contains_name(i.attrs[], + if attr::contains_name(i.attrs.index(&FullRange), "thread_local") { llvm::set_thread_local(g, true); } 
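// Hypothetical sketch (not part of this patch) of the mechanical rewrite applied
// throughout this changeset: the full-range slicing sugar `expr[]` is spelled out
// as `expr.index(&FullRange)`, which on this branch yields `&[T]` from a `Vec<T>`
// and `&str` from a `String`. The helper names below are invented for illustration.
use std::ops::{FullRange, Index};

fn count_args(args: &[String]) -> uint { args.len() }
fn report(msg: &str) -> uint { msg.len() }

fn sketch() {
    let args: Vec<String> = vec!["bcx".to_string(), "llfn".to_string()];

    // formerly: count_args(args[])
    let n = count_args(args.index(&FullRange));

    // formerly: report(format!("{} args", n)[])
    let reported = report(format!("{} args", n).index(&FullRange));

    assert_eq!(n, 2u);
    assert_eq!(reported, 6u);
}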
@@ -2799,19 +2803,19 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef { sym, i.id) }; - set_llvm_fn_attrs(ccx, i.attrs[], llfn); + set_llvm_fn_attrs(ccx, i.attrs.index(&FullRange), llfn); llfn } _ => panic!("get_item_val: weird result in table") }; - match attr::first_attr_value_str_by_name(i.attrs[], + match attr::first_attr_value_str_by_name(i.attrs.index(&FullRange), "link_section") { Some(sect) => { if contains_null(sect.get()) { ccx.sess().fatal(format!("Illegal null byte in link_section value: `{}`", - sect.get())[]); + sect.get()).index(&FullRange)); } unsafe { let buf = CString::from_slice(sect.get().as_bytes()); @@ -2854,7 +2858,7 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef { let abi = ccx.tcx().map.get_foreign_abi(id); let ty = ty::node_id_to_type(ccx.tcx(), ni.id); let name = foreign::link_name(&*ni); - foreign::register_foreign_item_fn(ccx, abi, ty, name.get()[]) + foreign::register_foreign_item_fn(ccx, abi, ty, name.get().index(&FullRange)) } ast::ForeignItemStatic(..) => { foreign::register_static(ccx, &*ni) @@ -2877,7 +2881,7 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef { let sym = exported_name(ccx, id, ty, - enm.attrs[]); + enm.attrs.index(&FullRange)); llfn = match enm.node { ast::ItemEnum(_, _) => { @@ -2905,7 +2909,7 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef { id, ty, struct_item.attrs - []); + .index(&FullRange)); let llfn = register_fn(ccx, struct_item.span, sym, ctor_id, ty); set_inline_hint(llfn); @@ -2914,7 +2918,7 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef { ref variant => { ccx.sess().bug(format!("get_item_val(): unexpected variant: {}", - variant)[]) + variant).index(&FullRange)) } }; @@ -2935,10 +2939,10 @@ fn register_method(ccx: &CrateContext, id: ast::NodeId, m: &ast::Method) -> ValueRef { let mty = ty::node_id_to_type(ccx.tcx(), id); - let sym = exported_name(ccx, id, mty, m.attrs[]); + let sym = exported_name(ccx, id, mty, m.attrs.index(&FullRange)); let llfn = register_fn(ccx, m.span, sym, id, mty); - set_llvm_fn_attrs(ccx, m.attrs[], llfn); + set_llvm_fn_attrs(ccx, m.attrs.index(&FullRange), llfn); llfn } @@ -2977,7 +2981,7 @@ pub fn write_metadata(cx: &SharedCrateContext, krate: &ast::Crate) -> Vec { Some(compressed) => compressed, None => cx.sess().fatal("failed to compress metadata"), }.as_slice()); - let llmeta = C_bytes_in_context(cx.metadata_llcx(), compressed[]); + let llmeta = C_bytes_in_context(cx.metadata_llcx(), compressed.index(&FullRange)); let llconst = C_struct_in_context(cx.metadata_llcx(), &[llmeta], false); let name = format!("rust_metadata_{}_{}", cx.link_meta().crate_name, @@ -3106,7 +3110,7 @@ pub fn trans_crate<'tcx>(analysis: ty::CrateAnalysis<'tcx>) let link_meta = link::build_link_meta(&tcx.sess, krate, name); let codegen_units = tcx.sess.opts.cg.codegen_units; - let shared_ccx = SharedCrateContext::new(link_meta.crate_name[], + let shared_ccx = SharedCrateContext::new(link_meta.crate_name.index(&FullRange), codegen_units, tcx, export_map, @@ -3208,7 +3212,7 @@ pub fn trans_crate<'tcx>(analysis: ty::CrateAnalysis<'tcx>) llmod: shared_ccx.metadata_llmod(), }; let formats = shared_ccx.tcx().dependency_formats.borrow().clone(); - let no_builtins = attr::contains_name(krate.attrs[], "no_builtins"); + let no_builtins = attr::contains_name(krate.attrs.index(&FullRange), "no_builtins"); let translation = CrateTranslation { modules: modules, diff --git a/src/librustc_trans/trans/builder.rs 
b/src/librustc_trans/trans/builder.rs index e09d36ddae923..d0eaf799af1bd 100644 --- a/src/librustc_trans/trans/builder.rs +++ b/src/librustc_trans/trans/builder.rs @@ -552,11 +552,11 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { for (small_vec_e, &ix) in small_vec.iter_mut().zip(ixs.iter()) { *small_vec_e = C_i32(self.ccx, ix as i32); } - self.inbounds_gep(base, small_vec[..ixs.len()]) + self.inbounds_gep(base, small_vec.index(&(0..ixs.len()))) } else { let v = ixs.iter().map(|i| C_i32(self.ccx, *i as i32)).collect::>(); self.count_insn("gepi"); - self.inbounds_gep(base, v[]) + self.inbounds_gep(base, v.index(&FullRange)) } } @@ -764,8 +764,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let s = format!("{} ({})", text, self.ccx.sess().codemap().span_to_string(sp)); - debug!("{}", s[]); - self.add_comment(s[]); + debug!("{}", s.index(&FullRange)); + self.add_comment(s.index(&FullRange)); } } @@ -802,7 +802,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { }).collect::>(); debug!("Asm Output Type: {}", self.ccx.tn().type_to_string(output)); - let fty = Type::func(argtys[], &output); + let fty = Type::func(argtys.index(&FullRange), &output); unsafe { let v = llvm::LLVMInlineAsm( fty.to_ref(), asm, cons, volatile, alignstack, dia as c_uint); diff --git a/src/librustc_trans/trans/cabi.rs b/src/librustc_trans/trans/cabi.rs index 0e38dd0e5b596..a901142467b8d 100644 --- a/src/librustc_trans/trans/cabi.rs +++ b/src/librustc_trans/trans/cabi.rs @@ -108,7 +108,7 @@ pub fn compute_abi_info(ccx: &CrateContext, atys: &[Type], rty: Type, ret_def: bool) -> FnType { - match ccx.sess().target.target.arch[] { + match ccx.sess().target.target.arch.index(&FullRange) { "x86" => cabi_x86::compute_abi_info(ccx, atys, rty, ret_def), "x86_64" => if ccx.sess().target.target.options.is_like_windows { cabi_x86_win64::compute_abi_info(ccx, atys, rty, ret_def) @@ -119,6 +119,6 @@ pub fn compute_abi_info(ccx: &CrateContext, "aarch64" => cabi_aarch64::compute_abi_info(ccx, atys, rty, ret_def), "mips" => cabi_mips::compute_abi_info(ccx, atys, rty, ret_def), a => ccx.sess().fatal((format!("unrecognized arch \"{}\" in target specification", a)) - []), + .index(&FullRange)), } } diff --git a/src/librustc_trans/trans/cabi_x86_64.rs b/src/librustc_trans/trans/cabi_x86_64.rs index 9ec0c822bf5fe..f40072d1cba3e 100644 --- a/src/librustc_trans/trans/cabi_x86_64.rs +++ b/src/librustc_trans/trans/cabi_x86_64.rs @@ -318,7 +318,7 @@ fn llreg_ty(ccx: &CrateContext, cls: &[RegClass]) -> Type { tys.push(Type::i64(ccx)); } SSEFv => { - let vec_len = llvec_len(cls[i + 1u..]); + let vec_len = llvec_len(cls.index(&((i + 1u)..))); let vec_ty = Type::vector(&Type::f32(ccx), (vec_len * 2u) as u64); tys.push(vec_ty); i += vec_len; diff --git a/src/librustc_trans/trans/callee.rs b/src/librustc_trans/trans/callee.rs index 65e6d7e1924b6..498182c17e223 100644 --- a/src/librustc_trans/trans/callee.rs +++ b/src/librustc_trans/trans/callee.rs @@ -114,7 +114,7 @@ fn trans<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, expr: &ast::Expr) expr.span, format!("type of callee is neither bare-fn nor closure: \ {}", - bcx.ty_to_string(datum.ty))[]); + bcx.ty_to_string(datum.ty)).index(&FullRange)); } } } @@ -207,7 +207,7 @@ fn trans<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, expr: &ast::Expr) bcx.tcx().sess.span_bug( ref_expr.span, format!("cannot translate def {} \ - to a callable thing!", def)[]); + to a callable thing!", def).index(&FullRange)); } } } @@ -293,7 +293,7 @@ pub fn trans_fn_pointer_shim<'a, 'tcx>( _ => { tcx.sess.bug(format!("trans_fn_pointer_shim invoked on invalid type: {}", - 
bare_fn_ty.repr(tcx))[]); + bare_fn_ty.repr(tcx)).index(&FullRange)); } }; let tuple_input_ty = ty::mk_tup(tcx, input_tys.to_vec()); @@ -317,7 +317,7 @@ pub fn trans_fn_pointer_shim<'a, 'tcx>( let llfn = decl_internal_rust_fn(ccx, tuple_fn_ty, - function_name[]); + function_name.index(&FullRange)); // let block_arena = TypedArena::new(); @@ -352,7 +352,7 @@ pub fn trans_fn_pointer_shim<'a, 'tcx>( None, bare_fn_ty, |bcx, _| Callee { bcx: bcx, data: Fn(llfnpointer) }, - ArgVals(llargs[]), + ArgVals(llargs.index(&FullRange)), dest).bcx; finish_fn(&fcx, bcx, output_ty); @@ -775,7 +775,7 @@ pub fn trans_call_inner<'a, 'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>, // Invoke the actual rust fn and update bcx/llresult. let (llret, b) = base::invoke(bcx, llfn, - llargs[], + llargs.index(&FullRange), callee_ty, call_info); bcx = b; @@ -814,7 +814,7 @@ pub fn trans_call_inner<'a, 'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>, bcx = foreign::trans_native_call(bcx, callee_ty, llfn, opt_llretslot.unwrap(), - llargs[], arg_tys); + llargs.index(&FullRange), arg_tys); } fcx.pop_and_trans_custom_cleanup_scope(bcx, arg_cleanup_scope); diff --git a/src/librustc_trans/trans/cleanup.rs b/src/librustc_trans/trans/cleanup.rs index 79a5898e3d33e..561391400a1c0 100644 --- a/src/librustc_trans/trans/cleanup.rs +++ b/src/librustc_trans/trans/cleanup.rs @@ -24,8 +24,7 @@ use trans::common; use trans::common::{Block, FunctionContext, ExprId, NodeInfo}; use trans::debuginfo; use trans::glue; -// Temporary due to slicing syntax hacks (KILLME) -//use middle::region; +use middle::region; use trans::type_::Type; use middle::ty::{self, Ty}; use std::fmt; @@ -129,8 +128,7 @@ impl<'blk, 'tcx> CleanupMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx> { // excluding id's that correspond to closure bodies only). For // now we just say that if there is already an AST scope on the stack, // this new AST scope had better be its immediate child. - // Temporarily removed due to slicing syntax hacks (KILLME). - /*let top_scope = self.top_ast_scope(); + let top_scope = self.top_ast_scope(); if top_scope.is_some() { assert_eq!(self.ccx .tcx() @@ -138,7 +136,7 @@ impl<'blk, 'tcx> CleanupMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx> { .opt_encl_scope(region::CodeExtent::from_node_id(debug_loc.id)) .map(|s|s.node_id()), top_scope); - }*/ + } self.push_scope(CleanupScope::new(AstScopeKind(debug_loc.id), Some(debug_loc))); @@ -406,7 +404,7 @@ impl<'blk, 'tcx> CleanupMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx> { self.ccx.sess().bug( format!("no cleanup scope {} found", - self.ccx.tcx().map.node_to_string(cleanup_scope))[]); + self.ccx.tcx().map.node_to_string(cleanup_scope)).index(&FullRange)); } /// Schedules a cleanup to occur in the top-most scope, which must be a temporary scope. 
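// Hypothetical sketch (not part of this patch): the shape a type needs so that the
// `value.index(&FullRange)` spelling used above resolves, assuming the by-reference
// `Index` trait with an associated `Output` type that this branch uses. `Symbols`
// and its field are invented for illustration.
use std::ops::{FullRange, Index};

struct Symbols {
    names: Vec<String>,
}

impl Index<FullRange> for Symbols {
    type Output = [String];

    // Borrow all of the stored names, mirroring what `Vec<T>` does for `FullRange`.
    fn index(&self, _index: &FullRange) -> &[String] {
        self.names.as_slice()
    }
}

fn all_names(syms: &Symbols) -> uint {
    syms.index(&FullRange).len()
}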
@@ -588,7 +586,7 @@ impl<'blk, 'tcx> CleanupHelperMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx LoopExit(id, _) => { self.ccx.sess().bug(format!( "cannot exit from scope {}, \ - not in scope", id)[]); + not in scope", id).index(&FullRange)); } } } @@ -657,7 +655,7 @@ impl<'blk, 'tcx> CleanupHelperMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx let name = scope.block_name("clean"); debug!("generating cleanups for {}", name); let bcx_in = self.new_block(label.is_unwind(), - name[], + name.index(&FullRange), None); let mut bcx_out = bcx_in; for cleanup in scope.cleanups.iter().rev() { @@ -704,7 +702,7 @@ impl<'blk, 'tcx> CleanupHelperMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx Some(llbb) => { return llbb; } None => { let name = last_scope.block_name("unwind"); - pad_bcx = self.new_block(true, name[], None); + pad_bcx = self.new_block(true, name.index(&FullRange), None); last_scope.cached_landing_pad = Some(pad_bcx.llbb); } } @@ -1025,7 +1023,7 @@ pub fn temporary_scope(tcx: &ty::ctxt, } None => { tcx.sess.bug(format!("no temporary scope available for expr {}", - id)[]) + id).index(&FullRange)) } } } diff --git a/src/librustc_trans/trans/closure.rs b/src/librustc_trans/trans/closure.rs index 6f2def16e7674..233cbe8f28003 100644 --- a/src/librustc_trans/trans/closure.rs +++ b/src/librustc_trans/trans/closure.rs @@ -154,7 +154,7 @@ pub fn store_environment<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let tcx = ccx.tcx(); // compute the type of the closure - let cdata_ty = mk_closure_tys(tcx, bound_values[]); + let cdata_ty = mk_closure_tys(tcx, bound_values.index(&FullRange)); // cbox_ty has the form of a tuple: (a, b, c) we want a ptr to a // tuple. This could be a ptr in uniq or a box or on stack, @@ -183,7 +183,7 @@ pub fn store_environment<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, if ccx.sess().asm_comments() { add_comment(bcx, format!("Copy {} into closure", - bv.to_string(ccx))[]); + bv.to_string(ccx)).index(&FullRange)); } let bound_data = GEPi(bcx, llbox, &[0u, abi::BOX_FIELD_BODY, i]); @@ -420,7 +420,7 @@ pub fn trans_expr_fn<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let s = tcx.map.with_path(id, |path| { mangle_internal_name_by_path_and_seq(path, "closure") }); - let llfn = decl_internal_rust_fn(ccx, fty, s[]); + let llfn = decl_internal_rust_fn(ccx, fty, s.index(&FullRange)); // set an inline hint for all closures set_inline_hint(llfn); @@ -444,7 +444,7 @@ pub fn trans_expr_fn<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, &[], ty::ty_fn_ret(fty), ty::ty_fn_abi(fty), - ClosureEnv::new(freevars[], + ClosureEnv::new(freevars.index(&FullRange), BoxedClosure(cdata_ty, store))); fill_fn_pair(bcx, dest_addr, llfn, llbox); bcx @@ -489,7 +489,7 @@ pub fn get_or_create_declaration_if_unboxed_closure<'a, 'tcx>(ccx: &CrateContext mangle_internal_name_by_path_and_seq(path, "unboxed_closure") }); - let llfn = decl_internal_rust_fn(ccx, function_type, symbol[]); + let llfn = decl_internal_rust_fn(ccx, function_type, symbol.index(&FullRange)); // set an inline hint for all closures set_inline_hint(llfn); @@ -542,7 +542,7 @@ pub fn trans_unboxed_closure<'blk, 'tcx>( &[], function_type.sig.0.output, function_type.abi, - ClosureEnv::new(freevars[], + ClosureEnv::new(freevars.index(&FullRange), UnboxedClosure(freevar_mode))); // Don't hoist this to the top of the function. 
It's perfectly legitimate @@ -579,3 +579,4 @@ pub fn trans_unboxed_closure<'blk, 'tcx>( bcx } + diff --git a/src/librustc_trans/trans/common.rs b/src/librustc_trans/trans/common.rs index 094f98e988aad..8596583d4a592 100644 --- a/src/librustc_trans/trans/common.rs +++ b/src/librustc_trans/trans/common.rs @@ -218,7 +218,7 @@ pub fn gensym_name(name: &str) -> PathElem { let num = token::gensym(name).uint(); // use one colon which will get translated to a period by the mangler, and // we're guaranteed that `num` is globally unique for this crate. - PathName(token::gensym(format!("{}:{}", name, num)[])) + PathName(token::gensym(format!("{}:{}", name, num).index(&FullRange))) } #[derive(Copy)] @@ -546,7 +546,7 @@ impl<'blk, 'tcx> BlockS<'blk, 'tcx> { Some(v) => v.clone(), None => { self.tcx().sess.bug(format!( - "no def associated with node id {}", nid)[]); + "no def associated with node id {}", nid).index(&FullRange)); } } } @@ -976,7 +976,7 @@ pub fn fulfill_obligation<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, span, format!("Encountered error `{}` selecting `{}` during trans", e.repr(tcx), - trait_ref.repr(tcx))[]) + trait_ref.repr(tcx)).index(&FullRange)) } }; @@ -1069,7 +1069,7 @@ pub fn drain_fulfillment_cx<'a,'tcx,T>(span: Span, infcx.tcx.sess.span_bug( span, format!("Encountered errors `{}` fulfilling during trans", - errors.repr(infcx.tcx))[]); + errors.repr(infcx.tcx)).index(&FullRange)); } } } @@ -1109,7 +1109,7 @@ pub fn node_id_substs<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, if substs.types.any(|t| ty::type_needs_infer(*t)) { tcx.sess.bug(format!("type parameters for node {} include inference types: {}", - node, substs.repr(tcx))[]); + node, substs.repr(tcx)).index(&FullRange)); } monomorphize::apply_param_substs(tcx, @@ -1127,8 +1127,8 @@ pub fn langcall(bcx: Block, Err(s) => { let msg = format!("{} {}", msg, s); match span { - Some(span) => bcx.tcx().sess.span_fatal(span, msg[]), - None => bcx.tcx().sess.fatal(msg[]), + Some(span) => bcx.tcx().sess.span_fatal(span, msg.index(&FullRange)), + None => bcx.tcx().sess.fatal(msg.index(&FullRange)), } } } diff --git a/src/librustc_trans/trans/consts.rs b/src/librustc_trans/trans/consts.rs index a3861e71d83de..f9b14e1de4fdb 100644 --- a/src/librustc_trans/trans/consts.rs +++ b/src/librustc_trans/trans/consts.rs @@ -54,7 +54,7 @@ pub fn const_lit(cx: &CrateContext, e: &ast::Expr, lit: &ast::Lit) _ => cx.sess().span_bug(lit.span, format!("integer literal has type {} (expected int \ or uint)", - ty_to_string(cx.tcx(), lit_int_ty))[]) + ty_to_string(cx.tcx(), lit_int_ty)).index(&FullRange)) } } ast::LitFloat(ref fs, t) => { @@ -74,7 +74,7 @@ pub fn const_lit(cx: &CrateContext, e: &ast::Expr, lit: &ast::Lit) } ast::LitBool(b) => C_bool(cx, b), ast::LitStr(ref s, _) => C_str_slice(cx, (*s).clone()), - ast::LitBinary(ref data) => C_binary_slice(cx, data[]), + ast::LitBinary(ref data) => C_binary_slice(cx, data.index(&FullRange)), } } @@ -93,9 +93,9 @@ fn const_vec(cx: &CrateContext, e: &ast::Expr, .collect::>(); // If the vector contains enums, an LLVM array won't work. 
let v = if vs.iter().any(|vi| val_ty(*vi) != llunitty) { - C_struct(cx, vs[], false) + C_struct(cx, vs.index(&FullRange), false) } else { - C_array(llunitty, vs[]) + C_array(llunitty, vs.index(&FullRange)) }; (v, llunitty) } @@ -149,13 +149,13 @@ fn const_deref<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, v: ValueRef, } _ => { cx.sess().bug(format!("unexpected dereferenceable type {}", - ty_to_string(cx.tcx(), t))[]) + ty_to_string(cx.tcx(), t)).index(&FullRange)) } } } None => { cx.sess().bug(format!("cannot dereference const of type {}", - ty_to_string(cx.tcx(), t))[]) + ty_to_string(cx.tcx(), t)).index(&FullRange)) } } } @@ -252,7 +252,7 @@ pub fn const_expr<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, e: &ast::Expr) } _ => cx.sess().span_bug(e.span, format!("unimplemented type in const unsize: {}", - ty_to_string(cx.tcx(), ty))[]) + ty_to_string(cx.tcx(), ty)).index(&FullRange)) } } _ => { @@ -260,7 +260,7 @@ pub fn const_expr<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, e: &ast::Expr) .span_bug(e.span, format!("unimplemented const \ autoref {}", - autoref)[]) + autoref).index(&FullRange)) } } } @@ -281,7 +281,7 @@ pub fn const_expr<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, e: &ast::Expr) } cx.sess().bug(format!("const {} of type {} has size {} instead of {}", e.repr(cx.tcx()), ty_to_string(cx.tcx(), ety), - csize, tsize)[]); + csize, tsize).index(&FullRange)); } (llconst, ety_adjusted) } @@ -431,7 +431,7 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef { _ => cx.sess().span_bug(base.span, format!("index-expr base must be a vector \ or string type, found {}", - ty_to_string(cx.tcx(), bt))[]) + ty_to_string(cx.tcx(), bt)).index(&FullRange)) }, ty::ty_rptr(_, mt) => match mt.ty.sty { ty::ty_vec(_, Some(u)) => { @@ -440,12 +440,12 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef { _ => cx.sess().span_bug(base.span, format!("index-expr base must be a vector \ or string type, found {}", - ty_to_string(cx.tcx(), bt))[]) + ty_to_string(cx.tcx(), bt)).index(&FullRange)) }, _ => cx.sess().span_bug(base.span, format!("index-expr base must be a vector \ or string type, found {}", - ty_to_string(cx.tcx(), bt))[]) + ty_to_string(cx.tcx(), bt)).index(&FullRange)) }; let len = llvm::LLVMConstIntGetZExtValue(len) as u64; @@ -546,8 +546,8 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef { ast::ExprTup(ref es) => { let ety = ty::expr_ty(cx.tcx(), e); let repr = adt::represent_type(cx, ety); - let vals = map_list(es[]); - adt::trans_const(cx, &*repr, 0, vals[]) + let vals = map_list(es.index(&FullRange)); + adt::trans_const(cx, &*repr, 0, vals.index(&FullRange)) } ast::ExprStruct(_, ref fs, ref base_opt) => { let ety = ty::expr_ty(cx.tcx(), e); @@ -578,7 +578,7 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef { } } }).collect::>(); - adt::trans_const(cx, &*repr, discr, cs[]) + adt::trans_const(cx, &*repr, discr, cs.index(&FullRange)) }) } ast::ExprVec(ref es) => { @@ -595,9 +595,9 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef { }; let vs: Vec<_> = repeat(const_expr(cx, &**elem).0).take(n).collect(); if vs.iter().any(|vi| val_ty(*vi) != llunitty) { - C_struct(cx, vs[], false) + C_struct(cx, vs.index(&FullRange), false) } else { - C_array(llunitty, vs[]) + C_array(llunitty, vs.index(&FullRange)) } } ast::ExprPath(_) => { @@ -645,8 +645,8 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef { Some(def::DefStruct(_)) => { let ety = ty::expr_ty(cx.tcx(), e); let repr = 
adt::represent_type(cx, ety); - let arg_vals = map_list(args[]); - adt::trans_const(cx, &*repr, 0, arg_vals[]) + let arg_vals = map_list(args.index(&FullRange)); + adt::trans_const(cx, &*repr, 0, arg_vals.index(&FullRange)) } Some(def::DefVariant(enum_did, variant_did, _)) => { let ety = ty::expr_ty(cx.tcx(), e); @@ -654,11 +654,11 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef { let vinfo = ty::enum_variant_with_id(cx.tcx(), enum_did, variant_did); - let arg_vals = map_list(args[]); + let arg_vals = map_list(args.index(&FullRange)); adt::trans_const(cx, &*repr, vinfo.disr_val, - arg_vals[]) + arg_vals.index(&FullRange)) } _ => cx.sess().span_bug(e.span, "expected a struct or variant def") } diff --git a/src/librustc_trans/trans/context.rs b/src/librustc_trans/trans/context.rs index 3726cf14023ee..585b338b0262b 100644 --- a/src/librustc_trans/trans/context.rs +++ b/src/librustc_trans/trans/context.rs @@ -284,7 +284,7 @@ impl<'tcx> SharedCrateContext<'tcx> { // such as a function name in the module. // 1. http://llvm.org/bugs/show_bug.cgi?id=11479 let llmod_id = format!("{}.{}.rs", crate_name, i); - let local_ccx = LocalCrateContext::new(&shared_ccx, llmod_id[]); + let local_ccx = LocalCrateContext::new(&shared_ccx, llmod_id.index(&FullRange)); shared_ccx.local_ccxs.push(local_ccx); } @@ -374,7 +374,7 @@ impl<'tcx> LocalCrateContext<'tcx> { .target .target .data_layout - []); + .index(&FullRange)); let dbg_cx = if shared.tcx.sess.opts.debuginfo != NoDebugInfo { Some(debuginfo::CrateDebugContext::new(llmod)) @@ -721,7 +721,7 @@ impl<'b, 'tcx> CrateContext<'b, 'tcx> { /// currently conservatively bounded to 1 << 47 as that is enough to cover the current usable /// address space on 64-bit ARMv8 and x86_64. pub fn obj_size_bound(&self) -> u64 { - match self.sess().target.target.target_word_size[] { + match self.sess().target.target.target_word_size.index(&FullRange) { "32" => 1 << 31, "64" => 1 << 47, _ => unreachable!() // error handled by config::build_target_config @@ -731,7 +731,7 @@ impl<'b, 'tcx> CrateContext<'b, 'tcx> { pub fn report_overbig_object(&self, obj: Ty<'tcx>) -> ! 
{ self.sess().fatal( format!("the type `{}` is too big for the current architecture", - obj.repr(self.tcx()))[]) + obj.repr(self.tcx())).index(&FullRange)) } } diff --git a/src/librustc_trans/trans/controlflow.rs b/src/librustc_trans/trans/controlflow.rs index 768de89d5935d..67e8b4f686a3d 100644 --- a/src/librustc_trans/trans/controlflow.rs +++ b/src/librustc_trans/trans/controlflow.rs @@ -48,7 +48,7 @@ pub fn trans_stmt<'blk, 'tcx>(cx: Block<'blk, 'tcx>, debug!("trans_stmt({})", s.repr(cx.tcx())); if cx.sess().asm_comments() { - add_span_comment(cx, s.span, s.repr(cx.tcx())[]); + add_span_comment(cx, s.span, s.repr(cx.tcx()).index(&FullRange)); } let mut bcx = cx; @@ -188,7 +188,7 @@ pub fn trans_if<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, } let name = format!("then-block-{}-", thn.id); - let then_bcx_in = bcx.fcx.new_id_block(name[], thn.id); + let then_bcx_in = bcx.fcx.new_id_block(name.index(&FullRange), thn.id); let then_bcx_out = trans_block(then_bcx_in, &*thn, dest); trans::debuginfo::clear_source_location(bcx.fcx); @@ -437,7 +437,7 @@ pub fn trans_break_cont<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, Some(&def::DefLabel(loop_id)) => loop_id, ref r => { bcx.tcx().sess.bug(format!("{} in def-map for label", - r)[]) + r).index(&FullRange)) } } } @@ -501,7 +501,7 @@ pub fn trans_fail<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let v_str = C_str_slice(ccx, fail_str); let loc = bcx.sess().codemap().lookup_char_pos(sp.lo); - let filename = token::intern_and_get_ident(loc.file.name[]); + let filename = token::intern_and_get_ident(loc.file.name.index(&FullRange)); let filename = C_str_slice(ccx, filename); let line = C_uint(ccx, loc.line); let expr_file_line_const = C_struct(ccx, &[v_str, filename, line], false); @@ -510,7 +510,7 @@ pub fn trans_fail<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let did = langcall(bcx, Some(sp), "", PanicFnLangItem); let bcx = callee::trans_lang_call(bcx, did, - args[], + args.index(&FullRange), Some(expr::Ignore)).bcx; Unreachable(bcx); return bcx; @@ -526,7 +526,7 @@ pub fn trans_fail_bounds_check<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, // Extract the file/line from the span let loc = bcx.sess().codemap().lookup_char_pos(sp.lo); - let filename = token::intern_and_get_ident(loc.file.name[]); + let filename = token::intern_and_get_ident(loc.file.name.index(&FullRange)); // Invoke the lang item let filename = C_str_slice(ccx, filename); @@ -537,7 +537,7 @@ pub fn trans_fail_bounds_check<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let did = langcall(bcx, Some(sp), "", PanicBoundsCheckFnLangItem); let bcx = callee::trans_lang_call(bcx, did, - args[], + args.index(&FullRange), Some(expr::Ignore)).bcx; Unreachable(bcx); return bcx; diff --git a/src/librustc_trans/trans/datum.rs b/src/librustc_trans/trans/datum.rs index d73b3f6b4e420..6a1df82056d49 100644 --- a/src/librustc_trans/trans/datum.rs +++ b/src/librustc_trans/trans/datum.rs @@ -464,7 +464,7 @@ impl<'tcx> Datum<'tcx, Lvalue> { } _ => bcx.tcx().sess.bug( format!("Unexpected unsized type in get_element: {}", - bcx.ty_to_string(self.ty))[]) + bcx.ty_to_string(self.ty)).index(&FullRange)) }; Datum { val: val, diff --git a/src/librustc_trans/trans/debuginfo.rs b/src/librustc_trans/trans/debuginfo.rs index 3f0f7fd9bd303..44890407d0869 100644 --- a/src/librustc_trans/trans/debuginfo.rs +++ b/src/librustc_trans/trans/debuginfo.rs @@ -285,7 +285,7 @@ impl<'tcx> TypeMap<'tcx> { metadata: DIType) { if self.type_to_metadata.insert(type_, metadata).is_some() { cx.sess().bug(format!("Type metadata for Ty '{}' is already in the TypeMap!", - 
ppaux::ty_to_string(cx.tcx(), type_))[]); + ppaux::ty_to_string(cx.tcx(), type_)).index(&FullRange)); } } @@ -298,7 +298,7 @@ impl<'tcx> TypeMap<'tcx> { if self.unique_id_to_metadata.insert(unique_type_id, metadata).is_some() { let unique_type_id_str = self.get_unique_type_id_as_string(unique_type_id); cx.sess().bug(format!("Type metadata for unique id '{}' is already in the TypeMap!", - unique_type_id_str[])[]); + unique_type_id_str.index(&FullRange)).index(&FullRange)); } } @@ -335,13 +335,13 @@ impl<'tcx> TypeMap<'tcx> { // unique ptr (~) -> {~ :pointee-uid:} // @-ptr (@) -> {@ :pointee-uid:} // sized vec ([T; x]) -> {[:size:] :element-uid:} - // unsized vec ([T]) -> {[] :element-uid:} + // unsized vec ([T]) -> {.index(&FullRange) :element-uid:} // trait (T) -> {trait_:svh: / :node-id:_<(:param-uid:),*> } // closure -> { :store-sigil: |(:param-uid:),* <,_...>| -> \ // :return-type-uid: : (:bounds:)*} // function -> { fn( (:param-uid:)* <,_...> ) -> \ // :return-type-uid:} - // unique vec box (~[]) -> {HEAP_VEC_BOX<:pointee-uid:>} + // unique vec box (~.index(&FullRange)) -> {HEAP_VEC_BOX<:pointee-uid:>} // gc box -> {GC_BOX<:pointee-uid:>} match self.type_to_unique_id.get(&type_).cloned() { @@ -379,14 +379,14 @@ impl<'tcx> TypeMap<'tcx> { self.get_unique_type_id_of_type(cx, component_type); let component_type_id = self.get_unique_type_id_as_string(component_type_id); - unique_type_id.push_str(component_type_id[]); + unique_type_id.push_str(component_type_id.index(&FullRange)); } }, ty::ty_uniq(inner_type) => { unique_type_id.push('~'); let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type); let inner_type_id = self.get_unique_type_id_as_string(inner_type_id); - unique_type_id.push_str(inner_type_id[]); + unique_type_id.push_str(inner_type_id.index(&FullRange)); }, ty::ty_ptr(ty::mt { ty: inner_type, mutbl } ) => { unique_type_id.push('*'); @@ -396,7 +396,7 @@ impl<'tcx> TypeMap<'tcx> { let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type); let inner_type_id = self.get_unique_type_id_as_string(inner_type_id); - unique_type_id.push_str(inner_type_id[]); + unique_type_id.push_str(inner_type_id.index(&FullRange)); }, ty::ty_rptr(_, ty::mt { ty: inner_type, mutbl }) => { unique_type_id.push('&'); @@ -406,12 +406,12 @@ impl<'tcx> TypeMap<'tcx> { let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type); let inner_type_id = self.get_unique_type_id_as_string(inner_type_id); - unique_type_id.push_str(inner_type_id[]); + unique_type_id.push_str(inner_type_id.index(&FullRange)); }, ty::ty_vec(inner_type, optional_length) => { match optional_length { Some(len) => { - unique_type_id.push_str(format!("[{}]", len)[]); + unique_type_id.push_str(format!("[{}]", len).index(&FullRange)); } None => { unique_type_id.push_str("[]"); @@ -420,7 +420,7 @@ impl<'tcx> TypeMap<'tcx> { let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type); let inner_type_id = self.get_unique_type_id_as_string(inner_type_id); - unique_type_id.push_str(inner_type_id[]); + unique_type_id.push_str(inner_type_id.index(&FullRange)); }, ty::ty_trait(ref trait_data) => { unique_type_id.push_str("trait "); @@ -445,7 +445,7 @@ impl<'tcx> TypeMap<'tcx> { self.get_unique_type_id_of_type(cx, parameter_type); let parameter_type_id = self.get_unique_type_id_as_string(parameter_type_id); - unique_type_id.push_str(parameter_type_id[]); + unique_type_id.push_str(parameter_type_id.index(&FullRange)); unique_type_id.push(','); } @@ -458,7 +458,7 @@ impl<'tcx> TypeMap<'tcx> { ty::FnConverging(ret_ty) 
=> { let return_type_id = self.get_unique_type_id_of_type(cx, ret_ty); let return_type_id = self.get_unique_type_id_as_string(return_type_id); - unique_type_id.push_str(return_type_id[]); + unique_type_id.push_str(return_type_id.index(&FullRange)); } ty::FnDiverging => { unique_type_id.push_str("!"); @@ -474,8 +474,8 @@ impl<'tcx> TypeMap<'tcx> { }, _ => { cx.sess().bug(format!("get_unique_type_id_of_type() - unexpected type: {}, {}", - ppaux::ty_to_string(cx.tcx(), type_)[], - type_.sty)[]) + ppaux::ty_to_string(cx.tcx(), type_).index(&FullRange), + type_.sty).index(&FullRange)) } }; @@ -518,7 +518,7 @@ impl<'tcx> TypeMap<'tcx> { output.push_str(crate_hash.as_str()); output.push_str("/"); - output.push_str(format!("{:x}", def_id.node)[]); + output.push_str(format!("{:x}", def_id.node).index(&FullRange)); // Maybe check that there is no self type here. @@ -531,7 +531,7 @@ impl<'tcx> TypeMap<'tcx> { type_map.get_unique_type_id_of_type(cx, type_parameter); let param_type_id = type_map.get_unique_type_id_as_string(param_type_id); - output.push_str(param_type_id[]); + output.push_str(param_type_id.index(&FullRange)); output.push(','); } @@ -573,7 +573,7 @@ impl<'tcx> TypeMap<'tcx> { self.get_unique_type_id_of_type(cx, parameter_type); let parameter_type_id = self.get_unique_type_id_as_string(parameter_type_id); - unique_type_id.push_str(parameter_type_id[]); + unique_type_id.push_str(parameter_type_id.index(&FullRange)); unique_type_id.push(','); } @@ -587,7 +587,7 @@ impl<'tcx> TypeMap<'tcx> { ty::FnConverging(ret_ty) => { let return_type_id = self.get_unique_type_id_of_type(cx, ret_ty); let return_type_id = self.get_unique_type_id_as_string(return_type_id); - unique_type_id.push_str(return_type_id[]); + unique_type_id.push_str(return_type_id.index(&FullRange)); } ty::FnDiverging => { unique_type_id.push_str("!"); @@ -618,7 +618,7 @@ impl<'tcx> TypeMap<'tcx> { let enum_type_id = self.get_unique_type_id_of_type(cx, enum_type); let enum_variant_type_id = format!("{}::{}", self.get_unique_type_id_as_string(enum_type_id) - [], + .index(&FullRange), variant_name); let interner_key = self.unique_id_interner.intern(Rc::new(enum_variant_type_id)); UniqueTypeId(interner_key) @@ -799,19 +799,19 @@ pub fn create_global_var_metadata(cx: &CrateContext, create_global_var_metadata() - Captured var-id refers to \ unexpected ast_item variant: {}", - var_item)[]) + var_item).index(&FullRange)) } } }, _ => cx.sess().bug(format!("debuginfo::create_global_var_metadata() \ - Captured var-id refers to unexpected \ ast_map variant: {}", - var_item)[]) + var_item).index(&FullRange)) }; let (file_metadata, line_number) = if span != codemap::DUMMY_SP { let loc = span_start(cx, span); - (file_metadata(cx, loc.file.name[]), loc.line as c_uint) + (file_metadata(cx, loc.file.name.index(&FullRange)), loc.line as c_uint) } else { (UNKNOWN_FILE_METADATA, UNKNOWN_LINE_NUMBER) }; @@ -822,7 +822,7 @@ pub fn create_global_var_metadata(cx: &CrateContext, let namespace_node = namespace_for_item(cx, ast_util::local_def(node_id)); let var_name = token::get_ident(ident).get().to_string(); let linkage_name = - namespace_node.mangled_name_of_contained_item(var_name[]); + namespace_node.mangled_name_of_contained_item(var_name.index(&FullRange)); let var_scope = namespace_node.scope; let var_name = CString::from_slice(var_name.as_bytes()); @@ -861,7 +861,7 @@ pub fn create_local_var_metadata(bcx: Block, local: &ast::Local) { None => { bcx.sess().span_bug(span, format!("no entry in lllocals table for {}", - node_id)[]); + 
node_id).index(&FullRange)); } }; @@ -915,7 +915,7 @@ pub fn create_captured_var_metadata<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, "debuginfo::create_captured_var_metadata() - \ Captured var-id refers to unexpected \ ast_map variant: {}", - ast_item)[]); + ast_item).index(&FullRange)); } } } @@ -925,7 +925,7 @@ pub fn create_captured_var_metadata<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, format!("debuginfo::create_captured_var_metadata() - \ Captured var-id refers to unexpected \ ast_map variant: {}", - ast_item)[]); + ast_item).index(&FullRange)); } }; @@ -955,7 +955,7 @@ pub fn create_captured_var_metadata<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let variable_access = IndirectVariable { alloca: env_pointer, - address_operations: address_operations[..address_op_count] + address_operations: address_operations.index(&(0..address_op_count)) }; declare_local(bcx, @@ -1032,7 +1032,7 @@ pub fn create_argument_metadata(bcx: Block, arg: &ast::Arg) { None => { bcx.sess().span_bug(span, format!("no entry in lllocals table for {}", - node_id)[]); + node_id).index(&FullRange)); } }; @@ -1146,7 +1146,7 @@ pub fn get_cleanup_debug_loc_for_ast_node<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, if let Some(code_snippet) = code_snippet { let bytes = code_snippet.as_bytes(); - if bytes.len() > 0 && bytes[bytes.len()-1 ..] == b"}" { + if bytes.len() > 0 && bytes.index(&((bytes.len()-1)..)) == b"}" { cleanup_span = Span { lo: node_span.hi - codemap::BytePos(1), hi: node_span.hi, @@ -1290,7 +1290,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, match expr.node { ast::ExprClosure(_, _, ref fn_decl, ref top_level_block) => { let name = format!("fn{}", token::gensym("fn")); - let name = token::str_to_ident(name[]); + let name = token::str_to_ident(name.index(&FullRange)); (name, &**fn_decl, // This is not quite right. It should actually inherit // the generics of the enclosing function. 
@@ -1322,7 +1322,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, cx.sess() .bug(format!("create_function_debug_context: \ unexpected sort of node: {}", - fnitem)[]) + fnitem).index(&FullRange)) } } } @@ -1333,7 +1333,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, } _ => cx.sess().bug(format!("create_function_debug_context: \ unexpected sort of node: {}", - fnitem)[]) + fnitem).index(&FullRange)) }; // This can be the case for functions inlined from another crate @@ -1342,7 +1342,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, } let loc = span_start(cx, span); - let file_metadata = file_metadata(cx, loc.file.name[]); + let file_metadata = file_metadata(cx, loc.file.name.index(&FullRange)); let function_type_metadata = unsafe { let fn_signature = get_function_signature(cx, @@ -1369,7 +1369,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let (linkage_name, containing_scope) = if has_path { let namespace_node = namespace_for_item(cx, ast_util::local_def(fn_ast_id)); let linkage_name = namespace_node.mangled_name_of_contained_item( - function_name[]); + function_name.index(&FullRange)); let containing_scope = namespace_node.scope; (linkage_name, containing_scope) } else { @@ -1457,7 +1457,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, signature.push(type_metadata(cx, arg_type, codemap::DUMMY_SP)); } - return create_DIArray(DIB(cx), signature[]); + return create_DIArray(DIB(cx), signature.index(&FullRange)); } fn get_template_parameters<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, @@ -1492,7 +1492,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, actual_self_type, true); - name_to_append_suffix_to.push_str(actual_self_type_name[]); + name_to_append_suffix_to.push_str(actual_self_type_name.index(&FullRange)); if generics.is_type_parameterized() { name_to_append_suffix_to.push_str(","); @@ -1531,7 +1531,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let actual_type_name = compute_debuginfo_type_name(cx, actual_type, true); - name_to_append_suffix_to.push_str(actual_type_name[]); + name_to_append_suffix_to.push_str(actual_type_name.index(&FullRange)); if index != generics.ty_params.len() - 1 { name_to_append_suffix_to.push_str(","); @@ -1558,7 +1558,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, name_to_append_suffix_to.push('>'); - return create_DIArray(DIB(cx), template_params[]); + return create_DIArray(DIB(cx), template_params.index(&FullRange)); } } @@ -1652,7 +1652,7 @@ fn declare_local<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let cx: &CrateContext = bcx.ccx(); let filename = span_start(cx, span).file.name.clone(); - let file_metadata = file_metadata(cx, filename[]); + let file_metadata = file_metadata(cx, filename.index(&FullRange)); let name = token::get_ident(variable_ident); let loc = span_start(cx, span); @@ -1738,7 +1738,7 @@ fn file_metadata(cx: &CrateContext, full_path: &str) -> DIFile { let work_dir = cx.sess().working_dir.as_str().unwrap(); let file_name = if full_path.starts_with(work_dir) { - full_path[work_dir.len() + 1u..full_path.len()] + full_path.index(&((work_dir.len() + 1u)..full_path.len())) } else { full_path }; @@ -1770,7 +1770,7 @@ fn scope_metadata(fcx: &FunctionContext, fcx.ccx.sess().span_bug(error_reporting_span, format!("debuginfo: Could not find scope info for node {}", - node)[]); + node).index(&FullRange)); } } 
} @@ -1966,7 +1966,7 @@ impl<'tcx> RecursiveTypeDescription<'tcx> { cx.sess().bug(format!("Forward declaration of potentially recursive type \ '{}' was not found in TypeMap!", ppaux::ty_to_string(cx.tcx(), unfinished_type)) - []); + .index(&FullRange)); } } @@ -1978,7 +1978,7 @@ impl<'tcx> RecursiveTypeDescription<'tcx> { set_members_of_composite_type(cx, metadata_stub, llvm_type, - member_descriptions[]); + member_descriptions.index(&FullRange)); return MetadataCreationResult::new(metadata_stub, true); } } @@ -2050,7 +2050,7 @@ fn prepare_struct_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let struct_metadata_stub = create_struct_stub(cx, struct_llvm_type, - struct_name[], + struct_name.index(&FullRange), unique_type_id, containing_scope); @@ -2111,7 +2111,7 @@ fn prepare_tuple_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, unique_type_id, create_struct_stub(cx, tuple_llvm_type, - tuple_name[], + tuple_name.index(&FullRange), unique_type_id, UNKNOWN_SCOPE_METADATA), tuple_llvm_type, @@ -2171,7 +2171,7 @@ impl<'tcx> EnumMemberDescriptionFactory<'tcx> { set_members_of_composite_type(cx, variant_type_metadata, variant_llvm_type, - member_descriptions[]); + member_descriptions.index(&FullRange)); MemberDescription { name: "".to_string(), llvm_type: variant_llvm_type, @@ -2204,7 +2204,7 @@ impl<'tcx> EnumMemberDescriptionFactory<'tcx> { set_members_of_composite_type(cx, variant_type_metadata, variant_llvm_type, - member_descriptions[]); + member_descriptions.index(&FullRange)); vec![ MemberDescription { name: "".to_string(), @@ -2304,7 +2304,7 @@ impl<'tcx> EnumMemberDescriptionFactory<'tcx> { set_members_of_composite_type(cx, variant_type_metadata, variant_llvm_type, - variant_member_descriptions[]); + variant_member_descriptions.index(&FullRange)); // Encode the information about the null variant in the union // member's name. 
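// Hypothetical sketch (not part of this patch) of the half-open and open-ended
// range forms used in the debuginfo changes above: `a..b` and `a..` build range
// values that are passed to `index` by reference, replacing the old `s[a..b]`
// and `s[a..]` sugar. The helper names are invented for illustration.
use std::ops::Index;

fn strip_work_dir<'a>(full_path: &'a str, work_dir: &str) -> &'a str {
    if full_path.starts_with(work_dir) {
        // formerly: full_path[work_dir.len() + 1u .. full_path.len()]
        full_path.index(&((work_dir.len() + 1u)..full_path.len()))
    } else {
        full_path
    }
}

fn ends_with_brace(code_snippet: &str) -> bool {
    let bytes = code_snippet.as_bytes();
    // formerly: bytes[bytes.len() - 1 ..] == b"}"
    bytes.len() > 0 && bytes.index(&((bytes.len() - 1)..)) == b"}"
}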
@@ -2383,7 +2383,7 @@ fn describe_enum_variant<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, .iter() .map(|&t| type_of::type_of(cx, t)) .collect::>() - [], + .index(&FullRange), struct_def.packed); // Could do some consistency checks here: size, align, field count, discr type @@ -2450,7 +2450,7 @@ fn prepare_enum_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let (containing_scope, definition_span) = get_namespace_and_span_for_item(cx, enum_def_id); let loc = span_start(cx, definition_span); - let file_metadata = file_metadata(cx, loc.file.name[]); + let file_metadata = file_metadata(cx, loc.file.name.index(&FullRange)); let variants = ty::enum_variants(cx.tcx(), enum_def_id); @@ -2637,7 +2637,7 @@ fn set_members_of_composite_type(cx: &CrateContext, Please use a rustc built with anewer \ version of LLVM.", llvm_version_major, - llvm_version_minor)[]); + llvm_version_minor).index(&FullRange)); } else { cx.sess().bug("debuginfo::set_members_of_composite_type() - \ Already completed forward declaration re-encountered."); @@ -2675,7 +2675,7 @@ fn set_members_of_composite_type(cx: &CrateContext, .collect(); unsafe { - let type_array = create_DIArray(DIB(cx), member_metadata[]); + let type_array = create_DIArray(DIB(cx), member_metadata.index(&FullRange)); llvm::LLVMDICompositeTypeSetTypeArray(composite_type_metadata, type_array); } } @@ -2774,7 +2774,7 @@ fn vec_slice_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let member_llvm_types = slice_llvm_type.field_types(); assert!(slice_layout_is_correct(cx, - member_llvm_types[], + member_llvm_types.index(&FullRange), element_type)); let member_descriptions = [ MemberDescription { @@ -2796,11 +2796,11 @@ fn vec_slice_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, assert!(member_descriptions.len() == member_llvm_types.len()); let loc = span_start(cx, span); - let file_metadata = file_metadata(cx, loc.file.name[]); + let file_metadata = file_metadata(cx, loc.file.name.index(&FullRange)); let metadata = composite_type_metadata(cx, slice_llvm_type, - slice_type_name[], + slice_type_name.index(&FullRange), unique_type_id, &member_descriptions, UNKNOWN_SCOPE_METADATA, @@ -2846,7 +2846,7 @@ fn subroutine_type_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, llvm::LLVMDIBuilderCreateSubroutineType( DIB(cx), UNKNOWN_FILE_METADATA, - create_DIArray(DIB(cx), signature_metadata[])) + create_DIArray(DIB(cx), signature_metadata.index(&FullRange))) }, false); } @@ -2872,7 +2872,7 @@ fn trait_pointer_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let pp_type_name = ppaux::ty_to_string(cx.tcx(), trait_type); cx.sess().bug(format!("debuginfo: Unexpected trait-object type in \ trait_pointer_metadata(): {}", - pp_type_name[])[]); + pp_type_name.index(&FullRange)).index(&FullRange)); } }; @@ -2886,7 +2886,7 @@ fn trait_pointer_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, composite_type_metadata(cx, trait_llvm_type, - trait_type_name[], + trait_type_name.index(&FullRange), unique_type_id, &[], containing_scope, @@ -3006,13 +3006,13 @@ fn type_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, ty::ty_tup(ref elements) => { prepare_tuple_metadata(cx, t, - elements[], + elements.index(&FullRange), unique_type_id, usage_site_span).finalize(cx) } _ => { cx.sess().bug(format!("debuginfo: unexpected type in type_metadata: {}", - sty)[]) + sty).index(&FullRange)) } }; @@ -3030,9 +3030,9 @@ fn type_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, type id '{}' to already be in \ the debuginfo::TypeMap but it \ was not. 
(Ty = {})", - unique_type_id_str[], + unique_type_id_str.index(&FullRange), ppaux::ty_to_string(cx.tcx(), t)); - cx.sess().span_bug(usage_site_span, error_message[]); + cx.sess().span_bug(usage_site_span, error_message.index(&FullRange)); } }; @@ -3045,9 +3045,9 @@ fn type_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, UniqueTypeId maps in \ debuginfo::TypeMap. \ UniqueTypeId={}, Ty={}", - unique_type_id_str[], + unique_type_id_str.index(&FullRange), ppaux::ty_to_string(cx.tcx(), t)); - cx.sess().span_bug(usage_site_span, error_message[]); + cx.sess().span_bug(usage_site_span, error_message.index(&FullRange)); } } None => { @@ -3253,7 +3253,7 @@ fn create_scope_map(cx: &CrateContext, { // Create a new lexical scope and push it onto the stack let loc = cx.sess().codemap().lookup_char_pos(scope_span.lo); - let file_metadata = file_metadata(cx, loc.file.name[]); + let file_metadata = file_metadata(cx, loc.file.name.index(&FullRange)); let parent_scope = scope_stack.last().unwrap().scope_metadata; let scope_metadata = unsafe { @@ -3375,7 +3375,7 @@ fn create_scope_map(cx: &CrateContext, if need_new_scope { // Create a new lexical scope and push it onto the stack let loc = cx.sess().codemap().lookup_char_pos(pat.span.lo); - let file_metadata = file_metadata(cx, loc.file.name[]); + let file_metadata = file_metadata(cx, loc.file.name.index(&FullRange)); let parent_scope = scope_stack.last().unwrap().scope_metadata; let scope_metadata = unsafe { @@ -3849,7 +3849,7 @@ fn push_debuginfo_type_name<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, ty::ty_projection(..) | ty::ty_param(_) => { cx.sess().bug(format!("debuginfo: Trying to create type name for \ - unexpected type: {}", ppaux::ty_to_string(cx.tcx(), t))[]); + unexpected type: {}", ppaux::ty_to_string(cx.tcx(), t)).index(&FullRange)); } } @@ -3932,13 +3932,13 @@ impl NamespaceTreeNode { None => {} } let string = token::get_name(node.name); - output.push_str(format!("{}", string.get().len())[]); + output.push_str(format!("{}", string.get().len()).index(&FullRange)); output.push_str(string.get()); } let mut name = String::from_str("_ZN"); fill_nested(self, &mut name); - name.push_str(format!("{}", item_name.len())[]); + name.push_str(format!("{}", item_name.len()).index(&FullRange)); name.push_str(item_name); name.push('E'); name @@ -3946,7 +3946,7 @@ impl NamespaceTreeNode { } fn crate_root_namespace<'a>(cx: &'a CrateContext) -> &'a str { - cx.link_meta().crate_name[] + cx.link_meta().crate_name.index(&FullRange) } fn namespace_for_item(cx: &CrateContext, def_id: ast::DefId) -> Rc { @@ -4023,7 +4023,7 @@ fn namespace_for_item(cx: &CrateContext, def_id: ast::DefId) -> Rc { cx.sess().bug(format!("debuginfo::namespace_for_item(): \ path too short for {}", - def_id)[]); + def_id).index(&FullRange)); } } }) diff --git a/src/librustc_trans/trans/expr.rs b/src/librustc_trans/trans/expr.rs index 9221ae09df98a..da19fc52c8c36 100644 --- a/src/librustc_trans/trans/expr.rs +++ b/src/librustc_trans/trans/expr.rs @@ -318,7 +318,7 @@ fn apply_adjustments<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, unsized_info(bcx, k, id, ty_substs[tp_index], identity) } _ => bcx.sess().bug(format!("UnsizeStruct with bad sty: {}", - bcx.ty_to_string(unadjusted_ty))[]) + bcx.ty_to_string(unadjusted_ty)).index(&FullRange)) }, &ty::UnsizeVtable(ty::TyTrait { ref principal, .. 
}, _) => { // Note that we preserve binding levels here: @@ -451,7 +451,7 @@ fn apply_adjustments<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let unboxed_ty = match datum_ty.sty { ty::ty_uniq(t) => t, _ => bcx.sess().bug(format!("Expected ty_uniq, found {}", - bcx.ty_to_string(datum_ty))[]) + bcx.ty_to_string(datum_ty)).index(&FullRange)) }; let result_ty = ty::mk_uniq(tcx, ty::unsize_ty(tcx, unboxed_ty, k, expr.span)); @@ -573,40 +573,7 @@ fn trans_datum_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, trans_rec_tup_field(bcx, &**base, idx.node) } ast::ExprIndex(ref base, ref idx) => { - match idx.node { - ast::ExprRange(ref start, ref end) => { - // Special case for slicing syntax (KILLME). - let _icx = push_ctxt("trans_slice"); - let ccx = bcx.ccx(); - - let method_call = MethodCall::expr(expr.id); - let method_ty = ccx.tcx() - .method_map - .borrow() - .get(&method_call) - .map(|method| method.ty); - let base_datum = unpack_datum!(bcx, trans(bcx, &**base)); - - let mut args = vec![]; - start.as_ref().map(|e| args.push((unpack_datum!(bcx, trans(bcx, &**e)), e.id))); - end.as_ref().map(|e| args.push((unpack_datum!(bcx, trans(bcx, &**e)), e.id))); - - let result_ty = ty::ty_fn_ret(monomorphize_type(bcx, - method_ty.unwrap())).unwrap(); - let scratch = rvalue_scratch_datum(bcx, result_ty, "trans_slice"); - - unpack_result!(bcx, - trans_overloaded_op(bcx, - expr, - method_call, - base_datum, - args, - Some(SaveIn(scratch.val)), - true)); - DatumBlock::new(bcx, scratch.to_expr_datum()) - } - _ => trans_index(bcx, expr, &**base, &**idx, MethodCall::expr(expr.id)) - } + trans_index(bcx, expr, &**base, &**idx, MethodCall::expr(expr.id)) } ast::ExprBox(_, ref contents) => { // Special case for `Box` @@ -657,7 +624,7 @@ fn trans_datum_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, expr.span, format!("trans_rvalue_datum_unadjusted reached \ fall-through case: {}", - expr.node)[]); + expr.node).index(&FullRange)); } } } @@ -1006,7 +973,7 @@ fn trans_rvalue_stmt_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, expr.span, format!("trans_rvalue_stmt_unadjusted reached \ fall-through case: {}", - expr.node)[]); + expr.node).index(&FullRange)); } } } @@ -1032,14 +999,14 @@ fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, controlflow::trans_if(bcx, expr.id, &**cond, &**thn, els.as_ref().map(|e| &**e), dest) } ast::ExprMatch(ref discr, ref arms, _) => { - _match::trans_match(bcx, expr, &**discr, arms[], dest) + _match::trans_match(bcx, expr, &**discr, arms.index(&FullRange), dest) } ast::ExprBlock(ref blk) => { controlflow::trans_block(bcx, &**blk, dest) } ast::ExprStruct(_, ref fields, ref base) => { trans_struct(bcx, - fields[], + fields.index(&FullRange), base.as_ref().map(|e| &**e), expr.span, expr.id, @@ -1104,7 +1071,7 @@ fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, trans_adt(bcx, expr_ty(bcx, expr), 0, - numbered_fields[], + numbered_fields.index(&FullRange), None, dest, Some(NodeInfo { id: expr.id, span: expr.span })) @@ -1148,13 +1115,13 @@ fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, trans_overloaded_call(bcx, expr, &**f, - args[], + args.index(&FullRange), Some(dest)) } else { callee::trans_call(bcx, expr, &**f, - callee::ArgExprs(args[]), + callee::ArgExprs(args.index(&FullRange)), dest) } } @@ -1162,7 +1129,7 @@ fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, callee::trans_method_call(bcx, expr, &*args[0], - callee::ArgExprs(args[]), + callee::ArgExprs(args.index(&FullRange)), dest) } ast::ExprBinary(op, ref lhs, ref rhs) => 
{ @@ -1211,7 +1178,7 @@ fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, expr.span, format!("trans_rvalue_dps_unadjusted reached fall-through \ case: {}", - expr.node)[]); + expr.node).index(&FullRange)); } } } @@ -1261,7 +1228,7 @@ fn trans_def_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, _ => { bcx.tcx().sess.span_bug(ref_expr.span, format!( "Non-DPS def {} referened by {}", - def, bcx.node_id_to_string(ref_expr.id))[]); + def, bcx.node_id_to_string(ref_expr.id)).index(&FullRange)); } } } @@ -1290,7 +1257,7 @@ pub fn trans_def_fn_unadjusted<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ccx.tcx().sess.span_bug(ref_expr.span, format!( "trans_def_fn_unadjusted invoked on: {} for {}", def, - ref_expr.repr(ccx.tcx()))[]); + ref_expr.repr(ccx.tcx())).index(&FullRange)); } } } @@ -1310,7 +1277,7 @@ pub fn trans_local_var<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, None => { bcx.sess().bug(format!( "trans_local_var: no llval for upvar {} found", - nid)[]); + nid).index(&FullRange)); } } } @@ -1320,7 +1287,7 @@ pub fn trans_local_var<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, None => { bcx.sess().bug(format!( "trans_local_var: no datum for local/arg {} found", - nid)[]); + nid).index(&FullRange)); } }; debug!("take_local(nid={}, v={}, ty={})", @@ -1330,7 +1297,7 @@ pub fn trans_local_var<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, _ => { bcx.sess().unimpl(format!( "unsupported def type in trans_local_var: {}", - def)[]); + def).index(&FullRange)); } } } @@ -1347,11 +1314,11 @@ pub fn with_field_tys<'tcx, R, F>(tcx: &ty::ctxt<'tcx>, { match ty.sty { ty::ty_struct(did, substs) => { - op(0, struct_fields(tcx, did, substs)[]) + op(0, struct_fields(tcx, did, substs).index(&FullRange)) } ty::ty_tup(ref v) => { - op(0, tup_fields(v[])[]) + op(0, tup_fields(v.index(&FullRange)).index(&FullRange)) } ty::ty_enum(_, substs) => { @@ -1361,7 +1328,7 @@ pub fn with_field_tys<'tcx, R, F>(tcx: &ty::ctxt<'tcx>, tcx.sess.bug(format!( "cannot get field types from the enum type {} \ without a node ID", - ty.repr(tcx))[]); + ty.repr(tcx)).index(&FullRange)); } Some(node_id) => { let def = tcx.def_map.borrow()[node_id].clone(); @@ -1372,7 +1339,7 @@ pub fn with_field_tys<'tcx, R, F>(tcx: &ty::ctxt<'tcx>, op(variant_info.disr_val, struct_fields(tcx, variant_id, - substs)[]) + substs).index(&FullRange)) } _ => { tcx.sess.bug("resolve didn't map this expr to a \ @@ -1386,7 +1353,7 @@ pub fn with_field_tys<'tcx, R, F>(tcx: &ty::ctxt<'tcx>, _ => { tcx.sess.bug(format!( "cannot get field types from the type {}", - ty.repr(tcx))[]); + ty.repr(tcx)).index(&FullRange)); } } } @@ -1402,13 +1369,13 @@ fn trans_struct<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let tcx = bcx.tcx(); with_field_tys(tcx, ty, Some(expr_id), |discr, field_tys| { - let mut need_base: Vec<_> = repeat(true).take(field_tys.len()).collect(); + let mut need_base: Vec = repeat(true).take(field_tys.len()).collect(); let numbered_fields = fields.iter().map(|field| { let opt_pos = field_tys.iter().position(|field_ty| field_ty.name == field.ident.node.name); - match opt_pos { + let result = match opt_pos { Some(i) => { need_base[i] = false; (i, &*field.expr) @@ -1417,14 +1384,15 @@ fn trans_struct<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, tcx.sess.span_bug(field.span, "Couldn't find field in struct type") } - } + }; + result }).collect::>(); let optbase = match base { Some(base_expr) => { let mut leftovers = Vec::new(); for (i, b) in need_base.iter().enumerate() { if *b { - leftovers.push((i, field_tys[i].mt.ty)) + leftovers.push((i, field_tys[i].mt.ty)); } } Some(StructBaseInfo 
{expr: base_expr, @@ -1441,7 +1409,7 @@ fn trans_struct<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, trans_adt(bcx, ty, discr, - numbered_fields[], + numbered_fields.as_slice(), optbase, dest, Some(NodeInfo { id: expr_id, span: expr_span })) @@ -2078,7 +2046,7 @@ fn trans_imm_cast<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, t_in.repr(bcx.tcx()), k_in, t_out.repr(bcx.tcx()), - k_out)[]) + k_out).index(&FullRange)) } } } @@ -2087,7 +2055,7 @@ fn trans_imm_cast<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, t_in.repr(bcx.tcx()), k_in, t_out.repr(bcx.tcx()), - k_out)[]) + k_out).index(&FullRange)) }; return immediate_rvalue_bcx(bcx, newval, t_out).to_expr_datumblock(); } @@ -2249,7 +2217,7 @@ fn deref_once<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, bcx.tcx().sess.span_bug( expr.span, format!("deref invoked on expr of illegal type {}", - datum.ty.repr(bcx.tcx()))[]); + datum.ty.repr(bcx.tcx())).index(&FullRange)); } }; diff --git a/src/librustc_trans/trans/foreign.rs b/src/librustc_trans/trans/foreign.rs index 1c9be6ae4a8ba..c8112e03707f2 100644 --- a/src/librustc_trans/trans/foreign.rs +++ b/src/librustc_trans/trans/foreign.rs @@ -109,7 +109,7 @@ pub fn register_static(ccx: &CrateContext, let llty = type_of::type_of(ccx, ty); let ident = link_name(foreign_item); - match attr::first_attr_value_str_by_name(foreign_item.attrs[], + match attr::first_attr_value_str_by_name(foreign_item.attrs.index(&FullRange), "linkage") { // If this is a static with a linkage specified, then we need to handle // it a little specially. The typesystem prevents things like &T and @@ -233,13 +233,13 @@ pub fn trans_native_call<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ty::ty_bare_fn(_, ref fn_ty) => (fn_ty.abi, fn_ty.sig.clone()), _ => ccx.sess().bug("trans_native_call called on non-function type") }; - let llsig = foreign_signature(ccx, &fn_sig, passed_arg_tys[]); + let llsig = foreign_signature(ccx, &fn_sig, passed_arg_tys.index(&FullRange)); let fn_type = cabi::compute_abi_info(ccx, - llsig.llarg_tys[], + llsig.llarg_tys.index(&FullRange), llsig.llret_ty, llsig.ret_def); - let arg_tys: &[cabi::ArgType] = fn_type.arg_tys[]; + let arg_tys: &[cabi::ArgType] = fn_type.arg_tys.index(&FullRange); let mut llargs_foreign = Vec::new(); @@ -365,7 +365,7 @@ pub fn trans_native_call<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let llforeign_retval = CallWithConv(bcx, llfn, - llargs_foreign[], + llargs_foreign.index(&FullRange), cc, Some(attrs)); @@ -435,7 +435,7 @@ pub fn trans_foreign_mod(ccx: &CrateContext, foreign_mod: &ast::ForeignMod) { abi => { let ty = ty::node_id_to_type(ccx.tcx(), foreign_item.id); register_foreign_item_fn(ccx, abi, ty, - lname.get()[]); + lname.get().index(&FullRange)); // Unlike for other items, we shouldn't call // `base::update_linkage` here. 
Foreign items have // special linkage requirements, which are handled @@ -567,7 +567,7 @@ pub fn trans_rust_fn_with_foreign_abi<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ccx.sess().bug(format!("build_rust_fn: extern fn {} has ty {}, \ expected a bare fn ty", ccx.tcx().map.path_to_string(id), - t.repr(tcx))[]); + t.repr(tcx)).index(&FullRange)); } }; @@ -575,7 +575,7 @@ pub fn trans_rust_fn_with_foreign_abi<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ccx.tcx().map.path_to_string(id), id, t.repr(tcx)); - let llfn = base::decl_internal_rust_fn(ccx, t, ps[]); + let llfn = base::decl_internal_rust_fn(ccx, t, ps.index(&FullRange)); base::set_llvm_fn_attrs(ccx, attrs, llfn); base::trans_fn(ccx, decl, body, llfn, param_substs, id, &[]); llfn @@ -748,7 +748,7 @@ pub fn trans_rust_fn_with_foreign_abi<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, debug!("calling llrustfn = {}, t = {}", ccx.tn().val_to_string(llrustfn), t.repr(ccx.tcx())); let attributes = base::get_fn_llvm_attributes(ccx, t); - let llrust_ret_val = builder.call(llrustfn, llrust_args[], Some(attributes)); + let llrust_ret_val = builder.call(llrustfn, llrust_args.as_slice(), Some(attributes)); // Get the return value where the foreign fn expects it. let llforeign_ret_ty = match tys.fn_ty.ret_ty.cast { @@ -815,9 +815,9 @@ pub fn trans_rust_fn_with_foreign_abi<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, // the massive simplifications that have occurred. pub fn link_name(i: &ast::ForeignItem) -> InternedString { - match attr::first_attr_value_str_by_name(i.attrs[], "link_name") { + match attr::first_attr_value_str_by_name(i.attrs.index(&FullRange), "link_name") { Some(ln) => ln.clone(), - None => match weak_lang_items::link_name(i.attrs[]) { + None => match weak_lang_items::link_name(i.attrs.index(&FullRange)) { Some(name) => name, None => token::get_ident(i.ident), } @@ -858,7 +858,7 @@ fn foreign_types_for_fn_ty<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, }; let llsig = foreign_signature(ccx, &fn_sig, fn_sig.0.inputs.as_slice()); let fn_ty = cabi::compute_abi_info(ccx, - llsig.llarg_tys[], + llsig.llarg_tys.index(&FullRange), llsig.llret_ty, llsig.ret_def); debug!("foreign_types_for_fn_ty(\ @@ -867,9 +867,9 @@ fn foreign_types_for_fn_ty<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, fn_ty={} -> {}, \ ret_def={}", ty.repr(ccx.tcx()), - ccx.tn().types_to_str(llsig.llarg_tys[]), + ccx.tn().types_to_str(llsig.llarg_tys.index(&FullRange)), ccx.tn().type_to_string(llsig.llret_ty), - ccx.tn().types_to_str(fn_ty.arg_tys.iter().map(|t| t.ty).collect::>()[]), + ccx.tn().types_to_str(fn_ty.arg_tys.iter().map(|t| t.ty).collect::>().as_slice()), ccx.tn().type_to_string(fn_ty.ret_ty.ty), llsig.ret_def); @@ -919,7 +919,7 @@ fn lltype_for_fn_from_foreign_types(ccx: &CrateContext, tys: &ForeignTypes) -> T if tys.fn_sig.0.variadic { Type::variadic_func(llargument_tys.as_slice(), &llreturn_ty) } else { - Type::func(llargument_tys[], &llreturn_ty) + Type::func(llargument_tys.index(&FullRange), &llreturn_ty) } } diff --git a/src/librustc_trans/trans/glue.rs b/src/librustc_trans/trans/glue.rs index 2fd9031fdfe4c..056842657ad29 100644 --- a/src/librustc_trans/trans/glue.rs +++ b/src/librustc_trans/trans/glue.rs @@ -161,7 +161,7 @@ pub fn get_drop_glue<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Val let (glue, new_sym) = match ccx.available_drop_glues().borrow().get(&t) { Some(old_sym) => { - let glue = decl_cdecl_fn(ccx, old_sym[], llfnty, ty::mk_nil(ccx.tcx())); + let glue = decl_cdecl_fn(ccx, old_sym.index(&FullRange), llfnty, ty::mk_nil(ccx.tcx())); (glue, None) }, None 
=> { @@ -232,7 +232,7 @@ fn trans_struct_drop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, f.sig.0.inputs[0] } _ => bcx.sess().bug(format!("Expected function type, found {}", - bcx.ty_to_string(fty))[]) + bcx.ty_to_string(fty)).index(&FullRange)) }; let (struct_data, info) = if type_is_sized(bcx.tcx(), t) { @@ -293,7 +293,7 @@ fn trans_struct_drop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, class_did, &[get_drop_glue_type(bcx.ccx(), t)], ty::mk_nil(bcx.tcx())); - let (_, variant_cx) = invoke(variant_cx, dtor_addr, args[], dtor_ty, None); + let (_, variant_cx) = invoke(variant_cx, dtor_addr, args.index(&FullRange), dtor_ty, None); variant_cx.fcx.pop_and_trans_custom_cleanup_scope(variant_cx, field_scope); variant_cx @@ -352,7 +352,7 @@ fn size_and_align_of_dst<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, t: Ty<'tcx>, info: (Mul(bcx, info, C_uint(bcx.ccx(), unit_size)), C_uint(bcx.ccx(), 8u)) } _ => bcx.sess().bug(format!("Unexpected unsized type, found {}", - bcx.ty_to_string(t))[]) + bcx.ty_to_string(t)).index(&FullRange)) } } @@ -424,7 +424,7 @@ fn make_drop_glue<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, v0: ValueRef, t: Ty<'tcx>) bcx.sess().warn(format!("Ignoring drop flag in destructor for {}\ because the struct is unsized. See issue\ #16758", - bcx.ty_to_string(t))[]); + bcx.ty_to_string(t)).index(&FullRange)); trans_struct_drop(bcx, t, v0, dtor, did, substs) } } @@ -494,7 +494,7 @@ pub fn declare_tydesc<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) note_unique_llvm_symbol(ccx, name); let ty_name = token::intern_and_get_ident( - ppaux::ty_to_string(ccx.tcx(), t)[]); + ppaux::ty_to_string(ccx.tcx(), t).index(&FullRange)); let ty_name = C_str_slice(ccx, ty_name); debug!("--- declare_tydesc {}", ppaux::ty_to_string(ccx.tcx(), t)); @@ -513,8 +513,8 @@ fn declare_generic_glue<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>, let fn_nm = mangle_internal_name_by_type_and_seq( ccx, t, - format!("glue_{}", name)[]); - let llfn = decl_cdecl_fn(ccx, fn_nm[], llfnty, ty::mk_nil(ccx.tcx())); + format!("glue_{}", name).index(&FullRange)); + let llfn = decl_cdecl_fn(ccx, fn_nm.index(&FullRange), llfnty, ty::mk_nil(ccx.tcx())); note_unique_llvm_symbol(ccx, fn_nm.clone()); return (fn_nm, llfn); } diff --git a/src/librustc_trans/trans/meth.rs b/src/librustc_trans/trans/meth.rs index f6d69959dadf9..53ce8dd64b87d 100644 --- a/src/librustc_trans/trans/meth.rs +++ b/src/librustc_trans/trans/meth.rs @@ -77,7 +77,7 @@ pub fn trans_impl(ccx: &CrateContext, match *impl_item { ast::MethodImplItem(ref method) => { if method.pe_generics().ty_params.len() == 0u { - let trans_everywhere = attr::requests_inline(method.attrs[]); + let trans_everywhere = attr::requests_inline(method.attrs.index(&FullRange)); for (ref ccx, is_origin) in ccx.maybe_iter(trans_everywhere) { let llfn = get_item_val(ccx, method.id); trans_fn(ccx, @@ -229,7 +229,7 @@ pub fn trans_static_method_callee<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, // Here, in this call, which I've written with explicit UFCS // notation, the set of type parameters will be: // - // rcvr_type: [] <-- nothing declared on the trait itself + // rcvr_type: .index(&FullRange) <-- nothing declared on the trait itself // rcvr_self: [Vec] <-- the self type // rcvr_method: [String] <-- method type parameter // @@ -268,11 +268,11 @@ pub fn trans_static_method_callee<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, // // Recall that we matched ` as Convert>`. 
Trait // resolution will have given us a substitution - // containing `impl_substs=[[T=int],[],[]]` (the type + // containing `impl_substs=[[T=int],.index(&FullRange),.index(&FullRange)]` (the type // parameters defined on the impl). We combine // that with the `rcvr_method` from before, which tells us // the type parameters from the *method*, to yield - // `callee_substs=[[T=int],[],[U=String]]`. + // `callee_substs=[[T=int],.index(&FullRange),[U=String]]`. let subst::SeparateVecsPerParamSpace { types: impl_type, selfs: impl_self, @@ -290,7 +290,7 @@ pub fn trans_static_method_callee<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, } _ => { tcx.sess.bug(format!("static call to invalid vtable: {}", - vtbl.repr(tcx))[]); + vtbl.repr(tcx)).index(&FullRange)); } } } @@ -378,7 +378,7 @@ fn trans_monomorphized_callee<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, traits::VtableParam(..) => { bcx.sess().bug( format!("resolved vtable bad vtable {} in trans", - vtable.repr(bcx.tcx()))[]); + vtable.repr(bcx.tcx())).index(&FullRange)); } } } @@ -728,7 +728,7 @@ pub fn get_vtable<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, bcx.sess().bug( format!("resolved vtable for {} to bad vtable {} in trans", trait_ref.repr(bcx.tcx()), - vtable.repr(bcx.tcx()))[]); + vtable.repr(bcx.tcx())).index(&FullRange)); } } }); @@ -760,7 +760,7 @@ pub fn make_vtable>(ccx: &CrateContext, let components: Vec<_> = head.into_iter().chain(ptrs).collect(); unsafe { - let tbl = C_struct(ccx, components[], false); + let tbl = C_struct(ccx, components.index(&FullRange), false); let sym = token::gensym("vtable"); let buf = CString::from_vec(format!("vtable{}", sym.uint()).into_bytes()); let vt_gvar = llvm::LLVMAddGlobal(ccx.llmod(), val_ty(tbl).to_ref(), diff --git a/src/librustc_trans/trans/monomorphize.rs b/src/librustc_trans/trans/monomorphize.rs index e6db462a342da..df67866e5b025 100644 --- a/src/librustc_trans/trans/monomorphize.rs +++ b/src/librustc_trans/trans/monomorphize.rs @@ -131,7 +131,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, hash = format!("h{}", state.result()); ccx.tcx().map.with_path(fn_id.node, |path| { - exported_name(path, hash[]) + exported_name(path, hash.index(&FullRange)) }) }; @@ -141,9 +141,9 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, let mut hash_id = Some(hash_id); let mut mk_lldecl = |&mut : abi: abi::Abi| { let lldecl = if abi != abi::Rust { - foreign::decl_rust_fn_with_foreign_abi(ccx, mono_ty, s[]) + foreign::decl_rust_fn_with_foreign_abi(ccx, mono_ty, s.index(&FullRange)) } else { - decl_internal_rust_fn(ccx, mono_ty, s[]) + decl_internal_rust_fn(ccx, mono_ty, s.index(&FullRange)) }; ccx.monomorphized().borrow_mut().insert(hash_id.take().unwrap(), lldecl); @@ -177,12 +177,12 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, .. 
} => { let d = mk_lldecl(abi); - let needs_body = setup_lldecl(d, i.attrs[]); + let needs_body = setup_lldecl(d, i.attrs.index(&FullRange)); if needs_body { if abi != abi::Rust { foreign::trans_rust_fn_with_foreign_abi( ccx, &**decl, &**body, &[], d, psubsts, fn_id.node, - Some(hash[])); + Some(hash.index(&FullRange))); } else { trans_fn(ccx, &**decl, &**body, d, psubsts, fn_id.node, &[]); } @@ -206,7 +206,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, trans_enum_variant(ccx, parent, &*v, - args[], + args.index(&FullRange), this_tv.disr_val, psubsts, d); @@ -220,7 +220,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, match *ii { ast::MethodImplItem(ref mth) => { let d = mk_lldecl(abi::Rust); - let needs_body = setup_lldecl(d, mth.attrs[]); + let needs_body = setup_lldecl(d, mth.attrs.index(&FullRange)); if needs_body { trans_fn(ccx, mth.pe_fn_decl(), @@ -241,7 +241,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, match *method { ast::ProvidedMethod(ref mth) => { let d = mk_lldecl(abi::Rust); - let needs_body = setup_lldecl(d, mth.attrs[]); + let needs_body = setup_lldecl(d, mth.attrs.index(&FullRange)); if needs_body { trans_fn(ccx, mth.pe_fn_decl(), mth.pe_body(), d, psubsts, mth.id, &[]); @@ -250,7 +250,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, } _ => { ccx.sess().bug(format!("can't monomorphize a {}", - map_node)[]) + map_node).index(&FullRange)) } } } @@ -258,7 +258,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, let d = mk_lldecl(abi::Rust); set_inline_hint(d); base::trans_tuple_struct(ccx, - struct_def.fields[], + struct_def.fields.index(&FullRange), struct_def.ctor_id.expect("ast-mapped tuple struct \ didn't have a ctor id"), psubsts, @@ -276,7 +276,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ast_map::NodePat(..) | ast_map::NodeLocal(..) 
=> { ccx.sess().bug(format!("can't monomorphize a {}", - map_node)[]) + map_node).index(&FullRange)) } }; diff --git a/src/librustc_trans/trans/type_.rs b/src/librustc_trans/trans/type_.rs index 3785c2fb9bc54..a4f51bc50e567 100644 --- a/src/librustc_trans/trans/type_.rs +++ b/src/librustc_trans/trans/type_.rs @@ -103,7 +103,7 @@ impl Type { } pub fn int(ccx: &CrateContext) -> Type { - match ccx.tcx().sess.target.target.target_word_size[] { + match ccx.tcx().sess.target.target.target_word_size.index(&FullRange) { "32" => Type::i32(ccx), "64" => Type::i64(ccx), tws => panic!("Unsupported target word size for int: {}", tws), diff --git a/src/librustc_trans/trans/type_of.rs b/src/librustc_trans/trans/type_of.rs index 3e499ea8498fb..c6c2749f37834 100644 --- a/src/librustc_trans/trans/type_of.rs +++ b/src/librustc_trans/trans/type_of.rs @@ -137,7 +137,7 @@ pub fn type_of_rust_fn<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let input_tys = inputs.iter().map(|&arg_ty| type_of_explicit_arg(cx, arg_ty)); atys.extend(input_tys); - Type::func(atys[], &lloutputtype) + Type::func(atys.index(&FullRange), &lloutputtype) } // Given a function type and a count of ty params, construct an llvm type @@ -182,7 +182,7 @@ pub fn sizing_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Typ let llsizingty = match t.sty { _ if !lltype_is_sized(cx.tcx(), t) => { cx.sess().bug(format!("trying to take the sizing type of {}, an unsized type", - ppaux::ty_to_string(cx.tcx(), t))[]) + ppaux::ty_to_string(cx.tcx(), t)).index(&FullRange)) } ty::ty_bool => Type::bool(cx), @@ -235,7 +235,7 @@ pub fn sizing_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Typ ty::ty_projection(..) | ty::ty_infer(..) | ty::ty_param(..) | ty::ty_err(..) => { cx.sess().bug(format!("fictitious type {} in sizing_type_of()", - ppaux::ty_to_string(cx.tcx(), t))[]) + ppaux::ty_to_string(cx.tcx(), t)).index(&FullRange)) } ty::ty_vec(_, None) | ty::ty_trait(..) | ty::ty_str => panic!("unreachable") }; @@ -312,7 +312,7 @@ pub fn type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Type { let repr = adt::represent_type(cx, t); let tps = substs.types.get_slice(subst::TypeSpace); let name = llvm_type_name(cx, an_enum, did, tps); - adt::incomplete_type_of(cx, &*repr, name[]) + adt::incomplete_type_of(cx, &*repr, name.index(&FullRange)) } ty::ty_unboxed_closure(did, _, ref substs) => { // Only create the named struct, but don't fill it in. We @@ -323,7 +323,7 @@ pub fn type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Type { // contents of the VecPerParamSpace to to construct the llvm // name let name = llvm_type_name(cx, an_unboxed_closure, did, substs.types.as_slice()); - adt::incomplete_type_of(cx, &*repr, name[]) + adt::incomplete_type_of(cx, &*repr, name.index(&FullRange)) } ty::ty_uniq(ty) | ty::ty_rptr(_, ty::mt{ty, ..}) | ty::ty_ptr(ty::mt{ty, ..}) => { @@ -379,7 +379,7 @@ pub fn type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Type { let repr = adt::represent_type(cx, t); let tps = substs.types.get_slice(subst::TypeSpace); let name = llvm_type_name(cx, a_struct, did, tps); - adt::incomplete_type_of(cx, &*repr, name[]) + adt::incomplete_type_of(cx, &*repr, name.index(&FullRange)) } } @@ -398,7 +398,7 @@ pub fn type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Type { } ty::ty_trait(..) => Type::opaque_trait(cx), _ => cx.sess().bug(format!("ty_open with sized type: {}", - ppaux::ty_to_string(cx.tcx(), t))[]) + ppaux::ty_to_string(cx.tcx(), t)).index(&FullRange)) }, ty::ty_infer(..) 
=> cx.sess().bug("type_of with ty_infer"), diff --git a/src/librustc_typeck/astconv.rs b/src/librustc_typeck/astconv.rs index 183b8c5c830ef..a2b1c32f0a502 100644 --- a/src/librustc_typeck/astconv.rs +++ b/src/librustc_typeck/astconv.rs @@ -197,7 +197,7 @@ pub fn opt_ast_region_to_region<'tcx>( help_name } else { format!("one of {}'s {} elided lifetimes", help_name, n) - }[]); + }.index(&FullRange)); if len == 2 && i == 0 { m.push_str(" or "); @@ -347,7 +347,7 @@ fn create_substs_for_ast_path<'tcx>( format!("wrong number of type arguments: {} {}, found {}", expected, required_ty_param_count, - supplied_ty_param_count)[]); + supplied_ty_param_count).index(&FullRange)); } else if supplied_ty_param_count > formal_ty_param_count { let expected = if required_ty_param_count < formal_ty_param_count { "expected at most" @@ -358,7 +358,7 @@ fn create_substs_for_ast_path<'tcx>( format!("wrong number of type arguments: {} {}, found {}", expected, formal_ty_param_count, - supplied_ty_param_count)[]); + supplied_ty_param_count).index(&FullRange)); } let mut substs = Substs::new_type(types, regions); @@ -377,7 +377,7 @@ fn create_substs_for_ast_path<'tcx>( } } - for param in ty_param_defs[supplied_ty_param_count..].iter() { + for param in ty_param_defs.index(&(supplied_ty_param_count..)).iter() { match param.default { Some(default) => { // This is a default type parameter. @@ -556,7 +556,8 @@ pub fn instantiate_trait_ref<'tcx>( _ => { this.tcx().sess.span_fatal( ast_trait_ref.path.span, - format!("`{}` is not a trait", ast_trait_ref.path.user_string(this.tcx()))[]); + format!("`{}` is not a trait", + ast_trait_ref.path.user_string(this.tcx())).index(&FullRange)); } } } @@ -825,7 +826,7 @@ pub fn ast_ty_to_builtin_ty<'tcx>( .sess .span_bug(ast_ty.span, format!("unbound path {}", - path.repr(this.tcx()))[]) + path.repr(this.tcx())).index(&FullRange)) } Some(&d) => d }; @@ -847,7 +848,7 @@ pub fn ast_ty_to_builtin_ty<'tcx>( this.tcx().sess.span_bug( path.span, format!("converting `Box` to `{}`", - ty.repr(this.tcx()))[]); + ty.repr(this.tcx())).index(&FullRange)); } } } @@ -1067,10 +1068,14 @@ pub fn ast_ty_to_ty<'tcx>( ty::mk_vec(tcx, ast_ty_to_ty(this, rscope, &**ty), None) } ast::TyObjectSum(ref ty, ref bounds) => { - match ast_ty_to_trait_ref(this, rscope, &**ty, bounds[]) { + match ast_ty_to_trait_ref(this, rscope, &**ty, bounds.index(&FullRange)) { Ok((trait_ref, projection_bounds)) => { - trait_ref_to_object_type(this, rscope, ast_ty.span, - trait_ref, projection_bounds, bounds[]) + trait_ref_to_object_type(this, + rscope, + ast_ty.span, + trait_ref, + projection_bounds, + bounds.index(&FullRange)) } Err(ErrorReported) => { this.tcx().types.err @@ -1105,7 +1110,7 @@ pub fn ast_ty_to_ty<'tcx>( ty::mk_bare_fn(tcx, None, tcx.mk_bare_fn(bare_fn)) } ast::TyPolyTraitRef(ref bounds) => { - conv_ty_poly_trait_ref(this, rscope, ast_ty.span, bounds[]) + conv_ty_poly_trait_ref(this, rscope, ast_ty.span, bounds.index(&FullRange)) } ast::TyPath(ref path, id) => { let a_def = match tcx.def_map.borrow().get(&id) { @@ -1113,7 +1118,7 @@ pub fn ast_ty_to_ty<'tcx>( tcx.sess .span_bug(ast_ty.span, format!("unbound path {}", - path.repr(tcx))[]) + path.repr(tcx)).index(&FullRange)) } Some(&d) => d }; @@ -1152,7 +1157,7 @@ pub fn ast_ty_to_ty<'tcx>( def::DefMod(id) => { tcx.sess.span_fatal(ast_ty.span, format!("found module name used as a type: {}", - tcx.map.node_to_string(id.node))[]); + tcx.map.node_to_string(id.node)).index(&FullRange)); } def::DefPrimTy(_) => { panic!("DefPrimTy arm missed in previous 
ast_ty_to_prim_ty call"); @@ -1171,7 +1176,7 @@ pub fn ast_ty_to_ty<'tcx>( .last() .unwrap() .identifier) - .get())[]); + .get()).index(&FullRange)); this.tcx().types.err } def::DefAssociatedPath(provenance, assoc_ident) => { @@ -1181,7 +1186,7 @@ pub fn ast_ty_to_ty<'tcx>( tcx.sess.span_fatal(ast_ty.span, format!("found value name used \ as a type: {}", - a_def)[]); + a_def).index(&FullRange)); } } } @@ -1209,7 +1214,7 @@ pub fn ast_ty_to_ty<'tcx>( ast_ty.span, format!("expected constant expr for array \ length: {}", - *r)[]); + *r).index(&FullRange)); } } } @@ -1331,7 +1336,7 @@ fn ty_of_method_or_bare_fn<'a, 'tcx>(this: &AstConv<'tcx>, let input_params = if self_ty.is_some() { decl.inputs.slice_from(1) } else { - decl.inputs[] + decl.inputs.index(&FullRange) }; let input_tys = input_params.iter().map(|a| ty_of_arg(this, &rb, a, None)); let input_pats: Vec = input_params.iter() @@ -1546,7 +1551,7 @@ fn conv_ty_poly_trait_ref<'tcx>( ast_bounds: &[ast::TyParamBound]) -> Ty<'tcx> { - let mut partitioned_bounds = partition_bounds(this.tcx(), span, ast_bounds[]); + let mut partitioned_bounds = partition_bounds(this.tcx(), span, ast_bounds.index(&FullRange)); let mut projection_bounds = Vec::new(); let main_trait_bound = if !partitioned_bounds.trait_bounds.is_empty() { @@ -1596,7 +1601,7 @@ pub fn conv_existential_bounds_from_partitioned_bounds<'tcx>( this.tcx().sess.span_err( b.trait_ref.path.span, format!("only the builtin traits can be used \ - as closure or object bounds")[]); + as closure or object bounds").index(&FullRange)); } let region_bound = compute_region_bound(this, @@ -1669,7 +1674,7 @@ fn compute_opt_region_bound<'tcx>(tcx: &ty::ctxt<'tcx>, tcx.sess.span_err( span, format!("ambiguous lifetime bound, \ - explicit lifetime bound required")[]); + explicit lifetime bound required").index(&FullRange)); } return Some(r); } @@ -1695,7 +1700,7 @@ fn compute_region_bound<'tcx>( None => { this.tcx().sess.span_err( span, - format!("explicit lifetime bound required")[]); + format!("explicit lifetime bound required").index(&FullRange)); ty::ReStatic } } diff --git a/src/librustc_typeck/check/callee.rs b/src/librustc_typeck/check/callee.rs index b1dc033b567bb..dff216ac2935f 100644 --- a/src/librustc_typeck/check/callee.rs +++ b/src/librustc_typeck/check/callee.rs @@ -195,7 +195,7 @@ fn confirm_builtin_call<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, let arg_exprs: Vec<_> = arg_exprs.iter().collect(); // for some weird reason we take &[&P<...>]. 
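The astconv.rs hunks apply the same rewrite to slices and vectors rather than strings: a bare `[]` becomes `.index(&FullRange)`, and a ranged slice such as `ty_param_defs[supplied_ty_param_count..]` becomes `ty_param_defs.index(&(supplied_ty_param_count..))`. A short sketch of the range forms this file now uses, again in pre-1.0 syntax; the vector and bindings are illustrative only:

    use std::ops::{FullRange, Index};

    fn main() {
        let defaults = vec![1u, 2, 3, 4, 5];
        let all: &[uint] = defaults.index(&FullRange); // was `defaults[]`
        let tail = all.index(&(2u..));                 // was `all[2..]`
        let mid = all.index(&(1u..3u));                // was `all[1..3]`
        assert_eq!(tail.len(), 3);
        assert_eq!(mid.len(), 2);
    }
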
check_argument_types(fcx, call_expr.span, - fn_sig.inputs[], + fn_sig.inputs.as_slice(), arg_exprs.as_slice(), AutorefArgs::No, fn_sig.variadic, diff --git a/src/librustc_typeck/check/method/confirm.rs b/src/librustc_typeck/check/method/confirm.rs index adea5084aab2b..652540bde70a9 100644 --- a/src/librustc_typeck/check/method/confirm.rs +++ b/src/librustc_typeck/check/method/confirm.rs @@ -315,7 +315,7 @@ impl<'a,'tcx> ConfirmContext<'a,'tcx> { self.tcx().sess.span_bug( self.span, format!("self-type `{}` for ObjectPick never dereferenced to an object", - self_ty.repr(self.tcx()))[]) + self_ty.repr(self.tcx())).index(&FullRange)) } } } @@ -370,7 +370,7 @@ impl<'a,'tcx> ConfirmContext<'a,'tcx> { format!( "{} was a subtype of {} but now is not?", self_ty.repr(self.tcx()), - method_self_ty.repr(self.tcx()))[]); + method_self_ty.repr(self.tcx())).index(&FullRange)); } } } @@ -562,6 +562,7 @@ impl<'a,'tcx> ConfirmContext<'a,'tcx> { self.fcx.adjust_expr_ty( &**base_expr, Some(&ty::AdjustDerefRef(base_adjustment.clone()))); + let index_expr_ty = self.fcx.expr_ty(&**index_expr); let result = check::try_index_step( self.fcx, @@ -570,10 +571,10 @@ impl<'a,'tcx> ConfirmContext<'a,'tcx> { &**base_expr, adjusted_base_ty, base_adjustment, - PreferMutLvalue); + PreferMutLvalue, + index_expr_ty); if let Some((input_ty, return_ty)) = result { - let index_expr_ty = self.fcx.expr_ty(&**index_expr); demand::suptype(self.fcx, index_expr.span, input_ty, index_expr_ty); let expr_ty = self.fcx.expr_ty(&**expr); @@ -639,7 +640,7 @@ impl<'a,'tcx> ConfirmContext<'a,'tcx> { self.span, format!("cannot upcast `{}` to `{}`", source_trait_ref.repr(self.tcx()), - target_trait_def_id.repr(self.tcx()))[]); + target_trait_def_id.repr(self.tcx())).as_slice()); } } } diff --git a/src/librustc_typeck/check/method/mod.rs b/src/librustc_typeck/check/method/mod.rs index ad43dd84ef6b2..f495885bd108d 100644 --- a/src/librustc_typeck/check/method/mod.rs +++ b/src/librustc_typeck/check/method/mod.rs @@ -266,7 +266,7 @@ pub fn lookup_in_trait_adjusted<'a, 'tcx>(fcx: &'a FnCtxt<'a, 'tcx>, span, format!( "trait method is &self but first arg is: {}", - transformed_self_ty.repr(fcx.tcx()))[]); + transformed_self_ty.repr(fcx.tcx())).index(&FullRange)); } } } @@ -276,7 +276,7 @@ pub fn lookup_in_trait_adjusted<'a, 'tcx>(fcx: &'a FnCtxt<'a, 'tcx>, span, format!( "unexpected explicit self type in operator method: {}", - method_ty.explicit_self)[]); + method_ty.explicit_self).index(&FullRange)); } } } @@ -330,7 +330,7 @@ pub fn report_error<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, if is_field { cx.sess.span_note(span, format!("use `(s.{0})(...)` if you meant to call the \ - function stored in the `{0}` field", method_ustring)[]); + function stored in the `{0}` field", method_ustring).index(&FullRange)); } if static_sources.len() > 0 { diff --git a/src/librustc_typeck/check/method/probe.rs b/src/librustc_typeck/check/method/probe.rs index 115711ae92b29..3bc29bb9ef737 100644 --- a/src/librustc_typeck/check/method/probe.rs +++ b/src/librustc_typeck/check/method/probe.rs @@ -576,7 +576,7 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { self.tcx().sess.span_bug( self.span, format!("No entry for unboxed closure: {}", - closure_def_id.repr(self.tcx()))[]); + closure_def_id.repr(self.tcx())).index(&FullRange)); } }; @@ -745,7 +745,7 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { debug!("pick_method(self_ty={})", self.infcx().ty_to_string(self_ty)); debug!("searching inherent candidates"); - match self.consider_candidates(self_ty, self.inherent_candidates[]) { + match 
self.consider_candidates(self_ty, self.inherent_candidates.index(&FullRange)) { None => {} Some(pick) => { return Some(pick); @@ -753,7 +753,7 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { } debug!("searching extension candidates"); - self.consider_candidates(self_ty, self.extension_candidates[]) + self.consider_candidates(self_ty, self.extension_candidates.index(&FullRange)) } fn consider_candidates(&self, @@ -768,7 +768,7 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { debug!("applicable_candidates: {}", applicable_candidates.repr(self.tcx())); if applicable_candidates.len() > 1 { - match self.collapse_candidates_to_trait_pick(applicable_candidates[]) { + match self.collapse_candidates_to_trait_pick(applicable_candidates.index(&FullRange)) { Some(pick) => { return Some(Ok(pick)); } None => { } } @@ -864,7 +864,7 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { Some(data) => data, None => return None, }; - if probes[1..].iter().any(|p| p.to_trait_data() != Some(trait_data)) { + if probes.index(&(1..)).iter().any(|p| p.to_trait_data() != Some(trait_data)) { return None; } diff --git a/src/librustc_typeck/check/mod.rs b/src/librustc_typeck/check/mod.rs index 1b51434a58cc1..eb85654e68ec1 100644 --- a/src/librustc_typeck/check/mod.rs +++ b/src/librustc_typeck/check/mod.rs @@ -593,7 +593,7 @@ fn check_fn<'a, 'tcx>(ccx: &'a CrateCtxt<'a, 'tcx>, let tcx = ccx.tcx; let err_count_on_creation = tcx.sess.err_count(); - let arg_tys = fn_sig.inputs[]; + let arg_tys = fn_sig.inputs.index(&FullRange); let ret_ty = fn_sig.output; debug!("check_fn(arg_tys={}, ret_ty={}, fn_id={})", @@ -691,7 +691,7 @@ pub fn check_item(ccx: &CrateCtxt, it: &ast::Item) { ast::ItemEnum(ref enum_definition, _) => { check_enum_variants(ccx, it.span, - enum_definition.variants[], + enum_definition.variants.index(&FullRange), it.id); } ast::ItemFn(ref decl, _, _, _, ref body) => { @@ -989,7 +989,7 @@ fn compare_impl_method<'tcx>(tcx: &ty::ctxt<'tcx>, but not in the trait", token::get_name(trait_m.name), ppaux::explicit_self_category_to_str( - &impl_m.explicit_self))[]); + &impl_m.explicit_self)).index(&FullRange)); return; } (_, &ty::StaticExplicitSelfCategory) => { @@ -999,7 +999,7 @@ fn compare_impl_method<'tcx>(tcx: &ty::ctxt<'tcx>, but not in the impl", token::get_name(trait_m.name), ppaux::explicit_self_category_to_str( - &trait_m.explicit_self))[]); + &trait_m.explicit_self)).index(&FullRange)); return; } _ => { @@ -1360,7 +1360,7 @@ fn compare_impl_method<'tcx>(tcx: &ty::ctxt<'tcx>, span, format!("lifetime parameters or bounds on method `{}` do \ not match the trait declaration", - token::get_name(impl_m.name))[]); + token::get_name(impl_m.name)).index(&FullRange)); return false; } @@ -1412,7 +1412,7 @@ fn compare_impl_method<'tcx>(tcx: &ty::ctxt<'tcx>, from its counterpart `{}` \ declared in the trait", impl_param.name.user_string(tcx), - trait_param.name.user_string(tcx))[]); + trait_param.name.user_string(tcx)).index(&FullRange)); true } else { false @@ -1422,14 +1422,14 @@ fn compare_impl_method<'tcx>(tcx: &ty::ctxt<'tcx>, tcx.sess.span_note( span, format!("the impl is missing the following bounds: `{}`", - missing.user_string(tcx))[]); + missing.user_string(tcx)).index(&FullRange)); } if extra.len() != 0 { tcx.sess.span_note( span, format!("the impl has the following extra bounds: `{}`", - extra.user_string(tcx))[]); + extra.user_string(tcx)).index(&FullRange)); } if err { @@ -1700,7 +1700,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { self.tcx().sess.span_bug( span, format!("no type for local variable {}", - nid)[]); + 
nid).index(&FullRange)); } } } @@ -2034,7 +2034,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { Some(&t) => t, None => { self.tcx().sess.bug(format!("no type for expr in fcx {}", - self.tag())[]); + self.tag()).index(&FullRange)); } } } @@ -2064,7 +2064,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { self.tcx().sess.bug( format!("no type for node {}: {} in fcx {}", id, self.tcx().map.node_to_string(id), - self.tag())[]); + self.tag()).index(&FullRange)); } } } @@ -2377,90 +2377,6 @@ fn autoderef_for_index<'a, 'tcx, T, F>(fcx: &FnCtxt<'a, 'tcx>, } } -/// Checks for a `Slice` (or `SliceMut`) impl at the relevant level of autoderef. If it finds one, -/// installs method info and returns type of method (else None). -fn try_overloaded_slice_step<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - method_call: MethodCall, - expr: &ast::Expr, - base_expr: &ast::Expr, - base_ty: Ty<'tcx>, // autoderef'd type - autoderefref: ty::AutoDerefRef<'tcx>, - lvalue_pref: LvaluePreference, - start_expr: &Option>, - end_expr: &Option>) - -> Option<(Ty<'tcx>, /* index type */ - Ty<'tcx>)> /* return type */ -{ - let input_ty = fcx.infcx().next_ty_var(); - let return_ty = fcx.infcx().next_ty_var(); - - let method = match lvalue_pref { - PreferMutLvalue => { - // Try `SliceMut` first, if preferred. - match fcx.tcx().lang_items.slice_mut_trait() { - Some(trait_did) => { - let method_name = match (start_expr, end_expr) { - (&Some(_), &Some(_)) => "slice_or_fail_mut", - (&Some(_), &None) => "slice_from_or_fail_mut", - (&None, &Some(_)) => "slice_to_or_fail_mut", - (&None, &None) => "as_mut_slice_", - }; - - method::lookup_in_trait_adjusted(fcx, - expr.span, - Some(&*base_expr), - token::intern(method_name), - trait_did, - autoderefref, - base_ty, - Some(vec![input_ty, return_ty])) - } - _ => None, - } - } - NoPreference => { - // Otherwise, fall back to `Slice`. - match fcx.tcx().lang_items.slice_trait() { - Some(trait_did) => { - let method_name = match (start_expr, end_expr) { - (&Some(_), &Some(_)) => "slice_or_fail", - (&Some(_), &None) => "slice_from_or_fail", - (&None, &Some(_)) => "slice_to_or_fail", - (&None, &None) => "as_slice_", - }; - - method::lookup_in_trait_adjusted(fcx, - expr.span, - Some(&*base_expr), - token::intern(method_name), - trait_did, - autoderefref, - base_ty, - Some(vec![input_ty, return_ty])) - } - _ => None, - } - } - }; - - // If some lookup succeeded, install method in table - method.map(|method| { - let method_ty = method.ty; - make_overloaded_lvalue_return_type(fcx, Some(method_call), Some(method)); - - let result_ty = ty::ty_fn_ret(method_ty); - let result_ty = match result_ty { - ty::FnConverging(result_ty) => result_ty, - ty::FnDiverging => { - fcx.tcx().sess.span_bug(expr.span, - "slice trait does not define a `!` return") - } - }; - - (input_ty, result_ty) - }) -} - /// To type-check `base_expr[index_expr]`, we progressively autoderef (and otherwise adjust) /// `base_expr`, looking for a type which either supports builtin indexing or overloaded indexing. 
/// This loop implements one step in that search; the autoderef loop is implemented by @@ -2471,29 +2387,32 @@ fn try_index_step<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, base_expr: &ast::Expr, adjusted_ty: Ty<'tcx>, adjustment: ty::AutoDerefRef<'tcx>, - lvalue_pref: LvaluePreference) + lvalue_pref: LvaluePreference, + index_ty: Ty<'tcx>) -> Option<(/*index type*/ Ty<'tcx>, /*element type*/ Ty<'tcx>)> { - debug!("try_index_step(expr={}, base_expr.id={}, adjusted_ty={}, adjustment={})", - expr.repr(fcx.tcx()), - base_expr.repr(fcx.tcx()), - adjusted_ty.repr(fcx.tcx()), - adjustment); - - // Try built-in indexing first. - match ty::index(adjusted_ty) { - Some(ty) => { + let tcx = fcx.tcx(); + debug!("try_index_step(expr={}, base_expr.id={}, adjusted_ty={}, adjustment={}, index_ty={})", + expr.repr(tcx), + base_expr.repr(tcx), + adjusted_ty.repr(tcx), + adjustment, + index_ty.repr(tcx)); + + let input_ty = fcx.infcx().next_ty_var(); + + // First, try built-in indexing. + match (ty::index(adjusted_ty), &index_ty.sty) { + (Some(ty), &ty::ty_uint(ast::TyU)) | (Some(ty), &ty::ty_infer(ty::IntVar(_))) => { + debug!("try_index_step: success, using built-in indexing"); fcx.write_adjustment(base_expr.id, base_expr.span, ty::AdjustDerefRef(adjustment)); - return Some((fcx.tcx().types.uint, ty)); + return Some((tcx.types.uint, ty)); } - - None => { } + _ => {} } - let input_ty = fcx.infcx().next_ty_var(); - // Try `IndexMut` first, if preferred. - let method = match (lvalue_pref, fcx.tcx().lang_items.index_mut_trait()) { + let method = match (lvalue_pref, tcx.lang_items.index_mut_trait()) { (PreferMutLvalue, Some(trait_did)) => { method::lookup_in_trait_adjusted(fcx, expr.span, @@ -2508,14 +2427,14 @@ fn try_index_step<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, }; // Otherwise, fall back to `Index`. - let method = match (method, fcx.tcx().lang_items.index_trait()) { + let method = match (method, tcx.lang_items.index_trait()) { (None, Some(trait_did)) => { method::lookup_in_trait_adjusted(fcx, expr.span, Some(&*base_expr), token::intern("index"), trait_did, - adjustment, + adjustment.clone(), adjusted_ty, Some(vec![input_ty])) } @@ -2526,6 +2445,7 @@ fn try_index_step<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, // type from the method signature. // If some lookup succeeded, install method in table method.and_then(|method| { + debug!("try_index_step: success, using overloaded indexing"); make_overloaded_lvalue_return_type(fcx, Some(method_call), Some(method)). 
map(|ret| (input_ty, ret.ty)) }) @@ -2544,7 +2464,7 @@ fn lookup_method_for_for_loop<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, Ok(trait_did) => trait_did, Err(ref err_string) => { fcx.tcx().sess.span_err(iterator_expr.span, - err_string[]); + err_string.index(&FullRange)); return fcx.tcx().types.err } }; @@ -2571,7 +2491,7 @@ fn lookup_method_for_for_loop<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, format!("`for` loop expression has type `{}` which does \ not implement the `Iterator` trait; \ maybe try .iter()", - ty_string)[]); + ty_string).index(&FullRange)); } fcx.tcx().types.err } @@ -2609,7 +2529,7 @@ fn lookup_method_for_for_loop<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, format!("`next` method of the `Iterator` \ trait has an unexpected type `{}`", fcx.infcx().ty_to_string(return_type)) - []); + .index(&FullRange)); fcx.tcx().types.err } } @@ -2636,7 +2556,7 @@ fn check_method_argument_types<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, check_argument_types(fcx, sp, - err_inputs[], + err_inputs.index(&FullRange), args_no_rcvr, autoref_args, false, @@ -3088,7 +3008,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, }; // Call the generic checker. - let args: Vec<_> = args[1..].iter().map(|x| x).collect(); + let args: Vec<_> = args.index(&(1..)).iter().map(|x| x).collect(); let ret_ty = check_method_argument_types(fcx, method_name.span, fn_ty, @@ -3406,7 +3326,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, ty::ty_struct(base_id, substs) => { debug!("struct named {}", ppaux::ty_to_string(tcx, base_t)); let fields = ty::lookup_struct_fields(tcx, base_id); - lookup_field_ty(tcx, base_id, fields[], + lookup_field_ty(tcx, base_id, fields.index(&FullRange), field.node.name, &(*substs)) } _ => None @@ -3469,7 +3389,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, if tuple_like { debug!("tuple struct named {}", ppaux::ty_to_string(tcx, base_t)); let fields = ty::lookup_struct_fields(tcx, base_id); - lookup_tup_field_ty(tcx, base_id, fields[], + lookup_tup_field_ty(tcx, base_id, fields.index(&FullRange), idx.node, &(*substs)) } else { None @@ -3634,7 +3554,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, class_id, id, fcx.ccx.tcx.mk_substs(struct_substs), - class_fields[], + class_fields.index(&FullRange), fields, base_expr.is_none(), None); @@ -3677,7 +3597,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, variant_id, id, fcx.ccx.tcx.mk_substs(substitutions), - variant_fields[], + variant_fields.index(&FullRange), fields, true, Some(enum_id)); @@ -4056,7 +3976,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, callee::check_call(fcx, expr, &**callee, args.as_slice()); } ast::ExprMethodCall(ident, ref tps, ref args) => { - check_method_call(fcx, expr, ident, args[], tps[], lvalue_pref); + check_method_call(fcx, expr, ident, args.as_slice(), tps.as_slice(), lvalue_pref); let arg_tys = args.iter().map(|a| fcx.expr_ty(&**a)); let args_err = arg_tys.fold(false, |rest_err, a| { @@ -4144,7 +4064,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, let expected = expected.only_has_type(); let flds = expected.map_to_option(fcx, |ty| { match ty.sty { - ty::ty_tup(ref flds) => Some(flds[]), + ty::ty_tup(ref flds) => Some(flds.index(&FullRange)), _ => None } }); @@ -4178,7 +4098,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, let struct_id = match def { Some(def::DefVariant(enum_id, variant_id, true)) => { check_struct_enum_variant(fcx, id, expr.span, enum_id, - variant_id, fields[]); + 
variant_id, fields.index(&FullRange)); enum_id } Some(def::DefTrait(def_id)) => { @@ -4187,7 +4107,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, pprust::path_to_string(path)); check_struct_fields_on_error(fcx, id, - fields[], + fields.index(&FullRange), base_expr); def_id }, @@ -4200,7 +4120,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, id, expr.span, struct_did, - fields[], + fields.index(&FullRange), base_expr.as_ref().map(|e| &**e)); } _ => { @@ -4209,7 +4129,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, pprust::path_to_string(path)); check_struct_fields_on_error(fcx, id, - fields[], + fields.index(&FullRange), base_expr); } } @@ -4250,7 +4170,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, fcx.infcx() .ty_to_string( actual_structure_type), - type_error_description)[]); + type_error_description).index(&FullRange)); ty::note_and_explain_type_err(tcx, &type_error); } } @@ -4270,91 +4190,45 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, if ty::type_is_error(base_t) { fcx.write_ty(id, base_t); } else { - match idx.node { - ast::ExprRange(ref start, ref end) => { - // A slice, rather than an index. Special cased for now (KILLME). + check_expr(fcx, &**idx); + let idx_t = fcx.expr_ty(&**idx); + if ty::type_is_error(idx_t) { + fcx.write_ty(id, idx_t); + } else { let base_t = structurally_resolved_type(fcx, expr.span, base_t); let result = autoderef_for_index(fcx, &**base, base_t, lvalue_pref, |adj_ty, adj| { - try_overloaded_slice_step(fcx, - MethodCall::expr(expr.id), - expr, - &**base, - adj_ty, - adj, - lvalue_pref, - start, - end) + try_index_step(fcx, + MethodCall::expr(expr.id), + expr, + &**base, + adj_ty, + adj, + lvalue_pref, + idx_t) }); - let mut args = vec![]; - start.as_ref().map(|x| args.push(x)); - end.as_ref().map(|x| args.push(x)); - match result { Some((index_ty, element_ty)) => { - for a in args.iter() { - check_expr_has_type(fcx, &***a, index_ty); - } - fcx.write_ty(idx.id, element_ty); - fcx.write_ty(id, element_ty) + // FIXME: we've already checked idx above, we should + // probably just demand subtype or something here. 
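The check/mod.rs change around this hunk is the behavioral core of the patch: `ExprIndex` no longer special-cases a range expression through the removed `Slice`/`SliceMut` machinery. The index expression is checked first, its type is handed to `try_index_step`, and only a `uint` (or still-unresolved integer) index takes the built-in path; every other index type, ranges included, resolves through the `IndexMut`/`Index` lang items. A hedged sketch of what that means at the language level, in pre-1.0 syntax:

    use std::ops::Index;

    fn main() {
        let v = vec![10u, 20, 30, 40];
        let elem = v[1];                   // uint index: built-in indexing
        let slice = v.index(&(1u..3u));    // range index: goes through the Index lang item
        assert_eq!(elem, 20);
        assert_eq!(slice.len(), 2);
    }

Treating a range as just another index value is what lets the dedicated `Slice`/`SliceMut` lookups here, and the "KILLME" slicing special case in trans/expr.rs, be deleted wholesale.
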
+ check_expr_has_type(fcx, &**idx, index_ty); + fcx.write_ty(id, element_ty); } _ => { - for a in args.iter() { - check_expr(fcx, &***a); - } - fcx.type_error_message(expr.span, - |actual| { - format!("cannot take a slice of a value with type `{}`", + check_expr_has_type(fcx, &**idx, fcx.tcx().types.err); + fcx.type_error_message( + expr.span, + |actual| { + format!("cannot index a value of type `{}`", actual) - }, - base_t, - None); - fcx.write_ty(idx.id, fcx.tcx().types.err); + }, + base_t, + None); fcx.write_ty(id, fcx.tcx().types.err); } } - } - _ => { - check_expr(fcx, &**idx); - let idx_t = fcx.expr_ty(&**idx); - if ty::type_is_error(idx_t) { - fcx.write_ty(id, idx_t); - } else { - let base_t = structurally_resolved_type(fcx, expr.span, base_t); - - let result = - autoderef_for_index(fcx, &**base, base_t, lvalue_pref, |adj_ty, adj| { - try_index_step(fcx, - MethodCall::expr(expr.id), - expr, - &**base, - adj_ty, - adj, - lvalue_pref) - }); - - match result { - Some((index_ty, element_ty)) => { - check_expr_has_type(fcx, &**idx, index_ty); - fcx.write_ty(id, element_ty); - } - _ => { - check_expr_has_type(fcx, &**idx, fcx.tcx().types.err); - fcx.type_error_message( - expr.span, - |actual| { - format!("cannot index a value of type `{}`", - actual) - }, - base_t, - None); - fcx.write_ty(id, fcx.tcx().types.err); - } - } - } - } } } } @@ -4387,7 +4261,7 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, }; // Note that we don't check the type of start/end satisfy any - // bounds because right the range structs do not have any. If we add + // bounds because right now the range structs do not have any. If we add // some bounds, then we'll need to check `t_start` against them here. let range_type = match idx_type { @@ -4971,7 +4845,7 @@ pub fn check_enum_variants(ccx: &CrateCtxt, } let hint = *ty::lookup_repr_hints(ccx.tcx, ast::DefId { krate: ast::LOCAL_CRATE, node: id }) - [].get(0).unwrap_or(&attr::ReprAny); + .index(&FullRange).get(0).unwrap_or(&attr::ReprAny); if hint != attr::ReprAny && vs.len() <= 1 { if vs.len() == 1 { @@ -5642,7 +5516,7 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &ast::ForeignItem) { "get_tydesc" => { let tydesc_ty = match ty::get_tydesc_ty(ccx.tcx) { Ok(t) => t, - Err(s) => { tcx.sess.span_fatal(it.span, s[]); } + Err(s) => { tcx.sess.span_fatal(it.span, s.index(&FullRange)); } }; let td_ptr = ty::mk_ptr(ccx.tcx, ty::mt { ty: tydesc_ty, @@ -5658,7 +5532,7 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &ast::ForeignItem) { ty::mk_struct(ccx.tcx, did, ccx.tcx.mk_substs(subst::Substs::empty()))), Err(msg) => { - tcx.sess.span_fatal(it.span, msg[]); + tcx.sess.span_fatal(it.span, msg.index(&FullRange)); } } }, diff --git a/src/librustc_typeck/check/regionck.rs b/src/librustc_typeck/check/regionck.rs index c7df5ed8453fd..ffb721dd7ac7d 100644 --- a/src/librustc_typeck/check/regionck.rs +++ b/src/librustc_typeck/check/regionck.rs @@ -189,7 +189,7 @@ fn region_of_def(fcx: &FnCtxt, def: def::Def) -> ty::Region { } _ => { tcx.sess.bug(format!("unexpected def in region_of_def: {}", - def)[]) + def).index(&FullRange)) } } } @@ -282,13 +282,13 @@ impl<'a, 'tcx> Rcx<'a, 'tcx> { Some(f) => f, None => { self.tcx().sess.bug( - format!("No fn-sig entry for id={}", id)[]); + format!("No fn-sig entry for id={}", id).index(&FullRange)); } }; let len = self.region_bound_pairs.len(); - self.relate_free_regions(fn_sig[], body.id); - link_fn_args(self, CodeExtent::from_node_id(body.id), fn_decl.inputs[]); + self.relate_free_regions(fn_sig.index(&FullRange), 
body.id); + link_fn_args(self, CodeExtent::from_node_id(body.id), fn_decl.inputs.index(&FullRange)); self.visit_block(body); self.visit_region_obligations(body.id); self.region_bound_pairs.truncate(len); @@ -627,7 +627,7 @@ fn visit_expr(rcx: &mut Rcx, expr: &ast::Expr) { } ast::ExprMatch(ref discr, ref arms, _) => { - link_match(rcx, &**discr, arms[]); + link_match(rcx, &**discr, arms.index(&FullRange)); visit::walk_expr(rcx, expr); } @@ -952,7 +952,7 @@ fn constrain_autoderefs<'a, 'tcx>(rcx: &mut Rcx<'a, 'tcx>, ty::ty_rptr(r, ref m) => (m.mutbl, r), _ => rcx.tcx().sess.span_bug(deref_expr.span, format!("bad overloaded deref type {}", - method.ty.repr(rcx.tcx()))[]) + method.ty.repr(rcx.tcx())).index(&FullRange)) }; { let mc = mc::MemCategorizationContext::new(rcx.fcx); @@ -1318,7 +1318,7 @@ fn link_reborrowed_region<'a, 'tcx>(rcx: &Rcx<'a, 'tcx>, span, format!("Illegal upvar id: {}", upvar_id.repr( - rcx.tcx()))[]); + rcx.tcx())).index(&FullRange)); } } } diff --git a/src/librustc_typeck/check/regionmanip.rs b/src/librustc_typeck/check/regionmanip.rs index 7ca21bdf5b83f..84d94b0392e9d 100644 --- a/src/librustc_typeck/check/regionmanip.rs +++ b/src/librustc_typeck/check/regionmanip.rs @@ -147,7 +147,7 @@ impl<'a, 'tcx> Wf<'a, 'tcx> { ty::ty_open(_) => { self.tcx.sess.bug( format!("Unexpected type encountered while doing wf check: {}", - ty.repr(self.tcx))[]); + ty.repr(self.tcx)).index(&FullRange)); } } } diff --git a/src/librustc_typeck/check/vtable.rs b/src/librustc_typeck/check/vtable.rs index 87ede24226bdb..e302609bf228b 100644 --- a/src/librustc_typeck/check/vtable.rs +++ b/src/librustc_typeck/check/vtable.rs @@ -74,7 +74,7 @@ pub fn check_object_cast<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, source_expr.span, format!("can only cast an boxed pointer \ to a boxed object, not a {}", - ty::ty_sort_string(fcx.tcx(), source_ty))[]); + ty::ty_sort_string(fcx.tcx(), source_ty)).index(&FullRange)); } (_, &ty::ty_rptr(..)) => { @@ -82,7 +82,7 @@ pub fn check_object_cast<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, source_expr.span, format!("can only cast a &-pointer \ to an &-object, not a {}", - ty::ty_sort_string(fcx.tcx(), source_ty))[]); + ty::ty_sort_string(fcx.tcx(), source_ty)).index(&FullRange)); } _ => { diff --git a/src/librustc_typeck/coherence/mod.rs b/src/librustc_typeck/coherence/mod.rs index 09ab98745bd6a..7597a410f62bf 100644 --- a/src/librustc_typeck/coherence/mod.rs +++ b/src/librustc_typeck/coherence/mod.rs @@ -81,7 +81,7 @@ fn get_base_type_def_id<'a, 'tcx>(inference_context: &InferCtxt<'a, 'tcx>, inference_context.tcx.sess.span_bug( span, format!("coherence encountered unexpected type searching for base type: {}", - ty.repr(inference_context.tcx))[]); + ty.repr(inference_context.tcx)).index(&FullRange)); } } } @@ -490,7 +490,7 @@ impl<'a, 'tcx> CoherenceChecker<'a, 'tcx> { format!("the trait `Copy` may not be \ implemented for this type; field \ `{}` does not implement `Copy`", - token::get_name(name))[]) + token::get_name(name)).index(&FullRange)) } Err(ty::VariantDoesNotImplementCopy(name)) => { tcx.sess @@ -498,7 +498,7 @@ impl<'a, 'tcx> CoherenceChecker<'a, 'tcx> { format!("the trait `Copy` may not be \ implemented for this type; variant \ `{}` does not implement `Copy`", - token::get_name(name))[]) + token::get_name(name)).index(&FullRange)) } Err(ty::TypeIsStructural) => { tcx.sess diff --git a/src/librustc_typeck/collect.rs b/src/librustc_typeck/collect.rs index bbafcdae1bba1..7d52418a9db4f 100644 --- a/src/librustc_typeck/collect.rs +++ b/src/librustc_typeck/collect.rs @@ -211,7 
+211,7 @@ fn get_enum_variant_types<'a, 'tcx>(ccx: &CollectCtxt<'a, 'tcx>, ast::TupleVariantKind(ref args) if args.len() > 0 => { let rs = ExplicitRscope; let input_tys: Vec<_> = args.iter().map(|va| ccx.to_ty(&rs, &*va.ty)).collect(); - ty::mk_ctor_fn(tcx, variant_def_id, input_tys[], enum_ty) + ty::mk_ctor_fn(tcx, variant_def_id, input_tys.index(&FullRange), enum_ty) } ast::TupleVariantKind(_) => { @@ -258,7 +258,7 @@ fn collect_trait_methods<'a, 'tcx>(ccx: &CollectCtxt<'a, 'tcx>, ccx, trait_id, &trait_def.generics, - trait_items[], + trait_items.index(&FullRange), &m.id, &m.ident.name, &m.explicit_self, @@ -272,7 +272,7 @@ fn collect_trait_methods<'a, 'tcx>(ccx: &CollectCtxt<'a, 'tcx>, ccx, trait_id, &trait_def.generics, - trait_items[], + trait_items.index(&FullRange), &m.id, &m.pe_ident().name, m.pe_explicit_self(), @@ -774,7 +774,7 @@ fn convert_struct<'a, 'tcx>(ccx: &CollectCtxt<'a, 'tcx>, local_def(field.node.id)].ty).collect(); let ctor_fn_ty = ty::mk_ctor_fn(tcx, local_def(ctor_id), - inputs[], + inputs.index(&FullRange), selfty); write_ty_to_tcx(tcx, ctor_id, ctor_fn_ty); tcx.tcache.borrow_mut().insert(local_def(ctor_id), @@ -815,7 +815,7 @@ fn get_trait_def<'a, 'tcx>(ccx: &CollectCtxt<'a, 'tcx>, ast_map::NodeItem(item) => trait_def_of_item(ccx, &*item), _ => { ccx.tcx.sess.bug(format!("get_trait_def({}): not an item", - trait_id.node)[]) + trait_id.node).index(&FullRange)) } } } @@ -840,7 +840,7 @@ fn trait_def_of_item<'a, 'tcx>(ccx: &CollectCtxt<'a, 'tcx>, ref s => { tcx.sess.span_bug( it.span, - format!("trait_def_of_item invoked on {}", s)[]); + format!("trait_def_of_item invoked on {}", s).index(&FullRange)); } }; @@ -1025,8 +1025,8 @@ fn ty_generics_for_type_or_impl<'a, 'tcx>(ccx: &CollectCtxt<'a, 'tcx>, -> ty::Generics<'tcx> { ty_generics(ccx, subst::TypeSpace, - generics.lifetimes[], - generics.ty_params[], + generics.lifetimes.index(&FullRange), + generics.ty_params.index(&FullRange), ty::Generics::empty(), &generics.where_clause) } @@ -1044,8 +1044,8 @@ fn ty_generics_for_trait<'a, 'tcx>(ccx: &CollectCtxt<'a, 'tcx>, let mut generics = ty_generics(ccx, subst::TypeSpace, - ast_generics.lifetimes[], - ast_generics.ty_params[], + ast_generics.lifetimes.index(&FullRange), + ast_generics.ty_params.index(&FullRange), ty::Generics::empty(), &ast_generics.where_clause); @@ -1130,8 +1130,8 @@ fn ty_generics_for_fn_or_method<'a,'tcx>(ccx: &CollectCtxt<'a,'tcx>, let early_lifetimes = resolve_lifetime::early_bound_lifetimes(generics); ty_generics(ccx, subst::FnSpace, - early_lifetimes[], - generics.ty_params[], + early_lifetimes.index(&FullRange), + generics.ty_params.index(&FullRange), base_generics, &generics.where_clause) } @@ -1318,7 +1318,7 @@ fn get_or_create_type_parameter_def<'a,'tcx>(ccx: &CollectCtxt<'a,'tcx>, let param_ty = ty::ParamTy::new(space, index, param.ident.name); let bounds = compute_bounds(ccx, param_ty.to_ty(ccx.tcx), - param.bounds[], + param.bounds.index(&FullRange), SizedByDefault::Yes, param.span); let default = match param.default { @@ -1399,7 +1399,7 @@ fn check_bounds_compatible<'tcx>(tcx: &ty::ctxt<'tcx>, if !param_bounds.builtin_bounds.contains(&ty::BoundSized) { ty::each_bound_trait_and_supertraits( tcx, - param_bounds.trait_bounds[], + param_bounds.trait_bounds.index(&FullRange), |trait_ref| { let trait_def = ty::lookup_trait_def(tcx, trait_ref.def_id()); if trait_def.bounds.builtin_bounds.contains(&ty::BoundSized) { diff --git a/src/librustc_typeck/lib.rs b/src/librustc_typeck/lib.rs index dc434f1401585..f7abd8a1cc6a6 100644 --- 
a/src/librustc_typeck/lib.rs +++ b/src/librustc_typeck/lib.rs @@ -207,7 +207,7 @@ fn require_same_types<'a, 'tcx, M>(tcx: &ty::ctxt<'tcx>, format!("{}: {}", msg(), ty::type_err_to_str(tcx, - terr))[]); + terr)).index(&FullRange)); ty::note_and_explain_type_err(tcx, terr); false } @@ -256,7 +256,7 @@ fn check_main_fn_ty(ccx: &CrateCtxt, format!("main has a non-function type: found \ `{}`", ppaux::ty_to_string(tcx, - main_t))[]); + main_t)).index(&FullRange)); } } } @@ -307,7 +307,7 @@ fn check_start_fn_ty(ccx: &CrateCtxt, tcx.sess.span_bug(start_span, format!("start has a non-function type: found \ `{}`", - ppaux::ty_to_string(tcx, start_t))[]); + ppaux::ty_to_string(tcx, start_t)).index(&FullRange)); } } } diff --git a/src/librustc_typeck/variance.rs b/src/librustc_typeck/variance.rs index e58c2275fcd96..3ca0e40c51ed4 100644 --- a/src/librustc_typeck/variance.rs +++ b/src/librustc_typeck/variance.rs @@ -564,7 +564,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { None => { self.tcx().sess.bug(format!( "no inferred index entry for {}", - self.tcx().map.node_to_string(param_id))[]); + self.tcx().map.node_to_string(param_id)).index(&FullRange)); } } } @@ -839,7 +839,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { self.tcx().sess.bug( format!("unexpected type encountered in \ variance inference: {}", - ty.repr(self.tcx()))[]); + ty.repr(self.tcx())).index(&FullRange)); } } } @@ -919,7 +919,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { .sess .bug(format!("unexpected region encountered in variance \ inference: {}", - region.repr(self.tcx()))[]); + region.repr(self.tcx())).index(&FullRange)); } } } @@ -1055,7 +1055,7 @@ impl<'a, 'tcx> SolveContext<'a, 'tcx> { // attribute and report an error with various results if found. if ty::has_attr(tcx, item_def_id, "rustc_variance") { let found = item_variances.repr(tcx); - tcx.sess.span_err(tcx.map.span(item_id), found[]); + tcx.sess.span_err(tcx.map.span(item_id), found.index(&FullRange)); } let newly_added = tcx.item_variance_map.borrow_mut() diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index bb9a9ac430340..e6d1dfd62d0fa 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -798,7 +798,7 @@ impl Clean for ast::Method { let all_inputs = &self.pe_fn_decl().inputs; let inputs = match self.pe_explicit_self().node { ast::SelfStatic => all_inputs.as_slice(), - _ => all_inputs[1..] + _ => all_inputs.index(&(1..)) }; let decl = FnDecl { inputs: Arguments { @@ -836,7 +836,7 @@ impl Clean for ast::TypeMethod { fn clean(&self, cx: &DocContext) -> Item { let inputs = match self.explicit_self.node { ast::SelfStatic => self.decl.inputs.as_slice(), - _ => self.decl.inputs[1..] 
+ _ => self.decl.inputs.index(&(1..)) }; let decl = FnDecl { inputs: Arguments { @@ -1132,7 +1132,7 @@ impl<'tcx> Clean for ty::Method<'tcx> { self.fty.sig.clone()), s => { let sig = ty::Binder(ty::FnSig { - inputs: self.fty.sig.0.inputs[1..].to_vec(), + inputs: self.fty.sig.0.inputs.index(&(1..)).to_vec(), ..self.fty.sig.0.clone() }); let s = match s { diff --git a/src/librustdoc/html/format.rs b/src/librustdoc/html/format.rs index 9004d11b5bccf..abc669729fe80 100644 --- a/src/librustdoc/html/format.rs +++ b/src/librustdoc/html/format.rs @@ -97,7 +97,7 @@ impl fmt::Show for clean::Generics { if i > 0 { try!(f.write_str(", ")) } - try!(f.write_str(tp.name[])); + try!(f.write_str(tp.name.as_slice())); if tp.bounds.len() > 0 { try!(write!(f, ": {}", TyParamBounds(tp.bounds.as_slice()))); @@ -311,7 +311,7 @@ fn path(w: &mut fmt::Formatter, match rel_root { Some(root) => { let mut root = String::from_str(root.as_slice()); - for seg in path.segments[..amt].iter() { + for seg in path.segments.index(&(0..amt)).iter() { if "super" == seg.name || "self" == seg.name { try!(write!(w, "{}::", seg.name)); @@ -326,7 +326,7 @@ fn path(w: &mut fmt::Formatter, } } None => { - for seg in path.segments[..amt].iter() { + for seg in path.segments.index(&(0..amt)).iter() { try!(write!(w, "{}::", seg.name)); } } @@ -337,7 +337,7 @@ fn path(w: &mut fmt::Formatter, // This is a documented path, link to it! Some((ref fqp, shortty)) if abs_root.is_some() => { let mut url = String::from_str(abs_root.unwrap().as_slice()); - let to_link = fqp[..fqp.len() - 1]; + let to_link = &fqp[..(fqp.len() - 1)]; for component in to_link.iter() { url.push_str(component.as_slice()); url.push_str("/"); @@ -433,7 +433,7 @@ impl fmt::Show for clean::Type { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { clean::TyParamBinder(id) => { - f.write_str(cache().typarams[ast_util::local_def(id)][]) + f.write_str(cache().typarams[ast_util::local_def(id)].as_slice()) } clean::Generic(ref name) => { f.write_str(name.as_slice()) diff --git a/src/librustdoc/html/highlight.rs b/src/librustdoc/html/highlight.rs index 30b9d6c63c5bb..3d2c5e2cbb5fb 100644 --- a/src/librustdoc/html/highlight.rs +++ b/src/librustdoc/html/highlight.rs @@ -34,7 +34,7 @@ pub fn highlight(src: &str, class: Option<&str>, id: Option<&str>) -> String { class, id, &mut out).unwrap(); - String::from_utf8_lossy(out[]).into_owned() + String::from_utf8_lossy(out.index(&FullRange)).into_owned() } /// Exhausts the `lexer` writing the output into `out`. diff --git a/src/librustdoc/html/render.rs b/src/librustdoc/html/render.rs index ddb14d6944b9d..a01fcd399521a 100644 --- a/src/librustdoc/html/render.rs +++ b/src/librustdoc/html/render.rs @@ -404,7 +404,7 @@ fn build_index(krate: &clean::Crate, cache: &mut Cache) -> io::IoResult search_index.push(IndexItem { ty: shortty(item), name: item.name.clone().unwrap(), - path: fqp[..fqp.len() - 1].connect("::"), + path: fqp[..(fqp.len() - 1)].connect("::"), desc: shorter(item.doc_value()).to_string(), parent: Some(did), }); @@ -559,7 +559,7 @@ fn write_shared(cx: &Context, }; let mut mydst = dst.clone(); - for part in remote_path[..remote_path.len() - 1].iter() { + for part in remote_path[..(remote_path.len() - 1)].iter() { mydst.push(part.as_slice()); try!(mkdir(&mydst)); } @@ -842,7 +842,7 @@ impl DocFolder for Cache { clean::StructFieldItem(..) | clean::VariantItem(..) 
=> { ((Some(*self.parent_stack.last().unwrap()), - Some(self.stack[..self.stack.len() - 1])), + Some(&self.stack[..(self.stack.len() - 1)])), false) } clean::MethodItem(..) => { @@ -853,13 +853,13 @@ impl DocFolder for Cache { let did = *last; let path = match self.paths.get(&did) { Some(&(_, ItemType::Trait)) => - Some(self.stack[..self.stack.len() - 1]), + Some(&self.stack[..(self.stack.len() - 1)]), // The current stack not necessarily has correlation for // where the type was defined. On the other hand, // `paths` always has the right information if present. Some(&(ref fqp, ItemType::Struct)) | Some(&(ref fqp, ItemType::Enum)) => - Some(fqp[..fqp.len() - 1]), + Some(&fqp[..(fqp.len() - 1)]), Some(..) => Some(self.stack.as_slice()), None => None }; @@ -1185,7 +1185,7 @@ impl Context { .collect::(); match cache().paths.get(&it.def_id) { Some(&(ref names, _)) => { - for name in names[..names.len() - 1].iter() { + for name in (&names[..(names.len() - 1)]).iter() { url.push_str(name.as_slice()); url.push_str("/"); } @@ -2267,7 +2267,7 @@ fn item_macro(w: &mut fmt::Formatter, it: &clean::Item, t: &clean::Macro) -> fmt::Result { try!(w.write_str(highlight::highlight(t.source.as_slice(), Some("macro"), - None)[])); + None).as_slice())); document(w, it) } diff --git a/src/libserialize/json.rs b/src/libserialize/json.rs index 8b0fb75b438fe..c520b6f4723c0 100644 --- a/src/libserialize/json.rs +++ b/src/libserialize/json.rs @@ -193,7 +193,6 @@ //! ``` use self::JsonEvent::*; -use self::StackElement::*; use self::ErrorCode::*; use self::ParserError::*; use self::DecoderError::*; @@ -208,7 +207,7 @@ use std::num::{Float, Int}; use std::num::FpCategory as Fp; use std::str::FromStr; use std::string; -use std::ops; +use std::ops::Index; use unicode::str as unicode_str; use unicode::str::Utf16Item; @@ -384,7 +383,7 @@ fn escape_str(wr: &mut fmt::Writer, v: &str) -> fmt::Result { }; if start < i { - try!(wr.write_str(v[start..i])); + try!(wr.write_str(v.index(&(start..i)))); } try!(wr.write_str(escaped)); @@ -393,7 +392,7 @@ fn escape_str(wr: &mut fmt::Writer, v: &str) -> fmt::Result { } if start != v.len() { - try!(wr.write_str(v[start..])); + try!(wr.write_str(v.index(&(start..)))); } wr.write_str("\"") @@ -402,7 +401,7 @@ fn escape_str(wr: &mut fmt::Writer, v: &str) -> fmt::Result { fn escape_char(writer: &mut fmt::Writer, v: char) -> fmt::Result { let mut buf = [0; 4]; let n = v.encode_utf8(&mut buf).unwrap(); - let buf = unsafe { str::from_utf8_unchecked(buf[0..n]) }; + let buf = unsafe { str::from_utf8_unchecked(buf.index(&(0..n))) }; escape_str(writer, buf) } @@ -415,7 +414,7 @@ fn spaces(wr: &mut fmt::Writer, mut n: uint) -> fmt::Result { } if n > 0 { - wr.write_str(BUF[..n]) + wr.write_str(BUF.index(&(0..n))) } else { Ok(()) } @@ -624,7 +623,7 @@ impl<'a> ::Encoder for Encoder<'a> { let mut check_encoder = Encoder::new(&mut buf); try!(f(transmute(&mut check_encoder))); } - let out = str::from_utf8(buf[]).unwrap(); + let out = str::from_utf8(buf.index(&FullRange)).unwrap(); let needs_wrapping = out.char_at(0) != '"' && out.char_at_reverse(out.len()) != '"'; if needs_wrapping { try!(write!(self.writer, "\"")); } try!(f(self)); @@ -895,7 +894,7 @@ impl<'a> ::Encoder for PrettyEncoder<'a> { let mut check_encoder = PrettyEncoder::new(&mut buf); try!(f(transmute(&mut check_encoder))); } - let out = str::from_utf8(buf[]).unwrap(); + let out = str::from_utf8(buf.index(&FullRange)).unwrap(); let needs_wrapping = out.char_at(0) != '"' && out.char_at_reverse(out.len()) != '"'; if needs_wrapping { 
try!(write!(self.writer, "\"")); } try!(f(self)); @@ -1028,7 +1027,7 @@ impl Json { /// Returns None otherwise. pub fn as_string<'a>(&'a self) -> Option<&'a str> { match *self { - Json::String(ref s) => Some(s[]), + Json::String(ref s) => Some(s.index(&FullRange)), _ => None } } @@ -1125,7 +1124,7 @@ impl Json { } } -impl<'a> ops::Index<&'a str> for Json { +impl<'a> Index<&'a str> for Json { type Output = Json; fn index(&self, idx: & &str) -> &Json { @@ -1133,7 +1132,7 @@ impl<'a> ops::Index<&'a str> for Json { } } -impl ops::Index for Json { +impl Index for Json { type Output = Json; fn index<'a>(&'a self, idx: &uint) -> &'a Json { @@ -1187,7 +1186,8 @@ pub struct Stack { } /// StackElements compose a Stack. -/// For example, Key("foo"), Key("bar"), Index(3) and Key("x") are the +/// For example, StackElement::Key("foo"), StackElement::Key("bar"), +/// StackElement::Index(3) and StackElement::Key("x") are the /// StackElements compositing the stack that represents foo.bar[3].x #[derive(PartialEq, Clone, Show)] pub enum StackElement<'l> { @@ -1219,10 +1219,11 @@ impl Stack { /// at the top. pub fn get<'l>(&'l self, idx: uint) -> StackElement<'l> { match self.stack[idx] { - InternalIndex(i) => Index(i), + InternalIndex(i) => StackElement::Index(i), InternalKey(start, size) => { - Key(str::from_utf8( - self.str_buffer[start as uint .. start as uint + size as uint]).unwrap()) + StackElement::Key(str::from_utf8( + self.str_buffer.index(&((start as uint) .. (start as uint + size as uint)))) + .unwrap()) } } } @@ -1261,16 +1262,16 @@ impl Stack { pub fn top<'l>(&'l self) -> Option> { return match self.stack.last() { None => None, - Some(&InternalIndex(i)) => Some(Index(i)), + Some(&InternalIndex(i)) => Some(StackElement::Index(i)), Some(&InternalKey(start, size)) => { - Some(Key(str::from_utf8( - self.str_buffer[start as uint .. (start+size) as uint] + Some(StackElement::Key(str::from_utf8( + self.str_buffer.index(&((start as uint) .. (start+size) as uint)) ).unwrap())) } } } - // Used by Parser to insert Key elements at the top of the stack. + // Used by Parser to insert StackElement::Key elements at the top of the stack. fn push_key(&mut self, key: string::String) { self.stack.push(InternalKey(self.str_buffer.len() as u16, key.len() as u16)); for c in key.as_bytes().iter() { @@ -1278,7 +1279,7 @@ impl Stack { } } - // Used by Parser to insert Index elements at the top of the stack. + // Used by Parser to insert StackElement::Index elements at the top of the stack. 
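(Illustrative aside, not part of the patch.) The json.rs hunks above stop glob-importing `use self::StackElement::*` and write the variants fully qualified as `StackElement::Key(..)` / `StackElement::Index(..)`, presumably so that the variant name `Index` no longer collides with the `ops::Index` trait the module now imports. A minimal sketch of the qualified spelling, using a hypothetical stand-in for the real enum:

    // Hypothetical stand-in; the real StackElement lives in libserialize::json.
    enum StackElement<'l> {
        Index(u32),
        Key(&'l str),
    }

    fn describe<'l>(e: &StackElement<'l>) -> String {
        match *e {
            // Fully qualified variant paths, as the patch now spells them:
            StackElement::Index(i) => format!("[{}]", i),
            StackElement::Key(k) => format!(".{}", k),
        }
    }

    fn main() {
        let top = StackElement::Key("foo");
        println!("{}", describe(&top)); // prints `.foo`
    }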
fn push_index(&mut self, index: u32) { self.stack.push(InternalIndex(index)); } @@ -1948,7 +1949,7 @@ impl> Builder { _ => {} } let key = match self.parser.stack().top() { - Some(Key(k)) => { k.to_string() } + Some(StackElement::Key(k)) => { k.to_string() } _ => { panic!("invalid state"); } }; match self.build_value() { @@ -2143,7 +2144,7 @@ impl ::Decoder for Decoder { return Err(ExpectedError("String or Object".to_string(), format!("{}", json))) } }; - let idx = match names.iter().position(|n| *n == name[]) { + let idx = match names.iter().position(|n| *n == name.index(&FullRange)) { Some(idx) => idx, None => return Err(UnknownVariantError(name)) }; @@ -2505,12 +2506,12 @@ mod tests { use super::ParserError::*; use super::DecoderError::*; use super::JsonEvent::*; - use super::StackElement::*; use super::{Json, from_str, DecodeResult, DecoderError, JsonEvent, Parser, StackElement, Stack, Decoder}; - use std::{i64, u64, f32, f64}; + use std::{i64, u64, f32, f64, io}; use std::collections::BTreeMap; use std::num::Float; + use std::ops::Index; use std::string; #[derive(RustcDecodable, Eq, PartialEq, Show)] @@ -3351,7 +3352,7 @@ mod tests { hm.insert(1, true); let mut mem_buf = Vec::new(); write!(&mut mem_buf, "{}", super::as_pretty_json(&hm)).unwrap(); - let json_str = from_utf8(mem_buf[]).unwrap(); + let json_str = from_utf8(mem_buf.index(&FullRange)).unwrap(); match from_str(json_str) { Err(_) => panic!("Unable to parse json_str: {}", json_str), _ => {} // it parsed and we are good to go @@ -3367,7 +3368,7 @@ mod tests { hm.insert(1, true); let mut mem_buf = Vec::new(); write!(&mut mem_buf, "{}", super::as_pretty_json(&hm)).unwrap(); - let json_str = from_utf8(mem_buf[]).unwrap(); + let json_str = from_utf8(mem_buf.index(&FullRange)).unwrap(); match from_str(json_str) { Err(_) => panic!("Unable to parse json_str: {}", json_str), _ => {} // it parsed and we are good to go @@ -3407,7 +3408,7 @@ mod tests { write!(&mut writer, "{}", super::as_pretty_json(&json).indent(i)).unwrap(); - let printed = from_utf8(writer[]).unwrap(); + let printed = from_utf8(writer.index(&FullRange)).unwrap(); // Check for indents at each line let lines: Vec<&str> = printed.lines().collect(); @@ -3477,20 +3478,23 @@ mod tests { r#"{ "foo":"bar", "array" : [0, 1, 2, 3, 4, 5], "idents":[null,true,false]}"#, vec![ (ObjectStart, vec![]), - (StringValue("bar".to_string()), vec![Key("foo")]), - (ArrayStart, vec![Key("array")]), - (U64Value(0), vec![Key("array"), Index(0)]), - (U64Value(1), vec![Key("array"), Index(1)]), - (U64Value(2), vec![Key("array"), Index(2)]), - (U64Value(3), vec![Key("array"), Index(3)]), - (U64Value(4), vec![Key("array"), Index(4)]), - (U64Value(5), vec![Key("array"), Index(5)]), - (ArrayEnd, vec![Key("array")]), - (ArrayStart, vec![Key("idents")]), - (NullValue, vec![Key("idents"), Index(0)]), - (BooleanValue(true), vec![Key("idents"), Index(1)]), - (BooleanValue(false), vec![Key("idents"), Index(2)]), - (ArrayEnd, vec![Key("idents")]), + (StringValue("bar".to_string()), vec![StackElement::Key("foo")]), + (ArrayStart, vec![StackElement::Key("array")]), + (U64Value(0), vec![StackElement::Key("array"), StackElement::Index(0)]), + (U64Value(1), vec![StackElement::Key("array"), StackElement::Index(1)]), + (U64Value(2), vec![StackElement::Key("array"), StackElement::Index(2)]), + (U64Value(3), vec![StackElement::Key("array"), StackElement::Index(3)]), + (U64Value(4), vec![StackElement::Key("array"), StackElement::Index(4)]), + (U64Value(5), vec![StackElement::Key("array"), StackElement::Index(5)]), + 
(ArrayEnd, vec![StackElement::Key("array")]), + (ArrayStart, vec![StackElement::Key("idents")]), + (NullValue, vec![StackElement::Key("idents"), + StackElement::Index(0)]), + (BooleanValue(true), vec![StackElement::Key("idents"), + StackElement::Index(1)]), + (BooleanValue(false), vec![StackElement::Key("idents"), + StackElement::Index(2)]), + (ArrayEnd, vec![StackElement::Key("idents")]), (ObjectEnd, vec![]), ] ); @@ -3530,7 +3534,7 @@ mod tests { "{\"a\": 3}", vec![ (ObjectStart, vec![]), - (U64Value(3), vec![Key("a")]), + (U64Value(3), vec![StackElement::Key("a")]), (ObjectEnd, vec![]), ] ); @@ -3538,8 +3542,8 @@ mod tests { "{ \"a\": null, \"b\" : true }", vec![ (ObjectStart, vec![]), - (NullValue, vec![Key("a")]), - (BooleanValue(true), vec![Key("b")]), + (NullValue, vec![StackElement::Key("a")]), + (BooleanValue(true), vec![StackElement::Key("b")]), (ObjectEnd, vec![]), ] ); @@ -3547,10 +3551,10 @@ mod tests { "{\"a\" : 1.0 ,\"b\": [ true ]}", vec![ (ObjectStart, vec![]), - (F64Value(1.0), vec![Key("a")]), - (ArrayStart, vec![Key("b")]), - (BooleanValue(true),vec![Key("b"), Index(0)]), - (ArrayEnd, vec![Key("b")]), + (F64Value(1.0), vec![StackElement::Key("a")]), + (ArrayStart, vec![StackElement::Key("b")]), + (BooleanValue(true),vec![StackElement::Key("b"), StackElement::Index(0)]), + (ArrayEnd, vec![StackElement::Key("b")]), (ObjectEnd, vec![]), ] ); @@ -3565,16 +3569,27 @@ mod tests { }"#, vec![ (ObjectStart, vec![]), - (F64Value(1.0), vec![Key("a")]), - (ArrayStart, vec![Key("b")]), - (BooleanValue(true), vec![Key("b"), Index(0)]), - (StringValue("foo\nbar".to_string()), vec![Key("b"), Index(1)]), - (ObjectStart, vec![Key("b"), Index(2)]), - (ObjectStart, vec![Key("b"), Index(2), Key("c")]), - (NullValue, vec![Key("b"), Index(2), Key("c"), Key("d")]), - (ObjectEnd, vec![Key("b"), Index(2), Key("c")]), - (ObjectEnd, vec![Key("b"), Index(2)]), - (ArrayEnd, vec![Key("b")]), + (F64Value(1.0), vec![StackElement::Key("a")]), + (ArrayStart, vec![StackElement::Key("b")]), + (BooleanValue(true), vec![StackElement::Key("b"), + StackElement::Index(0)]), + (StringValue("foo\nbar".to_string()), vec![StackElement::Key("b"), + StackElement::Index(1)]), + (ObjectStart, vec![StackElement::Key("b"), + StackElement::Index(2)]), + (ObjectStart, vec![StackElement::Key("b"), + StackElement::Index(2), + StackElement::Key("c")]), + (NullValue, vec![StackElement::Key("b"), + StackElement::Index(2), + StackElement::Key("c"), + StackElement::Key("d")]), + (ObjectEnd, vec![StackElement::Key("b"), + StackElement::Index(2), + StackElement::Key("c")]), + (ObjectEnd, vec![StackElement::Key("b"), + StackElement::Index(2)]), + (ArrayEnd, vec![StackElement::Key("b")]), (ObjectEnd, vec![]), ] ); @@ -3600,7 +3615,7 @@ mod tests { "[true]", vec![ (ArrayStart, vec![]), - (BooleanValue(true), vec![Index(0)]), + (BooleanValue(true), vec![StackElement::Index(0)]), (ArrayEnd, vec![]), ] ); @@ -3608,7 +3623,7 @@ mod tests { "[ false ]", vec![ (ArrayStart, vec![]), - (BooleanValue(false), vec![Index(0)]), + (BooleanValue(false), vec![StackElement::Index(0)]), (ArrayEnd, vec![]), ] ); @@ -3616,7 +3631,7 @@ mod tests { "[null]", vec![ (ArrayStart, vec![]), - (NullValue, vec![Index(0)]), + (NullValue, vec![StackElement::Index(0)]), (ArrayEnd, vec![]), ] ); @@ -3624,8 +3639,8 @@ mod tests { "[3, 1]", vec![ (ArrayStart, vec![]), - (U64Value(3), vec![Index(0)]), - (U64Value(1), vec![Index(1)]), + (U64Value(3), vec![StackElement::Index(0)]), + (U64Value(1), vec![StackElement::Index(1)]), (ArrayEnd, vec![]), ] ); @@ -3633,8 +3648,8 
@@ mod tests { "\n[3, 2]\n", vec![ (ArrayStart, vec![]), - (U64Value(3), vec![Index(0)]), - (U64Value(2), vec![Index(1)]), + (U64Value(3), vec![StackElement::Index(0)]), + (U64Value(2), vec![StackElement::Index(1)]), (ArrayEnd, vec![]), ] ); @@ -3642,11 +3657,11 @@ mod tests { "[2, [4, 1]]", vec![ (ArrayStart, vec![]), - (U64Value(2), vec![Index(0)]), - (ArrayStart, vec![Index(1)]), - (U64Value(4), vec![Index(1), Index(0)]), - (U64Value(1), vec![Index(1), Index(1)]), - (ArrayEnd, vec![Index(1)]), + (U64Value(2), vec![StackElement::Index(0)]), + (ArrayStart, vec![StackElement::Index(1)]), + (U64Value(4), vec![StackElement::Index(1), StackElement::Index(0)]), + (U64Value(1), vec![StackElement::Index(1), StackElement::Index(1)]), + (ArrayEnd, vec![StackElement::Index(1)]), (ArrayEnd, vec![]), ] ); @@ -3695,50 +3710,56 @@ mod tests { stack.bump_index(); assert!(stack.len() == 1); - assert!(stack.is_equal_to(&[Index(1)])); - assert!(stack.starts_with(&[Index(1)])); - assert!(stack.ends_with(&[Index(1)])); + assert!(stack.is_equal_to(&[StackElement::Index(1)])); + assert!(stack.starts_with(&[StackElement::Index(1)])); + assert!(stack.ends_with(&[StackElement::Index(1)])); assert!(stack.last_is_index()); - assert!(stack.get(0) == Index(1)); + assert!(stack.get(0) == StackElement::Index(1)); stack.push_key("foo".to_string()); assert!(stack.len() == 2); - assert!(stack.is_equal_to(&[Index(1), Key("foo")])); - assert!(stack.starts_with(&[Index(1), Key("foo")])); - assert!(stack.starts_with(&[Index(1)])); - assert!(stack.ends_with(&[Index(1), Key("foo")])); - assert!(stack.ends_with(&[Key("foo")])); + assert!(stack.is_equal_to(&[StackElement::Index(1), StackElement::Key("foo")])); + assert!(stack.starts_with(&[StackElement::Index(1), StackElement::Key("foo")])); + assert!(stack.starts_with(&[StackElement::Index(1)])); + assert!(stack.ends_with(&[StackElement::Index(1), StackElement::Key("foo")])); + assert!(stack.ends_with(&[StackElement::Key("foo")])); assert!(!stack.last_is_index()); - assert!(stack.get(0) == Index(1)); - assert!(stack.get(1) == Key("foo")); + assert!(stack.get(0) == StackElement::Index(1)); + assert!(stack.get(1) == StackElement::Key("foo")); stack.push_key("bar".to_string()); assert!(stack.len() == 3); - assert!(stack.is_equal_to(&[Index(1), Key("foo"), Key("bar")])); - assert!(stack.starts_with(&[Index(1)])); - assert!(stack.starts_with(&[Index(1), Key("foo")])); - assert!(stack.starts_with(&[Index(1), Key("foo"), Key("bar")])); - assert!(stack.ends_with(&[Key("bar")])); - assert!(stack.ends_with(&[Key("foo"), Key("bar")])); - assert!(stack.ends_with(&[Index(1), Key("foo"), Key("bar")])); + assert!(stack.is_equal_to(&[StackElement::Index(1), + StackElement::Key("foo"), + StackElement::Key("bar")])); + assert!(stack.starts_with(&[StackElement::Index(1)])); + assert!(stack.starts_with(&[StackElement::Index(1), StackElement::Key("foo")])); + assert!(stack.starts_with(&[StackElement::Index(1), + StackElement::Key("foo"), + StackElement::Key("bar")])); + assert!(stack.ends_with(&[StackElement::Key("bar")])); + assert!(stack.ends_with(&[StackElement::Key("foo"), StackElement::Key("bar")])); + assert!(stack.ends_with(&[StackElement::Index(1), + StackElement::Key("foo"), + StackElement::Key("bar")])); assert!(!stack.last_is_index()); - assert!(stack.get(0) == Index(1)); - assert!(stack.get(1) == Key("foo")); - assert!(stack.get(2) == Key("bar")); + assert!(stack.get(0) == StackElement::Index(1)); + assert!(stack.get(1) == StackElement::Key("foo")); + assert!(stack.get(2) == 
StackElement::Key("bar")); stack.pop(); assert!(stack.len() == 2); - assert!(stack.is_equal_to(&[Index(1), Key("foo")])); - assert!(stack.starts_with(&[Index(1), Key("foo")])); - assert!(stack.starts_with(&[Index(1)])); - assert!(stack.ends_with(&[Index(1), Key("foo")])); - assert!(stack.ends_with(&[Key("foo")])); + assert!(stack.is_equal_to(&[StackElement::Index(1), StackElement::Key("foo")])); + assert!(stack.starts_with(&[StackElement::Index(1), StackElement::Key("foo")])); + assert!(stack.starts_with(&[StackElement::Index(1)])); + assert!(stack.ends_with(&[StackElement::Index(1), StackElement::Key("foo")])); + assert!(stack.ends_with(&[StackElement::Key("foo")])); assert!(!stack.last_is_index()); - assert!(stack.get(0) == Index(1)); - assert!(stack.get(1) == Key("foo")); + assert!(stack.get(0) == StackElement::Index(1)); + assert!(stack.get(1) == StackElement::Key("foo")); } #[test] diff --git a/src/libserialize/json_stage0.rs b/src/libserialize/json_stage0.rs index a157d91727498..1c2855f674529 100644 --- a/src/libserialize/json_stage0.rs +++ b/src/libserialize/json_stage0.rs @@ -211,6 +211,7 @@ use std::string; use std::ops; use unicode::str as unicode_str; use unicode::str::Utf16Item; +use std::ops::Index as IndexOp; use Encodable; @@ -386,7 +387,7 @@ fn escape_str(wr: &mut fmt::Writer, v: &str) -> fmt::Result { }; if start < i { - try!(wr.write_str(v[start..i])); + try!(wr.write_str(v.index(&(start..i)))); } try!(wr.write_str(escaped)); @@ -395,7 +396,7 @@ fn escape_str(wr: &mut fmt::Writer, v: &str) -> fmt::Result { } if start != v.len() { - try!(wr.write_str(v[start..])); + try!(wr.write_str(v.index(&(start..)))); } wr.write_str("\"") @@ -404,7 +405,7 @@ fn escape_str(wr: &mut fmt::Writer, v: &str) -> fmt::Result { fn escape_char(writer: &mut fmt::Writer, v: char) -> fmt::Result { let mut buf = [0; 4]; let n = v.encode_utf8(&mut buf).unwrap(); - let buf = unsafe { str::from_utf8_unchecked(buf[0..n]) }; + let buf = unsafe { str::from_utf8_unchecked(buf.index(&(0..n))) }; escape_str(writer, buf) } @@ -417,7 +418,7 @@ fn spaces(wr: &mut fmt::Writer, mut n: uint) -> fmt::Result { } if n > 0 { - wr.write_str(BUF[..n]) + wr.write_str(BUF.index(&(..n))) } else { Ok(()) } @@ -624,7 +625,7 @@ impl<'a> ::Encoder for Encoder<'a> { let mut check_encoder = Encoder::new(&mut buf); try!(f(transmute(&mut check_encoder))); } - let out = str::from_utf8(buf[]).unwrap(); + let out = str::from_utf8(buf.index(&FullRange)).unwrap(); let needs_wrapping = out.char_at(0) != '"' && out.char_at_reverse(out.len()) != '"'; if needs_wrapping { try!(write!(self.writer, "\"")); } try!(f(self)); @@ -893,7 +894,7 @@ impl<'a> ::Encoder for PrettyEncoder<'a> { let mut check_encoder = PrettyEncoder::new(&mut buf); try!(f(transmute(&mut check_encoder))); } - let out = str::from_utf8(buf[]).unwrap(); + let out = str::from_utf8(buf.index(&FullRange)).unwrap(); let needs_wrapping = out.char_at(0) != '"' && out.char_at_reverse(out.len()) != '"'; if needs_wrapping { try!(write!(self.writer, "\"")); } try!(f(self)); @@ -1026,7 +1027,7 @@ impl Json { /// Returns None otherwise. pub fn as_string<'a>(&'a self) -> Option<&'a str> { match *self { - Json::String(ref s) => Some(s[]), + Json::String(ref s) => Some(s.index(&FullRange)), _ => None } } @@ -1220,7 +1221,8 @@ impl Stack { InternalIndex(i) => Index(i), InternalKey(start, size) => { Key(str::from_utf8( - self.str_buffer[start as uint .. start as uint + size as uint]).unwrap()) + self.str_buffer.index( + &((start as uint) .. 
(start as uint + size as uint)))).unwrap()) } } } @@ -1262,7 +1264,7 @@ impl Stack { Some(&InternalIndex(i)) => Some(Index(i)), Some(&InternalKey(start, size)) => { Some(Key(str::from_utf8( - self.str_buffer[start as uint .. (start+size) as uint] + self.str_buffer.index(&(start as uint) .. ((start+size) as uint)) ).unwrap())) } } @@ -2139,7 +2141,7 @@ impl ::Decoder for Decoder { return Err(ExpectedError("String or Object".to_string(), format!("{}", json))) } }; - let idx = match names.iter().position(|n| *n == name[]) { + let idx = match names.iter().position(|n| *n == name.index(&FullRange)) { Some(idx) => idx, None => return Err(UnknownVariantError(name)) }; @@ -3352,7 +3354,7 @@ mod tests { hm.insert(1, true); let mut mem_buf = Vec::new(); write!(&mut mem_buf, "{}", super::as_pretty_json(&hm)).unwrap(); - let json_str = from_utf8(mem_buf[]).unwrap(); + let json_str = from_utf8(&mem_buf.index(&FullRange)).unwrap(); match from_str(json_str) { Err(_) => panic!("Unable to parse json_str: {}", json_str), _ => {} // it parsed and we are good to go @@ -3368,7 +3370,7 @@ mod tests { hm.insert(1, true); let mut mem_buf = Vec::new(); write!(&mut mem_buf, "{}", super::as_pretty_json(&hm)).unwrap(); - let json_str = from_utf8(mem_buf[]).unwrap(); + let json_str = from_utf8(&mem_buf.index(&FullRange)).unwrap(); match from_str(json_str) { Err(_) => panic!("Unable to parse json_str: {}", json_str), _ => {} // it parsed and we are good to go @@ -3408,7 +3410,7 @@ mod tests { write!(&mut writer, "{}", super::as_pretty_json(&json).indent(i)).unwrap(); - let printed = from_utf8(writer[]).unwrap(); + let printed = from_utf8(&writer.index(&FullRange)).unwrap(); // Check for indents at each line let lines: Vec<&str> = printed.lines().collect(); diff --git a/src/libserialize/serialize.rs b/src/libserialize/serialize.rs index a04f67f765108..edaee31462c7f 100644 --- a/src/libserialize/serialize.rs +++ b/src/libserialize/serialize.rs @@ -326,7 +326,7 @@ impl Encodable for str { impl Encodable for String { fn encode(&self, s: &mut S) -> Result<(), S::Error> { - s.emit_str(self[]) + s.emit_str(self.index(&FullRange)) } } diff --git a/src/libserialize/serialize_stage0.rs b/src/libserialize/serialize_stage0.rs index fd37bb63230af..87ff5922c1a7e 100644 --- a/src/libserialize/serialize_stage0.rs +++ b/src/libserialize/serialize_stage0.rs @@ -14,6 +14,7 @@ Core encoding and decoding interfaces. 
*/ +use std::ops::FullRange; use std::path; use std::rc::Rc; use std::cell::{Cell, RefCell}; @@ -308,7 +309,7 @@ impl> Encodable for str { impl> Encodable for String { fn encode(&self, s: &mut S) -> Result<(), E> { - s.emit_str(self[]) + s.emit_str(self.index(&FullRange)) } } diff --git a/src/libstd/failure.rs b/src/libstd/failure.rs index e48137047b0a9..50538d3e43def 100644 --- a/src/libstd/failure.rs +++ b/src/libstd/failure.rs @@ -37,7 +37,7 @@ pub fn on_fail(obj: &(Any+Send), file: &'static str, line: uint) { let msg = match obj.downcast_ref::<&'static str>() { Some(s) => *s, None => match obj.downcast_ref::() { - Some(s) => s[], + Some(s) => s.index(&FullRange), None => "Box", } }; diff --git a/src/libstd/io/buffered.rs b/src/libstd/io/buffered.rs index d590aa8419453..c54a5ffa665f6 100644 --- a/src/libstd/io/buffered.rs +++ b/src/libstd/io/buffered.rs @@ -15,7 +15,7 @@ use cmp; use io::{Reader, Writer, Stream, Buffer, DEFAULT_BUF_SIZE, IoResult}; use iter::{IteratorExt, ExactSizeIterator}; -use ops::Drop; +use ops::{Drop, Index}; use option::Option; use option::Option::{Some, None}; use result::Result::Ok; @@ -100,7 +100,7 @@ impl Buffer for BufferedReader { self.cap = try!(self.inner.read(self.buf.as_mut_slice())); self.pos = 0; } - Ok(self.buf[self.pos..self.cap]) + Ok(self.buf.index(&(self.pos..self.cap))) } fn consume(&mut self, amt: uint) { @@ -117,7 +117,7 @@ impl Reader for BufferedReader { let nread = { let available = try!(self.fill_buf()); let nread = cmp::min(available.len(), buf.len()); - slice::bytes::copy_memory(buf, available[..nread]); + slice::bytes::copy_memory(buf, available.index(&(0..nread))); nread }; self.pos += nread; @@ -171,7 +171,7 @@ impl BufferedWriter { fn flush_buf(&mut self) -> IoResult<()> { if self.pos != 0 { - let ret = self.inner.as_mut().unwrap().write(self.buf[..self.pos]); + let ret = self.inner.as_mut().unwrap().write(self.buf.index(&(0..self.pos))); self.pos = 0; ret } else { @@ -263,9 +263,9 @@ impl Writer for LineBufferedWriter { fn write(&mut self, buf: &[u8]) -> IoResult<()> { match buf.iter().rposition(|&b| b == b'\n') { Some(i) => { - try!(self.inner.write(buf[..i + 1])); + try!(self.inner.write(buf.index(&(0..(i + 1))))); try!(self.inner.flush()); - try!(self.inner.write(buf[i + 1..])); + try!(self.inner.write(buf.index(&((i + 1)..)))); Ok(()) } None => self.inner.write(buf), @@ -472,41 +472,37 @@ mod test { writer.write(&[0, 1]).unwrap(); let b: &[_] = &[]; - assert_eq!(writer.get_ref()[], b); + assert_eq!(&writer.get_ref()[], b); writer.write(&[2]).unwrap(); let b: &[_] = &[0, 1]; - assert_eq!(writer.get_ref()[], b); + assert_eq!(&writer.get_ref()[], b); writer.write(&[3]).unwrap(); - assert_eq!(writer.get_ref()[], b); + assert_eq!(&writer.get_ref()[], b); writer.flush().unwrap(); let a: &[_] = &[0, 1, 2, 3]; - assert_eq!(a, writer.get_ref()[]); + assert_eq!(a, &writer.get_ref()[]); writer.write(&[4]).unwrap(); writer.write(&[5]).unwrap(); - assert_eq!(a, writer.get_ref()[]); + assert_eq!(a, &writer.get_ref()[]); writer.write(&[6]).unwrap(); let a: &[_] = &[0, 1, 2, 3, 4, 5]; - assert_eq!(a, - writer.get_ref()[]); + assert_eq!(a, &writer.get_ref()[]); writer.write(&[7, 8]).unwrap(); let a: &[_] = &[0, 1, 2, 3, 4, 5, 6]; - assert_eq!(a, - writer.get_ref()[]); + assert_eq!(a, &writer.get_ref()[]); writer.write(&[9, 10, 11]).unwrap(); let a: &[_] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]; - assert_eq!(a, - writer.get_ref()[]); + assert_eq!(a, &writer.get_ref()[]); writer.flush().unwrap(); - assert_eq!(a, - writer.get_ref()[]); + 
assert_eq!(a, &writer.get_ref()[]); } #[test] @@ -514,10 +510,10 @@ mod test { let mut w = BufferedWriter::with_capacity(3, Vec::new()); w.write(&[0, 1]).unwrap(); let a: &[_] = &[]; - assert_eq!(a, w.get_ref()[]); + assert_eq!(a, &w.get_ref()[]); let w = w.into_inner(); let a: &[_] = &[0, 1]; - assert_eq!(a, w[]); + assert_eq!(a, w.index(&FullRange)); } // This is just here to make sure that we don't infinite loop in the @@ -559,21 +555,21 @@ mod test { let mut writer = LineBufferedWriter::new(Vec::new()); writer.write(&[0]).unwrap(); let b: &[_] = &[]; - assert_eq!(writer.get_ref()[], b); + assert_eq!(&writer.get_ref()[], b); writer.write(&[1]).unwrap(); - assert_eq!(writer.get_ref()[], b); + assert_eq!(&writer.get_ref()[], b); writer.flush().unwrap(); let b: &[_] = &[0, 1]; - assert_eq!(writer.get_ref()[], b); + assert_eq!(&writer.get_ref()[], b); writer.write(&[0, b'\n', 1, b'\n', 2]).unwrap(); let b: &[_] = &[0, 1, 0, b'\n', 1, b'\n']; - assert_eq!(writer.get_ref()[], b); + assert_eq!(&writer.get_ref()[], b); writer.flush().unwrap(); let b: &[_] = &[0, 1, 0, b'\n', 1, b'\n', 2]; - assert_eq!(writer.get_ref()[], b); + assert_eq!(&writer.get_ref()[], b); writer.write(&[3, b'\n']).unwrap(); let b: &[_] = &[0, 1, 0, b'\n', 1, b'\n', 2, 3, b'\n']; - assert_eq!(writer.get_ref()[], b); + assert_eq!(&writer.get_ref()[], b); } #[test] @@ -614,14 +610,14 @@ mod test { #[test] fn read_char_buffered() { let buf = [195u8, 159u8]; - let mut reader = BufferedReader::with_capacity(1, buf[]); + let mut reader = BufferedReader::with_capacity(1, buf.index(&FullRange)); assert_eq!(reader.read_char(), Ok('ß')); } #[test] fn test_chars() { let buf = [195u8, 159u8, b'a']; - let mut reader = BufferedReader::with_capacity(1, buf[]); + let mut reader = BufferedReader::with_capacity(1, buf.index(&FullRange)); let mut it = reader.chars(); assert_eq!(it.next(), Some(Ok('ß'))); assert_eq!(it.next(), Some(Ok('a'))); diff --git a/src/libstd/io/comm_adapters.rs b/src/libstd/io/comm_adapters.rs index f47f6237b7291..bcd0c09b77d61 100644 --- a/src/libstd/io/comm_adapters.rs +++ b/src/libstd/io/comm_adapters.rs @@ -13,6 +13,7 @@ use cmp; use sync::mpsc::{Sender, Receiver}; use io; use option::Option::{None, Some}; +use ops::Index; use result::Result::{Ok, Err}; use slice::{bytes, SliceExt}; use super::{Buffer, Reader, Writer, IoResult}; @@ -90,7 +91,7 @@ impl Reader for ChanReader { Some(src) => { let dst = buf.slice_from_mut(num_read); let count = cmp::min(src.len(), dst.len()); - bytes::copy_memory(dst, src[..count]); + bytes::copy_memory(dst, src.index(&(0..count))); count }, None => 0, diff --git a/src/libstd/io/fs.rs b/src/libstd/io/fs.rs index 4691c06c1de16..0fffb2fafbe6c 100644 --- a/src/libstd/io/fs.rs +++ b/src/libstd/io/fs.rs @@ -889,7 +889,7 @@ mod test { let mut read_buf = [0; 1028]; let read_str = match check!(read_stream.read(&mut read_buf)) { -1|0 => panic!("shouldn't happen"), - n => str::from_utf8(read_buf[..n]).unwrap().to_string() + n => str::from_utf8(read_buf.index(&(0..n))).unwrap().to_string() }; assert_eq!(read_str.as_slice(), message); } diff --git a/src/libstd/io/mem.rs b/src/libstd/io/mem.rs index 5c17644a1ace7..9e6af86925b8a 100644 --- a/src/libstd/io/mem.rs +++ b/src/libstd/io/mem.rs @@ -13,6 +13,7 @@ //! Readers and Writers for in-memory buffers use cmp::min; +use prelude::v1::Index; use option::Option::None; use result::Result::{Err, Ok}; use io; @@ -159,7 +160,7 @@ impl Reader for MemReader { let write_len = min(buf.len(), self.buf.len() - self.pos); { - let input = self.buf[self.pos.. 
self.pos + write_len]; + let input = self.buf.index(&(self.pos.. (self.pos + write_len))); let output = buf.slice_to_mut(write_len); assert_eq!(input.len(), output.len()); slice::bytes::copy_memory(output, input); @@ -187,7 +188,7 @@ impl Buffer for MemReader { #[inline] fn fill_buf<'a>(&'a mut self) -> IoResult<&'a [u8]> { if self.pos < self.buf.len() { - Ok(self.buf[self.pos..]) + Ok(self.buf.index(&(self.pos..))) } else { Err(io::standard_error(io::EndOfFile)) } @@ -204,7 +205,7 @@ impl<'a> Reader for &'a [u8] { let write_len = min(buf.len(), self.len()); { - let input = self[..write_len]; + let input = self.index(&(0..write_len)); let output = buf.slice_to_mut(write_len); slice::bytes::copy_memory(output, input); } @@ -227,7 +228,7 @@ impl<'a> Buffer for &'a [u8] { #[inline] fn consume(&mut self, amt: uint) { - *self = self[amt..]; + *self = self.index(&(amt..)); } } @@ -286,7 +287,7 @@ impl<'a> Writer for BufWriter<'a> { Ok(()) } else { - slice::bytes::copy_memory(dst, src[..dst_len]); + slice::bytes::copy_memory(dst, src.index(&(0..dst_len))); self.pos += dst_len; @@ -349,7 +350,7 @@ impl<'a> Reader for BufReader<'a> { let write_len = min(buf.len(), self.buf.len() - self.pos); { - let input = self.buf[self.pos.. self.pos + write_len]; + let input = self.buf.index(&(self.pos.. (self.pos + write_len))); let output = buf.slice_to_mut(write_len); assert_eq!(input.len(), output.len()); slice::bytes::copy_memory(output, input); @@ -377,7 +378,7 @@ impl<'a> Buffer for BufReader<'a> { #[inline] fn fill_buf(&mut self) -> IoResult<&[u8]> { if self.pos < self.buf.len() { - Ok(self.buf[self.pos..]) + Ok(self.buf.index(&(self.pos..))) } else { Err(io::standard_error(io::EndOfFile)) } @@ -390,9 +391,9 @@ impl<'a> Buffer for BufReader<'a> { #[cfg(test)] mod test { extern crate "test" as test_crate; - use prelude::v1::*; - - use io::{SeekSet, SeekCur, SeekEnd}; + use io::{SeekSet, SeekCur, SeekEnd, Reader, Writer, Seek}; + use prelude::v1::{Ok, Err, range, Vec, Buffer, AsSlice, SliceExt}; + use prelude::v1::{IteratorExt, Index}; use io; use iter::repeat; use self::test_crate::Bencher; @@ -498,7 +499,7 @@ mod test { assert_eq!(buf, b); assert_eq!(reader.read(&mut buf), Ok(3)); let b: &[_] = &[5, 6, 7]; - assert_eq!(buf[0..3], b); + assert_eq!(buf.index(&(0..3)), b); assert!(reader.read(&mut buf).is_err()); let mut reader = MemReader::new(vec!(0, 1, 2, 3, 4, 5, 6, 7)); assert_eq!(reader.read_until(3).unwrap(), vec!(0, 1, 2, 3)); @@ -524,7 +525,7 @@ mod test { assert_eq!(buf.as_slice(), b); assert_eq!(reader.read(&mut buf), Ok(3)); let b: &[_] = &[5, 6, 7]; - assert_eq!(buf[0..3], b); + assert_eq!(buf.index(&(0..3)), b); assert!(reader.read(&mut buf).is_err()); let mut reader = &mut in_buf.as_slice(); assert_eq!(reader.read_until(3).unwrap(), vec!(0, 1, 2, 3)); @@ -551,7 +552,7 @@ mod test { assert_eq!(buf, b); assert_eq!(reader.read(&mut buf), Ok(3)); let b: &[_] = &[5, 6, 7]; - assert_eq!(buf[0..3], b); + assert_eq!(buf.index(&(0..3)), b); assert!(reader.read(&mut buf).is_err()); let mut reader = BufReader::new(in_buf.as_slice()); assert_eq!(reader.read_until(3).unwrap(), vec!(0, 1, 2, 3)); diff --git a/src/libstd/io/mod.rs b/src/libstd/io/mod.rs index 5bef473db990c..465c4f9c5c7d2 100644 --- a/src/libstd/io/mod.rs +++ b/src/libstd/io/mod.rs @@ -234,7 +234,7 @@ use int; use iter::{Iterator, IteratorExt}; use kinds::Sized; use mem::transmute; -use ops::FnOnce; +use ops::{FnOnce, Index}; use option::Option; use option::Option::{Some, None}; use os; @@ -1068,7 +1068,7 @@ pub trait Writer { fn 
write_char(&mut self, c: char) -> IoResult<()> { let mut buf = [0u8; 4]; let n = c.encode_utf8(buf.as_mut_slice()).unwrap_or(0); - self.write(buf[..n]) + self.write(buf.index(&(0..n))) } /// Write the result of passing n through `int::to_str_bytes`. @@ -1453,7 +1453,7 @@ pub trait Buffer: Reader { }; match available.iter().position(|&b| b == byte) { Some(i) => { - res.push_all(available[..i + 1]); + res.push_all(available.index(&(0..(i + 1)))); used = i + 1; break } @@ -1492,7 +1492,7 @@ pub trait Buffer: Reader { } } } - match str::from_utf8(buf[..width]).ok() { + match str::from_utf8(buf.index(&(0..width))).ok() { Some(s) => Ok(s.char_at(0)), None => Err(standard_error(InvalidInput)) } diff --git a/src/libstd/io/net/ip.rs b/src/libstd/io/net/ip.rs index d398b61fe64cf..6cb2463fcbca5 100644 --- a/src/libstd/io/net/ip.rs +++ b/src/libstd/io/net/ip.rs @@ -22,7 +22,7 @@ use fmt; use io::{self, IoResult, IoError}; use io::net; use iter::{Iterator, IteratorExt}; -use ops::{FnOnce, FnMut}; +use ops::{FnOnce, FnMut, Index}; use option::Option; use option::Option::{None, Some}; use result::Result::{Ok, Err}; @@ -313,7 +313,7 @@ impl<'a> Parser<'a> { let mut tail = [0u16; 8]; let (tail_size, _) = read_groups(self, &mut tail, 8 - head_size); - Some(ipv6_addr_from_head_tail(head[..head_size], tail[..tail_size])) + Some(ipv6_addr_from_head_tail(head.index(&(0..head_size)), tail.index(&(0..tail_size)))) } fn read_ipv6_addr(&mut self) -> Option { diff --git a/src/libstd/io/process.rs b/src/libstd/io/process.rs index ea232ad0c3f1b..5f77ab38d7453 100644 --- a/src/libstd/io/process.rs +++ b/src/libstd/io/process.rs @@ -752,12 +752,12 @@ impl Drop for Process { #[cfg(test)] mod tests { - use prelude::v1::*; - + use io::{Truncate, Write, TimedOut, timer, process, FileNotFound}; + use prelude::v1::{Ok, Err, range, drop, Some, None, Vec}; + use prelude::v1::{Path, String, Reader, Writer, Clone}; + use prelude::v1::{SliceExt, Str, StrExt, AsSlice, ToString, GenericPath}; use io::fs::PathExtensions; - use io::process; use io::timer::*; - use io::{Truncate, Write, TimedOut, timer, FileNotFound}; use rt::running_on_valgrind; use str; use super::{CreatePipe}; diff --git a/src/libstd/io/util.rs b/src/libstd/io/util.rs index 86fa68d63ac89..c0254a3e7a205 100644 --- a/src/libstd/io/util.rs +++ b/src/libstd/io/util.rs @@ -59,7 +59,7 @@ impl Reader for LimitReader { impl Buffer for LimitReader { fn fill_buf<'a>(&'a mut self) -> io::IoResult<&'a [u8]> { let amt = try!(self.inner.fill_buf()); - let buf = amt[..cmp::min(amt.len(), self.limit)]; + let buf = amt.index(&(0..cmp::min(amt.len(), self.limit))); if buf.len() == 0 { Err(io::standard_error(io::EndOfFile)) } else { @@ -220,7 +220,7 @@ impl TeeReader { impl Reader for TeeReader { fn read(&mut self, buf: &mut [u8]) -> io::IoResult { self.reader.read(buf).and_then(|len| { - self.writer.write(buf[mut ..len]).map(|()| len) + self.writer.write(buf.index_mut(&(0..len))).map(|()| len) }) } } @@ -234,7 +234,7 @@ pub fn copy(r: &mut R, w: &mut W) -> io::IoResult<()> { Err(ref e) if e.kind == io::EndOfFile => return Ok(()), Err(e) => return Err(e), }; - try!(w.write(buf[..len])); + try!(w.write(buf.index(&(0..len)))); } } diff --git a/src/libstd/path/mod.rs b/src/libstd/path/mod.rs index b020164158cff..87188c0d4a25c 100644 --- a/src/libstd/path/mod.rs +++ b/src/libstd/path/mod.rs @@ -68,6 +68,7 @@ use fmt; use iter::IteratorExt; use option::Option; use option::Option::{None, Some}; +use prelude::v1::{FullRange, Index}; use str; use str::StrExt; use string::{String, CowString}; 
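(Illustrative aside, not part of the patch.) The libstd hunks here and below are the same mechanical rewrite applied throughout the series: the unstable slicing sugar `x[]` / `x[a..b]` becomes a direct call to the `Index` methods, which is why the various `use ...::{Index, FullRange}` imports are being added. A minimal sketch of the before/after spelling, assuming the in-tree libstd at this revision (ranges are passed to `index` by reference):

    use std::ops::{Index, FullRange};

    fn main() {
        let v = vec![1i, 2, 3, 4];
        let tail = v.index(&(1u..));    // previously written `v[1..]`
        let all = v.index(&FullRange);  // previously written `v[]`
        println!("{} {}", tail.len(), all.len());
    }

Judging by the `(KILLME)` special case removed in the typeck hunks earlier, the explicit call form looks like a transitional spelling rather than the intended end state.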
@@ -351,7 +352,7 @@ pub trait GenericPath: Clone + GenericPathUnsafe { match name.rposition_elem(&dot) { None | Some(0) => name, Some(1) if name == b".." => name, - Some(pos) => name[..pos] + Some(pos) => name.index(&(0..pos)) } }) } @@ -398,7 +399,7 @@ pub trait GenericPath: Clone + GenericPathUnsafe { match name.rposition_elem(&dot) { None | Some(0) => None, Some(1) if name == b".." => None, - Some(pos) => Some(name[pos+1..]) + Some(pos) => Some(name.index(&((pos+1)..))) } } } @@ -474,7 +475,7 @@ pub trait GenericPath: Clone + GenericPathUnsafe { let extlen = extension.container_as_bytes().len(); match (name.rposition_elem(&dot), extlen) { (None, 0) | (Some(0), 0) => None, - (Some(idx), 0) => Some(name[..idx].to_vec()), + (Some(idx), 0) => Some(name.index(&(0..idx)).to_vec()), (idx, extlen) => { let idx = match idx { None | Some(0) => name.len(), @@ -483,7 +484,7 @@ pub trait GenericPath: Clone + GenericPathUnsafe { let mut v; v = Vec::with_capacity(idx + extlen + 1); - v.push_all(name[..idx]); + v.push_all(name.index(&(0..idx))); v.push(dot); v.push_all(extension.container_as_bytes()); Some(v) @@ -869,7 +870,7 @@ impl BytesContainer for String { } #[inline] fn container_as_str(&self) -> Option<&str> { - Some(self[]) + Some(self.index(&FullRange)) } #[inline] fn is_str(_: Option<&String>) -> bool { true } @@ -885,7 +886,7 @@ impl BytesContainer for [u8] { impl BytesContainer for Vec { #[inline] fn container_as_bytes(&self) -> &[u8] { - self[] + self.index(&FullRange) } } diff --git a/src/libstd/path/posix.rs b/src/libstd/path/posix.rs index d9981ace0301e..7f37d3b23c8f0 100644 --- a/src/libstd/path/posix.rs +++ b/src/libstd/path/posix.rs @@ -16,6 +16,7 @@ use hash; use io::Writer; use iter::{AdditiveIterator, Extend}; use iter::{Iterator, IteratorExt, Map}; +use prelude::v1::Index; use kinds::Sized; use option::Option::{self, Some, None}; use slice::{AsSlice, Split, SliceExt, SliceConcatExt}; @@ -119,7 +120,7 @@ impl GenericPathUnsafe for Path { None => { self.repr = Path::normalize(filename); } - Some(idx) if self.repr[idx+1..] == b".." => { + Some(idx) if self.repr.index(&((idx+1)..)) == b".." => { let mut v = Vec::with_capacity(self.repr.len() + 1 + filename.len()); v.push_all(self.repr.as_slice()); v.push(SEP_BYTE); @@ -129,7 +130,7 @@ impl GenericPathUnsafe for Path { } Some(idx) => { let mut v = Vec::with_capacity(idx + 1 + filename.len()); - v.push_all(self.repr[..idx+1]); + v.push_all(self.repr.index(&(0..(idx+1)))); v.push_all(filename); // FIXME: this is slow self.repr = Path::normalize(v.as_slice()); @@ -170,9 +171,9 @@ impl GenericPath for Path { match self.sepidx { None if b".." == self.repr => self.repr.as_slice(), None => dot_static, - Some(0) => self.repr[..1], - Some(idx) if self.repr[idx+1..] == b".." => self.repr.as_slice(), - Some(idx) => self.repr[..idx] + Some(0) => self.repr.index(&(0..1)), + Some(idx) if self.repr.index(&((idx+1)..)) == b".." => self.repr.as_slice(), + Some(idx) => self.repr.index(&(0..idx)) } } @@ -181,9 +182,9 @@ impl GenericPath for Path { None if b"." == self.repr || b".." == self.repr => None, None => Some(self.repr.as_slice()), - Some(idx) if self.repr[idx+1..] == b".." => None, - Some(0) if self.repr[1..].is_empty() => None, - Some(idx) => Some(self.repr[idx+1..]) + Some(idx) if self.repr.index(&((idx+1)..)) == b".." 
=> None, + Some(0) if self.repr.index(&(1..)).is_empty() => None, + Some(idx) => Some(self.repr.index(&((idx+1)..))) } } @@ -325,7 +326,7 @@ impl Path { // borrowck is being very picky let val = { let is_abs = !v.as_slice().is_empty() && v.as_slice()[0] == SEP_BYTE; - let v_ = if is_abs { v.as_slice()[1..] } else { v.as_slice() }; + let v_ = if is_abs { v.as_slice().index(&(1..)) } else { v.as_slice() }; let comps = normalize_helper(v_, is_abs); match comps { None => None, @@ -364,7 +365,7 @@ impl Path { /// A path of "/" yields no components. A path of "." yields one component. pub fn components<'a>(&'a self) -> Components<'a> { let v = if self.repr[0] == SEP_BYTE { - self.repr[1..] + self.repr.index(&(1..)) } else { self.repr.as_slice() }; let is_sep_byte: fn(&u8) -> bool = is_sep_byte; // coerce to fn ptr let mut ret = v.split(is_sep_byte); diff --git a/src/libstd/path/windows.rs b/src/libstd/path/windows.rs index 4b5d793355b60..97545bc202278 100644 --- a/src/libstd/path/windows.rs +++ b/src/libstd/path/windows.rs @@ -24,6 +24,7 @@ use iter::{AdditiveIterator, Extend}; use iter::{Iterator, IteratorExt, Map, repeat}; use mem; use option::Option::{self, Some, None}; +use prelude::v1::{FullRange, Index}; use slice::{SliceExt, SliceConcatExt}; use str::{SplitTerminator, FromStr, StrExt}; use string::{String, ToString}; @@ -165,30 +166,30 @@ impl GenericPathUnsafe for Path { s.push_str(".."); s.push(SEP); s.push_str(filename); - self.update_normalized(s[]); + self.update_normalized(s.index(&FullRange)); } None => { self.update_normalized(filename); } - Some((_,idxa,end)) if self.repr[idxa..end] == ".." => { + Some((_,idxa,end)) if self.repr.index(&(idxa..end)) == ".." => { let mut s = String::with_capacity(end + 1 + filename.len()); - s.push_str(self.repr[0..end]); + s.push_str(self.repr.index(&(0..end))); s.push(SEP); s.push_str(filename); - self.update_normalized(s[]); + self.update_normalized(s.index(&FullRange)); } Some((idxb,idxa,_)) if self.prefix == Some(DiskPrefix) && idxa == self.prefix_len() => { let mut s = String::with_capacity(idxb + filename.len()); - s.push_str(self.repr[0..idxb]); + s.push_str(self.repr.index(&(0..idxb))); s.push_str(filename); - self.update_normalized(s[]); + self.update_normalized(s.index(&FullRange)); } Some((idxb,_,_)) => { let mut s = String::with_capacity(idxb + 1 + filename.len()); - s.push_str(self.repr[0..idxb]); + s.push_str(self.repr.index(&(0..idxb))); s.push(SEP); s.push_str(filename); - self.update_normalized(s[]); + self.update_normalized(s.index(&FullRange)); } } } @@ -207,12 +208,12 @@ impl GenericPathUnsafe for Path { let path = path.container_as_str().unwrap(); fn is_vol_abs(path: &str, prefix: Option) -> bool { // assume prefix is Some(DiskPrefix) - let rest = path[prefix_len(prefix)..]; + let rest = path.index(&(prefix_len(prefix)..)); !rest.is_empty() && rest.as_bytes()[0].is_ascii() && is_sep(rest.as_bytes()[0] as char) } fn shares_volume(me: &Path, path: &str) -> bool { // path is assumed to have a prefix of Some(DiskPrefix) - let repr = me.repr[]; + let repr = me.repr.index(&FullRange); match me.prefix { Some(DiskPrefix) => { repr.as_bytes()[0] == path.as_bytes()[0].to_ascii_uppercase() @@ -244,7 +245,7 @@ impl GenericPathUnsafe for Path { else { None }; let pathlen = path_.as_ref().map_or(path.len(), |p| p.len()); let mut s = String::with_capacity(me.repr.len() + 1 + pathlen); - s.push_str(me.repr[]); + s.push_str(me.repr.index(&FullRange)); let plen = me.prefix_len(); // if me is "C:" we don't want to add a path separator match 
me.prefix { @@ -256,9 +257,9 @@ impl GenericPathUnsafe for Path { } match path_ { None => s.push_str(path), - Some(p) => s.push_str(p[]), + Some(p) => s.push_str(p.index(&FullRange)), }; - me.update_normalized(s[]) + me.update_normalized(s.index(&FullRange)) } if !path.is_empty() { @@ -266,7 +267,7 @@ impl GenericPathUnsafe for Path { match prefix { Some(DiskPrefix) if !is_vol_abs(path, prefix) && shares_volume(self, path) => { // cwd-relative path, self is on the same volume - append_path(self, path[prefix_len(prefix)..]); + append_path(self, path.index(&(prefix_len(prefix)..))); } Some(_) => { // absolute path, or cwd-relative and self is not same volume @@ -312,7 +313,7 @@ impl GenericPath for Path { /// Always returns a `Some` value. #[inline] fn as_str<'a>(&'a self) -> Option<&'a str> { - Some(self.repr[]) + Some(self.repr.index(&FullRange)) } #[inline] @@ -334,17 +335,21 @@ impl GenericPath for Path { /// Always returns a `Some` value. fn dirname_str<'a>(&'a self) -> Option<&'a str> { Some(match self.sepidx_or_prefix_len() { - None if ".." == self.repr => self.repr[], + None if ".." == self.repr => self.repr.index(&FullRange), None => ".", - Some((_,idxa,end)) if self.repr[idxa..end] == ".." => self.repr[], - Some((idxb,_,end)) if self.repr[idxb..end] == "\\" => self.repr[], - Some((0,idxa,_)) => self.repr[0..idxa], + Some((_,idxa,end)) if self.repr.index(&(idxa..end)) == ".." => { + self.repr.index(&FullRange) + } + Some((idxb,_,end)) if self.repr.index(&(idxb..end)) == "\\" => { + self.repr.index(&FullRange) + } + Some((0,idxa,_)) => self.repr.index(&(0..idxa)), Some((idxb,idxa,_)) => { match self.prefix { Some(DiskPrefix) | Some(VerbatimDiskPrefix) if idxb == self.prefix_len() => { - self.repr[0..idxa] + self.repr.index(&(0..idxa)) } - _ => self.repr[0..idxb] + _ => self.repr.index(&(0..idxb)) } } }) @@ -358,13 +363,13 @@ impl GenericPath for Path { /// See `GenericPath::filename_str` for info. /// Always returns a `Some` value if `filename` returns a `Some` value. fn filename_str<'a>(&'a self) -> Option<&'a str> { - let repr = self.repr[]; + let repr = self.repr.index(&FullRange); match self.sepidx_or_prefix_len() { None if "." == repr || ".." == repr => None, None => Some(repr), - Some((_,idxa,end)) if repr[idxa..end] == ".." => None, + Some((_,idxa,end)) if repr.index(&(idxa..end)) == ".." 
=> None, Some((_,idxa,end)) if idxa == end => None, - Some((_,idxa,end)) => Some(repr[idxa..end]) + Some((_,idxa,end)) => Some(repr.index(&(idxa..end))) } } @@ -396,7 +401,7 @@ impl GenericPath for Path { true } Some((idxb,idxa,end)) if idxb == idxa && idxb == end => false, - Some((idxb,_,end)) if self.repr[idxb..end] == "\\" => false, + Some((idxb,_,end)) if self.repr.index(&(idxb..end)) == "\\" => false, Some((idxb,idxa,_)) => { let trunc = match self.prefix { Some(DiskPrefix) | Some(VerbatimDiskPrefix) | None => { @@ -416,15 +421,15 @@ impl GenericPath for Path { if self.prefix.is_some() { Some(Path::new(match self.prefix { Some(DiskPrefix) if self.is_absolute() => { - self.repr[0..self.prefix_len()+1] + self.repr.index(&(0..(self.prefix_len()+1))) } Some(VerbatimDiskPrefix) => { - self.repr[0..self.prefix_len()+1] + self.repr.index(&(0..(self.prefix_len()+1))) } - _ => self.repr[0..self.prefix_len()] + _ => self.repr.index(&(0..self.prefix_len())) })) } else if is_vol_relative(self) { - Some(Path::new(self.repr[0..1])) + Some(Path::new(self.repr.index(&(0..1)))) } else { None } @@ -443,7 +448,7 @@ impl GenericPath for Path { fn is_absolute(&self) -> bool { match self.prefix { Some(DiskPrefix) => { - let rest = self.repr[self.prefix_len()..]; + let rest = self.repr.index(&(self.prefix_len()..)); rest.len() > 0 && rest.as_bytes()[0] == SEP_BYTE } Some(_) => true, @@ -618,15 +623,15 @@ impl Path { /// Does not distinguish between absolute and cwd-relative paths, e.g. /// C:\foo and C:foo. pub fn str_components<'a>(&'a self) -> StrComponents<'a> { - let repr = self.repr[]; + let repr = self.repr.index(&FullRange); let s = match self.prefix { Some(_) => { let plen = self.prefix_len(); if repr.len() > plen && repr.as_bytes()[plen] == SEP_BYTE { - repr[plen+1..] - } else { repr[plen..] 
} + repr.index(&((plen+1)..)) + } else { repr.index(&(plen..)) } } - None if repr.as_bytes()[0] == SEP_BYTE => repr[1..], + None if repr.as_bytes()[0] == SEP_BYTE => repr.index(&(1..)), None => repr }; let some: fn(&'a str) -> Option<&'a str> = Some; // coerce to fn ptr @@ -646,8 +651,8 @@ impl Path { } fn equiv_prefix(&self, other: &Path) -> bool { - let s_repr = self.repr[]; - let o_repr = other.repr[]; + let s_repr = self.repr.index(&FullRange); + let o_repr = other.repr.index(&FullRange); match (self.prefix, other.prefix) { (Some(DiskPrefix), Some(VerbatimDiskPrefix)) => { self.is_absolute() && @@ -664,14 +669,14 @@ impl Path { o_repr.as_bytes()[4].to_ascii_lowercase() } (Some(UNCPrefix(_,_)), Some(VerbatimUNCPrefix(_,_))) => { - s_repr[2..self.prefix_len()] == o_repr[8..other.prefix_len()] + s_repr.index(&(2..self.prefix_len())) == o_repr.index(&(8..other.prefix_len())) } (Some(VerbatimUNCPrefix(_,_)), Some(UNCPrefix(_,_))) => { - s_repr[8..self.prefix_len()] == o_repr[2..other.prefix_len()] + s_repr.index(&(8..self.prefix_len())) == o_repr.index(&(2..other.prefix_len())) } (None, None) => true, (a, b) if a == b => { - s_repr[0..self.prefix_len()] == o_repr[0..other.prefix_len()] + s_repr.index(&(0..self.prefix_len())) == o_repr.index(&(0..other.prefix_len())) } _ => false } @@ -725,7 +730,7 @@ impl Path { match prefix.unwrap() { DiskPrefix => { let len = prefix_len(prefix) + is_abs as uint; - let mut s = String::from_str(s[0..len]); + let mut s = String::from_str(s.index(&(0..len))); unsafe { let v = s.as_mut_vec(); v[0] = (*v)[0].to_ascii_uppercase(); @@ -740,7 +745,7 @@ impl Path { } VerbatimDiskPrefix => { let len = prefix_len(prefix) + is_abs as uint; - let mut s = String::from_str(s[0..len]); + let mut s = String::from_str(s.index(&(0..len))); unsafe { let v = s.as_mut_vec(); v[4] = (*v)[4].to_ascii_uppercase(); @@ -750,14 +755,14 @@ impl Path { _ => { let plen = prefix_len(prefix); if s.len() > plen { - Some(String::from_str(s[0..plen])) + Some(String::from_str(s.index(&(0..plen)))) } else { None } } } } else if is_abs && comps.is_empty() { Some(repeat(SEP).take(1).collect()) } else { - let prefix_ = s[0..prefix_len(prefix)]; + let prefix_ = s.index(&(0..prefix_len(prefix))); let n = prefix_.len() + if is_abs { comps.len() } else { comps.len() - 1} + comps.iter().map(|v| v.len()).sum(); @@ -768,15 +773,15 @@ impl Path { s.push(':'); } Some(VerbatimDiskPrefix) => { - s.push_str(prefix_[0..4]); + s.push_str(prefix_.index(&(0..4))); s.push(prefix_.as_bytes()[4].to_ascii_uppercase() as char); - s.push_str(prefix_[5..]); + s.push_str(prefix_.index(&(5..))); } Some(UNCPrefix(a,b)) => { s.push_str("\\\\"); - s.push_str(prefix_[2..a+2]); + s.push_str(prefix_.index(&(2..(a+2)))); s.push(SEP); - s.push_str(prefix_[3+a..3+a+b]); + s.push_str(prefix_.index(&((3+a)..(3+a+b)))); } Some(_) => s.push_str(prefix_), None => () @@ -801,8 +806,8 @@ impl Path { fn update_sepidx(&mut self) { let s = if self.has_nonsemantic_trailing_slash() { - self.repr[0..self.repr.len()-1] - } else { self.repr[] }; + self.repr.index(&(0..(self.repr.len()-1))) + } else { self.repr.index(&FullRange) }; let sep_test: fn(char) -> bool = if !prefix_is_verbatim(self.prefix) { is_sep } else { @@ -881,17 +886,17 @@ pub fn is_verbatim(path: &Path) -> bool { /// non-verbatim, the non-verbatim version is returned. /// Otherwise, None is returned. 
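Throughout these path hunks the rewrite is mechanical: every use of the feature-gated slicing sugar on the path's string representation becomes an explicit call to Index::index with a borrowed range value. A minimal sketch of the pattern, in the same pre-1.0 dialect the patch targets (the helper below is illustrative only, not code from the tree, and assumes idx points at a separator):

use std::ops::Index;

// `repr[..idx]`   becomes `repr.index(&(0..idx))`
// `repr[idx+1..]` becomes `repr.index(&((idx+1)..))`
fn split_at_sep(repr: &str, idx: uint) -> (&str, &str) {
    // borrow the range and hand it to Index::index, exactly as the hunks above do
    (repr.index(&(0..idx)), repr.index(&((idx + 1)..)))
}
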
pub fn make_non_verbatim(path: &Path) -> Option { - let repr = path.repr[]; + let repr = path.repr.index(&FullRange); let new_path = match path.prefix { Some(VerbatimPrefix(_)) | Some(DeviceNSPrefix(_)) => return None, Some(UNCPrefix(_,_)) | Some(DiskPrefix) | None => return Some(path.clone()), Some(VerbatimDiskPrefix) => { // \\?\D:\ - Path::new(repr[4..]) + Path::new(repr.index(&(4..))) } Some(VerbatimUNCPrefix(_,_)) => { // \\?\UNC\server\share - Path::new(format!(r"\{}", repr[7..])) + Path::new(format!(r"\{}", repr.index(&(7..)))) } }; if new_path.prefix.is_none() { @@ -900,8 +905,8 @@ pub fn make_non_verbatim(path: &Path) -> Option { return None; } // now ensure normalization didn't change anything - if repr[path.prefix_len()..] == - new_path.repr[new_path.prefix_len()..] { + if repr.index(&(path.prefix_len()..)) == + new_path.repr.index(&(new_path.prefix_len()..)) { Some(new_path) } else { None @@ -966,13 +971,13 @@ pub enum PathPrefix { fn parse_prefix<'a>(mut path: &'a str) -> Option { if path.starts_with("\\\\") { // \\ - path = path[2..]; + path = path.index(&(2..)); if path.starts_with("?\\") { // \\?\ - path = path[2..]; + path = path.index(&(2..)); if path.starts_with("UNC\\") { // \\?\UNC\server\share - path = path[4..]; + path = path.index(&(4..)); let (idx_a, idx_b) = match parse_two_comps(path, is_sep_verbatim) { Some(x) => x, None => (path.len(), 0) @@ -993,7 +998,7 @@ fn parse_prefix<'a>(mut path: &'a str) -> Option { } } else if path.starts_with(".\\") { // \\.\path - path = path[2..]; + path = path.index(&(2..)); let idx = path.find('\\').unwrap_or(path.len()); return Some(DeviceNSPrefix(idx)); } @@ -1018,7 +1023,7 @@ fn parse_prefix<'a>(mut path: &'a str) -> Option { None => return None, Some(x) => x }; - path = path[idx_a+1..]; + path = path.index(&((idx_a+1)..)); let idx_b = path.find(f).unwrap_or(path.len()); Some((idx_a, idx_b)) } @@ -1032,8 +1037,8 @@ fn normalize_helper<'a>(s: &'a str, prefix: Option) -> (bool, Option is_sep_verbatim }; let is_abs = s.len() > prefix_len(prefix) && f(s.char_at(prefix_len(prefix))); - let s_ = s[prefix_len(prefix)..]; - let s_ = if is_abs { s_[1..] } else { s_ }; + let s_ = s.index(&(prefix_len(prefix)..)); + let s_ = if is_abs { s_.index(&(1..)) } else { s_ }; if is_abs && s_.is_empty() { return (is_abs, match prefix { diff --git a/src/libstd/prelude/v1.rs b/src/libstd/prelude/v1.rs index 9e9a483e1a5e3..d0dcf77002ff4 100644 --- a/src/libstd/prelude/v1.rs +++ b/src/libstd/prelude/v1.rs @@ -14,7 +14,8 @@ // Reexported core operators #[stable] #[doc(no_inline)] pub use kinds::{Copy, Send, Sized, Sync}; -#[stable] #[doc(no_inline)] pub use ops::{Drop, Fn, FnMut, FnOnce}; +#[stable] #[doc(no_inline)] pub use ops::{Drop, Fn, FnMut, FnOnce, FullRange}; +#[unstable] #[doc(no_inline)] pub use ops::{Index, IndexMut}; // Reexported functions #[stable] #[doc(no_inline)] pub use mem::drop; diff --git a/src/libstd/rt/unwind.rs b/src/libstd/rt/unwind.rs index 71169386c186a..fd84f22094282 100644 --- a/src/libstd/rt/unwind.rs +++ b/src/libstd/rt/unwind.rs @@ -544,7 +544,7 @@ fn begin_unwind_inner(msg: Box, file_line: &(&'static str, uint)) -> // MAX_CALLBACKS, so we're sure to clamp it as necessary. 
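The prelude change above is what keeps most of these call sites compiling: `.index(...)` is ordinary method syntax, so the `Index` trait (and the `FullRange` marker type) must be in scope wherever it is used. The new `prelude::v1` re-exports make `Index`, `IndexMut`, and `FullRange` available to code that imports the prelude, and several files in this patch add explicit `use` lines for the same items (for example `use prelude::v1::Index;` in posix.rs and `use core::ops::Index;` in the backtrace code below). A hedged sketch of the full-range form, mirroring expressions that already appear in this patch rather than defining new API:

use std::ops::{FullRange, Index};

// `owned[]` (full-range slicing sugar) becomes `owned.index(&FullRange)`,
// borrowing the whole Vec<u8> as a byte slice.
fn as_bytes_slice(owned: &Vec<u8>) -> &[u8] {
    owned.index(&FullRange)
}
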
let callbacks = { let amt = CALLBACK_CNT.load(Ordering::SeqCst); - CALLBACKS[..cmp::min(amt, MAX_CALLBACKS)] + CALLBACKS.index(&(0..cmp::min(amt, MAX_CALLBACKS))) }; for cb in callbacks.iter() { match cb.load(Ordering::SeqCst) { diff --git a/src/libstd/rt/util.rs b/src/libstd/rt/util.rs index bc01ce926f8bc..59f654a95caa6 100644 --- a/src/libstd/rt/util.rs +++ b/src/libstd/rt/util.rs @@ -131,7 +131,7 @@ pub fn abort(args: fmt::Arguments) -> ! { impl<'a> fmt::Writer for BufWriter<'a> { fn write_str(&mut self, bytes: &str) -> fmt::Result { let left = self.buf.slice_from_mut(self.pos); - let to_write = bytes.as_bytes()[..cmp::min(bytes.len(), left.len())]; + let to_write = bytes.as_bytes().index(&(0..cmp::min(bytes.len(), left.len()))); slice::bytes::copy_memory(left, to_write); self.pos += to_write.len(); Ok(()) @@ -142,7 +142,7 @@ pub fn abort(args: fmt::Arguments) -> ! { let mut msg = [0u8; 512]; let mut w = BufWriter { buf: &mut msg, pos: 0 }; let _ = write!(&mut w, "{}", args); - let msg = str::from_utf8(w.buf[mut ..w.pos]).unwrap_or("aborted"); + let msg = str::from_utf8(w.buf.index_mut(&(0..w.pos))).unwrap_or("aborted"); let msg = if msg.is_empty() {"aborted"} else {msg}; // Give some context to the message diff --git a/src/libstd/sync/mpsc/sync.rs b/src/libstd/sync/mpsc/sync.rs index 6836888e67e55..b2cc807eb111d 100644 --- a/src/libstd/sync/mpsc/sync.rs +++ b/src/libstd/sync/mpsc/sync.rs @@ -437,7 +437,8 @@ impl Buffer { let start = self.start; self.size -= 1; self.start = (self.start + 1) % self.buf.len(); - self.buf[start].take().unwrap() + let result = &mut self.buf[start]; + result.take().unwrap() } fn size(&self) -> uint { self.size } diff --git a/src/libstd/sys/common/net.rs b/src/libstd/sys/common/net.rs index 4cf891ac4985e..902942d7244f7 100644 --- a/src/libstd/sys/common/net.rs +++ b/src/libstd/sys/common/net.rs @@ -469,7 +469,7 @@ pub fn write(fd: sock_t, // Also as with read(), we use MSG_DONTWAIT to guard ourselves // against unforeseen circumstances. let _guard = lock(); - let ptr = buf[written..].as_ptr(); + let ptr = buf.index(&(written..)).as_ptr(); let len = buf.len() - written; match retry(|| write(deadline.is_some(), ptr, len)) { -1 if wouldblock() => {} diff --git a/src/libstd/sys/windows/backtrace.rs b/src/libstd/sys/windows/backtrace.rs index 4ccecfd1f5f2e..eb76f13afe721 100644 --- a/src/libstd/sys/windows/backtrace.rs +++ b/src/libstd/sys/windows/backtrace.rs @@ -23,6 +23,7 @@ use dynamic_lib::DynamicLibrary; use ffi; +use core::ops::Index; use intrinsics; use io::{IoResult, Writer}; use libc; @@ -361,7 +362,7 @@ pub fn write(w: &mut Writer) -> IoResult<()> { let bytes = unsafe { ffi::c_str_to_bytes(&ptr) }; match str::from_utf8(bytes) { Ok(s) => try!(demangle(w, s)), - Err(..) => try!(w.write(bytes[..bytes.len()-1])), + Err(..) 
=> try!(w.write(bytes.index(&(..(bytes.len()-1))))), } } try!(w.write(&['\n' as u8])); diff --git a/src/libstd/sys/windows/os.rs b/src/libstd/sys/windows/os.rs index dfdee0e0385c5..fcde5c01080fd 100644 --- a/src/libstd/sys/windows/os.rs +++ b/src/libstd/sys/windows/os.rs @@ -36,7 +36,7 @@ const BUF_BYTES : uint = 2048u; pub fn truncate_utf16_at_nul<'a>(v: &'a [u16]) -> &'a [u16] { match v.iter().position(|c| *c == 0) { // don't include the 0 - Some(i) => v[..i], + Some(i) => v.index(&(0..i)), None => v } } diff --git a/src/libstd/sys/windows/pipe.rs b/src/libstd/sys/windows/pipe.rs index 9996909f2f5bb..016757ef63e7d 100644 --- a/src/libstd/sys/windows/pipe.rs +++ b/src/libstd/sys/windows/pipe.rs @@ -453,7 +453,7 @@ impl UnixStream { } let ret = unsafe { libc::WriteFile(self.handle(), - buf[offset..].as_ptr() as libc::LPVOID, + buf.index(&(offset..)).as_ptr() as libc::LPVOID, (buf.len() - offset) as libc::DWORD, &mut bytes_written, &mut overlapped) diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index 7aa7c4fcfb301..da3744d83f52e 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -199,14 +199,14 @@ impl Encodable for Ident { #[cfg(stage0)] impl, E> Decodable for Ident { fn decode(d: &mut D) -> Result { - Ok(str_to_ident(try!(d.read_str())[])) + Ok(str_to_ident(try!(d.read_str()).index(&FullRange))) } } #[cfg(not(stage0))] impl Decodable for Ident { fn decode(d: &mut D) -> Result { - Ok(str_to_ident(try!(d.read_str())[])) + Ok(str_to_ident(try!(d.read_str()).index(&FullRange))) } } diff --git a/src/libsyntax/ast_map/mod.rs b/src/libsyntax/ast_map/mod.rs index cf09e2777f7c6..efd35f73d4534 100644 --- a/src/libsyntax/ast_map/mod.rs +++ b/src/libsyntax/ast_map/mod.rs @@ -99,7 +99,7 @@ pub fn path_to_string>(path: PI) -> String { if !s.is_empty() { s.push_str("::"); } - s.push_str(e[]); + s.push_str(e.index(&FullRange)); s }).to_string() } @@ -476,20 +476,20 @@ impl<'ast> Map<'ast> { F: FnOnce(Option<&[Attribute]>) -> T, { let attrs = match self.get(id) { - NodeItem(i) => Some(i.attrs[]), - NodeForeignItem(fi) => Some(fi.attrs[]), + NodeItem(i) => Some(i.attrs.index(&FullRange)), + NodeForeignItem(fi) => Some(fi.attrs.index(&FullRange)), NodeTraitItem(ref tm) => match **tm { - RequiredMethod(ref type_m) => Some(type_m.attrs[]), - ProvidedMethod(ref m) => Some(m.attrs[]), - TypeTraitItem(ref typ) => Some(typ.attrs[]), + RequiredMethod(ref type_m) => Some(type_m.attrs.index(&FullRange)), + ProvidedMethod(ref m) => Some(m.attrs.index(&FullRange)), + TypeTraitItem(ref typ) => Some(typ.attrs.index(&FullRange)), }, NodeImplItem(ref ii) => { match **ii { - MethodImplItem(ref m) => Some(m.attrs[]), - TypeImplItem(ref t) => Some(t.attrs[]), + MethodImplItem(ref m) => Some(m.attrs.index(&FullRange)), + TypeImplItem(ref t) => Some(t.attrs.index(&FullRange)), } } - NodeVariant(ref v) => Some(v.node.attrs[]), + NodeVariant(ref v) => Some(v.node.attrs.index(&FullRange)), // unit/tuple structs take the attributes straight from // the struct definition. // FIXME(eddyb) make this work again (requires access to the map). 
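A large share of the remaining hunks follow one pattern: an owned String produced by `format!` or a decoder is immediately re-borrowed as `&str` before being handed to an API such as `span_err`, `push_str`, or `str_to_ident`. Previously that was the postfix `[]` sugar; now it is either `.index(&FullRange)` or, in a few hunks, the equivalent `.as_slice()`. A small sketch of the shape (the `report` function is hypothetical and stands in for those call sites):

use std::ops::{FullRange, Index};

fn report(_msg: &str) { /* stand-in for span_err / push_str / str_to_ident */ }

fn demo(name: &str) {
    // `format!(...)` yields a String; re-borrow it as &str at the call site.
    report(format!("macro undefined: '{}!'", name).index(&FullRange));
}
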
@@ -513,7 +513,7 @@ impl<'ast> Map<'ast> { NodesMatchingSuffix { map: self, item_name: parts.last().unwrap(), - in_which: parts[..parts.len() - 1], + in_which: parts.index(&(0..(parts.len() - 1))), idx: 0, } } @@ -590,7 +590,7 @@ impl<'a, 'ast> NodesMatchingSuffix<'a, 'ast> { None => return false, Some((node_id, name)) => (node_id, name), }; - if part[] != mod_name.as_str() { + if part.index(&FullRange) != mod_name.as_str() { return false; } cursor = self.map.get_parent(mod_id); @@ -628,7 +628,7 @@ impl<'a, 'ast> NodesMatchingSuffix<'a, 'ast> { // We are looking at some node `n` with a given name and parent // id; do their names match what I am seeking? fn matches_names(&self, parent_of_n: NodeId, name: Name) -> bool { - name.as_str() == self.item_name[] && + name.as_str() == self.item_name.index(&FullRange) && self.suffix_matches(parent_of_n) } } @@ -1040,7 +1040,7 @@ impl<'a> NodePrinter for pprust::State<'a> { fn node_id_to_string(map: &Map, id: NodeId, include_id: bool) -> String { let id_str = format!(" (id={})", id); - let id_str = if include_id { id_str[] } else { "" }; + let id_str = if include_id { id_str.index(&FullRange) } else { "" }; match map.find(id) { Some(NodeItem(item)) => { diff --git a/src/libsyntax/ast_util.rs b/src/libsyntax/ast_util.rs index 5e03afec16cf8..aad6f11520659 100644 --- a/src/libsyntax/ast_util.rs +++ b/src/libsyntax/ast_util.rs @@ -238,11 +238,11 @@ pub fn impl_pretty_name(trait_ref: &Option, ty: &Ty) -> Ident { match *trait_ref { Some(ref trait_ref) => { pretty.push('.'); - pretty.push_str(pprust::path_to_string(&trait_ref.path)[]); + pretty.push_str(pprust::path_to_string(&trait_ref.path).index(&FullRange)); } None => {} } - token::gensym_ident(pretty[]) + token::gensym_ident(pretty.index(&FullRange)) } pub fn trait_method_to_ty_method(method: &Method) -> TypeMethod { @@ -704,7 +704,7 @@ pub fn pat_is_ident(pat: P) -> bool { pub fn path_name_eq(a : &ast::Path, b : &ast::Path) -> bool { (a.span == b.span) && (a.global == b.global) - && (segments_name_eq(a.segments[], b.segments[])) + && (segments_name_eq(a.segments.index(&FullRange), b.segments.index(&FullRange))) } // are two arrays of segments equal when compared unhygienically? 
@@ -792,13 +792,13 @@ mod test { #[test] fn idents_name_eq_test() { assert!(segments_name_eq( [Ident{name:Name(3),ctxt:4}, Ident{name:Name(78),ctxt:82}] - .iter().map(ident_to_segment).collect::>()[], + .iter().map(ident_to_segment).collect::>().index(&FullRange), [Ident{name:Name(3),ctxt:104}, Ident{name:Name(78),ctxt:182}] - .iter().map(ident_to_segment).collect::>()[])); + .iter().map(ident_to_segment).collect::>().index(&FullRange))); assert!(!segments_name_eq( [Ident{name:Name(3),ctxt:4}, Ident{name:Name(78),ctxt:82}] - .iter().map(ident_to_segment).collect::>()[], + .iter().map(ident_to_segment).collect::>().index(&FullRange), [Ident{name:Name(3),ctxt:104}, Ident{name:Name(77),ctxt:182}] - .iter().map(ident_to_segment).collect::>()[])); + .iter().map(ident_to_segment).collect::>().index(&FullRange))); } } diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs index 43e23f26e930e..68bbde35ae615 100644 --- a/src/libsyntax/attr.rs +++ b/src/libsyntax/attr.rs @@ -97,7 +97,7 @@ impl AttrMetaMethods for MetaItem { fn meta_item_list<'a>(&'a self) -> Option<&'a [P]> { match self.node { - MetaList(_, ref l) => Some(l[]), + MetaList(_, ref l) => Some(l.index(&FullRange)), _ => None } } @@ -136,7 +136,7 @@ impl AttributeMethods for Attribute { let meta = mk_name_value_item_str( InternedString::new("doc"), token::intern_and_get_ident(strip_doc_comment_decoration( - comment.get())[])); + comment.get()).index(&FullRange))); if self.node.style == ast::AttrOuter { f(&mk_attr_outer(self.node.id, meta)) } else { @@ -296,9 +296,9 @@ pub fn find_inline_attr(attrs: &[Attribute]) -> InlineAttr { } MetaList(ref n, ref items) if *n == "inline" => { mark_used(attr); - if contains_name(items[], "always") { + if contains_name(items.index(&FullRange), "always") { InlineAlways - } else if contains_name(items[], "never") { + } else if contains_name(items.index(&FullRange), "never") { InlineNever } else { InlineHint @@ -332,7 +332,7 @@ pub fn cfg_matches(diagnostic: &SpanHandler, cfgs: &[P], cfg: &ast::Me !cfg_matches(diagnostic, cfgs, &*mis[0]) } ast::MetaList(ref pred, _) => { - diagnostic.span_err(cfg.span, format!("invalid predicate `{}`", pred)[]); + diagnostic.span_err(cfg.span, format!("invalid predicate `{}`", pred).as_slice()); false }, ast::MetaWord(_) | ast::MetaNameValue(..) => contains(cfgs, cfg), @@ -396,7 +396,7 @@ pub fn require_unique_names(diagnostic: &SpanHandler, metas: &[P]) { if !set.insert(name.clone()) { diagnostic.span_fatal(meta.span, - format!("duplicate meta item `{}`", name)[]); + format!("duplicate meta item `{}`", name).index(&FullRange)); } } } diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs index a49f2614cd787..6d329fe614c99 100644 --- a/src/libsyntax/codemap.rs +++ b/src/libsyntax/codemap.rs @@ -321,9 +321,9 @@ impl FileMap { lines.get(line_number).map(|&line| { let begin: BytePos = line - self.start_pos; let begin = begin.to_uint(); - let slice = self.src[begin..]; + let slice = self.src.index(&(begin..)); match slice.find('\n') { - Some(e) => slice[0..e], + Some(e) => slice.index(&(0..e)), None => slice }.to_string() }) @@ -368,9 +368,9 @@ impl CodeMap { // FIXME #12884: no efficient/safe way to remove from the start of a string // and reuse the allocation. let mut src = if src.starts_with("\u{feff}") { - String::from_str(src[3..]) + String::from_str(src.index(&(3..))) } else { - String::from_str(src[]) + String::from_str(src.index(&FullRange)) }; // Append '\n' in case it's not already there. 
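The codemap hunks above apply the same rewrite to nested slicing: take the tail of the source from a byte offset, then cut that slice at the next newline. The sketch below mirrors the FileMap::get_line hunk, reduced to a free function purely for illustration:

use std::ops::Index;

// Return the text of the line beginning at byte offset `begin`,
// without its trailing '\n'.
fn line_at(src: &String, begin: uint) -> String {
    let slice = src.index(&(begin..));
    match slice.find('\n') {
        Some(e) => slice.index(&(0..e)),
        None => slice,
    }.to_string()
}
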
@@ -457,8 +457,8 @@ impl CodeMap { if begin.fm.start_pos != end.fm.start_pos { None } else { - Some(begin.fm.src[begin.pos.to_uint().. - end.pos.to_uint()].to_string()) + Some(begin.fm.src.index(&(begin.pos.to_uint().. + end.pos.to_uint())).to_string()) } } diff --git a/src/libsyntax/diagnostic.rs b/src/libsyntax/diagnostic.rs index c19c06c315587..f43a236341eff 100644 --- a/src/libsyntax/diagnostic.rs +++ b/src/libsyntax/diagnostic.rs @@ -123,7 +123,7 @@ impl SpanHandler { panic!(ExplicitBug); } pub fn span_unimpl(&self, sp: Span, msg: &str) -> ! { - self.span_bug(sp, format!("unimplemented {}", msg)[]); + self.span_bug(sp, format!("unimplemented {}", msg).index(&FullRange)); } pub fn handler<'a>(&'a self) -> &'a Handler { &self.handler @@ -166,7 +166,7 @@ impl Handler { self.err_count.get()); } } - self.fatal(s[]); + self.fatal(s.index(&FullRange)); } pub fn warn(&self, msg: &str) { self.emit.borrow_mut().emit(None, msg, None, Warning); @@ -182,7 +182,7 @@ impl Handler { panic!(ExplicitBug); } pub fn unimpl(&self, msg: &str) -> ! { - self.bug(format!("unimplemented {}", msg)[]); + self.bug(format!("unimplemented {}", msg).index(&FullRange)); } pub fn emit(&self, cmsp: Option<(&codemap::CodeMap, Span)>, @@ -277,7 +277,7 @@ fn print_maybe_styled(w: &mut EmitterWriter, // to be miscolored. We assume this is rare enough that we don't // have to worry about it. if msg.ends_with("\n") { - try!(t.write_str(msg[0..msg.len()-1])); + try!(t.write_str(msg.index(&(0..(msg.len()-1))))); try!(t.reset()); try!(t.write_str("\n")); } else { @@ -299,16 +299,16 @@ fn print_diagnostic(dst: &mut EmitterWriter, topic: &str, lvl: Level, } try!(print_maybe_styled(dst, - format!("{}: ", lvl.to_string())[], + format!("{}: ", lvl.to_string()).index(&FullRange), term::attr::ForegroundColor(lvl.color()))); try!(print_maybe_styled(dst, - format!("{}", msg)[], + format!("{}", msg).index(&FullRange), term::attr::Bold)); match code { Some(code) => { let style = term::attr::ForegroundColor(term::color::BRIGHT_MAGENTA); - try!(print_maybe_styled(dst, format!(" [{}]", code.clone())[], style)); + try!(print_maybe_styled(dst, format!(" [{}]", code.clone()).index(&FullRange), style)); } None => () } @@ -398,12 +398,12 @@ fn emit(dst: &mut EmitterWriter, cm: &codemap::CodeMap, rsp: RenderSpan, // the span) let span_end = Span { lo: sp.hi, hi: sp.hi, expn_id: sp.expn_id}; let ses = cm.span_to_string(span_end); - try!(print_diagnostic(dst, ses[], lvl, msg, code)); + try!(print_diagnostic(dst, ses.index(&FullRange), lvl, msg, code)); if rsp.is_full_span() { try!(custom_highlight_lines(dst, cm, sp, lvl, lines)); } } else { - try!(print_diagnostic(dst, ss[], lvl, msg, code)); + try!(print_diagnostic(dst, ss.index(&FullRange), lvl, msg, code)); if rsp.is_full_span() { try!(highlight_lines(dst, cm, sp, lvl, lines)); } @@ -413,9 +413,9 @@ fn emit(dst: &mut EmitterWriter, cm: &codemap::CodeMap, rsp: RenderSpan, Some(code) => match dst.registry.as_ref().and_then(|registry| registry.find_description(code)) { Some(_) => { - try!(print_diagnostic(dst, ss[], Help, + try!(print_diagnostic(dst, ss.index(&FullRange), Help, format!("pass `--explain {}` to see a detailed \ - explanation", code)[], None)); + explanation", code).index(&FullRange), None)); } None => () }, @@ -432,9 +432,9 @@ fn highlight_lines(err: &mut EmitterWriter, let fm = &*lines.file; let mut elided = false; - let mut display_lines = lines.lines[]; + let mut display_lines = lines.lines.index(&FullRange); if display_lines.len() > MAX_LINES { - display_lines = 
display_lines[0u..MAX_LINES]; + display_lines = display_lines.index(&(0u..MAX_LINES)); elided = true; } // Print the offending lines @@ -494,7 +494,7 @@ fn highlight_lines(err: &mut EmitterWriter, } } try!(print_maybe_styled(err, - format!("{}\n", s)[], + format!("{}\n", s).index(&FullRange), term::attr::ForegroundColor(lvl.color()))); } Ok(()) @@ -514,7 +514,7 @@ fn custom_highlight_lines(w: &mut EmitterWriter, -> io::IoResult<()> { let fm = &*lines.file; - let lines = lines.lines[]; + let lines = lines.lines.index(&FullRange); if lines.len() > MAX_LINES { if let Some(line) = fm.get_line(lines[0]) { try!(write!(&mut w.dst, "{}:{} {}\n", fm.name, @@ -545,7 +545,7 @@ fn custom_highlight_lines(w: &mut EmitterWriter, s.push('^'); s.push('\n'); print_maybe_styled(w, - s[], + s.index(&FullRange), term::attr::ForegroundColor(lvl.color())) } @@ -560,12 +560,12 @@ fn print_macro_backtrace(w: &mut EmitterWriter, codemap::MacroAttribute => ("#[", "]"), codemap::MacroBang => ("", "!") }; - try!(print_diagnostic(w, ss[], Note, + try!(print_diagnostic(w, ss.index(&FullRange), Note, format!("in expansion of {}{}{}", pre, ei.callee.name, - post)[], None)); + post).index(&FullRange), None)); let ss = cm.span_to_string(ei.call_site); - try!(print_diagnostic(w, ss[], Note, "expansion site", None)); + try!(print_diagnostic(w, ss.index(&FullRange), Note, "expansion site", None)); Ok(Some(ei.call_site)) } None => Ok(None) @@ -578,6 +578,6 @@ pub fn expect(diag: &SpanHandler, opt: Option, msg: M) -> T where { match opt { Some(t) => t, - None => diag.handler().bug(msg()[]), + None => diag.handler().bug(msg().index(&FullRange)), } } diff --git a/src/libsyntax/diagnostics/plugin.rs b/src/libsyntax/diagnostics/plugin.rs index 720a907fe77f0..0f4ebd74b66c7 100644 --- a/src/libsyntax/diagnostics/plugin.rs +++ b/src/libsyntax/diagnostics/plugin.rs @@ -58,7 +58,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt, Some(previous_span) => { ecx.span_warn(span, format!( "diagnostic code {} already used", token::get_ident(code).get() - )[]); + ).index(&FullRange)); ecx.span_note(previous_span, "previous invocation"); }, None => () @@ -87,12 +87,12 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt, if diagnostics.insert(code.name, description).is_some() { ecx.span_err(span, format!( "diagnostic code {} already registered", token::get_ident(*code).get() - )[]); + ).index(&FullRange)); } }); let sym = Ident::new(token::gensym(( "__register_diagnostic_".to_string() + token::get_ident(*code).get() - )[])); + ).index(&FullRange))); MacItems::new(vec![quote_item!(ecx, mod $sym {}).unwrap()].into_iter()) } diff --git a/src/libsyntax/ext/asm.rs b/src/libsyntax/ext/asm.rs index b77b822a6b2a1..04dec0e802841 100644 --- a/src/libsyntax/ext/asm.rs +++ b/src/libsyntax/ext/asm.rs @@ -100,7 +100,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) Some(('=', _)) => None, Some(('+', operand)) => { Some(token::intern_and_get_ident(format!( - "={}", operand)[])) + "={}", operand).index(&FullRange))) } _ => { cx.span_err(span, "output operand constraint lacks '=' or '+'"); diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index 91ae7396ea469..52e402689ba77 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -539,7 +539,7 @@ impl<'a> ExtCtxt<'a> { pub fn mod_pop(&mut self) { self.mod_path.pop().unwrap(); } pub fn mod_path(&self) -> Vec { let mut v = Vec::new(); - v.push(token::str_to_ident(self.ecfg.crate_name[])); + 
v.push(token::str_to_ident(self.ecfg.crate_name.index(&FullRange))); v.extend(self.mod_path.iter().map(|a| *a)); return v; } @@ -548,7 +548,7 @@ impl<'a> ExtCtxt<'a> { if self.recursion_count > self.ecfg.recursion_limit { self.span_fatal(ei.call_site, format!("recursion limit reached while expanding the macro `{}`", - ei.callee.name)[]); + ei.callee.name).index(&FullRange)); } let mut call_site = ei.call_site; @@ -670,7 +670,7 @@ pub fn check_zero_tts(cx: &ExtCtxt, tts: &[ast::TokenTree], name: &str) { if tts.len() != 0 { - cx.span_err(sp, format!("{} takes no arguments", name)[]); + cx.span_err(sp, format!("{} takes no arguments", name).index(&FullRange)); } } @@ -683,12 +683,12 @@ pub fn get_single_str_from_tts(cx: &mut ExtCtxt, -> Option { let mut p = cx.new_parser_from_tts(tts); if p.token == token::Eof { - cx.span_err(sp, format!("{} takes 1 argument", name)[]); + cx.span_err(sp, format!("{} takes 1 argument", name).index(&FullRange)); return None } let ret = cx.expander().fold_expr(p.parse_expr()); if p.token != token::Eof { - cx.span_err(sp, format!("{} takes 1 argument", name)[]); + cx.span_err(sp, format!("{} takes 1 argument", name).index(&FullRange)); } expr_to_string(cx, ret, "argument must be a string literal").map(|(s, _)| { s.get().to_string() diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs index ea345f3a458f4..e3561e8607084 100644 --- a/src/libsyntax/ext/build.rs +++ b/src/libsyntax/ext/build.rs @@ -709,7 +709,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { let loc = self.codemap().lookup_char_pos(span.lo); let expr_file = self.expr_str(span, token::intern_and_get_ident(loc.file - .name[])); + .name.index(&FullRange))); let expr_line = self.expr_uint(span, loc.line); let expr_file_line_tuple = self.expr_tuple(span, vec!(expr_file, expr_line)); let expr_file_line_ptr = self.expr_addr_of(span, expr_file_line_tuple); diff --git a/src/libsyntax/ext/concat.rs b/src/libsyntax/ext/concat.rs index 03dd08fdf7fe4..1f1781dceb30b 100644 --- a/src/libsyntax/ext/concat.rs +++ b/src/libsyntax/ext/concat.rs @@ -40,14 +40,14 @@ pub fn expand_syntax_ext(cx: &mut base::ExtCtxt, ast::LitInt(i, ast::UnsignedIntLit(_)) | ast::LitInt(i, ast::SignedIntLit(_, ast::Plus)) | ast::LitInt(i, ast::UnsuffixedIntLit(ast::Plus)) => { - accumulator.push_str(format!("{}", i)[]); + accumulator.push_str(format!("{}", i).index(&FullRange)); } ast::LitInt(i, ast::SignedIntLit(_, ast::Minus)) | ast::LitInt(i, ast::UnsuffixedIntLit(ast::Minus)) => { - accumulator.push_str(format!("-{}", i)[]); + accumulator.push_str(format!("-{}", i).index(&FullRange)); } ast::LitBool(b) => { - accumulator.push_str(format!("{}", b)[]); + accumulator.push_str(format!("{}", b).index(&FullRange)); } ast::LitByte(..) | ast::LitBinary(..) 
=> { @@ -62,5 +62,5 @@ pub fn expand_syntax_ext(cx: &mut base::ExtCtxt, } base::MacExpr::new(cx.expr_str( sp, - token::intern_and_get_ident(accumulator[]))) + token::intern_and_get_ident(accumulator.index(&FullRange)))) } diff --git a/src/libsyntax/ext/concat_idents.rs b/src/libsyntax/ext/concat_idents.rs index 2cf60d30a1b25..02f702248cb66 100644 --- a/src/libsyntax/ext/concat_idents.rs +++ b/src/libsyntax/ext/concat_idents.rs @@ -40,7 +40,7 @@ pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree] } } } - let res = str_to_ident(res_str[]); + let res = str_to_ident(res_str.index(&FullRange)); let e = P(ast::Expr { id: ast::DUMMY_NODE_ID, diff --git a/src/libsyntax/ext/deriving/bounds.rs b/src/libsyntax/ext/deriving/bounds.rs index cf29bb048d644..440bdff147169 100644 --- a/src/libsyntax/ext/deriving/bounds.rs +++ b/src/libsyntax/ext/deriving/bounds.rs @@ -29,12 +29,13 @@ pub fn expand_deriving_bound(cx: &mut ExtCtxt, "Send" | "Sync" => { return cx.span_err(span, format!("{} is an unsafe trait and it \ - should be implemented explicitly", *tname)[]) + should be implemented explicitly", + *tname).as_slice()) } ref tname => { cx.span_bug(span, format!("expected built-in trait name but \ - found {}", *tname)[]) + found {}", *tname).as_slice()) } } }, diff --git a/src/libsyntax/ext/deriving/clone.rs b/src/libsyntax/ext/deriving/clone.rs index 3c74a9f4431df..d9d6cebd05c90 100644 --- a/src/libsyntax/ext/deriving/clone.rs +++ b/src/libsyntax/ext/deriving/clone.rs @@ -80,11 +80,11 @@ fn cs_clone( EnumNonMatchingCollapsed (..) => { cx.span_bug(trait_span, format!("non-matching enum variants in \ - `deriving({})`", name)[]) + `deriving({})`", name).index(&FullRange)) } StaticEnum(..) | StaticStruct(..) => { cx.span_bug(trait_span, - format!("static method in `deriving({})`", name)[]) + format!("static method in `deriving({})`", name).index(&FullRange)) } } @@ -101,7 +101,7 @@ fn cs_clone( None => { cx.span_bug(trait_span, format!("unnamed field in normal struct in \ - `deriving({})`", name)[]) + `deriving({})`", name).index(&FullRange)) } }; cx.field_imm(field.span, ident, subcall(field)) diff --git a/src/libsyntax/ext/deriving/decodable.rs b/src/libsyntax/ext/deriving/decodable.rs index 8094f0d3de8cc..a9289f0175a69 100644 --- a/src/libsyntax/ext/deriving/decodable.rs +++ b/src/libsyntax/ext/deriving/decodable.rs @@ -198,7 +198,7 @@ fn decode_static_fields(cx: &mut ExtCtxt, let fields = fields.iter().enumerate().map(|(i, &span)| { getarg(cx, span, token::intern_and_get_ident(format!("_field{}", - i)[]), + i).index(&FullRange)), i) }).collect(); diff --git a/src/libsyntax/ext/deriving/encodable.rs b/src/libsyntax/ext/deriving/encodable.rs index 0fceb0fbfdac4..7114217d51d0a 100644 --- a/src/libsyntax/ext/deriving/encodable.rs +++ b/src/libsyntax/ext/deriving/encodable.rs @@ -183,7 +183,7 @@ fn encodable_substructure(cx: &mut ExtCtxt, trait_span: Span, let name = match name { Some(id) => token::get_ident(id), None => { - token::intern_and_get_ident(format!("_field{}", i)[]) + token::intern_and_get_ident(format!("_field{}", i).index(&FullRange)) } }; let enc = cx.expr_method_call(span, self_.clone(), diff --git a/src/libsyntax/ext/deriving/generic/mod.rs b/src/libsyntax/ext/deriving/generic/mod.rs index 1aa430c4a0829..8ef9a7dc01239 100644 --- a/src/libsyntax/ext/deriving/generic/mod.rs +++ b/src/libsyntax/ext/deriving/generic/mod.rs @@ -510,15 +510,15 @@ impl<'a> TraitDef<'a> { self, struct_def, type_ident, - self_args[], - nonself_args[]) + self_args.index(&FullRange), + 
nonself_args.index(&FullRange)) } else { method_def.expand_struct_method_body(cx, self, struct_def, type_ident, - self_args[], - nonself_args[]) + self_args.index(&FullRange), + nonself_args.index(&FullRange)) }; method_def.create_method(cx, @@ -550,15 +550,15 @@ impl<'a> TraitDef<'a> { self, enum_def, type_ident, - self_args[], - nonself_args[]) + self_args.index(&FullRange), + nonself_args.index(&FullRange)) } else { method_def.expand_enum_method_body(cx, self, enum_def, type_ident, self_args, - nonself_args[]) + nonself_args.index(&FullRange)) }; method_def.create_method(cx, @@ -645,7 +645,7 @@ impl<'a> MethodDef<'a> { for (i, ty) in self.args.iter().enumerate() { let ast_ty = ty.to_ty(cx, trait_.span, type_ident, generics); - let ident = cx.ident_of(format!("__arg_{}", i)[]); + let ident = cx.ident_of(format!("__arg_{}", i).index(&FullRange)); arg_tys.push((ident, ast_ty)); let arg_expr = cx.expr_ident(trait_.span, ident); @@ -752,7 +752,7 @@ impl<'a> MethodDef<'a> { struct_path, struct_def, format!("__self_{}", - i)[], + i).index(&FullRange), ast::MutImmutable); patterns.push(pat); raw_fields.push(ident_expr); @@ -908,22 +908,22 @@ impl<'a> MethodDef<'a> { .collect::>(); let self_arg_idents = self_arg_names.iter() - .map(|name|cx.ident_of(name[])) + .map(|name|cx.ident_of(name.index(&FullRange))) .collect::>(); // The `vi_idents` will be bound, solely in the catch-all, to // a series of let statements mapping each self_arg to a uint // corresponding to its variant index. let vi_idents: Vec = self_arg_names.iter() - .map(|name| { let vi_suffix = format!("{}_vi", name[]); - cx.ident_of(vi_suffix[]) }) + .map(|name| { let vi_suffix = format!("{}_vi", name.index(&FullRange)); + cx.ident_of(vi_suffix.index(&FullRange)) }) .collect::>(); // Builds, via callback to call_substructure_method, the // delegated expression that handles the catch-all case, // using `__variants_tuple` to drive logic if necessary. let catch_all_substructure = EnumNonMatchingCollapsed( - self_arg_idents, variants[], vi_idents[]); + self_arg_idents, variants.index(&FullRange), vi_idents.index(&FullRange)); // These arms are of the form: // (Variant1, Variant1, ...) 
=> Body1 @@ -945,12 +945,12 @@ impl<'a> MethodDef<'a> { let mut subpats = Vec::with_capacity(self_arg_names.len()); let mut self_pats_idents = Vec::with_capacity(self_arg_names.len() - 1); let first_self_pat_idents = { - let (p, idents) = mk_self_pat(cx, self_arg_names[0][]); + let (p, idents) = mk_self_pat(cx, self_arg_names[0].index(&FullRange)); subpats.push(p); idents }; for self_arg_name in self_arg_names.tail().iter() { - let (p, idents) = mk_self_pat(cx, self_arg_name[]); + let (p, idents) = mk_self_pat(cx, self_arg_name.index(&FullRange)); subpats.push(p); self_pats_idents.push(idents); } @@ -1006,7 +1006,7 @@ impl<'a> MethodDef<'a> { &**variant, field_tuples); let arm_expr = self.call_substructure_method( - cx, trait_, type_ident, self_args[], nonself_args, + cx, trait_, type_ident, self_args.index(&FullRange), nonself_args, &substructure); cx.arm(sp, vec![single_pat], arm_expr) @@ -1059,7 +1059,7 @@ impl<'a> MethodDef<'a> { } let arm_expr = self.call_substructure_method( - cx, trait_, type_ident, self_args[], nonself_args, + cx, trait_, type_ident, self_args.index(&FullRange), nonself_args, &catch_all_substructure); // Builds the expression: @@ -1263,7 +1263,7 @@ impl<'a> TraitDef<'a> { cx.span_bug(sp, "a struct with named and unnamed fields in `derive`"); } }; - let ident = cx.ident_of(format!("{}_{}", prefix, i)[]); + let ident = cx.ident_of(format!("{}_{}", prefix, i).index(&FullRange)); paths.push(codemap::Spanned{span: sp, node: ident}); let val = cx.expr( sp, ast::ExprParen(cx.expr_deref(sp, cx.expr_path(cx.path_ident(sp,ident))))); @@ -1309,7 +1309,7 @@ impl<'a> TraitDef<'a> { let mut ident_expr = Vec::new(); for (i, va) in variant_args.iter().enumerate() { let sp = self.set_expn_info(cx, va.ty.span); - let ident = cx.ident_of(format!("{}_{}", prefix, i)[]); + let ident = cx.ident_of(format!("{}_{}", prefix, i).index(&FullRange)); let path1 = codemap::Spanned{span: sp, node: ident}; paths.push(path1); let expr_path = cx.expr_path(cx.path_ident(sp, ident)); @@ -1352,7 +1352,7 @@ pub fn cs_fold(use_foldl: bool, field.span, old, field.self_.clone(), - field.other[]) + field.other.index(&FullRange)) }) } else { all_fields.iter().rev().fold(base, |old, field| { @@ -1360,12 +1360,12 @@ pub fn cs_fold(use_foldl: bool, field.span, old, field.self_.clone(), - field.other[]) + field.other.index(&FullRange)) }) } }, EnumNonMatchingCollapsed(ref all_args, _, tuple) => - enum_nonmatch_f.call_mut((cx, trait_span, (all_args[], tuple), + enum_nonmatch_f.call_mut((cx, trait_span, (all_args.index(&FullRange), tuple), substructure.nonself_args)), StaticEnum(..) | StaticStruct(..) => { cx.span_bug(trait_span, "static function in `derive`") @@ -1405,7 +1405,7 @@ pub fn cs_same_method(f: F, f(cx, trait_span, called) }, EnumNonMatchingCollapsed(ref all_self_args, _, tuple) => - enum_nonmatch_f.call_mut((cx, trait_span, (all_self_args[], tuple), + enum_nonmatch_f.call_mut((cx, trait_span, (all_self_args.index(&FullRange), tuple), substructure.nonself_args)), StaticEnum(..) | StaticStruct(..) 
=> { cx.span_bug(trait_span, "static function in `derive`") diff --git a/src/libsyntax/ext/deriving/mod.rs b/src/libsyntax/ext/deriving/mod.rs index e72c83b67c89b..b44aa9dbd9fcb 100644 --- a/src/libsyntax/ext/deriving/mod.rs +++ b/src/libsyntax/ext/deriving/mod.rs @@ -123,7 +123,7 @@ pub fn expand_meta_derive(cx: &mut ExtCtxt, cx.span_err(titem.span, format!("unknown `derive` \ trait: `{}`", - *tname)[]); + *tname).index(&FullRange)); } }; } diff --git a/src/libsyntax/ext/deriving/show.rs b/src/libsyntax/ext/deriving/show.rs index eceac4e9a8368..70d6da0f88b02 100644 --- a/src/libsyntax/ext/deriving/show.rs +++ b/src/libsyntax/ext/deriving/show.rs @@ -127,7 +127,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, let formatter = substr.nonself_args[0].clone(); let meth = cx.ident_of("write_fmt"); - let s = token::intern_and_get_ident(format_string[]); + let s = token::intern_and_get_ident(format_string.index(&FullRange)); let format_string = cx.expr_str(span, s); // phew, not our responsibility any more! diff --git a/src/libsyntax/ext/env.rs b/src/libsyntax/ext/env.rs index 9fedc4a158e15..eb3544e3c5c7f 100644 --- a/src/libsyntax/ext/env.rs +++ b/src/libsyntax/ext/env.rs @@ -30,7 +30,7 @@ pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenT Some(v) => v }; - let e = match os::getenv(var[]) { + let e = match os::getenv(var.index(&FullRange)) { None => { cx.expr_path(cx.path_all(sp, true, @@ -56,7 +56,7 @@ pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenT cx.ident_of("Some")), vec!(cx.expr_str(sp, token::intern_and_get_ident( - s[])))) + s.index(&FullRange))))) } }; MacExpr::new(e) @@ -83,7 +83,7 @@ pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) None => { token::intern_and_get_ident(format!("environment variable `{}` \ not defined", - var)[]) + var).index(&FullRange)) } Some(second) => { match expr_to_string(cx, second, "expected string literal") { @@ -106,7 +106,7 @@ pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) cx.span_err(sp, msg.get()); cx.expr_uint(sp, 0) } - Some(s) => cx.expr_str(sp, token::intern_and_get_ident(s[])) + Some(s) => cx.expr_str(sp, token::intern_and_get_ident(s.index(&FullRange))) }; MacExpr::new(e) } diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 212ec3b090325..75aea623de658 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -287,7 +287,7 @@ fn expand_mac_invoc(mac: ast::Mac, span: codemap::Span, fld.cx.span_err( pth.span, format!("macro undefined: '{}!'", - extnamestr.get())[]); + extnamestr.get()).index(&FullRange)); // let compilation continue None @@ -303,7 +303,7 @@ fn expand_mac_invoc(mac: ast::Mac, span: codemap::Span, }, }); let fm = fresh_mark(); - let marked_before = mark_tts(tts[], fm); + let marked_before = mark_tts(tts.index(&FullRange), fm); // The span that we pass to the expanders we want to // be the root of the call stack. 
That's the most @@ -314,7 +314,7 @@ fn expand_mac_invoc(mac: ast::Mac, span: codemap::Span, let opt_parsed = { let expanded = expandfun.expand(fld.cx, mac_span, - marked_before[]); + marked_before.index(&FullRange)); parse_thunk(expanded) }; let parsed = match opt_parsed { @@ -323,8 +323,8 @@ fn expand_mac_invoc(mac: ast::Mac, span: codemap::Span, fld.cx.span_err( pth.span, format!("non-expression macro in expression position: {}", - extnamestr.get()[] - )[]); + extnamestr.get().index(&FullRange) + ).index(&FullRange)); return None; } }; @@ -334,7 +334,7 @@ fn expand_mac_invoc(mac: ast::Mac, span: codemap::Span, fld.cx.span_err( pth.span, format!("'{}' is not a tt-style macro", - extnamestr.get())[]); + extnamestr.get()).index(&FullRange)); None } } @@ -439,7 +439,7 @@ pub fn expand_item(it: P, fld: &mut MacroExpander) if valid_ident { fld.cx.mod_push(it.ident); } - let macro_use = contains_macro_use(fld, new_attrs[]); + let macro_use = contains_macro_use(fld, new_attrs.index(&FullRange)); let result = with_exts_frame!(fld.cx.syntax_env, macro_use, noop_fold_item(it, fld)); @@ -566,7 +566,7 @@ pub fn expand_item_mac(it: P, None => { fld.cx.span_err(path_span, format!("macro undefined: '{}!'", - extnamestr)[]); + extnamestr).index(&FullRange)); // let compilation continue return SmallVector::zero(); } @@ -579,7 +579,7 @@ pub fn expand_item_mac(it: P, format!("macro {}! expects no ident argument, \ given '{}'", extnamestr, - token::get_ident(it.ident))[]); + token::get_ident(it.ident)).index(&FullRange)); return SmallVector::zero(); } fld.cx.bt_push(ExpnInfo { @@ -591,14 +591,14 @@ pub fn expand_item_mac(it: P, } }); // mark before expansion: - let marked_before = mark_tts(tts[], fm); - expander.expand(fld.cx, it.span, marked_before[]) + let marked_before = mark_tts(tts.index(&FullRange), fm); + expander.expand(fld.cx, it.span, marked_before.index(&FullRange)) } IdentTT(ref expander, span) => { if it.ident.name == parse::token::special_idents::invalid.name { fld.cx.span_err(path_span, format!("macro {}! expects an ident argument", - extnamestr.get())[]); + extnamestr.get()).index(&FullRange)); return SmallVector::zero(); } fld.cx.bt_push(ExpnInfo { @@ -610,13 +610,14 @@ pub fn expand_item_mac(it: P, } }); // mark before expansion: - let marked_tts = mark_tts(tts[], fm); + let marked_tts = mark_tts(tts.index(&FullRange), fm); expander.expand(fld.cx, it.span, it.ident, marked_tts) } MacroRulesTT => { if it.ident.name == parse::token::special_idents::invalid.name { fld.cx.span_err(path_span, - format!("macro_rules! expects an ident argument")[]); + format!("macro_rules! expects an ident argument") + .index(&FullRange)); return SmallVector::zero(); } fld.cx.bt_push(ExpnInfo { @@ -648,7 +649,7 @@ pub fn expand_item_mac(it: P, _ => { fld.cx.span_err(it.span, format!("{}! 
is not legal in item position", - extnamestr.get())[]); + extnamestr.get()).index(&FullRange)); return SmallVector::zero(); } } @@ -667,7 +668,7 @@ pub fn expand_item_mac(it: P, None => { fld.cx.span_err(path_span, format!("non-item macro in item position: {}", - extnamestr.get())[]); + extnamestr.get()).index(&FullRange)); return SmallVector::zero(); } }; @@ -913,7 +914,7 @@ fn expand_pat(p: P, fld: &mut MacroExpander) -> P { None => { fld.cx.span_err(pth.span, format!("macro undefined: '{}!'", - extnamestr)[]); + extnamestr).index(&FullRange)); // let compilation continue return DummyResult::raw_pat(span); } @@ -930,11 +931,11 @@ fn expand_pat(p: P, fld: &mut MacroExpander) -> P { }); let fm = fresh_mark(); - let marked_before = mark_tts(tts[], fm); + let marked_before = mark_tts(tts.index(&FullRange), fm); let mac_span = fld.cx.original_span(); let expanded = match expander.expand(fld.cx, mac_span, - marked_before[]).make_pat() { + marked_before.index(&FullRange)).make_pat() { Some(e) => e, None => { fld.cx.span_err( @@ -942,7 +943,7 @@ fn expand_pat(p: P, fld: &mut MacroExpander) -> P { format!( "non-pattern macro in pattern position: {}", extnamestr.get() - )[] + ).index(&FullRange) ); return DummyResult::raw_pat(span); } @@ -954,7 +955,7 @@ fn expand_pat(p: P, fld: &mut MacroExpander) -> P { _ => { fld.cx.span_err(span, format!("{}! is not legal in pattern position", - extnamestr.get())[]); + extnamestr.get()).index(&FullRange)); return DummyResult::raw_pat(span); } } @@ -1231,7 +1232,7 @@ impl Folder for Marker { node: match node { MacInvocTT(path, tts, ctxt) => { MacInvocTT(self.fold_path(path), - self.fold_tts(tts[]), + self.fold_tts(tts.index(&FullRange)), mtwt::apply_mark(self.mark, ctxt)) } }, @@ -1712,7 +1713,7 @@ foo_module!(); let string = ident.get(); "xx" == string }).collect(); - let cxbinds: &[&ast::Ident] = cxbinds[]; + let cxbinds: &[&ast::Ident] = cxbinds.index(&FullRange); let cxbind = match cxbinds { [b] => b, _ => panic!("expected just one binding for ext_cx") diff --git a/src/libsyntax/ext/format.rs b/src/libsyntax/ext/format.rs index 1f39555f4962c..85a3a5ebcae3a 100644 --- a/src/libsyntax/ext/format.rs +++ b/src/libsyntax/ext/format.rs @@ -113,7 +113,7 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) _ => { ecx.span_err(p.span, format!("expected ident for named argument, found `{}`", - p.this_token_to_string())[]); + p.this_token_to_string()).index(&FullRange)); return None; } }; @@ -126,7 +126,7 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) Some(prev) => { ecx.span_err(e.span, format!("duplicate argument named `{}`", - name)[]); + name).index(&FullRange)); ecx.parse_sess.span_diagnostic.span_note(prev.span, "previously here"); continue } @@ -217,7 +217,7 @@ impl<'a, 'b> Context<'a, 'b> { let msg = format!("invalid reference to argument `{}` ({})", arg, self.describe_num_args()); - self.ecx.span_err(self.fmtsp, msg[]); + self.ecx.span_err(self.fmtsp, msg.index(&FullRange)); return; } { @@ -237,7 +237,7 @@ impl<'a, 'b> Context<'a, 'b> { Some(e) => e.span, None => { let msg = format!("there is no argument named `{}`", name); - self.ecx.span_err(self.fmtsp, msg[]); + self.ecx.span_err(self.fmtsp, msg.index(&FullRange)); return; } }; @@ -280,19 +280,19 @@ impl<'a, 'b> Context<'a, 'b> { format!("argument redeclared with type `{}` when \ it was previously `{}`", *ty, - *cur)[]); + *cur).index(&FullRange)); } (&Known(ref cur), _) => { self.ecx.span_err(sp, format!("argument used to format with `{}` was \ attempted to not be 
used for formatting", - *cur)[]); + *cur).index(&FullRange)); } (_, &Known(ref ty)) => { self.ecx.span_err(sp, format!("argument previously used as a format \ argument attempted to be used as `{}`", - *ty)[]); + *ty).index(&FullRange)); } (_, _) => { self.ecx.span_err(sp, "argument declared with multiple formats"); @@ -357,7 +357,7 @@ impl<'a, 'b> Context<'a, 'b> { /// Translate the accumulated string literals to a literal expression fn trans_literal_string(&mut self) -> P { let sp = self.fmtsp; - let s = token::intern_and_get_ident(self.literal[]); + let s = token::intern_and_get_ident(self.literal.index(&FullRange)); self.literal.clear(); self.ecx.expr_str(sp, s) } @@ -509,7 +509,7 @@ impl<'a, 'b> Context<'a, 'b> { None => continue // error already generated }; - let name = self.ecx.ident_of(format!("__arg{}", i)[]); + let name = self.ecx.ident_of(format!("__arg{}", i).index(&FullRange)); pats.push(self.ecx.pat_ident(e.span, name)); locals.push(Context::format_arg(self.ecx, e.span, arg_ty, self.ecx.expr_ident(e.span, name))); @@ -526,7 +526,7 @@ impl<'a, 'b> Context<'a, 'b> { }; let lname = self.ecx.ident_of(format!("__arg{}", - *name)[]); + *name).index(&FullRange)); pats.push(self.ecx.pat_ident(e.span, lname)); names[self.name_positions[*name]] = Some(Context::format_arg(self.ecx, e.span, arg_ty, @@ -606,7 +606,7 @@ impl<'a, 'b> Context<'a, 'b> { -> P { let trait_ = match *ty { Known(ref tyname) => { - match tyname[] { + match tyname.index(&FullRange) { "" => "Show", "?" => "Show", "e" => "LowerExp", @@ -619,7 +619,7 @@ impl<'a, 'b> Context<'a, 'b> { _ => { ecx.span_err(sp, format!("unknown format trait `{}`", - *tyname)[]); + *tyname).index(&FullRange)); "Dummy" } } @@ -710,7 +710,7 @@ pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt, sp: Span, } if !parser.errors.is_empty() { cx.ecx.span_err(cx.fmtsp, format!("invalid format string: {}", - parser.errors.remove(0))[]); + parser.errors.remove(0)).index(&FullRange)); return DummyResult::raw_expr(sp); } if !cx.literal.is_empty() { diff --git a/src/libsyntax/ext/mtwt.rs b/src/libsyntax/ext/mtwt.rs index 4075b208f7873..49d6b255c81c7 100644 --- a/src/libsyntax/ext/mtwt.rs +++ b/src/libsyntax/ext/mtwt.rs @@ -223,7 +223,7 @@ pub fn marksof(ctxt: SyntaxContext, stopname: Name) -> Vec { } // the internal function for computing marks -// it's not clear to me whether it's better to use a [] mutable +// it's not clear to me whether it's better to use a .index(&FullRange) mutable // vector or a cons-list for this. 
fn marksof_internal(ctxt: SyntaxContext, stopname: Name, diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index f1b52fa33c386..66c7381e43344 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -473,7 +473,7 @@ pub fn expand_quote_stmt(cx: &mut ExtCtxt, } fn ids_ext(strs: Vec ) -> Vec { - strs.iter().map(|str| str_to_ident((*str)[])).collect() + strs.iter().map(|str| str_to_ident((*str).index(&FullRange))).collect() } fn id_ext(str: &str) -> ast::Ident { @@ -675,7 +675,7 @@ fn mk_tt(cx: &ExtCtxt, tt: &ast::TokenTree) -> Vec> { for i in range(0, tt.len()) { seq.push(tt.get_tt(i)); } - mk_tts(cx, seq[]) + mk_tts(cx, seq.index(&FullRange)) } ast::TtToken(sp, ref tok) => { let e_sp = cx.expr_ident(sp, id_ext("_sp")); @@ -764,7 +764,7 @@ fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree]) let stmt_let_tt = cx.stmt_let(sp, true, id_ext("tt"), cx.expr_vec_ng(sp)); let mut vector = vec!(stmt_let_sp, stmt_let_tt); - vector.extend(mk_tts(cx, tts[]).into_iter()); + vector.extend(mk_tts(cx, tts.index(&FullRange)).into_iter()); let block = cx.expr_block( cx.block_all(sp, Vec::new(), diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs index a49df457cb35b..5c966ed98231f 100644 --- a/src/libsyntax/ext/source_util.rs +++ b/src/libsyntax/ext/source_util.rs @@ -57,7 +57,7 @@ pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) let topmost = cx.original_span_in_file(); let loc = cx.codemap().lookup_char_pos(topmost.lo); - let filename = token::intern_and_get_ident(loc.file.name[]); + let filename = token::intern_and_get_ident(loc.file.name.index(&FullRange)); base::MacExpr::new(cx.expr_str(topmost, filename)) } @@ -65,7 +65,7 @@ pub fn expand_stringify(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> Box { let s = pprust::tts_to_string(tts); base::MacExpr::new(cx.expr_str(sp, - token::intern_and_get_ident(s[]))) + token::intern_and_get_ident(s.index(&FullRange)))) } pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) @@ -78,7 +78,7 @@ pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) .connect("::"); base::MacExpr::new(cx.expr_str( sp, - token::intern_and_get_ident(string[]))) + token::intern_and_get_ident(string.index(&FullRange)))) } /// include! 
: parse the given file as an expr @@ -137,7 +137,7 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) cx.span_err(sp, format!("couldn't read {}: {}", file.display(), - e)[]); + e).index(&FullRange)); return DummyResult::expr(sp); } Ok(bytes) => bytes, @@ -147,7 +147,7 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) // Add this input file to the code map to make it available as // dependency information let filename = file.display().to_string(); - let interned = token::intern_and_get_ident(src[]); + let interned = token::intern_and_get_ident(src.index(&FullRange)); cx.codemap().new_filemap(filename, src); base::MacExpr::new(cx.expr_str(sp, interned)) @@ -155,7 +155,7 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) Err(_) => { cx.span_err(sp, format!("{} wasn't a utf-8 file", - file.display())[]); + file.display()).index(&FullRange)); return DummyResult::expr(sp); } } @@ -177,7 +177,7 @@ pub fn expand_include_bytes(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) match File::open(&file).read_to_end() { Err(e) => { cx.span_err(sp, - format!("couldn't read {}: {}", file.display(), e)[]); + format!("couldn't read {}: {}", file.display(), e).index(&FullRange)); return DummyResult::expr(sp); } Ok(bytes) => { diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index 1438d15255495..581c60bdeb943 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -153,7 +153,7 @@ pub fn count_names(ms: &[TokenTree]) -> uint { seq.num_captures } &TtDelimited(_, ref delim) => { - count_names(delim.tts[]) + count_names(delim.tts.index(&FullRange)) } &TtToken(_, MatchNt(..)) => { 1 @@ -165,7 +165,7 @@ pub fn count_names(ms: &[TokenTree]) -> uint { pub fn initial_matcher_pos(ms: Rc>, sep: Option, lo: BytePos) -> Box { - let match_idx_hi = count_names(ms[]); + let match_idx_hi = count_names(ms.index(&FullRange)); let matches: Vec<_> = range(0, match_idx_hi).map(|_| Vec::new()).collect(); box MatcherPos { stack: vec![], @@ -229,7 +229,7 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc]) p_s.span_diagnostic .span_fatal(sp, format!("duplicated bind name: {}", - string.get())[]) + string.get()).index(&FullRange)) } } } @@ -254,13 +254,13 @@ pub fn parse_or_else(sess: &ParseSess, rdr: TtReader, ms: Vec ) -> HashMap> { - match parse(sess, cfg, rdr, ms[]) { + match parse(sess, cfg, rdr, ms.index(&FullRange)) { Success(m) => m, Failure(sp, str) => { - sess.span_diagnostic.span_fatal(sp, str[]) + sess.span_diagnostic.span_fatal(sp, str.index(&FullRange)) } Error(sp, str) => { - sess.span_diagnostic.span_fatal(sp, str[]) + sess.span_diagnostic.span_fatal(sp, str.index(&FullRange)) } } } @@ -341,7 +341,7 @@ pub fn parse(sess: &ParseSess, // Only touch the binders we have actually bound for idx in range(ei.match_lo, ei.match_hi) { let sub = (ei.matches[idx]).clone(); - new_pos.matches[idx] + (&mut new_pos.matches[idx]) .push(Rc::new(MatchedSeq(sub, mk_sp(ei.sp_lo, sp.hi)))); } @@ -386,7 +386,7 @@ pub fn parse(sess: &ParseSess, new_ei.idx += 1u; //we specifically matched zero repeats. 
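In the macro matcher the patch also touches places that mutate through an index: pushing into `matches[idx]` now goes through an explicit mutable borrow of the element, `(&mut ei.matches[idx]).push(...)` rather than `ei.matches[idx].push(...)` (the earlier mpsc buffer hunk makes the same move with a `let` binding). A reduced sketch of that shape, with a plain Vec<Vec<uint>> standing in for the matcher state; this only illustrates the form, it is not the matcher code itself:

fn record(matches: &mut Vec<Vec<uint>>, idx: uint, value: uint) {
    // take the &mut element explicitly, then call push on it,
    // mirroring the `(&mut new_ei.matches[idx]).push(..)` form above
    (&mut matches[idx]).push(value);
}
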
for idx in range(ei.match_cur, ei.match_cur + seq.num_captures) { - new_ei.matches[idx].push(Rc::new(MatchedSeq(Vec::new(), sp))); + (&mut new_ei.matches[idx]).push(Rc::new(MatchedSeq(vec![], sp))); } cur_eis.push(new_ei); @@ -444,10 +444,10 @@ pub fn parse(sess: &ParseSess, if token_name_eq(&tok, &token::Eof) { if eof_eis.len() == 1u { let mut v = Vec::new(); - for dv in eof_eis[0].matches.iter_mut() { + for dv in (&mut eof_eis[0]).matches.iter_mut() { v.push(dv.pop().unwrap()); } - return Success(nameize(sess, ms, v[])); + return Success(nameize(sess, ms, v.index(&FullRange))); } else if eof_eis.len() > 1u { return Error(sp, "ambiguity: multiple successful parses".to_string()); } else { @@ -486,7 +486,7 @@ pub fn parse(sess: &ParseSess, TtToken(_, MatchNt(_, name, _, _)) => { let name_string = token::get_ident(name); let match_cur = ei.match_cur; - ei.matches[match_cur].push(Rc::new(MatchedNonterminal( + (&mut ei.matches[match_cur]).push(Rc::new(MatchedNonterminal( parse_nt(&mut rust_parser, name_string.get())))); ei.idx += 1u; ei.match_cur += 1; @@ -522,7 +522,7 @@ pub fn parse_nt(p: &mut Parser, name: &str) -> Nonterminal { _ => { let token_str = pprust::token_to_string(&p.token); p.fatal((format!("expected ident, found {}", - token_str[]))[]) + token_str.index(&FullRange))).index(&FullRange)) } }, "path" => { @@ -536,7 +536,7 @@ pub fn parse_nt(p: &mut Parser, name: &str) -> Nonterminal { res } _ => { - p.fatal(format!("unsupported builtin nonterminal parser: {}", name)[]) + p.fatal(format!("unsupported builtin nonterminal parser: {}", name).index(&FullRange)) } } } diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 9837c8088fa45..805af484e281a 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -52,7 +52,7 @@ impl<'a> ParserAnyMacro<'a> { following", token_str); let span = parser.span; - parser.span_err(span, msg[]); + parser.span_err(span, msg.index(&FullRange)); } } } @@ -126,8 +126,8 @@ impl TTMacroExpander for MacroRulesMacroExpander { self.name, self.imported_from, arg, - self.lhses[], - self.rhses[]) + self.lhses.index(&FullRange), + self.rhses.index(&FullRange)) } } @@ -154,7 +154,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, match **lhs { MatchedNonterminal(NtTT(ref lhs_tt)) => { let lhs_tt = match **lhs_tt { - TtDelimited(_, ref delim) => delim.tts[], + TtDelimited(_, ref delim) => delim.tts.index(&FullRange), _ => cx.span_fatal(sp, "malformed macro lhs") }; // `None` is because we're not interpolating @@ -194,13 +194,13 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, best_fail_spot = sp; best_fail_msg = (*msg).clone(); }, - Error(sp, ref msg) => cx.span_fatal(sp, msg[]) + Error(sp, ref msg) => cx.span_fatal(sp, msg.index(&FullRange)) } } _ => cx.bug("non-matcher found in parsed lhses") } } - cx.span_fatal(best_fail_spot, best_fail_msg[]); + cx.span_fatal(best_fail_spot, best_fail_msg.index(&FullRange)); } // Note that macro-by-example's input is also matched against a token tree: diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index e4e6f5ac6b0f0..7ca920a61962a 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -240,7 +240,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { } LisContradiction(ref msg) => { // FIXME #2887 blame macro invoker instead - r.sp_diag.span_fatal(sp.clone(), msg[]); + r.sp_diag.span_fatal(sp.clone(), msg.index(&FullRange)); } LisConstraint(len, _) => { if len == 0 { @@ -297,7 
+297,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { r.sp_diag.span_fatal( r.cur_span, /* blame the macro writer */ format!("variable '{}' is still repeating at this depth", - token::get_ident(ident))[]); + token::get_ident(ident)).index(&FullRange)); } } } diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs index 0810d4ee93ac7..2c36a02d44fe3 100644 --- a/src/libsyntax/feature_gate.rs +++ b/src/libsyntax/feature_gate.rs @@ -149,7 +149,7 @@ impl<'a> Context<'a> { self.span_handler.span_err(span, explain); self.span_handler.span_help(span, format!("add #![feature({})] to the \ crate attributes to enable", - feature)[]); + feature).index(&FullRange)); } } @@ -240,7 +240,7 @@ impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> { } match i.node { ast::ItemForeignMod(ref foreign_module) => { - if attr::contains_name(i.attrs[], "link_args") { + if attr::contains_name(i.attrs.index(&FullRange), "link_args") { self.gate_feature("link_args", i.span, "the `link_args` attribute is not portable \ across platforms, it is recommended to \ @@ -254,14 +254,14 @@ impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> { } ast::ItemFn(..) => { - if attr::contains_name(i.attrs[], "plugin_registrar") { + if attr::contains_name(i.attrs.index(&FullRange), "plugin_registrar") { self.gate_feature("plugin_registrar", i.span, "compiler plugins are experimental and possibly buggy"); } } ast::ItemStruct(..) => { - if attr::contains_name(i.attrs[], "simd") { + if attr::contains_name(i.attrs.index(&FullRange), "simd") { self.gate_feature("simd", i.span, "SIMD types are experimental and possibly buggy"); } @@ -278,7 +278,7 @@ impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> { _ => {} } - if attr::contains_name(i.attrs[], + if attr::contains_name(i.attrs.as_slice(), "unsafe_destructor") { self.gate_feature("unsafe_destructor", i.span, @@ -287,7 +287,7 @@ impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> { removed in the future"); } - if attr::contains_name(i.attrs[], + if attr::contains_name(i.attrs.index(&FullRange), "old_orphan_check") { self.gate_feature( "old_orphan_check", @@ -303,13 +303,14 @@ impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> { } fn visit_foreign_item(&mut self, i: &ast::ForeignItem) { - if attr::contains_name(i.attrs[], "linkage") { + if attr::contains_name(i.attrs.index(&FullRange), "linkage") { self.gate_feature("linkage", i.span, "the `linkage` attribute is experimental \ and not portable across platforms") } - let links_to_llvm = match attr::first_attr_value_str_by_name(i.attrs[], "link_name") { + let links_to_llvm = match attr::first_attr_value_str_by_name(i.attrs.as_slice(), + "link_name") { Some(val) => val.get().starts_with("llvm."), _ => false }; diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index 41693d9d47a51..f4810cee5f89d 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -92,7 +92,7 @@ impl<'a> ParserAttr for Parser<'a> { } _ => { let token_str = self.this_token_to_string(); - self.fatal(format!("expected `#`, found `{}`", token_str)[]); + self.fatal(format!("expected `#`, found `{}`", token_str).index(&FullRange)); } }; diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs index 0d5592b57b1d1..e7fc5aac9c772 100644 --- a/src/libsyntax/parse/lexer/comments.rs +++ b/src/libsyntax/parse/lexer/comments.rs @@ -82,7 +82,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String { while j > i && lines[j - 1].trim().is_empty() { j -= 1; } - return 
lines[i..j].iter().map(|x| (*x).clone()).collect(); + return lines.index(&(i..j)).iter().map(|x| (*x).clone()).collect(); } /// remove a "[ \t]*\*" block from each line, if possible @@ -116,7 +116,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String { if can_trim { lines.iter().map(|line| { - line[i + 1..line.len()].to_string() + line.index(&((i + 1)..line.len())).to_string() }).collect() } else { lines @@ -127,12 +127,12 @@ pub fn strip_doc_comment_decoration(comment: &str) -> String { static ONLINERS: &'static [&'static str] = &["///!", "///", "//!", "//"]; for prefix in ONLINERS.iter() { if comment.starts_with(*prefix) { - return comment[prefix.len()..].to_string(); + return comment.index(&(prefix.len()..)).to_string(); } } if comment.starts_with("/*") { - let lines = comment[3u..comment.len() - 2u] + let lines = comment.index(&(3u..(comment.len() - 2u))) .lines_any() .map(|s| s.to_string()) .collect:: >(); @@ -187,7 +187,7 @@ fn read_line_comments(rdr: &mut StringReader, code_to_the_left: bool, let line = rdr.read_one_line_comment(); debug!("{}", line); // Doc comments are not put in comments. - if is_doc_comment(line[]) { + if is_doc_comment(line.index(&FullRange)) { break; } lines.push(line); @@ -224,10 +224,10 @@ fn all_whitespace(s: &str, col: CharPos) -> Option { fn trim_whitespace_prefix_and_push_line(lines: &mut Vec , s: String, col: CharPos) { let len = s.len(); - let s1 = match all_whitespace(s[], col) { + let s1 = match all_whitespace(s.index(&FullRange), col) { Some(col) => { if col < len { - s[col..len].to_string() + s.index(&(col..len)).to_string() } else { "".to_string() } @@ -261,7 +261,7 @@ fn read_block_comment(rdr: &mut StringReader, rdr.bump(); rdr.bump(); } - if is_block_doc_comment(curr_line[]) { + if is_block_doc_comment(curr_line.index(&FullRange)) { return } assert!(!curr_line.contains_char('\n')); diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index a50b97142c2ef..850d527fe3970 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -196,7 +196,7 @@ impl<'a> StringReader<'a> { let mut m = m.to_string(); m.push_str(": "); for c in c.escape_default() { m.push(c) } - self.fatal_span_(from_pos, to_pos, m[]); + self.fatal_span_(from_pos, to_pos, m.index(&FullRange)); } /// Report a lexical error spanning [`from_pos`, `to_pos`), appending an @@ -205,7 +205,7 @@ impl<'a> StringReader<'a> { let mut m = m.to_string(); m.push_str(": "); for c in c.escape_default() { m.push(c) } - self.err_span_(from_pos, to_pos, m[]); + self.err_span_(from_pos, to_pos, m.index(&FullRange)); } /// Report a lexical error spanning [`from_pos`, `to_pos`), appending the @@ -214,8 +214,8 @@ impl<'a> StringReader<'a> { m.push_str(": "); let from = self.byte_offset(from_pos).to_uint(); let to = self.byte_offset(to_pos).to_uint(); - m.push_str(self.filemap.src[from..to]); - self.fatal_span_(from_pos, to_pos, m[]); + m.push_str(self.filemap.src.index(&(from..to))); + self.fatal_span_(from_pos, to_pos, m.index(&FullRange)); } /// Advance peek_tok and peek_span to refer to the next token, and @@ -301,7 +301,7 @@ impl<'a> StringReader<'a> { while i < s.len() { let str::CharRange { ch, next } = s.char_range_at(i); if ch == '\r' { - if j < i { buf.push_str(s[j..i]); } + if j < i { buf.push_str(s.index(&(j..i))); } j = next; if next >= s.len() || s.char_at(next) != '\n' { let pos = start + BytePos(i as u32); @@ -311,7 +311,7 @@ impl<'a> StringReader<'a> { } i = next; } - if j < s.len() { buf.push_str(s[j..]); } + if j < 
s.len() { buf.push_str(s.index(&(j..))); } buf } } @@ -556,7 +556,7 @@ impl<'a> StringReader<'a> { self.translate_crlf(start_bpos, string, "bare CR not allowed in block doc-comment") } else { string.into_cow() }; - token::DocComment(token::intern(string[])) + token::DocComment(token::intern(string.index(&FullRange))) } else { token::Comment }; @@ -1110,7 +1110,7 @@ impl<'a> StringReader<'a> { // expansion purposes. See #12512 for the gory details of why // this is necessary. let ident = self.with_str_from(start, |lifetime_name| { - str_to_ident(format!("'{}", lifetime_name)[]) + str_to_ident(format!("'{}", lifetime_name).index(&FullRange)) }); // Conjure up a "keyword checking ident" to make sure that diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index b0969a573e66b..d4650a4bb0395 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -256,17 +256,17 @@ pub fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option) Err(e) => { err(format!("couldn't read {}: {}", path.display(), - e)[]); + e).index(&FullRange)); unreachable!() } }; - match str::from_utf8(bytes[]).ok() { + match str::from_utf8(bytes.index(&FullRange)).ok() { Some(s) => { return string_to_filemap(sess, s.to_string(), path.as_str().unwrap().to_string()) } None => { - err(format!("{} is not UTF-8 encoded", path.display())[]) + err(format!("{} is not UTF-8 encoded", path.display()).index(&FullRange)) } } unreachable!() @@ -398,10 +398,10 @@ pub fn char_lit(lit: &str) -> (char, int) { } let msg = format!("lexer should have rejected a bad character escape {}", lit); - let msg2 = msg[]; + let msg2 = msg.index(&FullRange); fn esc(len: uint, lit: &str) -> Option<(char, int)> { - num::from_str_radix(lit[2..len], 16) + num::from_str_radix(lit.index(&(2..len)), 16) .and_then(char::from_u32) .map(|x| (x, len as int)) } @@ -409,7 +409,7 @@ pub fn char_lit(lit: &str) -> (char, int) { let unicode_escape = |&: | -> Option<(char, int)> if lit.as_bytes()[2] == b'{' { let idx = lit.find('}').expect(msg2); - let subslice = lit[3..idx]; + let subslice = lit.index(&(3..idx)); num::from_str_radix(subslice, 16) .and_then(char::from_u32) .map(|x| (x, subslice.chars().count() as int + 4)) @@ -471,7 +471,7 @@ pub fn str_lit(lit: &str) -> String { eat(&mut chars); } else { // otherwise, a normal escape - let (c, n) = char_lit(lit[i..]); + let (c, n) = char_lit(lit.index(&(i..))); for _ in range(0, n - 1) { // we don't need to move past the first \ chars.next(); } @@ -534,7 +534,7 @@ pub fn raw_str_lit(lit: &str) -> String { fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool { s.len() > 1 && first_chars.contains(&s.char_at(0)) && - s[1..].chars().all(|c| '0' <= c && c <= '9') + s.index(&(1..)).chars().all(|c| '0' <= c && c <= '9') } fn filtered_float_lit(data: token::InternedString, suffix: Option<&str>, @@ -547,7 +547,7 @@ fn filtered_float_lit(data: token::InternedString, suffix: Option<&str>, if suf.len() >= 2 && looks_like_width_suffix(&['f'], suf) { // if it looks like a width, lets try to be helpful. 
sd.span_err(sp, &*format!("illegal width `{}` for float literal, \ - valid widths are 32 and 64", suf[1..])); + valid widths are 32 and 64", suf.index(&(1..)))); } else { sd.span_err(sp, &*format!("illegal suffix `{}` for float literal, \ valid suffixes are `f32` and `f64`", suf)); @@ -583,7 +583,7 @@ pub fn byte_lit(lit: &str) -> (u8, uint) { b'\'' => b'\'', b'0' => b'\0', _ => { - match ::std::num::from_str_radix::(lit[2..4], 16) { + match ::std::num::from_str_radix::(lit.index(&(2..4)), 16) { Some(c) => if c > 0xFF { panic!(err(2)) @@ -633,7 +633,7 @@ pub fn binary_lit(lit: &str) -> Rc> { } _ => { // otherwise, a normal escape - let (c, n) = byte_lit(lit[i..]); + let (c, n) = byte_lit(lit.index(&(i..))); // we don't need to move past the first \ for _ in range(0, n - 1) { chars.next(); @@ -662,7 +662,7 @@ pub fn integer_lit(s: &str, suffix: Option<&str>, sd: &SpanHandler, sp: Span) -> // s can only be ascii, byte indexing is fine let s2 = s.chars().filter(|&c| c != '_').collect::(); - let mut s = s2[]; + let mut s = s2.index(&FullRange); debug!("integer_lit: {}, {}", s, suffix); @@ -695,7 +695,7 @@ pub fn integer_lit(s: &str, suffix: Option<&str>, sd: &SpanHandler, sp: Span) -> } if base != 10 { - s = s[2..]; + s = s.index(&(2..)); } if let Some(suf) = suffix { @@ -717,7 +717,7 @@ pub fn integer_lit(s: &str, suffix: Option<&str>, sd: &SpanHandler, sp: Span) -> if looks_like_width_suffix(&['i', 'u'], suf) { sd.span_err(sp, &*format!("illegal width `{}` for integer literal; \ valid widths are 8, 16, 32 and 64", - suf[1..])); + suf.index(&(1..)))); } else { sd.span_err(sp, &*format!("illegal suffix `{}` for numeric literal", suf)); } @@ -815,7 +815,7 @@ mod test { #[test] fn string_to_tts_macro () { let tts = string_to_tts("macro_rules! zip (($a)=>($a))".to_string()); - let tts: &[ast::TokenTree] = tts[]; + let tts: &[ast::TokenTree] = tts.index(&FullRange); match tts { [ast::TtToken(_, token::Ident(name_macro_rules, token::Plain)), ast::TtToken(_, token::Not), @@ -823,19 +823,19 @@ mod test { ast::TtDelimited(_, ref macro_delimed)] if name_macro_rules.as_str() == "macro_rules" && name_zip.as_str() == "zip" => { - match macro_delimed.tts[] { + match macro_delimed.tts.index(&FullRange) { [ast::TtDelimited(_, ref first_delimed), ast::TtToken(_, token::FatArrow), ast::TtDelimited(_, ref second_delimed)] if macro_delimed.delim == token::Paren => { - match first_delimed.tts[] { + match first_delimed.tts.index(&FullRange) { [ast::TtToken(_, token::Dollar), ast::TtToken(_, token::Ident(name, token::Plain))] if first_delimed.delim == token::Paren && name.as_str() == "a" => {}, _ => panic!("value 3: {}", **first_delimed), } - match second_delimed.tts[] { + match second_delimed.tts.index(&FullRange) { [ast::TtToken(_, token::Dollar), ast::TtToken(_, token::Ident(name, token::Plain))] if second_delimed.delim == token::Paren @@ -1113,24 +1113,24 @@ mod test { let use_s = "use foo::bar::baz;"; let vitem = string_to_view_item(use_s.to_string()); let vitem_s = view_item_to_string(&vitem); - assert_eq!(vitem_s[], use_s); + assert_eq!(vitem_s.index(&FullRange), use_s); let use_s = "use foo::bar as baz;"; let vitem = string_to_view_item(use_s.to_string()); let vitem_s = view_item_to_string(&vitem); - assert_eq!(vitem_s[], use_s); + assert_eq!(vitem_s.index(&FullRange), use_s); } #[test] fn parse_extern_crate() { let ex_s = "extern crate foo;"; let vitem = string_to_view_item(ex_s.to_string()); let vitem_s = view_item_to_string(&vitem); - assert_eq!(vitem_s[], ex_s); + assert_eq!(vitem_s.index(&FullRange), 
ex_s); let ex_s = "extern crate \"foo\" as bar;"; let vitem = string_to_view_item(ex_s.to_string()); let vitem_s = view_item_to_string(&vitem); - assert_eq!(vitem_s[], ex_s); + assert_eq!(vitem_s.index(&FullRange), ex_s); } fn get_spans_of_pat_idents(src: &str) -> Vec { @@ -1167,10 +1167,10 @@ mod test { for &src in srcs.iter() { let spans = get_spans_of_pat_idents(src); - let Span{lo:lo,hi:hi,..} = spans[0]; - assert!("self" == src[lo.to_uint()..hi.to_uint()], + let Span{ lo, hi, .. } = spans[0]; + assert!("self" == &src[lo.to_uint()..hi.to_uint()], "\"{}\" != \"self\". src=\"{}\"", - src[lo.to_uint()..hi.to_uint()], src) + &src[lo.to_uint()..hi.to_uint()], src) } } @@ -1209,7 +1209,7 @@ mod test { let docs = item.attrs.iter().filter(|a| a.name().get() == "doc") .map(|a| a.value_str().unwrap().get().to_string()).collect::>(); let b: &[_] = &["/// doc comment".to_string(), "/// line 2".to_string()]; - assert_eq!(docs[], b); + assert_eq!(docs.index(&FullRange), b); let source = "/** doc comment\r\n * with CRLF */\r\nfn foo() {}".to_string(); let item = parse_item_from_source_str(name, source, Vec::new(), &sess).unwrap(); diff --git a/src/libsyntax/parse/obsolete.rs b/src/libsyntax/parse/obsolete.rs index a49680d7e1c03..23728c74ae8b5 100644 --- a/src/libsyntax/parse/obsolete.rs +++ b/src/libsyntax/parse/obsolete.rs @@ -127,13 +127,13 @@ impl<'a> ParserObsoleteMethods for parser::Parser<'a> { kind_str: &str, desc: &str) { self.span_err(sp, - format!("obsolete syntax: {}", kind_str)[]); + format!("obsolete syntax: {}", kind_str).index(&FullRange)); if !self.obsolete_set.contains(&kind) { self.sess .span_diagnostic .handler() - .note(format!("{}", desc)[]); + .note(format!("{}", desc).index(&FullRange)); self.obsolete_set.insert(kind); } } diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 32f8f5ee3d63b..0ecd098951f8d 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -63,7 +63,7 @@ use ast::{ViewPath, ViewPathGlob, ViewPathList, ViewPathSimple}; use ast::{Visibility, WhereClause}; use ast; use ast_util::{self, as_prec, ident_to_path, operator_prec}; -use codemap::{self, Span, BytePos, Spanned, spanned, mk_sp, DUMMY_SP}; +use codemap::{self, Span, BytePos, Spanned, spanned, mk_sp}; use diagnostic; use ext::tt::macro_parser; use parse; @@ -389,12 +389,12 @@ impl<'a> Parser<'a> { let token_str = Parser::token_to_string(t); let last_span = self.last_span; self.span_fatal(last_span, format!("unexpected token: `{}`", - token_str)[]); + token_str).index(&FullRange)); } pub fn unexpected(&mut self) -> ! { let this_token = self.this_token_to_string(); - self.fatal(format!("unexpected token: `{}`", this_token)[]); + self.fatal(format!("unexpected token: `{}`", this_token).index(&FullRange)); } /// Expect and consume the token t. 
Signal an error if @@ -408,7 +408,7 @@ impl<'a> Parser<'a> { let this_token_str = self.this_token_to_string(); self.fatal(format!("expected `{}`, found `{}`", token_str, - this_token_str)[]) + this_token_str).index(&FullRange)) } } else { self.expect_one_of(slice::ref_slice(t), &[]); @@ -449,7 +449,7 @@ impl<'a> Parser<'a> { expected.push_all(&*self.expected_tokens); expected.sort_by(|a, b| a.to_string().cmp(&b.to_string())); expected.dedup(); - let expect = tokens_to_string(expected[]); + let expect = tokens_to_string(expected.index(&FullRange)); let actual = self.this_token_to_string(); self.fatal( (if expected.len() != 1 { @@ -460,7 +460,7 @@ impl<'a> Parser<'a> { (format!("expected {}, found `{}`", expect, actual)) - })[] + }).index(&FullRange) ) } } @@ -493,7 +493,7 @@ impl<'a> Parser<'a> { // might be unit-struct construction; check for recoverableinput error. let mut expected = edible.iter().map(|x| x.clone()).collect::>(); expected.push_all(inedible); - self.check_for_erroneous_unit_struct_expecting(expected[]); + self.check_for_erroneous_unit_struct_expecting(expected.index(&FullRange)); } self.expect_one_of(edible, inedible) } @@ -510,9 +510,9 @@ impl<'a> Parser<'a> { .as_ref() .map_or(false, |t| t.is_ident() || t.is_path()) { let mut expected = edible.iter().map(|x| x.clone()).collect::>(); - expected.push_all(inedible[]); + expected.push_all(inedible.index(&FullRange)); self.check_for_erroneous_unit_struct_expecting( - expected[]); + expected.index(&FullRange)); } self.expect_one_of(edible, inedible) } @@ -535,7 +535,7 @@ impl<'a> Parser<'a> { _ => { let token_str = self.this_token_to_string(); self.fatal((format!("expected ident, found `{}`", - token_str))[]) + token_str)).index(&FullRange)) } } } @@ -593,7 +593,7 @@ impl<'a> Parser<'a> { let id_interned_str = token::get_name(kw.to_name()); let token_str = self.this_token_to_string(); self.fatal(format!("expected `{}`, found `{}`", - id_interned_str, token_str)[]) + id_interned_str, token_str).index(&FullRange)) } } @@ -604,7 +604,7 @@ impl<'a> Parser<'a> { let span = self.span; self.span_err(span, format!("expected identifier, found keyword `{}`", - token_str)[]); + token_str).index(&FullRange)); } } @@ -613,7 +613,7 @@ impl<'a> Parser<'a> { if self.token.is_reserved_keyword() { let token_str = self.this_token_to_string(); self.fatal(format!("`{}` is a reserved keyword", - token_str)[]) + token_str).index(&FullRange)) } } @@ -633,7 +633,7 @@ impl<'a> Parser<'a> { Parser::token_to_string(&token::BinOp(token::And)); self.fatal(format!("expected `{}`, found `{}`", found_token, - token_str)[]) + token_str).index(&FullRange)) } } } @@ -654,7 +654,7 @@ impl<'a> Parser<'a> { Parser::token_to_string(&token::BinOp(token::Or)); self.fatal(format!("expected `{}`, found `{}`", token_str, - found_token)[]) + found_token).index(&FullRange)) } } } @@ -697,7 +697,7 @@ impl<'a> Parser<'a> { let token_str = Parser::token_to_string(&token::Lt); self.fatal(format!("expected `{}`, found `{}`", token_str, - found_token)[]) + found_token).index(&FullRange)) } } @@ -749,7 +749,7 @@ impl<'a> Parser<'a> { let this_token_str = self.this_token_to_string(); self.fatal(format!("expected `{}`, found `{}`", gt_str, - this_token_str)[]) + this_token_str).index(&FullRange)) } } } @@ -1369,7 +1369,7 @@ impl<'a> Parser<'a> { let (inner_attrs, body) = p.parse_inner_attrs_and_block(); let mut attrs = attrs; - attrs.push_all(inner_attrs[]); + attrs.push_all(inner_attrs.index(&FullRange)); ProvidedMethod(P(ast::Method { attrs: attrs, id: ast::DUMMY_NODE_ID, @@ 
-1388,7 +1388,7 @@ impl<'a> Parser<'a> { _ => { let token_str = p.this_token_to_string(); p.fatal((format!("expected `;` or `{{`, found `{}`", - token_str))[]) + token_str)).index(&FullRange)) } } } @@ -1584,7 +1584,7 @@ impl<'a> Parser<'a> { } else { let this_token_str = self.this_token_to_string(); let msg = format!("expected type, found `{}`", this_token_str); - self.fatal(msg[]); + self.fatal(msg.index(&FullRange)); }; let sp = mk_sp(lo, self.last_span.hi); @@ -1726,14 +1726,14 @@ impl<'a> Parser<'a> { token::Str_(s) => { (true, - LitStr(token::intern_and_get_ident(parse::str_lit(s.as_str())[]), + LitStr(token::intern_and_get_ident(parse::str_lit(s.as_str()).as_slice()), ast::CookedStr)) } token::StrRaw(s, n) => { (true, LitStr( token::intern_and_get_ident( - parse::raw_str_lit(s.as_str())[]), + parse::raw_str_lit(s.as_str()).index(&FullRange)), ast::RawStr(n))) } token::Binary(i) => @@ -1977,7 +1977,7 @@ impl<'a> Parser<'a> { }; } _ => { - self.fatal(format!("expected a lifetime name")[]); + self.fatal(format!("expected a lifetime name").index(&FullRange)); } } } @@ -2015,7 +2015,7 @@ impl<'a> Parser<'a> { let msg = format!("expected `,` or `>` after lifetime \ name, found `{}`", this_token_str); - self.fatal(msg[]); + self.fatal(msg.index(&FullRange)); } } } @@ -2103,22 +2103,6 @@ impl<'a> Parser<'a> { ExprIndex(expr, idx) } - pub fn mk_slice(&mut self, - expr: P, - start: Option>, - end: Option>, - _mutbl: Mutability) - -> ast::Expr_ { - // FIXME: we could give more accurate span info here. - let (lo, hi) = match (&start, &end) { - (&Some(ref s), &Some(ref e)) => (s.span.lo, e.span.hi), - (&Some(ref s), &None) => (s.span.lo, s.span.hi), - (&None, &Some(ref e)) => (e.span.lo, e.span.hi), - (&None, &None) => (DUMMY_SP.lo, DUMMY_SP.hi), - }; - ExprIndex(expr, self.mk_expr(lo, hi, ExprRange(start, end))) - } - pub fn mk_range(&mut self, start: Option>, end: Option>) @@ -2515,7 +2499,7 @@ impl<'a> Parser<'a> { let last_span = self.last_span; let fstr = n.as_str(); self.span_err(last_span, - format!("unexpected token: `{}`", n.as_str())[]); + format!("unexpected token: `{}`", n.as_str()).index(&FullRange)); if fstr.chars().all(|x| "0123456789.".contains_char(x)) { let float = match fstr.parse::() { Some(f) => f, @@ -2524,7 +2508,7 @@ impl<'a> Parser<'a> { self.span_help(last_span, format!("try parenthesizing the first index; e.g., `(foo.{}){}`", float.trunc() as uint, - float.fract().to_string()[1..])[]); + float.fract().to_string().index(&(1..))).index(&FullRange)); } self.abort_if_errors(); @@ -2550,87 +2534,44 @@ impl<'a> Parser<'a> { } // expr[...] - // Could be either an index expression or a slicing expression. - // Any slicing non-terminal can have a mutable version with `mut` - // after the opening square bracket. + // An index expression. token::OpenDelim(token::Bracket) => { + let bracket_pos = self.span.lo; self.bump(); - let mutbl = if self.eat_keyword(keywords::Mut) { - MutMutable + + let mut found_dotdot = false; + if self.token == token::DotDot && + self.look_ahead(1, |t| t == &token::CloseDelim(token::Bracket)) { + // Using expr[..], which is a mistake, should be expr[] + self.bump(); + self.bump(); + found_dotdot = true; + } + + if found_dotdot || self.eat(&token::CloseDelim(token::Bracket)) { + // No expression, expand to a FullRange + // FIXME(#20516) It would be better to use a lang item or + // something for FullRange. 
+ hi = self.last_span.hi; + let range = ExprStruct(ident_to_path(mk_sp(lo, hi), + token::special_idents::FullRange), + vec![], + None); + let ix = self.mk_expr(bracket_pos, hi, range); + let index = self.mk_index(e, ix); + e = self.mk_expr(lo, hi, index) } else { - MutImmutable - }; - match self.token { - // e[] - token::CloseDelim(token::Bracket) => { - self.bump(); - hi = self.span.hi; - let slice = self.mk_slice(e, None, None, mutbl); - e = self.mk_expr(lo, hi, slice) - } - // e[..e] - token::DotDot => { - self.bump(); - match self.token { - // e[..] - token::CloseDelim(token::Bracket) => { - self.bump(); - hi = self.span.hi; - let slice = self.mk_slice(e, None, None, mutbl); - e = self.mk_expr(lo, hi, slice); + let ix = self.parse_expr(); + hi = self.span.hi; + self.commit_expr_expecting(&*ix, token::CloseDelim(token::Bracket)); + let index = self.mk_index(e, ix); + e = self.mk_expr(lo, hi, index) + } - self.span_err(e.span, "incorrect slicing expression: `[..]`"); - self.span_note(e.span, - "use `expr[]` to construct a slice of the whole of expr"); - } - // e[..e] - _ => { - hi = self.span.hi; - let e2 = self.parse_expr(); - self.commit_expr_expecting(&*e2, token::CloseDelim(token::Bracket)); - let slice = self.mk_slice(e, None, Some(e2), mutbl); - e = self.mk_expr(lo, hi, slice) - } - } - } - // e[e] | e[e..] | e[e..e] - _ => { - let ix = self.parse_expr_res(RESTRICTION_NO_DOTS); - match self.token { - // e[e..] | e[e..e] - token::DotDot => { - self.bump(); - let e2 = match self.token { - // e[e..] - token::CloseDelim(token::Bracket) => { - self.bump(); - None - } - // e[e..e] - _ => { - let e2 = self.parse_expr_res(RESTRICTION_NO_DOTS); - self.commit_expr_expecting(&*e2, - token::CloseDelim(token::Bracket)); - Some(e2) - } - }; - hi = self.span.hi; - let slice = self.mk_slice(e, Some(ix), e2, mutbl); - e = self.mk_expr(lo, hi, slice) - } - // e[e] - _ => { - if mutbl == ast::MutMutable { - self.span_err(e.span, - "`mut` keyword is invalid in index expressions"); - } - hi = self.span.hi; - self.commit_expr_expecting(&*ix, token::CloseDelim(token::Bracket)); - let index = self.mk_index(e, ix); - e = self.mk_expr(lo, hi, index) - } - } - } + if found_dotdot { + self.span_err(e.span, "incorrect slicing expression: `[..]`"); + self.span_note(e.span, + "use `&expr[]` to construct a slice of the whole of expr"); } } @@ -2711,7 +2652,7 @@ impl<'a> Parser<'a> { }; let token_str = p.this_token_to_string(); p.fatal(format!("incorrect close delimiter: `{}`", - token_str)[]) + token_str).index(&FullRange)) }, /* we ought to allow different depths of unquotation */ token::Dollar if p.quote_depth > 0u => { @@ -2729,7 +2670,7 @@ impl<'a> Parser<'a> { let seq = match seq { Spanned { node, .. 
} => node, }; - let name_num = macro_parser::count_names(seq[]); + let name_num = macro_parser::count_names(seq.index(&FullRange)); TtSequence(mk_sp(sp.lo, p.span.hi), Rc::new(SequenceRepetition { tts: seq, @@ -2890,7 +2831,7 @@ impl<'a> Parser<'a> { let this_token_to_string = self.this_token_to_string(); self.span_err(span, format!("expected expression, found `{}`", - this_token_to_string)[]); + this_token_to_string).index(&FullRange)); let box_span = mk_sp(lo, self.last_span.hi); self.span_help(box_span, "perhaps you meant `box() (foo)` instead?"); @@ -3273,7 +3214,7 @@ impl<'a> Parser<'a> { if self.token != token::CloseDelim(token::Brace) { let token_str = self.this_token_to_string(); self.fatal(format!("expected `{}`, found `{}`", "}", - token_str)[]) + token_str).index(&FullRange)) } etc = true; break; @@ -3294,7 +3235,7 @@ impl<'a> Parser<'a> { BindByRef(..) | BindByValue(MutMutable) => { let token_str = self.this_token_to_string(); self.fatal(format!("unexpected `{}`", - token_str)[]) + token_str).index(&FullRange)) } _ => {} } @@ -3577,7 +3518,7 @@ impl<'a> Parser<'a> { let span = self.span; let tok_str = self.this_token_to_string(); self.span_fatal(span, - format!("expected identifier, found `{}`", tok_str)[]); + format!("expected identifier, found `{}`", tok_str).index(&FullRange)); } let ident = self.parse_ident(); let last_span = self.last_span; @@ -3674,7 +3615,7 @@ impl<'a> Parser<'a> { let lo = self.span.lo; if self.token.is_keyword(keywords::Let) { - check_expected_item(self, item_attrs[]); + check_expected_item(self, item_attrs.index(&FullRange)); self.expect_keyword(keywords::Let); let decl = self.parse_let(); P(spanned(lo, decl.span.hi, StmtDecl(decl, ast::DUMMY_NODE_ID))) @@ -3683,7 +3624,7 @@ impl<'a> Parser<'a> { && self.look_ahead(1, |t| *t == token::Not) { // it's a macro invocation: - check_expected_item(self, item_attrs[]); + check_expected_item(self, item_attrs.index(&FullRange)); // Potential trouble: if we allow macros with paths instead of // idents, we'd need to look ahead past the whole path here... @@ -3711,7 +3652,7 @@ impl<'a> Parser<'a> { let tok_str = self.this_token_to_string(); self.fatal(format!("expected {}`(` or `{{`, found `{}`", ident_str, - tok_str)[]) + tok_str).index(&FullRange)) }, }; @@ -3759,7 +3700,7 @@ impl<'a> Parser<'a> { } } else { let found_attrs = !item_attrs.is_empty(); - let item_err = Parser::expected_item_err(item_attrs[]); + let item_err = Parser::expected_item_err(item_attrs.index(&FullRange)); match self.parse_item_or_view_item(item_attrs, false) { IoviItem(i) => { let hi = i.span.hi; @@ -3803,7 +3744,7 @@ impl<'a> Parser<'a> { let sp = self.span; let tok = self.this_token_to_string(); self.span_fatal_help(sp, - format!("expected `{{`, found `{}`", tok)[], + format!("expected `{{`, found `{}`", tok).index(&FullRange), "place this code inside a block"); } @@ -3857,13 +3798,13 @@ impl<'a> Parser<'a> { while self.token != token::CloseDelim(token::Brace) { // parsing items even when they're not allowed lets us give // better error messages and recover more gracefully. 
- attributes_box.push_all(self.parse_outer_attributes()[]); + attributes_box.push_all(self.parse_outer_attributes().index(&FullRange)); match self.token { token::Semi => { if !attributes_box.is_empty() { let last_span = self.last_span; self.span_err(last_span, - Parser::expected_item_err(attributes_box[])); + Parser::expected_item_err(attributes_box.index(&FullRange))); attributes_box = Vec::new(); } self.bump(); // empty @@ -3955,7 +3896,7 @@ impl<'a> Parser<'a> { if !attributes_box.is_empty() { let last_span = self.last_span; self.span_err(last_span, - Parser::expected_item_err(attributes_box[])); + Parser::expected_item_err(attributes_box.index(&FullRange))); } let hi = self.span.hi; @@ -4399,7 +4340,7 @@ impl<'a> Parser<'a> { _ => { let token_str = self.this_token_to_string(); self.fatal(format!("expected `self`, found `{}`", - token_str)[]) + token_str).index(&FullRange)) } } } @@ -4553,7 +4494,7 @@ impl<'a> Parser<'a> { _ => { let token_str = self.this_token_to_string(); self.fatal(format!("expected `,` or `)`, found `{}`", - token_str)[]) + token_str).index(&FullRange)) } } } @@ -4729,7 +4670,7 @@ impl<'a> Parser<'a> { let (inner_attrs, body) = self.parse_inner_attrs_and_block(); let body_span = body.span; let mut new_attrs = attrs; - new_attrs.push_all(inner_attrs[]); + new_attrs.push_all(inner_attrs.index(&FullRange)); (ast::MethDecl(ident, generics, abi, @@ -4948,7 +4889,7 @@ impl<'a> Parser<'a> { if fields.len() == 0 { self.fatal(format!("unit-like struct definition should be \ written as `struct {};`", - token::get_ident(class_name.clone()))[]); + token::get_ident(class_name.clone())).index(&FullRange)); } self.bump(); @@ -4956,7 +4897,7 @@ impl<'a> Parser<'a> { let token_str = self.this_token_to_string(); self.fatal(format!("expected `where`, or `{}` after struct \ name, found `{}`", "{", - token_str)[]); + token_str).index(&FullRange)); } fields @@ -4987,7 +4928,7 @@ impl<'a> Parser<'a> { if fields.len() == 0 { self.fatal(format!("unit-like struct definition should be \ written as `struct {};`", - token::get_ident(class_name.clone()))[]); + token::get_ident(class_name.clone())).index(&FullRange)); } self.parse_where_clause(generics); @@ -5002,7 +4943,7 @@ impl<'a> Parser<'a> { } else { let token_str = self.this_token_to_string(); self.fatal(format!("expected `where`, `{}`, `(`, or `;` after struct \ - name, found `{}`", "{", token_str)[]); + name, found `{}`", "{", token_str).index(&FullRange)); } } @@ -5022,7 +4963,7 @@ impl<'a> Parser<'a> { let token_str = self.this_token_to_string(); self.span_fatal_help(span, format!("expected `,`, or `}}`, found `{}`", - token_str)[], + token_str).index(&FullRange), "struct fields should be separated by commas") } } @@ -5109,7 +5050,7 @@ impl<'a> Parser<'a> { let mut attrs = self.parse_outer_attributes(); if first { let mut tmp = attrs_remaining.clone(); - tmp.push_all(attrs[]); + tmp.push_all(attrs.index(&FullRange)); attrs = tmp; first = false; } @@ -5126,7 +5067,7 @@ impl<'a> Parser<'a> { _ => { let token_str = self.this_token_to_string(); self.fatal(format!("expected item, found `{}`", - token_str)[]) + token_str).index(&FullRange)) } } } @@ -5135,7 +5076,7 @@ impl<'a> Parser<'a> { // We parsed attributes for the first item but didn't find it let last_span = self.last_span; self.span_err(last_span, - Parser::expected_item_err(attrs_remaining[])); + Parser::expected_item_err(attrs_remaining.index(&FullRange))); } ast::Mod { @@ -5205,7 +5146,7 @@ impl<'a> Parser<'a> { -> (ast::Item_, Vec ) { let mut prefix = 
Path::new(self.sess.span_diagnostic.cm.span_to_filename(self.span)); prefix.pop(); - let mod_path = Path::new(".").join_many(self.mod_path_stack[]); + let mod_path = Path::new(".").join_many(self.mod_path_stack.index(&FullRange)); let dir_path = prefix.join(&mod_path); let mod_string = token::get_ident(id); let (file_path, owns_directory) = match ::attr::first_attr_value_str_by_name( @@ -5215,8 +5156,8 @@ impl<'a> Parser<'a> { let mod_name = mod_string.get().to_string(); let default_path_str = format!("{}.rs", mod_name); let secondary_path_str = format!("{}/mod.rs", mod_name); - let default_path = dir_path.join(default_path_str[]); - let secondary_path = dir_path.join(secondary_path_str[]); + let default_path = dir_path.join(default_path_str.index(&FullRange)); + let secondary_path = dir_path.join(secondary_path_str.index(&FullRange)); let default_exists = default_path.exists(); let secondary_exists = secondary_path.exists(); @@ -5231,13 +5172,13 @@ impl<'a> Parser<'a> { format!("maybe move this module `{0}` \ to its own directory via \ `{0}/mod.rs`", - this_module)[]); + this_module).index(&FullRange)); if default_exists || secondary_exists { self.span_note(id_sp, format!("... or maybe `use` the module \ `{}` instead of possibly \ redeclaring it", - mod_name)[]); + mod_name).index(&FullRange)); } self.abort_if_errors(); } @@ -5248,12 +5189,12 @@ impl<'a> Parser<'a> { (false, false) => { self.span_fatal_help(id_sp, format!("file not found for module `{}`", - mod_name)[], + mod_name).index(&FullRange), format!("name the file either {} or {} inside \ the directory {}", default_path_str, secondary_path_str, - dir_path.display())[]); + dir_path.display()).index(&FullRange)); } (true, true) => { self.span_fatal_help( @@ -5262,7 +5203,7 @@ impl<'a> Parser<'a> { and {}", mod_name, default_path_str, - secondary_path_str)[], + secondary_path_str).index(&FullRange), "delete or rename one of them to remove the ambiguity"); } } @@ -5284,11 +5225,11 @@ impl<'a> Parser<'a> { let mut err = String::from_str("circular modules: "); let len = included_mod_stack.len(); for p in included_mod_stack.slice(i, len).iter() { - err.push_str(p.display().as_cow()[]); + err.push_str(p.display().as_cow().index(&FullRange)); err.push_str(" -> "); } - err.push_str(path.display().as_cow()[]); - self.span_fatal(id_sp, err[]); + err.push_str(path.display().as_cow().index(&FullRange)); + self.span_fatal(id_sp, err.index(&FullRange)); } None => () } @@ -5369,7 +5310,7 @@ impl<'a> Parser<'a> { if !attrs_remaining.is_empty() { let last_span = self.last_span; self.span_err(last_span, - Parser::expected_item_err(attrs_remaining[])); + Parser::expected_item_err(attrs_remaining.index(&FullRange))); } assert!(self.token == token::CloseDelim(token::Brace)); ast::ForeignMod { @@ -5410,7 +5351,7 @@ impl<'a> Parser<'a> { self.span_help(span, format!("perhaps you meant to enclose the crate name `{}` in \ a string?", - the_ident.as_str())[]); + the_ident.as_str()).index(&FullRange)); None } else { None @@ -5436,7 +5377,7 @@ impl<'a> Parser<'a> { self.span_fatal(span, format!("expected extern crate name but \ found `{}`", - token_str)[]); + token_str).index(&FullRange)); } }; @@ -5534,7 +5475,7 @@ impl<'a> Parser<'a> { self.span_err(start_span, format!("unit-like struct variant should be written \ without braces, as `{},`", - token::get_ident(ident))[]); + token::get_ident(ident)).index(&FullRange)); } kind = StructVariantKind(struct_def); } else if self.check(&token::OpenDelim(token::Paren)) { @@ -5619,7 +5560,7 @@ impl<'a> Parser<'a> { 
format!("illegal ABI: expected one of [{}], \ found `{}`", abi::all_names().connect(", "), - the_string)[]); + the_string).index(&FullRange)); None } } @@ -5681,7 +5622,7 @@ impl<'a> Parser<'a> { format!("`extern mod` is obsolete, use \ `extern crate` instead \ to refer to external \ - crates.")[]) + crates.").index(&FullRange)) } return self.parse_item_extern_crate(lo, visibility, attrs); } @@ -5709,7 +5650,7 @@ impl<'a> Parser<'a> { let token_str = self.this_token_to_string(); self.span_fatal(span, format!("expected `{}` or `fn`, found `{}`", "{", - token_str)[]); + token_str).index(&FullRange)); } if self.eat_keyword(keywords::Virtual) { @@ -5822,7 +5763,7 @@ impl<'a> Parser<'a> { if self.eat_keyword(keywords::Mod) { // MODULE ITEM let (ident, item_, extra_attrs) = - self.parse_item_mod(attrs[]); + self.parse_item_mod(attrs.index(&FullRange)); let last_span = self.last_span; let item = self.mk_item(lo, last_span.hi, @@ -6162,7 +6103,7 @@ impl<'a> Parser<'a> { macros_allowed: bool) -> ParsedItemsAndViewItems { let mut attrs = first_item_attrs; - attrs.push_all(self.parse_outer_attributes()[]); + attrs.push_all(self.parse_outer_attributes().index(&FullRange)); // First, parse view items. let mut view_items : Vec = Vec::new(); let mut items = Vec::new(); @@ -6244,7 +6185,7 @@ impl<'a> Parser<'a> { macros_allowed: bool) -> ParsedItemsAndViewItems { let mut attrs = first_item_attrs; - attrs.push_all(self.parse_outer_attributes()[]); + attrs.push_all(self.parse_outer_attributes().index(&FullRange)); let mut foreign_items = Vec::new(); loop { match self.parse_foreign_item(attrs, macros_allowed) { diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 094aacf3207fc..f70ce54bb1c0a 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -479,7 +479,7 @@ macro_rules! declare_special_idents_and_keywords {( $(init_vec.push($si_str);)* $(init_vec.push($sk_str);)* $(init_vec.push($rk_str);)* - interner::StrInterner::prefill(init_vec[]) + interner::StrInterner::prefill(init_vec.index(&FullRange)) } }} @@ -515,66 +515,66 @@ declare_special_idents_and_keywords! 
{ (9, unnamed_field, ""); (10, type_self, "Self"); (11, prelude_import, "prelude_import"); + (12, FullRange, "FullRange"); } pub mod keywords { // These ones are variants of the Keyword enum 'strict: - (12, As, "as"); - (13, Break, "break"); - (14, Crate, "crate"); - (15, Else, "else"); - (16, Enum, "enum"); - (17, Extern, "extern"); - (18, False, "false"); - (19, Fn, "fn"); - (20, For, "for"); - (21, If, "if"); - (22, Impl, "impl"); - (23, In, "in"); - (24, Let, "let"); - (25, Loop, "loop"); - (26, Match, "match"); - (27, Mod, "mod"); - (28, Move, "move"); - (29, Mut, "mut"); - (30, Pub, "pub"); - (31, Ref, "ref"); - (32, Return, "return"); + (13, As, "as"); + (14, Break, "break"); + (15, Crate, "crate"); + (16, Else, "else"); + (17, Enum, "enum"); + (18, Extern, "extern"); + (19, False, "false"); + (20, Fn, "fn"); + (21, For, "for"); + (22, If, "if"); + (23, Impl, "impl"); + (24, In, "in"); + (25, Let, "let"); + (26, Loop, "loop"); + (27, Match, "match"); + (28, Mod, "mod"); + (29, Move, "move"); + (30, Mut, "mut"); + (31, Pub, "pub"); + (32, Ref, "ref"); + (33, Return, "return"); // Static and Self are also special idents (prefill de-dupes) (super::STATIC_KEYWORD_NAME_NUM, Static, "static"); (super::SELF_KEYWORD_NAME_NUM, Self, "self"); - (33, Struct, "struct"); + (34, Struct, "struct"); (super::SUPER_KEYWORD_NAME_NUM, Super, "super"); - (34, True, "true"); - (35, Trait, "trait"); - (36, Type, "type"); - (37, Unsafe, "unsafe"); - (38, Use, "use"); - (39, Virtual, "virtual"); - (40, While, "while"); - (41, Continue, "continue"); - (42, Proc, "proc"); - (43, Box, "box"); - (44, Const, "const"); - (45, Where, "where"); - + (35, True, "true"); + (36, Trait, "trait"); + (37, Type, "type"); + (38, Unsafe, "unsafe"); + (39, Use, "use"); + (40, Virtual, "virtual"); + (41, While, "while"); + (42, Continue, "continue"); + (43, Proc, "proc"); + (44, Box, "box"); + (45, Const, "const"); + (46, Where, "where"); 'reserved: - (46, Alignof, "alignof"); - (47, Be, "be"); - (48, Offsetof, "offsetof"); - (49, Priv, "priv"); - (50, Pure, "pure"); - (51, Sizeof, "sizeof"); - (52, Typeof, "typeof"); - (53, Unsized, "unsized"); - (54, Yield, "yield"); - (55, Do, "do"); - (56, Abstract, "abstract"); - (57, Final, "final"); - (58, Override, "override"); - (59, Macro, "macro"); + (47, Alignof, "alignof"); + (48, Be, "be"); + (49, Offsetof, "offsetof"); + (50, Priv, "priv"); + (51, Pure, "pure"); + (52, Sizeof, "sizeof"); + (53, Typeof, "typeof"); + (54, Unsized, "unsized"); + (55, Yield, "yield"); + (56, Do, "do"); + (57, Abstract, "abstract"); + (58, Final, "final"); + (59, Override, "override"); + (60, Macro, "macro"); } } @@ -628,7 +628,7 @@ impl InternedString { #[inline] pub fn get<'a>(&'a self) -> &'a str { - self.string[] + self.string.index(&FullRange) } } @@ -652,29 +652,29 @@ impl BytesContainer for InternedString { impl fmt::Show for InternedString { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}", self.string[]) + write!(f, "{}", self.string.index(&FullRange)) } } impl<'a> PartialEq<&'a str> for InternedString { #[inline(always)] fn eq(&self, other: & &'a str) -> bool { - PartialEq::eq(self.string[], *other) + PartialEq::eq(self.string.index(&FullRange), *other) } #[inline(always)] fn ne(&self, other: & &'a str) -> bool { - PartialEq::ne(self.string[], *other) + PartialEq::ne(self.string.index(&FullRange), *other) } } impl<'a> PartialEq for &'a str { #[inline(always)] fn eq(&self, other: &InternedString) -> bool { - PartialEq::eq(*self, other.string[]) + PartialEq::eq(*self, 
other.string.index(&FullRange)) } #[inline(always)] fn ne(&self, other: &InternedString) -> bool { - PartialEq::ne(*self, other.string[]) + PartialEq::ne(*self, other.string.index(&FullRange)) } } @@ -682,29 +682,28 @@ impl<'a> PartialEq for &'a str { impl, E> Decodable for InternedString { fn decode(d: &mut D) -> Result { Ok(get_name(get_ident_interner().intern( - try!(d.read_str())[]))) + try!(d.read_str()).index(&FullRange)))) } } #[cfg(not(stage0))] impl Decodable for InternedString { fn decode(d: &mut D) -> Result { - Ok(get_name(get_ident_interner().intern( - try!(d.read_str())[]))) + Ok(get_name(get_ident_interner().intern(try!(d.read_str()).index(&FullRange)))) } } #[cfg(stage0)] impl, E> Encodable for InternedString { fn encode(&self, s: &mut S) -> Result<(), E> { - s.emit_str(self.string[]) + s.emit_str(self.string.index(&FullRange)) } } #[cfg(not(stage0))] impl Encodable for InternedString { fn encode(&self, s: &mut S) -> Result<(), S::Error> { - s.emit_str(self.string[]) + s.emit_str(self.string.index(&FullRange)) } } diff --git a/src/libsyntax/print/pp.rs b/src/libsyntax/print/pp.rs index 11cefc8719bab..52306075c21ec 100644 --- a/src/libsyntax/print/pp.rs +++ b/src/libsyntax/print/pp.rs @@ -140,7 +140,7 @@ pub fn buf_str(toks: Vec, } s.push_str(format!("{}={}", szs[i], - tok_str(toks[i].clone()))[]); + tok_str(toks[i].clone())).index(&FullRange)); i += 1u; i %= n; } @@ -602,7 +602,7 @@ impl Printer { assert_eq!(l, len); // assert!(l <= space); self.space -= len; - self.print_str(s[]) + self.print_str(s.index(&FullRange)) } Eof => { // Eof should never get here. diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 402583b60fae5..87dcc9e70f461 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -114,7 +114,7 @@ pub fn print_crate<'a>(cm: &'a CodeMap, out, ann, is_expanded); - try!(s.print_mod(&krate.module, krate.attrs[])); + try!(s.print_mod(&krate.module, krate.attrs.index(&FullRange))); try!(s.print_remaining_comments()); eof(&mut s.s) } @@ -580,7 +580,7 @@ impl<'a> State<'a> { pub fn synth_comment(&mut self, text: String) -> IoResult<()> { try!(word(&mut self.s, "/*")); try!(space(&mut self.s)); - try!(word(&mut self.s, text[])); + try!(word(&mut self.s, text.index(&FullRange))); try!(space(&mut self.s)); word(&mut self.s, "*/") } @@ -685,7 +685,7 @@ impl<'a> State<'a> { } ast::TyTup(ref elts) => { try!(self.popen()); - try!(self.commasep(Inconsistent, elts[], + try!(self.commasep(Inconsistent, elts.index(&FullRange), |s, ty| s.print_type(&**ty))); if elts.len() == 1 { try!(word(&mut self.s, ",")); @@ -721,10 +721,10 @@ impl<'a> State<'a> { } ast::TyObjectSum(ref ty, ref bounds) => { try!(self.print_type(&**ty)); - try!(self.print_bounds("+", bounds[])); + try!(self.print_bounds("+", bounds.index(&FullRange))); } ast::TyPolyTraitRef(ref bounds) => { - try!(self.print_bounds("", bounds[])); + try!(self.print_bounds("", bounds.index(&FullRange))); } ast::TyQPath(ref qpath) => { try!(word(&mut self.s, "<")); @@ -759,7 +759,7 @@ impl<'a> State<'a> { item: &ast::ForeignItem) -> IoResult<()> { try!(self.hardbreak_if_not_bol()); try!(self.maybe_print_comment(item.span.lo)); - try!(self.print_outer_attributes(item.attrs[])); + try!(self.print_outer_attributes(item.attrs.index(&FullRange))); match item.node { ast::ForeignItemFn(ref decl, ref generics) => { try!(self.print_fn(&**decl, None, abi::Rust, item.ident, generics, @@ -770,7 +770,7 @@ impl<'a> State<'a> { } ast::ForeignItemStatic(ref t, m) => { 
try!(self.head(visibility_qualified(item.vis, - "static")[])); + "static").index(&FullRange))); if m { try!(self.word_space("mut")); } @@ -787,7 +787,7 @@ impl<'a> State<'a> { fn print_associated_type(&mut self, typedef: &ast::AssociatedType) -> IoResult<()> { - try!(self.print_outer_attributes(typedef.attrs[])); + try!(self.print_outer_attributes(typedef.attrs.index(&FullRange))); try!(self.word_space("type")); try!(self.print_ty_param(&typedef.ty_param)); word(&mut self.s, ";") @@ -806,12 +806,12 @@ impl<'a> State<'a> { pub fn print_item(&mut self, item: &ast::Item) -> IoResult<()> { try!(self.hardbreak_if_not_bol()); try!(self.maybe_print_comment(item.span.lo)); - try!(self.print_outer_attributes(item.attrs[])); + try!(self.print_outer_attributes(item.attrs.index(&FullRange))); try!(self.ann.pre(self, NodeItem(item))); match item.node { ast::ItemStatic(ref ty, m, ref expr) => { try!(self.head(visibility_qualified(item.vis, - "static")[])); + "static").index(&FullRange))); if m == ast::MutMutable { try!(self.word_space("mut")); } @@ -828,7 +828,7 @@ impl<'a> State<'a> { } ast::ItemConst(ref ty, ref expr) => { try!(self.head(visibility_qualified(item.vis, - "const")[])); + "const").index(&FullRange))); try!(self.print_ident(item.ident)); try!(self.word_space(":")); try!(self.print_type(&**ty)); @@ -851,29 +851,29 @@ impl<'a> State<'a> { item.vis )); try!(word(&mut self.s, " ")); - try!(self.print_block_with_attrs(&**body, item.attrs[])); + try!(self.print_block_with_attrs(&**body, item.attrs.index(&FullRange))); } ast::ItemMod(ref _mod) => { try!(self.head(visibility_qualified(item.vis, - "mod")[])); + "mod").index(&FullRange))); try!(self.print_ident(item.ident)); try!(self.nbsp()); try!(self.bopen()); - try!(self.print_mod(_mod, item.attrs[])); + try!(self.print_mod(_mod, item.attrs.index(&FullRange))); try!(self.bclose(item.span)); } ast::ItemForeignMod(ref nmod) => { try!(self.head("extern")); - try!(self.word_nbsp(nmod.abi.to_string()[])); + try!(self.word_nbsp(nmod.abi.to_string().index(&FullRange))); try!(self.bopen()); - try!(self.print_foreign_mod(nmod, item.attrs[])); + try!(self.print_foreign_mod(nmod, item.attrs.index(&FullRange))); try!(self.bclose(item.span)); } ast::ItemTy(ref ty, ref params) => { try!(self.ibox(indent_unit)); try!(self.ibox(0u)); try!(self.word_nbsp(visibility_qualified(item.vis, - "type")[])); + "type").index(&FullRange))); try!(self.print_ident(item.ident)); try!(self.print_generics(params)); try!(self.end()); // end the inner ibox @@ -895,7 +895,7 @@ impl<'a> State<'a> { )); } ast::ItemStruct(ref struct_def, ref generics) => { - try!(self.head(visibility_qualified(item.vis,"struct")[])); + try!(self.head(visibility_qualified(item.vis,"struct").index(&FullRange))); try!(self.print_struct(&**struct_def, generics, item.ident, item.span)); } @@ -936,7 +936,7 @@ impl<'a> State<'a> { try!(space(&mut self.s)); try!(self.bopen()); - try!(self.print_inner_attributes(item.attrs[])); + try!(self.print_inner_attributes(item.attrs.index(&FullRange))); for impl_item in impl_items.iter() { match *impl_item { ast::MethodImplItem(ref meth) => { @@ -967,7 +967,7 @@ impl<'a> State<'a> { real_bounds.push(b); } } - try!(self.print_bounds(":", real_bounds[])); + try!(self.print_bounds(":", real_bounds.index(&FullRange))); try!(self.print_where_clause(generics)); try!(word(&mut self.s, " ")); try!(self.bopen()); @@ -985,7 +985,7 @@ impl<'a> State<'a> { try!(self.print_ident(item.ident)); try!(self.cbox(indent_unit)); try!(self.popen()); - try!(self.print_tts(tts[])); + 
try!(self.print_tts(tts.index(&FullRange))); try!(self.pclose()); try!(word(&mut self.s, ";")); try!(self.end()); @@ -1019,12 +1019,12 @@ impl<'a> State<'a> { generics: &ast::Generics, ident: ast::Ident, span: codemap::Span, visibility: ast::Visibility) -> IoResult<()> { - try!(self.head(visibility_qualified(visibility, "enum")[])); + try!(self.head(visibility_qualified(visibility, "enum").index(&FullRange))); try!(self.print_ident(ident)); try!(self.print_generics(generics)); try!(self.print_where_clause(generics)); try!(space(&mut self.s)); - self.print_variants(enum_definition.variants[], span) + self.print_variants(enum_definition.variants.index(&FullRange), span) } pub fn print_variants(&mut self, @@ -1034,7 +1034,7 @@ impl<'a> State<'a> { for v in variants.iter() { try!(self.space_if_not_bol()); try!(self.maybe_print_comment(v.span.lo)); - try!(self.print_outer_attributes(v.node.attrs[])); + try!(self.print_outer_attributes(v.node.attrs.index(&FullRange))); try!(self.ibox(indent_unit)); try!(self.print_variant(&**v)); try!(word(&mut self.s, ",")); @@ -1062,7 +1062,7 @@ impl<'a> State<'a> { if !struct_def.fields.is_empty() { try!(self.popen()); try!(self.commasep( - Inconsistent, struct_def.fields[], + Inconsistent, struct_def.fields.index(&FullRange), |s, field| { match field.node.kind { ast::NamedField(..) => panic!("unexpected named field"), @@ -1092,7 +1092,7 @@ impl<'a> State<'a> { ast::NamedField(ident, visibility) => { try!(self.hardbreak_if_not_bol()); try!(self.maybe_print_comment(field.span.lo)); - try!(self.print_outer_attributes(field.node.attrs[])); + try!(self.print_outer_attributes(field.node.attrs.index(&FullRange))); try!(self.print_visibility(visibility)); try!(self.print_ident(ident)); try!(self.word_nbsp(":")); @@ -1116,7 +1116,7 @@ impl<'a> State<'a> { pub fn print_tt(&mut self, tt: &ast::TokenTree) -> IoResult<()> { match *tt { ast::TtToken(_, ref tk) => { - try!(word(&mut self.s, token_to_string(tk)[])); + try!(word(&mut self.s, token_to_string(tk).index(&FullRange))); match *tk { parse::token::DocComment(..) 
=> { hardbreak(&mut self.s) @@ -1125,11 +1125,11 @@ impl<'a> State<'a> { } } ast::TtDelimited(_, ref delimed) => { - try!(word(&mut self.s, token_to_string(&delimed.open_token())[])); + try!(word(&mut self.s, token_to_string(&delimed.open_token()).index(&FullRange))); try!(space(&mut self.s)); - try!(self.print_tts(delimed.tts[])); + try!(self.print_tts(delimed.tts.index(&FullRange))); try!(space(&mut self.s)); - word(&mut self.s, token_to_string(&delimed.close_token())[]) + word(&mut self.s, token_to_string(&delimed.close_token()).index(&FullRange)) }, ast::TtSequence(_, ref seq) => { try!(word(&mut self.s, "$(")); @@ -1139,7 +1139,7 @@ impl<'a> State<'a> { try!(word(&mut self.s, ")")); match seq.separator { Some(ref tk) => { - try!(word(&mut self.s, token_to_string(tk)[])); + try!(word(&mut self.s, token_to_string(tk).index(&FullRange))); } None => {}, } @@ -1170,7 +1170,7 @@ impl<'a> State<'a> { if !args.is_empty() { try!(self.popen()); try!(self.commasep(Consistent, - args[], + args.index(&FullRange), |s, arg| s.print_type(&*arg.ty))); try!(self.pclose()); } @@ -1194,7 +1194,7 @@ impl<'a> State<'a> { pub fn print_ty_method(&mut self, m: &ast::TypeMethod) -> IoResult<()> { try!(self.hardbreak_if_not_bol()); try!(self.maybe_print_comment(m.span.lo)); - try!(self.print_outer_attributes(m.attrs[])); + try!(self.print_outer_attributes(m.attrs.index(&FullRange))); try!(self.print_ty_fn(None, None, m.unsafety, @@ -1226,7 +1226,7 @@ impl<'a> State<'a> { pub fn print_method(&mut self, meth: &ast::Method) -> IoResult<()> { try!(self.hardbreak_if_not_bol()); try!(self.maybe_print_comment(meth.span.lo)); - try!(self.print_outer_attributes(meth.attrs[])); + try!(self.print_outer_attributes(meth.attrs.index(&FullRange))); match meth.node { ast::MethDecl(ident, ref generics, @@ -1244,7 +1244,7 @@ impl<'a> State<'a> { Some(&explicit_self.node), vis)); try!(word(&mut self.s, " ")); - self.print_block_with_attrs(&**body, meth.attrs[]) + self.print_block_with_attrs(&**body, meth.attrs.index(&FullRange)) }, ast::MethMac(codemap::Spanned { node: ast::MacInvocTT(ref pth, ref tts, _), ..}) => { @@ -1253,7 +1253,7 @@ impl<'a> State<'a> { try!(word(&mut self.s, "! 
")); try!(self.cbox(indent_unit)); try!(self.popen()); - try!(self.print_tts(tts[])); + try!(self.print_tts(tts.index(&FullRange))); try!(self.pclose()); try!(word(&mut self.s, ";")); self.end() @@ -1520,7 +1520,7 @@ impl<'a> State<'a> { ast::ExprVec(ref exprs) => { try!(self.ibox(indent_unit)); try!(word(&mut self.s, "[")); - try!(self.commasep_exprs(Inconsistent, exprs[])); + try!(self.commasep_exprs(Inconsistent, exprs.index(&FullRange))); try!(word(&mut self.s, "]")); try!(self.end()); } @@ -1537,36 +1537,38 @@ impl<'a> State<'a> { ast::ExprStruct(ref path, ref fields, ref wth) => { try!(self.print_path(path, true)); - try!(word(&mut self.s, "{")); - try!(self.commasep_cmnt( - Consistent, - fields[], - |s, field| { - try!(s.ibox(indent_unit)); - try!(s.print_ident(field.ident.node)); - try!(s.word_space(":")); - try!(s.print_expr(&*field.expr)); - s.end() - }, - |f| f.span)); - match *wth { - Some(ref expr) => { - try!(self.ibox(indent_unit)); - if !fields.is_empty() { - try!(word(&mut self.s, ",")); - try!(space(&mut self.s)); + if !(fields.is_empty() && wth.is_none()) { + try!(word(&mut self.s, "{")); + try!(self.commasep_cmnt( + Consistent, + fields.index(&FullRange), + |s, field| { + try!(s.ibox(indent_unit)); + try!(s.print_ident(field.ident.node)); + try!(s.word_space(":")); + try!(s.print_expr(&*field.expr)); + s.end() + }, + |f| f.span)); + match *wth { + Some(ref expr) => { + try!(self.ibox(indent_unit)); + if !fields.is_empty() { + try!(word(&mut self.s, ",")); + try!(space(&mut self.s)); + } + try!(word(&mut self.s, "..")); + try!(self.print_expr(&**expr)); + try!(self.end()); } - try!(word(&mut self.s, "..")); - try!(self.print_expr(&**expr)); - try!(self.end()); + _ => try!(word(&mut self.s, ",")), } - _ => try!(word(&mut self.s, ",")) + try!(word(&mut self.s, "}")); } - try!(word(&mut self.s, "}")); } ast::ExprTup(ref exprs) => { try!(self.popen()); - try!(self.commasep_exprs(Inconsistent, exprs[])); + try!(self.commasep_exprs(Inconsistent, exprs.index(&FullRange))); if exprs.len() == 1 { try!(word(&mut self.s, ",")); } @@ -1574,7 +1576,7 @@ impl<'a> State<'a> { } ast::ExprCall(ref func, ref args) => { try!(self.print_expr_maybe_paren(&**func)); - try!(self.print_call_post(args[])); + try!(self.print_call_post(args.index(&FullRange))); } ast::ExprMethodCall(ident, ref tys, ref args) => { let base_args = args.slice_from(1); @@ -1583,7 +1585,7 @@ impl<'a> State<'a> { try!(self.print_ident(ident.node)); if tys.len() > 0u { try!(word(&mut self.s, "::<")); - try!(self.commasep(Inconsistent, tys[], + try!(self.commasep(Inconsistent, tys.index(&FullRange), |s, ty| s.print_type(&**ty))); try!(word(&mut self.s, ">")); } @@ -1780,11 +1782,11 @@ impl<'a> State<'a> { try!(self.print_string(a.asm.get(), a.asm_str_style)); try!(self.word_space(":")); - try!(self.commasep(Inconsistent, a.outputs[], + try!(self.commasep(Inconsistent, a.outputs.index(&FullRange), |s, &(ref co, ref o, is_rw)| { match co.get().slice_shift_char() { Some(('=', operand)) if is_rw => { - try!(s.print_string(format!("+{}", operand)[], + try!(s.print_string(format!("+{}", operand).index(&FullRange), ast::CookedStr)) } _ => try!(s.print_string(co.get(), ast::CookedStr)) @@ -1797,7 +1799,7 @@ impl<'a> State<'a> { try!(space(&mut self.s)); try!(self.word_space(":")); - try!(self.commasep(Inconsistent, a.inputs[], + try!(self.commasep(Inconsistent, a.inputs.index(&FullRange), |s, &(ref co, ref o)| { try!(s.print_string(co.get(), ast::CookedStr)); try!(s.popen()); @@ -1808,7 +1810,7 @@ impl<'a> State<'a> { 
try!(space(&mut self.s)); try!(self.word_space(":")); - try!(self.commasep(Inconsistent, a.clobbers[], + try!(self.commasep(Inconsistent, a.clobbers.index(&FullRange), |s, co| { try!(s.print_string(co.get(), ast::CookedStr)); Ok(()) @@ -1882,7 +1884,7 @@ impl<'a> State<'a> { pub fn print_ident(&mut self, ident: ast::Ident) -> IoResult<()> { if self.encode_idents_with_hygiene { let encoded = ident.encode_with_hygiene(); - try!(word(&mut self.s, encoded[])) + try!(word(&mut self.s, encoded.index(&FullRange))) } else { try!(word(&mut self.s, token::get_ident(ident).get())) } @@ -1890,7 +1892,7 @@ impl<'a> State<'a> { } pub fn print_uint(&mut self, i: uint) -> IoResult<()> { - word(&mut self.s, i.to_string()[]) + word(&mut self.s, i.to_string().index(&FullRange)) } pub fn print_name(&mut self, name: ast::Name) -> IoResult<()> { @@ -1964,7 +1966,7 @@ impl<'a> State<'a> { } try!(self.commasep( Inconsistent, - data.types[], + data.types.index(&FullRange), |s, ty| s.print_type(&**ty))); comma = true; } @@ -1987,7 +1989,7 @@ impl<'a> State<'a> { try!(word(&mut self.s, "(")); try!(self.commasep( Inconsistent, - data.inputs[], + data.inputs.index(&FullRange), |s, ty| s.print_type(&**ty))); try!(word(&mut self.s, ")")); @@ -2040,7 +2042,7 @@ impl<'a> State<'a> { Some(ref args) => { if !args.is_empty() { try!(self.popen()); - try!(self.commasep(Inconsistent, args[], + try!(self.commasep(Inconsistent, args.index(&FullRange), |s, p| s.print_pat(&**p))); try!(self.pclose()); } @@ -2052,7 +2054,7 @@ impl<'a> State<'a> { try!(self.nbsp()); try!(self.word_space("{")); try!(self.commasep_cmnt( - Consistent, fields[], + Consistent, fields.index(&FullRange), |s, f| { try!(s.cbox(indent_unit)); if !f.node.is_shorthand { @@ -2073,7 +2075,7 @@ impl<'a> State<'a> { ast::PatTup(ref elts) => { try!(self.popen()); try!(self.commasep(Inconsistent, - elts[], + elts.index(&FullRange), |s, p| s.print_pat(&**p))); if elts.len() == 1 { try!(word(&mut self.s, ",")); @@ -2101,7 +2103,7 @@ impl<'a> State<'a> { ast::PatVec(ref before, ref slice, ref after) => { try!(word(&mut self.s, "[")); try!(self.commasep(Inconsistent, - before[], + before.index(&FullRange), |s, p| s.print_pat(&**p))); for p in slice.iter() { if !before.is_empty() { try!(self.word_space(",")); } @@ -2115,7 +2117,7 @@ impl<'a> State<'a> { if !after.is_empty() { try!(self.word_space(",")); } } try!(self.commasep(Inconsistent, - after[], + after.index(&FullRange), |s, p| s.print_pat(&**p))); try!(word(&mut self.s, "]")); } @@ -2132,7 +2134,7 @@ impl<'a> State<'a> { } try!(self.cbox(indent_unit)); try!(self.ibox(0u)); - try!(self.print_outer_attributes(arm.attrs[])); + try!(self.print_outer_attributes(arm.attrs.index(&FullRange))); let mut first = true; for p in arm.pats.iter() { if first { @@ -2232,7 +2234,7 @@ impl<'a> State<'a> { // HACK(eddyb) ignore the separately printed self argument. 
let args = if first { - decl.inputs[] + decl.inputs.index(&FullRange) } else { decl.inputs.slice_from(1) }; @@ -2398,7 +2400,7 @@ impl<'a> State<'a> { ints.push(i); } - try!(self.commasep(Inconsistent, ints[], |s, &idx| { + try!(self.commasep(Inconsistent, ints.index(&FullRange), |s, &idx| { if idx < generics.lifetimes.len() { let lifetime = &generics.lifetimes[idx]; s.print_lifetime_def(lifetime) @@ -2415,7 +2417,7 @@ impl<'a> State<'a> { pub fn print_ty_param(&mut self, param: &ast::TyParam) -> IoResult<()> { try!(self.print_ident(param.ident)); - try!(self.print_bounds(":", param.bounds[])); + try!(self.print_bounds(":", param.bounds.index(&FullRange))); match param.default { Some(ref default) => { try!(space(&mut self.s)); @@ -2491,7 +2493,7 @@ impl<'a> State<'a> { try!(word(&mut self.s, name.get())); try!(self.popen()); try!(self.commasep(Consistent, - items[], + items.index(&FullRange), |s, i| s.print_meta_item(&**i))); try!(self.pclose()); } @@ -2527,7 +2529,7 @@ impl<'a> State<'a> { try!(self.print_path(path, false)); try!(word(&mut self.s, "::{")); } - try!(self.commasep(Inconsistent, idents[], |s, w| { + try!(self.commasep(Inconsistent, idents.index(&FullRange), |s, w| { match w.node { ast::PathListIdent { name, .. } => { s.print_ident(name) @@ -2545,7 +2547,7 @@ impl<'a> State<'a> { pub fn print_view_item(&mut self, item: &ast::ViewItem) -> IoResult<()> { try!(self.hardbreak_if_not_bol()); try!(self.maybe_print_comment(item.span.lo)); - try!(self.print_outer_attributes(item.attrs[])); + try!(self.print_outer_attributes(item.attrs.index(&FullRange))); try!(self.print_visibility(item.vis)); match item.node { ast::ViewItemExternCrate(id, ref optional_path, _) => { @@ -2687,7 +2689,7 @@ impl<'a> State<'a> { try!(self.pclose()); } - try!(self.print_bounds(":", bounds[])); + try!(self.print_bounds(":", bounds.index(&FullRange))); try!(self.print_fn_output(decl)); @@ -2746,7 +2748,7 @@ impl<'a> State<'a> { try!(self.maybe_print_comment(lit.span.lo)); match self.next_lit(lit.span.lo) { Some(ref ltrl) => { - return word(&mut self.s, (*ltrl).lit[]); + return word(&mut self.s, (*ltrl).lit.index(&FullRange)); } _ => () } @@ -2756,7 +2758,7 @@ impl<'a> State<'a> { let mut res = String::from_str("b'"); ascii::escape_default(byte, |c| res.push(c as char)); res.push('\''); - word(&mut self.s, res[]) + word(&mut self.s, res.index(&FullRange)) } ast::LitChar(ch) => { let mut res = String::from_str("'"); @@ -2764,27 +2766,27 @@ impl<'a> State<'a> { res.push(c); } res.push('\''); - word(&mut self.s, res[]) + word(&mut self.s, res.index(&FullRange)) } ast::LitInt(i, t) => { match t { ast::SignedIntLit(st, ast::Plus) => { word(&mut self.s, - ast_util::int_ty_to_string(st, Some(i as i64))[]) + ast_util::int_ty_to_string(st, Some(i as i64)).index(&FullRange)) } ast::SignedIntLit(st, ast::Minus) => { let istr = ast_util::int_ty_to_string(st, Some(-(i as i64))); word(&mut self.s, - format!("-{}", istr)[]) + format!("-{}", istr).index(&FullRange)) } ast::UnsignedIntLit(ut) => { - word(&mut self.s, ast_util::uint_ty_to_string(ut, Some(i))[]) + word(&mut self.s, ast_util::uint_ty_to_string(ut, Some(i)).as_slice()) } ast::UnsuffixedIntLit(ast::Plus) => { - word(&mut self.s, format!("{}", i)[]) + word(&mut self.s, format!("{}", i).index(&FullRange)) } ast::UnsuffixedIntLit(ast::Minus) => { - word(&mut self.s, format!("-{}", i)[]) + word(&mut self.s, format!("-{}", i).index(&FullRange)) } } } @@ -2793,7 +2795,7 @@ impl<'a> State<'a> { format!( "{}{}", f.get(), - ast_util::float_ty_to_string(t)[])[]) + 
ast_util::float_ty_to_string(t).index(&FullRange)).index(&FullRange)) } ast::LitFloatUnsuffixed(ref f) => word(&mut self.s, f.get()), ast::LitBool(val) => { @@ -2805,7 +2807,7 @@ impl<'a> State<'a> { ascii::escape_default(ch as u8, |ch| escaped.push(ch as char)); } - word(&mut self.s, format!("b\"{}\"", escaped)[]) + word(&mut self.s, format!("b\"{}\"", escaped).index(&FullRange)) } } } @@ -2846,7 +2848,7 @@ impl<'a> State<'a> { comments::Mixed => { assert_eq!(cmnt.lines.len(), 1u); try!(zerobreak(&mut self.s)); - try!(word(&mut self.s, cmnt.lines[0][])); + try!(word(&mut self.s, cmnt.lines[0].index(&FullRange))); zerobreak(&mut self.s) } comments::Isolated => { @@ -2855,7 +2857,7 @@ impl<'a> State<'a> { // Don't print empty lines because they will end up as trailing // whitespace if !line.is_empty() { - try!(word(&mut self.s, line[])); + try!(word(&mut self.s, line.index(&FullRange))); } try!(hardbreak(&mut self.s)); } @@ -2864,13 +2866,13 @@ impl<'a> State<'a> { comments::Trailing => { try!(word(&mut self.s, " ")); if cmnt.lines.len() == 1u { - try!(word(&mut self.s, cmnt.lines[0][])); + try!(word(&mut self.s, cmnt.lines[0].index(&FullRange))); hardbreak(&mut self.s) } else { try!(self.ibox(0u)); for line in cmnt.lines.iter() { if !line.is_empty() { - try!(word(&mut self.s, line[])); + try!(word(&mut self.s, line.index(&FullRange))); } try!(hardbreak(&mut self.s)); } @@ -2903,7 +2905,7 @@ impl<'a> State<'a> { string=st)) } }; - word(&mut self.s, st[]) + word(&mut self.s, st.index(&FullRange)) } pub fn next_comment(&mut self) -> Option { @@ -2934,7 +2936,7 @@ impl<'a> State<'a> { Some(abi::Rust) => Ok(()), Some(abi) => { try!(self.word_nbsp("extern")); - self.word_nbsp(abi.to_string()[]) + self.word_nbsp(abi.to_string().index(&FullRange)) } None => Ok(()) } @@ -2945,7 +2947,7 @@ impl<'a> State<'a> { match opt_abi { Some(abi) => { try!(self.word_nbsp("extern")); - self.word_nbsp(abi.to_string()[]) + self.word_nbsp(abi.to_string().index(&FullRange)) } None => Ok(()) } @@ -2961,7 +2963,7 @@ impl<'a> State<'a> { if abi != abi::Rust { try!(self.word_nbsp("extern")); - try!(self.word_nbsp(abi.to_string()[])); + try!(self.word_nbsp(abi.to_string().index(&FullRange))); } word(&mut self.s, "fn") diff --git a/src/libsyntax/std_inject.rs b/src/libsyntax/std_inject.rs index 4ef7eb97a2189..daa51203287b3 100644 --- a/src/libsyntax/std_inject.rs +++ b/src/libsyntax/std_inject.rs @@ -40,7 +40,7 @@ pub fn maybe_inject_prelude(krate: ast::Crate) -> ast::Crate { } fn use_std(krate: &ast::Crate) -> bool { - !attr::contains_name(krate.attrs[], "no_std") + !attr::contains_name(krate.attrs.index(&FullRange), "no_std") } fn no_prelude(attrs: &[ast::Attribute]) -> bool { @@ -56,7 +56,7 @@ impl<'a> fold::Folder for StandardLibraryInjector<'a> { // The name to use in `extern crate "name" as std;` let actual_crate_name = match self.alt_std_name { - Some(ref s) => token::intern_and_get_ident(s[]), + Some(ref s) => token::intern_and_get_ident(s.index(&FullRange)), None => token::intern_and_get_ident("std"), }; @@ -104,7 +104,7 @@ impl<'a> fold::Folder for PreludeInjector<'a> { attr::mark_used(&no_std_attr); krate.attrs.push(no_std_attr); - if !no_prelude(krate.attrs[]) { + if !no_prelude(krate.attrs.index(&FullRange)) { // only add `use std::prelude::*;` if there wasn't a // `#![no_implicit_prelude]` at the crate level. // fold_mod() will insert glob path. 
@@ -124,7 +124,7 @@ impl<'a> fold::Folder for PreludeInjector<'a> { } fn fold_item(&mut self, item: P) -> SmallVector> { - if !no_prelude(item.attrs[]) { + if !no_prelude(item.attrs.index(&FullRange)) { // only recur if there wasn't `#![no_implicit_prelude]` // on this item, i.e. this means that the prelude is not // implicitly imported though the whole subtree diff --git a/src/libsyntax/test.rs b/src/libsyntax/test.rs index e480532a41053..0b2c45ee3a779 100644 --- a/src/libsyntax/test.rs +++ b/src/libsyntax/test.rs @@ -73,14 +73,14 @@ pub fn modify_for_testing(sess: &ParseSess, // We generate the test harness when building in the 'test' // configuration, either with the '--test' or '--cfg test' // command line options. - let should_test = attr::contains_name(krate.config[], "test"); + let should_test = attr::contains_name(krate.config.index(&FullRange), "test"); // Check for #[reexport_test_harness_main = "some_name"] which // creates a `use some_name = __test::main;`. This needs to be // unconditional, so that the attribute is still marked as used in // non-test builds. let reexport_test_harness_main = - attr::first_attr_value_str_by_name(krate.attrs[], + attr::first_attr_value_str_by_name(krate.attrs.index(&FullRange), "reexport_test_harness_main"); if should_test { @@ -119,7 +119,7 @@ impl<'a> fold::Folder for TestHarnessGenerator<'a> { self.cx.path.push(ident); } debug!("current path: {}", - ast_util::path_name_i(self.cx.path[])); + ast_util::path_name_i(self.cx.path.index(&FullRange))); if is_test_fn(&self.cx, &*i) || is_bench_fn(&self.cx, &*i) { match i.node { @@ -277,8 +277,8 @@ fn strip_test_functions(krate: ast::Crate) -> ast::Crate { // When not compiling with --test we should not compile the // #[test] functions config::strip_items(krate, |attrs| { - !attr::contains_name(attrs[], "test") && - !attr::contains_name(attrs[], "bench") + !attr::contains_name(attrs.index(&FullRange), "test") && + !attr::contains_name(attrs.index(&FullRange), "bench") }) } @@ -291,7 +291,7 @@ enum HasTestSignature { fn is_test_fn(cx: &TestCtxt, i: &ast::Item) -> bool { - let has_test_attr = attr::contains_name(i.attrs[], "test"); + let has_test_attr = attr::contains_name(i.attrs.index(&FullRange), "test"); fn has_test_signature(i: &ast::Item) -> HasTestSignature { match &i.node { @@ -329,7 +329,7 @@ fn is_test_fn(cx: &TestCtxt, i: &ast::Item) -> bool { } fn is_bench_fn(cx: &TestCtxt, i: &ast::Item) -> bool { - let has_bench_attr = attr::contains_name(i.attrs[], "bench"); + let has_bench_attr = attr::contains_name(i.attrs.index(&FullRange), "bench"); fn has_test_signature(i: &ast::Item) -> bool { match i.node { @@ -384,7 +384,7 @@ We're going to be building a module that looks more or less like: mod __test { extern crate test (name = "test", vers = "..."); fn main() { - test::test_main_static(::os::args()[], tests) + test::test_main_static(::os::args().index(&FullRange), tests) } static tests : &'static [test::TestDescAndFn] = &[ @@ -510,8 +510,8 @@ fn mk_tests(cx: &TestCtxt) -> P { } fn is_test_crate(krate: &ast::Crate) -> bool { - match attr::find_crate_name(krate.attrs[]) { - Some(ref s) if "test" == s.get()[] => true, + match attr::find_crate_name(krate.attrs.index(&FullRange)) { + Some(ref s) if "test" == s.get().index(&FullRange) => true, _ => false } } @@ -551,11 +551,11 @@ fn mk_test_desc_and_fn_rec(cx: &TestCtxt, test: &Test) -> P { // creates $name: $expr let field = |&: name, expr| ecx.field_imm(span, ecx.ident_of(name), expr); - debug!("encoding {}", ast_util::path_name_i(path[])); + 
debug!("encoding {}", ast_util::path_name_i(path.index(&FullRange))); // path to the #[test] function: "foo::bar::baz" - let path_string = ast_util::path_name_i(path[]); - let name_expr = ecx.expr_str(span, token::intern_and_get_ident(path_string[])); + let path_string = ast_util::path_name_i(path.index(&FullRange)); + let name_expr = ecx.expr_str(span, token::intern_and_get_ident(path_string.index(&FullRange))); // self::test::StaticTestName($name_expr) let name_expr = ecx.expr_call(span, diff --git a/src/libsyntax/util/interner.rs b/src/libsyntax/util/interner.rs index 85eea2d9daf27..93de342d487dc 100644 --- a/src/libsyntax/util/interner.rs +++ b/src/libsyntax/util/interner.rs @@ -28,7 +28,7 @@ pub struct Interner { vect: RefCell >, } -// when traits can extend traits, we should extend index to get [] +// when traits can extend traits, we should extend index to get .index(&FullRange) impl Interner { pub fn new() -> Interner { Interner { @@ -109,27 +109,27 @@ impl Eq for RcStr {} impl Ord for RcStr { fn cmp(&self, other: &RcStr) -> Ordering { - self[].cmp(other[]) + self.index(&FullRange).cmp(other.index(&FullRange)) } } impl fmt::Show for RcStr { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { use std::fmt::Show; - self[].fmt(f) + self.index(&FullRange).fmt(f) } } impl BorrowFrom for str { fn borrow_from(owned: &RcStr) -> &str { - owned.string[] + owned.string.index(&FullRange) } } impl Deref for RcStr { type Target = str; - fn deref(&self) -> &str { self.string[] } + fn deref(&self) -> &str { self.string.index(&FullRange) } } /// A StrInterner differs from Interner in that it accepts @@ -139,7 +139,7 @@ pub struct StrInterner { vect: RefCell >, } -/// When traits can extend traits, we should extend index to get [] +/// When traits can extend traits, we should extend index to get .index(&FullRange) impl StrInterner { pub fn new() -> StrInterner { StrInterner { diff --git a/src/libterm/terminfo/mod.rs b/src/libterm/terminfo/mod.rs index 80d195d921846..71c71017850f2 100644 --- a/src/libterm/terminfo/mod.rs +++ b/src/libterm/terminfo/mod.rs @@ -180,7 +180,7 @@ impl TerminfoTerminal { } }; - let entry = open(term[]); + let entry = open(term.index(&FullRange)); if entry.is_err() { if os::getenv("MSYSCON").map_or(false, |s| { "mintty.exe" == s diff --git a/src/libterm/terminfo/parser/compiled.rs b/src/libterm/terminfo/parser/compiled.rs index 5f0111c7d7a84..4617be4f3f63c 100644 --- a/src/libterm/terminfo/parser/compiled.rs +++ b/src/libterm/terminfo/parser/compiled.rs @@ -284,13 +284,13 @@ pub fn parse(file: &mut io::Reader, longnames: bool) // Find the offset of the NUL we want to go to - let nulpos = string_table[offset as uint .. string_table_bytes as uint] + let nulpos = string_table.index(&((offset as uint) .. (string_table_bytes as uint))) .iter().position(|&b| b == 0); match nulpos { Some(len) => { string_map.insert(name.to_string(), - string_table[offset as uint .. - offset as uint + len].to_vec()) + string_table.index(&((offset as uint) .. 
+ (offset as uint + len))).to_vec()) }, None => { return Err("invalid file: missing NUL in \ diff --git a/src/libterm/terminfo/searcher.rs b/src/libterm/terminfo/searcher.rs index 395fac52d8dac..38be68600798e 100644 --- a/src/libterm/terminfo/searcher.rs +++ b/src/libterm/terminfo/searcher.rs @@ -61,13 +61,13 @@ pub fn get_dbpath_for_term(term: &str) -> Option> { for p in dirs_to_search.iter() { if p.exists() { let f = first_char.to_string(); - let newp = p.join_many(&[f[], term]); + let newp = p.join_many(&[f.index(&FullRange), term]); if newp.exists() { return Some(box newp); } // on some installations the dir is named after the hex of the char (e.g. OS X) let f = format!("{:x}", first_char as uint); - let newp = p.join_many(&[f[], term]); + let newp = p.join_many(&[f.index(&FullRange), term]); if newp.exists() { return Some(box newp); } diff --git a/src/libtest/lib.rs b/src/libtest/lib.rs index c417fd94e22eb..76df9ea45610d 100644 --- a/src/libtest/lib.rs +++ b/src/libtest/lib.rs @@ -952,7 +952,7 @@ fn should_sort_failures_before_printing_them() { st.write_failures().unwrap(); let s = match st.out { - Raw(ref m) => String::from_utf8_lossy(m[]), + Raw(ref m) => String::from_utf8_lossy(m.index(&FullRange)), Pretty(_) => unreachable!() }; diff --git a/src/test/bench/shootout-fannkuch-redux.rs b/src/test/bench/shootout-fannkuch-redux.rs index 7dca2b24fc11e..8ce365f765d85 100644 --- a/src/test/bench/shootout-fannkuch-redux.rs +++ b/src/test/bench/shootout-fannkuch-redux.rs @@ -105,7 +105,7 @@ impl Perm { let d = idx / self.fact[i] as i32; self.cnt[i] = d; idx %= self.fact[i] as i32; - for (place, val) in pp.iter_mut().zip(self.perm.p[..i+1].iter()) { + for (place, val) in pp.iter_mut().zip(self.perm.p[..(i+1)].iter()) { *place = (*val) as u8 } diff --git a/src/test/bench/shootout-fasta-redux.rs b/src/test/bench/shootout-fasta-redux.rs index bbbfb0051f99b..9a6152dc13c9d 100644 --- a/src/test/bench/shootout-fasta-redux.rs +++ b/src/test/bench/shootout-fasta-redux.rs @@ -130,7 +130,7 @@ impl<'a, W: Writer> RepeatFasta<'a, W> { copy_memory(buf.as_mut_slice(), alu); let buf_len = buf.len(); copy_memory(buf.slice_mut(alu_len, buf_len), - alu[..LINE_LEN]); + &alu[0..LINE_LEN]); let mut pos = 0; let mut bytes; @@ -206,7 +206,7 @@ impl<'a, W: Writer> RandomFasta<'a, W> { for i in range(0u, chars_left) { buf[i] = self.nextc(); } - self.out.write(buf[..chars_left]) + self.out.write(&buf[0..chars_left]) } } diff --git a/src/test/bench/shootout-fasta.rs b/src/test/bench/shootout-fasta.rs index 9128930651f25..29994f45d3af2 100644 --- a/src/test/bench/shootout-fasta.rs +++ b/src/test/bench/shootout-fasta.rs @@ -99,7 +99,7 @@ fn make_fasta>( } n -= nb; line[nb] = '\n' as u8; - try!(wr.write(line[..nb+1])); + try!(wr.write(&line[..(nb+1)])); } Ok(()) } diff --git a/src/test/bench/shootout-k-nucleotide-pipes.rs b/src/test/bench/shootout-k-nucleotide-pipes.rs index e6ef6a8c8c9ec..39c839ba11403 100644 --- a/src/test/bench/shootout-k-nucleotide-pipes.rs +++ b/src/test/bench/shootout-k-nucleotide-pipes.rs @@ -101,11 +101,11 @@ fn windows_with_carry(bb: &[u8], nn: uint, mut it: F) -> Vec where let len = bb.len(); while ii < len - (nn - 1u) { - it(bb[ii..ii+nn]); + it(&bb[ii..(ii+nn)]); ii += 1u; } - return bb[len - (nn - 1u)..len].to_vec(); + return bb[(len - (nn - 1u))..len].to_vec(); } fn make_sequence_processor(sz: uint, diff --git a/src/test/bench/shootout-k-nucleotide.rs b/src/test/bench/shootout-k-nucleotide.rs index 28d7488c9bf8f..4f71ea8cbe997 100644 --- a/src/test/bench/shootout-k-nucleotide.rs +++ 
b/src/test/bench/shootout-k-nucleotide.rs @@ -247,14 +247,14 @@ fn generate_frequencies(mut input: &[u8], frame: uint) -> Table { // Pull first frame. for _ in range(0, frame) { code = code.push_char(input[0]); - input = input[1..]; + input = &input[1..]; } frequencies.lookup(code, BumpCallback); while input.len() != 0 && input[0] != ('>' as u8) { code = code.rotate(input[0], frame); frequencies.lookup(code, BumpCallback); - input = input[1..]; + input = &input[1..]; } frequencies } diff --git a/src/test/compile-fail/borrowck-loan-vec-content.rs b/src/test/compile-fail/borrowck-loan-vec-content.rs index 7849475ec6799..efb7a5253ed3d 100644 --- a/src/test/compile-fail/borrowck-loan-vec-content.rs +++ b/src/test/compile-fail/borrowck-loan-vec-content.rs @@ -17,12 +17,12 @@ fn takes_imm_elt(_v: &int, f: F) where F: FnOnce() { } fn has_mut_vec_and_does_not_try_to_change_it() { - let mut v = vec!(1, 2, 3); + let mut v: Vec = vec!(1, 2, 3); takes_imm_elt(&v[0], || {}) } fn has_mut_vec_but_tries_to_change_it() { - let mut v = vec!(1, 2, 3); + let mut v: Vec = vec!(1, 2, 3); takes_imm_elt( &v[0], || { //~ ERROR cannot borrow `v` as mutable diff --git a/src/test/compile-fail/indexing-requires-a-uint.rs b/src/test/compile-fail/indexing-requires-a-uint.rs index 9ac4bd2566e8b..e5edb2358f8e1 100644 --- a/src/test/compile-fail/indexing-requires-a-uint.rs +++ b/src/test/compile-fail/indexing-requires-a-uint.rs @@ -13,7 +13,8 @@ fn main() { fn bar(_: T) {} - [0][0u8]; //~ ERROR: mismatched types + [0][0u8]; //~ ERROR: the trait `core::ops::Index` is not implemented + //~^ ERROR: the trait `core::ops::Index` is not implemented [0][0]; // should infer to be a uint diff --git a/src/test/compile-fail/integral-indexing.rs b/src/test/compile-fail/integral-indexing.rs index 8b1f9eb19864f..bbceb00abd39d 100644 --- a/src/test/compile-fail/integral-indexing.rs +++ b/src/test/compile-fail/integral-indexing.rs @@ -11,16 +11,24 @@ pub fn main() { let v: Vec = vec!(0, 1, 2, 3, 4, 5); let s: String = "abcdef".to_string(); - assert_eq!(v.as_slice()[3u], 3); - assert_eq!(v.as_slice()[3u8], 3); //~ ERROR: mismatched types - assert_eq!(v.as_slice()[3i8], 3); //~ ERROR: mismatched types - assert_eq!(v.as_slice()[3u32], 3); //~ ERROR: mismatched types - assert_eq!(v.as_slice()[3i32], 3); //~ ERROR: mismatched types - println!("{}", v.as_slice()[3u8]); //~ ERROR: mismatched types - assert_eq!(s.as_bytes()[3u], 'd' as u8); - assert_eq!(s.as_bytes()[3u8], 'd' as u8); //~ ERROR: mismatched types - assert_eq!(s.as_bytes()[3i8], 'd' as u8); //~ ERROR: mismatched types - assert_eq!(s.as_bytes()[3u32], 'd' as u8); //~ ERROR: mismatched types - assert_eq!(s.as_bytes()[3i32], 'd' as u8); //~ ERROR: mismatched types - println!("{}", s.as_bytes()[3u8]); //~ ERROR: mismatched types + v.as_slice()[3u]; + v.as_slice()[3]; + v.as_slice()[3u8]; //~ERROR the trait `core::ops::Index` is not implemented + //~^ ERROR the trait `core::ops::Index` is not implemented + v.as_slice()[3i8]; //~ERROR the trait `core::ops::Index` is not implemented + //~^ ERROR the trait `core::ops::Index` is not implemented + v.as_slice()[3u32]; //~ERROR the trait `core::ops::Index` is not implemented + //~^ ERROR the trait `core::ops::Index` is not implemented + v.as_slice()[3i32]; //~ERROR the trait `core::ops::Index` is not implemented + //~^ ERROR the trait `core::ops::Index` is not implemented + s.as_bytes()[3u]; + s.as_bytes()[3]; + s.as_bytes()[3u8]; //~ERROR the trait `core::ops::Index` is not implemented + //~^ERROR the trait `core::ops::Index` is not 
implemented + s.as_bytes()[3i8]; //~ERROR the trait `core::ops::Index` is not implemented + //~^ERROR the trait `core::ops::Index` is not implemented + s.as_bytes()[3u32]; //~ERROR the trait `core::ops::Index` is not implemented + //~^ERROR the trait `core::ops::Index` is not implemented + s.as_bytes()[3i32]; //~ERROR the trait `core::ops::Index` is not implemented + //~^ERROR the trait `core::ops::Index` is not implemented } diff --git a/src/test/compile-fail/issue-16709.rs b/src/test/compile-fail/issue-16709.rs deleted file mode 100644 index 327f50ee059f9..0000000000000 --- a/src/test/compile-fail/issue-16709.rs +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright 2014 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -use std::ptr; -use std::raw; - -trait Slice {} - -fn main() { - unsafe { - let nil: *const u8 = ptr::null(); - let slice: raw::Slice = - Slice { //~ ERROR use of trait `Slice` as a struct constructor [E0159] - data: nil, - len: 0, - }; - } -} diff --git a/src/test/compile-fail/packed-struct-generic-transmute.rs b/src/test/compile-fail/packed-struct-generic-transmute.rs index 5c0aba42b9683..38177d076455d 100644 --- a/src/test/compile-fail/packed-struct-generic-transmute.rs +++ b/src/test/compile-fail/packed-struct-generic-transmute.rs @@ -34,6 +34,6 @@ fn main() { let foo = Foo { bar: [1u8, 2, 3, 4, 5], baz: 10i32 }; unsafe { let oof: Oof<[u8; 5], i32> = mem::transmute(foo); - println!("{} {}", oof.rab[], oof.zab); + println!("{} {}", &oof.rab[], oof.zab); } } diff --git a/src/test/compile-fail/slice-1.rs b/src/test/compile-fail/slice-1.rs index d0339745c9eaf..903760caf1a1e 100644 --- a/src/test/compile-fail/slice-1.rs +++ b/src/test/compile-fail/slice-1.rs @@ -14,6 +14,6 @@ struct Foo; fn main() { let x = Foo; - x[..]; //~ ERROR incorrect slicing expression: `[..]` - //~^ NOTE use `expr[]` to construct a slice of the whole of expr + &x[..]; //~ ERROR incorrect slicing expression: `[..]` + //~^ NOTE use `&expr[]` to construct a slice of the whole of expr } diff --git a/src/test/compile-fail/slice-2.rs b/src/test/compile-fail/slice-2.rs index 24f710d2ae3f4..a03693b5fada0 100644 --- a/src/test/compile-fail/slice-2.rs +++ b/src/test/compile-fail/slice-2.rs @@ -16,8 +16,8 @@ struct Foo; fn main() { let x = Foo; - x[]; //~ ERROR cannot take a slice of a value with type `Foo` - x[Foo..]; //~ ERROR cannot take a slice of a value with type `Foo` - x[..Foo]; //~ ERROR cannot take a slice of a value with type `Foo` - x[Foo..Foo]; //~ ERROR cannot take a slice of a value with type `Foo` + &x[]; //~ ERROR cannot index a value of type `Foo` + &x[Foo..]; //~ ERROR cannot index a value of type `Foo` + &x[..Foo]; //~ ERROR cannot index a value of type `Foo` + &x[Foo..Foo]; //~ ERROR cannot index a value of type `Foo` } diff --git a/src/test/compile-fail/slice-borrow.rs b/src/test/compile-fail/slice-borrow.rs index 00783b71ea11d..aab187f97515a 100644 --- a/src/test/compile-fail/slice-borrow.rs +++ b/src/test/compile-fail/slice-borrow.rs @@ -16,6 +16,6 @@ fn main() { let y; { let x: &[int] = &[1, 2, 3, 4, 5]; //~ ERROR borrowed value does not live long enough - y = x[1..]; + y = &x[1..]; } } diff --git a/src/test/compile-fail/slice-mut-2.rs b/src/test/compile-fail/slice-mut-2.rs index 8970bcfd153a1..1dedb0cf888e5 
100644 --- a/src/test/compile-fail/slice-mut-2.rs +++ b/src/test/compile-fail/slice-mut-2.rs @@ -16,5 +16,5 @@ fn main() { let x: &[int] = &[1, 2, 3, 4, 5]; // Can't mutably slice an immutable slice let slice: &mut [int] = &mut [0, 1]; - x[2..4] = slice; //~ ERROR cannot borrow + let _ = &mut x[2..4]; //~ERROR cannot borrow immutable dereference of `&`-pointer `*x` as mutabl } diff --git a/src/test/compile-fail/slice-mut.rs b/src/test/compile-fail/slice-mut.rs index ad6b384d74701..f0f525a553576 100644 --- a/src/test/compile-fail/slice-mut.rs +++ b/src/test/compile-fail/slice-mut.rs @@ -15,5 +15,5 @@ fn main() { let x: &[int] = &[1, 2, 3, 4, 5]; // Immutable slices are not mutable. - let y: &mut[_] = x[2..4]; //~ ERROR cannot borrow immutable dereference of `&`-pointer as mutabl + let y: &mut[_] = &x[2..4]; //~ ERROR cannot borrow immutable dereference of `&`-pointer as mutab } diff --git a/src/test/compile-fail/str-idx.rs b/src/test/compile-fail/str-idx.rs index 424ffed989b3c..ddd2a4eeedf76 100644 --- a/src/test/compile-fail/str-idx.rs +++ b/src/test/compile-fail/str-idx.rs @@ -10,5 +10,6 @@ pub fn main() { let s: &str = "hello"; - let c: u8 = s[4]; //~ ERROR cannot index a value of type `&str` + let c: u8 = s[4]; //~ ERROR the trait `core::ops::Index<_>` is not implemented + //~^ ERROR the trait `core::ops::Index<_>` is not implemented } diff --git a/src/test/debuginfo/vec-slices.rs b/src/test/debuginfo/vec-slices.rs index 70211d74d885d..14f1dbb9d651c 100644 --- a/src/test/debuginfo/vec-slices.rs +++ b/src/test/debuginfo/vec-slices.rs @@ -93,7 +93,7 @@ fn main() { let empty: &[i64] = &[]; let singleton: &[i64] = &[1]; let multiple: &[i64] = &[2, 3, 4, 5]; - let slice_of_slice = multiple[1..3]; + let slice_of_slice = &multiple[1..3]; let padded_tuple: &[(i32, i16)] = &[(6, 7), (8, 9)]; diff --git a/src/test/run-pass/auto-encode.rs b/src/test/run-pass/auto-encode.rs index 24df95ffd3cb6..9b030de998373 100644 --- a/src/test/run-pass/auto-encode.rs +++ b/src/test/run-pass/auto-encode.rs @@ -35,7 +35,7 @@ fn test_rbml<'a, 'b, A: let mut rbml_w = EBwriter::Encoder::new(&mut wr); a1.encode(&mut rbml_w); - let d: serialize::rbml::Doc<'a> = EBDoc::new(wr[]); + let d: serialize::rbml::Doc<'a> = EBDoc::new(&wr[]); let mut decoder: EBReader::Decoder<'a> = EBreader::Decoder::new(d); let a2: A = Decodable::decode(&mut decoder); assert!(*a1 == a2); diff --git a/src/test/run-pass/deriving-encodable-decodable.rs b/src/test/run-pass/deriving-encodable-decodable.rs index 2466d0adf7bc3..01814e8eab790 100644 --- a/src/test/run-pass/deriving-encodable-decodable.rs +++ b/src/test/run-pass/deriving-encodable-decodable.rs @@ -59,7 +59,7 @@ fn roundtrip<'a, T: Rand + Eq + Encodable> + let mut w = Vec::new(); let mut e = Encoder::new(&mut w); obj.encode(&mut e); - let doc = rbml::Doc::new(@w[]); + let doc = rbml::Doc::new(&w[]); let mut dec = Decoder::new(doc); let obj2 = Decodable::decode(&mut dec); assert!(obj == obj2); diff --git a/src/test/run-pass/issue-15730.rs b/src/test/run-pass/issue-15730.rs index a1a5922e15003..4e1aa454a881d 100644 --- a/src/test/run-pass/issue-15730.rs +++ b/src/test/run-pass/issue-15730.rs @@ -12,5 +12,5 @@ fn main() { let mut array = [1, 2, 3]; - let pie_slice = array[1..2]; + let pie_slice = &array[1..2]; } diff --git a/src/test/run-pass/issue-17503.rs b/src/test/run-pass/issue-17503.rs index 986879e8e461d..8acda17500692 100644 --- a/src/test/run-pass/issue-17503.rs +++ b/src/test/run-pass/issue-17503.rs @@ -15,7 +15,7 @@ fn main() { let ss: &&[int] = &s; let sss: &&&[int] = &ss; - 
println!("{}", s[..3]); - println!("{}", ss[3..]); - println!("{}", sss[2..4]); + println!("{}", &s[0..3]); + println!("{}", &ss[3..]); + println!("{}", &sss[2..4]); } diff --git a/src/test/run-pass/issue-3888-2.rs b/src/test/run-pass/issue-3888-2.rs index 10add853ee7f3..5ed9729c14258 100644 --- a/src/test/run-pass/issue-3888-2.rs +++ b/src/test/run-pass/issue-3888-2.rs @@ -11,7 +11,7 @@ #![feature(slicing_syntax)] fn vec_peek<'r, T>(v: &'r [T]) -> &'r [T] { - v[1..5] + &v[1..5] } pub fn main() {} diff --git a/src/test/run-pass/issue-4464.rs b/src/test/run-pass/issue-4464.rs index f2c1a715b514b..2581069d29b05 100644 --- a/src/test/run-pass/issue-4464.rs +++ b/src/test/run-pass/issue-4464.rs @@ -10,6 +10,6 @@ #![feature(slicing_syntax)] -fn broken(v: &[u8], i: uint, j: uint) -> &[u8] { v[i..j] } +fn broken(v: &[u8], i: uint, j: uint) -> &[u8] { &v[i..j] } pub fn main() {} diff --git a/src/test/run-pass/issue-8898.rs b/src/test/run-pass/issue-8898.rs index 305f984f98efb..0c19286921a99 100644 --- a/src/test/run-pass/issue-8898.rs +++ b/src/test/run-pass/issue-8898.rs @@ -18,11 +18,11 @@ pub fn main() { let abc = [1i, 2, 3]; let tf = [true, false]; let x = [(), ()]; - let slice = x[0..1]; + let slice = &x[0..1]; - assert_repr_eq(abc[], "[1, 2, 3]".to_string()); - assert_repr_eq(tf[], "[true, false]".to_string()); - assert_repr_eq(x[], "[(), ()]".to_string()); + assert_repr_eq(&abc[], "[1, 2, 3]".to_string()); + assert_repr_eq(&tf[], "[true, false]".to_string()); + assert_repr_eq(&x[], "[(), ()]".to_string()); assert_repr_eq(slice, "[()]".to_string()); - assert_repr_eq(x[], "[(), ()]".to_string()); + assert_repr_eq(&x[], "[(), ()]".to_string()); } diff --git a/src/test/run-pass/repeated-vector-syntax.rs b/src/test/run-pass/repeated-vector-syntax.rs index 0781822cb7482..e854a7326329c 100644 --- a/src/test/run-pass/repeated-vector-syntax.rs +++ b/src/test/run-pass/repeated-vector-syntax.rs @@ -16,8 +16,8 @@ pub fn main() { print!("["); for xi in x.iter() { - print!("{}, ", (*xi)[]); + print!("{}, ", &xi[]); } println!("]"); - println!("{}", y[]); + println!("{}", &y[]); } diff --git a/src/test/run-pass/slice-2.rs b/src/test/run-pass/slice-2.rs index f03b4609637a9..05f318b53c2e5 100644 --- a/src/test/run-pass/slice-2.rs +++ b/src/test/run-pass/slice-2.rs @@ -15,57 +15,57 @@ fn main() { let x: &[int] = &[1, 2, 3, 4, 5]; let cmp: &[int] = &[1, 2, 3, 4, 5]; - assert!(x[] == cmp); + assert!(&x[] == cmp); let cmp: &[int] = &[3, 4, 5]; - assert!(x[2..] == cmp); + assert!(&x[2..] == cmp); let cmp: &[int] = &[1, 2, 3]; - assert!(x[..3] == cmp); + assert!(&x[0..3] == cmp); let cmp: &[int] = &[2, 3, 4]; - assert!(x[1..4] == cmp); + assert!(&x[1..4] == cmp); let x: Vec = vec![1, 2, 3, 4, 5]; let cmp: &[int] = &[1, 2, 3, 4, 5]; - assert!(x[] == cmp); + assert!(&x[] == cmp); let cmp: &[int] = &[3, 4, 5]; - assert!(x[2..] == cmp); + assert!(&x[2..] == cmp); let cmp: &[int] = &[1, 2, 3]; - assert!(x[..3] == cmp); + assert!(&x[0..3] == cmp); let cmp: &[int] = &[2, 3, 4]; - assert!(x[1..4] == cmp); + assert!(&x[1..4] == cmp); let x: &mut [int] = &mut [1, 2, 3, 4, 5]; { let cmp: &mut [int] = &mut [1, 2, 3, 4, 5]; - assert!(x[mut] == cmp); + assert!(&mut x[] == cmp); } { let cmp: &mut [int] = &mut [3, 4, 5]; - assert!(x[mut 2..] == cmp); + assert!(&mut x[2..] 
== cmp); } { let cmp: &mut [int] = &mut [1, 2, 3]; - assert!(x[mut ..3] == cmp); + assert!(&mut x[..3] == cmp); } { let cmp: &mut [int] = &mut [2, 3, 4]; - assert!(x[mut 1..4] == cmp); + assert!(&mut x[1..4] == cmp); } let mut x: Vec = vec![1, 2, 3, 4, 5]; { let cmp: &mut [int] = &mut [1, 2, 3, 4, 5]; - assert!(x[mut] == cmp); + assert!(&mut x[] == cmp); } { let cmp: &mut [int] = &mut [3, 4, 5]; - assert!(x[mut 2..] == cmp); + assert!(&mut x[2..] == cmp); } { let cmp: &mut [int] = &mut [1, 2, 3]; - assert!(x[mut ..3] == cmp); + assert!(&mut x[..3] == cmp); } { let cmp: &mut [int] = &mut [2, 3, 4]; - assert!(x[mut 1..4] == cmp); + assert!(&mut x[1..4] == cmp); } } diff --git a/src/test/run-pass/slice-panic-1.rs b/src/test/run-pass/slice-panic-1.rs index 13f2971871ba8..ebe6e974c26cf 100644 --- a/src/test/run-pass/slice-panic-1.rs +++ b/src/test/run-pass/slice-panic-1.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// Test that is a slicing expr[..] fails, the correct cleanups happen. +// Test that if a slicing expr[..] fails, the correct cleanups happen. #![feature(slicing_syntax)] @@ -24,7 +24,7 @@ impl Drop for Foo { fn foo() { let x: &[_] = &[Foo, Foo]; - x[3..4]; + &x[3..4]; } fn main() { diff --git a/src/test/run-pass/slice-panic-2.rs b/src/test/run-pass/slice-panic-2.rs index ccbb33d7768c4..6bbd1ac7b2d9f 100644 --- a/src/test/run-pass/slice-panic-2.rs +++ b/src/test/run-pass/slice-panic-2.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// Test that is a slicing expr[..] fails, the correct cleanups happen. +// Test that if a slicing expr[..] fails, the correct cleanups happen. #![feature(slicing_syntax)] @@ -28,7 +28,7 @@ fn bar() -> uint { fn foo() { let x: &[_] = &[Foo, Foo]; - x[3..bar()]; + &x[3..bar()]; } fn main() { diff --git a/src/test/run-pass/slice.rs b/src/test/run-pass/slice.rs index f863c4d330fbf..fca7daeb07d6f 100644 --- a/src/test/run-pass/slice.rs +++ b/src/test/run-pass/slice.rs @@ -11,61 +11,84 @@ // Test slicing sugar. 
 #![feature(slicing_syntax)]
+#![feature(associated_types)]
 
 extern crate core;
-use core::ops::{Slice,SliceMut};
+use core::ops::{Index, Range, RangeTo, RangeFrom, FullRange};
 
 static mut COUNT: uint = 0;
 
 struct Foo;
 
-impl Slice<Foo, Foo> for Foo {
-    fn as_slice_<'a>(&'a self) -> &'a Foo {
+impl Index<Range<Foo>> for Foo {
+    type Output = Foo;
+    fn index(&self, index: &Range<Foo>) -> &Foo {
         unsafe { COUNT += 1; }
         self
     }
-    fn slice_from_or_fail<'a>(&'a self, _from: &Foo) -> &'a Foo {
+}
+impl Index<RangeTo<Foo>> for Foo {
+    type Output = Foo;
+    fn index(&self, index: &RangeTo<Foo>) -> &Foo {
         unsafe { COUNT += 1; }
         self
     }
-    fn slice_to_or_fail<'a>(&'a self, _to: &Foo) -> &'a Foo {
+}
+impl Index<RangeFrom<Foo>> for Foo {
+    type Output = Foo;
+    fn index(&self, index: &RangeFrom<Foo>) -> &Foo {
         unsafe { COUNT += 1; }
         self
     }
-    fn slice_or_fail<'a>(&'a self, _from: &Foo, _to: &Foo) -> &'a Foo {
+}
+impl Index<FullRange> for Foo {
+    type Output = Foo;
+    fn index(&self, _index: &FullRange) -> &Foo {
         unsafe { COUNT += 1; }
         self
     }
 }
 
-impl SliceMut<Foo, Foo> for Foo {
-    fn as_mut_slice_<'a>(&'a mut self) -> &'a mut Foo {
+impl IndexMut<Range<Foo>> for Foo {
+    type Output = Foo;
+    fn index_mut(&mut self, index: &Range<Foo>) -> &mut Foo {
         unsafe { COUNT += 1; }
         self
     }
-    fn slice_from_or_fail_mut<'a>(&'a mut self, _from: &Foo) -> &'a mut Foo {
+}
+impl IndexMut<RangeTo<Foo>> for Foo {
+    type Output = Foo;
+    fn index_mut(&mut self, index: &RangeTo<Foo>) -> &mut Foo {
         unsafe { COUNT += 1; }
         self
     }
-    fn slice_to_or_fail_mut<'a>(&'a mut self, _to: &Foo) -> &'a mut Foo {
+}
+impl IndexMut<RangeFrom<Foo>> for Foo {
+    type Output = Foo;
+    fn index_mut(&mut self, index: &RangeFrom<Foo>) -> &mut Foo {
         unsafe { COUNT += 1; }
         self
     }
-    fn slice_or_fail_mut<'a>(&'a mut self, _from: &Foo, _to: &Foo) -> &'a mut Foo {
+}
+impl IndexMut<FullRange> for Foo {
+    type Output = Foo;
+    fn index_mut(&mut self, _index: &FullRange) -> &mut Foo {
         unsafe { COUNT += 1; }
         self
     }
 }
 
+
+
 fn main() {
     let mut x = Foo;
-    x[];
-    x[Foo..];
-    x[..Foo];
-    x[Foo..Foo];
-    x[mut];
-    x[mut Foo..];
-    x[mut ..Foo];
-    x[mut Foo..Foo];
+    &x[];
+    &x[Foo..];
+    &x[..Foo];
+    &x[Foo..Foo];
+    &mut x[];
+    &mut x[Foo..];
+    &mut x[..Foo];
+    &mut x[Foo..Foo];
     unsafe { assert!(COUNT == 8); }
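
Taken together, the hunks above perform one mechanical migration: the old `Slice`/`SliceMut` methods and the `expr[a..b]` / `expr[mut a..b]` sugar are replaced by range values (`Range`, `RangeTo`, `RangeFrom`, `FullRange`) passed by reference to `Index`/`IndexMut`, so call sites either borrow the sugar explicitly (`&x[1..4]`, `&mut x[..3]`) or call `.index(...)` directly. Below is a minimal stand-alone sketch of the call-site change; it is not taken from the patch, assumes a pre-1.0 compiler with the `slicing_syntax` feature gate and the range types re-exported through `std::ops`, and uses illustrative variable names only.

    #![feature(slicing_syntax)]

    use std::ops::{Index, FullRange};

    fn main() {
        let v: Vec<int> = vec![1, 2, 3, 4, 5];

        // New sugar: a slicing expression now has to be borrowed explicitly.
        let a: &[int] = &v[1..4];

        // Desugared spelling used throughout the patch: pass a range value
        // to `Index::index` by reference.
        let b: &[int] = v.index(&(1u..4u));

        // Full-range slice, replacing the old `v[]` form.
        let c: &[int] = v.index(&FullRange);

        assert!(a == b);
        assert!(c.len() == 5);
    }

Both spellings appear throughout the patch and produce the same borrowed slice; which one a given file uses depends only on whether the slicing sugar is convenient there.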