Rollup merge of #78934 - DeveloperC286:issue_60302_vec, r=m-ou-se

refactor: removing library/alloc/src/vec/mod.rs ignore-tidy-filelength

This PR removes the need for the ignore-tidy-filelength exception on library/alloc/src/vec/mod.rs, as part of issue #60302.

It is probably easiest to review this PR by looking at it commit by commit rather than looking at the overall diff.
Merged by Mara Bos on 2020-12-30 20:56:45 +00:00 (committed via GitHub), commit 242a252687.
19 changed files with 1378 additions and 1272 deletions

library/alloc/src/vec/cow.rs (new file):

@@ -0,0 +1,35 @@
use crate::borrow::Cow;
use core::iter::FromIterator;
use super::Vec;
#[stable(feature = "cow_from_vec", since = "1.8.0")]
impl<'a, T: Clone> From<&'a [T]> for Cow<'a, [T]> {
fn from(s: &'a [T]) -> Cow<'a, [T]> {
Cow::Borrowed(s)
}
}
#[stable(feature = "cow_from_vec", since = "1.8.0")]
impl<'a, T: Clone> From<Vec<T>> for Cow<'a, [T]> {
fn from(v: Vec<T>) -> Cow<'a, [T]> {
Cow::Owned(v)
}
}
#[stable(feature = "cow_from_vec_ref", since = "1.28.0")]
impl<'a, T: Clone> From<&'a Vec<T>> for Cow<'a, [T]> {
fn from(v: &'a Vec<T>) -> Cow<'a, [T]> {
Cow::Borrowed(v.as_slice())
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> FromIterator<T> for Cow<'a, [T]>
where
T: Clone,
{
fn from_iter<I: IntoIterator<Item = T>>(it: I) -> Cow<'a, [T]> {
Cow::Owned(FromIterator::from_iter(it))
}
}
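
A quick tour of the impls above, as a stable-Rust sketch (user code added for illustration, not part of the diff):

use std::borrow::Cow;

fn main() {
    let v = vec![1, 2, 3];

    // From<&[T]>: borrows the slice, no allocation.
    let a: Cow<'_, [i32]> = Cow::from(&v[..]);
    assert!(matches!(a, Cow::Borrowed(_)));

    // From<&Vec<T>>: also borrows, via as_slice().
    let b: Cow<'_, [i32]> = Cow::from(&v);
    assert!(matches!(b, Cow::Borrowed(_)));

    // From<Vec<T>>: takes ownership of the buffer.
    let c: Cow<'_, [i32]> = Cow::from(v);
    assert!(matches!(c, Cow::Owned(_)));

    // FromIterator: always collects into the Owned variant.
    let d: Cow<'_, [i32]> = (0..3).collect();
    assert!(matches!(d, Cow::Owned(_)));
}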

library/alloc/src/vec/drain.rs (new file):

@@ -0,0 +1,155 @@
use crate::alloc::{Allocator, Global};
use core::fmt;
use core::iter::{FusedIterator, TrustedLen};
use core::mem::{self};
use core::ptr::{self, NonNull};
use core::slice::{self};
use super::Vec;
/// A draining iterator for `Vec<T>`.
///
/// This `struct` is created by [`Vec::drain`].
/// See its documentation for more.
///
/// # Example
///
/// ```
/// let mut v = vec![0, 1, 2];
/// let iter: std::vec::Drain<_> = v.drain(..);
/// ```
#[stable(feature = "drain", since = "1.6.0")]
pub struct Drain<
'a,
T: 'a,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + 'a = Global,
> {
/// Index of tail to preserve
pub(super) tail_start: usize,
/// Length of tail
pub(super) tail_len: usize,
/// Current remaining range to remove
pub(super) iter: slice::Iter<'a, T>,
pub(super) vec: NonNull<Vec<T, A>>,
}
#[stable(feature = "collection_debug", since = "1.17.0")]
impl<T: fmt::Debug, A: Allocator> fmt::Debug for Drain<'_, T, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("Drain").field(&self.iter.as_slice()).finish()
}
}
impl<'a, T, A: Allocator> Drain<'a, T, A> {
/// Returns the remaining items of this iterator as a slice.
///
/// # Examples
///
/// ```
/// let mut vec = vec!['a', 'b', 'c'];
/// let mut drain = vec.drain(..);
/// assert_eq!(drain.as_slice(), &['a', 'b', 'c']);
/// let _ = drain.next().unwrap();
/// assert_eq!(drain.as_slice(), &['b', 'c']);
/// ```
#[stable(feature = "vec_drain_as_slice", since = "1.46.0")]
pub fn as_slice(&self) -> &[T] {
self.iter.as_slice()
}
/// Returns a reference to the underlying allocator.
#[unstable(feature = "allocator_api", issue = "32838")]
#[inline]
pub fn allocator(&self) -> &A {
unsafe { self.vec.as_ref().allocator() }
}
}
#[stable(feature = "vec_drain_as_slice", since = "1.46.0")]
impl<'a, T, A: Allocator> AsRef<[T]> for Drain<'a, T, A> {
fn as_ref(&self) -> &[T] {
self.as_slice()
}
}
#[stable(feature = "drain", since = "1.6.0")]
unsafe impl<T: Sync, A: Sync + Allocator> Sync for Drain<'_, T, A> {}
#[stable(feature = "drain", since = "1.6.0")]
unsafe impl<T: Send, A: Send + Allocator> Send for Drain<'_, T, A> {}
#[stable(feature = "drain", since = "1.6.0")]
impl<T, A: Allocator> Iterator for Drain<'_, T, A> {
type Item = T;
#[inline]
fn next(&mut self) -> Option<T> {
self.iter.next().map(|elt| unsafe { ptr::read(elt as *const _) })
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
}
#[stable(feature = "drain", since = "1.6.0")]
impl<T, A: Allocator> DoubleEndedIterator for Drain<'_, T, A> {
#[inline]
fn next_back(&mut self) -> Option<T> {
self.iter.next_back().map(|elt| unsafe { ptr::read(elt as *const _) })
}
}
#[stable(feature = "drain", since = "1.6.0")]
impl<T, A: Allocator> Drop for Drain<'_, T, A> {
fn drop(&mut self) {
/// Continues dropping the remaining elements in the `Drain`, then moves back the
/// un-`Drain`ed elements to restore the original `Vec`.
struct DropGuard<'r, 'a, T, A: Allocator>(&'r mut Drain<'a, T, A>);
impl<'r, 'a, T, A: Allocator> Drop for DropGuard<'r, 'a, T, A> {
fn drop(&mut self) {
// Continue the same loop we have below. If the loop already finished, this does
// nothing.
self.0.for_each(drop);
if self.0.tail_len > 0 {
unsafe {
let source_vec = self.0.vec.as_mut();
// memmove back untouched tail, update to new length
let start = source_vec.len();
let tail = self.0.tail_start;
if tail != start {
let src = source_vec.as_ptr().add(tail);
let dst = source_vec.as_mut_ptr().add(start);
ptr::copy(src, dst, self.0.tail_len);
}
source_vec.set_len(start + self.0.tail_len);
}
}
}
}
// exhaust self first
while let Some(item) = self.next() {
let guard = DropGuard(self);
drop(item);
mem::forget(guard);
}
// Drop a `DropGuard` to move back the non-drained tail of `self`.
DropGuard(self);
}
}
#[stable(feature = "drain", since = "1.6.0")]
impl<T, A: Allocator> ExactSizeIterator for Drain<'_, T, A> {
fn is_empty(&self) -> bool {
self.iter.is_empty()
}
}
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T, A: Allocator> TrustedLen for Drain<'_, T, A> {}
#[stable(feature = "fused", since = "1.26.0")]
impl<T, A: Allocator> FusedIterator for Drain<'_, T, A> {}
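
The tail-preservation logic above is observable from stable code: even when the Drain is only partially consumed, dropping it finishes the removal and memmoves the tail back. A small illustrative sketch (added for this write-up):

fn main() {
    let mut v = vec![1, 2, 3, 4, 5];
    {
        let mut d = v.drain(1..3); // removes the elements 2 and 3
        assert_eq!(d.next(), Some(2));
        // `d` is dropped here with one drained element unread: Drain::drop
        // (via DropGuard) drops it, then copies the tail (4, 5) back.
    }
    assert_eq!(v, [1, 4, 5]);
}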

library/alloc/src/vec/drain_filter.rs (new file):

@@ -0,0 +1,143 @@
use crate::alloc::{Allocator, Global};
use core::ptr::{self};
use core::slice::{self};
use super::Vec;
/// An iterator which uses a closure to determine if an element should be removed.
///
/// This struct is created by [`Vec::drain_filter`].
/// See its documentation for more.
///
/// # Example
///
/// ```
/// #![feature(drain_filter)]
///
/// let mut v = vec![0, 1, 2];
/// let iter: std::vec::DrainFilter<_, _> = v.drain_filter(|x| *x % 2 == 0);
/// ```
#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
#[derive(Debug)]
pub struct DrainFilter<
'a,
T,
F,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> where
F: FnMut(&mut T) -> bool,
{
pub(super) vec: &'a mut Vec<T, A>,
/// The index of the item that will be inspected by the next call to `next`.
pub(super) idx: usize,
/// The number of items that have been drained (removed) thus far.
pub(super) del: usize,
/// The original length of `vec` prior to draining.
pub(super) old_len: usize,
/// The filter test predicate.
pub(super) pred: F,
/// A flag that indicates a panic has occurred in the filter test predicate.
/// This is used as a hint in the drop implementation to prevent consumption
/// of the remainder of the `DrainFilter`. Any unprocessed items will be
/// backshifted in the `vec`, but no further items will be dropped or
/// tested by the filter predicate.
pub(super) panic_flag: bool,
}
impl<T, F, A: Allocator> DrainFilter<'_, T, F, A>
where
F: FnMut(&mut T) -> bool,
{
/// Returns a reference to the underlying allocator.
#[unstable(feature = "allocator_api", issue = "32838")]
#[inline]
pub fn allocator(&self) -> &A {
self.vec.allocator()
}
}
#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
impl<T, F, A: Allocator> Iterator for DrainFilter<'_, T, F, A>
where
F: FnMut(&mut T) -> bool,
{
type Item = T;
fn next(&mut self) -> Option<T> {
unsafe {
while self.idx < self.old_len {
let i = self.idx;
let v = slice::from_raw_parts_mut(self.vec.as_mut_ptr(), self.old_len);
self.panic_flag = true;
let drained = (self.pred)(&mut v[i]);
self.panic_flag = false;
// Update the index *after* the predicate is called. If the index
// is updated prior and the predicate panics, the element at this
// index would be leaked.
self.idx += 1;
if drained {
self.del += 1;
return Some(ptr::read(&v[i]));
} else if self.del > 0 {
let del = self.del;
let src: *const T = &v[i];
let dst: *mut T = &mut v[i - del];
ptr::copy_nonoverlapping(src, dst, 1);
}
}
None
}
}
fn size_hint(&self) -> (usize, Option<usize>) {
(0, Some(self.old_len - self.idx))
}
}
#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
impl<T, F, A: Allocator> Drop for DrainFilter<'_, T, F, A>
where
F: FnMut(&mut T) -> bool,
{
fn drop(&mut self) {
struct BackshiftOnDrop<'a, 'b, T, F, A: Allocator>
where
F: FnMut(&mut T) -> bool,
{
drain: &'b mut DrainFilter<'a, T, F, A>,
}
impl<'a, 'b, T, F, A: Allocator> Drop for BackshiftOnDrop<'a, 'b, T, F, A>
where
F: FnMut(&mut T) -> bool,
{
fn drop(&mut self) {
unsafe {
if self.drain.idx < self.drain.old_len && self.drain.del > 0 {
// This is a pretty messed up state, and there isn't really an
// obviously right thing to do. We don't want to keep trying
// to execute `pred`, so we just backshift all the unprocessed
// elements and tell the vec that they still exist. The backshift
// is required to prevent a double-drop of the last successfully
// drained item prior to a panic in the predicate.
let ptr = self.drain.vec.as_mut_ptr();
let src = ptr.add(self.drain.idx);
let dst = src.sub(self.drain.del);
let tail_len = self.drain.old_len - self.drain.idx;
src.copy_to(dst, tail_len);
}
self.drain.vec.set_len(self.drain.old_len - self.drain.del);
}
}
}
let backshift = BackshiftOnDrop { drain: self };
// Attempt to consume any remaining elements if the filter predicate
// has not yet panicked. We'll backshift any remaining elements
// whether we've already panicked or if the consumption here panics.
if !backshift.drain.panic_flag {
backshift.drain.for_each(drop);
}
}
}
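
For reference, a usage sketch; drain_filter is feature-gated, so this compiles only on a nightly toolchain:

#![feature(drain_filter)]

fn main() {
    let mut v = vec![1, 2, 3, 4, 5, 6];
    // Elements for which the predicate returns true are yielded and removed;
    // the rest are backshifted into the holes left behind.
    let evens: Vec<i32> = v.drain_filter(|x| *x % 2 == 0).collect();
    assert_eq!(evens, [2, 4, 6]);
    assert_eq!(v, [1, 3, 5]);
}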

library/alloc/src/vec/in_place_drop.rs (new file):

@@ -0,0 +1,24 @@
use core::ptr::{self};
use core::slice::{self};
// A helper struct for in-place iteration that drops the destination slice of iteration,
// i.e. the head. The source slice (the tail) is dropped by IntoIter.
pub(super) struct InPlaceDrop<T> {
pub(super) inner: *mut T,
pub(super) dst: *mut T,
}
impl<T> InPlaceDrop<T> {
fn len(&self) -> usize {
unsafe { self.dst.offset_from(self.inner) as usize }
}
}
impl<T> Drop for InPlaceDrop<T> {
#[inline]
fn drop(&mut self) {
unsafe {
ptr::drop_in_place(slice::from_raw_parts_mut(self.inner, self.len()));
}
}
}
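
InPlaceDrop is private, but the idea is small enough to sketch standalone. The guard below (hypothetical name, same shape as the struct above) drops exactly the prefix of elements written so far if anything unwinds mid-write:

use std::ptr;

struct PrefixDropGuard<T> {
    start: *mut T, // first initialized element
    dst: *mut T,   // one-past-the-end write cursor
}

impl<T> Drop for PrefixDropGuard<T> {
    fn drop(&mut self) {
        let len = unsafe { self.dst.offset_from(self.start) } as usize;
        unsafe { ptr::drop_in_place(ptr::slice_from_raw_parts_mut(self.start, len)) };
    }
}

fn main() {
    let mut buf: Vec<String> = Vec::with_capacity(2);
    let p = buf.as_mut_ptr();
    let mut guard = PrefixDropGuard { start: p, dst: p };
    unsafe {
        ptr::write(guard.dst, String::from("a"));
        guard.dst = guard.dst.add(1);
        ptr::write(guard.dst, String::from("b"));
        guard.dst = guard.dst.add(1);
    }
    // On the success path the real code forgets the guard and sets the Vec
    // length; here we simply let it drop the two strings it guards.
}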

library/alloc/src/vec/into_iter.rs (new file):

@@ -0,0 +1,283 @@
use crate::alloc::{Allocator, Global};
use crate::raw_vec::RawVec;
use core::fmt;
use core::intrinsics::arith_offset;
use core::iter::{FusedIterator, InPlaceIterable, SourceIter, TrustedLen, TrustedRandomAccess};
use core::marker::PhantomData;
use core::mem::{self};
use core::ptr::{self, NonNull};
use core::slice::{self};
/// An iterator that moves out of a vector.
///
/// This `struct` is created by the `into_iter` method on [`Vec`](super::Vec)
/// (provided by the [`IntoIterator`] trait).
///
/// # Example
///
/// ```
/// let v = vec![0, 1, 2];
/// let iter: std::vec::IntoIter<_> = v.into_iter();
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub struct IntoIter<
T,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
pub(super) buf: NonNull<T>,
pub(super) phantom: PhantomData<T>,
pub(super) cap: usize,
pub(super) alloc: A,
pub(super) ptr: *const T,
pub(super) end: *const T,
}
#[stable(feature = "vec_intoiter_debug", since = "1.13.0")]
impl<T: fmt::Debug, A: Allocator> fmt::Debug for IntoIter<T, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
}
}
impl<T, A: Allocator> IntoIter<T, A> {
/// Returns the remaining items of this iterator as a slice.
///
/// # Examples
///
/// ```
/// let vec = vec!['a', 'b', 'c'];
/// let mut into_iter = vec.into_iter();
/// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
/// let _ = into_iter.next().unwrap();
/// assert_eq!(into_iter.as_slice(), &['b', 'c']);
/// ```
#[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
pub fn as_slice(&self) -> &[T] {
unsafe { slice::from_raw_parts(self.ptr, self.len()) }
}
/// Returns the remaining items of this iterator as a mutable slice.
///
/// # Examples
///
/// ```
/// let vec = vec!['a', 'b', 'c'];
/// let mut into_iter = vec.into_iter();
/// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
/// into_iter.as_mut_slice()[2] = 'z';
/// assert_eq!(into_iter.next().unwrap(), 'a');
/// assert_eq!(into_iter.next().unwrap(), 'b');
/// assert_eq!(into_iter.next().unwrap(), 'z');
/// ```
#[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
pub fn as_mut_slice(&mut self) -> &mut [T] {
unsafe { &mut *self.as_raw_mut_slice() }
}
/// Returns a reference to the underlying allocator.
#[unstable(feature = "allocator_api", issue = "32838")]
#[inline]
pub fn allocator(&self) -> &A {
&self.alloc
}
fn as_raw_mut_slice(&mut self) -> *mut [T] {
ptr::slice_from_raw_parts_mut(self.ptr as *mut T, self.len())
}
pub(super) fn drop_remaining(&mut self) {
unsafe {
ptr::drop_in_place(self.as_mut_slice());
}
self.ptr = self.end;
}
/// Relinquishes the backing allocation, equivalent to
/// `ptr::write(&mut self, Vec::new().into_iter())`
pub(super) fn forget_allocation(&mut self) {
self.cap = 0;
self.buf = unsafe { NonNull::new_unchecked(RawVec::NEW.ptr()) };
self.ptr = self.buf.as_ptr();
self.end = self.buf.as_ptr();
}
}
#[stable(feature = "vec_intoiter_as_ref", since = "1.46.0")]
impl<T, A: Allocator> AsRef<[T]> for IntoIter<T, A> {
fn as_ref(&self) -> &[T] {
self.as_slice()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Send, A: Allocator + Send> Send for IntoIter<T, A> {}
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Sync, A: Allocator> Sync for IntoIter<T, A> {}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> Iterator for IntoIter<T, A> {
type Item = T;
#[inline]
fn next(&mut self) -> Option<T> {
if self.ptr as *const _ == self.end {
None
} else if mem::size_of::<T>() == 0 {
// purposefully don't use 'ptr.offset' because for
// vectors with 0-size elements this would return the
// same pointer.
self.ptr = unsafe { arith_offset(self.ptr as *const i8, 1) as *mut T };
// Make up a value of this ZST.
Some(unsafe { mem::zeroed() })
} else {
let old = self.ptr;
self.ptr = unsafe { self.ptr.offset(1) };
Some(unsafe { ptr::read(old) })
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
let exact = if mem::size_of::<T>() == 0 {
(self.end as usize).wrapping_sub(self.ptr as usize)
} else {
unsafe { self.end.offset_from(self.ptr) as usize }
};
(exact, Some(exact))
}
#[inline]
fn count(self) -> usize {
self.len()
}
unsafe fn __iterator_get_unchecked(&mut self, i: usize) -> Self::Item
where
Self: TrustedRandomAccess,
{
// SAFETY: the caller must guarantee that `i` is in bounds of the
// `Vec<T>`, so `i` cannot overflow an `isize`, and the `self.ptr.add(i)`
// is guaranteed to point to an element of the `Vec<T>` and
// thus guaranteed to be valid to dereference.
//
// Also note the implementation of `Self: TrustedRandomAccess` requires
// that `T: Copy` so reading elements from the buffer doesn't invalidate
// them for `Drop`.
unsafe {
if mem::size_of::<T>() == 0 { mem::zeroed() } else { ptr::read(self.ptr.add(i)) }
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
#[inline]
fn next_back(&mut self) -> Option<T> {
if self.end == self.ptr {
None
} else if mem::size_of::<T>() == 0 {
// See above for why 'ptr.offset' isn't used
self.end = unsafe { arith_offset(self.end as *const i8, -1) as *mut T };
// Make up a value of this ZST.
Some(unsafe { mem::zeroed() })
} else {
self.end = unsafe { self.end.offset(-1) };
Some(unsafe { ptr::read(self.end) })
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> {
fn is_empty(&self) -> bool {
self.ptr == self.end
}
}
#[stable(feature = "fused", since = "1.26.0")]
impl<T, A: Allocator> FusedIterator for IntoIter<T, A> {}
#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T, A: Allocator> TrustedLen for IntoIter<T, A> {}
#[doc(hidden)]
#[unstable(issue = "none", feature = "std_internals")]
// T: Copy as approximation for !Drop since get_unchecked does not advance self.ptr
// and thus we can't implement drop-handling
unsafe impl<T, A: Allocator> TrustedRandomAccess for IntoIter<T, A>
where
T: Copy,
{
fn may_have_side_effect() -> bool {
false
}
}
#[stable(feature = "vec_into_iter_clone", since = "1.8.0")]
impl<T: Clone, A: Allocator + Clone> Clone for IntoIter<T, A> {
#[cfg(not(test))]
fn clone(&self) -> Self {
self.as_slice().to_vec_in(self.alloc.clone()).into_iter()
}
#[cfg(test)]
fn clone(&self) -> Self {
crate::slice::to_vec(self.as_slice(), self.alloc.clone()).into_iter()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<#[may_dangle] T, A: Allocator> Drop for IntoIter<T, A> {
fn drop(&mut self) {
struct DropGuard<'a, T, A: Allocator>(&'a mut IntoIter<T, A>);
impl<T, A: Allocator> Drop for DropGuard<'_, T, A> {
fn drop(&mut self) {
unsafe {
// `IntoIter::alloc` is not used anymore after this
let alloc = ptr::read(&self.0.alloc);
// RawVec handles deallocation
let _ = RawVec::from_raw_parts_in(self.0.buf.as_ptr(), self.0.cap, alloc);
}
}
}
let guard = DropGuard(self);
// destroy the remaining elements
unsafe {
ptr::drop_in_place(guard.0.as_raw_mut_slice());
}
// now `guard` will be dropped and do the rest
}
}
#[unstable(issue = "none", feature = "inplace_iteration")]
unsafe impl<T, A: Allocator> InPlaceIterable for IntoIter<T, A> {}
#[unstable(issue = "none", feature = "inplace_iteration")]
unsafe impl<T, A: Allocator> SourceIter for IntoIter<T, A> {
type Source = Self;
#[inline]
unsafe fn as_inner(&mut self) -> &mut Self::Source {
self
}
}
// internal helper trait for in-place iteration specialization.
#[rustc_specialization_trait]
pub(crate) trait AsIntoIter {
type Item;
fn as_into_iter(&mut self) -> &mut IntoIter<Self::Item>;
}
impl<T> AsIntoIter for IntoIter<T> {
type Item = T;
fn as_into_iter(&mut self) -> &mut IntoIter<Self::Item> {
self
}
}
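
A stable-API tour of the behavior implemented above: front and back iteration share one buffer, with slice views of what remains (illustrative user code):

fn main() {
    let v = vec![1, 2, 3, 4];
    let mut it = v.into_iter();
    assert_eq!(it.next(), Some(1));      // advances `ptr`
    assert_eq!(it.next_back(), Some(4)); // retreats `end`
    assert_eq!(it.as_slice(), &[2, 3]);  // the remaining elements
    assert_eq!(it.len(), 2);             // ExactSizeIterator
    // Dropping `it` drops 2 and 3, then the DropGuard hands the buffer
    // back to RawVec for deallocation.
}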

library/alloc/src/vec/is_zero.rs (new file):

@@ -0,0 +1,71 @@
use crate::boxed::Box;
#[rustc_specialization_trait]
pub(super) unsafe trait IsZero {
/// Whether this value is zero
fn is_zero(&self) -> bool;
}
macro_rules! impl_is_zero {
($t:ty, $is_zero:expr) => {
unsafe impl IsZero for $t {
#[inline]
fn is_zero(&self) -> bool {
$is_zero(*self)
}
}
};
}
impl_is_zero!(i16, |x| x == 0);
impl_is_zero!(i32, |x| x == 0);
impl_is_zero!(i64, |x| x == 0);
impl_is_zero!(i128, |x| x == 0);
impl_is_zero!(isize, |x| x == 0);
impl_is_zero!(u16, |x| x == 0);
impl_is_zero!(u32, |x| x == 0);
impl_is_zero!(u64, |x| x == 0);
impl_is_zero!(u128, |x| x == 0);
impl_is_zero!(usize, |x| x == 0);
impl_is_zero!(bool, |x| x == false);
impl_is_zero!(char, |x| x == '\0');
impl_is_zero!(f32, |x: f32| x.to_bits() == 0);
impl_is_zero!(f64, |x: f64| x.to_bits() == 0);
unsafe impl<T> IsZero for *const T {
#[inline]
fn is_zero(&self) -> bool {
(*self).is_null()
}
}
unsafe impl<T> IsZero for *mut T {
#[inline]
fn is_zero(&self) -> bool {
(*self).is_null()
}
}
// `Option<&T>` and `Option<Box<T>>` are guaranteed to represent `None` as null.
// For fat pointers, the bytes that would be the pointer metadata in the `Some`
// variant are padding in the `None` variant, so ignoring them and
// zero-initializing instead is ok.
// `Option<&mut T>` never implements `Clone`, so there's no need for an impl of
// `SpecFromElem`.
unsafe impl<T: ?Sized> IsZero for Option<&T> {
#[inline]
fn is_zero(&self) -> bool {
self.is_none()
}
}
unsafe impl<T: ?Sized> IsZero for Option<Box<T>> {
#[inline]
fn is_zero(&self) -> bool {
self.is_none()
}
}
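
IsZero exists to feed the from_elem fast path (see spec_from_elem.rs further down): when the element value is all zero bytes, the whole vector can be requested as pre-zeroed memory instead of written element by element. A hedged sketch of calls that take that path on this implementation (an optimization detail, not a documented guarantee):

fn main() {
    let a = vec![0u64; 1024];            // integer zero
    let b = vec![0.0f32; 1024];          // to_bits() == 0; note -0.0 would not qualify
    let c: Vec<Option<&u8>> = vec![None; 1024]; // None is all-zero for Option<&T>
    assert_eq!(a[0], 0);
    assert_eq!(b[0], 0.0);
    assert!(c[0].is_none());
}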

library/alloc/src/vec/mod.rs: file diff suppressed because it is too large.

library/alloc/src/vec/partial_eq.rs (new file):

@@ -0,0 +1,43 @@
use crate::alloc::Allocator;
use crate::borrow::Cow;
use super::Vec;
macro_rules! __impl_slice_eq1 {
([$($vars:tt)*] $lhs:ty, $rhs:ty $(where $ty:ty: $bound:ident)?, #[$stability:meta]) => {
#[$stability]
impl<T, U, $($vars)*> PartialEq<$rhs> for $lhs
where
T: PartialEq<U>,
$($ty: $bound)?
{
#[inline]
fn eq(&self, other: &$rhs) -> bool { self[..] == other[..] }
#[inline]
fn ne(&self, other: &$rhs) -> bool { self[..] != other[..] }
}
}
}
__impl_slice_eq1! { [A: Allocator] Vec<T, A>, Vec<U, A>, #[stable(feature = "rust1", since = "1.0.0")] }
__impl_slice_eq1! { [A: Allocator] Vec<T, A>, &[U], #[stable(feature = "rust1", since = "1.0.0")] }
__impl_slice_eq1! { [A: Allocator] Vec<T, A>, &mut [U], #[stable(feature = "rust1", since = "1.0.0")] }
__impl_slice_eq1! { [A: Allocator] &[T], Vec<U, A>, #[stable(feature = "partialeq_vec_for_ref_slice", since = "1.46.0")] }
__impl_slice_eq1! { [A: Allocator] &mut [T], Vec<U, A>, #[stable(feature = "partialeq_vec_for_ref_slice", since = "1.46.0")] }
__impl_slice_eq1! { [A: Allocator] Vec<T, A>, [U], #[stable(feature = "partialeq_vec_for_slice", since = "1.48.0")] }
__impl_slice_eq1! { [A: Allocator] [T], Vec<U, A>, #[stable(feature = "partialeq_vec_for_slice", since = "1.48.0")] }
__impl_slice_eq1! { [A: Allocator] Cow<'_, [T]>, Vec<U, A> where T: Clone, #[stable(feature = "rust1", since = "1.0.0")] }
__impl_slice_eq1! { [] Cow<'_, [T]>, &[U] where T: Clone, #[stable(feature = "rust1", since = "1.0.0")] }
__impl_slice_eq1! { [] Cow<'_, [T]>, &mut [U] where T: Clone, #[stable(feature = "rust1", since = "1.0.0")] }
__impl_slice_eq1! { [A: Allocator, const N: usize] Vec<T, A>, [U; N], #[stable(feature = "rust1", since = "1.0.0")] }
__impl_slice_eq1! { [A: Allocator, const N: usize] Vec<T, A>, &[U; N], #[stable(feature = "rust1", since = "1.0.0")] }
// NOTE: some less important impls are omitted to reduce code bloat
// FIXME(Centril): Reconsider this?
//__impl_slice_eq1! { [const N: usize] Vec<A>, &mut [B; N], }
//__impl_slice_eq1! { [const N: usize] [A; N], Vec<B>, }
//__impl_slice_eq1! { [const N: usize] &[A; N], Vec<B>, }
//__impl_slice_eq1! { [const N: usize] &mut [A; N], Vec<B>, }
//__impl_slice_eq1! { [const N: usize] Cow<'a, [A]>, [B; N], }
//__impl_slice_eq1! { [const N: usize] Cow<'a, [A]>, &[B; N], }
//__impl_slice_eq1! { [const N: usize] Cow<'a, [A]>, &mut [B; N], }
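
The macro above stamps out the PartialEq matrix; concretely, it is what lets mixed comparisons like these compile (illustrative user code):

use std::borrow::Cow;

fn main() {
    let v = vec![1, 2, 3];
    assert!(v == [1, 2, 3]);          // Vec<T, A> == [U; N]
    assert!(v == &[1, 2, 3][..]);     // Vec<T, A> == &[U]
    assert!(&[1, 2, 3][..] == v);     // &[T] == Vec<U, A>
    let c: Cow<'_, [i32]> = Cow::Borrowed(&[1, 2, 3]);
    assert!(c == v);                  // Cow<'_, [T]> == Vec<U, A>
}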

library/alloc/src/vec/set_len_on_drop.rs (new file):

@@ -0,0 +1,28 @@
// Set the length of the vec when the `SetLenOnDrop` value goes out of scope.
//
// The idea is: The length field in SetLenOnDrop is a local variable
// that the optimizer will see does not alias with any stores through the Vec's data
// pointer. This is a workaround for alias analysis issue #32155
pub(super) struct SetLenOnDrop<'a> {
len: &'a mut usize,
local_len: usize,
}
impl<'a> SetLenOnDrop<'a> {
#[inline]
pub(super) fn new(len: &'a mut usize) -> Self {
SetLenOnDrop { local_len: *len, len }
}
#[inline]
pub(super) fn increment_len(&mut self, increment: usize) {
self.local_len += increment;
}
}
impl Drop for SetLenOnDrop<'_> {
#[inline]
fn drop(&mut self) {
*self.len = self.local_len;
}
}
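
To see why this helps, here is a hedged standalone rendition of the pattern (a hypothetical copy of the private type plus a driver): the hot loop touches only a local counter, and the real length is committed exactly once, even if the loop unwinds:

struct LenGuard<'a> {
    len: &'a mut usize,
    local_len: usize,
}

impl<'a> LenGuard<'a> {
    fn new(len: &'a mut usize) -> Self {
        LenGuard { local_len: *len, len }
    }
    fn increment_len(&mut self, n: usize) {
        self.local_len += n; // no write through `len` inside the loop
    }
}

impl Drop for LenGuard<'_> {
    fn drop(&mut self) {
        *self.len = self.local_len; // single write-back on scope exit
    }
}

fn main() {
    let mut len = 0usize;
    {
        let mut g = LenGuard::new(&mut len);
        for _ in 0..5 {
            g.increment_len(1);
        }
    }
    assert_eq!(len, 5);
}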

library/alloc/src/vec/source_iter_marker.rs (new file):

@@ -0,0 +1,108 @@
use core::iter::{InPlaceIterable, SourceIter};
use core::mem::{self, ManuallyDrop};
use core::ptr::{self};
use super::{AsIntoIter, InPlaceDrop, SpecFromIter, SpecFromIterNested, Vec};
/// Specialization marker for collecting an iterator pipeline into a Vec while reusing the
/// source allocation, i.e. executing the pipeline in place.
///
/// The SourceIter parent trait is necessary for the specializing function to access the allocation
/// which is to be reused. But it is not sufficient for the specialization to be valid. See
/// additional bounds on the impl.
#[rustc_unsafe_specialization_marker]
pub(super) trait SourceIterMarker: SourceIter<Source: AsIntoIter> {}
// The std-internal SourceIter/InPlaceIterable traits are only implemented by chains of
// Adapter<Adapter<Adapter<IntoIter>>> (all owned by core/std). Additional bounds
// on the adapter implementations (beyond `impl<I: Trait> Trait for Adapter<I>`) only depend on other
// traits already marked as specialization traits (Copy, TrustedRandomAccess, FusedIterator).
// I.e. the marker does not depend on lifetimes of user-supplied types. Modulo the Copy hole, which
// several other specializations already depend on.
impl<T> SourceIterMarker for T where T: SourceIter<Source: AsIntoIter> + InPlaceIterable {}
impl<T, I> SpecFromIter<T, I> for Vec<T>
where
I: Iterator<Item = T> + SourceIterMarker,
{
default fn from_iter(mut iterator: I) -> Self {
// Additional requirements which cannot be expressed via trait bounds. We rely on const eval
// instead:
// a) no ZSTs as there would be no allocation to reuse and pointer arithmetic would panic
// b) size match as required by Alloc contract
// c) alignments match as required by Alloc contract
if mem::size_of::<T>() == 0
|| mem::size_of::<T>()
!= mem::size_of::<<<I as SourceIter>::Source as AsIntoIter>::Item>()
|| mem::align_of::<T>()
!= mem::align_of::<<<I as SourceIter>::Source as AsIntoIter>::Item>()
{
// fallback to more generic implementations
return SpecFromIterNested::from_iter(iterator);
}
let (src_buf, src_ptr, dst_buf, dst_end, cap) = unsafe {
let inner = iterator.as_inner().as_into_iter();
(
inner.buf.as_ptr(),
inner.ptr,
inner.buf.as_ptr() as *mut T,
inner.end as *const T,
inner.cap,
)
};
// use try-fold since
// - it vectorizes better for some iterator adapters
// - unlike most internal iteration methods, it only takes a &mut self
// - it lets us thread the write pointer through its innards and get it back in the end
let sink = InPlaceDrop { inner: dst_buf, dst: dst_buf };
let sink = iterator
.try_fold::<_, _, Result<_, !>>(sink, write_in_place_with_drop(dst_end))
.unwrap();
// iteration succeeded, don't drop head
let dst = ManuallyDrop::new(sink).dst;
let src = unsafe { iterator.as_inner().as_into_iter() };
// check if SourceIter contract was upheld
// caveat: if they weren't we may not even make it to this point
debug_assert_eq!(src_buf, src.buf.as_ptr());
// check InPlaceIterable contract. This is only possible if the iterator advanced the
// source pointer at all. If it uses unchecked access via TrustedRandomAccess
// then the source pointer will stay in its initial position and we can't use it as reference
if src.ptr != src_ptr {
debug_assert!(
dst as *const _ <= src.ptr,
"InPlaceIterable contract violation, write pointer advanced beyond read pointer"
);
}
// drop any remaining values at the tail of the source
src.drop_remaining();
// but prevent drop of the allocation itself once IntoIter goes out of scope
src.forget_allocation();
let vec = unsafe {
let len = dst.offset_from(dst_buf) as usize;
Vec::from_raw_parts(dst_buf, len, cap)
};
vec
}
}
fn write_in_place_with_drop<T>(
src_end: *const T,
) -> impl FnMut(InPlaceDrop<T>, T) -> Result<InPlaceDrop<T>, !> {
move |mut sink, item| {
unsafe {
// the InPlaceIterable contract cannot be verified precisely here since
// try_fold has an exclusive reference to the source pointer
// all we can do is check if it's still in range
debug_assert!(sink.dst as *const _ <= src_end, "InPlaceIterable contract violation");
ptr::write(sink.dst, item);
sink.dst = sink.dst.add(1);
}
Ok(sink)
}
}
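
The effect of this specialization can be probed (though not relied upon) from stable code: with matching size and alignment and a vec::IntoIter source, collect writes the results back into the original buffer. A hedged demo; pointer identity is an implementation detail:

fn main() {
    let v: Vec<u32> = (0..1024).collect();
    let before = v.as_ptr();
    let w: Vec<u32> = v.into_iter().map(|x| x.wrapping_mul(3)).collect();
    // Expected to print `true` under this implementation.
    println!("allocation reused: {}", w.as_ptr() == before);
}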

library/alloc/src/vec/spec_extend.rs (new file):

@@ -0,0 +1,82 @@
use crate::alloc::Allocator;
use core::iter::TrustedLen;
use core::ptr::{self};
use core::slice::{self};
use super::{IntoIter, SetLenOnDrop, Vec};
// Specialization trait used for Vec::extend
pub(super) trait SpecExtend<T, I> {
fn spec_extend(&mut self, iter: I);
}
impl<T, I, A: Allocator> SpecExtend<T, I> for Vec<T, A>
where
I: Iterator<Item = T>,
{
default fn spec_extend(&mut self, iter: I) {
self.extend_desugared(iter)
}
}
impl<T, I, A: Allocator> SpecExtend<T, I> for Vec<T, A>
where
I: TrustedLen<Item = T>,
{
default fn spec_extend(&mut self, iterator: I) {
// This is the case for a TrustedLen iterator.
let (low, high) = iterator.size_hint();
if let Some(high_value) = high {
debug_assert_eq!(
low,
high_value,
"TrustedLen iterator's size hint is not exact: {:?}",
(low, high)
);
}
if let Some(additional) = high {
self.reserve(additional);
unsafe {
let mut ptr = self.as_mut_ptr().add(self.len());
let mut local_len = SetLenOnDrop::new(&mut self.len);
iterator.for_each(move |element| {
ptr::write(ptr, element);
ptr = ptr.offset(1);
// NB can't overflow since we would have had to alloc the address space
local_len.increment_len(1);
});
}
} else {
self.extend_desugared(iterator)
}
}
}
impl<T, A: Allocator> SpecExtend<T, IntoIter<T>> for Vec<T, A> {
fn spec_extend(&mut self, mut iterator: IntoIter<T>) {
unsafe {
self.append_elements(iterator.as_slice() as _);
}
iterator.ptr = iterator.end;
}
}
impl<'a, T: 'a, I, A: Allocator + 'a> SpecExtend<&'a T, I> for Vec<T, A>
where
I: Iterator<Item = &'a T>,
T: Clone,
{
default fn spec_extend(&mut self, iterator: I) {
self.spec_extend(iterator.cloned())
}
}
impl<'a, T: 'a, A: Allocator + 'a> SpecExtend<&'a T, slice::Iter<'a, T>> for Vec<T, A>
where
T: Copy,
{
fn spec_extend(&mut self, iterator: slice::Iter<'a, T>) {
let slice = iterator.as_slice();
unsafe { self.append_elements(slice) };
}
}
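
From the caller's side the dispatch is invisible; the same extend call lands on the slice/Copy memcpy path, the TrustedLen reserve-once path, or the generic loop, depending on the iterator (the routing described here reflects this implementation):

fn main() {
    let mut v: Vec<u32> = Vec::new();

    // slice::Iter over a Copy type -> append_elements (one memcpy).
    v.extend([1u32, 2, 3].iter());

    // RangeInclusive is TrustedLen -> a single reserve, then raw writes
    // behind a SetLenOnDrop guard.
    v.extend(4..=6);

    // Filter has no trusted length -> the default extend_desugared loop.
    v.extend((7..100).filter(|x| *x < 9));

    assert_eq!(v, [1, 2, 3, 4, 5, 6, 7, 8]);
}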

library/alloc/src/vec/spec_from_elem.rs (new file):

@@ -0,0 +1,60 @@
use crate::alloc::Allocator;
use crate::raw_vec::RawVec;
use core::ptr::{self};
use super::{ExtendElement, IsZero, Vec};
// Specialization trait used for Vec::from_elem
pub(super) trait SpecFromElem: Sized {
fn from_elem<A: Allocator>(elem: Self, n: usize, alloc: A) -> Vec<Self, A>;
}
impl<T: Clone> SpecFromElem for T {
default fn from_elem<A: Allocator>(elem: Self, n: usize, alloc: A) -> Vec<Self, A> {
let mut v = Vec::with_capacity_in(n, alloc);
v.extend_with(n, ExtendElement(elem));
v
}
}
impl SpecFromElem for i8 {
#[inline]
fn from_elem<A: Allocator>(elem: i8, n: usize, alloc: A) -> Vec<i8, A> {
if elem == 0 {
return Vec { buf: RawVec::with_capacity_zeroed_in(n, alloc), len: n };
}
unsafe {
let mut v = Vec::with_capacity_in(n, alloc);
ptr::write_bytes(v.as_mut_ptr(), elem as u8, n);
v.set_len(n);
v
}
}
}
impl SpecFromElem for u8 {
#[inline]
fn from_elem<A: Allocator>(elem: u8, n: usize, alloc: A) -> Vec<u8, A> {
if elem == 0 {
return Vec { buf: RawVec::with_capacity_zeroed_in(n, alloc), len: n };
}
unsafe {
let mut v = Vec::with_capacity_in(n, alloc);
ptr::write_bytes(v.as_mut_ptr(), elem, n);
v.set_len(n);
v
}
}
}
impl<T: Clone + IsZero> SpecFromElem for T {
#[inline]
fn from_elem<A: Allocator>(elem: T, n: usize, alloc: A) -> Vec<T, A> {
if elem.is_zero() {
return Vec { buf: RawVec::with_capacity_zeroed_in(n, alloc), len: n };
}
let mut v = Vec::with_capacity_in(n, alloc);
v.extend_with(n, ExtendElement(elem));
v
}
}
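
Since vec![x; n] bottoms out in from_elem, these three literals pick the three paths above (an implementation detail, sketched for illustration):

fn main() {
    let zeros = vec![0u32; 512];            // IsZero -> pre-zeroed allocation
    let ones = vec![1u8; 512];              // nonzero u8 -> one ptr::write_bytes
    let names = vec![String::from("x"); 4]; // generic -> clone per element
    assert_eq!(zeros[511], 0);
    assert_eq!(ones[511], 1);
    assert_eq!(names[3], "x");
}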

library/alloc/src/vec/spec_from_iter.rs (new file):

@@ -0,0 +1,97 @@
use core::mem::ManuallyDrop;
use core::ptr::{self};
use core::slice::{self};
use super::{IntoIter, SpecExtend, SpecFromIterNested, Vec};
/// Specialization trait used for Vec::from_iter
///
/// ## The delegation graph:
///
/// ```text
/// +-------------+
/// |FromIterator |
/// +-+-----------+
///   |
///   v
/// +-+-------------------------------+  +---------------------+
/// |SpecFromIter                  +---->+SpecFromIterNested   |
/// |where I:                      |  |  |where I:             |
/// |  Iterator (default)----------+  |  |  Iterator (default) |
/// |  vec::IntoIter               |  |  |  TrustedLen         |
/// |  SourceIterMarker---fallback-+  |  |                     |
/// |  slice::Iter                    |  |                     |
/// |  Iterator<Item = &Clone>        |  +---------------------+
/// +---------------------------------+
/// ```
pub(super) trait SpecFromIter<T, I> {
fn from_iter(iter: I) -> Self;
}
impl<T, I> SpecFromIter<T, I> for Vec<T>
where
I: Iterator<Item = T>,
{
default fn from_iter(iterator: I) -> Self {
SpecFromIterNested::from_iter(iterator)
}
}
impl<T> SpecFromIter<T, IntoIter<T>> for Vec<T> {
fn from_iter(iterator: IntoIter<T>) -> Self {
// A common case is passing a vector into a function which immediately
// re-collects into a vector. We can short circuit this if the IntoIter
// has not been advanced at all.
// When it has been advanced, we can also reuse the memory and move the data to the front.
// But we only do so when the resulting Vec wouldn't have more unused capacity
// than creating it through the generic FromIterator implementation would. That limitation
// is not strictly necessary as Vec's allocation behavior is intentionally unspecified.
// But it is a conservative choice.
let has_advanced = iterator.buf.as_ptr() as *const _ != iterator.ptr;
if !has_advanced || iterator.len() >= iterator.cap / 2 {
unsafe {
let it = ManuallyDrop::new(iterator);
if has_advanced {
ptr::copy(it.ptr, it.buf.as_ptr(), it.len());
}
return Vec::from_raw_parts(it.buf.as_ptr(), it.len(), it.cap);
}
}
let mut vec = Vec::new();
// must delegate to spec_extend() since extend() itself delegates
// to spec_from for empty Vecs
vec.spec_extend(iterator);
vec
}
}
impl<'a, T: 'a, I> SpecFromIter<&'a T, I> for Vec<T>
where
I: Iterator<Item = &'a T>,
T: Clone,
{
default fn from_iter(iterator: I) -> Self {
SpecFromIter::from_iter(iterator.cloned())
}
}
// This utilizes `iterator.as_slice().to_vec()` since spec_extend
// must take more steps to reason about the final capacity + length
// and thus do more work. `to_vec()` directly allocates the correct amount
// and fills it exactly.
impl<'a, T: 'a + Clone> SpecFromIter<&'a T, slice::Iter<'a, T>> for Vec<T> {
#[cfg(not(test))]
fn from_iter(iterator: slice::Iter<'a, T>) -> Self {
iterator.as_slice().to_vec()
}
// HACK(japaric): with cfg(test) the inherent `[T]::to_vec` method, which is
// required for this method definition, is not available. Instead use the
// `slice::to_vec` function which is only available with cfg(test)
// NB see the slice::hack module in slice.rs for more information
#[cfg(test)]
fn from_iter(iterator: slice::Iter<'a, T>) -> Self {
crate::slice::to_vec(iterator.as_slice(), crate::alloc::Global)
}
}
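
The IntoIter short-circuit above makes "collect a vec back into a vec" essentially free when the iterator has not been advanced; a hedged demo (buffer identity is an implementation detail):

fn main() {
    let v = vec![String::from("a"), String::from("b")];
    let before = v.as_ptr();
    let w: Vec<String> = v.into_iter().collect();
    // Expected to print `true` under this implementation.
    println!("buffer reused: {}", w.as_ptr() == before);
}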

library/alloc/src/vec/spec_from_iter_nested.rs (new file):

@@ -0,0 +1,56 @@
use core::iter::TrustedLen;
use core::ptr::{self};
use super::{SpecExtend, Vec};
/// Another specialization trait for Vec::from_iter
/// necessary to manually prioritize overlapping specializations
/// see [`SpecFromIter`] for details.
pub(super) trait SpecFromIterNested<T, I> {
fn from_iter(iter: I) -> Self;
}
impl<T, I> SpecFromIterNested<T, I> for Vec<T>
where
I: Iterator<Item = T>,
{
default fn from_iter(mut iterator: I) -> Self {
// Unroll the first iteration, as the vector is going to be
// expanded on this iteration in every case when the iterable is not
// empty, but the loop in extend_desugared() is not going to see the
// vector being full in the few subsequent loop iterations.
// So we get better branch prediction.
let mut vector = match iterator.next() {
None => return Vec::new(),
Some(element) => {
let (lower, _) = iterator.size_hint();
let mut vector = Vec::with_capacity(lower.saturating_add(1));
unsafe {
ptr::write(vector.as_mut_ptr(), element);
vector.set_len(1);
}
vector
}
};
// must delegate to spec_extend() since extend() itself delegates
// to spec_from for empty Vecs
<Vec<T> as SpecExtend<T, I>>::spec_extend(&mut vector, iterator);
vector
}
}
impl<T, I> SpecFromIterNested<T, I> for Vec<T>
where
I: TrustedLen<Item = T>,
{
fn from_iter(iterator: I) -> Self {
let mut vector = match iterator.size_hint() {
(_, Some(upper)) => Vec::with_capacity(upper),
_ => Vec::new(),
};
// must delegate to spec_extend() since extend() itself delegates
// to spec_from for empty Vecs
vector.spec_extend(iterator);
vector
}
}
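
What the split buys is allocation behavior; a rough illustration (the capacities printed reflect this implementation, not a guarantee):

fn main() {
    // Range is TrustedLen: sized in one shot from the exact upper bound.
    let v: Vec<i32> = (0..100).collect();
    println!("len = {}, cap = {}", v.len(), v.capacity()); // cap expected to be 100

    // Filter reports only (0, Some(n)) and is not TrustedLen: the default
    // impl unrolls the first element, starts near the lower bound, and grows.
    let w: Vec<i32> = (0..100).filter(|_| true).collect();
    println!("len = {}, cap = {}", w.len(), w.capacity());
}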

library/alloc/src/vec/splice.rs (new file):

@@ -0,0 +1,133 @@
use crate::alloc::{Allocator, Global};
use core::ptr::{self};
use core::slice::{self};
use super::{Drain, Vec};
/// A splicing iterator for `Vec`.
///
/// This struct is created by [`Vec::splice()`].
/// See its documentation for more.
///
/// # Example
///
/// ```
/// let mut v = vec![0, 1, 2];
/// let new = [7, 8];
/// let iter: std::vec::Splice<_> = v.splice(1.., new.iter().cloned());
/// ```
#[derive(Debug)]
#[stable(feature = "vec_splice", since = "1.21.0")]
pub struct Splice<
'a,
I: Iterator + 'a,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + 'a = Global,
> {
pub(super) drain: Drain<'a, I::Item, A>,
pub(super) replace_with: I,
}
#[stable(feature = "vec_splice", since = "1.21.0")]
impl<I: Iterator, A: Allocator> Iterator for Splice<'_, I, A> {
type Item = I::Item;
fn next(&mut self) -> Option<Self::Item> {
self.drain.next()
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.drain.size_hint()
}
}
#[stable(feature = "vec_splice", since = "1.21.0")]
impl<I: Iterator, A: Allocator> DoubleEndedIterator for Splice<'_, I, A> {
fn next_back(&mut self) -> Option<Self::Item> {
self.drain.next_back()
}
}
#[stable(feature = "vec_splice", since = "1.21.0")]
impl<I: Iterator, A: Allocator> ExactSizeIterator for Splice<'_, I, A> {}
#[stable(feature = "vec_splice", since = "1.21.0")]
impl<I: Iterator, A: Allocator> Drop for Splice<'_, I, A> {
fn drop(&mut self) {
self.drain.by_ref().for_each(drop);
unsafe {
if self.drain.tail_len == 0 {
self.drain.vec.as_mut().extend(self.replace_with.by_ref());
return;
}
// First fill the range left by drain().
if !self.drain.fill(&mut self.replace_with) {
return;
}
// There may be more elements. Use the lower bound as an estimate.
// FIXME: Is the upper bound a better guess? Or something else?
let (lower_bound, _upper_bound) = self.replace_with.size_hint();
if lower_bound > 0 {
self.drain.move_tail(lower_bound);
if !self.drain.fill(&mut self.replace_with) {
return;
}
}
// Collect any remaining elements.
// This is a zero-length vector which does not allocate if `lower_bound` was exact.
let mut collected = self.replace_with.by_ref().collect::<Vec<I::Item>>().into_iter();
// Now we have an exact count.
if collected.len() > 0 {
self.drain.move_tail(collected.len());
let filled = self.drain.fill(&mut collected);
debug_assert!(filled);
debug_assert_eq!(collected.len(), 0);
}
}
// Let `Drain::drop` move the tail back if necessary and restore `vec.len`.
}
}
/// Private helper methods for `Splice::drop`
impl<T, A: Allocator> Drain<'_, T, A> {
/// The range from `self.vec.len` to `self.tail_start` contains elements
/// that have been moved out.
/// Fill that range as much as possible with new elements from the `replace_with` iterator.
/// Returns `true` if we filled the entire range. (`replace_with.next()` didn't return `None`.)
unsafe fn fill<I: Iterator<Item = T>>(&mut self, replace_with: &mut I) -> bool {
let vec = unsafe { self.vec.as_mut() };
let range_start = vec.len;
let range_end = self.tail_start;
let range_slice = unsafe {
slice::from_raw_parts_mut(vec.as_mut_ptr().add(range_start), range_end - range_start)
};
for place in range_slice {
if let Some(new_item) = replace_with.next() {
unsafe { ptr::write(place, new_item) };
vec.len += 1;
} else {
return false;
}
}
true
}
/// Makes room for inserting more elements before the tail.
unsafe fn move_tail(&mut self, additional: usize) {
let vec = unsafe { self.vec.as_mut() };
let len = self.tail_start + self.tail_len;
vec.buf.reserve(len, additional);
let new_tail_start = self.tail_start + additional;
unsafe {
let src = vec.as_ptr().add(self.tail_start);
let dst = vec.as_mut_ptr().add(new_tail_start);
ptr::copy(src, dst, self.tail_len);
}
self.tail_start = new_tail_start;
}
}
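
Splice is stable, so the fill/move_tail machinery above is directly reachable; note the replacement may be longer or shorter than the drained range:

fn main() {
    let mut v = vec![1, 2, 3, 4, 5];
    // Replace two elements with three: move_tail shifts 4 and 5 to make room.
    let removed: Vec<i32> = v.splice(1..3, vec![10, 20, 30]).collect();
    assert_eq!(removed, [2, 3]);
    assert_eq!(v, [1, 10, 20, 30, 4, 5]);
}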

(UI test .stderr update):

@@ -4,7 +4,7 @@ error[E0277]: the size for values of type `[u8]` cannot be known at compilation time
LL | type Ty = Vec<[u8]>;
| ^^^^^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time
|
::: $SRC_DIR/alloc/src/vec.rs:LL:COL
::: $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
|
LL | pub struct Vec<T, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global> {
| - required by this bound in `Vec`

(UI test .stderr update):

@@ -15,7 +15,7 @@ error[E0277]: the size for values of type `dyn Trait` cannot be known at compilation time
LL | let x: Vec<dyn Trait + Sized> = Vec::new();
| ^^^^^^^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time
|
::: $SRC_DIR/alloc/src/vec.rs:LL:COL
::: $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
|
LL | pub struct Vec<T, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global> {
| - required by this bound in `Vec`

(UI test .stderr update):

@@ -4,7 +4,7 @@ error[E0277]: the size for values of type `[i32]` cannot be known at compilation time
LL | fn iceman(c: Vec<[i32]>) {}
| ^^^^^^^^^^ doesn't have a size known at compile-time
|
::: $SRC_DIR/alloc/src/vec.rs:LL:COL
::: $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
|
LL | pub struct Vec<T, #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global> {
| - required by this bound in `Vec`

(UI test .stderr update):

@@ -107,7 +107,7 @@ LL | VEC.push(0);
= note: each usage of a `const` item creates a new temporary
= note: the mutable reference will refer to this temporary, not the original `const` item
note: mutable reference created due to call to this method
--> $SRC_DIR/alloc/src/vec.rs:LL:COL
--> $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
|
LL | / pub fn push(&mut self, value: T) {
LL | | // This will panic or abort if we would allocate > isize::MAX bytes