Auto merge of #139430 - scottmcm:polymorphic-array-into-iter, r=cuviper

Polymorphize `array::IntoIter`'s iterator impl

Today we emit all the iterator methods separately for every array width.  That's wasteful since the actual array length never even comes into it -- the indices used come from the separate `alive: IndexRange` field, not from the `N` const param.

This PR switches things so that an `array::IntoIter<T, N>` stores a `PolymorphicIter<[MaybeUninit<T>; N]>`, which we *unsize* to `PolymorphicIter<[MaybeUninit<T>]>` and call methods on that non-`Sized` type for all the iterator methods.

Because of the unsizing, that also necessarily makes the layout consistent across the different array lengths.  Compare that to today <https://rust.godbolt.org/z/Prb4xMPrb>, where the methods for different widths can't even be deduplicated, because the offset to the indices differs with the array width.
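To make the trick concrete, here is a minimal self-contained sketch of the same unsizing pattern. The names (`PolyIter`, `start`) are illustrative only, not the actual std internals:

```rust
// A struct whose *last* field is generic may be unsized: references to
// `PolyIter<[u32; N]>` coerce to references to `PolyIter<[u32]>`.
struct PolyIter<D: ?Sized> {
    start: usize, // next index to yield
    data: D,
}

impl PolyIter<[u32]> {
    // Compiled once for the slice type, no matter which N the caller used.
    fn next(&mut self) -> Option<u32> {
        let item = self.data.get(self.start).copied();
        self.start += item.is_some() as usize;
        item
    }
}

fn main() {
    let mut iter: PolyIter<[u32; 3]> = PolyIter { start: 0, data: [10, 20, 30] };
    // Unsized coercion: `&mut PolyIter<[u32; 3]>` -> `&mut PolyIter<[u32]>`.
    let shared: &mut PolyIter<[u32]> = &mut iter;
    assert_eq!(shared.next(), Some(10));
    assert_eq!(shared.next(), Some(20));
}
```

Every array length shares the one monomorphization of `next`, which is exactly what the `PolymorphicIter` change below achieves for the real iterator methods.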
bors 2025-04-11 23:21:31 +00:00
commit 1bc56185ee
4 changed files with 455 additions and 147 deletions

View File

@@ -1,38 +1,35 @@
//! Defines the `IntoIter` owned iterator for arrays.
use crate::intrinsics::transmute_unchecked;
use crate::iter::{self, FusedIterator, TrustedLen, TrustedRandomAccessNoCoerce};
use crate::iter::{FusedIterator, TrustedLen, TrustedRandomAccessNoCoerce};
use crate::mem::MaybeUninit;
use crate::num::NonZero;
use crate::ops::{IndexRange, Range};
use crate::ops::{IndexRange, Range, Try};
use crate::{fmt, ptr};
mod iter_inner;
type InnerSized<T, const N: usize> = iter_inner::PolymorphicIter<[MaybeUninit<T>; N]>;
type InnerUnsized<T> = iter_inner::PolymorphicIter<[MaybeUninit<T>]>;
/// A by-value [array] iterator.
#[stable(feature = "array_value_iter", since = "1.51.0")]
#[rustc_insignificant_dtor]
#[rustc_diagnostic_item = "ArrayIntoIter"]
#[derive(Clone)]
pub struct IntoIter<T, const N: usize> {
/// This is the array we are iterating over.
///
/// Elements with index `i` where `alive.start <= i < alive.end` have not
/// been yielded yet and are valid array entries. Elements with indices `i
/// < alive.start` or `i >= alive.end` have been yielded already and must
/// not be accessed anymore! Those dead elements might even be in a
/// completely uninitialized state!
///
/// So the invariants are:
/// - `data[alive]` is alive (i.e. contains valid elements)
/// - `data[..alive.start]` and `data[alive.end..]` are dead (i.e. the
/// elements were already read and must not be touched anymore!)
data: [MaybeUninit<T>; N],
inner: InnerSized<T, N>,
}
/// The elements in `data` that have not been yielded yet.
///
/// Invariants:
/// - `alive.end <= N`
///
/// (And the `IndexRange` type requires `alive.start <= alive.end`.)
alive: IndexRange,
impl<T, const N: usize> IntoIter<T, N> {
#[inline]
fn unsize(&self) -> &InnerUnsized<T> {
&self.inner
}
#[inline]
fn unsize_mut(&mut self) -> &mut InnerUnsized<T> {
&mut self.inner
}
}
// Note: the `#[rustc_skip_during_method_dispatch(array)]` on `trait IntoIterator`
@@ -53,6 +50,7 @@ impl<T, const N: usize> IntoIterator for [T; N] {
/// 2021 edition -- see the [array] Editions section for more information.
///
/// [array]: prim@array
#[inline]
fn into_iter(self) -> Self::IntoIter {
// SAFETY: The transmute here is actually safe. The docs of `MaybeUninit`
// promise:
@@ -68,7 +66,10 @@ impl<T, const N: usize> IntoIterator for [T; N] {
// FIXME: If normal `transmute` ever gets smart enough to allow this
// directly, use it instead of `transmute_unchecked`.
let data: [MaybeUninit<T>; N] = unsafe { transmute_unchecked(self) };
IntoIter { data, alive: IndexRange::zero_to(N) }
// SAFETY: The original array was entirely initialized and the alive
// range we're passing here represents that fact.
let inner = unsafe { InnerSized::new_unchecked(IndexRange::zero_to(N), data) };
IntoIter { inner }
}
}
@@ -136,13 +137,16 @@ impl<T, const N: usize> IntoIter<T, N> {
/// assert_eq!(r.collect::<Vec<_>>(), vec![10, 11, 12, 13, 14, 15]);
/// ```
#[unstable(feature = "array_into_iter_constructors", issue = "91583")]
#[inline]
pub const unsafe fn new_unchecked(
buffer: [MaybeUninit<T>; N],
initialized: Range<usize>,
) -> Self {
// SAFETY: one of our safety conditions is that the range is canonical.
let alive = unsafe { IndexRange::new_unchecked(initialized.start, initialized.end) };
Self { data: buffer, alive }
// SAFETY: one of our safety conditions is that these items are initialized.
let inner = unsafe { InnerSized::new_unchecked(alive, buffer) };
IntoIter { inner }
}
/// Creates an iterator over `T` which returns no elements.
@@ -198,172 +202,134 @@ impl<T, const N: usize> IntoIter<T, N> {
/// assert_eq!(get_bytes(false).collect::<Vec<_>>(), vec![]);
/// ```
#[unstable(feature = "array_into_iter_constructors", issue = "91583")]
#[inline]
pub const fn empty() -> Self {
let buffer = [const { MaybeUninit::uninit() }; N];
let initialized = 0..0;
// SAFETY: We're telling it that none of the elements are initialized,
// which is trivially true. And ∀N: usize, 0 <= N.
unsafe { Self::new_unchecked(buffer, initialized) }
let inner = InnerSized::empty();
IntoIter { inner }
}
/// Returns an immutable slice of all elements that have not been yielded
/// yet.
#[stable(feature = "array_value_iter", since = "1.51.0")]
#[inline]
pub fn as_slice(&self) -> &[T] {
// SAFETY: We know that all elements within `alive` are properly initialized.
unsafe {
let slice = self.data.get_unchecked(self.alive.clone());
slice.assume_init_ref()
}
self.unsize().as_slice()
}
/// Returns a mutable slice of all elements that have not been yielded yet.
#[stable(feature = "array_value_iter", since = "1.51.0")]
#[inline]
pub fn as_mut_slice(&mut self) -> &mut [T] {
// SAFETY: We know that all elements within `alive` are properly initialized.
unsafe {
let slice = self.data.get_unchecked_mut(self.alive.clone());
slice.assume_init_mut()
}
self.unsize_mut().as_mut_slice()
}
}
#[stable(feature = "array_value_iter_impls", since = "1.40.0")]
impl<T, const N: usize> Iterator for IntoIter<T, N> {
type Item = T;
fn next(&mut self) -> Option<Self::Item> {
// Get the next index from the front.
//
// Increasing `alive.start` by 1 maintains the invariant regarding
// `alive`. However, due to this change, for a short time, the alive
// zone is not `data[alive]` anymore, but `data[idx..alive.end]`.
self.alive.next().map(|idx| {
// Read the element from the array.
// SAFETY: `idx` is an index into the former "alive" region of the
// array. Reading this element means that `data[idx]` is regarded as
// dead now (i.e. do not touch). As `idx` was the start of the
// alive-zone, the alive zone is now `data[alive]` again, restoring
// all invariants.
unsafe { self.data.get_unchecked(idx).assume_init_read() }
})
}
fn size_hint(&self) -> (usize, Option<usize>) {
let len = self.len();
(len, Some(len))
#[inline]
fn next(&mut self) -> Option<Self::Item> {
self.unsize_mut().next()
}
#[inline]
fn fold<Acc, Fold>(mut self, init: Acc, mut fold: Fold) -> Acc
fn size_hint(&self) -> (usize, Option<usize>) {
self.unsize().size_hint()
}
#[inline]
fn fold<Acc, Fold>(mut self, init: Acc, fold: Fold) -> Acc
where
Fold: FnMut(Acc, Self::Item) -> Acc,
{
let data = &mut self.data;
iter::ByRefSized(&mut self.alive).fold(init, |acc, idx| {
// SAFETY: idx is obtained by folding over the `alive` range, which implies the
// value is currently considered alive but as the range is being consumed each value
// we read here will only be read once and then considered dead.
fold(acc, unsafe { data.get_unchecked(idx).assume_init_read() })
})
self.unsize_mut().fold(init, fold)
}
#[inline]
fn try_fold<B, F, R>(&mut self, init: B, f: F) -> R
where
Self: Sized,
F: FnMut(B, Self::Item) -> R,
R: Try<Output = B>,
{
self.unsize_mut().try_fold(init, f)
}
#[inline]
fn count(self) -> usize {
self.len()
}
#[inline]
fn last(mut self) -> Option<Self::Item> {
self.next_back()
}
#[inline]
fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>> {
// This also moves the start, which marks them as conceptually "dropped",
// so if anything goes bad then our drop impl won't double-free them.
let range_to_drop = self.alive.take_prefix(n);
let remaining = n - range_to_drop.len();
// SAFETY: These elements are currently initialized, so it's fine to drop them.
unsafe {
let slice = self.data.get_unchecked_mut(range_to_drop);
slice.assume_init_drop();
}
NonZero::new(remaining).map_or(Ok(()), Err)
self.unsize_mut().advance_by(n)
}
#[inline]
unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item {
// SAFETY: The caller must provide an idx that is in bounds of the remainder.
unsafe { self.data.as_ptr().add(self.alive.start()).add(idx).cast::<T>().read() }
let elem_ref = unsafe { self.as_mut_slice().get_unchecked_mut(idx) };
// SAFETY: We only implement `TrustedRandomAccessNoCoerce` for types
// which are actually `Copy`, so cannot have multiple-drop issues.
unsafe { ptr::read(elem_ref) }
}
}
#[stable(feature = "array_value_iter_impls", since = "1.40.0")]
impl<T, const N: usize> DoubleEndedIterator for IntoIter<T, N> {
#[inline]
fn next_back(&mut self) -> Option<Self::Item> {
// Get the next index from the back.
//
// Decreasing `alive.end` by 1 maintains the invariant regarding
// `alive`. However, due to this change, for a short time, the alive
// zone is not `data[alive]` anymore, but `data[alive.start..=idx]`.
self.alive.next_back().map(|idx| {
// Read the element from the array.
// SAFETY: `idx` is an index into the former "alive" region of the
// array. Reading this element means that `data[idx]` is regarded as
// dead now (i.e. do not touch). As `idx` was the end of the
// alive-zone, the alive zone is now `data[alive]` again, restoring
// all invariants.
unsafe { self.data.get_unchecked(idx).assume_init_read() }
})
self.unsize_mut().next_back()
}
#[inline]
fn rfold<Acc, Fold>(mut self, init: Acc, mut rfold: Fold) -> Acc
fn rfold<Acc, Fold>(mut self, init: Acc, rfold: Fold) -> Acc
where
Fold: FnMut(Acc, Self::Item) -> Acc,
{
let data = &mut self.data;
iter::ByRefSized(&mut self.alive).rfold(init, |acc, idx| {
// SAFETY: idx is obtained by folding over the `alive` range, which implies the
// value is currently considered alive but as the range is being consumed each value
// we read here will only be read once and then considered dead.
rfold(acc, unsafe { data.get_unchecked(idx).assume_init_read() })
})
self.unsize_mut().rfold(init, rfold)
}
#[inline]
fn try_rfold<B, F, R>(&mut self, init: B, f: F) -> R
where
Self: Sized,
F: FnMut(B, Self::Item) -> R,
R: Try<Output = B>,
{
self.unsize_mut().try_rfold(init, f)
}
#[inline]
fn advance_back_by(&mut self, n: usize) -> Result<(), NonZero<usize>> {
// This also moves the end, which marks them as conceptually "dropped",
// so if anything goes bad then our drop impl won't double-free them.
let range_to_drop = self.alive.take_suffix(n);
let remaining = n - range_to_drop.len();
// SAFETY: These elements are currently initialized, so it's fine to drop them.
unsafe {
let slice = self.data.get_unchecked_mut(range_to_drop);
slice.assume_init_drop();
}
NonZero::new(remaining).map_or(Ok(()), Err)
self.unsize_mut().advance_back_by(n)
}
}
#[stable(feature = "array_value_iter_impls", since = "1.40.0")]
impl<T, const N: usize> Drop for IntoIter<T, N> {
#[inline]
fn drop(&mut self) {
// SAFETY: This is safe: `as_mut_slice` returns exactly the sub-slice
// of elements that have not been moved out yet and that remain
// to be dropped.
unsafe { ptr::drop_in_place(self.as_mut_slice()) }
// `inner` now handles this, but it'd technically be a breaking change
// to remove this `impl`, even though it's useless.
}
}
#[stable(feature = "array_value_iter_impls", since = "1.40.0")]
impl<T, const N: usize> ExactSizeIterator for IntoIter<T, N> {
#[inline]
fn len(&self) -> usize {
self.alive.len()
self.inner.len()
}
#[inline]
fn is_empty(&self) -> bool {
self.alive.is_empty()
self.inner.len() == 0
}
}
@@ -396,32 +362,9 @@ where
const MAY_HAVE_SIDE_EFFECT: bool = false;
}
#[stable(feature = "array_value_iter_impls", since = "1.40.0")]
impl<T: Clone, const N: usize> Clone for IntoIter<T, N> {
fn clone(&self) -> Self {
// Note, we don't really need to match the exact same alive range, so
// we can just clone into offset 0 regardless of where `self` is.
let mut new =
Self { data: [const { MaybeUninit::uninit() }; N], alive: IndexRange::zero_to(0) };
// Clone all alive elements.
for (src, dst) in iter::zip(self.as_slice(), &mut new.data) {
// Write a clone into the new array, then update its alive range.
// If cloning panics, we'll correctly drop the previous items.
dst.write(src.clone());
// This addition cannot overflow as we're iterating a slice
new.alive = IndexRange::zero_to(new.alive.end() + 1);
}
new
}
}
#[stable(feature = "array_value_iter_impls", since = "1.40.0")]
impl<T: fmt::Debug, const N: usize> fmt::Debug for IntoIter<T, N> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// Only print the elements that were not yielded yet: we cannot
// access the yielded elements anymore.
f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
self.unsize().fmt(f)
}
}
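None of this changes the public API; the stable methods simply forward through the shared unsized inner type. Typical usage (plain stable Rust) behaves exactly as before:

```rust
fn main() {
    let mut it = [1_u8, 2, 3, 4].into_iter();
    assert_eq!(it.next(), Some(1));
    assert_eq!(it.as_slice(), &[2, 3, 4]); // the not-yet-yielded elements
    assert_eq!(it.next_back(), Some(4));
    assert_eq!(it.len(), 2); // ExactSizeIterator still tracks the alive range
}
```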

View File

@@ -0,0 +1,281 @@
//! Defines the `IntoIter` owned iterator for arrays.
use crate::mem::MaybeUninit;
use crate::num::NonZero;
use crate::ops::{IndexRange, NeverShortCircuit, Try};
use crate::{fmt, iter};
#[allow(private_bounds)]
trait PartialDrop {
/// # Safety
/// `self[alive]` are all initialized before the call,
/// then are never used (without reinitializing them) after it.
unsafe fn partial_drop(&mut self, alive: IndexRange);
}
impl<T> PartialDrop for [MaybeUninit<T>] {
unsafe fn partial_drop(&mut self, alive: IndexRange) {
// SAFETY: We know that all elements within `alive` are properly initialized.
unsafe { self.get_unchecked_mut(alive).assume_init_drop() }
}
}
impl<T, const N: usize> PartialDrop for [MaybeUninit<T>; N] {
unsafe fn partial_drop(&mut self, alive: IndexRange) {
let slice: &mut [MaybeUninit<T>] = self;
// SAFETY: Initialized elements in the array are also initialized in the slice.
unsafe { slice.partial_drop(alive) }
}
}
/// The internals of a by-value array iterator.
///
/// The real `array::IntoIter<T, N>` stores a `PolymorphicIter<[MaybeUninit<T>; N]>`
/// which it unsizes to `PolymorphicIter<[MaybeUninit<T>]>` to iterate.
#[allow(private_bounds)]
pub(super) struct PolymorphicIter<DATA: ?Sized>
where
DATA: PartialDrop,
{
/// The elements in `data` that have not been yielded yet.
///
/// Invariants:
/// - `alive.end <= N`
///
/// (And the `IndexRange` type requires `alive.start <= alive.end`.)
alive: IndexRange,
/// This is the array we are iterating over.
///
/// Elements with index `i` where `alive.start <= i < alive.end` have not
/// been yielded yet and are valid array entries. Elements with indices `i
/// < alive.start` or `i >= alive.end` have been yielded already and must
/// not be accessed anymore! Those dead elements might even be in a
/// completely uninitialized state!
///
/// So the invariants are:
/// - `data[alive]` is alive (i.e. contains valid elements)
/// - `data[..alive.start]` and `data[alive.end..]` are dead (i.e. the
/// elements were already read and must not be touched anymore!)
data: DATA,
}
#[allow(private_bounds)]
impl<DATA: ?Sized> PolymorphicIter<DATA>
where
DATA: PartialDrop,
{
#[inline]
pub(super) const fn len(&self) -> usize {
self.alive.len()
}
}
#[allow(private_bounds)]
impl<DATA: ?Sized> Drop for PolymorphicIter<DATA>
where
DATA: PartialDrop,
{
#[inline]
fn drop(&mut self) {
// SAFETY: by our type invariant `self.alive` is exactly the initialized
// items, and this is drop so nothing can use the items afterwards.
unsafe { self.data.partial_drop(self.alive.clone()) }
}
}
impl<T, const N: usize> PolymorphicIter<[MaybeUninit<T>; N]> {
#[inline]
pub(super) const fn empty() -> Self {
Self { alive: IndexRange::zero_to(0), data: [const { MaybeUninit::uninit() }; N] }
}
/// # Safety
/// `data[alive]` are all initialized.
#[inline]
pub(super) const unsafe fn new_unchecked(alive: IndexRange, data: [MaybeUninit<T>; N]) -> Self {
Self { alive, data }
}
}
impl<T: Clone, const N: usize> Clone for PolymorphicIter<[MaybeUninit<T>; N]> {
#[inline]
fn clone(&self) -> Self {
// Note, we don't really need to match the exact same alive range, so
// we can just clone into offset 0 regardless of where `self` is.
let mut new = Self::empty();
fn clone_into_new<U: Clone>(
source: &PolymorphicIter<[MaybeUninit<U>]>,
target: &mut PolymorphicIter<[MaybeUninit<U>]>,
) {
// Clone all alive elements.
for (src, dst) in iter::zip(source.as_slice(), &mut target.data) {
// Write a clone into the new array, then update its alive range.
// If cloning panics, we'll correctly drop the previous items.
dst.write(src.clone());
// This addition cannot overflow as we're iterating a slice,
// the length of which always fits in usize.
target.alive = IndexRange::zero_to(target.alive.end() + 1);
}
}
clone_into_new(self, &mut new);
new
}
}
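The local `clone_into_new` helper is the same length-erasure trick in miniature: its body is written against the unsized `PolymorphicIter<[MaybeUninit<U>]>`, so the cloning loop is compiled once per element type instead of once per `(element type, N)` pair. A standalone illustration of that pattern, with hypothetical names (`fill_default`, `fill`):

```rust
// `fill_default` is instantiated for every (T, N) pair it is called with,
// but the loop lives in `fill`, which is instantiated once per T only.
fn fill_default<T: Default, const N: usize>(arr: &mut [T; N]) {
    fn fill<T: Default>(slice: &mut [T]) {
        for slot in slice {
            *slot = T::default();
        }
    }
    fill(arr); // `&mut [T; N]` unsizes to `&mut [T]` here
}

fn main() {
    let mut small = [1_i32; 4];
    let mut large = [1_i32; 1000]; // reuses the same `fill::<i32>` as `small`
    fill_default(&mut small);
    fill_default(&mut large);
    assert_eq!(small, [0; 4]);
}
```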
impl<T> PolymorphicIter<[MaybeUninit<T>]> {
#[inline]
pub(super) fn as_slice(&self) -> &[T] {
// SAFETY: We know that all elements within `alive` are properly initialized.
unsafe {
let slice = self.data.get_unchecked(self.alive.clone());
slice.assume_init_ref()
}
}
#[inline]
pub(super) fn as_mut_slice(&mut self) -> &mut [T] {
// SAFETY: We know that all elements within `alive` are properly initialized.
unsafe {
let slice = self.data.get_unchecked_mut(self.alive.clone());
slice.assume_init_mut()
}
}
}
impl<T: fmt::Debug> fmt::Debug for PolymorphicIter<[MaybeUninit<T>]> {
#[inline]
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
// Only print the elements that were not yielded yet: we cannot
// access the yielded elements anymore.
f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
}
}
/// Iterator-equivalent methods.
///
/// We don't implement the actual iterator traits because we want to implement
/// things like `try_fold` that require `Self: Sized` (which this type isn't).
impl<T> PolymorphicIter<[MaybeUninit<T>]> {
#[inline]
pub(super) fn next(&mut self) -> Option<T> {
// Get the next index from the front.
//
// Increasing `alive.start` by 1 maintains the invariant regarding
// `alive`. However, due to this change, for a short time, the alive
// zone is not `data[alive]` anymore, but `data[idx..alive.end]`.
self.alive.next().map(|idx| {
// Read the element from the array.
// SAFETY: `idx` is an index into the former "alive" region of the
// array. Reading this element means that `data[idx]` is regarded as
// dead now (i.e. do not touch). As `idx` was the start of the
// alive-zone, the alive zone is now `data[alive]` again, restoring
// all invariants.
unsafe { self.data.get_unchecked(idx).assume_init_read() }
})
}
#[inline]
pub(super) fn size_hint(&self) -> (usize, Option<usize>) {
let len = self.len();
(len, Some(len))
}
#[inline]
pub(super) fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>> {
// This also moves the start, which marks them as conceptually "dropped",
// so if anything goes bad then our drop impl won't double-free them.
let range_to_drop = self.alive.take_prefix(n);
let remaining = n - range_to_drop.len();
// SAFETY: These elements are currently initialized, so it's fine to drop them.
unsafe {
let slice = self.data.get_unchecked_mut(range_to_drop);
slice.assume_init_drop();
}
NonZero::new(remaining).map_or(Ok(()), Err)
}
#[inline]
pub(super) fn fold<B>(&mut self, init: B, f: impl FnMut(B, T) -> B) -> B {
self.try_fold(init, NeverShortCircuit::wrap_mut_2(f)).0
}
#[inline]
pub(super) fn try_fold<B, F, R>(&mut self, init: B, mut f: F) -> R
where
F: FnMut(B, T) -> R,
R: Try<Output = B>,
{
// `alive` is an `IndexRange`, not an arbitrary iterator, so we can
// trust that its `try_fold` isn't going to do something weird like
// call the fold-er multiple times for the same index.
let data = &mut self.data;
self.alive.try_fold(init, move |accum, idx| {
// SAFETY: `idx` has been removed from the alive range, so we're not
// going to drop it (even if `f` panics) and thus it's ok to give
// out ownership of that item to `f` to handle.
let elem = unsafe { data.get_unchecked(idx).assume_init_read() };
f(accum, elem)
})
}
#[inline]
pub(super) fn next_back(&mut self) -> Option<T> {
// Get the next index from the back.
//
// Decreasing `alive.end` by 1 maintains the invariant regarding
// `alive`. However, due to this change, for a short time, the alive
// zone is not `data[alive]` anymore, but `data[alive.start..=idx]`.
self.alive.next_back().map(|idx| {
// Read the element from the array.
// SAFETY: `idx` is an index into the former "alive" region of the
// array. Reading this element means that `data[idx]` is regarded as
// dead now (i.e. do not touch). As `idx` was the end of the
// alive-zone, the alive zone is now `data[alive]` again, restoring
// all invariants.
unsafe { self.data.get_unchecked(idx).assume_init_read() }
})
}
#[inline]
pub(super) fn advance_back_by(&mut self, n: usize) -> Result<(), NonZero<usize>> {
// This also moves the end, which marks them as conceptually "dropped",
// so if anything goes bad then our drop impl won't double-free them.
let range_to_drop = self.alive.take_suffix(n);
let remaining = n - range_to_drop.len();
// SAFETY: These elements are currently initialized, so it's fine to drop them.
unsafe {
let slice = self.data.get_unchecked_mut(range_to_drop);
slice.assume_init_drop();
}
NonZero::new(remaining).map_or(Ok(()), Err)
}
#[inline]
pub(super) fn rfold<B>(&mut self, init: B, f: impl FnMut(B, T) -> B) -> B {
self.try_rfold(init, NeverShortCircuit::wrap_mut_2(f)).0
}
#[inline]
pub(super) fn try_rfold<B, F, R>(&mut self, init: B, mut f: F) -> R
where
F: FnMut(B, T) -> R,
R: Try<Output = B>,
{
// `alive` is an `IndexRange`, not an arbitrary iterator, so we can
// trust that its `try_rfold` isn't going to do something weird like
// call the fold-er multiple times for the same index.
let data = &mut self.data;
self.alive.try_rfold(init, move |accum, idx| {
// SAFETY: `idx` has been removed from the alive range, so we're not
// going to drop it (even if `f` panics) and thus it's ok to give
// out ownership of that item to `f` to handle.
let elem = unsafe { data.get_unchecked(idx).assume_init_read() };
f(accum, elem)
})
}
}
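Note how `fold` and `rfold` above are just `try_fold`/`try_rfold` with the closure wrapped in `NeverShortCircuit`, a `Try` type that can never break out early. That type is private to core, but the shape of the trick can be sketched with `Result<B, Infallible>` standing in for it:

```rust
use core::convert::Infallible;

// Sketch: implement an infallible fold on top of a fallible `try_fold` by
// returning `Ok` on every step, so short-circuiting can never happen.
fn fold_via_try_fold<I: Iterator, B>(
    iter: &mut I,
    init: B,
    mut f: impl FnMut(B, I::Item) -> B,
) -> B {
    let result: Result<B, Infallible> = iter.try_fold(init, |acc, x| Ok(f(acc, x)));
    match result {
        Ok(acc) => acc,
        Err(never) => match never {}, // statically unreachable
    }
}

fn main() {
    let mut it = [1, 2, 3].into_iter();
    assert_eq!(fold_via_try_fold(&mut it, 0, |a, b| a + b), 6);
}
```

This keeps a single loop (the `try_` version) as the source of truth for both entry points.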

View File

@@ -1,5 +1,6 @@
use crate::iter::{FusedIterator, TrustedLen};
use crate::num::NonZero;
use crate::ops::{NeverShortCircuit, Try};
use crate::ub_checks;
/// Like a `Range<usize>`, but with a safety invariant that `start <= end`.
@@ -112,6 +113,12 @@ impl IndexRange {
self.end = mid;
suffix
}
#[inline]
fn assume_range(&self) {
// SAFETY: This is the type invariant
unsafe { crate::hint::assert_unchecked(self.start <= self.end) }
}
}
impl Iterator for IndexRange {
@@ -138,6 +145,30 @@ impl Iterator for IndexRange {
let taken = self.take_prefix(n);
NonZero::new(n - taken.len()).map_or(Ok(()), Err)
}
#[inline]
fn fold<B, F: FnMut(B, usize) -> B>(mut self, init: B, f: F) -> B {
self.try_fold(init, NeverShortCircuit::wrap_mut_2(f)).0
}
#[inline]
fn try_fold<B, F, R>(&mut self, mut accum: B, mut f: F) -> R
where
Self: Sized,
F: FnMut(B, Self::Item) -> R,
R: Try<Output = B>,
{
// `Range` needs to check `start < end`, but thanks to our type invariant
// we can loop on the cheaper `start != end`.
self.assume_range();
while self.start != self.end {
// SAFETY: We just checked that the range is non-empty
let i = unsafe { self.next_unchecked() };
accum = f(accum, i)?;
}
try { accum }
}
}
impl DoubleEndedIterator for IndexRange {
@@ -156,6 +187,30 @@ impl DoubleEndedIterator for IndexRange {
let taken = self.take_suffix(n);
NonZero::new(n - taken.len()).map_or(Ok(()), Err)
}
#[inline]
fn rfold<B, F: FnMut(B, usize) -> B>(mut self, init: B, f: F) -> B {
self.try_rfold(init, NeverShortCircuit::wrap_mut_2(f)).0
}
#[inline]
fn try_rfold<B, F, R>(&mut self, mut accum: B, mut f: F) -> R
where
Self: Sized,
F: FnMut(B, Self::Item) -> R,
R: Try<Output = B>,
{
// `Range` needs to check `start < end`, but thanks to our type invariant
// we can loop on the cheaper `start != end`.
self.assume_range();
while self.start != self.end {
// SAFETY: We just checked that the range is non-empty
let i = unsafe { self.next_back_unchecked() };
accum = f(accum, i)?;
}
try { accum }
}
}
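The new `assume_range` is what lets those loops test `start != end` rather than `start < end`: `hint::assert_unchecked` feeds the `start <= end` type invariant to the optimizer. The same mechanism is available on stable Rust; a standalone sketch (not the std code):

```rust
use std::hint;

/// Sums the integers in `start..end`.
///
/// # Safety
/// The caller must guarantee `start <= end`.
unsafe fn sum_range(mut start: usize, end: usize) -> usize {
    // SAFETY: promised by the caller; mirrors `IndexRange::assume_range`.
    // With this fact known, the `!=` test below is as good as `<`.
    unsafe { hint::assert_unchecked(start <= end) };
    let mut acc = 0;
    while start != end {
        acc += start;
        start += 1; // can never step past `end`
    }
    acc
}

fn main() {
    // SAFETY: 2 <= 5.
    assert_eq!(unsafe { sum_range(2, 5) }, 2 + 3 + 4);
}
```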
impl ExactSizeIterator for IndexRange {

View File

@@ -1,8 +1,16 @@
//@ compile-flags: -Copt-level=3
//@ revisions: host x86-64-v3
//@ revisions: host x86-64 x86-64-v3
//@ min-llvm-version: 20
// This particular CPU regressed in #131563
//@[host] ignore-x86_64
// Set the base cpu explicitly, in case the default has been changed.
//@[x86-64] only-x86_64
//@[x86-64] compile-flags: -Ctarget-cpu=x86-64
// FIXME(cuviper) x86-64-v3 in particular regressed in #131563, and the workaround
// at the time still sometimes fails, so only verify it for the power-of-two size
// - https://github.com/llvm/llvm-project/issues/134735
//@[x86-64-v3] only-x86_64
//@[x86-64-v3] compile-flags: -Ctarget-cpu=x86-64-v3
@@ -11,7 +19,16 @@
#[no_mangle]
pub fn test() -> usize {
// CHECK-LABEL: @test(
// CHECK: ret {{i64|i32}} 165
// host: ret {{i64|i32}} 165
// x86-64: ret {{i64|i32}} 165
// FIXME: Now that this autovectorizes via a masked load, it doesn't actually
// const-fold for certain widths. The `test_eight` case below shows that, yes,
// what we're emitting *can* be const-folded; it's just that the way LLVM
// does it for certain widths doesn't manage to today. We should be able to
// put this back to the same check after <https://github.com/llvm/llvm-project/issues/134513>
// x86-64-v3: masked.load
let values = [23, 16, 54, 3, 60, 9];
let mut acc = 0;
for item in values {
@@ -19,3 +36,15 @@ pub fn test() -> usize {
}
acc
}
#[no_mangle]
pub fn test_eight() -> usize {
// CHECK-LABEL: @test_eight(
// CHECK: ret {{i64|i32}} 220
let values = [23, 16, 54, 3, 60, 9, 13, 42];
let mut acc = 0;
for item in values {
acc += item;
}
acc
}
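The expected `ret` constants are simply what the loops should const-fold to, which is easy to verify by hand:

```rust
// The sums the FileCheck directives expect the loops to fold into.
fn main() {
    assert_eq!(23 + 16 + 54 + 3 + 60 + 9, 165);           // `test`
    assert_eq!(23 + 16 + 54 + 3 + 60 + 9 + 13 + 42, 220); // `test_eight`
}
```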