Rollup merge of #120952 - saethlin:vec-into-iter, r=the8472

Don't use mem::zeroed in vec::IntoIter

`mem::zeroed` is not a trivial function. Maybe it was once, but now it involves multiple locals, copies, and an intrinsic that gets monomorphized into a call to `panic_nounwind` for iterators of types like `Vec<&T>`. Of course all that complexity is trivially optimized out, but generating a bunch of IR we don't need, just so we can optimize it away later, is silly.
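
A minimal sketch (not part of the commit; the helper name is made up) of the trick the new code relies on instead: a value of a zero-sized type can simply be read out of any aligned, non-null pointer, which is what the new `next`/`next_back` do by reading through `self.ptr`, with no zero-initialization machinery at all.

use std::ptr::{self, NonNull};

// Illustration only: "conjure" a value of a zero-sized type by reading it
// from a dangling-but-aligned pointer, the same thing the new `next` does
// with `self.ptr.read()`.
fn conjure_zst<T>() -> T {
    assert_eq!(std::mem::size_of::<T>(), 0, "only valid for zero-sized types");
    // SAFETY: reads of zero size are valid for any non-null, aligned pointer,
    // and `NonNull::dangling()` is both.
    unsafe { ptr::read(NonNull::<T>::dangling().as_ptr()) }
}

fn main() {
    struct Marker; // a zero-sized type
    let _: Marker = conjure_zst::<Marker>();
    let _: () = conjure_zst::<()>();
}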
Matthias Krüger 2024-02-17 18:47:40 +01:00 committed by GitHub
commit 59972868e6
2 changed files with 44 additions and 36 deletions

[Changed file 1 of 2: the `vec::IntoIter` iterator implementation in `alloc`]

@@ -11,7 +11,7 @@ use core::iter::{
     TrustedRandomAccessNoCoerce,
 };
 use core::marker::PhantomData;
-use core::mem::{self, ManuallyDrop, MaybeUninit, SizedTypeProperties};
+use core::mem::{ManuallyDrop, MaybeUninit, SizedTypeProperties};
 use core::num::NonZero;
 #[cfg(not(no_global_oom_handling))]
 use core::ops::Deref;
@@ -200,27 +200,23 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
     #[inline]
     fn next(&mut self) -> Option<T> {
-        if T::IS_ZST {
-            if self.ptr.as_ptr() == self.end as *mut _ {
-                None
-            } else {
-                // `ptr` has to stay where it is to remain aligned, so we reduce the length by 1 by
-                // reducing the `end`.
-                self.end = self.end.wrapping_byte_sub(1);
-
-                // Make up a value of this ZST.
-                Some(unsafe { mem::zeroed() })
+        let ptr = if T::IS_ZST {
+            if self.ptr.as_ptr() == self.end as *mut T {
+                return None;
             }
+            // `ptr` has to stay where it is to remain aligned, so we reduce the length by 1 by
+            // reducing the `end`.
+            self.end = self.end.wrapping_byte_sub(1);
+            self.ptr
         } else {
             if self.ptr == non_null!(self.end, T) {
-                None
-            } else {
-                let old = self.ptr;
-                self.ptr = unsafe { old.add(1) };
-
-                Some(unsafe { ptr::read(old.as_ptr()) })
+                return None;
             }
-        }
+            let old = self.ptr;
+            self.ptr = unsafe { old.add(1) };
+            old
+        };
+        Some(unsafe { ptr.read() })
     }

     #[inline]
@@ -305,7 +301,7 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
         // Also note the implementation of `Self: TrustedRandomAccess` requires
         // that `T: Copy` so reading elements from the buffer doesn't invalidate
         // them for `Drop`.
-        unsafe { if T::IS_ZST { mem::zeroed() } else { self.ptr.add(i).read() } }
+        unsafe { self.ptr.add(i).read() }
     }
 }
@@ -314,23 +310,22 @@ impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
     #[inline]
     fn next_back(&mut self) -> Option<T> {
         if T::IS_ZST {
-            if self.end as *mut _ == self.ptr.as_ptr() {
-                None
-            } else {
-                // See above for why 'ptr.offset' isn't used
-                self.end = self.end.wrapping_byte_sub(1);
-
-                // Make up a value of this ZST.
-                Some(unsafe { mem::zeroed() })
+            if self.ptr.as_ptr() == self.end as *mut _ {
+                return None;
             }
+            // See above for why 'ptr.offset' isn't used
+            self.end = self.end.wrapping_byte_sub(1);
+            // Note that even though this is next_back() we're reading from `self.ptr`, not
+            // `self.end`. We track our length using the byte offset from `self.ptr` to `self.end`,
+            // so the end pointer may not be suitably aligned for T.
+            Some(unsafe { ptr::read(self.ptr.as_ptr()) })
         } else {
-            if non_null!(self.end, T) == self.ptr {
-                None
-            } else {
-                let new_end = unsafe { non_null!(self.end, T).sub(1) };
-                *non_null!(mut self.end, T) = new_end;
-
-                Some(unsafe { ptr::read(new_end.as_ptr()) })
-            }
+            if self.ptr == non_null!(self.end, T) {
+                return None;
+            }
+            unsafe {
+                self.end = self.end.sub(1);
+                Some(ptr::read(self.end))
+            }
         }
     }

[Changed file 2 of 2: the `vec::IntoIter` codegen test]

@@ -32,9 +32,9 @@ pub fn vec_iter_is_empty_nonnull(it: &vec::IntoIter<u8>) -> bool {
     it.is_empty()
 }

-// CHECK-LABEL: @vec_iter_next
+// CHECK-LABEL: @vec_iter_next_nonnull
 #[no_mangle]
-pub fn vec_iter_next(it: &mut vec::IntoIter<u8>) -> Option<u8> {
+pub fn vec_iter_next_nonnull(it: &mut vec::IntoIter<u8>) -> Option<u8> {
     // CHECK: load ptr
     // CHECK-SAME: !nonnull
     // CHECK-SAME: !noundef
@@ -44,3 +44,16 @@ pub fn vec_iter_next(it: &mut vec::IntoIter<u8>) -> Option<u8> {
     // CHECK: ret
     it.next()
 }
+
+// CHECK-LABEL: @vec_iter_next_back_nonnull
+#[no_mangle]
+pub fn vec_iter_next_back_nonnull(it: &mut vec::IntoIter<u8>) -> Option<u8> {
+    // CHECK: load ptr
+    // CHECK-SAME: !nonnull
+    // CHECK-SAME: !noundef
+    // CHECK: load ptr
+    // CHECK-SAME: !nonnull
+    // CHECK-SAME: !noundef
+    // CHECK: ret
+    it.next_back()
+}
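
Not part of the commit, but a quick sanity check of the ZST paths touched above: forward and backward iteration over a `Vec` of a zero-sized type still behaves as before, conjuring each element by reading through `ptr` and shrinking the tracked length by moving `end` back one byte at a time.

fn main() {
    let v: Vec<()> = vec![(); 5];
    let mut it = v.into_iter();

    // Both ends consume elements without touching real memory.
    assert_eq!(it.next(), Some(()));
    assert_eq!(it.next_back(), Some(()));
    assert_eq!(it.len(), 3); // vec::IntoIter is an ExactSizeIterator
    assert_eq!(it.count(), 3); // consumes the rest
}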