Rollup merge of #100822 - WaffleLapkin:no_offset_question_mark, r=scottmcm

Replace most uses of `pointer::offset` with `add` and `sub`

As the PR title says, this replaces `pointer::offset` in the compiler and the standard library with `pointer::add` and `pointer::sub`. This generally makes the code cleaner, easier to grasp, and removes (or, well, hides) integer casts.

This is generally trivially correct: `.offset(-constant)` is just `.sub(constant)`, `.offset(usized as isize)` is just `.add(usized)`, and so on. However, in some cases we need to be careful with the signs of things.
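For illustration, here is a minimal, self-contained sketch (not part of the diff) of the rewrite rules applied throughout; `add` and `sub` take `usize`, so the `as isize` casts disappear:

```rust
fn main() {
    let xs = [10i32, 20, 30, 40];
    let p = xs.as_ptr();

    unsafe {
        // `.offset(n as isize)` with `n: usize` is `.add(n)`.
        assert_eq!(*p.offset(2_usize as isize), *p.add(2));

        // `.offset(-constant)` is `.sub(constant)`.
        let end = p.add(xs.len());
        assert_eq!(*end.offset(-1), *end.sub(1));

        // A sign-careful case: `.offset(-(k as isize) - 1)` becomes
        // `.sub(k + 1)`, valid here because `k + 1` cannot overflow.
        let k: usize = 1;
        assert_eq!(*end.offset(-(k as isize) - 1), *end.sub(k + 1));
    }
}
```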

r? `@scottmcm`

_split off from #100746_
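Another sign-careful case shows up in the `vec::IntoIter` `DoubleEndedIterator` hunk below, where `self.end.offset(step_size.wrapping_neg() as isize)` becomes `self.end.sub(step_size)`. A hedged standalone check of that equivalence (the harness is illustrative, not from the PR); it holds because `step_size` is bounded by the allocation size and therefore fits in `isize`:

```rust
fn main() {
    let xs = [1u8, 2, 3, 4];
    unsafe {
        let end = xs.as_ptr().add(xs.len());
        let n: usize = 3; // any n no larger than the distance back to the start
        // For such n, `n.wrapping_neg() as isize` is exactly `-(n as isize)`.
        assert_eq!(end.offset(n.wrapping_neg() as isize), end.sub(n));
    }
}
```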
commit a45f69f27d by Matthias Krüger, 2022-08-21 16:54:07 +02:00 (committed via GitHub)
22 changed files with 57 additions and 57 deletions

@@ -219,7 +219,7 @@ impl<T> TypedArena<T> {
         } else {
             let ptr = self.ptr.get();
             // Advance the pointer.
-            self.ptr.set(self.ptr.get().offset(1));
+            self.ptr.set(self.ptr.get().add(1));
             // Write into uninitialized memory.
             ptr::write(ptr, object);
             &mut *ptr

@@ -94,7 +94,7 @@ mod platform {
     struct Header(*mut u8);
     const HEAP_ZERO_MEMORY: DWORD = 0x00000008;
     unsafe fn get_header<'a>(ptr: *mut u8) -> &'a mut Header {
-        &mut *(ptr as *mut Header).offset(-1)
+        &mut *(ptr as *mut Header).sub(1)
     }
     unsafe fn align_ptr(ptr: *mut u8, align: usize) -> *mut u8 {
         let aligned = ptr.add(align - (ptr as usize & (align - 1)));

@@ -156,7 +156,7 @@ mod platform {
     struct Header(*mut u8);
     const HEAP_ZERO_MEMORY: DWORD = 0x00000008;
     unsafe fn get_header<'a>(ptr: *mut u8) -> &'a mut Header {
-        &mut *(ptr as *mut Header).offset(-1)
+        &mut *(ptr as *mut Header).sub(1)
     }
     unsafe fn align_ptr(ptr: *mut u8, align: usize) -> *mut u8 {
         let aligned = ptr.add(align - (ptr as usize & (align - 1)));

@@ -273,7 +273,7 @@ impl<D: Decoder, T: Decodable<D>> Decodable<D> for Vec<T> {
         unsafe {
             let ptr: *mut T = vec.as_mut_ptr();
             for i in 0..len {
-                std::ptr::write(ptr.offset(i as isize), Decodable::decode(d));
+                std::ptr::write(ptr.add(i), Decodable::decode(d));
             }
             vec.set_len(len);
         }

@@ -15,7 +15,7 @@ fn allocate_zeroed() {
         let end = i.add(layout.size());
         while i < end {
             assert_eq!(*i, 0);
-            i = i.offset(1);
+            i = i.add(1);
         }
         Global.deallocate(ptr.as_non_null_ptr(), layout);
     }

@@ -2447,8 +2447,8 @@ impl<T, A: Allocator> VecDeque<T, A> {
                 let mut right_offset = 0;
                 for i in left_edge..right_edge {
                     right_offset = (i - left_edge) % (cap - right_edge);
-                    let src: isize = (right_edge + right_offset) as isize;
-                    ptr::swap(buf.add(i), buf.offset(src));
+                    let src = right_edge + right_offset;
+                    ptr::swap(buf.add(i), buf.add(src));
                 }
                 let n_ops = right_edge - left_edge;
                 left_edge += n_ops;

@@ -1024,7 +1024,7 @@ where
             // Consume the greater side.
             // If equal, prefer the right run to maintain stability.
             unsafe {
-                let to_copy = if is_less(&*right.offset(-1), &*left.offset(-1)) {
+                let to_copy = if is_less(&*right.sub(1), &*left.sub(1)) {
                     decrement_and_get(left)
                 } else {
                     decrement_and_get(right)
@@ -1038,12 +1038,12 @@ where
     unsafe fn get_and_increment<T>(ptr: &mut *mut T) -> *mut T {
         let old = *ptr;
-        *ptr = unsafe { ptr.offset(1) };
+        *ptr = unsafe { ptr.add(1) };
         old
     }
     unsafe fn decrement_and_get<T>(ptr: &mut *mut T) -> *mut T {
-        *ptr = unsafe { ptr.offset(-1) };
+        *ptr = unsafe { ptr.sub(1) };
         *ptr
     }

@@ -267,7 +267,7 @@ where
             // one slot in the underlying storage will have been freed up and we can immediately
             // write back the result.
             unsafe {
-                let dst = dst_buf.offset(i as isize);
+                let dst = dst_buf.add(i);
                 debug_assert!(dst as *const _ <= end, "InPlaceIterable contract violation");
                 ptr::write(dst, self.__iterator_get_unchecked(i));
                 // Since this executes user code which can panic we have to bump the pointer

@@ -160,7 +160,7 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
             Some(unsafe { mem::zeroed() })
         } else {
             let old = self.ptr;
-            self.ptr = unsafe { self.ptr.offset(1) };
+            self.ptr = unsafe { self.ptr.add(1) };
             Some(unsafe { ptr::read(old) })
         }
@@ -272,7 +272,7 @@ impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
             // Make up a value of this ZST.
             Some(unsafe { mem::zeroed() })
         } else {
-            self.end = unsafe { self.end.offset(-1) };
+            self.end = unsafe { self.end.sub(1) };
             Some(unsafe { ptr::read(self.end) })
         }
@@ -288,7 +288,7 @@ impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
             }
         } else {
             // SAFETY: same as for advance_by()
-            self.end = unsafe { self.end.offset(step_size.wrapping_neg() as isize) };
+            self.end = unsafe { self.end.sub(step_size) };
         }
         let to_drop = ptr::slice_from_raw_parts_mut(self.end as *mut T, step_size);
         // SAFETY: same as for advance_by()

@@ -1393,7 +1393,7 @@ impl<T, A: Allocator> Vec<T, A> {
                 if index < len {
                     // Shift everything over to make space. (Duplicating the
                     // `index`th element into two consecutive places.)
-                    ptr::copy(p, p.offset(1), len - index);
+                    ptr::copy(p, p.add(1), len - index);
                 } else if index == len {
                     // No elements need shifting.
                 } else {
@@ -1455,7 +1455,7 @@ impl<T, A: Allocator> Vec<T, A> {
                 ret = ptr::read(ptr);
                 // Shift everything down to fill in that spot.
-                ptr::copy(ptr.offset(1), ptr, len - index - 1);
+                ptr::copy(ptr.add(1), ptr, len - index - 1);
             }
             self.set_len(len - 1);
             ret
@@ -2408,7 +2408,7 @@ impl<T, A: Allocator> Vec<T, A> {
                 // Write all elements except the last one
                 for _ in 1..n {
                     ptr::write(ptr, value.next());
-                    ptr = ptr.offset(1);
+                    ptr = ptr.add(1);
                     // Increment the length in every step in case next() panics
                     local_len.increment_len(1);
                 }

@@ -39,7 +39,7 @@ where
         let mut local_len = SetLenOnDrop::new(&mut self.len);
         iterator.for_each(move |element| {
             ptr::write(ptr, element);
-            ptr = ptr.offset(1);
+            ptr = ptr.add(1);
             // Since the loop executes user code which can panic we have to bump the pointer
             // after each step.
             // NB can't overflow since we would have had to alloc the address space

@@ -1010,11 +1010,11 @@ fn test_as_bytes_fail() {
 fn test_as_ptr() {
     let buf = "hello".as_ptr();
     unsafe {
-        assert_eq!(*buf.offset(0), b'h');
-        assert_eq!(*buf.offset(1), b'e');
-        assert_eq!(*buf.offset(2), b'l');
-        assert_eq!(*buf.offset(3), b'l');
-        assert_eq!(*buf.offset(4), b'o');
+        assert_eq!(*buf.add(0), b'h');
+        assert_eq!(*buf.add(1), b'e');
+        assert_eq!(*buf.add(2), b'l');
+        assert_eq!(*buf.add(3), b'l');
+        assert_eq!(*buf.add(4), b'o');
     }
 }

@@ -2924,7 +2924,7 @@ impl<T> [T] {
                 let prev_ptr_write = ptr.add(next_write - 1);
                 if !same_bucket(&mut *ptr_read, &mut *prev_ptr_write) {
                     if next_read != next_write {
-                        let ptr_write = prev_ptr_write.offset(1);
+                        let ptr_write = prev_ptr_write.add(1);
                         mem::swap(&mut *ptr_read, &mut *ptr_write);
                     }
                     next_write += 1;

@@ -326,8 +326,8 @@ where
                 unsafe {
                     // Branchless comparison.
                     *end_l = i as u8;
-                    end_l = end_l.offset(!is_less(&*elem, pivot) as isize);
-                    elem = elem.offset(1);
+                    end_l = end_l.add(!is_less(&*elem, pivot) as usize);
+                    elem = elem.add(1);
                 }
             }
         }
@@ -352,9 +352,9 @@ where
                 // Plus, `block_r` was asserted to be less than `BLOCK` and `elem` will therefore at most be pointing to the beginning of the slice.
                 unsafe {
                     // Branchless comparison.
-                    elem = elem.offset(-1);
+                    elem = elem.sub(1);
                     *end_r = i as u8;
-                    end_r = end_r.offset(is_less(&*elem, pivot) as isize);
+                    end_r = end_r.add(is_less(&*elem, pivot) as usize);
                 }
             }
         }
@@ -365,12 +365,12 @@ where
         if count > 0 {
             macro_rules! left {
                 () => {
-                    l.offset(*start_l as isize)
+                    l.add(*start_l as usize)
                 };
             }
             macro_rules! right {
                 () => {
-                    r.offset(-(*start_r as isize) - 1)
+                    r.sub((*start_r as usize) + 1)
                 };
             }
@@ -398,16 +398,16 @@ where
                 ptr::copy_nonoverlapping(right!(), left!(), 1);
                 for _ in 1..count {
-                    start_l = start_l.offset(1);
+                    start_l = start_l.add(1);
                     ptr::copy_nonoverlapping(left!(), right!(), 1);
-                    start_r = start_r.offset(1);
+                    start_r = start_r.add(1);
                     ptr::copy_nonoverlapping(right!(), left!(), 1);
                 }
                 ptr::copy_nonoverlapping(&tmp, right!(), 1);
                 mem::forget(tmp);
-                start_l = start_l.offset(1);
-                start_r = start_r.offset(1);
+                start_l = start_l.add(1);
+                start_r = start_r.add(1);
             }
         }
@@ -420,7 +420,7 @@ where
             // safe. Otherwise, the debug assertions in the `is_done` case guarantee that
             // `width(l, r) == block_l + block_r`, namely, that the block sizes have been adjusted to account
             // for the smaller number of remaining elements.
-            l = unsafe { l.offset(block_l as isize) };
+            l = unsafe { l.add(block_l) };
         }
         if start_r == end_r {
@@ -428,7 +428,7 @@ where
             // SAFETY: Same argument as [block-width-guarantee]. Either this is a full block `2*BLOCK`-wide,
             // or `block_r` has been adjusted for the last handful of elements.
-            r = unsafe { r.offset(-(block_r as isize)) };
+            r = unsafe { r.sub(block_r) };
         }
         if is_done {
@@ -457,9 +457,9 @@ where
             // - `offsets_l` contains valid offsets into `v` collected during the partitioning of
             //   the last block, so the `l.offset` calls are valid.
             unsafe {
-                end_l = end_l.offset(-1);
-                ptr::swap(l.offset(*end_l as isize), r.offset(-1));
-                r = r.offset(-1);
+                end_l = end_l.sub(1);
+                ptr::swap(l.add(*end_l as usize), r.sub(1));
+                r = r.sub(1);
             }
         }
         width(v.as_mut_ptr(), r)
@@ -470,9 +470,9 @@ where
         while start_r < end_r {
             // SAFETY: See the reasoning in [remaining-elements-safety].
             unsafe {
-                end_r = end_r.offset(-1);
-                ptr::swap(l, r.offset(-(*end_r as isize) - 1));
-                l = l.offset(1);
+                end_r = end_r.sub(1);
+                ptr::swap(l, r.sub((*end_r as usize) + 1));
+                l = l.add(1);
             }
         }
         width(v.as_mut_ptr(), l)

@@ -216,12 +216,12 @@ pub(super) const fn run_utf8_validation(v: &[u8]) -> Result<(), Utf8Error> {
                 // SAFETY: since `align - index` and `ascii_block_size` are
                 // multiples of `usize_bytes`, `block = ptr.add(index)` is
                 // always aligned with a `usize` so it's safe to dereference
-                // both `block` and `block.offset(1)`.
+                // both `block` and `block.add(1)`.
                 unsafe {
                     let block = ptr.add(index) as *const usize;
                     // break if there is a nonascii byte
                     let zu = contains_nonascii(*block);
-                    let zv = contains_nonascii(*block.offset(1));
+                    let zv = contains_nonascii(*block.add(1));
                     if zu || zv {
                         break;
                     }

@@ -42,7 +42,7 @@ pub(crate) unsafe fn android_set_abort_message(payload: *mut &mut dyn BoxMeUp) {
         return; // allocation failure
     }
     copy_nonoverlapping(msg.as_ptr(), buf as *mut u8, msg.len());
-    buf.offset(msg.len() as isize).write(0);
+    buf.add(msg.len()).write(0);
     let func = transmute::<usize, SetAbortMessageType>(func_addr);
     func(buf);

@@ -75,7 +75,7 @@ pub unsafe fn find_eh_action(lsda: *const u8, context: &EHContext<'_>) -> Result
     let call_site_encoding = reader.read::<u8>();
     let call_site_table_length = reader.read_uleb128();
-    let action_table = reader.ptr.offset(call_site_table_length as isize);
+    let action_table = reader.ptr.add(call_site_table_length as usize);
     let ip = context.ip;
     if !USING_SJLJ_EXCEPTIONS {

@@ -329,7 +329,7 @@ impl SocketAddr {
             crate::ptr::copy_nonoverlapping(
                 namespace.as_ptr(),
-                addr.sun_path.as_mut_ptr().offset(1) as *mut u8,
+                addr.sun_path.as_mut_ptr().add(1) as *mut u8,
                 namespace.len(),
             );
             let len = (sun_path_offset(&addr) + 1 + namespace.len()) as libc::socklen_t;

@@ -17,12 +17,12 @@ fn test_copy_to_userspace_function() {
             dst.copy_from_enclave(&[0u8; 100]);
             // Copy src[0..size] to dst + offset
-            unsafe { copy_to_userspace(src.as_ptr(), dst.as_mut_ptr().offset(offset), size) };
+            unsafe { copy_to_userspace(src.as_ptr(), dst.as_mut_ptr().add(offset), size) };
             // Verify copy
             for byte in 0..size {
                 unsafe {
-                    assert_eq!(*dst.as_ptr().offset(offset + byte as isize), src[byte as usize]);
+                    assert_eq!(*dst.as_ptr().add(offset + byte), src[byte as usize]);
                 }
             }
         }

@@ -168,7 +168,7 @@ unsafe fn allocate(layout: Layout, zeroed: bool) -> *mut u8 {
         // SAFETY: Because the size and alignment of a header is <= `MIN_ALIGN` and `aligned`
         // is aligned to at least `MIN_ALIGN` and has at least `MIN_ALIGN` bytes of padding before
         // it, it is safe to write a header directly before it.
-        unsafe { ptr::write((aligned as *mut Header).offset(-1), Header(ptr)) };
+        unsafe { ptr::write((aligned as *mut Header).sub(1), Header(ptr)) };
        // SAFETY: The returned pointer does not point to the to the start of an allocated block,
        // but there is a header readable directly before it containing the location of the start
@@ -213,7 +213,7 @@ unsafe impl GlobalAlloc for System {
            // SAFETY: Because of the contract of `System`, `ptr` is guaranteed to be non-null
            // and have a header readable directly before it.
-            unsafe { ptr::read((ptr as *mut Header).offset(-1)).0 }
+            unsafe { ptr::read((ptr as *mut Header).sub(1)).0 }
        }
    };

@@ -512,7 +512,7 @@ impl File {
                     ));
                 }
             };
-            let subst_ptr = path_buffer.offset(subst_off as isize);
+            let subst_ptr = path_buffer.add(subst_off.into());
             let mut subst = slice::from_raw_parts(subst_ptr, subst_len as usize);
             // Absolute paths start with an NT internal namespace prefix `\??\`
             // We should not let it leak through.
@@ -1345,10 +1345,10 @@ fn symlink_junction_inner(original: &Path, junction: &Path) -> io::Result<()> {
         let v = br"\??\";
         let v = v.iter().map(|x| *x as u16);
         for c in v.chain(original.as_os_str().encode_wide()) {
-            *buf.offset(i) = c;
+            *buf.add(i) = c;
             i += 1;
         }
-        *buf.offset(i) = 0;
+        *buf.add(i) = 0;
         i += 1;
         (*db).ReparseTag = c::IO_REPARSE_TAG_MOUNT_POINT;
         (*db).ReparseTargetMaximumLength = (i * 2) as c::WORD;

@@ -99,11 +99,11 @@ impl Iterator for Env {
             }
             let p = self.cur as *const u16;
             let mut len = 0;
-            while *p.offset(len) != 0 {
+            while *p.add(len) != 0 {
                 len += 1;
             }
-            let s = slice::from_raw_parts(p, len as usize);
-            self.cur = self.cur.offset(len + 1);
+            let s = slice::from_raw_parts(p, len);
+            self.cur = self.cur.add(len + 1);
             // Windows allows environment variables to start with an equals
             // symbol (in any other position, this is the separator between