Mirror of https://github.com/rust-lang/rust.git (synced 2025-02-13 15:33:53 +00:00)
Derive src pointers in sort drop guards from &T
The src pointers in CopyOnDrop and InsertionHole used to be *mut T, and were derived via automatic conversion from &mut T. According to Stacked Borrows 2.1, this means that those pointers become invalidated by interior mutation in the comparison function. But there's no need for mutability in this code path. Thus, we can change the drop guards to use *const and derive those from &T.
parent daf2204aa4
commit a5a91c8e07
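For context, the drop-guard pattern changed below works roughly as sketched here. This is a simplified illustration, not the library code: `insert_first` is a hypothetical stand-in for the insertion-shift helpers touched by the diff, and the `CopyOnDrop` shown is trimmed to the relevant fields. The key detail matches the commit: the guard's `src` is `*const T`, obtained by coercing the shared reference `&*tmp`, so no `&mut T` to the temporary is ever created.

```rust
use std::{mem, ptr};

/// When dropped, copies one element from `src` into `dest`.
/// Sketch of the guard after this change: `src` is `*const T`, derived from `&T`.
struct CopyOnDrop<T> {
    src: *const T,
    dest: *mut T,
}

impl<T> Drop for CopyOnDrop<T> {
    fn drop(&mut self) {
        // SAFETY (sketch): the caller keeps `src` and `dest` valid and
        // non-overlapping for the guard's entire lifetime.
        unsafe { ptr::copy_nonoverlapping(self.src, self.dest, 1) };
    }
}

/// Illustrative helper (not the library function): insert `v[0]` into the
/// already-sorted remainder `v[1..]`, using the guard for panic safety.
fn insert_first<T, F>(v: &mut [T], is_less: &mut F)
where
    F: FnMut(&T, &T) -> bool,
{
    if v.len() < 2 || !is_less(&v[1], &v[0]) {
        return;
    }
    unsafe {
        // Read `v[0]` into a stack copy; the guard writes it back into the
        // current hole even if `is_less` panics below.
        let tmp = mem::ManuallyDrop::new(ptr::read(&v[0]));
        let base = v.as_mut_ptr();
        // `&*tmp` is a `&T`, which coerces to `*const T`; no `&mut` is involved.
        let mut hole = CopyOnDrop { src: &*tmp, dest: base.add(1) };
        ptr::copy_nonoverlapping(base.add(1), base, 1);

        for i in 2..v.len() {
            if !is_less(&*base.add(i), &*tmp) {
                break;
            }
            // Shift the element one slot to the left, moving the hole right.
            ptr::copy_nonoverlapping(base.add(i), base.add(i - 1), 1);
            hole.dest = base.add(i);
        }
        // On normal exit (or a panic), dropping `hole` fills the hole with `tmp`.
    }
}

fn main() {
    let mut v = [5, 1, 2, 3, 4];
    insert_first(&mut v, &mut |a: &i32, b: &i32| a < b);
    assert_eq!(v, [1, 2, 3, 4, 5]);
}
```

Because `src` is only ever read from, in the guard's `Drop`, shared provenance is sufficient, which is exactly the observation the commit message makes.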
@@ -892,7 +892,7 @@ where
             // performance than with the 2nd method.
             //
             // All methods were benchmarked, and the 3rd showed best results. So we chose that one.
-            let mut tmp = mem::ManuallyDrop::new(ptr::read(&v[0]));
+            let tmp = mem::ManuallyDrop::new(ptr::read(&v[0]));
 
             // Intermediate state of the insertion process is always tracked by `hole`, which
             // serves two purposes:
@@ -904,7 +904,7 @@ where
             // If `is_less` panics at any point during the process, `hole` will get dropped and
             // fill the hole in `v` with `tmp`, thus ensuring that `v` still holds every object it
             // initially held exactly once.
-            let mut hole = InsertionHole { src: &mut *tmp, dest: &mut v[1] };
+            let mut hole = InsertionHole { src: &*tmp, dest: &mut v[1] };
             ptr::copy_nonoverlapping(&v[1], &mut v[0], 1);
 
             for i in 2..v.len() {
@@ -920,7 +920,7 @@ where
 
     // When dropped, copies from `src` into `dest`.
    struct InsertionHole<T> {
-        src: *mut T,
+        src: *const T,
         dest: *mut T,
     }
 
@@ -12,7 +12,7 @@ use crate::ptr;
 
 /// When dropped, copies from `src` into `dest`.
 struct CopyOnDrop<T> {
-    src: *mut T,
+    src: *const T,
     dest: *mut T,
 }
 
@@ -54,9 +54,9 @@ where
             // Read the first element into a stack-allocated variable. If a following comparison
             // operation panics, `hole` will get dropped and automatically write the element back
             // into the slice.
-            let mut tmp = mem::ManuallyDrop::new(ptr::read(v.get_unchecked(0)));
+            let tmp = mem::ManuallyDrop::new(ptr::read(v.get_unchecked(0)));
             let v = v.as_mut_ptr();
-            let mut hole = CopyOnDrop { src: &mut *tmp, dest: v.add(1) };
+            let mut hole = CopyOnDrop { src: &*tmp, dest: v.add(1) };
             ptr::copy_nonoverlapping(v.add(1), v.add(0), 1);
 
             for i in 2..len {
@@ -100,9 +100,9 @@ where
             // Read the last element into a stack-allocated variable. If a following comparison
             // operation panics, `hole` will get dropped and automatically write the element back
             // into the slice.
-            let mut tmp = mem::ManuallyDrop::new(ptr::read(v.get_unchecked(len - 1)));
+            let tmp = mem::ManuallyDrop::new(ptr::read(v.get_unchecked(len - 1)));
             let v = v.as_mut_ptr();
-            let mut hole = CopyOnDrop { src: &mut *tmp, dest: v.add(len - 2) };
+            let mut hole = CopyOnDrop { src: &*tmp, dest: v.add(len - 2) };
             ptr::copy_nonoverlapping(v.add(len - 2), v.add(len - 1), 1);
 
             for i in (0..len - 2).rev() {
@@ -498,8 +498,8 @@ where
         // operation panics, the pivot will be automatically written back into the slice.
 
         // SAFETY: `pivot` is a reference to the first element of `v`, so `ptr::read` is safe.
-        let mut tmp = mem::ManuallyDrop::new(unsafe { ptr::read(pivot) });
-        let _pivot_guard = CopyOnDrop { src: &mut *tmp, dest: pivot };
+        let tmp = mem::ManuallyDrop::new(unsafe { ptr::read(pivot) });
+        let _pivot_guard = CopyOnDrop { src: &*tmp, dest: pivot };
         let pivot = &*tmp;
 
         // Find the first pair of out-of-order elements.
@@ -551,8 +551,8 @@ where
     // Read the pivot into a stack-allocated variable for efficiency. If a following comparison
     // operation panics, the pivot will be automatically written back into the slice.
     // SAFETY: The pointer here is valid because it is obtained from a reference to a slice.
-    let mut tmp = mem::ManuallyDrop::new(unsafe { ptr::read(pivot) });
-    let _pivot_guard = CopyOnDrop { src: &mut *tmp, dest: pivot };
+    let tmp = mem::ManuallyDrop::new(unsafe { ptr::read(pivot) });
+    let _pivot_guard = CopyOnDrop { src: &*tmp, dest: pivot };
     let pivot = &*tmp;
 
     // Now partition the slice.
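To make the interior mutation mentioned in the commit message concrete, here is a hedged, self-contained example (not from the commit or its tests) of a comparator that writes through the shared references it receives, via `Cell`. Per the commit message, deriving the guards' `src` pointers from `&T` means such writes no longer invalidate those pointers under Stacked Borrows. The `Tracked` type and its `probes` counter are illustrative only.

```rust
use std::cell::Cell;

// An element whose comparison count is tracked through interior mutability.
struct Tracked {
    key: i32,
    probes: Cell<u32>,
}

fn main() {
    let mut v: Vec<Tracked> = [3, 1, 4, 1, 5, 9, 2, 6]
        .iter()
        .map(|&key| Tracked { key, probes: Cell::new(0) })
        .collect();

    // The comparator mutates the elements it is given through `Cell`,
    // even though it only receives shared references.
    v.sort_by(|a, b| {
        a.probes.set(a.probes.get() + 1);
        b.probes.set(b.probes.get() + 1);
        a.key.cmp(&b.key)
    });

    assert!(v.windows(2).all(|w| w[0].key <= w[1].key));
    for t in &v {
        println!("key {} was compared {} times", t.key, t.probes.get());
    }
}
```

Depending on the entry point (`sort_by` versus `sort_unstable_by`), a comparator like this can run while either the `InsertionHole` or the `CopyOnDrop` guard above holds its `src` pointer.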