Rollup merge of #120888 - saethlin:unsafe-precondition-cleanup, r=RalfJung

assert_unsafe_precondition cleanup

I moved the polymorphic `is_nonoverlapping` into the `Cell` function that uses it and renamed `intrinsics::is_nonoverlapping_mono` to just `intrinsics::is_nonoverlapping`.

We now also have some docs for `intrinsics::debug_assertions`.

r? RalfJung
Matthias Krüger 2024-02-11 23:19:10 +01:00 committed by GitHub
commit d71154f80f
3 changed files with 34 additions and 38 deletions

library/core/src/cell.rs

@@ -237,9 +237,9 @@
use crate::cmp::Ordering;
use crate::fmt::{self, Debug, Display};
-use crate::intrinsics::is_nonoverlapping;
+use crate::intrinsics;
use crate::marker::{PhantomData, Unsize};
-use crate::mem;
+use crate::mem::{self, size_of};
use crate::ops::{CoerceUnsized, Deref, DerefMut, DispatchFromDyn};
use crate::ptr::{self, NonNull};
@@ -435,11 +435,15 @@ impl<T> Cell<T> {
#[inline]
#[stable(feature = "move_cell", since = "1.17.0")]
pub fn swap(&self, other: &Self) {
+fn is_nonoverlapping<T>(src: *const T, dst: *const T) -> bool {
+intrinsics::is_nonoverlapping(src.cast(), dst.cast(), size_of::<T>(), 1)
+}
if ptr::eq(self, other) {
// Swapping wouldn't change anything.
return;
}
-if !is_nonoverlapping(self, other, 1) {
+if !is_nonoverlapping(self, other) {
// See <https://github.com/rust-lang/rust/issues/80778> for why we need to stop here.
panic!("`Cell::swap` on overlapping non-identical `Cell`s");
}
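
For context, a minimal standalone usage example of the code path changed above: swapping two distinct `Cell`s exchanges their contents, while swapping a `Cell` with itself takes the `ptr::eq` early return instead of the overlap panic.

```rust
use std::cell::Cell;

fn main() {
    let a = Cell::new(1);
    let b = Cell::new(2);
    a.swap(&b);
    assert_eq!((a.get(), b.get()), (2, 1));

    // Identical cells: `ptr::eq(self, other)` is true, so `swap` returns early.
    a.swap(&a);
    assert_eq!(a.get(), 2);
}
```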

library/core/src/intrinsics.rs

@@ -56,7 +56,7 @@
use crate::marker::DiscriminantKind;
use crate::marker::Tuple;
-use crate::mem::{self, align_of};
+use crate::mem::align_of;
pub mod mir;
pub mod simd;
@@ -1027,7 +1027,7 @@ extern "rust-intrinsic" {
/// The size of the referenced value in bytes.
///
-/// The stabilized version of this intrinsic is [`mem::size_of_val`].
+/// The stabilized version of this intrinsic is [`crate::mem::size_of_val`].
#[rustc_const_unstable(feature = "const_size_of_val", issue = "46571")]
#[rustc_nounwind]
pub fn size_of_val<T: ?Sized>(_: *const T) -> usize;
@@ -1107,7 +1107,7 @@ extern "rust-intrinsic" {
/// Moves a value out of scope without running drop glue.
///
-/// This exists solely for [`mem::forget_unsized`]; normal `forget` uses
+/// This exists solely for [`crate::mem::forget_unsized`]; normal `forget` uses
/// `ManuallyDrop` instead.
///
/// Note that, unlike most intrinsics, this is safe to call;
@@ -1233,7 +1233,7 @@ extern "rust-intrinsic" {
/// Depending on what the code is doing, the following alternatives are preferable to
/// pointer-to-integer transmutation:
/// - If the code just wants to store data of arbitrary type in some buffer and needs to pick a
-/// type for that buffer, it can use [`MaybeUninit`][mem::MaybeUninit].
+/// type for that buffer, it can use [`MaybeUninit`][crate::mem::MaybeUninit].
/// - If the code actually wants to work on the address the pointer points to, it can use `as`
/// casts or [`ptr.addr()`][pointer::addr].
///
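
A small standalone sketch of the two alternatives the doc text above recommends over pointer-to-integer transmutation (illustration only, not part of this diff):

```rust
use std::mem::MaybeUninit;

fn main() {
    // Alternative 1: pick an untyped buffer type via `MaybeUninit` instead of
    // transmuting pointers into some arbitrarily chosen integer buffer.
    let mut buf: MaybeUninit<[u8; 8]> = MaybeUninit::uninit();
    buf.write([1, 2, 3, 4, 5, 6, 7, 8]);

    // Alternative 2: if only the address is needed, use an `as` cast
    // (or `ptr.addr()` where available) rather than a pointer-to-integer transmute.
    let x = 5i32;
    let addr = &x as *const i32 as usize;
    assert_ne!(addr, 0);
}
```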
@@ -2317,7 +2317,7 @@ extern "rust-intrinsic" {
/// Therefore, implementations must not require the user to uphold
/// any safety invariants.
///
-/// The to-be-stabilized version of this intrinsic is [`mem::variant_count`].
+/// The to-be-stabilized version of this intrinsic is [`crate::mem::variant_count`].
#[rustc_const_unstable(feature = "variant_count", issue = "73662")]
#[rustc_safe_intrinsic]
#[rustc_nounwind]
@@ -2569,6 +2569,19 @@ extern "rust-intrinsic" {
#[rustc_nounwind]
pub fn is_val_statically_known<T: Copy>(arg: T) -> bool;
+/// Returns the value of `cfg!(debug_assertions)`, but after monomorphization instead of in
+/// macro expansion.
+///
+/// This always returns `false` in const eval and Miri. The interpreter provides better
+/// diagnostics than the checks that this is used to implement. However, this means
+/// you should only be using this intrinsic to guard requirements that, if violated,
+/// immediately lead to UB. Otherwise, const-eval and Miri will miss out on those
+/// checks entirely.
+///
+/// Since this is evaluated after monomorphization, branching on this value can be used to
+/// implement debug assertions that are included in the precompiled standard library, but can
+/// be optimized out by builds that monomorphize the standard library code with debug
+/// assertions disabled. This intrinsic is primarily used by [`assert_unsafe_precondition`].
#[rustc_const_unstable(feature = "delayed_debug_assertions", issue = "none")]
#[rustc_safe_intrinsic]
#[cfg(not(bootstrap))]
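
A rough, standalone model of the pattern the new `debug_assertions` docs describe; it uses the stable `cfg!(debug_assertions)` macro as a stand-in, whereas the real intrinsic is resolved only after monomorphization and reports `false` under const eval and Miri:

```rust
// Stand-in model: `cfg!(debug_assertions)` is fixed when *this* crate is
// compiled, whereas the intrinsic documented above is only resolved when the
// calling crate is monomorphized; that deferral is the whole point.
fn checked_get(slice: &[u8], i: usize) -> u8 {
    if cfg!(debug_assertions) {
        // Cheap guard that disappears entirely in release builds.
        assert!(i < slice.len(), "index {i} out of bounds");
    }
    slice[i]
}

fn main() {
    assert_eq!(checked_get(&[10, 20, 30], 1), 20);
}
```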
@@ -2597,7 +2610,7 @@ pub(crate) const fn debug_assertions() -> bool {
/// These checks are behind a condition which is evaluated at codegen time, not expansion time like
/// [`debug_assert`]. This means that a standard library built with optimizations and debug
/// assertions disabled will have these checks optimized out of its monomorphizations, but if a
-/// a caller of the standard library has debug assertions enabled and monomorphizes an expansion of
+/// caller of the standard library has debug assertions enabled and monomorphizes an expansion of
/// this macro, that monomorphization will contain the check.
///
/// Since these checks cannot be optimized out in MIR, some care must be taken in both call and
@@ -2606,8 +2619,8 @@ pub(crate) const fn debug_assertions() -> bool {
/// combination of properties ensures that the code for the checks is only compiled once, and has a
/// minimal impact on the caller's code size.
///
-/// Caller should also introducing any other `let` bindings or any code outside this macro in order
-/// to call it. Since the precompiled standard library is built with full debuginfo and these
+/// Callers should also avoid introducing any other `let` bindings or any code outside this macro in
+/// order to call it. Since the precompiled standard library is built with full debuginfo and these
/// variables cannot be optimized out in MIR, an innocent-looking `let` can produce enough
/// debuginfo to have a measurable compile-time impact on debug builds.
///
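
The code-size and debuginfo concerns above boil down to one pattern: keep the check in a single outlined, non-generic function and reach it through a cheap branch. A minimal standalone sketch of that pattern (names invented; this is not the macro's actual expansion):

```rust
// The check body lives in one outlined, non-generic function, so it is
// compiled once; each call site contributes only a branch plus a call.
#[inline(never)]
#[cold]
fn precondition_failed(what: &'static str) -> ! {
    panic!("unsafe precondition violated: {what}");
}

fn require(cond: bool, what: &'static str) {
    // In the real macro this branch sits behind the post-monomorphization
    // debug-assertions check documented above, so release callers pay nothing.
    if !cond {
        precondition_failed(what);
    }
}

fn main() {
    require(1 + 1 == 2, "arithmetic still holds");
}
```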
@@ -2659,12 +2672,9 @@ pub(crate) fn is_valid_allocation_size(size: usize, len: usize) -> bool {
len <= max_len
}
-pub(crate) fn is_nonoverlapping_mono(
-src: *const (),
-dst: *const (),
-size: usize,
-count: usize,
-) -> bool {
+/// Checks whether the regions of memory starting at `src` and `dst` of size
+/// `count * size` do *not* overlap.
+pub(crate) fn is_nonoverlapping(src: *const (), dst: *const (), size: usize, count: usize) -> bool {
let src_usize = src.addr();
let dst_usize = dst.addr();
let Some(size) = size.checked_mul(count) else {
@@ -2678,24 +2688,6 @@ pub(crate) fn is_nonoverlapping_mono(
diff >= size
}
-/// Checks whether the regions of memory starting at `src` and `dst` of size
-/// `count * size_of::<T>()` do *not* overlap.
-#[inline]
-pub(crate) fn is_nonoverlapping<T>(src: *const T, dst: *const T, count: usize) -> bool {
-let src_usize = src.addr();
-let dst_usize = dst.addr();
-let Some(size) = mem::size_of::<T>().checked_mul(count) else {
-// Use panic_nounwind instead of Option::expect, so that this function is nounwind.
-crate::panicking::panic_nounwind(
-"is_nonoverlapping: `size_of::<T>() * count` overflows a usize",
-)
-};
-let diff = src_usize.abs_diff(dst_usize);
-// If the absolute distance between the ptrs is at least as big as the size of the buffer,
-// they do not overlap.
-diff >= size
-}
/// Copies `count * size_of::<T>()` bytes from `src` to `dst`. The source
/// and destination must *not* overlap.
///
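
To make the arithmetic in the renamed `is_nonoverlapping` concrete, here is a small standalone model of the same check, written over plain addresses rather than pointers:

```rust
// Two byte ranges [src, src + size*count) and [dst, dst + size*count) are
// disjoint exactly when the distance between their start addresses is at
// least the length of one range.
fn is_nonoverlapping(src: usize, dst: usize, size: usize, count: usize) -> bool {
    let bytes = size.checked_mul(count).expect("`size * count` overflowed usize");
    src.abs_diff(dst) >= bytes
}

fn main() {
    assert!(!is_nonoverlapping(0, 4, 4, 2)); // [0, 8) and [4, 12) overlap
    assert!(is_nonoverlapping(0, 8, 4, 2)); // [0, 8) and [8, 16) are disjoint
    assert!(is_nonoverlapping(100, 100, 0, 5)); // zero-sized ranges never overlap
}
```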
@@ -2809,7 +2801,7 @@ pub const unsafe fn copy_nonoverlapping<T>(src: *const T, dst: *mut T, count: us
) =>
is_aligned_and_not_null(src, align)
&& is_aligned_and_not_null(dst, align)
-&& is_nonoverlapping_mono(src, dst, size, count)
+&& is_nonoverlapping(src, dst, size, count)
);
copy_nonoverlapping(src, dst, count)
}
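
For reference, a standalone usage example of `ptr::copy_nonoverlapping`, the public API whose precondition is asserted above; both buffers are aligned, non-null, and disjoint, so the check passes:

```rust
use std::ptr;

fn main() {
    let src = [1u32, 2, 3, 4];
    let mut dst = [0u32; 4];
    // SAFETY: both arrays hold four initialized `u32`s, are properly aligned,
    // non-null, and do not overlap.
    unsafe { ptr::copy_nonoverlapping(src.as_ptr(), dst.as_mut_ptr(), 4) };
    assert_eq!(dst, [1, 2, 3, 4]);
}
```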

library/core/src/ptr/mod.rs

@@ -383,7 +383,7 @@ use crate::cmp::Ordering;
use crate::fmt;
use crate::hash;
use crate::intrinsics::{
-self, assert_unsafe_precondition, is_aligned_and_not_null, is_nonoverlapping_mono,
+self, assert_unsafe_precondition, is_aligned_and_not_null, is_nonoverlapping,
};
use crate::marker::FnPtr;
@@ -978,7 +978,7 @@ pub const unsafe fn swap_nonoverlapping<T>(x: *mut T, y: *mut T, count: usize) {
) =>
is_aligned_and_not_null(x, align)
&& is_aligned_and_not_null(y, align)
-&& is_nonoverlapping_mono(x, y, size, count)
+&& is_nonoverlapping(x, y, size, count)
);
}
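
And the analogous usage example for `ptr::swap_nonoverlapping`, again upholding the asserted precondition (aligned, non-null, non-overlapping regions):

```rust
use std::ptr;

fn main() {
    let mut a = [1u8, 2];
    let mut b = [3u8, 4];
    // SAFETY: `a` and `b` are distinct stack arrays, so the two 2-byte
    // regions are aligned, non-null, and non-overlapping.
    unsafe { ptr::swap_nonoverlapping(a.as_mut_ptr(), b.as_mut_ptr(), 2) };
    assert_eq!(a, [3, 4]);
    assert_eq!(b, [1, 2]);
}
```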