Add debug assertions to some unsafe functions

These debug assertions are all implemented only at runtime using
`const_eval_select`, and in the error path they call
`intrinsics::abort` instead of panicking like a normal debug
assertion, to minimize the code-size impact of the checks when they
are enabled.

Of all these changes, the bounds checks for unchecked indexing are
expected to be the most impactful (case in point: they found a problem
in rustc).
Author: Ben Kimock
Date:   2022-01-08 23:55:09 -05:00
Parent: ba14a836c7
Commit: 6e6d0cbf83

8 changed files with 125 additions and 130 deletions
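For orientation before the diffs: the mechanism the commit message describes, written out as a standalone nightly snippet. The function name and signature here are illustrative, not part of the actual change; this is a sketch of the shape that `assert_unsafe_precondition!` packages up. `const_eval_select` dispatches to a no-op during const evaluation and to the real check at runtime, and the failure path aborts rather than panics, so callers do not pull in panic formatting and unwinding machinery.

    #![feature(core_intrinsics, const_eval_select)]
    use core::intrinsics::{abort, const_eval_select};

    /// Hypothetical helper: abort (at runtime only) if `p` is null.
    /// Only sound to call where a null `p` is already UB for the caller.
    const unsafe fn check_not_null(p: *const u8) {
        fn runtime(p: *const u8) {
            if p.is_null() {
                // abort instead of panicking to keep the code-size cost low
                abort();
            }
        }
        const fn comptime(_p: *const u8) {}

        // SAFETY: in sound code the abort is unreachable, so the const and
        // runtime arms are observably equivalent.
        unsafe { const_eval_select((p,), comptime, runtime) }
    }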


@@ -30,13 +30,13 @@ impl<T> MapInPlace<T> for Vec<T> {
             while read_i < old_len {
                 // move the read_i'th item out of the vector and map it
                 // to an iterator
-                let e = ptr::read(self.get_unchecked(read_i));
+                let e = ptr::read(self.as_ptr().add(read_i));
                 let iter = f(e).into_iter();
                 read_i += 1;
 
                 for e in iter {
                     if write_i < read_i {
-                        ptr::write(self.get_unchecked_mut(write_i), e);
+                        ptr::write(self.as_mut_ptr().add(write_i), e);
                         write_i += 1;
                     } else {
                         // If this is reached we ran out of space
@@ -76,13 +76,13 @@ impl<T, A: Array<Item = T>> MapInPlace<T> for SmallVec<A> {
             while read_i < old_len {
                 // move the read_i'th item out of the vector and map it
                 // to an iterator
-                let e = ptr::read(self.get_unchecked(read_i));
+                let e = ptr::read(self.as_ptr().add(read_i));
                 let iter = f(e).into_iter();
                 read_i += 1;
 
                 for e in iter {
                     if write_i < read_i {
-                        ptr::write(self.get_unchecked_mut(write_i), e);
+                        ptr::write(self.as_mut_ptr().add(write_i), e);
                         write_i += 1;
                     } else {
                         // If this is reached we ran out of space

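A note on why these two hunks (and the benchmark hunk below) switch from `get_unchecked` to raw pointer arithmetic: the loop in `map_in_place` is preceded by a `set_len(0)` (not shown in the hunk) so that a panic merely leaks elements. From then on, slice indexing at `read_i` is out of bounds of the current length even though the buffer is still valid, so the new `index < len` assertion would fire. Reading through `as_ptr().add(..)` stays inside the allocation and carries no slice-length precondition. A minimal standalone illustration of the distinction:

    let mut v = vec![1u8, 2, 3];
    unsafe {
        v.set_len(0); // leak-on-panic trick: the elements stay alive in the buffer
        // `v.get_unchecked(0)` would now violate `index < len` (len is 0),
        // but a raw read inside the allocation is still sound:
        let first = std::ptr::read(v.as_ptr().add(0));
        assert_eq!(first, 1);
    }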

@@ -627,10 +627,10 @@ fn bench_map_regular(b: &mut Bencher) {
 fn bench_map_fast(b: &mut Bencher) {
     let data = black_box([(0, 0); LEN]);
     b.iter(|| {
-        let mut result = Vec::with_capacity(data.len());
+        let mut result: Vec<u32> = Vec::with_capacity(data.len());
         for i in 0..data.len() {
             unsafe {
-                *result.get_unchecked_mut(i) = data[i].0;
+                *result.as_mut_ptr().add(i) = data[i].0;
                 result.set_len(i);
             }
         }


@@ -1969,6 +1969,40 @@ extern "rust-intrinsic" {
 // (`transmute` also falls into this category, but it cannot be wrapped due to the
 // check that `T` and `U` have the same size.)
 
+/// Check that the preconditions of an unsafe function are followed, if debug_assertions are on,
+/// and only at runtime.
+///
+/// # Safety
+///
+/// Invoking this macro is only sound if the following code is already UB when the passed
+/// expression evaluates to false.
+///
+/// This macro expands to a check at runtime if debug_assertions is set. It has no effect at
+/// compile time, but the semantics of the contained `const_eval_select` must be the same at
+/// runtime and at compile time. Thus if the expression evaluates to false, this macro produces
+/// different behavior at compile time and at runtime, and invoking it is incorrect.
+///
+/// So in a sense it is UB if this macro is useful, but we expect callers of `unsafe fn` to make
+/// the occasional mistake, and this check should help them figure things out.
+#[allow_internal_unstable(const_eval_select)] // permit this to be called in stably-const fn
+macro_rules! assert_unsafe_precondition {
+    ($e:expr) => {
+        if cfg!(debug_assertions) {
+            // Use a closure so that we can capture arbitrary expressions from the invocation
+            let runtime = || {
+                if !$e {
+                    // abort instead of panicking to reduce impact on code size
+                    ::core::intrinsics::abort();
+                }
+            };
+            const fn comptime() {}
+
+            ::core::intrinsics::const_eval_select((), comptime, runtime);
+        }
+    };
+}
+pub(crate) use assert_unsafe_precondition;
+
 /// Checks whether `ptr` is properly aligned with respect to
 /// `align_of::<T>()`.
 pub(crate) fn is_aligned_and_not_null<T>(ptr: *const T) -> bool {
@@ -1977,7 +2011,6 @@ pub(crate) fn is_aligned_and_not_null<T>(ptr: *const T) -> bool {
 
 /// Checks whether the regions of memory starting at `src` and `dst` of size
 /// `count * size_of::<T>()` do *not* overlap.
-#[cfg(debug_assertions)]
 pub(crate) fn is_nonoverlapping<T>(src: *const T, dst: *const T, count: usize) -> bool {
     let src_usize = src as usize;
     let dst_usize = dst as usize;
@@ -2079,28 +2112,16 @@ pub const unsafe fn copy_nonoverlapping<T>(src: *const T, dst: *mut T, count: us
         pub fn copy_nonoverlapping<T>(src: *const T, dst: *mut T, count: usize);
     }
 
-    #[cfg(debug_assertions)]
-    fn runtime_check<T>(src: *const T, dst: *mut T, count: usize) {
-        if !is_aligned_and_not_null(src)
-            || !is_aligned_and_not_null(dst)
-            || !is_nonoverlapping(src, dst, count)
-        {
-            // Not panicking to keep codegen impact smaller.
-            abort();
-        }
-    }
-    #[cfg(debug_assertions)]
-    const fn compiletime_check<T>(_src: *const T, _dst: *mut T, _count: usize) {}
-    #[cfg(debug_assertions)]
-    // SAFETY: As per our safety precondition, we may assume that the `abort` above is never reached.
-    // Therefore, compiletime_check and runtime_check are observably equivalent.
-    unsafe {
-        const_eval_select((src, dst, count), compiletime_check, runtime_check);
-    }
-
     // SAFETY: the safety contract for `copy_nonoverlapping` must be
     // upheld by the caller.
-    unsafe { copy_nonoverlapping(src, dst, count) }
+    unsafe {
+        assert_unsafe_precondition!(
+            is_aligned_and_not_null(src)
+                && is_aligned_and_not_null(dst)
+                && is_nonoverlapping(src, dst, count)
+        );
+        copy_nonoverlapping(src, dst, count)
+    }
 }
 
 /// Copies `count * size_of::<T>()` bytes from `src` to `dst`. The source
@@ -2173,24 +2194,11 @@ pub const unsafe fn copy<T>(src: *const T, dst: *mut T, count: usize) {
         fn copy<T>(src: *const T, dst: *mut T, count: usize);
     }
 
-    #[cfg(debug_assertions)]
-    fn runtime_check<T>(src: *const T, dst: *mut T) {
-        if !is_aligned_and_not_null(src) || !is_aligned_and_not_null(dst) {
-            // Not panicking to keep codegen impact smaller.
-            abort();
-        }
-    }
-    #[cfg(debug_assertions)]
-    const fn compiletime_check<T>(_src: *const T, _dst: *mut T) {}
-    #[cfg(debug_assertions)]
-    // SAFETY: As per our safety precondition, we may assume that the `abort` above is never reached.
-    // Therefore, compiletime_check and runtime_check are observably equivalent.
-    unsafe {
-        const_eval_select((src, dst), compiletime_check, runtime_check);
-    }
-
     // SAFETY: the safety contract for `copy` must be upheld by the caller.
-    unsafe { copy(src, dst, count) }
+    unsafe {
+        assert_unsafe_precondition!(is_aligned_and_not_null(src) && is_aligned_and_not_null(dst));
+        copy(src, dst, count)
+    }
 }
 
 /// Sets `count * size_of::<T>()` bytes of memory starting at `dst` to
@@ -2274,24 +2282,11 @@ pub const unsafe fn write_bytes<T>(dst: *mut T, val: u8, count: usize) {
         fn write_bytes<T>(dst: *mut T, val: u8, count: usize);
     }
 
-    #[cfg(debug_assertions)]
-    fn runtime_check<T>(ptr: *mut T) {
-        debug_assert!(
-            is_aligned_and_not_null(ptr),
-            "attempt to write to unaligned or null pointer"
-        );
-    }
-    #[cfg(debug_assertions)]
-    const fn compiletime_check<T>(_ptr: *mut T) {}
-    #[cfg(debug_assertions)]
-    // SAFETY: runtime debug-assertions are a best-effort basis; it's fine to
-    // not do them during compile time
-    unsafe {
-        const_eval_select((dst,), compiletime_check, runtime_check);
-    }
-
     // SAFETY: the safety contract for `write_bytes` must be upheld by the caller.
-    unsafe { write_bytes(dst, val, count) }
+    unsafe {
+        assert_unsafe_precondition!(is_aligned_and_not_null(dst));
+        write_bytes(dst, val, count)
+    }
 }
 
 /// Selects which function to call depending on the context.

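To make the new macro concrete: a call like `assert_unsafe_precondition!(n != 0)` (as in the NonZero change below) expands, inside the caller's existing `unsafe` block, to roughly the following. Here `n` is whatever the call site captured, and the exact token output is an implementation detail:

    if cfg!(debug_assertions) {
        // a closure so the expansion can capture `n` from the call site
        let runtime = || {
            if !(n != 0) {
                // abort instead of panicking to reduce impact on code size
                ::core::intrinsics::abort();
            }
        };
        const fn comptime() {}

        ::core::intrinsics::const_eval_select((), comptime, runtime);
    }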

@@ -52,9 +52,13 @@ macro_rules! nonzero_integers {
                #[$const_new_unchecked_stability]
                #[must_use]
                #[inline]
+               #[rustc_allow_const_fn_unstable(const_fn_fn_ptr_basics)] // required by assert_unsafe_precondition
                pub const unsafe fn new_unchecked(n: $Int) -> Self {
                    // SAFETY: this is guaranteed to be safe by the caller.
-                   unsafe { Self(n) }
+                   unsafe {
+                       core::intrinsics::assert_unsafe_precondition!(n != 0);
+                       Self(n)
+                   }
                }
 
                /// Creates a non-zero if the given value is not zero.

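The practical effect, assuming a core/std built with debug assertions (for example via cargo's `-Zbuild-std`; a stock release toolchain does not carry these checks):

    use std::num::NonZeroU32;

    fn main() {
        // Deliberate precondition violation. With a stock std this is silent
        // UB; with a debug-assertions core, `new_unchecked` now aborts here
        // instead of letting an invalid zero value escape.
        let z = unsafe { NonZeroU32::new_unchecked(0) };
        println!("{}", z.get()); // unreachable under a checked build
    }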

@@ -75,7 +75,10 @@
 use crate::cmp::Ordering;
 use crate::fmt;
 use crate::hash;
-use crate::intrinsics::{self, abort, is_aligned_and_not_null};
+use crate::intrinsics::{
+    self, assert_unsafe_precondition, is_aligned_and_not_null, is_nonoverlapping,
+};
 use crate::mem::{self, MaybeUninit};
 
 #[stable(feature = "rust1", since = "1.0.0")]
@@ -438,6 +441,16 @@ pub const unsafe fn swap_nonoverlapping<T>(x: *mut T, y: *mut T, count: usize) {
         };
     }
 
+    // SAFETY: the caller must guarantee that `x` and `y` are
+    // valid for writes and properly aligned.
+    unsafe {
+        assert_unsafe_precondition!(
+            is_aligned_and_not_null(x)
+                && is_aligned_and_not_null(y)
+                && is_nonoverlapping(x, y, count)
+        );
+    }
+
     // NOTE(scottmcm) MIRI is disabled here as reading in smaller units is a
     // pessimization for it. Also, if the type contains any unaligned pointers,
     // copying those over multiple reads is difficult to support.
@@ -528,6 +541,7 @@ pub const unsafe fn replace<T>(dst: *mut T, mut src: T) -> T {
     // and cannot overlap `src` since `dst` must point to a distinct
     // allocated object.
     unsafe {
+        assert_unsafe_precondition!(is_aligned_and_not_null(dst));
        mem::swap(&mut *dst, &mut src); // cannot overlap
     }
     src
@@ -1007,12 +1021,11 @@ pub const unsafe fn write_unaligned<T>(dst: *mut T, src: T) {
 #[inline]
 #[stable(feature = "volatile", since = "1.9.0")]
 pub unsafe fn read_volatile<T>(src: *const T) -> T {
-    if cfg!(debug_assertions) && !is_aligned_and_not_null(src) {
-        // Not panicking to keep codegen impact smaller.
-        abort();
-    }
     // SAFETY: the caller must uphold the safety contract for `volatile_load`.
-    unsafe { intrinsics::volatile_load(src) }
+    unsafe {
+        assert_unsafe_precondition!(is_aligned_and_not_null(src));
+        intrinsics::volatile_load(src)
+    }
 }
 
 /// Performs a volatile write of a memory location with the given value without
@@ -1078,12 +1091,9 @@ pub unsafe fn read_volatile<T>(src: *const T) -> T {
 #[inline]
 #[stable(feature = "volatile", since = "1.9.0")]
 pub unsafe fn write_volatile<T>(dst: *mut T, src: T) {
-    if cfg!(debug_assertions) && !is_aligned_and_not_null(dst) {
-        // Not panicking to keep codegen impact smaller.
-        abort();
-    }
     // SAFETY: the caller must uphold the safety contract for `volatile_store`.
     unsafe {
+        assert_unsafe_precondition!(is_aligned_and_not_null(dst));
         intrinsics::volatile_store(dst, src);
     }
 }

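Same idea for the volatile accessors: with a debug-assertions core, a null or misaligned pointer aborts before the load or store is attempted. A sketch of a call that would trip the new check:

    use std::ptr;

    fn main() {
        let p = ptr::null::<u32>();
        // UB by `read_volatile`'s contract (null pointer); under a
        // debug-assertions core this aborts at the new assertion.
        let _v = unsafe { ptr::read_volatile(p) };
    }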

@@ -1,5 +1,6 @@
 //! Indexing implementations for `[T]`.
 
+use crate::intrinsics::assert_unsafe_precondition;
 use crate::intrinsics::const_eval_select;
 use crate::ops;
 use crate::ptr;
@@ -219,13 +220,19 @@ unsafe impl<T> const SliceIndex<[T]> for usize {
         // cannot be longer than `isize::MAX`. They also guarantee that
         // `self` is in bounds of `slice` so `self` cannot overflow an `isize`,
         // so the call to `add` is safe.
-        unsafe { slice.as_ptr().add(self) }
+        unsafe {
+            assert_unsafe_precondition!(self < slice.len());
+            slice.as_ptr().add(self)
+        }
     }
 
     #[inline]
     unsafe fn get_unchecked_mut(self, slice: *mut [T]) -> *mut T {
         // SAFETY: see comments for `get_unchecked` above.
-        unsafe { slice.as_mut_ptr().add(self) }
+        unsafe {
+            assert_unsafe_precondition!(self < slice.len());
+            slice.as_mut_ptr().add(self)
+        }
     }
 
     #[inline]
@@ -272,13 +279,18 @@ unsafe impl<T> const SliceIndex<[T]> for ops::Range<usize> {
         // cannot be longer than `isize::MAX`. They also guarantee that
         // `self` is in bounds of `slice` so `self` cannot overflow an `isize`,
         // so the call to `add` is safe.
-        unsafe { ptr::slice_from_raw_parts(slice.as_ptr().add(self.start), self.end - self.start) }
+        unsafe {
+            assert_unsafe_precondition!(self.end >= self.start && self.end <= slice.len());
+            ptr::slice_from_raw_parts(slice.as_ptr().add(self.start), self.end - self.start)
+        }
     }
 
     #[inline]
     unsafe fn get_unchecked_mut(self, slice: *mut [T]) -> *mut [T] {
         // SAFETY: see comments for `get_unchecked` above.
         unsafe {
+            assert_unsafe_precondition!(self.end >= self.start && self.end <= slice.len());
             ptr::slice_from_raw_parts_mut(slice.as_mut_ptr().add(self.start), self.end - self.start)
         }
     }

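These are the bounds checks the commit message singles out as the most impactful. A sketch of the off-by-one they catch (again assuming a core built with debug assertions):

    fn main() {
        let v = [10, 20, 30];
        let i = v.len(); // off by one: valid indices are 0..len
        // With a debug-assertions core, `get_unchecked` aborts on the new
        // `self < slice.len()` assertion instead of reading past the array.
        let x = unsafe { *v.get_unchecked(i) };
        println!("{}", x);
    }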

@@ -7,6 +7,7 @@
 #![stable(feature = "rust1", since = "1.0.0")]
 
 use crate::cmp::Ordering::{self, Greater, Less};
+use crate::intrinsics::{assert_unsafe_precondition, exact_div};
 use crate::marker::Copy;
 use crate::mem;
 use crate::num::NonZeroUsize;
@@ -637,15 +638,10 @@ impl<T> [T] {
     #[unstable(feature = "slice_swap_unchecked", issue = "88539")]
     #[rustc_const_unstable(feature = "const_swap", issue = "83163")]
     pub const unsafe fn swap_unchecked(&mut self, a: usize, b: usize) {
-        #[cfg(debug_assertions)]
-        {
-            let _ = &self[a];
-            let _ = &self[b];
-        }
-
         let ptr = self.as_mut_ptr();
         // SAFETY: caller has to guarantee that `a < self.len()` and `b < self.len()`
         unsafe {
+            assert_unsafe_precondition!(a < self.len() && b < self.len());
            ptr::swap(ptr.add(a), ptr.add(b));
         }
     }
@@ -950,11 +946,11 @@ impl<T> [T] {
     #[unstable(feature = "slice_as_chunks", issue = "74985")]
     #[inline]
     pub unsafe fn as_chunks_unchecked<const N: usize>(&self) -> &[[T; N]] {
-        debug_assert_ne!(N, 0);
-        debug_assert_eq!(self.len() % N, 0);
-        let new_len =
-            // SAFETY: Our precondition is exactly what's needed to call this
-            unsafe { crate::intrinsics::exact_div(self.len(), N) };
+        // SAFETY: Caller must guarantee that `N` is nonzero and exactly divides the slice length
+        let new_len = unsafe {
+            assert_unsafe_precondition!(N != 0 && self.len() % N == 0);
+            exact_div(self.len(), N)
+        };
         // SAFETY: We cast a slice of `new_len * N` elements into
         // a slice of `new_len` many `N` elements chunks.
         unsafe { from_raw_parts(self.as_ptr().cast(), new_len) }
@@ -1086,11 +1082,11 @@ impl<T> [T] {
     #[unstable(feature = "slice_as_chunks", issue = "74985")]
     #[inline]
     pub unsafe fn as_chunks_unchecked_mut<const N: usize>(&mut self) -> &mut [[T; N]] {
-        debug_assert_ne!(N, 0);
-        debug_assert_eq!(self.len() % N, 0);
-        let new_len =
-            // SAFETY: Our precondition is exactly what's needed to call this
-            unsafe { crate::intrinsics::exact_div(self.len(), N) };
+        // SAFETY: Caller must guarantee that `N` is nonzero and exactly divides the slice length
+        let new_len = unsafe {
+            assert_unsafe_precondition!(N != 0 && self.len() % N == 0);
+            exact_div(self.len(), N)
+        };
         // SAFETY: We cast a slice of `new_len * N` elements into
         // a slice of `new_len` many `N` elements chunks.
         unsafe { from_raw_parts_mut(self.as_mut_ptr().cast(), new_len) }
@@ -1646,7 +1642,10 @@ impl<T> [T] {
         //
         // `[ptr; mid]` and `[mid; len]` are not overlapping, so returning a mutable reference
         // is fine.
-        unsafe { (from_raw_parts_mut(ptr, mid), from_raw_parts_mut(ptr.add(mid), len - mid)) }
+        unsafe {
+            assert_unsafe_precondition!(mid <= len);
+            (from_raw_parts_mut(ptr, mid), from_raw_parts_mut(ptr.add(mid), len - mid))
+        }
     }
 
     /// Divides one slice into an array and a remainder slice at an index.


@@ -1,6 +1,7 @@
 //! Free functions to create `&[T]` and `&mut [T]`.
 
 use crate::array;
+use crate::intrinsics::{assert_unsafe_precondition, is_aligned_and_not_null};
 use crate::ops::Range;
 use crate::ptr;
@@ -86,10 +87,14 @@ use crate::ptr;
 #[stable(feature = "rust1", since = "1.0.0")]
 #[rustc_const_unstable(feature = "const_slice_from_raw_parts", issue = "67456")]
 pub const unsafe fn from_raw_parts<'a, T>(data: *const T, len: usize) -> &'a [T] {
-    debug_check_data_len(data, len);
-
     // SAFETY: the caller must uphold the safety contract for `from_raw_parts`.
-    unsafe { &*ptr::slice_from_raw_parts(data, len) }
+    unsafe {
+        assert_unsafe_precondition!(
+            is_aligned_and_not_null(data)
+                && crate::mem::size_of::<T>().saturating_mul(len) <= isize::MAX as usize
+        );
+        &*ptr::slice_from_raw_parts(data, len)
+    }
 }
 
 /// Performs the same functionality as [`from_raw_parts`], except that a
@@ -125,46 +130,16 @@ pub const unsafe fn from_raw_parts<'a, T>(data: *const T, len: usize) -> &'a [T]
 #[stable(feature = "rust1", since = "1.0.0")]
 #[rustc_const_unstable(feature = "const_slice_from_raw_parts", issue = "67456")]
 pub const unsafe fn from_raw_parts_mut<'a, T>(data: *mut T, len: usize) -> &'a mut [T] {
-    debug_check_data_len(data as _, len);
-
     // SAFETY: the caller must uphold the safety contract for `from_raw_parts_mut`.
-    unsafe { &mut *ptr::slice_from_raw_parts_mut(data, len) }
-}
-
-// In debug builds checks that `data` pointer is aligned and non-null and that slice with given `len` would cover less than half the address space
-#[cfg(debug_assertions)]
-#[unstable(feature = "const_slice_from_raw_parts", issue = "67456")]
-#[rustc_const_unstable(feature = "const_slice_from_raw_parts", issue = "67456")]
-const fn debug_check_data_len<T>(data: *const T, len: usize) {
-    fn rt_check<T>(data: *const T) {
-        use crate::intrinsics::is_aligned_and_not_null;
-
-        assert!(is_aligned_and_not_null(data), "attempt to create unaligned or null slice");
-    }
-
-    const fn noop<T>(_: *const T) {}
-
-    // SAFETY:
-    //
-    // `rt_check` is just a debug assert to hint users that they are causing UB,
-    // it is not required for safety (the safety must be guatanteed by
-    // the `from_raw_parts[_mut]` caller).
-    //
-    // As per our safety precondition, we may assume that assertion above never fails.
-    // Therefore, noop and rt_check are observably equivalent.
     unsafe {
-        crate::intrinsics::const_eval_select((data,), noop, rt_check);
+        assert_unsafe_precondition!(
+            is_aligned_and_not_null(data)
+                && crate::mem::size_of::<T>().saturating_mul(len) <= isize::MAX as usize
+        );
+        &mut *ptr::slice_from_raw_parts_mut(data, len)
     }
-
-    assert!(
-        crate::mem::size_of::<T>().saturating_mul(len) <= isize::MAX as usize,
-        "attempt to create slice covering at least half the address space"
-    );
-}
-
-#[cfg(not(debug_assertions))]
-const fn debug_check_data_len<T>(_data: *const T, _len: usize) {}
 
 /// Converts a reference to T into a slice of length 1 (without copying).
 #[stable(feature = "from_ref", since = "1.28.0")]
 #[rustc_const_unstable(feature = "const_slice_from_ref", issue = "90206")]
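Finally, for the raw-slice constructors: the new assertion rejects both unaligned or null `data` pointers and lengths whose byte size exceeds `isize::MAX`. A sketch of a violation it catches (same debug-assertions caveat as above):

    fn main() {
        // Null data pointer: UB by `from_raw_parts`'s contract. Under a
        // debug-assertions core the assertion fails and the process aborts
        // before a slice is ever materialized.
        let s: &[u64] = unsafe { std::slice::from_raw_parts(std::ptr::null(), 1) };
        println!("{}", s.len());
    }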