Mirror of https://github.com/rust-lang/rust.git
Auto merge of #130251 - saethlin:ptr-offset-preconditions, r=Amanieu
Add precondition checks to `ptr::offset`, `ptr::add`, `ptr::sub`.

All of `offset`, `add`, and `sub` currently have the trivial preconditions that the offset in bytes must be at most `isize::MAX` and that the computation of the new address must not wrap. This PR adds precondition checks for these. As with slice indexing, the unsafe APIs that carry explicit checks are implemented on top of the intrinsics directly, so that the error message names the misused API rather than an implementation detail.

Experimentation indicates these checks add 1-2% compile-time overhead, primarily from the checks on `add`. A crater run (https://github.com/rust-lang/rust/pull/130251#issuecomment-2395824565) shows that some existing code makes buggy calls to `ptr::offset` that apply a negative offset to a null pointer, but nothing in the run hits the `ptr::add` or `ptr::sub` checks, which is an argument for cfg'ing out those two checks on account of their overhead.
This commit is contained in: b8495e5dd2
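
As a concrete illustration of the crater finding, the sketch below (a hypothetical example, not code from this PR) applies a negative offset to a null pointer. The offset itself fits in an `isize`, but the address calculation wraps below zero, which is exactly the case the new `ptr::offset` check rejects when UB checks are enabled (for example, with a standard library built with debug assertions).

fn main() {
    let p: *const u8 = std::ptr::null();
    // Undefined behavior: 0 + (-1) wraps below zero. With the new precondition
    // check active, this aborts with a message naming `ptr::offset` rather
    // than an internal helper.
    let _q = unsafe { p.offset(-1) };
}

The diff below adds the corresponding checks to the `*const T` and `*mut T` methods and adjusts the affected codegen and MIR tests.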
@@ -395,6 +395,36 @@ impl<T: ?Sized> *const T {
     where
         T: Sized,
     {
+        #[inline]
+        const fn runtime_offset_nowrap(this: *const (), count: isize, size: usize) -> bool {
+            #[inline]
+            fn runtime(this: *const (), count: isize, size: usize) -> bool {
+                // We know `size <= isize::MAX` so the `as` cast here is not lossy.
+                let Some(byte_offset) = count.checked_mul(size as isize) else {
+                    return false;
+                };
+                let (_, overflow) = this.addr().overflowing_add_signed(byte_offset);
+                !overflow
+            }
+
+            const fn comptime(_: *const (), _: isize, _: usize) -> bool {
+                true
+            }
+
+            // We can use const_eval_select here because this is only for UB checks.
+            intrinsics::const_eval_select((this, count, size), comptime, runtime)
+        }
+
+        ub_checks::assert_unsafe_precondition!(
+            check_language_ub,
+            "ptr::offset requires the address calculation to not overflow",
+            (
+                this: *const () = self as *const (),
+                count: isize = count,
+                size: usize = size_of::<T>(),
+            ) => runtime_offset_nowrap(this, count, size)
+        );
+
         // SAFETY: the caller must uphold the safety contract for `offset`.
         unsafe { intrinsics::offset(self, count) }
     }
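
The predicate added above boils down to plain integer arithmetic. A minimal standalone sketch of the same logic, using an ordinary function with a hypothetical name instead of the `core` internals:

// Sketch of the runtime check: `count * size` must fit in `isize`, and adding
// it to the address must not wrap around the address space.
fn offset_nowrap(addr: usize, count: isize, size: usize) -> bool {
    // `size` is the size of a Rust type, so `size <= isize::MAX` and the cast is not lossy.
    let Some(byte_offset) = count.checked_mul(size as isize) else {
        return false;
    };
    let (_, overflow) = addr.overflowing_add_signed(byte_offset);
    !overflow
}

fn main() {
    assert!(offset_nowrap(0x1000, 4, 8)); // 0x1000 + 32 bytes: fine
    assert!(!offset_nowrap(0, -1, 1)); // null minus one byte: wraps below zero
    assert!(!offset_nowrap(usize::MAX, 1, 1)); // wraps past the end of the address space
}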
@@ -726,7 +756,6 @@ impl<T: ?Sized> *const T {
                 true
             }

-            #[allow(unused_unsafe)]
             intrinsics::const_eval_select((this, origin), comptime, runtime)
         }

@@ -858,6 +887,36 @@ impl<T: ?Sized> *const T {
     where
         T: Sized,
     {
+        #[cfg(debug_assertions)]
+        #[inline]
+        const fn runtime_add_nowrap(this: *const (), count: usize, size: usize) -> bool {
+            #[inline]
+            fn runtime(this: *const (), count: usize, size: usize) -> bool {
+                let Some(byte_offset) = count.checked_mul(size) else {
+                    return false;
+                };
+                let (_, overflow) = this.addr().overflowing_add(byte_offset);
+                byte_offset <= (isize::MAX as usize) && !overflow
+            }
+
+            const fn comptime(_: *const (), _: usize, _: usize) -> bool {
+                true
+            }
+
+            intrinsics::const_eval_select((this, count, size), comptime, runtime)
+        }
+
+        #[cfg(debug_assertions)] // Expensive, and doesn't catch much in the wild.
+        ub_checks::assert_unsafe_precondition!(
+            check_language_ub,
+            "ptr::add requires that the address calculation does not overflow",
+            (
+                this: *const () = self as *const (),
+                count: usize = count,
+                size: usize = size_of::<T>(),
+            ) => runtime_add_nowrap(this, count, size)
+        );
+
         // SAFETY: the caller must uphold the safety contract for `offset`.
         unsafe { intrinsics::offset(self, count) }
     }
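
For `add` the count is unsigned, so the check instead requires that the byte offset is at most `isize::MAX` and that the unsigned addition does not wrap; as the attribute's comment notes, it is gated behind `cfg(debug_assertions)` because it is expensive and doesn't catch much in the wild. A sketch of the predicate, under the same assumptions as the earlier one:

fn add_nowrap(addr: usize, count: usize, size: usize) -> bool {
    let Some(byte_offset) = count.checked_mul(size) else {
        return false;
    };
    let (_, overflow) = addr.overflowing_add(byte_offset);
    byte_offset <= (isize::MAX as usize) && !overflow
}

fn main() {
    assert!(add_nowrap(0x1000, 4, 8));
    // More than isize::MAX bytes is rejected even though the sum does not wrap.
    assert!(!add_nowrap(0, isize::MAX as usize + 1, 1));
    // Wrapping past the end of the address space is rejected too.
    assert!(!add_nowrap(usize::MAX, 1, 1));
}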
@@ -936,6 +995,35 @@ impl<T: ?Sized> *const T {
     where
         T: Sized,
     {
+        #[cfg(debug_assertions)]
+        #[inline]
+        const fn runtime_sub_nowrap(this: *const (), count: usize, size: usize) -> bool {
+            #[inline]
+            fn runtime(this: *const (), count: usize, size: usize) -> bool {
+                let Some(byte_offset) = count.checked_mul(size) else {
+                    return false;
+                };
+                byte_offset <= (isize::MAX as usize) && this.addr() >= byte_offset
+            }
+
+            const fn comptime(_: *const (), _: usize, _: usize) -> bool {
+                true
+            }
+
+            intrinsics::const_eval_select((this, count, size), comptime, runtime)
+        }
+
+        #[cfg(debug_assertions)] // Expensive, and doesn't catch much in the wild.
+        ub_checks::assert_unsafe_precondition!(
+            check_language_ub,
+            "ptr::sub requires that the address calculation does not overflow",
+            (
+                this: *const () = self as *const (),
+                count: usize = count,
+                size: usize = size_of::<T>(),
+            ) => runtime_sub_nowrap(this, count, size)
+        );
+
         if T::IS_ZST {
             // Pointer arithmetic does nothing when the pointee is a ZST.
             self
@@ -943,7 +1031,7 @@ impl<T: ?Sized> *const T {
             // SAFETY: the caller must uphold the safety contract for `offset`.
             // Because the pointee is *not* a ZST, that means that `count` is
             // at most `isize::MAX`, and thus the negation cannot overflow.
-            unsafe { self.offset((count as isize).unchecked_neg()) }
+            unsafe { intrinsics::offset(self, intrinsics::unchecked_sub(0, count as isize)) }
         }
     }

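The `sub` check is the mirror image: the byte offset must be at most `isize::MAX` and must not exceed the pointer's address. Note also the hunk just above: `sub` now negates the count with `intrinsics::unchecked_sub(0, count as isize)` and calls `intrinsics::offset` directly instead of going through `self.offset`, so, per the description, a misuse is reported against `ptr::sub` itself rather than against the `ptr::offset` check it would otherwise reach. A sketch of the predicate with a hypothetical name:

fn sub_nowrap(addr: usize, count: usize, size: usize) -> bool {
    let Some(byte_offset) = count.checked_mul(size) else {
        return false;
    };
    byte_offset <= (isize::MAX as usize) && addr >= byte_offset
}

fn main() {
    assert!(sub_nowrap(0x1000, 4, 8)); // 0x1000 - 32 stays in range
    assert!(!sub_nowrap(0x10, 4, 8)); // 0x10 - 32 would wrap below zero
}

The `*mut T` hunks that follow mirror the `*const T` changes above.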
@@ -393,6 +393,37 @@ impl<T: ?Sized> *mut T {
     where
         T: Sized,
     {
+        #[inline]
+        const fn runtime_offset_nowrap(this: *const (), count: isize, size: usize) -> bool {
+            #[inline]
+            fn runtime(this: *const (), count: isize, size: usize) -> bool {
+                // `size` is the size of a Rust type, so we know that
+                // `size <= isize::MAX` and thus `as` cast here is not lossy.
+                let Some(byte_offset) = count.checked_mul(size as isize) else {
+                    return false;
+                };
+                let (_, overflow) = this.addr().overflowing_add_signed(byte_offset);
+                !overflow
+            }
+
+            const fn comptime(_: *const (), _: isize, _: usize) -> bool {
+                true
+            }
+
+            // We can use const_eval_select here because this is only for UB checks.
+            intrinsics::const_eval_select((this, count, size), comptime, runtime)
+        }
+
+        ub_checks::assert_unsafe_precondition!(
+            check_language_ub,
+            "ptr::offset requires the address calculation to not overflow",
+            (
+                this: *const () = self as *const (),
+                count: isize = count,
+                size: usize = size_of::<T>(),
+            ) => runtime_offset_nowrap(this, count, size)
+        );
+
         // SAFETY: the caller must uphold the safety contract for `offset`.
         // The obtained pointer is valid for writes since the caller must
         // guarantee that it points to the same allocated object as `self`.
@@ -940,6 +971,36 @@ impl<T: ?Sized> *mut T {
     where
         T: Sized,
     {
+        #[cfg(debug_assertions)]
+        #[inline]
+        const fn runtime_add_nowrap(this: *const (), count: usize, size: usize) -> bool {
+            #[inline]
+            fn runtime(this: *const (), count: usize, size: usize) -> bool {
+                let Some(byte_offset) = count.checked_mul(size) else {
+                    return false;
+                };
+                let (_, overflow) = this.addr().overflowing_add(byte_offset);
+                byte_offset <= (isize::MAX as usize) && !overflow
+            }
+
+            const fn comptime(_: *const (), _: usize, _: usize) -> bool {
+                true
+            }
+
+            intrinsics::const_eval_select((this, count, size), comptime, runtime)
+        }
+
+        #[cfg(debug_assertions)] // Expensive, and doesn't catch much in the wild.
+        ub_checks::assert_unsafe_precondition!(
+            check_language_ub,
+            "ptr::add requires that the address calculation does not overflow",
+            (
+                this: *const () = self as *const (),
+                count: usize = count,
+                size: usize = size_of::<T>(),
+            ) => runtime_add_nowrap(this, count, size)
+        );
+
         // SAFETY: the caller must uphold the safety contract for `offset`.
         unsafe { intrinsics::offset(self, count) }
     }
@@ -1018,6 +1079,35 @@ impl<T: ?Sized> *mut T {
     where
         T: Sized,
     {
+        #[cfg(debug_assertions)]
+        #[inline]
+        const fn runtime_sub_nowrap(this: *const (), count: usize, size: usize) -> bool {
+            #[inline]
+            fn runtime(this: *const (), count: usize, size: usize) -> bool {
+                let Some(byte_offset) = count.checked_mul(size) else {
+                    return false;
+                };
+                byte_offset <= (isize::MAX as usize) && this.addr() >= byte_offset
+            }
+
+            const fn comptime(_: *const (), _: usize, _: usize) -> bool {
+                true
+            }
+
+            intrinsics::const_eval_select((this, count, size), comptime, runtime)
+        }
+
+        #[cfg(debug_assertions)] // Expensive, and doesn't catch much in the wild.
+        ub_checks::assert_unsafe_precondition!(
+            check_language_ub,
+            "ptr::sub requires that the address calculation does not overflow",
+            (
+                this: *const () = self as *const (),
+                count: usize = count,
+                size: usize = size_of::<T>(),
+            ) => runtime_sub_nowrap(this, count, size)
+        );
+
         if T::IS_ZST {
             // Pointer arithmetic does nothing when the pointee is a ZST.
             self
@@ -1025,7 +1115,7 @@ impl<T: ?Sized> *mut T {
             // SAFETY: the caller must uphold the safety contract for `offset`.
             // Because the pointee is *not* a ZST, that means that `count` is
            // at most `isize::MAX`, and thus the negation cannot overflow.
-            unsafe { self.offset((count as isize).unchecked_neg()) }
+            unsafe { intrinsics::offset(self, intrinsics::unchecked_sub(0, count as isize)) }
         }
     }

@@ -14,7 +14,9 @@ pub fn u64_opt_as_slice(o: &Option<u64>) -> &[u64] {
     // CHECK-NOT: br
     // CHECK-NOT: switch
    // CHECK-NOT: icmp
-    // CHECK: %[[LEN:.+]] = load i64,{{.+}} !range ![[META_U64:.+]], !noundef
+    // CHECK: %[[LEN:.+]] = load i64
+    // CHECK-SAME: !range ![[META_U64:[0-9]+]],
+    // CHECK-SAME: !noundef
     // CHECK-NOT: select
     // CHECK-NOT: br
     // CHECK-NOT: switch
@@ -51,7 +53,9 @@ pub fn u8_opt_as_slice(o: &Option<u8>) -> &[u8] {
     // CHECK-NOT: br
     // CHECK-NOT: switch
     // CHECK-NOT: icmp
-    // CHECK: %[[TAG:.+]] = load i8,{{.+}} !range ![[META_U8:.+]], !noundef
+    // CHECK: %[[TAG:.+]] = load i8
+    // CHECK-SAME: !range ![[META_U8:[0-9]+]],
+    // CHECK-SAME: !noundef
     // CHECK: %[[LEN:.+]] = zext{{.*}} i8 %[[TAG]] to i64
     // CHECK-NOT: select
     // CHECK-NOT: br
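The two codegen test hunks above keep the same requirements but split them across FileCheck directives: a `CHECK-SAME` line continues matching on the same output line as the previous `CHECK`, so the `!range` and `!noundef` metadata must still appear on the load instruction, in that order, while the pattern no longer constrains exactly what sits between them. An illustrative directive group (not from the PR's test files) and the kind of IR line it would match:

// Illustrative only; hypothetical example of the CHECK-SAME style.
// CHECK: %[[LEN:.+]] = load i64
// CHECK-SAME: !range
// CHECK-SAME: !noundef
//
// The three directives above would match a single line of LLVM IR such as:
//   %len = load i64, ptr %p, align 8, !range !3, !noundef !4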
@@ -1,6 +1,7 @@
 // skip-filecheck
 //@ compile-flags: -O -C debuginfo=0 -Zmir-opt-level=2
 //@ only-64bit (constants for `None::<&T>` show in the output)
+//@ ignore-debug: precondition checks on ptr::add are under cfg(debug_assertions)
 // EMIT_MIR_FOR_EACH_PANIC_STRATEGY

 #![crate_type = "lib"]