rename ptr::invalid -> ptr::without_provenance

also introduce ptr::dangling matching NonNull::dangling
Ralf Jung 2024-02-11 19:04:29 +01:00
parent 1d447a9946
commit b58f647d54
56 changed files with 304 additions and 233 deletions
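For call sites, a minimal sketch (not part of the diff; it assumes a nightly toolchain with the strict_provenance feature, as required at the time of this commit) of what the rename means in practice:

    #![feature(strict_provenance)]
    use std::{mem, ptr};

    fn main() {
        // Formerly ptr::invalid(0x1000) / ptr::invalid_mut(0x1000): address-only
        // pointers that carry no provenance and must not be dereferenced.
        let p: *const u8 = ptr::without_provenance(0x1000);
        let q: *mut u8 = ptr::without_provenance_mut(0x1000);
        assert_eq!(p.addr(), 0x1000);
        assert_eq!(q.addr(), 0x1000);

        // Newly introduced, mirroring NonNull::dangling: a well-aligned pointer
        // with no provenance, handy for types that allocate lazily.
        let d: *mut u32 = ptr::dangling_mut();
        assert_eq!(d.addr(), mem::align_of::<u32>());
    }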

View File

@@ -95,7 +95,7 @@ impl<T> ArenaChunk<T> {
unsafe {
if mem::size_of::<T>() == 0 {
// A pointer as large as possible for zero-sized elements.
-ptr::invalid_mut(!0)
+ptr::without_provenance_mut(!0)
} else {
self.start().add(self.storage.len())
}

View File

@@ -2804,7 +2804,9 @@ impl<T> Weak<T> {
#[must_use]
pub const fn new() -> Weak<T> {
Weak {
-ptr: unsafe { NonNull::new_unchecked(ptr::invalid_mut::<RcBox<T>>(usize::MAX)) },
+ptr: unsafe {
+NonNull::new_unchecked(ptr::without_provenance_mut::<RcBox<T>>(usize::MAX))
+},
alloc: Global,
}
}
@@ -2829,7 +2831,9 @@ impl<T, A: Allocator> Weak<T, A> {
#[unstable(feature = "allocator_api", issue = "32838")]
pub fn new_in(alloc: A) -> Weak<T, A> {
Weak {
-ptr: unsafe { NonNull::new_unchecked(ptr::invalid_mut::<RcBox<T>>(usize::MAX)) },
+ptr: unsafe {
+NonNull::new_unchecked(ptr::without_provenance_mut::<RcBox<T>>(usize::MAX))
+},
alloc,
}
}

View File

@@ -2555,7 +2555,9 @@ impl<T> Weak<T> {
#[must_use]
pub const fn new() -> Weak<T> {
Weak {
-ptr: unsafe { NonNull::new_unchecked(ptr::invalid_mut::<ArcInner<T>>(usize::MAX)) },
+ptr: unsafe {
+NonNull::new_unchecked(ptr::without_provenance_mut::<ArcInner<T>>(usize::MAX))
+},
alloc: Global,
}
}
@@ -2583,7 +2585,9 @@ impl<T, A: Allocator> Weak<T, A> {
#[unstable(feature = "allocator_api", issue = "32838")]
pub fn new_in(alloc: A) -> Weak<T, A> {
Weak {
-ptr: unsafe { NonNull::new_unchecked(ptr::invalid_mut::<ArcInner<T>>(usize::MAX)) },
+ptr: unsafe {
+NonNull::new_unchecked(ptr::without_provenance_mut::<ArcInner<T>>(usize::MAX))
+},
alloc,
}
}

View File

@@ -77,14 +77,14 @@ fn test_format_macro_interface() {
t!(format!("{}", "foo"), "foo");
t!(format!("{}", "foo".to_string()), "foo");
if cfg!(target_pointer_width = "32") {
-t!(format!("{:#p}", ptr::invalid::<isize>(0x1234)), "0x00001234");
-t!(format!("{:#p}", ptr::invalid_mut::<isize>(0x1234)), "0x00001234");
+t!(format!("{:#p}", ptr::without_provenance::<isize>(0x1234)), "0x00001234");
+t!(format!("{:#p}", ptr::without_provenance_mut::<isize>(0x1234)), "0x00001234");
} else {
-t!(format!("{:#p}", ptr::invalid::<isize>(0x1234)), "0x0000000000001234");
-t!(format!("{:#p}", ptr::invalid_mut::<isize>(0x1234)), "0x0000000000001234");
+t!(format!("{:#p}", ptr::without_provenance::<isize>(0x1234)), "0x0000000000001234");
+t!(format!("{:#p}", ptr::without_provenance_mut::<isize>(0x1234)), "0x0000000000001234");
}
-t!(format!("{:p}", ptr::invalid::<isize>(0x1234)), "0x1234");
-t!(format!("{:p}", ptr::invalid_mut::<isize>(0x1234)), "0x1234");
+t!(format!("{:p}", ptr::without_provenance::<isize>(0x1234)), "0x1234");
+t!(format!("{:p}", ptr::without_provenance_mut::<isize>(0x1234)), "0x1234");
t!(format!("{A:x}"), "aloha");
t!(format!("{B:X}"), "adios");
t!(format!("foo {} ☃☃☃☃☃☃", "bar"), "foo bar ☃☃☃☃☃☃");
@@ -208,7 +208,7 @@ fn test_format_macro_interface() {
{
let val = usize::MAX;
let exp = format!("{val:#x}");
-t!(format!("{:p}", std::ptr::invalid::<isize>(val)), exp);
+t!(format!("{:p}", std::ptr::without_provenance::<isize>(val)), exp);
}
// Escaping

View File

@@ -2575,7 +2575,7 @@ fn test_box_zero_allocator() {
assert!(state.0.insert(addr));
state.1 += 1;
std::println!("allocating {addr}");
-std::ptr::invalid_mut(addr)
+std::ptr::without_provenance_mut(addr)
} else {
unsafe { std::alloc::alloc(layout) }
};

View File

@@ -215,7 +215,7 @@ impl Layout {
#[inline]
pub const fn dangling(&self) -> NonNull<u8> {
// SAFETY: align is guaranteed to be non-zero
-unsafe { NonNull::new_unchecked(crate::ptr::invalid_mut::<u8>(self.align())) }
+unsafe { NonNull::new_unchecked(crate::ptr::without_provenance_mut::<u8>(self.align())) }
}
/// Creates a layout describing the record that can hold a value

View File

@@ -1155,7 +1155,7 @@ extern "rust-intrinsic" {
///
/// Transmuting pointers *to* integers in a `const` context is [undefined behavior][ub],
/// unless the pointer was originally created *from* an integer.
-/// (That includes this function specifically, integer-to-pointer casts, and helpers like [`invalid`][crate::ptr::invalid],
+/// (That includes this function specifically, integer-to-pointer casts, and helpers like [`invalid`][crate::ptr::dangling],
/// but also semantically-equivalent conversions such as punning through `repr(C)` union fields.)
/// Any attempt to use the resulting value for integer operations will abort const-evaluation.
/// (And even outside `const`, such transmutation is touching on many unspecified aspects of the

View File

@@ -181,7 +181,7 @@ impl<T: ?Sized> *const T {
///
/// This is similar to `self as usize`, which semantically discards *provenance* and
/// *address-space* information. However, unlike `self as usize`, casting the returned address
-/// back to a pointer yields [`invalid`][], which is undefined behavior to dereference. To
+/// back to a pointer yields a [pointer without provenance][without_provenance], which is undefined behavior to dereference. To
/// properly restore the lost information and obtain a dereferenceable pointer, use
/// [`with_addr`][pointer::with_addr] or [`map_addr`][pointer::map_addr].
///

View File

@@ -4,13 +4,13 @@
//!
//! # Safety
//!
-//! Many functions in this module take raw pointers as arguments and read from
-//! or write to them. For this to be safe, these pointers must be *valid*.
-//! Whether a pointer is valid depends on the operation it is used for
-//! (read or write), and the extent of the memory that is accessed (i.e.,
-//! how many bytes are read/written). Most functions use `*mut T` and `*const T`
-//! to access only a single value, in which case the documentation omits the size
-//! and implicitly assumes it to be `size_of::<T>()` bytes.
+//! Many functions in this module take raw pointers as arguments and read from or write to them. For
+//! this to be safe, these pointers must be *valid* for the given access. Whether a pointer is valid
+//! depends on the operation it is used for (read or write), and the extent of the memory that is
+//! accessed (i.e., how many bytes are read/written) -- it makes no sense to ask "is this pointer
+//! valid"; one has to ask "is this pointer valid for a given access". Most functions use `*mut T`
+//! and `*const T` to access only a single value, in which case the documentation omits the size and
+//! implicitly assumes it to be `size_of::<T>()` bytes.
//!
//! The precise rules for validity are not determined yet. The guarantees that are
//! provided at this point are very minimal:
@@ -26,7 +26,7 @@
//! some memory happens to exist at that address and gets deallocated. This corresponds to writing
//! your own allocator: allocating zero-sized objects is not very hard. The canonical way to
//! obtain a pointer that is valid for zero-sized accesses is [`NonNull::dangling`].
-//FIXME: mention `ptr::invalid` above, once it is stable.
+//FIXME: mention `ptr::dangling` above, once it is stable.
//! * All accesses performed by functions in this module are *non-atomic* in the sense
//! of [atomic operations] used to synchronize between threads. This means it is
//! undefined behavior to perform two concurrent accesses to the same location from different
@@ -44,6 +44,10 @@
//! information, see the [book] as well as the section in the reference devoted
//! to [undefined behavior][ub].
//!
+//! We say that a pointer is "dangling" if it is not valid for any non-zero-sized accesses. This
+//! means out-of-bounds pointers, pointers to freed memory, null pointers, and pointers created with
+//! [`NonNull::dangling`] are all dangling.
+//!
//! ## Alignment
//!
//! Valid raw pointers as defined above are not necessarily properly aligned (where
@@ -167,6 +171,7 @@
//! * The **address-space** it is part of (e.g. "data" vs "code" in WASM).
//! * The **address** it points to, which can be represented by a `usize`.
//! * The **provenance** it has, defining the memory it has permission to access.
+//! Provenance can be absent, in which case the pointer does not have permission to access any memory.
//!
//! Under Strict Provenance, a usize *cannot* accurately represent a pointer, and converting from
//! a pointer to a usize is generally an operation which *only* extracts the address. It is
@@ -270,11 +275,12 @@
//!
//! But it *is* still sound to:
//!
-//! * Create an invalid pointer from just an address (see [`ptr::invalid`][]). This can
-//! be used for sentinel values like `null` *or* to represent a tagged pointer that will
-//! never be dereferenceable. In general, it is always sound for an integer to pretend
-//! to be a pointer "for fun" as long as you don't use operations on it which require
-//! it to be valid (offset, read, write, etc).
+//! * Create a pointer without provenance from just an address (see [`ptr::dangling`][]). Such a
+//! pointer cannot be used for memory accesses (except for zero-sized accesses). This can still be
+//! useful for sentinel values like `null` *or* to represent a tagged pointer that will never be
+//! dereferenceable. In general, it is always sound for an integer to pretend to be a pointer "for
+//! fun" as long as you don't use operations on it which require it to be valid (non-zero-sized
+//! offset, read, write, etc).
//!
//! * Forge an allocation of size zero at any sufficiently aligned non-null address.
//! i.e. the usual "ZSTs are fake, do what you want" rules apply *but* this only applies
@@ -283,7 +289,7 @@
//! that allocation and it will still get invalidated if the allocation gets deallocated.
//! In the future we may introduce an API to make such a forged allocation explicit.
//!
-//! * [`wrapping_offset`][] a pointer outside its provenance. This includes invalid pointers
+//! * [`wrapping_offset`][] a pointer outside its provenance. This includes pointers
//! which have "no" provenance. Unfortunately there may be practical limits on this for a
//! particular platform, and it's an open question as to how to specify this (if at all).
//! Notably, [CHERI][] relies on a compression scheme that can't handle a
@@ -294,7 +300,7 @@
//! generous (think kilobytes, not bytes).
//!
//! * Compare arbitrary pointers by address. Addresses *are* just integers and so there is
-//! always a coherent answer, even if the pointers are invalid or from different
+//! always a coherent answer, even if the pointers are dangling or from different
//! address-spaces/provenances. Of course, comparing addresses from different address-spaces
//! is generally going to be *meaningless*, but so is comparing Kilograms to Meters, and Rust
//! doesn't prevent that either. Similarly, if you get "lucky" and notice that a pointer
@@ -367,7 +373,7 @@
//! [`with_addr`]: pointer::with_addr
//! [`map_addr`]: pointer::map_addr
//! [`addr`]: pointer::addr
-//! [`ptr::invalid`]: core::ptr::invalid
+//! [`ptr::dangling`]: core::ptr::dangling
//! [`expose_addr`]: pointer::expose_addr
//! [`from_exposed_addr`]: from_exposed_addr
//! [Miri]: https://github.com/rust-lang/miri
@@ -537,7 +543,7 @@ pub unsafe fn drop_in_place<T: ?Sized>(to_drop: *mut T) {
#[rustc_allow_const_fn_unstable(ptr_metadata)]
#[rustc_diagnostic_item = "ptr_null"]
pub const fn null<T: ?Sized + Thin>() -> *const T {
-from_raw_parts(invalid(0), ())
+from_raw_parts(without_provenance(0), ())
}
/// Creates a null mutable raw pointer.
@@ -563,32 +569,26 @@ pub const fn null<T: ?Sized + Thin>() -> *const T {
#[rustc_allow_const_fn_unstable(ptr_metadata)]
#[rustc_diagnostic_item = "ptr_null_mut"]
pub const fn null_mut<T: ?Sized + Thin>() -> *mut T {
-from_raw_parts_mut(invalid_mut(0), ())
+from_raw_parts_mut(without_provenance_mut(0), ())
}
-/// Creates an invalid pointer with the given address.
+/// Creates a pointer with the given address and no provenance.
+///
+/// Without provenance, this pointer is not associated with any actual allocation. Such a
+/// no-provenance pointer may be used for zero-sized memory accesses (if suitably aligned), but
+/// non-zero-sized memory accesses with a no-provenance pointer are UB. No-provenance pointers are
+/// little more than a usize address in disguise.
///
/// This is different from `addr as *const T`, which creates a pointer that picks up a previously
/// exposed provenance. See [`from_exposed_addr`] for more details on that operation.
///
-/// The module's top-level documentation discusses the precise meaning of an "invalid"
-/// pointer but essentially this expresses that the pointer is not associated
-/// with any actual allocation and is little more than a usize address in disguise.
-///
-/// This pointer will have no provenance associated with it and is therefore
-/// UB to read/write/offset. This mostly exists to facilitate things
-/// like `ptr::null` and `NonNull::dangling` which make invalid pointers.
-///
-/// (Standard "Zero-Sized-Types get to cheat and lie" caveats apply, although it
-/// may be desirable to give them their own API just to make that 100% clear.)
-///
/// This API and its claimed semantics are part of the Strict Provenance experiment,
/// see the [module documentation][crate::ptr] for details.
#[inline(always)]
#[must_use]
#[rustc_const_stable(feature = "stable_things_using_strict_provenance", since = "1.61.0")]
#[unstable(feature = "strict_provenance", issue = "95228")]
-pub const fn invalid<T>(addr: usize) -> *const T {
+pub const fn without_provenance<T>(addr: usize) -> *const T {
// FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic.
// We use transmute rather than a cast so tools like Miri can tell that this
// is *not* the same as from_exposed_addr.
@@ -597,29 +597,40 @@ pub const fn invalid<T>(addr: usize) -> *const T {
unsafe { mem::transmute(addr) }
}
-/// Creates an invalid mutable pointer with the given address.
+/// Creates a new pointer that is dangling, but well-aligned.
+///
+/// This is useful for initializing types which lazily allocate, like
+/// `Vec::new` does.
+///
+/// Note that the pointer value may potentially represent a valid pointer to
+/// a `T`, which means this must not be used as a "not yet initialized"
+/// sentinel value. Types that lazily allocate must track initialization by
+/// some other means.
+#[inline(always)]
+#[must_use]
+#[rustc_const_stable(feature = "stable_things_using_strict_provenance", since = "1.61.0")]
+#[unstable(feature = "strict_provenance", issue = "95228")]
+pub const fn dangling<T>() -> *const T {
+without_provenance(mem::align_of::<T>())
+}
+/// Creates a pointer with the given address and no provenance.
+///
+/// Without provenance, this pointer is not associated with any actual allocation. Such a
+/// no-provenance pointer may be used for zero-sized memory accesses (if suitably aligned), but
+/// non-zero-sized memory accesses with a no-provenance pointer are UB. No-provenance pointers are
+/// little more than a usize address in disguise.
///
/// This is different from `addr as *mut T`, which creates a pointer that picks up a previously
/// exposed provenance. See [`from_exposed_addr_mut`] for more details on that operation.
///
-/// The module's top-level documentation discusses the precise meaning of an "invalid"
-/// pointer but essentially this expresses that the pointer is not associated
-/// with any actual allocation and is little more than a usize address in disguise.
-///
-/// This pointer will have no provenance associated with it and is therefore
-/// UB to read/write/offset. This mostly exists to facilitate things
-/// like `ptr::null` and `NonNull::dangling` which make invalid pointers.
-///
-/// (Standard "Zero-Sized-Types get to cheat and lie" caveats apply, although it
-/// may be desirable to give them their own API just to make that 100% clear.)
-///
/// This API and its claimed semantics are part of the Strict Provenance experiment,
/// see the [module documentation][crate::ptr] for details.
#[inline(always)]
#[must_use]
#[rustc_const_stable(feature = "stable_things_using_strict_provenance", since = "1.61.0")]
#[unstable(feature = "strict_provenance", issue = "95228")]
-pub const fn invalid_mut<T>(addr: usize) -> *mut T {
+pub const fn without_provenance_mut<T>(addr: usize) -> *mut T {
// FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic.
// We use transmute rather than a cast so tools like Miri can tell that this
// is *not* the same as from_exposed_addr.
@@ -628,6 +639,23 @@ pub const fn invalid_mut<T>(addr: usize) -> *mut T {
unsafe { mem::transmute(addr) }
}
+/// Creates a new pointer that is dangling, but well-aligned.
+///
+/// This is useful for initializing types which lazily allocate, like
+/// `Vec::new` does.
+///
+/// Note that the pointer value may potentially represent a valid pointer to
+/// a `T`, which means this must not be used as a "not yet initialized"
+/// sentinel value. Types that lazily allocate must track initialization by
+/// some other means.
+#[inline(always)]
+#[must_use]
+#[rustc_const_stable(feature = "stable_things_using_strict_provenance", since = "1.61.0")]
+#[unstable(feature = "strict_provenance", issue = "95228")]
+pub const fn dangling_mut<T>() -> *mut T {
+without_provenance_mut(mem::align_of::<T>())
+}
/// Convert an address back to a pointer, picking up a previously 'exposed' provenance.
///
/// This is a more rigorously specified alternative to `addr as *const T`. The provenance of the
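A usage sketch of the distinction drawn in the documentation above (not part of this diff; it assumes a nightly toolchain with the strict_provenance feature, as in the doc tests elsewhere in this commit):

    #![feature(strict_provenance)]
    use std::ptr;

    fn main() {
        let x = 42u32;
        let p = &x as *const u32;

        // Address-only round trip: the result has no provenance, so dereferencing
        // it would be UB. It is still fine for comparisons, hashing, or tag storage.
        let addr_only: *const u32 = ptr::without_provenance(p.addr());
        assert_eq!(addr_only.addr(), p.addr());

        // To get a usable pointer back at a given address, derive it from a pointer
        // that still carries the right provenance.
        let usable = p.with_addr(p.addr());
        assert_eq!(unsafe { usable.read() }, 42);
    }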

View File

@@ -188,9 +188,10 @@ impl<T: ?Sized> *mut T {
///
/// This is similar to `self as usize`, which semantically discards *provenance* and
/// *address-space* information. However, unlike `self as usize`, casting the returned address
-/// back to a pointer yields [`invalid`][], which is undefined behavior to dereference. To
-/// properly restore the lost information and obtain a dereferenceable pointer, use
-/// [`with_addr`][pointer::with_addr] or [`map_addr`][pointer::map_addr].
+/// back to a pointer yields a [pointer without provenance][without_provenance_mut], which is undefined
+/// behavior to dereference. To properly restore the lost information and obtain a
+/// dereferenceable pointer, use [`with_addr`][pointer::with_addr] or
+/// [`map_addr`][pointer::map_addr].
///
/// If using those APIs is not possible because there is no way to preserve a pointer with the
/// required provenance, then Strict Provenance might not be for you. Use pointer-integer casts

View File

@@ -4,8 +4,7 @@ use crate::hash;
use crate::intrinsics;
use crate::intrinsics::assert_unsafe_precondition;
use crate::marker::Unsize;
-use crate::mem::SizedTypeProperties;
-use crate::mem::{self, MaybeUninit};
+use crate::mem::{MaybeUninit, SizedTypeProperties};
use crate::num::{NonZero, NonZeroUsize};
use crate::ops::{CoerceUnsized, DispatchFromDyn};
use crate::ptr;
@@ -114,7 +113,7 @@ impl<T: Sized> NonNull<T> {
// to a *mut T. Therefore, `ptr` is not null and the conditions for
// calling new_unchecked() are respected.
unsafe {
-let ptr = crate::ptr::invalid_mut::<T>(mem::align_of::<T>());
+let ptr = crate::ptr::dangling_mut::<T>();
NonNull::new_unchecked(ptr)
}
}
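A small sketch (mine, not from the diff; nightly with the strict_provenance feature) of the equivalence this change relies on:

    #![feature(strict_provenance)]
    use std::mem;
    use std::ptr::{self, NonNull};

    fn main() {
        // NonNull::<T>::dangling() is now built from ptr::dangling_mut::<T>():
        // a well-aligned address (the alignment of T) with no provenance.
        let a: NonNull<u64> = NonNull::dangling();
        let b: *mut u64 = ptr::dangling_mut();
        assert_eq!(a.as_ptr().addr(), b.addr());
        assert_eq!(b.addr(), mem::align_of::<u64>());
    }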

View File

@@ -12,7 +12,7 @@ use crate::iter::{
use crate::marker::PhantomData;
use crate::mem::{self, SizedTypeProperties};
use crate::num::NonZero;
-use crate::ptr::{self, invalid, invalid_mut, NonNull};
+use crate::ptr::{self, without_provenance, without_provenance_mut, NonNull};
use super::{from_raw_parts, from_raw_parts_mut};
@@ -67,7 +67,7 @@ pub struct Iter<'a, T: 'a> {
ptr: NonNull<T>,
/// For non-ZSTs, the non-null pointer to the past-the-end element.
///
-/// For ZSTs, this is `ptr::invalid(len)`.
+/// For ZSTs, this is `ptr::dangling(len)`.
end_or_len: *const T,
_marker: PhantomData<&'a T>,
}
@@ -91,7 +91,8 @@ impl<'a, T> Iter<'a, T> {
let ptr: NonNull<T> = NonNull::from(slice).cast();
// SAFETY: Similar to `IterMut::new`.
unsafe {
-let end_or_len = if T::IS_ZST { invalid(len) } else { ptr.as_ptr().add(len) };
+let end_or_len =
+if T::IS_ZST { without_provenance(len) } else { ptr.as_ptr().add(len) };
Self { ptr, end_or_len, _marker: PhantomData }
}
@@ -189,7 +190,7 @@ pub struct IterMut<'a, T: 'a> {
ptr: NonNull<T>,
/// For non-ZSTs, the non-null pointer to the past-the-end element.
///
-/// For ZSTs, this is `ptr::invalid_mut(len)`.
+/// For ZSTs, this is `ptr::without_provenance_mut(len)`.
end_or_len: *mut T,
_marker: PhantomData<&'a mut T>,
}
@@ -228,7 +229,8 @@ impl<'a, T> IterMut<'a, T> {
// See the `next_unchecked!` and `is_empty!` macros as well as the
// `post_inc_start` method for more information.
unsafe {
-let end_or_len = if T::IS_ZST { invalid_mut(len) } else { ptr.as_ptr().add(len) };
+let end_or_len =
+if T::IS_ZST { without_provenance_mut(len) } else { ptr.as_ptr().add(len) };
Self { ptr, end_or_len, _marker: PhantomData }
}
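A standalone sketch of the ZST encoding used above (my own example, nightly with the strict_provenance feature): for zero-sized elements the iterator has nothing to point at, so it stores the remaining length as the address of a provenance-free pointer.

    #![feature(strict_provenance)]
    use std::ptr;

    fn main() {
        let len = 7usize;
        // Encode: a pointer whose address *is* the length, with no provenance.
        let end_or_len: *const () = ptr::without_provenance(len);
        // Decode: only the address is read back; the pointer is never dereferenced.
        assert_eq!(end_or_len.addr(), 7);
    }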

View File

@@ -1842,7 +1842,7 @@ impl<T> AtomicPtr<T> {
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub fn fetch_byte_add(&self, val: usize, order: Ordering) -> *mut T {
// SAFETY: data races are prevented by atomic intrinsics.
-unsafe { atomic_add(self.p.get(), core::ptr::invalid_mut(val), order).cast() }
+unsafe { atomic_add(self.p.get(), core::ptr::without_provenance_mut(val), order).cast() }
}
/// Offsets the pointer's address by subtracting `val` *bytes*, returning the
@@ -1867,7 +1867,7 @@ impl<T> AtomicPtr<T> {
/// #![feature(strict_provenance_atomic_ptr, strict_provenance)]
/// use core::sync::atomic::{AtomicPtr, Ordering};
///
-/// let atom = AtomicPtr::<i64>::new(core::ptr::invalid_mut(1));
+/// let atom = AtomicPtr::<i64>::new(core::ptr::without_provenance_mut(1));
/// assert_eq!(atom.fetch_byte_sub(1, Ordering::Relaxed).addr(), 1);
/// assert_eq!(atom.load(Ordering::Relaxed).addr(), 0);
/// ```
@@ -1877,7 +1877,7 @@ impl<T> AtomicPtr<T> {
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub fn fetch_byte_sub(&self, val: usize, order: Ordering) -> *mut T {
// SAFETY: data races are prevented by atomic intrinsics.
-unsafe { atomic_sub(self.p.get(), core::ptr::invalid_mut(val), order).cast() }
+unsafe { atomic_sub(self.p.get(), core::ptr::without_provenance_mut(val), order).cast() }
}
/// Performs a bitwise "or" operation on the address of the current pointer,
@@ -1928,7 +1928,7 @@ impl<T> AtomicPtr<T> {
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub fn fetch_or(&self, val: usize, order: Ordering) -> *mut T {
// SAFETY: data races are prevented by atomic intrinsics.
-unsafe { atomic_or(self.p.get(), core::ptr::invalid_mut(val), order).cast() }
+unsafe { atomic_or(self.p.get(), core::ptr::without_provenance_mut(val), order).cast() }
}
/// Performs a bitwise "and" operation on the address of the current
@@ -1978,7 +1978,7 @@ impl<T> AtomicPtr<T> {
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub fn fetch_and(&self, val: usize, order: Ordering) -> *mut T {
// SAFETY: data races are prevented by atomic intrinsics.
-unsafe { atomic_and(self.p.get(), core::ptr::invalid_mut(val), order).cast() }
+unsafe { atomic_and(self.p.get(), core::ptr::without_provenance_mut(val), order).cast() }
}
/// Performs a bitwise "xor" operation on the address of the current
@@ -2026,7 +2026,7 @@ impl<T> AtomicPtr<T> {
#[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
pub fn fetch_xor(&self, val: usize, order: Ordering) -> *mut T {
// SAFETY: data races are prevented by atomic intrinsics.
-unsafe { atomic_xor(self.p.get(), core::ptr::invalid_mut(val), order).cast() }
+unsafe { atomic_xor(self.p.get(), core::ptr::without_provenance_mut(val), order).cast() }
}
/// Returns a mutable pointer to the underlying pointer.
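A sketch of the tagged-pointer pattern these methods serve (my own example, not from the diff; nightly with the feature gates shown in the doc test above):

    #![feature(strict_provenance, strict_provenance_atomic_ptr)]
    use std::ptr;
    use std::sync::atomic::{AtomicPtr, Ordering};

    fn main() {
        // Start from an address-only pointer; the atomic bit operations below
        // only ever touch the address, never the (absent) provenance.
        let atom = AtomicPtr::<u8>::new(ptr::without_provenance_mut(0x100));

        atom.fetch_or(0b1, Ordering::Relaxed); // set a low tag bit
        assert_eq!(atom.load(Ordering::Relaxed).addr(), 0x101);

        atom.fetch_and(!0b1, Ordering::Relaxed); // clear it again
        assert_eq!(atom.load(Ordering::Relaxed).addr(), 0x100);
    }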

View File

@@ -10,7 +10,7 @@ fn const_unchecked_layout() {
const DANGLING: NonNull<u8> = LAYOUT.dangling();
assert_eq!(LAYOUT.size(), SIZE);
assert_eq!(LAYOUT.align(), ALIGN);
-assert_eq!(Some(DANGLING), NonNull::new(ptr::invalid_mut(ALIGN)));
+assert_eq!(Some(DANGLING), NonNull::new(ptr::without_provenance_mut(ALIGN)));
}
#[test]

View File

@@ -87,10 +87,10 @@ fn test_writer_hasher() {
let cs: Rc<[u8]> = Rc::new([1, 2, 3]);
assert_eq!(hash(&cs), 9);
-let ptr = ptr::invalid::<i32>(5_usize);
+let ptr = ptr::without_provenance::<i32>(5_usize);
assert_eq!(hash(&ptr), 5);
-let ptr = ptr::invalid_mut::<i32>(5_usize);
+let ptr = ptr::without_provenance_mut::<i32>(5_usize);
assert_eq!(hash(&ptr), 5);
if cfg!(miri) {

View File

@@ -350,9 +350,9 @@ fn align_offset_zst() {
// all, because no amount of elements will align the pointer.
let mut p = 1;
while p < 1024 {
-assert_eq!(ptr::invalid::<()>(p).align_offset(p), 0);
+assert_eq!(ptr::without_provenance::<()>(p).align_offset(p), 0);
if p != 1 {
-assert_eq!(ptr::invalid::<()>(p + 1).align_offset(p), !0);
+assert_eq!(ptr::without_provenance::<()>(p + 1).align_offset(p), !0);
}
p = (p + 1).next_power_of_two();
}
@@ -365,9 +365,9 @@ fn align_offset_zst_const() {
// all, because no amount of elements will align the pointer.
let mut p = 1;
while p < 1024 {
-assert!(ptr::invalid::<()>(p).align_offset(p) == 0);
+assert!(ptr::without_provenance::<()>(p).align_offset(p) == 0);
if p != 1 {
-assert!(ptr::invalid::<()>(p + 1).align_offset(p) == !0);
+assert!(ptr::without_provenance::<()>(p + 1).align_offset(p) == !0);
}
p = (p + 1).next_power_of_two();
}
@@ -384,7 +384,7 @@ fn align_offset_stride_one() {
let expected = ptr % align;
let offset = if expected == 0 { 0 } else { align - expected };
assert_eq!(
-ptr::invalid::<u8>(ptr).align_offset(align),
+ptr::without_provenance::<u8>(ptr).align_offset(align),
offset,
"ptr = {}, align = {}, size = 1",
ptr,
@@ -406,7 +406,7 @@ fn align_offset_stride_one_const() {
while ptr < 2 * align {
let expected = ptr % align;
let offset = if expected == 0 { 0 } else { align - expected };
-assert!(ptr::invalid::<u8>(ptr).align_offset(align) == offset);
+assert!(ptr::without_provenance::<u8>(ptr).align_offset(align) == offset);
ptr += 1;
}
align = (align + 1).next_power_of_two();
@@ -452,30 +452,30 @@ fn align_offset_various_strides() {
unsafe {
#[repr(packed)]
struct A3(#[allow(dead_code)] u16, #[allow(dead_code)] u8);
-x |= test_stride::<A3>(ptr::invalid::<A3>(ptr), align);
+x |= test_stride::<A3>(ptr::without_provenance::<A3>(ptr), align);
struct A4(#[allow(dead_code)] u32);
-x |= test_stride::<A4>(ptr::invalid::<A4>(ptr), align);
+x |= test_stride::<A4>(ptr::without_provenance::<A4>(ptr), align);
#[repr(packed)]
struct A5(#[allow(dead_code)] u32, #[allow(dead_code)] u8);
-x |= test_stride::<A5>(ptr::invalid::<A5>(ptr), align);
+x |= test_stride::<A5>(ptr::without_provenance::<A5>(ptr), align);
#[repr(packed)]
struct A6(#[allow(dead_code)] u32, #[allow(dead_code)] u16);
-x |= test_stride::<A6>(ptr::invalid::<A6>(ptr), align);
+x |= test_stride::<A6>(ptr::without_provenance::<A6>(ptr), align);
#[repr(packed)]
struct A7(#[allow(dead_code)] u32, #[allow(dead_code)] u16, #[allow(dead_code)] u8);
-x |= test_stride::<A7>(ptr::invalid::<A7>(ptr), align);
+x |= test_stride::<A7>(ptr::without_provenance::<A7>(ptr), align);
#[repr(packed)]
struct A8(#[allow(dead_code)] u32, #[allow(dead_code)] u32);
-x |= test_stride::<A8>(ptr::invalid::<A8>(ptr), align);
+x |= test_stride::<A8>(ptr::without_provenance::<A8>(ptr), align);
#[repr(packed)]
struct A9(#[allow(dead_code)] u32, #[allow(dead_code)] u32, #[allow(dead_code)] u8);
-x |= test_stride::<A9>(ptr::invalid::<A9>(ptr), align);
+x |= test_stride::<A9>(ptr::without_provenance::<A9>(ptr), align);
#[repr(packed)]
struct A10(
@@ -483,10 +483,10 @@ fn align_offset_various_strides() {
#[allow(dead_code)] u32,
#[allow(dead_code)] u16,
);
-x |= test_stride::<A10>(ptr::invalid::<A10>(ptr), align);
-x |= test_stride::<u32>(ptr::invalid::<u32>(ptr), align);
-x |= test_stride::<u128>(ptr::invalid::<u128>(ptr), align);
+x |= test_stride::<A10>(ptr::without_provenance::<A10>(ptr), align);
+x |= test_stride::<u32>(ptr::without_provenance::<u32>(ptr), align);
+x |= test_stride::<u128>(ptr::without_provenance::<u128>(ptr), align);
}
}
align = (align + 1).next_power_of_two();
@@ -522,18 +522,18 @@ fn align_offset_various_strides_const() {
unsafe {
#[repr(packed)]
struct A3(#[allow(dead_code)] u16, #[allow(dead_code)] u8);
-test_stride::<A3>(ptr::invalid::<A3>(ptr), ptr, align);
+test_stride::<A3>(ptr::without_provenance::<A3>(ptr), ptr, align);
struct A4(#[allow(dead_code)] u32);
-test_stride::<A4>(ptr::invalid::<A4>(ptr), ptr, align);
+test_stride::<A4>(ptr::without_provenance::<A4>(ptr), ptr, align);
#[repr(packed)]
struct A5(#[allow(dead_code)] u32, #[allow(dead_code)] u8);
-test_stride::<A5>(ptr::invalid::<A5>(ptr), ptr, align);
+test_stride::<A5>(ptr::without_provenance::<A5>(ptr), ptr, align);
#[repr(packed)]
struct A6(#[allow(dead_code)] u32, #[allow(dead_code)] u16);
-test_stride::<A6>(ptr::invalid::<A6>(ptr), ptr, align);
+test_stride::<A6>(ptr::without_provenance::<A6>(ptr), ptr, align);
#[repr(packed)]
struct A7(
@@ -541,11 +541,11 @@ fn align_offset_various_strides_const() {
#[allow(dead_code)] u16,
#[allow(dead_code)] u8,
);
-test_stride::<A7>(ptr::invalid::<A7>(ptr), ptr, align);
+test_stride::<A7>(ptr::without_provenance::<A7>(ptr), ptr, align);
#[repr(packed)]
struct A8(#[allow(dead_code)] u32, #[allow(dead_code)] u32);
-test_stride::<A8>(ptr::invalid::<A8>(ptr), ptr, align);
+test_stride::<A8>(ptr::without_provenance::<A8>(ptr), ptr, align);
#[repr(packed)]
struct A9(
@@ -553,7 +553,7 @@ fn align_offset_various_strides_const() {
#[allow(dead_code)] u32,
#[allow(dead_code)] u8,
);
-test_stride::<A9>(ptr::invalid::<A9>(ptr), ptr, align);
+test_stride::<A9>(ptr::without_provenance::<A9>(ptr), ptr, align);
#[repr(packed)]
struct A10(
@@ -561,10 +561,10 @@ fn align_offset_various_strides_const() {
#[allow(dead_code)] u32,
#[allow(dead_code)] u16,
);
-test_stride::<A10>(ptr::invalid::<A10>(ptr), ptr, align);
-test_stride::<u32>(ptr::invalid::<u32>(ptr), ptr, align);
-test_stride::<u128>(ptr::invalid::<u128>(ptr), ptr, align);
+test_stride::<A10>(ptr::without_provenance::<A10>(ptr), ptr, align);
+test_stride::<u32>(ptr::without_provenance::<u32>(ptr), ptr, align);
+test_stride::<u128>(ptr::without_provenance::<u128>(ptr), ptr, align);
}
ptr += 1;
}
@@ -689,7 +689,7 @@ fn align_offset_issue_103361() {
#[cfg(target_pointer_width = "16")]
const SIZE: usize = 1 << 13;
struct HugeSize(#[allow(dead_code)] [u8; SIZE - 1]);
-let _ = ptr::invalid::<HugeSize>(SIZE).align_offset(SIZE);
+let _ = ptr::without_provenance::<HugeSize>(SIZE).align_offset(SIZE);
}
#[test]
@@ -703,9 +703,9 @@ fn align_offset_issue_103361_const() {
struct HugeSize(#[allow(dead_code)] [u8; SIZE - 1]);
const {
-assert!(ptr::invalid::<HugeSize>(SIZE - 1).align_offset(SIZE) == SIZE - 1);
-assert!(ptr::invalid::<HugeSize>(SIZE).align_offset(SIZE) == 0);
-assert!(ptr::invalid::<HugeSize>(SIZE + 1).align_offset(SIZE) == 1);
+assert!(ptr::without_provenance::<HugeSize>(SIZE - 1).align_offset(SIZE) == SIZE - 1);
+assert!(ptr::without_provenance::<HugeSize>(SIZE).align_offset(SIZE) == 0);
+assert!(ptr::without_provenance::<HugeSize>(SIZE + 1).align_offset(SIZE) == 1);
}
}

View File

@@ -3,7 +3,7 @@ use std::task::{RawWaker, RawWakerVTable, Waker};
#[test]
fn test_waker_getters() {
-let raw_waker = RawWaker::new(ptr::invalid_mut(42usize), &WAKER_VTABLE);
+let raw_waker = RawWaker::new(ptr::without_provenance_mut(42usize), &WAKER_VTABLE);
assert_eq!(raw_waker.data() as usize, 42);
assert!(ptr::eq(raw_waker.vtable(), &WAKER_VTABLE));
@@ -15,7 +15,7 @@ fn test_waker_getters() {
}
static WAKER_VTABLE: RawWakerVTable = RawWakerVTable::new(
-|data| RawWaker::new(ptr::invalid_mut(data as usize + 1), &WAKER_VTABLE),
+|data| RawWaker::new(ptr::without_provenance_mut(data as usize + 1), &WAKER_VTABLE),
|_| {},
|_| {},
|_| {},

View File

@@ -467,7 +467,7 @@ impl RawFrame {
match self {
RawFrame::Actual(frame) => frame.ip(),
#[cfg(test)]
-RawFrame::Fake => crate::ptr::invalid_mut(1),
+RawFrame::Fake => crate::ptr::without_provenance_mut(1),
}
}
}

View File

@@ -174,7 +174,10 @@ impl Repr {
pub(super) fn new_os(code: RawOsError) -> Self {
let utagged = ((code as usize) << 32) | TAG_OS;
// Safety: `TAG_OS` is not zero, so the result of the `|` is not 0.
-let res = Self(unsafe { NonNull::new_unchecked(ptr::invalid_mut(utagged)) }, PhantomData);
+let res = Self(
+unsafe { NonNull::new_unchecked(ptr::without_provenance_mut(utagged)) },
+PhantomData,
+);
// quickly smoke-check we encoded the right thing (This generally will
// only run in std's tests, unless the user uses -Zbuild-std)
debug_assert!(
@@ -188,7 +191,10 @@ impl Repr {
pub(super) fn new_simple(kind: ErrorKind) -> Self {
let utagged = ((kind as usize) << 32) | TAG_SIMPLE;
// Safety: `TAG_SIMPLE` is not zero, so the result of the `|` is not 0.
-let res = Self(unsafe { NonNull::new_unchecked(ptr::invalid_mut(utagged)) }, PhantomData);
+let res = Self(
+unsafe { NonNull::new_unchecked(ptr::without_provenance_mut(utagged)) },
+PhantomData,
+);
// quickly smoke-check we encoded the right thing (This generally will
// only run in std's tests, unless the user uses -Zbuild-std)
debug_assert!(
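A standalone sketch of the bit-packing this Repr performs (my own simplified version, 64-bit only; the tag value here is illustrative, not std's private constant):

    #![feature(strict_provenance)]
    use std::ptr::{self, NonNull};

    const TAG_OS: usize = 0b01; // assumed tag value for the sketch

    fn encode_os(code: i32) -> NonNull<()> {
        let utagged = ((code as usize) << 32) | TAG_OS;
        // Safety: TAG_OS is non-zero, so the tagged word is non-zero.
        unsafe { NonNull::new_unchecked(ptr::without_provenance_mut(utagged)) }
    }

    fn decode_os(repr: NonNull<()>) -> i32 {
        (repr.as_ptr().addr() >> 32) as i32
    }

    fn main() {
        assert_eq!(decode_os(encode_os(13)), 13);
    }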

View File

@@ -110,7 +110,7 @@
use crate::cell::OnceCell;
use crate::hint::spin_loop;
use crate::mem;
-use crate::ptr::{self, invalid_mut, null_mut, NonNull};
+use crate::ptr::{self, null_mut, without_provenance_mut, NonNull};
use crate::sync::atomic::{
AtomicBool, AtomicPtr,
Ordering::{AcqRel, Acquire, Relaxed, Release},
@@ -126,7 +126,7 @@ const SPIN_COUNT: usize = 7;
type State = *mut ();
type AtomicState = AtomicPtr<()>;
-const UNLOCKED: State = invalid_mut(0);
+const UNLOCKED: State = without_provenance_mut(0);
const LOCKED: usize = 1;
const QUEUED: usize = 2;
const QUEUE_LOCKED: usize = 4;
@@ -144,7 +144,7 @@ fn write_lock(state: State) -> Option<State> {
#[inline]
fn read_lock(state: State) -> Option<State> {
if state.addr() & QUEUED == 0 && state.addr() != LOCKED {
-Some(invalid_mut(state.addr().checked_add(SINGLE)? | LOCKED))
+Some(without_provenance_mut(state.addr().checked_add(SINGLE)? | LOCKED))
} else {
None
}
@@ -405,7 +405,7 @@ impl RwLock {
match self.state.fetch_update(Release, Acquire, |state| {
if state.addr() & QUEUED == 0 {
let count = state.addr() - (SINGLE | LOCKED);
-Some(if count > 0 { invalid_mut(count | LOCKED) } else { UNLOCKED })
+Some(if count > 0 { without_provenance_mut(count | LOCKED) } else { UNLOCKED })
} else {
None
}
@@ -444,7 +444,7 @@ impl RwLock {
#[inline]
pub unsafe fn write_unlock(&self) {
if let Err(state) =
-self.state.compare_exchange(invalid_mut(LOCKED), UNLOCKED, Release, Relaxed)
+self.state.compare_exchange(without_provenance_mut(LOCKED), UNLOCKED, Release, Relaxed)
{
// SAFETY:
// Since other threads cannot acquire the lock, the state can only
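A compact sketch of the state word this lock packs into its AtomicPtr (my own example; LOCKED comes from the constants above, while the per-reader increment SINGLE is assumed to be 8 here):

    #![feature(strict_provenance)]
    use std::ptr;

    const LOCKED: usize = 1;
    const SINGLE: usize = 8; // assumption for illustration

    fn main() {
        // The state is a pointer used as a bit-packed integer: low bits are flags,
        // the rest counts readers, so new states are built with without_provenance_mut.
        let unlocked: *mut () = ptr::without_provenance_mut(0);
        let one_reader: *mut () = ptr::without_provenance_mut((unlocked.addr() + SINGLE) | LOCKED);
        assert_eq!(one_reader.addr() & LOCKED, LOCKED);
        assert_eq!(one_reader.addr() >> 3, 1); // one reader counted, given SINGLE == 8
    }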

View File

@@ -176,7 +176,7 @@ unsafe extern "C" fn destroy_value<T: 'static>(ptr: *mut u8) {
if let Err(_) = panic::catch_unwind(|| unsafe {
let ptr = Box::from_raw(ptr as *mut Value<T>);
let key = ptr.key;
-key.os.set(ptr::invalid_mut(1));
+key.os.set(ptr::without_provenance_mut(1));
drop(ptr);
key.os.set(ptr::null_mut());
}) {

View File

@@ -53,7 +53,7 @@ pub fn futex_wait(futex: &AtomicU32, expected: u32, timeout: Option<Duration>) -
futex as *const AtomicU32 as *mut _,
libc::UMTX_OP_WAIT_UINT_PRIVATE,
expected as libc::c_ulong,
-crate::ptr::invalid_mut(umtx_timeout_size),
+crate::ptr::without_provenance_mut(umtx_timeout_size),
umtx_timeout_ptr as *mut _,
)
} else if #[cfg(any(target_os = "linux", target_os = "android"))] {

View File

@@ -25,7 +25,7 @@ pub fn current() -> ThreadId {
#[inline]
pub fn park(hint: usize) {
unsafe {
-___lwp_park60(0, 0, ptr::null_mut(), 0, ptr::invalid(hint), ptr::null());
+___lwp_park60(0, 0, ptr::null_mut(), 0, ptr::without_provenance(hint), ptr::null());
}
}
@@ -40,13 +40,20 @@ pub fn park_timeout(dur: Duration, hint: usize) {
// Timeout needs to be mutable since it is modified on NetBSD 9.0 and
// above.
unsafe {
-___lwp_park60(CLOCK_MONOTONIC, 0, &mut timeout, 0, ptr::invalid(hint), ptr::null());
+___lwp_park60(
+CLOCK_MONOTONIC,
+0,
+&mut timeout,
+0,
+ptr::without_provenance(hint),
+ptr::null(),
+);
}
}
#[inline]
pub fn unpark(tid: ThreadId, hint: usize) {
unsafe {
-_lwp_unpark(tid, ptr::invalid(hint));
+_lwp_unpark(tid, ptr::without_provenance(hint));
}
}

View File

@@ -80,7 +80,11 @@ pub(crate) struct DlsymWeak<F> {
impl<F> DlsymWeak<F> {
pub(crate) const fn new(name: &'static str) -> Self {
-DlsymWeak { name, func: AtomicPtr::new(ptr::invalid_mut(1)), _marker: PhantomData }
+DlsymWeak {
+name,
+func: AtomicPtr::new(ptr::without_provenance_mut(1)),
+_marker: PhantomData,
+}
}
#[inline]

View File

@@ -47,7 +47,7 @@ pub use FD_SET as fd_set;
pub use LINGER as linger;
pub use TIMEVAL as timeval;
-pub const INVALID_HANDLE_VALUE: HANDLE = ::core::ptr::invalid_mut(-1i32 as _);
+pub const INVALID_HANDLE_VALUE: HANDLE = ::core::ptr::without_provenance_mut(-1i32 as _);
// https://learn.microsoft.com/en-us/cpp/c-runtime-library/exit-success-exit-failure?view=msvc-170
pub const EXIT_SUCCESS: u32 = 0;

View File

@@ -327,7 +327,7 @@ fn home_dir_crt() -> Option<PathBuf> {
super::fill_utf16_buf(
|buf, mut sz| {
match c::GetUserProfileDirectoryW(
-ptr::invalid_mut(CURRENT_PROCESS_TOKEN),
+ptr::without_provenance_mut(CURRENT_PROCESS_TOKEN),
buf,
&mut sz,
) {

View File

@@ -13,8 +13,8 @@ fn smoke() {
unsafe {
assert!(K1.get().is_null());
assert!(K2.get().is_null());
-K1.set(ptr::invalid_mut(1));
-K2.set(ptr::invalid_mut(2));
+K1.set(ptr::without_provenance_mut(1));
+K2.set(ptr::without_provenance_mut(2));
assert_eq!(K1.get() as usize, 1);
assert_eq!(K2.get() as usize, 2);
}

View File

@@ -220,7 +220,7 @@ impl Parker {
}
fn keyed_event_handle() -> c::HANDLE {
-const INVALID: c::HANDLE = ptr::invalid_mut(!0);
+const INVALID: c::HANDLE = ptr::without_provenance_mut(!0);
static HANDLE: AtomicPtr<crate::ffi::c_void> = AtomicPtr::new(INVALID);
match HANDLE.load(Relaxed) {
INVALID => {

View File

@@ -218,7 +218,7 @@ pub fn output_filename(
#[cfg(all(target_vendor = "fortanix", target_env = "sgx"))]
pub fn set_image_base() {
let image_base = crate::os::fortanix_sgx::mem::image_base();
-backtrace_rs::set_image_base(crate::ptr::invalid_mut(image_base as _));
+backtrace_rs::set_image_base(crate::ptr::without_provenance_mut(image_base as _));
}
#[cfg(not(all(target_vendor = "fortanix", target_env = "sgx")))]

View File

@@ -110,7 +110,7 @@ impl Once {
#[inline]
#[rustc_const_stable(feature = "const_once_new", since = "1.32.0")]
pub const fn new() -> Once {
-Once { state_and_queue: AtomicPtr::new(ptr::invalid_mut(INCOMPLETE)) }
+Once { state_and_queue: AtomicPtr::new(ptr::without_provenance_mut(INCOMPLETE)) }
}
#[inline]
@@ -158,7 +158,7 @@ impl Once {
// Try to register this thread as the one RUNNING.
let exchange_result = self.state_and_queue.compare_exchange(
state_and_queue,
-ptr::invalid_mut(RUNNING),
+ptr::without_provenance_mut(RUNNING),
Ordering::Acquire,
Ordering::Acquire,
);
@@ -170,14 +170,14 @@ impl Once {
// wake them up on drop.
let mut waiter_queue = WaiterQueue {
state_and_queue: &self.state_and_queue,
-set_state_on_drop_to: ptr::invalid_mut(POISONED),
+set_state_on_drop_to: ptr::without_provenance_mut(POISONED),
};
// Run the initialization function, letting it know if we're
// poisoned or not.
let init_state = public::OnceState {
inner: OnceState {
poisoned: state_and_queue.addr() == POISONED,
-set_state_on_drop_to: Cell::new(ptr::invalid_mut(COMPLETE)),
+set_state_on_drop_to: Cell::new(ptr::without_provenance_mut(COMPLETE)),
},
};
init(&init_state);
@@ -289,6 +289,6 @@ impl OnceState {
#[inline]
pub fn poison(&self) {
-self.set_state_on_drop_to.set(ptr::invalid_mut(POISONED));
+self.set_state_on_drop_to.set(ptr::without_provenance_mut(POISONED));
}
}

View File

@@ -9,8 +9,8 @@ fn statik() {
unsafe {
assert!(K1.get().is_null());
assert!(K2.get().is_null());
-K1.set(ptr::invalid_mut(1));
-K2.set(ptr::invalid_mut(2));
+K1.set(ptr::without_provenance_mut(1));
+K2.set(ptr::without_provenance_mut(2));
assert_eq!(K1.get() as usize, 1);
assert_eq!(K2.get() as usize, 2);
}

View File

@@ -8,7 +8,7 @@ use std::ptr::{self, addr_of_mut};
// (This test relies on the `deref_copy` pass that lowers `**ptr` to materialize the intermediate pointer.)
fn main() {
-let mut inner = ptr::invalid::<i32>(24);
+let mut inner = ptr::without_provenance::<i32>(24);
let outer = addr_of_mut!(inner).cast::<Box<i32>>();
// Now `outer` is a pointer to a dangling reference.
// Deref'ing that should be UB.

View File

@@ -8,7 +8,7 @@ use std::ptr::{self, addr_of_mut};
// (This test relies on the `deref_copy` pass that lowers `**ptr` to materialize the intermediate pointer.)
fn main() {
-let mut inner = ptr::invalid::<i32>(24);
+let mut inner = ptr::without_provenance::<i32>(24);
let outer = addr_of_mut!(inner).cast::<&'static mut i32>();
// Now `outer` is a pointer to a dangling reference.
// Deref'ing that should be UB.

View File

@ -1,9 +1,9 @@
#![feature(strict_provenance, exposed_provenance)] #![feature(strict_provenance, exposed_provenance)]
// Ensure that a `ptr::invalid` ptr is truly invalid. // Ensure that a `ptr::without_provenance` ptr is truly invalid.
fn main() { fn main() {
let x = 42; let x = 42;
let xptr = &x as *const i32; let xptr = &x as *const i32;
let xptr_invalid = std::ptr::invalid::<i32>(xptr.expose_addr()); let xptr_invalid = std::ptr::without_provenance::<i32>(xptr.expose_addr());
let _val = unsafe { *xptr_invalid }; //~ ERROR: is a dangling pointer let _val = unsafe { *xptr_invalid }; //~ ERROR: is a dangling pointer
} }
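
The point of this test is that `without_provenance` really means it: the address may coincide with a live allocation, but the resulting pointer carries no provenance and must not be dereferenced. A small contrast with `with_addr`, the strict-provenance way to transplant an address while keeping provenance (illustrative only; both APIs appear elsewhere in this diff):

use std::ptr;

fn main() {
    let x = 42i32;
    let xptr = &x as *const i32;

    // Same address as `x`, but no provenance: comparing or printing it is
    // fine, dereferencing it is UB even though `x` is alive.
    let no_prov = ptr::without_provenance::<i32>(xptr.addr());
    assert_eq!(no_prov.addr(), xptr.addr());

    // To rebuild a dereferenceable pointer at some address, derive it from a
    // pointer that already has the right provenance.
    let derived = xptr.with_addr(xptr.addr());
    assert_eq!(unsafe { *derived }, 42);
}
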

View File

@ -4,7 +4,7 @@
fn main() { fn main() {
let x = 22; let x = 22;
let ptr = &x as *const _ as *const u8; let ptr = &x as *const _ as *const u8;
let roundtrip = std::ptr::invalid::<u8>(ptr as usize); let roundtrip = std::ptr::without_provenance::<u8>(ptr as usize);
// Not even offsetting this is allowed. // Not even offsetting this is allowed.
let _ = unsafe { roundtrip.offset(1) }; //~ERROR: is a dangling pointer let _ = unsafe { roundtrip.offset(1) }; //~ERROR: is a dangling pointer
} }

View File

@ -71,7 +71,7 @@ fn test_mmap<Offset: Default>(
let ptr = unsafe { let ptr = unsafe {
mmap( mmap(
ptr::invalid_mut(page_size * 64), ptr::without_provenance_mut(page_size * 64),
page_size, page_size,
libc::PROT_READ | libc::PROT_WRITE, libc::PROT_READ | libc::PROT_WRITE,
// We don't support MAP_FIXED // We don't support MAP_FIXED
@ -114,13 +114,13 @@ fn test_mmap<Offset: Default>(
assert_eq!(ptr, libc::MAP_FAILED); assert_eq!(ptr, libc::MAP_FAILED);
// We report an error when trying to munmap an address which is not a multiple of the page size // We report an error when trying to munmap an address which is not a multiple of the page size
let res = unsafe { libc::munmap(ptr::invalid_mut(1), page_size) }; let res = unsafe { libc::munmap(ptr::without_provenance_mut(1), page_size) };
assert_eq!(res, -1); assert_eq!(res, -1);
assert_eq!(Error::last_os_error().raw_os_error().unwrap(), libc::EINVAL); assert_eq!(Error::last_os_error().raw_os_error().unwrap(), libc::EINVAL);
// We report an error when trying to munmap a length that cannot be rounded up to a multiple of // We report an error when trying to munmap a length that cannot be rounded up to a multiple of
// the page size. // the page size.
let res = unsafe { libc::munmap(ptr::invalid_mut(page_size), usize::MAX - 1) }; let res = unsafe { libc::munmap(ptr::without_provenance_mut(page_size), usize::MAX - 1) };
assert_eq!(res, -1); assert_eq!(res, -1);
assert_eq!(Error::last_os_error().raw_os_error().unwrap(), libc::EINVAL); assert_eq!(Error::last_os_error().raw_os_error().unwrap(), libc::EINVAL);
} }
@ -156,7 +156,7 @@ fn test_mremap() {
// Test all of our error conditions // Test all of our error conditions
// Not aligned // Not aligned
let ptr = let ptr =
unsafe { libc::mremap(ptr::invalid_mut(1), page_size, page_size, libc::MREMAP_MAYMOVE) }; unsafe { libc::mremap(ptr::without_provenance_mut(1), page_size, page_size, libc::MREMAP_MAYMOVE) };
assert_eq!(ptr, libc::MAP_FAILED); assert_eq!(ptr, libc::MAP_FAILED);
assert_eq!(Error::last_os_error().raw_os_error().unwrap(), libc::EINVAL); assert_eq!(Error::last_os_error().raw_os_error().unwrap(), libc::EINVAL);
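
The mmap/munmap/mremap tests only ever hand these pointers to C as addresses; nothing on the Rust side dereferences them, so a provenance-free pointer is the honest spelling. A self-contained sketch, with a hypothetical `is_page_aligned` standing in for the address check a call like `munmap` performs:

use std::ffi::c_void;
use std::ptr;

// Hypothetical stand-in for a C API that only inspects the address of its
// pointer argument (e.g. checks page alignment) and never dereferences it.
fn is_page_aligned(addr: *mut c_void, page_size: usize) -> bool {
    addr.addr() % page_size == 0
}

fn main() {
    let page_size = 4096;

    // Deliberately misaligned, as in the munmap error path above.
    let bogus = ptr::without_provenance_mut::<c_void>(1);
    assert!(!is_page_aligned(bogus, page_size));

    // A page-aligned "hint" address, as passed to mmap above.
    let hint = ptr::without_provenance_mut::<c_void>(page_size * 64);
    assert!(is_page_aligned(hint, page_size));
}
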

View File

@ -58,7 +58,7 @@ fn main() {
// Non-power of 2 align // Non-power of 2 align
unsafe { unsafe {
let mut ptr: *mut libc::c_void = ptr::invalid_mut(0x1234567); let mut ptr: *mut libc::c_void = ptr::without_provenance_mut(0x1234567);
let align = 15; let align = 15;
let size = 8; let size = 8;
assert_eq!(libc::posix_memalign(&mut ptr, align, size), libc::EINVAL); assert_eq!(libc::posix_memalign(&mut ptr, align, size), libc::EINVAL);
@ -70,7 +70,7 @@ fn main() {
// Too small align (smaller than ptr) // Too small align (smaller than ptr)
unsafe { unsafe {
let mut ptr: *mut libc::c_void = ptr::invalid_mut(0x1234567); let mut ptr: *mut libc::c_void = ptr::without_provenance_mut(0x1234567);
let align = std::mem::size_of::<usize>() / 2; let align = std::mem::size_of::<usize>() / 2;
let size = 8; let size = 8;
assert_eq!(libc::posix_memalign(&mut ptr, align, size), libc::EINVAL); assert_eq!(libc::posix_memalign(&mut ptr, align, size), libc::EINVAL);

View File

@ -100,7 +100,7 @@ fn huge_align() {
#[cfg(target_pointer_width = "16")] #[cfg(target_pointer_width = "16")]
const SIZE: usize = 1 << 13; const SIZE: usize = 1 << 13;
struct HugeSize(#[allow(dead_code)] [u8; SIZE - 1]); struct HugeSize(#[allow(dead_code)] [u8; SIZE - 1]);
let _ = std::ptr::invalid::<HugeSize>(SIZE).align_offset(SIZE); let _ = std::ptr::without_provenance::<HugeSize>(SIZE).align_offset(SIZE);
} }
// This shows that we cannot store the promised alignment info in `AllocExtra`, // This shows that we cannot store the promised alignment info in `AllocExtra`,

View File

@ -137,7 +137,7 @@ fn atomic_ptr() {
let ptr = AtomicPtr::<i32>::new(ptr::null_mut()); let ptr = AtomicPtr::<i32>::new(ptr::null_mut());
assert!(ptr.load(Relaxed).addr() == 0); assert!(ptr.load(Relaxed).addr() == 0);
ptr.store(ptr::invalid_mut(13), SeqCst); ptr.store(ptr::without_provenance_mut(13), SeqCst);
assert!(ptr.swap(x, Relaxed).addr() == 13); assert!(ptr.swap(x, Relaxed).addr() == 13);
unsafe { assert!(*ptr.load(Acquire) == 0) }; unsafe { assert!(*ptr.load(Acquire) == 0) };
@ -145,7 +145,7 @@ fn atomic_ptr() {
assert_eq!( assert_eq!(
ptr.compare_exchange( ptr.compare_exchange(
(&mut 0 as *mut i32).with_addr(x.addr()), (&mut 0 as *mut i32).with_addr(x.addr()),
ptr::invalid_mut(0), ptr::without_provenance_mut(0),
SeqCst, SeqCst,
SeqCst SeqCst
) )
@ -156,7 +156,7 @@ fn atomic_ptr() {
assert_eq!( assert_eq!(
ptr.compare_exchange( ptr.compare_exchange(
(&mut 0 as *mut i32).with_addr(x.addr()), (&mut 0 as *mut i32).with_addr(x.addr()),
ptr::invalid_mut(0), ptr::without_provenance_mut(0),
SeqCst, SeqCst,
SeqCst SeqCst
) )

View File

@ -35,12 +35,12 @@ fn assign_overlapping() {
fn deref_invalid() { fn deref_invalid() {
unsafe { unsafe {
// `addr_of!(*ptr)` is never UB. // `addr_of!(*ptr)` is never UB.
let _val = addr_of!(*ptr::invalid::<i32>(0)); let _val = addr_of!(*ptr::without_provenance::<i32>(0));
let _val = addr_of!(*ptr::invalid::<i32>(1)); // not aligned let _val = addr_of!(*ptr::without_provenance::<i32>(1)); // not aligned
// Similarly, just mentioning the place is fine. // Similarly, just mentioning the place is fine.
let _ = *ptr::invalid::<i32>(0); let _ = *ptr::without_provenance::<i32>(0);
let _ = *ptr::invalid::<i32>(1); let _ = *ptr::without_provenance::<i32>(1);
} }
} }
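
The rule this test exercises: `addr_of!(*p)` and merely mentioning the place `*p` compute an address without loading from it, so they are fine even on a provenance-free (and here null or misaligned) pointer; only an actual read or write would be UB. The same thing in isolation:

use std::ptr::{self, addr_of};

fn main() {
    unsafe {
        // Taking the address of the place never loads from it, so neither a
        // null nor a misaligned address is a problem here.
        let p0 = addr_of!(*ptr::without_provenance::<i32>(0));
        let p1 = addr_of!(*ptr::without_provenance::<i32>(1));
        assert_eq!(p0.addr(), 0);
        assert_eq!(p1.addr(), 1);

        // Reading through either pointer, by contrast, would be UB:
        // let _oops = *ptr::without_provenance::<i32>(1);
    }
}
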

View File

@ -29,7 +29,7 @@ fn slice_of_zst() {
// In a slice of zero-size elements the pointer is meaningless. // In a slice of zero-size elements the pointer is meaningless.
// Ensure iteration still works even if the pointer is at the end of the address space. // Ensure iteration still works even if the pointer is at the end of the address space.
let slice: &[()] = unsafe { slice::from_raw_parts(ptr::invalid(-5isize as usize), 10) }; let slice: &[()] = unsafe { slice::from_raw_parts(ptr::without_provenance(-5isize as usize), 10) };
assert_eq!(slice.len(), 10); assert_eq!(slice.len(), 10);
assert_eq!(slice.iter().count(), 10); assert_eq!(slice.iter().count(), 10);
@ -43,7 +43,7 @@ fn slice_of_zst() {
// Test mutable iterators as well // Test mutable iterators as well
let slice: &mut [()] = let slice: &mut [()] =
unsafe { slice::from_raw_parts_mut(ptr::invalid_mut(-5isize as usize), 10) }; unsafe { slice::from_raw_parts_mut(ptr::without_provenance_mut(-5isize as usize), 10) };
assert_eq!(slice.len(), 10); assert_eq!(slice.len(), 10);
assert_eq!(slice.iter_mut().count(), 10); assert_eq!(slice.iter_mut().count(), 10);
@ -263,7 +263,7 @@ fn test_for_invalidated_pointers() {
fn large_raw_slice() { fn large_raw_slice() {
let size = isize::MAX as usize; let size = isize::MAX as usize;
// Creating a raw slice of size isize::MAX and asking for its size is okay. // Creating a raw slice of size isize::MAX and asking for its size is okay.
let s = std::ptr::slice_from_raw_parts(ptr::invalid::<u8>(1), size); let s = std::ptr::slice_from_raw_parts(ptr::without_provenance::<u8>(1), size);
assert_eq!(size, unsafe { std::mem::size_of_val_raw(s) }); assert_eq!(size, unsafe { std::mem::size_of_val_raw(s) });
} }

View File

@ -38,7 +38,7 @@ fn invalid_match() {
fn dangling_let() { fn dangling_let() {
unsafe { unsafe {
let ptr = ptr::invalid::<bool>(0x40); let ptr = ptr::without_provenance::<bool>(0x40);
let _ = *ptr; let _ = *ptr;
} }
} }
@ -54,7 +54,7 @@ fn invalid_let() {
// Adding a type annotation used to change how MIR is generated, make sure we cover both cases. // Adding a type annotation used to change how MIR is generated, make sure we cover both cases.
fn dangling_let_type_annotation() { fn dangling_let_type_annotation() {
unsafe { unsafe {
let ptr = ptr::invalid::<bool>(0x40); let ptr = ptr::without_provenance::<bool>(0x40);
let _: bool = *ptr; let _: bool = *ptr;
} }
} }

View File

@ -17,26 +17,28 @@
scope 4 (inlined Unique::<[bool; 0]>::dangling) { scope 4 (inlined Unique::<[bool; 0]>::dangling) {
let mut _5: std::ptr::NonNull<[bool; 0]>; let mut _5: std::ptr::NonNull<[bool; 0]>;
scope 5 (inlined NonNull::<[bool; 0]>::dangling) { scope 5 (inlined NonNull::<[bool; 0]>::dangling) {
let mut _7: usize;
scope 6 { scope 6 {
let _6: *mut [bool; 0]; let _6: *mut [bool; 0];
scope 7 { scope 7 {
debug ptr => _6; debug ptr => _6;
scope 11 (inlined NonNull::<[bool; 0]>::new_unchecked) { scope 12 (inlined NonNull::<[bool; 0]>::new_unchecked) {
debug ptr => _6; debug ptr => _6;
let mut _8: bool; let mut _8: bool;
let _9: (); let _9: ();
let mut _10: *mut (); let mut _10: *mut ();
let mut _11: *const [bool; 0]; let mut _11: *const [bool; 0];
scope 12 { scope 13 {
} }
} }
} }
scope 8 (inlined align_of::<[bool; 0]>) { scope 8 (inlined dangling_mut::<[bool; 0]>) {
} let mut _7: usize;
scope 9 (inlined invalid_mut::<[bool; 0]>) { scope 9 (inlined align_of::<[bool; 0]>) {
debug addr => _7; }
scope 10 { scope 10 (inlined without_provenance_mut::<[bool; 0]>) {
debug addr => _7;
scope 11 {
}
} }
} }
} }
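
What the MIR diff above shows: `NonNull::<T>::dangling` now bottoms out in the new `ptr::dangling_mut::<T>()`, which is simply `without_provenance_mut(align_of::<T>())`: non-null and well-aligned, but never dereferenceable. A sketch of that correspondence (assuming a toolchain where these functions are available; at this commit they are unstable under `#![feature(strict_provenance)]`):

use std::mem::align_of;
use std::ptr::{self, NonNull};

fn main() {
    // A dangling raw pointer: address equals the alignment, no provenance.
    let raw: *mut [bool; 0] = ptr::dangling_mut();
    assert_eq!(raw.addr(), align_of::<[bool; 0]>());

    // `NonNull::dangling` is the same idea, now matching by name.
    let nn: NonNull<[bool; 0]> = NonNull::dangling();
    assert_eq!(nn.addr().get(), raw.addr());
}
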

View File

@ -17,26 +17,28 @@
scope 4 (inlined Unique::<[bool; 0]>::dangling) { scope 4 (inlined Unique::<[bool; 0]>::dangling) {
let mut _5: std::ptr::NonNull<[bool; 0]>; let mut _5: std::ptr::NonNull<[bool; 0]>;
scope 5 (inlined NonNull::<[bool; 0]>::dangling) { scope 5 (inlined NonNull::<[bool; 0]>::dangling) {
let mut _7: usize;
scope 6 { scope 6 {
let _6: *mut [bool; 0]; let _6: *mut [bool; 0];
scope 7 { scope 7 {
debug ptr => _6; debug ptr => _6;
scope 11 (inlined NonNull::<[bool; 0]>::new_unchecked) { scope 12 (inlined NonNull::<[bool; 0]>::new_unchecked) {
debug ptr => _6; debug ptr => _6;
let mut _8: bool; let mut _8: bool;
let _9: (); let _9: ();
let mut _10: *mut (); let mut _10: *mut ();
let mut _11: *const [bool; 0]; let mut _11: *const [bool; 0];
scope 12 { scope 13 {
} }
} }
} }
scope 8 (inlined align_of::<[bool; 0]>) { scope 8 (inlined dangling_mut::<[bool; 0]>) {
} let mut _7: usize;
scope 9 (inlined invalid_mut::<[bool; 0]>) { scope 9 (inlined align_of::<[bool; 0]>) {
debug addr => _7; }
scope 10 { scope 10 (inlined without_provenance_mut::<[bool; 0]>) {
debug addr => _7;
scope 11 {
}
} }
} }
} }

View File

@ -17,26 +17,28 @@
scope 4 (inlined Unique::<[bool; 0]>::dangling) { scope 4 (inlined Unique::<[bool; 0]>::dangling) {
let mut _5: std::ptr::NonNull<[bool; 0]>; let mut _5: std::ptr::NonNull<[bool; 0]>;
scope 5 (inlined NonNull::<[bool; 0]>::dangling) { scope 5 (inlined NonNull::<[bool; 0]>::dangling) {
let mut _7: usize;
scope 6 { scope 6 {
let _6: *mut [bool; 0]; let _6: *mut [bool; 0];
scope 7 { scope 7 {
debug ptr => _6; debug ptr => _6;
scope 11 (inlined NonNull::<[bool; 0]>::new_unchecked) { scope 12 (inlined NonNull::<[bool; 0]>::new_unchecked) {
debug ptr => _6; debug ptr => _6;
let mut _8: bool; let mut _8: bool;
let _9: (); let _9: ();
let mut _10: *mut (); let mut _10: *mut ();
let mut _11: *const [bool; 0]; let mut _11: *const [bool; 0];
scope 12 { scope 13 {
} }
} }
} }
scope 8 (inlined align_of::<[bool; 0]>) { scope 8 (inlined dangling_mut::<[bool; 0]>) {
} let mut _7: usize;
scope 9 (inlined invalid_mut::<[bool; 0]>) { scope 9 (inlined align_of::<[bool; 0]>) {
debug addr => _7; }
scope 10 { scope 10 (inlined without_provenance_mut::<[bool; 0]>) {
debug addr => _7;
scope 11 {
}
} }
} }
} }

View File

@ -17,26 +17,28 @@
scope 4 (inlined Unique::<[bool; 0]>::dangling) { scope 4 (inlined Unique::<[bool; 0]>::dangling) {
let mut _5: std::ptr::NonNull<[bool; 0]>; let mut _5: std::ptr::NonNull<[bool; 0]>;
scope 5 (inlined NonNull::<[bool; 0]>::dangling) { scope 5 (inlined NonNull::<[bool; 0]>::dangling) {
let mut _7: usize;
scope 6 { scope 6 {
let _6: *mut [bool; 0]; let _6: *mut [bool; 0];
scope 7 { scope 7 {
debug ptr => _6; debug ptr => _6;
scope 11 (inlined NonNull::<[bool; 0]>::new_unchecked) { scope 12 (inlined NonNull::<[bool; 0]>::new_unchecked) {
debug ptr => _6; debug ptr => _6;
let mut _8: bool; let mut _8: bool;
let _9: (); let _9: ();
let mut _10: *mut (); let mut _10: *mut ();
let mut _11: *const [bool; 0]; let mut _11: *const [bool; 0];
scope 12 { scope 13 {
} }
} }
} }
scope 8 (inlined align_of::<[bool; 0]>) { scope 8 (inlined dangling_mut::<[bool; 0]>) {
} let mut _7: usize;
scope 9 (inlined invalid_mut::<[bool; 0]>) { scope 9 (inlined align_of::<[bool; 0]>) {
debug addr => _7; }
scope 10 { scope 10 (inlined without_provenance_mut::<[bool; 0]>) {
debug addr => _7;
scope 11 {
}
} }
} }
} }

View File

@ -17,26 +17,28 @@
scope 4 (inlined Unique::<[bool; 0]>::dangling) { scope 4 (inlined Unique::<[bool; 0]>::dangling) {
let mut _5: std::ptr::NonNull<[bool; 0]>; let mut _5: std::ptr::NonNull<[bool; 0]>;
scope 5 (inlined NonNull::<[bool; 0]>::dangling) { scope 5 (inlined NonNull::<[bool; 0]>::dangling) {
let mut _7: usize;
scope 6 { scope 6 {
let _6: *mut [bool; 0]; let _6: *mut [bool; 0];
scope 7 { scope 7 {
debug ptr => _6; debug ptr => _6;
scope 11 (inlined NonNull::<[bool; 0]>::new_unchecked) { scope 12 (inlined NonNull::<[bool; 0]>::new_unchecked) {
debug ptr => _6; debug ptr => _6;
let mut _8: bool; let mut _8: bool;
let _9: (); let _9: ();
let mut _10: *mut (); let mut _10: *mut ();
let mut _11: *const [bool; 0]; let mut _11: *const [bool; 0];
scope 12 { scope 13 {
} }
} }
} }
scope 8 (inlined align_of::<[bool; 0]>) { scope 8 (inlined dangling_mut::<[bool; 0]>) {
} let mut _7: usize;
scope 9 (inlined invalid_mut::<[bool; 0]>) { scope 9 (inlined align_of::<[bool; 0]>) {
debug addr => _7; }
scope 10 { scope 10 (inlined without_provenance_mut::<[bool; 0]>) {
debug addr => _7;
scope 11 {
}
} }
} }
} }

View File

@ -17,26 +17,28 @@
scope 4 (inlined Unique::<[bool; 0]>::dangling) { scope 4 (inlined Unique::<[bool; 0]>::dangling) {
let mut _5: std::ptr::NonNull<[bool; 0]>; let mut _5: std::ptr::NonNull<[bool; 0]>;
scope 5 (inlined NonNull::<[bool; 0]>::dangling) { scope 5 (inlined NonNull::<[bool; 0]>::dangling) {
let mut _7: usize;
scope 6 { scope 6 {
let _6: *mut [bool; 0]; let _6: *mut [bool; 0];
scope 7 { scope 7 {
debug ptr => _6; debug ptr => _6;
scope 11 (inlined NonNull::<[bool; 0]>::new_unchecked) { scope 12 (inlined NonNull::<[bool; 0]>::new_unchecked) {
debug ptr => _6; debug ptr => _6;
let mut _8: bool; let mut _8: bool;
let _9: (); let _9: ();
let mut _10: *mut (); let mut _10: *mut ();
let mut _11: *const [bool; 0]; let mut _11: *const [bool; 0];
scope 12 { scope 13 {
} }
} }
} }
scope 8 (inlined align_of::<[bool; 0]>) { scope 8 (inlined dangling_mut::<[bool; 0]>) {
} let mut _7: usize;
scope 9 (inlined invalid_mut::<[bool; 0]>) { scope 9 (inlined align_of::<[bool; 0]>) {
debug addr => _7; }
scope 10 { scope 10 (inlined without_provenance_mut::<[bool; 0]>) {
debug addr => _7;
scope 11 {
}
} }
} }
} }

View File

@ -17,26 +17,28 @@
scope 4 (inlined Unique::<[bool; 0]>::dangling) { scope 4 (inlined Unique::<[bool; 0]>::dangling) {
let mut _5: std::ptr::NonNull<[bool; 0]>; let mut _5: std::ptr::NonNull<[bool; 0]>;
scope 5 (inlined NonNull::<[bool; 0]>::dangling) { scope 5 (inlined NonNull::<[bool; 0]>::dangling) {
let mut _7: usize;
scope 6 { scope 6 {
let _6: *mut [bool; 0]; let _6: *mut [bool; 0];
scope 7 { scope 7 {
debug ptr => _6; debug ptr => _6;
scope 11 (inlined NonNull::<[bool; 0]>::new_unchecked) { scope 12 (inlined NonNull::<[bool; 0]>::new_unchecked) {
debug ptr => _6; debug ptr => _6;
let mut _8: bool; let mut _8: bool;
let _9: (); let _9: ();
let mut _10: *mut (); let mut _10: *mut ();
let mut _11: *const [bool; 0]; let mut _11: *const [bool; 0];
scope 12 { scope 13 {
} }
} }
} }
scope 8 (inlined align_of::<[bool; 0]>) { scope 8 (inlined dangling_mut::<[bool; 0]>) {
} let mut _7: usize;
scope 9 (inlined invalid_mut::<[bool; 0]>) { scope 9 (inlined align_of::<[bool; 0]>) {
debug addr => _7; }
scope 10 { scope 10 (inlined without_provenance_mut::<[bool; 0]>) {
debug addr => _7;
scope 11 {
}
} }
} }
} }

View File

@ -17,26 +17,28 @@
scope 4 (inlined Unique::<[bool; 0]>::dangling) { scope 4 (inlined Unique::<[bool; 0]>::dangling) {
let mut _5: std::ptr::NonNull<[bool; 0]>; let mut _5: std::ptr::NonNull<[bool; 0]>;
scope 5 (inlined NonNull::<[bool; 0]>::dangling) { scope 5 (inlined NonNull::<[bool; 0]>::dangling) {
let mut _7: usize;
scope 6 { scope 6 {
let _6: *mut [bool; 0]; let _6: *mut [bool; 0];
scope 7 { scope 7 {
debug ptr => _6; debug ptr => _6;
scope 11 (inlined NonNull::<[bool; 0]>::new_unchecked) { scope 12 (inlined NonNull::<[bool; 0]>::new_unchecked) {
debug ptr => _6; debug ptr => _6;
let mut _8: bool; let mut _8: bool;
let _9: (); let _9: ();
let mut _10: *mut (); let mut _10: *mut ();
let mut _11: *const [bool; 0]; let mut _11: *const [bool; 0];
scope 12 { scope 13 {
} }
} }
} }
scope 8 (inlined align_of::<[bool; 0]>) { scope 8 (inlined dangling_mut::<[bool; 0]>) {
} let mut _7: usize;
scope 9 (inlined invalid_mut::<[bool; 0]>) { scope 9 (inlined align_of::<[bool; 0]>) {
debug addr => _7; }
scope 10 { scope 10 (inlined without_provenance_mut::<[bool; 0]>) {
debug addr => _7;
scope 11 {
}
} }
} }
} }

View File

@ -42,7 +42,7 @@ fn enumerated_loop(_1: &[T], _2: impl Fn(usize, &T)) -> () {
scope 8 { scope 8 {
debug end_or_len => _11; debug end_or_len => _11;
} }
scope 14 (inlined invalid::<T>) { scope 14 (inlined without_provenance::<T>) {
debug addr => _3; debug addr => _3;
scope 15 { scope 15 {
} }

View File

@ -42,7 +42,7 @@ fn enumerated_loop(_1: &[T], _2: impl Fn(usize, &T)) -> () {
scope 8 { scope 8 {
debug end_or_len => _11; debug end_or_len => _11;
} }
scope 14 (inlined invalid::<T>) { scope 14 (inlined without_provenance::<T>) {
debug addr => _3; debug addr => _3;
scope 15 { scope 15 {
} }

View File

@ -39,7 +39,7 @@ fn forward_loop(_1: &[T], _2: impl Fn(&T)) -> () {
scope 8 { scope 8 {
debug end_or_len => _11; debug end_or_len => _11;
} }
scope 14 (inlined invalid::<T>) { scope 14 (inlined without_provenance::<T>) {
debug addr => _3; debug addr => _3;
scope 15 { scope 15 {
} }

View File

@ -39,7 +39,7 @@ fn forward_loop(_1: &[T], _2: impl Fn(&T)) -> () {
scope 8 { scope 8 {
debug end_or_len => _11; debug end_or_len => _11;
} }
scope 14 (inlined invalid::<T>) { scope 14 (inlined without_provenance::<T>) {
debug addr => _3; debug addr => _3;
scope 15 { scope 15 {
} }

View File

@ -44,7 +44,7 @@ fn reverse_loop(_1: &[T], _2: impl Fn(&T)) -> () {
scope 8 { scope 8 {
debug end_or_len => _11; debug end_or_len => _11;
} }
scope 14 (inlined invalid::<T>) { scope 14 (inlined without_provenance::<T>) {
debug addr => _3; debug addr => _3;
scope 15 { scope 15 {
} }

View File

@ -44,7 +44,7 @@ fn reverse_loop(_1: &[T], _2: impl Fn(&T)) -> () {
scope 8 { scope 8 {
debug end_or_len => _11; debug end_or_len => _11;
} }
scope 14 (inlined invalid::<T>) { scope 14 (inlined without_provenance::<T>) {
debug addr => _3; debug addr => _3;
scope 15 { scope 15 {
} }
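
The last few hunks are the slice-iterator MIR: for zero-sized element types the iterator has no meaningful end pointer, so, as these dumps suggest, the remaining length is stored in the `end_or_len` field as a provenance-free pointer and recovered through `addr`. The encoding in isolation, with hypothetical helper names:

use std::ptr;

// Hypothetical helpers mirroring the `end_or_len` trick: for ZST elements the
// "pointer" is really just a length in disguise.
fn encode_len<T>(len: usize) -> *const T {
    ptr::without_provenance(len)
}

fn decode_len<T>(end_or_len: *const T) -> usize {
    end_or_len.addr()
}

fn main() {
    let end_or_len: *const () = encode_len(10);
    assert_eq!(decode_len(end_or_len), 10);
}
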