Mirror of https://github.com/rust-lang/rust.git, synced 2024-11-25 00:03:43 +00:00
Rollup merge of #132571 - RalfJung:const_eval_select_macro, r=oli-obk
add const_eval_select macro to reduce redundancy

I played around a bit with a macro to make const_eval_select invocations look a bit nicer and avoid repeating the argument lists. Here's what I got. What do you think? I didn't apply this everywhere yet because I wanted to gather feedback first.

The second commit moves the macros from https://github.com/rust-lang/rust/pull/132542 into a more sensible place. It didn't seem worth its own PR and would conflict with this PR if done separately.

Cc ``@oli-obk`` ``@saethlin`` ``@tgross35``

try-job: dist-aarch64-msvc
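To illustrate the intent (this sketch is not part of the PR; the `example` function and its bodies are made up, only the macro syntax is taken from the diff below): a hand-written `const_eval_select` call site needs two named helper functions that repeat the argument list, while the macro states the arguments once and keeps both arms inline.

```rust
// Before: the hand-written pattern used throughout core today (sketch).
const fn example(x: u32) -> u32 {
    #[inline]
    fn runtime(x: u32) -> u32 {
        x >> 1
    }
    const fn compiletime(x: u32) -> u32 {
        x / 2
    }
    // Both arms must behave identically; only their implementation differs.
    intrinsics::const_eval_select((x,), compiletime, runtime)
}

// After: the same thing with the new macro; the argument list is written once.
const fn example(x: u32) -> u32 {
    const_eval_select!(
        @capture { x: u32 } -> u32:
        if const {
            x / 2
        } else {
            x >> 1
        }
    )
}
```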
This commit is contained in: commit aa4fe48afe
@ -1,7 +1,7 @@
//! impl char {}

use super::*;
use crate::macros::const_panic;
use crate::panic::const_panic;
use crate::slice;
use crate::str::from_utf8_unchecked_mut;
use crate::unicode::printable::is_printable;
@ -3,11 +3,12 @@
use crate::cmp::Ordering;
use crate::error::Error;
use crate::ffi::c_char;
use crate::intrinsics::const_eval_select;
use crate::iter::FusedIterator;
use crate::marker::PhantomData;
use crate::ptr::NonNull;
use crate::slice::memchr;
use crate::{fmt, intrinsics, ops, slice, str};
use crate::{fmt, ops, slice, str};

// FIXME: because this is doc(inline)d, we *have* to use intra-doc links because the actual link
// depends on where the item is being documented. however, since this is libcore, we can't
@ -411,37 +412,35 @@ impl CStr {
#[rustc_const_stable(feature = "const_cstr_unchecked", since = "1.59.0")]
#[rustc_allow_const_fn_unstable(const_eval_select)]
pub const unsafe fn from_bytes_with_nul_unchecked(bytes: &[u8]) -> &CStr {
#[inline]
fn rt_impl(bytes: &[u8]) -> &CStr {
// Chance at catching some UB at runtime with debug builds.
debug_assert!(!bytes.is_empty() && bytes[bytes.len() - 1] == 0);
const_eval_select!(
@capture { bytes: &[u8] } -> &CStr:
if const {
// Saturating so that an empty slice panics in the assert with a good
// message, not here due to underflow.
let mut i = bytes.len().saturating_sub(1);
assert!(!bytes.is_empty() && bytes[i] == 0, "input was not nul-terminated");

// SAFETY: Casting to CStr is safe because its internal representation
// is a [u8] too (safe only inside std).
// Dereferencing the obtained pointer is safe because it comes from a
// reference. Making a reference is then safe because its lifetime
// is bound by the lifetime of the given `bytes`.
unsafe { &*(bytes as *const [u8] as *const CStr) }
}
// Ending nul byte exists, skip to the rest.
while i != 0 {
i -= 1;
let byte = bytes[i];
assert!(byte != 0, "input contained interior nul");
}

const fn const_impl(bytes: &[u8]) -> &CStr {
// Saturating so that an empty slice panics in the assert with a good
// message, not here due to underflow.
let mut i = bytes.len().saturating_sub(1);
assert!(!bytes.is_empty() && bytes[i] == 0, "input was not nul-terminated");
// SAFETY: See runtime cast comment below.
unsafe { &*(bytes as *const [u8] as *const CStr) }
} else {
// Chance at catching some UB at runtime with debug builds.
debug_assert!(!bytes.is_empty() && bytes[bytes.len() - 1] == 0);

// Ending nul byte exists, skip to the rest.
while i != 0 {
i -= 1;
let byte = bytes[i];
assert!(byte != 0, "input contained interior nul");
// SAFETY: Casting to CStr is safe because its internal representation
// is a [u8] too (safe only inside std).
// Dereferencing the obtained pointer is safe because it comes from a
// reference. Making a reference is then safe because its lifetime
// is bound by the lifetime of the given `bytes`.
unsafe { &*(bytes as *const [u8] as *const CStr) }
}

// SAFETY: See `rt_impl` cast.
unsafe { &*(bytes as *const [u8] as *const CStr) }
}

intrinsics::const_eval_select((bytes,), const_impl, rt_impl)
)
}

/// Returns the inner pointer to this C string.
@ -735,29 +734,27 @@ impl AsRef<CStr> for CStr {
#[cfg_attr(bootstrap, rustc_const_stable(feature = "const_cstr_from_ptr", since = "1.81.0"))]
#[rustc_allow_const_fn_unstable(const_eval_select)]
const unsafe fn strlen(ptr: *const c_char) -> usize {
const fn strlen_ct(s: *const c_char) -> usize {
let mut len = 0;
const_eval_select!(
@capture { s: *const c_char = ptr } -> usize:
if const {
let mut len = 0;

// SAFETY: Outer caller has provided a pointer to a valid C string.
while unsafe { *s.add(len) } != 0 {
len += 1;
// SAFETY: Outer caller has provided a pointer to a valid C string.
while unsafe { *s.add(len) } != 0 {
len += 1;
}

len
} else {
extern "C" {
/// Provided by libc or compiler_builtins.
fn strlen(s: *const c_char) -> usize;
}

// SAFETY: Outer caller has provided a pointer to a valid C string.
unsafe { strlen(s) }
}

len
}

#[inline]
fn strlen_rt(s: *const c_char) -> usize {
extern "C" {
/// Provided by libc or compiler_builtins.
fn strlen(s: *const c_char) -> usize;
}

// SAFETY: Outer caller has provided a pointer to a valid C string.
unsafe { strlen(s) }
}

intrinsics::const_eval_select((ptr,), strlen_ct, strlen_rt)
)
}

/// An iterator over the bytes of a [`CStr`], without the nul terminator.
@ -2940,6 +2940,68 @@ where
unreachable!()
}

/// A macro to make it easier to invoke const_eval_select. Use as follows:
/// ```rust,ignore (just a macro example)
/// const_eval_select!(
/// @capture { arg1: i32 = some_expr, arg2: T = other_expr } -> U:
/// if const #[attributes_for_const_arm] {
/// // Compile-time code goes here.
/// } else #[attributes_for_runtime_arm] {
/// // Run-time code goes here.
/// }
/// )
/// ```
/// The `@capture` block declares which surrounding variables / expressions can be
/// used inside the `if const`.
/// Note that the two arms of this `if` really each become their own function, which is why the
/// macro supports setting attributes for those functions. The runtime function is always
/// marked as `#[inline]`.
///
/// See [`const_eval_select()`] for the rules and requirements around that intrinsic.
pub(crate) macro const_eval_select {
(
@capture { $($arg:ident : $ty:ty = $val:expr),* $(,)? } $( -> $ret:ty )? :
if const
$(#[$compiletime_attr:meta])* $compiletime:block
else
$(#[$runtime_attr:meta])* $runtime:block
) => {{
#[inline] // avoid the overhead of an extra fn call
$(#[$runtime_attr])*
fn runtime($($arg: $ty),*) $( -> $ret )? {
$runtime
}

#[inline] // prevent codegen on this function
$(#[$compiletime_attr])*
const fn compiletime($($arg: $ty),*) $( -> $ret )? {
// Don't warn if one of the arguments is unused.
$(let _ = $arg;)*

$compiletime
}

const_eval_select(($($val,)*), compiletime, runtime)
}},
// We support leaving away the `val` expressions for *all* arguments
// (but not for *some* arguments, that's too tricky).
(
@capture { $($arg:ident : $ty:ty),* $(,)? } $( -> $ret:ty )? :
if const
$(#[$compiletime_attr:meta])* $compiletime:block
else
$(#[$runtime_attr:meta])* $runtime:block
) => {
$crate::intrinsics::const_eval_select!(
@capture { $($arg : $ty = $arg),* } $(-> $ret)? :
if const
$(#[$compiletime_attr])* $compiletime
else
$(#[$runtime_attr])* $runtime
)
},
}

/// Returns whether the argument's value is statically known at
/// compile-time.
///
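A small usage note on the two matcher arms above (illustrative only; `fits_isize` and its bodies are made up, the macro behavior comes from the definition itself): writing `@capture { len: usize }` without `= expr` is shorthand for `@capture { len: usize = len }`, i.e. the surrounding variable of the same name is passed through, and any `#[...]` written after `if const` / `else` is forwarded as an attribute onto the generated `compiletime` / `runtime` function. Inside core the enclosing `const fn` also needs `#[rustc_allow_const_fn_unstable(const_eval_select)]`, as the surrounding hunks show.

```rust
// Sketch modeled on the UB-check helpers later in this diff: the const arm is
// conservatively `true`, the runtime arm does the real check.
const fn fits_isize(len: usize) -> bool {
    const_eval_select!(
        @capture { len: usize } -> bool:
        if const {
            // Body of the generated `const fn compiletime(len: usize) -> bool`.
            true
        } else #[inline(always)] {
            // Body of the generated `fn runtime(len: usize) -> bool`; the
            // `#[inline(always)]` above lands on that generated function.
            len <= isize::MAX as usize
        }
    )
}
```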
@ -2982,7 +3044,7 @@ where
/// # Stability concerns
///
/// While it is safe to call, this intrinsic may behave differently in
/// a `const` context than otherwise. See the [`const_eval_select`]
/// a `const` context than otherwise. See the [`const_eval_select()`]
/// documentation for an explanation of the issues this can cause. Unlike
/// `const_eval_select`, this intrinsic isn't guaranteed to behave
/// deterministically even in a `const` context.
@ -3868,14 +3930,15 @@ pub(crate) const fn miri_promise_symbolic_alignment(ptr: *const (), align: usize
fn miri_promise_symbolic_alignment(ptr: *const (), align: usize);
}

fn runtime(ptr: *const (), align: usize) {
// SAFETY: this call is always safe.
unsafe {
miri_promise_symbolic_alignment(ptr, align);
const_eval_select!(
@capture { ptr: *const (), align: usize}:
if const {
// Do nothing.
} else {
// SAFETY: this call is always safe.
unsafe {
miri_promise_symbolic_alignment(ptr, align);
}
}
}

const fn compiletime(_ptr: *const (), _align: usize) {}

const_eval_select((ptr, align), compiletime, runtime);
)
}
@ -12,54 +12,6 @@ macro_rules! panic {
};
}

/// Helper macro for panicking in a `const fn`.
/// Invoke as:
/// ```rust,ignore (just an example)
/// core::macros::const_panic!("boring message", "flavored message {a} {b:?}", a: u32 = foo.len(), b: Something = bar);
/// ```
/// where the first message will be printed in const-eval,
/// and the second message will be printed at runtime.
// All uses of this macro are FIXME(const-hack).
#[unstable(feature = "panic_internals", issue = "none")]
#[doc(hidden)]
pub macro const_panic {
($const_msg:literal, $runtime_msg:literal, $($arg:ident : $ty:ty = $val:expr),* $(,)?) => {{
#[inline]
#[track_caller]
fn runtime($($arg: $ty),*) -> ! {
$crate::panic!($runtime_msg);
}

#[inline]
#[track_caller]
const fn compiletime($(_: $ty),*) -> ! {
$crate::panic!($const_msg);
}

// Wrap call to `const_eval_select` in a function so that we can
// add the `rustc_allow_const_fn_unstable`. This is okay to do
// because both variants will panic, just with different messages.
#[rustc_allow_const_fn_unstable(const_eval_select)]
#[inline(always)]
#[track_caller]
#[cfg_attr(bootstrap, rustc_const_stable(feature = "const_panic", since = "CURRENT_RUSTC_VERSION"))]
const fn do_panic($($arg: $ty),*) -> ! {
$crate::intrinsics::const_eval_select(($($arg),* ,), compiletime, runtime)
}

do_panic($($val),*)
}},
// We support leaving away the `val` expressions for *all* arguments
// (but not for *some* arguments, that's too tricky).
($const_msg:literal, $runtime_msg:literal, $($arg:ident : $ty:ty),* $(,)?) => {
$crate::macros::const_panic!(
$const_msg,
$runtime_msg,
$($arg: $ty = $arg),*
)
},
}

/// Asserts that two expressions are equal to each other (using [`PartialEq`]).
///
/// Assertions are always checked in both debug and release builds, and cannot
@ -244,19 +196,6 @@ pub macro assert_matches {
},
}

/// A version of `assert` that prints a non-formatting message in const contexts.
///
/// See [`const_panic!`].
#[unstable(feature = "panic_internals", issue = "none")]
#[doc(hidden)]
pub macro const_assert {
($condition: expr, $const_msg:literal, $runtime_msg:literal, $($arg:tt)*) => {{
if !$crate::intrinsics::likely($condition) {
$crate::macros::const_panic!($const_msg, $runtime_msg, $($arg)*)
}
}}
}

/// A macro for defining `#[cfg]` match-like statements.
///
/// It is similar to the `if/elif` C preprocessor macro by allowing definition of a cascade of
@ -14,9 +14,9 @@
use crate::convert::FloatToInt;
#[cfg(not(test))]
use crate::intrinsics;
use crate::macros::const_assert;
use crate::mem;
use crate::num::FpCategory;
use crate::panic::const_assert;

/// Basic mathematical constants.
#[unstable(feature = "f128", issue = "116909")]
@ -14,9 +14,9 @@
use crate::convert::FloatToInt;
#[cfg(not(test))]
use crate::intrinsics;
use crate::macros::const_assert;
use crate::mem;
use crate::num::FpCategory;
use crate::panic::const_assert;

/// Basic mathematical constants.
#[unstable(feature = "f16", issue = "116909")]
@ -14,9 +14,9 @@
use crate::convert::FloatToInt;
#[cfg(not(test))]
use crate::intrinsics;
use crate::macros::const_assert;
use crate::mem;
use crate::num::FpCategory;
use crate::panic::const_assert;

/// The radix or base of the internal representation of `f32`.
/// Use [`f32::RADIX`] instead.
@ -14,9 +14,9 @@
use crate::convert::FloatToInt;
#[cfg(not(test))]
use crate::intrinsics;
use crate::macros::const_assert;
use crate::mem;
use crate::num::FpCategory;
use crate::panic::const_assert;

/// The radix or base of the internal representation of `f64`.
/// Use [`f64::RADIX`] instead.
@ -2,7 +2,7 @@

#![stable(feature = "rust1", since = "1.0.0")]

use crate::macros::const_panic;
use crate::panic::const_panic;
use crate::str::FromStr;
use crate::ub_checks::assert_unsafe_precondition;
use crate::{ascii, intrinsics, mem};
@ -189,3 +189,59 @@ pub unsafe trait PanicPayload: crate::fmt::Display {
None
}
}

/// Helper macro for panicking in a `const fn`.
/// Invoke as:
/// ```rust,ignore (just an example)
/// core::macros::const_panic!("boring message", "flavored message {a} {b:?}", a: u32 = foo.len(), b: Something = bar);
/// ```
/// where the first message will be printed in const-eval,
/// and the second message will be printed at runtime.
// All uses of this macro are FIXME(const-hack).
#[unstable(feature = "panic_internals", issue = "none")]
#[doc(hidden)]
pub macro const_panic {
($const_msg:literal, $runtime_msg:literal, $($arg:ident : $ty:ty = $val:expr),* $(,)?) => {{
// Wrap call to `const_eval_select` in a function so that we can
// add the `rustc_allow_const_fn_unstable`. This is okay to do
// because both variants will panic, just with different messages.
#[rustc_allow_const_fn_unstable(const_eval_select)]
#[inline(always)]
#[track_caller]
#[cfg_attr(bootstrap, rustc_const_stable(feature = "const_panic", since = "CURRENT_RUSTC_VERSION"))]
const fn do_panic($($arg: $ty),*) -> ! {
$crate::intrinsics::const_eval_select!(
@capture { $($arg: $ty),* } -> !:
if const #[track_caller] {
$crate::panic!($const_msg)
} else #[track_caller] {
$crate::panic!($runtime_msg)
}
)
}

do_panic($($val),*)
}},
// We support leaving away the `val` expressions for *all* arguments
// (but not for *some* arguments, that's too tricky).
($const_msg:literal, $runtime_msg:literal, $($arg:ident : $ty:ty),* $(,)?) => {
$crate::panic::const_panic!(
$const_msg,
$runtime_msg,
$($arg: $ty = $arg),*
)
},
}

/// A version of `assert` that prints a non-formatting message in const contexts.
///
/// See [`const_panic!`].
#[unstable(feature = "panic_internals", issue = "none")]
#[doc(hidden)]
pub macro const_assert {
($condition: expr, $const_msg:literal, $runtime_msg:literal, $($arg:tt)*) => {{
if !$crate::intrinsics::likely($condition) {
$crate::panic::const_panic!($const_msg, $runtime_msg, $($arg)*)
}
}}
}
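As a rough illustration of how the relocated helpers are meant to be called (a hypothetical caller; `checked_shift` and the messages are invented, only the macro signatures come from the definitions above): the first literal is the plain message printed during const evaluation, the second is the formatted message printed at runtime, and the trailing `name: type = value` list supplies the runtime format captures. A real caller inside core would also need the usual const-stability attributes shown elsewhere in this diff.

```rust
const fn checked_shift(x: u32, shift: u32) -> u32 {
    const_assert!(
        shift < 32,
        "shift amount must be below 32",             // printed in const-eval
        "shift amount {shift} is too large for {x}", // printed at runtime
        x: u32 = x,
        shift: u32 = shift,
    );
    x << shift
}
```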
@ -29,6 +29,7 @@
)]

use crate::fmt;
use crate::intrinsics::const_eval_select;
use crate::panic::{Location, PanicInfo};

#[cfg(feature = "panic_immediate_abort")]
@ -89,40 +90,35 @@ pub const fn panic_fmt(fmt: fmt::Arguments<'_>) -> ! {
#[cfg_attr(not(bootstrap), rustc_const_stable_indirect)] // must follow stable const rules since it is exposed to stable
#[rustc_allow_const_fn_unstable(const_eval_select)]
pub const fn panic_nounwind_fmt(fmt: fmt::Arguments<'_>, force_no_backtrace: bool) -> ! {
#[inline] // this should always be inlined into `panic_nounwind_fmt`
#[track_caller]
fn runtime(fmt: fmt::Arguments<'_>, force_no_backtrace: bool) -> ! {
if cfg!(feature = "panic_immediate_abort") {
super::intrinsics::abort()
const_eval_select!(
@capture { fmt: fmt::Arguments<'_>, force_no_backtrace: bool } -> !:
if const #[track_caller] {
// We don't unwind anyway at compile-time so we can call the regular `panic_fmt`.
panic_fmt(fmt)
} else #[track_caller] {
if cfg!(feature = "panic_immediate_abort") {
super::intrinsics::abort()
}

// NOTE This function never crosses the FFI boundary; it's a Rust-to-Rust call
// that gets resolved to the `#[panic_handler]` function.
extern "Rust" {
#[lang = "panic_impl"]
fn panic_impl(pi: &PanicInfo<'_>) -> !;
}

// PanicInfo with the `can_unwind` flag set to false forces an abort.
let pi = PanicInfo::new(
&fmt,
Location::caller(),
/* can_unwind */ false,
force_no_backtrace,
);

// SAFETY: `panic_impl` is defined in safe Rust code and thus is safe to call.
unsafe { panic_impl(&pi) }
}

// NOTE This function never crosses the FFI boundary; it's a Rust-to-Rust call
// that gets resolved to the `#[panic_handler]` function.
extern "Rust" {
#[lang = "panic_impl"]
fn panic_impl(pi: &PanicInfo<'_>) -> !;
}

// PanicInfo with the `can_unwind` flag set to false forces an abort.
let pi = PanicInfo::new(
&fmt,
Location::caller(),
/* can_unwind */ false,
force_no_backtrace,
);

// SAFETY: `panic_impl` is defined in safe Rust code and thus is safe to call.
unsafe { panic_impl(&pi) }
}

#[inline]
#[track_caller]
const fn comptime(fmt: fmt::Arguments<'_>, _force_no_backtrace: bool) -> ! {
// We don't unwind anyway at compile-time so we can call the regular `panic_fmt`.
panic_fmt(fmt);
}

super::intrinsics::const_eval_select((fmt, force_no_backtrace), comptime, runtime);
)
}

// Next we define a bunch of higher-level wrappers that all bottom out in the two core functions
@ -33,26 +33,23 @@ impl<T: ?Sized> *const T {
#[rustc_diagnostic_item = "ptr_const_is_null"]
#[inline]
pub const fn is_null(self) -> bool {
#[inline]
fn runtime_impl(ptr: *const u8) -> bool {
ptr.addr() == 0
}

#[inline]
#[rustc_const_unstable(feature = "const_ptr_is_null", issue = "74939")]
const fn const_impl(ptr: *const u8) -> bool {
match (ptr).guaranteed_eq(null_mut()) {
Some(res) => res,
// To remain maximally conservative, we stop execution when we don't
// know whether the pointer is null or not.
// We can *not* return `false` here, that would be unsound in `NonNull::new`!
None => panic!("null-ness of this pointer cannot be determined in const context"),
}
}

// Compare via a cast to a thin pointer, so fat pointers are only
// considering their "data" part for null-ness.
const_eval_select((self as *const u8,), const_impl, runtime_impl)
let ptr = self as *const u8;
const_eval_select!(
@capture { ptr: *const u8 } -> bool:
if const #[rustc_const_unstable(feature = "const_ptr_is_null", issue = "74939")] {
match (ptr).guaranteed_eq(null_mut()) {
Some(res) => res,
// To remain maximally conservative, we stop execution when we don't
// know whether the pointer is null or not.
// We can *not* return `false` here, that would be unsound in `NonNull::new`!
None => panic!("null-ness of this pointer cannot be determined in const context"),
}
} else {
ptr.addr() == 0
}
)
}

/// Casts to a pointer of another type.
@ -410,22 +407,21 @@ impl<T: ?Sized> *const T {
#[inline]
#[rustc_allow_const_fn_unstable(const_eval_select)]
const fn runtime_offset_nowrap(this: *const (), count: isize, size: usize) -> bool {
#[inline]
fn runtime(this: *const (), count: isize, size: usize) -> bool {
// We know `size <= isize::MAX` so the `as` cast here is not lossy.
let Some(byte_offset) = count.checked_mul(size as isize) else {
return false;
};
let (_, overflow) = this.addr().overflowing_add_signed(byte_offset);
!overflow
}

const fn comptime(_: *const (), _: isize, _: usize) -> bool {
true
}

// We can use const_eval_select here because this is only for UB checks.
intrinsics::const_eval_select((this, count, size), comptime, runtime)
const_eval_select!(
@capture { this: *const (), count: isize, size: usize } -> bool:
if const {
true
} else {
// `size` is the size of a Rust type, so we know that
// `size <= isize::MAX` and thus `as` cast here is not lossy.
let Some(byte_offset) = count.checked_mul(size as isize) else {
return false;
};
let (_, overflow) = this.addr().overflowing_add_signed(byte_offset);
!overflow
}
)
}

ub_checks::assert_unsafe_precondition!(
@ -763,14 +759,14 @@ impl<T: ?Sized> *const T {
{
#[rustc_allow_const_fn_unstable(const_eval_select)]
const fn runtime_ptr_ge(this: *const (), origin: *const ()) -> bool {
fn runtime(this: *const (), origin: *const ()) -> bool {
this >= origin
}
const fn comptime(_: *const (), _: *const ()) -> bool {
true
}

intrinsics::const_eval_select((this, origin), comptime, runtime)
const_eval_select!(
@capture { this: *const (), origin: *const () } -> bool:
if const {
true
} else {
this >= origin
}
)
}

ub_checks::assert_unsafe_precondition!(
@ -924,20 +920,18 @@ impl<T: ?Sized> *const T {
#[inline]
#[rustc_allow_const_fn_unstable(const_eval_select)]
const fn runtime_add_nowrap(this: *const (), count: usize, size: usize) -> bool {
#[inline]
fn runtime(this: *const (), count: usize, size: usize) -> bool {
let Some(byte_offset) = count.checked_mul(size) else {
return false;
};
let (_, overflow) = this.addr().overflowing_add(byte_offset);
byte_offset <= (isize::MAX as usize) && !overflow
}

const fn comptime(_: *const (), _: usize, _: usize) -> bool {
true
}

intrinsics::const_eval_select((this, count, size), comptime, runtime)
const_eval_select!(
@capture { this: *const (), count: usize, size: usize } -> bool:
if const {
true
} else {
let Some(byte_offset) = count.checked_mul(size) else {
return false;
};
let (_, overflow) = this.addr().overflowing_add(byte_offset);
byte_offset <= (isize::MAX as usize) && !overflow
}
)
}

#[cfg(debug_assertions)] // Expensive, and doesn't catch much in the wild.
@ -1033,19 +1027,17 @@ impl<T: ?Sized> *const T {
#[inline]
#[rustc_allow_const_fn_unstable(const_eval_select)]
const fn runtime_sub_nowrap(this: *const (), count: usize, size: usize) -> bool {
#[inline]
fn runtime(this: *const (), count: usize, size: usize) -> bool {
let Some(byte_offset) = count.checked_mul(size) else {
return false;
};
byte_offset <= (isize::MAX as usize) && this.addr() >= byte_offset
}

const fn comptime(_: *const (), _: usize, _: usize) -> bool {
true
}

intrinsics::const_eval_select((this, count, size), comptime, runtime)
const_eval_select!(
@capture { this: *const (), count: usize, size: usize } -> bool:
if const {
true
} else {
let Some(byte_offset) = count.checked_mul(size) else {
return false;
};
byte_offset <= (isize::MAX as usize) && this.addr() >= byte_offset
}
)
}

#[cfg(debug_assertions)] // Expensive, and doesn't catch much in the wild.
@ -1,5 +1,6 @@
use super::*;
use crate::cmp::Ordering::{Equal, Greater, Less};
use crate::intrinsics::const_eval_select;
use crate::mem::SizedTypeProperties;
use crate::slice::{self, SliceIndex};
@ -404,23 +405,21 @@ impl<T: ?Sized> *mut T {
#[inline]
#[rustc_allow_const_fn_unstable(const_eval_select)]
const fn runtime_offset_nowrap(this: *const (), count: isize, size: usize) -> bool {
#[inline]
fn runtime(this: *const (), count: isize, size: usize) -> bool {
// `size` is the size of a Rust type, so we know that
// `size <= isize::MAX` and thus `as` cast here is not lossy.
let Some(byte_offset) = count.checked_mul(size as isize) else {
return false;
};
let (_, overflow) = this.addr().overflowing_add_signed(byte_offset);
!overflow
}

const fn comptime(_: *const (), _: isize, _: usize) -> bool {
true
}

// We can use const_eval_select here because this is only for UB checks.
intrinsics::const_eval_select((this, count, size), comptime, runtime)
const_eval_select!(
@capture { this: *const (), count: isize, size: usize } -> bool:
if const {
true
} else {
// `size` is the size of a Rust type, so we know that
// `size <= isize::MAX` and thus `as` cast here is not lossy.
let Some(byte_offset) = count.checked_mul(size as isize) else {
return false;
};
let (_, overflow) = this.addr().overflowing_add_signed(byte_offset);
!overflow
}
)
}

ub_checks::assert_unsafe_precondition!(
@ -1002,20 +1001,18 @@ impl<T: ?Sized> *mut T {
#[inline]
#[rustc_allow_const_fn_unstable(const_eval_select)]
const fn runtime_add_nowrap(this: *const (), count: usize, size: usize) -> bool {
#[inline]
fn runtime(this: *const (), count: usize, size: usize) -> bool {
let Some(byte_offset) = count.checked_mul(size) else {
return false;
};
let (_, overflow) = this.addr().overflowing_add(byte_offset);
byte_offset <= (isize::MAX as usize) && !overflow
}

const fn comptime(_: *const (), _: usize, _: usize) -> bool {
true
}

intrinsics::const_eval_select((this, count, size), comptime, runtime)
const_eval_select!(
@capture { this: *const (), count: usize, size: usize } -> bool:
if const {
true
} else {
let Some(byte_offset) = count.checked_mul(size) else {
return false;
};
let (_, overflow) = this.addr().overflowing_add(byte_offset);
byte_offset <= (isize::MAX as usize) && !overflow
}
)
}

#[cfg(debug_assertions)] // Expensive, and doesn't catch much in the wild.
@ -1111,19 +1108,17 @@ impl<T: ?Sized> *mut T {
#[inline]
#[rustc_allow_const_fn_unstable(const_eval_select)]
const fn runtime_sub_nowrap(this: *const (), count: usize, size: usize) -> bool {
#[inline]
fn runtime(this: *const (), count: usize, size: usize) -> bool {
let Some(byte_offset) = count.checked_mul(size) else {
return false;
};
byte_offset <= (isize::MAX as usize) && this.addr() >= byte_offset
}

const fn comptime(_: *const (), _: usize, _: usize) -> bool {
true
}

intrinsics::const_eval_select((this, count, size), comptime, runtime)
const_eval_select!(
@capture { this: *const (), count: usize, size: usize } -> bool:
if const {
true
} else {
let Some(byte_offset) = count.checked_mul(size) else {
return false;
};
byte_offset <= (isize::MAX as usize) && this.addr() >= byte_offset
}
)
}

#[cfg(debug_assertions)] // Expensive, and doesn't catch much in the wild.
@ -351,89 +351,87 @@ pub const fn is_ascii_simple(mut bytes: &[u8]) -> bool {
const fn is_ascii(s: &[u8]) -> bool {
// The runtime version behaves the same as the compiletime version, it's
// just more optimized.
return const_eval_select((s,), compiletime, runtime);
const_eval_select!(
@capture { s: &[u8] } -> bool:
if const {
is_ascii_simple(s)
} else {
const USIZE_SIZE: usize = mem::size_of::<usize>();

const fn compiletime(s: &[u8]) -> bool {
is_ascii_simple(s)
}
let len = s.len();
let align_offset = s.as_ptr().align_offset(USIZE_SIZE);

#[inline]
fn runtime(s: &[u8]) -> bool {
const USIZE_SIZE: usize = mem::size_of::<usize>();

let len = s.len();
let align_offset = s.as_ptr().align_offset(USIZE_SIZE);

// If we wouldn't gain anything from the word-at-a-time implementation, fall
// back to a scalar loop.
//
// We also do this for architectures where `size_of::<usize>()` isn't
// sufficient alignment for `usize`, because it's a weird edge case.
if len < USIZE_SIZE || len < align_offset || USIZE_SIZE < mem::align_of::<usize>() {
return is_ascii_simple(s);
}

// We always read the first word unaligned, which means `align_offset` is
// 0, we'd read the same value again for the aligned read.
let offset_to_aligned = if align_offset == 0 { USIZE_SIZE } else { align_offset };

let start = s.as_ptr();
// SAFETY: We verify `len < USIZE_SIZE` above.
let first_word = unsafe { (start as *const usize).read_unaligned() };

if contains_nonascii(first_word) {
return false;
}
// We checked this above, somewhat implicitly. Note that `offset_to_aligned`
// is either `align_offset` or `USIZE_SIZE`, both of which are explicitly checked
// above.
debug_assert!(offset_to_aligned <= len);

// SAFETY: word_ptr is the (properly aligned) usize ptr we use to read the
// middle chunk of the slice.
let mut word_ptr = unsafe { start.add(offset_to_aligned) as *const usize };

// `byte_pos` is the byte index of `word_ptr`, used for loop end checks.
let mut byte_pos = offset_to_aligned;

// Paranoia check about alignment, since we're about to do a bunch of
// unaligned loads. In practice this should be impossible barring a bug in
// `align_offset` though.
// While this method is allowed to spuriously fail in CTFE, if it doesn't
// have alignment information it should have given a `usize::MAX` for
// `align_offset` earlier, sending things through the scalar path instead of
// this one, so this check should pass if it's reachable.
debug_assert!(word_ptr.is_aligned_to(mem::align_of::<usize>()));

// Read subsequent words until the last aligned word, excluding the last
// aligned word by itself to be done in tail check later, to ensure that
// tail is always one `usize` at most to extra branch `byte_pos == len`.
while byte_pos < len - USIZE_SIZE {
// Sanity check that the read is in bounds
debug_assert!(byte_pos + USIZE_SIZE <= len);
// And that our assumptions about `byte_pos` hold.
debug_assert!(word_ptr.cast::<u8>() == start.wrapping_add(byte_pos));

// SAFETY: We know `word_ptr` is properly aligned (because of
// `align_offset`), and we know that we have enough bytes between `word_ptr` and the end
let word = unsafe { word_ptr.read() };
if contains_nonascii(word) {
return false;
// If we wouldn't gain anything from the word-at-a-time implementation, fall
// back to a scalar loop.
//
// We also do this for architectures where `size_of::<usize>()` isn't
// sufficient alignment for `usize`, because it's a weird edge case.
if len < USIZE_SIZE || len < align_offset || USIZE_SIZE < mem::align_of::<usize>() {
return is_ascii_simple(s);
}

byte_pos += USIZE_SIZE;
// SAFETY: We know that `byte_pos <= len - USIZE_SIZE`, which means that
// after this `add`, `word_ptr` will be at most one-past-the-end.
word_ptr = unsafe { word_ptr.add(1) };
// We always read the first word unaligned, which means `align_offset` is
// 0, we'd read the same value again for the aligned read.
let offset_to_aligned = if align_offset == 0 { USIZE_SIZE } else { align_offset };

let start = s.as_ptr();
// SAFETY: We verify `len < USIZE_SIZE` above.
let first_word = unsafe { (start as *const usize).read_unaligned() };

if contains_nonascii(first_word) {
return false;
}
// We checked this above, somewhat implicitly. Note that `offset_to_aligned`
// is either `align_offset` or `USIZE_SIZE`, both of which are explicitly checked
// above.
debug_assert!(offset_to_aligned <= len);

// SAFETY: word_ptr is the (properly aligned) usize ptr we use to read the
// middle chunk of the slice.
let mut word_ptr = unsafe { start.add(offset_to_aligned) as *const usize };

// `byte_pos` is the byte index of `word_ptr`, used for loop end checks.
let mut byte_pos = offset_to_aligned;

// Paranoia check about alignment, since we're about to do a bunch of
// unaligned loads. In practice this should be impossible barring a bug in
// `align_offset` though.
// While this method is allowed to spuriously fail in CTFE, if it doesn't
// have alignment information it should have given a `usize::MAX` for
// `align_offset` earlier, sending things through the scalar path instead of
// this one, so this check should pass if it's reachable.
debug_assert!(word_ptr.is_aligned_to(mem::align_of::<usize>()));

// Read subsequent words until the last aligned word, excluding the last
// aligned word by itself to be done in tail check later, to ensure that
// tail is always one `usize` at most to extra branch `byte_pos == len`.
while byte_pos < len - USIZE_SIZE {
// Sanity check that the read is in bounds
debug_assert!(byte_pos + USIZE_SIZE <= len);
// And that our assumptions about `byte_pos` hold.
debug_assert!(word_ptr.cast::<u8>() == start.wrapping_add(byte_pos));

// SAFETY: We know `word_ptr` is properly aligned (because of
// `align_offset`), and we know that we have enough bytes between `word_ptr` and the end
let word = unsafe { word_ptr.read() };
if contains_nonascii(word) {
return false;
}

byte_pos += USIZE_SIZE;
// SAFETY: We know that `byte_pos <= len - USIZE_SIZE`, which means that
// after this `add`, `word_ptr` will be at most one-past-the-end.
word_ptr = unsafe { word_ptr.add(1) };
}

// Sanity check to ensure there really is only one `usize` left. This should
// be guaranteed by our loop condition.
debug_assert!(byte_pos <= len && len - byte_pos <= USIZE_SIZE);

// SAFETY: This relies on `len >= USIZE_SIZE`, which we check at the start.
let last_word = unsafe { (start.add(len - USIZE_SIZE) as *const usize).read_unaligned() };

!contains_nonascii(last_word)
}

// Sanity check to ensure there really is only one `usize` left. This should
// be guaranteed by our loop condition.
debug_assert!(byte_pos <= len && len - byte_pos <= USIZE_SIZE);

// SAFETY: This relies on `len >= USIZE_SIZE`, which we check at the start.
let last_word = unsafe { (start.add(len - USIZE_SIZE) as *const usize).read_unaligned() };

!contains_nonascii(last_word)
}
)
}
@ -1,6 +1,6 @@
//! Indexing implementations for `[T]`.

use crate::macros::const_panic;
use crate::panic::const_panic;
use crate::ub_checks::assert_unsafe_precondition;
use crate::{ops, range};
@ -56,61 +56,59 @@ const fn memchr_naive(x: u8, text: &[u8]) -> Option<usize> {
const fn memchr_aligned(x: u8, text: &[u8]) -> Option<usize> {
// The runtime version behaves the same as the compiletime version, it's
// just more optimized.
return const_eval_select((x, text), compiletime, runtime);
const_eval_select!(
@capture { x: u8, text: &[u8] } -> Option<usize>:
if const {
memchr_naive(x, text)
} else {
// Scan for a single byte value by reading two `usize` words at a time.
//
// Split `text` in three parts
// - unaligned initial part, before the first word aligned address in text
// - body, scan by 2 words at a time
// - the last remaining part, < 2 word size

const fn compiletime(x: u8, text: &[u8]) -> Option<usize> {
memchr_naive(x, text)
}
// search up to an aligned boundary
let len = text.len();
let ptr = text.as_ptr();
let mut offset = ptr.align_offset(USIZE_BYTES);

#[inline]
fn runtime(x: u8, text: &[u8]) -> Option<usize> {
// Scan for a single byte value by reading two `usize` words at a time.
//
// Split `text` in three parts
// - unaligned initial part, before the first word aligned address in text
// - body, scan by 2 words at a time
// - the last remaining part, < 2 word size

// search up to an aligned boundary
let len = text.len();
let ptr = text.as_ptr();
let mut offset = ptr.align_offset(USIZE_BYTES);

if offset > 0 {
offset = offset.min(len);
let slice = &text[..offset];
if let Some(index) = memchr_naive(x, slice) {
return Some(index);
}
}

// search the body of the text
let repeated_x = usize::repeat_u8(x);
while offset <= len - 2 * USIZE_BYTES {
// SAFETY: the while's predicate guarantees a distance of at least 2 * usize_bytes
// between the offset and the end of the slice.
unsafe {
let u = *(ptr.add(offset) as *const usize);
let v = *(ptr.add(offset + USIZE_BYTES) as *const usize);

// break if there is a matching byte
let zu = contains_zero_byte(u ^ repeated_x);
let zv = contains_zero_byte(v ^ repeated_x);
if zu || zv {
break;
if offset > 0 {
offset = offset.min(len);
let slice = &text[..offset];
if let Some(index) = memchr_naive(x, slice) {
return Some(index);
}
}
offset += USIZE_BYTES * 2;
}

// Find the byte after the point the body loop stopped.
// FIXME(const-hack): Use `?` instead.
// FIXME(const-hack, fee1-dead): use range slicing
let slice =
// SAFETY: offset is within bounds
unsafe { super::from_raw_parts(text.as_ptr().add(offset), text.len() - offset) };
if let Some(i) = memchr_naive(x, slice) { Some(offset + i) } else { None }
}
// search the body of the text
let repeated_x = usize::repeat_u8(x);
while offset <= len - 2 * USIZE_BYTES {
// SAFETY: the while's predicate guarantees a distance of at least 2 * usize_bytes
// between the offset and the end of the slice.
unsafe {
let u = *(ptr.add(offset) as *const usize);
let v = *(ptr.add(offset + USIZE_BYTES) as *const usize);

// break if there is a matching byte
let zu = contains_zero_byte(u ^ repeated_x);
let zv = contains_zero_byte(v ^ repeated_x);
if zu || zv {
break;
}
}
offset += USIZE_BYTES * 2;
}

// Find the byte after the point the body loop stopped.
// FIXME(const-hack): Use `?` instead.
// FIXME(const-hack, fee1-dead): use range slicing
let slice =
// SAFETY: offset is within bounds
unsafe { super::from_raw_parts(text.as_ptr().add(offset), text.len() - offset) };
if let Some(i) = memchr_naive(x, slice) { Some(offset + i) } else { None }
}
)
}

/// Returns the last index matching the byte `x` in `text`.
@ -132,19 +132,16 @@ pub(super) const fn run_utf8_validation(v: &[u8]) -> Result<(), Utf8Error> {

let ascii_block_size = 2 * USIZE_BYTES;
let blocks_end = if len >= ascii_block_size { len - ascii_block_size + 1 } else { 0 };
let align = {
const fn compiletime(_v: &[u8]) -> usize {
// Below, we safely fall back to a slower codepath if the offset is `usize::MAX`,
// so the end-to-end behavior is the same at compiletime and runtime.
let align = const_eval_select!(
@capture { v: &[u8] } -> usize:
if const {
usize::MAX
}

fn runtime(v: &[u8]) -> usize {
} else {
v.as_ptr().align_offset(USIZE_BYTES)
}

// Below, we safely fall back to a slower codepath if the offset is `usize::MAX`,
// so the end-to-end behavior is the same at compiletime and runtime.
const_eval_select((v,), compiletime, runtime)
};
);

while index < len {
let old_offset = index;
@ -95,20 +95,18 @@ pub use intrinsics::ub_checks as check_library_ub;
#[inline]
#[rustc_allow_const_fn_unstable(const_eval_select)]
pub(crate) const fn check_language_ub() -> bool {
#[inline]
fn runtime() -> bool {
// Disable UB checks in Miri.
!cfg!(miri)
}

#[inline]
const fn comptime() -> bool {
// Always disable UB checks.
false
}

// Only used for UB checks so we may const_eval_select.
intrinsics::ub_checks() && const_eval_select((), comptime, runtime)
intrinsics::ub_checks()
&& const_eval_select!(
@capture { } -> bool:
if const {
// Always disable UB checks.
false
} else {
// Disable UB checks in Miri.
!cfg!(miri)
}
)
}

/// Checks whether `ptr` is properly aligned with respect to the given alignment, and
@ -120,19 +118,15 @@ pub(crate) const fn check_language_ub() -> bool {
#[inline]
#[rustc_const_unstable(feature = "const_ub_checks", issue = "none")]
pub(crate) const fn is_aligned_and_not_null(ptr: *const (), align: usize, is_zst: bool) -> bool {
#[inline]
fn runtime(ptr: *const (), align: usize, is_zst: bool) -> bool {
ptr.is_aligned_to(align) && (is_zst || !ptr.is_null())
}

#[inline]
#[rustc_const_unstable(feature = "const_ub_checks", issue = "none")]
const fn comptime(ptr: *const (), _align: usize, is_zst: bool) -> bool {
is_zst || !ptr.is_null()
}

// This is just for safety checks so we can const_eval_select.
const_eval_select((ptr, align, is_zst), comptime, runtime)
const_eval_select!(
@capture { ptr: *const (), align: usize, is_zst: bool } -> bool:
if const #[rustc_const_unstable(feature = "const_ub_checks", issue = "none")] {
is_zst || !ptr.is_null()
} else {
ptr.is_aligned_to(align) && (is_zst || !ptr.is_null())
}
)
}

#[inline]
@ -154,26 +148,23 @@ pub(crate) const fn is_nonoverlapping(
size: usize,
count: usize,
) -> bool {
#[inline]
fn runtime(src: *const (), dst: *const (), size: usize, count: usize) -> bool {
let src_usize = src.addr();
let dst_usize = dst.addr();
let Some(size) = size.checked_mul(count) else {
crate::panicking::panic_nounwind(
"is_nonoverlapping: `size_of::<T>() * count` overflows a usize",
)
};
let diff = src_usize.abs_diff(dst_usize);
// If the absolute distance between the ptrs is at least as big as the size of the buffer,
// they do not overlap.
diff >= size
}

#[inline]
const fn comptime(_: *const (), _: *const (), _: usize, _: usize) -> bool {
true
}

// This is just for safety checks so we can const_eval_select.
const_eval_select((src, dst, size, count), comptime, runtime)
const_eval_select!(
@capture { src: *const (), dst: *const (), size: usize, count: usize } -> bool:
if const {
true
} else {
let src_usize = src.addr();
let dst_usize = dst.addr();
let Some(size) = size.checked_mul(count) else {
crate::panicking::panic_nounwind(
"is_nonoverlapping: `size_of::<T>() * count` overflows a usize",
)
};
let diff = src_usize.abs_diff(dst_usize);
// If the absolute distance between the ptrs is at least as big as the size of the buffer,
// they do not overlap.
diff >= size
}
)
}
@ -3,7 +3,7 @@ error[E0080]: evaluation of constant value failed
|
= note: the evaluated program panicked at 'null-ness of this pointer cannot be determined in const context', $SRC_DIR/core/src/ptr/const_ptr.rs:LL:COL
|
note: inside `std::ptr::const_ptr::<impl *const T>::is_null::const_impl`
note: inside `std::ptr::const_ptr::<impl *const T>::is_null::compiletime`
--> $SRC_DIR/core/src/ptr/const_ptr.rs:LL:COL
note: inside `std::ptr::const_ptr::<impl *const i32>::is_null`
--> $SRC_DIR/core/src/ptr/const_ptr.rs:LL:COL
@ -12,7 +12,7 @@ note: inside `MAYBE_NULL`
|
LL | assert!(!ptr.wrapping_sub(512).is_null());
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
= note: this error originates in the macro `$crate::panic::panic_2021` which comes from the expansion of the macro `panic` (in Nightly builds, run with -Z macro-backtrace for more info)
= note: this error originates in the macro `$crate::panic::panic_2021` which comes from the expansion of the macro `const_eval_select` (in Nightly builds, run with -Z macro-backtrace for more info)

error: aborting due to 1 previous error