Mirror of https://github.com/rust-lang/rust.git (synced 2024-11-25 16:24:46 +00:00)

Auto merge of #129046 - matthiaskrgr:rollup-9x4xgak, r=matthiaskrgr

Rollup of 7 pull requests. Successful merges:

- #128643 (Refactor `powerpc64` call ABI handling)
- #128655 (std: refactor UNIX random data generation)
- #128745 (Remove unused lifetime parameter from spawn_unchecked)
- #128841 (bootstrap: don't use rustflags for `--rustc-args`)
- #128983 (Slightly refactor `TargetSelection` in bootstrap)
- #129026 (CFI: Move CFI ui tests to cfi directory)
- #129040 (Fix blessing of rmake tests)

r? `@ghost`
`@rustbot` modify labels: rollup

This commit is contained in commit 80eb5a8e91.
@@ -552,7 +552,7 @@ fn asm_tests(env: &Env, args: &TestArg) -> Result<(), String> {
            &"--stage",
            &"0",
            &"tests/assembly/asm",
-           &"--rustc-args",
+           &"--compiletest-rustc-args",
            &rustc_args,
        ],
        Some(&rust_dir),

@@ -1020,7 +1020,7 @@ where
            &"--stage",
            &"0",
            &format!("tests/{}", test_type),
-           &"--rustc-args",
+           &"--compiletest-rustc-args",
            &rustc_args,
        ],
        Some(&rust_path),
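The rename only changes how bootstrap is invoked: callers that used to forward extra compiler options through `--rustc-args` now pass `--compiletest-rustc-args`, since the flag only ever applied to compiletest suites. A minimal sketch of such an invocation, assuming a hypothetical rust-lang/rust checkout as the working directory (the paths and the `python3` launcher are illustrative, not taken from the diff):

// Hypothetical illustration of invoking `./x.py test` with the renamed flag.
use std::process::Command;

fn main() -> std::io::Result<()> {
    let status = Command::new("python3")
        .arg("x.py") // assumes the current directory is a rust-lang/rust checkout
        .args(["test", "tests/assembly/asm", "--stage", "0"])
        // Formerly `--rustc-args`; it only affects compiletest-based suites.
        .arg("--compiletest-rustc-args")
        .arg("-Cpanic=abort")
        .status()?;
    assert!(status.success());
    Ok(())
}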
@@ -41,59 +41,12 @@ where
    })
}

-fn classify_ret<'a, Ty, C>(cx: &C, ret: &mut ArgAbi<'a, Ty>, abi: ABI)
+fn classify<'a, Ty, C>(cx: &C, arg: &mut ArgAbi<'a, Ty>, abi: ABI, is_ret: bool)
where
    Ty: TyAbiInterface<'a, C> + Copy,
    C: HasDataLayout,
{
-    if !ret.layout.is_sized() {
-        // Not touching this...
-        return;
-    }
-    if !ret.layout.is_aggregate() {
-        ret.extend_integer_width_to(64);
-        return;
-    }
-
-    // The ELFv1 ABI doesn't return aggregates in registers
-    if abi == ELFv1 {
-        ret.make_indirect();
-        return;
-    }
-
-    if let Some(uniform) = is_homogeneous_aggregate(cx, ret, abi) {
-        ret.cast_to(uniform);
-        return;
-    }
-
-    let size = ret.layout.size;
-    let bits = size.bits();
-    if bits <= 128 {
-        let unit = if cx.data_layout().endian == Endian::Big {
-            Reg { kind: RegKind::Integer, size }
-        } else if bits <= 8 {
-            Reg::i8()
-        } else if bits <= 16 {
-            Reg::i16()
-        } else if bits <= 32 {
-            Reg::i32()
-        } else {
-            Reg::i64()
-        };
-
-        ret.cast_to(Uniform::new(unit, size));
-        return;
-    }
-
-    ret.make_indirect();
-}
-
-fn classify_arg<'a, Ty, C>(cx: &C, arg: &mut ArgAbi<'a, Ty>, abi: ABI)
-where
-    Ty: TyAbiInterface<'a, C> + Copy,
-    C: HasDataLayout,
-{
-    if !arg.layout.is_sized() {
+    if arg.is_ignore() || !arg.layout.is_sized() {
        // Not touching this...
        return;
    }

@@ -102,13 +55,22 @@ where
        return;
    }

+    // The ELFv1 ABI doesn't return aggregates in registers
+    if is_ret && abi == ELFv1 {
+        arg.make_indirect();
+        return;
+    }
+
    if let Some(uniform) = is_homogeneous_aggregate(cx, arg, abi) {
        arg.cast_to(uniform);
        return;
    }

    let size = arg.layout.size;
-    if size.bits() <= 64 {
+    if is_ret && size.bits() > 128 {
+        // Non-homogeneous aggregates larger than two doublewords are returned indirectly.
+        arg.make_indirect();
+    } else if size.bits() <= 64 {
        // Aggregates smaller than a doubleword should appear in
        // the least-significant bits of the parameter doubleword.
        arg.cast_to(Reg { kind: RegKind::Integer, size })

@@ -138,14 +100,9 @@ where
        }
    };

-    if !fn_abi.ret.is_ignore() {
-        classify_ret(cx, &mut fn_abi.ret, abi);
-    }
+    classify(cx, &mut fn_abi.ret, abi, true);

    for arg in fn_abi.args.iter_mut() {
-        if arg.is_ignore() {
-            continue;
-        }
-        classify_arg(cx, arg, abi);
+        classify(cx, arg, abi, false);
    }
}
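The unified `classify` helper keeps the same size-based register selection as before: small non-homogeneous aggregates are returned in integer registers, with big-endian targets using a unit as wide as the whole value so it lands in the most-significant bits. A standalone sketch of that decision logic, using plain types instead of rustc's internal `Reg`/`Uniform` (an assumption for illustration only, not the compiler's API):

// Mirrors the size/endianness rule from the ABI code on ordinary types.
#[derive(Debug, PartialEq)]
enum Unit { I8, I16, I32, I64, Full(u64) }

fn return_unit(bits: u64, big_endian: bool) -> Option<Unit> {
    if bits > 128 {
        return None; // returned indirectly, not in registers
    }
    Some(if big_endian {
        // Big-endian: use a unit covering the whole aggregate so the value
        // sits in the most-significant bits of the register pair.
        Unit::Full(bits)
    } else if bits <= 8 {
        Unit::I8
    } else if bits <= 16 {
        Unit::I16
    } else if bits <= 32 {
        Unit::I32
    } else {
        Unit::I64
    })
}

fn main() {
    assert_eq!(return_unit(24, false), Some(Unit::I32));
    assert_eq!(return_unit(24, true), Some(Unit::Full(24)));
    assert_eq!(return_unit(160, false), None);
}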
@@ -2,7 +2,9 @@ pub fn hashmap_random_keys() -> (u64, u64) {
    const KEY_LEN: usize = core::mem::size_of::<u64>();

    let mut v = [0u8; KEY_LEN * 2];
-    imp::fill_bytes(&mut v);
+    if let Err(err) = read(&mut v) {
+        panic!("failed to retrieve random hash map seed: {err}");
+    }

    let key1 = v[0..KEY_LEN].try_into().unwrap();
    let key2 = v[KEY_LEN..].try_into().unwrap();

@@ -10,27 +12,78 @@ pub fn hashmap_random_keys() -> (u64, u64) {
    (u64::from_ne_bytes(key1), u64::from_ne_bytes(key2))
}

-#[cfg(all(
-    unix,
-    not(target_os = "openbsd"),
-    not(target_os = "netbsd"),
-    not(target_os = "fuchsia"),
-    not(target_os = "redox"),
-    not(target_os = "vxworks"),
-    not(target_os = "emscripten"),
-    not(target_os = "vita"),
-    not(target_vendor = "apple"),
+cfg_if::cfg_if! {
+    if #[cfg(any(
+        target_vendor = "apple",
+        target_os = "openbsd",
+        target_os = "emscripten",
+        target_os = "vita",
+        all(target_os = "netbsd", not(netbsd10)),
+        target_os = "fuchsia",
+        target_os = "vxworks",
+    ))] {
+        // Some systems have a syscall that directly retrieves random data.
+        // If that is guaranteed to be available, use it.
+        use imp::syscall as read;
+    } else {
+        // Otherwise, try the syscall to see if it exists only on some systems
+        // and fall back to reading from the random device otherwise.
+        fn read(bytes: &mut [u8]) -> crate::io::Result<()> {
+            use crate::fs::File;
+            use crate::io::Read;
+            use crate::sync::OnceLock;
+
+            #[cfg(any(
+                target_os = "linux",
+                target_os = "android",
+                target_os = "espidf",
+                target_os = "horizon",
+                target_os = "freebsd",
+                target_os = "dragonfly",
+                target_os = "solaris",
+                target_os = "illumos",
+                netbsd10,
+            ))]
+            if let Some(res) = imp::syscall(bytes) {
+                return res;
+            }
+
+            const PATH: &'static str = if cfg!(target_os = "redox") {
+                "/scheme/rand"
+            } else {
+                "/dev/urandom"
+            };
+
+            static FILE: OnceLock<File> = OnceLock::new();
+
+            FILE.get_or_try_init(|| File::open(PATH))?.read_exact(bytes)
+        }
+    }
+}

+// All these systems a `getrandom` syscall.
+//
+// It is not guaranteed to be available, so return None to fallback to the file
+// implementation.
+#[cfg(any(
+    target_os = "linux",
+    target_os = "android",
+    target_os = "espidf",
+    target_os = "horizon",
+    target_os = "freebsd",
+    target_os = "dragonfly",
+    target_os = "solaris",
+    target_os = "illumos",
+    netbsd10,
+))]
mod imp {
-    use crate::fs::File;
-    use crate::io::Read;
    #[cfg(any(target_os = "linux", target_os = "android"))]
    use crate::sys::weak::syscall;
+    use crate::io::{Error, Result};
+    use crate::sync::atomic::{AtomicBool, Ordering};
+    use crate::sys::os::errno;

    #[cfg(any(target_os = "linux", target_os = "android"))]
    fn getrandom(buf: &mut [u8]) -> libc::ssize_t {
-        use crate::sync::atomic::{AtomicBool, Ordering};
-        use crate::sys::os::errno;
-        use crate::sys::weak::syscall;
-
        // A weak symbol allows interposition, e.g. for perf measurements that want to
        // disable randomness for consistency. Otherwise, we'll try a raw syscall.

@@ -59,6 +112,7 @@ mod imp {
    }

    #[cfg(any(
+        target_os = "dragonfly",
        target_os = "espidf",
        target_os = "horizon",
        target_os = "freebsd",

@@ -70,51 +124,11 @@ mod imp {
        unsafe { libc::getrandom(buf.as_mut_ptr().cast(), buf.len(), 0) }
    }

-    #[cfg(target_os = "dragonfly")]
-    fn getrandom(buf: &mut [u8]) -> libc::ssize_t {
-        extern "C" {
-            fn getrandom(
-                buf: *mut libc::c_void,
-                buflen: libc::size_t,
-                flags: libc::c_uint,
-            ) -> libc::ssize_t;
-        }
-        unsafe { getrandom(buf.as_mut_ptr().cast(), buf.len(), 0) }
-    }
-
-    #[cfg(not(any(
-        target_os = "linux",
-        target_os = "android",
-        target_os = "espidf",
-        target_os = "horizon",
-        target_os = "freebsd",
-        target_os = "dragonfly",
-        target_os = "solaris",
-        target_os = "illumos",
-        netbsd10
-    )))]
-    fn getrandom_fill_bytes(_buf: &mut [u8]) -> bool {
-        false
-    }
-
-    #[cfg(any(
-        target_os = "linux",
-        target_os = "android",
-        target_os = "espidf",
-        target_os = "horizon",
-        target_os = "freebsd",
-        target_os = "dragonfly",
-        target_os = "solaris",
-        target_os = "illumos",
-        netbsd10
-    ))]
-    fn getrandom_fill_bytes(v: &mut [u8]) -> bool {
-        use crate::sync::atomic::{AtomicBool, Ordering};
-        use crate::sys::os::errno;
-
+    pub fn syscall(v: &mut [u8]) -> Option<Result<()>> {
        static GETRANDOM_UNAVAILABLE: AtomicBool = AtomicBool::new(false);

        if GETRANDOM_UNAVAILABLE.load(Ordering::Relaxed) {
-            return false;
+            return None;
        }

        let mut read = 0;

@@ -125,8 +139,7 @@ mod imp {
            if err == libc::EINTR {
                continue;
            } else if err == libc::ENOSYS || err == libc::EPERM {
-                // Fall back to reading /dev/urandom if `getrandom` is not
-                // supported on the current kernel.
+                // `getrandom` is not supported on the current system.
                //
                // Also fall back in case it is disabled by something like
                // seccomp or inside of docker.

@@ -142,123 +155,83 @@ mod imp {
                // https://github.com/moby/moby/issues/42680
                //
                GETRANDOM_UNAVAILABLE.store(true, Ordering::Relaxed);
-                return false;
+                return None;
            } else if err == libc::EAGAIN {
-                return false;
+                // getrandom has failed because it would have blocked as the
+                // non-blocking pool (urandom) has not been initialized in
+                // the kernel yet due to a lack of entropy. Fallback to
+                // reading from `/dev/urandom` which will return potentially
+                // insecure random data to avoid blocking applications which
+                // could depend on this call without ever knowing they do and
+                // don't have a work around.
+                return None;
            } else {
-                panic!("unexpected getrandom error: {err}");
+                return Some(Err(Error::from_raw_os_error(err)));
            }
        } else {
            read += result as usize;
        }
    }
-        true
-    }
-
-    pub fn fill_bytes(v: &mut [u8]) {
-        // getrandom_fill_bytes here can fail if getrandom() returns EAGAIN,
-        // meaning it would have blocked because the non-blocking pool (urandom)
-        // has not initialized in the kernel yet due to a lack of entropy. The
-        // fallback we do here is to avoid blocking applications which could
-        // depend on this call without ever knowing they do and don't have a
-        // work around. The PRNG of /dev/urandom will still be used but over a
-        // possibly predictable entropy pool.
-        if getrandom_fill_bytes(v) {
-            return;
-        }
-
-        // getrandom failed because it is permanently or temporarily (because
-        // of missing entropy) unavailable. Open /dev/urandom, read from it,
-        // and close it again.
-        let mut file = File::open("/dev/urandom").expect("failed to open /dev/urandom");
-        file.read_exact(v).expect("failed to read /dev/urandom")
+        Some(Ok(()))
    }
}

-#[cfg(target_vendor = "apple")]
+#[cfg(any(
+    target_os = "macos", // Supported since macOS 10.12+.
+    target_os = "openbsd",
+    target_os = "emscripten",
+    target_os = "vita",
+))]
mod imp {
-    use libc::{c_int, c_void, size_t};
-
-    use crate::io;
-
-    #[inline(always)]
-    fn random_failure() -> ! {
-        panic!("unexpected random generation error: {}", io::Error::last_os_error());
-    }
-
-    #[cfg(target_os = "macos")]
-    fn getentropy_fill_bytes(v: &mut [u8]) {
-        extern "C" {
-            fn getentropy(bytes: *mut c_void, count: size_t) -> c_int;
-        }
+    use crate::io::{Error, Result};

+    pub fn syscall(v: &mut [u8]) -> Result<()> {
        // getentropy(2) permits a maximum buffer size of 256 bytes
        for s in v.chunks_mut(256) {
-            let ret = unsafe { getentropy(s.as_mut_ptr().cast(), s.len()) };
+            let ret = unsafe { libc::getentropy(s.as_mut_ptr().cast(), s.len()) };
            if ret == -1 {
-                random_failure()
+                return Err(Error::last_os_error());
            }
        }
    }

-    #[cfg(not(target_os = "macos"))]
-    fn ccrandom_fill_bytes(v: &mut [u8]) {
+        Ok(())
    }
}

+// On Apple platforms, `CCRandomGenerateBytes` and `SecRandomCopyBytes` simply
+// call into `CCRandomCopyBytes` with `kCCRandomDefault`. `CCRandomCopyBytes`
+// manages a CSPRNG which is seeded from the kernel's CSPRNG and which runs on
+// its own thread accessed via GCD. This seems needlessly heavyweight for our purposes
+// so we only use it when `getentropy` is blocked, which appears to be the case
+// on all platforms except macOS (see #102643).
+//
+// `CCRandomGenerateBytes` is used instead of `SecRandomCopyBytes` because the former is accessible
+// via `libSystem` (libc) while the other needs to link to `Security.framework`.
+#[cfg(all(target_vendor = "apple", not(target_os = "macos")))]
+mod imp {
+    use libc::size_t;
+
+    use crate::ffi::{c_int, c_void};
+    use crate::io::{Error, Result};
+
+    pub fn syscall(v: &mut [u8]) -> Result<()> {
        extern "C" {
            fn CCRandomGenerateBytes(bytes: *mut c_void, count: size_t) -> c_int;
        }

        let ret = unsafe { CCRandomGenerateBytes(v.as_mut_ptr().cast(), v.len()) };
-        if ret == -1 {
-            random_failure()
-        }
    }

-    pub fn fill_bytes(v: &mut [u8]) {
-        // All supported versions of macOS (10.12+) support getentropy.
-        //
-        // `getentropy` is measurably faster (via Divan) then the other alternatives so its preferred
-        // when usable.
-        #[cfg(target_os = "macos")]
-        getentropy_fill_bytes(v);
-
-        // On Apple platforms, `CCRandomGenerateBytes` and `SecRandomCopyBytes` simply
-        // call into `CCRandomCopyBytes` with `kCCRandomDefault`. `CCRandomCopyBytes`
-        // manages a CSPRNG which is seeded from the kernel's CSPRNG and which runs on
-        // its own thread accessed via GCD. This seems needlessly heavyweight for our purposes
-        // so we only use it on non-Mac OSes where the better entrypoints are blocked.
-        //
-        // `CCRandomGenerateBytes` is used instead of `SecRandomCopyBytes` because the former is accessible
-        // via `libSystem` (libc) while the other needs to link to `Security.framework`.
-        //
-        // Note that while `getentropy` has a available attribute in the macOS headers, the lack
-        // of a header in the iOS (and others) SDK means that its can cause app store rejections.
-        // Just use `CCRandomGenerateBytes` instead.
-        #[cfg(not(target_os = "macos"))]
-        ccrandom_fill_bytes(v);
-    }
-}

-#[cfg(any(target_os = "openbsd", target_os = "emscripten", target_os = "vita"))]
-mod imp {
-    use crate::sys::os::errno;
-
-    pub fn fill_bytes(v: &mut [u8]) {
-        // getentropy(2) permits a maximum buffer size of 256 bytes
-        for s in v.chunks_mut(256) {
-            let ret = unsafe { libc::getentropy(s.as_mut_ptr() as *mut libc::c_void, s.len()) };
-            if ret == -1 {
-                panic!("unexpected getentropy error: {}", errno());
-            }
-        }
+        if ret != -1 { Ok(()) } else { Err(Error::last_os_error()) }
    }
}

// FIXME: once the 10.x release becomes the minimum, this can be dropped for simplification.
#[cfg(all(target_os = "netbsd", not(netbsd10)))]
mod imp {
+    use crate::io::{Error, Result};
    use crate::ptr;

-    pub fn fill_bytes(v: &mut [u8]) {
+    pub fn syscall(v: &mut [u8]) -> Result<()> {
        let mib = [libc::CTL_KERN, libc::KERN_ARND];
        // kern.arandom permits a maximum buffer size of 256 bytes
        for s in v.chunks_mut(256) {

@@ -273,39 +246,30 @@ mod imp {
                    0,
                )
            };
-            if ret == -1 || s_len != s.len() {
-                panic!(
-                    "kern.arandom sysctl failed! (returned {}, s.len() {}, oldlenp {})",
-                    ret,
-                    s.len(),
-                    s_len
-                );
+            if ret == -1 {
+                return Err(Error::last_os_error());
+            } else if s_len != s.len() {
+                // FIXME(joboet): this can't actually happen, can it?
+                panic!("read less bytes than requested from kern.arandom");
            }
        }

+        Ok(())
    }
}

#[cfg(target_os = "fuchsia")]
mod imp {
+    use crate::io::Result;

    #[link(name = "zircon")]
    extern "C" {
        fn zx_cprng_draw(buffer: *mut u8, len: usize);
    }

-    pub fn fill_bytes(v: &mut [u8]) {
-        unsafe { zx_cprng_draw(v.as_mut_ptr(), v.len()) }
-    }
-}
-
-#[cfg(target_os = "redox")]
-mod imp {
-    use crate::fs::File;
-    use crate::io::Read;
-
-    pub fn fill_bytes(v: &mut [u8]) {
-        // Open rand:, read from it, and close it again.
-        let mut file = File::open("rand:").expect("failed to open rand:");
-        file.read_exact(v).expect("failed to read rand:")
+    pub fn syscall(v: &mut [u8]) -> Result<()> {
+        unsafe { zx_cprng_draw(v.as_mut_ptr(), v.len()) };
+        Ok(())
    }
}

@@ -314,25 +278,25 @@ mod imp {
    use core::sync::atomic::AtomicBool;
    use core::sync::atomic::Ordering::Relaxed;

-    use crate::io;
+    use crate::io::{Error, Result};

-    pub fn fill_bytes(v: &mut [u8]) {
+    pub fn syscall(v: &mut [u8]) -> Result<()> {
        static RNG_INIT: AtomicBool = AtomicBool::new(false);
        while !RNG_INIT.load(Relaxed) {
            let ret = unsafe { libc::randSecure() };
            if ret < 0 {
-                panic!("couldn't generate random bytes: {}", io::Error::last_os_error());
+                return Err(Error::last_os_error());
            } else if ret > 0 {
                RNG_INIT.store(true, Relaxed);
                break;
            }

            unsafe { libc::usleep(10) };
        }

        let ret = unsafe {
            libc::randABytes(v.as_mut_ptr() as *mut libc::c_uchar, v.len() as libc::c_int)
        };
-        if ret < 0 {
-            panic!("couldn't generate random bytes: {}", io::Error::last_os_error());
-        }
+        if ret >= 0 { Ok(()) } else { Err(Error::last_os_error()) }
    }
}
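The refactor funnels every platform through one shape: a platform `syscall` hook that reports failure through `Result` (or `Option<Result<_>>` when the syscall may simply be unavailable), plus a single generic fallback that reads from a random device. A self-contained sketch of that pattern, with a stubbed `syscall` standing in for the real platform hooks and `/dev/urandom` as the Unix fallback device (both assumptions for illustration):

use std::fs::File;
use std::io::{Read, Result};

// Stand-in for the platform hook: `None` means "syscall unavailable, use the
// fallback", `Some(Err(_))` is a hard error, `Some(Ok(()))` filled the buffer.
fn syscall(_bytes: &mut [u8]) -> Option<Result<()>> {
    None // pretend getrandom(2) is missing so the fallback path runs
}

fn read_random(bytes: &mut [u8]) -> Result<()> {
    if let Some(res) = syscall(bytes) {
        return res;
    }
    // Fallback: read the requested number of bytes from the random device.
    File::open("/dev/urandom")?.read_exact(bytes)
}

fn main() -> Result<()> {
    let mut seed = [0u8; 16];
    read_random(&mut seed)?;
    println!("{seed:?}");
    Ok(())
}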
@@ -434,25 +434,24 @@ impl Builder {
    ///
    /// [`io::Result`]: crate::io::Result
    #[unstable(feature = "thread_spawn_unchecked", issue = "55132")]
-    pub unsafe fn spawn_unchecked<'a, F, T>(self, f: F) -> io::Result<JoinHandle<T>>
+    pub unsafe fn spawn_unchecked<F, T>(self, f: F) -> io::Result<JoinHandle<T>>
    where
        F: FnOnce() -> T,
-        F: Send + 'a,
-        T: Send + 'a,
+        F: Send,
+        T: Send,
    {
        Ok(JoinHandle(unsafe { self.spawn_unchecked_(f, None) }?))
    }

-    unsafe fn spawn_unchecked_<'a, 'scope, F, T>(
+    unsafe fn spawn_unchecked_<'scope, F, T>(
        self,
        f: F,
        scope_data: Option<Arc<scoped::ScopeData>>,
    ) -> io::Result<JoinInner<'scope, T>>
    where
        F: FnOnce() -> T,
-        F: Send + 'a,
-        T: Send + 'a,
-        'scope: 'a,
+        F: Send,
+        T: Send,
    {
        let Builder { name, stack_size } = self;

@@ -532,7 +531,7 @@ impl Builder {
        // will call `decrement_num_running_threads` and therefore signal that this thread is
        // done.
        drop(their_packet);
-        // Here, the lifetime `'a` and even `'scope` can end. `main` keeps running for a bit
+        // Here, the lifetime `'scope` can end. `main` keeps running for a bit
        // after that before returning itself.
    };
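The removed `'a` was an early-bound lifetime that only appeared in `Send + 'a` bounds, so it never restricted what callers could pass; `spawn_unchecked` is unsafe precisely because the caller, not the type system, guarantees the closure outlives the thread. A toy illustration of why dropping such a parameter is caller-invisible (this is not the std API, just two stand-in functions):

// Both signatures accept exactly the same closures, including ones that
// capture borrows, because the caller can always pick `'a` short enough.
fn with_lifetime<'a, F, T>(f: F) -> T
where
    F: FnOnce() -> T,
    F: Send + 'a,
    T: Send + 'a,
{
    f()
}

fn without_lifetime<F, T>(f: F) -> T
where
    F: FnOnce() -> T,
    F: Send,
    T: Send,
{
    f()
}

fn main() {
    let data = String::from("borrowed");
    let a = with_lifetime(|| data.len());
    let b = without_lifetime(|| data.len());
    assert_eq!(a, b);
}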
@@ -121,7 +121,7 @@ fn clean(build: &Build, all: bool, stage: Option<u32>) {

fn clean_specific_stage(build: &Build, stage: u32) {
    for host in &build.hosts {
-        let entries = match build.out.join(host.triple).read_dir() {
+        let entries = match build.out.join(host).read_dir() {
            Ok(iter) => iter,
            Err(_) => continue,
        };

@@ -148,7 +148,7 @@ fn clean_default(build: &Build) {
    rm_rf(&build.out.join("bootstrap-shims-dump"));
    rm_rf(&build.out.join("rustfmt.stamp"));

-    let mut hosts: Vec<_> = build.hosts.iter().map(|t| build.out.join(t.triple)).collect();
+    let mut hosts: Vec<_> = build.hosts.iter().map(|t| build.out.join(t)).collect();
    // After cross-compilation, artifacts of the host architecture (which may differ from build.host)
    // might not get removed.
    // Adding its path (linked one for easier accessibility) will solve this problem.
@@ -246,7 +246,7 @@ impl Step for Std {
            .rustc_snapshot_sysroot()
            .join("lib")
            .join("rustlib")
-            .join(compiler.host.triple)
+            .join(compiler.host)
            .join("bin");
        if src_sysroot_bin.exists() {
            let target_sysroot_bin =

@@ -432,7 +432,7 @@ fn copy_self_contained_objects(
                DependencyType::TargetSelfContained,
            );
        }
-    } else if target.ends_with("windows-gnu") {
+    } else if target.is_windows_gnu() {
        for obj in ["crt2.o", "dllcrt2.o"].iter() {
            let src = compiler_file(builder, &builder.cc(target), target, CLang::C, obj);
            let target = libdir_self_contained.join(obj);

@@ -651,8 +651,8 @@ impl Step for StdLink {
            compiler: self.compiler,
            force_recompile: self.force_recompile,
        });
-        let libdir = sysroot.join(lib).join("rustlib").join(target.triple).join("lib");
-        let hostdir = sysroot.join(lib).join("rustlib").join(compiler.host.triple).join("lib");
+        let libdir = sysroot.join(lib).join("rustlib").join(target).join("lib");
+        let hostdir = sysroot.join(lib).join("rustlib").join(compiler.host).join("lib");
        (libdir, hostdir)
    } else {
        let libdir = builder.sysroot_libdir(target_compiler, target);

@@ -670,12 +670,12 @@ impl Step for StdLink {
            .build
            .config
            .initial_rustc
-            .starts_with(builder.out.join(compiler.host.triple).join("stage0/bin"))
+            .starts_with(builder.out.join(compiler.host).join("stage0/bin"))
        {
            // Copy bin files from stage0/bin to stage0-sysroot/bin
-            let sysroot = builder.out.join(compiler.host.triple).join("stage0-sysroot");
+            let sysroot = builder.out.join(compiler.host).join("stage0-sysroot");

-            let host = compiler.host.triple;
+            let host = compiler.host;
            let stage0_bin_dir = builder.out.join(host).join("stage0/bin");
            let sysroot_bin_dir = sysroot.join("bin");
            t!(fs::create_dir_all(&sysroot_bin_dir));

@@ -793,7 +793,7 @@ impl Step for StartupObjects {
    fn run(self, builder: &Builder<'_>) -> Vec<(PathBuf, DependencyType)> {
        let for_compiler = self.compiler;
        let target = self.target;
-        if !target.ends_with("windows-gnu") {
+        if !target.is_windows_gnu() {
            return vec![];
        }

@@ -1554,7 +1554,7 @@ impl Step for Sysroot {
    /// For all other stages, it's the same stage directory that the compiler lives in.
    fn run(self, builder: &Builder<'_>) -> PathBuf {
        let compiler = self.compiler;
-        let host_dir = builder.out.join(compiler.host.triple);
+        let host_dir = builder.out.join(compiler.host);

        let sysroot_dir = |stage| {
            if stage == 0 {
@@ -275,12 +275,8 @@ fn make_win_dist(
    }

    //Copy platform tools to platform-specific bin directory
-    let target_bin_dir = plat_root
-        .join("lib")
-        .join("rustlib")
-        .join(target.triple)
-        .join("bin")
-        .join("self-contained");
+    let target_bin_dir =
+        plat_root.join("lib").join("rustlib").join(target).join("bin").join("self-contained");
    fs::create_dir_all(&target_bin_dir).expect("creating target_bin_dir failed");
    for src in target_tools {
        builder.copy_link_to_folder(&src, &target_bin_dir);

@@ -295,12 +291,8 @@ fn make_win_dist(
    );

    //Copy platform libs to platform-specific lib directory
-    let target_lib_dir = plat_root
-        .join("lib")
-        .join("rustlib")
-        .join(target.triple)
-        .join("lib")
-        .join("self-contained");
+    let target_lib_dir =
+        plat_root.join("lib").join("rustlib").join(target).join("lib").join("self-contained");
    fs::create_dir_all(&target_lib_dir).expect("creating target_lib_dir failed");
    for src in target_libs {
        builder.copy_link_to_folder(&src, &target_lib_dir);

@@ -450,7 +442,7 @@ impl Step for Rustc {
            // component for now.
            maybe_install_llvm_runtime(builder, host, image);

-            let dst_dir = image.join("lib/rustlib").join(&*host.triple).join("bin");
+            let dst_dir = image.join("lib/rustlib").join(host).join("bin");
            t!(fs::create_dir_all(&dst_dir));

            // Copy over lld if it's there

@@ -607,7 +599,7 @@ fn verify_uefi_rlib_format(builder: &Builder<'_>, target: TargetSelection, stamp

/// Copy stamped files into an image's `target/lib` directory.
fn copy_target_libs(builder: &Builder<'_>, target: TargetSelection, image: &Path, stamp: &Path) {
-    let dst = image.join("lib/rustlib").join(target.triple).join("lib");
+    let dst = image.join("lib/rustlib").join(target).join("lib");
    let self_contained_dst = dst.join("self-contained");
    t!(fs::create_dir_all(&dst));
    t!(fs::create_dir_all(&self_contained_dst));

@@ -769,7 +761,7 @@ impl Step for Analysis {

        let src = builder
            .stage_out(compiler, Mode::Std)
-            .join(target.triple)
+            .join(target)
            .join(builder.cargo_dir())
            .join("deps")
            .join("save-analysis");

@@ -1509,7 +1501,7 @@ impl Step for Extended {
        tarballs.push(builder.ensure(Rustc { compiler: builder.compiler(stage, target) }));
        tarballs.push(builder.ensure(Std { compiler, target }).expect("missing std"));

-        if target.ends_with("windows-gnu") {
+        if target.is_windows_gnu() {
            tarballs.push(builder.ensure(Mingw { host: target }).expect("missing mingw"));
        }

@@ -1683,7 +1675,7 @@ impl Step for Extended {
                prepare(tool);
            }
        }
-        if target.ends_with("windows-gnu") {
+        if target.is_windows_gnu() {
            prepare("rust-mingw");
        }

@@ -1830,7 +1822,7 @@ impl Step for Extended {
                .arg("-t")
                .arg(etc.join("msi/remove-duplicates.xsl"))
                .run(builder);
-            if target.ends_with("windows-gnu") {
+            if target.is_windows_gnu() {
                command(&heat)
                    .current_dir(&exe)
                    .arg("dir")

@@ -1876,7 +1868,7 @@ impl Step for Extended {
            if built_tools.contains("miri") {
                cmd.arg("-dMiriDir=miri");
            }
-            if target.ends_with("windows-gnu") {
+            if target.is_windows_gnu() {
                cmd.arg("-dGccDir=rust-mingw");
            }
            cmd.run(builder);

@@ -1901,7 +1893,7 @@ impl Step for Extended {
            }
            candle("AnalysisGroup.wxs".as_ref());

-            if target.ends_with("windows-gnu") {
+            if target.is_windows_gnu() {
                candle("GccGroup.wxs".as_ref());
            }

@@ -1941,7 +1933,7 @@ impl Step for Extended {
                cmd.arg("DocsGroup.wixobj");
            }

-            if target.ends_with("windows-gnu") {
+            if target.is_windows_gnu() {
                cmd.arg("GccGroup.wixobj");
            }
            // ICE57 wrongly complains about the shortcuts

@@ -1973,7 +1965,7 @@ fn add_env(builder: &Builder<'_>, cmd: &mut BootstrapCommand, target: TargetSele

    if target.contains("windows-gnullvm") {
        cmd.env("CFG_MINGW", "1").env("CFG_ABI", "LLVM");
-    } else if target.contains("windows-gnu") {
+    } else if target.is_windows_gnu() {
        cmd.env("CFG_MINGW", "1").env("CFG_ABI", "GNU");
    } else {
        cmd.env("CFG_MINGW", "0").env("CFG_ABI", "MSVC");

@@ -2087,7 +2079,7 @@ fn maybe_install_llvm(

/// Maybe add libLLVM.so to the target lib-dir for linking.
pub fn maybe_install_llvm_target(builder: &Builder<'_>, target: TargetSelection, sysroot: &Path) {
-    let dst_libdir = sysroot.join("lib/rustlib").join(&*target.triple).join("lib");
+    let dst_libdir = sysroot.join("lib/rustlib").join(target).join("lib");
    // We do not need to copy LLVM files into the sysroot if it is not
    // dynamically linked; it is already included into librustc_llvm
    // statically.
@@ -699,13 +699,12 @@ fn doc_std(
    let compiler = builder.compiler(stage, builder.config.build);

    let target_doc_dir_name = if format == DocumentationFormat::Json { "json-doc" } else { "doc" };
-    let target_dir =
-        builder.stage_out(compiler, Mode::Std).join(target.triple).join(target_doc_dir_name);
+    let target_dir = builder.stage_out(compiler, Mode::Std).join(target).join(target_doc_dir_name);

    // This is directory where the compiler will place the output of the command.
    // We will then copy the files from this directory into the final `out` directory, the specified
    // as a function parameter.
-    let out_dir = target_dir.join(target.triple).join("doc");
+    let out_dir = target_dir.join(target).join("doc");

    let mut cargo =
        builder::Cargo::new(builder, compiler, Mode::Std, SourceType::InTree, target, Kind::Doc);

@@ -846,7 +845,7 @@ impl Step for Rustc {

        let mut to_open = None;

-        let out_dir = builder.stage_out(compiler, Mode::Rustc).join(target.triple).join("doc");
+        let out_dir = builder.stage_out(compiler, Mode::Rustc).join(target).join("doc");
        for krate in &*self.crates {
            // Create all crate output directories first to make sure rustdoc uses
            // relative links.

@@ -992,7 +991,7 @@ macro_rules! tool_doc {
            // see https://github.com/rust-lang/rust/pull/122066#issuecomment-1983049222
            // cargo.rustdocflag("--generate-link-to-definition");

-            let out_dir = builder.stage_out(compiler, Mode::ToolRustc).join(target.triple).join("doc");
+            let out_dir = builder.stage_out(compiler, Mode::ToolRustc).join(target).join("doc");
            $(for krate in $crates {
                let dir_name = krate.replace("-", "_");
                t!(fs::create_dir_all(out_dir.join(&*dir_name)));
@@ -149,7 +149,7 @@ You can skip linkcheck with --skip src/tools/linkchecker"
    let _guard =
        builder.msg(Kind::Test, compiler.stage, "Linkcheck", bootstrap_host, bootstrap_host);
    let _time = helpers::timeit(builder);
-    linkchecker.delay_failure().arg(builder.out.join(host.triple).join("doc")).run(builder);
+    linkchecker.delay_failure().arg(builder.out.join(host).join("doc")).run(builder);
}

fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {

@@ -435,7 +435,7 @@ impl Miri {
    compiler: Compiler,
    target: TargetSelection,
) -> PathBuf {
-    let miri_sysroot = builder.out.join(compiler.host.triple).join("miri-sysroot");
+    let miri_sysroot = builder.out.join(compiler.host).join("miri-sysroot");
    let mut cargo = builder::Cargo::new(
        builder,
        compiler,

@@ -1115,7 +1115,7 @@ HELP: to skip test's attempt to check tidiness, pass `--skip src/tools/tidy` to
}

fn testdir(builder: &Builder<'_>, host: TargetSelection) -> PathBuf {
-    builder.out.join(host.triple).join("test")
+    builder.out.join(host).join("test")
}

macro_rules! default_test {

@@ -1817,7 +1817,7 @@ NOTE: if you're sure you want to do this, please open an issue as to why. In the

    let mut flags = if is_rustdoc { Vec::new() } else { vec!["-Crpath".to_string()] };
    flags.push(format!("-Cdebuginfo={}", builder.config.rust_debuginfo_level_tests));
-    flags.extend(builder.config.cmd.rustc_args().iter().map(|s| s.to_string()));
+    flags.extend(builder.config.cmd.compiletest_rustc_args().iter().map(|s| s.to_string()));

    if suite != "mir-opt" {
        if let Some(linker) = builder.linker(target) {

@@ -2685,7 +2685,7 @@ impl Step for Crate {
        if builder.download_rustc() && compiler.stage > 0 {
            let sysroot = builder
                .out
-                .join(compiler.host.triple)
+                .join(compiler.host)
                .join(format!("stage{}-test-sysroot", compiler.stage));
            cargo.env("RUSTC_SYSROOT", sysroot);
        }
@@ -1171,7 +1171,7 @@ impl<'a> Builder<'a> {
            .sysroot(self.compiler)
            .join(lib)
            .join("rustlib")
-            .join(self.target.triple)
+            .join(self.target)
            .join("lib");
        // Avoid deleting the rustlib/ directory we just copied
        // (in `impl Step for Sysroot`).

@@ -1254,7 +1254,7 @@ impl<'a> Builder<'a> {

        // Ensure that the downloaded LLVM libraries can be found.
        if self.config.llvm_from_ci {
-            let ci_llvm_lib = self.out.join(&*compiler.host.triple).join("ci-llvm").join("lib");
+            let ci_llvm_lib = self.out.join(compiler.host).join("ci-llvm").join("lib");
            dylib_dirs.push(ci_llvm_lib);
        }

@@ -1504,9 +1504,9 @@ impl<'a> Builder<'a> {
            Mode::Rustc | Mode::ToolRustc => self.compiler_doc_out(target),
            Mode::Std => {
                if self.config.cmd.json() {
-                    out_dir.join(target.triple).join("json-doc")
+                    out_dir.join(target).join("json-doc")
                } else {
-                    out_dir.join(target.triple).join("doc")
+                    out_dir.join(target).join("doc")
                }
            }
            _ => panic!("doc mode {mode:?} not expected"),

@@ -2226,11 +2226,6 @@ impl<'a> Builder<'a> {
            rustdocflags.arg("--cfg=parallel_compiler");
        }

-        // Pass the value of `--rustc-args` from test command. If it's not a test command, this won't set anything.
-        self.config.cmd.rustc_args().iter().for_each(|v| {
-            rustflags.arg(v);
-        });
-
        Cargo {
            command: cargo,
            compiler,
@@ -633,7 +633,7 @@ mod dist {
        config.paths = vec!["library/std".into()];
        config.cmd = Subcommand::Test {
            test_args: vec![],
-            rustc_args: vec![],
+            compiletest_rustc_args: vec![],
            no_fail_fast: false,
            no_doc: true,
            doc: false,

@@ -704,7 +704,7 @@ mod dist {
        let mut config = configure(&["A-A"], &["A-A"]);
        config.cmd = Subcommand::Test {
            test_args: vec![],
-            rustc_args: vec![],
+            compiletest_rustc_args: vec![],
            no_fail_fast: false,
            doc: true,
            no_doc: false,
@@ -514,6 +514,10 @@ impl TargetSelection {
        self.contains("windows")
    }

+    pub fn is_windows_gnu(&self) -> bool {
+        self.ends_with("windows-gnu")
+    }
+
    /// Path to the file defining the custom target, if any.
    pub fn filepath(&self) -> Option<&Path> {
        self.file.as_ref().map(Path::new)

@@ -542,6 +546,14 @@ impl PartialEq<&str> for TargetSelection {
    }
}

+// Targets are often used as directory names throughout bootstrap.
+// This impl makes it more ergonomics to use them as such.
+impl AsRef<Path> for TargetSelection {
+    fn as_ref(&self) -> &Path {
+        self.triple.as_ref()
+    }
+}
+
/// Per-target configuration stored in the global configuration structure.
#[derive(Debug, Default, Clone, PartialEq, Eq)]
pub struct Target {

@@ -1469,7 +1481,7 @@ impl Config {
            config.download_beta_toolchain();
            config
                .out
-                .join(config.build.triple)
+                .join(config.build)
                .join("stage0")
                .join("bin")
                .join(exe("rustc", config.build))

@@ -1484,7 +1496,7 @@ impl Config {
            config.download_beta_toolchain();
            config
                .out
-                .join(config.build.triple)
+                .join(config.build)
                .join("stage0")
                .join("bin")
                .join(exe("cargo", config.build))

@@ -2277,13 +2289,13 @@ impl Config {
    /// The absolute path to the downloaded LLVM artifacts.
    pub(crate) fn ci_llvm_root(&self) -> PathBuf {
        assert!(self.llvm_from_ci);
-        self.out.join(&*self.build.triple).join("ci-llvm")
+        self.out.join(self.build).join("ci-llvm")
    }

    /// Directory where the extracted `rustc-dev` component is stored.
    pub(crate) fn ci_rustc_dir(&self) -> PathBuf {
        assert!(self.download_rustc());
-        self.out.join(self.build.triple).join("ci-rustc")
+        self.out.join(self.build).join("ci-rustc")
    }

    /// Determine whether llvm should be linked dynamically.
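The `AsRef<Path>` impl is what lets all the `.join(target.triple)` call sites above shrink to `.join(target)`: `Path::join` accepts any `impl AsRef<Path>`, so the wrapper type can be used directly as a path segment. A standalone sketch of the same pattern, with an illustrative `Triple` type standing in for bootstrap's `TargetSelection`:

use std::path::{Path, PathBuf};

struct Triple {
    triple: String,
}

// Delegating to the inner string makes the wrapper usable anywhere a path
// segment is expected.
impl AsRef<Path> for Triple {
    fn as_ref(&self) -> &Path {
        self.triple.as_ref()
    }
}

fn main() {
    let out = PathBuf::from("build");
    let host = Triple { triple: "x86_64-unknown-linux-gnu".to_string() };
    // `join` takes `impl AsRef<Path>`, so no explicit `.triple` access is needed.
    assert_eq!(out.join(&host), Path::new("build").join("x86_64-unknown-linux-gnu"));
}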
@@ -357,9 +357,9 @@ pub enum Subcommand {
        /// extra arguments to be passed for the test tool being used
        /// (e.g. libtest, compiletest or rustdoc)
        test_args: Vec<String>,
-        /// extra options to pass the compiler when running tests
+        /// extra options to pass the compiler when running compiletest tests
        #[arg(long, value_name = "ARGS", allow_hyphen_values(true))]
-        rustc_args: Vec<String>,
+        compiletest_rustc_args: Vec<String>,
        #[arg(long)]
        /// do not run doc tests
        no_doc: bool,

@@ -402,9 +402,6 @@ pub enum Subcommand {
        /// extra arguments to be passed for the test tool being used
        /// (e.g. libtest, compiletest or rustdoc)
        test_args: Vec<String>,
-        /// extra options to pass the compiler when running tests
-        #[arg(long, value_name = "ARGS", allow_hyphen_values(true))]
-        rustc_args: Vec<String>,
        #[arg(long)]
        /// do not run doc tests
        no_doc: bool,

@@ -509,10 +506,10 @@ impl Subcommand {
        }
    }

-    pub fn rustc_args(&self) -> Vec<&str> {
+    pub fn compiletest_rustc_args(&self) -> Vec<&str> {
        match *self {
-            Subcommand::Test { ref rustc_args, .. } | Subcommand::Miri { ref rustc_args, .. } => {
-                rustc_args.iter().flat_map(|s| s.split_whitespace()).collect()
+            Subcommand::Test { ref compiletest_rustc_args, .. } => {
+                compiletest_rustc_args.iter().flat_map(|s| s.split_whitespace()).collect()
            }
            _ => vec![],
        }
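The accessor splits each `--compiletest-rustc-args` value on whitespace, so several compiler options can be passed in one quoted argument and still come out as individual flags. A minimal sketch of that flattening on plain `Vec<String>` data (the `flatten_args` helper name is illustrative, not the real `Subcommand` API):

// Same `flat_map`/`split_whitespace` idiom as the accessor above.
fn flatten_args(raw: &[String]) -> Vec<&str> {
    raw.iter().flat_map(|s| s.split_whitespace()).collect()
}

fn main() {
    let raw = vec!["-Cdebuginfo=0 -Copt-level=3".to_string(), "--edition=2021".to_string()];
    assert_eq!(flatten_args(&raw), ["-Cdebuginfo=0", "-Copt-level=3", "--edition=2021"]);
}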
@@ -379,7 +379,7 @@ impl Config {
        let version = &self.stage0_metadata.compiler.version;
        let host = self.build;

-        let bin_root = self.out.join(host.triple).join("stage0");
+        let bin_root = self.out.join(host).join("stage0");
        let clippy_stamp = bin_root.join(".clippy-stamp");
        let cargo_clippy = bin_root.join("bin").join(exe("cargo-clippy", host));
        if cargo_clippy.exists() && !program_out_of_date(&clippy_stamp, date) {

@@ -412,7 +412,7 @@ impl Config {
        let channel = format!("{version}-{date}");

        let host = self.build;
-        let bin_root = self.out.join(host.triple).join("rustfmt");
+        let bin_root = self.out.join(host).join("rustfmt");
        let rustfmt_path = bin_root.join("bin").join(exe("rustfmt", host));
        let rustfmt_stamp = bin_root.join(".rustfmt-stamp");
        if rustfmt_path.exists() && !program_out_of_date(&rustfmt_stamp, &channel) {

@@ -519,7 +519,7 @@ impl Config {
        extra_components: &[&str],
        download_component: fn(&Config, String, &str, &str),
    ) {
-        let host = self.build.triple;
+        let host = self.build;
        let bin_root = self.out.join(host).join(sysroot);
        let rustc_stamp = bin_root.join(".rustc-stamp");

@@ -592,7 +592,7 @@ impl Config {
            t!(fs::create_dir_all(&cache_dir));
        }

-        let bin_root = self.out.join(self.build.triple).join(destination);
+        let bin_root = self.out.join(self.build).join(destination);
        let tarball = cache_dir.join(&filename);
        let (base_url, url, should_verify) = match mode {
            DownloadSource::CI => {
@@ -452,7 +452,7 @@ impl Build {
        }

        // Make a symbolic link so we can use a consistent directory in the documentation.
-        let build_triple = build.out.join(build.build.triple);
+        let build_triple = build.out.join(build.build);
        t!(fs::create_dir_all(&build_triple));
        let host = build.out.join("host");
        if host.is_symlink() {

@@ -807,10 +807,7 @@ impl Build {
    }

    fn tools_dir(&self, compiler: Compiler) -> PathBuf {
-        let out = self
-            .out
-            .join(&*compiler.host.triple)
-            .join(format!("stage{}-tools-bin", compiler.stage));
+        let out = self.out.join(compiler.host).join(format!("stage{}-tools-bin", compiler.stage));
        t!(fs::create_dir_all(&out));
        out
    }

@@ -827,14 +824,14 @@ impl Build {
            Mode::ToolBootstrap => "-bootstrap-tools",
            Mode::ToolStd | Mode::ToolRustc => "-tools",
        };
-        self.out.join(&*compiler.host.triple).join(format!("stage{}{}", compiler.stage, suffix))
+        self.out.join(compiler.host).join(format!("stage{}{}", compiler.stage, suffix))
    }

    /// Returns the root output directory for all Cargo output in a given stage,
    /// running a particular compiler, whether or not we're building the
    /// standard library, and targeting the specified architecture.
    fn cargo_out(&self, compiler: Compiler, mode: Mode, target: TargetSelection) -> PathBuf {
-        self.stage_out(compiler, mode).join(&*target.triple).join(self.cargo_dir())
+        self.stage_out(compiler, mode).join(target).join(self.cargo_dir())
    }

    /// Root output directory of LLVM for `target`

@@ -845,36 +842,36 @@ impl Build {
        if self.config.llvm_from_ci && self.config.build == target {
            self.config.ci_llvm_root()
        } else {
-            self.out.join(&*target.triple).join("llvm")
+            self.out.join(target).join("llvm")
        }
    }

    fn lld_out(&self, target: TargetSelection) -> PathBuf {
-        self.out.join(&*target.triple).join("lld")
+        self.out.join(target).join("lld")
    }

    /// Output directory for all documentation for a target
    fn doc_out(&self, target: TargetSelection) -> PathBuf {
-        self.out.join(&*target.triple).join("doc")
+        self.out.join(target).join("doc")
    }

    /// Output directory for all JSON-formatted documentation for a target
    fn json_doc_out(&self, target: TargetSelection) -> PathBuf {
-        self.out.join(&*target.triple).join("json-doc")
+        self.out.join(target).join("json-doc")
    }

    fn test_out(&self, target: TargetSelection) -> PathBuf {
-        self.out.join(&*target.triple).join("test")
+        self.out.join(target).join("test")
    }

    /// Output directory for all documentation for a target
    fn compiler_doc_out(&self, target: TargetSelection) -> PathBuf {
-        self.out.join(&*target.triple).join("compiler-doc")
+        self.out.join(target).join("compiler-doc")
    }

    /// Output directory for some generated md crate documentation for a target (temporary)
    fn md_doc_out(&self, target: TargetSelection) -> PathBuf {
-        self.out.join(&*target.triple).join("md-doc")
+        self.out.join(target).join("md-doc")
    }

    /// Returns `true` if this is an external version of LLVM not managed by bootstrap.

@@ -954,7 +951,7 @@ impl Build {

    /// Directory for libraries built from C/C++ code and shared between stages.
    fn native_dir(&self, target: TargetSelection) -> PathBuf {
-        self.out.join(&*target.triple).join("native")
+        self.out.join(target).join("native")
    }

    /// Root output directory for rust_test_helpers library compiled for
@@ -225,4 +225,9 @@ pub const CONFIG_CHANGE_HISTORY: &[ChangeInfo] = &[
        severity: ChangeSeverity::Info,
        summary: "New option `llvm.libzstd` to control whether llvm is built with zstd support.",
    },
+    ChangeInfo {
+        change_id: 128841,
+        severity: ChangeSeverity::Warning,
+        summary: "./x test --rustc-args was renamed to --compiletest-rustc-args as it only applies there. ./x miri --rustc-args was also removed.",
+    },
];
@@ -18,9 +18,9 @@ if [[ -z "${PR_CI_JOB}" ]]; then
  # compiler, and is sensitive to the addition of new flags.
  ../x.py --stage 1 test tests/ui-fulldeps

-  # The tests are run a second time with the size optimizations enabled.
-  ../x.py --stage 1 test library/std library/alloc library/core \
-    --rustc-args "--cfg feature=\"optimize_for_size\""
+  # Rebuild the stdlib with the size optimizations enabled and run tests again.
+  RUSTFLAGS_NOT_BOOTSTRAP="--cfg feature=\"optimize_for_size\"" ../x.py --stage 1 test \
+    library/std library/alloc library/core
fi

# NOTE: intentionally uses all of `x.py`, `x`, and `x.ps1` to make sure they all work on Linux.
@@ -266,7 +266,7 @@ complete -c x.py -n "__fish_seen_subcommand_from doc" -l enable-bolt-settings -d
complete -c x.py -n "__fish_seen_subcommand_from doc" -l skip-stage0-validation -d 'Skip stage0 compiler validation'
complete -c x.py -n "__fish_seen_subcommand_from doc" -s h -l help -d 'Print help (see more with \'--help\')'
complete -c x.py -n "__fish_seen_subcommand_from test" -l test-args -d 'extra arguments to be passed for the test tool being used (e.g. libtest, compiletest or rustdoc)' -r
-complete -c x.py -n "__fish_seen_subcommand_from test" -l rustc-args -d 'extra options to pass the compiler when running tests' -r
+complete -c x.py -n "__fish_seen_subcommand_from test" -l compiletest-rustc-args -d 'extra options to pass the compiler when running compiletest tests' -r
complete -c x.py -n "__fish_seen_subcommand_from test" -l extra-checks -d 'comma-separated list of other files types to check (accepts py, py:lint, py:fmt, shell)' -r
complete -c x.py -n "__fish_seen_subcommand_from test" -l compare-mode -d 'mode describing what file the actual ui output will be compared to' -r
complete -c x.py -n "__fish_seen_subcommand_from test" -l pass -d 'force {check,build,run}-pass tests to this mode' -r

@@ -313,7 +313,6 @@ complete -c x.py -n "__fish_seen_subcommand_from test" -l enable-bolt-settings -
complete -c x.py -n "__fish_seen_subcommand_from test" -l skip-stage0-validation -d 'Skip stage0 compiler validation'
complete -c x.py -n "__fish_seen_subcommand_from test" -s h -l help -d 'Print help (see more with \'--help\')'
complete -c x.py -n "__fish_seen_subcommand_from miri" -l test-args -d 'extra arguments to be passed for the test tool being used (e.g. libtest, compiletest or rustdoc)' -r
-complete -c x.py -n "__fish_seen_subcommand_from miri" -l rustc-args -d 'extra options to pass the compiler when running tests' -r
complete -c x.py -n "__fish_seen_subcommand_from miri" -l config -d 'TOML configuration file for build' -r -F
complete -c x.py -n "__fish_seen_subcommand_from miri" -l build-dir -d 'Build directory, overrides `build.build-dir` in `config.toml`' -r -f -a "(__fish_complete_directories)"
complete -c x.py -n "__fish_seen_subcommand_from miri" -l build -d 'build target of the stage0 compiler' -r -f
@@ -338,7 +338,7 @@ Register-ArgumentCompleter -Native -CommandName 'x.py' -ScriptBlock {
            }
            'x.py;test' {
                [CompletionResult]::new('--test-args', 'test-args', [CompletionResultType]::ParameterName, 'extra arguments to be passed for the test tool being used (e.g. libtest, compiletest or rustdoc)')
-                [CompletionResult]::new('--rustc-args', 'rustc-args', [CompletionResultType]::ParameterName, 'extra options to pass the compiler when running tests')
+                [CompletionResult]::new('--compiletest-rustc-args', 'compiletest-rustc-args', [CompletionResultType]::ParameterName, 'extra options to pass the compiler when running compiletest tests')
                [CompletionResult]::new('--extra-checks', 'extra-checks', [CompletionResultType]::ParameterName, 'comma-separated list of other files types to check (accepts py, py:lint, py:fmt, shell)')
                [CompletionResult]::new('--compare-mode', 'compare-mode', [CompletionResultType]::ParameterName, 'mode describing what file the actual ui output will be compared to')
                [CompletionResult]::new('--pass', 'pass', [CompletionResultType]::ParameterName, 'force {check,build,run}-pass tests to this mode')

@@ -392,7 +392,6 @@ Register-ArgumentCompleter -Native -CommandName 'x.py' -ScriptBlock {
            }
            'x.py;miri' {
                [CompletionResult]::new('--test-args', 'test-args', [CompletionResultType]::ParameterName, 'extra arguments to be passed for the test tool being used (e.g. libtest, compiletest or rustdoc)')
-                [CompletionResult]::new('--rustc-args', 'rustc-args', [CompletionResultType]::ParameterName, 'extra options to pass the compiler when running tests')
                [CompletionResult]::new('--config', 'config', [CompletionResultType]::ParameterName, 'TOML configuration file for build')
                [CompletionResult]::new('--build-dir', 'build-dir', [CompletionResultType]::ParameterName, 'Build directory, overrides `build.build-dir` in `config.toml`')
                [CompletionResult]::new('--build', 'build', [CompletionResultType]::ParameterName, 'build target of the stage0 compiler')
@@ -1300,7 +1300,7 @@ _x.py() {
            return 0
            ;;
        x.py__miri)
-            opts="-v -i -j -h --no-fail-fast --test-args --rustc-args --no-doc --doc --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --dump-bootstrap-shims --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..."
+            opts="-v -i -j -h --no-fail-fast --test-args --no-doc --doc --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --dump-bootstrap-shims --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..."
            if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then
                COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") )
                return 0

@@ -1310,10 +1310,6 @@ _x.py() {
                    COMPREPLY=($(compgen -f "${cur}"))
                    return 0
                    ;;
-                --rustc-args)
-                    COMPREPLY=($(compgen -f "${cur}"))
-                    return 0
-                    ;;
                --config)
                    COMPREPLY=($(compgen -f "${cur}"))
                    return 0

@@ -1862,7 +1858,7 @@ _x.py() {
            return 0
            ;;
        x.py__test)
-            opts="-v -i -j -h --no-fail-fast --test-args --rustc-args --no-doc --doc --bless --extra-checks --force-rerun --only-modified --compare-mode --pass --run --rustfix-coverage --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --dump-bootstrap-shims --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..."
+            opts="-v -i -j -h --no-fail-fast --test-args --compiletest-rustc-args --no-doc --doc --bless --extra-checks --force-rerun --only-modified --compare-mode --pass --run --rustfix-coverage --verbose --incremental --config --build-dir --build --host --target --exclude --skip --include-default-paths --rustc-error-format --on-fail --dry-run --dump-bootstrap-shims --stage --keep-stage --keep-stage-std --src --jobs --warnings --error-format --json-output --color --bypass-bootstrap-lock --llvm-skip-rebuild --rust-profile-generate --rust-profile-use --llvm-profile-use --llvm-profile-generate --enable-bolt-settings --skip-stage0-validation --reproducible-artifact --set --help [PATHS]... [ARGS]..."
            if [[ ${cur} == -* || ${COMP_CWORD} -eq 2 ]] ; then
                COMPREPLY=( $(compgen -W "${opts}" -- "${cur}") )
                return 0

@@ -1872,7 +1868,7 @@ _x.py() {
                    COMPREPLY=($(compgen -f "${cur}"))
                    return 0
                    ;;
-                --rustc-args)
+                --compiletest-rustc-args)
                    COMPREPLY=($(compgen -f "${cur}"))
                    return 0
                    ;;
@@ -337,7 +337,7 @@ _arguments "${_arguments_options[@]}" \
(test)
_arguments "${_arguments_options[@]}" \
'*--test-args=[extra arguments to be passed for the test tool being used (e.g. libtest, compiletest or rustdoc)]:ARGS: ' \
-'*--rustc-args=[extra options to pass the compiler when running tests]:ARGS: ' \
+'*--compiletest-rustc-args=[extra options to pass the compiler when running compiletest tests]:ARGS: ' \
'--extra-checks=[comma-separated list of other files types to check (accepts py, py\:lint, py\:fmt, shell)]:EXTRA_CHECKS: ' \
'--compare-mode=[mode describing what file the actual ui output will be compared to]:COMPARE MODE: ' \
'--pass=[force {check,build,run}-pass tests to this mode]:check | build | run: ' \

@@ -393,7 +393,6 @@ _arguments "${_arguments_options[@]}" \
(miri)
_arguments "${_arguments_options[@]}" \
'*--test-args=[extra arguments to be passed for the test tool being used (e.g. libtest, compiletest or rustdoc)]:ARGS: ' \
-'*--rustc-args=[extra options to pass the compiler when running tests]:ARGS: ' \
'--config=[TOML configuration file for build]:FILE:_files' \
'--build-dir=[Build directory, overrides \`build.build-dir\` in \`config.toml\`]:DIR:_files -/' \
'--build=[build target of the stage0 compiler]:BUILD:( )' \
@@ -3735,15 +3735,14 @@ impl<'test> TestCx<'test> {
        }

        if self.config.bless {
-            cmd.env("RUSTC_BLESS_TEST", "--bless");
-            // Assume this option is active if the environment variable is "defined", with _any_ value.
-            // As an example, a `Makefile` can use this option by:
+            // If we're running in `--bless` mode, set an environment variable to tell
+            // `run_make_support` to bless snapshot files instead of checking them.
            //
-            // ifdef RUSTC_BLESS_TEST
-            // cp "$(TMPDIR)"/actual_something.ext expected_something.ext
-            // else
-            // $(DIFF) expected_something.ext "$(TMPDIR)"/actual_something.ext
-            // endif
+            // The value is this test's source directory, because the support code
+            // will need that path in order to bless the _original_ snapshot files,
+            // not the copies in `rmake_out`.
+            // (See <https://github.com/rust-lang/rust/issues/129038>.)
+            cmd.env("RUSTC_BLESS_TEST", &self.testpaths.file);
        }

        if self.config.target.contains("msvc") && !self.config.cc.is_empty() {
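The contract between the two sides is just an environment variable: compiletest exports RUSTC_BLESS_TEST set to the test's source directory, and the support code joins that directory with the relative snapshot path before overwriting it. A minimal sketch of the consuming side using only std (the `maybe_bless` helper name and file names are illustrative; only the RUSTC_BLESS_TEST variable comes from the diff):

use std::env;
use std::fs;
use std::path::Path;

// Returns true if the snapshot was blessed (rewritten in the source tree)
// instead of being checked.
fn maybe_bless(expected_rel: &Path, actual: &str) -> bool {
    // Compiletest sets this to the test's source directory when `--bless` is passed.
    let Ok(bless_dir) = env::var("RUSTC_BLESS_TEST") else {
        return false;
    };
    let bless_file = Path::new(&bless_dir).join(expected_rel);
    println!("Blessing `{}`", bless_file.display());
    fs::write(&bless_file, actual).expect("failed to write blessed snapshot");
    true
}

fn main() {
    let actual = "new output\n";
    if !maybe_bless(Path::new("expected.txt"), actual) {
        // Without the env var we would fall back to comparing against the checked-in file.
        println!("RUSTC_BLESS_TEST not set; would diff against expected.txt");
    }
}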
@@ -112,14 +112,8 @@ impl Diff {
        let (expected_name, actual_name, output, actual) = self.run_common();

        if !output.is_empty() {
-            // If we can bless (meaning we have a file to write into and the `RUSTC_BLESS_TEST`
-            // environment variable set), then we write into the file and return.
-            if let Some(ref expected_file) = self.expected_file {
-                if std::env::var("RUSTC_BLESS_TEST").is_ok() {
-                    println!("Blessing `{}`", expected_file.display());
-                    fs::write(expected_file, actual);
-                    return;
-                }
-            }
+            if self.maybe_bless_expected_file(&actual) {
+                return;
+            }
            panic!(
                "test failed: `{}` is different from `{}`\n\n{}",

@@ -134,14 +128,8 @@ impl Diff {
        let (expected_name, actual_name, output, actual) = self.run_common();

        if output.is_empty() {
-            // If we can bless (meaning we have a file to write into and the `RUSTC_BLESS_TEST`
-            // environment variable set), then we write into the file and return.
-            if let Some(ref expected_file) = self.expected_file {
-                if std::env::var("RUSTC_BLESS_TEST").is_ok() {
-                    println!("Blessing `{}`", expected_file.display());
-                    fs::write(expected_file, actual);
-                    return;
-                }
-            }
+            if self.maybe_bless_expected_file(&actual) {
+                return;
+            }
            panic!(
                "test failed: `{}` is not different from `{}`\n\n{}",

@@ -149,4 +137,24 @@ impl Diff {
        )
    }

+    /// If we have an expected file to write into, and `RUSTC_BLESS_TEST` is
+    /// set, then write the actual output into the file and return `true`.
+    ///
+    /// We assume that `RUSTC_BLESS_TEST` contains the path to the original test's
+    /// source directory. That lets us bless the original snapshot file in the
+    /// source tree, not the copy in `rmake_out` that we would normally use.
+    fn maybe_bless_expected_file(&self, actual: &str) -> bool {
+        let Some(ref expected_file) = self.expected_file else {
+            return false;
+        };
+        let Ok(bless_dir) = std::env::var("RUSTC_BLESS_TEST") else {
+            return false;
+        };
+
+        let bless_file = Path::new(&bless_dir).join(expected_file);
+        println!("Blessing `{}`", bless_file.display());
+        fs::write(bless_file, actual);
+        true
+    }
}
tests/assembly/powerpc64-struct-abi.rs (new file, 132 lines)

@@ -0,0 +1,132 @@
//@ revisions: elfv1-be elfv2-be elfv2-le
//@ assembly-output: emit-asm
//@ compile-flags: -O
//@[elfv1-be] compile-flags: --target powerpc64-unknown-linux-gnu
//@[elfv1-be] needs-llvm-components: powerpc
//@[elfv2-be] compile-flags: --target powerpc64-unknown-linux-musl
//@[elfv2-be] needs-llvm-components: powerpc
//@[elfv2-le] compile-flags: --target powerpc64le-unknown-linux-gnu
//@[elfv2-le] needs-llvm-components: powerpc
//@[elfv1-be] filecheck-flags: --check-prefix be
//@[elfv2-be] filecheck-flags: --check-prefix be

#![feature(no_core, lang_items)]
#![no_std]
#![no_core]
#![crate_type = "lib"]

#[lang = "sized"]
trait Sized {}

#[lang = "copy"]
trait Copy {}

#[lang = "freeze"]
trait Freeze {}

#[lang = "unpin"]
trait Unpin {}

impl Copy for u8 {}
impl Copy for u16 {}
impl Copy for u32 {}
impl Copy for FiveU32s {}
impl Copy for FiveU16s {}
impl Copy for ThreeU8s {}

#[repr(C)]
struct FiveU32s(u32, u32, u32, u32, u32);

#[repr(C)]
struct FiveU16s(u16, u16, u16, u16, u16);

#[repr(C)]
struct ThreeU8s(u8, u8, u8);

// CHECK-LABEL: read_large
// be: lwz [[REG1:.*]], 16(4)
// be-NEXT: stw [[REG1]], 16(3)
// be-NEXT: ld [[REG2:.*]], 8(4)
// be-NEXT: ld [[REG3:.*]], 0(4)
// be-NEXT: std [[REG2]], 8(3)
// be-NEXT: std [[REG3]], 0(3)
// elfv2-le: lxvd2x [[REG1:.*]], 0, 4
// elfv2-le-NEXT: lwz [[REG2:.*]], 16(4)
// elfv2-le-NEXT: stw [[REG2]], 16(3)
// elfv2-le-NEXT: stxvd2x [[REG1]], 0, 3
// CHECK-NEXT: blr
#[no_mangle]
extern "C" fn read_large(x: &FiveU32s) -> FiveU32s {
    *x
}

// CHECK-LABEL: read_medium
// elfv1-be: lhz [[REG1:.*]], 8(4)
// elfv1-be-NEXT: ld [[REG2:.*]], 0(4)
// elfv1-be-NEXT: sth [[REG1]], 8(3)
// elfv1-be-NEXT: std [[REG2]], 0(3)
// elfv2-be: lhz [[REG1:.*]], 8(3)
// elfv2-be-NEXT: ld 3, 0(3)
// elfv2-be-NEXT: sldi 4, [[REG1]], 48
// elfv2-le: ld [[REG1:.*]], 0(3)
// elfv2-le-NEXT: lhz 4, 8(3)
// elfv2-le-NEXT: mr 3, [[REG1]]
// CHECK-NEXT: blr
#[no_mangle]
extern "C" fn read_medium(x: &FiveU16s) -> FiveU16s {
    *x
}

// CHECK-LABEL: read_small
// elfv1-be: lbz [[REG1:.*]], 2(4)
// elfv1-be-NEXT: lhz [[REG2:.*]], 0(4)
// elfv1-be-NEXT: stb [[REG1]], 2(3)
// elfv1-be-NEXT: sth [[REG2]], 0(3)
// elfv2-be: lhz [[REG1:.*]], 0(3)
// elfv2-be-NEXT: lbz 3, 2(3)
// elfv2-be-NEXT: rldimi 3, [[REG1]], 8, 0
// elfv2-le: lbz [[REG1:.*]], 2(3)
// elfv2-le-NEXT: lhz 3, 0(3)
// elfv2-le-NEXT: rldimi 3, [[REG1]], 16, 0
// CHECK-NEXT: blr
#[no_mangle]
extern "C" fn read_small(x: &ThreeU8s) -> ThreeU8s {
    *x
}

// CHECK-LABEL: write_large
// CHECK: std 3, 0(6)
// be-NEXT: rldicl [[REG1:.*]], 5, 32, 32
// CHECK-NEXT: std 4, 8(6)
// be-NEXT: stw [[REG1]], 16(6)
// elfv2-le-NEXT: stw 5, 16(6)
// CHECK-NEXT: blr
#[no_mangle]
extern "C" fn write_large(x: FiveU32s, dest: &mut FiveU32s) {
    *dest = x;
}

// CHECK-LABEL: write_medium
// CHECK: std 3, 0(5)
// be-NEXT: rldicl [[REG1:.*]], 4, 16, 48
// be-NEXT: sth [[REG1]], 8(5)
// elfv2-le-NEXT: sth 4, 8(5)
// CHECK-NEXT: blr
#[no_mangle]
extern "C" fn write_medium(x: FiveU16s, dest: &mut FiveU16s) {
    *dest = x;
}

// CHECK-LABEL: write_small
// be: stb 3, 2(4)
// be-NEXT: srwi [[REG1:.*]], 3, 8
// be-NEXT: sth [[REG1]], 0(4)
// The order these instructions are emitted in changed in LLVM 18.
// elfv2-le-DAG: sth 3, 0(4)
// elfv2-le-DAG: srwi [[REG1:.*]], 3, 16
// elfv2-le-NEXT: stb [[REG1]], 2(4)
// CHECK-NEXT: blr
#[no_mangle]
extern "C" fn write_small(x: ThreeU8s, dest: &mut ThreeU8s) {
    *dest = x;
}
@@ -1,5 +1,5 @@
error: malformed `cfi_encoding` attribute input
-  --> $DIR/cfi-invalid-attr-cfi-encoding.rs:10:1
+  --> $DIR/invalid-attr-encoding.rs:10:1
   |
LL | #[cfi_encoding]
   | ^^^^^^^^^^^^^^^ help: must be of the form: `#[cfi_encoding = "encoding"]`