From 8716f2780e067c3590a6be4545cf40d0b6d8865a Mon Sep 17 00:00:00 2001
From: Amanieu d'Antras
Date: Tue, 7 Dec 2021 23:46:38 +0000
Subject: [PATCH] asm: Allow using r9 (ARM) and x18 (AArch64) if they are not
 reserved by the current target.

---
 compiler/rustc_ast_lowering/src/asm.rs       |  7 +++-
 .../rustc_codegen_ssa/src/target_features.rs |  1 +
 compiler/rustc_target/src/asm/aarch64.rs     | 20 +++++++++--
 compiler/rustc_target/src/asm/arm.rs         | 19 +++++++++--
 compiler/rustc_target/src/asm/mod.rs         | 34 ++++++++++++++++---
 5 files changed, 72 insertions(+), 9 deletions(-)

diff --git a/compiler/rustc_ast_lowering/src/asm.rs b/compiler/rustc_ast_lowering/src/asm.rs
index 9f27ace25ab..9c28f3c7f58 100644
--- a/compiler/rustc_ast_lowering/src/asm.rs
+++ b/compiler/rustc_ast_lowering/src/asm.rs
@@ -64,7 +64,12 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
         let mut clobber_abis = FxHashMap::default();
         if let Some(asm_arch) = asm_arch {
             for (abi_name, abi_span) in &asm.clobber_abis {
-                match asm::InlineAsmClobberAbi::parse(asm_arch, &self.sess.target, *abi_name) {
+                match asm::InlineAsmClobberAbi::parse(
+                    asm_arch,
+                    |feature| self.sess.target_features.contains(&Symbol::intern(feature)),
+                    &self.sess.target,
+                    *abi_name,
+                ) {
                     Ok(abi) => {
                         // If the abi was already in the list, emit an error
                         match clobber_abis.get(&abi) {
diff --git a/compiler/rustc_codegen_ssa/src/target_features.rs b/compiler/rustc_codegen_ssa/src/target_features.rs
index ba72e3cfafc..63cc6faf9ec 100644
--- a/compiler/rustc_codegen_ssa/src/target_features.rs
+++ b/compiler/rustc_codegen_ssa/src/target_features.rs
@@ -36,6 +36,7 @@ const ARM_ALLOWED_FEATURES: &[(&str, Option<Symbol>)] = &[
     // #[target_feature].
     ("thumb-mode", Some(sym::arm_target_feature)),
     ("thumb2", Some(sym::arm_target_feature)),
+    ("reserve-r9", Some(sym::arm_target_feature)),
 ];
 
 const AARCH64_ALLOWED_FEATURES: &[(&str, Option<Symbol>)] = &[
diff --git a/compiler/rustc_target/src/asm/aarch64.rs b/compiler/rustc_target/src/asm/aarch64.rs
index 76e50678314..4bf909ce46d 100644
--- a/compiler/rustc_target/src/asm/aarch64.rs
+++ b/compiler/rustc_target/src/asm/aarch64.rs
@@ -1,4 +1,5 @@
 use super::{InlineAsmArch, InlineAsmType};
+use crate::spec::Target;
 use rustc_macros::HashStable_Generic;
 use std::fmt;
 
@@ -70,6 +71,22 @@ impl AArch64InlineAsmRegClass {
     }
 }
 
+pub fn reserved_x18(
+    _arch: InlineAsmArch,
+    _has_feature: impl FnMut(&str) -> bool,
+    target: &Target,
+) -> Result<(), &'static str> {
+    if target.os == "android"
+        || target.is_like_fuchsia
+        || target.is_like_osx
+        || target.is_like_windows
+    {
+        Err("x18 is a reserved register on this target")
+    } else {
+        Ok(())
+    }
+}
+
 def_regs! {
     AArch64 AArch64InlineAsmReg AArch64InlineAsmRegClass {
         x0: reg = ["x0", "w0"],
@@ -90,6 +107,7 @@ def_regs! {
         x15: reg = ["x15", "w15"],
         x16: reg = ["x16", "w16"],
         x17: reg = ["x17", "w17"],
+        x18: reg = ["x18", "w18"] % reserved_x18,
         x20: reg = ["x20", "w20"],
         x21: reg = ["x21", "w21"],
         x22: reg = ["x22", "w22"],
@@ -149,8 +167,6 @@ def_regs! {
         p14: preg = ["p14"],
         p15: preg = ["p15"],
         ffr: preg = ["ffr"],
-        #error = ["x18", "w18"] =>
-            "x18 is used as a reserved register on some targets and cannot be used as an operand for inline asm",
         #error = ["x19", "w19"] =>
             "x19 is used internally by LLVM and cannot be used as an operand for inline asm",
         #error = ["x29", "w29", "fp", "wfp"] =>
diff --git a/compiler/rustc_target/src/asm/arm.rs b/compiler/rustc_target/src/asm/arm.rs
index 4eeb7fcc71b..b03594b3151 100644
--- a/compiler/rustc_target/src/asm/arm.rs
+++ b/compiler/rustc_target/src/asm/arm.rs
@@ -99,6 +99,22 @@ fn not_thumb1(
     }
 }
 
+fn reserved_r9(
+    arch: InlineAsmArch,
+    mut has_feature: impl FnMut(&str) -> bool,
+    target: &Target,
+) -> Result<(), &'static str> {
+    not_thumb1(arch, &mut has_feature, target)?;
+
+    // We detect this using the reserve-r9 target feature instead of the target
+    // spec because the relocation model can be changed with compiler options.
+    if has_feature("reserve-r9") {
+        Err("the RWPI static base register (r9) cannot be used as an operand for inline asm")
+    } else {
+        Ok(())
+    }
+}
+
 def_regs! {
     Arm ArmInlineAsmReg ArmInlineAsmRegClass {
         r0: reg = ["r0", "a1"],
@@ -109,6 +125,7 @@ def_regs! {
         r5: reg = ["r5", "v2"],
         r7: reg = ["r7", "v4"] % frame_pointer_r7,
         r8: reg = ["r8", "v5"] % not_thumb1,
+        r9: reg = ["r9", "v6", "rfp"] % reserved_r9,
         r10: reg = ["r10", "sl"] % not_thumb1,
         r11: reg = ["r11", "fp"] % frame_pointer_r11,
         r12: reg = ["r12", "ip"] % not_thumb1,
@@ -195,8 +212,6 @@ def_regs! {
         q15: qreg = ["q15"],
         #error = ["r6", "v3"] =>
             "r6 is used internally by LLVM and cannot be used as an operand for inline asm",
-        #error = ["r9", "v6", "rfp"] =>
-            "r9 is used internally by LLVM and cannot be used as an operand for inline asm",
         #error = ["r13", "sp"] =>
             "the stack pointer cannot be used as an operand for inline asm",
         #error = ["r15", "pc"] =>
diff --git a/compiler/rustc_target/src/asm/mod.rs b/compiler/rustc_target/src/asm/mod.rs
index cf940594bc4..f1f5f4389e3 100644
--- a/compiler/rustc_target/src/asm/mod.rs
+++ b/compiler/rustc_target/src/asm/mod.rs
@@ -785,6 +785,7 @@ pub enum InlineAsmClobberAbi {
     X86_64SysV,
     Arm,
     AArch64,
+    AArch64NoX18,
     RiscV,
 }
 
@@ -793,6 +794,7 @@ impl InlineAsmClobberAbi {
     /// clobber ABIs for the target.
     pub fn parse(
         arch: InlineAsmArch,
+        has_feature: impl FnMut(&str) -> bool,
         target: &Target,
         name: Symbol,
     ) -> Result<Self, &'static [&'static str]> {
@@ -816,7 +818,13 @@
                 _ => Err(&["C", "system", "efiapi", "aapcs"]),
             },
             InlineAsmArch::AArch64 => match name {
-                "C" | "system" | "efiapi" => Ok(InlineAsmClobberAbi::AArch64),
+                "C" | "system" | "efiapi" => {
+                    Ok(if aarch64::reserved_x18(arch, has_feature, target).is_err() {
+                        InlineAsmClobberAbi::AArch64NoX18
+                    } else {
+                        InlineAsmClobberAbi::AArch64
+                    })
+                }
                 _ => Err(&["C", "system", "efiapi"]),
             },
             InlineAsmArch::RiscV32 | InlineAsmArch::RiscV64 => match name {
@@ -891,8 +899,25 @@ impl InlineAsmClobberAbi {
                 AArch64 AArch64InlineAsmReg {
                     x0, x1, x2, x3, x4, x5, x6, x7,
                     x8, x9, x10, x11, x12, x13, x14, x15,
-                    // x18 is platform-reserved or temporary, but we exclude it
-                    // here since it is a reserved register.
+                    x16, x17, x18, x30,
+
+                    // Technically the low 64 bits of v8-v15 are preserved, but
+                    // we have no way of expressing this using clobbers.
+                    v0, v1, v2, v3, v4, v5, v6, v7,
+                    v8, v9, v10, v11, v12, v13, v14, v15,
+                    v16, v17, v18, v19, v20, v21, v22, v23,
+                    v24, v25, v26, v27, v28, v29, v30, v31,
+
+                    p0, p1, p2, p3, p4, p5, p6, p7,
+                    p8, p9, p10, p11, p12, p13, p14, p15,
+                    ffr,
+
+                }
+            },
+            InlineAsmClobberAbi::AArch64NoX18 => clobbered_regs! {
+                AArch64 AArch64InlineAsmReg {
+                    x0, x1, x2, x3, x4, x5, x6, x7,
+                    x8, x9, x10, x11, x12, x13, x14, x15,
                     x16, x17, x30,
 
                     // Technically the low 64 bits of v8-v15 are preserved, but
@@ -910,7 +935,8 @@
             },
             InlineAsmClobberAbi::Arm => clobbered_regs! {
                 Arm ArmInlineAsmReg {
-                    // r9 is platform-reserved and is treated as callee-saved.
+                    // r9 is either platform-reserved or callee-saved. Either
+                    // way we don't need to clobber it.
                     r0, r1, r2, r3, r12, r14,
                     // The finest-grained register variant is used here so that
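
Usage note (not part of the patch): the sketch below illustrates, under the assumption of a post-patch compiler and a toolchain where asm! is available (stable since Rust 1.59 via std::arch::asm), what this change permits. On AArch64 targets where x18 is not reserved (e.g. aarch64-unknown-linux-gnu) it can now be named as an explicit operand; on Android, Fuchsia, macOS/iOS and Windows the same code still fails with "x18 is a reserved register on this target", and clobber_abi("C") falls back to the new AArch64NoX18 set so x18 is not listed as clobbered there. Likewise r9 is accepted on ARM targets that do not enable the reserve-r9 feature. The function names here are illustrative only.

use std::arch::asm;

// Copy a value through x18; with this patch, this compiles on AArch64
// targets where x18 is not reserved (per `reserved_x18` above).
#[cfg(target_arch = "aarch64")]
fn copy_through_x18(x: u64) -> u64 {
    let y: u64;
    unsafe {
        // `in("x18")` pins the input to x18; `{0}` is the `reg`-class output.
        asm!("mov {0}, x18", out(reg) y, in("x18") x);
    }
    y
}

// Same idea on ARM: r9 is accepted unless the target enables reserve-r9
// (or is Thumb-1, where high registers are unavailable to inline asm).
#[cfg(target_arch = "arm")]
fn copy_through_r9(x: u32) -> u32 {
    let y: u32;
    unsafe {
        asm!("mov {0}, r9", out(reg) y, in("r9") x);
    }
    y
}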