diff --git a/compiler/rustc_codegen_llvm/src/abi.rs b/compiler/rustc_codegen_llvm/src/abi.rs
index 8b2330471f7..f918facc86d 100644
--- a/compiler/rustc_codegen_llvm/src/abi.rs
+++ b/compiler/rustc_codegen_llvm/src/abi.rs
@@ -150,7 +150,10 @@ impl LlvmType for CastTarget {
         // Simplify to a single unit or an array if there's no prefix.
         // This produces the same layout, but using a simpler type.
         if self.prefix.iter().all(|x| x.is_none()) {
-            if rest_count == 1 && !self.rest.force_array {
+            // We can't do this if is_consecutive is set and the unit would get
+            // split on the target. Currently, this is only relevant for i128
+            // registers.
+            if rest_count == 1 && (!self.rest.is_consecutive || self.rest.unit != Reg::i128()) {
                 return rest_ll_unit;
             }
 
diff --git a/compiler/rustc_target/src/abi/call/aarch64.rs b/compiler/rustc_target/src/abi/call/aarch64.rs
index bf9cf232d81..04020d13f22 100644
--- a/compiler/rustc_target/src/abi/call/aarch64.rs
+++ b/compiler/rustc_target/src/abi/call/aarch64.rs
@@ -31,7 +31,7 @@ where
             RegKind::Vector => size.bits() == 64 || size.bits() == 128,
         };
 
-        valid_unit.then_some(Uniform { unit, total: size, force_array: false })
+        valid_unit.then_some(Uniform::consecutive(unit, size))
     })
 }
 
@@ -60,7 +60,7 @@ where
     let size = ret.layout.size;
     let bits = size.bits();
     if bits <= 128 {
-        ret.cast_to(Uniform { unit: Reg::i64(), total: size, force_array: false });
+        ret.cast_to(Uniform::new(Reg::i64(), size));
         return;
     }
     ret.make_indirect();
@@ -100,9 +100,9 @@ where
     };
     if size.bits() <= 128 {
         if align.bits() == 128 {
-            arg.cast_to(Uniform { unit: Reg::i128(), total: size, force_array: false });
+            arg.cast_to(Uniform::new(Reg::i128(), size));
         } else {
-            arg.cast_to(Uniform { unit: Reg::i64(), total: size, force_array: false });
+            arg.cast_to(Uniform::new(Reg::i64(), size));
         }
         return;
     }
diff --git a/compiler/rustc_target/src/abi/call/arm.rs b/compiler/rustc_target/src/abi/call/arm.rs
index 177a16bafc8..9371e1b3958 100644
--- a/compiler/rustc_target/src/abi/call/arm.rs
+++ b/compiler/rustc_target/src/abi/call/arm.rs
@@ -21,7 +21,7 @@ where
             RegKind::Vector => size.bits() == 64 || size.bits() == 128,
         };
 
-        valid_unit.then_some(Uniform { unit, total: size, force_array: false })
+        valid_unit.then_some(Uniform::consecutive(unit, size))
     })
 }
 
@@ -49,7 +49,7 @@ where
     let size = ret.layout.size;
     let bits = size.bits();
     if bits <= 32 {
-        ret.cast_to(Uniform { unit: Reg::i32(), total: size, force_array: false });
+        ret.cast_to(Uniform::new(Reg::i32(), size));
         return;
     }
     ret.make_indirect();
@@ -78,11 +78,7 @@ where
 
     let align = arg.layout.align.abi.bytes();
     let total = arg.layout.size;
-    arg.cast_to(Uniform {
-        unit: if align <= 4 { Reg::i32() } else { Reg::i64() },
-        total,
-        force_array: false,
-    });
+    arg.cast_to(Uniform::consecutive(if align <= 4 { Reg::i32() } else { Reg::i64() }, total));
 }
 
 pub fn compute_abi_info<'a, Ty, C>(cx: &C, fn_abi: &mut FnAbi<'a, Ty>)
diff --git a/compiler/rustc_target/src/abi/call/csky.rs b/compiler/rustc_target/src/abi/call/csky.rs
index dae428e3538..7951f28beea 100644
--- a/compiler/rustc_target/src/abi/call/csky.rs
+++ b/compiler/rustc_target/src/abi/call/csky.rs
@@ -18,7 +18,7 @@ fn classify_ret<Ty>(arg: &mut ArgAbi<'_, Ty>) {
         if total.bits() > 64 {
             arg.make_indirect();
         } else if total.bits() > 32 {
-            arg.cast_to(Uniform { unit: Reg::i32(), total, force_array: false });
+            arg.cast_to(Uniform::new(Reg::i32(), total));
         } else {
             arg.cast_to(Reg::i32());
         }
@@ -38,7 +38,7 @@ fn classify_arg<Ty>(arg: &mut ArgAbi<'_, Ty>) {
     if arg.layout.is_aggregate() {
         let total = arg.layout.size;
         if total.bits() > 32 {
-            arg.cast_to(Uniform { unit: Reg::i32(), total, force_array: false });
+            arg.cast_to(Uniform::new(Reg::i32(), total));
         } else {
             arg.cast_to(Reg::i32());
         }
diff --git a/compiler/rustc_target/src/abi/call/loongarch.rs b/compiler/rustc_target/src/abi/call/loongarch.rs
index 3b8a3ba66ba..943b12a9fbf 100644
--- a/compiler/rustc_target/src/abi/call/loongarch.rs
+++ b/compiler/rustc_target/src/abi/call/loongarch.rs
@@ -195,11 +195,7 @@ where
         if total.bits() <= xlen {
             arg.cast_to(xlen_reg);
         } else {
-            arg.cast_to(Uniform {
-                unit: xlen_reg,
-                total: Size::from_bits(xlen * 2),
-                force_array: false,
-            });
+            arg.cast_to(Uniform::new(xlen_reg, Size::from_bits(xlen * 2)));
         }
         return false;
     }
@@ -282,11 +278,10 @@ fn classify_arg<'a, Ty, C>(
     if total.bits() > xlen {
         let align_regs = align > xlen;
         if is_loongarch_aggregate(arg) {
-            arg.cast_to(Uniform {
-                unit: if align_regs { double_xlen_reg } else { xlen_reg },
-                total: Size::from_bits(xlen * 2),
-                force_array: false,
-            });
+            arg.cast_to(Uniform::new(
+                if align_regs { double_xlen_reg } else { xlen_reg },
+                Size::from_bits(xlen * 2),
+            ));
         }
         if align_regs && is_vararg {
             *avail_gprs -= *avail_gprs % 2;
diff --git a/compiler/rustc_target/src/abi/call/mips.rs b/compiler/rustc_target/src/abi/call/mips.rs
index 054c127181a..0e5a7f37a09 100644
--- a/compiler/rustc_target/src/abi/call/mips.rs
+++ b/compiler/rustc_target/src/abi/call/mips.rs
@@ -27,10 +27,7 @@ where
 
     if arg.layout.is_aggregate() {
         let pad_i32 = !offset.is_aligned(align);
-        arg.cast_to_and_pad_i32(
-            Uniform { unit: Reg::i32(), total: size, force_array: false },
-            pad_i32,
-        );
+        arg.cast_to_and_pad_i32(Uniform::new(Reg::i32(), size), pad_i32);
     } else {
         arg.extend_integer_width_to(32);
     }
diff --git a/compiler/rustc_target/src/abi/call/mips64.rs b/compiler/rustc_target/src/abi/call/mips64.rs
index 69e73beca9a..b2a2c34b980 100644
--- a/compiler/rustc_target/src/abi/call/mips64.rs
+++ b/compiler/rustc_target/src/abi/call/mips64.rs
@@ -68,7 +68,7 @@ where
             }
 
             // Cast to a uniform int structure
-            ret.cast_to(Uniform { unit: Reg::i64(), total: size, force_array: false });
+            ret.cast_to(Uniform::new(Reg::i64(), size));
         } else {
             ret.make_indirect();
         }
@@ -139,7 +139,7 @@ where
     let rest_size = size - Size::from_bytes(8) * prefix_index as u64;
     arg.cast_to(CastTarget {
         prefix,
-        rest: Uniform { unit: Reg::i64(), total: rest_size, force_array: false },
+        rest: Uniform::new(Reg::i64(), rest_size),
         attrs: ArgAttributes {
             regular: ArgAttribute::default(),
             arg_ext: ArgExtension::None,
diff --git a/compiler/rustc_target/src/abi/call/mod.rs b/compiler/rustc_target/src/abi/call/mod.rs
index 022ea44cc93..4502df339d1 100644
--- a/compiler/rustc_target/src/abi/call/mod.rs
+++ b/compiler/rustc_target/src/abi/call/mod.rs
@@ -256,13 +256,15 @@ pub struct Uniform {
     ///   this size will be rounded up to the nearest multiple of `unit.size`.
     pub total: Size,
 
-    /// Force the use of an array, even if there is only a single element.
-    pub force_array: bool,
+    /// Indicate that the argument is consecutive, in the sense that either all values need to be
+    /// passed in register, or all on the stack. If they are passed on the stack, there should be
+    /// no additional padding between elements.
+    pub is_consecutive: bool,
 }
 
 impl From<Reg> for Uniform {
     fn from(unit: Reg) -> Uniform {
-        Uniform { unit, total: unit.size, force_array: false }
+        Uniform { unit, total: unit.size, is_consecutive: false }
     }
 }
 
@@ -270,6 +272,18 @@ impl Uniform {
     pub fn align<C: HasDataLayout>(&self, cx: &C) -> Align {
         self.unit.align(cx)
     }
+
+    /// Pass using one or more values of the given type, without requiring them to be consecutive.
+    /// That is, some values may be passed in register and some on the stack.
+    pub fn new(unit: Reg, total: Size) -> Self {
+        Uniform { unit, total, is_consecutive: false }
+    }
+
+    /// Pass using one or more consecutive values of the given type. Either all values will be
+    /// passed in registers, or all on the stack.
+    pub fn consecutive(unit: Reg, total: Size) -> Self {
+        Uniform { unit, total, is_consecutive: true }
+    }
 }
 
 /// Describes the type used for `PassMode::Cast`.
diff --git a/compiler/rustc_target/src/abi/call/nvptx64.rs b/compiler/rustc_target/src/abi/call/nvptx64.rs
index e7525c76015..f85fa2419f0 100644
--- a/compiler/rustc_target/src/abi/call/nvptx64.rs
+++ b/compiler/rustc_target/src/abi/call/nvptx64.rs
@@ -35,7 +35,7 @@ where
             16 => Reg::i128(),
             _ => unreachable!("Align is given as power of 2 no larger than 16 bytes"),
         };
-        arg.cast_to(Uniform { unit, total: Size::from_bytes(2 * align_bytes), force_array: false });
+        arg.cast_to(Uniform::new(unit, Size::from_bytes(2 * align_bytes)));
     } else {
         // FIXME: find a better way to do this. See https://github.com/rust-lang/rust/issues/117271.
         arg.make_direct_deprecated();
     }
diff --git a/compiler/rustc_target/src/abi/call/powerpc64.rs b/compiler/rustc_target/src/abi/call/powerpc64.rs
index 0b2bd8c90d7..11a6cb52bab 100644
--- a/compiler/rustc_target/src/abi/call/powerpc64.rs
+++ b/compiler/rustc_target/src/abi/call/powerpc64.rs
@@ -37,7 +37,7 @@ where
             RegKind::Vector => arg.layout.size.bits() == 128,
         };
 
-        valid_unit.then_some(Uniform { unit, total: arg.layout.size, force_array: false })
+        valid_unit.then_some(Uniform::consecutive(unit, arg.layout.size))
     })
 }
 
@@ -81,7 +81,7 @@ where
             Reg::i64()
         };
 
-        ret.cast_to(Uniform { unit, total: size, force_array: false });
+        ret.cast_to(Uniform::new(unit, size));
         return;
     }
 
@@ -117,11 +117,10 @@ where
         // of i64s or i128s, depending on the aggregate alignment. Always use an array for
         // this, even if there is only a single element.
         let reg = if arg.layout.align.abi.bytes() > 8 { Reg::i128() } else { Reg::i64() };
-        arg.cast_to(Uniform {
-            unit: reg,
-            total: size.align_to(Align::from_bytes(reg.size.bytes()).unwrap()),
-            force_array: true,
-        })
+        arg.cast_to(Uniform::consecutive(
+            reg,
+            size.align_to(Align::from_bytes(reg.size.bytes()).unwrap()),
+        ))
     };
 }
 
diff --git a/compiler/rustc_target/src/abi/call/riscv.rs b/compiler/rustc_target/src/abi/call/riscv.rs
index 0dad35c8aab..5d4b3a9d245 100644
--- a/compiler/rustc_target/src/abi/call/riscv.rs
+++ b/compiler/rustc_target/src/abi/call/riscv.rs
@@ -201,11 +201,7 @@ where
         if total.bits() <= xlen {
             arg.cast_to(xlen_reg);
         } else {
-            arg.cast_to(Uniform {
-                unit: xlen_reg,
-                total: Size::from_bits(xlen * 2),
-                force_array: false,
-            });
+            arg.cast_to(Uniform::new(xlen_reg, Size::from_bits(xlen * 2)));
         }
         return false;
     }
@@ -288,11 +284,10 @@ fn classify_arg<'a, Ty, C>(
     if total.bits() > xlen {
         let align_regs = align > xlen;
         if is_riscv_aggregate(arg) {
-            arg.cast_to(Uniform {
-                unit: if align_regs { double_xlen_reg } else { xlen_reg },
-                total: Size::from_bits(xlen * 2),
-                force_array: false,
-            });
+            arg.cast_to(Uniform::new(
+                if align_regs { double_xlen_reg } else { xlen_reg },
+                Size::from_bits(xlen * 2),
+            ));
         }
         if align_regs && is_vararg {
             *avail_gprs -= *avail_gprs % 2;
diff --git a/compiler/rustc_target/src/abi/call/sparc.rs b/compiler/rustc_target/src/abi/call/sparc.rs
index 054c127181a..0e5a7f37a09 100644
--- a/compiler/rustc_target/src/abi/call/sparc.rs
+++ b/compiler/rustc_target/src/abi/call/sparc.rs
@@ -27,10 +27,7 @@ where
 
     if arg.layout.is_aggregate() {
         let pad_i32 = !offset.is_aligned(align);
-        arg.cast_to_and_pad_i32(
-            Uniform { unit: Reg::i32(), total: size, force_array: false },
-            pad_i32,
-        );
+        arg.cast_to_and_pad_i32(Uniform::new(Reg::i32(), size), pad_i32);
     } else {
         arg.extend_integer_width_to(32);
     }
diff --git a/compiler/rustc_target/src/abi/call/sparc64.rs b/compiler/rustc_target/src/abi/call/sparc64.rs
index a36b691468e..acdcd5cc0d4 100644
--- a/compiler/rustc_target/src/abi/call/sparc64.rs
+++ b/compiler/rustc_target/src/abi/call/sparc64.rs
@@ -192,7 +192,7 @@ where
 
             arg.cast_to(CastTarget {
                 prefix: data.prefix,
-                rest: Uniform { unit: Reg::i64(), total: rest_size, force_array: false },
+                rest: Uniform::new(Reg::i64(), rest_size),
                 attrs: ArgAttributes {
                     regular: data.arg_attribute,
                     arg_ext: ArgExtension::None,
@@ -205,7 +205,7 @@ where
         }
     }
 
-    arg.cast_to(Uniform { unit: Reg::i64(), total, force_array: false });
+    arg.cast_to(Uniform::new(Reg::i64(), total));
 }
 
 pub fn compute_abi_info<'a, Ty, C>(cx: &C, fn_abi: &mut FnAbi<'a, Ty>)
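Note (not part of the patch): a minimal standalone sketch of what the two new constructors encode, for reviewers who want to see the semantics outside the diff. `Reg`, `Size`, and `Uniform` below are simplified stand-ins, not the real `rustc_target` definitions.

// Illustration only: simplified stand-ins for the `rustc_target` types.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct Reg {
    bits: u64,
}

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct Size {
    bits: u64,
}

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct Uniform {
    unit: Reg,
    total: Size,
    // Mirrors the renamed field: either every unit is passed in registers or
    // every unit is passed on the stack, with no padding between them.
    is_consecutive: bool,
}

impl Uniform {
    // Units may be split between registers and the stack.
    fn new(unit: Reg, total: Size) -> Self {
        Uniform { unit, total, is_consecutive: false }
    }

    // Units must stay together: all in registers or all on the stack.
    fn consecutive(unit: Reg, total: Size) -> Self {
        Uniform { unit, total, is_consecutive: true }
    }
}

fn main() {
    let i64_reg = Reg { bits: 64 };
    let two_words = Size { bits: 128 };

    // Old `force_array: false` call sites map to `new`; the homogeneous-aggregate
    // and ppc64 `force_array: true` call sites map to `consecutive`.
    assert!(!Uniform::new(i64_reg, two_words).is_consecutive);
    assert!(Uniform::consecutive(i64_reg, two_words).is_consecutive);
}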