2018-09-20 13:47:22 +00:00
|
|
|
use super::abi::AbiBuilderMethods;
|
|
|
|
use super::asm::AsmBuilderMethods;
|
2024-03-16 05:09:05 +00:00
|
|
|
use super::consts::ConstMethods;
|
2020-06-22 06:29:08 +00:00
|
|
|
use super::coverageinfo::CoverageInfoBuilderMethods;
|
2018-09-20 13:47:22 +00:00
|
|
|
use super::debuginfo::DebugInfoBuilderMethods;
|
|
|
|
use super::intrinsic::IntrinsicCallMethods;
|
2021-12-30 01:18:44 +00:00
|
|
|
use super::misc::MiscMethods;
|
2024-03-16 05:09:05 +00:00
|
|
|
use super::type_::{ArgAbiMethods, BaseTypeMethods, LayoutTypeMethods};
|
2018-11-26 17:36:58 +00:00
|
|
|
use super::{HasCodegen, StaticBuilderMethods};
|
2019-05-17 01:20:14 +00:00
|
|
|
|
2019-02-09 14:31:47 +00:00
|
|
|
use crate::common::{
|
2021-12-30 01:18:44 +00:00
|
|
|
AtomicOrdering, AtomicRmwBinOp, IntPredicate, RealPredicate, SynchronizationScope, TypeKind,
|
2019-02-09 14:31:47 +00:00
|
|
|
};
|
2024-03-16 05:09:05 +00:00
|
|
|
use crate::mir::operand::{OperandRef, OperandValue};
|
2019-02-09 14:31:47 +00:00
|
|
|
use crate::mir::place::PlaceRef;
|
|
|
|
use crate::MemFlags;
|
2019-05-17 01:20:14 +00:00
|
|
|
|
2022-12-13 06:42:44 +00:00
|
|
|
use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrs;
|
2020-08-29 16:10:01 +00:00
|
|
|
use rustc_middle::ty::layout::{HasParamEnv, TyAndLayout};
|
2024-03-15 19:45:46 +00:00
|
|
|
use rustc_middle::ty::{Instance, Ty};
|
2024-03-16 05:09:05 +00:00
|
|
|
use rustc_session::config::OptLevel;
|
2020-10-06 13:39:12 +00:00
|
|
|
use rustc_span::Span;
|
2022-10-01 17:01:31 +00:00
|
|
|
use rustc_target::abi::call::FnAbi;
|
2021-08-25 13:21:45 +00:00
|
|
|
use rustc_target::abi::{Abi, Align, Scalar, Size, WrappingRange};
|
2019-05-17 01:20:14 +00:00
|
|
|
use rustc_target::spec::HasTargetSpec;
|
|
|
|
|
2018-11-24 15:44:17 +00:00
|
|
|
/// Which checked integer operation [`BuilderMethods::checked_binop`] should
/// emit. The builder returns both the (wrapping) result and an overflow flag.
//
// Derives beyond `Copy`/`Clone` are added so callers can debug-print,
// compare, and key on the operation without hand-written impls; all are
// backward-compatible additions.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub enum OverflowOp {
    /// Checked addition.
    Add,
    /// Checked subtraction.
    Sub,
    /// Checked multiplication.
    Mul,
}
|
|
|
|
|
2019-06-14 16:39:39 +00:00
|
|
|
/// The instruction-building interface every codegen backend must implement.
///
/// A builder is conceptually positioned at an insertion point inside one
/// basic block of one function; each method appends an instruction there and
/// (where meaningful) returns the backend value produced. The supertraits
/// fold in ABI, inline-asm, coverage, debuginfo, intrinsic-call, and static
/// handling so that MIR lowering can be written once against this trait.
pub trait BuilderMethods<'a, 'tcx>:
    HasCodegen<'tcx>
    + CoverageInfoBuilderMethods<'tcx>
    + DebugInfoBuilderMethods
    + ArgAbiMethods<'tcx>
    + AbiBuilderMethods<'tcx>
    + IntrinsicCallMethods<'tcx>
    + AsmBuilderMethods<'tcx>
    + StaticBuilderMethods
    + HasParamEnv<'tcx>
    + HasTargetSpec
{
    /// Creates a builder positioned inside the existing block `llbb`.
    fn build(cx: &'a Self::CodegenCx, llbb: Self::BasicBlock) -> Self;

    /// The codegen context this builder was created from.
    fn cx(&self) -> &Self::CodegenCx;
    /// The basic block this builder is currently inserting into.
    fn llbb(&self) -> Self::BasicBlock;

    /// Sets the source span to attach to subsequently emitted instructions.
    fn set_span(&mut self, span: Span);

    // FIXME(eddyb) replace uses of this with `append_sibling_block`.
    fn append_block(cx: &'a Self::CodegenCx, llfn: Self::Function, name: &str) -> Self::BasicBlock;

    /// Appends a new basic block to the function currently being built.
    fn append_sibling_block(&mut self, name: &str) -> Self::BasicBlock;

    /// Moves this builder's insertion point to the end of `llbb`.
    fn switch_to_block(&mut self, llbb: Self::BasicBlock);

    // --- Block terminators ---

    fn ret_void(&mut self);
    fn ret(&mut self, v: Self::Value);
    fn br(&mut self, dest: Self::BasicBlock);
    fn cond_br(
        &mut self,
        cond: Self::Value,
        then_llbb: Self::BasicBlock,
        else_llbb: Self::BasicBlock,
    );
    /// Multi-way branch on integer `v`: jumps to the block paired with the
    /// matching case value, or to `else_llbb` when no case matches.
    fn switch(
        &mut self,
        v: Self::Value,
        else_llbb: Self::BasicBlock,
        cases: impl ExactSizeIterator<Item = (u128, Self::BasicBlock)>,
    );
    /// Calls `llfn`, continuing at `then` on normal return and at `catch` on
    /// unwind. `fn_attrs`, `fn_abi`, and `instance` are optional extra
    /// information a backend may use (e.g. for call-site attributes).
    fn invoke(
        &mut self,
        llty: Self::Type,
        fn_attrs: Option<&CodegenFnAttrs>,
        fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>,
        llfn: Self::Value,
        args: &[Self::Value],
        then: Self::BasicBlock,
        catch: Self::BasicBlock,
        funclet: Option<&Self::Funclet>,
        instance: Option<Instance<'tcx>>,
    ) -> Self::Value;
    fn unreachable(&mut self);

    // --- Arithmetic and bitwise operations ---
    //
    // Naming follows the usual IR conventions: `u`/`s` prefixes are
    // unsigned/signed integer forms, `f` prefixes are floating-point forms.
    // `_fast` variants may apply aggressive fast-math transformations, while
    // `_algebraic` variants permit only algebraic (reassociation-style)
    // rewrites.
    fn add(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn fadd(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn fadd_fast(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn fadd_algebraic(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn sub(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn fsub(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn fsub_fast(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn fsub_algebraic(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn mul(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn fmul(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn fmul_fast(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn fmul_algebraic(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn udiv(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn exactudiv(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn sdiv(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn exactsdiv(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn fdiv(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn fdiv_fast(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn fdiv_algebraic(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn urem(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn srem(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn frem(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn frem_fast(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn frem_algebraic(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn shl(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn lshr(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn ashr(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    // `unchecked_*`: overflow is undefined behavior, enabling better codegen.
    fn unchecked_sadd(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn unchecked_uadd(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn unchecked_ssub(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn unchecked_usub(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn unchecked_smul(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn unchecked_umul(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn and(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn or(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn xor(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn neg(&mut self, v: Self::Value) -> Self::Value;
    fn fneg(&mut self, v: Self::Value) -> Self::Value;
    fn not(&mut self, v: Self::Value) -> Self::Value;

    /// Emits `oop` on `lhs`/`rhs`, returning `(result, overflowed)` where the
    /// second value is a flag indicating whether the operation overflowed.
    fn checked_binop(
        &mut self,
        oop: OverflowOp,
        ty: Ty<'_>,
        lhs: Self::Value,
        rhs: Self::Value,
    ) -> (Self::Value, Self::Value);

    /// Converts an "immediate" operand value to its in-memory representation.
    fn from_immediate(&mut self, val: Self::Value) -> Self::Value;
    /// Converts a loaded value to immediate form when the layout is a scalar;
    /// non-scalar layouts are passed through unchanged.
    fn to_immediate(&mut self, val: Self::Value, layout: TyAndLayout<'_>) -> Self::Value {
        if let Abi::Scalar(scalar) = layout.abi {
            self.to_immediate_scalar(val, scalar)
        } else {
            val
        }
    }
    fn to_immediate_scalar(&mut self, val: Self::Value, scalar: Scalar) -> Self::Value;

    // --- Stack allocation ---

    /// Allocates a stack slot of type `ty` with the given alignment.
    fn alloca(&mut self, ty: Self::Type, align: Align) -> Self::Value;
    /// Allocates a stack byte array whose length is the runtime value `len`.
    fn byte_array_alloca(&mut self, len: Self::Value, align: Align) -> Self::Value;

    // --- Loads ---

    fn load(&mut self, ty: Self::Type, ptr: Self::Value, align: Align) -> Self::Value;
    fn volatile_load(&mut self, ty: Self::Type, ptr: Self::Value) -> Self::Value;
    fn atomic_load(
        &mut self,
        ty: Self::Type,
        ptr: Self::Value,
        order: AtomicOrdering,
        size: Size,
    ) -> Self::Value;
    /// Loads a whole place as an operand, respecting its layout.
    fn load_operand(&mut self, place: PlaceRef<'tcx, Self::Value>)
        -> OperandRef<'tcx, Self::Value>;

    /// Called for Rvalue::Repeat when the elem is neither a ZST nor optimizable using memset.
    fn write_operand_repeatedly(
        &mut self,
        elem: OperandRef<'tcx, Self::Value>,
        count: u64,
        dest: PlaceRef<'tcx, Self::Value>,
    );

    /// Attaches value-range metadata to a load, allowing the backend to
    /// assume the loaded value lies within `range`.
    fn range_metadata(&mut self, load: Self::Value, range: WrappingRange);
    /// Marks a loaded pointer as known non-null.
    fn nonnull_metadata(&mut self, load: Self::Value);

    // --- Stores ---

    fn store(&mut self, val: Self::Value, ptr: Self::Value, align: Align) -> Self::Value;
    /// Like [`Self::store`] but with extra `MemFlags` (e.g. volatile,
    /// nontemporal — see `MemFlags` for the exact set).
    fn store_with_flags(
        &mut self,
        val: Self::Value,
        ptr: Self::Value,
        align: Align,
        flags: MemFlags,
    ) -> Self::Value;
    fn atomic_store(
        &mut self,
        val: Self::Value,
        ptr: Self::Value,
        order: AtomicOrdering,
        size: Size,
    );

    // --- Pointer arithmetic ---

    fn gep(&mut self, ty: Self::Type, ptr: Self::Value, indices: &[Self::Value]) -> Self::Value;
    fn inbounds_gep(
        &mut self,
        ty: Self::Type,
        ptr: Self::Value,
        indices: &[Self::Value],
    ) -> Self::Value;
    /// Byte-offset pointer addition; defaults to an `i8` GEP.
    fn ptradd(&mut self, ptr: Self::Value, offset: Self::Value) -> Self::Value {
        self.gep(self.cx().type_i8(), ptr, &[offset])
    }
    /// Byte-offset pointer addition that must stay in bounds of the
    /// allocation; defaults to an inbounds `i8` GEP.
    fn inbounds_ptradd(&mut self, ptr: Self::Value, offset: Self::Value) -> Self::Value {
        self.inbounds_gep(self.cx().type_i8(), ptr, &[offset])
    }

    // --- Casts and conversions ---

    fn trunc(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
    fn sext(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
    // `_sat` variants saturate at the integer type's bounds instead of
    // producing undefined/poison results for out-of-range floats.
    fn fptoui_sat(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
    fn fptosi_sat(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
    fn fptoui(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
    fn fptosi(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
    fn uitofp(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
    fn sitofp(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
    fn fptrunc(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
    fn fpext(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
    fn ptrtoint(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
    fn inttoptr(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
    fn bitcast(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
    fn intcast(&mut self, val: Self::Value, dest_ty: Self::Type, is_signed: bool) -> Self::Value;
    fn pointercast(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;

    /// Float-to-int cast with Rust semantics: saturating by default, plain
    /// (non-saturating) only when `-Z saturating-float-casts=false` is set.
    /// Handles both scalar and vector operands (element types are checked).
    fn cast_float_to_int(
        &mut self,
        signed: bool,
        x: Self::Value,
        dest_ty: Self::Type,
    ) -> Self::Value {
        let in_ty = self.cx().val_ty(x);
        // For vector casts, validate the *element* types instead.
        let (float_ty, int_ty) = if self.cx().type_kind(dest_ty) == TypeKind::Vector
            && self.cx().type_kind(in_ty) == TypeKind::Vector
        {
            (self.cx().element_type(in_ty), self.cx().element_type(dest_ty))
        } else {
            (in_ty, dest_ty)
        };
        assert!(matches!(self.cx().type_kind(float_ty), TypeKind::Float | TypeKind::Double));
        assert_eq!(self.cx().type_kind(int_ty), TypeKind::Integer);

        if let Some(false) = self.cx().sess().opts.unstable_opts.saturating_float_casts {
            return if signed { self.fptosi(x, dest_ty) } else { self.fptoui(x, dest_ty) };
        }

        if signed { self.fptosi_sat(x, dest_ty) } else { self.fptoui_sat(x, dest_ty) }
    }

    // --- Comparisons ---

    fn icmp(&mut self, op: IntPredicate, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
    fn fcmp(&mut self, op: RealPredicate, lhs: Self::Value, rhs: Self::Value) -> Self::Value;

    // --- Bulk memory operations ---

    fn memcpy(
        &mut self,
        dst: Self::Value,
        dst_align: Align,
        src: Self::Value,
        src_align: Align,
        size: Self::Value,
        flags: MemFlags,
    );
    fn memmove(
        &mut self,
        dst: Self::Value,
        dst_align: Align,
        src: Self::Value,
        src_align: Align,
        size: Self::Value,
        flags: MemFlags,
    );
    fn memset(
        &mut self,
        ptr: Self::Value,
        fill_byte: Self::Value,
        size: Self::Value,
        align: Align,
        flags: MemFlags,
    );

    /// *Typed* copy for non-overlapping places.
    ///
    /// Has a default implementation in terms of `memcpy`, but specific backends
    /// can override to do something smarter if possible.
    ///
    /// (For example, typed load-stores with alias metadata.)
    fn typed_place_copy(
        &mut self,
        dst: PlaceRef<'tcx, Self::Value>,
        src: PlaceRef<'tcx, Self::Value>,
    ) {
        // Unsized (extra-carrying) places and mismatched sizes are not
        // supported here.
        debug_assert!(src.llextra.is_none());
        debug_assert!(dst.llextra.is_none());
        debug_assert_eq!(dst.layout.size, src.layout.size);
        if self.sess().opts.optimize == OptLevel::No && self.is_backend_immediate(dst.layout) {
            // If we're not optimizing, the aliasing information from `memcpy`
            // isn't useful, so just load-store the value for smaller code.
            let temp = self.load_operand(src);
            temp.val.store(self, dst);
        } else if !dst.layout.is_zst() {
            let bytes = self.const_usize(dst.layout.size.bytes());
            self.memcpy(dst.llval, dst.align, src.llval, src.align, bytes, MemFlags::empty());
        }
    }

    /// *Typed* swap for non-overlapping places.
    ///
    /// Avoids `alloca`s for Immediates and ScalarPairs.
    ///
    /// FIXME: Maybe do something smarter for Ref types too?
    /// For now, the `typed_swap` intrinsic just doesn't call this for those
    /// cases (in non-debug), preferring the fallback body instead.
    fn typed_place_swap(
        &mut self,
        left: PlaceRef<'tcx, Self::Value>,
        right: PlaceRef<'tcx, Self::Value>,
    ) {
        let mut temp = self.load_operand(left);
        if let OperandValue::Ref(..) = temp.val {
            // The SSA value isn't stand-alone, so we need to copy it elsewhere
            let alloca = PlaceRef::alloca(self, left.layout);
            self.typed_place_copy(alloca, left);
            temp = self.load_operand(alloca);
        }
        self.typed_place_copy(left, right);
        temp.val.store(self, right);
    }

    /// Conditional select: `cond ? then_val : else_val` without branching.
    fn select(
        &mut self,
        cond: Self::Value,
        then_val: Self::Value,
        else_val: Self::Value,
    ) -> Self::Value;

    /// Reads the next argument of type `ty` from a C `va_list`.
    fn va_arg(&mut self, list: Self::Value, ty: Self::Type) -> Self::Value;
    // --- Vector and aggregate element access ---
    fn extract_element(&mut self, vec: Self::Value, idx: Self::Value) -> Self::Value;
    fn vector_splat(&mut self, num_elts: usize, elt: Self::Value) -> Self::Value;
    fn extract_value(&mut self, agg_val: Self::Value, idx: u64) -> Self::Value;
    fn insert_value(&mut self, agg_val: Self::Value, elt: Self::Value, idx: u64) -> Self::Value;

    // --- Unwinding / exception handling ---

    fn set_personality_fn(&mut self, personality: Self::Value);

    // These are used by everyone except msvc
    fn cleanup_landing_pad(&mut self, pers_fn: Self::Value) -> (Self::Value, Self::Value);
    fn filter_landing_pad(&mut self, pers_fn: Self::Value) -> (Self::Value, Self::Value);
    fn resume(&mut self, exn0: Self::Value, exn1: Self::Value);

    // These are used only by msvc
    fn cleanup_pad(&mut self, parent: Option<Self::Value>, args: &[Self::Value]) -> Self::Funclet;
    fn cleanup_ret(&mut self, funclet: &Self::Funclet, unwind: Option<Self::BasicBlock>);
    fn catch_pad(&mut self, parent: Self::Value, args: &[Self::Value]) -> Self::Funclet;
    fn catch_switch(
        &mut self,
        parent: Option<Self::Value>,
        unwind: Option<Self::BasicBlock>,
        handlers: &[Self::BasicBlock],
    ) -> Self::Value;

    // --- Atomics ---

    /// Atomic compare-and-exchange; returns the loaded value and a success
    /// flag.
    fn atomic_cmpxchg(
        &mut self,
        dst: Self::Value,
        cmp: Self::Value,
        src: Self::Value,
        order: AtomicOrdering,
        failure_order: AtomicOrdering,
        weak: bool,
    ) -> (Self::Value, Self::Value);
    /// Atomic read-modify-write; returns the previous value at `dst`.
    fn atomic_rmw(
        &mut self,
        op: AtomicRmwBinOp,
        dst: Self::Value,
        src: Self::Value,
        order: AtomicOrdering,
    ) -> Self::Value;
    fn atomic_fence(&mut self, order: AtomicOrdering, scope: SynchronizationScope);
    /// Marks a load as invariant (the loaded memory never changes).
    fn set_invariant_load(&mut self, load: Self::Value);

    /// Called for `StorageLive`
    fn lifetime_start(&mut self, ptr: Self::Value, size: Size);

    /// Called for `StorageDead`
    fn lifetime_end(&mut self, ptr: Self::Value, size: Size);

    /// Emits a profiling-counter increment (used by coverage/PGO
    /// instrumentation).
    fn instrprof_increment(
        &mut self,
        fn_name: Self::Value,
        hash: Self::Value,
        num_counters: Self::Value,
        index: Self::Value,
    );

    /// Plain (non-unwinding) call; see [`Self::invoke`] for the unwinding
    /// variant with the same optional extras.
    fn call(
        &mut self,
        llty: Self::Type,
        fn_attrs: Option<&CodegenFnAttrs>,
        fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>,
        llfn: Self::Value,
        args: &[Self::Value],
        funclet: Option<&Self::Funclet>,
        instance: Option<Instance<'tcx>>,
    ) -> Self::Value;
    fn zext(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;

    /// Applies backend-specific attributes to a call site on a cleanup
    /// (unwind) path.
    fn apply_attrs_to_cleanup_callsite(&mut self, llret: Self::Value);
}
|