2021-08-03 22:09:57 +00:00
|
|
|
|
use crate::abi::{Abi, FnAbi, FnAbiLlvmExt, LlvmType, PassMode};
|
2019-12-22 22:42:04 +00:00
|
|
|
|
use crate::builder::Builder;
|
2019-02-17 18:58:58 +00:00
|
|
|
|
use crate::context::CodegenCx;
|
2019-12-22 22:42:04 +00:00
|
|
|
|
use crate::llvm;
|
2019-02-17 18:58:58 +00:00
|
|
|
|
use crate::type_::Type;
|
|
|
|
|
use crate::type_of::LayoutLlvmExt;
|
|
|
|
|
use crate::va_arg::emit_va_arg;
|
2019-12-22 22:42:04 +00:00
|
|
|
|
use crate::value::Value;
|
2020-03-31 16:16:47 +00:00
|
|
|
|
|
2021-08-10 10:50:33 +00:00
|
|
|
|
use rustc_ast as ast;
|
2020-08-29 16:10:01 +00:00
|
|
|
|
use rustc_codegen_ssa::base::{compare_simd_types, wants_msvc_seh};
|
2020-03-31 16:16:47 +00:00
|
|
|
|
use rustc_codegen_ssa::common::span_invalid_monomorphization_error;
|
2020-06-16 08:37:34 +00:00
|
|
|
|
use rustc_codegen_ssa::common::{IntPredicate, TypeKind};
|
2020-09-15 21:35:31 +00:00
|
|
|
|
use rustc_codegen_ssa::mir::operand::OperandRef;
|
2019-12-22 22:42:04 +00:00
|
|
|
|
use rustc_codegen_ssa::mir::place::PlaceRef;
|
2020-03-31 16:16:47 +00:00
|
|
|
|
use rustc_codegen_ssa::traits::*;
|
2020-01-05 01:37:57 +00:00
|
|
|
|
use rustc_hir as hir;
|
2020-03-31 16:16:47 +00:00
|
|
|
|
use rustc_middle::ty::layout::{FnAbiExt, HasTyCtxt};
|
2020-03-29 14:41:09 +00:00
|
|
|
|
use rustc_middle::ty::{self, Ty};
|
|
|
|
|
use rustc_middle::{bug, span_bug};
|
2020-07-08 01:04:10 +00:00
|
|
|
|
use rustc_span::{sym, symbol::kw, Span, Symbol};
|
2020-03-31 16:16:47 +00:00
|
|
|
|
use rustc_target::abi::{self, HasDataLayout, LayoutOf, Primitive};
|
2020-03-31 22:15:39 +00:00
|
|
|
|
use rustc_target::spec::PanicStrategy;
|
2015-09-18 22:42:57 +00:00
|
|
|
|
|
2015-08-14 22:46:51 +00:00
|
|
|
|
use std::cmp::Ordering;
|
2020-04-04 15:25:45 +00:00
|
|
|
|
use std::iter;
|
2015-08-14 22:46:51 +00:00
|
|
|
|
|
2021-08-03 22:09:57 +00:00
|
|
|
|
/// Maps a Rust intrinsic symbol (e.g. `sym::sqrtf32`) to the name of the
/// corresponding "simple" LLVM intrinsic — one that can be lowered as a plain
/// call with the arguments passed through unchanged.
///
/// Returns `Some((fn_type, fn_value))` for the declared LLVM intrinsic, or
/// `None` when `name` is not one of these simple float intrinsics (callers
/// then fall through to the special-cased lowering in `codegen_intrinsic_call`).
fn get_simple_intrinsic(cx: &CodegenCx<'ll, '_>, name: Symbol) -> Option<(&'ll Type, &'ll Value)> {
    // One arm per intrinsic, in f32/f64 pairs; the LLVM name encodes the type.
    let llvm_name = match name {
        sym::sqrtf32 => "llvm.sqrt.f32",
        sym::sqrtf64 => "llvm.sqrt.f64",
        sym::powif32 => "llvm.powi.f32",
        sym::powif64 => "llvm.powi.f64",
        sym::sinf32 => "llvm.sin.f32",
        sym::sinf64 => "llvm.sin.f64",
        sym::cosf32 => "llvm.cos.f32",
        sym::cosf64 => "llvm.cos.f64",
        sym::powf32 => "llvm.pow.f32",
        sym::powf64 => "llvm.pow.f64",
        sym::expf32 => "llvm.exp.f32",
        sym::expf64 => "llvm.exp.f64",
        sym::exp2f32 => "llvm.exp2.f32",
        sym::exp2f64 => "llvm.exp2.f64",
        sym::logf32 => "llvm.log.f32",
        sym::logf64 => "llvm.log.f64",
        sym::log10f32 => "llvm.log10.f32",
        sym::log10f64 => "llvm.log10.f64",
        sym::log2f32 => "llvm.log2.f32",
        sym::log2f64 => "llvm.log2.f64",
        sym::fmaf32 => "llvm.fma.f32",
        sym::fmaf64 => "llvm.fma.f64",
        sym::fabsf32 => "llvm.fabs.f32",
        sym::fabsf64 => "llvm.fabs.f64",
        sym::minnumf32 => "llvm.minnum.f32",
        sym::minnumf64 => "llvm.minnum.f64",
        sym::maxnumf32 => "llvm.maxnum.f32",
        sym::maxnumf64 => "llvm.maxnum.f64",
        sym::copysignf32 => "llvm.copysign.f32",
        sym::copysignf64 => "llvm.copysign.f64",
        sym::floorf32 => "llvm.floor.f32",
        sym::floorf64 => "llvm.floor.f64",
        sym::ceilf32 => "llvm.ceil.f32",
        sym::ceilf64 => "llvm.ceil.f64",
        sym::truncf32 => "llvm.trunc.f32",
        sym::truncf64 => "llvm.trunc.f64",
        sym::rintf32 => "llvm.rint.f32",
        sym::rintf64 => "llvm.rint.f64",
        sym::nearbyintf32 => "llvm.nearbyint.f32",
        sym::nearbyintf64 => "llvm.nearbyint.f64",
        sym::roundf32 => "llvm.round.f32",
        sym::roundf64 => "llvm.round.f64",
        _ => return None,
    };
    Some(cx.get_intrinsic(&llvm_name))
}
|
|
|
|
|
|
2018-09-20 13:47:22 +00:00
|
|
|
|
impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> {
    /// Lowers a call to a Rust intrinsic (`instance`) into LLVM IR, writing any
    /// produced value into `llresult` (a pointer to the return slot described by
    /// `fn_abi.ret`). Intrinsics that store through `llresult` themselves (or
    /// produce nothing) `return` early; all other arms yield an `&'ll Value`
    /// that is stored into the return slot at the bottom of the function.
    fn codegen_intrinsic_call(
        &mut self,
        instance: ty::Instance<'tcx>,
        fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
        args: &[OperandRef<'tcx, &'ll Value>],
        llresult: &'ll Value,
        span: Span,
    ) {
        let tcx = self.tcx;
        let callee_ty = instance.ty(tcx, ty::ParamEnv::reveal_all());

        // Intrinsics are always `FnDef`s; pull out the DefId and substitutions.
        let (def_id, substs) = match *callee_ty.kind() {
            ty::FnDef(def_id, substs) => (def_id, substs),
            _ => bug!("expected fn item type, found {}", callee_ty),
        };

        let sig = callee_ty.fn_sig(tcx);
        let sig = tcx.normalize_erasing_late_bound_regions(ty::ParamEnv::reveal_all(), sig);
        let arg_tys = sig.inputs();
        let ret_ty = sig.output();
        let name = tcx.item_name(def_id);
        let name_str = &*name.as_str();

        let llret_ty = self.layout_of(ret_ty).llvm_type(self);
        // View of the return slot as a place, used by arms that store directly.
        let result = PlaceRef::new_sized(llresult, fn_abi.ret.layout);

        let simple = get_simple_intrinsic(self, name);
        let llval = match name {
            // Simple float intrinsics: forward all immediates to the LLVM intrinsic.
            _ if simple.is_some() => {
                let (simple_ty, simple_fn) = simple.unwrap();
                self.call(
                    simple_ty,
                    simple_fn,
                    &args.iter().map(|arg| arg.immediate()).collect::<Vec<_>>(),
                    None,
                )
            }
            // Branch-prediction hints lower to `llvm.expect.i1`.
            sym::likely => {
                self.call_intrinsic("llvm.expect.i1", &[args[0].immediate(), self.const_bool(true)])
            }
            sym::unlikely => self
                .call_intrinsic("llvm.expect.i1", &[args[0].immediate(), self.const_bool(false)]),
            // `catch_unwind`-style try: writes its result via `llresult` itself.
            kw::Try => {
                try_intrinsic(
                    self,
                    args[0].immediate(),
                    args[1].immediate(),
                    args[2].immediate(),
                    llresult,
                );
                return;
            }
            sym::breakpoint => self.call_intrinsic("llvm.debugtrap", &[]),
            sym::va_copy => {
                self.call_intrinsic("llvm.va_copy", &[args[0].immediate(), args[1].immediate()])
            }
            sym::va_arg => {
                match fn_abi.ret.layout.abi {
                    abi::Abi::Scalar(ref scalar) => {
                        match scalar.value {
                            Primitive::Int(..) => {
                                if self.cx().size_of(ret_ty).bytes() < 4 {
                                    // `va_arg` should not be called on an integer type
                                    // less than 4 bytes in length. If it is, promote
                                    // the integer to a `i32` and truncate the result
                                    // back to the smaller type.
                                    let promoted_result = emit_va_arg(self, args[0], tcx.types.i32);
                                    self.trunc(promoted_result, llret_ty)
                                } else {
                                    emit_va_arg(self, args[0], ret_ty)
                                }
                            }
                            Primitive::F64 | Primitive::Pointer => {
                                emit_va_arg(self, args[0], ret_ty)
                            }
                            // `va_arg` should never be used with the return type f32.
                            Primitive::F32 => bug!("the va_arg intrinsic does not work with `f32`"),
                        }
                    }
                    _ => bug!("the va_arg intrinsic does not work with non-scalar types"),
                }
            }

            sym::volatile_load | sym::unaligned_volatile_load => {
                let tp_ty = substs.type_at(0);
                let ptr = args[0].immediate();
                // If the ABI casts the return value, load through the cast type
                // so the loaded bits match what the caller expects.
                let load = if let PassMode::Cast(ty) = fn_abi.ret.mode {
                    let llty = ty.llvm_type(self);
                    let ptr = self.pointercast(ptr, self.type_ptr_to(llty));
                    self.volatile_load(llty, ptr)
                } else {
                    self.volatile_load(self.layout_of(tp_ty).llvm_type(self), ptr)
                };
                // The "unaligned" variant asserts alignment 1; otherwise use the
                // type's natural ABI alignment.
                let align = if name == sym::unaligned_volatile_load {
                    1
                } else {
                    self.align_of(tp_ty).bytes() as u32
                };
                unsafe {
                    llvm::LLVMSetAlignment(load, align);
                }
                self.to_immediate(load, self.layout_of(tp_ty))
            }
            sym::volatile_store => {
                let dst = args[0].deref(self.cx());
                args[1].val.volatile_store(self, dst);
                return;
            }
            sym::unaligned_volatile_store => {
                let dst = args[0].deref(self.cx());
                args[1].val.unaligned_volatile_store(self, dst);
                return;
            }
            sym::prefetch_read_data
            | sym::prefetch_write_data
            | sym::prefetch_read_instruction
            | sym::prefetch_write_instruction => {
                // (rw, cache_type) encoding per `llvm.prefetch`:
                // rw: 0 = read, 1 = write; cache_type: 0 = instruction, 1 = data.
                let (rw, cache_type) = match name {
                    sym::prefetch_read_data => (0, 1),
                    sym::prefetch_write_data => (1, 1),
                    sym::prefetch_read_instruction => (0, 0),
                    sym::prefetch_write_instruction => (1, 0),
                    _ => bug!(),
                };
                self.call_intrinsic(
                    "llvm.prefetch",
                    &[
                        args[0].immediate(),
                        self.const_i32(rw),
                        args[1].immediate(),
                        self.const_i32(cache_type),
                    ],
                )
            }
            // Integer bit-manipulation intrinsics: all require a basic integer
            // operand type; `int_type_width_signed` yields its width/signedness.
            sym::ctlz
            | sym::ctlz_nonzero
            | sym::cttz
            | sym::cttz_nonzero
            | sym::ctpop
            | sym::bswap
            | sym::bitreverse
            | sym::rotate_left
            | sym::rotate_right
            | sym::saturating_add
            | sym::saturating_sub => {
                let ty = arg_tys[0];
                match int_type_width_signed(ty, self) {
                    Some((width, signed)) => match name {
                        sym::ctlz | sym::cttz => {
                            // Second argument `false`: result is defined for a zero input.
                            let y = self.const_bool(false);
                            self.call_intrinsic(
                                &format!("llvm.{}.i{}", name, width),
                                &[args[0].immediate(), y],
                            )
                        }
                        sym::ctlz_nonzero | sym::cttz_nonzero => {
                            // Second argument `true`: caller guarantees a nonzero input.
                            let y = self.const_bool(true);
                            // `&name_str[..4]` strips the `_nonzero` suffix ("ctlz"/"cttz").
                            let llvm_name = &format!("llvm.{}.i{}", &name_str[..4], width);
                            self.call_intrinsic(llvm_name, &[args[0].immediate(), y])
                        }
                        sym::ctpop => self.call_intrinsic(
                            &format!("llvm.ctpop.i{}", width),
                            &[args[0].immediate()],
                        ),
                        sym::bswap => {
                            if width == 8 {
                                args[0].immediate() // byte swap a u8/i8 is just a no-op
                            } else {
                                self.call_intrinsic(
                                    &format!("llvm.bswap.i{}", width),
                                    &[args[0].immediate()],
                                )
                            }
                        }
                        sym::bitreverse => self.call_intrinsic(
                            &format!("llvm.bitreverse.i{}", width),
                            &[args[0].immediate()],
                        ),
                        sym::rotate_left | sym::rotate_right => {
                            let is_left = name == sym::rotate_left;
                            let val = args[0].immediate();
                            let raw_shift = args[1].immediate();
                            // rotate = funnel shift with first two args the same
                            let llvm_name =
                                &format!("llvm.fsh{}.i{}", if is_left { 'l' } else { 'r' }, width);
                            self.call_intrinsic(llvm_name, &[val, val, raw_shift])
                        }
                        sym::saturating_add | sym::saturating_sub => {
                            let is_add = name == sym::saturating_add;
                            let lhs = args[0].immediate();
                            let rhs = args[1].immediate();
                            let llvm_name = &format!(
                                "llvm.{}{}.sat.i{}",
                                if signed { 's' } else { 'u' },
                                if is_add { "add" } else { "sub" },
                                width
                            );
                            self.call_intrinsic(llvm_name, &[lhs, rhs])
                        }
                        _ => bug!(),
                    },
                    None => {
                        span_invalid_monomorphization_error(
                            tcx.sess,
                            span,
                            &format!(
                                "invalid monomorphization of `{}` intrinsic: \
                                 expected basic integer type, found `{}`",
                                name, ty
                            ),
                        );
                        return;
                    }
                }
            }

            sym::raw_eq => {
                use abi::Abi::*;
                let tp_ty = substs.type_at(0);
                let layout = self.layout_of(tp_ty).layout;
                // Small/scalar layouts compare as one wide integer; larger
                // aggregates fall back to a `memcmp` call below.
                let use_integer_compare = match layout.abi {
                    Scalar(_) | ScalarPair(_, _) => true,
                    Uninhabited | Vector { .. } => false,
                    Aggregate { .. } => {
                        // For rusty ABIs, small aggregates are actually passed
                        // as `RegKind::Integer` (see `FnAbi::adjust_for_abi`),
                        // so we re-use that same threshold here.
                        layout.size <= self.data_layout().pointer_size * 2
                    }
                };

                let a = args[0].immediate();
                let b = args[1].immediate();
                if layout.size.bytes() == 0 {
                    // Zero-sized types are always equal.
                    self.const_bool(true)
                } else if use_integer_compare {
                    let integer_ty = self.type_ix(layout.size.bits());
                    let ptr_ty = self.type_ptr_to(integer_ty);
                    let a_ptr = self.bitcast(a, ptr_ty);
                    let a_val = self.load(integer_ty, a_ptr, layout.align.abi);
                    let b_ptr = self.bitcast(b, ptr_ty);
                    let b_val = self.load(integer_ty, b_ptr, layout.align.abi);
                    self.icmp(IntPredicate::IntEQ, a_val, b_val)
                } else {
                    let i8p_ty = self.type_i8p();
                    let a_ptr = self.bitcast(a, i8p_ty);
                    let b_ptr = self.bitcast(b, i8p_ty);
                    let n = self.const_usize(layout.size.bytes());
                    let cmp = self.call_intrinsic("memcmp", &[a_ptr, b_ptr, n]);
                    self.icmp(IntPredicate::IntEQ, cmp, self.const_i32(0))
                }
            }

            sym::black_box => {
                args[0].val.store(self, result);

                // We need to "use" the argument in some way LLVM can't introspect, and on
                // targets that support it we can typically leverage inline assembly to do
                // this. LLVM's interpretation of inline assembly is that it's, well, a black
                // box. This isn't the greatest implementation since it probably deoptimizes
                // more than we want, but it's so far good enough.
                crate::asm::inline_asm_call(
                    self,
                    "",
                    "r,~{memory}",
                    &[result.llval],
                    self.type_void(),
                    true,
                    false,
                    ast::LlvmAsmDialect::Att,
                    &[span],
                )
                .unwrap_or_else(|| bug!("failed to generate inline asm call for `black_box`"));

                // We have copied the value to `result` already.
                return;
            }

            // All `simd_*` intrinsics are handled by the generic SIMD lowering.
            _ if name_str.starts_with("simd_") => {
                match generic_simd_intrinsic(self, name, callee_ty, args, ret_ty, llret_ty, span) {
                    Ok(llval) => llval,
                    Err(()) => return,
                }
            }

            _ => bug!("unknown intrinsic '{}'", name),
        };

        // Store the computed value into the return slot, honoring any ABI cast.
        if !fn_abi.ret.is_ignore() {
            if let PassMode::Cast(ty) = fn_abi.ret.mode {
                let ptr_llty = self.type_ptr_to(ty.llvm_type(self));
                let ptr = self.pointercast(result.llval, ptr_llty);
                self.store(llval, ptr, result.align);
            } else {
                OperandRef::from_immediate_or_packed_pair(self, llval, result.layout)
                    .val
                    .store(self, result);
            }
        }
    }

    /// Emits `llvm.trap` (immediate abort).
    fn abort(&mut self) {
        self.call_intrinsic("llvm.trap", &[]);
    }

    /// Emits `llvm.assume(val)`, telling the optimizer `val` is true.
    fn assume(&mut self, val: Self::Value) {
        self.call_intrinsic("llvm.assume", &[val]);
    }

    /// Emits `llvm.expect.i1(cond, expected)` and returns the hinted condition.
    fn expect(&mut self, cond: Self::Value, expected: bool) -> Self::Value {
        self.call_intrinsic("llvm.expect.i1", &[cond, self.const_bool(expected)])
    }

    fn sideeffect(&mut self) {
        // This kind of check would make a ton of sense in the caller, but currently the only
        // caller of this function is in `rustc_codegen_ssa`, which is agnostic to whether LLVM
        // codegen backend being used, and so is unable to check the LLVM version.
        if unsafe { llvm::LLVMRustVersionMajor() } < 12 {
            self.call_intrinsic("llvm.sideeffect", &[]);
        }
    }

    /// Emits `llvm.va_start` on the given `va_list` pointer.
    fn va_start(&mut self, va_list: &'ll Value) -> &'ll Value {
        self.call_intrinsic("llvm.va_start", &[va_list])
    }

    /// Emits `llvm.va_end` on the given `va_list` pointer.
    fn va_end(&mut self, va_list: &'ll Value) -> &'ll Value {
        self.call_intrinsic("llvm.va_end", &[va_list])
    }
}
|
|
|
|
|
|
2018-07-02 14:52:53 +00:00
|
|
|
|
/// Lowers the `try` intrinsic: invoke `try_func(data)`, and if it unwinds,
/// call `catch_func(data, exception)` instead of propagating. Writes an `i32`
/// status (0 = no panic) through `dest`.
///
/// Dispatches on the panic strategy and target: with `-C panic=abort` no
/// landing pads are needed at all; otherwise the SEH (MSVC), Emscripten, or
/// GNU/Itanium-style lowering is used.
fn try_intrinsic(
    bx: &mut Builder<'a, 'll, 'tcx>,
    try_func: &'ll Value,
    data: &'ll Value,
    catch_func: &'ll Value,
    dest: &'ll Value,
) {
    if bx.sess().panic_strategy() == PanicStrategy::Abort {
        // With panic=abort there is nothing to catch: just call `try_func`.
        let try_func_ty = bx.type_func(&[bx.type_i8p()], bx.type_void());
        bx.call(try_func_ty, try_func, &[data], None);
        // Return 0 unconditionally from the intrinsic call;
        // we can never unwind.
        let ret_align = bx.tcx().data_layout.i32_align.abi;
        bx.store(bx.const_i32(0), dest, ret_align);
    } else if wants_msvc_seh(bx.sess()) {
        codegen_msvc_try(bx, try_func, data, catch_func, dest);
    } else if bx.sess().target.is_like_emscripten {
        codegen_emcc_try(bx, try_func, data, catch_func, dest);
    } else {
        codegen_gnu_try(bx, try_func, data, catch_func, dest);
    }
}
|
|
|
|
|
|
2015-10-24 01:18:44 +00:00
|
|
|
|
// MSVC's definition of the `rust_try` function.
|
2015-07-20 20:27:38 +00:00
|
|
|
|
//
|
2015-10-24 01:18:44 +00:00
|
|
|
|
// This implementation uses the new exception handling instructions in LLVM
|
|
|
|
|
// which have support in LLVM for SEH on MSVC targets. Although these
|
|
|
|
|
// instructions are meant to work for all targets, as of the time of this
|
|
|
|
|
// writing, however, LLVM does not recommend the usage of these new instructions
|
|
|
|
|
// as the old ones are still more optimized.
|
2018-07-02 14:52:53 +00:00
|
|
|
|
fn codegen_msvc_try(
|
2018-10-05 13:08:49 +00:00
|
|
|
|
bx: &mut Builder<'a, 'll, 'tcx>,
|
2020-03-02 13:59:20 +00:00
|
|
|
|
try_func: &'ll Value,
|
2018-07-10 10:28:39 +00:00
|
|
|
|
data: &'ll Value,
|
2020-03-02 13:59:20 +00:00
|
|
|
|
catch_func: &'ll Value,
|
2018-07-10 10:28:39 +00:00
|
|
|
|
dest: &'ll Value,
|
2018-07-02 14:52:53 +00:00
|
|
|
|
) {
|
2021-08-03 22:09:57 +00:00
|
|
|
|
let (llty, llfn) = get_rust_try_fn(bx, &mut |mut bx| {
|
2018-11-27 18:00:25 +00:00
|
|
|
|
bx.set_personality_fn(bx.eh_personality());
|
2015-07-20 20:27:38 +00:00
|
|
|
|
|
2018-10-04 13:23:10 +00:00
|
|
|
|
let mut normal = bx.build_sibling_block("normal");
|
2018-10-05 13:08:49 +00:00
|
|
|
|
let mut catchswitch = bx.build_sibling_block("catchswitch");
|
2020-03-21 07:50:38 +00:00
|
|
|
|
let mut catchpad_rust = bx.build_sibling_block("catchpad_rust");
|
|
|
|
|
let mut catchpad_foreign = bx.build_sibling_block("catchpad_foreign");
|
2018-10-04 13:23:10 +00:00
|
|
|
|
let mut caught = bx.build_sibling_block("caught");
|
2015-07-20 20:27:38 +00:00
|
|
|
|
|
2020-03-02 13:59:20 +00:00
|
|
|
|
let try_func = llvm::get_param(bx.llfn(), 0);
|
2018-01-05 05:12:32 +00:00
|
|
|
|
let data = llvm::get_param(bx.llfn(), 1);
|
2020-03-02 13:59:20 +00:00
|
|
|
|
let catch_func = llvm::get_param(bx.llfn(), 2);
|
2015-07-20 20:27:38 +00:00
|
|
|
|
|
2015-10-24 01:18:44 +00:00
|
|
|
|
// We're generating an IR snippet that looks like:
|
|
|
|
|
//
|
2020-03-02 13:59:20 +00:00
|
|
|
|
// declare i32 @rust_try(%try_func, %data, %catch_func) {
|
2020-03-21 07:50:38 +00:00
|
|
|
|
// %slot = alloca i8*
|
2020-03-02 13:59:20 +00:00
|
|
|
|
// invoke %try_func(%data) to label %normal unwind label %catchswitch
|
2015-10-24 01:18:44 +00:00
|
|
|
|
//
|
|
|
|
|
// normal:
|
|
|
|
|
// ret i32 0
|
2015-07-20 20:27:38 +00:00
|
|
|
|
//
|
2015-10-24 01:18:44 +00:00
|
|
|
|
// catchswitch:
|
2020-03-21 07:50:38 +00:00
|
|
|
|
// %cs = catchswitch within none [%catchpad_rust, %catchpad_foreign] unwind to caller
|
2015-07-20 20:27:38 +00:00
|
|
|
|
//
|
2020-03-21 07:50:38 +00:00
|
|
|
|
// catchpad_rust:
|
|
|
|
|
// %tok = catchpad within %cs [%type_descriptor, 8, %slot]
|
2020-03-02 13:59:20 +00:00
|
|
|
|
// %ptr = load %slot
|
|
|
|
|
// call %catch_func(%data, %ptr)
|
2015-10-24 01:18:44 +00:00
|
|
|
|
// catchret from %tok to label %caught
|
2015-07-20 20:27:38 +00:00
|
|
|
|
//
|
2020-03-21 07:50:38 +00:00
|
|
|
|
// catchpad_foreign:
|
|
|
|
|
// %tok = catchpad within %cs [null, 64, null]
|
|
|
|
|
// call %catch_func(%data, null)
|
|
|
|
|
// catchret from %tok to label %caught
|
|
|
|
|
//
|
2015-10-24 01:18:44 +00:00
|
|
|
|
// caught:
|
|
|
|
|
// ret i32 1
|
|
|
|
|
// }
|
2015-07-20 20:27:38 +00:00
|
|
|
|
//
|
rustc: Use C++ personalities on MSVC
Currently the compiler has two relatively critical bugs in the implementation of
MSVC unwinding:
* #33112 - faults like segfaults and illegal instructions will run destructors
in Rust, meaning we keep running code after a super-fatal exception
has happened.
* #33116 - When compiling with LTO plus `-Z no-landing-pads` (or `-C
panic=abort` with the previous commit) LLVM won't remove all `invoke`
instructions, meaning that some landing pads stick around and
cleanups may be run due to the previous bug.
These both stem from the flavor of "personality function" that Rust uses for
unwinding on MSVC. On 32-bit this is `_except_handler3` and on 64-bit this is
`__C_specific_handler`, but they both essentially are the "most generic"
personality functions for catching exceptions and running cleanups. That is,
thse two personalities will run cleanups for all exceptions unconditionally, so
when we use them we run cleanups for **all SEH exceptions** (include things like
segfaults).
Note that this also explains why LLVM won't optimize away `invoke` instructions.
These functions can legitimately still unwind (the `nounwind` attribute only
seems to apply to "C++ exception-like unwining"). Also note that the standard
library only *catches* Rust exceptions, not others like segfaults and illegal
instructions.
LLVM has support for another personality, `__CxxFrameHandler3`, which does not
run cleanups for general exceptions, only C++ exceptions thrown by
`_CxxThrowException`. This essentially ideally matches our use case, so this
commit moves us over to using this well-known personality function as well as
exception-throwing function.
This doesn't *seem* to pull in any extra runtime dependencies just yet, but if
it does we can perhaps try to work out how to implement more of it in Rust
rather than relying on MSVCRT runtime bits.
More details about how this is actually implemented can be found in the changes
itself, but this...
Closes #33112
Closes #33116
2016-04-26 21:30:01 +00:00
|
|
|
|
// This structure follows the basic usage of throw/try/catch in LLVM.
|
|
|
|
|
// For example, compile this C++ snippet to see what LLVM generates:
|
|
|
|
|
//
|
2019-10-27 22:33:25 +00:00
|
|
|
|
// struct rust_panic {
|
2019-12-29 20:16:20 +00:00
|
|
|
|
// rust_panic(const rust_panic&);
|
|
|
|
|
// ~rust_panic();
|
|
|
|
|
//
|
2020-03-21 07:50:38 +00:00
|
|
|
|
// void* x[2];
|
2020-03-02 13:59:20 +00:00
|
|
|
|
// };
|
2019-10-27 22:33:25 +00:00
|
|
|
|
//
|
2020-03-02 13:59:20 +00:00
|
|
|
|
// int __rust_try(
|
|
|
|
|
// void (*try_func)(void*),
|
|
|
|
|
// void *data,
|
|
|
|
|
// void (*catch_func)(void*, void*) noexcept
|
|
|
|
|
// ) {
|
rustc: Use C++ personalities on MSVC
Currently the compiler has two relatively critical bugs in the implementation of
MSVC unwinding:
* #33112 - faults like segfaults and illegal instructions will run destructors
in Rust, meaning we keep running code after a super-fatal exception
has happened.
* #33116 - When compiling with LTO plus `-Z no-landing-pads` (or `-C
panic=abort` with the previous commit) LLVM won't remove all `invoke`
instructions, meaning that some landing pads stick around and
cleanups may be run due to the previous bug.
These both stem from the flavor of "personality function" that Rust uses for
unwinding on MSVC. On 32-bit this is `_except_handler3` and on 64-bit this is
`__C_specific_handler`, but they both essentially are the "most generic"
personality functions for catching exceptions and running cleanups. That is,
thse two personalities will run cleanups for all exceptions unconditionally, so
when we use them we run cleanups for **all SEH exceptions** (include things like
segfaults).
Note that this also explains why LLVM won't optimize away `invoke` instructions.
These functions can legitimately still unwind (the `nounwind` attribute only
seems to apply to "C++ exception-like unwining"). Also note that the standard
library only *catches* Rust exceptions, not others like segfaults and illegal
instructions.
LLVM has support for another personality, `__CxxFrameHandler3`, which does not
run cleanups for general exceptions, only C++ exceptions thrown by
`_CxxThrowException`. This essentially ideally matches our use case, so this
commit moves us over to using this well-known personality function as well as
exception-throwing function.
This doesn't *seem* to pull in any extra runtime dependencies just yet, but if
it does we can perhaps try to work out how to implement more of it in Rust
rather than relying on MSVCRT runtime bits.
More details about how this is actually implemented can be found in the changes
itself, but this...
Closes #33112
Closes #33116
2016-04-26 21:30:01 +00:00
|
|
|
|
// try {
|
2020-03-02 13:59:20 +00:00
|
|
|
|
// try_func(data);
|
rustc: Use C++ personalities on MSVC
Currently the compiler has two relatively critical bugs in the implementation of
MSVC unwinding:
* #33112 - faults like segfaults and illegal instructions will run destructors
in Rust, meaning we keep running code after a super-fatal exception
has happened.
* #33116 - When compiling with LTO plus `-Z no-landing-pads` (or `-C
panic=abort` with the previous commit) LLVM won't remove all `invoke`
instructions, meaning that some landing pads stick around and
cleanups may be run due to the previous bug.
These both stem from the flavor of "personality function" that Rust uses for
unwinding on MSVC. On 32-bit this is `_except_handler3` and on 64-bit this is
`__C_specific_handler`, but they both essentially are the "most generic"
personality functions for catching exceptions and running cleanups. That is,
thse two personalities will run cleanups for all exceptions unconditionally, so
when we use them we run cleanups for **all SEH exceptions** (include things like
segfaults).
Note that this also explains why LLVM won't optimize away `invoke` instructions.
These functions can legitimately still unwind (the `nounwind` attribute only
seems to apply to "C++ exception-like unwining"). Also note that the standard
library only *catches* Rust exceptions, not others like segfaults and illegal
instructions.
LLVM has support for another personality, `__CxxFrameHandler3`, which does not
run cleanups for general exceptions, only C++ exceptions thrown by
`_CxxThrowException`. This essentially ideally matches our use case, so this
commit moves us over to using this well-known personality function as well as
exception-throwing function.
This doesn't *seem* to pull in any extra runtime dependencies just yet, but if
it does we can perhaps try to work out how to implement more of it in Rust
rather than relying on MSVCRT runtime bits.
More details about how this is actually implemented can be found in the changes
itself, but this...
Closes #33112
Closes #33116
2016-04-26 21:30:01 +00:00
|
|
|
|
// return 0;
|
2019-12-29 20:16:20 +00:00
|
|
|
|
// } catch(rust_panic& a) {
|
2020-03-02 13:59:20 +00:00
|
|
|
|
// catch_func(data, &a);
|
rustc: Use C++ personalities on MSVC
Currently the compiler has two relatively critical bugs in the implementation of
MSVC unwinding:
* #33112 - faults like segfaults and illegal instructions will run destructors
in Rust, meaning we keep running code after a super-fatal exception
has happened.
* #33116 - When compiling with LTO plus `-Z no-landing-pads` (or `-C
panic=abort` with the previous commit) LLVM won't remove all `invoke`
instructions, meaning that some landing pads stick around and
cleanups may be run due to the previous bug.
These both stem from the flavor of "personality function" that Rust uses for
unwinding on MSVC. On 32-bit this is `_except_handler3` and on 64-bit this is
`__C_specific_handler`, but they both essentially are the "most generic"
personality functions for catching exceptions and running cleanups. That is,
these two personalities will run cleanups for all exceptions unconditionally, so
when we use them we run cleanups for **all SEH exceptions** (including things like
segfaults).
Note that this also explains why LLVM won't optimize away `invoke` instructions.
These functions can legitimately still unwind (the `nounwind` attribute only
seems to apply to "C++ exception-like unwinding"). Also note that the standard
library only *catches* Rust exceptions, not others like segfaults and illegal
instructions.
LLVM has support for another personality, `__CxxFrameHandler3`, which does not
run cleanups for general exceptions, only C++ exceptions thrown by
`_CxxThrowException`. This essentially ideally matches our use case, so this
commit moves us over to using this well-known personality function as well as
exception-throwing function.
This doesn't *seem* to pull in any extra runtime dependencies just yet, but if
it does we can perhaps try to work out how to implement more of it in Rust
rather than relying on MSVCRT runtime bits.
More details about how this is actually implemented can be found in the changes
itself, but this...
Closes #33112
Closes #33116
2016-04-26 21:30:01 +00:00
|
|
|
|
// return 1;
|
2020-03-21 07:50:38 +00:00
|
|
|
|
// } catch(...) {
|
|
|
|
|
// catch_func(data, NULL);
|
|
|
|
|
// return 1;
|
rustc: Use C++ personalities on MSVC
Currently the compiler has two relatively critical bugs in the implementation of
MSVC unwinding:
* #33112 - faults like segfaults and illegal instructions will run destructors
in Rust, meaning we keep running code after a super-fatal exception
has happened.
* #33116 - When compiling with LTO plus `-Z no-landing-pads` (or `-C
panic=abort` with the previous commit) LLVM won't remove all `invoke`
instructions, meaning that some landing pads stick around and
cleanups may be run due to the previous bug.
These both stem from the flavor of "personality function" that Rust uses for
unwinding on MSVC. On 32-bit this is `_except_handler3` and on 64-bit this is
`__C_specific_handler`, but they both essentially are the "most generic"
personality functions for catching exceptions and running cleanups. That is,
these two personalities will run cleanups for all exceptions unconditionally, so
when we use them we run cleanups for **all SEH exceptions** (including things like
segfaults).
Note that this also explains why LLVM won't optimize away `invoke` instructions.
These functions can legitimately still unwind (the `nounwind` attribute only
seems to apply to "C++ exception-like unwinding"). Also note that the standard
library only *catches* Rust exceptions, not others like segfaults and illegal
instructions.
LLVM has support for another personality, `__CxxFrameHandler3`, which does not
run cleanups for general exceptions, only C++ exceptions thrown by
`_CxxThrowException`. This essentially ideally matches our use case, so this
commit moves us over to using this well-known personality function as well as
exception-throwing function.
This doesn't *seem* to pull in any extra runtime dependencies just yet, but if
it does we can perhaps try to work out how to implement more of it in Rust
rather than relying on MSVCRT runtime bits.
More details about how this is actually implemented can be found in the changes
itself, but this...
Closes #33112
Closes #33116
2016-04-26 21:30:01 +00:00
|
|
|
|
// }
|
|
|
|
|
// }
|
2015-10-24 01:18:44 +00:00
|
|
|
|
//
|
|
|
|
|
// More information can be found in libstd's seh.rs implementation.
|
2019-12-29 20:16:20 +00:00
|
|
|
|
let ptr_align = bx.tcx().data_layout.pointer_align.abi;
|
2020-03-02 13:59:20 +00:00
|
|
|
|
let slot = bx.alloca(bx.type_i8p(), ptr_align);
|
2021-08-03 22:09:57 +00:00
|
|
|
|
let try_func_ty = bx.type_func(&[bx.type_i8p()], bx.type_void());
|
|
|
|
|
bx.invoke(try_func_ty, try_func, &[data], normal.llbb(), catchswitch.llbb(), None);
|
2015-10-24 01:18:44 +00:00
|
|
|
|
|
2018-11-27 18:00:25 +00:00
|
|
|
|
normal.ret(bx.const_i32(0));
|
2015-10-24 01:18:44 +00:00
|
|
|
|
|
2020-03-21 07:50:38 +00:00
|
|
|
|
let cs = catchswitch.catch_switch(None, None, 2);
|
|
|
|
|
catchswitch.add_handler(cs, catchpad_rust.llbb());
|
|
|
|
|
catchswitch.add_handler(cs, catchpad_foreign.llbb());
|
2015-10-24 01:18:44 +00:00
|
|
|
|
|
2020-01-13 00:55:36 +00:00
|
|
|
|
// We can't use the TypeDescriptor defined in libpanic_unwind because it
|
|
|
|
|
// might be in another DLL and the SEH encoding only supports specifying
|
|
|
|
|
// a TypeDescriptor from the current module.
|
|
|
|
|
//
|
|
|
|
|
// However this isn't an issue since the MSVC runtime uses string
|
|
|
|
|
// comparison on the type name to match TypeDescriptors rather than
|
|
|
|
|
// pointer equality.
|
|
|
|
|
//
|
|
|
|
|
// So instead we generate a new TypeDescriptor in each module that uses
|
|
|
|
|
// `try` and let the linker merge duplicate definitions in the same
|
|
|
|
|
// module.
|
|
|
|
|
//
|
|
|
|
|
// When modifying, make sure that the type_name string exactly matches
|
|
|
|
|
// the one used in src/libpanic_unwind/seh.rs.
|
|
|
|
|
let type_info_vtable = bx.declare_global("??_7type_info@@6B@", bx.type_i8p());
|
|
|
|
|
let type_name = bx.const_bytes(b"rust_panic\0");
|
|
|
|
|
let type_info =
|
|
|
|
|
bx.const_struct(&[type_info_vtable, bx.const_null(bx.type_i8p()), type_name], false);
|
|
|
|
|
let tydesc = bx.declare_global("__rust_panic_type_info", bx.val_ty(type_info));
|
|
|
|
|
unsafe {
|
|
|
|
|
llvm::LLVMRustSetLinkage(tydesc, llvm::Linkage::LinkOnceODRLinkage);
|
|
|
|
|
llvm::SetUniqueComdat(bx.llmod, tydesc);
|
|
|
|
|
llvm::LLVMSetInitializer(tydesc, type_info);
|
|
|
|
|
}
|
|
|
|
|
|
2020-01-01 21:24:39 +00:00
|
|
|
|
// The flag value of 8 indicates that we are catching the exception by
|
2020-01-07 10:36:57 +00:00
|
|
|
|
// reference instead of by value. We can't use catch by value because
|
|
|
|
|
// that requires copying the exception object, which we don't support
|
|
|
|
|
// since our exception object effectively contains a Box.
|
2020-01-01 21:24:39 +00:00
|
|
|
|
//
|
|
|
|
|
// Source: MicrosoftCXXABI::getAddrOfCXXCatchHandlerType in clang
|
|
|
|
|
let flags = bx.const_i32(8);
|
2020-03-21 07:50:38 +00:00
|
|
|
|
let funclet = catchpad_rust.catch_pad(cs, &[tydesc, flags, slot]);
|
2021-07-04 16:53:04 +00:00
|
|
|
|
let ptr = catchpad_rust.load(bx.type_i8p(), slot, ptr_align);
|
2021-08-03 22:09:57 +00:00
|
|
|
|
let catch_ty = bx.type_func(&[bx.type_i8p(), bx.type_i8p()], bx.type_void());
|
|
|
|
|
catchpad_rust.call(catch_ty, catch_func, &[data, ptr], Some(&funclet));
|
2020-03-21 07:50:38 +00:00
|
|
|
|
catchpad_rust.catch_ret(&funclet, caught.llbb());
|
2019-12-29 20:16:20 +00:00
|
|
|
|
|
2020-03-21 07:50:38 +00:00
|
|
|
|
// The flag value of 64 indicates a "catch-all".
|
|
|
|
|
let flags = bx.const_i32(64);
|
|
|
|
|
let null = bx.const_null(bx.type_i8p());
|
|
|
|
|
let funclet = catchpad_foreign.catch_pad(cs, &[null, flags, null]);
|
2021-08-03 22:09:57 +00:00
|
|
|
|
catchpad_foreign.call(catch_ty, catch_func, &[data, null], Some(&funclet));
|
2020-03-21 07:50:38 +00:00
|
|
|
|
catchpad_foreign.catch_ret(&funclet, caught.llbb());
|
2015-10-24 01:18:44 +00:00
|
|
|
|
|
2018-11-27 18:00:25 +00:00
|
|
|
|
caught.ret(bx.const_i32(1));
|
2015-07-20 20:27:38 +00:00
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
// Note that no invoke is used here because by definition this function
|
|
|
|
|
// can't panic (that's what it's catching).
|
2021-08-03 22:09:57 +00:00
|
|
|
|
let ret = bx.call(llty, llfn, &[try_func, data, catch_func], None);
|
2018-09-08 22:16:45 +00:00
|
|
|
|
let i32_align = bx.tcx().data_layout.i32_align.abi;
|
2018-01-05 05:12:32 +00:00
|
|
|
|
bx.store(ret, dest, i32_align);
|
2015-07-20 20:27:38 +00:00
|
|
|
|
}
|
|
|
|
|
|
2019-02-28 22:43:53 +00:00
|
|
|
|
// Definition of the standard `try` function for Rust using the GNU-like model
|
|
|
|
|
// of exceptions (e.g., the normal semantics of LLVM's `landingpad` and `invoke`
|
2015-07-20 20:27:38 +00:00
|
|
|
|
// instructions).
|
|
|
|
|
//
|
2018-05-08 13:10:16 +00:00
|
|
|
|
// This codegen is a little surprising because we always call a shim
|
2015-10-24 01:18:44 +00:00
|
|
|
|
// function instead of inlining the call to `invoke` manually here. This is done
|
|
|
|
|
// because in LLVM we're only allowed to have one personality per function
|
|
|
|
|
// definition. The call to the `try` intrinsic is being inlined into the
|
|
|
|
|
// function calling it, and that function may already have other personality
|
|
|
|
|
// functions in play. By calling a shim we're guaranteed that our shim will have
|
|
|
|
|
// the right personality function.
|
2018-07-02 14:52:53 +00:00
|
|
|
|
// Emits the `__rust_try` shim for the GNU-like (landingpad/invoke) model of
// exceptions and calls it.
//
// `try_func` is an `unsafe fn(*mut i8)` pointer, `catch_func` an
// `unsafe fn(*mut i8, *mut i8)` pointer, and `data` the opaque payload
// forwarded to both. The shim's i32 result (0 = `try_func` returned normally,
// 1 = an exception was caught) is stored to `dest`.
fn codegen_gnu_try(
    bx: &mut Builder<'a, 'll, 'tcx>,
    try_func: &'ll Value,
    data: &'ll Value,
    catch_func: &'ll Value,
    dest: &'ll Value,
) {
    let (llty, llfn) = get_rust_try_fn(bx, &mut |mut bx| {
        // Codegens the shims described above:
        //
        //   bx:
        //      invoke %try_func(%data) normal %normal unwind %catch
        //
        //   normal:
        //      ret 0
        //
        //   catch:
        //      (%ptr, _) = landingpad
        //      call %catch_func(%data, %ptr)
        //      ret 1
        let mut then = bx.build_sibling_block("then");
        let mut catch = bx.build_sibling_block("catch");

        // This closure codegens the *shim's* body, so the arguments are the
        // shim's own parameters, not the outer `try_func`/`data`/`catch_func`.
        let try_func = llvm::get_param(bx.llfn(), 0);
        let data = llvm::get_param(bx.llfn(), 1);
        let catch_func = llvm::get_param(bx.llfn(), 2);
        let try_func_ty = bx.type_func(&[bx.type_i8p()], bx.type_void());
        bx.invoke(try_func_ty, try_func, &[data], then.llbb(), catch.llbb(), None);
        then.ret(bx.const_i32(0));

        // Type indicator for the exception being thrown.
        //
        // The first value in this tuple is a pointer to the exception object
        // being thrown. The second value is a "selector" indicating which of
        // the landing pad clauses the exception's type had been matched to.
        // rust_try ignores the selector.
        let lpad_ty = bx.type_struct(&[bx.type_i8p(), bx.type_i32()], false);
        let vals = catch.landing_pad(lpad_ty, bx.eh_personality(), 1);
        // A null type-info clause matches any exception, i.e. this is a
        // catch-all (see LLVM's landingpad "catch" clause semantics).
        let tydesc = bx.const_null(bx.type_i8p());
        catch.add_clause(vals, tydesc);
        let ptr = catch.extract_value(vals, 0);
        let catch_ty = bx.type_func(&[bx.type_i8p(), bx.type_i8p()], bx.type_void());
        catch.call(catch_ty, catch_func, &[data, ptr], None);
        catch.ret(bx.const_i32(1));
    });

    // Note that no invoke is used here because by definition this function
    // can't panic (that's what it's catching).
    let ret = bx.call(llty, llfn, &[try_func, data, catch_func], None);
    let i32_align = bx.tcx().data_layout.i32_align.abi;
    bx.store(ret, dest, i32_align);
}
|
|
|
|
|
|
|
|
|
|
// Variant of codegen_gnu_try used for emscripten where Rust panics are
|
|
|
|
|
// implemented using C++ exceptions. Here we use exceptions of a specific type
|
|
|
|
|
// (`struct rust_panic`) to represent Rust panics.
|
|
|
|
|
// Emscripten variant of the `__rust_try` shim: Rust panics are implemented as
// C++ exceptions, so the landing pad must distinguish `rust_panic` exceptions
// from foreign ones and report which kind was caught to `catch_func` via a
// `{ i8*, i8 }` out-parameter.
fn codegen_emcc_try(
    bx: &mut Builder<'a, 'll, 'tcx>,
    try_func: &'ll Value,
    data: &'ll Value,
    catch_func: &'ll Value,
    dest: &'ll Value,
) {
    let (llty, llfn) = get_rust_try_fn(bx, &mut |mut bx| {
        // Codegens the shims described above:
        //
        //   bx:
        //      invoke %try_func(%data) normal %normal unwind %catch
        //
        //   normal:
        //      ret 0
        //
        //   catch:
        //      (%ptr, %selector) = landingpad
        //      %rust_typeid = @llvm.eh.typeid.for(@_ZTI10rust_panic)
        //      %is_rust_panic = %selector == %rust_typeid
        //      %catch_data = alloca { i8*, i8 }
        //      %catch_data[0] = %ptr
        //      %catch_data[1] = %is_rust_panic
        //      call %catch_func(%data, %catch_data)
        //      ret 1
        let mut then = bx.build_sibling_block("then");
        let mut catch = bx.build_sibling_block("catch");

        // This closure codegens the *shim's* body, so the arguments are the
        // shim's own parameters, not the outer `try_func`/`data`/`catch_func`.
        let try_func = llvm::get_param(bx.llfn(), 0);
        let data = llvm::get_param(bx.llfn(), 1);
        let catch_func = llvm::get_param(bx.llfn(), 2);
        let try_func_ty = bx.type_func(&[bx.type_i8p()], bx.type_void());
        bx.invoke(try_func_ty, try_func, &[data], then.llbb(), catch.llbb(), None);
        then.ret(bx.const_i32(0));

        // Type indicator for the exception being thrown.
        //
        // The first value in this tuple is a pointer to the exception object
        // being thrown. The second value is a "selector" indicating which of
        // the landing pad clauses the exception's type had been matched to.
        let tydesc = bx.eh_catch_typeinfo();
        let lpad_ty = bx.type_struct(&[bx.type_i8p(), bx.type_i32()], false);
        let vals = catch.landing_pad(lpad_ty, bx.eh_personality(), 2);
        // First clause: the Rust panic type-info; second clause: null, i.e. a
        // catch-all for foreign exceptions.
        catch.add_clause(vals, tydesc);
        catch.add_clause(vals, bx.const_null(bx.type_i8p()));
        let ptr = catch.extract_value(vals, 0);
        let selector = catch.extract_value(vals, 1);

        // Check if the typeid we got is the one for a Rust panic.
        let rust_typeid = catch.call_intrinsic("llvm.eh.typeid.for", &[tydesc]);
        let is_rust_panic = catch.icmp(IntPredicate::IntEQ, selector, rust_typeid);
        let is_rust_panic = catch.zext(is_rust_panic, bx.type_bool());

        // We need to pass two values to catch_func (ptr and is_rust_panic), so
        // create an alloca and pass a pointer to that.
        let ptr_align = bx.tcx().data_layout.pointer_align.abi;
        let i8_align = bx.tcx().data_layout.i8_align.abi;
        let catch_data_type = bx.type_struct(&[bx.type_i8p(), bx.type_bool()], false);
        let catch_data = catch.alloca(catch_data_type, ptr_align);
        // Field 0: the exception-object pointer from the landing pad.
        let catch_data_0 = catch.inbounds_gep(
            catch_data_type,
            catch_data,
            &[bx.const_usize(0), bx.const_usize(0)],
        );
        catch.store(ptr, catch_data_0, ptr_align);
        // Field 1: the "was this a Rust panic?" flag.
        let catch_data_1 = catch.inbounds_gep(
            catch_data_type,
            catch_data,
            &[bx.const_usize(0), bx.const_usize(1)],
        );
        catch.store(is_rust_panic, catch_data_1, i8_align);
        // Erase the struct pointer to `*mut i8` to match `catch_func`'s signature.
        let catch_data = catch.bitcast(catch_data, bx.type_i8p());

        let catch_ty = bx.type_func(&[bx.type_i8p(), bx.type_i8p()], bx.type_void());
        catch.call(catch_ty, catch_func, &[data, catch_data], None);
        catch.ret(bx.const_i32(1));
    });

    // Note that no invoke is used here because by definition this function
    // can't panic (that's what it's catching).
    let ret = bx.call(llty, llfn, &[try_func, data, catch_func], None);
    let i32_align = bx.tcx().data_layout.i32_align.abi;
    bx.store(ret, dest, i32_align);
}
|
|
|
|
|
|
2018-05-08 13:10:16 +00:00
|
|
|
|
// Helper function to give a Block to a closure to codegen a shim function.
|
2015-10-24 01:18:44 +00:00
|
|
|
|
// This is currently primarily used for the `try` intrinsic functions above.
|
2018-07-02 14:52:53 +00:00
|
|
|
|
fn gen_fn<'ll, 'tcx>(
|
|
|
|
|
cx: &CodegenCx<'ll, 'tcx>,
|
|
|
|
|
name: &str,
|
2020-10-08 00:02:06 +00:00
|
|
|
|
rust_fn_sig: ty::PolyFnSig<'tcx>,
|
2018-07-02 14:52:53 +00:00
|
|
|
|
codegen: &mut dyn FnMut(Builder<'_, 'll, 'tcx>),
|
2021-08-03 22:09:57 +00:00
|
|
|
|
) -> (&'ll Type, &'ll Value) {
|
2019-10-29 19:57:45 +00:00
|
|
|
|
let fn_abi = FnAbi::of_fn_ptr(cx, rust_fn_sig, &[]);
|
2021-08-04 18:20:31 +00:00
|
|
|
|
let llty = fn_abi.llvm_type(cx);
|
2019-10-29 16:34:50 +00:00
|
|
|
|
let llfn = cx.declare_fn(name, &fn_abi);
|
2021-06-26 20:53:35 +00:00
|
|
|
|
cx.set_frame_pointer_type(llfn);
|
2020-01-14 17:42:47 +00:00
|
|
|
|
cx.apply_target_cpu_attr(llfn);
|
2019-10-29 16:39:54 +00:00
|
|
|
|
// FIXME(eddyb) find a nicer way to do this.
|
|
|
|
|
unsafe { llvm::LLVMRustSetLinkage(llfn, llvm::Linkage::InternalLinkage) };
|
2021-05-06 15:57:04 +00:00
|
|
|
|
let llbb = Builder::append_block(cx, llfn, "entry-block");
|
|
|
|
|
let bx = Builder::build(cx, llbb);
|
2018-05-08 13:10:16 +00:00
|
|
|
|
codegen(bx);
|
2021-08-03 22:09:57 +00:00
|
|
|
|
(llty, llfn)
|
2015-10-24 01:18:44 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Helper function used to get a handle to the `__rust_try` function used to
|
|
|
|
|
// catch exceptions.
|
|
|
|
|
//
|
|
|
|
|
// This function is only generated once and is then cached.
|
2018-07-02 14:52:53 +00:00
|
|
|
|
fn get_rust_try_fn<'ll, 'tcx>(
|
|
|
|
|
cx: &CodegenCx<'ll, 'tcx>,
|
|
|
|
|
codegen: &mut dyn FnMut(Builder<'_, 'll, 'tcx>),
|
2021-08-03 22:09:57 +00:00
|
|
|
|
) -> (&'ll Type, &'ll Value) {
|
2018-01-05 05:04:08 +00:00
|
|
|
|
if let Some(llfn) = cx.rust_try_fn.get() {
|
2016-02-23 19:59:23 +00:00
|
|
|
|
return llfn;
|
2015-07-20 20:27:38 +00:00
|
|
|
|
}
|
|
|
|
|
|
2015-07-14 01:11:44 +00:00
|
|
|
|
// Define the type up front for the signature of the rust_try function.
|
2018-01-05 05:04:08 +00:00
|
|
|
|
let tcx = cx.tcx;
|
2015-07-20 20:27:38 +00:00
|
|
|
|
let i8p = tcx.mk_mut_ptr(tcx.types.i8);
|
2020-10-08 00:02:06 +00:00
|
|
|
|
// `unsafe fn(*mut i8) -> ()`
|
|
|
|
|
let try_fn_ty = tcx.mk_fn_ptr(ty::Binder::dummy(tcx.mk_fn_sig(
|
2017-02-13 08:51:06 +00:00
|
|
|
|
iter::once(i8p),
|
2018-09-10 02:07:13 +00:00
|
|
|
|
tcx.mk_unit(),
|
2017-02-13 08:51:06 +00:00
|
|
|
|
false,
|
|
|
|
|
hir::Unsafety::Unsafe,
|
2019-12-22 22:42:04 +00:00
|
|
|
|
Abi::Rust,
|
2017-02-13 08:51:06 +00:00
|
|
|
|
)));
|
2020-10-08 00:02:06 +00:00
|
|
|
|
// `unsafe fn(*mut i8, *mut i8) -> ()`
|
|
|
|
|
let catch_fn_ty = tcx.mk_fn_ptr(ty::Binder::dummy(tcx.mk_fn_sig(
|
2020-03-02 13:59:20 +00:00
|
|
|
|
[i8p, i8p].iter().cloned(),
|
|
|
|
|
tcx.mk_unit(),
|
|
|
|
|
false,
|
|
|
|
|
hir::Unsafety::Unsafe,
|
|
|
|
|
Abi::Rust,
|
|
|
|
|
)));
|
2020-10-11 21:14:07 +00:00
|
|
|
|
// `unsafe fn(unsafe fn(*mut i8) -> (), *mut i8, unsafe fn(*mut i8, *mut i8) -> ()) -> i32`
|
2020-10-08 00:02:06 +00:00
|
|
|
|
let rust_fn_sig = ty::Binder::dummy(cx.tcx.mk_fn_sig(
|
|
|
|
|
vec![try_fn_ty, i8p, catch_fn_ty].into_iter(),
|
2020-10-11 21:14:07 +00:00
|
|
|
|
tcx.types.i32,
|
2020-10-08 00:02:06 +00:00
|
|
|
|
false,
|
|
|
|
|
hir::Unsafety::Unsafe,
|
|
|
|
|
Abi::Rust,
|
|
|
|
|
));
|
|
|
|
|
let rust_try = gen_fn(cx, "__rust_try", rust_fn_sig, codegen);
|
2018-01-05 05:04:08 +00:00
|
|
|
|
cx.rust_try_fn.set(Some(rust_try));
|
2018-10-08 15:00:30 +00:00
|
|
|
|
rust_try
|
2015-07-20 20:27:38 +00:00
|
|
|
|
}
|
2015-07-16 18:59:23 +00:00
|
|
|
|
|
2018-07-02 14:52:53 +00:00
|
|
|
|
fn generic_simd_intrinsic(
|
2018-10-05 13:08:49 +00:00
|
|
|
|
bx: &mut Builder<'a, 'll, 'tcx>,
|
2020-07-08 01:04:10 +00:00
|
|
|
|
name: Symbol,
|
2016-12-11 03:32:44 +00:00
|
|
|
|
callee_ty: Ty<'tcx>,
|
2018-08-02 14:48:44 +00:00
|
|
|
|
args: &[OperandRef<'tcx, &'ll Value>],
|
2016-12-11 03:32:44 +00:00
|
|
|
|
ret_ty: Ty<'tcx>,
|
2018-07-02 14:52:53 +00:00
|
|
|
|
llret_ty: &'ll Type,
|
2019-12-22 22:42:04 +00:00
|
|
|
|
span: Span,
|
2018-07-10 10:28:39 +00:00
|
|
|
|
) -> Result<&'ll Value, ()> {
|
2015-08-14 22:20:22 +00:00
|
|
|
|
// macros for error handling:
|
|
|
|
|
macro_rules! emit_error {
|
|
|
|
|
($msg: tt) => {
|
|
|
|
|
emit_error!($msg, )
|
|
|
|
|
};
|
|
|
|
|
($msg: tt, $($fmt: tt)*) => {
|
2015-09-18 22:42:57 +00:00
|
|
|
|
span_invalid_monomorphization_error(
|
2018-11-27 18:00:25 +00:00
|
|
|
|
bx.sess(), span,
|
2018-10-08 14:52:34 +00:00
|
|
|
|
&format!(concat!("invalid monomorphization of `{}` intrinsic: ", $msg),
|
2015-09-18 22:42:57 +00:00
|
|
|
|
name, $($fmt)*));
|
2015-08-14 22:20:22 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
2018-10-08 14:52:34 +00:00
|
|
|
|
|
2018-03-13 15:46:55 +00:00
|
|
|
|
macro_rules! return_error {
|
|
|
|
|
($($fmt: tt)*) => {
|
|
|
|
|
{
|
2015-08-14 22:20:22 +00:00
|
|
|
|
emit_error!($($fmt)*);
|
2017-06-01 18:50:53 +00:00
|
|
|
|
return Err(());
|
2015-08-13 23:00:44 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
2018-03-13 15:46:55 +00:00
|
|
|
|
|
|
|
|
|
macro_rules! require {
|
|
|
|
|
($cond: expr, $($fmt: tt)*) => {
|
|
|
|
|
if !$cond {
|
|
|
|
|
return_error!($($fmt)*);
|
|
|
|
|
}
|
|
|
|
|
};
|
|
|
|
|
}
|
2018-10-08 14:52:34 +00:00
|
|
|
|
|
2015-08-14 22:20:22 +00:00
|
|
|
|
macro_rules! require_simd {
|
|
|
|
|
($ty: expr, $position: expr) => {
|
|
|
|
|
require!($ty.is_simd(), "expected SIMD {} type, found non-SIMD `{}`", $position, $ty)
|
2019-12-22 22:42:04 +00:00
|
|
|
|
};
|
2015-08-14 22:20:22 +00:00
|
|
|
|
}
|
|
|
|
|
|
2018-01-05 05:12:32 +00:00
|
|
|
|
let tcx = bx.tcx();
|
2020-10-24 00:21:18 +00:00
|
|
|
|
let sig =
|
|
|
|
|
tcx.normalize_erasing_late_bound_regions(ty::ParamEnv::reveal_all(), callee_ty.fn_sig(tcx));
|
2016-11-29 02:35:38 +00:00
|
|
|
|
let arg_tys = sig.inputs();
|
2020-07-08 01:04:10 +00:00
|
|
|
|
let name_str = &*name.as_str();
|
2015-07-16 18:59:23 +00:00
|
|
|
|
|
2020-07-08 01:04:10 +00:00
|
|
|
|
if name == sym::simd_select_bitmask {
|
2018-12-13 20:50:42 +00:00
|
|
|
|
let in_ty = arg_tys[0];
|
2020-08-02 22:49:11 +00:00
|
|
|
|
let m_len = match in_ty.kind() {
|
2018-12-13 20:50:42 +00:00
|
|
|
|
// Note that this `.unwrap()` crashes for isize/usize, that's sort
|
|
|
|
|
// of intentional as there's not currently a use case for that.
|
2020-03-21 12:58:06 +00:00
|
|
|
|
ty::Int(i) => i.bit_width().unwrap(),
|
|
|
|
|
ty::Uint(i) => i.bit_width().unwrap(),
|
2018-12-13 20:50:42 +00:00
|
|
|
|
_ => return_error!("`{}` is not an integral type", in_ty),
|
|
|
|
|
};
|
|
|
|
|
require_simd!(arg_tys[1], "argument");
|
2020-11-13 04:32:03 +00:00
|
|
|
|
let (v_len, _) = arg_tys[1].simd_size_and_type(bx.tcx());
|
2019-12-22 22:42:04 +00:00
|
|
|
|
require!(
|
2020-10-03 15:13:06 +00:00
|
|
|
|
// Allow masks for vectors with fewer than 8 elements to be
|
|
|
|
|
// represented with a u8 or i8.
|
|
|
|
|
m_len == v_len || (m_len == 8 && v_len < 8),
|
2019-12-22 22:42:04 +00:00
|
|
|
|
"mismatched lengths: mask length `{}` != other vector length `{}`",
|
|
|
|
|
m_len,
|
|
|
|
|
v_len
|
2018-12-13 20:50:42 +00:00
|
|
|
|
);
|
|
|
|
|
let i1 = bx.type_i1();
|
2020-10-03 15:13:06 +00:00
|
|
|
|
let im = bx.type_ix(v_len);
|
|
|
|
|
let i1xn = bx.type_vector(i1, v_len);
|
|
|
|
|
let m_im = bx.trunc(args[0].immediate(), im);
|
|
|
|
|
let m_i1s = bx.bitcast(m_im, i1xn);
|
2018-12-13 20:50:42 +00:00
|
|
|
|
return Ok(bx.select(m_i1s, args[1].immediate(), args[2].immediate()));
|
|
|
|
|
}
|
|
|
|
|
|
2018-12-13 20:50:42 +00:00
|
|
|
|
// every intrinsic below takes a SIMD vector as its first argument
|
2015-08-14 22:20:22 +00:00
|
|
|
|
require_simd!(arg_tys[0], "input");
|
|
|
|
|
let in_ty = arg_tys[0];
|
|
|
|
|
|
2015-07-16 18:59:23 +00:00
|
|
|
|
let comparison = match name {
|
2020-07-08 01:04:10 +00:00
|
|
|
|
sym::simd_eq => Some(hir::BinOpKind::Eq),
|
|
|
|
|
sym::simd_ne => Some(hir::BinOpKind::Ne),
|
|
|
|
|
sym::simd_lt => Some(hir::BinOpKind::Lt),
|
|
|
|
|
sym::simd_le => Some(hir::BinOpKind::Le),
|
|
|
|
|
sym::simd_gt => Some(hir::BinOpKind::Gt),
|
|
|
|
|
sym::simd_ge => Some(hir::BinOpKind::Ge),
|
2019-12-22 22:42:04 +00:00
|
|
|
|
_ => None,
|
2015-07-16 18:59:23 +00:00
|
|
|
|
};
|
|
|
|
|
|
2020-11-13 04:32:03 +00:00
|
|
|
|
let (in_len, in_elem) = arg_tys[0].simd_size_and_type(bx.tcx());
|
2015-07-16 18:59:23 +00:00
|
|
|
|
if let Some(cmp_op) = comparison {
|
2015-08-14 22:20:22 +00:00
|
|
|
|
require_simd!(ret_ty, "return");
|
|
|
|
|
|
2020-11-13 04:32:03 +00:00
|
|
|
|
let (out_len, out_ty) = ret_ty.simd_size_and_type(bx.tcx());
|
2019-12-22 22:42:04 +00:00
|
|
|
|
require!(
|
|
|
|
|
in_len == out_len,
|
|
|
|
|
"expected return type with length {} (same as input type `{}`), \
|
2019-07-13 15:16:57 +00:00
|
|
|
|
found `{}` with length {}",
|
2019-12-22 22:42:04 +00:00
|
|
|
|
in_len,
|
|
|
|
|
in_ty,
|
|
|
|
|
ret_ty,
|
|
|
|
|
out_len
|
|
|
|
|
);
|
|
|
|
|
require!(
|
|
|
|
|
bx.type_kind(bx.element_type(llret_ty)) == TypeKind::Integer,
|
|
|
|
|
"expected return type with integer elements, found `{}` with non-integer `{}`",
|
|
|
|
|
ret_ty,
|
2019-07-13 15:16:57 +00:00
|
|
|
|
out_ty
|
2019-12-22 22:42:04 +00:00
|
|
|
|
);
|
|
|
|
|
|
|
|
|
|
return Ok(compare_simd_types(
|
|
|
|
|
bx,
|
|
|
|
|
args[0].immediate(),
|
|
|
|
|
args[1].immediate(),
|
|
|
|
|
in_elem,
|
|
|
|
|
llret_ty,
|
|
|
|
|
cmp_op,
|
|
|
|
|
));
|
2015-07-16 18:59:23 +00:00
|
|
|
|
}
|
|
|
|
|
|
2020-12-11 16:32:03 +00:00
|
|
|
|
if let Some(stripped) = name_str.strip_prefix("simd_shuffle") {
|
|
|
|
|
let n: u64 = stripped.parse().unwrap_or_else(|_| {
|
2019-12-22 22:42:04 +00:00
|
|
|
|
span_bug!(span, "bad `simd_shuffle` instruction only caught in codegen?")
|
|
|
|
|
});
|
2015-07-16 18:59:23 +00:00
|
|
|
|
|
2015-08-14 22:20:22 +00:00
|
|
|
|
require_simd!(ret_ty, "return");
|
2015-07-16 18:59:23 +00:00
|
|
|
|
|
2020-11-13 04:32:03 +00:00
|
|
|
|
let (out_len, out_ty) = ret_ty.simd_size_and_type(bx.tcx());
|
2019-12-22 22:42:04 +00:00
|
|
|
|
require!(
|
|
|
|
|
out_len == n,
|
|
|
|
|
"expected return type of length {}, found `{}` with length {}",
|
|
|
|
|
n,
|
|
|
|
|
ret_ty,
|
|
|
|
|
out_len
|
|
|
|
|
);
|
|
|
|
|
require!(
|
2019-07-13 15:16:57 +00:00
|
|
|
|
in_elem == out_ty,
|
2019-12-22 22:42:04 +00:00
|
|
|
|
"expected return element type `{}` (element of input `{}`), \
|
2019-07-13 15:16:57 +00:00
|
|
|
|
found `{}` with element type `{}`",
|
2019-12-22 22:42:04 +00:00
|
|
|
|
in_elem,
|
|
|
|
|
in_ty,
|
|
|
|
|
ret_ty,
|
2019-07-13 15:16:57 +00:00
|
|
|
|
out_ty
|
2019-12-22 22:42:04 +00:00
|
|
|
|
);
|
2015-07-16 18:59:23 +00:00
|
|
|
|
|
2019-11-16 15:09:45 +00:00
|
|
|
|
let total_len = u128::from(in_len) * 2;
|
2015-07-16 18:59:23 +00:00
|
|
|
|
|
2017-09-19 23:32:22 +00:00
|
|
|
|
let vector = args[2].immediate();
|
2015-08-12 18:25:37 +00:00
|
|
|
|
|
|
|
|
|
let indices: Option<Vec<_>> = (0..n)
|
|
|
|
|
.map(|i| {
|
|
|
|
|
let arg_idx = i;
|
2018-11-27 18:00:25 +00:00
|
|
|
|
let val = bx.const_get_elt(vector, i as u64);
|
|
|
|
|
match bx.const_to_opt_u128(val, true) {
|
2015-07-16 18:59:23 +00:00
|
|
|
|
None => {
|
2015-08-14 22:20:22 +00:00
|
|
|
|
emit_error!("shuffle index #{} is not a constant", arg_idx);
|
2015-07-16 18:59:23 +00:00
|
|
|
|
None
|
|
|
|
|
}
|
|
|
|
|
Some(idx) if idx >= total_len => {
|
2019-12-22 22:42:04 +00:00
|
|
|
|
emit_error!(
|
|
|
|
|
"shuffle index #{} is out of bounds (limit {})",
|
|
|
|
|
arg_idx,
|
|
|
|
|
total_len
|
|
|
|
|
);
|
2015-07-16 18:59:23 +00:00
|
|
|
|
None
|
|
|
|
|
}
|
2018-11-27 18:00:25 +00:00
|
|
|
|
Some(idx) => Some(bx.const_i32(idx as i32)),
|
2015-07-16 18:59:23 +00:00
|
|
|
|
}
|
|
|
|
|
})
|
|
|
|
|
.collect();
|
|
|
|
|
let indices = match indices {
|
|
|
|
|
Some(i) => i,
|
2019-12-22 22:42:04 +00:00
|
|
|
|
None => return Ok(bx.const_null(llret_ty)),
|
2015-07-16 18:59:23 +00:00
|
|
|
|
};
|
|
|
|
|
|
2019-12-22 22:42:04 +00:00
|
|
|
|
return Ok(bx.shuffle_vector(
|
|
|
|
|
args[0].immediate(),
|
|
|
|
|
args[1].immediate(),
|
|
|
|
|
bx.const_vector(&indices),
|
|
|
|
|
));
|
2015-07-16 18:59:23 +00:00
|
|
|
|
}
|
2015-07-20 18:55:47 +00:00
|
|
|
|
|
2020-07-08 01:04:10 +00:00
|
|
|
|
if name == sym::simd_insert {
|
2019-12-22 22:42:04 +00:00
|
|
|
|
require!(
|
|
|
|
|
in_elem == arg_tys[2],
|
|
|
|
|
"expected inserted type `{}` (element of input `{}`), found `{}`",
|
|
|
|
|
in_elem,
|
|
|
|
|
in_ty,
|
|
|
|
|
arg_tys[2]
|
|
|
|
|
);
|
|
|
|
|
return Ok(bx.insert_element(
|
|
|
|
|
args[0].immediate(),
|
|
|
|
|
args[2].immediate(),
|
|
|
|
|
args[1].immediate(),
|
|
|
|
|
));
|
2015-07-20 18:55:47 +00:00
|
|
|
|
}
|
2020-07-08 01:04:10 +00:00
|
|
|
|
if name == sym::simd_extract {
|
2019-12-22 22:42:04 +00:00
|
|
|
|
require!(
|
|
|
|
|
ret_ty == in_elem,
|
|
|
|
|
"expected return type `{}` (element of input `{}`), found `{}`",
|
|
|
|
|
in_elem,
|
|
|
|
|
in_ty,
|
|
|
|
|
ret_ty
|
|
|
|
|
);
|
|
|
|
|
return Ok(bx.extract_element(args[0].immediate(), args[1].immediate()));
|
2015-07-20 18:55:47 +00:00
|
|
|
|
}
|
2015-07-29 23:40:22 +00:00
|
|
|
|
|
2020-07-08 01:04:10 +00:00
|
|
|
|
if name == sym::simd_select {
|
2018-03-18 17:33:36 +00:00
|
|
|
|
let m_elem_ty = in_elem;
|
|
|
|
|
let m_len = in_len;
|
2018-12-13 20:50:42 +00:00
|
|
|
|
require_simd!(arg_tys[1], "argument");
|
2020-11-13 04:32:03 +00:00
|
|
|
|
let (v_len, _) = arg_tys[1].simd_size_and_type(bx.tcx());
|
2019-12-22 22:42:04 +00:00
|
|
|
|
require!(
|
|
|
|
|
m_len == v_len,
|
|
|
|
|
"mismatched lengths: mask length `{}` != other vector length `{}`",
|
|
|
|
|
m_len,
|
|
|
|
|
v_len
|
2018-03-18 17:33:36 +00:00
|
|
|
|
);
|
2020-08-02 22:49:11 +00:00
|
|
|
|
match m_elem_ty.kind() {
|
2019-12-22 22:42:04 +00:00
|
|
|
|
ty::Int(_) => {}
|
|
|
|
|
_ => return_error!("mask element type is `{}`, expected `i_`", m_elem_ty),
|
2018-03-18 17:33:36 +00:00
|
|
|
|
}
|
|
|
|
|
// truncate the mask to a vector of i1s
|
2018-11-27 18:00:25 +00:00
|
|
|
|
let i1 = bx.type_i1();
|
|
|
|
|
let i1xn = bx.type_vector(i1, m_len as u64);
|
2018-03-18 17:33:36 +00:00
|
|
|
|
let m_i1s = bx.trunc(args[0].immediate(), i1xn);
|
|
|
|
|
return Ok(bx.select(m_i1s, args[1].immediate(), args[2].immediate()));
|
|
|
|
|
}
|
|
|
|
|
|
2020-07-08 01:04:10 +00:00
|
|
|
|
if name == sym::simd_bitmask {
|
2019-01-02 15:49:30 +00:00
|
|
|
|
// The `fn simd_bitmask(vector) -> unsigned integer` intrinsic takes a
|
|
|
|
|
// vector mask and returns an unsigned integer containing the most
|
|
|
|
|
// significant bit (MSB) of each lane.
|
|
|
|
|
|
|
|
|
|
// If the vector has less than 8 lanes, an u8 is returned with zeroed
|
|
|
|
|
// trailing bits.
|
|
|
|
|
let expected_int_bits = in_len.max(8);
|
2020-08-02 22:49:11 +00:00
|
|
|
|
match ret_ty.kind() {
|
2020-03-21 12:58:06 +00:00
|
|
|
|
ty::Uint(i) if i.bit_width() == Some(expected_int_bits) => (),
|
2019-12-22 22:42:04 +00:00
|
|
|
|
_ => return_error!("bitmask `{}`, expected `u{}`", ret_ty, expected_int_bits),
|
2019-01-02 15:49:30 +00:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Integer vector <i{in_bitwidth} x in_len>:
|
2020-08-02 22:49:11 +00:00
|
|
|
|
let (i_xn, in_elem_bitwidth) = match in_elem.kind() {
|
2020-11-06 21:24:55 +00:00
|
|
|
|
ty::Int(i) => (
|
|
|
|
|
args[0].immediate(),
|
|
|
|
|
i.bit_width().unwrap_or_else(|| bx.data_layout().pointer_size.bits()),
|
|
|
|
|
),
|
|
|
|
|
ty::Uint(i) => (
|
|
|
|
|
args[0].immediate(),
|
|
|
|
|
i.bit_width().unwrap_or_else(|| bx.data_layout().pointer_size.bits()),
|
|
|
|
|
),
|
2019-01-02 15:49:30 +00:00
|
|
|
|
_ => return_error!(
|
|
|
|
|
"vector argument `{}`'s element type `{}`, expected integer element type",
|
2019-12-22 22:42:04 +00:00
|
|
|
|
in_ty,
|
|
|
|
|
in_elem
|
2019-01-02 15:49:30 +00:00
|
|
|
|
),
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
// Shift the MSB to the right by "in_elem_bitwidth - 1" into the first bit position.
|
2019-12-22 22:42:04 +00:00
|
|
|
|
let shift_indices =
|
|
|
|
|
vec![
|
2020-03-21 12:58:06 +00:00
|
|
|
|
bx.cx.const_int(bx.type_ix(in_elem_bitwidth), (in_elem_bitwidth - 1) as _);
|
2019-12-22 22:42:04 +00:00
|
|
|
|
in_len as _
|
|
|
|
|
];
|
2019-01-02 15:49:30 +00:00
|
|
|
|
let i_xn_msb = bx.lshr(i_xn, bx.const_vector(shift_indices.as_slice()));
|
|
|
|
|
// Truncate vector to an <i1 x N>
|
2020-03-21 12:58:06 +00:00
|
|
|
|
let i1xn = bx.trunc(i_xn_msb, bx.type_vector(bx.type_i1(), in_len));
|
2019-01-02 15:49:30 +00:00
|
|
|
|
// Bitcast <i1 x N> to iN:
|
2020-03-21 12:58:06 +00:00
|
|
|
|
let i_ = bx.bitcast(i1xn, bx.type_ix(in_len));
|
2019-01-02 15:49:30 +00:00
|
|
|
|
// Zero-extend iN to the bitmask type:
|
2020-03-21 12:58:06 +00:00
|
|
|
|
return Ok(bx.zext(i_, bx.type_ix(expected_int_bits)));
|
2019-01-02 15:49:30 +00:00
|
|
|
|
}
|
|
|
|
|
|
2018-07-02 14:52:53 +00:00
|
|
|
|
// Lowers one of the simple `simd_*` floating-point intrinsics (e.g.
// `simd_fsqrt`, `simd_fma`) to a call of the matching LLVM vector
// intrinsic (e.g. `llvm.sqrt.v4f32`).
//
// * `name`    - the Rust intrinsic being monomorphized (a `sym::simd_*` symbol)
// * `in_elem` - element type of the input vector; must be `f32` or `f64`
// * `in_ty`   - full input vector type (used only in error messages)
// * `in_len`  - number of lanes in the input vector
// * `bx`      - builder used to declare the intrinsic and emit the call
// * `span`    - span attached to any monomorphization error
// * `args`    - the intrinsic's operands, forwarded as immediates to the call
//
// Returns `Err(())` after emitting an "invalid monomorphization" diagnostic
// for unsupported element types or unrecognized intrinsic names.
fn simd_simple_float_intrinsic(
    name: Symbol,
    in_elem: &::rustc_middle::ty::TyS<'_>,
    in_ty: &::rustc_middle::ty::TyS<'_>,
    in_len: u64,
    bx: &mut Builder<'a, 'll, 'tcx>,
    span: Span,
    args: &[OperandRef<'tcx, &'ll Value>],
) -> Result<&'ll Value, ()> {
    // Reports an "invalid monomorphization of `<name>` intrinsic" error at
    // `span`; the zero-argument form forwards to the formatted one.
    macro_rules! emit_error {
        ($msg: tt) => {
            emit_error!($msg, )
        };
        ($msg: tt, $($fmt: tt)*) => {
            span_invalid_monomorphization_error(
                bx.sess(), span,
                &format!(concat!("invalid monomorphization of `{}` intrinsic: ", $msg),
                         name, $($fmt)*));
        }
    }
    // Emits the error and bails out of this function with `Err(())`.
    macro_rules! return_error {
        ($($fmt: tt)*) => {
            {
                emit_error!($($fmt)*);
                return Err(());
            }
        }
    }

    // Only `f32` and `f64` elements are supported; the string form feeds the
    // mangled LLVM intrinsic name built below.
    let (elem_ty_str, elem_ty) = if let ty::Float(f) = in_elem.kind() {
        let elem_ty = bx.cx.type_float_from_ty(*f);
        match f.bit_width() {
            32 => ("f32", elem_ty),
            64 => ("f64", elem_ty),
            _ => {
                return_error!(
                    "unsupported element type `{}` of floating-point vector `{}`",
                    f.name_str(),
                    in_ty
                );
            }
        }
    } else {
        return_error!("`{}` is not a floating-point type", in_ty);
    };

    let vec_ty = bx.type_vector(elem_ty, in_len);

    // Map the Rust intrinsic to the LLVM intrinsic base name plus the LLVM
    // function type of the call. Most are unary vector -> vector; `fma` takes
    // three vectors, `pow` two, and `powi` a vector plus a scalar `i32`.
    let (intr_name, fn_ty) = match name {
        sym::simd_ceil => ("ceil", bx.type_func(&[vec_ty], vec_ty)),
        sym::simd_fabs => ("fabs", bx.type_func(&[vec_ty], vec_ty)),
        sym::simd_fcos => ("cos", bx.type_func(&[vec_ty], vec_ty)),
        sym::simd_fexp2 => ("exp2", bx.type_func(&[vec_ty], vec_ty)),
        sym::simd_fexp => ("exp", bx.type_func(&[vec_ty], vec_ty)),
        sym::simd_flog10 => ("log10", bx.type_func(&[vec_ty], vec_ty)),
        sym::simd_flog2 => ("log2", bx.type_func(&[vec_ty], vec_ty)),
        sym::simd_flog => ("log", bx.type_func(&[vec_ty], vec_ty)),
        sym::simd_floor => ("floor", bx.type_func(&[vec_ty], vec_ty)),
        sym::simd_fma => ("fma", bx.type_func(&[vec_ty, vec_ty, vec_ty], vec_ty)),
        sym::simd_fpowi => ("powi", bx.type_func(&[vec_ty, bx.type_i32()], vec_ty)),
        sym::simd_fpow => ("pow", bx.type_func(&[vec_ty, vec_ty], vec_ty)),
        sym::simd_fsin => ("sin", bx.type_func(&[vec_ty], vec_ty)),
        sym::simd_fsqrt => ("sqrt", bx.type_func(&[vec_ty], vec_ty)),
        sym::simd_round => ("round", bx.type_func(&[vec_ty], vec_ty)),
        sym::simd_trunc => ("trunc", bx.type_func(&[vec_ty], vec_ty)),
        _ => return_error!("unrecognized intrinsic `{}`", name),
    };
    // e.g. "llvm.sqrt" + ".v4f32": base name plus lane-count/element suffix.
    let llvm_name = &format!("llvm.{0}.v{1}{2}", intr_name, in_len, elem_ty_str);
    let f = bx.declare_cfn(&llvm_name, llvm::UnnamedAddr::No, fn_ty);
    let c =
        bx.call(fn_ty, f, &args.iter().map(|arg| arg.immediate()).collect::<Vec<_>>(), None);
    Ok(c)
}
|
|
|
|
|
|
2021-01-03 04:49:28 +00:00
|
|
|
|
if std::matches!(
|
|
|
|
|
name,
|
2021-04-17 17:40:59 +00:00
|
|
|
|
sym::simd_ceil
|
2021-01-03 04:49:28 +00:00
|
|
|
|
| sym::simd_fabs
|
2021-04-17 17:40:59 +00:00
|
|
|
|
| sym::simd_fcos
|
2021-01-03 04:49:28 +00:00
|
|
|
|
| sym::simd_fexp2
|
2021-04-17 17:40:59 +00:00
|
|
|
|
| sym::simd_fexp
|
2021-01-03 04:49:28 +00:00
|
|
|
|
| sym::simd_flog10
|
|
|
|
|
| sym::simd_flog2
|
|
|
|
|
| sym::simd_flog
|
2021-04-17 17:40:59 +00:00
|
|
|
|
| sym::simd_floor
|
2021-01-03 04:49:28 +00:00
|
|
|
|
| sym::simd_fma
|
2021-04-17 17:40:59 +00:00
|
|
|
|
| sym::simd_fpow
|
|
|
|
|
| sym::simd_fpowi
|
|
|
|
|
| sym::simd_fsin
|
|
|
|
|
| sym::simd_fsqrt
|
|
|
|
|
| sym::simd_round
|
|
|
|
|
| sym::simd_trunc
|
2021-01-03 04:49:28 +00:00
|
|
|
|
) {
|
|
|
|
|
return simd_simple_float_intrinsic(name, in_elem, in_ty, in_len, bx, span, args);
|
2018-05-04 18:07:35 +00:00
|
|
|
|
}
|
|
|
|
|
|
2018-05-16 18:47:28 +00:00
|
|
|
|
// FIXME: use:
|
|
|
|
|
// https://github.com/llvm-mirror/llvm/blob/master/include/llvm/IR/Function.h#L182
|
|
|
|
|
// https://github.com/llvm-mirror/llvm/blob/master/include/llvm/IR/Intrinsics.h#L81
|
2019-11-16 15:09:45 +00:00
|
|
|
|
fn llvm_vector_str(elem_ty: Ty<'_>, vec_len: u64, no_pointers: usize) -> String {
|
2018-05-16 18:47:28 +00:00
|
|
|
|
let p0s: String = "p0".repeat(no_pointers);
|
2020-08-02 22:49:11 +00:00
|
|
|
|
match *elem_ty.kind() {
|
2018-08-22 00:35:55 +00:00
|
|
|
|
ty::Int(v) => format!("v{}{}i{}", vec_len, p0s, v.bit_width().unwrap()),
|
|
|
|
|
ty::Uint(v) => format!("v{}{}i{}", vec_len, p0s, v.bit_width().unwrap()),
|
|
|
|
|
ty::Float(v) => format!("v{}{}f{}", vec_len, p0s, v.bit_width()),
|
2018-05-16 18:47:28 +00:00
|
|
|
|
_ => unreachable!(),
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2019-12-22 22:42:04 +00:00
|
|
|
|
fn llvm_vector_ty(
|
|
|
|
|
cx: &CodegenCx<'ll, '_>,
|
|
|
|
|
elem_ty: Ty<'_>,
|
|
|
|
|
vec_len: u64,
|
|
|
|
|
mut no_pointers: usize,
|
|
|
|
|
) -> &'ll Type {
|
2018-05-16 18:47:28 +00:00
|
|
|
|
// FIXME: use cx.layout_of(ty).llvm_type() ?
|
2020-08-02 22:49:11 +00:00
|
|
|
|
let mut elem_ty = match *elem_ty.kind() {
|
2019-12-22 22:42:04 +00:00
|
|
|
|
ty::Int(v) => cx.type_int_from_ty(v),
|
|
|
|
|
ty::Uint(v) => cx.type_uint_from_ty(v),
|
|
|
|
|
ty::Float(v) => cx.type_float_from_ty(v),
|
2018-05-16 18:47:28 +00:00
|
|
|
|
_ => unreachable!(),
|
|
|
|
|
};
|
|
|
|
|
while no_pointers > 0 {
|
2018-09-06 20:52:15 +00:00
|
|
|
|
elem_ty = cx.type_ptr_to(elem_ty);
|
2018-05-16 18:47:28 +00:00
|
|
|
|
no_pointers -= 1;
|
|
|
|
|
}
|
2019-11-16 15:09:45 +00:00
|
|
|
|
cx.type_vector(elem_ty, vec_len)
|
2018-05-16 18:47:28 +00:00
|
|
|
|
}
|
|
|
|
|
|
2020-07-08 01:04:10 +00:00
|
|
|
|
if name == sym::simd_gather {
|
2018-05-04 18:07:35 +00:00
|
|
|
|
// simd_gather(values: <N x T>, pointers: <N x *_ T>,
|
|
|
|
|
// mask: <N x i{M}>) -> <N x T>
|
|
|
|
|
// * N: number of elements in the input vectors
|
|
|
|
|
// * T: type of the element to load
|
|
|
|
|
// * M: any integer width is supported, will be truncated to i1
|
|
|
|
|
|
|
|
|
|
// All types must be simd vector types
|
|
|
|
|
require_simd!(in_ty, "first");
|
|
|
|
|
require_simd!(arg_tys[1], "second");
|
|
|
|
|
require_simd!(arg_tys[2], "third");
|
|
|
|
|
require_simd!(ret_ty, "return");
|
|
|
|
|
|
|
|
|
|
// Of the same length:
|
2020-11-13 04:32:03 +00:00
|
|
|
|
let (out_len, _) = arg_tys[1].simd_size_and_type(bx.tcx());
|
|
|
|
|
let (out_len2, _) = arg_tys[2].simd_size_and_type(bx.tcx());
|
2019-12-22 22:42:04 +00:00
|
|
|
|
require!(
|
2019-07-13 15:16:57 +00:00
|
|
|
|
in_len == out_len,
|
2019-12-22 22:42:04 +00:00
|
|
|
|
"expected {} argument with length {} (same as input type `{}`), \
|
2019-07-13 15:16:57 +00:00
|
|
|
|
found `{}` with length {}",
|
2019-12-22 22:42:04 +00:00
|
|
|
|
"second",
|
|
|
|
|
in_len,
|
|
|
|
|
in_ty,
|
|
|
|
|
arg_tys[1],
|
2019-07-13 15:16:57 +00:00
|
|
|
|
out_len
|
2019-12-22 22:42:04 +00:00
|
|
|
|
);
|
|
|
|
|
require!(
|
2019-07-13 15:16:57 +00:00
|
|
|
|
in_len == out_len2,
|
2019-12-22 22:42:04 +00:00
|
|
|
|
"expected {} argument with length {} (same as input type `{}`), \
|
2019-07-13 15:16:57 +00:00
|
|
|
|
found `{}` with length {}",
|
2019-12-22 22:42:04 +00:00
|
|
|
|
"third",
|
|
|
|
|
in_len,
|
|
|
|
|
in_ty,
|
|
|
|
|
arg_tys[2],
|
2019-07-13 15:16:57 +00:00
|
|
|
|
out_len2
|
2019-12-22 22:42:04 +00:00
|
|
|
|
);
|
2018-05-04 18:07:35 +00:00
|
|
|
|
|
|
|
|
|
// The return type must match the first argument type
|
2019-12-22 22:42:04 +00:00
|
|
|
|
require!(ret_ty == in_ty, "expected return type `{}`, found `{}`", in_ty, ret_ty);
|
2018-05-04 18:07:35 +00:00
|
|
|
|
|
|
|
|
|
// This counts how many pointers
|
2019-06-12 13:06:35 +00:00
|
|
|
|
fn ptr_count(t: Ty<'_>) -> usize {
|
2020-08-02 22:49:11 +00:00
|
|
|
|
match t.kind() {
|
2018-08-22 00:35:02 +00:00
|
|
|
|
ty::RawPtr(p) => 1 + ptr_count(p.ty),
|
2018-05-04 18:07:35 +00:00
|
|
|
|
_ => 0,
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Non-ptr type
|
2019-06-12 13:06:35 +00:00
|
|
|
|
fn non_ptr(t: Ty<'_>) -> Ty<'_> {
|
2020-08-02 22:49:11 +00:00
|
|
|
|
match t.kind() {
|
2018-08-22 00:35:02 +00:00
|
|
|
|
ty::RawPtr(p) => non_ptr(p.ty),
|
2018-05-04 18:07:35 +00:00
|
|
|
|
_ => t,
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// The second argument must be a simd vector with an element type that's a pointer
|
|
|
|
|
// to the element type of the first argument
|
2020-11-13 04:32:03 +00:00
|
|
|
|
let (_, element_ty0) = arg_tys[0].simd_size_and_type(bx.tcx());
|
|
|
|
|
let (_, element_ty1) = arg_tys[1].simd_size_and_type(bx.tcx());
|
2019-07-13 15:16:57 +00:00
|
|
|
|
let (pointer_count, underlying_ty) = match element_ty1.kind() {
|
|
|
|
|
ty::RawPtr(p) if p.ty == in_elem => (ptr_count(element_ty1), non_ptr(element_ty1)),
|
2018-05-04 18:07:35 +00:00
|
|
|
|
_ => {
|
2019-12-22 22:42:04 +00:00
|
|
|
|
require!(
|
|
|
|
|
false,
|
|
|
|
|
"expected element type `{}` of second argument `{}` \
|
2019-07-13 15:16:57 +00:00
|
|
|
|
to be a pointer to the element type `{}` of the first \
|
|
|
|
|
argument `{}`, found `{}` != `*_ {}`",
|
|
|
|
|
element_ty1,
|
2019-12-22 22:42:04 +00:00
|
|
|
|
arg_tys[1],
|
|
|
|
|
in_elem,
|
|
|
|
|
in_ty,
|
2019-07-13 15:16:57 +00:00
|
|
|
|
element_ty1,
|
2019-12-22 22:42:04 +00:00
|
|
|
|
in_elem
|
|
|
|
|
);
|
2018-05-04 18:07:35 +00:00
|
|
|
|
unreachable!();
|
|
|
|
|
}
|
|
|
|
|
};
|
|
|
|
|
assert!(pointer_count > 0);
|
2019-07-13 15:16:57 +00:00
|
|
|
|
assert_eq!(pointer_count - 1, ptr_count(element_ty0));
|
|
|
|
|
assert_eq!(underlying_ty, non_ptr(element_ty0));
|
2018-05-04 18:07:35 +00:00
|
|
|
|
|
|
|
|
|
// The element type of the third argument must be a signed integer type of any width:
|
2020-11-13 04:32:03 +00:00
|
|
|
|
let (_, element_ty2) = arg_tys[2].simd_size_and_type(bx.tcx());
|
2019-07-13 15:16:57 +00:00
|
|
|
|
match element_ty2.kind() {
|
2018-08-22 00:35:55 +00:00
|
|
|
|
ty::Int(_) => (),
|
2018-05-04 18:07:35 +00:00
|
|
|
|
_ => {
|
2019-12-22 22:42:04 +00:00
|
|
|
|
require!(
|
|
|
|
|
false,
|
|
|
|
|
"expected element type `{}` of third argument `{}` \
|
2018-05-04 18:07:35 +00:00
|
|
|
|
to be a signed integer type",
|
2019-07-13 15:16:57 +00:00
|
|
|
|
element_ty2,
|
2019-12-22 22:42:04 +00:00
|
|
|
|
arg_tys[2]
|
|
|
|
|
);
|
2018-05-04 18:07:35 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Alignment of T, must be a constant integer value:
|
2018-11-27 18:00:25 +00:00
|
|
|
|
let alignment_ty = bx.type_i32();
|
|
|
|
|
let alignment = bx.const_i32(bx.align_of(in_elem).bytes() as i32);
|
2018-05-04 18:07:35 +00:00
|
|
|
|
|
|
|
|
|
// Truncate the mask vector to a vector of i1s:
|
|
|
|
|
let (mask, mask_ty) = {
|
2018-11-27 18:00:25 +00:00
|
|
|
|
let i1 = bx.type_i1();
|
2019-11-16 12:31:09 +00:00
|
|
|
|
let i1xn = bx.type_vector(i1, in_len);
|
2018-05-04 18:07:35 +00:00
|
|
|
|
(bx.trunc(args[2].immediate(), i1xn), i1xn)
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
// Type of the vector of pointers:
|
2018-11-27 18:00:25 +00:00
|
|
|
|
let llvm_pointer_vec_ty = llvm_vector_ty(bx, underlying_ty, in_len, pointer_count);
|
2018-05-04 18:07:35 +00:00
|
|
|
|
let llvm_pointer_vec_str = llvm_vector_str(underlying_ty, in_len, pointer_count);
|
|
|
|
|
|
|
|
|
|
// Type of the vector of elements:
|
2018-11-27 18:00:25 +00:00
|
|
|
|
let llvm_elem_vec_ty = llvm_vector_ty(bx, underlying_ty, in_len, pointer_count - 1);
|
2018-05-04 18:07:35 +00:00
|
|
|
|
let llvm_elem_vec_str = llvm_vector_str(underlying_ty, in_len, pointer_count - 1);
|
|
|
|
|
|
2019-12-22 22:42:04 +00:00
|
|
|
|
let llvm_intrinsic =
|
|
|
|
|
format!("llvm.masked.gather.{}.{}", llvm_elem_vec_str, llvm_pointer_vec_str);
|
2021-08-03 22:09:57 +00:00
|
|
|
|
let fn_ty = bx.type_func(
|
|
|
|
|
&[llvm_pointer_vec_ty, alignment_ty, mask_ty, llvm_elem_vec_ty],
|
|
|
|
|
llvm_elem_vec_ty,
|
2019-12-22 22:42:04 +00:00
|
|
|
|
);
|
2021-08-03 22:09:57 +00:00
|
|
|
|
let f = bx.declare_cfn(&llvm_intrinsic, llvm::UnnamedAddr::No, fn_ty);
|
|
|
|
|
let v =
|
|
|
|
|
bx.call(fn_ty, f, &[args[1].immediate(), alignment, mask, args[0].immediate()], None);
|
2018-05-04 18:07:35 +00:00
|
|
|
|
return Ok(v);
|
|
|
|
|
}
|
|
|
|
|
|
2020-07-08 01:04:10 +00:00
|
|
|
|
if name == sym::simd_scatter {
|
2018-05-04 18:07:35 +00:00
|
|
|
|
// simd_scatter(values: <N x T>, pointers: <N x *mut T>,
|
|
|
|
|
// mask: <N x i{M}>) -> ()
|
|
|
|
|
// * N: number of elements in the input vectors
|
|
|
|
|
// * T: type of the element to load
|
|
|
|
|
// * M: any integer width is supported, will be truncated to i1
|
|
|
|
|
|
|
|
|
|
// All types must be simd vector types
|
|
|
|
|
require_simd!(in_ty, "first");
|
|
|
|
|
require_simd!(arg_tys[1], "second");
|
|
|
|
|
require_simd!(arg_tys[2], "third");
|
|
|
|
|
|
|
|
|
|
// Of the same length:
|
2020-11-13 04:32:03 +00:00
|
|
|
|
let (element_len1, _) = arg_tys[1].simd_size_and_type(bx.tcx());
|
|
|
|
|
let (element_len2, _) = arg_tys[2].simd_size_and_type(bx.tcx());
|
2019-12-22 22:42:04 +00:00
|
|
|
|
require!(
|
2019-07-13 15:16:57 +00:00
|
|
|
|
in_len == element_len1,
|
2019-12-22 22:42:04 +00:00
|
|
|
|
"expected {} argument with length {} (same as input type `{}`), \
|
2019-07-13 15:16:57 +00:00
|
|
|
|
found `{}` with length {}",
|
2019-12-22 22:42:04 +00:00
|
|
|
|
"second",
|
|
|
|
|
in_len,
|
|
|
|
|
in_ty,
|
|
|
|
|
arg_tys[1],
|
2019-07-13 15:16:57 +00:00
|
|
|
|
element_len1
|
2019-12-22 22:42:04 +00:00
|
|
|
|
);
|
|
|
|
|
require!(
|
2019-07-13 15:16:57 +00:00
|
|
|
|
in_len == element_len2,
|
2019-12-22 22:42:04 +00:00
|
|
|
|
"expected {} argument with length {} (same as input type `{}`), \
|
2019-07-13 15:16:57 +00:00
|
|
|
|
found `{}` with length {}",
|
2019-12-22 22:42:04 +00:00
|
|
|
|
"third",
|
|
|
|
|
in_len,
|
|
|
|
|
in_ty,
|
|
|
|
|
arg_tys[2],
|
2019-07-13 15:16:57 +00:00
|
|
|
|
element_len2
|
2019-12-22 22:42:04 +00:00
|
|
|
|
);
|
2018-05-04 18:07:35 +00:00
|
|
|
|
|
|
|
|
|
// This counts how many pointers
|
2019-06-12 13:06:35 +00:00
|
|
|
|
fn ptr_count(t: Ty<'_>) -> usize {
|
2020-08-02 22:49:11 +00:00
|
|
|
|
match t.kind() {
|
2018-08-22 00:35:02 +00:00
|
|
|
|
ty::RawPtr(p) => 1 + ptr_count(p.ty),
|
2018-05-04 18:07:35 +00:00
|
|
|
|
_ => 0,
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Non-ptr type
|
2019-06-12 13:06:35 +00:00
|
|
|
|
fn non_ptr(t: Ty<'_>) -> Ty<'_> {
|
2020-08-02 22:49:11 +00:00
|
|
|
|
match t.kind() {
|
2018-08-22 00:35:02 +00:00
|
|
|
|
ty::RawPtr(p) => non_ptr(p.ty),
|
2018-05-04 18:07:35 +00:00
|
|
|
|
_ => t,
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// The second argument must be a simd vector with an element type that's a pointer
|
|
|
|
|
// to the element type of the first argument
|
2020-11-13 04:32:03 +00:00
|
|
|
|
let (_, element_ty0) = arg_tys[0].simd_size_and_type(bx.tcx());
|
|
|
|
|
let (_, element_ty1) = arg_tys[1].simd_size_and_type(bx.tcx());
|
|
|
|
|
let (_, element_ty2) = arg_tys[2].simd_size_and_type(bx.tcx());
|
2019-07-13 15:16:57 +00:00
|
|
|
|
let (pointer_count, underlying_ty) = match element_ty1.kind() {
|
2019-12-22 22:42:04 +00:00
|
|
|
|
ty::RawPtr(p) if p.ty == in_elem && p.mutbl == hir::Mutability::Mut => {
|
2019-07-13 15:16:57 +00:00
|
|
|
|
(ptr_count(element_ty1), non_ptr(element_ty1))
|
2019-12-22 22:42:04 +00:00
|
|
|
|
}
|
2018-05-04 18:07:35 +00:00
|
|
|
|
_ => {
|
2019-12-22 22:42:04 +00:00
|
|
|
|
require!(
|
|
|
|
|
false,
|
|
|
|
|
"expected element type `{}` of second argument `{}` \
|
2019-07-13 15:16:57 +00:00
|
|
|
|
to be a pointer to the element type `{}` of the first \
|
|
|
|
|
argument `{}`, found `{}` != `*mut {}`",
|
|
|
|
|
element_ty1,
|
2019-12-22 22:42:04 +00:00
|
|
|
|
arg_tys[1],
|
|
|
|
|
in_elem,
|
|
|
|
|
in_ty,
|
2019-07-13 15:16:57 +00:00
|
|
|
|
element_ty1,
|
2019-12-22 22:42:04 +00:00
|
|
|
|
in_elem
|
|
|
|
|
);
|
2018-05-04 18:07:35 +00:00
|
|
|
|
unreachable!();
|
|
|
|
|
}
|
|
|
|
|
};
|
|
|
|
|
assert!(pointer_count > 0);
|
2019-07-13 15:16:57 +00:00
|
|
|
|
assert_eq!(pointer_count - 1, ptr_count(element_ty0));
|
|
|
|
|
assert_eq!(underlying_ty, non_ptr(element_ty0));
|
2018-05-04 18:07:35 +00:00
|
|
|
|
|
|
|
|
|
// The element type of the third argument must be a signed integer type of any width:
|
2019-07-13 15:16:57 +00:00
|
|
|
|
match element_ty2.kind() {
|
2018-08-22 00:35:55 +00:00
|
|
|
|
ty::Int(_) => (),
|
2018-05-04 18:07:35 +00:00
|
|
|
|
_ => {
|
2019-12-22 22:42:04 +00:00
|
|
|
|
require!(
|
|
|
|
|
false,
|
|
|
|
|
"expected element type `{}` of third argument `{}` \
|
2019-07-13 15:16:57 +00:00
|
|
|
|
be a signed integer type",
|
|
|
|
|
element_ty2,
|
2019-12-22 22:42:04 +00:00
|
|
|
|
arg_tys[2]
|
|
|
|
|
);
|
2018-05-04 18:07:35 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Alignment of T, must be a constant integer value:
|
2018-11-27 18:00:25 +00:00
|
|
|
|
let alignment_ty = bx.type_i32();
|
|
|
|
|
let alignment = bx.const_i32(bx.align_of(in_elem).bytes() as i32);
|
2018-05-04 18:07:35 +00:00
|
|
|
|
|
|
|
|
|
// Truncate the mask vector to a vector of i1s:
|
|
|
|
|
let (mask, mask_ty) = {
|
2018-11-27 18:00:25 +00:00
|
|
|
|
let i1 = bx.type_i1();
|
2019-11-16 12:31:09 +00:00
|
|
|
|
let i1xn = bx.type_vector(i1, in_len);
|
2018-05-04 18:07:35 +00:00
|
|
|
|
(bx.trunc(args[2].immediate(), i1xn), i1xn)
|
|
|
|
|
};
|
|
|
|
|
|
2018-11-27 18:00:25 +00:00
|
|
|
|
let ret_t = bx.type_void();
|
2018-05-04 18:07:35 +00:00
|
|
|
|
|
|
|
|
|
// Type of the vector of pointers:
|
2018-11-27 18:00:25 +00:00
|
|
|
|
let llvm_pointer_vec_ty = llvm_vector_ty(bx, underlying_ty, in_len, pointer_count);
|
2018-05-04 18:07:35 +00:00
|
|
|
|
let llvm_pointer_vec_str = llvm_vector_str(underlying_ty, in_len, pointer_count);
|
|
|
|
|
|
|
|
|
|
// Type of the vector of elements:
|
2018-11-27 18:00:25 +00:00
|
|
|
|
let llvm_elem_vec_ty = llvm_vector_ty(bx, underlying_ty, in_len, pointer_count - 1);
|
2018-05-04 18:07:35 +00:00
|
|
|
|
let llvm_elem_vec_str = llvm_vector_str(underlying_ty, in_len, pointer_count - 1);
|
|
|
|
|
|
2019-12-22 22:42:04 +00:00
|
|
|
|
let llvm_intrinsic =
|
|
|
|
|
format!("llvm.masked.scatter.{}.{}", llvm_elem_vec_str, llvm_pointer_vec_str);
|
2021-08-03 22:09:57 +00:00
|
|
|
|
let fn_ty =
|
|
|
|
|
bx.type_func(&[llvm_elem_vec_ty, llvm_pointer_vec_ty, alignment_ty, mask_ty], ret_t);
|
|
|
|
|
let f = bx.declare_cfn(&llvm_intrinsic, llvm::UnnamedAddr::No, fn_ty);
|
|
|
|
|
let v =
|
|
|
|
|
bx.call(fn_ty, f, &[args[0].immediate(), args[1].immediate(), alignment, mask], None);
|
2018-05-04 18:07:35 +00:00
|
|
|
|
return Ok(v);
|
|
|
|
|
}
|
|
|
|
|
|
2018-03-14 16:22:40 +00:00
|
|
|
|
// Expands to the handling of one arithmetic reduction intrinsic
// (`simd_reduce_{add,mul}_{ordered,unordered}`). Captures the surrounding
// locals `name`, `ret_ty`, `in_elem`, `in_ty`, `bx` and `args`.
//
// * `$name`           - the `sym::simd_reduce_*` symbol to match against
// * `$integer_reduce` - builder method for the integer vector reduction
// * `$float_reduce`   - builder method for the float vector reduction
// * `$ordered`        - whether this is the ordered (accumulator-taking) form
// * `$op`             - builder op combining the accumulator with the integer
//                       reduction result (only used when `$ordered`)
// * `$identity`       - identity value for the unordered float accumulator
macro_rules! arith_red {
    ($name:ident : $integer_reduce:ident, $float_reduce:ident, $ordered:expr, $op:ident,
     $identity:expr) => {
        if name == sym::$name {
            require!(
                ret_ty == in_elem,
                "expected return type `{}` (element of input `{}`), found `{}`",
                in_elem,
                in_ty,
                ret_ty
            );
            return match in_elem.kind() {
                ty::Int(_) | ty::Uint(_) => {
                    let r = bx.$integer_reduce(args[0].immediate());
                    if $ordered {
                        // if overflow occurs, the result is the
                        // mathematical result modulo 2^n:
                        Ok(bx.$op(args[1].immediate(), r))
                    } else {
                        Ok(bx.$integer_reduce(args[0].immediate()))
                    }
                }
                ty::Float(f) => {
                    let acc = if $ordered {
                        // ordered arithmetic reductions take an accumulator
                        args[1].immediate()
                    } else {
                        // unordered arithmetic reductions use the identity accumulator
                        match f.bit_width() {
                            32 => bx.const_real(bx.type_f32(), $identity),
                            64 => bx.const_real(bx.type_f64(), $identity),
                            v => return_error!(
                                r#"
unsupported {} from `{}` with element `{}` of size `{}` to `{}`"#,
                                sym::$name,
                                in_ty,
                                in_elem,
                                v,
                                ret_ty
                            ),
                        }
                    };
                    Ok(bx.$float_reduce(acc, args[0].immediate()))
                }
                // Reductions are only defined for integer and float elements.
                _ => return_error!(
                    "unsupported {} from `{}` with element `{}` to `{}`",
                    sym::$name,
                    in_ty,
                    in_elem,
                    ret_ty
                ),
            };
        }
    };
}
|
|
|
|
|
|
2020-07-08 01:04:10 +00:00
|
|
|
|
arith_red!(simd_reduce_add_ordered: vector_reduce_add, vector_reduce_fadd, true, add, 0.0);
|
|
|
|
|
arith_red!(simd_reduce_mul_ordered: vector_reduce_mul, vector_reduce_fmul, true, mul, 1.0);
|
|
|
|
|
arith_red!(
|
|
|
|
|
simd_reduce_add_unordered: vector_reduce_add,
|
|
|
|
|
vector_reduce_fadd_fast,
|
|
|
|
|
false,
|
|
|
|
|
add,
|
|
|
|
|
0.0
|
|
|
|
|
);
|
|
|
|
|
arith_red!(
|
|
|
|
|
simd_reduce_mul_unordered: vector_reduce_mul,
|
|
|
|
|
vector_reduce_fmul_fast,
|
|
|
|
|
false,
|
|
|
|
|
mul,
|
|
|
|
|
1.0
|
|
|
|
|
);
|
2018-03-14 16:22:40 +00:00
|
|
|
|
|
|
|
|
|
// Expands to the handling of one min/max reduction intrinsic
// (`simd_reduce_{min,max}[_nanless]`). Captures the surrounding locals
// `name`, `ret_ty`, `in_elem`, `in_ty`, `bx` and `args`.
//
// * `$name`      - the `sym::simd_reduce_*` symbol to match against
// * `$int_red`   - builder method for the integer reduction; its boolean
//                  argument selects signed (`true`) vs unsigned (`false`)
// * `$float_red` - builder method for the float reduction
macro_rules! minmax_red {
    ($name:ident: $int_red:ident, $float_red:ident) => {
        if name == sym::$name {
            require!(
                ret_ty == in_elem,
                "expected return type `{}` (element of input `{}`), found `{}`",
                in_elem,
                in_ty,
                ret_ty
            );
            return match in_elem.kind() {
                ty::Int(_i) => Ok(bx.$int_red(args[0].immediate(), true)),
                ty::Uint(_u) => Ok(bx.$int_red(args[0].immediate(), false)),
                ty::Float(_f) => Ok(bx.$float_red(args[0].immediate())),
                // Min/max reductions only apply to integer and float elements.
                _ => return_error!(
                    "unsupported {} from `{}` with element `{}` to `{}`",
                    sym::$name,
                    in_ty,
                    in_elem,
                    ret_ty
                ),
            };
        }
    };
}
|
|
|
|
|
|
2020-07-08 01:04:10 +00:00
|
|
|
|
minmax_red!(simd_reduce_min: vector_reduce_min, vector_reduce_fmin);
|
|
|
|
|
minmax_red!(simd_reduce_max: vector_reduce_max, vector_reduce_fmax);
|
2018-03-13 15:46:55 +00:00
|
|
|
|
|
2020-07-08 01:04:10 +00:00
|
|
|
|
minmax_red!(simd_reduce_min_nanless: vector_reduce_min, vector_reduce_fmin_fast);
|
|
|
|
|
minmax_red!(simd_reduce_max_nanless: vector_reduce_max, vector_reduce_fmax_fast);
|
2018-03-13 15:46:55 +00:00
|
|
|
|
|
2018-03-14 16:22:40 +00:00
|
|
|
|
// Expands to the handling of one bitwise reduction intrinsic
// (`simd_reduce_{and,or,xor,all,any}`). Captures the surrounding locals
// `name`, `ret_ty`, `in_elem`, `in_ty`, `in_len`, `bx` and `args`.
//
// * `$name`    - the `sym::simd_reduce_*` symbol to match against
// * `$red`     - builder method performing the vector reduction
// * `$boolean` - `true` for the boolean forms (`all`/`any`), which reduce a
//                truncated `<N x i1>` mask and zero-extend the result to a
//                bool; `false` for the element-typed forms
macro_rules! bitwise_red {
    ($name:ident : $red:ident, $boolean:expr) => {
        if name == sym::$name {
            let input = if !$boolean {
                // Element-typed reduction: return type must equal the
                // element type, and the vector is reduced as-is.
                require!(
                    ret_ty == in_elem,
                    "expected return type `{}` (element of input `{}`), found `{}`",
                    in_elem,
                    in_ty,
                    ret_ty
                );
                args[0].immediate()
            } else {
                match in_elem.kind() {
                    ty::Int(_) | ty::Uint(_) => {}
                    _ => return_error!(
                        "unsupported {} from `{}` with element `{}` to `{}`",
                        sym::$name,
                        in_ty,
                        in_elem,
                        ret_ty
                    ),
                }

                // boolean reductions operate on vectors of i1s:
                let i1 = bx.type_i1();
                let i1xn = bx.type_vector(i1, in_len as u64);
                bx.trunc(args[0].immediate(), i1xn)
            };
            return match in_elem.kind() {
                ty::Int(_) | ty::Uint(_) => {
                    let r = bx.$red(input);
                    // Boolean forms widen the single i1 result back to bool.
                    Ok(if !$boolean { r } else { bx.zext(r, bx.type_bool()) })
                }
                _ => return_error!(
                    "unsupported {} from `{}` with element `{}` to `{}`",
                    sym::$name,
                    in_ty,
                    in_elem,
                    ret_ty
                ),
            };
        }
    };
}
|
|
|
|
|
|
2020-07-08 01:04:10 +00:00
|
|
|
|
bitwise_red!(simd_reduce_and: vector_reduce_and, false);
|
|
|
|
|
bitwise_red!(simd_reduce_or: vector_reduce_or, false);
|
|
|
|
|
bitwise_red!(simd_reduce_xor: vector_reduce_xor, false);
|
|
|
|
|
bitwise_red!(simd_reduce_all: vector_reduce_and, true);
|
|
|
|
|
bitwise_red!(simd_reduce_any: vector_reduce_or, true);
|
2018-03-13 15:46:55 +00:00
|
|
|
|
|
2020-07-08 01:04:10 +00:00
|
|
|
|
if name == sym::simd_cast {
|
2015-08-14 22:20:22 +00:00
|
|
|
|
require_simd!(ret_ty, "return");
|
2020-11-13 04:32:03 +00:00
|
|
|
|
let (out_len, out_elem) = ret_ty.simd_size_and_type(bx.tcx());
|
2019-12-22 22:42:04 +00:00
|
|
|
|
require!(
|
|
|
|
|
in_len == out_len,
|
|
|
|
|
"expected return type with length {} (same as input type `{}`), \
|
2015-08-14 22:20:22 +00:00
|
|
|
|
found `{}` with length {}",
|
2019-12-22 22:42:04 +00:00
|
|
|
|
in_len,
|
|
|
|
|
in_ty,
|
|
|
|
|
ret_ty,
|
|
|
|
|
out_len
|
|
|
|
|
);
|
2015-07-29 23:40:22 +00:00
|
|
|
|
// casting cares about nominal type, not just structural type
|
2019-12-22 22:42:04 +00:00
|
|
|
|
if in_elem == out_elem {
|
|
|
|
|
return Ok(args[0].immediate());
|
|
|
|
|
}
|
2015-07-29 23:40:22 +00:00
|
|
|
|
|
2019-12-22 22:42:04 +00:00
|
|
|
|
enum Style {
|
|
|
|
|
Float,
|
|
|
|
|
Int(/* is signed? */ bool),
|
|
|
|
|
Unsupported,
|
|
|
|
|
}
|
2015-08-14 22:46:51 +00:00
|
|
|
|
|
2020-08-02 22:49:11 +00:00
|
|
|
|
let (in_style, in_width) = match in_elem.kind() {
|
2015-08-14 22:46:51 +00:00
|
|
|
|
// vectors of pointer-sized integers should've been
|
|
|
|
|
// disallowed before here, so this unwrap is safe.
|
2018-08-22 00:35:55 +00:00
|
|
|
|
ty::Int(i) => (Style::Int(true), i.bit_width().unwrap()),
|
|
|
|
|
ty::Uint(u) => (Style::Int(false), u.bit_width().unwrap()),
|
|
|
|
|
ty::Float(f) => (Style::Float, f.bit_width()),
|
2019-12-22 22:42:04 +00:00
|
|
|
|
_ => (Style::Unsupported, 0),
|
2015-08-14 22:46:51 +00:00
|
|
|
|
};
|
2020-08-02 22:49:11 +00:00
|
|
|
|
let (out_style, out_width) = match out_elem.kind() {
|
2018-08-22 00:35:55 +00:00
|
|
|
|
ty::Int(i) => (Style::Int(true), i.bit_width().unwrap()),
|
|
|
|
|
ty::Uint(u) => (Style::Int(false), u.bit_width().unwrap()),
|
|
|
|
|
ty::Float(f) => (Style::Float, f.bit_width()),
|
2019-12-22 22:42:04 +00:00
|
|
|
|
_ => (Style::Unsupported, 0),
|
2015-08-14 22:46:51 +00:00
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
match (in_style, out_style) {
|
|
|
|
|
(Style::Int(in_is_signed), Style::Int(_)) => {
|
2017-06-01 18:50:53 +00:00
|
|
|
|
return Ok(match in_width.cmp(&out_width) {
|
2018-01-05 05:12:32 +00:00
|
|
|
|
Ordering::Greater => bx.trunc(args[0].immediate(), llret_ty),
|
2017-09-19 23:32:22 +00:00
|
|
|
|
Ordering::Equal => args[0].immediate(),
|
2019-12-22 22:42:04 +00:00
|
|
|
|
Ordering::Less => {
|
|
|
|
|
if in_is_signed {
|
|
|
|
|
bx.sext(args[0].immediate(), llret_ty)
|
|
|
|
|
} else {
|
|
|
|
|
bx.zext(args[0].immediate(), llret_ty)
|
|
|
|
|
}
|
2015-08-14 22:46:51 +00:00
|
|
|
|
}
|
2019-12-22 22:42:04 +00:00
|
|
|
|
});
|
2015-08-10 23:34:25 +00:00
|
|
|
|
}
|
2015-08-14 22:46:51 +00:00
|
|
|
|
(Style::Int(in_is_signed), Style::Float) => {
|
2017-06-01 18:50:53 +00:00
|
|
|
|
return Ok(if in_is_signed {
|
2018-01-05 05:12:32 +00:00
|
|
|
|
bx.sitofp(args[0].immediate(), llret_ty)
|
2015-08-14 22:46:51 +00:00
|
|
|
|
} else {
|
2018-01-05 05:12:32 +00:00
|
|
|
|
bx.uitofp(args[0].immediate(), llret_ty)
|
2019-12-22 22:42:04 +00:00
|
|
|
|
});
|
2015-08-10 23:34:25 +00:00
|
|
|
|
}
|
2015-08-14 22:46:51 +00:00
|
|
|
|
(Style::Float, Style::Int(out_is_signed)) => {
|
2017-06-01 18:50:53 +00:00
|
|
|
|
return Ok(if out_is_signed {
|
2018-01-05 05:12:32 +00:00
|
|
|
|
bx.fptosi(args[0].immediate(), llret_ty)
|
2015-08-14 22:46:51 +00:00
|
|
|
|
} else {
|
2018-01-05 05:12:32 +00:00
|
|
|
|
bx.fptoui(args[0].immediate(), llret_ty)
|
2019-12-22 22:42:04 +00:00
|
|
|
|
});
|
2015-07-29 23:40:22 +00:00
|
|
|
|
}
|
2015-08-14 22:46:51 +00:00
|
|
|
|
(Style::Float, Style::Float) => {
|
2017-06-01 18:50:53 +00:00
|
|
|
|
return Ok(match in_width.cmp(&out_width) {
|
2018-01-05 05:12:32 +00:00
|
|
|
|
Ordering::Greater => bx.fptrunc(args[0].immediate(), llret_ty),
|
2017-09-19 23:32:22 +00:00
|
|
|
|
Ordering::Equal => args[0].immediate(),
|
2019-12-22 22:42:04 +00:00
|
|
|
|
Ordering::Less => bx.fpext(args[0].immediate(), llret_ty),
|
|
|
|
|
});
|
2015-07-29 23:40:22 +00:00
|
|
|
|
}
|
2019-12-22 22:42:04 +00:00
|
|
|
|
_ => { /* Unsupported. Fallthrough. */ }
|
2015-07-29 23:40:22 +00:00
|
|
|
|
}
|
2019-12-22 22:42:04 +00:00
|
|
|
|
require!(
|
|
|
|
|
false,
|
|
|
|
|
"unsupported cast from `{}` with element `{}` to `{}` with element `{}`",
|
|
|
|
|
in_ty,
|
|
|
|
|
in_elem,
|
|
|
|
|
ret_ty,
|
|
|
|
|
out_elem
|
|
|
|
|
);
|
2015-07-29 23:40:22 +00:00
|
|
|
|
}
|
2021-03-18 19:47:13 +00:00
|
|
|
|
macro_rules! arith_binary {
|
2016-12-11 15:59:20 +00:00
|
|
|
|
($($name: ident: $($($p: ident),* => $call: ident),*;)*) => {
|
2020-07-08 01:04:10 +00:00
|
|
|
|
$(if name == sym::$name {
|
2020-08-02 22:49:11 +00:00
|
|
|
|
match in_elem.kind() {
|
2017-09-19 23:32:22 +00:00
|
|
|
|
$($(ty::$p(_))|* => {
|
2018-01-05 05:12:32 +00:00
|
|
|
|
return Ok(bx.$call(args[0].immediate(), args[1].immediate()))
|
2017-09-19 23:32:22 +00:00
|
|
|
|
})*
|
|
|
|
|
_ => {},
|
|
|
|
|
}
|
|
|
|
|
require!(false,
|
2018-10-08 14:52:34 +00:00
|
|
|
|
"unsupported operation on `{}` with element `{}`",
|
|
|
|
|
in_ty,
|
|
|
|
|
in_elem)
|
2017-09-19 23:32:22 +00:00
|
|
|
|
})*
|
2015-07-31 18:23:12 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
2021-03-18 19:47:13 +00:00
|
|
|
|
arith_binary! {
|
2018-08-22 00:35:55 +00:00
|
|
|
|
simd_add: Uint, Int => add, Float => fadd;
|
|
|
|
|
simd_sub: Uint, Int => sub, Float => fsub;
|
|
|
|
|
simd_mul: Uint, Int => mul, Float => fmul;
|
|
|
|
|
simd_div: Uint => udiv, Int => sdiv, Float => fdiv;
|
|
|
|
|
simd_rem: Uint => urem, Int => srem, Float => frem;
|
|
|
|
|
simd_shl: Uint, Int => shl;
|
|
|
|
|
simd_shr: Uint => lshr, Int => ashr;
|
|
|
|
|
simd_and: Uint, Int => and;
|
|
|
|
|
simd_or: Uint, Int => or;
|
|
|
|
|
simd_xor: Uint, Int => xor;
|
|
|
|
|
simd_fmax: Float => maxnum;
|
|
|
|
|
simd_fmin: Float => minnum;
|
2019-02-08 14:23:48 +00:00
|
|
|
|
|
|
|
|
|
}
|
2021-03-18 19:47:13 +00:00
|
|
|
|
macro_rules! arith_unary {
|
2021-03-18 18:16:21 +00:00
|
|
|
|
($($name: ident: $($($p: ident),* => $call: ident),*;)*) => {
|
|
|
|
|
$(if name == sym::$name {
|
|
|
|
|
match in_elem.kind() {
|
|
|
|
|
$($(ty::$p(_))|* => {
|
|
|
|
|
return Ok(bx.$call(args[0].immediate()))
|
|
|
|
|
})*
|
|
|
|
|
_ => {},
|
|
|
|
|
}
|
|
|
|
|
require!(false,
|
|
|
|
|
"unsupported operation on `{}` with element `{}`",
|
|
|
|
|
in_ty,
|
|
|
|
|
in_elem)
|
|
|
|
|
})*
|
|
|
|
|
}
|
|
|
|
|
}
|
2021-03-18 19:47:13 +00:00
|
|
|
|
arith_unary! {
|
2021-03-18 18:16:21 +00:00
|
|
|
|
simd_neg: Int => neg, Float => fneg;
|
|
|
|
|
}
|
2019-02-08 14:23:48 +00:00
|
|
|
|
|
2020-07-08 01:04:10 +00:00
|
|
|
|
if name == sym::simd_saturating_add || name == sym::simd_saturating_sub {
|
2019-02-08 14:23:48 +00:00
|
|
|
|
let lhs = args[0].immediate();
|
|
|
|
|
let rhs = args[1].immediate();
|
2020-07-08 01:04:10 +00:00
|
|
|
|
let is_add = name == sym::simd_saturating_add;
|
2019-02-08 14:23:48 +00:00
|
|
|
|
let ptr_bits = bx.tcx().data_layout.pointer_size.bits() as _;
|
2020-08-02 22:49:11 +00:00
|
|
|
|
let (signed, elem_width, elem_ty) = match *in_elem.kind() {
|
2019-12-22 22:42:04 +00:00
|
|
|
|
ty::Int(i) => (true, i.bit_width().unwrap_or(ptr_bits), bx.cx.type_int_from_ty(i)),
|
|
|
|
|
ty::Uint(i) => (false, i.bit_width().unwrap_or(ptr_bits), bx.cx.type_uint_from_ty(i)),
|
2019-02-08 14:23:48 +00:00
|
|
|
|
_ => {
|
|
|
|
|
return_error!(
|
|
|
|
|
"expected element type `{}` of vector type `{}` \
|
|
|
|
|
to be a signed or unsigned integer type",
|
2020-11-13 04:32:03 +00:00
|
|
|
|
arg_tys[0].simd_size_and_type(bx.tcx()).1,
|
2019-12-22 22:42:04 +00:00
|
|
|
|
arg_tys[0]
|
2019-02-08 14:23:48 +00:00
|
|
|
|
);
|
|
|
|
|
}
|
|
|
|
|
};
|
|
|
|
|
let llvm_intrinsic = &format!(
|
|
|
|
|
"llvm.{}{}.sat.v{}i{}",
|
|
|
|
|
if signed { 's' } else { 'u' },
|
|
|
|
|
if is_add { "add" } else { "sub" },
|
2019-12-22 22:42:04 +00:00
|
|
|
|
in_len,
|
|
|
|
|
elem_width
|
2019-02-08 14:23:48 +00:00
|
|
|
|
);
|
|
|
|
|
let vec_ty = bx.cx.type_vector(elem_ty, in_len as u64);
|
|
|
|
|
|
2021-08-03 22:09:57 +00:00
|
|
|
|
let fn_ty = bx.type_func(&[vec_ty, vec_ty], vec_ty);
|
|
|
|
|
let f = bx.declare_cfn(&llvm_intrinsic, llvm::UnnamedAddr::No, fn_ty);
|
|
|
|
|
let v = bx.call(fn_ty, f, &[lhs, rhs], None);
|
2019-02-08 14:23:48 +00:00
|
|
|
|
return Ok(v);
|
2015-07-31 18:23:12 +00:00
|
|
|
|
}
|
2019-02-08 14:23:48 +00:00
|
|
|
|
|
2016-03-28 23:46:02 +00:00
|
|
|
|
span_bug!(span, "unknown SIMD intrinsic");
|
2015-07-16 18:59:23 +00:00
|
|
|
|
}
|
2015-10-26 03:51:51 +00:00
|
|
|
|
|
2017-06-01 18:50:53 +00:00
|
|
|
|
// Returns the width of an int Ty, and if it's signed or not
|
2015-10-26 03:51:51 +00:00
|
|
|
|
// Returns None if the type is not an integer
|
2016-08-23 00:56:52 +00:00
|
|
|
|
// FIXME: there’s multiple of this functions, investigate using some of the already existing
|
|
|
|
|
// stuffs.
|
2019-02-25 07:40:18 +00:00
|
|
|
|
fn int_type_width_signed(ty: Ty<'_>, cx: &CodegenCx<'_, '_>) -> Option<(u64, bool)> {
|
2020-08-02 22:49:11 +00:00
|
|
|
|
match ty.kind() {
|
2020-09-15 21:35:31 +00:00
|
|
|
|
ty::Int(t) => {
|
2020-10-15 09:44:00 +00:00
|
|
|
|
Some((t.bit_width().unwrap_or(u64::from(cx.tcx.sess.target.pointer_width)), true))
|
2020-09-15 21:35:31 +00:00
|
|
|
|
}
|
|
|
|
|
ty::Uint(t) => {
|
2020-10-15 09:44:00 +00:00
|
|
|
|
Some((t.bit_width().unwrap_or(u64::from(cx.tcx.sess.target.pointer_width)), false))
|
2020-09-15 21:35:31 +00:00
|
|
|
|
}
|
2016-03-14 23:01:12 +00:00
|
|
|
|
_ => None,
|
|
|
|
|
}
|
|
|
|
|
}
|