// rust/src/abi/mod.rs
#[cfg(debug_assertions)]
mod comments;
mod pass_mode;
mod returning;

use rustc_target::spec::abi::Abi;

use cranelift_codegen::ir::AbiParam;

use self::pass_mode::*;
use crate::prelude::*;

pub(crate) use self::returning::{can_return_to_ssa_var, codegen_return};
// Copied from https://github.com/rust-lang/rust/blob/c2f4c57296f0d929618baed0b0d6eb594abf01eb/src/librustc/ty/layout.rs#L2349
/// Computes the function signature used for ABI purposes for `instance`,
/// normalizing closures and generators into plain `fn` signatures by
/// prepending their environment argument.
pub(crate) fn fn_sig_for_fn_abi<'tcx>(tcx: TyCtxt<'tcx>, instance: Instance<'tcx>) -> ty::PolyFnSig<'tcx> {
    let ty = instance.monomorphic_ty(tcx);
    match ty.kind {
        ty::FnDef(..) |
        // Shims currently have type FnPtr. Not sure this should remain.
        ty::FnPtr(_) => {
            let mut sig = ty.fn_sig(tcx);
            if let ty::InstanceDef::VtableShim(..) = instance.def {
                // Modify `fn(self, ...)` to `fn(self: *mut Self, ...)`.
                sig = sig.map_bound(|mut sig| {
                    let mut inputs_and_output = sig.inputs_and_output.to_vec();
                    inputs_and_output[0] = tcx.mk_mut_ptr(inputs_and_output[0]);
                    sig.inputs_and_output = tcx.intern_type_list(&inputs_and_output);
                    sig
                });
            }
            sig
        }
        ty::Closure(def_id, substs) => {
            let sig = substs.as_closure().sig();

            // Prepend the closure environment (`self`) to the argument list.
            let env_ty = tcx.closure_env_ty(def_id, substs).unwrap();
            sig.map_bound(|sig| tcx.mk_fn_sig(
                std::iter::once(*env_ty.skip_binder()).chain(sig.inputs().iter().cloned()),
                sig.output(),
                sig.c_variadic,
                sig.unsafety,
                sig.abi
            ))
        }
        ty::Generator(_def_id, substs, _) => {
            let sig = substs.as_generator().poly_sig();

            // Generators are resumed through `Pin<&mut Self>`.
            let env_region = ty::ReLateBound(ty::INNERMOST, ty::BrEnv);
            let env_ty = tcx.mk_mut_ref(tcx.mk_region(env_region), ty);
            let pin_did = tcx.lang_items().pin_type().unwrap();
            let pin_adt_ref = tcx.adt_def(pin_did);
            let pin_substs = tcx.intern_substs(&[env_ty.into()]);
            let env_ty = tcx.mk_adt(pin_adt_ref, pin_substs);

            sig.map_bound(|sig| {
                // The return type is `GeneratorState<yield_ty, return_ty>`.
                let state_did = tcx.lang_items().gen_state().unwrap();
                let state_adt_ref = tcx.adt_def(state_did);
                let state_substs = tcx.intern_substs(&[
                    sig.yield_ty.into(),
                    sig.return_ty.into(),
                ]);
                let ret_ty = tcx.mk_adt(state_adt_ref, state_substs);

                tcx.mk_fn_sig(std::iter::once(env_ty),
                    ret_ty,
                    false,
                    rustc_hir::Unsafety::Normal,
                    rustc_target::spec::abi::Abi::Rust
                )
            })
        }
        _ => bug!("unexpected type {:?} in Instance::fn_sig", ty)
    }
}
/// Converts a monomorphic rustc `FnSig` into a Cranelift `Signature`.
///
/// When `is_vtable_fn` is set, the first parameter is lowered as a thin
/// pointer (virtual calls pass `self` without its vtable part). When
/// `requires_caller_location` is set, an extra trailing pointer parameter is
/// appended for the `#[track_caller]` caller location.
fn clif_sig_from_fn_sig<'tcx>(
    tcx: TyCtxt<'tcx>,
    triple: &target_lexicon::Triple,
    sig: FnSig<'tcx>,
    is_vtable_fn: bool,
    requires_caller_location: bool,
) -> Signature {
    let abi = match sig.abi {
        Abi::System => {
            if tcx.sess.target.target.options.is_like_windows {
                unimplemented!()
            } else {
                Abi::C
            }
        }
        abi => abi,
    };
    let (call_conv, inputs, output): (CallConv, Vec<Ty<'tcx>>, Ty<'tcx>) = match abi {
        Abi::Rust => (CallConv::triple_default(triple), sig.inputs().to_vec(), sig.output()),
        Abi::C => (CallConv::triple_default(triple), sig.inputs().to_vec(), sig.output()),
        Abi::RustCall => {
            // "rust-call" functions take `self` plus one tuple holding the
            // real arguments; untuple it at the ABI level here.
            assert_eq!(sig.inputs().len(), 2);
            let extra_args = match sig.inputs().last().unwrap().kind {
                ty::Tuple(ref tupled_arguments) => tupled_arguments,
                _ => bug!("argument to function with \"rust-call\" ABI is not a tuple"),
            };
            let mut inputs: Vec<Ty<'tcx>> = vec![sig.inputs()[0]];
            inputs.extend(extra_args.types());
            (CallConv::triple_default(triple), inputs, sig.output())
        }
        Abi::System => unreachable!(), // mapped to Abi::C above
        Abi::RustIntrinsic => (CallConv::triple_default(triple), sig.inputs().to_vec(), sig.output()),
        _ => unimplemented!("unsupported abi {:?}", sig.abi),
    };

    // Lower each input type to its Cranelift parameter types according to its
    // pass mode (one Rust-level argument may need zero, one or two params).
    let inputs = inputs
        .into_iter()
        .enumerate()
        .map(|(i, ty)| {
            let mut layout = tcx.layout_of(ParamEnv::reveal_all().and(ty)).unwrap();
            if i == 0 && is_vtable_fn {
                // Virtual calls turn their self param into a thin pointer.
                // See https://github.com/rust-lang/rust/blob/37b6a5e5e82497caf5353d9d856e4eb5d14cbe06/src/librustc/ty/layout.rs#L2519-L2572 for more info
                layout = tcx
                    .layout_of(ParamEnv::reveal_all().and(tcx.mk_mut_ptr(tcx.mk_unit())))
                    .unwrap();
            }
            get_pass_mode(tcx, layout).get_param_ty(tcx).into_iter()
        })
        .flatten();

    // The return value's pass mode decides whether it becomes Cranelift
    // return values or a hidden leading out-pointer parameter.
    let (mut params, returns): (Vec<_>, Vec<_>) = match get_pass_mode(
        tcx,
        tcx.layout_of(ParamEnv::reveal_all().and(output)).unwrap(),
    ) {
        PassMode::NoPass => (inputs.map(AbiParam::new).collect(), vec![]),
        PassMode::ByVal(ret_ty) => (
            inputs.map(AbiParam::new).collect(),
            vec![AbiParam::new(ret_ty)],
        ),
        PassMode::ByValPair(ret_ty_a, ret_ty_b) => (
            inputs.map(AbiParam::new).collect(),
            vec![AbiParam::new(ret_ty_a), AbiParam::new(ret_ty_b)],
        ),
        PassMode::ByRef { sized: true } => {
            (
                Some(pointer_ty(tcx)) // First param is place to put return val
                    .into_iter()
                    .chain(inputs)
                    .map(AbiParam::new)
                    .collect(),
                vec![],
            )
        }
        PassMode::ByRef { sized: false } => todo!(),
    };

    if requires_caller_location {
        // `#[track_caller]`: the caller's `Location` is passed by pointer as
        // a hidden trailing argument.
        params.push(AbiParam::new(pointer_ty(tcx)));
    }

    Signature {
        params,
        returns,
        call_conv,
    }
}
pub(crate) fn get_function_name_and_sig<'tcx>(
tcx: TyCtxt<'tcx>,
triple: &target_lexicon::Triple,
2018-08-11 11:59:08 +00:00
inst: Instance<'tcx>,
2019-02-21 14:06:09 +00:00
support_vararg: bool,
2018-08-11 11:59:08 +00:00
) -> (String, Signature) {
assert!(!inst.substs.needs_infer());
2019-08-31 17:28:09 +00:00
let fn_sig =
tcx.normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), &fn_sig_for_fn_abi(tcx, inst));
if fn_sig.c_variadic && !support_vararg {
unimpl_fatal!(tcx, tcx.def_span(inst.def_id()), "Variadic function definitions are not yet supported");
2019-01-02 11:20:32 +00:00
}
2020-01-11 15:49:42 +00:00
let sig = clif_sig_from_fn_sig(tcx, triple, fn_sig, false, inst.def.requires_caller_location(tcx));
(tcx.symbol_name(inst).name.as_str().to_string(), sig)
2018-08-11 11:59:08 +00:00
}
2019-01-02 11:20:32 +00:00
/// Instance must be monomorphized
pub(crate) fn import_function<'tcx>(
tcx: TyCtxt<'tcx>,
2019-01-02 11:20:32 +00:00
module: &mut Module<impl Backend>,
inst: Instance<'tcx>,
) -> FuncId {
let (name, sig) = get_function_name_and_sig(tcx, module.isa().triple(), inst, true);
2019-01-02 11:20:32 +00:00
module
.declare_function(&name, Linkage::Import, &sig)
.unwrap()
}
2018-09-08 16:00:06 +00:00
2019-08-18 14:52:07 +00:00
impl<'tcx, B: Backend + 'static> FunctionCx<'_, 'tcx, B> {
2018-09-08 16:00:06 +00:00
/// Instance must be monomorphized
pub(crate) fn get_function_ref(&mut self, inst: Instance<'tcx>) -> FuncRef {
2019-01-02 11:20:32 +00:00
let func_id = import_function(self.tcx, self.module, inst);
2019-02-21 14:06:09 +00:00
let func_ref = self
.module
2018-12-27 09:59:01 +00:00
.declare_func_in_func(func_id, &mut self.bcx.func);
2018-12-28 16:07:40 +00:00
#[cfg(debug_assertions)]
2020-03-20 11:18:40 +00:00
self.add_comment(func_ref, format!("{:?}", inst));
2018-12-28 16:07:40 +00:00
2018-12-27 09:59:01 +00:00
func_ref
2018-07-19 17:33:42 +00:00
}
fn lib_call(
2018-07-30 13:34:34 +00:00
&mut self,
name: &str,
input_tys: Vec<types::Type>,
output_tys: Vec<types::Type>,
2018-07-30 13:34:34 +00:00
args: &[Value],
) -> &[Value] {
2018-07-30 13:34:34 +00:00
let sig = Signature {
params: input_tys.iter().cloned().map(AbiParam::new).collect(),
returns: output_tys.iter().cloned().map(AbiParam::new).collect(),
call_conv: CallConv::triple_default(self.triple()),
2018-07-30 13:34:34 +00:00
};
let func_id = self
.module
.declare_function(&name, Linkage::Import, &sig)
.unwrap();
let func_ref = self
.module
.declare_func_in_func(func_id, &mut self.bcx.func);
2018-07-30 13:34:34 +00:00
let call_inst = self.bcx.ins().call(func_ref, args);
2019-08-31 17:28:09 +00:00
#[cfg(debug_assertions)]
{
2019-07-30 13:00:15 +00:00
self.add_comment(call_inst, format!("easy_call {}", name));
}
2018-07-30 13:34:34 +00:00
let results = self.bcx.inst_results(call_inst);
assert!(results.len() <= 2, "{}", results.len());
results
2018-07-30 13:34:34 +00:00
}
pub(crate) fn easy_call(
&mut self,
name: &str,
args: &[CValue<'tcx>],
return_ty: Ty<'tcx>,
) -> CValue<'tcx> {
let (input_tys, args): (Vec<_>, Vec<_>) = args
.into_iter()
.map(|arg| {
(
2018-11-12 15:23:39 +00:00
self.clif_type(arg.layout().ty).unwrap(),
arg.load_scalar(self),
)
2018-10-10 17:07:13 +00:00
})
.unzip();
2018-07-30 13:34:34 +00:00
let return_layout = self.layout_of(return_ty);
let return_tys = if let ty::Tuple(tup) = return_ty.kind {
tup.types().map(|ty| self.clif_type(ty).unwrap()).collect()
} else {
vec![self.clif_type(return_ty).unwrap()]
};
let ret_vals = self.lib_call(name, input_tys, return_tys, &args);
match *ret_vals {
[] => CValue::by_ref(
Pointer::const_addr(self, self.pointer_type.bytes() as i64),
2019-02-21 14:06:09 +00:00
return_layout,
),
[val] => CValue::by_val(val, return_layout),
[val, extra] => CValue::by_val_pair(val, extra, return_layout),
_ => unreachable!(),
}
2018-07-30 13:34:34 +00:00
}
2018-07-19 17:33:42 +00:00
}
2019-08-18 14:52:07 +00:00
fn local_place<'tcx>(
fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
2018-12-25 15:47:33 +00:00
local: Local,
layout: TyAndLayout<'tcx>,
2018-12-25 15:47:33 +00:00
is_ssa: bool,
) -> CPlace<'tcx> {
let place = if is_ssa {
2019-06-11 13:32:30 +00:00
CPlace::new_var(fx, local, layout)
2018-12-25 15:47:33 +00:00
} else {
CPlace::new_stack_slot(fx, layout)
2018-12-25 15:47:33 +00:00
};
#[cfg(debug_assertions)]
self::comments::add_local_place_comments(fx, place, local);
2018-12-28 14:18:17 +00:00
let prev_place = fx.local_map.insert(local, place);
debug_assert!(prev_place.is_none());
2018-12-25 15:47:33 +00:00
fx.local_map[&local]
}
/// Generates the function prologue: reads all arguments into `CValue`s,
/// allocates places for the arguments (and, when `should_codegen_locals` is
/// set, for all locals), stores the caller location for `#[track_caller]`
/// functions, and finally jumps to the block for `START_BLOCK`.
pub(crate) fn codegen_fn_prelude<'tcx>(
    fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
    start_block: Block,
    should_codegen_locals: bool,
) {
    let ssa_analyzed = crate::analyze::analyze(fx);

    #[cfg(debug_assertions)]
    self::comments::add_args_header_comment(fx);

    self::returning::codegen_return_param(fx, &ssa_analyzed, start_block);

    // None means pass_mode == NoPass
    enum ArgKind<'tcx> {
        Normal(Option<CValue<'tcx>>),
        Spread(Vec<Option<CValue<'tcx>>>),
    }

    // Read every MIR argument from the Cranelift block parameters.
    let func_params = fx
        .mir
        .args_iter()
        .map(|local| {
            let arg_ty = fx.monomorphize(&fx.mir.local_decls[local].ty);

            // Adapted from https://github.com/rust-lang/rust/blob/145155dc96757002c7b2e9de8489416e2fdbbd57/src/librustc_codegen_llvm/mir/mod.rs#L442-L482
            if Some(local) == fx.mir.spread_arg {
                // This argument (e.g. the last argument in the "rust-call" ABI)
                // is a tuple that was spread at the ABI level and now we have
                // to reconstruct it into a tuple local variable, from multiple
                // individual function arguments.
                let tupled_arg_tys = match arg_ty.kind {
                    ty::Tuple(ref tys) => tys,
                    _ => bug!("spread argument isn't a tuple?! but {:?}", arg_ty),
                };

                let mut params = Vec::new();
                for (i, arg_ty) in tupled_arg_tys.types().enumerate() {
                    let param = cvalue_for_param(fx, start_block, Some(local), Some(i), arg_ty);
                    params.push(param);
                }

                (local, ArgKind::Spread(params), arg_ty)
            } else {
                let param = cvalue_for_param(fx, start_block, Some(local), None, arg_ty);
                (local, ArgKind::Normal(param), arg_ty)
            }
        })
        .collect::<Vec<(Local, ArgKind<'tcx>, Ty<'tcx>)>>();

    assert!(fx.caller_location.is_none());
    if fx.instance.def.requires_caller_location(fx.tcx) {
        // Store caller location for `#[track_caller]`.
        fx.caller_location = Some(cvalue_for_param(fx, start_block, None, None, fx.tcx.caller_location_ty()).unwrap());
    }

    fx.bcx.switch_to_block(start_block);
    // NOTE(review): nop presumably keeps the block non-empty for the
    // debug-comment machinery — confirm before removing.
    fx.bcx.ins().nop();

    #[cfg(debug_assertions)]
    self::comments::add_locals_header_comment(fx);

    for (local, arg_kind, ty) in func_params {
        let layout = fx.layout_of(ty);

        let is_ssa = ssa_analyzed[local] == crate::analyze::SsaKind::Ssa;

        // While this is normally an optimization to prevent an unnecessary copy when an argument is
        // not mutated by the current function, this is necessary to support unsized arguments.
        match arg_kind {
            ArgKind::Normal(Some(val)) => {
                if let Some((addr, meta)) = val.try_to_ptr() {
                    let local_decl = &fx.mir.local_decls[local];
                    //                       v this ! is important
                    let internally_mutable = !val.layout().ty.is_freeze(
                        fx.tcx,
                        ParamEnv::reveal_all(),
                        local_decl.source_info.span,
                    );
                    if local_decl.mutability == mir::Mutability::Not && !internally_mutable {
                        // We wont mutate this argument, so it is fine to borrow the backing storage
                        // of this argument, to prevent a copy.
                        let place = if let Some(meta) = meta {
                            CPlace::for_ptr_with_extra(addr, meta, val.layout())
                        } else {
                            CPlace::for_ptr(addr, val.layout())
                        };

                        #[cfg(debug_assertions)]
                        self::comments::add_local_place_comments(fx, place, local);

                        let prev_place = fx.local_map.insert(local, place);
                        debug_assert!(prev_place.is_none());
                        continue;
                    }
                }
            }
            _ => {}
        }

        let place = local_place(fx, local, layout, is_ssa);

        // Write the argument value(s) into the freshly created place.
        match arg_kind {
            ArgKind::Normal(param) => {
                if let Some(param) = param {
                    place.write_cvalue(fx, param);
                }
            }
            ArgKind::Spread(params) => {
                for (i, param) in params.into_iter().enumerate() {
                    if let Some(param) = param {
                        place
                            .place_field(fx, mir::Field::new(i))
                            .write_cvalue(fx, param);
                    }
                }
            }
        }
    }

    // HACK should_codegen_locals required for the ``implement `<Box<F> as FnOnce>::call_once`
    // without `alloca``` hack in `base::trans_fn`.
    if should_codegen_locals {
        for local in fx.mir.vars_and_temps_iter() {
            let ty = fx.monomorphize(&fx.mir.local_decls[local].ty);
            let layout = fx.layout_of(ty);

            let is_ssa = ssa_analyzed[local] == crate::analyze::SsaKind::Ssa;

            local_place(fx, local, layout, is_ssa);
        }
    }

    fx.bcx
        .ins()
        .jump(*fx.block_map.get(START_BLOCK).unwrap(), &[]);
}
/// Generates code for a MIR `Call` terminator: resolves the callee,
/// special-cases LLVM intrinsics, rustc intrinsics and empty drop glue,
/// untuples "rust-call" arguments, performs the call, and branches to the
/// destination block (or traps when the callee diverges).
pub(crate) fn codegen_terminator_call<'tcx>(
    fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
    span: Span,
    func: &Operand<'tcx>,
    args: &[Operand<'tcx>],
    destination: Option<(Place<'tcx>, BasicBlock)>,
) {
    let fn_ty = fx.monomorphize(&func.ty(fx.mir, fx.tcx));
    let sig = fx
        .tcx
        .normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), &fn_ty.fn_sig(fx.tcx));

    let destination = destination
        .map(|(place, bb)| (trans_place(fx, place), bb));

    if let ty::FnDef(def_id, substs) = fn_ty.kind {
        let instance =
            ty::Instance::resolve(fx.tcx, ty::ParamEnv::reveal_all(), def_id, substs).unwrap();

        // Symbols prefixed with "llvm." are LLVM intrinsics emulated by this backend.
        if fx.tcx.symbol_name(instance).name.as_str().starts_with("llvm.") {
            crate::intrinsics::codegen_llvm_intrinsic_call(
                fx,
                &fx.tcx.symbol_name(instance).name.as_str(),
                substs,
                args,
                destination,
            );
            return;
        }

        match instance.def {
            InstanceDef::Intrinsic(_) => {
                crate::intrinsics::codegen_intrinsic_call(fx, instance, args, destination, span);
                return;
            }
            InstanceDef::DropGlue(_, None) => {
                // empty drop glue - a nop.
                let (_, dest) = destination.expect("Non terminating drop_in_place_real???");
                let ret_block = fx.get_block(dest);
                fx.bcx.ins().jump(ret_block, &[]);
                return;
            }
            _ => {}
        }
    }

    // Unpack arguments tuple for closures
    let args = if sig.abi == Abi::RustCall {
        assert_eq!(args.len(), 2, "rust-call abi requires two arguments");
        let self_arg = trans_operand(fx, &args[0]);
        let pack_arg = trans_operand(fx, &args[1]);
        let mut args = Vec::new();
        args.push(self_arg);
        match pack_arg.layout().ty.kind {
            ty::Tuple(ref tupled_arguments) => {
                for (i, _) in tupled_arguments.iter().enumerate() {
                    args.push(pack_arg.value_field(fx, mir::Field::new(i)));
                }
            }
            _ => bug!("argument to function with \"rust-call\" ABI is not a tuple"),
        }
        args
    } else {
        args.into_iter()
            .map(|arg| trans_operand(fx, arg))
            .collect::<Vec<_>>()
    };

    codegen_call_inner(
        fx,
        span,
        Some(func),
        fn_ty,
        args,
        destination.map(|(place, _)| place),
    );

    if let Some((_, dest)) = destination {
        let ret_block = fx.get_block(dest);
        fx.bcx.ins().jump(ret_block, &[]);
    } else {
        // No destination block: the callee diverges, so control must never
        // reach this point.
        trap_unreachable(fx, "[corruption] Diverging function returned");
    }
}
fn codegen_call_inner<'tcx>(
fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
2020-01-11 15:49:42 +00:00
span: Span,
2018-09-11 17:27:57 +00:00
func: Option<&Operand<'tcx>>,
fn_ty: Ty<'tcx>,
args: Vec<CValue<'tcx>>,
ret_place: Option<CPlace<'tcx>>,
) {
2020-02-14 17:23:29 +00:00
// FIXME mark the current block as cold when calling a `#[cold]` function.
2019-08-31 17:28:09 +00:00
let fn_sig = fx
.tcx
.normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), &fn_ty.fn_sig(fx.tcx));
let instance = match fn_ty.kind {
ty::FnDef(def_id, substs) => {
2018-09-08 16:00:06 +00:00
Some(Instance::resolve(fx.tcx, ParamEnv::reveal_all(), def_id, substs).unwrap())
2018-08-19 08:50:39 +00:00
}
2018-09-08 16:00:06 +00:00
_ => None,
};
2019-06-16 10:54:37 +00:00
// | indirect call target
// | | the first argument to be passed
// v v v virtual calls are special cased below
let (func_ref, first_arg, is_virtual_call) = match instance {
// Trait object call
Some(Instance {
2018-09-08 16:00:06 +00:00
def: InstanceDef::Virtual(_, idx),
..
}) => {
2019-06-16 12:47:01 +00:00
#[cfg(debug_assertions)]
{
let nop_inst = fx.bcx.ins().nop();
fx.add_comment(
nop_inst,
2019-08-31 17:28:09 +00:00
format!(
"virtual call; self arg pass mode: {:?}",
get_pass_mode(fx.tcx, args[0].layout())
),
2019-06-16 12:47:01 +00:00
);
}
2018-09-08 16:00:06 +00:00
let (ptr, method) = crate::vtable::get_ptr_and_method_ref(fx, args[0], idx);
2019-06-16 10:54:37 +00:00
(Some(method), Single(ptr), true)
}
2018-09-08 16:00:06 +00:00
// Normal call
2019-08-31 17:28:09 +00:00
Some(_) => (
None,
args.get(0)
.map(|arg| adjust_arg_for_abi(fx, *arg))
.unwrap_or(Empty),
false,
),
// Indirect call
None => {
2019-06-16 12:47:01 +00:00
#[cfg(debug_assertions)]
{
let nop_inst = fx.bcx.ins().nop();
fx.add_comment(nop_inst, "indirect call");
}
2019-02-21 14:06:09 +00:00
let func = trans_operand(fx, func.expect("indirect call without func Operand"))
.load_scalar(fx);
(
Some(func),
2019-08-31 17:28:09 +00:00
args.get(0)
.map(|arg| adjust_arg_for_abi(fx, *arg))
.unwrap_or(Empty),
2019-06-16 10:54:37 +00:00
false,
2019-02-21 14:06:09 +00:00
)
2018-10-10 17:07:13 +00:00
}
2018-09-08 16:00:06 +00:00
};
2019-08-31 17:28:09 +00:00
let (call_inst, call_args) =
self::returning::codegen_with_call_return_arg(fx, fn_sig, ret_place, |fx, return_ptr| {
2020-01-11 15:49:42 +00:00
let mut call_args: Vec<Value> = return_ptr
2019-08-31 17:28:09 +00:00
.into_iter()
.chain(first_arg.into_iter())
.chain(
args.into_iter()
.skip(1)
.map(|arg| adjust_arg_for_abi(fx, arg).into_iter())
.flatten(),
)
.collect::<Vec<_>>();
2020-01-11 15:49:42 +00:00
if instance.map(|inst| inst.def.requires_caller_location(fx.tcx)).unwrap_or(false) {
// Pass the caller location for `#[track_caller]`.
let caller_location = fx.get_caller_location(span);
call_args.extend(adjust_arg_for_abi(fx, caller_location).into_iter());
}
2019-08-31 17:28:09 +00:00
let call_inst = if let Some(func_ref) = func_ref {
2020-01-11 15:49:42 +00:00
let sig = clif_sig_from_fn_sig(
fx.tcx,
fx.triple(),
fn_sig,
is_virtual_call,
false, // calls through function pointers never pass the caller location
);
let sig = fx.bcx.import_signature(sig);
2019-08-31 17:28:09 +00:00
fx.bcx.ins().call_indirect(sig, func_ref, &call_args)
} else {
let func_ref =
fx.get_function_ref(instance.expect("non-indirect call on non-FnDef type"));
fx.bcx.ins().call(func_ref, &call_args)
};
2019-08-31 17:28:09 +00:00
(call_inst, call_args)
});
2019-02-11 18:18:52 +00:00
// FIXME find a cleaner way to support varargs
if fn_sig.c_variadic {
2019-02-11 18:18:52 +00:00
if fn_sig.abi != Abi::C {
unimpl_fatal!(fx.tcx, span, "Variadic call for non-C abi {:?}", fn_sig.abi);
2019-02-11 18:18:52 +00:00
}
let sig_ref = fx.bcx.func.dfg.call_signature(call_inst).unwrap();
2019-02-21 14:06:09 +00:00
let abi_params = call_args
.into_iter()
.map(|arg| {
let ty = fx.bcx.func.dfg.value_type(arg);
if !ty.is_int() {
// FIXME set %al to upperbound on float args once floats are supported
unimpl_fatal!(fx.tcx, span, "Non int ty {:?} for variadic call", ty);
2019-02-21 14:06:09 +00:00
}
AbiParam::new(ty)
})
.collect::<Vec<AbiParam>>();
2019-02-11 18:18:52 +00:00
fx.bcx.func.dfg.signatures[sig_ref].params = abi_params;
}
2018-07-19 17:33:42 +00:00
}
2018-08-10 17:20:13 +00:00
pub(crate) fn codegen_drop<'tcx>(
2020-01-11 15:49:42 +00:00
fx: &mut FunctionCx<'_, 'tcx, impl Backend>,
span: Span,
drop_place: CPlace<'tcx>,
) {
2019-06-16 13:57:53 +00:00
let ty = drop_place.layout().ty;
let drop_fn = Instance::resolve_drop_in_place(fx.tcx, ty);
2019-02-07 19:45:15 +00:00
2019-06-16 13:57:53 +00:00
if let ty::InstanceDef::DropGlue(_, None) = drop_fn.def {
// we don't actually need to drop anything
} else {
let drop_fn_ty = drop_fn.monomorphic_ty(fx.tcx);
match ty.kind {
2019-06-16 13:57:53 +00:00
ty::Dynamic(..) => {
2020-03-29 09:51:43 +00:00
let (ptr, vtable) = drop_place.to_ptr_maybe_unsized();
let ptr = ptr.get_addr(fx);
2019-06-16 13:57:53 +00:00
let drop_fn = crate::vtable::drop_fn_of_obj(fx, vtable.unwrap());
2019-02-07 19:45:15 +00:00
2019-08-31 17:28:09 +00:00
let fn_sig = fx.tcx.normalize_erasing_late_bound_regions(
ParamEnv::reveal_all(),
&drop_fn_ty.fn_sig(fx.tcx),
);
2019-02-07 19:45:15 +00:00
2019-06-16 13:57:53 +00:00
assert_eq!(fn_sig.output(), fx.tcx.mk_unit());
2020-01-11 15:49:42 +00:00
let sig = clif_sig_from_fn_sig(
fx.tcx,
fx.triple(),
fn_sig,
true,
false, // `drop_in_place` is never `#[track_caller]`
);
let sig = fx.bcx.import_signature(sig);
2019-06-16 13:57:53 +00:00
fx.bcx.ins().call_indirect(sig, drop_fn, &[ptr]);
}
_ => {
let arg_place = CPlace::new_stack_slot(
fx,
fx.layout_of(fx.tcx.mk_ref(
2019-06-16 13:57:53 +00:00
&ty::RegionKind::ReErased,
TypeAndMut {
ty,
mutbl: crate::rustc_hir::Mutability::Mut,
2019-06-16 13:57:53 +00:00
},
)),
2019-06-16 13:57:53 +00:00
);
drop_place.write_place_ref(fx, arg_place);
let arg_value = arg_place.to_cvalue(fx);
2020-01-11 15:49:42 +00:00
codegen_call_inner(fx, span, None, drop_fn_ty, vec![arg_value], None);
2019-06-16 13:57:53 +00:00
}
}
}
2019-02-07 19:45:15 +00:00
}