Merge pull request #1131 from bjorn3/abi_compat

Full ABI compatibility
bjorn3 2021-01-30 16:53:15 +01:00 committed by GitHub
commit 18de1b1fde
17 changed files with 782 additions and 587 deletions

Cargo.lock (generated)

@ -333,6 +333,7 @@ dependencies = [
"indexmap",
"libloading",
"object",
"smallvec",
"target-lexicon",
]


@ -21,6 +21,7 @@ object = { version = "0.22.0", default-features = false, features = ["std", "rea
ar = { git = "https://github.com/bjorn3/rust-ar.git", branch = "do_not_remove_cg_clif_ranlib" }
indexmap = "1.0.2"
libloading = { version = "0.6.0", optional = true }
smallvec = "1.6.1"
# Uncomment to use local checkout of cranelift
#[patch."https://github.com/bytecodealliance/wasmtime/"]


@ -1 +1 @@
nightly-2021-01-21
nightly-2021-01-25


@ -4,10 +4,10 @@
use std::borrow::Cow;
use rustc_middle::mir;
use rustc_target::abi::call::PassMode;
use cranelift_codegen::entity::EntityRef;
use crate::abi::pass_mode::*;
use crate::prelude::*;
pub(super) fn add_args_header_comment(fx: &mut FunctionCx<'_, '_, impl Module>) {
@ -21,9 +21,9 @@ pub(super) fn add_arg_comment<'tcx>(
kind: &str,
local: Option<mir::Local>,
local_field: Option<usize>,
params: EmptySinglePair<Value>,
pass_mode: PassMode,
ty: Ty<'tcx>,
params: &[Value],
arg_abi_mode: PassMode,
arg_layout: TyAndLayout<'tcx>,
) {
let local = if let Some(local) = local {
Cow::Owned(format!("{:?}", local))
@ -37,12 +37,20 @@ pub(super) fn add_arg_comment<'tcx>(
};
let params = match params {
Empty => Cow::Borrowed("-"),
Single(param) => Cow::Owned(format!("= {:?}", param)),
Pair(param_a, param_b) => Cow::Owned(format!("= {:?}, {:?}", param_a, param_b)),
[] => Cow::Borrowed("-"),
[param] => Cow::Owned(format!("= {:?}", param)),
[param_a, param_b] => Cow::Owned(format!("= {:?},{:?}", param_a, param_b)),
params => Cow::Owned(format!(
"= {}",
params
.iter()
.map(ToString::to_string)
.collect::<Vec<_>>()
.join(",")
)),
};
let pass_mode = format!("{:?}", pass_mode);
let pass_mode = format!("{:?}", arg_abi_mode);
fx.add_global_comment(format!(
"{kind:5}{local:>3}{local_field:<5} {params:10} {pass_mode:36} {ty:?}",
kind = kind,
@ -50,7 +58,7 @@ pub(super) fn add_arg_comment<'tcx>(
local_field = local_field,
params = params,
pass_mode = pass_mode,
ty = ty,
ty = arg_layout.ty,
));
}
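
As a rough standalone sketch of the slice-pattern formatting introduced above (the `Value` struct and `format_params` helper below are invented stand-ins, not part of the PR or of Cranelift):

use std::borrow::Cow;

// Stand-in for cranelift_codegen::ir::Value, purely for illustration.
#[derive(Debug, Clone, Copy)]
struct Value(u32);

impl std::fmt::Display for Value {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "v{}", self.0)
    }
}

fn format_params(params: &[Value]) -> Cow<'static, str> {
    match params {
        [] => Cow::Borrowed("-"),
        [param] => Cow::Owned(format!("= {:?}", param)),
        [param_a, param_b] => Cow::Owned(format!("= {:?},{:?}", param_a, param_b)),
        // The catch-all arm is what the old Empty/Single/Pair enum could not
        // express: PassMode::Cast arguments may lower to more than two values.
        params => Cow::Owned(format!(
            "= {}",
            params
                .iter()
                .map(ToString::to_string)
                .collect::<Vec<_>>()
                .join(",")
        )),
    }
}

fn main() {
    assert_eq!(format_params(&[]), "-");
    assert_eq!(format_params(&[Value(0)]), "= Value(0)");
    assert_eq!(format_params(&[Value(0), Value(1), Value(2)]), "= v0,v1,v2");
}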


@ -6,199 +6,50 @@ mod pass_mode;
mod returning;
use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
use rustc_middle::ty::layout::FnAbiExt;
use rustc_target::abi::call::{Conv, FnAbi};
use rustc_target::spec::abi::Abi;
use cranelift_codegen::ir::{AbiParam, ArgumentPurpose};
use cranelift_codegen::ir::AbiParam;
use smallvec::smallvec;
use self::pass_mode::*;
use crate::prelude::*;
pub(crate) use self::returning::{can_return_to_ssa_var, codegen_return};
// Copied from https://github.com/rust-lang/rust/blob/f52c72948aa1dd718cc1f168d21c91c584c0a662/src/librustc_middle/ty/layout.rs#L2301
#[rustfmt::skip]
pub(crate) fn fn_sig_for_fn_abi<'tcx>(tcx: TyCtxt<'tcx>, instance: Instance<'tcx>) -> ty::PolyFnSig<'tcx> {
use rustc_middle::ty::subst::Subst;
// FIXME(davidtwco,eddyb): A `ParamEnv` should be passed through to this function.
let ty = instance.ty(tcx, ty::ParamEnv::reveal_all());
match *ty.kind() {
ty::FnDef(..) => {
// HACK(davidtwco,eddyb): This is a workaround for polymorphization considering
// parameters unused if they show up in the signature, but not in the `mir::Body`
// (i.e. due to being inside a projection that got normalized, see
// `src/test/ui/polymorphization/normalized_sig_types.rs`), and codegen not keeping
// track of a polymorphization `ParamEnv` to allow normalizing later.
let mut sig = match *ty.kind() {
ty::FnDef(def_id, substs) => tcx
.normalize_erasing_regions(tcx.param_env(def_id), tcx.fn_sig(def_id))
.subst(tcx, substs),
_ => unreachable!(),
};
if let ty::InstanceDef::VtableShim(..) = instance.def {
// Modify `fn(self, ...)` to `fn(self: *mut Self, ...)`.
sig = sig.map_bound(|mut sig| {
let mut inputs_and_output = sig.inputs_and_output.to_vec();
inputs_and_output[0] = tcx.mk_mut_ptr(inputs_and_output[0]);
sig.inputs_and_output = tcx.intern_type_list(&inputs_and_output);
sig
});
}
sig
}
ty::Closure(def_id, substs) => {
let sig = substs.as_closure().sig();
let env_ty = tcx.closure_env_ty(def_id, substs).unwrap();
sig.map_bound(|sig| {
tcx.mk_fn_sig(
std::iter::once(env_ty.skip_binder()).chain(sig.inputs().iter().cloned()),
sig.output(),
sig.c_variadic,
sig.unsafety,
sig.abi,
)
})
}
ty::Generator(_, substs, _) => {
let sig = substs.as_generator().poly_sig();
let env_region = ty::ReLateBound(ty::INNERMOST, ty::BoundRegion { kind: ty::BrEnv });
let env_ty = tcx.mk_mut_ref(tcx.mk_region(env_region), ty);
let pin_did = tcx.require_lang_item(rustc_hir::LangItem::Pin, None);
let pin_adt_ref = tcx.adt_def(pin_did);
let pin_substs = tcx.intern_substs(&[env_ty.into()]);
let env_ty = tcx.mk_adt(pin_adt_ref, pin_substs);
sig.map_bound(|sig| {
let state_did = tcx.require_lang_item(rustc_hir::LangItem::GeneratorState, None);
let state_adt_ref = tcx.adt_def(state_did);
let state_substs =
tcx.intern_substs(&[sig.yield_ty.into(), sig.return_ty.into()]);
let ret_ty = tcx.mk_adt(state_adt_ref, state_substs);
tcx.mk_fn_sig(
[env_ty, sig.resume_ty].iter(),
&ret_ty,
false,
rustc_hir::Unsafety::Normal,
rustc_target::spec::abi::Abi::Rust,
)
})
}
_ => bug!("unexpected type {:?} in Instance::fn_sig", ty),
}
}
fn clif_sig_from_fn_sig<'tcx>(
fn clif_sig_from_fn_abi<'tcx>(
tcx: TyCtxt<'tcx>,
triple: &target_lexicon::Triple,
sig: FnSig<'tcx>,
span: Span,
is_vtable_fn: bool,
requires_caller_location: bool,
fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
) -> Signature {
let abi = match sig.abi {
Abi::System => Abi::C,
abi => abi,
};
let (call_conv, inputs, output): (CallConv, Vec<Ty<'tcx>>, Ty<'tcx>) = match abi {
Abi::Rust => (
CallConv::triple_default(triple),
sig.inputs().to_vec(),
sig.output(),
),
Abi::C | Abi::Unadjusted => (
CallConv::triple_default(triple),
sig.inputs().to_vec(),
sig.output(),
),
Abi::SysV64 => (CallConv::SystemV, sig.inputs().to_vec(), sig.output()),
Abi::RustCall => {
assert_eq!(sig.inputs().len(), 2);
let extra_args = match sig.inputs().last().unwrap().kind() {
ty::Tuple(ref tupled_arguments) => tupled_arguments,
_ => bug!("argument to function with \"rust-call\" ABI is not a tuple"),
};
let mut inputs: Vec<Ty<'tcx>> = vec![sig.inputs()[0]];
inputs.extend(extra_args.types());
(CallConv::triple_default(triple), inputs, sig.output())
let call_conv = match fn_abi.conv {
Conv::Rust | Conv::C => CallConv::triple_default(triple),
Conv::X86_64SysV => CallConv::SystemV,
Conv::X86_64Win64 => CallConv::WindowsFastcall,
Conv::ArmAapcs
| Conv::Msp430Intr
| Conv::PtxKernel
| Conv::X86Fastcall
| Conv::X86Intr
| Conv::X86Stdcall
| Conv::X86ThisCall
| Conv::X86VectorCall
| Conv::AmdGpuKernel
| Conv::AvrInterrupt
| Conv::AvrNonBlockingInterrupt => {
todo!("{:?}", fn_abi.conv)
}
Abi::System => unreachable!(),
Abi::RustIntrinsic => (
CallConv::triple_default(triple),
sig.inputs().to_vec(),
sig.output(),
),
_ => unimplemented!("unsupported abi {:?}", sig.abi),
};
let inputs = inputs
.into_iter()
.enumerate()
.map(|(i, ty)| {
let mut layout = tcx.layout_of(ParamEnv::reveal_all().and(ty)).unwrap();
if i == 0 && is_vtable_fn {
// Virtual calls turn their self param into a thin pointer.
// See https://github.com/rust-lang/rust/blob/37b6a5e5e82497caf5353d9d856e4eb5d14cbe06/src/librustc/ty/layout.rs#L2519-L2572 for more info
layout = tcx
.layout_of(ParamEnv::reveal_all().and(tcx.mk_mut_ptr(tcx.mk_unit())))
.unwrap();
}
let pass_mode = get_pass_mode(tcx, layout);
if abi != Abi::Rust && abi != Abi::RustCall && abi != Abi::RustIntrinsic {
match pass_mode {
PassMode::NoPass | PassMode::ByVal(_) => {}
PassMode::ByRef { size: Some(size) } => {
let purpose = ArgumentPurpose::StructArgument(u32::try_from(size.bytes()).expect("struct too big to pass on stack"));
return EmptySinglePair::Single(AbiParam::special(pointer_ty(tcx), purpose)).into_iter();
}
PassMode::ByValPair(_, _) | PassMode::ByRef { size: None } => {
tcx.sess.span_warn(
span,
&format!(
"Argument of type `{:?}` with pass mode `{:?}` is not yet supported \
for non-rust abi `{}`. Calling this function may result in a crash.",
layout.ty,
pass_mode,
abi,
),
);
}
}
}
pass_mode.get_param_ty(tcx).map(AbiParam::new).into_iter()
})
let inputs = fn_abi
.args
.iter()
.map(|arg_abi| arg_abi.get_abi_param(tcx).into_iter())
.flatten();
let (mut params, returns): (Vec<_>, Vec<_>) = match get_pass_mode(
tcx,
tcx.layout_of(ParamEnv::reveal_all().and(output)).unwrap(),
) {
PassMode::NoPass => (inputs.collect(), vec![]),
PassMode::ByVal(ret_ty) => (inputs.collect(), vec![AbiParam::new(ret_ty)]),
PassMode::ByValPair(ret_ty_a, ret_ty_b) => (
inputs.collect(),
vec![AbiParam::new(ret_ty_a), AbiParam::new(ret_ty_b)],
),
PassMode::ByRef { size: Some(_) } => {
(
Some(pointer_ty(tcx)) // First param is place to put return val
.into_iter()
.map(|ty| AbiParam::special(ty, ArgumentPurpose::StructReturn))
.chain(inputs)
.collect(),
vec![],
)
}
PassMode::ByRef { size: None } => todo!(),
};
if requires_caller_location {
params.push(AbiParam::new(pointer_ty(tcx)));
}
let (return_ptr, returns) = fn_abi.ret.get_abi_return(tcx);
// Sometimes the first param is a pointer to the place where the return value needs to be stored.
let params: Vec<_> = return_ptr.into_iter().chain(inputs).collect();
Signature {
params,
@ -207,30 +58,17 @@ fn clif_sig_from_fn_sig<'tcx>(
}
}
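
A minimal model of the parameter assembly just above, assuming only what the diff shows: an optional sret pointer for indirect returns comes first, then each argument's lowered params flattened in order. `assemble_params` and the string labels are invented stand-ins for `AbiParam` values:

// Strings stand in for AbiParam values; this is not cg_clif code.
fn assemble_params<'a>(return_ptr: Option<&'a str>, arg_params: &[Vec<&'a str>]) -> Vec<&'a str> {
    return_ptr
        .into_iter()
        .chain(arg_params.iter().flatten().copied())
        .collect()
}

fn main() {
    // Illustrative case: an indirectly-returned value, one Direct argument
    // and one Pair argument.
    let params = assemble_params(Some("sret_ptr"), &[vec!["i8"], vec!["i64", "i64"]]);
    assert_eq!(params, ["sret_ptr", "i8", "i64", "i64"]);
}
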
pub(crate) fn get_function_name_and_sig<'tcx>(
pub(crate) fn get_function_sig<'tcx>(
tcx: TyCtxt<'tcx>,
triple: &target_lexicon::Triple,
inst: Instance<'tcx>,
support_vararg: bool,
) -> (String, Signature) {
) -> Signature {
assert!(!inst.substs.needs_infer());
let fn_sig = tcx
.normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), fn_sig_for_fn_abi(tcx, inst));
if fn_sig.c_variadic && !support_vararg {
tcx.sess.span_fatal(
tcx.def_span(inst.def_id()),
"Variadic function definitions are not yet supported",
);
}
let sig = clif_sig_from_fn_sig(
clif_sig_from_fn_abi(
tcx,
triple,
fn_sig,
tcx.def_span(inst.def_id()),
false,
inst.def.requires_caller_location(tcx),
);
(tcx.symbol_name(inst).name.to_string(), sig)
&FnAbi::of_instance(&RevealAllLayoutCx(tcx), inst, &[]),
)
}
/// Instance must be monomorphized
@ -239,7 +77,8 @@ pub(crate) fn import_function<'tcx>(
module: &mut impl Module,
inst: Instance<'tcx>,
) -> FuncId {
let (name, sig) = get_function_name_and_sig(tcx, module.isa().triple(), inst, true);
let name = tcx.symbol_name(inst).name.to_string();
let sig = get_function_sig(tcx, module.isa().triple(), inst);
module
.declare_function(&name, Linkage::Import, &sig)
.unwrap()
@ -263,13 +102,13 @@ impl<'tcx, M: Module> FunctionCx<'_, 'tcx, M> {
pub(crate) fn lib_call(
&mut self,
name: &str,
input_tys: Vec<types::Type>,
output_tys: Vec<types::Type>,
params: Vec<AbiParam>,
returns: Vec<AbiParam>,
args: &[Value],
) -> &[Value] {
let sig = Signature {
params: input_tys.iter().cloned().map(AbiParam::new).collect(),
returns: output_tys.iter().cloned().map(AbiParam::new).collect(),
params,
returns,
call_conv: CallConv::triple_default(self.triple()),
};
let func_id = self
@ -301,16 +140,18 @@ impl<'tcx, M: Module> FunctionCx<'_, 'tcx, M> {
.iter()
.map(|arg| {
(
self.clif_type(arg.layout().ty).unwrap(),
AbiParam::new(self.clif_type(arg.layout().ty).unwrap()),
arg.load_scalar(self),
)
})
.unzip();
let return_layout = self.layout_of(return_ty);
let return_tys = if let ty::Tuple(tup) = return_ty.kind() {
tup.types().map(|ty| self.clif_type(ty).unwrap()).collect()
tup.types()
.map(|ty| AbiParam::new(self.clif_type(ty).unwrap()))
.collect()
} else {
vec![self.clif_type(return_ty).unwrap()]
vec![AbiParam::new(self.clif_type(return_ty).unwrap())]
};
let ret_vals = self.lib_call(name, input_tys, return_tys, &args);
match *ret_vals {
@ -352,12 +193,25 @@ pub(crate) fn codegen_fn_prelude<'tcx>(
fx: &mut FunctionCx<'_, 'tcx, impl Module>,
start_block: Block,
) {
fx.bcx.append_block_params_for_function_params(start_block);
fx.bcx.switch_to_block(start_block);
fx.bcx.ins().nop();
let ssa_analyzed = crate::analyze::analyze(fx);
#[cfg(debug_assertions)]
self::comments::add_args_header_comment(fx);
let ret_place = self::returning::codegen_return_param(fx, &ssa_analyzed, start_block);
let mut block_params_iter = fx
.bcx
.func
.dfg
.block_params(start_block)
.to_vec()
.into_iter();
let ret_place =
self::returning::codegen_return_param(fx, &ssa_analyzed, &mut block_params_iter);
assert_eq!(fx.local_map.push(ret_place), RETURN_PLACE);
// None means pass_mode == NoPass
@ -366,6 +220,9 @@ pub(crate) fn codegen_fn_prelude<'tcx>(
Spread(Vec<Option<CValue<'tcx>>>),
}
let fn_abi = fx.fn_abi.take().unwrap();
let mut arg_abis_iter = fn_abi.args.iter();
let func_params = fx
.mir
.args_iter()
@ -385,14 +242,18 @@ pub(crate) fn codegen_fn_prelude<'tcx>(
};
let mut params = Vec::new();
for (i, arg_ty) in tupled_arg_tys.types().enumerate() {
let param = cvalue_for_param(fx, start_block, Some(local), Some(i), arg_ty);
for (i, _arg_ty) in tupled_arg_tys.types().enumerate() {
let arg_abi = arg_abis_iter.next().unwrap();
let param =
cvalue_for_param(fx, Some(local), Some(i), arg_abi, &mut block_params_iter);
params.push(param);
}
(local, ArgKind::Spread(params), arg_ty)
} else {
let param = cvalue_for_param(fx, start_block, Some(local), None, arg_ty);
let arg_abi = arg_abis_iter.next().unwrap();
let param =
cvalue_for_param(fx, Some(local), None, arg_abi, &mut block_params_iter);
(local, ArgKind::Normal(param), arg_ty)
}
})
@ -401,13 +262,14 @@ pub(crate) fn codegen_fn_prelude<'tcx>(
assert!(fx.caller_location.is_none());
if fx.instance.def.requires_caller_location(fx.tcx) {
// Store caller location for `#[track_caller]`.
fx.caller_location = Some(
cvalue_for_param(fx, start_block, None, None, fx.tcx.caller_location_ty()).unwrap(),
);
let arg_abi = arg_abis_iter.next().unwrap();
fx.caller_location =
Some(cvalue_for_param(fx, None, None, arg_abi, &mut block_params_iter).unwrap());
}
fx.bcx.switch_to_block(start_block);
fx.bcx.ins().nop();
assert!(arg_abis_iter.next().is_none(), "ArgAbi left behind");
fx.fn_abi = Some(fn_abi);
assert!(block_params_iter.next().is_none(), "arg_value left behind");
#[cfg(debug_assertions)]
self::comments::add_locals_header_comment(fx);
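
The prelude now appends every block parameter up front with `append_block_params_for_function_params` and drains them through `block_params_iter`, each `ArgAbi` consuming exactly as many values as its lowering produced, with the trailing asserts catching any mismatch. A self-contained model of that draining discipline, with `MockArgAbi` and `MockValue` as invented stand-ins:

#[derive(Debug, Clone, Copy, PartialEq)]
struct MockValue(u32);

struct MockArgAbi {
    // How many Cranelift-level parameters this argument expands to:
    // 0 for Ignore, 1 for Direct/Indirect, 2 for Pair, 1..n for Cast.
    param_count: usize,
}

fn consume_params(arg_abis: &[MockArgAbi], block_params: &[MockValue]) -> Vec<Vec<MockValue>> {
    let mut block_params_iter = block_params.iter().copied();
    let mut per_arg = Vec::new();
    for arg_abi in arg_abis {
        // Each argument takes exactly the values its ABI lowering produced.
        let vals: Vec<MockValue> = (0..arg_abi.param_count)
            .map(|_| block_params_iter.next().expect("too few block params"))
            .collect();
        per_arg.push(vals);
    }
    // Mirrors the `assert!(block_params_iter.next().is_none(), ...)` in the diff.
    assert!(block_params_iter.next().is_none(), "arg_value left behind");
    per_arg
}

fn main() {
    let abis = [
        MockArgAbi { param_count: 1 }, // a Direct argument
        MockArgAbi { param_count: 2 }, // a Pair argument
        MockArgAbi { param_count: 0 }, // an Ignore (ZST) argument
    ];
    let params = [MockValue(0), MockValue(1), MockValue(2)];
    let per_arg = consume_params(&abis, &params);
    assert_eq!(
        per_arg,
        vec![vec![MockValue(0)], vec![MockValue(1), MockValue(2)], vec![]]
    );
}
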
@ -533,6 +395,21 @@ pub(crate) fn codegen_terminator_call<'tcx>(
None
};
let extra_args = &args[fn_sig.inputs().len()..];
let extra_args = extra_args
.iter()
.map(|op_arg| fx.monomorphize(op_arg.ty(fx.mir, fx.tcx)))
.collect::<Vec<_>>();
let fn_abi = if let Some(instance) = instance {
FnAbi::of_instance(&RevealAllLayoutCx(fx.tcx), instance, &extra_args)
} else {
FnAbi::of_fn_ptr(
&RevealAllLayoutCx(fx.tcx),
fn_ty.fn_sig(fx.tcx),
&extra_args,
)
};
let is_cold = instance
.map(|inst| {
fx.tcx
@ -570,8 +447,8 @@ pub(crate) fn codegen_terminator_call<'tcx>(
// | indirect call target
// | | the first argument to be passed
// v v v virtual calls are special cased below
let (func_ref, first_arg, is_virtual_call) = match instance {
// v v
let (func_ref, first_arg) = match instance {
// Trait object call
Some(Instance {
def: InstanceDef::Virtual(_, idx),
@ -582,23 +459,19 @@ pub(crate) fn codegen_terminator_call<'tcx>(
let nop_inst = fx.bcx.ins().nop();
fx.add_comment(
nop_inst,
format!(
"virtual call; self arg pass mode: {:?}",
get_pass_mode(fx.tcx, args[0].layout())
),
format!("virtual call; self arg pass mode: {:?}", &fn_abi.args[0],),
);
}
let (ptr, method) = crate::vtable::get_ptr_and_method_ref(fx, args[0], idx);
(Some(method), Single(ptr), true)
(Some(method), smallvec![ptr])
}
// Normal call
Some(_) => (
None,
args.get(0)
.map(|arg| adjust_arg_for_abi(fx, *arg))
.unwrap_or(Empty),
false,
.map(|arg| adjust_arg_for_abi(fx, *arg, &fn_abi.args[0]))
.unwrap_or(smallvec![]),
),
// Indirect call
@ -612,23 +485,27 @@ pub(crate) fn codegen_terminator_call<'tcx>(
(
Some(func),
args.get(0)
.map(|arg| adjust_arg_for_abi(fx, *arg))
.unwrap_or(Empty),
false,
.map(|arg| adjust_arg_for_abi(fx, *arg, &fn_abi.args[0]))
.unwrap_or(smallvec![]),
)
}
};
let ret_place = destination.map(|(place, _)| place);
let (call_inst, call_args) =
self::returning::codegen_with_call_return_arg(fx, fn_sig, ret_place, |fx, return_ptr| {
let (call_inst, call_args) = self::returning::codegen_with_call_return_arg(
fx,
&fn_abi.ret,
ret_place,
|fx, return_ptr| {
let regular_args_count = args.len();
let mut call_args: Vec<Value> = return_ptr
.into_iter()
.chain(first_arg.into_iter())
.chain(
args.into_iter()
.enumerate()
.skip(1)
.map(|arg| adjust_arg_for_abi(fx, arg).into_iter())
.map(|(i, arg)| adjust_arg_for_abi(fx, arg, &fn_abi.args[i]).into_iter())
.flatten(),
)
.collect::<Vec<_>>();
@ -639,18 +516,17 @@ pub(crate) fn codegen_terminator_call<'tcx>(
{
// Pass the caller location for `#[track_caller]`.
let caller_location = fx.get_caller_location(span);
call_args.extend(adjust_arg_for_abi(fx, caller_location).into_iter());
call_args.extend(
adjust_arg_for_abi(fx, caller_location, &fn_abi.args[regular_args_count])
.into_iter(),
);
assert_eq!(fn_abi.args.len(), regular_args_count + 1);
} else {
assert_eq!(fn_abi.args.len(), regular_args_count);
}
let call_inst = if let Some(func_ref) = func_ref {
let sig = clif_sig_from_fn_sig(
fx.tcx,
fx.triple(),
fn_sig,
span,
is_virtual_call,
false, // calls through function pointers never pass the caller location
);
let sig = clif_sig_from_fn_abi(fx.tcx, fx.triple(), &fn_abi);
let sig = fx.bcx.import_signature(sig);
fx.bcx.ins().call_indirect(sig, func_ref, &call_args)
} else {
@ -660,7 +536,8 @@ pub(crate) fn codegen_terminator_call<'tcx>(
};
(call_inst, call_args)
});
},
);
// FIXME find a cleaner way to support varargs
if fn_sig.c_variadic {
@ -701,37 +578,33 @@ pub(crate) fn codegen_drop<'tcx>(
drop_place: CPlace<'tcx>,
) {
let ty = drop_place.layout().ty;
let drop_fn = Instance::resolve_drop_in_place(fx.tcx, ty).polymorphize(fx.tcx);
let drop_instance = Instance::resolve_drop_in_place(fx.tcx, ty).polymorphize(fx.tcx);
if let ty::InstanceDef::DropGlue(_, None) = drop_fn.def {
if let ty::InstanceDef::DropGlue(_, None) = drop_instance.def {
// we don't actually need to drop anything
} else {
let drop_fn_ty = drop_fn.ty(fx.tcx, ParamEnv::reveal_all());
let fn_sig = fx.tcx.normalize_erasing_late_bound_regions(
ParamEnv::reveal_all(),
drop_fn_ty.fn_sig(fx.tcx),
);
assert_eq!(fn_sig.output(), fx.tcx.mk_unit());
match ty.kind() {
ty::Dynamic(..) => {
let (ptr, vtable) = drop_place.to_ptr_maybe_unsized();
let ptr = ptr.get_addr(fx);
let drop_fn = crate::vtable::drop_fn_of_obj(fx, vtable.unwrap());
let sig = clif_sig_from_fn_sig(
fx.tcx,
fx.triple(),
fn_sig,
span,
true,
false, // `drop_in_place` is never `#[track_caller]`
);
// FIXME(eddyb) perhaps move some of this logic into
// `Instance::resolve_drop_in_place`?
let virtual_drop = Instance {
def: ty::InstanceDef::Virtual(drop_instance.def_id(), 0),
substs: drop_instance.substs,
};
let fn_abi = FnAbi::of_instance(&RevealAllLayoutCx(fx.tcx), virtual_drop, &[]);
let sig = clif_sig_from_fn_abi(fx.tcx, fx.triple(), &fn_abi);
let sig = fx.bcx.import_signature(sig);
fx.bcx.ins().call_indirect(sig, drop_fn, &[ptr]);
}
_ => {
assert!(!matches!(drop_fn.def, InstanceDef::Virtual(_, _)));
assert!(!matches!(drop_instance.def, InstanceDef::Virtual(_, _)));
let fn_abi = FnAbi::of_instance(&RevealAllLayoutCx(fx.tcx), drop_instance, &[]);
let arg_value = drop_place.place_ref(
fx,
@ -743,17 +616,19 @@ pub(crate) fn codegen_drop<'tcx>(
},
)),
);
let arg_value = adjust_arg_for_abi(fx, arg_value);
let arg_value = adjust_arg_for_abi(fx, arg_value, &fn_abi.args[0]);
let mut call_args: Vec<Value> = arg_value.into_iter().collect::<Vec<_>>();
if drop_fn.def.requires_caller_location(fx.tcx) {
if drop_instance.def.requires_caller_location(fx.tcx) {
// Pass the caller location for `#[track_caller]`.
let caller_location = fx.get_caller_location(span);
call_args.extend(adjust_arg_for_abi(fx, caller_location).into_iter());
call_args.extend(
adjust_arg_for_abi(fx, caller_location, &fn_abi.args[1]).into_iter(),
);
}
let func_ref = fx.get_function_ref(drop_fn);
let func_ref = fx.get_function_ref(drop_instance);
fx.bcx.ins().call(func_ref, &call_args);
}
}


@ -1,140 +1,279 @@
//! Argument passing
use crate::prelude::*;
use crate::value_and_place::assert_assignable;
pub(super) use EmptySinglePair::*;
use cranelift_codegen::ir::{ArgumentExtension, ArgumentPurpose};
use rustc_target::abi::call::{
ArgAbi, ArgAttributes, ArgExtension as RustcArgExtension, CastTarget, PassMode, Reg, RegKind,
};
use smallvec::{smallvec, SmallVec};
#[derive(Copy, Clone, Debug)]
pub(super) enum PassMode {
NoPass,
ByVal(Type),
ByValPair(Type, Type),
ByRef { size: Option<Size> },
pub(super) trait ArgAbiExt<'tcx> {
fn get_abi_param(&self, tcx: TyCtxt<'tcx>) -> SmallVec<[AbiParam; 2]>;
fn get_abi_return(&self, tcx: TyCtxt<'tcx>) -> (Option<AbiParam>, Vec<AbiParam>);
}
#[derive(Copy, Clone, Debug)]
pub(super) enum EmptySinglePair<T> {
Empty,
Single(T),
Pair(T, T),
fn reg_to_abi_param(reg: Reg) -> AbiParam {
let clif_ty = match (reg.kind, reg.size.bytes()) {
(RegKind::Integer, 1) => types::I8,
(RegKind::Integer, 2) => types::I16,
(RegKind::Integer, 4) => types::I32,
(RegKind::Integer, 8) => types::I64,
(RegKind::Integer, 16) => types::I128,
(RegKind::Float, 4) => types::F32,
(RegKind::Float, 8) => types::F64,
(RegKind::Vector, size) => types::I8.by(u16::try_from(size).unwrap()).unwrap(),
_ => unreachable!("{:?}", reg),
};
AbiParam::new(clif_ty)
}
impl<T> EmptySinglePair<T> {
pub(super) fn into_iter(self) -> EmptySinglePairIter<T> {
EmptySinglePairIter(self)
}
pub(super) fn map<U>(self, mut f: impl FnMut(T) -> U) -> EmptySinglePair<U> {
match self {
Empty => Empty,
Single(v) => Single(f(v)),
Pair(a, b) => Pair(f(a), f(b)),
}
fn apply_arg_attrs_to_abi_param(mut param: AbiParam, arg_attrs: ArgAttributes) -> AbiParam {
match arg_attrs.arg_ext {
RustcArgExtension::None => {}
RustcArgExtension::Zext => param.extension = ArgumentExtension::Uext,
RustcArgExtension::Sext => param.extension = ArgumentExtension::Sext,
}
param
}
pub(super) struct EmptySinglePairIter<T>(EmptySinglePair<T>);
impl<T> Iterator for EmptySinglePairIter<T> {
type Item = T;
fn next(&mut self) -> Option<T> {
match std::mem::replace(&mut self.0, Empty) {
Empty => None,
Single(v) => Some(v),
Pair(a, b) => {
self.0 = Single(b);
Some(a)
}
}
}
}
impl<T: std::fmt::Debug> EmptySinglePair<T> {
pub(super) fn assert_single(self) -> T {
match self {
Single(v) => v,
_ => panic!("Called assert_single on {:?}", self),
}
}
pub(super) fn assert_pair(self) -> (T, T) {
match self {
Pair(a, b) => (a, b),
_ => panic!("Called assert_pair on {:?}", self),
}
}
}
impl PassMode {
pub(super) fn get_param_ty(self, tcx: TyCtxt<'_>) -> EmptySinglePair<Type> {
match self {
PassMode::NoPass => Empty,
PassMode::ByVal(clif_type) => Single(clif_type),
PassMode::ByValPair(a, b) => Pair(a, b),
PassMode::ByRef { size: Some(_) } => Single(pointer_ty(tcx)),
PassMode::ByRef { size: None } => Pair(pointer_ty(tcx), pointer_ty(tcx)),
}
}
}
pub(super) fn get_pass_mode<'tcx>(tcx: TyCtxt<'tcx>, layout: TyAndLayout<'tcx>) -> PassMode {
if layout.is_zst() {
// WARNING zst arguments must never be passed, as that will break CastKind::ClosureFnPointer
PassMode::NoPass
fn cast_target_to_abi_params(cast: CastTarget) -> SmallVec<[AbiParam; 2]> {
let (rest_count, rem_bytes) = if cast.rest.unit.size.bytes() == 0 {
(0, 0)
} else {
match &layout.abi {
Abi::Uninhabited => PassMode::NoPass,
Abi::Scalar(scalar) => PassMode::ByVal(scalar_to_clif_type(tcx, scalar.clone())),
Abi::ScalarPair(a, b) => {
let a = scalar_to_clif_type(tcx, a.clone());
let b = scalar_to_clif_type(tcx, b.clone());
if a == types::I128 && b == types::I128 {
// Returning (i128, i128) by-val-pair would take 4 regs, while only 3 are
// available on x86_64. Cranelift gets confused when too many return params
// are used.
PassMode::ByRef {
size: Some(layout.size),
}
} else {
PassMode::ByValPair(a, b)
}
}
(
cast.rest.total.bytes() / cast.rest.unit.size.bytes(),
cast.rest.total.bytes() % cast.rest.unit.size.bytes(),
)
};
// FIXME implement Vector Abi in a cg_llvm compatible way
Abi::Vector { .. } => {
if let Some(vector_ty) = crate::intrinsics::clif_vector_type(tcx, layout) {
PassMode::ByVal(vector_ty)
} else {
PassMode::ByRef {
size: Some(layout.size),
}
}
}
Abi::Aggregate { sized: true } => PassMode::ByRef {
size: Some(layout.size),
},
Abi::Aggregate { sized: false } => PassMode::ByRef { size: None },
if cast.prefix.iter().all(|x| x.is_none()) {
// Simplify to a single unit when there is no prefix and size <= unit size
if cast.rest.total <= cast.rest.unit.size {
let clif_ty = match (cast.rest.unit.kind, cast.rest.unit.size.bytes()) {
(RegKind::Integer, 1) => types::I8,
(RegKind::Integer, 2) => types::I16,
(RegKind::Integer, 3..=4) => types::I32,
(RegKind::Integer, 5..=8) => types::I64,
(RegKind::Integer, 9..=16) => types::I128,
(RegKind::Float, 4) => types::F32,
(RegKind::Float, 8) => types::F64,
(RegKind::Vector, size) => types::I8.by(u16::try_from(size).unwrap()).unwrap(),
_ => unreachable!("{:?}", cast.rest.unit),
};
return smallvec![AbiParam::new(clif_ty)];
}
}
// Create list of fields in the main structure
let mut args = cast
.prefix
.iter()
.flatten()
.map(|&kind| {
reg_to_abi_param(Reg {
kind,
size: cast.prefix_chunk_size,
})
})
.chain((0..rest_count).map(|_| reg_to_abi_param(cast.rest.unit)))
.collect::<SmallVec<_>>();
// Append final integer
if rem_bytes != 0 {
// Only integers can really be split further.
assert_eq!(cast.rest.unit.kind, RegKind::Integer);
args.push(reg_to_abi_param(Reg {
kind: RegKind::Integer,
size: Size::from_bytes(rem_bytes),
}));
}
args
}
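
The rest_count/rem_bytes split above is plain division and remainder over byte sizes. A small worked example under that assumption (byte counts only, no real `Reg` or Cranelift types):

// Mirrors the arithmetic in cast_target_to_abi_params; the returned pair is
// (number of full units, leftover bytes that become one smaller integer).
fn split_rest(total_bytes: u64, unit_bytes: u64) -> (u64, u64) {
    if unit_bytes == 0 {
        (0, 0)
    } else {
        (total_bytes / unit_bytes, total_bytes % unit_bytes)
    }
}

fn main() {
    // A 12-byte aggregate cast to 8-byte integer units: one full I64 chunk
    // plus a trailing I32 for the remaining 4 bytes.
    assert_eq!(split_rest(12, 8), (1, 4));

    // A 16-byte aggregate with no prefix is not simplified to a single unit
    // (16 > 8), so it lowers to two I64 parameters and no remainder.
    assert_eq!(split_rest(16, 8), (2, 0));
}
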
impl<'tcx> ArgAbiExt<'tcx> for ArgAbi<'tcx, Ty<'tcx>> {
fn get_abi_param(&self, tcx: TyCtxt<'tcx>) -> SmallVec<[AbiParam; 2]> {
match self.mode {
PassMode::Ignore => smallvec![],
PassMode::Direct(attrs) => match &self.layout.abi {
Abi::Scalar(scalar) => {
smallvec![apply_arg_attrs_to_abi_param(
AbiParam::new(scalar_to_clif_type(tcx, scalar.clone())),
attrs
)]
}
Abi::Vector { .. } => {
let vector_ty = crate::intrinsics::clif_vector_type(tcx, self.layout).unwrap();
smallvec![AbiParam::new(vector_ty)]
}
_ => unreachable!("{:?}", self.layout.abi),
},
PassMode::Pair(attrs_a, attrs_b) => match &self.layout.abi {
Abi::ScalarPair(a, b) => {
let a = scalar_to_clif_type(tcx, a.clone());
let b = scalar_to_clif_type(tcx, b.clone());
smallvec![
apply_arg_attrs_to_abi_param(AbiParam::new(a), attrs_a),
apply_arg_attrs_to_abi_param(AbiParam::new(b), attrs_b),
]
}
_ => unreachable!("{:?}", self.layout.abi),
},
PassMode::Cast(cast) => cast_target_to_abi_params(cast),
PassMode::Indirect {
attrs,
extra_attrs: None,
on_stack,
} => {
if on_stack {
let size = u32::try_from(self.layout.size.bytes()).unwrap();
smallvec![apply_arg_attrs_to_abi_param(
AbiParam::special(pointer_ty(tcx), ArgumentPurpose::StructArgument(size),),
attrs
)]
} else {
smallvec![apply_arg_attrs_to_abi_param(
AbiParam::new(pointer_ty(tcx)),
attrs
)]
}
}
PassMode::Indirect {
attrs,
extra_attrs: Some(extra_attrs),
on_stack,
} => {
assert!(!on_stack);
smallvec![
apply_arg_attrs_to_abi_param(AbiParam::new(pointer_ty(tcx)), attrs),
apply_arg_attrs_to_abi_param(AbiParam::new(pointer_ty(tcx)), extra_attrs),
]
}
}
}
fn get_abi_return(&self, tcx: TyCtxt<'tcx>) -> (Option<AbiParam>, Vec<AbiParam>) {
match self.mode {
PassMode::Ignore => (None, vec![]),
PassMode::Direct(_) => match &self.layout.abi {
Abi::Scalar(scalar) => (
None,
vec![AbiParam::new(scalar_to_clif_type(tcx, scalar.clone()))],
),
Abi::Vector { .. } => {
let vector_ty = crate::intrinsics::clif_vector_type(tcx, self.layout).unwrap();
(None, vec![AbiParam::new(vector_ty)])
}
_ => unreachable!("{:?}", self.layout.abi),
},
PassMode::Pair(_, _) => match &self.layout.abi {
Abi::ScalarPair(a, b) => {
let a = scalar_to_clif_type(tcx, a.clone());
let b = scalar_to_clif_type(tcx, b.clone());
(None, vec![AbiParam::new(a), AbiParam::new(b)])
}
_ => unreachable!("{:?}", self.layout.abi),
},
PassMode::Cast(cast) => (None, cast_target_to_abi_params(cast).into_iter().collect()),
PassMode::Indirect {
attrs: _,
extra_attrs: None,
on_stack,
} => {
assert!(!on_stack);
(
Some(AbiParam::special(
pointer_ty(tcx),
ArgumentPurpose::StructReturn,
)),
vec![],
)
}
PassMode::Indirect {
attrs: _,
extra_attrs: Some(_),
on_stack: _,
} => unreachable!("unsized return value"),
}
}
}
pub(super) fn to_casted_value<'tcx>(
fx: &mut FunctionCx<'_, 'tcx, impl Module>,
arg: CValue<'tcx>,
cast: CastTarget,
) -> SmallVec<[Value; 2]> {
let (ptr, meta) = arg.force_stack(fx);
assert!(meta.is_none());
let mut offset = 0;
cast_target_to_abi_params(cast)
.into_iter()
.map(|param| {
let val = ptr
.offset_i64(fx, offset)
.load(fx, param.value_type, MemFlags::new());
offset += i64::from(param.value_type.bytes());
val
})
.collect()
}
pub(super) fn from_casted_value<'tcx>(
fx: &mut FunctionCx<'_, 'tcx, impl Module>,
block_params: &[Value],
layout: TyAndLayout<'tcx>,
cast: CastTarget,
) -> CValue<'tcx> {
let abi_params = cast_target_to_abi_params(cast);
let size: u32 = abi_params
.iter()
.map(|param| param.value_type.bytes())
.sum();
// Stack slot size may be bigger, for example for `[u8; 3]`, which is packed into an `i32`.
assert!(u64::from(size) >= layout.size.bytes());
let stack_slot = fx.bcx.create_stack_slot(StackSlotData {
kind: StackSlotKind::ExplicitSlot,
// FIXME Don't force the size to a multiple of 16 bytes once Cranelift gets a way to
// specify stack slot alignment.
size: (size + 15) / 16 * 16,
offset: None,
});
let ptr = Pointer::new(fx.bcx.ins().stack_addr(pointer_ty(fx.tcx), stack_slot, 0));
let mut offset = 0;
let mut block_params_iter = block_params.into_iter().copied();
for param in abi_params {
let val = ptr.offset_i64(fx, offset).store(
fx,
block_params_iter.next().unwrap(),
MemFlags::new(),
);
offset += i64::from(param.value_type.bytes());
val
}
assert_eq!(block_params_iter.next(), None, "Leftover block param");
CValue::by_ref(ptr, layout)
}
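
`to_casted_value` and `from_casted_value` move a value through a stack copy in fixed-width chunks at accumulating byte offsets. A standalone model of that round trip, with a byte buffer standing in for Cranelift loads and stores (`to_chunks` and `from_chunks` are invented helpers):

fn to_chunks(bytes: &[u8], chunk_widths: &[usize]) -> Vec<Vec<u8>> {
    let mut offset = 0;
    chunk_widths
        .iter()
        .map(|&width| {
            // Like to_casted_value: read one ABI-param-sized chunk, then
            // advance the offset by its width.
            let chunk = bytes[offset..offset + width].to_vec();
            offset += width;
            chunk
        })
        .collect()
}

fn from_chunks(chunks: &[Vec<u8>], total_len: usize) -> Vec<u8> {
    // Like from_casted_value, the backing slot may be larger than the value
    // itself; only total_len bytes are kept at the end.
    let slot_len = total_len.max(chunks.iter().map(Vec::len).sum());
    let mut out = vec![0u8; slot_len];
    let mut offset = 0;
    for chunk in chunks {
        out[offset..offset + chunk.len()].copy_from_slice(chunk);
        offset += chunk.len();
    }
    out.truncate(total_len);
    out
}

fn main() {
    // A 12-byte value split as one 8-byte and one 4-byte chunk
    // (see the rest_count/rem_bytes example earlier).
    let value: Vec<u8> = (0u8..12).collect();
    let chunks = to_chunks(&value, &[8, 4]);
    assert_eq!(from_chunks(&chunks, value.len()), value);
}
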
/// Get a set of values to be passed as function arguments.
pub(super) fn adjust_arg_for_abi<'tcx>(
fx: &mut FunctionCx<'_, 'tcx, impl Module>,
arg: CValue<'tcx>,
) -> EmptySinglePair<Value> {
match get_pass_mode(fx.tcx, arg.layout()) {
PassMode::NoPass => Empty,
PassMode::ByVal(_) => Single(arg.load_scalar(fx)),
PassMode::ByValPair(_, _) => {
arg_abi: &ArgAbi<'tcx, Ty<'tcx>>,
) -> SmallVec<[Value; 2]> {
assert_assignable(fx, arg.layout().ty, arg_abi.layout.ty);
match arg_abi.mode {
PassMode::Ignore => smallvec![],
PassMode::Direct(_) => smallvec![arg.load_scalar(fx)],
PassMode::Pair(_, _) => {
let (a, b) = arg.load_scalar_pair(fx);
Pair(a, b)
smallvec![a, b]
}
PassMode::ByRef { size: _ } => match arg.force_stack(fx) {
(ptr, None) => Single(ptr.get_addr(fx)),
(ptr, Some(meta)) => Pair(ptr.get_addr(fx), meta),
PassMode::Cast(cast) => to_casted_value(fx, arg, cast),
PassMode::Indirect { .. } => match arg.force_stack(fx) {
(ptr, None) => smallvec![ptr.get_addr(fx)],
(ptr, Some(meta)) => smallvec![ptr.get_addr(fx), meta],
},
}
}
@ -143,20 +282,23 @@ pub(super) fn adjust_arg_for_abi<'tcx>(
/// as necessary.
pub(super) fn cvalue_for_param<'tcx>(
fx: &mut FunctionCx<'_, 'tcx, impl Module>,
start_block: Block,
#[cfg_attr(not(debug_assertions), allow(unused_variables))] local: Option<mir::Local>,
#[cfg_attr(not(debug_assertions), allow(unused_variables))] local_field: Option<usize>,
arg_ty: Ty<'tcx>,
arg_abi: &ArgAbi<'tcx, Ty<'tcx>>,
block_params_iter: &mut impl Iterator<Item = Value>,
) -> Option<CValue<'tcx>> {
let layout = fx.layout_of(arg_ty);
let pass_mode = get_pass_mode(fx.tcx, layout);
if let PassMode::NoPass = pass_mode {
return None;
}
let clif_types = pass_mode.get_param_ty(fx.tcx);
let block_params = clif_types.map(|t| fx.bcx.append_block_param(start_block, t));
let block_params = arg_abi
.get_abi_param(fx.tcx)
.into_iter()
.map(|abi_param| {
let block_param = block_params_iter.next().unwrap();
assert_eq!(
fx.bcx.func.dfg.value_type(block_param),
abi_param.value_type
);
block_param
})
.collect::<SmallVec<[_; 2]>>();
#[cfg(debug_assertions)]
crate::abi::comments::add_arg_comment(
@ -164,25 +306,48 @@ pub(super) fn cvalue_for_param<'tcx>(
"arg",
local,
local_field,
block_params,
pass_mode,
arg_ty,
&block_params,
arg_abi.mode,
arg_abi.layout,
);
match pass_mode {
PassMode::NoPass => unreachable!(),
PassMode::ByVal(_) => Some(CValue::by_val(block_params.assert_single(), layout)),
PassMode::ByValPair(_, _) => {
let (a, b) = block_params.assert_pair();
Some(CValue::by_val_pair(a, b, layout))
match arg_abi.mode {
PassMode::Ignore => None,
PassMode::Direct(_) => {
assert_eq!(block_params.len(), 1, "{:?}", block_params);
Some(CValue::by_val(block_params[0], arg_abi.layout))
}
PassMode::ByRef { size: Some(_) } => Some(CValue::by_ref(
Pointer::new(block_params.assert_single()),
layout,
)),
PassMode::ByRef { size: None } => {
let (ptr, meta) = block_params.assert_pair();
Some(CValue::by_ref_unsized(Pointer::new(ptr), meta, layout))
PassMode::Pair(_, _) => {
assert_eq!(block_params.len(), 2, "{:?}", block_params);
Some(CValue::by_val_pair(
block_params[0],
block_params[1],
arg_abi.layout,
))
}
PassMode::Cast(cast) => Some(from_casted_value(fx, &block_params, arg_abi.layout, cast)),
PassMode::Indirect {
attrs: _,
extra_attrs: None,
on_stack: _,
} => {
assert_eq!(block_params.len(), 1, "{:?}", block_params);
Some(CValue::by_ref(
Pointer::new(block_params[0]),
arg_abi.layout,
))
}
PassMode::Indirect {
attrs: _,
extra_attrs: Some(_),
on_stack: _,
} => {
assert_eq!(block_params.len(), 2, "{:?}", block_params);
Some(CValue::by_ref_unsized(
Pointer::new(block_params[0]),
block_params[1],
arg_abi.layout,
))
}
}
}


@ -1,21 +1,57 @@
//! Return value handling
use crate::abi::pass_mode::*;
use crate::prelude::*;
fn return_layout<'a, 'tcx>(fx: &mut FunctionCx<'a, 'tcx, impl Module>) -> TyAndLayout<'tcx> {
fx.layout_of(fx.monomorphize(&fx.mir.local_decls[RETURN_PLACE].ty))
}
use rustc_middle::ty::layout::FnAbiExt;
use rustc_target::abi::call::{ArgAbi, FnAbi, PassMode};
use smallvec::{smallvec, SmallVec};
/// Can the given type be returned into an ssa var or does it need to be returned on the stack.
pub(crate) fn can_return_to_ssa_var<'tcx>(
tcx: TyCtxt<'tcx>,
dest_layout: TyAndLayout<'tcx>,
fx: &FunctionCx<'_, 'tcx, impl Module>,
func: &mir::Operand<'tcx>,
args: &[mir::Operand<'tcx>],
) -> bool {
match get_pass_mode(tcx, dest_layout) {
PassMode::NoPass | PassMode::ByVal(_) | PassMode::ByValPair(_, _) => true,
// FIXME Make it possible to return ByRef to an ssa var.
PassMode::ByRef { size: _ } => false,
let fn_ty = fx.monomorphize(func.ty(fx.mir, fx.tcx));
let fn_sig = fx
.tcx
.normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), fn_ty.fn_sig(fx.tcx));
// Handle special calls like intrinsics and empty drop glue.
let instance = if let ty::FnDef(def_id, substs) = *fn_ty.kind() {
let instance = ty::Instance::resolve(fx.tcx, ty::ParamEnv::reveal_all(), def_id, substs)
.unwrap()
.unwrap()
.polymorphize(fx.tcx);
match instance.def {
InstanceDef::Intrinsic(_) | InstanceDef::DropGlue(_, _) => {
return true;
}
_ => Some(instance),
}
} else {
None
};
let extra_args = &args[fn_sig.inputs().len()..];
let extra_args = extra_args
.iter()
.map(|op_arg| fx.monomorphize(op_arg.ty(fx.mir, fx.tcx)))
.collect::<Vec<_>>();
let fn_abi = if let Some(instance) = instance {
FnAbi::of_instance(&RevealAllLayoutCx(fx.tcx), instance, &extra_args)
} else {
FnAbi::of_fn_ptr(
&RevealAllLayoutCx(fx.tcx),
fn_ty.fn_sig(fx.tcx),
&extra_args,
)
};
match fn_abi.ret.mode {
PassMode::Ignore | PassMode::Direct(_) | PassMode::Pair(_, _) => true,
// FIXME Make it possible to return Cast and Indirect to an ssa var.
PassMode::Cast(_) | PassMode::Indirect { .. } => false,
}
}
@ -24,27 +60,45 @@ pub(crate) fn can_return_to_ssa_var<'tcx>(
pub(super) fn codegen_return_param<'tcx>(
fx: &mut FunctionCx<'_, 'tcx, impl Module>,
ssa_analyzed: &rustc_index::vec::IndexVec<Local, crate::analyze::SsaKind>,
start_block: Block,
block_params_iter: &mut impl Iterator<Item = Value>,
) -> CPlace<'tcx> {
let ret_layout = return_layout(fx);
let ret_pass_mode = get_pass_mode(fx.tcx, ret_layout);
let (ret_place, ret_param) = match ret_pass_mode {
PassMode::NoPass => (CPlace::no_place(ret_layout), Empty),
PassMode::ByVal(_) | PassMode::ByValPair(_, _) => {
let (ret_place, ret_param): (_, SmallVec<[_; 2]>) = match fx.fn_abi.as_ref().unwrap().ret.mode {
PassMode::Ignore => (
CPlace::no_place(fx.fn_abi.as_ref().unwrap().ret.layout),
smallvec![],
),
PassMode::Direct(_) | PassMode::Pair(_, _) | PassMode::Cast(_) => {
let is_ssa = ssa_analyzed[RETURN_PLACE] == crate::analyze::SsaKind::Ssa;
(
super::make_local_place(fx, RETURN_PLACE, ret_layout, is_ssa),
Empty,
super::make_local_place(
fx,
RETURN_PLACE,
fx.fn_abi.as_ref().unwrap().ret.layout,
is_ssa,
),
smallvec![],
)
}
PassMode::ByRef { size: Some(_) } => {
let ret_param = fx.bcx.append_block_param(start_block, fx.pointer_type);
PassMode::Indirect {
attrs: _,
extra_attrs: None,
on_stack: _,
} => {
let ret_param = block_params_iter.next().unwrap();
assert_eq!(fx.bcx.func.dfg.value_type(ret_param), pointer_ty(fx.tcx));
(
CPlace::for_ptr(Pointer::new(ret_param), ret_layout),
Single(ret_param),
CPlace::for_ptr(
Pointer::new(ret_param),
fx.fn_abi.as_ref().unwrap().ret.layout,
),
smallvec![ret_param],
)
}
PassMode::ByRef { size: None } => todo!(),
PassMode::Indirect {
attrs: _,
extra_attrs: Some(_),
on_stack: _,
} => unreachable!("unsized return value"),
};
#[cfg(not(debug_assertions))]
@ -56,9 +110,9 @@ pub(super) fn codegen_return_param<'tcx>(
"ret",
Some(RETURN_PLACE),
None,
ret_param,
ret_pass_mode,
ret_layout.ty,
&ret_param,
fx.fn_abi.as_ref().unwrap().ret.mode,
fx.fn_abi.as_ref().unwrap().ret.layout,
);
ret_place
@ -68,42 +122,71 @@ pub(super) fn codegen_return_param<'tcx>(
/// returns the call return value(s) if any are written to the correct place.
pub(super) fn codegen_with_call_return_arg<'tcx, M: Module, T>(
fx: &mut FunctionCx<'_, 'tcx, M>,
fn_sig: FnSig<'tcx>,
ret_arg_abi: &ArgAbi<'tcx, Ty<'tcx>>,
ret_place: Option<CPlace<'tcx>>,
f: impl FnOnce(&mut FunctionCx<'_, 'tcx, M>, Option<Value>) -> (Inst, T),
) -> (Inst, T) {
let ret_layout = fx.layout_of(fn_sig.output());
let output_pass_mode = get_pass_mode(fx.tcx, ret_layout);
let return_ptr = match output_pass_mode {
PassMode::NoPass => None,
PassMode::ByRef { size: Some(_) } => match ret_place {
let return_ptr = match ret_arg_abi.mode {
PassMode::Ignore => None,
PassMode::Indirect {
attrs: _,
extra_attrs: None,
on_stack: _,
} => match ret_place {
Some(ret_place) => Some(ret_place.to_ptr().get_addr(fx)),
None => Some(fx.bcx.ins().iconst(fx.pointer_type, 43)), // FIXME allocate temp stack slot
},
PassMode::ByRef { size: None } => todo!(),
PassMode::ByVal(_) | PassMode::ByValPair(_, _) => None,
PassMode::Indirect {
attrs: _,
extra_attrs: Some(_),
on_stack: _,
} => unreachable!("unsized return value"),
PassMode::Direct(_) | PassMode::Pair(_, _) | PassMode::Cast(_) => None,
};
let (call_inst, meta) = f(fx, return_ptr);
match output_pass_mode {
PassMode::NoPass => {}
PassMode::ByVal(_) => {
match ret_arg_abi.mode {
PassMode::Ignore => {}
PassMode::Direct(_) => {
if let Some(ret_place) = ret_place {
let ret_val = fx.bcx.inst_results(call_inst)[0];
ret_place.write_cvalue(fx, CValue::by_val(ret_val, ret_layout));
ret_place.write_cvalue(fx, CValue::by_val(ret_val, ret_arg_abi.layout));
}
}
PassMode::ByValPair(_, _) => {
PassMode::Pair(_, _) => {
if let Some(ret_place) = ret_place {
let ret_val_a = fx.bcx.inst_results(call_inst)[0];
let ret_val_b = fx.bcx.inst_results(call_inst)[1];
ret_place.write_cvalue(fx, CValue::by_val_pair(ret_val_a, ret_val_b, ret_layout));
ret_place.write_cvalue(
fx,
CValue::by_val_pair(ret_val_a, ret_val_b, ret_arg_abi.layout),
);
}
}
PassMode::ByRef { size: Some(_) } => {}
PassMode::ByRef { size: None } => todo!(),
PassMode::Cast(cast) => {
if let Some(ret_place) = ret_place {
let results = fx
.bcx
.inst_results(call_inst)
.into_iter()
.copied()
.collect::<SmallVec<[Value; 2]>>();
let result =
super::pass_mode::from_casted_value(fx, &results, ret_place.layout(), cast);
ret_place.write_cvalue(fx, result);
}
}
PassMode::Indirect {
attrs: _,
extra_attrs: None,
on_stack: _,
} => {}
PassMode::Indirect {
attrs: _,
extra_attrs: Some(_),
on_stack: _,
} => unreachable!("unsized return value"),
}
(call_inst, meta)
@ -111,20 +194,35 @@ pub(super) fn codegen_with_call_return_arg<'tcx, M: Module, T>(
/// Codegen a return instruction with the right return value(s) if any.
pub(crate) fn codegen_return(fx: &mut FunctionCx<'_, '_, impl Module>) {
match get_pass_mode(fx.tcx, return_layout(fx)) {
PassMode::NoPass | PassMode::ByRef { size: Some(_) } => {
match fx.fn_abi.as_ref().unwrap().ret.mode {
PassMode::Ignore
| PassMode::Indirect {
attrs: _,
extra_attrs: None,
on_stack: _,
} => {
fx.bcx.ins().return_(&[]);
}
PassMode::ByRef { size: None } => todo!(),
PassMode::ByVal(_) => {
PassMode::Indirect {
attrs: _,
extra_attrs: Some(_),
on_stack: _,
} => unreachable!("unsized return value"),
PassMode::Direct(_) => {
let place = fx.get_local_place(RETURN_PLACE);
let ret_val = place.to_cvalue(fx).load_scalar(fx);
fx.bcx.ins().return_(&[ret_val]);
}
PassMode::ByValPair(_, _) => {
PassMode::Pair(_, _) => {
let place = fx.get_local_place(RETURN_PLACE);
let (ret_val_a, ret_val_b) = place.to_cvalue(fx).load_scalar_pair(fx);
fx.bcx.ins().return_(&[ret_val_a, ret_val_b]);
}
PassMode::Cast(cast) => {
let place = fx.get_local_place(RETURN_PLACE);
let ret_val = place.to_cvalue(fx);
let ret_vals = super::pass_mode::to_casted_value(fx, ret_val, cast);
fx.bcx.ins().return_(&ret_vals);
}
}
}


@ -40,11 +40,14 @@ pub(crate) fn analyze(fx: &FunctionCx<'_, '_, impl Module>) -> IndexVec<Local, S
}
match &bb.terminator().kind {
TerminatorKind::Call { destination, .. } => {
TerminatorKind::Call {
destination,
func,
args,
..
} => {
if let Some((dest_place, _dest_bb)) = destination {
let dest_layout = fx
.layout_of(fx.monomorphize(&dest_place.ty(&fx.mir.local_decls, fx.tcx).ty));
if !crate::abi::can_return_to_ssa_var(fx.tcx, dest_layout) {
if !crate::abi::can_return_to_ssa_var(fx, func, args) {
not_ssa(&mut flag_map, dest_place.local)
}
}


@ -2,6 +2,8 @@
use rustc_index::vec::IndexVec;
use rustc_middle::ty::adjustment::PointerCast;
use rustc_middle::ty::layout::FnAbiExt;
use rustc_target::abi::call::FnAbi;
use crate::prelude::*;
@ -19,7 +21,8 @@ pub(crate) fn codegen_fn<'tcx>(
let mir = tcx.instance_mir(instance.def);
// Declare function
let (name, sig) = get_function_name_and_sig(tcx, cx.module.isa().triple(), instance, false);
let name = tcx.symbol_name(instance).name.to_string();
let sig = get_function_sig(tcx, cx.module.isa().triple(), instance);
let func_id = cx.module.declare_function(&name, linkage, &sig).unwrap();
cx.cached_context.clear();
@ -50,6 +53,7 @@ pub(crate) fn codegen_fn<'tcx>(
instance,
mir,
fn_abi: Some(FnAbi::of_instance(&RevealAllLayoutCx(tcx), instance, &[])),
bcx,
block_map,
@ -1056,7 +1060,11 @@ pub(crate) fn codegen_panic_inner<'tcx>(
fx.lib_call(
&*symbol_name,
vec![fx.pointer_type, fx.pointer_type, fx.pointer_type],
vec![
AbiParam::new(fx.pointer_type),
AbiParam::new(fx.pointer_type),
AbiParam::new(fx.pointer_type),
],
vec![],
args,
);


@ -1,5 +1,7 @@
//! Replaces 128-bit operators with lang item calls where necessary
use cranelift_codegen::ir::ArgumentPurpose;
use crate::prelude::*;
pub(crate) fn maybe_codegen<'tcx>(
@ -24,41 +26,41 @@ pub(crate) fn maybe_codegen<'tcx>(
None
}
BinOp::Add | BinOp::Sub if !checked => None,
BinOp::Add => {
let out_ty = fx.tcx.mk_tup([lhs.layout().ty, fx.tcx.types.bool].iter());
return Some(if is_signed {
fx.easy_call("__rust_i128_addo", &[lhs, rhs], out_ty)
BinOp::Mul if !checked => {
let val_ty = if is_signed {
fx.tcx.types.i128
} else {
fx.easy_call("__rust_u128_addo", &[lhs, rhs], out_ty)
});
fx.tcx.types.u128
};
Some(fx.easy_call("__multi3", &[lhs, rhs], val_ty))
}
BinOp::Sub => {
BinOp::Add | BinOp::Sub | BinOp::Mul => {
assert!(checked);
let out_ty = fx.tcx.mk_tup([lhs.layout().ty, fx.tcx.types.bool].iter());
return Some(if is_signed {
fx.easy_call("__rust_i128_subo", &[lhs, rhs], out_ty)
} else {
fx.easy_call("__rust_u128_subo", &[lhs, rhs], out_ty)
});
let out_place = CPlace::new_stack_slot(fx, fx.layout_of(out_ty));
let param_types = vec![
AbiParam::special(pointer_ty(fx.tcx), ArgumentPurpose::StructReturn),
AbiParam::new(types::I128),
AbiParam::new(types::I128),
];
let args = [
out_place.to_ptr().get_addr(fx),
lhs.load_scalar(fx),
rhs.load_scalar(fx),
];
let name = match (bin_op, is_signed) {
(BinOp::Add, false) => "__rust_u128_addo",
(BinOp::Add, true) => "__rust_i128_addo",
(BinOp::Sub, false) => "__rust_u128_subo",
(BinOp::Sub, true) => "__rust_i128_subo",
(BinOp::Mul, false) => "__rust_u128_mulo",
(BinOp::Mul, true) => "__rust_i128_mulo",
_ => unreachable!(),
};
fx.lib_call(name, param_types, vec![], &args);
Some(out_place.to_cvalue(fx))
}
BinOp::Offset => unreachable!("offset should only be used on pointers, not 128bit ints"),
BinOp::Mul => {
let res = if checked {
let out_ty = fx.tcx.mk_tup([lhs.layout().ty, fx.tcx.types.bool].iter());
if is_signed {
fx.easy_call("__rust_i128_mulo", &[lhs, rhs], out_ty)
} else {
fx.easy_call("__rust_u128_mulo", &[lhs, rhs], out_ty)
}
} else {
let val_ty = if is_signed {
fx.tcx.types.i128
} else {
fx.tcx.types.u128
};
fx.easy_call("__multi3", &[lhs, rhs], val_ty)
};
Some(res)
}
BinOp::Div => {
assert!(!checked);
if is_signed {
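
The checked 128-bit cases now go through `fx.lib_call` with a StructReturn pointer, and the `__rust_*_o` helpers fill that out place with a (value, overflowed) pair. A sketch of just that pair semantics using the standard library's overflowing ops (this is not the compiler-builtins implementation):

// Hypothetical wrappers showing the (value, overflowed) shape the out place
// receives; the real work is done by the __rust_u128_addo-style lib calls.
fn u128_addo(lhs: u128, rhs: u128) -> (u128, bool) {
    lhs.overflowing_add(rhs)
}

fn i128_mulo(lhs: i128, rhs: i128) -> (i128, bool) {
    lhs.overflowing_mul(rhs)
}

fn main() {
    assert_eq!(u128_addo(1, 2), (3, false));
    assert_eq!(u128_addo(u128::MAX, 1), (0, true));
    assert_eq!(i128_mulo(i128::MAX, 2), (-2, true));
}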


@ -1,4 +1,5 @@
use rustc_index::vec::IndexVec;
use rustc_target::abi::call::FnAbi;
use rustc_target::abi::{Integer, Primitive};
use rustc_target::spec::{HasTargetSpec, Target};
@ -294,6 +295,7 @@ pub(crate) struct FunctionCx<'clif, 'tcx, M: Module> {
pub(crate) instance: Instance<'tcx>,
pub(crate) mir: &'tcx Body<'tcx>,
pub(crate) fn_abi: Option<FnAbi<'tcx, Ty<'tcx>>>,
pub(crate) bcx: FunctionBuilder<'clif>,
pub(crate) block_map: IndexVec<BasicBlock, Block>,
@ -319,16 +321,7 @@ impl<'tcx, M: Module> LayoutOf for FunctionCx<'_, 'tcx, M> {
type TyAndLayout = TyAndLayout<'tcx>;
fn layout_of(&self, ty: Ty<'tcx>) -> TyAndLayout<'tcx> {
assert!(!ty.still_further_specializable());
self.tcx
.layout_of(ParamEnv::reveal_all().and(&ty))
.unwrap_or_else(|e| {
if let layout::LayoutError::SizeOverflow(_) = e {
self.tcx.sess.fatal(&e.to_string())
} else {
bug!("failed to get layout for `{}`: {}", ty, e)
}
})
RevealAllLayoutCx(self.tcx).layout_of(ty)
}
}
@ -442,3 +435,47 @@ impl<'tcx, M: Module> FunctionCx<'_, 'tcx, M> {
self.bcx.ins().global_value(self.pointer_type, local_msg_id)
}
}
pub(crate) struct RevealAllLayoutCx<'tcx>(pub(crate) TyCtxt<'tcx>);
impl<'tcx> LayoutOf for RevealAllLayoutCx<'tcx> {
type Ty = Ty<'tcx>;
type TyAndLayout = TyAndLayout<'tcx>;
fn layout_of(&self, ty: Ty<'tcx>) -> TyAndLayout<'tcx> {
assert!(!ty.still_further_specializable());
self.0
.layout_of(ParamEnv::reveal_all().and(&ty))
.unwrap_or_else(|e| {
if let layout::LayoutError::SizeOverflow(_) = e {
self.0.sess.fatal(&e.to_string())
} else {
bug!("failed to get layout for `{}`: {}", ty, e)
}
})
}
}
impl<'tcx> layout::HasTyCtxt<'tcx> for RevealAllLayoutCx<'tcx> {
fn tcx<'b>(&'b self) -> TyCtxt<'tcx> {
self.0
}
}
impl<'tcx> rustc_target::abi::HasDataLayout for RevealAllLayoutCx<'tcx> {
fn data_layout(&self) -> &rustc_target::abi::TargetDataLayout {
&self.0.data_layout
}
}
impl<'tcx> layout::HasParamEnv<'tcx> for RevealAllLayoutCx<'tcx> {
fn param_env(&self) -> ParamEnv<'tcx> {
ParamEnv::reveal_all()
}
}
impl<'tcx> HasTargetSpec for RevealAllLayoutCx<'tcx> {
fn target_spec(&self) -> &Target {
&self.0.sess.target
}
}


@ -156,12 +156,8 @@ extern "C" fn __clif_jit_fn(instance_ptr: *const Instance<'static>) -> *const u8
let jit_module = jit_module.as_mut().unwrap();
let mut cx = crate::CodegenCx::new(tcx, jit_module, false, false);
let (name, sig) = crate::abi::get_function_name_and_sig(
tcx,
cx.module.isa().triple(),
instance,
true,
);
let name = tcx.symbol_name(instance).name.to_string();
let sig = crate::abi::get_function_sig(tcx, cx.module.isa().triple(), instance);
let func_id = cx
.module
.declare_function(&name, Linkage::Export, &sig)
@ -246,8 +242,8 @@ pub(super) fn codegen_shim<'tcx>(cx: &mut CodegenCx<'tcx, impl Module>, inst: In
let pointer_type = cx.module.target_config().pointer_type();
let (name, sig) =
crate::abi::get_function_name_and_sig(tcx, cx.module.isa().triple(), inst, true);
let name = tcx.symbol_name(inst).name.to_string();
let sig = crate::abi::get_function_sig(tcx, cx.module.isa().triple(), inst);
let func_id = cx
.module
.declare_function(&name, Linkage::Export, &sig)


@ -50,12 +50,9 @@ fn predefine_mono_items<'tcx>(
for &(mono_item, (linkage, visibility)) in mono_items {
match mono_item {
MonoItem::Fn(instance) => {
let (name, sig) = get_function_name_and_sig(
cx.tcx,
cx.module.isa().triple(),
instance,
false,
);
let name = cx.tcx.symbol_name(instance).name.to_string();
let _inst_guard = crate::PrintOnPanic(|| format!("{:?} {}", instance, name));
let sig = get_function_sig(cx.tcx, cx.module.isa().triple(), instance);
let linkage = crate::linkage::get_clif_linkage(mono_item, linkage, visibility);
cx.module.declare_function(&name, linkage, &sig).unwrap();
}


@ -90,7 +90,7 @@ mod prelude {
pub(crate) use rustc_middle::mir::{self, *};
pub(crate) use rustc_middle::ty::layout::{self, TyAndLayout};
pub(crate) use rustc_middle::ty::{
self, FnSig, Instance, InstanceDef, ParamEnv, Ty, TyCtxt, TypeAndMut, TypeFoldable,
self, Instance, InstanceDef, ParamEnv, Ty, TyCtxt, TypeAndMut, TypeFoldable,
};
pub(crate) use rustc_target::abi::{Abi, LayoutOf, Scalar, Size, VariantIdx};


@ -69,8 +69,8 @@ pub(crate) fn maybe_create_entry_wrapper(
let instance = Instance::mono(tcx, rust_main_def_id).polymorphize(tcx);
let (main_name, main_sig) =
get_function_name_and_sig(tcx, m.isa().triple(), instance, false);
let main_name = tcx.symbol_name(instance).name.to_string();
let main_sig = get_function_sig(tcx, m.isa().triple(), instance);
let main_func_id = m
.declare_function(&main_name, Linkage::Import, &main_sig)
.unwrap();


@ -61,7 +61,9 @@ use cranelift_codegen::{
write::{FuncWriter, PlainWriter},
};
use rustc_middle::ty::layout::FnAbiExt;
use rustc_session::config::OutputType;
use rustc_target::abi::call::FnAbi;
use crate::prelude::*;
@ -78,11 +80,8 @@ impl CommentWriter {
format!("symbol {}", tcx.symbol_name(instance).name),
format!("instance {:?}", instance),
format!(
"sig {:?}",
tcx.normalize_erasing_late_bound_regions(
ParamEnv::reveal_all(),
crate::abi::fn_sig_for_fn_abi(tcx, instance)
)
"abi {:?}",
FnAbi::of_instance(&RevealAllLayoutCx(tcx), instance, &[])
),
String::new(),
]


@ -334,7 +334,9 @@ impl<'tcx> CPlace<'tcx> {
let stack_slot = fx.bcx.create_stack_slot(StackSlotData {
kind: StackSlotKind::ExplicitSlot,
size: u32::try_from(layout.size.bytes()).unwrap(),
// FIXME Don't force the size to a multiple of 16 bytes once Cranelift gets a way to
// specify stack slot alignment.
size: (u32::try_from(layout.size.bytes()).unwrap() + 15) / 16 * 16,
offset: None,
});
CPlace {
@ -450,64 +452,6 @@ impl<'tcx> CPlace<'tcx> {
fx: &mut FunctionCx<'_, 'tcx, impl Module>,
from: CValue<'tcx>,
) {
fn assert_assignable<'tcx>(
fx: &FunctionCx<'_, 'tcx, impl Module>,
from_ty: Ty<'tcx>,
to_ty: Ty<'tcx>,
) {
match (from_ty.kind(), to_ty.kind()) {
(ty::Ref(_, a, _), ty::Ref(_, b, _))
| (
ty::RawPtr(TypeAndMut { ty: a, mutbl: _ }),
ty::RawPtr(TypeAndMut { ty: b, mutbl: _ }),
) => {
assert_assignable(fx, a, b);
}
(ty::FnPtr(_), ty::FnPtr(_)) => {
let from_sig = fx.tcx.normalize_erasing_late_bound_regions(
ParamEnv::reveal_all(),
from_ty.fn_sig(fx.tcx),
);
let to_sig = fx.tcx.normalize_erasing_late_bound_regions(
ParamEnv::reveal_all(),
to_ty.fn_sig(fx.tcx),
);
assert_eq!(
from_sig, to_sig,
"Can't write fn ptr with incompatible sig {:?} to place with sig {:?}\n\n{:#?}",
from_sig, to_sig, fx,
);
// fn(&T) -> for<'l> fn(&'l T) is allowed
}
(&ty::Dynamic(from_traits, _), &ty::Dynamic(to_traits, _)) => {
for (from, to) in from_traits.iter().zip(to_traits) {
let from = fx
.tcx
.normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), from);
let to = fx
.tcx
.normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), to);
assert_eq!(
from, to,
"Can't write trait object of incompatible traits {:?} to place with traits {:?}\n\n{:#?}",
from_traits, to_traits, fx,
);
}
// dyn for<'r> Trait<'r> -> dyn Trait<'_> is allowed
}
_ => {
assert_eq!(
from_ty,
to_ty,
"Can't write value with incompatible type {:?} to place with type {:?}\n\n{:#?}",
from_ty,
to_ty,
fx,
);
}
}
}
assert_assignable(fx, from.layout().ty, self.layout().ty);
self.write_cvalue_maybe_transmute(fx, from, "write_cvalue");
@ -556,7 +500,9 @@ impl<'tcx> CPlace<'tcx> {
// FIXME do something more efficient for transmutes between vectors and integers.
let stack_slot = fx.bcx.create_stack_slot(StackSlotData {
kind: StackSlotKind::ExplicitSlot,
size: src_ty.bytes(),
// FIXME Don't force the size to a multiple of 16 bytes once Cranelift gets a way to
// specify stack slot alignment.
size: (src_ty.bytes() + 15) / 16 * 16,
offset: None,
});
let ptr = Pointer::stack_slot(stack_slot);
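
The stack-slot sites touched in this PR round the slot size up with (size + 15) / 16 * 16 because this Cranelift version has no way to request a specific slot alignment. A quick check of that arithmetic (plain integer math, no Cranelift involved):

fn round_up_to_16(size: u32) -> u32 {
    (size + 15) / 16 * 16
}

fn main() {
    assert_eq!(round_up_to_16(0), 0);
    assert_eq!(round_up_to_16(1), 16);
    assert_eq!(round_up_to_16(12), 16); // e.g. the 12-byte cast example above
    assert_eq!(round_up_to_16(16), 16);
    assert_eq!(round_up_to_16(17), 32);
}
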
@ -794,3 +740,62 @@ impl<'tcx> CPlace<'tcx> {
}
}
}
#[track_caller]
pub(crate) fn assert_assignable<'tcx>(
fx: &FunctionCx<'_, 'tcx, impl Module>,
from_ty: Ty<'tcx>,
to_ty: Ty<'tcx>,
) {
match (from_ty.kind(), to_ty.kind()) {
(ty::Ref(_, a, _), ty::Ref(_, b, _))
| (
ty::RawPtr(TypeAndMut { ty: a, mutbl: _ }),
ty::RawPtr(TypeAndMut { ty: b, mutbl: _ }),
) => {
assert_assignable(fx, a, b);
}
(ty::Ref(_, a, _), ty::RawPtr(TypeAndMut { ty: b, mutbl: _ }))
| (ty::RawPtr(TypeAndMut { ty: a, mutbl: _ }), ty::Ref(_, b, _)) => {
assert_assignable(fx, a, b);
}
(ty::FnPtr(_), ty::FnPtr(_)) => {
let from_sig = fx.tcx.normalize_erasing_late_bound_regions(
ParamEnv::reveal_all(),
from_ty.fn_sig(fx.tcx),
);
let to_sig = fx
.tcx
.normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), to_ty.fn_sig(fx.tcx));
assert_eq!(
from_sig, to_sig,
"Can't write fn ptr with incompatible sig {:?} to place with sig {:?}\n\n{:#?}",
from_sig, to_sig, fx,
);
// fn(&T) -> for<'l> fn(&'l T) is allowed
}
(&ty::Dynamic(from_traits, _), &ty::Dynamic(to_traits, _)) => {
for (from, to) in from_traits.iter().zip(to_traits) {
let from = fx
.tcx
.normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), from);
let to = fx
.tcx
.normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), to);
assert_eq!(
from, to,
"Can't write trait object of incompatible traits {:?} to place with traits {:?}\n\n{:#?}",
from_traits, to_traits, fx,
);
}
// dyn for<'r> Trait<'r> -> dyn Trait<'_> is allowed
}
_ => {
assert_eq!(
from_ty, to_ty,
"Can't write value with incompatible type {:?} to place with type {:?}\n\n{:#?}",
from_ty, to_ty, fx,
);
}
}
}