trans: Apply all attributes through FnType.

Eduard Burtescu 2016-02-26 01:10:40 +02:00
parent de5f8244f2
commit 77f3484148
14 changed files with 240 additions and 388 deletions

View File

@ -212,21 +212,21 @@ impl Attributes {
self
}
pub fn apply_llfn(&self, idx: c_uint, llfn: ValueRef) {
pub fn apply_llfn(&self, idx: usize, llfn: ValueRef) {
unsafe {
LLVMAddFunctionAttribute(llfn, idx, self.regular.bits());
LLVMAddFunctionAttribute(llfn, idx as c_uint, self.regular.bits());
if self.dereferenceable_bytes != 0 {
LLVMAddDereferenceableAttr(llfn, idx,
LLVMAddDereferenceableAttr(llfn, idx as c_uint,
self.dereferenceable_bytes);
}
}
}
pub fn apply_callsite(&self, idx: c_uint, callsite: ValueRef) {
pub fn apply_callsite(&self, idx: usize, callsite: ValueRef) {
unsafe {
LLVMAddCallSiteAttribute(callsite, idx, self.regular.bits());
LLVMAddCallSiteAttribute(callsite, idx as c_uint, self.regular.bits());
if self.dereferenceable_bytes != 0 {
LLVMAddDereferenceableCallSiteAttr(callsite, idx,
LLVMAddDereferenceableCallSiteAttr(callsite, idx as c_uint,
self.dereferenceable_bytes);
}
}
@ -240,49 +240,6 @@ pub enum AttributeSet {
FunctionIndex = !0
}
pub struct AttrBuilder {
attrs: Vec<(usize, Attributes)>
}
impl AttrBuilder {
pub fn new() -> AttrBuilder {
AttrBuilder {
attrs: Vec::new()
}
}
pub fn arg(&mut self, idx: usize) -> &mut Attributes {
let mut found = None;
for (i, &(idx2, _)) in self.attrs.iter().enumerate() {
if idx == idx2 {
found = Some(i);
break;
}
}
let i = found.unwrap_or_else(|| {
self.attrs.push((idx, Attributes::default()));
self.attrs.len() - 1
});
&mut self.attrs[i].1
}
pub fn ret(&mut self) -> &mut Attributes {
self.arg(ReturnIndex as usize)
}
pub fn apply_llfn(&self, llfn: ValueRef) {
for &(idx, ref attr) in &self.attrs {
attr.apply_llfn(idx as c_uint, llfn);
}
}
pub fn apply_callsite(&self, callsite: ValueRef) {
for &(idx, ref attr) in &self.attrs {
attr.apply_callsite(idx as c_uint, callsite);
}
}
}
// enum for the LLVM IntPredicate type
#[derive(Copy, Clone)]
pub enum IntPredicate {
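For context on this first file: LLVM addresses attributes by integer index — 0 is the return value, 1..=N are the parameters, and !0 (all bits set) is the function itself — which is why `ReturnIndex`/`FunctionIndex` are plain enum values and why `apply_llfn`/`apply_callsite` can now take a `usize` and cast to `c_uint` only at the FFI boundary. The removed `AttrBuilder` kept a side table of `(index, Attributes)` pairs; after this commit each argument's attributes live on its `ArgType` inside `FnType` instead. Below is a minimal standalone sketch of that index convention; `Attributes`, `apply_llfn`, `ReturnIndex` and `FunctionIndex` mirror names from the diff, while `LlvmValue` is a made-up recorder standing in for the real LLVM FFI calls.

```rust
// Sketch only: the LLVM attribute-index convention assumed by the hunks above.
// `LlvmValue` is a hypothetical recorder replacing LLVMAddFunctionAttribute etc.

#[derive(Debug, Default)]
struct LlvmValue {
    applied: Vec<(u32, u64)>, // (attribute index, attribute bits)
}

#[derive(Clone, Copy, Default)]
struct Attributes {
    regular: u64, // stand-in for the real bitflags set
}

impl Attributes {
    fn set(mut self, bit: u64) -> Self {
        self.regular |= bit;
        self
    }

    fn apply_llfn(&self, idx: usize, llfn: &mut LlvmValue) {
        // The real code calls LLVMAddFunctionAttribute(llfn, idx as c_uint, bits).
        llfn.applied.push((idx as u32, self.regular));
    }
}

const RETURN_INDEX: usize = 0;    // attributes on the return value
const FUNCTION_INDEX: usize = !0; // wraps to c_uint::MAX after the cast

fn main() {
    const NO_ALIAS: u64 = 1 << 0;
    const COLD: u64 = 1 << 1;

    let mut llfn = LlvmValue::default();
    Attributes::default().set(COLD).apply_llfn(FUNCTION_INDEX, &mut llfn);
    Attributes::default().set(NO_ALIAS).apply_llfn(RETURN_INDEX, &mut llfn);
    Attributes::default().set(NO_ALIAS).apply_llfn(1, &mut llfn); // first parameter

    println!("{:?}", llfn.applied); // [(4294967295, 2), (0, 1), (1, 1)]
}
```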

View File

@ -8,7 +8,7 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use llvm;
use llvm::{self, ValueRef};
use trans::common::{return_type_is_void, type_is_fat_ptr};
use trans::context::CrateContext;
use trans::cabi_x86;
@ -24,6 +24,7 @@ use trans::machine::{llsize_of_alloc, llsize_of_real};
use trans::type_::Type;
use trans::type_of;
use rustc_front::hir;
use middle::ty::{self, Ty};
pub use syntax::abi::Abi;
@ -204,22 +205,102 @@ impl FnType {
}
};
let ret = match sig.output {
let mut ret = match sig.output {
ty::FnConverging(ret_ty) if !return_type_is_void(ccx, ret_ty) => {
arg_of(ret_ty)
}
_ => ArgType::new(Type::void(ccx), Type::void(ccx))
};
if let ty::FnConverging(ret_ty) = sig.output {
if !type_is_fat_ptr(ccx.tcx(), ret_ty) {
// The `noalias` attribute on the return value is useful to a
// function ptr caller.
if let ty::TyBox(_) = ret_ty.sty {
// `Box` pointer return values never alias because ownership
// is transferred
ret.attrs.set(llvm::Attribute::NoAlias);
}
// We can also mark the return value as `dereferenceable` in certain cases
match ret_ty.sty {
// These are not really pointers but pairs, (pointer, len)
ty::TyRef(_, ty::TypeAndMut { ty, .. }) |
ty::TyBox(ty) => {
let llty = type_of::sizing_type_of(ccx, ty);
let llsz = llsize_of_real(ccx, llty);
ret.attrs.set_dereferenceable(llsz);
}
_ => {}
}
}
}
let mut args = Vec::with_capacity(inputs.len() + extra_args.len());
// Handle safe Rust thin and fat pointers.
let rust_ptr_attrs = |ty: Ty<'tcx>, arg: &mut ArgType| match ty.sty {
// `Box` pointer parameters never alias because ownership is transferred
ty::TyBox(inner) => {
arg.attrs.set(llvm::Attribute::NoAlias);
Some(inner)
}
ty::TyRef(b, mt) => {
use middle::ty::{BrAnon, ReLateBound};
// `&mut` pointer parameters never alias other parameters, or mutable global data
//
// `&T` where `T` contains no `UnsafeCell<U>` is immutable, and can be marked as
// both `readonly` and `noalias`, as LLVM's definition of `noalias` is based solely
// on memory dependencies rather than pointer equality
let interior_unsafe = mt.ty.type_contents(ccx.tcx()).interior_unsafe();
if mt.mutbl != hir::MutMutable && !interior_unsafe {
arg.attrs.set(llvm::Attribute::NoAlias);
}
if mt.mutbl == hir::MutImmutable && !interior_unsafe {
arg.attrs.set(llvm::Attribute::ReadOnly);
}
// When a reference in an argument has no named lifetime, it's
// impossible for that reference to escape this function
// (returned or stored beyond the call by a closure).
if let ReLateBound(_, BrAnon(_)) = *b {
arg.attrs.set(llvm::Attribute::NoCapture);
}
Some(mt.ty)
}
_ => None
};
for ty in inputs.iter().chain(extra_args.iter()) {
let arg = arg_of(ty);
let mut arg = arg_of(ty);
if type_is_fat_ptr(ccx.tcx(), ty) {
let original = arg.original_ty.field_types();
let sizing = arg.ty.field_types();
args.extend(original.into_iter().zip(sizing)
.map(|(o, s)| ArgType::new(o, s)));
let original_tys = arg.original_ty.field_types();
let sizing_tys = arg.ty.field_types();
assert_eq!((original_tys.len(), sizing_tys.len()), (2, 2));
let mut data = ArgType::new(original_tys[0], sizing_tys[0]);
let mut info = ArgType::new(original_tys[1], sizing_tys[1]);
if let Some(inner) = rust_ptr_attrs(ty, &mut data) {
data.attrs.set(llvm::Attribute::NonNull);
if ccx.tcx().struct_tail(inner).is_trait() {
info.attrs.set(llvm::Attribute::NonNull);
}
}
args.push(data);
args.push(info);
} else {
if let Some(inner) = rust_ptr_attrs(ty, &mut arg) {
let llty = type_of::sizing_type_of(ccx, inner);
let llsz = llsize_of_real(ccx, llty);
arg.attrs.set_dereferenceable(llsz);
}
args.push(arg);
}
}
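The `rust_ptr_attrs` closure in this hunk is where Rust-level guarantees become LLVM attributes: `Box<T>` and `&T` without interior mutability are `noalias`, shared references without interior mutability are additionally `readonly`, and references with an anonymous late-bound lifetime are `nocapture`. Thin pointers to sized data then get `dereferenceable(size)`, while fat pointers are split into a `nonnull` data half and an info half that is `nonnull` only for trait objects; the return side gets the same treatment for `Box` and `&` returns. The following is a toy, self-contained model of those rules — the `Pointee` grammar, the boolean flags and the byte sizes are invented for illustration, and only the attribute names come from the diff.

```rust
// Sketch only: a freestanding model of the pointer-attribute rules in
// `rust_ptr_attrs` and the fat-pointer split above.

#[derive(Clone, Copy, Debug)]
enum Pointee {
    Sized(u64),  // a sized pointee with a known size in bytes
    Slice,       // unsized tail: fat pointer (data, length)
    TraitObject, // unsized tail: fat pointer (data, vtable)
}

#[derive(Debug, Default, Clone, Copy)]
struct Attrs {
    no_alias: bool,
    read_only: bool,
    no_capture: bool,
    non_null: bool,
    dereferenceable: u64,
}

// Attributes shared by `Box<T>`, `&T` and `&mut T` arguments, mirroring the
// conditions in the hunk (note: `&mut` does not get `noalias` there).
fn base_ptr_attrs(is_box: bool, mutable: bool, interior_unsafe: bool, anon_lifetime: bool) -> Attrs {
    let mut a = Attrs::default();
    if is_box || (!mutable && !interior_unsafe) {
        a.no_alias = true;
    }
    if !is_box && !mutable && !interior_unsafe {
        a.read_only = true;
    }
    if !is_box && anon_lifetime {
        a.no_capture = true; // a reference with no named lifetime cannot escape
    }
    a
}

// Thin pointers become one argument with `dereferenceable(size)`; fat pointers
// become two arguments, `nonnull` on the data half and on a vtable info half.
fn lower_ptr_arg(pointee: Pointee, mut base: Attrs) -> Vec<Attrs> {
    match pointee {
        Pointee::Sized(size) => {
            base.dereferenceable = size;
            vec![base]
        }
        Pointee::Slice | Pointee::TraitObject => {
            base.non_null = true;
            let mut info = Attrs::default();
            info.non_null = matches!(pointee, Pointee::TraitObject);
            vec![base, info]
        }
    }
}

fn main() {
    // `&[u8]` with an anonymous lifetime: noalias + readonly + nocapture,
    // nonnull data half, plain length half.
    let slice_ref = lower_ptr_arg(Pointee::Slice, base_ptr_attrs(false, false, false, true));
    println!("{:#?}", slice_ref);

    // `Box<u32>`: noalias + dereferenceable(4).
    let boxed = lower_ptr_arg(Pointee::Sized(4), base_ptr_attrs(true, false, false, false));
    println!("{:#?}", boxed);
}
```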
@ -327,18 +408,29 @@ impl FnType {
}
}
pub fn llvm_attrs(&self) -> llvm::AttrBuilder {
let mut attrs = llvm::AttrBuilder::new();
pub fn apply_attrs_llfn(&self, llfn: ValueRef) {
let mut i = if self.ret.is_indirect() { 1 } else { 0 };
*attrs.arg(i) = self.ret.attrs;
self.ret.attrs.apply_llfn(i, llfn);
i += 1;
for arg in &self.args {
if !arg.is_ignore() {
if arg.pad.is_some() { i += 1; }
*attrs.arg(i) = arg.attrs;
arg.attrs.apply_llfn(i, llfn);
i += 1;
}
}
}
pub fn apply_attrs_callsite(&self, callsite: ValueRef) {
let mut i = if self.ret.is_indirect() { 1 } else { 0 };
self.ret.attrs.apply_callsite(i, callsite);
i += 1;
for arg in &self.args {
if !arg.is_ignore() {
if arg.pad.is_some() { i += 1; }
arg.attrs.apply_callsite(i, callsite);
i += 1;
}
}
attrs
}
}
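`apply_attrs_llfn` and `apply_attrs_callsite` walk the same index sequence: start at 1 instead of 0 when the return value is passed through an `sret` out-pointer, skip ignored arguments, and burn an extra index for every padding argument. A small standalone sketch of just that index walk follows; `ArgSlot` and the argument names are invented, only the indexing logic mirrors the diff.

```rust
// Sketch only: the index assignment shared by apply_attrs_llfn and
// apply_attrs_callsite above.

struct ArgSlot {
    ignore: bool,
    pad: bool,
    name: &'static str,
}

fn attribute_indices(ret_indirect: bool, args: &[ArgSlot]) -> Vec<(usize, &'static str)> {
    let mut out = Vec::new();
    // With an sret return, index 1 is the out-pointer and gets the ret attrs.
    let mut i = if ret_indirect { 1 } else { 0 };
    out.push((i, "ret"));
    i += 1;
    for arg in args {
        if !arg.ignore {
            if arg.pad { i += 1; } // a padding argument occupies an index of its own
            out.push((i, arg.name));
            i += 1;
        }
    }
    out
}

fn main() {
    let args = [
        ArgSlot { ignore: false, pad: false, name: "a" },
        ArgSlot { ignore: true,  pad: false, name: "zst" }, // e.g. a zero-sized argument
        ArgSlot { ignore: false, pad: true,  name: "b" },
    ];
    // Prints [(1, "ret"), (2, "a"), (4, "b")]: index 3 is the padding slot.
    println!("{:?}", attribute_indices(true, &args));
}
```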

View File

@ -11,17 +11,10 @@
use libc::{c_uint, c_ulonglong};
use llvm::{self, ValueRef};
use middle::ty;
use middle::infer;
use session::config::NoDebugInfo;
pub use syntax::attr::InlineAttr;
use syntax::ast;
use rustc_front::hir;
use trans::abi::Abi;
use trans::common;
use trans::context::CrateContext;
use trans::machine;
use trans::type_of;
/// Mark LLVM function to use provided inline heuristic.
#[inline]
@ -111,174 +104,12 @@ pub fn from_fn_attrs(ccx: &CrateContext, attrs: &[ast::Attribute], llfn: ValueRe
for attr in attrs {
if attr.check_name("cold") {
llvm::Attributes::default().set(llvm::Attribute::Cold)
.apply_llfn(llvm::FunctionIndex as c_uint, llfn)
.apply_llfn(llvm::FunctionIndex as usize, llfn)
} else if attr.check_name("allocator") {
llvm::Attributes::default().set(llvm::Attribute::NoAlias)
.apply_llfn(llvm::ReturnIndex as c_uint, llfn)
.apply_llfn(llvm::ReturnIndex as usize, llfn)
} else if attr.check_name("unwind") {
unwind(llfn, true);
}
}
}
/// Composite function which converts function type into LLVM attributes for the function.
pub fn from_fn_type<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, fn_type: ty::Ty<'tcx>)
-> llvm::AttrBuilder {
use middle::ty::{BrAnon, ReLateBound};
let f = match fn_type.sty {
ty::TyFnDef(_, _, f) | ty::TyFnPtr(f) => f,
_ => unreachable!("expected fn type, found {:?}", fn_type)
};
let fn_sig = ccx.tcx().erase_late_bound_regions(&f.sig);
let fn_sig = infer::normalize_associated_type(ccx.tcx(), &fn_sig);
let mut attrs = llvm::AttrBuilder::new();
let ret_ty = fn_sig.output;
// These have an odd calling convention, so we need to manually
// unpack the input ty's
let input_tys = match fn_type.sty {
ty::TyFnDef(..) | ty::TyFnPtr(_) if f.abi == Abi::RustCall => {
let first = Some(fn_sig.inputs[0]).into_iter();
match fn_sig.inputs[1].sty {
ty::TyTuple(ref t_in) => {
first.chain(t_in.iter().cloned())
}
_ => ccx.sess().bug("expected tuple'd inputs")
}
}
_ => None.into_iter().chain(fn_sig.inputs.iter().cloned())
};
// Index 0 is the return value of the llvm func, so we start at 1
let mut idx = 1;
if let ty::FnConverging(ret_ty) = ret_ty {
// A function pointer is called without the declaration
// available, so we have to apply any attributes with ABI
// implications directly to the call instruction. Right now,
// the only attribute we need to worry about is `sret`.
if type_of::return_uses_outptr(ccx, ret_ty) {
let llret_sz = machine::llsize_of_real(ccx, type_of::type_of(ccx, ret_ty));
// The outptr can be noalias and nocapture because it's entirely
// invisible to the program. We also know it's nonnull as well
// as how many bytes we can dereference
attrs.arg(1).set(llvm::Attribute::StructRet)
.set(llvm::Attribute::NoAlias)
.set(llvm::Attribute::NoCapture)
.set_dereferenceable(llret_sz);
// Add one more since there's an outptr
idx += 1;
} else {
// The `noalias` attribute on the return value is useful to a
// function ptr caller.
match ret_ty.sty {
// `Box` pointer return values never alias because ownership
// is transferred
ty::TyBox(it) if common::type_is_sized(ccx.tcx(), it) => {
attrs.ret().set(llvm::Attribute::NoAlias);
}
_ => {}
}
// We can also mark the return value as `dereferenceable` in certain cases
match ret_ty.sty {
// These are not really pointers but pairs, (pointer, len)
ty::TyRef(_, ty::TypeAndMut { ty: inner, .. })
| ty::TyBox(inner) if common::type_is_sized(ccx.tcx(), inner) => {
let llret_sz = machine::llsize_of_real(ccx, type_of::type_of(ccx, inner));
attrs.ret().set_dereferenceable(llret_sz);
}
_ => {}
}
if let ty::TyBool = ret_ty.sty {
attrs.ret().set(llvm::Attribute::ZExt);
}
}
}
for t in input_tys {
match t.sty {
_ if type_of::arg_is_indirect(ccx, t) => {
let llarg_sz = machine::llsize_of_real(ccx, type_of::type_of(ccx, t));
// For non-immediate arguments the callee gets its own copy of
// the value on the stack, so there are no aliases. It's also
// program-invisible so can't possibly capture
attrs.arg(idx).set(llvm::Attribute::NoAlias)
.set(llvm::Attribute::NoCapture)
.set_dereferenceable(llarg_sz);
}
ty::TyBool => {
attrs.arg(idx).set(llvm::Attribute::ZExt);
}
// `Box` pointer parameters never alias because ownership is transferred
ty::TyBox(inner) => {
attrs.arg(idx).set(llvm::Attribute::NoAlias);
if common::type_is_sized(ccx.tcx(), inner) {
let llsz = machine::llsize_of_real(ccx, type_of::type_of(ccx, inner));
attrs.arg(idx).set_dereferenceable(llsz);
} else {
attrs.arg(idx).set(llvm::Attribute::NonNull);
if inner.is_trait() {
attrs.arg(idx + 1).set(llvm::Attribute::NonNull);
}
}
}
ty::TyRef(b, mt) => {
// `&mut` pointer parameters never alias other parameters, or mutable global data
//
// `&T` where `T` contains no `UnsafeCell<U>` is immutable, and can be marked as
// both `readonly` and `noalias`, as LLVM's definition of `noalias` is based solely
// on memory dependencies rather than pointer equality
let interior_unsafe = mt.ty.type_contents(ccx.tcx()).interior_unsafe();
if mt.mutbl != hir::MutMutable && !interior_unsafe {
attrs.arg(idx).set(llvm::Attribute::NoAlias);
}
if mt.mutbl == hir::MutImmutable && !interior_unsafe {
attrs.arg(idx).set(llvm::Attribute::ReadOnly);
}
// & pointer parameters are also never null and for sized types we also know
// exactly how many bytes we can dereference
if common::type_is_sized(ccx.tcx(), mt.ty) {
let llsz = machine::llsize_of_real(ccx, type_of::type_of(ccx, mt.ty));
attrs.arg(idx).set_dereferenceable(llsz);
} else {
attrs.arg(idx).set(llvm::Attribute::NonNull);
if mt.ty.is_trait() {
attrs.arg(idx + 1).set(llvm::Attribute::NonNull);
}
}
// When a reference in an argument has no named lifetime, it's
// impossible for that reference to escape this function
// (returned or stored beyond the call by a closure).
if let ReLateBound(_, BrAnon(_)) = *b {
attrs.arg(idx).set(llvm::Attribute::NoCapture);
}
}
_ => ()
}
if common::type_is_fat_ptr(ccx.tcx(), t) {
idx += 2;
} else {
idx += 1;
}
}
attrs
}
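One piece of the removed `from_fn_type` worth keeping in mind when reading the new `FnType`-based path: an indirect (`sret`) return is an out-pointer that the caller materializes and the program never observes, so it can carry `sret`, `noalias`, `nocapture`, and `dereferenceable` for the full size of the return type. A trivial standalone sketch of that attribute set; `OutPtrAttrs` and the example size are stand-ins, not rustc types.

```rust
// Sketch only: the attributes the removed code attached to an sret out-pointer.

#[derive(Debug)]
struct OutPtrAttrs {
    struct_ret: bool,
    no_alias: bool,
    no_capture: bool,
    dereferenceable: u64,
}

fn sret_outptr_attrs(ret_size_bytes: u64) -> OutPtrAttrs {
    OutPtrAttrs {
        struct_ret: true,  // LLVM `sret`: this argument is the return slot
        no_alias: true,    // invisible to the program, so nothing aliases it
        no_capture: true,  // and nothing can capture it either
        dereferenceable: ret_size_bytes,
    }
}

fn main() {
    // e.g. a 24-byte struct returned by value at the Rust level.
    println!("{:?}", sret_outptr_attrs(24));
}
```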

View File

@ -836,7 +836,6 @@ pub fn fail_if_zero_or_overflows<'blk, 'tcx>(cx: Block<'blk, 'tcx>,
pub fn invoke<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
llfn: ValueRef,
llargs: &[ValueRef],
fn_ty: Ty<'tcx>,
debug_loc: DebugLoc)
-> (ValueRef, Block<'blk, 'tcx>) {
let _icx = push_ctxt("invoke_");
@ -844,8 +843,6 @@ pub fn invoke<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
return (C_null(Type::i8(bcx.ccx())), bcx);
}
let attributes = attributes::from_fn_type(bcx.ccx(), fn_ty);
match bcx.opt_node_id {
None => {
debug!("invoke at ???");
@ -868,7 +865,6 @@ pub fn invoke<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
&llargs[..],
normal_bcx.llbb,
landing_pad,
Some(attributes),
debug_loc);
return (llresult, normal_bcx);
} else {
@ -877,7 +873,7 @@ pub fn invoke<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
debug!("arg: {:?}", Value(llarg));
}
let llresult = Call(bcx, llfn, &llargs[..], Some(attributes), debug_loc);
let llresult = Call(bcx, llfn, &llargs[..], debug_loc);
return (llresult, bcx);
}
}
@ -1105,7 +1101,6 @@ pub fn call_lifetime_start(cx: Block, ptr: ValueRef) {
Call(cx,
lifetime_start,
&[C_u64(ccx, size), ptr],
None,
DebugLoc::None);
})
}
@ -1116,7 +1111,6 @@ pub fn call_lifetime_end(cx: Block, ptr: ValueRef) {
Call(cx,
lifetime_end,
&[C_u64(ccx, size), ptr],
None,
DebugLoc::None);
})
}
@ -1147,7 +1141,6 @@ pub fn call_memcpy(cx: Block, dst: ValueRef, src: ValueRef, n_bytes: ValueRef, a
Call(cx,
memcpy,
&[dst_ptr, src_ptr, size, align, volatile],
None,
DebugLoc::None);
}
@ -1217,7 +1210,7 @@ pub fn call_memset<'bcx, 'tcx>(b: &Builder<'bcx, 'tcx>,
let intrinsic_key = format!("llvm.memset.p0i8.i{}", ptr_width);
let llintrinsicfn = ccx.get_intrinsic(&intrinsic_key);
let volatile = C_bool(ccx, volatile);
b.call(llintrinsicfn, &[ptr, fill_byte, size, align, volatile], None, None);
b.call(llintrinsicfn, &[ptr, fill_byte, size, align, volatile], None);
}
@ -1292,7 +1285,7 @@ pub fn alloca_dropped<'blk, 'tcx>(cx: Block<'blk, 'tcx>, ty: Ty<'tcx>, name: &st
// Block, which we do not have for `alloca_insert_pt`).
core_lifetime_emit(cx.ccx(), p, Lifetime::Start, |ccx, size, lifetime_start| {
let ptr = b.pointercast(p, Type::i8p(ccx));
b.call(lifetime_start, &[C_u64(ccx, size), ptr], None, None);
b.call(lifetime_start, &[C_u64(ccx, size), ptr], None);
});
memfill(&b, p, ty, adt::DTOR_DONE);
p

View File

@ -12,7 +12,7 @@
#![allow(non_snake_case)]
use llvm;
use llvm::{CallConv, AtomicBinOp, AtomicOrdering, SynchronizationScope, AsmDialect, AttrBuilder};
use llvm::{CallConv, AtomicBinOp, AtomicOrdering, SynchronizationScope, AsmDialect};
use llvm::{Opcode, IntPredicate, RealPredicate};
use llvm::{ValueRef, BasicBlockRef};
use trans::common::*;
@ -139,7 +139,6 @@ pub fn Invoke(cx: Block,
args: &[ValueRef],
then: BasicBlockRef,
catch: BasicBlockRef,
attributes: Option<AttrBuilder>,
debug_loc: DebugLoc)
-> ValueRef {
if cx.unreachable.get() {
@ -154,7 +153,7 @@ pub fn Invoke(cx: Block,
}).collect::<Vec<String>>().join(", "));
debug_loc.apply(cx.fcx);
let bundle = cx.lpad().and_then(|b| b.bundle());
B(cx).invoke(fn_, args, then, catch, bundle, attributes)
B(cx).invoke(fn_, args, then, catch, bundle)
}
pub fn Unreachable(cx: Block) {
@ -911,7 +910,6 @@ pub fn InlineAsmCall(cx: Block, asm: *const c_char, cons: *const c_char,
pub fn Call(cx: Block,
fn_: ValueRef,
args: &[ValueRef],
attributes: Option<AttrBuilder>,
debug_loc: DebugLoc)
-> ValueRef {
if cx.unreachable.get() {
@ -919,14 +917,13 @@ pub fn Call(cx: Block,
}
debug_loc.apply(cx.fcx);
let bundle = cx.lpad.get().and_then(|b| b.bundle());
B(cx).call(fn_, args, bundle, attributes)
B(cx).call(fn_, args, bundle)
}
pub fn CallWithConv(cx: Block,
fn_: ValueRef,
args: &[ValueRef],
conv: CallConv,
attributes: Option<AttrBuilder>,
debug_loc: DebugLoc)
-> ValueRef {
if cx.unreachable.get() {
@ -934,7 +931,7 @@ pub fn CallWithConv(cx: Block,
}
debug_loc.apply(cx.fcx);
let bundle = cx.lpad.get().and_then(|b| b.bundle());
B(cx).call_with_conv(fn_, args, conv, bundle, attributes)
B(cx).call_with_conv(fn_, args, conv, bundle)
}
pub fn AtomicFence(cx: Block, order: AtomicOrdering, scope: SynchronizationScope) {

View File

@ -11,7 +11,7 @@
#![allow(dead_code)] // FFI wrappers
use llvm;
use llvm::{CallConv, AtomicBinOp, AtomicOrdering, SynchronizationScope, AsmDialect, AttrBuilder};
use llvm::{CallConv, AtomicBinOp, AtomicOrdering, SynchronizationScope, AsmDialect};
use llvm::{Opcode, IntPredicate, RealPredicate, False, OperandBundleDef};
use llvm::{ValueRef, BasicBlockRef, BuilderRef, ModuleRef};
use trans::base;
@ -165,8 +165,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
args: &[ValueRef],
then: BasicBlockRef,
catch: BasicBlockRef,
bundle: Option<&OperandBundleDef>,
attributes: Option<AttrBuilder>)
bundle: Option<&OperandBundleDef>)
-> ValueRef {
self.count_insn("invoke");
@ -180,18 +179,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
let bundle = bundle.as_ref().map(|b| b.raw()).unwrap_or(0 as *mut _);
unsafe {
let v = llvm::LLVMRustBuildInvoke(self.llbuilder,
llfn,
args.as_ptr(),
args.len() as c_uint,
then,
catch,
bundle,
noname());
if let Some(a) = attributes {
a.apply_callsite(v);
}
v
llvm::LLVMRustBuildInvoke(self.llbuilder,
llfn,
args.as_ptr(),
args.len() as c_uint,
then,
catch,
bundle,
noname())
}
}
@ -775,7 +770,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
comment_text.as_ptr(), noname(), False,
False)
};
self.call(asm, &[], None, None);
self.call(asm, &[], None);
}
}
@ -800,13 +795,12 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
unsafe {
let v = llvm::LLVMInlineAsm(
fty.to_ref(), asm, cons, volatile, alignstack, dia as c_uint);
self.call(v, inputs, None, None)
self.call(v, inputs, None)
}
}
pub fn call(&self, llfn: ValueRef, args: &[ValueRef],
bundle: Option<&OperandBundleDef>,
attributes: Option<AttrBuilder>) -> ValueRef {
bundle: Option<&OperandBundleDef>) -> ValueRef {
self.count_insn("call");
debug!("Call {:?} with args ({})",
@ -844,22 +838,16 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
let bundle = bundle.as_ref().map(|b| b.raw()).unwrap_or(0 as *mut _);
unsafe {
let v = llvm::LLVMRustBuildCall(self.llbuilder, llfn, args.as_ptr(),
args.len() as c_uint, bundle,
noname());
if let Some(a) = attributes {
a.apply_callsite(v);
}
v
llvm::LLVMRustBuildCall(self.llbuilder, llfn, args.as_ptr(),
args.len() as c_uint, bundle, noname())
}
}
pub fn call_with_conv(&self, llfn: ValueRef, args: &[ValueRef],
conv: CallConv,
bundle: Option<&OperandBundleDef>,
attributes: Option<AttrBuilder>) -> ValueRef {
bundle: Option<&OperandBundleDef>) -> ValueRef {
self.count_insn("callwithconv");
let v = self.call(llfn, args, bundle, attributes);
let v = self.call(llfn, args, bundle);
llvm::SetInstructionCallConv(v, conv);
v
}

View File

@ -27,7 +27,7 @@ use middle::subst;
use middle::subst::{Substs};
use middle::traits;
use rustc::front::map as hir_map;
use trans::abi::Abi;
use trans::abi::{Abi, FnType};
use trans::adt;
use trans::attributes;
use trans::base;
@ -700,25 +700,31 @@ fn trans_call_inner<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
};
// Invoke the actual rust fn and update bcx/llresult.
let (llret, b) = base::invoke(bcx,
datum.val,
&llargs[..],
datum.ty,
debug_loc);
let (llret, b) = base::invoke(bcx, datum.val, &llargs, debug_loc);
let fn_ty = match datum.ty.sty {
ty::TyFnDef(_, _, f) | ty::TyFnPtr(f) => {
let sig = bcx.tcx().erase_late_bound_regions(&f.sig);
let sig = infer::normalize_associated_type(bcx.tcx(), &sig);
FnType::new(bcx.ccx(), f.abi, &sig, &[])
}
_ => unreachable!("expected fn type")
};
if !bcx.unreachable.get() {
fn_ty.apply_attrs_callsite(llret);
}
bcx = b;
llresult = llret;
// If the Rust convention for this type is return via
// the return value, copy it into llretslot.
match (opt_llretslot, ret_ty) {
(Some(llretslot), ty::FnConverging(ret_ty)) => {
if !type_of::return_uses_outptr(bcx.ccx(), ret_ty) &&
!common::type_is_zero_size(bcx.ccx(), ret_ty)
{
store_ty(bcx, llret, llretslot, ret_ty)
}
if let Some(llretslot) = opt_llretslot {
let llty = fn_ty.ret.original_ty;
if !fn_ty.ret.is_indirect() && llty != Type::void(bcx.ccx()) {
store_ty(bcx, llret, llretslot, ret_ty.unwrap())
}
(_, _) => {}
}
} else {
// Lang items are the only case where dest is None, and

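This hunk shows the pattern the rest of the diff repeats in `base::invoke` callers, `foreign.rs`, and `mir/block.rs`: `Call`/`Invoke` no longer take an `Option<AttrBuilder>`; the call or invoke instruction is emitted first, and `FnType::apply_attrs_callsite` is applied to the returned value afterwards, guarded on the block still being reachable. A rough standalone sketch of that control flow; `CallSite`, `FnType` and `emit_call` here are toy stand-ins, and only the method name `apply_attrs_callsite` and the reachability guard mirror the diff.

```rust
// Sketch only: emit the call, then decorate the returned call-site value.

#[derive(Debug, Default)]
struct CallSite {
    attrs: Vec<&'static str>,
}

struct FnType {
    ret_attrs: Vec<&'static str>,
    arg_attrs: Vec<Vec<&'static str>>,
}

impl FnType {
    fn apply_attrs_callsite(&self, cs: &mut CallSite) {
        cs.attrs.extend(self.ret_attrs.iter().copied());
        for arg in &self.arg_attrs {
            cs.attrs.extend(arg.iter().copied());
        }
    }
}

// Stands in for Call/Invoke: returns the call instruction unless the block is
// unreachable (the real builders return a dummy value in that case).
fn emit_call(reachable: bool) -> Option<CallSite> {
    if reachable { Some(CallSite::default()) } else { None }
}

fn main() {
    let fn_ty = FnType {
        ret_attrs: vec!["noalias"],
        arg_attrs: vec![vec!["nocapture", "dereferenceable(4)"]],
    };

    if let Some(mut cs) = emit_call(true) {
        // Previously: attributes were threaded into Call/Invoke as Option<AttrBuilder>.
        // Now: apply them to the finished call site, and only if we actually got one.
        fn_ty.apply_attrs_callsite(&mut cs);
        println!("{:?}", cs);
    }
}
```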
View File

@ -109,14 +109,11 @@ pub fn declare_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, name: &str,
llvm::SetFunctionAttribute(llfn, llvm::Attribute::NoReturn);
}
let attrs = if f.abi == Abi::Rust || f.abi == Abi::RustCall {
attributes::from_fn_type(ccx, fn_type)
} else {
if f.abi != Abi::Rust && f.abi != Abi::RustCall {
attributes::unwind(llfn, false);
fty.llvm_attrs()
};
}
attrs.apply_llfn(llfn);
fty.apply_attrs_llfn(llfn);
llfn
}

View File

@ -864,7 +864,6 @@ fn trans_index<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
let expected = Call(bcx,
expect,
&[bounds_check, C_bool(ccx, false)],
None,
index_expr_debug_loc);
bcx = with_cond(bcx, expected, |bcx| {
controlflow::trans_fail_bounds_check(bcx,
@ -1681,10 +1680,10 @@ fn trans_scalar_binop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
if lhs_t == tcx.types.f32 {
let lhs = FPExt(bcx, lhs, f64t);
let rhs = FPExt(bcx, rhs, f64t);
let res = Call(bcx, llfn, &[lhs, rhs], None, binop_debug_loc);
let res = Call(bcx, llfn, &[lhs, rhs], binop_debug_loc);
FPTrunc(bcx, res, Type::f32(bcx.ccx()))
} else {
Call(bcx, llfn, &[lhs, rhs], None, binop_debug_loc)
Call(bcx, llfn, &[lhs, rhs], binop_debug_loc)
}
} else {
FRem(bcx, lhs, rhs, binop_debug_loc)
@ -2255,7 +2254,7 @@ impl OverflowOpViaIntrinsic {
-> (Block<'blk, 'tcx>, ValueRef) {
let llfn = self.to_intrinsic(bcx, lhs_t);
let val = Call(bcx, llfn, &[lhs, rhs], None, binop_debug_loc);
let val = Call(bcx, llfn, &[lhs, rhs], binop_debug_loc);
let result = ExtractValue(bcx, val, 0); // iN operation result
let overflow = ExtractValue(bcx, val, 1); // i1 "did it overflow?"
@ -2264,7 +2263,7 @@ impl OverflowOpViaIntrinsic {
let expect = bcx.ccx().get_intrinsic(&"llvm.expect.i1");
Call(bcx, expect, &[cond, C_integral(Type::i1(bcx.ccx()), 0, false)],
None, binop_debug_loc);
binop_debug_loc);
let bcx =
base::with_cond(bcx, cond, |bcx|

View File

@ -253,8 +253,10 @@ pub fn trans_native_call<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
llfn,
&llargs_foreign[..],
fn_type.cconv,
Some(fn_type.llvm_attrs()),
call_debug_loc);
if !bcx.unreachable.get() {
fn_type.apply_attrs_callsite(llforeign_retval);
}
// If the function we just called does not use an outpointer,
// store the result into the rust outpointer. Cast the outpointer
@ -347,20 +349,31 @@ pub fn trans_rust_fn_with_foreign_abi<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
let fnty = ccx.tcx().node_id_to_type(id);
let mty = monomorphize::apply_param_substs(ccx.tcx(), param_substs, &fnty);
let (fn_abi, fn_sig) = match mty.sty {
ty::TyFnDef(_, _, ref fn_ty) => (fn_ty.abi, &fn_ty.sig),
let f = match mty.sty {
ty::TyFnDef(_, _, f) => f,
_ => ccx.sess().bug("trans_rust_fn_with_foreign_abi called on non-function type")
};
let fn_sig = ccx.tcx().erase_late_bound_regions(fn_sig);
assert!(f.abi != Abi::Rust);
assert!(f.abi != Abi::RustIntrinsic);
assert!(f.abi != Abi::PlatformIntrinsic);
let fn_sig = ccx.tcx().erase_late_bound_regions(&f.sig);
let fn_sig = infer::normalize_associated_type(ccx.tcx(), &fn_sig);
let fn_ty = FnType::new(ccx, fn_abi, &fn_sig, &[]);
let rust_fn_ty = ccx.tcx().mk_fn_ptr(ty::BareFnTy {
unsafety: f.unsafety,
abi: Abi::Rust,
sig: ty::Binder(fn_sig.clone())
});
let fty = FnType::new(ccx, f.abi, &fn_sig, &[]);
let rust_fty = FnType::new(ccx, Abi::Rust, &fn_sig, &[]);
unsafe { // unsafe because we call LLVM operations
// Build up the Rust function (`foo0` above).
let llrustfn = build_rust_fn(ccx, decl, body, param_substs, attrs, id, hash);
let llrustfn = build_rust_fn(ccx, decl, body, param_substs,
attrs, id, rust_fn_ty, hash);
// Build up the foreign wrapper (`foo` above).
return build_wrap_fn(ccx, llrustfn, llwrapfn, &fn_sig, &fn_ty, mty);
return build_wrap_fn(ccx, llrustfn, llwrapfn, &fn_sig, &fty, &rust_fty);
}
fn build_rust_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
@ -369,13 +382,12 @@ pub fn trans_rust_fn_with_foreign_abi<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
param_substs: &'tcx Substs<'tcx>,
attrs: &[ast::Attribute],
id: ast::NodeId,
rust_fn_ty: Ty<'tcx>,
hash: Option<&str>)
-> ValueRef
{
let _icx = push_ctxt("foreign::foreign::build_rust_fn");
let tcx = ccx.tcx();
let t = tcx.node_id_to_type(id);
let t = monomorphize::apply_param_substs(tcx, param_substs, &t);
let path =
tcx.map.def_path(tcx.map.local_def_id(id))
@ -384,25 +396,6 @@ pub fn trans_rust_fn_with_foreign_abi<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
.chain(once(special_idents::clownshoe_abi.name.as_str()));
let ps = link::mangle(path, hash);
// Compute the type that the function would have if it were just a
// normal Rust function. This will be the type of the wrappee fn.
let rust_fn_ty = match t.sty {
ty::TyFnDef(_, _, ref f) => {
assert!(f.abi != Abi::Rust);
assert!(f.abi != Abi::RustIntrinsic);
assert!(f.abi != Abi::PlatformIntrinsic);
tcx.mk_fn_ptr(ty::BareFnTy {
unsafety: f.unsafety,
abi: Abi::Rust,
sig: f.sig.clone()
})
}
_ => {
unreachable!("build_rust_fn: extern fn {} has ty {:?}, \
expected a fn item type",
tcx.map.path_to_string(id), t);
}
};
debug!("build_rust_fn: path={} id={} ty={:?}",
ccx.tcx().map.path_to_string(id),
@ -419,14 +412,13 @@ pub fn trans_rust_fn_with_foreign_abi<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
llwrapfn: ValueRef,
fn_sig: &ty::FnSig<'tcx>,
fn_ty: &FnType,
t: Ty<'tcx>) {
rust_fty: &FnType) {
let _icx = push_ctxt(
"foreign::trans_rust_fn_with_foreign_abi::build_wrap_fn");
debug!("build_wrap_fn(llrustfn={:?}, llwrapfn={:?}, t={:?})",
debug!("build_wrap_fn(llrustfn={:?}, llwrapfn={:?})",
Value(llrustfn),
Value(llwrapfn),
t);
Value(llwrapfn));
// Avoid all the Rust generation stuff and just generate raw
// LLVM here.
@ -615,11 +607,9 @@ pub fn trans_rust_fn_with_foreign_abi<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
}
// Perform the call itself
debug!("calling llrustfn = {:?}, t = {:?}",
Value(llrustfn), t);
let attributes = attributes::from_fn_type(ccx, t);
let llrust_ret_val = builder.call(llrustfn, &llrust_args,
None, Some(attributes));
debug!("calling llrustfn = {:?}", Value(llrustfn));
let llrust_ret_val = builder.call(llrustfn, &llrust_args, None);
rust_fty.apply_attrs_callsite(llrust_ret_val);
// Get the return value where the foreign fn expects it.
let llforeign_ret_ty = fn_ty.ret.cast.unwrap_or(fn_ty.ret.original_ty);

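In `foreign.rs` the same signature is now lowered twice: once under the foreign ABI (`fty`, describing the wrapper itself) and once under `Abi::Rust` (`rust_fty`, whose attributes are applied to the call into the real Rust function). The point is that the two ABIs may pass the very same Rust signature differently, so the wrapper needs both descriptions. A toy illustration of that idea follows; the lowering rule and the 16-byte threshold are invented for the sketch, not the real C ABI logic, which in rustc lives in the `cabi_*` modules that `FnType::new` dispatches to.

```rust
// Sketch only: one Rust signature, two ABI lowerings.

#[derive(Debug, Clone, Copy)]
enum Abi {
    Rust,
    C,
}

#[derive(Debug)]
enum PassMode {
    Direct,   // passed by value
    Indirect, // passed behind a hidden pointer
}

// Invented rule: large aggregates go indirect under the foreign ABI but stay
// direct under the Rust ABI in this toy model.
fn lower_arg(abi: Abi, size_bytes: u64) -> PassMode {
    match abi {
        Abi::C if size_bytes > 16 => PassMode::Indirect,
        _ => PassMode::Direct,
    }
}

fn main() {
    let arg_size = 24; // e.g. a three-word struct
    // The wrapper's own signature follows the foreign ABI...
    println!("foreign wrapper arg: {:?}", lower_arg(Abi::C, arg_size));
    // ...while the inner call into the Rust function follows Abi::Rust and
    // gets rust_fty.apply_attrs_callsite(..) applied to its call site.
    println!("inner Rust call arg: {:?}", lower_arg(Abi::Rust, arg_size));
}
```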
View File

@ -170,13 +170,13 @@ pub fn drop_ty_core<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
let may_need_drop =
ICmp(bcx, llvm::IntNE, hint_val, moved_val, DebugLoc::None);
bcx = with_cond(bcx, may_need_drop, |cx| {
Call(cx, glue, &[ptr], None, debug_loc);
Call(cx, glue, &[ptr], debug_loc);
cx
})
}
None => {
// No drop-hint ==> call standard drop glue
Call(bcx, glue, &[ptr], None, debug_loc);
Call(bcx, glue, &[ptr], debug_loc);
}
}
}
@ -313,7 +313,7 @@ fn trans_struct_drop_flag<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
And(bcx, not_init, not_done, DebugLoc::None);
with_cond(bcx, drop_flag_neither_initialized_nor_cleared, |cx| {
let llfn = cx.ccx().get_intrinsic(&("llvm.debugtrap"));
Call(cx, llfn, &[], None, DebugLoc::None);
Call(cx, llfn, &[], DebugLoc::None);
cx
})
};
@ -583,7 +583,6 @@ fn make_drop_glue<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, v0: ValueRef, g: DropGlueK
Call(bcx,
dtor,
&[PointerCast(bcx, Load(bcx, data_ptr), Type::i8p(bcx.ccx()))],
None,
DebugLoc::None);
bcx
}

View File

@ -407,7 +407,7 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
// These are the only intrinsic functions that diverge.
if name == "abort" {
let llfn = ccx.get_intrinsic(&("llvm.trap"));
Call(bcx, llfn, &[], None, call_debug_location);
Call(bcx, llfn, &[], call_debug_location);
fcx.pop_and_trans_custom_cleanup_scope(bcx, cleanup_scope);
Unreachable(bcx);
return Result::new(bcx, C_undef(Type::nil(ccx).ptr_to()));
@ -442,11 +442,11 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
let simple = get_simple_intrinsic(ccx, &name);
let llval = match (simple, &name[..]) {
(Some(llfn), _) => {
Call(bcx, llfn, &llargs, None, call_debug_location)
Call(bcx, llfn, &llargs, call_debug_location)
}
(_, "breakpoint") => {
let llfn = ccx.get_intrinsic(&("llvm.debugtrap"));
Call(bcx, llfn, &[], None, call_debug_location)
Call(bcx, llfn, &[], call_debug_location)
}
(_, "size_of") => {
let tp_ty = *substs.types.get(FnSpace, 0);
@ -636,13 +636,13 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
"cttz" => count_zeros_intrinsic(bcx, &format!("llvm.cttz.i{}", width),
llargs[0], call_debug_location),
"ctpop" => Call(bcx, ccx.get_intrinsic(&format!("llvm.ctpop.i{}", width)),
&llargs, None, call_debug_location),
&llargs, call_debug_location),
"bswap" => {
if width == 8 {
llargs[0] // byte swap a u8/i8 is just a no-op
} else {
Call(bcx, ccx.get_intrinsic(&format!("llvm.bswap.i{}", width)),
&llargs, None, call_debug_location)
&llargs, call_debug_location)
}
}
"add_with_overflow" | "sub_with_overflow" | "mul_with_overflow" => {
@ -951,7 +951,7 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
let f = declare::declare_cfn(ccx,
name,
Type::func(&inputs, &outputs));
Call(bcx, f, &llargs, None, call_debug_location)
Call(bcx, f, &llargs, call_debug_location)
}
};
@ -1024,7 +1024,6 @@ fn copy_intrinsic<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
Mul(bcx, size, count, DebugLoc::None),
align,
C_bool(ccx, volatile)],
None,
call_debug_location)
}
@ -1054,7 +1053,6 @@ fn memset_intrinsic<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
Mul(bcx, size, count, DebugLoc::None),
align,
C_bool(ccx, volatile)],
None,
call_debug_location)
}
@ -1065,7 +1063,7 @@ fn count_zeros_intrinsic(bcx: Block,
-> ValueRef {
let y = C_bool(bcx.ccx(), false);
let llfn = bcx.ccx().get_intrinsic(&name);
Call(bcx, llfn, &[val, y], None, call_debug_location)
Call(bcx, llfn, &[val, y], call_debug_location)
}
fn with_overflow_intrinsic<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
@ -1078,7 +1076,7 @@ fn with_overflow_intrinsic<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
let llfn = bcx.ccx().get_intrinsic(&name);
// Convert `i1` to a `bool`, and write it to the out parameter
let val = Call(bcx, llfn, &[a, b], None, call_debug_location);
let val = Call(bcx, llfn, &[a, b], call_debug_location);
let result = ExtractValue(bcx, val, 0);
let overflow = ZExt(bcx, ExtractValue(bcx, val, 1), Type::bool(bcx.ccx()));
Store(bcx, result, StructGEP(bcx, out, 0));
@ -1094,7 +1092,7 @@ fn try_intrinsic<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
dest: ValueRef,
dloc: DebugLoc) -> Block<'blk, 'tcx> {
if bcx.sess().no_landing_pads() {
Call(bcx, func, &[data], None, dloc);
Call(bcx, func, &[data], dloc);
Store(bcx, C_null(Type::i8p(bcx.ccx())), dest);
bcx
} else if wants_msvc_seh(bcx.sess()) {
@ -1165,9 +1163,9 @@ fn trans_msvc_try<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
// More information can be found in libstd's seh.rs implementation.
let slot = Alloca(bcx, Type::i8p(ccx), "slot");
let localescape = ccx.get_intrinsic(&"llvm.localescape");
Call(bcx, localescape, &[slot], None, dloc);
Call(bcx, localescape, &[slot], dloc);
Store(bcx, local_ptr, slot);
Invoke(bcx, func, &[data], normal.llbb, catchswitch.llbb, None, dloc);
Invoke(bcx, func, &[data], normal.llbb, catchswitch.llbb, dloc);
Ret(normal, C_i32(ccx, 0), dloc);
@ -1184,7 +1182,7 @@ fn trans_msvc_try<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
// Note that no invoke is used here because by definition this function
// can't panic (that's what it's catching).
let ret = Call(bcx, llfn, &[func, data, local_ptr], None, dloc);
let ret = Call(bcx, llfn, &[func, data, local_ptr], dloc);
Store(bcx, ret, dest);
return bcx
}
@ -1242,7 +1240,7 @@ fn trans_gnu_try<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
let func = llvm::get_param(bcx.fcx.llfn, 0);
let data = llvm::get_param(bcx.fcx.llfn, 1);
let local_ptr = llvm::get_param(bcx.fcx.llfn, 2);
Invoke(bcx, func, &[data], then.llbb, catch.llbb, None, dloc);
Invoke(bcx, func, &[data], then.llbb, catch.llbb, dloc);
Ret(then, C_i32(ccx, 0), dloc);
// Type indicator for the exception being thrown.
@ -1262,7 +1260,7 @@ fn trans_gnu_try<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
// Note that no invoke is used here because by definition this function
// can't panic (that's what it's catching).
let ret = Call(bcx, llfn, &[func, data, local_ptr], None, dloc);
let ret = Call(bcx, llfn, &[func, data, local_ptr], dloc);
Store(bcx, ret, dest);
return bcx;
}
@ -1376,11 +1374,10 @@ fn generate_filter_fn<'a, 'tcx>(fcx: &FunctionContext<'a, 'tcx>,
// For more info, see seh.rs in the standard library.
let do_trans = |bcx: Block, ehptrs, base_pointer| {
let rust_try_fn = BitCast(bcx, rust_try_fn, Type::i8p(ccx));
let parentfp = Call(bcx, recoverfp, &[rust_try_fn, base_pointer],
None, dloc);
let parentfp = Call(bcx, recoverfp, &[rust_try_fn, base_pointer], dloc);
let arg = Call(bcx, localrecover,
&[rust_try_fn, parentfp, C_i32(ccx, 0)], None, dloc);
let ret = Call(bcx, rust_try_filter, &[ehptrs, arg], None, dloc);
&[rust_try_fn, parentfp, C_i32(ccx, 0)], dloc);
let ret = Call(bcx, rust_try_filter, &[ehptrs, arg], dloc);
Ret(bcx, ret, dloc);
};
@ -1402,7 +1399,7 @@ fn generate_filter_fn<'a, 'tcx>(fcx: &FunctionContext<'a, 'tcx>,
}),
});
gen_fn(fcx, "__rustc_try_filter", filter_fn_ty, output, &mut |bcx| {
let ebp = Call(bcx, frameaddress, &[C_i32(ccx, 1)], None, dloc);
let ebp = Call(bcx, frameaddress, &[C_i32(ccx, 1)], dloc);
let exn = InBoundsGEP(bcx, ebp, &[C_i32(ccx, -20)]);
let exn = Load(bcx, BitCast(bcx, exn, Type::i8p(ccx).ptr_to()));
do_trans(bcx, exn, ebp);

View File

@ -9,11 +9,10 @@
// except according to those terms.
use llvm::{BasicBlockRef, ValueRef, OperandBundleDef};
use rustc::middle::ty;
use rustc::middle::{infer, ty};
use rustc::mir::repr as mir;
use trans::abi::Abi;
use trans::abi::{Abi, FnType};
use trans::adt;
use trans::attributes;
use trans::base;
use trans::build;
use trans::callee::{Callee, Fn, Virtual};
@ -141,11 +140,10 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
&[llvalue],
self.llblock(target),
unwind.llbb(),
cleanup_bundle.as_ref(),
None);
cleanup_bundle.as_ref());
self.bcx(target).at_start(|bcx| drop::drop_fill(bcx, lvalue.llval, ty));
} else {
bcx.call(drop_fn, &[llvalue], cleanup_bundle.as_ref(), None);
bcx.call(drop_fn, &[llvalue], cleanup_bundle.as_ref());
drop::drop_fill(&bcx, lvalue.llval, ty);
funclet_br(bcx, self.llblock(target));
}
@ -244,7 +242,15 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
datum
};
let attrs = attributes::from_fn_type(bcx.ccx(), datum.ty);
let fn_ty = match datum.ty.sty {
ty::TyFnDef(_, _, f) | ty::TyFnPtr(f) => {
let sig = bcx.tcx().erase_late_bound_regions(&f.sig);
let sig = infer::normalize_associated_type(bcx.tcx(), &sig);
FnType::new(bcx.ccx(), f.abi, &sig, &[])
}
_ => unreachable!("expected fn type")
};
// Many different ways to call a function handled here
match (is_foreign, cleanup, destination) {
@ -253,12 +259,12 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
let cleanup = self.bcx(cleanup);
let landingpad = self.make_landing_pad(cleanup);
let unreachable_blk = self.unreachable_block();
bcx.invoke(datum.val,
&llargs[..],
unreachable_blk.llbb,
landingpad.llbb(),
cleanup_bundle.as_ref(),
Some(attrs));
let cs = bcx.invoke(datum.val,
&llargs[..],
unreachable_blk.llbb,
landingpad.llbb(),
cleanup_bundle.as_ref());
fn_ty.apply_attrs_callsite(cs);
landingpad.at_start(|bcx| for op in args {
self.set_operand_dropped(bcx, op);
});
@ -270,8 +276,8 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
&llargs[..],
self.llblock(success),
landingpad.llbb(),
cleanup_bundle.as_ref(),
Some(attrs));
cleanup_bundle.as_ref());
fn_ty.apply_attrs_callsite(invokeret);
if must_copy_dest {
let (ret_dest, ret_ty) = ret_dest_ty
.expect("return destination and type not set");
@ -289,18 +295,18 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
});
},
(false, _, &None) => {
bcx.call(datum.val,
&llargs[..],
cleanup_bundle.as_ref(),
Some(attrs));
let cs = bcx.call(datum.val,
&llargs[..],
cleanup_bundle.as_ref());
fn_ty.apply_attrs_callsite(cs);
// no need to drop args, because the call never returns
bcx.unreachable();
}
(false, _, &Some((_, target))) => {
let llret = bcx.call(datum.val,
&llargs[..],
cleanup_bundle.as_ref(),
Some(attrs));
cleanup_bundle.as_ref());
fn_ty.apply_attrs_callsite(llret);
if must_copy_dest {
let (ret_dest, ret_ty) = ret_dest_ty
.expect("return destination and type not set");

View File

@ -515,10 +515,10 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
if input_ty == tcx.types.f32 {
let lllhs = bcx.fpext(lhs, f64t);
let llrhs = bcx.fpext(rhs, f64t);
let llres = bcx.call(llfn, &[lllhs, llrhs], None, None);
let llres = bcx.call(llfn, &[lllhs, llrhs], None);
bcx.fptrunc(llres, Type::f32(bcx.ccx()))
} else {
bcx.call(llfn, &[lhs, rhs], None, None)
bcx.call(llfn, &[lhs, rhs], None)
}
} else {
bcx.frem(lhs, rhs)