Mirror of https://github.com/rust-lang/rust.git, synced 2024-11-28 09:44:08 +00:00.

Auto merge of #41917 - arielb1:mir-array, r=nagisa

Translate array drop glue using MIR

I was a bit lazy here and used a usize-based index instead of a pointer iteration. Do you think this is important, @eddyb?

r? @eddyb

This commit is contained in:
commit 924898f88a
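
For readers skimming the diff below, here is a rough source-level sketch of the two iteration styles the description refers to. This is illustrative Rust only; the helper names are invented for this note and are not part of the patch, and the real glue is emitted directly as MIR basic blocks in `elaborate_drops`.

    use std::ptr;

    /// Index-based form: advance a `usize` counter until it reaches `len`.
    unsafe fn drop_array_indexed<T>(base: *mut T, len: usize) {
        let mut cur = 0usize;
        while cur != len {
            ptr::drop_in_place(base.add(cur));
            cur += 1;
        }
    }

    /// Pointer-based form: advance a raw pointer from `base` to `end`.
    unsafe fn drop_array_ptr<T>(base: *mut T, len: usize) {
        let mut cur = base;
        let end = base.add(len);
        while cur != end {
            ptr::drop_in_place(cur);
            cur = cur.add(1);
        }
    }

The merged version of the patch emits both shapes and selects between them at runtime with a `SizeOf` check (see `open_drop_for_array` further down).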
@@ -315,7 +315,8 @@ impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for mir::Rvalue<'tcx>
            mir::Rvalue::Discriminant(ref lvalue) => {
                lvalue.hash_stable(hcx, hasher);
            }
-           mir::Rvalue::Box(ty) => {
+           mir::Rvalue::NullaryOp(op, ty) => {
+               op.hash_stable(hcx, hasher);
                ty.hash_stable(hcx, hasher);
            }
            mir::Rvalue::Aggregate(ref kind, ref operands) => {
@@ -374,7 +375,8 @@ impl_stable_hash_for!(enum mir::BinOp {
     Le,
     Ne,
     Ge,
-    Gt
+    Gt,
+    Offset
 });

 impl_stable_hash_for!(enum mir::UnOp {
@@ -382,6 +384,10 @@ impl_stable_hash_for!(enum mir::UnOp {
     Neg
 });

+impl_stable_hash_for!(enum mir::NullOp {
+    Box,
+    SizeOf
+});
+
 impl_stable_hash_for!(struct mir::Constant<'tcx> { span, ty, literal });

@@ -47,7 +47,7 @@
 #![cfg_attr(stage0, feature(staged_api))]
 #![cfg_attr(stage0, feature(loop_break_value))]

-#![recursion_limit="128"]
+#![recursion_limit="192"]

 extern crate arena;
 extern crate core;

@@ -245,7 +245,6 @@ pub trait CrateStore {

     // flags
     fn is_const_fn(&self, did: DefId) -> bool;
-    fn is_default_impl(&self, impl_did: DefId) -> bool;
     fn is_dllimport_foreign_item(&self, def: DefId) -> bool;
     fn is_statically_included_foreign_item(&self, def_id: DefId) -> bool;

@@ -364,7 +363,6 @@ impl CrateStore for DummyCrateStore {

     // flags
     fn is_const_fn(&self, did: DefId) -> bool { bug!("is_const_fn") }
-    fn is_default_impl(&self, impl_did: DefId) -> bool { bug!("is_default_impl") }
     fn is_dllimport_foreign_item(&self, id: DefId) -> bool { false }
     fn is_statically_included_foreign_item(&self, def_id: DefId) -> bool { false }

@@ -1046,6 +1046,7 @@ pub enum Rvalue<'tcx> {
     BinaryOp(BinOp, Operand<'tcx>, Operand<'tcx>),
     CheckedBinaryOp(BinOp, Operand<'tcx>, Operand<'tcx>),

+    NullaryOp(NullOp, Ty<'tcx>),
     UnaryOp(UnOp, Operand<'tcx>),

     /// Read the discriminant of an ADT.
@@ -1054,9 +1055,6 @@ pub enum Rvalue<'tcx> {
     /// be defined to return, say, a 0) if ADT is not an enum.
     Discriminant(Lvalue<'tcx>),

-    /// Creates an *uninitialized* Box
-    Box(Ty<'tcx>),
-
     /// Create an aggregate value, like a tuple or struct. This is
     /// only needed because we want to distinguish `dest = Foo { x:
     /// ..., y: ... }` from `dest.x = ...; dest.y = ...;` in the case
@@ -1132,6 +1130,8 @@ pub enum BinOp {
     Ge,
     /// The `>` operator (greater than)
     Gt,
+    /// The `ptr.offset` operator
+    Offset,
 }

 impl BinOp {
@@ -1144,6 +1144,14 @@ impl BinOp {
     }
 }

+#[derive(Copy, Clone, Debug, PartialEq, Eq, RustcEncodable, RustcDecodable)]
+pub enum NullOp {
+    /// Return the size of a value of that type
+    SizeOf,
+    /// Create a new uninitialized box for a value of that type
+    Box,
+}
+
 #[derive(Copy, Clone, Debug, PartialEq, Eq, RustcEncodable, RustcDecodable)]
 pub enum UnOp {
     /// The `!` operator for logical inversion
@@ -1167,7 +1175,7 @@ impl<'tcx> Debug for Rvalue<'tcx> {
             }
             UnaryOp(ref op, ref a) => write!(fmt, "{:?}({:?})", op, a),
             Discriminant(ref lval) => write!(fmt, "discriminant({:?})", lval),
-            Box(ref t) => write!(fmt, "Box({:?})", t),
+            NullaryOp(ref op, ref t) => write!(fmt, "{:?}({:?})", op, t),
             Ref(_, borrow_kind, ref lv) => {
                 let kind_str = match borrow_kind {
                     BorrowKind::Shared => "",
@@ -1601,7 +1609,7 @@ impl<'tcx> TypeFoldable<'tcx> for Rvalue<'tcx> {
                 CheckedBinaryOp(op, rhs.fold_with(folder), lhs.fold_with(folder)),
             UnaryOp(op, ref val) => UnaryOp(op, val.fold_with(folder)),
             Discriminant(ref lval) => Discriminant(lval.fold_with(folder)),
-            Box(ty) => Box(ty.fold_with(folder)),
+            NullaryOp(op, ty) => NullaryOp(op, ty.fold_with(folder)),
             Aggregate(ref kind, ref fields) => {
                 let kind = box match **kind {
                     AggregateKind::Array(ty) => AggregateKind::Array(ty.fold_with(folder)),
@@ -1629,7 +1637,7 @@ impl<'tcx> TypeFoldable<'tcx> for Rvalue<'tcx> {
                 rhs.visit_with(visitor) || lhs.visit_with(visitor),
             UnaryOp(_, ref val) => val.visit_with(visitor),
             Discriminant(ref lval) => lval.visit_with(visitor),
-            Box(ty) => ty.visit_with(visitor),
+            NullaryOp(_, ty) => ty.visit_with(visitor),
             Aggregate(ref kind, ref fields) => {
                 (match **kind {
                     AggregateKind::Array(ty) => ty.visit_with(visitor),

@@ -166,7 +166,8 @@ impl<'tcx> Rvalue<'tcx> {
                 let ty = op.ty(tcx, lhs_ty, rhs_ty);
                 tcx.intern_tup(&[ty, tcx.types.bool], false)
             }
-            Rvalue::UnaryOp(_, ref operand) => {
+            Rvalue::UnaryOp(UnOp::Not, ref operand) |
+            Rvalue::UnaryOp(UnOp::Neg, ref operand) => {
                 operand.ty(mir, tcx)
             }
             Rvalue::Discriminant(ref lval) => {
@@ -179,9 +180,8 @@ impl<'tcx> Rvalue<'tcx> {
                     bug!("Rvalue::Discriminant on Lvalue of type {:?}", ty);
                 }
             }
-            Rvalue::Box(t) => {
-                tcx.mk_box(t)
-            }
+            Rvalue::NullaryOp(NullOp::Box, t) => tcx.mk_box(t),
+            Rvalue::NullaryOp(NullOp::SizeOf, _) => tcx.types.usize,
             Rvalue::Aggregate(ref ak, ref ops) => {
                 match **ak {
                     AggregateKind::Array(ty) => {
@@ -227,7 +227,7 @@ impl<'tcx> BinOp {
                 assert_eq!(lhs_ty, rhs_ty);
                 lhs_ty
             }
-            &BinOp::Shl | &BinOp::Shr => {
+            &BinOp::Shl | &BinOp::Shr | &BinOp::Offset => {
                 lhs_ty // lhs_ty can be != rhs_ty
             }
             &BinOp::Eq | &BinOp::Lt | &BinOp::Le |
@@ -270,7 +270,8 @@ impl BinOp {
             BinOp::Lt => hir::BinOp_::BiLt,
             BinOp::Gt => hir::BinOp_::BiGt,
             BinOp::Le => hir::BinOp_::BiLe,
-            BinOp::Ge => hir::BinOp_::BiGe
+            BinOp::Ge => hir::BinOp_::BiGe,
+            BinOp::Offset => unreachable!()
         }
     }
 }

@@ -509,7 +509,7 @@ macro_rules! make_mir_visitor {
                         self.visit_lvalue(lvalue, LvalueContext::Inspect, location);
                     }

-                    Rvalue::Box(ref $($mutability)* ty) => {
+                    Rvalue::NullaryOp(_op, ref $($mutability)* ty) => {
                         self.visit_ty(ty);
                     }

@@ -218,7 +218,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {

         // Always use types for non-local impls, where types are always
         // available, and filename/line-number is mostly uninteresting.
-        let use_types = !impl_def_id.is_local() || {
+        let use_types = !self.is_default_impl(impl_def_id) && (!impl_def_id.is_local() || {
             // Otherwise, use filename/line-number if forced.
             let force_no_types = FORCE_IMPL_FILENAME_LINE.with(|f| f.get());
             !force_no_types && {
@@ -226,7 +226,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
                 ty::queries::impl_trait_ref::try_get(self, DUMMY_SP, impl_def_id).is_ok() &&
                 ty::queries::type_of::try_get(self, DUMMY_SP, impl_def_id).is_ok()
             }
-        };
+        });

         if !use_types {
             return self.push_impl_path_fallback(buffer, impl_def_id);

@@ -774,6 +774,9 @@ define_maps! { <'tcx>
     /// True if this is a foreign item (i.e., linked via `extern { ... }`).
     [] is_foreign_item: IsForeignItem(DefId) -> bool,

+    /// True if this is a default impl (aka impl Foo for ..)
+    [] is_default_impl: ItemSignature(DefId) -> bool,
+
     /// Get a map with the variance of every item; use `item_variance`
     /// instead.
     [] crate_variances: crate_variances(CrateNum) -> Rc<ty::CrateVariancesMap>,

@@ -600,6 +600,15 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
             bug!("empty_substs_for_def_id: {:?} has type parameters", item_def_id)
         })
     }
+
+    pub fn const_usize(&self, val: u16) -> ConstInt {
+        match self.sess.target.uint_type {
+            ast::UintTy::U16 => ConstInt::Usize(ConstUsize::Us16(val as u16)),
+            ast::UintTy::U32 => ConstInt::Usize(ConstUsize::Us32(val as u32)),
+            ast::UintTy::U64 => ConstInt::Usize(ConstUsize::Us64(val as u64)),
+            _ => bug!(),
+        }
+    }
 }

 pub struct TypeIdHasher<'a, 'gcx: 'a+'tcx, 'tcx: 'a, W> {

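The new `const_usize` helper only ever needs to encode very small values; the drop elaboration code later in this patch builds the constants 0 and 1 with it (via `constant_usize`), which is presumably why a `u16` argument is enough. A hypothetical call, shown only to illustrate the shape of the API:

    // Build a usize constant of the target's pointer width for the literal 1.
    let one: ConstInt = tcx.const_usize(1);
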
@ -22,7 +22,7 @@ use rustc::util::nodemap::FxHashMap;
|
||||
use rustc_data_structures::indexed_set::IdxSetBuf;
|
||||
use rustc_data_structures::indexed_vec::Idx;
|
||||
use rustc_mir::util::patch::MirPatch;
|
||||
use rustc_mir::util::elaborate_drops::{DropFlagState, elaborate_drop};
|
||||
use rustc_mir::util::elaborate_drops::{DropFlagState, Unwind, elaborate_drop};
|
||||
use rustc_mir::util::elaborate_drops::{DropElaborator, DropStyle, DropFlagMode};
|
||||
use syntax::ast;
|
||||
use syntax_pos::Span;
|
||||
@ -399,14 +399,13 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
|
||||
ctxt: self
|
||||
},
|
||||
terminator.source_info,
|
||||
data.is_cleanup,
|
||||
location,
|
||||
path,
|
||||
target,
|
||||
if data.is_cleanup {
|
||||
None
|
||||
Unwind::InCleanup
|
||||
} else {
|
||||
Some(Option::unwrap_or(unwind, resume_block))
|
||||
Unwind::To(Option::unwrap_or(unwind, resume_block))
|
||||
},
|
||||
bb)
|
||||
}
|
||||
@ -455,6 +454,7 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
|
||||
let bb = loc.block;
|
||||
let data = &self.mir[bb];
|
||||
let terminator = data.terminator();
|
||||
assert!(!data.is_cleanup, "DropAndReplace in unwind path not supported");
|
||||
|
||||
let assign = Statement {
|
||||
kind: StatementKind::Assign(location.clone(), Rvalue::Use(value.clone())),
|
||||
@ -477,7 +477,7 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
|
||||
kind: TerminatorKind::Goto { target: target },
|
||||
..*terminator
|
||||
}),
|
||||
is_cleanup: data.is_cleanup,
|
||||
is_cleanup: false,
|
||||
});
|
||||
|
||||
match self.move_data().rev_lookup.find(location) {
|
||||
@ -491,11 +491,10 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
|
||||
ctxt: self
|
||||
},
|
||||
terminator.source_info,
|
||||
data.is_cleanup,
|
||||
location,
|
||||
path,
|
||||
target,
|
||||
Some(unwind),
|
||||
Unwind::To(unwind),
|
||||
bb);
|
||||
on_all_children_bits(self.tcx, self.mir, self.move_data(), path, |child| {
|
||||
self.set_drop_flag(Location { block: target, statement_index: 0 },
|
||||
|
@ -438,7 +438,8 @@ impl<'a, 'tcx> MoveDataBuilder<'a, 'tcx> {
|
||||
Rvalue::Ref(..) |
|
||||
Rvalue::Discriminant(..) |
|
||||
Rvalue::Len(..) |
|
||||
Rvalue::Box(..) => {
|
||||
Rvalue::NullaryOp(NullOp::SizeOf, _) |
|
||||
Rvalue::NullaryOp(NullOp::Box, _) => {
|
||||
// This returns an rvalue with uninitialized contents. We can't
|
||||
// move out of it here because it is an rvalue - assignments always
|
||||
// completely initialize their lvalue.
|
||||
|
@ -59,7 +59,7 @@ pub fn borrowck_mir(bcx: &mut BorrowckCtxt,
|
||||
attributes: &[ast::Attribute]) {
|
||||
let tcx = bcx.tcx;
|
||||
let def_id = tcx.hir.local_def_id(id);
|
||||
debug!("borrowck_mir({}) UNIMPLEMENTED", tcx.item_path_str(def_id));
|
||||
debug!("borrowck_mir({:?}) UNIMPLEMENTED", def_id);
|
||||
|
||||
// It is safe for us to borrow `mir_validated()`: `optimized_mir`
|
||||
// steals it, but it forces the `borrowck` query.
|
||||
|
@ -212,6 +212,13 @@ impl<I: Idx, T> IndexMut<I> for IndexVec<I, T> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<I: Idx, T> Default for IndexVec<I, T> {
|
||||
#[inline]
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl<I: Idx, T> Extend<T> for IndexVec<I, T> {
|
||||
#[inline]
|
||||
fn extend<J: IntoIterator<Item = T>>(&mut self, iter: J) {
|
||||
|
@ -106,6 +106,7 @@ provide! { <'tcx> tcx, def_id, cdata
|
||||
closure_type => { cdata.closure_ty(def_id.index, tcx) }
|
||||
inherent_impls => { Rc::new(cdata.get_inherent_implementations_for_type(def_id.index)) }
|
||||
is_foreign_item => { cdata.is_foreign_item(def_id.index) }
|
||||
is_default_impl => { cdata.is_default_impl(def_id.index) }
|
||||
describe_def => { cdata.get_def(def_id.index) }
|
||||
def_span => { cdata.get_span(def_id.index, &tcx.sess) }
|
||||
stability => { cdata.get_stability(def_id.index) }
|
||||
@ -176,11 +177,6 @@ impl CrateStore for cstore::CStore {
|
||||
self.get_crate_data(did.krate).is_const_fn(did.index)
|
||||
}
|
||||
|
||||
fn is_default_impl(&self, impl_did: DefId) -> bool {
|
||||
self.dep_graph.read(DepNode::MetaData(impl_did));
|
||||
self.get_crate_data(impl_did.krate).is_default_impl(impl_did.index)
|
||||
}
|
||||
|
||||
fn is_statically_included_foreign_item(&self, def_id: DefId) -> bool
|
||||
{
|
||||
self.do_is_statically_included_foreign_item(def_id)
|
||||
@ -403,7 +399,7 @@ impl CrateStore for cstore::CStore {
|
||||
}
|
||||
|
||||
self.dep_graph.read(DepNode::MetaData(def_id));
|
||||
debug!("item_body({}): inlining item", tcx.item_path_str(def_id));
|
||||
debug!("item_body({:?}): inlining item", def_id);
|
||||
|
||||
self.get_crate_data(def_id.krate).item_body(tcx, def_id.index)
|
||||
}
|
||||
@ -515,4 +511,4 @@ impl CrateStore for cstore::CStore {
|
||||
drop(visible_parent_map);
|
||||
self.visible_parent_map.borrow()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -97,7 +97,8 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
|
||||
let value = this.hir.mirror(value);
|
||||
let result = this.temp(expr.ty, expr_span);
|
||||
// to start, malloc some memory of suitable type (thus far, uninitialized):
|
||||
this.cfg.push_assign(block, source_info, &result, Rvalue::Box(value.ty));
|
||||
let box_ = Rvalue::NullaryOp(NullOp::Box, value.ty);
|
||||
this.cfg.push_assign(block, source_info, &result, box_);
|
||||
this.in_scope(value_extents, block, |this| {
|
||||
// schedule a shallow free of that memory, lest we unwind:
|
||||
this.schedule_box_free(expr_span, value_extents, &result, value.ty);
|
||||
|
@ -198,11 +198,10 @@ fn build_drop_shim<'a, 'tcx>(tcx: ty::TyCtxt<'a, 'tcx, 'tcx>,
|
||||
elaborate_drops::elaborate_drop(
|
||||
&mut elaborator,
|
||||
source_info,
|
||||
false,
|
||||
&dropee,
|
||||
(),
|
||||
return_block,
|
||||
Some(resume_block),
|
||||
elaborate_drops::Unwind::To(resume_block),
|
||||
START_BLOCK
|
||||
);
|
||||
elaborator.patch
|
||||
|
@ -53,7 +53,7 @@ impl<'a, 'tcx> MutVisitor<'tcx> for EraseRegionsVisitor<'a, 'tcx> {
|
||||
Rvalue::CheckedBinaryOp(..) |
|
||||
Rvalue::UnaryOp(..) |
|
||||
Rvalue::Discriminant(..) |
|
||||
Rvalue::Box(..) |
|
||||
Rvalue::NullaryOp(..) |
|
||||
Rvalue::Aggregate(..) => {
|
||||
// These variants don't contain regions.
|
||||
}
|
||||
|
@ -361,7 +361,7 @@ impl<'a, 'tcx> Qualifier<'a, 'tcx, 'tcx> {
|
||||
|
||||
/// Qualify a whole const, static initializer or const fn.
|
||||
fn qualify_const(&mut self) -> Qualif {
|
||||
debug!("qualifying {} {}", self.mode, self.tcx.item_path_str(self.def_id));
|
||||
debug!("qualifying {} {:?}", self.mode, self.def_id);
|
||||
|
||||
let mir = self.mir;
|
||||
|
||||
@ -595,7 +595,9 @@ impl<'a, 'tcx> Visitor<'tcx> for Qualifier<'a, 'tcx, 'tcx> {
|
||||
match *rvalue {
|
||||
Rvalue::Use(_) |
|
||||
Rvalue::Repeat(..) |
|
||||
Rvalue::UnaryOp(..) |
|
||||
Rvalue::UnaryOp(UnOp::Neg, _) |
|
||||
Rvalue::UnaryOp(UnOp::Not, _) |
|
||||
Rvalue::NullaryOp(NullOp::SizeOf, _) |
|
||||
Rvalue::CheckedBinaryOp(..) |
|
||||
Rvalue::Cast(CastKind::ReifyFnPointer, ..) |
|
||||
Rvalue::Cast(CastKind::UnsafeFnPointer, ..) |
|
||||
@ -703,7 +705,8 @@ impl<'a, 'tcx> Visitor<'tcx> for Qualifier<'a, 'tcx, 'tcx> {
|
||||
if let ty::TyRawPtr(_) = lhs.ty(self.mir, self.tcx).sty {
|
||||
assert!(op == BinOp::Eq || op == BinOp::Ne ||
|
||||
op == BinOp::Le || op == BinOp::Lt ||
|
||||
op == BinOp::Ge || op == BinOp::Gt);
|
||||
op == BinOp::Ge || op == BinOp::Gt ||
|
||||
op == BinOp::Offset);
|
||||
|
||||
self.add(Qualif::NOT_CONST);
|
||||
if self.mode != Mode::Fn {
|
||||
@ -719,7 +722,7 @@ impl<'a, 'tcx> Visitor<'tcx> for Qualifier<'a, 'tcx, 'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
Rvalue::Box(_) => {
|
||||
Rvalue::NullaryOp(NullOp::Box, _) => {
|
||||
self.add(Qualif::NOT_CONST);
|
||||
if self.mode != Mode::Fn {
|
||||
struct_span_err!(self.tcx.sess, self.span, E0010,
|
||||
|
@ -744,7 +744,7 @@ impl MirPass for TypeckMir {
|
||||
mir: &mut Mir<'tcx>) {
|
||||
let item_id = src.item_id();
|
||||
let def_id = tcx.hir.local_def_id(item_id);
|
||||
debug!("run_pass: {}", tcx.item_path_str(def_id));
|
||||
debug!("run_pass: {:?}", def_id);
|
||||
|
||||
if tcx.sess.err_count() > 0 {
|
||||
// compiling a broken program can obviously result in a
|
||||
|
@@ -11,7 +11,7 @@
 use std::fmt;
 use rustc::hir;
 use rustc::mir::*;
-use rustc::middle::const_val::ConstInt;
+use rustc::middle::const_val::{ConstInt, ConstVal};
 use rustc::middle::lang_items;
 use rustc::ty::{self, Ty};
 use rustc::ty::subst::{Kind, Substs};
@@ -50,6 +50,35 @@ pub enum DropFlagMode {
     Deep
 }

+#[derive(Copy, Clone, Debug)]
+pub enum Unwind {
+    To(BasicBlock),
+    InCleanup
+}
+
+impl Unwind {
+    fn is_cleanup(self) -> bool {
+        match self {
+            Unwind::To(..) => false,
+            Unwind::InCleanup => true
+        }
+    }
+
+    fn into_option(self) -> Option<BasicBlock> {
+        match self {
+            Unwind::To(bb) => Some(bb),
+            Unwind::InCleanup => None,
+        }
+    }
+
+    fn map<F>(self, f: F) -> Self where F: FnOnce(BasicBlock) -> BasicBlock {
+        match self {
+            Unwind::To(bb) => Unwind::To(f(bb)),
+            Unwind::InCleanup => Unwind::InCleanup
+        }
+    }
+}
+
 pub trait DropElaborator<'a, 'tcx: 'a> : fmt::Debug {
     type Path : Copy + fmt::Debug;

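The `Unwind` enum introduced above packages up what the old code threaded through as two separate values, an `is_cleanup: bool` and an `unwind: Option<BasicBlock>`, whose agreement was only checked by an `assert_eq!(unwind.is_none(), is_cleanup)` (removed further down in this patch). A hypothetical conversion helper, written only to make that mapping explicit, would look like:

    fn to_unwind(is_cleanup: bool, unwind: Option<BasicBlock>) -> Unwind {
        match (is_cleanup, unwind) {
            // A drop inside a cleanup (landing-pad) block has nowhere further to unwind to.
            (true, None) => Unwind::InCleanup,
            // A normal drop unwinds to the given cleanup block.
            (false, Some(bb)) => Unwind::To(bb),
            // The old code asserted that the two values always agreed.
            _ => panic!("unwind target and cleanup-ness must agree"),
        }
    }
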
@ -75,28 +104,25 @@ struct DropCtxt<'l, 'b: 'l, 'tcx: 'b, D>
|
||||
elaborator: &'l mut D,
|
||||
|
||||
source_info: SourceInfo,
|
||||
is_cleanup: bool,
|
||||
|
||||
lvalue: &'l Lvalue<'tcx>,
|
||||
path: D::Path,
|
||||
succ: BasicBlock,
|
||||
unwind: Option<BasicBlock>,
|
||||
unwind: Unwind,
|
||||
}
|
||||
|
||||
pub fn elaborate_drop<'b, 'tcx, D>(
|
||||
elaborator: &mut D,
|
||||
source_info: SourceInfo,
|
||||
is_cleanup: bool,
|
||||
lvalue: &Lvalue<'tcx>,
|
||||
path: D::Path,
|
||||
succ: BasicBlock,
|
||||
unwind: Option<BasicBlock>,
|
||||
unwind: Unwind,
|
||||
bb: BasicBlock)
|
||||
where D: DropElaborator<'b, 'tcx>
|
||||
{
|
||||
assert_eq!(unwind.is_none(), is_cleanup);
|
||||
DropCtxt {
|
||||
elaborator, source_info, is_cleanup, lvalue, path, succ, unwind
|
||||
elaborator, source_info, lvalue, path, succ, unwind
|
||||
}.elaborate_drop(bb)
|
||||
}
|
||||
|
||||
@ -145,14 +171,13 @@ impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D>
|
||||
self.elaborator.patch().patch_terminator(bb, TerminatorKind::Drop {
|
||||
location: self.lvalue.clone(),
|
||||
target: self.succ,
|
||||
unwind: self.unwind
|
||||
unwind: self.unwind.into_option(),
|
||||
});
|
||||
}
|
||||
DropStyle::Conditional => {
|
||||
let is_cleanup = self.is_cleanup; // FIXME(#6393)
|
||||
let unwind = self.unwind; // FIXME(#6393)
|
||||
let succ = self.succ;
|
||||
let drop_bb = self.complete_drop(
|
||||
is_cleanup, Some(DropFlagMode::Deep), succ);
|
||||
let drop_bb = self.complete_drop(Some(DropFlagMode::Deep), succ, unwind);
|
||||
self.elaborator.patch().patch_terminator(bb, TerminatorKind::Goto {
|
||||
target: drop_bb
|
||||
});
|
||||
@ -189,11 +214,10 @@ impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D>
|
||||
}
|
||||
|
||||
fn drop_subpath(&mut self,
|
||||
is_cleanup: bool,
|
||||
lvalue: &Lvalue<'tcx>,
|
||||
path: Option<D::Path>,
|
||||
succ: BasicBlock,
|
||||
unwind: Option<BasicBlock>)
|
||||
unwind: Unwind)
|
||||
-> BasicBlock
|
||||
{
|
||||
if let Some(path) = path {
|
||||
@ -202,7 +226,7 @@ impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D>
|
||||
DropCtxt {
|
||||
elaborator: self.elaborator,
|
||||
source_info: self.source_info,
|
||||
path, lvalue, succ, unwind, is_cleanup
|
||||
path, lvalue, succ, unwind,
|
||||
}.elaborated_drop_block()
|
||||
} else {
|
||||
debug!("drop_subpath: for rest field {:?}", lvalue);
|
||||
@ -210,49 +234,46 @@ impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D>
|
||||
DropCtxt {
|
||||
elaborator: self.elaborator,
|
||||
source_info: self.source_info,
|
||||
lvalue, succ, unwind, is_cleanup,
|
||||
lvalue, succ, unwind,
|
||||
// Using `self.path` here to condition the drop on
|
||||
// our own drop flag.
|
||||
path: self.path
|
||||
}.complete_drop(is_cleanup, None, succ)
|
||||
}.complete_drop(None, succ, unwind)
|
||||
}
|
||||
}
|
||||
|
||||
/// Create one-half of the drop ladder for a list of fields, and return
|
||||
/// the list of steps in it in reverse order.
|
||||
/// the list of steps in it in reverse order, with the first step
|
||||
/// dropping 0 fields and so on.
|
||||
///
|
||||
/// `unwind_ladder` is such a list of steps in reverse order,
|
||||
/// which is called instead of the next step if the drop unwinds
|
||||
/// (the first field is never reached). If it is `None`, all
|
||||
/// unwind targets are left blank.
|
||||
fn drop_halfladder<'a>(&mut self,
|
||||
unwind_ladder: Option<&[BasicBlock]>,
|
||||
succ: BasicBlock,
|
||||
fields: &[(Lvalue<'tcx>, Option<D::Path>)],
|
||||
is_cleanup: bool)
|
||||
-> Vec<BasicBlock>
|
||||
/// which is called if the matching step of the drop glue panics.
|
||||
fn drop_halfladder(&mut self,
|
||||
unwind_ladder: &[Unwind],
|
||||
mut succ: BasicBlock,
|
||||
fields: &[(Lvalue<'tcx>, Option<D::Path>)])
|
||||
-> Vec<BasicBlock>
|
||||
{
|
||||
let mut unwind_succ = if is_cleanup {
|
||||
None
|
||||
} else {
|
||||
self.unwind
|
||||
};
|
||||
Some(succ).into_iter().chain(
|
||||
fields.iter().rev().zip(unwind_ladder)
|
||||
.map(|(&(ref lv, path), &unwind_succ)| {
|
||||
succ = self.drop_subpath(lv, path, succ, unwind_succ);
|
||||
succ
|
||||
})
|
||||
).collect()
|
||||
}
|
||||
|
||||
let goto = TerminatorKind::Goto { target: succ };
|
||||
let mut succ = self.new_block(is_cleanup, goto);
|
||||
|
||||
// Always clear the "master" drop flag at the bottom of the
|
||||
// ladder. This is needed because the "master" drop flag
|
||||
// protects the ADT's discriminant, which is invalidated
|
||||
// after the ADT is dropped.
|
||||
let succ_loc = Location { block: succ, statement_index: 0 };
|
||||
self.elaborator.clear_drop_flag(succ_loc, self.path, DropFlagMode::Shallow);
|
||||
|
||||
fields.iter().rev().enumerate().map(|(i, &(ref lv, path))| {
|
||||
succ = self.drop_subpath(is_cleanup, lv, path, succ, unwind_succ);
|
||||
unwind_succ = unwind_ladder.as_ref().map(|p| p[i]);
|
||||
succ
|
||||
}).collect()
|
||||
fn drop_ladder_bottom(&mut self) -> (BasicBlock, Unwind) {
|
||||
// Clear the "master" drop flag at the end. This is needed
|
||||
// because the "master" drop protects the ADT's discriminant,
|
||||
// which is invalidated after the ADT is dropped.
|
||||
let (succ, unwind) = (self.succ, self.unwind); // FIXME(#6393)
|
||||
(
|
||||
self.drop_flag_reset_block(DropFlagMode::Shallow, succ, unwind),
|
||||
unwind.map(|unwind| {
|
||||
self.drop_flag_reset_block(DropFlagMode::Shallow, unwind, Unwind::InCleanup)
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
/// Create a full drop ladder, consisting of 2 connected half-drop-ladders
|
||||
@@ -269,9 +290,14 @@ impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D>
     /// ELAB(drop location.1 [target=.c2])
     /// .c2:
     /// ELAB(drop location.2 [target=`self.unwind`])
+    ///
+    /// NOTE: this does not clear the master drop flag, so you need
+    /// to point succ/unwind on a `drop_ladder_bottom`.
     fn drop_ladder<'a>(&mut self,
-                       fields: Vec<(Lvalue<'tcx>, Option<D::Path>)>)
-                       -> (BasicBlock, Option<BasicBlock>)
+                       fields: Vec<(Lvalue<'tcx>, Option<D::Path>)>,
+                       succ: BasicBlock,
+                       unwind: Unwind)
+                       -> (BasicBlock, Unwind)
     {
         debug!("drop_ladder({:?}, {:?})", self, fields);

@@ -282,21 +308,18 @@ impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D>

         debug!("drop_ladder - fields needing drop: {:?}", fields);

-        let unwind_ladder = if self.is_cleanup {
-            None
+        let unwind_ladder = vec![Unwind::InCleanup; fields.len() + 1];
+        let unwind_ladder: Vec<_> = if let Unwind::To(target) = unwind {
+            let halfladder = self.drop_halfladder(&unwind_ladder, target, &fields);
+            halfladder.into_iter().map(Unwind::To).collect()
         } else {
-            let unwind = self.unwind.unwrap(); // FIXME(#6393)
-            Some(self.drop_halfladder(None, unwind, &fields, true))
+            unwind_ladder
         };

-        let succ = self.succ; // FIXME(#6393)
-        let is_cleanup = self.is_cleanup;
         let normal_ladder =
-            self.drop_halfladder(unwind_ladder.as_ref().map(|x| &**x),
-                                 succ, &fields, is_cleanup);
+            self.drop_halfladder(&unwind_ladder, succ, &fields);

-        (normal_ladder.last().cloned().unwrap_or(succ),
-         unwind_ladder.and_then(|l| l.last().cloned()).or(self.unwind))
+        (*normal_ladder.last().unwrap(), *unwind_ladder.last().unwrap())
     }
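
As a reminder of the behaviour the ladder encodes, plain Rust (not compiler code) observes the same ordering: an aggregate's fields are dropped in declaration order, one ladder step per field, and the unwind half-ladder only matters if one of those destructors panics part-way down.

    struct Noisy(&'static str);

    impl Drop for Noisy {
        fn drop(&mut self) {
            println!("dropping {}", self.0);
        }
    }

    struct Pair {
        a: Noisy,
        b: Noisy,
    }

    fn main() {
        let _pair = Pair { a: Noisy("a"), b: Noisy("b") };
        // Prints "dropping a" then "dropping b" when `_pair` goes out of scope.
    }
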
|
||||
|
||||
fn open_drop_for_tuple<'a>(&mut self, tys: &[Ty<'tcx>])
|
||||
@ -309,7 +332,8 @@ impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D>
|
||||
self.elaborator.field_subpath(self.path, Field::new(i)))
|
||||
}).collect();
|
||||
|
||||
self.drop_ladder(fields).0
|
||||
let (succ, unwind) = self.drop_ladder_bottom();
|
||||
self.drop_ladder(fields, succ, unwind).0
|
||||
}
|
||||
|
||||
fn open_drop_for_box<'a>(&mut self, ty: Ty<'tcx>) -> BasicBlock
|
||||
@ -320,13 +344,13 @@ impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D>
|
||||
let interior_path = self.elaborator.deref_subpath(self.path);
|
||||
|
||||
let succ = self.succ; // FIXME(#6393)
|
||||
let is_cleanup = self.is_cleanup;
|
||||
let succ = self.box_free_block(ty, succ, is_cleanup);
|
||||
let unwind_succ = self.unwind.map(|u| {
|
||||
self.box_free_block(ty, u, true)
|
||||
let unwind = self.unwind;
|
||||
let succ = self.box_free_block(ty, succ, unwind);
|
||||
let unwind_succ = self.unwind.map(|unwind| {
|
||||
self.box_free_block(ty, unwind, Unwind::InCleanup)
|
||||
});
|
||||
|
||||
self.drop_subpath(is_cleanup, &interior, interior_path, succ, unwind_succ)
|
||||
self.drop_subpath(&interior, interior_path, succ, unwind_succ)
|
||||
}
|
||||
|
||||
fn open_drop_for_adt<'a>(&mut self, adt: &'tcx ty::AdtDef, substs: &'tcx Substs<'tcx>)
|
||||
@ -339,7 +363,7 @@ impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D>
|
||||
source_info: self.source_info,
|
||||
kind: TerminatorKind::Unreachable
|
||||
}),
|
||||
is_cleanup: self.is_cleanup
|
||||
is_cleanup: self.unwind.is_cleanup()
|
||||
});
|
||||
}
|
||||
|
||||
@ -356,114 +380,108 @@ impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D>
|
||||
}
|
||||
}
|
||||
|
||||
fn open_drop_for_adt_contents<'a>(&mut self, adt: &'tcx ty::AdtDef,
|
||||
substs: &'tcx Substs<'tcx>)
|
||||
-> (BasicBlock, Option<BasicBlock>) {
|
||||
match adt.variants.len() {
|
||||
1 => {
|
||||
let fields = self.move_paths_for_fields(
|
||||
self.lvalue,
|
||||
self.path,
|
||||
&adt.variants[0],
|
||||
substs
|
||||
);
|
||||
self.drop_ladder(fields)
|
||||
}
|
||||
_ => {
|
||||
let is_cleanup = self.is_cleanup;
|
||||
let succ = self.succ;
|
||||
let unwind = self.unwind; // FIXME(#6393)
|
||||
|
||||
let mut values = Vec::with_capacity(adt.variants.len());
|
||||
let mut normal_blocks = Vec::with_capacity(adt.variants.len());
|
||||
let mut unwind_blocks = if is_cleanup {
|
||||
None
|
||||
} else {
|
||||
Some(Vec::with_capacity(adt.variants.len()))
|
||||
};
|
||||
let mut otherwise = None;
|
||||
let mut unwind_otherwise = None;
|
||||
for (variant_index, discr) in adt.discriminants(self.tcx()).enumerate() {
|
||||
let subpath = self.elaborator.downcast_subpath(
|
||||
self.path, variant_index);
|
||||
if let Some(variant_path) = subpath {
|
||||
let base_lv = self.lvalue.clone().elem(
|
||||
ProjectionElem::Downcast(adt, variant_index)
|
||||
);
|
||||
let fields = self.move_paths_for_fields(
|
||||
&base_lv,
|
||||
variant_path,
|
||||
&adt.variants[variant_index],
|
||||
substs);
|
||||
values.push(discr);
|
||||
if let Some(ref mut unwind_blocks) = unwind_blocks {
|
||||
// We can't use the half-ladder from the original
|
||||
// drop ladder, because this breaks the
|
||||
// "funclet can't have 2 successor funclets"
|
||||
// requirement from MSVC:
|
||||
//
|
||||
// switch unwind-switch
|
||||
// / \ / \
|
||||
// v1.0 v2.0 v2.0-unwind v1.0-unwind
|
||||
// | | / |
|
||||
// v1.1-unwind v2.1-unwind |
|
||||
// ^ |
|
||||
// \-------------------------------/
|
||||
//
|
||||
// Create a duplicate half-ladder to avoid that. We
|
||||
// could technically only do this on MSVC, but I
|
||||
// I want to minimize the divergence between MSVC
|
||||
// and non-MSVC.
|
||||
|
||||
let unwind = unwind.unwrap();
|
||||
let halfladder = self.drop_halfladder(
|
||||
None, unwind, &fields, true);
|
||||
unwind_blocks.push(
|
||||
halfladder.last().cloned().unwrap_or(unwind)
|
||||
);
|
||||
}
|
||||
let (normal, _) = self.drop_ladder(fields);
|
||||
normal_blocks.push(normal);
|
||||
} else {
|
||||
// variant not found - drop the entire enum
|
||||
if let None = otherwise {
|
||||
otherwise = Some(self.complete_drop(
|
||||
is_cleanup,
|
||||
Some(DropFlagMode::Shallow),
|
||||
succ));
|
||||
unwind_otherwise = unwind.map(|unwind| self.complete_drop(
|
||||
true,
|
||||
Some(DropFlagMode::Shallow),
|
||||
unwind
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
if let Some(block) = otherwise {
|
||||
normal_blocks.push(block);
|
||||
if let Some(ref mut unwind_blocks) = unwind_blocks {
|
||||
unwind_blocks.push(unwind_otherwise.unwrap());
|
||||
}
|
||||
} else {
|
||||
values.pop();
|
||||
}
|
||||
|
||||
(self.adt_switch_block(is_cleanup, adt, normal_blocks, &values, succ),
|
||||
unwind_blocks.map(|unwind_blocks| {
|
||||
self.adt_switch_block(
|
||||
is_cleanup, adt, unwind_blocks, &values, unwind.unwrap()
|
||||
)
|
||||
}))
|
||||
}
|
||||
fn open_drop_for_adt_contents(&mut self, adt: &'tcx ty::AdtDef,
|
||||
substs: &'tcx Substs<'tcx>)
|
||||
-> (BasicBlock, Unwind) {
|
||||
let (succ, unwind) = self.drop_ladder_bottom();
|
||||
if adt.variants.len() == 1 {
|
||||
let fields = self.move_paths_for_fields(
|
||||
self.lvalue,
|
||||
self.path,
|
||||
&adt.variants[0],
|
||||
substs
|
||||
);
|
||||
self.drop_ladder(fields, succ, unwind)
|
||||
} else {
|
||||
self.open_drop_for_multivariant(adt, substs, succ, unwind)
|
||||
}
|
||||
}
|
||||
|
||||
fn open_drop_for_multivariant(&mut self, adt: &'tcx ty::AdtDef,
|
||||
substs: &'tcx Substs<'tcx>,
|
||||
succ: BasicBlock,
|
||||
unwind: Unwind)
|
||||
-> (BasicBlock, Unwind) {
|
||||
let mut values = Vec::with_capacity(adt.variants.len());
|
||||
let mut normal_blocks = Vec::with_capacity(adt.variants.len());
|
||||
let mut unwind_blocks = if unwind.is_cleanup() {
|
||||
None
|
||||
} else {
|
||||
Some(Vec::with_capacity(adt.variants.len()))
|
||||
};
|
||||
|
||||
let mut have_otherwise = false;
|
||||
|
||||
for (variant_index, discr) in adt.discriminants(self.tcx()).enumerate() {
|
||||
let subpath = self.elaborator.downcast_subpath(
|
||||
self.path, variant_index);
|
||||
if let Some(variant_path) = subpath {
|
||||
let base_lv = self.lvalue.clone().elem(
|
||||
ProjectionElem::Downcast(adt, variant_index)
|
||||
);
|
||||
let fields = self.move_paths_for_fields(
|
||||
&base_lv,
|
||||
variant_path,
|
||||
&adt.variants[variant_index],
|
||||
substs);
|
||||
values.push(discr);
|
||||
if let Unwind::To(unwind) = unwind {
|
||||
// We can't use the half-ladder from the original
|
||||
// drop ladder, because this breaks the
|
||||
// "funclet can't have 2 successor funclets"
|
||||
// requirement from MSVC:
|
||||
//
|
||||
// switch unwind-switch
|
||||
// / \ / \
|
||||
// v1.0 v2.0 v2.0-unwind v1.0-unwind
|
||||
// | | / |
|
||||
// v1.1-unwind v2.1-unwind |
|
||||
// ^ |
|
||||
// \-------------------------------/
|
||||
//
|
||||
// Create a duplicate half-ladder to avoid that. We
|
||||
// could technically only do this on MSVC, but I
|
||||
// I want to minimize the divergence between MSVC
|
||||
// and non-MSVC.
|
||||
|
||||
let unwind_blocks = unwind_blocks.as_mut().unwrap();
|
||||
let unwind_ladder = vec![Unwind::InCleanup; fields.len() + 1];
|
||||
let halfladder =
|
||||
self.drop_halfladder(&unwind_ladder, unwind, &fields);
|
||||
unwind_blocks.push(halfladder.last().cloned().unwrap());
|
||||
}
|
||||
let (normal, _) = self.drop_ladder(fields, succ, unwind);
|
||||
normal_blocks.push(normal);
|
||||
} else {
|
||||
have_otherwise = true;
|
||||
}
|
||||
}
|
||||
|
||||
if have_otherwise {
|
||||
normal_blocks.push(self.drop_block(succ, unwind));
|
||||
if let Unwind::To(unwind) = unwind {
|
||||
unwind_blocks.as_mut().unwrap().push(
|
||||
self.drop_block(unwind, Unwind::InCleanup)
|
||||
);
|
||||
}
|
||||
} else {
|
||||
values.pop();
|
||||
}
|
||||
|
||||
(self.adt_switch_block(adt, normal_blocks, &values, succ, unwind),
|
||||
unwind.map(|unwind| {
|
||||
self.adt_switch_block(
|
||||
adt, unwind_blocks.unwrap(), &values, unwind, Unwind::InCleanup
|
||||
)
|
||||
}))
|
||||
}
|
||||
|
||||
fn adt_switch_block(&mut self,
|
||||
is_cleanup: bool,
|
||||
adt: &'tcx ty::AdtDef,
|
||||
blocks: Vec<BasicBlock>,
|
||||
values: &[ConstInt],
|
||||
succ: BasicBlock)
|
||||
succ: BasicBlock,
|
||||
unwind: Unwind)
|
||||
-> BasicBlock {
|
||||
// If there are multiple variants, then if something
|
||||
// is present within the enum the discriminant, tracked
|
||||
@ -475,13 +493,8 @@ impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D>
|
||||
let discr_ty = adt.repr.discr_type().to_ty(self.tcx());
|
||||
let discr = Lvalue::Local(self.new_temp(discr_ty));
|
||||
let discr_rv = Rvalue::Discriminant(self.lvalue.clone());
|
||||
let switch_block = self.elaborator.patch().new_block(BasicBlockData {
|
||||
statements: vec![
|
||||
Statement {
|
||||
source_info: self.source_info,
|
||||
kind: StatementKind::Assign(discr.clone(), discr_rv),
|
||||
}
|
||||
],
|
||||
let switch_block = BasicBlockData {
|
||||
statements: vec![self.assign(&discr, discr_rv)],
|
||||
terminator: Some(Terminator {
|
||||
source_info: self.source_info,
|
||||
kind: TerminatorKind::SwitchInt {
|
||||
@ -491,12 +504,13 @@ impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D>
|
||||
targets: blocks,
|
||||
}
|
||||
}),
|
||||
is_cleanup: is_cleanup,
|
||||
});
|
||||
self.drop_flag_test_block(is_cleanup, switch_block, succ)
|
||||
is_cleanup: unwind.is_cleanup(),
|
||||
};
|
||||
let switch_block = self.elaborator.patch().new_block(switch_block);
|
||||
self.drop_flag_test_block(switch_block, succ, unwind)
|
||||
}
|
||||
|
||||
fn destructor_call_block<'a>(&mut self, (succ, unwind): (BasicBlock, Option<BasicBlock>))
|
||||
fn destructor_call_block<'a>(&mut self, (succ, unwind): (BasicBlock, Unwind))
|
||||
-> BasicBlock
|
||||
{
|
||||
debug!("destructor_call_block({:?}, {:?})", self, succ);
|
||||
@ -513,26 +527,213 @@ impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D>
|
||||
let ref_lvalue = self.new_temp(ref_ty);
|
||||
let unit_temp = Lvalue::Local(self.new_temp(tcx.mk_nil()));
|
||||
|
||||
self.elaborator.patch().new_block(BasicBlockData {
|
||||
statements: vec![Statement {
|
||||
source_info: self.source_info,
|
||||
kind: StatementKind::Assign(
|
||||
Lvalue::Local(ref_lvalue),
|
||||
Rvalue::Ref(tcx.types.re_erased, BorrowKind::Mut, self.lvalue.clone())
|
||||
)
|
||||
}],
|
||||
let result = BasicBlockData {
|
||||
statements: vec![self.assign(
|
||||
&Lvalue::Local(ref_lvalue),
|
||||
Rvalue::Ref(tcx.types.re_erased, BorrowKind::Mut, self.lvalue.clone())
|
||||
)],
|
||||
terminator: Some(Terminator {
|
||||
kind: TerminatorKind::Call {
|
||||
func: Operand::function_handle(tcx, drop_fn.def_id, substs,
|
||||
self.source_info.span),
|
||||
args: vec![Operand::Consume(Lvalue::Local(ref_lvalue))],
|
||||
destination: Some((unit_temp, succ)),
|
||||
cleanup: unwind,
|
||||
cleanup: unwind.into_option(),
|
||||
},
|
||||
source_info: self.source_info
|
||||
}),
|
||||
is_cleanup: self.is_cleanup,
|
||||
})
|
||||
is_cleanup: unwind.is_cleanup(),
|
||||
};
|
||||
self.elaborator.patch().new_block(result)
|
||||
}
|
||||
|
||||
/// create a loop that drops an array:
|
||||
///
|
||||
|
||||
///
|
||||
/// loop-block:
|
||||
/// can_go = cur == length_or_end
|
||||
/// if can_go then succ else drop-block
|
||||
/// drop-block:
|
||||
/// if ptr_based {
|
||||
/// ptr = cur
|
||||
/// cur = cur.offset(1)
|
||||
/// } else {
|
||||
/// ptr = &mut LV[cur]
|
||||
/// cur = cur + 1
|
||||
/// }
|
||||
/// drop(ptr)
|
||||
fn drop_loop(&mut self,
|
||||
succ: BasicBlock,
|
||||
cur: &Lvalue<'tcx>,
|
||||
length_or_end: &Lvalue<'tcx>,
|
||||
ety: Ty<'tcx>,
|
||||
unwind: Unwind,
|
||||
ptr_based: bool)
|
||||
-> BasicBlock
|
||||
{
|
||||
let use_ = |lv: &Lvalue<'tcx>| Operand::Consume(lv.clone());
|
||||
let tcx = self.tcx();
|
||||
|
||||
let ref_ty = tcx.mk_ref(tcx.types.re_erased, ty::TypeAndMut {
|
||||
ty: ety,
|
||||
mutbl: hir::Mutability::MutMutable
|
||||
});
|
||||
let ptr = &Lvalue::Local(self.new_temp(ref_ty));
|
||||
let can_go = &Lvalue::Local(self.new_temp(tcx.types.bool));
|
||||
|
||||
let one = self.constant_usize(1);
|
||||
let (ptr_next, cur_next) = if ptr_based {
|
||||
(Rvalue::Use(use_(cur)),
|
||||
Rvalue::BinaryOp(BinOp::Offset, use_(cur), one))
|
||||
} else {
|
||||
(Rvalue::Ref(
|
||||
tcx.types.re_erased,
|
||||
BorrowKind::Mut,
|
||||
self.lvalue.clone().index(use_(cur))),
|
||||
Rvalue::BinaryOp(BinOp::Add, use_(cur), one))
|
||||
};
|
||||
|
||||
let drop_block = BasicBlockData {
|
||||
statements: vec![
|
||||
self.assign(ptr, ptr_next),
|
||||
self.assign(cur, cur_next)
|
||||
],
|
||||
is_cleanup: unwind.is_cleanup(),
|
||||
terminator: Some(Terminator {
|
||||
source_info: self.source_info,
|
||||
// this gets overwritten by drop elaboration.
|
||||
kind: TerminatorKind::Unreachable,
|
||||
})
|
||||
};
|
||||
let drop_block = self.elaborator.patch().new_block(drop_block);
|
||||
|
||||
let loop_block = BasicBlockData {
|
||||
statements: vec![
|
||||
self.assign(can_go, Rvalue::BinaryOp(BinOp::Eq,
|
||||
use_(cur),
|
||||
use_(length_or_end)))
|
||||
],
|
||||
is_cleanup: unwind.is_cleanup(),
|
||||
terminator: Some(Terminator {
|
||||
source_info: self.source_info,
|
||||
kind: TerminatorKind::if_(tcx, use_(can_go), succ, drop_block)
|
||||
})
|
||||
};
|
||||
let loop_block = self.elaborator.patch().new_block(loop_block);
|
||||
|
||||
self.elaborator.patch().patch_terminator(drop_block, TerminatorKind::Drop {
|
||||
location: ptr.clone().deref(),
|
||||
target: loop_block,
|
||||
unwind: unwind.into_option()
|
||||
});
|
||||
|
||||
loop_block
|
||||
}
|
||||
|
||||
    fn open_drop_for_array(&mut self, ety: Ty<'tcx>) -> BasicBlock {
        debug!("open_drop_for_array({:?})", ety);

        // if size_of::<ety>() == 0 {
        //     index_based_loop
        // } else {
        //     ptr_based_loop
        // }

        let tcx = self.tcx();

        let use_ = |lv: &Lvalue<'tcx>| Operand::Consume(lv.clone());
        let size = &Lvalue::Local(self.new_temp(tcx.types.usize));
        let size_is_zero = &Lvalue::Local(self.new_temp(tcx.types.bool));
        let base_block = BasicBlockData {
            statements: vec![
                self.assign(size, Rvalue::NullaryOp(NullOp::SizeOf, ety)),
                self.assign(size_is_zero, Rvalue::BinaryOp(BinOp::Eq,
                                                           use_(size),
                                                           self.constant_usize(0)))
            ],
            is_cleanup: self.unwind.is_cleanup(),
            terminator: Some(Terminator {
                source_info: self.source_info,
                kind: TerminatorKind::if_(
                    tcx,
                    use_(size_is_zero),
                    self.drop_loop_pair(ety, false),
                    self.drop_loop_pair(ety, true)
                )
            })
        };
        self.elaborator.patch().new_block(base_block)
    }

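The `SizeOf` test above selects the index-based loop for zero-sized element types and the pointer-based loop for everything else. A plausible reason for the split (my reading, not spelled out in the patch): offsetting a pointer to a zero-sized type never changes its address, so a begin/end pointer loop would see `begin == end` immediately and run no destructors. A small, self-contained illustration:

    struct Zst; // a zero-sized type

    fn main() {
        let mut a = [Zst, Zst, Zst];
        let base = a.as_mut_ptr();
        // Advancing a *mut Zst by the whole array length yields the same address.
        let end = unsafe { base.add(a.len()) };
        assert_eq!(base, end);
    }
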
// create a pair of drop-loops of `lvalue`, which drops its contents
|
||||
// even in the case of 1 panic. If `ptr_based`, create a pointer loop,
|
||||
// otherwise create an index loop.
|
||||
fn drop_loop_pair(&mut self, ety: Ty<'tcx>, ptr_based: bool) -> BasicBlock {
|
||||
debug!("drop_loop_pair({:?}, {:?})", ety, ptr_based);
|
||||
let tcx = self.tcx();
|
||||
let iter_ty = if ptr_based {
|
||||
tcx.mk_ptr(ty::TypeAndMut { ty: ety, mutbl: hir::Mutability::MutMutable })
|
||||
} else {
|
||||
tcx.types.usize
|
||||
};
|
||||
|
||||
let cur = Lvalue::Local(self.new_temp(iter_ty));
|
||||
let length = Lvalue::Local(self.new_temp(tcx.types.usize));
|
||||
let length_or_end = if ptr_based {
|
||||
Lvalue::Local(self.new_temp(iter_ty))
|
||||
} else {
|
||||
length.clone()
|
||||
};
|
||||
|
||||
let unwind = self.unwind.map(|unwind| {
|
||||
self.drop_loop(unwind,
|
||||
&cur,
|
||||
&length_or_end,
|
||||
ety,
|
||||
Unwind::InCleanup,
|
||||
ptr_based)
|
||||
});
|
||||
|
||||
let succ = self.succ; // FIXME(#6393)
|
||||
let loop_block = self.drop_loop(
|
||||
succ,
|
||||
&cur,
|
||||
&length_or_end,
|
||||
ety,
|
||||
unwind,
|
||||
ptr_based);
|
||||
|
||||
let zero = self.constant_usize(0);
|
||||
let mut drop_block_stmts = vec![];
|
||||
drop_block_stmts.push(self.assign(&length, Rvalue::Len(self.lvalue.clone())));
|
||||
if ptr_based {
|
||||
// cur = &LV[0];
|
||||
// end = &LV[len];
|
||||
drop_block_stmts.push(self.assign(&cur, Rvalue::Ref(
|
||||
tcx.types.re_erased, BorrowKind::Mut,
|
||||
self.lvalue.clone().index(zero.clone())
|
||||
)));
|
||||
drop_block_stmts.push(self.assign(&length_or_end, Rvalue::Ref(
|
||||
tcx.types.re_erased, BorrowKind::Mut,
|
||||
self.lvalue.clone().index(Operand::Consume(length.clone()))
|
||||
)));
|
||||
} else {
|
||||
// index = 0 (length already pushed)
|
||||
drop_block_stmts.push(self.assign(&cur, Rvalue::Use(zero)));
|
||||
}
|
||||
let drop_block = self.elaborator.patch().new_block(BasicBlockData {
|
||||
statements: drop_block_stmts,
|
||||
is_cleanup: unwind.is_cleanup(),
|
||||
terminator: Some(Terminator {
|
||||
source_info: self.source_info,
|
||||
kind: TerminatorKind::Goto { target: loop_block }
|
||||
})
|
||||
});
|
||||
|
||||
// FIXME(#34708): handle partially-dropped array/slice elements.
|
||||
let reset_block = self.drop_flag_reset_block(DropFlagMode::Deep, drop_block, unwind);
|
||||
self.drop_flag_test_block(reset_block, succ, unwind)
|
||||
}
|
||||
|
||||
/// The slow-path - create an "open", elaborated drop for a type
|
||||
@ -545,8 +746,6 @@ impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D>
|
||||
/// ADT, both in the success case or if one of the destructors fail.
|
||||
fn open_drop<'a>(&mut self) -> BasicBlock {
|
||||
let ty = self.lvalue_ty(self.lvalue);
|
||||
let is_cleanup = self.is_cleanup; // FIXME(#6393)
|
||||
let succ = self.succ;
|
||||
match ty.sty {
|
||||
ty::TyClosure(def_id, substs) => {
|
||||
let tys : Vec<_> = substs.upvar_tys(def_id, self.tcx()).collect();
|
||||
@ -562,12 +761,12 @@ impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D>
|
||||
self.open_drop_for_adt(def, substs)
|
||||
}
|
||||
ty::TyDynamic(..) => {
|
||||
self.complete_drop(is_cleanup, Some(DropFlagMode::Deep), succ)
|
||||
let unwind = self.unwind; // FIXME(#6393)
|
||||
let succ = self.succ;
|
||||
self.complete_drop(Some(DropFlagMode::Deep), succ, unwind)
|
||||
}
|
||||
ty::TyArray(..) | ty::TySlice(..) => {
|
||||
// FIXME(#34708): handle partially-dropped
|
||||
// array/slice elements.
|
||||
self.complete_drop(is_cleanup, Some(DropFlagMode::Deep), succ)
|
||||
ty::TyArray(ety, _) | ty::TySlice(ety) => {
|
||||
self.open_drop_for_array(ety)
|
||||
}
|
||||
_ => bug!("open drop from non-ADT `{:?}`", ty)
|
||||
}
|
||||
@ -581,26 +780,40 @@ impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D>
|
||||
/// if let Some(mode) = mode: FLAG(self.path)[mode] = false
|
||||
/// drop(self.lv)
|
||||
fn complete_drop<'a>(&mut self,
|
||||
is_cleanup: bool,
|
||||
drop_mode: Option<DropFlagMode>,
|
||||
succ: BasicBlock) -> BasicBlock
|
||||
succ: BasicBlock,
|
||||
unwind: Unwind) -> BasicBlock
|
||||
{
|
||||
debug!("complete_drop({:?},{:?})", self, drop_mode);
|
||||
|
||||
let drop_block = self.drop_block(is_cleanup, succ);
|
||||
if let Some(mode) = drop_mode {
|
||||
let block_start = Location { block: drop_block, statement_index: 0 };
|
||||
self.elaborator.clear_drop_flag(block_start, self.path, mode);
|
||||
}
|
||||
let drop_block = self.drop_block(succ, unwind);
|
||||
let drop_block = if let Some(mode) = drop_mode {
|
||||
self.drop_flag_reset_block(mode, drop_block, unwind)
|
||||
} else {
|
||||
drop_block
|
||||
};
|
||||
|
||||
self.drop_flag_test_block(is_cleanup, drop_block, succ)
|
||||
self.drop_flag_test_block(drop_block, succ, unwind)
|
||||
}
|
||||
|
||||
fn drop_flag_reset_block(&mut self,
|
||||
mode: DropFlagMode,
|
||||
succ: BasicBlock,
|
||||
unwind: Unwind) -> BasicBlock
|
||||
{
|
||||
debug!("drop_flag_reset_block({:?},{:?})", self, mode);
|
||||
|
||||
let block = self.new_block(unwind, TerminatorKind::Goto { target: succ });
|
||||
let block_start = Location { block: block, statement_index: 0 };
|
||||
self.elaborator.clear_drop_flag(block_start, self.path, mode);
|
||||
block
|
||||
}
|
||||
|
||||
fn elaborated_drop_block<'a>(&mut self) -> BasicBlock {
|
||||
debug!("elaborated_drop_block({:?})", self);
|
||||
let is_cleanup = self.is_cleanup; // FIXME(#6393)
|
||||
let unwind = self.unwind; // FIXME(#6393)
|
||||
let succ = self.succ;
|
||||
let blk = self.drop_block(is_cleanup, succ);
|
||||
let blk = self.drop_block(succ, unwind);
|
||||
self.elaborate_drop(blk);
|
||||
blk
|
||||
}
|
||||
@ -609,17 +822,17 @@ impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D>
|
||||
&mut self,
|
||||
ty: Ty<'tcx>,
|
||||
target: BasicBlock,
|
||||
is_cleanup: bool
|
||||
unwind: Unwind,
|
||||
) -> BasicBlock {
|
||||
let block = self.unelaborated_free_block(ty, target, is_cleanup);
|
||||
self.drop_flag_test_block(is_cleanup, block, target)
|
||||
let block = self.unelaborated_free_block(ty, target, unwind);
|
||||
self.drop_flag_test_block(block, target, unwind)
|
||||
}
|
||||
|
||||
fn unelaborated_free_block<'a>(
|
||||
&mut self,
|
||||
ty: Ty<'tcx>,
|
||||
target: BasicBlock,
|
||||
is_cleanup: bool
|
||||
unwind: Unwind
|
||||
) -> BasicBlock {
|
||||
let tcx = self.tcx();
|
||||
let unit_temp = Lvalue::Local(self.new_temp(tcx.mk_nil()));
|
||||
@ -632,31 +845,31 @@ impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D>
|
||||
destination: Some((unit_temp, target)),
|
||||
cleanup: None
|
||||
}; // FIXME(#6393)
|
||||
let free_block = self.new_block(is_cleanup, call);
|
||||
let free_block = self.new_block(unwind, call);
|
||||
|
||||
let block_start = Location { block: free_block, statement_index: 0 };
|
||||
self.elaborator.clear_drop_flag(block_start, self.path, DropFlagMode::Shallow);
|
||||
free_block
|
||||
}
|
||||
|
||||
fn drop_block<'a>(&mut self, is_cleanup: bool, succ: BasicBlock) -> BasicBlock {
|
||||
fn drop_block<'a>(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock {
|
||||
let block = TerminatorKind::Drop {
|
||||
location: self.lvalue.clone(),
|
||||
target: succ,
|
||||
unwind: if is_cleanup { None } else { self.unwind }
|
||||
target: target,
|
||||
unwind: unwind.into_option()
|
||||
};
|
||||
self.new_block(is_cleanup, block)
|
||||
self.new_block(unwind, block)
|
||||
}
|
||||
|
||||
fn drop_flag_test_block(&mut self,
|
||||
is_cleanup: bool,
|
||||
on_set: BasicBlock,
|
||||
on_unset: BasicBlock)
|
||||
on_unset: BasicBlock,
|
||||
unwind: Unwind)
|
||||
-> BasicBlock
|
||||
{
|
||||
let style = self.elaborator.drop_style(self.path, DropFlagMode::Shallow);
|
||||
debug!("drop_flag_test_block({:?},{:?},{:?}) - {:?}",
|
||||
self, is_cleanup, on_set, style);
|
||||
debug!("drop_flag_test_block({:?},{:?},{:?},{:?}) - {:?}",
|
||||
self, on_set, on_unset, unwind, style);
|
||||
|
||||
match style {
|
||||
DropStyle::Dead => on_unset,
|
||||
@ -664,13 +877,13 @@ impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D>
|
||||
DropStyle::Conditional | DropStyle::Open => {
|
||||
let flag = self.elaborator.get_drop_flag(self.path).unwrap();
|
||||
let term = TerminatorKind::if_(self.tcx(), flag, on_set, on_unset);
|
||||
self.new_block(is_cleanup, term)
|
||||
self.new_block(unwind, term)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn new_block<'a>(&mut self,
|
||||
is_cleanup: bool,
|
||||
unwind: Unwind,
|
||||
k: TerminatorKind<'tcx>)
|
||||
-> BasicBlock
|
||||
{
|
||||
@ -679,7 +892,7 @@ impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D>
|
||||
terminator: Some(Terminator {
|
||||
source_info: self.source_info, kind: k
|
||||
}),
|
||||
is_cleanup: is_cleanup
|
||||
is_cleanup: unwind.is_cleanup()
|
||||
})
|
||||
}
|
||||
|
||||
@ -691,4 +904,19 @@ impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D>
|
||||
let mir = self.elaborator.mir();
|
||||
self.elaborator.patch().terminator_loc(mir, bb)
|
||||
}
|
||||
|
||||
fn constant_usize(&self, val: u16) -> Operand<'tcx> {
|
||||
Operand::Constant(box Constant {
|
||||
span: self.source_info.span,
|
||||
ty: self.tcx().types.usize,
|
||||
literal: Literal::Value { value: ConstVal::Integral(self.tcx().const_usize(val)) }
|
||||
})
|
||||
}
|
||||
|
||||
fn assign(&self, lhs: &Lvalue<'tcx>, rhs: Rvalue<'tcx>) -> Statement<'tcx> {
|
||||
Statement {
|
||||
source_info: self.source_info,
|
||||
kind: StatementKind::Assign(lhs.clone(), rhs)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -186,7 +186,7 @@ impl<'a, 'tcx> mir_visit::Visitor<'tcx> for StatCollector<'a, 'tcx> {
|
||||
Rvalue::CheckedBinaryOp(..) => "Rvalue::CheckedBinaryOp",
|
||||
Rvalue::UnaryOp(..) => "Rvalue::UnaryOp",
|
||||
Rvalue::Discriminant(..) => "Rvalue::Discriminant",
|
||||
Rvalue::Box(..) => "Rvalue::Box",
|
||||
Rvalue::NullaryOp(..) => "Rvalue::NullaryOp",
|
||||
Rvalue::Aggregate(ref kind, ref _operands) => {
|
||||
// AggregateKind is not distinguished by visit API, so
|
||||
// record it. (`super_rvalue` handles `_operands`.)
|
||||
|
@ -11,7 +11,7 @@
|
||||
use llvm::{self, ValueRef, AttributePlace};
|
||||
use base;
|
||||
use builder::Builder;
|
||||
use common::{type_is_fat_ptr, C_uint};
|
||||
use common::{instance_ty, ty_fn_sig, type_is_fat_ptr, C_uint};
|
||||
use context::CrateContext;
|
||||
use cabi_x86;
|
||||
use cabi_x86_64;
|
||||
@ -610,6 +610,14 @@ pub struct FnType<'tcx> {
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> FnType<'tcx> {
|
||||
pub fn of_instance(ccx: &CrateContext<'a, 'tcx>, instance: &ty::Instance<'tcx>)
|
||||
-> Self {
|
||||
let fn_ty = instance_ty(ccx.shared(), &instance);
|
||||
let sig = ty_fn_sig(ccx, fn_ty);
|
||||
let sig = ccx.tcx().erase_late_bound_regions_and_normalize(&sig);
|
||||
Self::new(ccx, sig, &[])
|
||||
}
|
||||
|
||||
pub fn new(ccx: &CrateContext<'a, 'tcx>,
|
||||
sig: ty::FnSig<'tcx>,
|
||||
extra_args: &[Ty<'tcx>]) -> FnType<'tcx> {
|
||||
@ -631,6 +639,8 @@ impl<'a, 'tcx> FnType<'tcx> {
|
||||
pub fn unadjusted(ccx: &CrateContext<'a, 'tcx>,
|
||||
sig: ty::FnSig<'tcx>,
|
||||
extra_args: &[Ty<'tcx>]) -> FnType<'tcx> {
|
||||
debug!("FnType::unadjusted({:?}, {:?})", sig, extra_args);
|
||||
|
||||
use self::Abi::*;
|
||||
let cconv = match ccx.sess().target.target.adjust_abi(sig.abi) {
|
||||
RustIntrinsic | PlatformIntrinsic |
|
||||
|
@ -502,7 +502,7 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> {
|
||||
_ => bug!(),
|
||||
}
|
||||
}
|
||||
mir::Rvalue::Box(..) => {
|
||||
mir::Rvalue::NullaryOp(mir::NullOp::Box, _) => {
|
||||
let tcx = self.scx.tcx();
|
||||
let exchange_malloc_fn_def_id = tcx
|
||||
.lang_items
|
||||
@ -612,17 +612,7 @@ fn visit_instance_use<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
|
||||
output.push(create_fn_trans_item(instance));
|
||||
}
|
||||
}
|
||||
ty::InstanceDef::DropGlue(_, Some(ty)) => {
|
||||
match ty.sty {
|
||||
ty::TyArray(ety, _) |
|
||||
ty::TySlice(ety)
|
||||
if is_direct_call =>
|
||||
{
|
||||
// drop of arrays/slices is translated in-line.
|
||||
visit_drop_use(scx, ety, false, output);
|
||||
}
|
||||
_ => {}
|
||||
};
|
||||
ty::InstanceDef::DropGlue(_, Some(_)) => {
|
||||
output.push(create_fn_trans_item(instance));
|
||||
}
|
||||
ty::InstanceDef::ClosureOnceShim { .. } |
|
||||
|
@ -191,15 +191,6 @@ impl Funclet {
|
||||
}
|
||||
}
|
||||
|
||||
impl Clone for Funclet {
|
||||
fn clone(&self) -> Funclet {
|
||||
Funclet {
|
||||
cleanuppad: self.cleanuppad,
|
||||
operand: OperandBundleDef::new("funclet", &[self.cleanuppad]),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn val_ty(v: ValueRef) -> Type {
|
||||
unsafe {
|
||||
Type::from_ref(llvm::LLVMTypeOf(v))
|
||||
|
@ -76,6 +76,7 @@ pub fn size_and_align_of_dst<'a, 'tcx>(bcx: &Builder<'a, 'tcx>, t: Ty<'tcx>, inf
|
||||
let align = C_uint(bcx.ccx, align);
|
||||
return (size, align);
|
||||
}
|
||||
assert!(!info.is_null());
|
||||
match t.sty {
|
||||
ty::TyAdt(def, substs) => {
|
||||
let ccx = bcx.ccx;
|
||||
|
@ -197,6 +197,16 @@ pub enum CleanupKind {
|
||||
Internal { funclet: mir::BasicBlock }
|
||||
}
|
||||
|
||||
impl CleanupKind {
|
||||
pub fn funclet_bb(self, for_bb: mir::BasicBlock) -> Option<mir::BasicBlock> {
|
||||
match self {
|
||||
CleanupKind::NotCleanup => None,
|
||||
CleanupKind::Funclet => Some(for_bb),
|
||||
CleanupKind::Internal { funclet } => Some(funclet),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn cleanup_kinds<'a, 'tcx>(mir: &mir::Mir<'tcx>) -> IndexVec<mir::BasicBlock, CleanupKind> {
|
||||
fn discover_masters<'tcx>(result: &mut IndexVec<mir::BasicBlock, CleanupKind>,
|
||||
mir: &mir::Mir<'tcx>) {
|
||||
@ -260,7 +270,9 @@ pub fn cleanup_kinds<'a, 'tcx>(mir: &mir::Mir<'tcx>) -> IndexVec<mir::BasicBlock
|
||||
result[succ] = CleanupKind::Internal { funclet: funclet };
|
||||
}
|
||||
CleanupKind::Funclet => {
|
||||
set_successor(funclet, succ);
|
||||
if funclet != succ {
|
||||
set_successor(funclet, succ);
|
||||
}
|
||||
}
|
||||
CleanupKind::Internal { funclet: succ_funclet } => {
|
||||
if funclet != succ_funclet {
|
||||
|
@ -19,100 +19,147 @@ use adt;
use base::{self, Lifetime};
use callee;
use builder::Builder;
use common::{self, Funclet};
use common::{C_bool, C_str_slice, C_struct, C_u32, C_uint, C_undef};
use common::{self, C_bool, C_str_slice, C_struct, C_u32, C_undef};
use consts;
use machine::llalign_of_min;
use meth;
use monomorphize;
use type_of;
use tvec;
use type_::Type;

use rustc_data_structures::indexed_vec::IndexVec;
use syntax::symbol::Symbol;

use std::cmp;

use super::{MirContext, LocalRef};
use super::analyze::CleanupKind;
use super::constant::Const;
use super::lvalue::{Alignment, LvalueRef};
use super::operand::OperandRef;
use super::operand::OperandValue::{Pair, Ref, Immediate};

impl<'a, 'tcx> MirContext<'a, 'tcx> {
pub fn trans_block(&mut self, bb: mir::BasicBlock,
funclets: &IndexVec<mir::BasicBlock, Option<Funclet>>) {
pub fn trans_block(&mut self, bb: mir::BasicBlock) {
let mut bcx = self.get_builder(bb);
let data = &self.mir[bb];

debug!("trans_block({:?}={:?})", bb, data);

let funclet = match self.cleanup_kinds[bb] {
CleanupKind::Internal { funclet } => funclets[funclet].as_ref(),
_ => funclets[bb].as_ref(),
};
for statement in &data.statements {
bcx = self.trans_statement(bcx, statement);
}

self.trans_terminator(bcx, bb, data.terminator());
}

fn trans_terminator(&mut self,
mut bcx: Builder<'a, 'tcx>,
bb: mir::BasicBlock,
terminator: &mir::Terminator<'tcx>)
{
debug!("trans_terminator: {:?}", terminator);

// Create the cleanup bundle, if needed.
let tcx = bcx.tcx();
let span = terminator.source_info.span;
let funclet_bb = self.cleanup_kinds[bb].funclet_bb(bb);
let funclet = funclet_bb.and_then(|funclet_bb| self.funclets[funclet_bb].as_ref());

let cleanup_pad = funclet.map(|lp| lp.cleanuppad());
let cleanup_bundle = funclet.map(|l| l.bundle());

let funclet_br = |this: &Self, bcx: Builder, bb: mir::BasicBlock| {
let lltarget = this.blocks[bb];
if let Some(cp) = cleanup_pad {
match this.cleanup_kinds[bb] {
CleanupKind::Funclet => {
// micro-optimization: generate a `ret` rather than a jump
// to a return block
bcx.cleanup_ret(cp, Some(lltarget));
}
CleanupKind::Internal { .. } => bcx.br(lltarget),
CleanupKind::NotCleanup => bug!("jump from cleanup bb to bb {:?}", bb)
let lltarget = |this: &mut Self, target: mir::BasicBlock| {
let lltarget = this.blocks[target];
let target_funclet = this.cleanup_kinds[target].funclet_bb(target);
match (funclet_bb, target_funclet) {
(None, None) => (lltarget, false),
(Some(f), Some(t_f))
if f == t_f || !base::wants_msvc_seh(tcx.sess)
=> (lltarget, false),
(None, Some(_)) => {
// jump *into* cleanup - need a landing pad if GNU
(this.landing_pad_to(target), false)
}
(Some(_), None) => span_bug!(span, "{:?} - jump out of cleanup?", terminator),
(Some(_), Some(_)) => {
(this.landing_pad_to(target), true)
}
}
};

let llblock = |this: &mut Self, target: mir::BasicBlock| {
let (lltarget, is_cleanupret) = lltarget(this, target);
if is_cleanupret {
// MSVC cross-funclet jump - need a trampoline

debug!("llblock: creating cleanup trampoline for {:?}", target);
let name = &format!("{:?}_cleanup_trampoline_{:?}", bb, target);
let trampoline = this.new_block(name);
trampoline.cleanup_ret(cleanup_pad.unwrap(), Some(lltarget));
trampoline.llbb()
} else {
lltarget
}
};

let funclet_br = |this: &mut Self, bcx: Builder, target: mir::BasicBlock| {
let (lltarget, is_cleanupret) = lltarget(this, target);
if is_cleanupret {
// micro-optimization: generate a `ret` rather than a jump
// to a trampoline.
bcx.cleanup_ret(cleanup_pad.unwrap(), Some(lltarget));
} else {
bcx.br(lltarget);
}
};

let llblock = |this: &mut Self, target: mir::BasicBlock| {
let lltarget = this.blocks[target];
let do_call = |
this: &mut Self,
bcx: Builder<'a, 'tcx>,
fn_ty: FnType<'tcx>,
fn_ptr: ValueRef,
llargs: &[ValueRef],
destination: Option<(ReturnDest, ty::Ty<'tcx>, mir::BasicBlock)>,
cleanup: Option<mir::BasicBlock>
| {
if let Some(cleanup) = cleanup {
let ret_bcx = if let Some((_, _, target)) = destination {
this.blocks[target]
} else {
this.unreachable_block()
};
let invokeret = bcx.invoke(fn_ptr,
&llargs,
ret_bcx,
llblock(this, cleanup),
cleanup_bundle);
fn_ty.apply_attrs_callsite(invokeret);

if let Some(cp) = cleanup_pad {
match this.cleanup_kinds[target] {
CleanupKind::Funclet => {
// MSVC cross-funclet jump - need a trampoline

debug!("llblock: creating cleanup trampoline for {:?}", target);
let name = &format!("{:?}_cleanup_trampoline_{:?}", bb, target);
let trampoline = this.new_block(name);
trampoline.cleanup_ret(cp, Some(lltarget));
trampoline.llbb()
}
CleanupKind::Internal { .. } => lltarget,
CleanupKind::NotCleanup =>
bug!("jump from cleanup bb {:?} to bb {:?}", bb, target)
if let Some((ret_dest, ret_ty, target)) = destination {
let ret_bcx = this.get_builder(target);
this.set_debug_loc(&ret_bcx, terminator.source_info);
let op = OperandRef {
val: Immediate(invokeret),
ty: ret_ty,
};
this.store_return(&ret_bcx, ret_dest, &fn_ty.ret, op);
}
} else {
if let (CleanupKind::NotCleanup, CleanupKind::Funclet) =
(this.cleanup_kinds[bb], this.cleanup_kinds[target])
{
// jump *into* cleanup - need a landing pad if GNU
this.landing_pad_to(target)
let llret = bcx.call(fn_ptr, &llargs, cleanup_bundle);
fn_ty.apply_attrs_callsite(llret);

if let Some((ret_dest, ret_ty, target)) = destination {
let op = OperandRef {
val: Immediate(llret),
ty: ret_ty,
};
this.store_return(&bcx, ret_dest, &fn_ty.ret, op);
funclet_br(this, bcx, target);
} else {
lltarget
bcx.unreachable();
}
}
};

for statement in &data.statements {
bcx = self.trans_statement(bcx, statement);
}

let terminator = data.terminator();
debug!("trans_block: terminator: {:?}", terminator);

let span = terminator.source_info.span;
self.set_debug_loc(&bcx, terminator.source_info);
match terminator.kind {
mir::TerminatorKind::Resume => {
@ -219,52 +266,16 @@ impl<'a, 'tcx> MirContext<'a, 'tcx> {
}

let lvalue = self.trans_lvalue(&bcx, location);
let fn_ty = FnType::of_instance(bcx.ccx, &drop_fn);
let (drop_fn, need_extra) = match ty.sty {
ty::TyDynamic(..) => (meth::DESTRUCTOR.get_fn(&bcx, lvalue.llextra),
false),
ty::TyArray(ety, _) | ty::TySlice(ety) => {
// FIXME: handle panics
let drop_fn = monomorphize::resolve_drop_in_place(
bcx.ccx.shared(), ety);
let drop_fn = callee::get_fn(bcx.ccx, drop_fn);
let bcx = tvec::slice_for_each(
&bcx,
lvalue.project_index(&bcx, C_uint(bcx.ccx, 0u64)),
ety,
lvalue.len(bcx.ccx),
|bcx, llval, loop_bb| {
self.set_debug_loc(&bcx, terminator.source_info);
if let Some(unwind) = unwind {
bcx.invoke(
drop_fn,
&[llval],
loop_bb,
llblock(self, unwind),
cleanup_bundle
);
} else {
bcx.call(drop_fn, &[llval], cleanup_bundle);
bcx.br(loop_bb);
}
});
funclet_br(self, bcx, target);
return
}
_ => (callee::get_fn(bcx.ccx, drop_fn), lvalue.has_extra())
};
let args = &[lvalue.llval, lvalue.llextra][..1 + need_extra as usize];
if let Some(unwind) = unwind {
bcx.invoke(
drop_fn,
args,
self.blocks[target],
llblock(self, unwind),
cleanup_bundle
);
} else {
bcx.call(drop_fn, args, cleanup_bundle);
funclet_br(self, bcx, target);
}
do_call(self, bcx, fn_ty, drop_fn, args,
Some((ReturnDest::Nothing, tcx.mk_nil(), target)),
unwind);
}

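The TyArray/TySlice arm above is the heart of the change: dropping an array or slice now walks the elements with tvec::slice_for_each and invokes the element's drop-in-place function once per element. A rough surface-Rust sketch of what that per-element loop amounts to (illustrative only; the function and its name are not part of the patch):

    unsafe fn drop_array_in_place<T>(base: *mut T, len: usize) {
        let mut i = 0;
        while i < len {
            // advance to element i and run its drop glue in place
            std::ptr::drop_in_place(base.offset(i as isize));
            i += 1;
        }
    }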
mir::TerminatorKind::Assert { ref cond, expected, ref msg, target, cleanup } => {
@ -371,26 +382,18 @@ impl<'a, 'tcx> MirContext<'a, 'tcx> {
// Obtain the panic entry point.
let def_id = common::langcall(bcx.tcx(), Some(span), "", lang_item);
let instance = ty::Instance::mono(bcx.tcx(), def_id);
let fn_ty = FnType::of_instance(bcx.ccx, &instance);
let llfn = callee::get_fn(bcx.ccx, instance);

// Translate the actual panic invoke/call.
if let Some(unwind) = cleanup {
bcx.invoke(llfn,
&args,
self.unreachable_block(),
llblock(self, unwind),
cleanup_bundle);
} else {
bcx.call(llfn, &args, cleanup_bundle);
bcx.unreachable();
}
do_call(self, bcx, fn_ty, llfn, &args, None, cleanup);
}

mir::TerminatorKind::DropAndReplace { .. } => {
bug!("undesugared DropAndReplace in trans: {:?}", data);
bug!("undesugared DropAndReplace in trans: {:?}", terminator);
}

mir::TerminatorKind::Call { ref func, ref args, ref destination, ref cleanup } => {
mir::TerminatorKind::Call { ref func, ref args, ref destination, cleanup } => {
// Create the callee. This is a fn ptr or zero-sized and hence a kind of scalar.
let callee = self.trans_operand(&bcx, func);

@ -543,43 +546,9 @@ impl<'a, 'tcx> MirContext<'a, 'tcx> {
_ => span_bug!(span, "no llfn for call"),
};

// Many different ways to call a function handled here
if let &Some(cleanup) = cleanup {
let ret_bcx = if let Some((_, target)) = *destination {
self.blocks[target]
} else {
self.unreachable_block()
};
let invokeret = bcx.invoke(fn_ptr,
&llargs,
ret_bcx,
llblock(self, cleanup),
cleanup_bundle);
fn_ty.apply_attrs_callsite(invokeret);

if let Some((_, target)) = *destination {
let ret_bcx = self.get_builder(target);
self.set_debug_loc(&ret_bcx, terminator.source_info);
let op = OperandRef {
val: Immediate(invokeret),
ty: sig.output(),
};
self.store_return(&ret_bcx, ret_dest, &fn_ty.ret, op);
}
} else {
let llret = bcx.call(fn_ptr, &llargs, cleanup_bundle);
fn_ty.apply_attrs_callsite(llret);
if let Some((_, target)) = *destination {
let op = OperandRef {
val: Immediate(llret),
ty: sig.output(),
};
self.store_return(&bcx, ret_dest, &fn_ty.ret, op);
funclet_br(self, bcx, target);
} else {
bcx.unreachable();
}
}
do_call(self, bcx, fn_ty, fn_ptr, &llargs,
destination.as_ref().map(|&(_, target)| (ret_dest, sig.output(), target)),
cleanup);
}
}
}
@ -774,7 +743,7 @@ impl<'a, 'tcx> MirContext<'a, 'tcx> {

fn landing_pad_uncached(&mut self, target_bb: BasicBlockRef) -> BasicBlockRef {
if base::wants_msvc_seh(self.ccx.sess()) {
return target_bb;
span_bug!(self.mir.span, "landing pad was not inserted?")
}

let bcx = self.new_block("cleanup");
@ -796,6 +796,12 @@ impl<'a, 'tcx> MirConstContext<'a, 'tcx> {
Const::new(llval, operand.ty)
}

mir::Rvalue::NullaryOp(mir::NullOp::SizeOf, ty) => {
assert!(self.ccx.shared().type_is_sized(ty));
let llval = C_uint(self.ccx, self.ccx.size_of(ty));
Const::new(llval, tcx.types.usize)
}

_ => span_bug!(span, "{:?} in constant", rvalue)
};

@ -870,6 +876,7 @@ pub fn const_scalar_binop(op: mir::BinOp,
llvm::LLVMConstICmp(cmp, lhs, rhs)
}
}
mir::BinOp::Offset => unreachable!("BinOp::Offset in const-eval!")
}
}
}
@ -69,6 +69,10 @@ pub struct MirContext<'a, 'tcx:'a> {
/// The funclet status of each basic block
cleanup_kinds: IndexVec<mir::BasicBlock, analyze::CleanupKind>,

/// When targeting MSVC, this stores the cleanup info for each funclet
/// BB. This is initialized as we compute the funclets' head block in RPO.
funclets: &'a IndexVec<mir::BasicBlock, Option<Funclet>>,

/// This stores the landing-pad block for a given BB, computed lazily on GNU
/// and eagerly on MSVC.
landing_pads: IndexVec<mir::BasicBlock, Option<BasicBlockRef>>,
@ -202,8 +206,11 @@ pub fn trans_mir<'a, 'tcx: 'a>(
debuginfo::create_function_debug_context(ccx, instance, sig, llfn, mir);
let bcx = Builder::new_block(ccx, llfn, "start");

let cleanup_kinds = analyze::cleanup_kinds(&mir);
if mir.basic_blocks().iter().any(|bb| bb.is_cleanup) {
bcx.set_personality_fn(ccx.eh_personality());
}

let cleanup_kinds = analyze::cleanup_kinds(&mir);
// Allocate a `Block` for every basic block, except
// the start block, if nothing loops back to it.
let reentrant_start_block = !mir.predecessors_for(mir::START_BLOCK).is_empty();
@ -218,6 +225,7 @@ pub fn trans_mir<'a, 'tcx: 'a>(

// Compute debuginfo scopes from MIR scopes.
let scopes = debuginfo::create_mir_scopes(ccx, mir, &debug_context);
let (landing_pads, funclets) = create_funclets(&bcx, &cleanup_kinds, &block_bcxs);

let mut mircx = MirContext {
mir: mir,
@ -228,7 +236,8 @@ pub fn trans_mir<'a, 'tcx: 'a>(
blocks: block_bcxs,
unreachable_block: None,
cleanup_kinds: cleanup_kinds,
landing_pads: IndexVec::from_elem(None, mir.basic_blocks()),
landing_pads: landing_pads,
funclets: &funclets,
scopes: scopes,
locals: IndexVec::new(),
debug_context: debug_context,
@ -306,28 +315,13 @@ pub fn trans_mir<'a, 'tcx: 'a>(
// emitting should be enabled.
debuginfo::start_emitting_source_locations(&mircx.debug_context);

let funclets: IndexVec<mir::BasicBlock, Option<Funclet>> =
mircx.cleanup_kinds.iter_enumerated().map(|(bb, cleanup_kind)| {
if let CleanupKind::Funclet = *cleanup_kind {
let bcx = mircx.get_builder(bb);
unsafe {
llvm::LLVMSetPersonalityFn(mircx.llfn, mircx.ccx.eh_personality());
}
if base::wants_msvc_seh(ccx.sess()) {
return Some(Funclet::new(bcx.cleanup_pad(None, &[])));
}
}

None
}).collect();

let rpo = traversal::reverse_postorder(&mir);
let mut visited = BitVector::new(mir.basic_blocks().len());

// Translate the body of each block using reverse postorder
for (bb, _) in rpo {
visited.insert(bb.index());
mircx.trans_block(bb, &funclets);
mircx.trans_block(bb);
}

// Remove blocks that haven't been visited, or have no
@ -343,6 +337,26 @@ pub fn trans_mir<'a, 'tcx: 'a>(
}
}

fn create_funclets<'a, 'tcx>(
bcx: &Builder<'a, 'tcx>,
cleanup_kinds: &IndexVec<mir::BasicBlock, CleanupKind>,
block_bcxs: &IndexVec<mir::BasicBlock, BasicBlockRef>)
-> (IndexVec<mir::BasicBlock, Option<BasicBlockRef>>,
IndexVec<mir::BasicBlock, Option<Funclet>>)
{
block_bcxs.iter_enumerated().zip(cleanup_kinds).map(|((bb, &llbb), cleanup_kind)| {
match *cleanup_kind {
CleanupKind::Funclet if base::wants_msvc_seh(bcx.sess()) => {
let cleanup_bcx = bcx.build_sibling_block(&format!("funclet_{:?}", bb));
let cleanup = cleanup_bcx.cleanup_pad(None, &[]);
cleanup_bcx.br(llbb);
(Some(cleanup_bcx.llbb()), Some(Funclet::new(cleanup)))
}
_ => (None, None)
}
}).unzip()
}

/// Produce, for each argument, a `ValueRef` pointing at the
/// argument's value. As arguments are lvalues, these are always
/// indirect.
@ -114,7 +114,7 @@ impl<'a, 'tcx> OperandRef<'tcx> {

pub fn deref(self) -> LvalueRef<'tcx> {
let projected_ty = self.ty.builtin_deref(true, ty::NoPreference)
.unwrap().ty;
.unwrap_or_else(|| bug!("deref of non-pointer {:?}", self)).ty;
let (llptr, llextra) = match self.val {
OperandValue::Immediate(llptr) => (llptr, ptr::null_mut()),
OperandValue::Pair(llptr, llextra) => (llptr, llextra),
@ -432,7 +432,17 @@ impl<'a, 'tcx> MirContext<'a, 'tcx> {
})
}

mir::Rvalue::Box(content_ty) => {
mir::Rvalue::NullaryOp(mir::NullOp::SizeOf, ty) => {
assert!(bcx.ccx.shared().type_is_sized(ty));
let val = C_uint(bcx.ccx, bcx.ccx.size_of(ty));
let tcx = bcx.tcx();
(bcx, OperandRef {
val: OperandValue::Immediate(val),
ty: tcx.types.usize,
})
}

mir::Rvalue::NullaryOp(mir::NullOp::Box, content_ty) => {
let content_ty: Ty<'tcx> = self.monomorphize(&content_ty);
let llty = type_of::type_of(bcx.ccx, content_ty);
let llsize = machine::llsize_of(bcx.ccx, llty);
@ -515,6 +525,7 @@ impl<'a, 'tcx> MirContext<'a, 'tcx> {
mir::BinOp::BitOr => bcx.or(lhs, rhs),
mir::BinOp::BitAnd => bcx.and(lhs, rhs),
mir::BinOp::BitXor => bcx.xor(lhs, rhs),
mir::BinOp::Offset => bcx.inbounds_gep(lhs, &[rhs]),
mir::BinOp::Shl => common::build_unchecked_lshift(bcx, lhs, rhs),
mir::BinOp::Shr => common::build_unchecked_rshift(bcx, input_ty, lhs, rhs),
mir::BinOp::Ne | mir::BinOp::Lt | mir::BinOp::Gt |
@ -660,7 +671,7 @@ impl<'a, 'tcx> MirContext<'a, 'tcx> {
mir::Rvalue::CheckedBinaryOp(..) |
mir::Rvalue::UnaryOp(..) |
mir::Rvalue::Discriminant(..) |
mir::Rvalue::Box(..) |
mir::Rvalue::NullaryOp(..) |
mir::Rvalue::Use(..) => // (*)
true,
mir::Rvalue::Repeat(..) |
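For orientation, the two operations this file starts handling correspond to familiar surface constructs; a hedged illustration (the helper below is invented for this example, not part of the patch):

    fn size_and_third<T>(base: *const T) -> (usize, *const T) {
        // Rvalue::NullaryOp(NullOp::SizeOf, T) evaluates to this usize
        let size = std::mem::size_of::<T>();
        // BinOp::Offset is raw-pointer offsetting; above it is lowered to an inbounds GEP
        let third = unsafe { base.offset(2) };
        (size, third)
    }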
@ -100,6 +100,7 @@ pub fn provide(providers: &mut Providers) {
impl_trait_ref,
impl_polarity,
is_foreign_item,
is_default_impl,
..*providers
};
}
@ -1545,3 +1546,14 @@ fn is_foreign_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
_ => bug!("is_foreign_item applied to non-local def-id {:?}", def_id)
}
}

fn is_default_impl<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
def_id: DefId)
-> bool {
match tcx.hir.get_if_local(def_id) {
Some(hir_map::NodeItem(&hir::Item { node: hir::ItemDefaultImpl(..), .. }))
=> true,
Some(_) => false,
_ => bug!("is_default_impl applied to non-local def-id {:?}", def_id)
}
}
@ -290,7 +290,7 @@ pub fn build_impl(cx: &DocContext, did: DefId, ret: &mut Vec<clean::Item>) {
}

// If this is a defaulted impl, then bail out early here
if tcx.sess.cstore.is_default_impl(did) {
if tcx.is_default_impl(did) {
return ret.push(clean::Item {
inner: clean::DefaultImplItem(clean::DefaultImpl {
// FIXME: this should be decoded
@ -781,11 +781,15 @@ extern "C" void LLVMRustWriteTypeToString(LLVMTypeRef Ty, RustStringRef Str) {
extern "C" void LLVMRustWriteValueToString(LLVMValueRef V,
RustStringRef Str) {
RawRustStringOstream OS(Str);
OS << "(";
unwrap<llvm::Value>(V)->getType()->print(OS);
OS << ":";
unwrap<llvm::Value>(V)->print(OS);
OS << ")";
if (!V) {
OS << "(null)";
} else {
OS << "(";
unwrap<llvm::Value>(V)->getType()->print(OS);
OS << ":";
unwrap<llvm::Value>(V)->print(OS);
OS << ")";
}
}

extern "C" bool LLVMRustLinkInExternalBitcode(LLVMModuleRef DstRef, char *BC,
186 src/test/mir-opt/issue-41888.rs Normal file
@ -0,0 +1,186 @@
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

// check that we clear the "ADT master drop flag" even when there are
// no fields to be dropped.

fn main() {
let e;
if cond() {
e = E::F(K);
if let E::F(_k) = e {
// older versions of rustc used to not clear the
// drop flag for `e` in this path.
}
}
}

fn cond() -> bool { false }

struct K;

enum E {
F(K),
G(Box<E>)
}

// END RUST SOURCE
// fn main() -> () {
// let mut _0: ();
// scope 1 {
// let _1: E; // `e`
// scope 2 {
// let _6: K;
// }
// }
// let mut _2: bool;
// let mut _3: ();
// let mut _4: E;
// let mut _5: K;
// let mut _7: isize;
// let mut _8: bool; // drop flag for `e`
// let mut _9: bool;
// let mut _10: bool;
// let mut _11: isize;
// let mut _12: isize;
//
// bb0: {
// _8 = const false;
// _10 = const false;
// _9 = const false;
// StorageLive(_1);
// StorageLive(_2);
// _2 = const cond() -> [return: bb3, unwind: bb2];
// }
//
// bb1: {
// resume;
// }
//
// bb2: {
// goto -> bb1;
// }
//
// bb3: {
// switchInt(_2) -> [0u8: bb5, otherwise: bb4];
// }
//
// bb4: {
// StorageLive(_4);
// StorageLive(_5);
// _5 = K::{{constructor}};
// _4 = E::F(_5,);
// StorageDead(_5);
// goto -> bb15;
// }
//
// bb5: {
// _0 = ();
// goto -> bb12;
// }
//
// bb6: {
// goto -> bb2;
// }
//
// bb7: {
// goto -> bb8;
// }
//
// bb8: {
// StorageDead(_4);
// _7 = discriminant(_1);
// switchInt(_7) -> [0isize: bb10, otherwise: bb9];
// }
//
// bb9: {
// _0 = ();
// goto -> bb11;
// }
//
// bb10: {
// StorageLive(_6);
// _10 = const false;
// _6 = ((_1 as F).0: K);
// _0 = ();
// goto -> bb11;
// }
//
// bb11: {
// StorageDead(_6);
// goto -> bb12;
// }
//
// bb12: {
// StorageDead(_2);
// goto -> bb22;
// }
//
// bb13: {
// StorageDead(_1);
// return;
// }
//
// bb14: {
// _8 = const true;
// _9 = const true;
// _10 = const true;
// _1 = _4;
// goto -> bb6;
// }
//
// bb15: {
// _8 = const true;
// _9 = const true;
// _10 = const true;
// _1 = _4;
// goto -> bb7;
// }
//
// bb16: {
// _8 = const false; // clear the drop flag - must always be reached
// goto -> bb13;
// }
//
// bb17: {
// _8 = const false;
// goto -> bb1;
// }
//
// bb18: {
// goto -> bb17;
// }
//
// bb19: {
// drop(_1) -> [return: bb16, unwind: bb17];
// }
//
// bb20: {
// drop(_1) -> bb17;
// }
//
// bb21: {
// _11 = discriminant(_1);
// switchInt(_11) -> [0isize: bb16, otherwise: bb19];
// }
//
// bb22: {
// switchInt(_8) -> [0u8: bb16, otherwise: bb21];
// }
//
// bb23: {
// _12 = discriminant(_1);
// switchInt(_12) -> [0isize: bb18, otherwise: bb20];
// }
//
// bb24: {
// switchInt(_8) -> [0u8: bb17, otherwise: bb23];
// }
// }
@ -90,6 +90,22 @@ fn dynamic_drop(a: &Allocator, c: bool) {
};
}

struct TwoPtrs<'a>(Ptr<'a>, Ptr<'a>);
fn struct_dynamic_drop(a: &Allocator, c0: bool, c1: bool, c: bool) {
for i in 0..2 {
let x;
let y;
if (c0 && i == 0) || (c1 && i == 1) {
x = (a.alloc(), a.alloc(), a.alloc());
y = TwoPtrs(a.alloc(), a.alloc());
if c {
drop(x.1);
drop(y.0);
}
}
}
}

fn assignment2(a: &Allocator, c0: bool, c1: bool) {
let mut _v = a.alloc();
let mut _w = a.alloc();
@ -125,6 +141,14 @@ fn union1(a: &Allocator) {
}
}

fn array_simple(a: &Allocator) {
let _x = [a.alloc(), a.alloc(), a.alloc(), a.alloc()];
}

fn vec_simple(a: &Allocator) {
let _x = vec![a.alloc(), a.alloc(), a.alloc(), a.alloc()];
}

fn run_test<F>(mut f: F)
where F: FnMut(&Allocator)
{
@ -171,5 +195,17 @@ fn main() {
run_test(|a| assignment1(a, false));
run_test(|a| assignment1(a, true));

run_test(|a| array_simple(a));
run_test(|a| vec_simple(a));

run_test(|a| struct_dynamic_drop(a, false, false, false));
run_test(|a| struct_dynamic_drop(a, false, false, true));
run_test(|a| struct_dynamic_drop(a, false, true, false));
run_test(|a| struct_dynamic_drop(a, false, true, true));
run_test(|a| struct_dynamic_drop(a, true, false, false));
run_test(|a| struct_dynamic_drop(a, true, false, true));
run_test(|a| struct_dynamic_drop(a, true, true, false));
run_test(|a| struct_dynamic_drop(a, true, true, true));

run_test_nopanic(|a| union1(a));
}
43 src/test/run-pass/issue-41888.rs Normal file
@ -0,0 +1,43 @@
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

fn main() { let _ = g(Some(E::F(K))); }

type R = Result<(), ()>;
struct K;

enum E {
F(K), // must not be built-in type
#[allow(dead_code)]
G(Box<E>, Box<E>),
}

fn translate(x: R) -> R { x }

fn g(mut status: Option<E>) -> R {
loop {
match status {
Some(infix_or_postfix) => match infix_or_postfix {
E::F(_op) => { // <- must be captured by value
match Ok(()) {
Err(err) => return Err(err),
Ok(_) => {},
};
}
_ => (),
},
_ => match translate(Err(())) {
Err(err) => return Err(err),
Ok(_) => {},
}
}
status = None;
}
}