Add EndRegion statement kind to MIR.

 * Emit `EndRegion` for every code-extent for which we observe a
   borrow. To do this, we needed to thread source info back through
   to `fn in_scope`, which makes this commit a bit more painful than
   one might have expected. (A sketch of the threading appears after
   this list.)

 * There is `end_region` emission in `Builder::pop_scope` and in
   `Builder::exit_scope`; the first handles falling out of a scope
   normally, the second handles e.g. `break` (also sketched below).

 * Remove `EndRegion` statements during the `erase_regions` MIR
   transformation (sketched below).

 * Preallocate the terminator block, and throw an `Unreachable` marker
   on it from the outset. Then overwrite that terminator on demand,
   once the real terminator is known (sketched below).

 * Instead of marking the scope as needs_cleanup after seeing a
   borrow, just treat every scope in the chain as being part of the
   diverge_block (after any *one* of them has separately signalled
   that it needs cleanup, e.g. due to having a destructor to run).
   This is also sketched below.

 * Allow for resume terminators to be patched when looking up drop flags.

   (In particular, `MirPatch::new` has an explicit code path,
   presumably previously unreachable, that patches up such resume
   terminators; see the last sketch below.)

 * Make `Scope` implement the `Debug` trait.

 * Expanded a stray comment: we do not emit StorageDead on diverging
   paths, but that resulting behavior might not be desirable.
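
The sketches below are illustrative only: `CodeExtent`, `SourceInfo`, `Statement`, and the other types they define are tiny stand-ins invented for the examples, not the real rustc definitions, and the function shapes are reconstructions of the idea rather than the actual compiler APIs.

First, the source-info threading. In the diff below, the scope helpers now receive the extent paired with a `SourceInfo` (hence the `extent.1` passed to `push_end_region`), so the `EndRegion` emitted when a scope is left can carry a meaningful span. A minimal sketch of that shape, under the toy types above:

```rust
// Toy stand-ins; the real CodeExtent/SourceInfo/Statement live in librustc.
#[derive(Clone, Copy, Debug)]
struct CodeExtent(usize);

#[derive(Clone, Copy, Debug)]
struct SourceInfo {
    span: (u32, u32), // stand-in for a real Span
}

#[derive(Debug)]
enum StatementKind {
    EndRegion(CodeExtent),
}

#[derive(Debug)]
struct Statement {
    source_info: SourceInfo,
    kind: StatementKind,
}

// Hypothetical shape of the change: the extent arrives paired with the
// SourceInfo of the originating node, so the EndRegion pushed on the way
// out of the scope has a usable span attached.
fn in_scope<R>(extent: (CodeExtent, SourceInfo), body: impl FnOnce() -> R) -> (R, Statement) {
    let result = body();
    let end_region = Statement {
        source_info: extent.1, // the threaded source info
        kind: StatementKind::EndRegion(extent.0),
    };
    (result, end_region)
}

fn main() {
    let si = SourceInfo { span: (10, 42) };
    let (value, end) = in_scope((CodeExtent(3), si), || 7);
    println!("value = {}, emitted {:?}", value, end);
}
```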
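Second, the two emission sites. Falling out of a scope normally ends just that scope's region, while an early exit such as `break` ends the region of every scope it leaves, innermost first. A rough sketch with toy `Scopes`/`Stmt` types (again, not the real `Builder`/`CFG` API):

```rust
#[derive(Clone, Copy, Debug, PartialEq)]
struct CodeExtent(usize);

#[derive(Debug)]
enum Stmt {
    EndRegion(CodeExtent),
}

struct Scopes {
    stack: Vec<CodeExtent>,
    statements: Vec<Stmt>,
}

impl Scopes {
    // Falling out of a scope normally: end exactly the scope being popped.
    fn pop_scope(&mut self) {
        if let Some(extent) = self.stack.pop() {
            self.statements.push(Stmt::EndRegion(extent));
        }
    }

    // Breaking out of `target`: end every scope out of which we break,
    // innermost first, including `target` itself.
    fn exit_scope(&mut self, target: CodeExtent) {
        while let Some(top) = self.stack.pop() {
            self.statements.push(Stmt::EndRegion(top));
            if top == target {
                break;
            }
        }
    }
}

fn main() {
    let mut s = Scopes {
        stack: vec![CodeExtent(0), CodeExtent(1), CodeExtent(2)],
        statements: vec![],
    };
    s.exit_scope(CodeExtent(1)); // like `break`: leaves extents 2 and 1
    s.pop_scope();               // normal fall-out of extent 0
    println!("{:?}", s.statements);
}
```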
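Third, region erasure. The pass rewrites each `EndRegion` into a `Nop` rather than deleting it outright, so statement indices are unaffected. A toy version of that visit:

```rust
#[derive(Debug, PartialEq)]
enum StatementKind {
    EndRegion(usize), // usize stands in for CodeExtent
    Nop,
    Other,
}

// Mirrors the erase_regions change below: EndRegion becomes Nop in place.
fn erase_end_regions(statements: &mut [StatementKind]) {
    for stmt in statements.iter_mut() {
        if let StatementKind::EndRegion(_) = *stmt {
            *stmt = StatementKind::Nop;
        }
    }
}

fn main() {
    let mut body = vec![
        StatementKind::Other,
        StatementKind::EndRegion(7),
        StatementKind::Other,
    ];
    erase_end_regions(&mut body);
    assert_eq!(body[1], StatementKind::Nop);
    println!("{:?}", body);
}
```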
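Fourth, preallocating the terminator block. The block is created up front with an `Unreachable` terminator as a placeholder, which is overwritten later once the real terminator is known. A sketch with a toy `Cfg`:

```rust
#[derive(Debug)]
enum TerminatorKind {
    Unreachable,
    Resume,
}

struct Cfg {
    terminators: Vec<TerminatorKind>, // one terminator per basic block
}

impl Cfg {
    // Preallocate the block with Unreachable as a placeholder.
    fn start_new_block(&mut self) -> usize {
        self.terminators.push(TerminatorKind::Unreachable);
        self.terminators.len() - 1
    }

    // Overwrite the placeholder on demand.
    fn terminate(&mut self, block: usize, kind: TerminatorKind) {
        self.terminators[block] = kind;
    }
}

fn main() {
    let mut cfg = Cfg { terminators: vec![] };
    let resume_block = cfg.start_new_block();             // placeholder terminator
    cfg.terminate(resume_block, TerminatorKind::Resume);  // filled in later
    println!("{:?}", cfg.terminators);
}
```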
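Fifth, the diverge path. Once any scope in the chain signals that it needs cleanup, every scope participates in the diverging path, so each one gets its `EndRegion` block even if it has no destructor of its own. Sketched with toy `Scope`s:

```rust
#[derive(Debug)]
struct Scope {
    extent: usize,
    needs_cleanup: bool,
}

// Previously the loop filtered on `s.needs_cleanup`; now every scope takes
// part once the diverge path is being built at all.
fn scopes_on_diverge_path(scopes: &[Scope]) -> Vec<usize> {
    if !scopes.iter().any(|s| s.needs_cleanup) {
        return Vec::new(); // no unwind path is needed at all
    }
    scopes.iter().map(|s| s.extent).collect()
}

fn main() {
    let scopes = vec![
        Scope { extent: 0, needs_cleanup: false },
        Scope { extent: 1, needs_cleanup: true }, // has a destructor to run
        Scope { extent: 2, needs_cleanup: false },
    ];
    println!("scopes on diverge path: {:?}", scopes_on_diverge_path(&scopes));
}
```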
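Sixth, the resume patching. `MirPatch::new` distinguishes a `Resume` block that carries statements (now possible, since `EndRegion` statements land in cleanup blocks) from the empty, canonical resume block; the former is remembered so it can later be patched to jump to the latter. A toy version of that scan:

```rust
#[derive(Debug)]
enum TerminatorKind {
    Resume,
    Return,
}

#[derive(Debug)]
struct BasicBlockData {
    statements: Vec<&'static str>, // stand-in for real MIR statements
    terminator: TerminatorKind,
}

fn find_resume_blocks(blocks: &[BasicBlockData]) -> (Option<usize>, Option<usize>) {
    let mut resume_block = None;      // empty Resume block: canonical target
    let mut resume_stmt_block = None; // Resume block with statements: gets patched
    for (bb, block) in blocks.iter().enumerate() {
        if let TerminatorKind::Resume = block.terminator {
            if !block.statements.is_empty() {
                assert!(resume_stmt_block.is_none());
                resume_stmt_block = Some(bb);
            } else {
                resume_block = Some(bb);
            }
        }
    }
    (resume_block, resume_stmt_block)
}

fn main() {
    let blocks = vec![
        BasicBlockData { statements: vec![], terminator: TerminatorKind::Return },
        BasicBlockData { statements: vec!["EndRegion(extent)"], terminator: TerminatorKind::Resume },
    ];
    println!("{:?}", find_resume_blocks(&blocks));
}
```
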
Felix S. Klock II 2017-02-17 13:38:42 +01:00
parent 7c0c4cde80
commit 1d315cf7da
19 changed files with 94 additions and 10 deletions

View File

@ -226,6 +226,9 @@ for mir::StatementKind<'tcx> {
mir::StatementKind::StorageDead(ref lvalue) => {
lvalue.hash_stable(hcx, hasher);
}
mir::StatementKind::EndRegion(ref extents) => {
extents.hash_stable(hcx, hasher);
}
mir::StatementKind::Nop => {}
mir::StatementKind::InlineAsm { ref asm, ref outputs, ref inputs } => {
asm.hash_stable(hcx, hasher);

View File

@ -12,6 +12,7 @@
use graphviz::IntoCow;
use middle::const_val::ConstVal;
use middle::region::CodeExtent;
use rustc_const_math::{ConstUsize, ConstInt, ConstMathErr};
use rustc_data_structures::indexed_vec::{IndexVec, Idx};
use rustc_data_structures::control_flow_graph::dominators::{Dominators, dominators};
@ -804,6 +805,10 @@ pub enum StatementKind<'tcx> {
inputs: Vec<Operand<'tcx>>
},
/// Mark one terminating point of an extent (i.e. static region).
/// (The starting point(s) arise implicitly from borrows.)
EndRegion(CodeExtent),
/// No-op. Useful for deleting instructions without affecting statement indices.
Nop,
}
@ -813,6 +818,8 @@ impl<'tcx> Debug for Statement<'tcx> {
use self::StatementKind::*;
match self.kind {
Assign(ref lv, ref rv) => write!(fmt, "{:?} = {:?}", lv, rv),
// (reuse lifetime rendering policy from ppaux.)
EndRegion(ref ce) => write!(fmt, "EndRegion({})", ty::ReScope(*ce)),
StorageLive(ref lv) => write!(fmt, "StorageLive({:?})", lv),
StorageDead(ref lv) => write!(fmt, "StorageDead({:?})", lv),
SetDiscriminant{lvalue: ref lv, variant_index: index} => {
@ -1472,6 +1479,13 @@ impl<'tcx> TypeFoldable<'tcx> for Statement<'tcx> {
outputs: outputs.fold_with(folder),
inputs: inputs.fold_with(folder)
},
// Note for future: If we want to expose the extents
// during the fold, we need to either generalize EndRegion
// to carry `[ty::Region]`, or extend the `TypeFolder`
// trait with a `fn fold_extent`.
EndRegion(ref extent) => EndRegion(extent.clone()),
Nop => Nop,
};
Statement {
@ -1490,6 +1504,13 @@ impl<'tcx> TypeFoldable<'tcx> for Statement<'tcx> {
StorageDead(ref lvalue) => lvalue.visit_with(visitor),
InlineAsm { ref outputs, ref inputs, .. } =>
outputs.visit_with(visitor) || inputs.visit_with(visitor),
// Note for future: If we want to expose the extents
// during the visit, we need to either generalize EndRegion
// to carry `[ty::Region]`, or extend the `TypeVisitor`
// trait with a `fn visit_extent`.
EndRegion(ref _extent) => false,
Nop => false,
}
}

View File

@ -325,6 +325,7 @@ macro_rules! make_mir_visitor {
ref $($mutability)* rvalue) => {
self.visit_assign(block, lvalue, rvalue, location);
}
StatementKind::EndRegion(_) => {}
StatementKind::SetDiscriminant{ ref $($mutability)* lvalue, .. } => {
self.visit_lvalue(lvalue, LvalueContext::Store, location);
}

View File

@ -474,6 +474,7 @@ impl<'a, 'tcx> BitDenotation for MovingOutStatements<'a, 'tcx> {
mir::StatementKind::StorageLive(_) |
mir::StatementKind::StorageDead(_) |
mir::StatementKind::InlineAsm { .. } |
mir::StatementKind::EndRegion(_) |
mir::StatementKind::Nop => {}
}
}

View File

@ -105,6 +105,7 @@ fn each_block<'a, 'tcx, O>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
mir::StatementKind::StorageLive(_) |
mir::StatementKind::StorageDead(_) |
mir::StatementKind::InlineAsm { .. } |
mir::StatementKind::EndRegion(_) |
mir::StatementKind::Nop => continue,
mir::StatementKind::SetDiscriminant{ .. } =>
span_bug!(stmt.source_info.span,

View File

@ -585,6 +585,11 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
// drop elaboration should handle that by itself
continue
}
TerminatorKind::Resume => {
// We can replace resumes with gotos
// jumping to a canonical resume.
continue
}
TerminatorKind::DropAndReplace { .. } => {
// this contains the move of the source and
// the initialization of the destination. We

View File

@ -413,6 +413,7 @@ impl<'a, 'tcx> MoveDataBuilder<'a, 'tcx> {
"SetDiscriminant should not exist during borrowck");
}
StatementKind::InlineAsm { .. } |
StatementKind::EndRegion(_) |
StatementKind::Nop => {}
}
}

View File

@ -394,6 +394,7 @@ fn drop_flag_effects_for_location<'a, 'tcx, F>(
mir::StatementKind::StorageLive(_) |
mir::StatementKind::StorageDead(_) |
mir::StatementKind::InlineAsm { .. } |
mir::StatementKind::EndRegion(_) |
mir::StatementKind::Nop => {}
},
None => {

View File

@ -71,7 +71,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
let outer_visibility_scope = this.visibility_scope;
let source_info = this.source_info(span);
for stmt in stmts {
let Stmt { span: _, kind, opt_destruction_extent } = this.hir.mirror(stmt);
let Stmt { span, kind, opt_destruction_extent } = this.hir.mirror(stmt);
match kind {
StmtKind::Expr { scope, expr } => {
unpack!(block = this.in_opt_scope(
@ -122,7 +122,6 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
if let Some(expr) = expr {
unpack!(block = this.into(destination, block, expr));
} else {
let source_info = this.source_info(span);
this.cfg.push_assign_unit(block, source_info, destination);
}
// Finally, we pop all the let scopes before exiting out from the scope of block

View File

@ -14,6 +14,7 @@
//! Routines for manipulating the control-flow graph.
use build::CFG;
use rustc::middle::region::CodeExtent;
use rustc::mir::*;
impl<'tcx> CFG<'tcx> {
@ -43,6 +44,16 @@ impl<'tcx> CFG<'tcx> {
self.block_data_mut(block).statements.push(statement);
}
pub fn push_end_region(&mut self,
block: BasicBlock,
source_info: SourceInfo,
extent: CodeExtent) {
self.push(block, Statement {
source_info: source_info,
kind: StatementKind::EndRegion(extent),
});
}
pub fn push_assign(&mut self,
block: BasicBlock,
source_info: SourceInfo,

View File

@ -49,7 +49,6 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
let expr_ty = expr.ty.clone();
let temp = this.temp(expr_ty.clone(), expr_span);
let source_info = this.source_info(expr_span);
if !expr_ty.is_never() && temp_lifetime.is_some() {
this.cfg.push(block, Statement {

View File

@ -94,10 +94,11 @@ use rustc::ty::subst::{Kind, Subst};
use rustc::ty::{Ty, TyCtxt};
use rustc::mir::*;
use rustc::mir::transform::MirSource;
use syntax_pos::Span;
use syntax_pos::{Span};
use rustc_data_structures::indexed_vec::Idx;
use rustc_data_structures::fx::FxHashMap;
#[derive(Debug)]
pub struct Scope<'tcx> {
/// The visibility scope this scope was created in.
visibility_scope: VisibilityScope,
@ -114,7 +115,7 @@ pub struct Scope<'tcx> {
/// * pollutting the cleanup MIR with StorageDead creates
/// landing pads even though there's no actual destructors
/// * freeing up stack space has no effect during unwinding
needs_cleanup: bool,
pub(super) needs_cleanup: bool,
/// set of lvalues to drop when exiting this scope. This starts
/// out empty but grows as variables are declared during the
@ -141,6 +142,7 @@ pub struct Scope<'tcx> {
cached_exits: FxHashMap<(BasicBlock, CodeExtent), BasicBlock>,
}
#[derive(Debug)]
struct DropData<'tcx> {
/// span where drop obligation was incurred (typically where lvalue was declared)
span: Span,
@ -152,6 +154,7 @@ struct DropData<'tcx> {
kind: DropKind
}
#[derive(Debug)]
enum DropKind {
Value {
/// The cached block for the cleanups-on-diverge path. This block
@ -163,6 +166,7 @@ enum DropKind {
Storage
}
#[derive(Debug)]
struct FreeData<'tcx> {
/// span where free obligation was incurred
span: Span,
@ -338,6 +342,8 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
&self.scopes,
block,
self.arg_count));
self.cfg.push_end_region(block, extent.1, scope.extent);
block.unit()
}
@ -379,6 +385,10 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
rest,
block,
self.arg_count));
// End all regions for scopes out of which we are breaking.
self.cfg.push_end_region(block, extent.1, scope.extent);
if let Some(ref free_data) = scope.free {
let next = self.cfg.start_new_block();
let free = build_free(self.hir.tcx(), &tmp, free_data, next);
@ -640,7 +650,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
resumeblk
};
for scope in scopes.iter_mut().filter(|s| s.needs_cleanup) {
for scope in scopes.iter_mut() {
target = build_diverge_scope(hir.tcx(), cfg, &unit_temp, span, scope, target);
}
Some(target)
@ -775,9 +785,9 @@ fn build_diverge_scope<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
// Build up the drops in **reverse** order. The end result will
// look like:
//
// [drops[n]] -...-> [drops[0]] -> [Free] -> [target]
// |                                    |
// +------------------------------------+
// [EndRegion Block] -> [drops[n]] -...-> [drops[0]] -> [Free] -> [target]
// |                                                         |
// +---------------------------------------------------------+
// code for scope
//
// The code in this function reads from right to left. At each
@ -807,9 +817,16 @@ fn build_diverge_scope<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
// Next, build up the drops. Here we iterate the vector in
// *forward* order, so that we generate drops[0] first (right to
// left in diagram above).
for drop_data in &mut scope.drops {
for (j, drop_data) in scope.drops.iter_mut().enumerate() {
debug!("build_diverge_scope drop_data[{}]: {:?}", j, drop_data);
// Only full value drops are emitted in the diverging path,
// not StorageDead.
//
// Note: This may not actually be what we desire (are we
// "freeing" stack storage as we unwind, or merely observing a
// frozen stack)? In particular, the intent may have been to
// match the behavior of clang, but on inspection eddyb says
// this is not what clang does.
let cached_block = match drop_data.kind {
DropKind::Value { ref mut cached_block } => cached_block,
DropKind::Storage => continue
@ -829,6 +846,15 @@ fn build_diverge_scope<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
};
}
// Finally, push the EndRegion block, used by mir-borrowck. (Block
// becomes trivial goto after pass that removes all EndRegions.)
{
let block = cfg.start_new_cleanup_block();
cfg.push_end_region(block, source_info(span), scope.extent);
cfg.terminate(block, source_info(span), TerminatorKind::Goto { target: target });
target = block
}
target
}

View File

@ -65,6 +65,15 @@ impl<'a, 'tcx> MutVisitor<'tcx> for EraseRegionsVisitor<'a, 'tcx> {
substs: &mut ClosureSubsts<'tcx>) {
*substs = self.tcx.erase_regions(substs);
}
fn visit_statement(&mut self,
_block: BasicBlock,
statement: &mut Statement<'tcx>,
_location: Location) {
if let StatementKind::EndRegion(_) = statement.kind {
statement.kind = StatementKind::Nop;
}
}
}
pub struct EraseRegions;

View File

@ -907,6 +907,7 @@ impl<'a, 'tcx> Visitor<'tcx> for Qualifier<'a, 'tcx, 'tcx> {
StatementKind::StorageLive(_) |
StatementKind::StorageDead(_) |
StatementKind::InlineAsm {..} |
StatementKind::EndRegion(_) |
StatementKind::Nop => {}
}
});

View File

@ -413,6 +413,7 @@ impl<'a, 'gcx, 'tcx> TypeChecker<'a, 'gcx, 'tcx> {
}
}
StatementKind::InlineAsm { .. } |
StatementKind::EndRegion(_) |
StatementKind::Nop => {}
}
}

View File

@ -46,6 +46,7 @@ impl<'tcx> MirPatch<'tcx> {
for (bb, block) in mir.basic_blocks().iter_enumerated() {
if let TerminatorKind::Resume = block.terminator().kind {
if block.statements.len() > 0 {
assert!(resume_stmt_block.is_none());
resume_stmt_block = Some(bb);
} else {
resume_block = Some(bb);

View File

@ -125,6 +125,7 @@ impl<'a, 'tcx> mir_visit::Visitor<'tcx> for StatCollector<'a, 'tcx> {
self.record("Statement", statement);
self.record(match statement.kind {
StatementKind::Assign(..) => "StatementKind::Assign",
StatementKind::EndRegion(..) => "StatementKind::EndRegion",
StatementKind::SetDiscriminant { .. } => "StatementKind::SetDiscriminant",
StatementKind::StorageLive(..) => "StatementKind::StorageLive",
StatementKind::StorageDead(..) => "StatementKind::StorageDead",

View File

@ -284,6 +284,7 @@ impl<'a, 'tcx> MirConstContext<'a, 'tcx> {
}
mir::StatementKind::StorageLive(_) |
mir::StatementKind::StorageDead(_) |
mir::StatementKind::EndRegion(_) |
mir::StatementKind::Nop => {}
mir::StatementKind::InlineAsm { .. } |
mir::StatementKind::SetDiscriminant{ .. } => {

View File

@ -86,6 +86,7 @@ impl<'a, 'tcx> MirContext<'a, 'tcx> {
asm::trans_inline_asm(&bcx, asm, outputs, input_vals);
bcx
}
mir::StatementKind::EndRegion(_) |
mir::StatementKind::Nop => bcx,
}
}