Handle break and continue. Change fixpoint computation to handle unreachable nodes.

Eric Holk 2021-11-17 11:39:27 -08:00
parent ff0e8f4ba2
commit c7afaa1686
2 changed files with 137 additions and 67 deletions
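For context (not part of the commit itself): the DropRangeVisitor changed below feeds resolve_interior, which decides what is live across generator suspend points, and DropRanges (second file) records which values are definitely dropped at each point in the body. Break and continue previously added no control edges of their own, so drops that happen on a path ending in a jump could be propagated along the wrong edge. A minimal, hypothetical illustration of the kind of pattern involved; the names and the async shape are invented for illustration only:

    use std::sync::Arc;

    async fn noop() {}

    async fn example() {
        let value = Arc::new(0u32);
        loop {
            drop(value); // `value` is consumed here...
            break;       // ...and that fact has to travel along the `break` edge
        }
        // With a control edge recorded for the `break`, the analysis can tell
        // that `value` is definitely dropped before this await point.
        noop().await;
    }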

View File

@@ -246,7 +246,12 @@ pub fn resolve_interior<'a, 'tcx>(
     )
     .consume_body(body);
-    let mut drop_range_visitor = DropRangeVisitor::from(expr_use_visitor);
+    let region_scope_tree = fcx.tcx.region_scope_tree(def_id);
+    let mut drop_range_visitor = DropRangeVisitor::from(
+        expr_use_visitor,
+        region_scope_tree.body_expr_count(body.id()).unwrap_or(0),
+    );
     intravisit::walk_body(&mut drop_range_visitor, body);
     drop_range_visitor.drop_ranges.propagate_to_fixpoint();
@@ -254,7 +259,7 @@ pub fn resolve_interior<'a, 'tcx>(
     InteriorVisitor {
         fcx,
         types: FxIndexSet::default(),
-        region_scope_tree: fcx.tcx.region_scope_tree(def_id),
+        region_scope_tree,
         expr_count: 0,
         kind,
         prev_unresolved_span: None,
@@ -699,11 +704,12 @@ struct DropRangeVisitor<'tcx> {
 }
 impl<'tcx> DropRangeVisitor<'tcx> {
-    fn from(uses: ExprUseDelegate<'tcx>) -> Self {
+    fn from(uses: ExprUseDelegate<'tcx>, num_exprs: usize) -> Self {
         debug!("consumed_places: {:?}", uses.consumed_places);
         let drop_ranges = DropRanges::new(
             uses.consumed_places.iter().flat_map(|(_, places)| places.iter().copied()),
             &uses.hir,
+            num_exprs,
         );
         Self {
             hir: uses.hir,
@@ -851,18 +857,18 @@ impl<'tcx> Visitor<'tcx> for DropRangeVisitor<'tcx> {
             ExprKind::If(test, if_true, if_false) => {
                 self.visit_expr(test);
-                let fork = self.expr_count - 1;
+                let fork = self.expr_count;
-                self.drop_ranges.add_control_edge(fork, self.expr_count);
+                self.drop_ranges.add_control_edge(fork, self.expr_count + 1);
                 self.visit_expr(if_true);
-                let true_end = self.expr_count - 1;
+                let true_end = self.expr_count;
+                self.drop_ranges.add_control_edge(fork, self.expr_count + 1);
                 if let Some(if_false) = if_false {
-                    self.drop_ranges.add_control_edge(fork, self.expr_count);
                     self.visit_expr(if_false);
                 }
-                self.drop_ranges.add_control_edge(true_end, self.expr_count);
+                self.drop_ranges.add_control_edge(true_end, self.expr_count + 1);
             }
             ExprKind::Assign(lhs, rhs, _) => {
                 self.visit_expr(lhs);
@@ -873,13 +879,13 @@ impl<'tcx> Visitor<'tcx> for DropRangeVisitor<'tcx> {
             ExprKind::Loop(body, ..) => {
                 let loop_begin = self.expr_count;
                 self.visit_block(body);
-                self.drop_ranges.add_control_edge(self.expr_count - 1, loop_begin);
+                self.drop_ranges.add_control_edge(self.expr_count, loop_begin);
             }
             ExprKind::Match(scrutinee, arms, ..) => {
                 self.visit_expr(scrutinee);
                 let fork = self.expr_count - 1;
-                let arm_drops = arms
+                let arm_end_ids = arms
                     .iter()
                     .map(|Arm { pat, body, guard, .. }| {
                         self.drop_ranges.add_control_edge(fork, self.expr_count);
@@ -893,16 +899,22 @@ impl<'tcx> Visitor<'tcx> for DropRangeVisitor<'tcx> {
                             None => (),
                         }
                         self.visit_expr(body);
-                        self.expr_count - 1
+                        self.expr_count
                     })
                     .collect::<Vec<_>>();
-                arm_drops.into_iter().for_each(|arm_end| {
-                    self.drop_ranges.add_control_edge(arm_end, self.expr_count)
+                arm_end_ids.into_iter().for_each(|arm_end| {
+                    self.drop_ranges.add_control_edge(arm_end, self.expr_count + 1)
                 });
             }
+            ExprKind::Break(hir::Destination { target_id: Ok(target), .. }, ..)
+            | ExprKind::Continue(hir::Destination { target_id: Ok(target), .. }, ..) => {
+                self.drop_ranges.add_control_edge_hir_id(self.expr_count, target);
+            }
             _ => intravisit::walk_expr(self, expr),
         }
+        self.drop_ranges.add_node_mapping(expr.hir_id, self.expr_count);
         self.expr_count += 1;
         self.consume_expr(expr);
         if let Some(expr) = reinit {
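The new Break/Continue arm above records the jump with add_control_edge_hir_id rather than add_control_edge because the target is only known as a HirId at that point: expressions are numbered in post-order (children first, then add_node_mapping and expr_count += 1 for the expression itself), so the enclosing loop that a break or continue jumps to has not been assigned its PostOrderId yet. A self-contained miniature of that numbering, with invented types rather than the compiler's, just to illustrate the ordering:

    enum Expr {
        Leaf(&'static str),
        Loop(Vec<Expr>),
    }

    // Children are numbered first; the expression itself is then recorded with the
    // current counter value and the counter advances, mirroring
    // `add_node_mapping(expr.hir_id, self.expr_count)` / `self.expr_count += 1`.
    fn number(expr: &Expr, next_id: &mut usize, out: &mut Vec<(&'static str, usize)>) {
        if let Expr::Loop(body) = expr {
            for child in body {
                number(child, next_id, out);
            }
        }
        let name = match expr {
            Expr::Leaf(name) => *name,
            Expr::Loop(_) => "loop",
        };
        out.push((name, *next_id));
        *next_id += 1;
    }

    // For a loop containing leaves "a" and "b" this yields a = 0, b = 1, loop = 2;
    // a `break` visited inside the body sees the loop's HirId but not yet its id.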

View File

@@ -1,3 +1,7 @@
+use std::collections::BTreeMap;
+use std::fmt::Debug;
+use std::mem::swap;
 use rustc_hir::{HirId, HirIdMap};
 use rustc_index::bit_set::BitSet;
 use rustc_index::vec::IndexVec;
@@ -20,6 +24,19 @@ rustc_index::newtype_index! {
 pub struct DropRanges {
     hir_id_map: HirIdMap<HirIdIndex>,
     nodes: IndexVec<PostOrderId, NodeInfo>,
+    deferred_edges: Vec<(usize, HirId)>,
+    // FIXME: This should only be used for loops and break/continue.
+    post_order_map: HirIdMap<usize>,
 }
+impl Debug for DropRanges {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        f.debug_struct("DropRanges")
+            .field("hir_id_map", &self.hir_id_map)
+            .field("post_order_maps", &self.post_order_map)
+            .field("nodes", &self.nodes.iter_enumerated().collect::<BTreeMap<_, _>>())
+            .finish()
+    }
+}
 /// DropRanges keeps track of what values are definitely dropped at each point in the code.
@@ -29,7 +46,7 @@ pub struct DropRanges {
 /// (hir_id, post_order_id) -> bool, where a true value indicates that the value is definitely
 /// dropped at the point of the node identified by post_order_id.
 impl DropRanges {
-    pub fn new(hir_ids: impl Iterator<Item = HirId>, hir: &Map<'_>) -> Self {
+    pub fn new(hir_ids: impl Iterator<Item = HirId>, hir: &Map<'_>, num_exprs: usize) -> Self {
         let mut hir_id_map = HirIdMap::<HirIdIndex>::default();
         let mut next = <_>::from(0u32);
         for hir_id in hir_ids {
@@ -41,7 +58,13 @@ impl DropRanges {
             });
         }
         debug!("hir_id_map: {:?}", hir_id_map);
-        Self { hir_id_map, nodes: <_>::default() }
+        let num_values = hir_id_map.len();
+        Self {
+            hir_id_map,
+            nodes: IndexVec::from_fn_n(|_| NodeInfo::new(num_values), num_exprs + 1),
+            deferred_edges: <_>::default(),
+            post_order_map: <_>::default(),
+        }
     }
     fn hidx(&self, hir_id: HirId) -> HirIdIndex {
@@ -52,7 +75,7 @@
         self.hir_id_map
             .get(&hir_id)
             .copied()
-            .map_or(false, |hir_id| self.node(location.into()).drop_state.contains(hir_id))
+            .map_or(false, |hir_id| self.expect_node(location.into()).drop_state.contains(hir_id))
     }
     /// Returns the number of values (hir_ids) that are tracked
@@ -60,9 +83,15 @@
         self.hir_id_map.len()
     }
-    fn node(&mut self, id: PostOrderId) -> &NodeInfo {
-        let size = self.num_values();
-        self.nodes.ensure_contains_elem(id, || NodeInfo::new(size));
+    /// Adds an entry in the mapping from HirIds to PostOrderIds
+    ///
+    /// Needed so that `add_control_edge_hir_id` can work.
+    pub fn add_node_mapping(&mut self, hir_id: HirId, post_order_id: usize) {
+        self.post_order_map.insert(hir_id, post_order_id);
+    }
+    /// Returns a reference to the NodeInfo for a node, panicking if it does not exist
+    fn expect_node(&self, id: PostOrderId) -> &NodeInfo {
         &self.nodes[id]
     }
@@ -73,9 +102,32 @@ impl DropRanges {
     }
     pub fn add_control_edge(&mut self, from: usize, to: usize) {
         trace!("adding control edge from {} to {}", from, to);
         self.node_mut(from.into()).successors.push(to.into());
     }
+    /// Like add_control_edge, but uses a hir_id as the target.
+    ///
+    /// This can be used for branches where we do not know the PostOrderId of the target yet,
+    /// such as when handling `break` or `continue`.
+    pub fn add_control_edge_hir_id(&mut self, from: usize, to: HirId) {
+        self.deferred_edges.push((from, to));
+    }
+    /// Looks up PostOrderId for any control edges added by HirId and adds a proper edge for them.
+    ///
+    /// Should be called after visiting the HIR but before solving the control flow, otherwise some
+    /// edges will be missed.
+    fn process_deferred_edges(&mut self) {
+        let mut edges = vec![];
+        swap(&mut edges, &mut self.deferred_edges);
+        edges.into_iter().for_each(|(from, to)| {
+            let to = *self.post_order_map.get(&to).expect("Expression ID not found");
+            trace!("Adding deferred edge from {} to {}", from, to);
+            self.add_control_edge(from, to)
+        });
+    }
     pub fn drop_at(&mut self, value: HirId, location: usize) {
         let value = self.hidx(value);
         self.node_mut(location.into()).drops.push(value);
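Taken together, add_node_mapping, add_control_edge_hir_id, and process_deferred_edges form a two-phase scheme: during the walk, edges whose target is only known by HirId are parked; once every expression has a PostOrderId, they are rewritten into ordinary edges. A stripped-down sketch of the same idea using plain standard-library types (the names below are invented for illustration and are not the compiler's API):

    use std::collections::HashMap;

    #[derive(Default)]
    struct MiniGraph {
        edges: Vec<(usize, usize)>,
        deferred: Vec<(usize, &'static str)>,
        label_to_index: HashMap<&'static str, usize>,
    }

    impl MiniGraph {
        // counterpart of add_control_edge
        fn add_edge(&mut self, from: usize, to: usize) {
            self.edges.push((from, to));
        }
        // counterpart of add_control_edge_hir_id: target known only by label
        fn add_edge_to_label(&mut self, from: usize, label: &'static str) {
            self.deferred.push((from, label));
        }
        // counterpart of add_node_mapping
        fn record_label(&mut self, label: &'static str, index: usize) {
            self.label_to_index.insert(label, index);
        }
        // counterpart of process_deferred_edges: resolve labels to real indices
        fn resolve_deferred(&mut self) {
            for (from, label) in std::mem::take(&mut self.deferred) {
                let to = self.label_to_index[label];
                self.add_edge(from, to);
            }
        }
    }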
@@ -92,40 +144,65 @@
     }
     pub fn propagate_to_fixpoint(&mut self) {
-        while self.propagate() {}
+        trace!("before fixpoint: {:#?}", self);
+        self.process_deferred_edges();
+        let preds = self.compute_predecessors();
+        trace!("predecessors: {:#?}", preds.iter_enumerated().collect::<BTreeMap<_, _>>());
+        let mut propagate = || {
+            let mut changed = false;
+            for id in self.nodes.indices() {
+                let old_state = self.nodes[id].drop_state.clone();
+                if preds[id].len() != 0 {
+                    self.nodes[id].drop_state = self.nodes[preds[id][0]].drop_state.clone();
+                    for pred in &preds[id][1..] {
+                        let state = self.nodes[*pred].drop_state.clone();
+                        self.nodes[id].drop_state.intersect(&state);
+                    }
+                } else {
+                    self.nodes[id].drop_state = if id.index() == 0 {
+                        BitSet::new_empty(self.num_values())
+                    } else {
+                        // If we are not the start node and we have no predecessors, treat
+                        // everything as dropped because there's no way to get here anyway.
+                        BitSet::new_filled(self.num_values())
+                    };
+                };
+                for drop in &self.nodes[id].drops.clone() {
+                    self.nodes[id].drop_state.insert(*drop);
+                }
+                for reinit in &self.nodes[id].reinits.clone() {
+                    self.nodes[id].drop_state.remove(*reinit);
+                }
+                changed |= old_state != self.nodes[id].drop_state;
+            }
+            changed
+        };
+        while propagate() {}
+        trace!("after fixpoint: {:#?}", self);
     }
-    fn propagate(&mut self) -> bool {
-        let mut visited = BitSet::new_empty(self.nodes.len());
-        self.visit(&mut visited, PostOrderId::from(0usize), PostOrderId::from(0usize), false)
-    }
-    fn visit(
-        &mut self,
-        visited: &mut BitSet<PostOrderId>,
-        id: PostOrderId,
-        pred_id: PostOrderId,
-        mut changed: bool,
-    ) -> bool {
-        if visited.contains(id) {
-            return changed;
-        }
-        visited.insert(id);
-        changed &= self.nodes[id].merge_with(&self.nodes[pred_id]);
-        if self.nodes[id].successors.len() == 0 {
-            self.visit(visited, PostOrderId::from(id.index() + 1), id, changed)
-        } else {
-            self.nodes[id]
-                .successors
-                .iter()
-                .fold(changed, |changed, &succ| self.visit(visited, succ, id, changed))
+    fn compute_predecessors(&self) -> IndexVec<PostOrderId, Vec<PostOrderId>> {
+        let mut preds = IndexVec::from_fn_n(|_| vec![], self.nodes.len());
+        for (id, node) in self.nodes.iter_enumerated() {
+            if node.successors.len() == 0 && id.index() != self.nodes.len() - 1 {
+                preds[<_>::from(id.index() + 1)].push(id);
+            } else {
+                for succ in &node.successors {
+                    preds[*succ].push(id);
+                }
+            }
+        }
+        preds
+    }
 }
 #[derive(Debug)]
 struct NodeInfo {
     /// IDs of nodes that can follow this one in the control flow
     ///
@@ -148,26 +225,7 @@ impl NodeInfo {
             successors: vec![],
             drops: vec![],
             reinits: vec![],
-            drop_state: BitSet::new_empty(num_values),
+            drop_state: BitSet::new_filled(num_values),
         }
     }
-    fn merge_with(&mut self, other: &NodeInfo) -> bool {
-        let mut changed = false;
-        for place in &self.drops {
-            if !self.drop_state.contains(place) && !self.reinits.contains(&place) {
-                changed = true;
-                self.drop_state.insert(place);
-            }
-        }
-        for place in &self.reinits {
-            if self.drop_state.contains(place) {
-                changed = true;
-                self.drop_state.remove(place);
-            }
-        }
-        changed
-    }
 }
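A closing note on the reworked fixpoint (an observation, not text from the commit): propagate now computes each node's drop state as the intersection of its predecessors' states and then applies that node's own drops and reinits. A non-start node with no predecessors is unreachable, for example the program point just after an unconditional break, and seeding it with a full bit set (everything treated as dropped) keeps it from clearing bits at the join point it nominally falls through to. A minimal sketch of that meet step with plain Vec<bool> states instead of BitSet (illustration only):

    // drop_state[i] == true means value i is definitely dropped at this node.
    fn meet(pred_states: &[Vec<bool>], num_values: usize, is_start_node: bool) -> Vec<bool> {
        if pred_states.is_empty() {
            // start node: nothing dropped yet; unreachable node: treat everything
            // as dropped so it cannot clear bits at the node it flows into
            return vec![!is_start_node; num_values];
        }
        let mut state = pred_states[0].clone();
        for pred in &pred_states[1..] {
            for (bit, other) in state.iter_mut().zip(pred) {
                *bit = *bit && *other;
            }
        }
        state
    }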