mirror of https://github.com/rust-lang/rust.git (synced 2025-02-18 18:04:13 +00:00)
Auto merge of #51987 - nikomatsakis:nll-region-infer-scc, r=pnkfelix
nll experiment: compute SCCs instead of iterative region solving

This is an attempt to speed up region solving by replacing the current iterative dataflow with an SCC computation. The idea is to detect cycles (SCCs) among the region constraints and then compute just one value per cycle. With every cycle collapsed to a single node, the constraint graph is a DAG, so we can solve the constraints "bottom up" once the liveness values are known. I kind of ran out of time this morning, so the last commit is a bit sloppy, but I wanted to get this posted, let Travis run on it, and maybe do a perf run before I clean it up.
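The core of the approach can be sketched against the `Sccs` type this PR adds to `rustc_data_structures`. The sketch below is illustrative only: `constraint_graph`, `liveness_values`, and the use of plain `usize` for region variables, CFG points, and SCC indices are placeholder assumptions rather than code from this PR; only the `Sccs` calls mirror the new API.

    use std::collections::HashSet;
    use rustc_data_structures::graph::scc::Sccs;

    // `constraint_graph` (assumed): edges `r1 -> r2` record the constraint
    // "r1: r2", i.e. the value of `r1` must include the value of `r2`.
    let sccs: Sccs<usize, usize> = Sccs::new(&constraint_graph);

    // One value per SCC instead of one per region variable.
    let mut scc_values: Vec<HashSet<usize>> =
        vec![HashSet::new(); sccs.num_sccs()];

    // Seed each SCC with the liveness values of its member regions
    // (`liveness_values` is assumed to be computed beforehand).
    for r in 0..constraint_graph.num_nodes() {
        scc_values[sccs.scc(r)].extend(&liveness_values[r]);
    }

    // An SCC index is assigned only after all of that SCC's successors have
    // been assigned, so walking the SCCs in index order propagates values
    // "bottom up" over the cycle-free DAG in a single pass.
    for scc in sccs.all_sccs() {
        let from_succs: Vec<usize> = sccs
            .successors(scc)
            .iter()
            .flat_map(|&succ| scc_values[succ].iter().cloned())
            .collect();
        scc_values[scc].extend(from_succs);
    }

    // The final value of any region `r` is `scc_values[sccs.scc(r)]`.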
This commit is contained in: commit bce32b532d
@ -8,11 +8,11 @@
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
use rustc_data_structures::graph;
|
||||
use cfg::*;
|
||||
use middle::region;
|
||||
use ty::{self, TyCtxt};
|
||||
use rustc_data_structures::graph::implementation as graph;
|
||||
use syntax::ptr::P;
|
||||
use ty::{self, TyCtxt};
|
||||
|
||||
use hir::{self, PatKind};
|
||||
use hir::def_id::DefId;
|
||||
|
@ -11,7 +11,7 @@
|
||||
//! Module that constructs a control-flow graph representing an item.
|
||||
//! Uses `Graph` as the underlying representation.
|
||||
|
||||
use rustc_data_structures::graph;
|
||||
use rustc_data_structures::graph::implementation as graph;
|
||||
use ty::TyCtxt;
|
||||
use hir;
|
||||
use hir::def_id::DefId;
|
||||
|
@ -9,7 +9,9 @@
|
||||
// except according to those terms.
|
||||
|
||||
use rustc_data_structures::fx::FxHashMap;
|
||||
use rustc_data_structures::graph::{Direction, INCOMING, Graph, NodeIndex, OUTGOING};
|
||||
use rustc_data_structures::graph::implementation::{
|
||||
Direction, INCOMING, Graph, NodeIndex, OUTGOING
|
||||
};
|
||||
|
||||
use super::DepNode;
|
||||
|
||||
|
@ -20,7 +20,7 @@ use infer::region_constraints::VerifyBound;
|
||||
use middle::free_region::RegionRelations;
|
||||
use rustc_data_structures::indexed_vec::{Idx, IndexVec};
|
||||
use rustc_data_structures::fx::FxHashSet;
|
||||
use rustc_data_structures::graph::{self, Direction, NodeIndex, OUTGOING};
|
||||
use rustc_data_structures::graph::implementation::{Graph, Direction, NodeIndex, INCOMING, OUTGOING};
|
||||
use std::fmt;
|
||||
use std::u32;
|
||||
use ty::{self, TyCtxt};
|
||||
@ -99,7 +99,7 @@ struct RegionAndOrigin<'tcx> {
|
||||
origin: SubregionOrigin<'tcx>,
|
||||
}
|
||||
|
||||
type RegionGraph<'tcx> = graph::Graph<(), Constraint<'tcx>>;
|
||||
type RegionGraph<'tcx> = Graph<(), Constraint<'tcx>>;
|
||||
|
||||
struct LexicalResolver<'cx, 'gcx: 'tcx, 'tcx: 'cx> {
|
||||
region_rels: &'cx RegionRelations<'cx, 'gcx, 'tcx>,
|
||||
@ -501,7 +501,7 @@ impl<'cx, 'gcx, 'tcx> LexicalResolver<'cx, 'gcx, 'tcx> {
|
||||
fn construct_graph(&self) -> RegionGraph<'tcx> {
|
||||
let num_vars = self.num_vars();
|
||||
|
||||
let mut graph = graph::Graph::new();
|
||||
let mut graph = Graph::new();
|
||||
|
||||
for _ in 0..num_vars {
|
||||
graph.add_node(());
|
||||
@ -550,9 +550,9 @@ impl<'cx, 'gcx, 'tcx> LexicalResolver<'cx, 'gcx, 'tcx> {
|
||||
// Errors in expanding nodes result from a lower-bound that is
|
||||
// not contained by an upper-bound.
|
||||
let (mut lower_bounds, lower_dup) =
|
||||
self.collect_concrete_regions(graph, node_idx, graph::INCOMING, dup_vec);
|
||||
self.collect_concrete_regions(graph, node_idx, INCOMING, dup_vec);
|
||||
let (mut upper_bounds, upper_dup) =
|
||||
self.collect_concrete_regions(graph, node_idx, graph::OUTGOING, dup_vec);
|
||||
self.collect_concrete_regions(graph, node_idx, OUTGOING, dup_vec);
|
||||
|
||||
if lower_dup || upper_dup {
|
||||
return;
|
||||
|
@ -22,7 +22,7 @@ use std::mem;
|
||||
use std::usize;
|
||||
use syntax::print::pprust::PrintState;
|
||||
|
||||
use rustc_data_structures::graph::OUTGOING;
|
||||
use rustc_data_structures::graph::implementation::OUTGOING;
|
||||
|
||||
use util::nodemap::FxHashMap;
|
||||
use hir;
|
||||
|
@ -21,9 +21,8 @@ use mir::interpret::{EvalErrorKind, Scalar, Value};
|
||||
use mir::visit::MirVisitable;
|
||||
use rustc_apfloat::ieee::{Double, Single};
|
||||
use rustc_apfloat::Float;
|
||||
use rustc_data_structures::control_flow_graph::dominators::{dominators, Dominators};
|
||||
use rustc_data_structures::control_flow_graph::ControlFlowGraph;
|
||||
use rustc_data_structures::control_flow_graph::{GraphPredecessors, GraphSuccessors};
|
||||
use rustc_data_structures::graph::dominators::{dominators, Dominators};
|
||||
use rustc_data_structures::graph::{self, GraphPredecessors, GraphSuccessors};
|
||||
use rustc_data_structures::indexed_vec::{Idx, IndexVec};
|
||||
use rustc_data_structures::small_vec::SmallVec;
|
||||
use rustc_data_structures::sync::Lrc;
|
||||
@ -2289,23 +2288,32 @@ fn item_path_str(def_id: DefId) -> String {
|
||||
ty::tls::with(|tcx| tcx.item_path_str(def_id))
|
||||
}
|
||||
|
||||
impl<'tcx> ControlFlowGraph for Mir<'tcx> {
|
||||
impl<'tcx> graph::DirectedGraph for Mir<'tcx> {
|
||||
type Node = BasicBlock;
|
||||
}
|
||||
|
||||
impl<'tcx> graph::WithNumNodes for Mir<'tcx> {
|
||||
fn num_nodes(&self) -> usize {
|
||||
self.basic_blocks.len()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tcx> graph::WithStartNode for Mir<'tcx> {
|
||||
fn start_node(&self) -> Self::Node {
|
||||
START_BLOCK
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tcx> graph::WithPredecessors for Mir<'tcx> {
|
||||
fn predecessors<'graph>(
|
||||
&'graph self,
|
||||
node: Self::Node,
|
||||
) -> <Self as GraphPredecessors<'graph>>::Iter {
|
||||
self.predecessors_for(node).clone().into_iter()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tcx> graph::WithSuccessors for Mir<'tcx> {
|
||||
fn successors<'graph>(
|
||||
&'graph self,
|
||||
node: Self::Node,
|
||||
@ -2314,12 +2322,12 @@ impl<'tcx> ControlFlowGraph for Mir<'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'b> GraphPredecessors<'b> for Mir<'a> {
|
||||
impl<'a, 'b> graph::GraphPredecessors<'b> for Mir<'a> {
|
||||
type Item = BasicBlock;
|
||||
type Iter = IntoIter<BasicBlock>;
|
||||
}
|
||||
|
||||
impl<'a, 'b> GraphSuccessors<'b> for Mir<'a> {
|
||||
impl<'a, 'b> graph::GraphSuccessors<'b> for Mir<'a> {
|
||||
type Item = BasicBlock;
|
||||
type Iter = iter::Cloned<Successors<'b>>;
|
||||
}
|
||||
|
@ -12,7 +12,7 @@
|
||||
//! which do not.
|
||||
|
||||
use rustc_data_structures::bitvec::BitVector;
|
||||
use rustc_data_structures::control_flow_graph::dominators::Dominators;
|
||||
use rustc_data_structures::graph::dominators::Dominators;
|
||||
use rustc_data_structures::indexed_vec::{Idx, IndexVec};
|
||||
use rustc::mir::{self, Location, TerminatorKind};
|
||||
use rustc::mir::visit::{Visitor, PlaceContext};
|
||||
|
@ -1,42 +0,0 @@
|
||||
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution and at
|
||||
// http://rust-lang.org/COPYRIGHT.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
use super::indexed_vec::Idx;
|
||||
|
||||
pub mod dominators;
|
||||
pub mod iterate;
|
||||
mod reference;
|
||||
|
||||
#[cfg(test)]
|
||||
mod test;
|
||||
|
||||
pub trait ControlFlowGraph
|
||||
where Self: for<'graph> GraphPredecessors<'graph, Item=<Self as ControlFlowGraph>::Node>,
|
||||
Self: for<'graph> GraphSuccessors<'graph, Item=<Self as ControlFlowGraph>::Node>
|
||||
{
|
||||
type Node: Idx;
|
||||
|
||||
fn num_nodes(&self) -> usize;
|
||||
fn start_node(&self) -> Self::Node;
|
||||
fn predecessors<'graph>(&'graph self, node: Self::Node)
|
||||
-> <Self as GraphPredecessors<'graph>>::Iter;
|
||||
fn successors<'graph>(&'graph self, node: Self::Node)
|
||||
-> <Self as GraphSuccessors<'graph>>::Iter;
|
||||
}
|
||||
|
||||
pub trait GraphPredecessors<'graph> {
|
||||
type Item;
|
||||
type Iter: Iterator<Item = Self::Item>;
|
||||
}
|
||||
|
||||
pub trait GraphSuccessors<'graph> {
|
||||
type Item;
|
||||
type Iter: Iterator<Item = Self::Item>;
|
||||
}
|
@ -14,9 +14,9 @@
|
||||
//! Rice Computer Science TS-06-33870
|
||||
//! <https://www.cs.rice.edu/~keith/EMBED/dom.pdf>
|
||||
|
||||
use super::ControlFlowGraph;
|
||||
use super::super::indexed_vec::{Idx, IndexVec};
|
||||
use super::iterate::reverse_post_order;
|
||||
use super::super::indexed_vec::{IndexVec, Idx};
|
||||
use super::ControlFlowGraph;
|
||||
|
||||
use std::fmt;
|
||||
|
||||
@ -29,15 +29,16 @@ pub fn dominators<G: ControlFlowGraph>(graph: &G) -> Dominators<G::Node> {
|
||||
dominators_given_rpo(graph, &rpo)
|
||||
}
|
||||
|
||||
pub fn dominators_given_rpo<G: ControlFlowGraph>(graph: &G,
|
||||
rpo: &[G::Node])
|
||||
-> Dominators<G::Node> {
|
||||
pub fn dominators_given_rpo<G: ControlFlowGraph>(
|
||||
graph: &G,
|
||||
rpo: &[G::Node],
|
||||
) -> Dominators<G::Node> {
|
||||
let start_node = graph.start_node();
|
||||
assert_eq!(rpo[0], start_node);
|
||||
|
||||
// compute the post order index (rank) for each node
|
||||
let mut post_order_rank: IndexVec<G::Node, usize> = IndexVec::from_elem_n(usize::default(),
|
||||
graph.num_nodes());
|
||||
let mut post_order_rank: IndexVec<G::Node, usize> =
|
||||
IndexVec::from_elem_n(usize::default(), graph.num_nodes());
|
||||
for (index, node) in rpo.iter().rev().cloned().enumerate() {
|
||||
post_order_rank[node] = index;
|
||||
}
|
||||
@ -56,10 +57,12 @@ pub fn dominators_given_rpo<G: ControlFlowGraph>(graph: &G,
|
||||
if immediate_dominators[pred].is_some() {
|
||||
// (*)
|
||||
// (*) dominators for `pred` have been calculated
|
||||
new_idom = intersect_opt(&post_order_rank,
|
||||
&immediate_dominators,
|
||||
new_idom,
|
||||
Some(pred));
|
||||
new_idom = intersect_opt(
|
||||
&post_order_rank,
|
||||
&immediate_dominators,
|
||||
new_idom,
|
||||
Some(pred),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@ -76,11 +79,12 @@ pub fn dominators_given_rpo<G: ControlFlowGraph>(graph: &G,
|
||||
}
|
||||
}
|
||||
|
||||
fn intersect_opt<Node: Idx>(post_order_rank: &IndexVec<Node, usize>,
|
||||
immediate_dominators: &IndexVec<Node, Option<Node>>,
|
||||
node1: Option<Node>,
|
||||
node2: Option<Node>)
|
||||
-> Option<Node> {
|
||||
fn intersect_opt<Node: Idx>(
|
||||
post_order_rank: &IndexVec<Node, usize>,
|
||||
immediate_dominators: &IndexVec<Node, Option<Node>>,
|
||||
node1: Option<Node>,
|
||||
node2: Option<Node>,
|
||||
) -> Option<Node> {
|
||||
match (node1, node2) {
|
||||
(None, None) => None,
|
||||
(Some(n), None) | (None, Some(n)) => Some(n),
|
||||
@ -88,11 +92,12 @@ fn intersect_opt<Node: Idx>(post_order_rank: &IndexVec<Node, usize>,
|
||||
}
|
||||
}
|
||||
|
||||
fn intersect<Node: Idx>(post_order_rank: &IndexVec<Node, usize>,
|
||||
immediate_dominators: &IndexVec<Node, Option<Node>>,
|
||||
mut node1: Node,
|
||||
mut node2: Node)
|
||||
-> Node {
|
||||
fn intersect<Node: Idx>(
|
||||
post_order_rank: &IndexVec<Node, usize>,
|
||||
immediate_dominators: &IndexVec<Node, Option<Node>>,
|
||||
mut node1: Node,
|
||||
mut node2: Node,
|
||||
) -> Node {
|
||||
while node1 != node2 {
|
||||
while post_order_rank[node1] < post_order_rank[node2] {
|
||||
node1 = immediate_dominators[node1].unwrap();
|
||||
@ -176,11 +181,13 @@ impl<Node: Idx> DominatorTree<Node> {
|
||||
|
||||
impl<Node: Idx> fmt::Debug for DominatorTree<Node> {
|
||||
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
|
||||
fmt::Debug::fmt(&DominatorTreeNode {
|
||||
tree: self,
|
||||
node: self.root,
|
||||
},
|
||||
fmt)
|
||||
fmt::Debug::fmt(
|
||||
&DominatorTreeNode {
|
||||
tree: self,
|
||||
node: self.root,
|
||||
},
|
||||
fmt,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@ -194,11 +201,9 @@ impl<'tree, Node: Idx> fmt::Debug for DominatorTreeNode<'tree, Node> {
|
||||
let subtrees: Vec<_> = self.tree
|
||||
.children(self.node)
|
||||
.iter()
|
||||
.map(|&child| {
|
||||
DominatorTreeNode {
|
||||
tree: self.tree,
|
||||
node: child,
|
||||
}
|
||||
.map(|&child| DominatorTreeNode {
|
||||
tree: self.tree,
|
||||
node: child,
|
||||
})
|
||||
.collect();
|
||||
fmt.debug_tuple("")
|
src/librustc_data_structures/graph/implementation/mod.rs (new file, 417 lines)
@ -0,0 +1,417 @@
|
||||
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution and at
|
||||
// http://rust-lang.org/COPYRIGHT.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
//! A graph module for use in dataflow, region resolution, and elsewhere.
|
||||
//!
|
||||
//! # Interface details
|
||||
//!
|
||||
//! You customize the graph by specifying a "node data" type `N` and an
|
||||
//! "edge data" type `E`. You can then later gain access (mutable or
|
||||
//! immutable) to these "user-data" bits. Currently, you can only add
|
||||
//! nodes or edges to the graph. You cannot remove or modify them once
|
||||
//! added. This could be changed if we have a need.
|
||||
//!
|
||||
//! # Implementation details
|
||||
//!
|
||||
//! The main tricky thing about this code is the way that edges are
|
||||
//! stored. The edges are stored in a central array, but they are also
|
||||
//! threaded onto two linked lists for each node, one for incoming edges
|
||||
//! and one for outgoing edges. Note that every edge is a member of some
|
||||
//! incoming list and some outgoing list. Basically you can load the
|
||||
//! first index of the linked list from the node data structures (the
|
||||
//! field `first_edge`) and then, for each edge, load the next index from
|
||||
//! the field `next_edge`). Each of those fields is an array that should
|
||||
//! be indexed by the direction (see the type `Direction`).
|
||||
|
||||
use bitvec::BitVector;
|
||||
use std::fmt::Debug;
|
||||
use std::usize;
|
||||
use snapshot_vec::{SnapshotVec, SnapshotVecDelegate};
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
pub struct Graph<N, E> {
|
||||
nodes: SnapshotVec<Node<N>>,
|
||||
edges: SnapshotVec<Edge<E>>,
|
||||
}
|
||||
|
||||
pub struct Node<N> {
|
||||
first_edge: [EdgeIndex; 2], // see module comment
|
||||
pub data: N,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Edge<E> {
|
||||
next_edge: [EdgeIndex; 2], // see module comment
|
||||
source: NodeIndex,
|
||||
target: NodeIndex,
|
||||
pub data: E,
|
||||
}
|
||||
|
||||
impl<N> SnapshotVecDelegate for Node<N> {
|
||||
type Value = Node<N>;
|
||||
type Undo = ();
|
||||
|
||||
fn reverse(_: &mut Vec<Node<N>>, _: ()) {}
|
||||
}
|
||||
|
||||
impl<N> SnapshotVecDelegate for Edge<N> {
|
||||
type Value = Edge<N>;
|
||||
type Undo = ();
|
||||
|
||||
fn reverse(_: &mut Vec<Edge<N>>, _: ()) {}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]
|
||||
pub struct NodeIndex(pub usize);
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]
|
||||
pub struct EdgeIndex(pub usize);
|
||||
|
||||
pub const INVALID_EDGE_INDEX: EdgeIndex = EdgeIndex(usize::MAX);
|
||||
|
||||
// Use a private field here to guarantee no more instances are created:
|
||||
#[derive(Copy, Clone, Debug, PartialEq)]
|
||||
pub struct Direction {
|
||||
repr: usize,
|
||||
}
|
||||
|
||||
pub const OUTGOING: Direction = Direction { repr: 0 };
|
||||
|
||||
pub const INCOMING: Direction = Direction { repr: 1 };
|
||||
|
||||
impl NodeIndex {
|
||||
/// Returns unique id (unique with respect to the graph holding associated node).
|
||||
pub fn node_id(&self) -> usize {
|
||||
self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl<N: Debug, E: Debug> Graph<N, E> {
|
||||
pub fn new() -> Graph<N, E> {
|
||||
Graph {
|
||||
nodes: SnapshotVec::new(),
|
||||
edges: SnapshotVec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn with_capacity(nodes: usize, edges: usize) -> Graph<N, E> {
|
||||
Graph {
|
||||
nodes: SnapshotVec::with_capacity(nodes),
|
||||
edges: SnapshotVec::with_capacity(edges),
|
||||
}
|
||||
}
|
||||
|
||||
// # Simple accessors
|
||||
|
||||
#[inline]
|
||||
pub fn all_nodes(&self) -> &[Node<N>] {
|
||||
&self.nodes
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn len_nodes(&self) -> usize {
|
||||
self.nodes.len()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn all_edges(&self) -> &[Edge<E>] {
|
||||
&self.edges
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn len_edges(&self) -> usize {
|
||||
self.edges.len()
|
||||
}
|
||||
|
||||
// # Node construction
|
||||
|
||||
pub fn next_node_index(&self) -> NodeIndex {
|
||||
NodeIndex(self.nodes.len())
|
||||
}
|
||||
|
||||
pub fn add_node(&mut self, data: N) -> NodeIndex {
|
||||
let idx = self.next_node_index();
|
||||
self.nodes.push(Node {
|
||||
first_edge: [INVALID_EDGE_INDEX, INVALID_EDGE_INDEX],
|
||||
data,
|
||||
});
|
||||
idx
|
||||
}
|
||||
|
||||
pub fn mut_node_data(&mut self, idx: NodeIndex) -> &mut N {
|
||||
&mut self.nodes[idx.0].data
|
||||
}
|
||||
|
||||
pub fn node_data(&self, idx: NodeIndex) -> &N {
|
||||
&self.nodes[idx.0].data
|
||||
}
|
||||
|
||||
pub fn node(&self, idx: NodeIndex) -> &Node<N> {
|
||||
&self.nodes[idx.0]
|
||||
}
|
||||
|
||||
// # Edge construction and queries
|
||||
|
||||
pub fn next_edge_index(&self) -> EdgeIndex {
|
||||
EdgeIndex(self.edges.len())
|
||||
}
|
||||
|
||||
pub fn add_edge(&mut self, source: NodeIndex, target: NodeIndex, data: E) -> EdgeIndex {
|
||||
debug!("graph: add_edge({:?}, {:?}, {:?})", source, target, data);
|
||||
|
||||
let idx = self.next_edge_index();
|
||||
|
||||
// read current first of the list of edges from each node
|
||||
let source_first = self.nodes[source.0].first_edge[OUTGOING.repr];
|
||||
let target_first = self.nodes[target.0].first_edge[INCOMING.repr];
|
||||
|
||||
// create the new edge, with the previous firsts from each node
|
||||
// as the next pointers
|
||||
self.edges.push(Edge {
|
||||
next_edge: [source_first, target_first],
|
||||
source,
|
||||
target,
|
||||
data,
|
||||
});
|
||||
|
||||
// adjust the firsts for each node to point at the new edge.
|
||||
self.nodes[source.0].first_edge[OUTGOING.repr] = idx;
|
||||
self.nodes[target.0].first_edge[INCOMING.repr] = idx;
|
||||
|
||||
return idx;
|
||||
}
|
||||
|
||||
pub fn edge(&self, idx: EdgeIndex) -> &Edge<E> {
|
||||
&self.edges[idx.0]
|
||||
}
|
||||
|
||||
// # Iterating over nodes, edges
|
||||
|
||||
pub fn enumerated_nodes(&self) -> impl Iterator<Item = (NodeIndex, &Node<N>)> {
|
||||
self.nodes
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(idx, n)| (NodeIndex(idx), n))
|
||||
}
|
||||
|
||||
pub fn enumerated_edges(&self) -> impl Iterator<Item = (EdgeIndex, &Edge<E>)> {
|
||||
self.edges
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(idx, e)| (EdgeIndex(idx), e))
|
||||
}
|
||||
|
||||
pub fn each_node<'a>(&'a self, mut f: impl FnMut(NodeIndex, &'a Node<N>) -> bool) -> bool {
|
||||
//! Iterates over all nodes defined in the graph.
|
||||
self.enumerated_nodes()
|
||||
.all(|(node_idx, node)| f(node_idx, node))
|
||||
}
|
||||
|
||||
pub fn each_edge<'a>(&'a self, mut f: impl FnMut(EdgeIndex, &'a Edge<E>) -> bool) -> bool {
|
||||
//! Iterates over all edges defined in the graph
|
||||
self.enumerated_edges()
|
||||
.all(|(edge_idx, edge)| f(edge_idx, edge))
|
||||
}
|
||||
|
||||
pub fn outgoing_edges(&self, source: NodeIndex) -> AdjacentEdges<N, E> {
|
||||
self.adjacent_edges(source, OUTGOING)
|
||||
}
|
||||
|
||||
pub fn incoming_edges(&self, source: NodeIndex) -> AdjacentEdges<N, E> {
|
||||
self.adjacent_edges(source, INCOMING)
|
||||
}
|
||||
|
||||
pub fn adjacent_edges(&self, source: NodeIndex, direction: Direction) -> AdjacentEdges<N, E> {
|
||||
let first_edge = self.node(source).first_edge[direction.repr];
|
||||
AdjacentEdges {
|
||||
graph: self,
|
||||
direction,
|
||||
next: first_edge,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn successor_nodes<'a>(
|
||||
&'a self,
|
||||
source: NodeIndex,
|
||||
) -> impl Iterator<Item = NodeIndex> + 'a {
|
||||
self.outgoing_edges(source).targets()
|
||||
}
|
||||
|
||||
pub fn predecessor_nodes<'a>(
|
||||
&'a self,
|
||||
target: NodeIndex,
|
||||
) -> impl Iterator<Item = NodeIndex> + 'a {
|
||||
self.incoming_edges(target).sources()
|
||||
}
|
||||
|
||||
pub fn depth_traverse<'a>(
|
||||
&'a self,
|
||||
start: NodeIndex,
|
||||
direction: Direction,
|
||||
) -> DepthFirstTraversal<'a, N, E> {
|
||||
DepthFirstTraversal::with_start_node(self, start, direction)
|
||||
}
|
||||
|
||||
pub fn nodes_in_postorder<'a>(
|
||||
&'a self,
|
||||
direction: Direction,
|
||||
entry_node: NodeIndex,
|
||||
) -> Vec<NodeIndex> {
|
||||
let mut visited = BitVector::new(self.len_nodes());
|
||||
let mut stack = vec![];
|
||||
let mut result = Vec::with_capacity(self.len_nodes());
|
||||
let mut push_node = |stack: &mut Vec<_>, node: NodeIndex| {
|
||||
if visited.insert(node.0) {
|
||||
stack.push((node, self.adjacent_edges(node, direction)));
|
||||
}
|
||||
};
|
||||
|
||||
for node in Some(entry_node)
|
||||
.into_iter()
|
||||
.chain(self.enumerated_nodes().map(|(node, _)| node))
|
||||
{
|
||||
push_node(&mut stack, node);
|
||||
while let Some((node, mut iter)) = stack.pop() {
|
||||
if let Some((_, child)) = iter.next() {
|
||||
let target = child.source_or_target(direction);
|
||||
// the current node needs more processing, so
|
||||
// add it back to the stack
|
||||
stack.push((node, iter));
|
||||
// and then push the new node
|
||||
push_node(&mut stack, target);
|
||||
} else {
|
||||
result.push(node);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
assert_eq!(result.len(), self.len_nodes());
|
||||
result
|
||||
}
|
||||
}
|
||||
|
||||
// # Iterators
|
||||
|
||||
pub struct AdjacentEdges<'g, N, E>
|
||||
where
|
||||
N: 'g,
|
||||
E: 'g,
|
||||
{
|
||||
graph: &'g Graph<N, E>,
|
||||
direction: Direction,
|
||||
next: EdgeIndex,
|
||||
}
|
||||
|
||||
impl<'g, N: Debug, E: Debug> AdjacentEdges<'g, N, E> {
|
||||
fn targets(self) -> impl Iterator<Item = NodeIndex> + 'g {
|
||||
self.into_iter().map(|(_, edge)| edge.target)
|
||||
}
|
||||
|
||||
fn sources(self) -> impl Iterator<Item = NodeIndex> + 'g {
|
||||
self.into_iter().map(|(_, edge)| edge.source)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'g, N: Debug, E: Debug> Iterator for AdjacentEdges<'g, N, E> {
|
||||
type Item = (EdgeIndex, &'g Edge<E>);
|
||||
|
||||
fn next(&mut self) -> Option<(EdgeIndex, &'g Edge<E>)> {
|
||||
let edge_index = self.next;
|
||||
if edge_index == INVALID_EDGE_INDEX {
|
||||
return None;
|
||||
}
|
||||
|
||||
let edge = self.graph.edge(edge_index);
|
||||
self.next = edge.next_edge[self.direction.repr];
|
||||
Some((edge_index, edge))
|
||||
}
|
||||
|
||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||
// At most, all the edges in the graph.
|
||||
(0, Some(self.graph.len_edges()))
|
||||
}
|
||||
}
|
||||
|
||||
pub struct DepthFirstTraversal<'g, N, E>
|
||||
where
|
||||
N: 'g,
|
||||
E: 'g,
|
||||
{
|
||||
graph: &'g Graph<N, E>,
|
||||
stack: Vec<NodeIndex>,
|
||||
visited: BitVector,
|
||||
direction: Direction,
|
||||
}
|
||||
|
||||
impl<'g, N: Debug, E: Debug> DepthFirstTraversal<'g, N, E> {
|
||||
pub fn with_start_node(
|
||||
graph: &'g Graph<N, E>,
|
||||
start_node: NodeIndex,
|
||||
direction: Direction,
|
||||
) -> Self {
|
||||
let mut visited = BitVector::new(graph.len_nodes());
|
||||
visited.insert(start_node.node_id());
|
||||
DepthFirstTraversal {
|
||||
graph,
|
||||
stack: vec![start_node],
|
||||
visited,
|
||||
direction,
|
||||
}
|
||||
}
|
||||
|
||||
fn visit(&mut self, node: NodeIndex) {
|
||||
if self.visited.insert(node.node_id()) {
|
||||
self.stack.push(node);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'g, N: Debug, E: Debug> Iterator for DepthFirstTraversal<'g, N, E> {
|
||||
type Item = NodeIndex;
|
||||
|
||||
fn next(&mut self) -> Option<NodeIndex> {
|
||||
let next = self.stack.pop();
|
||||
if let Some(idx) = next {
|
||||
for (_, edge) in self.graph.adjacent_edges(idx, self.direction) {
|
||||
let target = edge.source_or_target(self.direction);
|
||||
self.visit(target);
|
||||
}
|
||||
}
|
||||
next
|
||||
}
|
||||
|
||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||
// We will visit every node in the graph exactly once.
|
||||
let remaining = self.graph.len_nodes() - self.visited.count();
|
||||
(remaining, Some(remaining))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'g, N: Debug, E: Debug> ExactSizeIterator for DepthFirstTraversal<'g, N, E> {}
|
||||
|
||||
impl<E> Edge<E> {
|
||||
pub fn source(&self) -> NodeIndex {
|
||||
self.source
|
||||
}
|
||||
|
||||
pub fn target(&self) -> NodeIndex {
|
||||
self.target
|
||||
}
|
||||
|
||||
pub fn source_or_target(&self, direction: Direction) -> NodeIndex {
|
||||
if direction == OUTGOING {
|
||||
self.target
|
||||
} else {
|
||||
self.source
|
||||
}
|
||||
}
|
||||
}
|
@ -8,7 +8,7 @@
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
use graph::*;
|
||||
use graph::implementation::*;
|
||||
use std::fmt::Debug;
|
||||
|
||||
type TestGraph = Graph<&'static str, &'static str>;
|
@ -8,20 +8,24 @@
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
use super::ControlFlowGraph;
|
||||
use super::super::indexed_vec::IndexVec;
|
||||
use super::{DirectedGraph, WithSuccessors, WithNumNodes};
|
||||
|
||||
#[cfg(test)]
|
||||
mod test;
|
||||
|
||||
pub fn post_order_from<G: ControlFlowGraph>(graph: &G, start_node: G::Node) -> Vec<G::Node> {
|
||||
pub fn post_order_from<G: DirectedGraph + WithSuccessors + WithNumNodes>(
|
||||
graph: &G,
|
||||
start_node: G::Node,
|
||||
) -> Vec<G::Node> {
|
||||
post_order_from_to(graph, start_node, None)
|
||||
}
|
||||
|
||||
pub fn post_order_from_to<G: ControlFlowGraph>(graph: &G,
|
||||
start_node: G::Node,
|
||||
end_node: Option<G::Node>)
|
||||
-> Vec<G::Node> {
|
||||
pub fn post_order_from_to<G: DirectedGraph + WithSuccessors + WithNumNodes>(
|
||||
graph: &G,
|
||||
start_node: G::Node,
|
||||
end_node: Option<G::Node>,
|
||||
) -> Vec<G::Node> {
|
||||
let mut visited: IndexVec<G::Node, bool> = IndexVec::from_elem_n(false, graph.num_nodes());
|
||||
let mut result: Vec<G::Node> = Vec::with_capacity(graph.num_nodes());
|
||||
if let Some(end_node) = end_node {
|
||||
@ -31,10 +35,12 @@ pub fn post_order_from_to<G: ControlFlowGraph>(graph: &G,
|
||||
result
|
||||
}
|
||||
|
||||
fn post_order_walk<G: ControlFlowGraph>(graph: &G,
|
||||
node: G::Node,
|
||||
result: &mut Vec<G::Node>,
|
||||
visited: &mut IndexVec<G::Node, bool>) {
|
||||
fn post_order_walk<G: DirectedGraph + WithSuccessors + WithNumNodes>(
|
||||
graph: &G,
|
||||
node: G::Node,
|
||||
result: &mut Vec<G::Node>,
|
||||
visited: &mut IndexVec<G::Node, bool>,
|
||||
) {
|
||||
if visited[node] {
|
||||
return;
|
||||
}
|
||||
@ -47,7 +53,10 @@ fn post_order_walk<G: ControlFlowGraph>(graph: &G,
|
||||
result.push(node);
|
||||
}
|
||||
|
||||
pub fn reverse_post_order<G: ControlFlowGraph>(graph: &G, start_node: G::Node) -> Vec<G::Node> {
|
||||
pub fn reverse_post_order<G: DirectedGraph + WithSuccessors + WithNumNodes>(
|
||||
graph: &G,
|
||||
start_node: G::Node,
|
||||
) -> Vec<G::Node> {
|
||||
let mut vec = post_order_from(graph, start_node);
|
||||
vec.reverse();
|
||||
vec
|
@ -1,4 +1,4 @@
|
||||
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
|
||||
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution and at
|
||||
// http://rust-lang.org/COPYRIGHT.
|
||||
//
|
||||
@ -8,410 +8,72 @@
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
//! A graph module for use in dataflow, region resolution, and elsewhere.
|
||||
//!
|
||||
//! # Interface details
|
||||
//!
|
||||
//! You customize the graph by specifying a "node data" type `N` and an
|
||||
//! "edge data" type `E`. You can then later gain access (mutable or
|
||||
//! immutable) to these "user-data" bits. Currently, you can only add
|
||||
//! nodes or edges to the graph. You cannot remove or modify them once
|
||||
//! added. This could be changed if we have a need.
|
||||
//!
|
||||
//! # Implementation details
|
||||
//!
|
||||
//! The main tricky thing about this code is the way that edges are
|
||||
//! stored. The edges are stored in a central array, but they are also
|
||||
//! threaded onto two linked lists for each node, one for incoming edges
|
||||
//! and one for outgoing edges. Note that every edge is a member of some
|
||||
//! incoming list and some outgoing list. Basically you can load the
|
||||
//! first index of the linked list from the node data structures (the
|
||||
//! field `first_edge`) and then, for each edge, load the next index from
|
||||
//! the field `next_edge`). Each of those fields is an array that should
|
||||
//! be indexed by the direction (see the type `Direction`).
|
||||
use super::indexed_vec::Idx;
|
||||
|
||||
use bitvec::BitVector;
|
||||
use std::fmt::Debug;
|
||||
use std::usize;
|
||||
use snapshot_vec::{SnapshotVec, SnapshotVecDelegate};
|
||||
pub mod dominators;
|
||||
pub mod implementation;
|
||||
pub mod iterate;
|
||||
mod reference;
|
||||
pub mod scc;
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
mod test;
|
||||
|
||||
pub struct Graph<N, E> {
|
||||
nodes: SnapshotVec<Node<N>>,
|
||||
edges: SnapshotVec<Edge<E>>,
|
||||
pub trait DirectedGraph {
|
||||
type Node: Idx;
|
||||
}
|
||||
|
||||
pub struct Node<N> {
|
||||
first_edge: [EdgeIndex; 2], // see module comment
|
||||
pub data: N,
|
||||
pub trait WithNumNodes: DirectedGraph {
|
||||
fn num_nodes(&self) -> usize;
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Edge<E> {
|
||||
next_edge: [EdgeIndex; 2], // see module comment
|
||||
source: NodeIndex,
|
||||
target: NodeIndex,
|
||||
pub data: E,
|
||||
}
|
||||
|
||||
impl<N> SnapshotVecDelegate for Node<N> {
|
||||
type Value = Node<N>;
|
||||
type Undo = ();
|
||||
|
||||
fn reverse(_: &mut Vec<Node<N>>, _: ()) {}
|
||||
}
|
||||
|
||||
impl<N> SnapshotVecDelegate for Edge<N> {
|
||||
type Value = Edge<N>;
|
||||
type Undo = ();
|
||||
|
||||
fn reverse(_: &mut Vec<Edge<N>>, _: ()) {}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]
|
||||
pub struct NodeIndex(pub usize);
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]
|
||||
pub struct EdgeIndex(pub usize);
|
||||
|
||||
pub const INVALID_EDGE_INDEX: EdgeIndex = EdgeIndex(usize::MAX);
|
||||
|
||||
// Use a private field here to guarantee no more instances are created:
|
||||
#[derive(Copy, Clone, Debug, PartialEq)]
|
||||
pub struct Direction {
|
||||
repr: usize,
|
||||
}
|
||||
|
||||
pub const OUTGOING: Direction = Direction { repr: 0 };
|
||||
|
||||
pub const INCOMING: Direction = Direction { repr: 1 };
|
||||
|
||||
impl NodeIndex {
|
||||
/// Returns unique id (unique with respect to the graph holding associated node).
|
||||
pub fn node_id(&self) -> usize {
|
||||
self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl<N: Debug, E: Debug> Graph<N, E> {
|
||||
pub fn new() -> Graph<N, E> {
|
||||
Graph {
|
||||
nodes: SnapshotVec::new(),
|
||||
edges: SnapshotVec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn with_capacity(nodes: usize, edges: usize) -> Graph<N, E> {
|
||||
Graph {
|
||||
nodes: SnapshotVec::with_capacity(nodes),
|
||||
edges: SnapshotVec::with_capacity(edges),
|
||||
}
|
||||
}
|
||||
|
||||
// # Simple accessors
|
||||
|
||||
#[inline]
|
||||
pub fn all_nodes(&self) -> &[Node<N>] {
|
||||
&self.nodes
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn len_nodes(&self) -> usize {
|
||||
self.nodes.len()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn all_edges(&self) -> &[Edge<E>] {
|
||||
&self.edges
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn len_edges(&self) -> usize {
|
||||
self.edges.len()
|
||||
}
|
||||
|
||||
// # Node construction
|
||||
|
||||
pub fn next_node_index(&self) -> NodeIndex {
|
||||
NodeIndex(self.nodes.len())
|
||||
}
|
||||
|
||||
pub fn add_node(&mut self, data: N) -> NodeIndex {
|
||||
let idx = self.next_node_index();
|
||||
self.nodes.push(Node {
|
||||
first_edge: [INVALID_EDGE_INDEX, INVALID_EDGE_INDEX],
|
||||
data,
|
||||
});
|
||||
idx
|
||||
}
|
||||
|
||||
pub fn mut_node_data(&mut self, idx: NodeIndex) -> &mut N {
|
||||
&mut self.nodes[idx.0].data
|
||||
}
|
||||
|
||||
pub fn node_data(&self, idx: NodeIndex) -> &N {
|
||||
&self.nodes[idx.0].data
|
||||
}
|
||||
|
||||
pub fn node(&self, idx: NodeIndex) -> &Node<N> {
|
||||
&self.nodes[idx.0]
|
||||
}
|
||||
|
||||
// # Edge construction and queries
|
||||
|
||||
pub fn next_edge_index(&self) -> EdgeIndex {
|
||||
EdgeIndex(self.edges.len())
|
||||
}
|
||||
|
||||
pub fn add_edge(&mut self, source: NodeIndex, target: NodeIndex, data: E) -> EdgeIndex {
|
||||
debug!("graph: add_edge({:?}, {:?}, {:?})", source, target, data);
|
||||
|
||||
let idx = self.next_edge_index();
|
||||
|
||||
// read current first of the list of edges from each node
|
||||
let source_first = self.nodes[source.0].first_edge[OUTGOING.repr];
|
||||
let target_first = self.nodes[target.0].first_edge[INCOMING.repr];
|
||||
|
||||
// create the new edge, with the previous firsts from each node
|
||||
// as the next pointers
|
||||
self.edges.push(Edge {
|
||||
next_edge: [source_first, target_first],
|
||||
source,
|
||||
target,
|
||||
data,
|
||||
});
|
||||
|
||||
// adjust the firsts for each node to point at the new edge.
|
||||
self.nodes[source.0].first_edge[OUTGOING.repr] = idx;
|
||||
self.nodes[target.0].first_edge[INCOMING.repr] = idx;
|
||||
|
||||
return idx;
|
||||
}
|
||||
|
||||
pub fn edge(&self, idx: EdgeIndex) -> &Edge<E> {
|
||||
&self.edges[idx.0]
|
||||
}
|
||||
|
||||
// # Iterating over nodes, edges
|
||||
|
||||
pub fn enumerated_nodes(&self) -> impl Iterator<Item = (NodeIndex, &Node<N>)> {
|
||||
self.nodes
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(idx, n)| (NodeIndex(idx), n))
|
||||
}
|
||||
|
||||
pub fn enumerated_edges(&self) -> impl Iterator<Item = (EdgeIndex, &Edge<E>)> {
|
||||
self.edges
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(idx, e)| (EdgeIndex(idx), e))
|
||||
}
|
||||
|
||||
pub fn each_node<'a>(&'a self, mut f: impl FnMut(NodeIndex, &'a Node<N>) -> bool) -> bool {
|
||||
//! Iterates over all nodes defined in the graph.
|
||||
self.enumerated_nodes()
|
||||
.all(|(node_idx, node)| f(node_idx, node))
|
||||
}
|
||||
|
||||
pub fn each_edge<'a>(&'a self, mut f: impl FnMut(EdgeIndex, &'a Edge<E>) -> bool) -> bool {
|
||||
//! Iterates over all edges defined in the graph
|
||||
self.enumerated_edges()
|
||||
.all(|(edge_idx, edge)| f(edge_idx, edge))
|
||||
}
|
||||
|
||||
pub fn outgoing_edges(&self, source: NodeIndex) -> AdjacentEdges<N, E> {
|
||||
self.adjacent_edges(source, OUTGOING)
|
||||
}
|
||||
|
||||
pub fn incoming_edges(&self, source: NodeIndex) -> AdjacentEdges<N, E> {
|
||||
self.adjacent_edges(source, INCOMING)
|
||||
}
|
||||
|
||||
pub fn adjacent_edges(&self, source: NodeIndex, direction: Direction) -> AdjacentEdges<N, E> {
|
||||
let first_edge = self.node(source).first_edge[direction.repr];
|
||||
AdjacentEdges {
|
||||
graph: self,
|
||||
direction,
|
||||
next: first_edge,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn successor_nodes<'a>(
|
||||
&'a self,
|
||||
source: NodeIndex,
|
||||
) -> impl Iterator<Item = NodeIndex> + 'a {
|
||||
self.outgoing_edges(source).targets()
|
||||
}
|
||||
|
||||
pub fn predecessor_nodes<'a>(
|
||||
&'a self,
|
||||
target: NodeIndex,
|
||||
) -> impl Iterator<Item = NodeIndex> + 'a {
|
||||
self.incoming_edges(target).sources()
|
||||
}
|
||||
|
||||
pub fn depth_traverse<'a>(
|
||||
&'a self,
|
||||
start: NodeIndex,
|
||||
direction: Direction,
|
||||
) -> DepthFirstTraversal<'a, N, E> {
|
||||
DepthFirstTraversal::with_start_node(self, start, direction)
|
||||
}
|
||||
|
||||
pub fn nodes_in_postorder<'a>(
|
||||
&'a self,
|
||||
direction: Direction,
|
||||
entry_node: NodeIndex,
|
||||
) -> Vec<NodeIndex> {
|
||||
let mut visited = BitVector::new(self.len_nodes());
|
||||
let mut stack = vec![];
|
||||
let mut result = Vec::with_capacity(self.len_nodes());
|
||||
let mut push_node = |stack: &mut Vec<_>, node: NodeIndex| {
|
||||
if visited.insert(node.0) {
|
||||
stack.push((node, self.adjacent_edges(node, direction)));
|
||||
}
|
||||
};
|
||||
|
||||
for node in Some(entry_node)
|
||||
.into_iter()
|
||||
.chain(self.enumerated_nodes().map(|(node, _)| node))
|
||||
{
|
||||
push_node(&mut stack, node);
|
||||
while let Some((node, mut iter)) = stack.pop() {
|
||||
if let Some((_, child)) = iter.next() {
|
||||
let target = child.source_or_target(direction);
|
||||
// the current node needs more processing, so
|
||||
// add it back to the stack
|
||||
stack.push((node, iter));
|
||||
// and then push the new node
|
||||
push_node(&mut stack, target);
|
||||
} else {
|
||||
result.push(node);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
assert_eq!(result.len(), self.len_nodes());
|
||||
result
|
||||
}
|
||||
}
|
||||
|
||||
// # Iterators
|
||||
|
||||
pub struct AdjacentEdges<'g, N, E>
|
||||
pub trait WithSuccessors: DirectedGraph
|
||||
where
|
||||
N: 'g,
|
||||
E: 'g,
|
||||
Self: for<'graph> GraphSuccessors<'graph, Item = <Self as DirectedGraph>::Node>,
|
||||
{
|
||||
graph: &'g Graph<N, E>,
|
||||
direction: Direction,
|
||||
next: EdgeIndex,
|
||||
fn successors<'graph>(
|
||||
&'graph self,
|
||||
node: Self::Node,
|
||||
) -> <Self as GraphSuccessors<'graph>>::Iter;
|
||||
}
|
||||
|
||||
impl<'g, N: Debug, E: Debug> AdjacentEdges<'g, N, E> {
|
||||
fn targets(self) -> impl Iterator<Item = NodeIndex> + 'g {
|
||||
self.into_iter().map(|(_, edge)| edge.target)
|
||||
}
|
||||
|
||||
fn sources(self) -> impl Iterator<Item = NodeIndex> + 'g {
|
||||
self.into_iter().map(|(_, edge)| edge.source)
|
||||
}
|
||||
pub trait GraphSuccessors<'graph> {
|
||||
type Item;
|
||||
type Iter: Iterator<Item = Self::Item>;
|
||||
}
|
||||
|
||||
impl<'g, N: Debug, E: Debug> Iterator for AdjacentEdges<'g, N, E> {
|
||||
type Item = (EdgeIndex, &'g Edge<E>);
|
||||
|
||||
fn next(&mut self) -> Option<(EdgeIndex, &'g Edge<E>)> {
|
||||
let edge_index = self.next;
|
||||
if edge_index == INVALID_EDGE_INDEX {
|
||||
return None;
|
||||
}
|
||||
|
||||
let edge = self.graph.edge(edge_index);
|
||||
self.next = edge.next_edge[self.direction.repr];
|
||||
Some((edge_index, edge))
|
||||
}
|
||||
|
||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||
// At most, all the edges in the graph.
|
||||
(0, Some(self.graph.len_edges()))
|
||||
}
|
||||
}
|
||||
|
||||
pub struct DepthFirstTraversal<'g, N, E>
|
||||
pub trait WithPredecessors: DirectedGraph
|
||||
where
|
||||
N: 'g,
|
||||
E: 'g,
|
||||
Self: for<'graph> GraphPredecessors<'graph, Item = <Self as DirectedGraph>::Node>,
|
||||
{
|
||||
graph: &'g Graph<N, E>,
|
||||
stack: Vec<NodeIndex>,
|
||||
visited: BitVector,
|
||||
direction: Direction,
|
||||
fn predecessors<'graph>(
|
||||
&'graph self,
|
||||
node: Self::Node,
|
||||
) -> <Self as GraphPredecessors<'graph>>::Iter;
|
||||
}
|
||||
|
||||
impl<'g, N: Debug, E: Debug> DepthFirstTraversal<'g, N, E> {
|
||||
pub fn with_start_node(
|
||||
graph: &'g Graph<N, E>,
|
||||
start_node: NodeIndex,
|
||||
direction: Direction,
|
||||
) -> Self {
|
||||
let mut visited = BitVector::new(graph.len_nodes());
|
||||
visited.insert(start_node.node_id());
|
||||
DepthFirstTraversal {
|
||||
graph,
|
||||
stack: vec![start_node],
|
||||
visited,
|
||||
direction,
|
||||
}
|
||||
}
|
||||
|
||||
fn visit(&mut self, node: NodeIndex) {
|
||||
if self.visited.insert(node.node_id()) {
|
||||
self.stack.push(node);
|
||||
}
|
||||
}
|
||||
pub trait GraphPredecessors<'graph> {
|
||||
type Item;
|
||||
type Iter: Iterator<Item = Self::Item>;
|
||||
}
|
||||
|
||||
impl<'g, N: Debug, E: Debug> Iterator for DepthFirstTraversal<'g, N, E> {
|
||||
type Item = NodeIndex;
|
||||
|
||||
fn next(&mut self) -> Option<NodeIndex> {
|
||||
let next = self.stack.pop();
|
||||
if let Some(idx) = next {
|
||||
for (_, edge) in self.graph.adjacent_edges(idx, self.direction) {
|
||||
let target = edge.source_or_target(self.direction);
|
||||
self.visit(target);
|
||||
}
|
||||
}
|
||||
next
|
||||
}
|
||||
|
||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||
// We will visit every node in the graph exactly once.
|
||||
let remaining = self.graph.len_nodes() - self.visited.count();
|
||||
(remaining, Some(remaining))
|
||||
}
|
||||
pub trait WithStartNode: DirectedGraph {
|
||||
fn start_node(&self) -> Self::Node;
|
||||
}
|
||||
|
||||
impl<'g, N: Debug, E: Debug> ExactSizeIterator for DepthFirstTraversal<'g, N, E> {}
|
||||
|
||||
impl<E> Edge<E> {
|
||||
pub fn source(&self) -> NodeIndex {
|
||||
self.source
|
||||
}
|
||||
|
||||
pub fn target(&self) -> NodeIndex {
|
||||
self.target
|
||||
}
|
||||
|
||||
pub fn source_or_target(&self, direction: Direction) -> NodeIndex {
|
||||
if direction == OUTGOING {
|
||||
self.target
|
||||
} else {
|
||||
self.source
|
||||
}
|
||||
}
|
||||
pub trait ControlFlowGraph:
|
||||
DirectedGraph + WithStartNode + WithPredecessors + WithStartNode + WithSuccessors + WithNumNodes
|
||||
{
|
||||
// convenient trait
|
||||
}
|
||||
|
||||
impl<T> ControlFlowGraph for T
|
||||
where
|
||||
T: DirectedGraph
|
||||
+ WithStartNode
|
||||
+ WithPredecessors
|
||||
+ WithStartNode
|
||||
+ WithSuccessors
|
||||
+ WithNumNodes,
|
||||
{
|
||||
}
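The old monolithic `ControlFlowGraph` trait is split here into small capability traits, with `ControlFlowGraph` kept as a blanket-implemented convenience: anything that implements the pieces gets the whole for free. The `AdjGraph` type below is a hypothetical toy graph written to illustrate that, not something in this PR:

    use rustc_data_structures::graph::{
        DirectedGraph, GraphPredecessors, GraphSuccessors, WithNumNodes,
        WithPredecessors, WithStartNode, WithSuccessors,
    };

    // Hypothetical toy graph: nodes are `usize`, edges stored in both directions.
    struct AdjGraph {
        succ: Vec<Vec<usize>>,
        pred: Vec<Vec<usize>>,
    }

    impl DirectedGraph for AdjGraph {
        type Node = usize;
    }

    impl WithNumNodes for AdjGraph {
        fn num_nodes(&self) -> usize {
            self.succ.len()
        }
    }

    impl WithStartNode for AdjGraph {
        fn start_node(&self) -> usize {
            0
        }
    }

    impl<'graph> GraphSuccessors<'graph> for AdjGraph {
        type Item = usize;
        type Iter = std::iter::Cloned<std::slice::Iter<'graph, usize>>;
    }

    impl WithSuccessors for AdjGraph {
        fn successors<'graph>(
            &'graph self,
            node: usize,
        ) -> <Self as GraphSuccessors<'graph>>::Iter {
            self.succ[node].iter().cloned()
        }
    }

    impl<'graph> GraphPredecessors<'graph> for AdjGraph {
        type Item = usize;
        type Iter = std::iter::Cloned<std::slice::Iter<'graph, usize>>;
    }

    impl WithPredecessors for AdjGraph {
        fn predecessors<'graph>(
            &'graph self,
            node: usize,
        ) -> <Self as GraphPredecessors<'graph>>::Iter {
            self.pred[node].iter().cloned()
        }
    }

    // With all of the above in place, `AdjGraph: ControlFlowGraph` holds
    // automatically through the blanket impl, so it can be handed to
    // `dominators`, `reverse_post_order`, `Sccs::new`, and the like.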
|
||||
|
@ -10,34 +10,42 @@
|
||||
|
||||
use super::*;
|
||||
|
||||
impl<'graph, G: ControlFlowGraph> ControlFlowGraph for &'graph G {
|
||||
impl<'graph, G: DirectedGraph> DirectedGraph for &'graph G {
|
||||
type Node = G::Node;
|
||||
}
|
||||
|
||||
impl<'graph, G: WithNumNodes> WithNumNodes for &'graph G {
|
||||
fn num_nodes(&self) -> usize {
|
||||
(**self).num_nodes()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'graph, G: WithStartNode> WithStartNode for &'graph G {
|
||||
fn start_node(&self) -> Self::Node {
|
||||
(**self).start_node()
|
||||
}
|
||||
}
|
||||
|
||||
fn predecessors<'iter>(&'iter self,
|
||||
node: Self::Node)
|
||||
-> <Self as GraphPredecessors<'iter>>::Iter {
|
||||
(**self).predecessors(node)
|
||||
}
|
||||
|
||||
impl<'graph, G: WithSuccessors> WithSuccessors for &'graph G {
|
||||
fn successors<'iter>(&'iter self, node: Self::Node) -> <Self as GraphSuccessors<'iter>>::Iter {
|
||||
(**self).successors(node)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'iter, 'graph, G: ControlFlowGraph> GraphPredecessors<'iter> for &'graph G {
|
||||
impl<'graph, G: WithPredecessors> WithPredecessors for &'graph G {
|
||||
fn predecessors<'iter>(&'iter self,
|
||||
node: Self::Node)
|
||||
-> <Self as GraphPredecessors<'iter>>::Iter {
|
||||
(**self).predecessors(node)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'iter, 'graph, G: WithPredecessors> GraphPredecessors<'iter> for &'graph G {
|
||||
type Item = G::Node;
|
||||
type Iter = <G as GraphPredecessors<'iter>>::Iter;
|
||||
}
|
||||
|
||||
impl<'iter, 'graph, G: ControlFlowGraph> GraphSuccessors<'iter> for &'graph G {
|
||||
impl<'iter, 'graph, G: WithSuccessors> GraphSuccessors<'iter> for &'graph G {
|
||||
type Item = G::Node;
|
||||
type Iter = <G as GraphSuccessors<'iter>>::Iter;
|
||||
}
|
src/librustc_data_structures/graph/scc/mod.rs (new file, 361 lines)
@ -0,0 +1,361 @@
|
||||
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution and at
|
||||
// http://rust-lang.org/COPYRIGHT.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
//! Routine to compute the strongly connected components (SCCs) of a
|
||||
//! graph, as well as the resulting DAG if each SCC is replaced with a
|
||||
//! node in the graph. This uses a variant of Tarjan's algorithm, which
//! runs in time linear in the number of nodes and edges.
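A small usage sketch, mirroring the tests added later in this diff; `TestGraph` is the crate-internal test helper, so outside the test suite any type implementing `DirectedGraph + WithNumNodes + WithSuccessors` would be used instead:

    use graph::test::TestGraph;
    use graph::scc::Sccs;

    // 0 -> 1 <-> 2 <- 3: nodes 1 and 2 form a cycle; 0 and 3 are singleton SCCs.
    let graph = TestGraph::new(0, &[(0, 1), (1, 2), (2, 1), (3, 2)]);
    let sccs: Sccs<usize, usize> = Sccs::new(&graph);

    assert_eq!(sccs.num_sccs(), 3);
    assert_eq!(sccs.scc(1), sccs.scc(2)); // the cycle collapses to one SCC
    assert!(sccs.successors(sccs.scc(0)).contains(&sccs.scc(1)));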
|
||||
|
||||
use fx::FxHashSet;
|
||||
use graph::{DirectedGraph, WithNumNodes, WithSuccessors};
|
||||
use indexed_vec::{Idx, IndexVec};
|
||||
use std::ops::Range;
|
||||
|
||||
mod test;
|
||||
|
||||
/// Strongly connected components (SCC) of a graph. The type `N` is
|
||||
/// the index type for the graph nodes and `S` is the index type for
|
||||
/// the SCCs. We can map from each node to the SCC that it
|
||||
/// participates in, and we also have the successors of each SCC.
|
||||
pub struct Sccs<N: Idx, S: Idx> {
|
||||
/// For each node, what is the SCC index of the SCC to which it
|
||||
/// belongs.
|
||||
scc_indices: IndexVec<N, S>,
|
||||
|
||||
/// Data about each SCC.
|
||||
scc_data: SccData<S>,
|
||||
}
|
||||
|
||||
struct SccData<S: Idx> {
|
||||
/// For each SCC, the range of `all_successors` where its
|
||||
/// successors can be found.
|
||||
ranges: IndexVec<S, Range<usize>>,
|
||||
|
||||
/// Contains the successors for all the SCCs, concatenated. The
|
||||
/// range of indices corresponding to a given SCC is found in its
|
||||
/// SccData.
|
||||
all_successors: Vec<S>,
|
||||
}
|
||||
|
||||
impl<N: Idx, S: Idx> Sccs<N, S> {
|
||||
pub fn new(graph: &(impl DirectedGraph<Node = N> + WithNumNodes + WithSuccessors)) -> Self {
|
||||
SccsConstruction::construct(graph)
|
||||
}
|
||||
|
||||
/// Returns the number of SCCs in the graph.
|
||||
pub fn num_sccs(&self) -> usize {
|
||||
self.scc_data.len()
|
||||
}
|
||||
|
||||
/// Returns an iterator over the SCCs in the graph.
|
||||
pub fn all_sccs(&self) -> impl Iterator<Item = S> {
|
||||
(0 .. self.scc_data.len()).map(S::new)
|
||||
}
|
||||
|
||||
/// Returns the SCC to which a node `r` belongs.
|
||||
pub fn scc(&self, r: N) -> S {
|
||||
self.scc_indices[r]
|
||||
}
|
||||
|
||||
/// Returns the successors of the given SCC.
|
||||
pub fn successors(&self, scc: S) -> &[S] {
|
||||
self.scc_data.successors(scc)
|
||||
}
|
||||
}
|
||||
|
||||
impl<S: Idx> SccData<S> {
|
||||
/// Number of SCCs,
|
||||
fn len(&self) -> usize {
|
||||
self.ranges.len()
|
||||
}
|
||||
|
||||
/// Returns the successors of the given SCC.
|
||||
fn successors(&self, scc: S) -> &[S] {
|
||||
// Annoyingly, `range` does not implement `Copy`, so we have
|
||||
// to do `range.start..range.end`:
|
||||
let range = &self.ranges[scc];
|
||||
&self.all_successors[range.start..range.end]
|
||||
}
|
||||
|
||||
/// Creates a new SCC with `successors` as its successors and
|
||||
/// returns the resulting index.
|
||||
fn create_scc(&mut self, successors: impl IntoIterator<Item = S>) -> S {
|
||||
// Store the successors in `all_successors`, remembering
|
||||
// the range of indices.
|
||||
let all_successors_start = self.all_successors.len();
|
||||
self.all_successors.extend(successors);
|
||||
let all_successors_end = self.all_successors.len();
|
||||
|
||||
debug!(
|
||||
"create_scc({:?}) successors={:?}",
|
||||
self.ranges.len(),
|
||||
&self.all_successors[all_successors_start..all_successors_end],
|
||||
);
|
||||
|
||||
self.ranges.push(all_successors_start..all_successors_end)
|
||||
}
|
||||
}
|
||||
|
||||
struct SccsConstruction<'c, G: DirectedGraph + WithNumNodes + WithSuccessors + 'c, S: Idx> {
|
||||
graph: &'c G,
|
||||
|
||||
/// The state of each node; used during walk to record the stack
|
||||
/// and after walk to record what cycle each node ended up being
|
||||
/// in.
|
||||
node_states: IndexVec<G::Node, NodeState<G::Node, S>>,
|
||||
|
||||
/// The stack of nodes that we are visiting as part of the DFS.
|
||||
node_stack: Vec<G::Node>,
|
||||
|
||||
/// The stack of successors: as we visit a node, we mark our
|
||||
/// position in this stack, and when we encounter a successor SCC,
|
||||
/// we push it on the stack. When we complete an SCC, we can pop
|
||||
/// everything off the stack that was found along the way.
|
||||
successors_stack: Vec<S>,
|
||||
|
||||
/// A set used to strip duplicates. As we accumulate successors
|
||||
/// into the successors_stack, we sometimes get duplicate entries.
|
||||
/// We use this set to remove those -- we also keep its storage
|
||||
/// around between successors to amortize memory allocation costs.
|
||||
duplicate_set: FxHashSet<S>,
|
||||
|
||||
scc_data: SccData<S>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
enum NodeState<N, S> {
|
||||
/// This node has not yet been visited as part of the DFS.
|
||||
///
|
||||
/// After SCC construction is complete, this state ought to be
|
||||
/// impossible.
|
||||
NotVisited,
|
||||
|
||||
/// This node is currently being walked as part of our DFS. It is on
|
||||
/// the stack at the depth `depth`.
|
||||
///
|
||||
/// After SCC construction is complete, this state ought to be
|
||||
/// impossible.
|
||||
BeingVisited { depth: usize },
|
||||
|
||||
/// Indicates that this node is a member of the given cycle.
|
||||
InCycle { scc_index: S },
|
||||
|
||||
/// Indicates that this node is a member of whatever cycle
|
||||
/// `parent` is a member of. This state is transient: whenever we
|
||||
/// see it, we try to overwrite it with the current state of
|
||||
/// `parent` (this is the "path compression" step of a union-find
|
||||
/// algorithm).
|
||||
InCycleWith { parent: N },
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
enum WalkReturn<S> {
|
||||
Cycle { min_depth: usize },
|
||||
Complete { scc_index: S },
|
||||
}
|
||||
|
||||
impl<'c, G, S> SccsConstruction<'c, G, S>
|
||||
where
|
||||
G: DirectedGraph + WithNumNodes + WithSuccessors,
|
||||
S: Idx,
|
||||
{
|
||||
/// Identifies SCCs in the graph `G` and computes the resulting
|
||||
/// DAG. This uses a variant of [Tarjan's
|
||||
/// algorithm][wikipedia]. The high-level summary of the algorithm
|
||||
/// is that we do a depth-first search. Along the way, we keep a
|
||||
/// stack of each node whose successors are being visited. We
|
||||
/// track the depth of each node on this stack (there is no depth
|
||||
/// if the node is not on the stack). When we find that some node
|
||||
/// N with depth D can reach some other node N' with lower depth
|
||||
/// D' (i.e., D' < D), we know that N, N', and all nodes in
|
||||
/// between them on the stack are part of an SCC.
|
||||
///
|
||||
/// [wikipedia]: https://bit.ly/2EZIx84
|
||||
fn construct(graph: &'c G) -> Sccs<G::Node, S> {
|
||||
let num_nodes = graph.num_nodes();
|
||||
|
||||
let mut this = Self {
|
||||
graph,
|
||||
node_states: IndexVec::from_elem_n(NodeState::NotVisited, num_nodes),
|
||||
node_stack: Vec::with_capacity(num_nodes),
|
||||
successors_stack: Vec::new(),
|
||||
scc_data: SccData {
|
||||
ranges: IndexVec::new(),
|
||||
all_successors: Vec::new(),
|
||||
},
|
||||
duplicate_set: FxHashSet::default(),
|
||||
};
|
||||
|
||||
let scc_indices = (0..num_nodes)
|
||||
.map(G::Node::new)
|
||||
.map(|node| match this.walk_node(0, node) {
|
||||
WalkReturn::Complete { scc_index } => scc_index,
|
||||
WalkReturn::Cycle { min_depth } => panic!(
|
||||
"`walk_node(0, {:?})` returned cycle with depth {:?}",
|
||||
node, min_depth
|
||||
),
|
||||
})
|
||||
.collect();
|
||||
|
||||
Sccs {
|
||||
scc_indices,
|
||||
scc_data: this.scc_data,
|
||||
}
|
||||
}
|
||||
|
||||
/// Visit a node during the DFS. We first examine its current
|
||||
/// state -- if it is not yet visited (`NotVisited`), we can push
|
||||
/// it onto the stack and start walking its successors.
|
||||
///
|
||||
/// If it is already on the DFS stack it will be in the state
|
||||
/// `BeingVisited`. In that case, we have found a cycle and we
|
||||
/// return the depth from the stack.
|
||||
///
|
||||
/// Otherwise, we are looking at a node that has already been
|
||||
/// completely visited. We therefore return `WalkReturn::Complete`
|
||||
/// with its associated SCC index.
|
||||
fn walk_node(&mut self, depth: usize, node: G::Node) -> WalkReturn<S> {
|
||||
debug!("walk_node(depth = {:?}, node = {:?})", depth, node);
|
||||
match self.find_state(node) {
|
||||
NodeState::InCycle { scc_index } => WalkReturn::Complete { scc_index },
|
||||
|
||||
NodeState::BeingVisited { depth: min_depth } => WalkReturn::Cycle { min_depth },
|
||||
|
||||
NodeState::NotVisited => self.walk_unvisited_node(depth, node),
|
||||
|
||||
NodeState::InCycleWith { parent } => panic!(
|
||||
"`find_state` returned `InCycleWith({:?})`, which ought to be impossible",
|
||||
parent
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
/// Fetches the state of the node `r`. If `r` is recorded as being
|
||||
/// in a cycle with some other node `r2`, then fetches the state
|
||||
/// of `r2` (and updates `r` to reflect current result). This is
|
||||
/// basically the "find" part of a standard union-find algorithm
|
||||
/// (with path compression).
|
||||
fn find_state(&mut self, r: G::Node) -> NodeState<G::Node, S> {
|
||||
debug!("find_state(r = {:?} in state {:?})", r, self.node_states[r]);
|
||||
match self.node_states[r] {
|
||||
NodeState::InCycle { scc_index } => NodeState::InCycle { scc_index },
|
||||
NodeState::BeingVisited { depth } => NodeState::BeingVisited { depth },
|
||||
NodeState::NotVisited => NodeState::NotVisited,
|
||||
NodeState::InCycleWith { parent } => {
|
||||
let parent_state = self.find_state(parent);
|
||||
debug!("find_state: parent_state = {:?}", parent_state);
|
||||
match parent_state {
|
||||
NodeState::InCycle { .. } => {
|
||||
self.node_states[r] = parent_state;
|
||||
parent_state
|
||||
}
|
||||
|
||||
NodeState::BeingVisited { depth } => {
|
||||
self.node_states[r] = NodeState::InCycleWith {
|
||||
parent: self.node_stack[depth],
|
||||
};
|
||||
parent_state
|
||||
}
|
||||
|
||||
NodeState::NotVisited | NodeState::InCycleWith { .. } => {
|
||||
panic!("invalid parent state: {:?}", parent_state)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Walks a node that has never been visited before.
|
||||
fn walk_unvisited_node(&mut self, depth: usize, node: G::Node) -> WalkReturn<S> {
|
||||
debug!(
|
||||
"walk_unvisited_node(depth = {:?}, node = {:?})",
|
||||
depth, node
|
||||
);
|
||||
|
||||
debug_assert!(match self.node_states[node] {
|
||||
NodeState::NotVisited => true,
|
||||
_ => false,
|
||||
});
|
||||
|
||||
// Push `node` onto the stack.
|
||||
self.node_states[node] = NodeState::BeingVisited { depth };
|
||||
self.node_stack.push(node);
|
||||
|
||||
// Walk each successor of the node, looking to see if any of
|
||||
// them can reach a node that is presently on the stack. If
|
||||
// so, that means they can also reach us.
|
||||
let mut min_depth = depth;
|
||||
let mut min_cycle_root = node;
|
||||
let successors_len = self.successors_stack.len();
|
||||
for successor_node in self.graph.successors(node) {
|
||||
debug!(
|
||||
"walk_unvisited_node: node = {:?} successor_ode = {:?}",
|
||||
node, successor_node
|
||||
);
|
||||
match self.walk_node(depth + 1, successor_node) {
|
||||
WalkReturn::Cycle {
|
||||
min_depth: successor_min_depth,
|
||||
} => {
|
||||
// Track the minimum depth we can reach.
|
||||
assert!(successor_min_depth <= depth);
|
||||
if successor_min_depth < min_depth {
|
||||
debug!(
|
||||
"walk_unvisited_node: node = {:?} successor_min_depth = {:?}",
|
||||
node, successor_min_depth
|
||||
);
|
||||
min_depth = successor_min_depth;
|
||||
min_cycle_root = successor_node;
|
||||
}
|
||||
}
|
||||
|
||||
WalkReturn::Complete {
|
||||
scc_index: successor_scc_index,
|
||||
} => {
|
||||
// Push the completed SCC indices onto
|
||||
// the `successors_stack` for later.
|
||||
debug!(
|
||||
"walk_unvisited_node: node = {:?} successor_scc_index = {:?}",
|
||||
node, successor_scc_index
|
||||
);
|
||||
self.successors_stack.push(successor_scc_index);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Completed walk, remove `node` from the stack.
|
||||
let r = self.node_stack.pop();
|
||||
debug_assert_eq!(r, Some(node));
|
||||
|
||||
// If `min_depth == depth`, then we are the root of the
|
||||
// cycle: we can't reach anyone further down the stack.
|
||||
if min_depth == depth {
|
||||
// Note that successor stack may have duplicates, so we
|
||||
// want to remove those:
|
||||
let deduplicated_successors = {
|
||||
let duplicate_set = &mut self.duplicate_set;
|
||||
duplicate_set.clear();
|
||||
self.successors_stack
|
||||
.drain(successors_len..)
|
||||
.filter(move |&i| duplicate_set.insert(i))
|
||||
};
|
||||
let scc_index = self.scc_data.create_scc(deduplicated_successors);
|
||||
self.node_states[node] = NodeState::InCycle { scc_index };
|
||||
WalkReturn::Complete { scc_index }
|
||||
} else {
|
||||
// We are not the head of the cycle. Return back to our
|
||||
// caller. They will take ownership of the
|
||||
// `self.successors` data that we pushed.
|
||||
self.node_states[node] = NodeState::InCycleWith {
|
||||
parent: min_cycle_root,
|
||||
};
|
||||
WalkReturn::Cycle { min_depth }
|
||||
}
|
||||
}
|
||||
}
|
src/librustc_data_structures/graph/scc/test.rs (new file)
@ -0,0 +1,180 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

#![cfg(test)]

use graph::test::TestGraph;
use super::*;

#[test]
fn diamond() {
    let graph = TestGraph::new(0, &[(0, 1), (0, 2), (1, 3), (2, 3)]);
    let sccs: Sccs<_, usize> = Sccs::new(&graph);
    assert_eq!(sccs.num_sccs(), 4);
    assert_eq!(sccs.num_sccs(), 4);
}

#[test]
fn test_big_scc() {
    // The order in which things will be visited is important to this
    // test.
    //
    // We will visit:
    //
    // 0 -> 1 -> 2 -> 0
    //
    // and at this point detect a cycle. 2 will return back to 1 which
    // will visit 3. 3 will visit 2 before the cycle is complete, and
    // hence it too will return a cycle.

    /*
    +-> 0
    |   |
    |   v
    |   1 -> 3
    |   |    |
    |   v    |
    +-- 2 <--+
     */
    let graph = TestGraph::new(0, &[
        (0, 1),
        (1, 2),
        (1, 3),
        (2, 0),
        (3, 2),
    ]);
    let sccs: Sccs<_, usize> = Sccs::new(&graph);
    assert_eq!(sccs.num_sccs(), 1);
}

#[test]
fn test_three_sccs() {
    /*
        0
        |
        v
    +-> 1    3
    |   |    |
    |   v    |
    +-- 2 <--+
     */
    let graph = TestGraph::new(0, &[
        (0, 1),
        (1, 2),
        (2, 1),
        (3, 2),
    ]);
    let sccs: Sccs<_, usize> = Sccs::new(&graph);
    assert_eq!(sccs.num_sccs(), 3);
    assert_eq!(sccs.scc(0), 1);
    assert_eq!(sccs.scc(1), 0);
    assert_eq!(sccs.scc(2), 0);
    assert_eq!(sccs.scc(3), 2);
    assert_eq!(sccs.successors(0), &[]);
    assert_eq!(sccs.successors(1), &[0]);
    assert_eq!(sccs.successors(2), &[0]);
}

#[test]
fn test_find_state_2() {
    // The order in which things will be visited is important to this
    // test. It tests part of the `find_state` behavior. Here is the
    // graph:
    //
    //
    //       /----+
    //     0 <--+ |
    //     |    | |
    //     v    | |
    // +-> 1 -> 3 4
    // |   |      |
    // |   v      |
    // +-- 2 <----+

    let graph = TestGraph::new(0, &[
        (0, 1),
        (0, 4),
        (1, 2),
        (1, 3),
        (2, 1),
        (3, 0),
        (4, 2),
    ]);

    // For this graph, we will start in our DFS by visiting:
    //
    // 0 -> 1 -> 2 -> 1
    //
    // and at this point detect a cycle. The state of 2 will thus be
    // `InCycleWith { 1 }`. We will then visit the 1 -> 3 edge, which
    // will attempt to visit 0 as well, thus going to the state
    // `InCycleWith { 0 }`. Finally, node 1 will complete; the lowest
    // depth of any successor was 3 which had depth 0, and thus it
    // will be in the state `InCycleWith { 3 }`.
    //
    // When we finally traverse the `0 -> 4` edge and then visit node 2,
    // the states of the nodes are:
    //
    // 0 BeingVisited { 0 }
    // 1 InCycleWith { 3 }
    // 2 InCycleWith { 1 }
    // 3 InCycleWith { 0 }
    //
    // and hence 4 will traverse the links, finding an ultimate depth of 0.
    // It will also collapse the states to the following:
    //
    // 0 BeingVisited { 0 }
    // 1 InCycleWith { 3 }
    // 2 InCycleWith { 1 }
    // 3 InCycleWith { 0 }

    let sccs: Sccs<_, usize> = Sccs::new(&graph);
    assert_eq!(sccs.num_sccs(), 1);
    assert_eq!(sccs.scc(0), 0);
    assert_eq!(sccs.scc(1), 0);
    assert_eq!(sccs.scc(2), 0);
    assert_eq!(sccs.scc(3), 0);
    assert_eq!(sccs.scc(4), 0);
    assert_eq!(sccs.successors(0), &[]);
}

#[test]
fn test_find_state_3() {
    /*
          /----+
        0 <--+ |
        |    | |
        v    | |
    +-> 1 -> 3 4 5
    |   |      | |
    |   v      | |
    +-- 2 <----+-+
     */
    let graph = TestGraph::new(0, &[
        (0, 1),
        (0, 4),
        (1, 2),
        (1, 3),
        (2, 1),
        (3, 0),
        (4, 2),
        (5, 2),
    ]);
    let sccs: Sccs<_, usize> = Sccs::new(&graph);
    assert_eq!(sccs.num_sccs(), 2);
    assert_eq!(sccs.scc(0), 0);
    assert_eq!(sccs.scc(1), 0);
    assert_eq!(sccs.scc(2), 0);
    assert_eq!(sccs.scc(3), 0);
    assert_eq!(sccs.scc(4), 0);
    assert_eq!(sccs.scc(5), 1);
    assert_eq!(sccs.successors(0), &[]);
    assert_eq!(sccs.successors(1), &[0]);
}
@ -13,7 +13,7 @@ use std::cmp::max;
|
||||
use std::slice;
|
||||
use std::iter;
|
||||
|
||||
use super::{ControlFlowGraph, GraphPredecessors, GraphSuccessors};
|
||||
use super::*;
|
||||
|
||||
pub struct TestGraph {
|
||||
num_nodes: usize,
|
||||
@ -44,23 +44,31 @@ impl TestGraph {
|
||||
}
|
||||
}
|
||||
|
||||
impl ControlFlowGraph for TestGraph {
|
||||
impl DirectedGraph for TestGraph {
|
||||
type Node = usize;
|
||||
}
|
||||
|
||||
impl WithStartNode for TestGraph {
|
||||
fn start_node(&self) -> usize {
|
||||
self.start_node
|
||||
}
|
||||
}
|
||||
|
||||
impl WithNumNodes for TestGraph {
|
||||
fn num_nodes(&self) -> usize {
|
||||
self.num_nodes
|
||||
}
|
||||
}
|
||||
|
||||
impl WithPredecessors for TestGraph {
|
||||
fn predecessors<'graph>(&'graph self,
|
||||
node: usize)
|
||||
-> <Self as GraphPredecessors<'graph>>::Iter {
|
||||
self.predecessors[&node].iter().cloned()
|
||||
}
|
||||
}
|
||||
|
||||
impl WithSuccessors for TestGraph {
|
||||
fn successors<'graph>(&'graph self, node: usize) -> <Self as GraphSuccessors<'graph>>::Iter {
|
||||
self.successors[&node].iter().cloned()
|
||||
}
|
@ -14,6 +14,7 @@ use std::slice;
|
||||
use std::marker::PhantomData;
|
||||
use std::ops::{Index, IndexMut, Range, RangeBounds};
|
||||
use std::fmt;
|
||||
use std::hash::Hash;
|
||||
use std::vec;
|
||||
use std::u32;
|
||||
|
||||
@ -22,7 +23,7 @@ use rustc_serialize as serialize;
|
||||
/// Represents some newtyped `usize` wrapper.
|
||||
///
|
||||
/// (purpose: avoid mixing indexes for different bitvector domains.)
|
||||
pub trait Idx: Copy + 'static + Eq + Debug {
|
||||
pub trait Idx: Copy + 'static + Ord + Debug + Hash {
|
||||
fn new(idx: usize) -> Self;
|
||||
fn index(self) -> usize;
|
||||
}
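Since `Idx` now requires `Ord` and `Hash` in addition to `Copy` and `Debug`, a newtyped index has to derive those traits as well. A hypothetical example, not part of this patch:

// Hypothetical index newtype satisfying the strengthened `Idx` bounds.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
struct MyIdx(u32);

impl Idx for MyIdx {
    fn new(idx: usize) -> Self {
        MyIdx(idx as u32)
    }
    fn index(self) -> usize {
        self.0 as usize
    }
}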
|
||||
|
@ -61,7 +61,6 @@ pub mod small_vec;
|
||||
pub mod base_n;
|
||||
pub mod bitslice;
|
||||
pub mod bitvec;
|
||||
pub mod graph;
|
||||
pub mod indexed_set;
|
||||
pub mod indexed_vec;
|
||||
pub mod obligation_forest;
|
||||
@ -73,7 +72,7 @@ pub mod transitive_relation;
|
||||
pub use ena::unify;
|
||||
pub mod fx;
|
||||
pub mod tuple_slice;
|
||||
pub mod control_flow_graph;
|
||||
pub mod graph;
|
||||
pub mod flock;
|
||||
pub mod sync;
|
||||
pub mod owning_ref;
|
||||
|
@ -49,7 +49,9 @@ use rustc::dep_graph::debug::{DepNodeFilter, EdgeFilter};
|
||||
use rustc::hir::def_id::DefId;
|
||||
use rustc::ty::TyCtxt;
|
||||
use rustc_data_structures::fx::FxHashSet;
|
||||
use rustc_data_structures::graph::{Direction, INCOMING, OUTGOING, NodeIndex};
|
||||
use rustc_data_structures::graph::implementation::{
|
||||
Direction, INCOMING, OUTGOING, NodeIndex
|
||||
};
|
||||
use rustc::hir;
|
||||
use rustc::hir::intravisit::{self, NestedVisitorMap, Visitor};
|
||||
use rustc::ich::{ATTR_IF_THIS_CHANGED, ATTR_THEN_THIS_WOULD_NEED};
|
||||
|
@ -23,7 +23,7 @@ use rustc::mir::{Terminator, TerminatorKind};
|
||||
use rustc::ty::query::Providers;
|
||||
use rustc::ty::{self, ParamEnv, TyCtxt};
|
||||
|
||||
use rustc_data_structures::control_flow_graph::dominators::Dominators;
|
||||
use rustc_data_structures::graph::dominators::Dominators;
|
||||
use rustc_data_structures::fx::FxHashSet;
|
||||
use rustc_data_structures::indexed_set::IdxSetBuf;
|
||||
use rustc_data_structures::indexed_vec::Idx;
|
||||
|
@ -13,14 +13,11 @@ use borrow_check::location::LocationTable;
|
||||
use borrow_check::nll::ToRegionVid;
|
||||
use borrow_check::nll::facts::AllFacts;
|
||||
use borrow_check::nll::region_infer::RegionInferenceContext;
|
||||
use borrow_check::nll::type_check::AtLocation;
|
||||
use rustc::hir;
|
||||
use rustc::infer::InferCtxt;
|
||||
use rustc::mir::visit::TyContext;
|
||||
use rustc::mir::visit::Visitor;
|
||||
use rustc::mir::Place::Projection;
|
||||
use rustc::mir::{BasicBlock, BasicBlockData, Location, Mir, Place, Rvalue};
|
||||
use rustc::mir::{Local, PlaceProjection, ProjectionElem, Statement, Terminator};
|
||||
use rustc::mir::{Local, Statement, Terminator};
|
||||
use rustc::ty::fold::TypeFoldable;
|
||||
use rustc::ty::subst::Substs;
|
||||
use rustc::ty::{self, CanonicalTy, ClosureSubsts, GeneratorSubsts};
|
||||
@ -41,7 +38,6 @@ pub(super) fn generate_constraints<'cx, 'gcx, 'tcx>(
|
||||
regioncx,
|
||||
location_table,
|
||||
all_facts,
|
||||
mir,
|
||||
};
|
||||
|
||||
cg.add_region_liveness_constraints_from_type_check(liveness_set_from_typeck);
|
||||
@ -57,7 +53,6 @@ struct ConstraintGeneration<'cg, 'cx: 'cg, 'gcx: 'tcx, 'tcx: 'cx> {
|
||||
all_facts: &'cg mut Option<AllFacts>,
|
||||
location_table: &'cg LocationTable,
|
||||
regioncx: &'cg mut RegionInferenceContext<'tcx>,
|
||||
mir: &'cg Mir<'tcx>,
|
||||
borrow_set: &'cg BorrowSet<'tcx>,
|
||||
}
|
||||
|
||||
@ -184,41 +179,6 @@ impl<'cg, 'cx, 'gcx, 'tcx> Visitor<'tcx> for ConstraintGeneration<'cg, 'cx, 'gcx
|
||||
self.super_terminator(block, terminator, location);
|
||||
}
|
||||
|
||||
fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
|
||||
debug!("visit_rvalue(rvalue={:?}, location={:?})", rvalue, location);
|
||||
|
||||
match rvalue {
|
||||
Rvalue::Ref(region, _borrow_kind, borrowed_place) => {
|
||||
// In some cases, e.g. when borrowing from an unsafe
|
||||
// place, we don't bother to create a loan, since
|
||||
// there are no conditions to validate.
|
||||
if let Some(all_facts) = self.all_facts {
|
||||
if let Some(borrow_index) = self.borrow_set.location_map.get(&location) {
|
||||
let region_vid = region.to_region_vid();
|
||||
all_facts.borrow_region.push((
|
||||
region_vid,
|
||||
*borrow_index,
|
||||
self.location_table.mid_index(location),
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
// Look for an rvalue like:
|
||||
//
|
||||
// & L
|
||||
//
|
||||
// where L is the path that is borrowed. In that case, we have
|
||||
// to add the reborrow constraints (which don't fall out
|
||||
// naturally from the type-checker).
|
||||
self.add_reborrow_constraint(location, region, borrowed_place);
|
||||
}
|
||||
|
||||
_ => {}
|
||||
}
|
||||
|
||||
self.super_rvalue(rvalue, location);
|
||||
}
|
||||
|
||||
fn visit_user_assert_ty(
|
||||
&mut self,
|
||||
_c_ty: &CanonicalTy<'tcx>,
|
||||
@ -250,7 +210,7 @@ impl<'cx, 'cg, 'gcx, 'tcx> ConstraintGeneration<'cx, 'cg, 'gcx, 'tcx> {
|
||||
for (region, location) in liveness_set {
|
||||
debug!("generate: {:#?} is live at {:#?}", region, location);
|
||||
let region_vid = regioncx.to_region_vid(region);
|
||||
regioncx.add_live_point(region_vid, *location);
|
||||
regioncx.add_live_element(region_vid, *location);
|
||||
}
|
||||
|
||||
if let Some(all_facts) = all_facts {
|
||||
@ -282,103 +242,7 @@ impl<'cx, 'cg, 'gcx, 'tcx> ConstraintGeneration<'cx, 'cg, 'gcx, 'tcx> {
|
||||
.tcx
|
||||
.for_each_free_region(&live_ty, |live_region| {
|
||||
let vid = live_region.to_region_vid();
|
||||
self.regioncx.add_live_point(vid, location);
|
||||
self.regioncx.add_live_element(vid, location);
|
||||
});
|
||||
}
|
||||
|
||||
// Add the reborrow constraint at `location` so that `borrowed_place`
|
||||
// is valid for `borrow_region`.
|
||||
fn add_reborrow_constraint(
|
||||
&mut self,
|
||||
location: Location,
|
||||
borrow_region: ty::Region<'tcx>,
|
||||
borrowed_place: &Place<'tcx>,
|
||||
) {
|
||||
let mut borrowed_place = borrowed_place;
|
||||
|
||||
debug!(
|
||||
"add_reborrow_constraint({:?}, {:?}, {:?})",
|
||||
location, borrow_region, borrowed_place
|
||||
);
|
||||
while let Projection(box PlaceProjection { base, elem }) = borrowed_place {
|
||||
debug!("add_reborrow_constraint - iteration {:?}", borrowed_place);
|
||||
|
||||
match *elem {
|
||||
ProjectionElem::Deref => {
|
||||
let tcx = self.infcx.tcx;
|
||||
let base_ty = base.ty(self.mir, tcx).to_ty(tcx);
|
||||
|
||||
debug!("add_reborrow_constraint - base_ty = {:?}", base_ty);
|
||||
match base_ty.sty {
|
||||
ty::TyRef(ref_region, _, mutbl) => {
|
||||
self.regioncx.add_outlives(
|
||||
location.boring(),
|
||||
ref_region.to_region_vid(),
|
||||
borrow_region.to_region_vid(),
|
||||
);
|
||||
|
||||
if let Some(all_facts) = self.all_facts {
|
||||
all_facts.outlives.push((
|
||||
ref_region.to_region_vid(),
|
||||
borrow_region.to_region_vid(),
|
||||
self.location_table.mid_index(location),
|
||||
));
|
||||
}
|
||||
|
||||
match mutbl {
|
||||
hir::Mutability::MutImmutable => {
|
||||
// Immutable reference. We don't need the base
|
||||
// to be valid for the entire lifetime of
|
||||
// the borrow.
|
||||
break;
|
||||
}
|
||||
hir::Mutability::MutMutable => {
|
||||
// Mutable reference. We *do* need the base
|
||||
// to be valid, because after the base becomes
|
||||
// invalid, someone else can use our mutable deref.
|
||||
|
||||
// This is in order to make the following function
|
||||
// illegal:
|
||||
// ```
|
||||
// fn unsafe_deref<'a, 'b>(x: &'a &'b mut T) -> &'b mut T {
|
||||
// &mut *x
|
||||
// }
|
||||
// ```
|
||||
//
|
||||
// As otherwise you could clone `&mut T` using the
|
||||
// following function:
|
||||
// ```
|
||||
// fn bad(x: &mut T) -> (&mut T, &mut T) {
|
||||
// let my_clone = unsafe_deref(&'a x);
|
||||
// ENDREGION 'a;
|
||||
// (my_clone, x)
|
||||
// }
|
||||
// ```
|
||||
}
|
||||
}
|
||||
}
|
||||
ty::TyRawPtr(..) => {
|
||||
// deref of raw pointer, guaranteed to be valid
|
||||
break;
|
||||
}
|
||||
ty::TyAdt(def, _) if def.is_box() => {
|
||||
// deref of `Box`, need the base to be valid - propagate
|
||||
}
|
||||
_ => bug!("unexpected deref ty {:?} in {:?}", base_ty, borrowed_place),
|
||||
}
|
||||
}
|
||||
ProjectionElem::Field(..)
|
||||
| ProjectionElem::Downcast(..)
|
||||
| ProjectionElem::Index(..)
|
||||
| ProjectionElem::ConstantIndex { .. }
|
||||
| ProjectionElem::Subslice { .. } => {
|
||||
// other field access
|
||||
}
|
||||
}
|
||||
|
||||
// The "propagate" case. We need to check that our base is valid
|
||||
// for the borrow's lifetime.
|
||||
borrowed_place = base;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
src/librustc_mir/borrow_check/nll/constraints/graph.rs (new file)
@ -0,0 +1,134 @@
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use borrow_check::nll::constraints::{ConstraintIndex, ConstraintSet};
use rustc::ty::RegionVid;
use rustc_data_structures::graph;
use rustc_data_structures::indexed_vec::IndexVec;

crate struct ConstraintGraph {
    first_constraints: IndexVec<RegionVid, Option<ConstraintIndex>>,
    next_constraints: IndexVec<ConstraintIndex, Option<ConstraintIndex>>,
}

impl ConstraintGraph {
    /// Create a "dependency graph" where each region constraint `R1:
    /// R2` is treated as an edge `R1 -> R2`. We use this graph to
    /// construct SCCs for region inference but also for error
    /// reporting.
    crate fn new(set: &ConstraintSet, num_region_vars: usize) -> Self {
        let mut first_constraints = IndexVec::from_elem_n(None, num_region_vars);
        let mut next_constraints = IndexVec::from_elem(None, &set.constraints);

        for (idx, constraint) in set.constraints.iter_enumerated().rev() {
            let mut head = &mut first_constraints[constraint.sup];
            let mut next = &mut next_constraints[idx];
            debug_assert!(next.is_none());
            *next = *head;
            *head = Some(idx);
        }

        Self {
            first_constraints,
            next_constraints,
        }
    }

    /// Given a region `R`, iterate over all constraints `R: R1`.
    crate fn outgoing_edges(&self, region_sup: RegionVid) -> Edges<'_> {
        let first = self.first_constraints[region_sup];
        Edges {
            graph: self,
            pointer: first,
        }
    }
}
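The `first_constraints`/`next_constraints` pair above is the usual trick of threading per-node adjacency lists through two index vectors. A standalone sketch of the same layout, using plain `usize` indices instead of `RegionVid`/`ConstraintIndex` (hypothetical names, not part of this patch):

// Thread edges (sup -> sub) into per-node singly linked lists,
// using only two index vectors (illustrative sketch).
struct EdgeList {
    first: Vec<Option<usize>>, // first edge index for each node
    next: Vec<Option<usize>>,  // next edge index for each edge
}

impl EdgeList {
    fn new(num_nodes: usize, edges: &[(usize, usize)]) -> Self {
        let mut first = vec![None; num_nodes];
        let mut next = vec![None; edges.len()];
        // Insert at the head in reverse order, so that iteration
        // later yields each node's edges in their original order.
        for (idx, &(sup, _sub)) in edges.iter().enumerate().rev() {
            next[idx] = first[sup];
            first[sup] = Some(idx);
        }
        EdgeList { first, next }
    }

    fn outgoing(&self, node: usize) -> impl Iterator<Item = usize> + '_ {
        let mut cur = self.first[node];
        std::iter::from_fn(move || {
            let idx = cur?;
            cur = self.next[idx];
            Some(idx)
        })
    }
}

fn main() {
    let edges = [(0, 1), (0, 2), (1, 2)];
    let list = EdgeList::new(3, &edges);
    assert_eq!(list.outgoing(0).collect::<Vec<_>>(), vec![0, 1]);
    assert_eq!(list.outgoing(1).collect::<Vec<_>>(), vec![2]);
}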
|
||||
|
||||
crate struct Edges<'s> {
|
||||
graph: &'s ConstraintGraph,
|
||||
pointer: Option<ConstraintIndex>,
|
||||
}
|
||||
|
||||
impl<'s> Iterator for Edges<'s> {
|
||||
type Item = ConstraintIndex;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
if let Some(p) = self.pointer {
|
||||
self.pointer = self.graph.next_constraints[p];
|
||||
Some(p)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
crate struct RegionGraph<'s> {
|
||||
set: &'s ConstraintSet,
|
||||
constraint_graph: &'s ConstraintGraph,
|
||||
}
|
||||
|
||||
impl<'s> RegionGraph<'s> {
|
||||
/// Create a "dependency graph" where each region constraint `R1:
|
||||
/// R2` is treated as an edge `R1 -> R2`. We use this graph to
|
||||
/// construct SCCs for region inference but also for error
|
||||
/// reporting.
|
||||
crate fn new(set: &'s ConstraintSet, constraint_graph: &'s ConstraintGraph) -> Self {
|
||||
Self {
|
||||
set,
|
||||
constraint_graph,
|
||||
}
|
||||
}
|
||||
|
||||
/// Given a region `R`, iterate over all regions `R1` such that
|
||||
/// there exists a constraint `R: R1`.
|
||||
crate fn sub_regions(&self, region_sup: RegionVid) -> Successors<'_> {
|
||||
Successors {
|
||||
set: self.set,
|
||||
edges: self.constraint_graph.outgoing_edges(region_sup),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
crate struct Successors<'s> {
|
||||
set: &'s ConstraintSet,
|
||||
edges: Edges<'s>,
|
||||
}
|
||||
|
||||
impl<'s> Iterator for Successors<'s> {
|
||||
type Item = RegionVid;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.edges.next().map(|c| self.set[c].sub)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'s> graph::DirectedGraph for RegionGraph<'s> {
|
||||
type Node = RegionVid;
|
||||
}
|
||||
|
||||
impl<'s> graph::WithNumNodes for RegionGraph<'s> {
|
||||
fn num_nodes(&self) -> usize {
|
||||
self.constraint_graph.first_constraints.len()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'s> graph::WithSuccessors for RegionGraph<'s> {
|
||||
fn successors<'graph>(
|
||||
&'graph self,
|
||||
node: Self::Node,
|
||||
) -> <Self as graph::GraphSuccessors<'graph>>::Iter {
|
||||
self.sub_regions(node)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'s, 'graph> graph::GraphSuccessors<'graph> for RegionGraph<'s> {
|
||||
type Item = RegionVid;
|
||||
type Iter = Successors<'graph>;
|
||||
}
|
@ -9,21 +9,24 @@
|
||||
// except according to those terms.
|
||||
|
||||
use rustc::ty::RegionVid;
|
||||
use rustc_data_structures::graph::scc::Sccs;
|
||||
use rustc_data_structures::indexed_vec::{Idx, IndexVec};
|
||||
use borrow_check::nll::type_check::Locations;
|
||||
|
||||
use std::fmt;
|
||||
use std::ops::Deref;
|
||||
|
||||
crate mod graph;
|
||||
|
||||
#[derive(Clone, Default)]
|
||||
crate struct ConstraintSet {
|
||||
constraints: IndexVec<ConstraintIndex, OutlivesConstraint>,
|
||||
}
|
||||
|
||||
impl ConstraintSet {
|
||||
pub fn push(&mut self, constraint: OutlivesConstraint) {
|
||||
crate fn push(&mut self, constraint: OutlivesConstraint) {
|
||||
debug!(
|
||||
"add_outlives({:?}: {:?} @ {:?})",
|
||||
"ConstraintSet::push({:?}: {:?} @ {:?}",
|
||||
constraint.sup, constraint.sub, constraint.locations
|
||||
);
|
||||
if constraint.sup == constraint.sub {
|
||||
@ -33,44 +36,32 @@ impl ConstraintSet {
|
||||
self.constraints.push(constraint);
|
||||
}
|
||||
|
||||
/// Once all constraints have been added, `link()` is used to thread together the constraints
|
||||
/// based on which would be affected when a particular region changes. See the next field of
|
||||
/// `OutlivesConstraint` for more details.
|
||||
/// link returns a map that is needed later by `each_affected_by_dirty`.
|
||||
pub fn link(&mut self, len: usize) -> IndexVec<RegionVid, Option<ConstraintIndex>> {
|
||||
let mut map = IndexVec::from_elem_n(None, len);
|
||||
|
||||
for (idx, constraint) in self.constraints.iter_enumerated_mut().rev() {
|
||||
let mut head = &mut map[constraint.sub];
|
||||
debug_assert!(constraint.next.is_none());
|
||||
constraint.next = *head;
|
||||
*head = Some(idx);
|
||||
}
|
||||
|
||||
map
|
||||
/// Constructs a graph from the constraint set; the graph makes it
|
||||
/// easy to find the constraints affecting a particular region
|
||||
/// (you should not mutate the set once this graph is
|
||||
/// constructed).
|
||||
crate fn graph(&self, num_region_vars: usize) -> graph::ConstraintGraph {
|
||||
graph::ConstraintGraph::new(self, num_region_vars)
|
||||
}
|
||||
|
||||
/// When a region R1 changes, we need to reprocess all constraints R2: R1 to take into account
|
||||
/// any new elements that R1 now has. This method will quickly enumerate all such constraints
|
||||
/// (that is, constraints where R1 is in the "subregion" position).
|
||||
/// To use it, invoke with `map[R1]` where map is the map returned by `link`;
|
||||
/// the callback op will be invoked for each affected constraint.
|
||||
pub fn each_affected_by_dirty(
|
||||
/// Compute cycles (SCCs) in the graph of regions. In particular,
|
||||
/// find all regions R1, R2 such that R1: R2 and R2: R1 and group
|
||||
/// them into an SCC, and find the relationships between SCCs.
|
||||
crate fn compute_sccs(
|
||||
&self,
|
||||
mut opt_dep_idx: Option<ConstraintIndex>,
|
||||
mut op: impl FnMut(ConstraintIndex),
|
||||
) {
|
||||
while let Some(dep_idx) = opt_dep_idx {
|
||||
op(dep_idx);
|
||||
opt_dep_idx = self.constraints[dep_idx].next;
|
||||
}
|
||||
constraint_graph: &graph::ConstraintGraph,
|
||||
) -> Sccs<RegionVid, ConstraintSccIndex> {
|
||||
let region_graph = &graph::RegionGraph::new(self, constraint_graph);
|
||||
Sccs::new(region_graph)
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for ConstraintSet {
|
||||
type Target = IndexVec<ConstraintIndex, OutlivesConstraint>;
|
||||
|
||||
fn deref(&self) -> &Self::Target { &self.constraints }
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.constraints
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
@ -85,16 +76,6 @@ pub struct OutlivesConstraint {
|
||||
/// Region that must be outlived.
|
||||
pub sub: RegionVid,
|
||||
|
||||
/// Later on, we thread the constraints onto a linked list
|
||||
/// grouped by their `sub` field. So if you had:
|
||||
///
|
||||
/// Index | Constraint | Next Field
|
||||
/// ----- | ---------- | ----------
|
||||
/// 0 | `'a: 'b` | Some(2)
|
||||
/// 1 | `'b: 'c` | None
|
||||
/// 2 | `'c: 'b` | None
|
||||
pub next: Option<ConstraintIndex>,
|
||||
|
||||
/// Where did this constraint arise?
|
||||
pub locations: Locations,
|
||||
}
|
||||
@ -110,3 +91,5 @@ impl fmt::Debug for OutlivesConstraint {
|
||||
}
|
||||
|
||||
newtype_index!(ConstraintIndex { DEBUG_FORMAT = "ConstraintIndex({})" });
|
||||
|
||||
newtype_index!(ConstraintSccIndex { DEBUG_FORMAT = "ConstraintSccIndex({})" });
|
@ -57,7 +57,7 @@ impl<'cx, 'gcx, 'tcx> UseFinder<'cx, 'gcx, 'tcx> {
|
||||
|
||||
queue.push_back(self.start_point);
|
||||
while let Some(p) = queue.pop_front() {
|
||||
if !self.regioncx.region_contains_point(self.region_vid, p) {
|
||||
if !self.regioncx.region_contains(self.region_vid, p) {
|
||||
continue;
|
||||
}
|
||||
|
||||
|
@ -29,7 +29,7 @@ use rustc::mir::{Terminator, TerminatorKind};
|
||||
use rustc::mir::{Field, Operand, BorrowKind};
|
||||
use rustc::ty::{self, ParamEnv};
|
||||
use rustc_data_structures::indexed_vec::Idx;
|
||||
use rustc_data_structures::control_flow_graph::dominators::Dominators;
|
||||
use rustc_data_structures::graph::dominators::Dominators;
|
||||
|
||||
pub(super) fn generate_invalidates<'cx, 'gcx, 'tcx>(
|
||||
infcx: &InferCtxt<'cx, 'gcx, 'tcx>,
|
||||
|
@ -45,7 +45,7 @@ mod renumber;
|
||||
crate mod type_check;
|
||||
mod universal_regions;
|
||||
|
||||
crate mod constraint_set;
|
||||
mod constraints;
|
||||
|
||||
use self::facts::AllFacts;
|
||||
use self::region_infer::RegionInferenceContext;
|
||||
@ -108,6 +108,7 @@ pub(in borrow_check) fn compute_regions<'cx, 'gcx, 'tcx>(
|
||||
def_id,
|
||||
&universal_regions,
|
||||
location_table,
|
||||
borrow_set,
|
||||
&liveness,
|
||||
&mut all_facts,
|
||||
flow_inits,
|
||||
@ -294,8 +295,15 @@ fn dump_mir_results<'a, 'gcx, 'tcx>(
|
||||
// Also dump the inference graph constraints as a graphviz file.
|
||||
let _: io::Result<()> = do catch {
|
||||
let mut file =
|
||||
pretty::create_dump_file(infcx.tcx, "regioncx.dot", None, "nll", &0, source)?;
|
||||
regioncx.dump_graphviz(&mut file)?;
|
||||
pretty::create_dump_file(infcx.tcx, "regioncx.all.dot", None, "nll", &0, source)?;
|
||||
regioncx.dump_graphviz_raw_constraints(&mut file)?;
|
||||
};
|
||||
|
||||
// Also dump the inference graph constraints as a graphviz file.
|
||||
let _: io::Result<()> = do catch {
|
||||
let mut file =
|
||||
pretty::create_dump_file(infcx.tcx, "regioncx.scc.dot", None, "nll", &0, source)?;
|
||||
regioncx.dump_graphviz_scc_constraints(&mut file)?;
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -83,7 +83,6 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
||||
sup,
|
||||
sub,
|
||||
locations,
|
||||
next: _,
|
||||
} = constraint;
|
||||
with_msg(&format!(
|
||||
"{:?}: {:?} due to {:?}",
|
||||
|
@ -50,18 +50,10 @@ impl fmt::Display for ConstraintCategory {
|
||||
|
||||
impl<'tcx> RegionInferenceContext<'tcx> {
|
||||
/// Walks the graph of constraints (where `'a: 'b` is considered
|
||||
/// an edge `'b -> 'a`) to find all paths from `from_region` to
|
||||
/// an edge `'a -> 'b`) to find all paths from `from_region` to
|
||||
/// `to_region`. The paths are accumulated into the vector
|
||||
/// `results`. The paths are stored as a series of
|
||||
/// `ConstraintIndex` values -- in other words, a list of *edges*.
|
||||
///
|
||||
/// # Parameters
|
||||
///
|
||||
/// - `from_region`
|
||||
/// When reporting an error, it is useful to be able to determine
|
||||
/// which constraints influenced the region being reported as an
|
||||
/// error. This function finds all of the paths from the
|
||||
/// constraint.
|
||||
fn find_constraint_paths_between_regions(
|
||||
&self,
|
||||
from_region: RegionVid,
|
||||
@ -89,8 +81,6 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
||||
stack: &mut Vec<ConstraintIndex>,
|
||||
results: &mut Vec<Vec<ConstraintIndex>>,
|
||||
) {
|
||||
let dependency_map = self.dependency_map.as_ref().unwrap();
|
||||
|
||||
// Check if we already visited this region.
|
||||
if !visited.insert(current_region) {
|
||||
return;
|
||||
@ -99,26 +89,25 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
||||
// Check if we reached the region we were looking for.
|
||||
if target_test(current_region) {
|
||||
if !stack.is_empty() {
|
||||
assert_eq!(self.constraints[stack[0]].sub, from_region);
|
||||
assert_eq!(self.constraints[stack[0]].sup, from_region);
|
||||
results.push(stack.clone());
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
self.constraints
|
||||
.each_affected_by_dirty(dependency_map[current_region], |constraint| {
|
||||
assert_eq!(self.constraints[constraint].sub, current_region);
|
||||
stack.push(constraint);
|
||||
self.find_constraint_paths_between_regions_helper(
|
||||
from_region,
|
||||
self.constraints[constraint].sup,
|
||||
target_test,
|
||||
visited,
|
||||
stack,
|
||||
results,
|
||||
);
|
||||
stack.pop();
|
||||
});
|
||||
for constraint in self.constraint_graph.outgoing_edges(current_region) {
|
||||
assert_eq!(self.constraints[constraint].sup, current_region);
|
||||
stack.push(constraint);
|
||||
self.find_constraint_paths_between_regions_helper(
|
||||
from_region,
|
||||
self.constraints[constraint].sub,
|
||||
target_test,
|
||||
visited,
|
||||
stack,
|
||||
results,
|
||||
);
|
||||
stack.pop();
|
||||
}
|
||||
}
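The helper above is a depth-first search that records each constraint path from `from_region` to a region passing `target_test`; because a region is visited at most once, at most one path through any intermediate region is reported. A simplified standalone sketch of that enumeration, with plain `usize` nodes and edge indices (hypothetical types, not the compiler's API):

// Collect paths (as lists of edge indices) from `current` to `target`,
// visiting each node at most once (illustrative sketch).
fn find_paths(
    succ: &[Vec<(usize, usize)>], // per node: (edge index, successor node)
    current: usize,
    target: usize,
    visited: &mut Vec<bool>,
    stack: &mut Vec<usize>,
    results: &mut Vec<Vec<usize>>,
) {
    if visited[current] {
        return;
    }
    visited[current] = true;
    if current == target {
        results.push(stack.clone());
        return;
    }
    for &(edge, next) in &succ[current] {
        stack.push(edge);
        find_paths(succ, next, target, visited, stack, results);
        stack.pop();
    }
}

fn main() {
    // Edges: e0 = 0 -> 1, e1 = 0 -> 2, e2 = 1 -> 2.
    let succ = vec![vec![(0, 1), (1, 2)], vec![(2, 2)], vec![]];
    let mut results = Vec::new();
    find_paths(&succ, 0, 2, &mut vec![false; 3], &mut Vec::new(), &mut results);
    // Node 2 is reached (and marked visited) via 0 -> 1 -> 2 first,
    // so the direct edge e1 does not produce a second path.
    assert_eq!(results, vec![vec![0, 2]]);
}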
|
||||
|
||||
/// This function will return true if a constraint is interesting and false if a constraint
|
||||
@ -210,7 +199,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
||||
}
|
||||
|
||||
// Find all paths
|
||||
let constraint_paths = self.find_constraint_paths_between_regions(outlived_fr, |r| r == fr);
|
||||
let constraint_paths = self.find_constraint_paths_between_regions(fr, |r| r == outlived_fr);
|
||||
debug!("report_error: constraint_paths={:#?}", constraint_paths);
|
||||
|
||||
// Find the shortest such path.
|
||||
@ -319,7 +308,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
||||
|
||||
while changed {
|
||||
changed = false;
|
||||
for constraint in &*self.constraints {
|
||||
for constraint in self.constraints.iter() {
|
||||
if let Some(n) = result_set[constraint.sup] {
|
||||
let m = n + 1;
|
||||
if result_set[constraint.sub]
|
||||
|
@ -12,22 +12,40 @@
|
||||
//! libgraphviz traits, specialized to attaching borrowck analysis
|
||||
//! data to rendered labels.
|
||||
|
||||
use super::*;
|
||||
use borrow_check::nll::constraints::OutlivesConstraint;
|
||||
use dot::{self, IntoCow};
|
||||
use rustc_data_structures::indexed_vec::Idx;
|
||||
use std::borrow::Cow;
|
||||
use std::io::{self, Write};
|
||||
use super::*;
|
||||
use borrow_check::nll::constraint_set::OutlivesConstraint;
|
||||
|
||||
|
||||
impl<'tcx> RegionInferenceContext<'tcx> {
|
||||
/// Write out the region constraint graph.
|
||||
pub(crate) fn dump_graphviz(&self, mut w: &mut dyn Write) -> io::Result<()> {
|
||||
dot::render(self, &mut w)
|
||||
crate fn dump_graphviz_raw_constraints(&self, mut w: &mut dyn Write) -> io::Result<()> {
|
||||
dot::render(&RawConstraints { regioncx: self }, &mut w)
|
||||
}
|
||||
|
||||
/// Write out the region constraint graph.
|
||||
crate fn dump_graphviz_scc_constraints(&self, mut w: &mut dyn Write) -> io::Result<()> {
|
||||
let mut nodes_per_scc: IndexVec<ConstraintSccIndex, _> = self.constraint_sccs
|
||||
.all_sccs()
|
||||
.map(|_| Vec::new())
|
||||
.collect();
|
||||
|
||||
for region in self.definitions.indices() {
|
||||
let scc = self.constraint_sccs.scc(region);
|
||||
nodes_per_scc[scc].push(region);
|
||||
}
|
||||
|
||||
dot::render(&SccConstraints { regioncx: self, nodes_per_scc }, &mut w)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'this, 'tcx> dot::Labeller<'this> for RegionInferenceContext<'tcx> {
|
||||
struct RawConstraints<'a, 'tcx: 'a> {
|
||||
regioncx: &'a RegionInferenceContext<'tcx>,
|
||||
}
|
||||
|
||||
impl<'a, 'this, 'tcx> dot::Labeller<'this> for RawConstraints<'a, 'tcx> {
|
||||
type Node = RegionVid;
|
||||
type Edge = OutlivesConstraint;
|
||||
|
||||
@ -48,26 +66,86 @@ impl<'this, 'tcx> dot::Labeller<'this> for RegionInferenceContext<'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'this, 'tcx> dot::GraphWalk<'this> for RegionInferenceContext<'tcx> {
|
||||
impl<'a, 'this, 'tcx> dot::GraphWalk<'this> for RawConstraints<'a, 'tcx> {
|
||||
type Node = RegionVid;
|
||||
type Edge = OutlivesConstraint;
|
||||
|
||||
fn nodes(&'this self) -> dot::Nodes<'this, RegionVid> {
|
||||
let vids: Vec<RegionVid> = self.definitions.indices().collect();
|
||||
let vids: Vec<RegionVid> = self.regioncx.definitions.indices().collect();
|
||||
vids.into_cow()
|
||||
}
|
||||
fn edges(&'this self) -> dot::Edges<'this, OutlivesConstraint> {
|
||||
(&self.constraints.raw[..]).into_cow()
|
||||
(&self.regioncx.constraints.raw[..]).into_cow()
|
||||
}
|
||||
|
||||
// Render `a: b` as `a <- b`, indicating the flow
|
||||
// Render `a: b` as `a -> b`, indicating the flow
|
||||
// of data during inference.
|
||||
|
||||
fn source(&'this self, edge: &OutlivesConstraint) -> RegionVid {
|
||||
edge.sub
|
||||
edge.sup
|
||||
}
|
||||
|
||||
fn target(&'this self, edge: &OutlivesConstraint) -> RegionVid {
|
||||
edge.sup
|
||||
edge.sub
|
||||
}
|
||||
}
|
||||
|
||||
struct SccConstraints<'a, 'tcx: 'a> {
|
||||
regioncx: &'a RegionInferenceContext<'tcx>,
|
||||
nodes_per_scc: IndexVec<ConstraintSccIndex, Vec<RegionVid>>,
|
||||
}
|
||||
|
||||
impl<'a, 'this, 'tcx> dot::Labeller<'this> for SccConstraints<'a, 'tcx> {
|
||||
type Node = ConstraintSccIndex;
|
||||
type Edge = (ConstraintSccIndex, ConstraintSccIndex);
|
||||
|
||||
fn graph_id(&'this self) -> dot::Id<'this> {
|
||||
dot::Id::new(format!("RegionInferenceContext")).unwrap()
|
||||
}
|
||||
fn node_id(&'this self, n: &ConstraintSccIndex) -> dot::Id<'this> {
|
||||
dot::Id::new(format!("r{}", n.index())).unwrap()
|
||||
}
|
||||
fn node_shape(&'this self, _node: &ConstraintSccIndex) -> Option<dot::LabelText<'this>> {
|
||||
Some(dot::LabelText::LabelStr(Cow::Borrowed("box")))
|
||||
}
|
||||
fn node_label(&'this self, n: &ConstraintSccIndex) -> dot::LabelText<'this> {
|
||||
let nodes = &self.nodes_per_scc[*n];
|
||||
dot::LabelText::LabelStr(format!("{:?} = {:?}", n, nodes).into_cow())
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'this, 'tcx> dot::GraphWalk<'this> for SccConstraints<'a, 'tcx> {
|
||||
type Node = ConstraintSccIndex;
|
||||
type Edge = (ConstraintSccIndex, ConstraintSccIndex);
|
||||
|
||||
fn nodes(&'this self) -> dot::Nodes<'this, ConstraintSccIndex> {
|
||||
let vids: Vec<ConstraintSccIndex> = self.regioncx.constraint_sccs.all_sccs().collect();
|
||||
vids.into_cow()
|
||||
}
|
||||
fn edges(&'this self) -> dot::Edges<'this, (ConstraintSccIndex, ConstraintSccIndex)> {
|
||||
let edges: Vec<_> = self.regioncx
|
||||
.constraint_sccs
|
||||
.all_sccs()
|
||||
.flat_map(|scc_a| {
|
||||
self.regioncx
|
||||
.constraint_sccs
|
||||
.successors(scc_a)
|
||||
.iter()
|
||||
.map(move |&scc_b| (scc_a, scc_b))
|
||||
})
|
||||
.collect();
|
||||
|
||||
edges.into_cow()
|
||||
}
|
||||
|
||||
// Render `a: b` as `a -> b`, indicating the flow
|
||||
// of data during inference.
|
||||
|
||||
fn source(&'this self, edge: &(ConstraintSccIndex, ConstraintSccIndex)) -> ConstraintSccIndex {
|
||||
edge.0
|
||||
}
|
||||
|
||||
fn target(&'this self, edge: &(ConstraintSccIndex, ConstraintSccIndex)) -> ConstraintSccIndex {
|
||||
edge.1
|
||||
}
|
||||
}
|
||||
|
@ -9,7 +9,11 @@
|
||||
// except according to those terms.
|
||||
|
||||
use super::universal_regions::UniversalRegions;
|
||||
use borrow_check::nll::constraint_set::{ConstraintIndex, ConstraintSet, OutlivesConstraint};
|
||||
use borrow_check::nll::constraints::{
|
||||
ConstraintIndex, ConstraintSccIndex, ConstraintSet, OutlivesConstraint,
|
||||
};
|
||||
use borrow_check::nll::constraints::graph::ConstraintGraph;
|
||||
use borrow_check::nll::region_infer::values::ToElementIndex;
|
||||
use borrow_check::nll::type_check::Locations;
|
||||
use rustc::hir::def_id::DefId;
|
||||
use rustc::infer::canonical::QueryRegionConstraint;
|
||||
@ -23,8 +27,9 @@ use rustc::mir::{
|
||||
};
|
||||
use rustc::ty::{self, RegionVid, Ty, TyCtxt, TypeFoldable};
|
||||
use rustc::util::common;
|
||||
use rustc_data_structures::bitvec::BitVector;
|
||||
use rustc_data_structures::indexed_vec::{Idx, IndexVec};
|
||||
use rustc_data_structures::graph::scc::Sccs;
|
||||
use rustc_data_structures::indexed_set::{IdxSet, IdxSetBuf};
|
||||
use rustc_data_structures::indexed_vec::IndexVec;
|
||||
|
||||
use std::rc::Rc;
|
||||
|
||||
@ -51,28 +56,31 @@ pub struct RegionInferenceContext<'tcx> {
|
||||
/// regions, these start out empty and steadily grow, though for
|
||||
/// each universally quantified region R they start out containing
|
||||
/// the entire CFG and `end(R)`.
|
||||
liveness_constraints: RegionValues,
|
||||
liveness_constraints: RegionValues<RegionVid>,
|
||||
|
||||
/// The final inferred values of the inference variables; `None`
|
||||
/// until `solve` is invoked.
|
||||
inferred_values: Option<RegionValues>,
|
||||
/// The outlives constraints computed by the type-check.
|
||||
constraints: Rc<ConstraintSet>,
|
||||
|
||||
/// For each variable, stores the index of the first constraint
|
||||
/// where that variable appears on the RHS. This is the start of a
|
||||
/// 'linked list' threaded by the `next` field in `Constraint`.
|
||||
///
|
||||
/// This map is built when values are inferred.
|
||||
dependency_map: Option<IndexVec<RegionVid, Option<ConstraintIndex>>>,
|
||||
/// The constraint-set, but in graph form, making it easy to traverse
|
||||
/// the constraints adjacent to a particular region. Used to construct
|
||||
/// the SCC (see `constraint_sccs`) and for error reporting.
|
||||
constraint_graph: Rc<ConstraintGraph>,
|
||||
|
||||
/// The constraints we have accumulated and used during solving.
|
||||
constraints: ConstraintSet,
|
||||
/// The SCC computed from `constraints` and
|
||||
/// `constraint_graph`. Used to compute the values of each region.
|
||||
constraint_sccs: Rc<Sccs<RegionVid, ConstraintSccIndex>>,
|
||||
|
||||
/// The final inferred values of the region variables; we compute
|
||||
/// one value per SCC. To get the value for any given *region*,
|
||||
/// you first find which scc it is a part of.
|
||||
scc_values: RegionValues<ConstraintSccIndex>,
|
||||
|
||||
/// Type constraints that we check after solving.
|
||||
type_tests: Vec<TypeTest<'tcx>>,
|
||||
|
||||
/// Information about the universally quantified regions in scope
|
||||
/// on this function and their (known) relations to one another.
|
||||
universal_regions: UniversalRegions<'tcx>,
|
||||
universal_regions: Rc<UniversalRegions<'tcx>>,
|
||||
}
|
||||
|
||||
struct RegionDefinition<'tcx> {
|
||||
@ -203,27 +211,32 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
||||
outlives_constraints: ConstraintSet,
|
||||
type_tests: Vec<TypeTest<'tcx>>,
|
||||
) -> Self {
|
||||
// The `next` field should not yet have been initialized:
|
||||
debug_assert!(outlives_constraints.iter().all(|c| c.next.is_none()));
|
||||
|
||||
let universal_regions = Rc::new(universal_regions);
|
||||
let num_region_variables = var_infos.len();
|
||||
let num_universal_regions = universal_regions.len();
|
||||
|
||||
let elements = &Rc::new(RegionValueElements::new(mir, num_universal_regions));
|
||||
|
||||
// Create a RegionDefinition for each inference variable.
|
||||
let definitions = var_infos
|
||||
let definitions: IndexVec<_, _> = var_infos
|
||||
.into_iter()
|
||||
.map(|info| RegionDefinition::new(info.origin))
|
||||
.collect();
|
||||
|
||||
let constraints = Rc::new(outlives_constraints); // freeze constraints
|
||||
let constraint_graph = Rc::new(constraints.graph(definitions.len()));
|
||||
let constraint_sccs = Rc::new(constraints.compute_sccs(&constraint_graph));
|
||||
|
||||
let scc_values = RegionValues::new(elements, constraint_sccs.num_sccs());
|
||||
|
||||
let mut result = Self {
|
||||
definitions,
|
||||
elements: elements.clone(),
|
||||
liveness_constraints: RegionValues::new(elements, num_region_variables),
|
||||
inferred_values: None,
|
||||
dependency_map: None,
|
||||
constraints: outlives_constraints,
|
||||
constraints,
|
||||
constraint_sccs,
|
||||
constraint_graph,
|
||||
scc_values,
|
||||
type_tests,
|
||||
universal_regions,
|
||||
};
|
||||
@ -265,7 +278,9 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
||||
}
|
||||
|
||||
// For each universally quantified region X:
|
||||
for variable in self.universal_regions.universal_regions() {
|
||||
let elements = self.elements.clone();
|
||||
let universal_regions = self.universal_regions.clone();
|
||||
for variable in universal_regions.universal_regions() {
|
||||
// These should be free-region variables.
|
||||
assert!(match self.definitions[variable].origin {
|
||||
RegionVariableOrigin::NLL(NLLRegionVariableOrigin::FreeRegion) => true,
|
||||
@ -275,12 +290,12 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
||||
self.definitions[variable].is_universal = true;
|
||||
|
||||
// Add all nodes in the CFG to liveness constraints
|
||||
for point_index in self.elements.all_point_indices() {
|
||||
self.liveness_constraints.add_element(variable, point_index);
|
||||
for point_index in elements.all_point_indices() {
|
||||
self.add_live_element(variable, point_index);
|
||||
}
|
||||
|
||||
// Add `end(X)` into the set for X.
|
||||
self.liveness_constraints.add_element(variable, variable);
|
||||
self.add_live_element(variable, variable);
|
||||
}
|
||||
}
|
||||
|
||||
@ -300,48 +315,38 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
||||
/// Returns true if the region `r` contains the point `p`.
|
||||
///
|
||||
/// Panics if called before `solve()` executes,
|
||||
pub fn region_contains_point<R>(&self, r: R, p: Location) -> bool
|
||||
where
|
||||
R: ToRegionVid,
|
||||
{
|
||||
let inferred_values = self
|
||||
.inferred_values
|
||||
.as_ref()
|
||||
.expect("region values not yet inferred");
|
||||
inferred_values.contains(r.to_region_vid(), p)
|
||||
crate fn region_contains(&self, r: impl ToRegionVid, p: impl ToElementIndex) -> bool {
|
||||
let scc = self.constraint_sccs.scc(r.to_region_vid());
|
||||
self.scc_values.contains(scc, p)
|
||||
}
|
||||
|
||||
/// Returns access to the value of `r` for debugging purposes.
|
||||
crate fn region_value_str(&self, r: RegionVid) -> String {
|
||||
let inferred_values = self
|
||||
.inferred_values
|
||||
.as_ref()
|
||||
.expect("region values not yet inferred");
|
||||
|
||||
inferred_values.region_value_str(r)
|
||||
let scc = self.constraint_sccs.scc(r.to_region_vid());
|
||||
self.scc_values.region_value_str(scc)
|
||||
}
|
||||
|
||||
/// Indicates that the region variable `v` is live at the point `point`.
|
||||
///
|
||||
/// Returns `true` if this constraint is new and `false` if the
/// constraint was already present.
|
||||
pub(super) fn add_live_point(&mut self, v: RegionVid, point: Location) -> bool {
|
||||
debug!("add_live_point({:?}, {:?})", v, point);
|
||||
assert!(self.inferred_values.is_none(), "values already inferred");
|
||||
pub(super) fn add_live_element(
|
||||
&mut self,
|
||||
v: RegionVid,
|
||||
elem: impl ToElementIndex,
|
||||
) -> bool {
|
||||
debug!("add_live_element({:?}, {:?})", v, elem);
|
||||
|
||||
let element = self.elements.index(point);
|
||||
self.liveness_constraints.add_element(v, element)
|
||||
}
|
||||
// Add to the liveness values for `v`...
|
||||
if self.liveness_constraints.add_element(v, elem) {
|
||||
// ...but also add to the SCC in which `v` appears.
|
||||
let scc = self.constraint_sccs.scc(v);
|
||||
self.scc_values.add_element(scc, elem);
|
||||
|
||||
/// Indicates that the region variable `sup` must outlive `sub`.
|
||||
pub(super) fn add_outlives(&mut self, locations: Locations, sup: RegionVid, sub: RegionVid) {
|
||||
assert!(self.inferred_values.is_none(), "values already inferred");
|
||||
self.constraints.push(OutlivesConstraint {
|
||||
locations,
|
||||
sup,
|
||||
sub,
|
||||
next: None,
|
||||
})
|
||||
true
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
/// Perform region inference and report errors if we see any
|
||||
@ -366,8 +371,6 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
||||
mir: &Mir<'tcx>,
|
||||
mir_def_id: DefId,
|
||||
) -> Option<ClosureRegionRequirements<'gcx>> {
|
||||
assert!(self.inferred_values.is_none(), "values already inferred");
|
||||
|
||||
self.propagate_constraints(mir);
|
||||
|
||||
// If this is a closure, we can propagate unsatisfied
|
||||
@ -402,65 +405,62 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
||||
/// for each region variable until all the constraints are
|
||||
/// satisfied. Note that some values may grow **too** large to be
|
||||
/// feasible, but we check this later.
|
||||
fn propagate_constraints(&mut self, mir: &Mir<'tcx>) {
|
||||
self.dependency_map = Some(self.build_dependency_map());
|
||||
let inferred_values = self.compute_region_values(mir);
|
||||
self.inferred_values = Some(inferred_values);
|
||||
}
|
||||
fn propagate_constraints(&mut self, _mir: &Mir<'tcx>) {
|
||||
debug!("propagate_constraints()");
|
||||
|
||||
fn compute_region_values(&self, _mir: &Mir<'tcx>) -> RegionValues {
|
||||
debug!("compute_region_values()");
|
||||
debug!("compute_region_values: constraints={:#?}", {
|
||||
debug!("propagate_constraints: constraints={:#?}", {
|
||||
let mut constraints: Vec<_> = self.constraints.iter().collect();
|
||||
constraints.sort();
|
||||
constraints
|
||||
});
|
||||
|
||||
// The initial values for each region are derived from the liveness
|
||||
// constraints we have accumulated.
|
||||
let mut inferred_values = self.liveness_constraints.clone();
|
||||
|
||||
let dependency_map = self.dependency_map.as_ref().unwrap();
|
||||
|
||||
// Constraints that may need to be repropagated (initially all):
|
||||
let mut dirty_list: Vec<_> = self.constraints.indices().collect();
|
||||
|
||||
// Set to 0 for each constraint that is on the dirty list:
|
||||
let mut clean_bit_vec = BitVector::new(dirty_list.len());
|
||||
|
||||
debug!("propagate_constraints: --------------------");
|
||||
while let Some(constraint_idx) = dirty_list.pop() {
|
||||
clean_bit_vec.insert(constraint_idx.index());
|
||||
|
||||
let constraint = &self.constraints[constraint_idx];
|
||||
debug!("propagate_constraints: constraint={:?}", constraint);
|
||||
|
||||
if inferred_values.add_region(constraint.sup, constraint.sub) {
|
||||
debug!("propagate_constraints: sub={:?}", constraint.sub);
|
||||
debug!("propagate_constraints: sup={:?}", constraint.sup);
|
||||
|
||||
self.constraints.each_affected_by_dirty(
|
||||
dependency_map[constraint.sup],
|
||||
|dep_idx| {
|
||||
if clean_bit_vec.remove(dep_idx.index()) {
|
||||
dirty_list.push(dep_idx);
|
||||
}
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
debug!("\n");
|
||||
// To propagate constraints, we walk the DAG induced by the
|
||||
// SCC. For each SCC, we visit its successors and compute
|
||||
// their values, then we union all those values to get our
|
||||
// own.
|
||||
let visited = &mut IdxSetBuf::new_empty(self.constraint_sccs.num_sccs());
|
||||
for scc_index in self.constraint_sccs.all_sccs() {
|
||||
self.propagate_constraint_sccs_if_new(scc_index, visited);
|
||||
}
|
||||
|
||||
inferred_values
|
||||
}
|
||||
|
||||
/// Builds up a map from each region variable X to a vector with the
|
||||
/// indices of constraints that need to be re-evaluated when X changes.
|
||||
/// These are constraints like Y: X @ P -- so if X changed, we may
|
||||
/// need to grow Y.
|
||||
fn build_dependency_map(&mut self) -> IndexVec<RegionVid, Option<ConstraintIndex>> {
|
||||
self.constraints.link(self.definitions.len())
|
||||
#[inline]
|
||||
fn propagate_constraint_sccs_if_new(
|
||||
&mut self,
|
||||
scc_a: ConstraintSccIndex,
|
||||
visited: &mut IdxSet<ConstraintSccIndex>,
|
||||
) {
|
||||
if visited.add(&scc_a) {
|
||||
self.propagate_constraint_sccs_new(scc_a, visited);
|
||||
}
|
||||
}
|
||||
|
||||
fn propagate_constraint_sccs_new(
|
||||
&mut self,
|
||||
scc_a: ConstraintSccIndex,
|
||||
visited: &mut IdxSet<ConstraintSccIndex>,
|
||||
) {
|
||||
let constraint_sccs = self.constraint_sccs.clone();
|
||||
|
||||
// Walk each SCC `B` such that `A: B`...
|
||||
for &scc_b in constraint_sccs.successors(scc_a) {
|
||||
debug!(
|
||||
"propagate_constraint_sccs: scc_a = {:?} scc_b = {:?}",
|
||||
scc_a, scc_b
|
||||
);
|
||||
|
||||
// ...compute the value of `B`...
|
||||
self.propagate_constraint_sccs_if_new(scc_b, visited);
|
||||
|
||||
// ...and add elements from `B` into `A`.
|
||||
self.scc_values.add_region(scc_a, scc_b);
|
||||
}
|
||||
|
||||
debug!(
|
||||
"propagate_constraint_sccs: scc_a = {:?} has value {:?}",
|
||||
scc_a,
|
||||
self.scc_values.region_value_str(scc_a),
|
||||
);
|
||||
}
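The two functions above perform a memoized depth-first walk of the SCC DAG: each SCC's value becomes the union of its own liveness elements and the values of its successor SCCs. A toy sketch of the same propagation over plain sets (illustrative indices only, not part of this patch):

use std::collections::HashSet;

// For every SCC in a DAG, union in the values of everything it can
// reach through its successors (memoized DFS, illustrative sketch).
fn propagate(
    scc: usize,
    successors: &[Vec<usize>],
    values: &mut [HashSet<u32>],
    visited: &mut [bool],
) {
    if visited[scc] {
        return;
    }
    visited[scc] = true;
    for &succ in &successors[scc] {
        propagate(succ, successors, values, visited);
        let extra: Vec<u32> = values[succ].iter().copied().collect();
        values[scc].extend(extra);
    }
}

fn main() {
    // SCC DAG 0 -> 1 -> 2 with initial element sets {0}, {1}, {2}.
    let successors = vec![vec![1], vec![2], vec![]];
    let mut values: Vec<HashSet<u32>> = vec![[0].into(), [1].into(), [2].into()];
    let mut visited = vec![false; 3];
    for scc in 0..3 {
        propagate(scc, &successors, &mut values, &mut visited);
    }
    assert_eq!(values[0], HashSet::from([0, 1, 2]));
}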
|
||||
|
||||
/// Once regions have been propagated, this method is used to see
|
||||
@ -535,12 +535,9 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
||||
if self.universal_regions.is_universal_region(r) {
|
||||
return self.definitions[r].external_name;
|
||||
} else {
|
||||
let inferred_values = self
|
||||
.inferred_values
|
||||
.as_ref()
|
||||
.expect("region values not yet inferred");
|
||||
let r_scc = self.constraint_sccs.scc(r);
|
||||
let upper_bound = self.universal_upper_bound(r);
|
||||
if inferred_values.contains(r, upper_bound) {
|
||||
if self.scc_values.contains(r_scc, upper_bound) {
|
||||
self.to_error_region(upper_bound)
|
||||
} else {
|
||||
None
|
||||
@ -575,11 +572,8 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
||||
// region, which ensures it can be encoded in a `ClosureOutlivesRequirement`.
|
||||
let lower_bound_plus = self.non_local_universal_upper_bound(*lower_bound);
|
||||
assert!(self.universal_regions.is_universal_region(lower_bound_plus));
|
||||
assert!(
|
||||
!self
|
||||
.universal_regions
|
||||
.is_local_free_region(lower_bound_plus)
|
||||
);
|
||||
assert!(!self.universal_regions
|
||||
.is_local_free_region(lower_bound_plus));
|
||||
|
||||
propagated_outlives_requirements.push(ClosureOutlivesRequirement {
|
||||
subject,
|
||||
@ -607,10 +601,6 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
||||
) -> Option<ClosureOutlivesSubject<'gcx>> {
|
||||
let tcx = infcx.tcx;
|
||||
let gcx = tcx.global_tcx();
|
||||
let inferred_values = self
|
||||
.inferred_values
|
||||
.as_ref()
|
||||
.expect("region values not yet inferred");
|
||||
|
||||
debug!("try_promote_type_test_subject(ty = {:?})", ty);
|
||||
|
||||
@ -653,7 +643,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
||||
// `'static` is not contained in `r`, we would fail to
|
||||
// find an equivalent.
|
||||
let upper_bound = self.non_local_universal_upper_bound(region_vid);
|
||||
if inferred_values.contains(region_vid, upper_bound) {
|
||||
if self.region_contains(region_vid, upper_bound) {
|
||||
tcx.mk_region(ty::ReClosureBound(upper_bound))
|
||||
} else {
|
||||
// In the case of a failure, use a `ReVar`
|
||||
@ -686,12 +676,10 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
||||
/// except that it further takes the non-local upper
|
||||
/// bound of `'y`, so that the final result is non-local.
|
||||
fn non_local_universal_upper_bound(&self, r: RegionVid) -> RegionVid {
|
||||
let inferred_values = self.inferred_values.as_ref().unwrap();
|
||||
|
||||
debug!(
|
||||
"non_local_universal_upper_bound(r={:?}={})",
|
||||
r,
|
||||
inferred_values.region_value_str(r)
|
||||
self.region_value_str(r)
|
||||
);
|
||||
|
||||
let lub = self.universal_upper_bound(r);
|
||||
@ -723,18 +711,17 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
||||
/// - For each `end('x)` element in `'r`, compute the mutual LUB, yielding
|
||||
/// a result `'y`.
|
||||
fn universal_upper_bound(&self, r: RegionVid) -> RegionVid {
|
||||
let inferred_values = self.inferred_values.as_ref().unwrap();
|
||||
|
||||
debug!(
|
||||
"universal_upper_bound(r={:?}={})",
|
||||
r,
|
||||
inferred_values.region_value_str(r)
|
||||
self.region_value_str(r)
|
||||
);
|
||||
|
||||
// Find the smallest universal region that contains all other
|
||||
// universal regions within `region`.
|
||||
let mut lub = self.universal_regions.fr_fn_body;
|
||||
for ur in inferred_values.universal_regions_outlived_by(r) {
|
||||
let r_scc = self.constraint_sccs.scc(r);
|
||||
for ur in self.scc_values.universal_regions_outlived_by(r_scc) {
|
||||
lub = self.universal_regions.postdom_upper_bound(lub, ur);
|
||||
}
|
||||
|
||||
@ -779,31 +766,29 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
||||
) -> bool {
|
||||
debug!("eval_outlives({:?}: {:?})", sup_region, sub_region);
|
||||
|
||||
let inferred_values = self
|
||||
.inferred_values
|
||||
.as_ref()
|
||||
.expect("values for regions not yet inferred");
|
||||
|
||||
debug!(
|
||||
"eval_outlives: sup_region's value = {:?}",
|
||||
inferred_values.region_value_str(sup_region),
|
||||
self.region_value_str(sup_region),
|
||||
);
|
||||
debug!(
|
||||
"eval_outlives: sub_region's value = {:?}",
|
||||
inferred_values.region_value_str(sub_region),
|
||||
self.region_value_str(sub_region),
|
||||
);
|
||||
|
||||
let sub_region_scc = self.constraint_sccs.scc(sub_region);
|
||||
let sup_region_scc = self.constraint_sccs.scc(sup_region);
|
||||
|
||||
// Both the `sub_region` and `sup_region` consist of the union
|
||||
// of some number of universal regions (along with the union
|
||||
// of various points in the CFG; ignore those points for
|
||||
// now). Therefore, the sup-region outlives the sub-region if,
|
||||
// for each universal region R1 in the sub-region, there
|
||||
// exists some region R2 in the sup-region that outlives R1.
|
||||
let universal_outlives = inferred_values
|
||||
.universal_regions_outlived_by(sub_region)
|
||||
let universal_outlives = self.scc_values
|
||||
.universal_regions_outlived_by(sub_region_scc)
|
||||
.all(|r1| {
|
||||
inferred_values
|
||||
.universal_regions_outlived_by(sup_region)
|
||||
self.scc_values
|
||||
.universal_regions_outlived_by(sup_region_scc)
|
||||
.any(|r2| self.universal_regions.outlives(r2, r1))
|
||||
});
|
||||
|
||||
@ -819,7 +804,8 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
||||
return true;
|
||||
}
|
||||
|
||||
inferred_values.contains_points(sup_region, sub_region)
|
||||
self.scc_values
|
||||
.contains_points(sup_region_scc, sub_region_scc)
|
||||
}
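The universal-region half of the check above can be read as: every universal region required by the sub-region must be outlived by some universal region in the sup-region. A toy sketch under a hypothetical `outlives` relation (not the compiler's API):

// 'sup outlives 'sub if each universal region in 'sub's value is
// outlived by some universal region in 'sup's value (illustrative sketch).
fn eval_outlives(
    sup_universals: &[usize],
    sub_universals: &[usize],
    outlives: &dyn Fn(usize, usize) -> bool, // known universal-region relation
) -> bool {
    sub_universals
        .iter()
        .all(|&r1| sup_universals.iter().any(|&r2| outlives(r2, r1)))
}

fn main() {
    // Pretend region 0 is 'static, which outlives everything.
    let outlives = |r2: usize, r1: usize| r2 == 0 || r2 == r1;
    assert!(eval_outlives(&[0], &[1, 2], &outlives));
    assert!(!eval_outlives(&[1], &[2], &outlives));
}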
|
||||
|
||||
/// Once regions have been propagated, this method is used to see
|
||||
@ -848,8 +834,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
||||
) {
|
||||
// The universal regions are always found in a prefix of the
|
||||
// full list.
|
||||
let universal_definitions = self
|
||||
.definitions
|
||||
let universal_definitions = self.definitions
|
||||
.iter_enumerated()
|
||||
.take_while(|(_, fr_definition)| fr_definition.is_universal);
|
||||
|
||||
@ -883,13 +868,13 @@ impl<'tcx> RegionInferenceContext<'tcx> {
|
||||
longer_fr: RegionVid,
|
||||
propagated_outlives_requirements: &mut Option<&mut Vec<ClosureOutlivesRequirement<'gcx>>>,
|
||||
) {
|
||||
let inferred_values = self.inferred_values.as_ref().unwrap();
|
||||
|
||||
debug!("check_universal_region(fr={:?})", longer_fr);
|
||||
|
||||
let longer_fr_scc = self.constraint_sccs.scc(longer_fr);
|
||||
|
||||
// Find every region `o` such that `fr: o`
|
||||
// (because `fr` includes `end(o)`).
|
||||
for shorter_fr in inferred_values.universal_regions_outlived_by(longer_fr) {
|
||||
for shorter_fr in self.scc_values.universal_regions_outlived_by(longer_fr_scc) {
|
||||
// If it is known that `fr: o`, carry on.
|
||||
if self.universal_regions.outlives(longer_fr, shorter_fr) {
|
||||
continue;
|
||||
|
@ -18,7 +18,7 @@ use std::rc::Rc;
|
||||
|
||||
/// Maps between the various kinds of elements of a region value to
|
||||
/// the internal indices that we use.
|
||||
pub(super) struct RegionValueElements {
|
||||
crate struct RegionValueElements {
|
||||
/// For each basic block, how many points are contained within?
|
||||
statements_before_block: IndexVec<BasicBlock, usize>,
|
||||
num_points: usize,
|
||||
@ -26,7 +26,7 @@ pub(super) struct RegionValueElements {
|
||||
}
|
||||
|
||||
impl RegionValueElements {
|
||||
pub(super) fn new(mir: &Mir<'_>, num_universal_regions: usize) -> Self {
|
||||
crate fn new(mir: &Mir<'_>, num_universal_regions: usize) -> Self {
|
||||
let mut num_points = 0;
|
||||
let statements_before_block = mir
|
||||
.basic_blocks()
|
||||
@ -56,22 +56,22 @@ impl RegionValueElements {
|
||||
}
|
||||
|
||||
/// Total number of element indices that exist.
|
||||
pub(super) fn num_elements(&self) -> usize {
|
||||
crate fn num_elements(&self) -> usize {
|
||||
self.num_points + self.num_universal_regions
|
||||
}
|
||||
|
||||
/// Converts an element of a region value into a `RegionElementIndex`.
|
||||
pub(super) fn index<T: ToElementIndex>(&self, elem: T) -> RegionElementIndex {
|
||||
crate fn index<T: ToElementIndex>(&self, elem: T) -> RegionElementIndex {
|
||||
elem.to_element_index(self)
|
||||
}
|
||||
|
||||
/// Iterates over the `RegionElementIndex` for all points in the CFG.
|
||||
pub(super) fn all_point_indices<'a>(&'a self) -> impl Iterator<Item = RegionElementIndex> + 'a {
|
||||
crate fn all_point_indices<'a>(&'a self) -> impl Iterator<Item = RegionElementIndex> + 'a {
|
||||
(0..self.num_points).map(move |i| RegionElementIndex::new(i + self.num_universal_regions))
|
||||
}
|
||||
|
||||
/// Converts a particular `RegionElementIndex` to the `RegionElement` it represents.
|
||||
pub(super) fn to_element(&self, i: RegionElementIndex) -> RegionElement {
|
||||
crate fn to_element(&self, i: RegionElementIndex) -> RegionElement {
|
||||
debug!("to_element(i={:?})", i);
|
||||
|
||||
if let Some(r) = self.to_universal_region(i) {
|
||||
@ -114,7 +114,7 @@ impl RegionValueElements {
|
||||
/// Converts a particular `RegionElementIndex` to a universal
|
||||
/// region, if that is what it represents. Returns `None`
|
||||
/// otherwise.
|
||||
pub(super) fn to_universal_region(&self, i: RegionElementIndex) -> Option<RegionVid> {
|
||||
crate fn to_universal_region(&self, i: RegionElementIndex) -> Option<RegionVid> {
|
||||
if i.index() < self.num_universal_regions {
|
||||
Some(RegionVid::new(i.index()))
|
||||
} else {
|
||||
@ -138,7 +138,7 @@ newtype_index!(RegionElementIndex { DEBUG_FORMAT = "RegionElementIndex({})" });
|
||||
/// An individual element in a region value -- the value of a
|
||||
/// particular region variable consists of a set of these elements.
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
|
||||
pub(super) enum RegionElement {
|
||||
crate enum RegionElement {
|
||||
/// A point in the control-flow graph.
|
||||
Location(Location),
|
||||
|
||||
@ -146,7 +146,7 @@ pub(super) enum RegionElement {
|
||||
UniversalRegion(RegionVid),
|
||||
}
|
||||
|
||||
pub(super) trait ToElementIndex: Debug + Copy {
|
||||
crate trait ToElementIndex: Debug + Copy {
|
||||
fn to_element_index(self, elements: &RegionValueElements) -> RegionElementIndex;
|
||||
}
|
||||
|
||||
@ -179,16 +179,16 @@ impl ToElementIndex for RegionElementIndex {
|
||||
/// variable. The columns consist of either universal regions or
|
||||
/// points in the CFG.
|
||||
#[derive(Clone)]
|
||||
pub(super) struct RegionValues {
|
||||
crate struct RegionValues<N: Idx> {
|
||||
elements: Rc<RegionValueElements>,
|
||||
matrix: SparseBitMatrix<RegionVid, RegionElementIndex>,
|
||||
matrix: SparseBitMatrix<N, RegionElementIndex>,
|
||||
}
|
||||
|
||||
impl RegionValues {
|
||||
impl<N: Idx> RegionValues<N> {
|
||||
/// Creates a new set of "region values" that tracks causal information.
|
||||
/// Each of the regions in num_region_variables will be initialized with an
|
||||
/// empty set of points and no causal information.
|
||||
pub(super) fn new(elements: &Rc<RegionValueElements>, num_region_variables: usize) -> Self {
|
||||
crate fn new(elements: &Rc<RegionValueElements>, num_region_variables: usize) -> Self {
|
||||
assert!(
|
||||
elements.num_universal_regions <= num_region_variables,
|
||||
"universal regions are a subset of the region variables"
|
||||
@ -197,7 +197,7 @@ impl RegionValues {
|
||||
Self {
|
||||
elements: elements.clone(),
|
||||
matrix: SparseBitMatrix::new(
|
||||
RegionVid::new(num_region_variables),
|
||||
N::new(num_region_variables),
|
||||
RegionElementIndex::new(elements.num_elements()),
|
||||
),
|
||||
}
|
||||
@ -205,7 +205,11 @@ impl RegionValues {
|
||||
|
||||
/// Adds the given element to the value for the given region. Returns true if
|
||||
/// the element is newly added (i.e., was not already present).
|
||||
pub(super) fn add_element<E: ToElementIndex>(&mut self, r: RegionVid, elem: E) -> bool {
|
||||
crate fn add_element(
|
||||
&mut self,
|
||||
r: N,
|
||||
elem: impl ToElementIndex,
|
||||
) -> bool {
|
||||
let i = self.elements.index(elem);
|
||||
debug!("add(r={:?}, elem={:?})", r, elem);
|
||||
self.matrix.add(r, i)
|
||||
@ -213,19 +217,19 @@ impl RegionValues {
|
||||
|
||||
/// Add all elements in `r_from` to `r_to` (because e.g. `r_to:
|
||||
/// r_from`).
|
||||
pub(super) fn add_region(&mut self, r_to: RegionVid, r_from: RegionVid) -> bool {
|
||||
crate fn add_region(&mut self, r_to: N, r_from: N) -> bool {
|
||||
self.matrix.merge(r_from, r_to)
|
||||
}
|
||||
|
||||
/// True if the region `r` contains the given element.
|
||||
pub(super) fn contains<E: ToElementIndex>(&self, r: RegionVid, elem: E) -> bool {
|
||||
crate fn contains(&self, r: N, elem: impl ToElementIndex) -> bool {
|
||||
let i = self.elements.index(elem);
|
||||
self.matrix.contains(r, i)
|
||||
}
|
||||
|
||||
/// True if `sup_region` contains all the CFG points that
|
||||
/// `sub_region` contains. Ignores universal regions.
|
||||
pub(super) fn contains_points(&self, sup_region: RegionVid, sub_region: RegionVid) -> bool {
|
||||
crate fn contains_points(&self, sup_region: N, sub_region: N) -> bool {
|
||||
// This could be done faster by comparing the bitsets. But I
|
||||
// am lazy.
|
||||
self.element_indices_contained_in(sub_region)
|
||||
@ -236,17 +240,17 @@ impl RegionValues {
|
||||
/// Iterate over the value of the region `r`, yielding up element
|
||||
/// indices. You may prefer `universal_regions_outlived_by` or
|
||||
/// `elements_contained_in`.
|
||||
pub(super) fn element_indices_contained_in<'a>(
|
||||
crate fn element_indices_contained_in<'a>(
|
||||
&'a self,
|
||||
r: RegionVid,
|
||||
r: N,
|
||||
) -> impl Iterator<Item = RegionElementIndex> + 'a {
|
||||
self.matrix.iter(r).map(move |i| i)
|
||||
}
|
||||
|
||||
/// Returns just the universal regions that are contained in a given region's value.
|
||||
pub(super) fn universal_regions_outlived_by<'a>(
|
||||
crate fn universal_regions_outlived_by<'a>(
|
||||
&'a self,
|
||||
r: RegionVid,
|
||||
r: N,
|
||||
) -> impl Iterator<Item = RegionVid> + 'a {
|
||||
self.element_indices_contained_in(r)
|
||||
.map(move |i| self.elements.to_universal_region(i))
|
||||
@ -255,16 +259,16 @@ impl RegionValues {
|
||||
}
|
||||
|
||||
/// Returns all the elements contained in a given region's value.
|
||||
pub(super) fn elements_contained_in<'a>(
|
||||
crate fn elements_contained_in<'a>(
|
||||
&'a self,
|
||||
r: RegionVid,
|
||||
r: N,
|
||||
) -> impl Iterator<Item = RegionElement> + 'a {
|
||||
self.element_indices_contained_in(r)
|
||||
.map(move |r| self.elements.to_element(r))
|
||||
}
|
||||
|
||||
/// Returns a "pretty" string value of the region. Meant for debugging.
|
||||
pub(super) fn region_value_str(&self, r: RegionVid) -> String {
|
||||
crate fn region_value_str(&self, r: N) -> String {
|
||||
let mut result = String::new();
|
||||
result.push_str("{");
|
||||
|
||||
|
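The hunks above make `RegionValues` generic over its row index `N: Idx`, so the same storage can hold one row per region variable in one place and one row per SCC in another. A rough stand-alone sketch of that idea, using ordinary hash maps instead of `SparseBitMatrix` and with all names (`Values`, `add_element`, `contains`) hypothetical, not the compiler's API:

use std::collections::{HashMap, HashSet};
use std::hash::Hash;

// Toy stand-in for `RegionValues<N: Idx>`: the row key is any copyable,
// hashable index type, and `usize` stands in for `RegionElementIndex`.
struct Values<N: Copy + Eq + Hash> {
    rows: HashMap<N, HashSet<usize>>,
}

impl<N: Copy + Eq + Hash> Values<N> {
    fn new() -> Self {
        Values { rows: HashMap::new() }
    }

    /// Add one element to the row for `r`; true if it was newly added.
    fn add_element(&mut self, r: N, elem: usize) -> bool {
        self.rows.entry(r).or_default().insert(elem)
    }

    /// True if the row for `r` contains `elem`.
    fn contains(&self, r: N, elem: usize) -> bool {
        self.rows.get(&r).map_or(false, |row| row.contains(&elem))
    }
}

With this shape, a per-region-variable instantiation and a per-SCC instantiation share one implementation, which appears to be the point of introducing the `N` parameter.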
@ -9,12 +9,11 @@
// except according to those terms.

use borrow_check::location::LocationTable;
use borrow_check::nll::constraint_set::OutlivesConstraint;
use borrow_check::nll::constraints::{ConstraintSet, OutlivesConstraint};
use borrow_check::nll::facts::AllFacts;
use borrow_check::nll::region_infer::{RegionTest, TypeTest};
use borrow_check::nll::type_check::Locations;
use borrow_check::nll::universal_regions::UniversalRegions;
use borrow_check::nll::constraint_set::ConstraintSet;
use rustc::infer::canonical::QueryRegionConstraint;
use rustc::infer::outlives::obligations::{TypeOutlives, TypeOutlivesDelegate};
use rustc::infer::region_constraints::{GenericKind, VerifyBound};
@ -186,7 +185,6 @@ impl<'a, 'gcx, 'tcx> ConstraintConversion<'a, 'gcx, 'tcx> {
locations: self.locations,
sub,
sup,
next: None,
});
}

@ -11,14 +11,17 @@
//! This pass type-checks the MIR to ensure it is not broken.
#![allow(unreachable_code)]

use borrow_check::borrow_set::BorrowSet;
use borrow_check::location::LocationTable;
use borrow_check::nll::constraint_set::ConstraintSet;
use borrow_check::nll::constraints::{ConstraintSet, OutlivesConstraint};
use borrow_check::nll::facts::AllFacts;
use borrow_check::nll::region_infer::{ClosureRegionRequirementsExt, TypeTest};
use borrow_check::nll::universal_regions::UniversalRegions;
use borrow_check::nll::ToRegionVid;
use dataflow::move_paths::MoveData;
use dataflow::FlowAtLocation;
use dataflow::MaybeInitializedPlaces;
use rustc::hir;
use rustc::hir::def_id::DefId;
use rustc::infer::canonical::QueryRegionConstraint;
use rustc::infer::region_constraints::GenericKind;
@ -103,6 +106,7 @@ pub(crate) fn type_check<'gcx, 'tcx>(
mir_def_id: DefId,
universal_regions: &UniversalRegions<'tcx>,
location_table: &LocationTable,
borrow_set: &BorrowSet<'tcx>,
liveness: &LivenessResults,
all_facts: &mut Option<AllFacts>,
flow_inits: &mut FlowAtLocation<MaybeInitializedPlaces<'_, 'gcx, 'tcx>>,
@ -119,6 +123,7 @@ pub(crate) fn type_check<'gcx, 'tcx>(
Some(BorrowCheckContext {
universal_regions,
location_table,
borrow_set,
all_facts,
}),
&mut |cx| {
@ -141,6 +146,7 @@ fn type_check_internal<'gcx, 'tcx>(
) -> MirTypeckRegionConstraints<'tcx> {
let mut checker = TypeChecker::new(
infcx,
mir,
mir_def_id,
param_env,
region_bound_pairs,
@ -592,6 +598,7 @@ struct TypeChecker<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> {
infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
param_env: ty::ParamEnv<'gcx>,
last_span: Span,
mir: &'a Mir<'tcx>,
mir_def_id: DefId,
region_bound_pairs: &'a [(ty::Region<'tcx>, GenericKind<'tcx>)],
implicit_region_bound: Option<ty::Region<'tcx>>,
@ -604,6 +611,7 @@ struct BorrowCheckContext<'a, 'tcx: 'a> {
universal_regions: &'a UniversalRegions<'tcx>,
location_table: &'a LocationTable,
all_facts: &'a mut Option<AllFacts>,
borrow_set: &'a BorrowSet<'tcx>,
}

/// A collection of region constraints that must be satisfied for the
@ -704,6 +712,7 @@ impl Locations {
impl<'a, 'gcx, 'tcx> TypeChecker<'a, 'gcx, 'tcx> {
fn new(
infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
mir: &'a Mir<'tcx>,
mir_def_id: DefId,
param_env: ty::ParamEnv<'gcx>,
region_bound_pairs: &'a [(ty::Region<'tcx>, GenericKind<'tcx>)],
@ -713,6 +722,7 @@ impl<'a, 'gcx, 'tcx> TypeChecker<'a, 'gcx, 'tcx> {
TypeChecker {
infcx,
last_span: DUMMY_SP,
mir,
mir_def_id,
param_env,
region_bound_pairs,
@ -857,8 +867,7 @@ impl<'a, 'gcx, 'tcx> TypeChecker<'a, 'gcx, 'tcx> {
}
StatementKind::UserAssertTy(ref c_ty, ref local) => {
let local_ty = mir.local_decls()[*local].ty;
let (ty, _) = self
.infcx
let (ty, _) = self.infcx
.instantiate_canonical_with_fresh_inference_vars(stmt.source_info.span, c_ty);
debug!(
"check_stmt: user_assert_ty ty={:?} local_ty={:?}",
@ -1400,9 +1409,12 @@ impl<'a, 'gcx, 'tcx> TypeChecker<'a, 'gcx, 'tcx> {
CastKind::Misc => {}
},

Rvalue::Ref(region, _borrow_kind, borrowed_place) => {
self.add_reborrow_constraint(location, region, borrowed_place);
}

// FIXME: These other cases have to be implemented in future PRs
Rvalue::Use(..)
| Rvalue::Ref(..)
| Rvalue::Len(..)
| Rvalue::BinaryOp(..)
| Rvalue::CheckedBinaryOp(..)
@ -1457,6 +1469,141 @@ impl<'a, 'gcx, 'tcx> TypeChecker<'a, 'gcx, 'tcx> {
}
}

/// Add the constraints that arise from a borrow expression `&'a P` at the location `L`.
///
/// # Parameters
///
/// - `location`: the location `L` where the borrow expression occurs
/// - `borrow_region`: the region `'a` associated with the borrow
/// - `borrowed_place`: the place `P` being borrowed
fn add_reborrow_constraint(
&mut self,
location: Location,
borrow_region: ty::Region<'tcx>,
borrowed_place: &Place<'tcx>,
) {
// These constraints are only meaningful during borrowck:
let BorrowCheckContext {
borrow_set,
location_table,
all_facts,
..
} = match &mut self.borrowck_context {
Some(borrowck_context) => borrowck_context,
None => return,
};

// In Polonius mode, we also push a `borrow_region` fact
// linking the loan to the region (in some cases, though,
// there is no loan associated with this borrow expression --
// that occurs when we are borrowing an unsafe place, for
// example).
if let Some(all_facts) = all_facts {
if let Some(borrow_index) = borrow_set.location_map.get(&location) {
let region_vid = borrow_region.to_region_vid();
all_facts.borrow_region.push((
region_vid,
*borrow_index,
location_table.mid_index(location),
));
}
}

// If we are reborrowing the referent of another reference, we
// need to add outlives relationships. In a case like `&mut
// *p`, where the `p` has type `&'b mut Foo`, for example, we
// need to ensure that `'b: 'a`.

let mut borrowed_place = borrowed_place;

debug!(
"add_reborrow_constraint({:?}, {:?}, {:?})",
location, borrow_region, borrowed_place
);
while let Place::Projection(box PlaceProjection { base, elem }) = borrowed_place {
debug!("add_reborrow_constraint - iteration {:?}", borrowed_place);

match *elem {
ProjectionElem::Deref => {
let tcx = self.infcx.tcx;
let base_ty = base.ty(self.mir, tcx).to_ty(tcx);

debug!("add_reborrow_constraint - base_ty = {:?}", base_ty);
match base_ty.sty {
ty::TyRef(ref_region, _, mutbl) => {
self.constraints
.outlives_constraints
.push(OutlivesConstraint {
sup: ref_region.to_region_vid(),
sub: borrow_region.to_region_vid(),
locations: location.boring(),
});

if let Some(all_facts) = all_facts {
all_facts.outlives.push((
ref_region.to_region_vid(),
borrow_region.to_region_vid(),
location_table.mid_index(location),
));
}

match mutbl {
hir::Mutability::MutImmutable => {
// Immutable reference. We don't need the base
// to be valid for the entire lifetime of
// the borrow.
break;
}
hir::Mutability::MutMutable => {
// Mutable reference. We *do* need the base
// to be valid, because after the base becomes
// invalid, someone else can use our mutable deref.

// This is in order to make the following function
// illegal:
// ```
// fn unsafe_deref<'a, 'b>(x: &'a &'b mut T) -> &'b mut T {
// &mut *x
// }
// ```
//
// As otherwise you could clone `&mut T` using the
// following function:
// ```
// fn bad(x: &mut T) -> (&mut T, &mut T) {
// let my_clone = unsafe_deref(&'a x);
// ENDREGION 'a;
// (my_clone, x)
// }
// ```
}
}
}
ty::TyRawPtr(..) => {
// deref of raw pointer, guaranteed to be valid
break;
}
ty::TyAdt(def, _) if def.is_box() => {
// deref of `Box`, need the base to be valid - propagate
}
_ => bug!("unexpected deref ty {:?} in {:?}", base_ty, borrowed_place),
}
}
ProjectionElem::Field(..)
| ProjectionElem::Downcast(..)
| ProjectionElem::Index(..)
| ProjectionElem::ConstantIndex { .. }
| ProjectionElem::Subslice { .. } => {
// other field access
}
}

// The "propagate" case. We need to check that our base is valid
// for the borrow's lifetime.
borrowed_place = base;
}
}

fn prove_aggregate_predicates(
&mut self,
aggregate_kind: &AggregateKind<'tcx>,
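As a simplified, hypothetical model of the walk `add_reborrow_constraint` performs over the borrowed place: each deref through a reference contributes a "sup must outlive sub" pair, shared references and raw pointers stop the walk, and field-like projections just propagate to the base. The `Step` enum and `u32` region ids below are toy stand-ins, not MIR types:

// Toy projection steps, listed from the borrowed place outward toward its base.
#[derive(Clone, Copy)]
enum Step {
    DerefSharedRef { region: u32 },
    DerefMutRef { region: u32 },
    DerefRawPtr,
    Field,
}

/// Returns (sup, sub) pairs meaning "region `sup` must outlive region `sub`".
fn reborrow_constraints(borrow_region: u32, steps: &[Step]) -> Vec<(u32, u32)> {
    let mut out = Vec::new();
    for step in steps {
        match *step {
            Step::DerefSharedRef { region } => {
                out.push((region, borrow_region));
                break; // shared ref: the base need not stay valid for the whole borrow
            }
            Step::DerefMutRef { region } => {
                out.push((region, borrow_region));
                // mutable ref: keep walking, the base must stay valid too
            }
            Step::DerefRawPtr => break, // raw pointer: nothing to enforce
            Step::Field => {}           // field-like projection: propagate to the base
        }
    }
    out
}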
@ -16,7 +16,7 @@ use dataflow::indexes::BorrowIndex;
use rustc::mir::{BasicBlock, Location, Mir, Place};
use rustc::mir::{ProjectionElem, BorrowKind};
use rustc::ty::TyCtxt;
use rustc_data_structures::control_flow_graph::dominators::Dominators;
use rustc_data_structures::graph::dominators::Dominators;

/// Returns true if the borrow represented by `kind` is
/// allowed to be split into separate Reservation and

@ -76,7 +76,7 @@ fn precompute_borrows_out_of_scope<'a, 'tcx>(
while let Some(location) = stack.pop() {
// If region does not contain a point at the location, then add to list and skip
// successor locations.
if !regioncx.region_contains_point(borrow_region, location) {
if !regioncx.region_contains(borrow_region, location) {
debug!("borrow {:?} gets killed at {:?}", borrow_index, location);
borrows_out_of_scope_at_location
.entry(location)
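A minimal, hypothetical sketch of the kill-point search that `precompute_borrows_out_of_scope` performs, using plain integers for locations, a successor map for the CFG, and a closure standing in for the renamed `region_contains` query (not the real `RegionInferenceContext` interface):

use std::collections::{HashMap, HashSet};

// Walk successor locations from the borrow; the first locations where the
// borrow's region no longer contains the point are recorded as out of scope
// and are not expanded further.
fn borrows_out_of_scope(
    start: u32,
    successors: &HashMap<u32, Vec<u32>>,
    region_contains: impl Fn(u32) -> bool,
) -> HashSet<u32> {
    let mut out_of_scope = HashSet::new();
    let mut visited = HashSet::new();
    let mut stack = vec![start];
    while let Some(location) = stack.pop() {
        if !visited.insert(location) {
            continue;
        }
        if !region_contains(location) {
            out_of_scope.insert(location); // borrow is killed here; stop exploring
            continue;
        }
        if let Some(succs) = successors.get(&location) {
            stack.extend(succs.iter().copied());
        }
    }
    out_of_scope
}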
@ -52,9 +52,9 @@ fn supply<'a, 'b, 'c>(cell_a: Cell<&'a u32>, cell_b: Cell<&'b u32>, cell_c: Cell
cell_c,
|_outlives1, _outlives2, _outlives3, x, y| {
// Only works if 'x: 'y:
let p = x.get();
let p = x.get(); //~ ERROR
//~^ WARN not reporting region error due to nll
demand_y(x, y, p) //~ ERROR
demand_y(x, y, p)
},
);
}

@ -1,28 +1,28 @@
warning: not reporting region error due to nll
--> $DIR/propagate-approximated-fail-no-postdom.rs:55:21
|
LL | let p = x.get();
LL | let p = x.get(); //~ ERROR
| ^^^^^^^

error: unsatisfied lifetime constraints
--> $DIR/propagate-approximated-fail-no-postdom.rs:57:13
--> $DIR/propagate-approximated-fail-no-postdom.rs:55:21
|
LL | |_outlives1, _outlives2, _outlives3, x, y| {
| ---------- ---------- lifetime `'2` appears in this argument
| |
| lifetime `'1` appears in this argument
...
LL | demand_y(x, y, p) //~ ERROR
| ^^^^^^^^^^^^^^^^^ argument requires that `'1` must outlive `'2`
LL | // Only works if 'x: 'y:
LL | let p = x.get(); //~ ERROR
| ^^^^^^^ argument requires that `'1` must outlive `'2`

note: No external requirements
--> $DIR/propagate-approximated-fail-no-postdom.rs:53:9
|
LL | / |_outlives1, _outlives2, _outlives3, x, y| {
LL | | // Only works if 'x: 'y:
LL | | let p = x.get();
LL | | let p = x.get(); //~ ERROR
LL | | //~^ WARN not reporting region error due to nll
LL | | demand_y(x, y, p) //~ ERROR
LL | | demand_y(x, y, p)
LL | | },
| |_________^
|

@ -5,7 +5,7 @@ LL | foo(cell, |cell_a, cell_x| {
| ^^^

error: unsatisfied lifetime constraints
--> $DIR/propagate-approximated-shorter-to-static-comparing-against-free.rs:33:9
--> $DIR/propagate-approximated-shorter-to-static-comparing-against-free.rs:33:20
|
LL | foo(cell, |cell_a, cell_x| {
| ------ ------ lifetime `'1` appears in this argument
@ -13,7 +13,7 @@ LL | foo(cell, |cell_a, cell_x| {
| lifetime `'2` appears in this argument
LL | //~^ WARNING not reporting region error due to nll
LL | cell_a.set(cell_x.get()); // forces 'x: 'a, error in closure
| ^^^^^^^^^^^^^^^^^^^^^^^^ argument requires that `'1` must outlive `'2`
| ^^^^^^^^^^^^ argument requires that `'1` must outlive `'2`

note: No external requirements
--> $DIR/propagate-approximated-shorter-to-static-comparing-against-free.rs:31:15

@ -5,7 +5,7 @@ LL | demand_y(x, y, x.get())
| ^^^^^^^^^^^^^^^^^^^^^^^

error: unsatisfied lifetime constraints
--> $DIR/propagate-fail-to-approximate-longer-no-bounds.rs:47:9
--> $DIR/propagate-fail-to-approximate-longer-no-bounds.rs:47:24
|
LL | establish_relationships(&cell_a, &cell_b, |_outlives, x, y| {
| --------- - lifetime `'1` appears in this argument
@ -13,7 +13,7 @@ LL | establish_relationships(&cell_a, &cell_b, |_outlives, x, y| {
| lifetime `'2` appears in this argument
LL | // Only works if 'x: 'y:
LL | demand_y(x, y, x.get())
| ^^^^^^^^^^^^^^^^^^^^^^^ argument requires that `'1` must outlive `'2`
| ^^^^^^^ argument requires that `'1` must outlive `'2`

note: No external requirements
--> $DIR/propagate-fail-to-approximate-longer-no-bounds.rs:45:47

@ -5,7 +5,7 @@ LL | demand_y(x, y, x.get())
| ^^^^^^^^^^^^^^^^^^^^^^^

error: unsatisfied lifetime constraints
--> $DIR/propagate-fail-to-approximate-longer-wrong-bounds.rs:51:9
--> $DIR/propagate-fail-to-approximate-longer-wrong-bounds.rs:51:24
|
LL | establish_relationships(&cell_a, &cell_b, |_outlives1, _outlives2, x, y| {
| ---------- ---------- lifetime `'2` appears in this argument
@ -13,7 +13,7 @@ LL | establish_relationships(&cell_a, &cell_b, |_outlives1, _outlives2, x, y
| lifetime `'1` appears in this argument
LL | // Only works if 'x: 'y:
LL | demand_y(x, y, x.get())
| ^^^^^^^^^^^^^^^^^^^^^^^ argument requires that `'1` must outlive `'2`
| ^^^^^^^ argument requires that `'1` must outlive `'2`

note: No external requirements
--> $DIR/propagate-fail-to-approximate-longer-wrong-bounds.rs:49:47