Auto merge of #106975 - tmiasko:basic-blocks-cache, r=cjgillot

Refactor basic blocks control flow caches

No functional changes.
bors 2023-01-22 21:35:21 +00:00
commit a5fa99eed2
6 changed files with 74 additions and 296 deletions


@ -1,38 +1,44 @@
use crate::mir::graph_cyclic_cache::GraphIsCyclicCache;
use crate::mir::predecessors::{PredecessorCache, Predecessors};
use crate::mir::switch_sources::{SwitchSourceCache, SwitchSources};
use crate::mir::traversal::PostorderCache;
use crate::mir::{BasicBlock, BasicBlockData, Successors, START_BLOCK};
use crate::mir::traversal::Postorder;
use crate::mir::{BasicBlock, BasicBlockData, Successors, Terminator, TerminatorKind, START_BLOCK};
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::graph;
use rustc_data_structures::graph::dominators::{dominators, Dominators};
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::sync::OnceCell;
use rustc_index::vec::IndexVec;
use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
use smallvec::SmallVec;
#[derive(Clone, TyEncodable, TyDecodable, Debug, HashStable, TypeFoldable, TypeVisitable)]
pub struct BasicBlocks<'tcx> {
basic_blocks: IndexVec<BasicBlock, BasicBlockData<'tcx>>,
predecessor_cache: PredecessorCache,
switch_source_cache: SwitchSourceCache,
is_cyclic: GraphIsCyclicCache,
postorder_cache: PostorderCache,
cache: Cache,
}
// Typically 95%+ of basic blocks have 4 or fewer predecessors.
pub type Predecessors = IndexVec<BasicBlock, SmallVec<[BasicBlock; 4]>>;
pub type SwitchSources = FxHashMap<(BasicBlock, BasicBlock), SmallVec<[Option<u128>; 1]>>;
#[derive(Clone, Default, Debug)]
struct Cache {
predecessors: OnceCell<Predecessors>,
switch_sources: OnceCell<SwitchSources>,
is_cyclic: OnceCell<bool>,
postorder: OnceCell<Vec<BasicBlock>>,
}
impl<'tcx> BasicBlocks<'tcx> {
#[inline]
pub fn new(basic_blocks: IndexVec<BasicBlock, BasicBlockData<'tcx>>) -> Self {
BasicBlocks {
basic_blocks,
predecessor_cache: PredecessorCache::new(),
switch_source_cache: SwitchSourceCache::new(),
is_cyclic: GraphIsCyclicCache::new(),
postorder_cache: PostorderCache::new(),
}
BasicBlocks { basic_blocks, cache: Cache::default() }
}
/// Returns true if the control-flow graph contains a cycle reachable from `START_BLOCK`.
#[inline]
pub fn is_cfg_cyclic(&self) -> bool {
self.is_cyclic.is_cyclic(self)
*self.cache.is_cyclic.get_or_init(|| graph::is_cyclic(self))
}
#[inline]
@ -43,20 +49,46 @@ impl<'tcx> BasicBlocks<'tcx> {
/// Returns predecessors for each basic block.
#[inline]
pub fn predecessors(&self) -> &Predecessors {
self.predecessor_cache.compute(&self.basic_blocks)
self.cache.predecessors.get_or_init(|| {
let mut preds = IndexVec::from_elem(SmallVec::new(), &self.basic_blocks);
for (bb, data) in self.basic_blocks.iter_enumerated() {
if let Some(term) = &data.terminator {
for succ in term.successors() {
preds[succ].push(bb);
}
}
}
preds
})
}
/// Returns the basic blocks in postorder.
#[inline]
pub fn postorder(&self) -> &[BasicBlock] {
self.postorder_cache.compute(&self.basic_blocks)
self.cache.postorder.get_or_init(|| {
Postorder::new(&self.basic_blocks, START_BLOCK).map(|(bb, _)| bb).collect()
})
}
/// `switch_sources()[&(target, switch)]` returns a list of switch
/// values that lead to a `target` block from a `switch` block.
#[inline]
pub fn switch_sources(&self) -> &SwitchSources {
self.switch_source_cache.compute(&self.basic_blocks)
self.cache.switch_sources.get_or_init(|| {
let mut switch_sources: SwitchSources = FxHashMap::default();
for (bb, data) in self.basic_blocks.iter_enumerated() {
if let Some(Terminator {
kind: TerminatorKind::SwitchInt { targets, .. }, ..
}) = &data.terminator
{
for (value, target) in targets.iter() {
switch_sources.entry((target, bb)).or_default().push(Some(value));
}
switch_sources.entry((targets.otherwise(), bb)).or_default().push(None);
}
}
switch_sources
})
}
/// Returns a mutable reference to the basic blocks. Invalidates the CFG cache.
@ -88,10 +120,7 @@ impl<'tcx> BasicBlocks<'tcx> {
/// All other methods that allow you to mutate the basic blocks also call this method
/// themselves, thereby avoiding any risk of accidentally missing a cache invalidation.
pub fn invalidate_cfg_cache(&mut self) {
self.predecessor_cache.invalidate();
self.switch_source_cache.invalidate();
self.is_cyclic.invalidate();
self.postorder_cache.invalidate();
self.cache = Cache::default();
}
}
@ -145,3 +174,24 @@ impl<'tcx> graph::WithPredecessors for BasicBlocks<'tcx> {
self.predecessors()[node].iter().copied()
}
}
TrivialTypeTraversalAndLiftImpls! {
Cache,
}
impl<S: Encoder> Encodable<S> for Cache {
#[inline]
fn encode(&self, _s: &mut S) {}
}
impl<D: Decoder> Decodable<D> for Cache {
#[inline]
fn decode(_: &mut D) -> Self {
Default::default()
}
}
impl<CTX> HashStable<CTX> for Cache {
#[inline]
fn hash_stable(&self, _: &mut CTX, _: &mut StableHasher) {}
}
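
The hunks above fold the four single-purpose cache types (`PredecessorCache`, `SwitchSourceCache`, `GraphIsCyclicCache`, `PostorderCache`) into one private `Cache` struct of `OnceCell` fields: each view is computed lazily with `get_or_init`, and the whole struct is invalidated by resetting it to `Cache::default()`. The remaining files in this commit just delete the standalone cache types. Below is a minimal, self-contained sketch of that pattern; it uses `std::cell::OnceCell` and made-up stand-in types (`CfgCache`, `Body`, plain `usize` block indices), not rustc's `rustc_data_structures::sync::OnceCell` or the real MIR types.

```rust
use std::cell::OnceCell;

// Made-up stand-ins so the sketch compiles on its own: a block is a plain
// index and a block's data is just its successor list.
type BasicBlock = usize;

/// Every lazily computed CFG view lives in one struct; the real `Cache` also
/// holds the postorder, the switch sources, and the cyclicity flag.
#[derive(Default)]
struct CfgCache {
    predecessors: OnceCell<Vec<Vec<BasicBlock>>>,
}

struct Body {
    successors: Vec<Vec<BasicBlock>>,
    cache: CfgCache,
}

impl Body {
    fn new(successors: Vec<Vec<BasicBlock>>) -> Self {
        Body { successors, cache: CfgCache::default() }
    }

    /// Computed on first use and memoized, like `BasicBlocks::predecessors`.
    fn predecessors(&self) -> &Vec<Vec<BasicBlock>> {
        self.cache.predecessors.get_or_init(|| {
            let mut preds = vec![Vec::new(); self.successors.len()];
            for (bb, succs) in self.successors.iter().enumerate() {
                for &succ in succs {
                    preds[succ].push(bb);
                }
            }
            preds
        })
    }

    /// Mutation goes through `&mut self`, so the cache can be reset without
    /// any synchronization, mirroring `invalidate_cfg_cache`.
    fn basic_blocks_mut(&mut self) -> &mut Vec<Vec<BasicBlock>> {
        self.cache = CfgCache::default();
        &mut self.successors
    }
}

fn main() {
    let mut body = Body::new(vec![vec![1, 2], vec![2], vec![]]);
    assert_eq!(body.predecessors()[2], vec![0, 1]);
    body.basic_blocks_mut().push(vec![0]); // add bb3 and drop every cached view
    assert_eq!(body.predecessors()[0], vec![3]);
}
```

The no-op `Encodable`/`Decodable`/`HashStable` impls above carry over the design the individual caches already had: everything in `Cache` is derivable from the basic blocks, so it is dropped on encode and recomputed on demand after decode.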


@ -1,63 +0,0 @@
use rustc_data_structures::graph::{
self, DirectedGraph, WithNumNodes, WithStartNode, WithSuccessors,
};
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::sync::OnceCell;
use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
/// Helper type to cache the result of `graph::is_cyclic`.
#[derive(Clone, Debug)]
pub(super) struct GraphIsCyclicCache {
cache: OnceCell<bool>,
}
impl GraphIsCyclicCache {
#[inline]
pub(super) fn new() -> Self {
GraphIsCyclicCache { cache: OnceCell::new() }
}
pub(super) fn is_cyclic<G>(&self, graph: &G) -> bool
where
G: ?Sized + DirectedGraph + WithStartNode + WithSuccessors + WithNumNodes,
{
*self.cache.get_or_init(|| graph::is_cyclic(graph))
}
/// Invalidates the cache.
#[inline]
pub(super) fn invalidate(&mut self) {
// Invalidating the cache requires mutating the MIR, which in turn requires a unique
// reference (`&mut`) to the `mir::Body`. Because of this, we can assume that all
// callers of `invalidate` have a unique reference to the MIR and thus to the
// cache. This means we never need to do synchronization when `invalidate` is called;
// we can simply reinitialize the `OnceCell`.
self.cache = OnceCell::new();
}
}
impl<S: Encoder> Encodable<S> for GraphIsCyclicCache {
#[inline]
fn encode(&self, s: &mut S) {
Encodable::encode(&(), s);
}
}
impl<D: Decoder> Decodable<D> for GraphIsCyclicCache {
#[inline]
fn decode(d: &mut D) -> Self {
let () = Decodable::decode(d);
Self::new()
}
}
impl<CTX> HashStable<CTX> for GraphIsCyclicCache {
#[inline]
fn hash_stable(&self, _: &mut CTX, _: &mut StableHasher) {
// do nothing
}
}
TrivialTypeTraversalAndLiftImpls! {
GraphIsCyclicCache,
}


@ -47,18 +47,15 @@ mod basic_blocks;
pub mod coverage;
mod generic_graph;
pub mod generic_graphviz;
mod graph_cyclic_cache;
pub mod graphviz;
pub mod interpret;
pub mod mono;
pub mod patch;
mod predecessors;
pub mod pretty;
mod query;
pub mod spanview;
mod syntax;
pub use syntax::*;
mod switch_sources;
pub mod tcx;
pub mod terminator;
pub use terminator::*;


@ -1,78 +0,0 @@
//! Lazily compute the reverse control-flow graph for the MIR.
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::sync::OnceCell;
use rustc_index::vec::IndexVec;
use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
use smallvec::SmallVec;
use crate::mir::{BasicBlock, BasicBlockData};
// Typically 95%+ of basic blocks have 4 or fewer predecessors.
pub type Predecessors = IndexVec<BasicBlock, SmallVec<[BasicBlock; 4]>>;
#[derive(Clone, Debug)]
pub(super) struct PredecessorCache {
cache: OnceCell<Predecessors>,
}
impl PredecessorCache {
#[inline]
pub(super) fn new() -> Self {
PredecessorCache { cache: OnceCell::new() }
}
/// Invalidates the predecessor cache.
#[inline]
pub(super) fn invalidate(&mut self) {
// Invalidating the predecessor cache requires mutating the MIR, which in turn requires a
// unique reference (`&mut`) to the `mir::Body`. Because of this, we can assume that all
// callers of `invalidate` have a unique reference to the MIR and thus to the predecessor
// cache. This means we never need to do synchronization when `invalidate` is called; we can
// simply reinitialize the `OnceCell`.
self.cache = OnceCell::new();
}
/// Returns the predecessor graph for this MIR.
#[inline]
pub(super) fn compute(
&self,
basic_blocks: &IndexVec<BasicBlock, BasicBlockData<'_>>,
) -> &Predecessors {
self.cache.get_or_init(|| {
let mut preds = IndexVec::from_elem(SmallVec::new(), basic_blocks);
for (bb, data) in basic_blocks.iter_enumerated() {
if let Some(term) = &data.terminator {
for succ in term.successors() {
preds[succ].push(bb);
}
}
}
preds
})
}
}
impl<S: Encoder> Encodable<S> for PredecessorCache {
#[inline]
fn encode(&self, _s: &mut S) {}
}
impl<D: Decoder> Decodable<D> for PredecessorCache {
#[inline]
fn decode(_: &mut D) -> Self {
Self::new()
}
}
impl<CTX> HashStable<CTX> for PredecessorCache {
#[inline]
fn hash_stable(&self, _: &mut CTX, _: &mut StableHasher) {
// do nothing
}
}
TrivialTypeTraversalAndLiftImpls! {
PredecessorCache,
}


@ -1,78 +0,0 @@
//! Lazily compute the inverse of each `SwitchInt`'s switch targets. Modeled after
//! `Predecessors`/`PredecessorCache`.
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::sync::OnceCell;
use rustc_index::vec::IndexVec;
use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
use smallvec::SmallVec;
use crate::mir::{BasicBlock, BasicBlockData, Terminator, TerminatorKind};
pub type SwitchSources = FxHashMap<(BasicBlock, BasicBlock), SmallVec<[Option<u128>; 1]>>;
#[derive(Clone, Debug)]
pub(super) struct SwitchSourceCache {
cache: OnceCell<SwitchSources>,
}
impl SwitchSourceCache {
#[inline]
pub(super) fn new() -> Self {
SwitchSourceCache { cache: OnceCell::new() }
}
/// Invalidates the switch source cache.
#[inline]
pub(super) fn invalidate(&mut self) {
self.cache = OnceCell::new();
}
/// Returns the switch sources for this MIR.
#[inline]
pub(super) fn compute(
&self,
basic_blocks: &IndexVec<BasicBlock, BasicBlockData<'_>>,
) -> &SwitchSources {
self.cache.get_or_init(|| {
let mut switch_sources: SwitchSources = FxHashMap::default();
for (bb, data) in basic_blocks.iter_enumerated() {
if let Some(Terminator {
kind: TerminatorKind::SwitchInt { targets, .. }, ..
}) = &data.terminator
{
for (value, target) in targets.iter() {
switch_sources.entry((target, bb)).or_default().push(Some(value));
}
switch_sources.entry((targets.otherwise(), bb)).or_default().push(None);
}
}
switch_sources
})
}
}
impl<S: Encoder> Encodable<S> for SwitchSourceCache {
#[inline]
fn encode(&self, _s: &mut S) {}
}
impl<D: Decoder> Decodable<D> for SwitchSourceCache {
#[inline]
fn decode(_: &mut D) -> Self {
Self::new()
}
}
impl<CTX> HashStable<CTX> for SwitchSourceCache {
#[inline]
fn hash_stable(&self, _: &mut CTX, _: &mut StableHasher) {
// do nothing
}
}
TrivialTypeTraversalAndLiftImpls! {
SwitchSourceCache,
}
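
As the deleted module doc says, switch sources are the inverse of each `SwitchInt` terminator's targets: the map is keyed by `(target, switch_block)` and lists the switch values that jump to `target`, with `None` standing for the otherwise branch. A small worked sketch of that inversion, using made-up stand-in types (a `Switch` with parallel value/target lists) rather than the real `Terminator`/`SwitchTargets`:

```rust
use std::collections::HashMap;

// Made-up stand-ins: a switch holds its matched values and targets as
// parallel lists, plus the otherwise target.
type BasicBlock = usize;

struct Switch {
    values: Vec<u128>,
    targets: Vec<BasicBlock>,
    otherwise: BasicBlock,
}

// Same shape as `SwitchSources`: (target, switch block) -> values leading there.
type Sources = HashMap<(BasicBlock, BasicBlock), Vec<Option<u128>>>;

fn switch_sources(switches: &[(BasicBlock, Switch)]) -> Sources {
    let mut sources: Sources = HashMap::new();
    for (bb, switch) in switches {
        for (&value, &target) in switch.values.iter().zip(&switch.targets) {
            sources.entry((target, *bb)).or_default().push(Some(value));
        }
        // The otherwise branch is recorded as `None`.
        sources.entry((switch.otherwise, *bb)).or_default().push(None);
    }
    sources
}

fn main() {
    // Block 0 switches: 0 => block 1, 1 => block 2, otherwise => block 2.
    let switches = vec![(0, Switch { values: vec![0, 1], targets: vec![1, 2], otherwise: 2 })];
    let sources = switch_sources(&switches);
    assert_eq!(sources[&(1, 0)], vec![Some(0u128)]);
    assert_eq!(sources[&(2, 0)], vec![Some(1u128), None]);
}
```

So, in the terms of the doc comment above, `switch_sources()[&(2, 0)]` would report that block 2 is reached from the switch in block 0 both by value 1 and by the otherwise branch.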


@ -1,7 +1,4 @@
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::sync::OnceCell;
use rustc_index::bit_set::BitSet;
use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
use super::*;
@ -339,50 +336,3 @@ pub fn reverse_postorder<'a, 'tcx>(body: &'a Body<'tcx>) -> ReversePostorderIter
let len = blocks.len();
ReversePostorderIter { body, blocks, idx: len }
}
#[derive(Clone, Debug)]
pub(super) struct PostorderCache {
cache: OnceCell<Vec<BasicBlock>>,
}
impl PostorderCache {
#[inline]
pub(super) fn new() -> Self {
PostorderCache { cache: OnceCell::new() }
}
/// Invalidates the postorder cache.
#[inline]
pub(super) fn invalidate(&mut self) {
self.cache = OnceCell::new();
}
/// Returns the `&[BasicBlock]` that represents the postorder graph for this MIR.
#[inline]
pub(super) fn compute(&self, body: &IndexVec<BasicBlock, BasicBlockData<'_>>) -> &[BasicBlock] {
self.cache.get_or_init(|| Postorder::new(body, START_BLOCK).map(|(bb, _)| bb).collect())
}
}
impl<S: Encoder> Encodable<S> for PostorderCache {
#[inline]
fn encode(&self, _s: &mut S) {}
}
impl<D: Decoder> Decodable<D> for PostorderCache {
#[inline]
fn decode(_: &mut D) -> Self {
Self::new()
}
}
impl<CTX> HashStable<CTX> for PostorderCache {
#[inline]
fn hash_stable(&self, _: &mut CTX, _: &mut StableHasher) {
// do nothing
}
}
TrivialTypeTraversalAndLiftImpls! {
PostorderCache,
}
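
For reference, the postorder that the removed `PostorderCache` memoized (and that `BasicBlocks::postorder` now caches via `Postorder::new(body, START_BLOCK)`) is an ordinary depth-first postorder from the entry block: a block is emitted only after its reachable successors. A hand-rolled sketch over made-up adjacency lists, not the real `Postorder` iterator:

```rust
type BasicBlock = usize;

/// Iterative DFS postorder from `start`: a block is pushed to `order` only
/// after all of its reachable successors have been emitted.
fn postorder(successors: &[Vec<BasicBlock>], start: BasicBlock) -> Vec<BasicBlock> {
    let mut visited = vec![false; successors.len()];
    let mut order = Vec::new();
    // Work stack of (block, index of the next successor to try).
    let mut stack = vec![(start, 0usize)];
    visited[start] = true;
    while let Some((bb, idx)) = stack.pop() {
        if let Some(&succ) = successors[bb].get(idx) {
            // Come back to `bb` after exploring this successor.
            stack.push((bb, idx + 1));
            if !visited[succ] {
                visited[succ] = true;
                stack.push((succ, 0));
            }
        } else {
            // All successors handled: emit `bb`.
            order.push(bb);
        }
    }
    order
}

fn main() {
    // 0 -> {1, 2}, 1 -> {3}, 2 -> {3}, 3 -> {}
    let succs = vec![vec![1, 2], vec![3], vec![3], vec![]];
    assert_eq!(postorder(&succs, 0), vec![3, 1, 2, 0]);
}
```

Reverse postorder, the order most dataflow passes want, is then just this vector walked backwards, which is what the retained `reverse_postorder` above does.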