diff --git a/compiler/rustc_codegen_ssa/src/mir/mod.rs b/compiler/rustc_codegen_ssa/src/mir/mod.rs index 6c139df0a85..0c958de64fa 100644 --- a/compiler/rustc_codegen_ssa/src/mir/mod.rs +++ b/compiler/rustc_codegen_ssa/src/mir/mod.rs @@ -244,7 +244,6 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( fx.debug_introduce_locals(&mut bx); // Codegen the body of each block using reverse postorder - // FIXME(eddyb) reuse RPO iterator between `analysis` and this. for (bb, _) in traversal::reverse_postorder(&mir) { fx.codegen_block(bb); } diff --git a/compiler/rustc_const_eval/src/transform/promote_consts.rs b/compiler/rustc_const_eval/src/transform/promote_consts.rs index faea2111d92..1052d588fad 100644 --- a/compiler/rustc_const_eval/src/transform/promote_consts.rs +++ b/compiler/rustc_const_eval/src/transform/promote_consts.rs @@ -13,7 +13,7 @@ //! move analysis runs after promotion on broken MIR. use rustc_hir as hir; -use rustc_middle::mir::traversal::ReversePostorder; +use rustc_middle::mir::traversal::ReversePostorderIter; use rustc_middle::mir::visit::{MutVisitor, MutatingUseContext, PlaceContext, Visitor}; use rustc_middle::mir::*; use rustc_middle::ty::cast::CastTy; @@ -170,7 +170,7 @@ impl<'tcx> Visitor<'tcx> for Collector<'_, 'tcx> { pub fn collect_temps_and_candidates<'tcx>( ccx: &ConstCx<'_, 'tcx>, - rpo: &mut ReversePostorder<'_, 'tcx>, + rpo: &mut ReversePostorderIter<'_, 'tcx>, ) -> (IndexVec<Local, TempState>, Vec<Candidate>) { let mut collector = Collector { temps: IndexVec::from_elem(TempState::Undefined, &ccx.body.local_decls), diff --git a/compiler/rustc_middle/src/mir/mod.rs b/compiler/rustc_middle/src/mir/mod.rs index 883fc72cd56..45999f87658 100644 --- a/compiler/rustc_middle/src/mir/mod.rs +++ b/compiler/rustc_middle/src/mir/mod.rs @@ -62,7 +62,9 @@ pub mod spanview; mod switch_sources; pub mod tcx; pub mod terminator; +use crate::mir::traversal::PostorderCache; pub use terminator::*; + pub mod traversal; mod type_foldable; pub mod visit; @@ -323,6 +325,7 @@ pub 
struct Body<'tcx> { predecessor_cache: PredecessorCache, switch_source_cache: SwitchSourceCache, is_cyclic: GraphIsCyclicCache, + postorder_cache: PostorderCache, pub tainted_by_errors: Option<ErrorReported>, } @@ -372,6 +375,7 @@ impl<'tcx> Body<'tcx> { predecessor_cache: PredecessorCache::new(), switch_source_cache: SwitchSourceCache::new(), is_cyclic: GraphIsCyclicCache::new(), + postorder_cache: PostorderCache::new(), tainted_by_errors, }; body.is_polymorphic = body.has_param_types_or_consts(); @@ -401,6 +405,7 @@ impl<'tcx> Body<'tcx> { predecessor_cache: PredecessorCache::new(), switch_source_cache: SwitchSourceCache::new(), is_cyclic: GraphIsCyclicCache::new(), + postorder_cache: PostorderCache::new(), tainted_by_errors: None, }; body.is_polymorphic = body.has_param_types_or_consts(); @@ -422,6 +427,7 @@ impl<'tcx> Body<'tcx> { self.predecessor_cache.invalidate(); self.switch_source_cache.invalidate(); self.is_cyclic.invalidate(); + self.postorder_cache.invalidate(); &mut self.basic_blocks } @@ -432,6 +438,7 @@ impl<'tcx> Body<'tcx> { self.predecessor_cache.invalidate(); self.switch_source_cache.invalidate(); self.is_cyclic.invalidate(); + self.postorder_cache.invalidate(); (&mut self.basic_blocks, &mut self.local_decls) } @@ -446,6 +453,7 @@ impl<'tcx> Body<'tcx> { self.predecessor_cache.invalidate(); self.switch_source_cache.invalidate(); self.is_cyclic.invalidate(); + self.postorder_cache.invalidate(); (&mut self.basic_blocks, &mut self.local_decls, &mut self.var_debug_info) } diff --git a/compiler/rustc_middle/src/mir/traversal.rs b/compiler/rustc_middle/src/mir/traversal.rs index d08bede1d73..8d831cc73b8 100644 --- a/compiler/rustc_middle/src/mir/traversal.rs +++ b/compiler/rustc_middle/src/mir/traversal.rs @@ -1,4 +1,7 @@ +use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; +use rustc_data_structures::sync::OnceCell; use rustc_index::bit_set::BitSet; +use rustc_serialize as serialize; use super::*; @@ -268,10 +271,6 @@ impl<'a, 'tcx> 
ReversePostorder<'a, 'tcx> { } } -pub fn reverse_postorder<'a, 'tcx>(body: &'a Body<'tcx>) -> ReversePostorder<'a, 'tcx> { - ReversePostorder::new(body, START_BLOCK) -} - impl<'a, 'tcx> Iterator for ReversePostorder<'a, 'tcx> { type Item = (BasicBlock, &'a BasicBlockData<'tcx>); @@ -307,3 +306,86 @@ pub fn reachable_as_bitset<'tcx>(body: &Body<'tcx>) -> BitSet<BasicBlock> { (&mut iter).for_each(drop); iter.visited } + +#[derive(Clone)] +pub struct ReversePostorderIter<'a, 'tcx> { + body: &'a Body<'tcx>, + blocks: &'a Vec<BasicBlock>, + idx: usize, +} + +impl<'a, 'tcx> Iterator for ReversePostorderIter<'a, 'tcx> { + type Item = (BasicBlock, &'a BasicBlockData<'tcx>); + + fn next(&mut self) -> Option<(BasicBlock, &'a BasicBlockData<'tcx>)> { + if self.idx == 0 { + return None; + } + self.idx -= 1; + + self.blocks.get(self.idx).map(|&bb| (bb, &self.body[bb])) + } + + fn size_hint(&self) -> (usize, Option<usize>) { + (self.idx, Some(self.idx)) + } +} + +impl<'a, 'tcx> ExactSizeIterator for ReversePostorderIter<'a, 'tcx> {} + +pub fn reverse_postorder<'a, 'tcx>(body: &'a Body<'tcx>) -> ReversePostorderIter<'a, 'tcx> { + let blocks = body.postorder_cache.compute(body); + + let len = blocks.len(); + + ReversePostorderIter { body, blocks, idx: len } +} + +#[derive(Clone, Debug)] +pub(super) struct PostorderCache { + cache: OnceCell<Vec<BasicBlock>>, +} + +impl PostorderCache { + #[inline] + pub(super) fn new() -> Self { + PostorderCache { cache: OnceCell::new() } + } + + /// Invalidates the postorder cache. + #[inline] + pub(super) fn invalidate(&mut self) { + self.cache = OnceCell::new(); + } + + /// Returns a `&Vec<BasicBlock>` representing the postorder traversal of this MIR. 
+ #[inline] + pub(super) fn compute(&self, body: &Body<'_>) -> &Vec<BasicBlock> { + self.cache.get_or_init(|| Postorder::new(body, START_BLOCK).map(|(bb, _)| bb).collect()) + } +} + +impl<S: serialize::Encoder> serialize::Encodable<S> for PostorderCache { + #[inline] + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + s.emit_unit() + } +} + +impl<D: serialize::Decoder> serialize::Decodable<D> for PostorderCache { + #[inline] + fn decode(_: &mut D) -> Self { + Self::new() + } +} + +impl<CTX> HashStable<CTX> for PostorderCache { + #[inline] + fn hash_stable(&self, _: &mut CTX, _: &mut StableHasher) { + // do nothing + } +} + +TrivialTypeFoldableAndLiftImpls! { + PostorderCache, +}