Rollup merge of #99027 - tmiasko:basic-blocks, r=oli-obk

Replace `Body::basic_blocks()` with field access

Since the refactoring in #98930, it is possible to borrow the basic blocks independently from other parts of MIR by accessing the `basic_blocks` field directly. Replace the now-unnecessary `Body::basic_blocks()` method with direct field access, which has the additional benefit of borrowing only the basic blocks.
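The borrow-splitting benefit is easiest to see in a small, self-contained sketch. The types below are simplified stand-ins invented for illustration (not the real MIR definitions); the point is only that a `&self` getter borrows the whole struct, while direct field access borrows just that one field:

```rust
struct BasicBlockData {
    statements: Vec<String>,
}

struct Body {
    basic_blocks: Vec<BasicBlockData>,
    local_decls: Vec<String>,
}

impl Body {
    // Old style: a getter borrows all of `Body` for as long as the returned
    // reference is alive.
    #[allow(dead_code)]
    fn basic_blocks(&self) -> &Vec<BasicBlockData> {
        &self.basic_blocks
    }
}

fn main() {
    let mut body = Body {
        basic_blocks: vec![BasicBlockData { statements: vec!["nop".into()] }],
        local_decls: vec!["_0".into()],
    };

    // New style: borrow only the `basic_blocks` field...
    let blocks = &body.basic_blocks;
    // ...so a disjoint field can still be mutated while `blocks` is live.
    body.local_decls.push("_1".into());
    // Using `body.basic_blocks()` above instead would borrow all of `body`
    // and turn the `local_decls` mutation into a compile error.
    let statements: usize = blocks.iter().map(|b| b.statements.len()).sum();
    println!("{} blocks, {} statements, {} locals", blocks.len(), statements, body.local_decls.len());
}
```

In the compiler this is what lets callers hold a borrow of `body.basic_blocks` while other parts of the `Body` remain usable.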
Commit: d182081de1
@@ -31,7 +31,7 @@ pub(super) fn generate_constraints<'cx, 'tcx>(
 body,
 };
 
-for (bb, data) in body.basic_blocks().iter_enumerated() {
+for (bb, data) in body.basic_blocks.iter_enumerated() {
 cg.visit_basic_block_data(bb, data);
 }
 }
@@ -143,7 +143,7 @@ struct OutOfScopePrecomputer<'a, 'tcx> {
 impl<'a, 'tcx> OutOfScopePrecomputer<'a, 'tcx> {
 fn new(body: &'a Body<'tcx>, regioncx: &'a RegionInferenceContext<'tcx>) -> Self {
 OutOfScopePrecomputer {
-visited: BitSet::new_empty(body.basic_blocks().len()),
+visited: BitSet::new_empty(body.basic_blocks.len()),
 visit_stack: vec![],
 body,
 regioncx,
@@ -459,7 +459,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
 return outmost_back_edge;
 }
 
-let block = &self.body.basic_blocks()[location.block];
+let block = &self.body.basic_blocks[location.block];
 
 if location.statement_index < block.statements.len() {
 let successor = location.successor_within_block();
@@ -518,7 +518,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
 }
 
 if loop_head.dominates(from, &self.dominators) {
-let block = &self.body.basic_blocks()[from.block];
+let block = &self.body.basic_blocks[from.block];
 
 if from.statement_index < block.statements.len() {
 let successor = from.successor_within_block();
@@ -568,7 +568,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
 UseSpans::PatUse(span)
 | UseSpans::OtherUse(span)
 | UseSpans::FnSelfUse { var_span: span, .. } => {
-let block = &self.body.basic_blocks()[location.block];
+let block = &self.body.basic_blocks[location.block];
 
 let kind = if let Some(&Statement {
 kind: StatementKind::FakeRead(box (FakeReadCause::ForLet(_), _)),
@@ -88,7 +88,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
 if let Some(StatementKind::Assign(box (
 place,
 Rvalue::Use(Operand::Move(move_from)),
-))) = self.body.basic_blocks()[location.block]
+))) = self.body.basic_blocks[location.block]
 .statements
 .get(location.statement_index)
 .map(|stmt| &stmt.kind)
@@ -33,7 +33,7 @@ impl LocationTable {
 pub(crate) fn new(body: &Body<'_>) -> Self {
 let mut num_points = 0;
 let statements_before_block = body
-.basic_blocks()
+.basic_blocks
 .iter()
 .map(|block_data| {
 let v = num_points;
@@ -25,7 +25,7 @@ impl RegionValueElements {
 pub(crate) fn new(body: &Body<'_>) -> Self {
 let mut num_points = 0;
 let statements_before_block: IndexVec<BasicBlock, usize> = body
-.basic_blocks()
+.basic_blocks
 .iter()
 .map(|block_data| {
 let v = num_points;
@@ -37,7 +37,7 @@ impl RegionValueElements {
 debug!("RegionValueElements: num_points={:#?}", num_points);
 
 let mut basic_blocks = IndexVec::with_capacity(num_points);
-for (bb, bb_data) in body.basic_blocks().iter_enumerated() {
+for (bb, bb_data) in body.basic_blocks.iter_enumerated() {
 basic_blocks.extend((0..=bb_data.statements.len()).map(|_| bb));
 }
 
@@ -2633,7 +2633,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
 self.check_local(&body, local, local_decl);
 }
 
-for (block, block_data) in body.basic_blocks().iter_enumerated() {
+for (block, block_data) in body.basic_blocks.iter_enumerated() {
 let mut location = Location { block, statement_index: 0 };
 for stmt in &block_data.statements {
 if !stmt.source_info.span.is_dummy() {
@@ -26,7 +26,7 @@ pub(crate) fn analyze(fx: &FunctionCx<'_, '_, '_>) -> IndexVec<Local, SsaKind> {
 })
 .collect::<IndexVec<Local, SsaKind>>();
 
-for bb in fx.mir.basic_blocks().iter() {
+for bb in fx.mir.basic_blocks.iter() {
 for stmt in bb.statements.iter() {
 match &stmt.kind {
 Assign(place_and_rval) => match &place_and_rval.1 {
@@ -73,7 +73,7 @@ pub(crate) fn codegen_fn<'tcx>(
 // Predefine blocks
 let start_block = bcx.create_block();
 let block_map: IndexVec<BasicBlock, Block> =
-(0..mir.basic_blocks().len()).map(|_| bcx.create_block()).collect();
+(0..mir.basic_blocks.len()).map(|_| bcx.create_block()).collect();
 
 // Make FunctionCx
 let target_config = module.target_config();
@@ -271,7 +271,7 @@ fn codegen_fn_body(fx: &mut FunctionCx<'_, '_, '_>, start_block: Block) {
 }
 fx.tcx.sess.time("codegen prelude", || crate::abi::codegen_fn_prelude(fx, start_block));
 
-for (bb, bb_data) in fx.mir.basic_blocks().iter_enumerated() {
+for (bb, bb_data) in fx.mir.basic_blocks.iter_enumerated() {
 let block = fx.get_block(bb);
 fx.bcx.switch_to_block(block);
 
@@ -505,7 +505,7 @@ pub(crate) fn mir_operand_get_const_val<'tcx>(
 return None;
 }
 let mut computed_const_val = None;
-for bb_data in fx.mir.basic_blocks() {
+for bb_data in fx.mir.basic_blocks.iter() {
 for stmt in &bb_data.statements {
 match &stmt.kind {
 StatementKind::Assign(local_and_rvalue) if &local_and_rvalue.0 == place => {
@@ -266,7 +266,7 @@ pub fn cleanup_kinds(mir: &mir::Body<'_>) -> IndexVec<mir::BasicBlock, CleanupKi
 result: &mut IndexVec<mir::BasicBlock, CleanupKind>,
 mir: &mir::Body<'tcx>,
 ) {
-for (bb, data) in mir.basic_blocks().iter_enumerated() {
+for (bb, data) in mir.basic_blocks.iter_enumerated() {
 match data.terminator().kind {
 TerminatorKind::Goto { .. }
 | TerminatorKind::Resume
@@ -296,7 +296,7 @@ pub fn cleanup_kinds(mir: &mir::Body<'_>) -> IndexVec<mir::BasicBlock, CleanupKi
 }
 
 fn propagate<'tcx>(result: &mut IndexVec<mir::BasicBlock, CleanupKind>, mir: &mir::Body<'tcx>) {
-let mut funclet_succs = IndexVec::from_elem(None, mir.basic_blocks());
+let mut funclet_succs = IndexVec::from_elem(None, &mir.basic_blocks);
 
 let mut set_successor = |funclet: mir::BasicBlock, succ| match funclet_succs[funclet] {
 ref mut s @ None => {
@@ -359,7 +359,7 @@ pub fn cleanup_kinds(mir: &mir::Body<'_>) -> IndexVec<mir::BasicBlock, CleanupKi
 }
 }
 
-let mut result = IndexVec::from_elem(CleanupKind::NotCleanup, mir.basic_blocks());
+let mut result = IndexVec::from_elem(CleanupKind::NotCleanup, &mir.basic_blocks);
 
 discover_masters(&mut result, mir);
 propagate(&mut result, mir);
@@ -150,13 +150,13 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
 let start_llbb = Bx::append_block(cx, llfn, "start");
 let mut bx = Bx::build(cx, start_llbb);
 
-if mir.basic_blocks().iter().any(|bb| bb.is_cleanup) {
+if mir.basic_blocks.iter().any(|bb| bb.is_cleanup) {
 bx.set_personality_fn(cx.eh_personality());
 }
 
 let cleanup_kinds = analyze::cleanup_kinds(&mir);
 let cached_llbbs: IndexVec<mir::BasicBlock, Option<Bx::BasicBlock>> = mir
-.basic_blocks()
+.basic_blocks
 .indices()
 .map(|bb| if bb == mir::START_BLOCK { Some(start_llbb) } else { None })
 .collect();
@@ -172,8 +172,8 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
 unreachable_block: None,
 double_unwind_guard: None,
 cleanup_kinds,
-landing_pads: IndexVec::from_elem(None, mir.basic_blocks()),
-funclets: IndexVec::from_fn_n(|_| None, mir.basic_blocks().len()),
+landing_pads: IndexVec::from_elem(None, &mir.basic_blocks),
+funclets: IndexVec::from_fn_n(|_| None, mir.basic_blocks.len()),
 locals: IndexVec::new(),
 debug_context,
 per_local_var_debug_info: None,
@@ -782,7 +782,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
 assert_eq!(
 unwinding,
 match self.frame().loc {
-Ok(loc) => self.body().basic_blocks()[loc.block].is_cleanup,
+Ok(loc) => self.body().basic_blocks[loc.block].is_cleanup,
 Err(_) => true,
 }
 );
@@ -28,7 +28,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
 let mut source_info = *frame.body.source_info(loc);
 
 // If this is a `Call` terminator, use the `fn_span` instead.
-let block = &frame.body.basic_blocks()[loc.block];
+let block = &frame.body.basic_blocks[loc.block];
 if loc.statement_index == block.statements.len() {
 debug!(
 "find_closest_untracked_caller_location: got terminator {:?} ({:?})",
@@ -53,7 +53,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
 self.pop_stack_frame(/* unwinding */ true)?;
 return Ok(true);
 };
-let basic_block = &self.body().basic_blocks()[loc.block];
+let basic_block = &self.body().basic_blocks[loc.block];
 
 if let Some(stmt) = basic_block.statements.get(loc.statement_index) {
 let old_frames = self.frame_idx();
@@ -135,7 +135,7 @@ impl<'mir, 'tcx> Qualifs<'mir, 'tcx> {
 // qualifs for the return type.
 let return_block = ccx
 .body
-.basic_blocks()
+.basic_blocks
 .iter_enumerated()
 .find(|(_, block)| matches!(block.terminator().kind, TerminatorKind::Return))
 .map(|(bb, _)| bb);
@@ -710,7 +710,7 @@ impl<'a, 'tcx> Promoter<'a, 'tcx> {
 }
 
 fn assign(&mut self, dest: Local, rvalue: Rvalue<'tcx>, span: Span) {
-let last = self.promoted.basic_blocks().last().unwrap();
+let last = self.promoted.basic_blocks.last().unwrap();
 let data = &mut self.promoted[last];
 data.statements.push(Statement {
 source_info: SourceInfo::outermost(span),
@@ -803,7 +803,7 @@ impl<'a, 'tcx> Promoter<'a, 'tcx> {
 self.visit_operand(arg, loc);
 }
 
-let last = self.promoted.basic_blocks().last().unwrap();
+let last = self.promoted.basic_blocks.last().unwrap();
 let new_target = self.new_block();
 
 *self.promoted[last].terminator_mut() = Terminator {
@@ -1041,7 +1041,7 @@ pub fn is_const_fn_in_array_repeat_expression<'tcx>(
 _ => {}
 }
 
-for block in body.basic_blocks() {
+for block in body.basic_blocks.iter() {
 if let Some(Terminator { kind: TerminatorKind::Call { func, destination, .. }, .. }) =
 &block.terminator
 {
@@ -140,8 +140,8 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
 if bb == START_BLOCK {
 self.fail(location, "start block must not have predecessors")
 }
-if let Some(bb) = self.body.basic_blocks().get(bb) {
-let src = self.body.basic_blocks().get(location.block).unwrap();
+if let Some(bb) = self.body.basic_blocks.get(bb) {
+let src = self.body.basic_blocks.get(location.block).unwrap();
 match (src.is_cleanup, bb.is_cleanup, edge_kind) {
 // Non-cleanup blocks can jump to non-cleanup blocks along non-unwind edges
 (false, false, EdgeKind::Normal)
@@ -881,13 +881,13 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
 }
 TerminatorKind::Resume | TerminatorKind::Abort => {
 let bb = location.block;
-if !self.body.basic_blocks()[bb].is_cleanup {
+if !self.body.basic_blocks[bb].is_cleanup {
 self.fail(location, "Cannot `Resume` or `Abort` from non-cleanup basic block")
 }
 }
 TerminatorKind::Return => {
 let bb = location.block;
-if self.body.basic_blocks()[bb].is_cleanup {
+if self.body.basic_blocks[bb].is_cleanup {
 self.fail(location, "Cannot `Return` from cleanup basic block")
 }
 }
@@ -12,14 +12,14 @@ pub fn mir_fn_to_generic_graph<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'_>) -> Grap
 
 // Nodes
 let nodes: Vec<Node> = body
-.basic_blocks()
+.basic_blocks
 .iter_enumerated()
 .map(|(block, _)| bb_to_graph_node(block, body, dark_mode))
 .collect();
 
 // Edges
 let mut edges = Vec::new();
-for (source, _) in body.basic_blocks().iter_enumerated() {
+for (source, _) in body.basic_blocks.iter_enumerated() {
 let def_id = body.source.def_id();
 let terminator = body[source].terminator();
 let labels = terminator.kind.fmt_successor_labels();
@@ -331,11 +331,6 @@ impl<'tcx> Body<'tcx> {
 body
 }
 
-#[inline]
-pub fn basic_blocks(&self) -> &IndexVec<BasicBlock, BasicBlockData<'tcx>> {
-&self.basic_blocks
-}
-
 #[inline]
 pub fn basic_blocks_mut(&mut self) -> &mut IndexVec<BasicBlock, BasicBlockData<'tcx>> {
 self.basic_blocks.as_mut()
@@ -490,7 +485,7 @@ impl<'tcx> Index<BasicBlock> for Body<'tcx> {
 
 #[inline]
 fn index(&self, index: BasicBlock) -> &BasicBlockData<'tcx> {
-&self.basic_blocks()[index]
+&self.basic_blocks[index]
 }
 }
 
@@ -19,7 +19,7 @@ pub struct MirPatch<'tcx> {
 impl<'tcx> MirPatch<'tcx> {
 pub fn new(body: &Body<'tcx>) -> Self {
 let mut result = MirPatch {
-patch_map: IndexVec::from_elem(None, body.basic_blocks()),
+patch_map: IndexVec::from_elem(None, &body.basic_blocks),
 new_blocks: vec![],
 new_statements: vec![],
 new_locals: vec![],
@@ -29,7 +29,7 @@ impl<'tcx> MirPatch<'tcx> {
 };
 
 // Check if we already have a resume block
-for (bb, block) in body.basic_blocks().iter_enumerated() {
+for (bb, block) in body.basic_blocks.iter_enumerated() {
 if let TerminatorKind::Resume = block.terminator().kind && block.statements.is_empty() {
 result.resume_block = Some(bb);
 break;
@@ -61,7 +61,7 @@ impl<'tcx> MirPatch<'tcx> {
 }
 
 pub fn terminator_loc(&self, body: &Body<'tcx>, bb: BasicBlock) -> Location {
-let offset = match bb.index().checked_sub(body.basic_blocks().len()) {
+let offset = match bb.index().checked_sub(body.basic_blocks.len()) {
 Some(index) => self.new_blocks[index].statements.len(),
 None => body[bb].statements.len(),
 };
@@ -129,7 +129,7 @@ impl<'tcx> MirPatch<'tcx> {
 debug!(
 "MirPatch: {} new blocks, starting from index {}",
 self.new_blocks.len(),
-body.basic_blocks().len()
+body.basic_blocks.len()
 );
 let bbs = if self.patch_map.is_empty() && self.new_blocks.is_empty() {
 body.basic_blocks.as_mut_preserves_cfg()
@@ -173,7 +173,7 @@ impl<'tcx> MirPatch<'tcx> {
 }
 
 pub fn source_info_for_location(&self, body: &Body<'tcx>, loc: Location) -> SourceInfo {
-let data = match loc.block.index().checked_sub(body.basic_blocks().len()) {
+let data = match loc.block.index().checked_sub(body.basic_blocks.len()) {
 Some(new) => &self.new_blocks[new],
 None => &body[loc.block],
 };
@@ -318,10 +318,10 @@ where
 F: FnMut(PassWhere, &mut dyn Write) -> io::Result<()>,
 {
 write_mir_intro(tcx, body, w)?;
-for block in body.basic_blocks().indices() {
+for block in body.basic_blocks.indices() {
 extra_data(PassWhere::BeforeBlock(block), w)?;
 write_basic_block(tcx, block, body, extra_data, w)?;
-if block.index() + 1 != body.basic_blocks().len() {
+if block.index() + 1 != body.basic_blocks.len() {
 writeln!(w)?;
 }
 }
@@ -105,7 +105,7 @@ where
 }
 let body_span = hir_body.unwrap().value.span;
 let mut span_viewables = Vec::new();
-for (bb, data) in body.basic_blocks().iter_enumerated() {
+for (bb, data) in body.basic_blocks.iter_enumerated() {
 match spanview {
 MirSpanview::Statement => {
 for (i, statement) in data.statements.iter().enumerate() {
@@ -37,7 +37,7 @@ impl<'a, 'tcx> Preorder<'a, 'tcx> {
 
 Preorder {
 body,
-visited: BitSet::new_empty(body.basic_blocks().len()),
+visited: BitSet::new_empty(body.basic_blocks.len()),
 worklist,
 root_is_start_block: root == START_BLOCK,
 }
@@ -71,7 +71,7 @@ impl<'a, 'tcx> Iterator for Preorder<'a, 'tcx> {
 
 fn size_hint(&self) -> (usize, Option<usize>) {
 // All the blocks, minus the number of blocks we've visited.
-let upper = self.body.basic_blocks().len() - self.visited.count();
+let upper = self.body.basic_blocks.len() - self.visited.count();
 
 let lower = if self.root_is_start_block {
 // We will visit all remaining blocks exactly once.
@@ -951,7 +951,7 @@ macro_rules! basic_blocks {
 $body.basic_blocks.as_mut_preserves_cfg()
 };
 ($body:ident,) => {
-$body.basic_blocks()
+$body.basic_blocks
 };
 }
 
@@ -272,7 +272,7 @@ fn mir_build(tcx: TyCtxt<'_>, def: ty::WithOptConstParam<LocalDefId>) -> Body<'_
 // by borrow checking.
 debug_assert!(
 !(body.local_decls.has_free_regions()
-|| body.basic_blocks().has_free_regions()
+|| body.basic_blocks.has_free_regions()
 || body.var_debug_info.has_free_regions()
 || body.yield_ty().has_free_regions()),
 "Unexpected free regions in MIR: {:?}",
@@ -111,9 +111,9 @@ where
 // Otherwise, compute and store the cumulative transfer function for each block.
 
 let identity = GenKillSet::identity(analysis.bottom_value(body).domain_size());
-let mut trans_for_block = IndexVec::from_elem(identity, body.basic_blocks());
+let mut trans_for_block = IndexVec::from_elem(identity, &body.basic_blocks);
 
-for (block, block_data) in body.basic_blocks().iter_enumerated() {
+for (block, block_data) in body.basic_blocks.iter_enumerated() {
 let trans = &mut trans_for_block[block];
 A::Direction::gen_kill_effects_in_block(&analysis, trans, block, block_data);
 }
@@ -147,7 +147,7 @@ where
 apply_trans_for_block: Option<Box<dyn Fn(BasicBlock, &mut A::Domain)>>,
 ) -> Self {
 let bottom_value = analysis.bottom_value(body);
-let mut entry_sets = IndexVec::from_elem(bottom_value.clone(), body.basic_blocks());
+let mut entry_sets = IndexVec::from_elem(bottom_value.clone(), &body.basic_blocks);
 analysis.initialize_start_block(body, &mut entry_sets[mir::START_BLOCK]);
 
 if A::Direction::IS_BACKWARD && entry_sets[mir::START_BLOCK] != bottom_value {
@@ -200,8 +200,7 @@ where
 ..
 } = self;
 
-let mut dirty_queue: WorkQueue<BasicBlock> =
-WorkQueue::with_none(body.basic_blocks().len());
+let mut dirty_queue: WorkQueue<BasicBlock> = WorkQueue::with_none(body.basic_blocks.len());
 
 if A::Direction::IS_FORWARD {
 for (bb, _) in traversal::reverse_postorder(body) {
@@ -108,12 +108,12 @@ where
 type Edge = CfgEdge;
 
 fn nodes(&self) -> dot::Nodes<'_, Self::Node> {
-self.body.basic_blocks().indices().collect::<Vec<_>>().into()
+self.body.basic_blocks.indices().collect::<Vec<_>>().into()
 }
 
 fn edges(&self) -> dot::Edges<'_, Self::Edge> {
 self.body
-.basic_blocks()
+.basic_blocks
 .indices()
 .flat_map(|bb| dataflow_successors(self.body, bb))
 .collect::<Vec<_>>()
@@ -100,9 +100,9 @@ impl<D: Direction> MockAnalysis<'_, D> {
 
 fn mock_entry_sets(&self) -> IndexVec<BasicBlock, BitSet<usize>> {
 let empty = self.bottom_value(self.body);
-let mut ret = IndexVec::from_elem(empty, &self.body.basic_blocks());
+let mut ret = IndexVec::from_elem(empty, &self.body.basic_blocks);
 
-for (bb, _) in self.body.basic_blocks().iter_enumerated() {
+for (bb, _) in self.body.basic_blocks.iter_enumerated() {
 ret[bb] = self.mock_entry_set(bb);
 }
 
@@ -169,7 +169,7 @@ impl<'tcx, D: Direction> AnalysisDomain<'tcx> for MockAnalysis<'tcx, D> {
 const NAME: &'static str = "mock";
 
 fn bottom_value(&self, body: &mir::Body<'tcx>) -> Self::Domain {
-BitSet::new_empty(Self::BASIC_BLOCK_OFFSET + body.basic_blocks().len())
+BitSet::new_empty(Self::BASIC_BLOCK_OFFSET + body.basic_blocks.len())
 }
 
 fn initialize_start_block(&self, _: &mir::Body<'tcx>, _: &mut Self::Domain) {
@@ -271,9 +271,7 @@ fn test_cursor<D: Direction>(analysis: MockAnalysis<'_, D>) {
 cursor.allow_unreachable();
 
 let every_target = || {
-body.basic_blocks()
-.iter_enumerated()
-.flat_map(|(bb, _)| SeekTarget::iter_in_block(body, bb))
+body.basic_blocks.iter_enumerated().flat_map(|(bb, _)| SeekTarget::iter_in_block(body, bb))
 };
 
 let mut seek_to_target = |targ| {
@@ -243,7 +243,7 @@ pub(super) fn gather_moves<'tcx>(
 
 builder.gather_args();
 
-for (bb, block) in body.basic_blocks().iter_enumerated() {
+for (bb, block) in body.basic_blocks.iter_enumerated() {
 for (i, stmt) in block.statements.iter().enumerate() {
 let source = Location { block: bb, statement_index: i };
 builder.gather_statement(source, stmt);
@@ -217,7 +217,7 @@ where
 fn new(body: &Body<'_>) -> Self {
 LocationMap {
 map: body
-.basic_blocks()
+.basic_blocks
 .iter()
 .map(|block| vec![T::default(); block.statements.len() + 1])
 .collect(),
@@ -101,7 +101,7 @@ pub fn sanity_check_via_rustc_peek<'tcx, A>(
 
 let mut cursor = ResultsCursor::new(body, results);
 
-let peek_calls = body.basic_blocks().iter_enumerated().filter_map(|(bb, block_data)| {
+let peek_calls = body.basic_blocks.iter_enumerated().filter_map(|(bb, block_data)| {
 PeekCall::from_terminator(tcx, block_data.terminator()).map(|call| (bb, block_data, call))
 });
 
@@ -7,7 +7,7 @@ use rustc_middle::mir::{self, Local};
 pub fn always_storage_live_locals(body: &mir::Body<'_>) -> BitSet<Local> {
 let mut always_live_locals = BitSet::new_filled(body.local_decls.len());
 
-for block in body.basic_blocks() {
+for block in &*body.basic_blocks {
 for statement in &block.statements {
 use mir::StatementKind::{StorageDead, StorageLive};
 if let StorageLive(l) | StorageDead(l) = statement.kind {
@@ -56,7 +56,7 @@ impl<'tcx> MirPass<'tcx> for AbortUnwindingCalls {
 // example.
 let mut calls_to_terminate = Vec::new();
 let mut cleanups_to_remove = Vec::new();
-for (id, block) in body.basic_blocks().iter_enumerated() {
+for (id, block) in body.basic_blocks.iter_enumerated() {
 if block.is_cleanup {
 continue;
 }
@@ -45,7 +45,7 @@ impl AddCallGuards {
 // We need a place to store the new blocks generated
 let mut new_blocks = Vec::new();
 
-let cur_len = body.basic_blocks().len();
+let cur_len = body.basic_blocks.len();
 
 for block in body.basic_blocks_mut() {
 match block.terminator {
@@ -55,7 +55,7 @@ fn add_moves_for_packed_drops_patch<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>)
 let mut patch = MirPatch::new(body);
 let param_env = tcx.param_env(def_id);
 
-for (bb, data) in body.basic_blocks().iter_enumerated() {
+for (bb, data) in body.basic_blocks.iter_enumerated() {
 let loc = Location { block: bb, statement_index: data.statements.len() };
 let terminator = data.terminator();
 
@@ -61,14 +61,14 @@ impl<'tcx> Visitor<'tcx> for ConstGotoOptimizationFinder<'_, 'tcx> {
 let _: Option<_> = try {
 let target = terminator.kind.as_goto()?;
 // We only apply this optimization if the last statement is a const assignment
-let last_statement = self.body.basic_blocks()[location.block].statements.last()?;
+let last_statement = self.body.basic_blocks[location.block].statements.last()?;
 
 if let (place, Rvalue::Use(Operand::Constant(_const))) =
 last_statement.kind.as_assign()?
 {
 // We found a constant being assigned to `place`.
 // Now check that the target of this Goto switches on this place.
-let target_bb = &self.body.basic_blocks()[target];
+let target_bb = &self.body.basic_blocks[target];
 
 // The `StorageDead(..)` statement does not affect the functionality of mir.
 // We can move this part of the statement up to the predecessor.
@@ -131,7 +131,7 @@ impl<'tcx> MirPass<'tcx> for ConstProp {
 
 let dummy_body = &Body::new(
 body.source,
-body.basic_blocks().clone(),
+(*body.basic_blocks).clone(),
 body.source_scopes.clone(),
 body.local_decls.clone(),
 Default::default(),
@@ -105,7 +105,7 @@ impl<'tcx> MirLint<'tcx> for ConstProp {
 
 let dummy_body = &Body::new(
 body.source,
-body.basic_blocks().clone(),
+(*body.basic_blocks).clone(),
 body.source_scopes.clone(),
 body.local_decls.clone(),
 Default::default(),
@@ -522,7 +522,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
 
 impl<'tcx> Visitor<'tcx> for ConstPropagator<'_, 'tcx> {
 fn visit_body(&mut self, body: &Body<'tcx>) {
-for (bb, data) in body.basic_blocks().iter_enumerated() {
+for (bb, data) in body.basic_blocks.iter_enumerated() {
 self.visit_basic_block_data(bb, data);
 }
 }
@@ -713,7 +713,7 @@ impl<
 
 ShortCircuitPreorder {
 body,
-visited: BitSet::new_empty(body.basic_blocks().len()),
+visited: BitSet::new_empty(body.basic_blocks.len()),
 worklist,
 filtered_successors,
 }
@@ -747,7 +747,7 @@ impl<
 }
 
 fn size_hint(&self) -> (usize, Option<usize>) {
-let size = self.body.basic_blocks().len() - self.visited.count();
+let size = self.body.basic_blocks.len() - self.visited.count();
 (size, Some(size))
 }
 }
@@ -80,7 +80,7 @@ impl<'tcx> MirPass<'tcx> for InstrumentCoverage {
 return;
 }
 
-match mir_body.basic_blocks()[mir::START_BLOCK].terminator().kind {
+match mir_body.basic_blocks[mir::START_BLOCK].terminator().kind {
 TerminatorKind::Unreachable => {
 trace!("InstrumentCoverage skipped for unreachable `START_BLOCK`");
 return;
@@ -84,7 +84,7 @@ impl CoverageVisitor {
 }
 
 fn visit_body(&mut self, body: &Body<'_>) {
-for bb_data in body.basic_blocks().iter() {
+for bb_data in body.basic_blocks.iter() {
 for statement in bb_data.statements.iter() {
 if let StatementKind::Coverage(box ref coverage) = statement.kind {
 if is_inlined(body, statement) {
@@ -138,7 +138,7 @@ fn coverageinfo<'tcx>(tcx: TyCtxt<'tcx>, instance_def: ty::InstanceDef<'tcx>) ->
 
 fn covered_code_regions<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> Vec<&'tcx CodeRegion> {
 let body = mir_body(tcx, def_id);
-body.basic_blocks()
+body.basic_blocks
 .iter()
 .flat_map(|data| {
 data.statements.iter().filter_map(|statement| match statement.kind {
@@ -176,7 +176,7 @@ fn debug_basic_blocks<'tcx>(mir_body: &Body<'tcx>) -> String {
 format!(
 "{:?}",
 mir_body
-.basic_blocks()
+.basic_blocks
 .iter_enumerated()
 .map(|(bb, data)| {
 let term = &data.terminator();
@@ -213,7 +213,7 @@ fn print_mir_graphviz(name: &str, mir_body: &Body<'_>) {
 "digraph {} {{\n{}\n}}",
 name,
 mir_body
-.basic_blocks()
+.basic_blocks
 .iter_enumerated()
 .map(|(bb, data)| {
 format!(
@@ -653,7 +653,7 @@ fn test_traverse_coverage_with_loops() {
 
 fn synthesize_body_span_from_terminators(mir_body: &Body<'_>) -> Span {
 let mut some_span: Option<Span> = None;
-for (_, data) in mir_body.basic_blocks().iter_enumerated() {
+for (_, data) in mir_body.basic_blocks.iter_enumerated() {
 let term_span = data.terminator().source_info.span;
 if let Some(span) = some_span.as_mut() {
 *span = span.to(term_span);
@@ -58,7 +58,7 @@ fn find_duplicates(body: &Body<'_>) -> FxHashMap<BasicBlock, BasicBlock> {
 let mut duplicates = FxHashMap::default();
 
 let bbs_to_go_through =
-body.basic_blocks().iter_enumerated().filter(|(_, bbd)| !bbd.is_cleanup).count();
+body.basic_blocks.iter_enumerated().filter(|(_, bbd)| !bbd.is_cleanup).count();
 
 let mut same_hashes =
 FxHashMap::with_capacity_and_hasher(bbs_to_go_through, Default::default());
@@ -71,8 +71,7 @@ fn find_duplicates(body: &Body<'_>) -> FxHashMap<BasicBlock, BasicBlock> {
 // When we see bb1, we see that it is a duplicate of bb3, and therefore insert it in the duplicates list
 // with replacement bb3.
 // When the duplicates are removed, we will end up with only bb3.
-for (bb, bbd) in body.basic_blocks().iter_enumerated().rev().filter(|(_, bbd)| !bbd.is_cleanup)
-{
+for (bb, bbd) in body.basic_blocks.iter_enumerated().rev().filter(|(_, bbd)| !bbd.is_cleanup) {
 // Basic blocks can get really big, so to avoid checking for duplicates in basic blocks
 // that are unlikely to have duplicates, we stop early. The early bail number has been
 // found experimentally by eprintln while compiling the crates in the rustc-perf suite.
@@ -150,7 +150,7 @@ impl<'tcx> MirPass<'tcx> for DestinationPropagation {
 def_id,
 body.local_decls.len(),
 relevant,
-body.basic_blocks().len()
+body.basic_blocks.len()
 );
 if relevant > MAX_LOCALS {
 warn!(
@@ -159,11 +159,11 @@ impl<'tcx> MirPass<'tcx> for DestinationPropagation {
 );
 return;
 }
-if body.basic_blocks().len() > MAX_BLOCKS {
+if body.basic_blocks.len() > MAX_BLOCKS {
 warn!(
 "too many blocks in {:?} ({}, max is {}), not optimizing",
 def_id,
-body.basic_blocks().len(),
+body.basic_blocks.len(),
 MAX_BLOCKS
 );
 return;
@@ -104,8 +104,8 @@ impl<'tcx> MirPass<'tcx> for EarlyOtherwiseBranch {
 let mut should_cleanup = false;
 
 // Also consider newly generated bbs in the same pass
-for i in 0..body.basic_blocks().len() {
-let bbs = body.basic_blocks();
+for i in 0..body.basic_blocks.len() {
+let bbs = &*body.basic_blocks;
 let parent = BasicBlock::from_usize(i);
 let Some(opt_data) = evaluate_candidate(tcx, body, parent) else {
 continue
@@ -316,7 +316,7 @@ fn evaluate_candidate<'tcx>(
 body: &Body<'tcx>,
 parent: BasicBlock,
 ) -> Option<OptimizationData<'tcx>> {
-let bbs = body.basic_blocks();
+let bbs = &body.basic_blocks;
 let TerminatorKind::SwitchInt {
 targets,
 switch_ty: parent_ty,
@@ -89,13 +89,13 @@ fn find_dead_unwinds<'tcx>(
 debug!("find_dead_unwinds({:?})", body.span);
 // We only need to do this pass once, because unwind edges can only
 // reach cleanup blocks, which can't have unwind edges themselves.
-let mut dead_unwinds = BitSet::new_empty(body.basic_blocks().len());
+let mut dead_unwinds = BitSet::new_empty(body.basic_blocks.len());
 let mut flow_inits = MaybeInitializedPlaces::new(tcx, body, &env)
 .into_engine(tcx, body)
 .pass_name("find_dead_unwinds")
 .iterate_to_fixpoint()
 .into_results_cursor(body);
-for (bb, bb_data) in body.basic_blocks().iter_enumerated() {
+for (bb, bb_data) in body.basic_blocks.iter_enumerated() {
 let place = match bb_data.terminator().kind {
 TerminatorKind::Drop { ref place, unwind: Some(_), .. }
 | TerminatorKind::DropAndReplace { ref place, unwind: Some(_), .. } => {
@@ -303,7 +303,7 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
 }
 
 fn collect_drop_flags(&mut self) {
-for (bb, data) in self.body.basic_blocks().iter_enumerated() {
+for (bb, data) in self.body.basic_blocks.iter_enumerated() {
 let terminator = data.terminator();
 let place = match terminator.kind {
 TerminatorKind::Drop { ref place, .. }
@@ -358,7 +358,7 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
 }
 
 fn elaborate_drops(&mut self) {
-for (bb, data) in self.body.basic_blocks().iter_enumerated() {
+for (bb, data) in self.body.basic_blocks.iter_enumerated() {
 let loc = Location { block: bb, statement_index: data.statements.len() };
 let terminator = data.terminator();
 
@@ -515,7 +515,7 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
 }
 
 fn drop_flags_for_fn_rets(&mut self) {
-for (bb, data) in self.body.basic_blocks().iter_enumerated() {
+for (bb, data) in self.body.basic_blocks.iter_enumerated() {
 if let TerminatorKind::Call {
 destination, target: Some(tgt), cleanup: Some(_), ..
 } = data.terminator().kind
@@ -550,7 +550,7 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
 // drop flags by themselves, to avoid the drop flags being
 // clobbered before they are read.
 
-for (bb, data) in self.body.basic_blocks().iter_enumerated() {
+for (bb, data) in self.body.basic_blocks.iter_enumerated() {
 debug!("drop_flags_for_locs({:?})", data);
 for i in 0..(data.statements.len() + 1) {
 debug!("drop_flag_for_locs: stmt {}", i);
@@ -65,7 +65,7 @@ fn has_ffi_unwind_calls(tcx: TyCtxt<'_>, local_def_id: LocalDefId) -> bool {
 
 let mut tainted = false;
 
-for block in body.basic_blocks() {
+for block in body.basic_blocks.iter() {
 if block.is_cleanup {
 continue;
 }
@@ -490,12 +490,12 @@ fn locals_live_across_suspend_points<'tcx>(
 .iterate_to_fixpoint()
 .into_results_cursor(body_ref);
 
-let mut storage_liveness_map = IndexVec::from_elem(None, body.basic_blocks());
+let mut storage_liveness_map = IndexVec::from_elem(None, &body.basic_blocks);
 let mut live_locals_at_suspension_points = Vec::new();
 let mut source_info_at_suspension_points = Vec::new();
 let mut live_locals_at_any_suspension_point = BitSet::new_empty(body.local_decls.len());
 
-for (block, data) in body.basic_blocks().iter_enumerated() {
+for (block, data) in body.basic_blocks.iter_enumerated() {
 if let TerminatorKind::Yield { .. } = data.terminator().kind {
 let loc = Location { block, statement_index: data.statements.len() };
 
@@ -704,7 +704,7 @@ impl<'mir, 'tcx> rustc_mir_dataflow::ResultsVisitor<'mir, 'tcx>
 impl StorageConflictVisitor<'_, '_, '_> {
 fn apply_state(&mut self, flow_state: &BitSet<Local>, loc: Location) {
 // Ignore unreachable blocks.
-if self.body.basic_blocks()[loc.block].terminator().kind == TerminatorKind::Unreachable {
+if self.body.basic_blocks[loc.block].terminator().kind == TerminatorKind::Unreachable {
 return;
 }
 
@@ -886,7 +886,7 @@ fn elaborate_generator_drops<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
 
 let mut elaborator = DropShimElaborator { body, patch: MirPatch::new(body), tcx, param_env };
 
-for (block, block_data) in body.basic_blocks().iter_enumerated() {
+for (block, block_data) in body.basic_blocks.iter_enumerated() {
 let (target, unwind, source_info) = match block_data.terminator() {
 Terminator { source_info, kind: TerminatorKind::Drop { place, target, unwind } } => {
 if let Some(local) = place.as_local() {
@@ -991,7 +991,7 @@ fn insert_panic_block<'tcx>(
 body: &mut Body<'tcx>,
 message: AssertMessage<'tcx>,
 ) -> BasicBlock {
-let assert_block = BasicBlock::new(body.basic_blocks().len());
+let assert_block = BasicBlock::new(body.basic_blocks.len());
 let term = TerminatorKind::Assert {
 cond: Operand::Constant(Box::new(Constant {
 span: body.span,
@@ -1021,7 +1021,7 @@ fn can_return<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>, param_env: ty::ParamEn
 }
 
 // If there's a return terminator the function may return.
-for block in body.basic_blocks() {
+for block in body.basic_blocks.iter() {
 if let TerminatorKind::Return = block.terminator().kind {
 return true;
 }
@@ -1038,7 +1038,7 @@ fn can_unwind<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>) -> bool {
 }
 
 // Unwinds can only start at certain terminators.
-for block in body.basic_blocks() {
+for block in body.basic_blocks.iter() {
 match block.terminator().kind {
 // These never unwind.
 TerminatorKind::Goto { .. }
@@ -95,7 +95,7 @@ fn inline<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) -> bool {
 history: Vec::new(),
 changed: false,
 };
-let blocks = BasicBlock::new(0)..body.basic_blocks().next_index();
+let blocks = BasicBlock::new(0)..body.basic_blocks.next_index();
 this.process_blocks(body, blocks);
 this.changed
 }
@@ -217,9 +217,9 @@ impl<'tcx> Inliner<'tcx> {
 }
 }
 
-let old_blocks = caller_body.basic_blocks().next_index();
+let old_blocks = caller_body.basic_blocks.next_index();
 self.inline_call(caller_body, &callsite, callee_body);
-let new_blocks = old_blocks..caller_body.basic_blocks().next_index();
+let new_blocks = old_blocks..caller_body.basic_blocks.next_index();
 
 Ok(new_blocks)
 }
@@ -409,14 +409,14 @@ impl<'tcx> Inliner<'tcx> {
 // Give a bonus functions with a small number of blocks,
 // We normally have two or three blocks for even
 // very small functions.
-if callee_body.basic_blocks().len() <= 3 {
+if callee_body.basic_blocks.len() <= 3 {
 threshold += threshold / 4;
 }
 debug!(" final inline threshold = {}", threshold);
 
 // FIXME: Give a bonus to functions with only a single caller
 let diverges = matches!(
-callee_body.basic_blocks()[START_BLOCK].terminator().kind,
+callee_body.basic_blocks[START_BLOCK].terminator().kind,
 TerminatorKind::Unreachable | TerminatorKind::Call { target: None, .. }
 );
 if diverges && !matches!(callee_attrs.inline, InlineAttr::Always) {
@@ -434,13 +434,13 @@ impl<'tcx> Inliner<'tcx> {
 
 // Traverse the MIR manually so we can account for the effects of inlining on the CFG.
 let mut work_list = vec![START_BLOCK];
-let mut visited = BitSet::new_empty(callee_body.basic_blocks().len());
+let mut visited = BitSet::new_empty(callee_body.basic_blocks.len());
 while let Some(bb) = work_list.pop() {
 if !visited.insert(bb.index()) {
 continue;
 }
 
-let blk = &callee_body.basic_blocks()[bb];
+let blk = &callee_body.basic_blocks[bb];
 checker.visit_basic_block_data(bb, blk);
 
 let term = blk.terminator();
@@ -541,7 +541,7 @@ impl<'tcx> Inliner<'tcx> {
 args: &args,
 new_locals: Local::new(caller_body.local_decls.len())..,
 new_scopes: SourceScope::new(caller_body.source_scopes.len())..,
-new_blocks: BasicBlock::new(caller_body.basic_blocks().len())..,
+new_blocks: BasicBlock::new(caller_body.basic_blocks.len())..,
 destination: dest,
 callsite_scope: caller_body.source_scopes[callsite.source_info.scope].clone(),
 callsite,
@@ -153,7 +153,7 @@ pub(crate) fn mir_inliner_callees<'tcx>(
 _ => tcx.instance_mir(instance),
 };
 let mut calls = FxIndexSet::default();
-for bb_data in body.basic_blocks() {
+for bb_data in body.basic_blocks.iter() {
 let terminator = bb_data.terminator();
 if let TerminatorKind::Call { func, .. } = &terminator.kind {
 let ty = func.ty(&body.local_decls, tcx);
@@ -15,7 +15,7 @@ impl<'tcx> MirPass<'tcx> for MultipleReturnTerminators {
 
 fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
 // find basic blocks with no statement and a return terminator
-let mut bbs_simple_returns = BitSet::new_empty(body.basic_blocks().len());
+let mut bbs_simple_returns = BitSet::new_empty(body.basic_blocks.len());
 let def_id = body.source.def_id();
 let bbs = body.basic_blocks_mut();
 for idx in bbs.indices() {
@@ -21,10 +21,10 @@ impl<'tcx> MirPass<'tcx> for NormalizeArrayLen {
 
 fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
 // early returns for edge cases of highly unrolled functions
-if body.basic_blocks().len() > MAX_NUM_BLOCKS {
+if body.basic_blocks.len() > MAX_NUM_BLOCKS {
 return;
 }
-if body.local_decls().len() > MAX_NUM_LOCALS {
+if body.local_decls.len() > MAX_NUM_LOCALS {
 return;
 }
 normalize_array_len_calls(tcx, body)
@@ -89,7 +89,7 @@ fn local_eligible_for_nrvo(body: &mut mir::Body<'_>) -> Option<Local> {
 }
 
 let mut copied_to_return_place = None;
-for block in body.basic_blocks().indices() {
+for block in body.basic_blocks.indices() {
 // Look for blocks with a `Return` terminator.
 if !matches!(body[block].terminator().kind, mir::TerminatorKind::Return) {
 continue;
@@ -122,7 +122,7 @@ fn find_local_assigned_to_return_place(
 body: &mut mir::Body<'_>,
 ) -> Option<Local> {
 let mut block = start;
-let mut seen = HybridBitSet::new_empty(body.basic_blocks().len());
+let mut seen = HybridBitSet::new_empty(body.basic_blocks.len());
 
 // Iterate as long as `block` has exactly one predecessor that we have not yet visited.
 while seen.insert(block) {
@@ -94,7 +94,7 @@ impl RemoveNoopLandingPads {
 
 let mut jumps_folded = 0;
 let mut landing_pads_removed = 0;
-let mut nop_landing_pads = BitSet::new_empty(body.basic_blocks().len());
+let mut nop_landing_pads = BitSet::new_empty(body.basic_blocks.len());
 
 // This is a post-order traversal, so that if A post-dominates B
 // then A will be visited before B.
@@ -35,7 +35,7 @@ impl<'tcx> MirPass<'tcx> for RemoveUninitDrops {
 .into_results_cursor(body);
 
 let mut to_remove = vec![];
-for (bb, block) in body.basic_blocks().iter_enumerated() {
+for (bb, block) in body.basic_blocks.iter_enumerated() {
 let terminator = block.terminator();
 let (TerminatorKind::Drop { place, .. } | TerminatorKind::DropAndReplace { place, .. })
 = &terminator.kind
@@ -62,7 +62,7 @@ impl<'tcx> MirPass<'tcx> for SeparateConstSwitch {
 pub fn separate_const_switch(body: &mut Body<'_>) -> usize {
 let mut new_blocks: SmallVec<[(BasicBlock, BasicBlock); 6]> = SmallVec::new();
 let predecessors = body.basic_blocks.predecessors();
-'block_iter: for (block_id, block) in body.basic_blocks().iter_enumerated() {
+'block_iter: for (block_id, block) in body.basic_blocks.iter_enumerated() {
 if let TerminatorKind::SwitchInt {
 discr: Operand::Copy(switch_place) | Operand::Move(switch_place),
 ..
@@ -90,7 +90,7 @@ pub fn separate_const_switch(body: &mut Body<'_>) -> usize {
 
 let mut predecessors_left = predecessors[block_id].len();
 'predec_iter: for predecessor_id in predecessors[block_id].iter().copied() {
-let predecessor = &body.basic_blocks()[predecessor_id];
+let predecessor = &body.basic_blocks[predecessor_id];
 
 // First we make sure the predecessor jumps
 // in a reasonable way
@@ -74,7 +74,7 @@ pub struct CfgSimplifier<'a, 'tcx> {
 
 impl<'a, 'tcx> CfgSimplifier<'a, 'tcx> {
 pub fn new(body: &'a mut Body<'tcx>) -> Self {
-let mut pred_count = IndexVec::from_elem(0u32, body.basic_blocks());
+let mut pred_count = IndexVec::from_elem(0u32, &body.basic_blocks);
 
 // we can't use mir.predecessors() here because that counts
 // dead blocks, which we don't want to.
@@ -263,7 +263,7 @@ impl<'a, 'tcx> CfgSimplifier<'a, 'tcx> {
 
 pub fn remove_dead_blocks<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
 let reachable = traversal::reachable_as_bitset(body);
-let num_blocks = body.basic_blocks().len();
+let num_blocks = body.basic_blocks.len();
 if num_blocks == reachable.count() {
 return;
 }
@@ -151,7 +151,7 @@ struct OptimizationFinder<'a, 'tcx> {
 impl<'tcx> OptimizationFinder<'_, 'tcx> {
 fn find_optimizations(&self) -> Vec<OptimizationInfo<'tcx>> {
 self.body
-.basic_blocks()
+.basic_blocks
 .iter_enumerated()
 .filter_map(|(bb_idx, bb)| {
 // find switch
@@ -596,7 +596,7 @@ struct SimplifyBranchSameOptimizationFinder<'a, 'tcx> {
 impl<'tcx> SimplifyBranchSameOptimizationFinder<'_, 'tcx> {
 fn find(&self) -> Vec<SimplifyBranchSameOptimization> {
 self.body
-.basic_blocks()
+.basic_blocks
 .iter_enumerated()
 .filter_map(|(bb_idx, bb)| {
 let (discr_switched_on, targets_and_values) = match &bb.terminator().kind {
@@ -632,7 +632,7 @@ impl<'tcx> SimplifyBranchSameOptimizationFinder<'_, 'tcx> {
 
 let mut iter_bbs_reachable = targets_and_values
 .iter()
-.map(|target_and_value| (target_and_value, &self.body.basic_blocks()[target_and_value.target]))
+.map(|target_and_value| (target_and_value, &self.body.basic_blocks[target_and_value.target]))
 .filter(|(_, bb)| {
 // Reaching `unreachable` is UB so assume it doesn't happen.
 bb.terminator().kind != TerminatorKind::Unreachable
@@ -79,7 +79,7 @@ fn ensure_otherwise_unreachable<'tcx>(
 targets: &SwitchTargets,
 ) -> Option<BasicBlockData<'tcx>> {
 let otherwise = targets.otherwise();
-let bb = &body.basic_blocks()[otherwise];
+let bb = &body.basic_blocks[otherwise];
 if bb.terminator().kind == TerminatorKind::Unreachable
 && bb.statements.iter().all(|s| matches!(&s.kind, StatementKind::StorageDead(_)))
 {
@@ -102,10 +102,10 @@ impl<'tcx> MirPass<'tcx> for UninhabitedEnumBranching {
 fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
 trace!("UninhabitedEnumBranching starting for {:?}", body.source);
 
-for bb in body.basic_blocks().indices() {
+for bb in body.basic_blocks.indices() {
 trace!("processing block {:?}", bb);
 
-let Some(discriminant_ty) = get_switched_on_type(&body.basic_blocks()[bb], tcx, body) else {
+let Some(discriminant_ty) = get_switched_on_type(&body.basic_blocks[bb], tcx, body) else {
 continue;
 };
 
@@ -481,7 +481,7 @@ fn codegened_and_inlined_items<'tcx>(tcx: TyCtxt<'tcx>, (): ()) -> &'tcx DefIdSe
 continue;
 }
 let body = tcx.instance_mir(instance.def);
-for block in body.basic_blocks() {
+for block in body.basic_blocks.iter() {
 for statement in &block.statements {
 let mir::StatementKind::Coverage(_) = statement.kind else { continue };
 let scope = statement.source_info.scope;
@@ -348,7 +348,7 @@ fn instance_def_size_estimate<'tcx>(
 match instance_def {
 InstanceDef::Item(..) | InstanceDef::DropGlue(..) => {
 let mir = tcx.instance_mir(instance_def);
-mir.basic_blocks().iter().map(|bb| bb.statements.len() + 1).sum()
+mir.basic_blocks.iter().map(|bb| bb.statements.len() + 1).sum()
 }
 // Estimate the size of other compiler-generated shims to be 1.
 _ => 1,
@@ -105,7 +105,7 @@ impl<'tcx> LateLintPass<'tcx> for RedundantClone {
 vis.into_map(cx, maybe_storage_live_result)
 };
 
-for (bb, bbdata) in mir.basic_blocks().iter_enumerated() {
+for (bb, bbdata) in mir.basic_blocks.iter_enumerated() {
 let terminator = bbdata.terminator();
 
 if terminator.source_info.span.from_expansion() {
@@ -186,7 +186,7 @@ impl<'tcx> LateLintPass<'tcx> for RedundantClone {
 unwrap_or_continue!(find_stmt_assigns_to(cx, mir, pred_arg, true, ps[0]));
 let loc = mir::Location {
 block: bb,
-statement_index: mir.basic_blocks()[bb].statements.len(),
+statement_index: mir.basic_blocks[bb].statements.len(),
 };
 
 // This can be turned into `res = move local` if `arg` and `cloned` are not borrowed
@@ -310,7 +310,7 @@ fn find_stmt_assigns_to<'tcx>(
 by_ref: bool,
 bb: mir::BasicBlock,
 ) -> Option<(mir::Local, CannotMoveOut)> {
-let rvalue = mir.basic_blocks()[bb].statements.iter().rev().find_map(|stmt| {
+let rvalue = mir.basic_blocks[bb].statements.iter().rev().find_map(|stmt| {
 if let mir::StatementKind::Assign(box (mir::Place { local, .. }, v)) = &stmt.kind {
 return if *local == to_local { Some(v) } else { None };
 }
@@ -55,7 +55,7 @@ pub fn is_min_const_fn<'a, 'tcx>(tcx: TyCtxt<'tcx>, body: &'a Body<'tcx>, msrv:
 body.local_decls.iter().next().unwrap().source_info.span,
 )?;
 
-for bb in body.basic_blocks() {
+for bb in body.basic_blocks.iter() {
 check_terminator(tcx, body, bb.terminator(), msrv)?;
 for stmt in &bb.statements {
 check_statement(tcx, body, def_id, stmt)?;