Mirror of https://github.com/rust-lang/rust.git (synced 2024-11-22 06:44:35 +00:00)
Rollup merge of #102778 - nbdd0121:mir, r=tmiasko
Fix MIR inlining of asm_unwind

MIR inlining currently doesn't handle inline asm's unwind edge correctly. This code will cause an ICE:

```rust
struct D;

impl Drop for D {
    fn drop(&mut self) {}
}

#[inline(always)]
fn foo() {
    let _d = D;
    unsafe { std::arch::asm!("", options(may_unwind)) };
}

pub fn main() {
    foo();
}
```

This PR fixes the issue. I also take the opportunity to extract the common unwind-edge handling into a method.
This commit is contained in: commit 2f2664923b
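For context, here is a minimal stand-alone sketch of the unwind-edge rule that the new `map_unwind` helper (shown in the diff below) centralizes. `BasicBlock`, the `Integrator` fields, and `block_offset` are simplified stand-ins invented for illustration, not the actual rustc-internal types or APIs:

```rust
// Simplified stand-in for rustc's BasicBlock index type.
#[derive(Copy, Clone, Debug, PartialEq)]
struct BasicBlock(usize);

// Simplified stand-in for the inliner's Integrator; field names are illustrative.
struct Integrator {
    /// Offset applied when renumbering the callee's blocks into the caller's body.
    block_offset: usize,
    /// True while integrating a block that is itself a cleanup block.
    in_cleanup_block: bool,
    /// The caller's cleanup block for the inlined call site, if any.
    cleanup_block: Option<BasicBlock>,
}

impl Integrator {
    fn map_block(&self, block: BasicBlock) -> BasicBlock {
        BasicBlock(block.0 + self.block_offset)
    }

    /// Remap the unwind edge of an inlined terminator (Drop, Call, Assert,
    /// InlineAsm, ...). Inside a cleanup block no unwind edge may exist;
    /// elsewhere an existing edge is renumbered, and a missing edge is
    /// redirected to the original call's cleanup block.
    fn map_unwind(&self, unwind: Option<BasicBlock>) -> Option<BasicBlock> {
        if self.in_cleanup_block {
            assert!(unwind.is_none(), "cleanup on cleanup block");
            return unwind;
        }
        match unwind {
            Some(target) => Some(self.map_block(target)),
            None => self.cleanup_block,
        }
    }
}

fn main() {
    let integrator = Integrator {
        block_offset: 10,
        in_cleanup_block: false,
        cleanup_block: Some(BasicBlock(3)),
    };
    // An explicit unwind edge is renumbered into the caller's block space.
    assert_eq!(integrator.map_unwind(Some(BasicBlock(1))), Some(BasicBlock(11)));
    // A missing unwind edge falls back to the original call's cleanup block.
    assert_eq!(integrator.map_unwind(None), Some(BasicBlock(3)));
    println!("unwind-edge mapping behaves as expected");
}
```

The `None => cleanup_block` fallback is the relevant case here: in the pre-fix `InlineAsm` arm the unwind edge was only touched when `destination` was `None`, so routing every terminator through one helper closes that gap.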
@@ -977,6 +977,21 @@ impl Integrator<'_, '_> {
         trace!("mapping block `{:?}` to `{:?}`", block, new);
         new
     }
+
+    fn map_unwind(&self, unwind: Option<BasicBlock>) -> Option<BasicBlock> {
+        if self.in_cleanup_block {
+            if unwind.is_some() {
+                bug!("cleanup on cleanup block");
+            }
+            return unwind;
+        }
+
+        match unwind {
+            Some(target) => Some(self.map_block(target)),
+            // Add an unwind edge to the original call's cleanup block
+            None => self.cleanup_block,
+        }
+    }
 }
 
 impl<'tcx> MutVisitor<'tcx> for Integrator<'_, 'tcx> {
@@ -1085,35 +1100,17 @@ impl<'tcx> MutVisitor<'tcx> for Integrator<'_, 'tcx> {
             TerminatorKind::Drop { ref mut target, ref mut unwind, .. }
             | TerminatorKind::DropAndReplace { ref mut target, ref mut unwind, .. } => {
                 *target = self.map_block(*target);
-                if let Some(tgt) = *unwind {
-                    *unwind = Some(self.map_block(tgt));
-                } else if !self.in_cleanup_block {
-                    // Unless this drop is in a cleanup block, add an unwind edge to
-                    // the original call's cleanup block
-                    *unwind = self.cleanup_block;
-                }
+                *unwind = self.map_unwind(*unwind);
             }
             TerminatorKind::Call { ref mut target, ref mut cleanup, .. } => {
                 if let Some(ref mut tgt) = *target {
                     *tgt = self.map_block(*tgt);
                 }
-                if let Some(tgt) = *cleanup {
-                    *cleanup = Some(self.map_block(tgt));
-                } else if !self.in_cleanup_block {
-                    // Unless this call is in a cleanup block, add an unwind edge to
-                    // the original call's cleanup block
-                    *cleanup = self.cleanup_block;
-                }
+                *cleanup = self.map_unwind(*cleanup);
             }
             TerminatorKind::Assert { ref mut target, ref mut cleanup, .. } => {
                 *target = self.map_block(*target);
-                if let Some(tgt) = *cleanup {
-                    *cleanup = Some(self.map_block(tgt));
-                } else if !self.in_cleanup_block {
-                    // Unless this assert is in a cleanup block, add an unwind edge to
-                    // the original call's cleanup block
-                    *cleanup = self.cleanup_block;
-                }
+                *cleanup = self.map_unwind(*cleanup);
             }
             TerminatorKind::Return => {
                 terminator.kind = if let Some(tgt) = self.callsite.target {
@@ -1141,11 +1138,8 @@ impl<'tcx> MutVisitor<'tcx> for Integrator<'_, 'tcx> {
             TerminatorKind::InlineAsm { ref mut destination, ref mut cleanup, .. } => {
                 if let Some(ref mut tgt) = *destination {
                     *tgt = self.map_block(*tgt);
-                } else if !self.in_cleanup_block {
-                    // Unless this inline asm is in a cleanup block, add an unwind edge to
-                    // the original call's cleanup block
-                    *cleanup = self.cleanup_block;
                 }
+                *cleanup = self.map_unwind(*cleanup);
             }
         }
     }
src/test/mir-opt/inline/asm-unwind.rs (new file, 22 lines)
@@ -0,0 +1,22 @@
+// Tests inlining of `may_unwind` inline assembly.
+//
+// ignore-wasm32-bare compiled with panic=abort by default
+// needs-asm-support
+#![feature(asm_unwind)]
+
+struct D;
+
+impl Drop for D {
+    fn drop(&mut self) {}
+}
+
+#[inline(always)]
+fn foo() {
+    let _d = D;
+    unsafe { std::arch::asm!("", options(may_unwind)) };
+}
+
+// EMIT_MIR asm_unwind.main.Inline.diff
+pub fn main() {
+    foo();
+}
src/test/mir-opt/inline/asm_unwind.main.Inline.diff (new file, 45 lines)
@@ -0,0 +1,45 @@
- // MIR for `main` before Inline
+ // MIR for `main` after Inline
  
  fn main() -> () {
      let mut _0: ();                  // return place in scope 0 at $DIR/asm-unwind.rs:+0:15: +0:15
      let _1: ();                      // in scope 0 at $DIR/asm-unwind.rs:+1:5: +1:10
+     scope 1 (inlined foo) {          // at $DIR/asm-unwind.rs:21:5: 21:10
+         let _2: D;                   // in scope 1 at $DIR/asm-unwind.rs:15:9: 15:11
+         scope 2 {
+             debug _d => _2;          // in scope 2 at $DIR/asm-unwind.rs:15:9: 15:11
+             scope 3 {
+             }
+         }
+     }
  
      bb0: {
          StorageLive(_1);             // scope 0 at $DIR/asm-unwind.rs:+1:5: +1:10
-         _1 = foo() -> bb1;           // scope 0 at $DIR/asm-unwind.rs:+1:5: +1:10
-                                      // mir::Constant
-                                      // + span: $DIR/asm-unwind.rs:21:5: 21:8
-                                      // + literal: Const { ty: fn() {foo}, val: Value(<ZST>) }
+         StorageLive(_2);             // scope 1 at $DIR/asm-unwind.rs:15:9: 15:11
+         asm!("", options(MAY_UNWIND)) -> [return: bb1, unwind: bb3]; // scope 3 at $DIR/asm-unwind.rs:16:14: 16:54
      }
  
      bb1: {
+         drop(_2) -> bb2;             // scope 1 at $DIR/asm-unwind.rs:17:1: 17:2
+     }
+ 
+     bb2: {
+         StorageDead(_2);             // scope 1 at $DIR/asm-unwind.rs:17:1: 17:2
          StorageDead(_1);             // scope 0 at $DIR/asm-unwind.rs:+1:10: +1:11
          _0 = const ();               // scope 0 at $DIR/asm-unwind.rs:+0:15: +2:2
          return;                      // scope 0 at $DIR/asm-unwind.rs:+2:2: +2:2
+     }
+ 
+     bb3 (cleanup): {
+         drop(_2) -> bb4;             // scope 1 at $DIR/asm-unwind.rs:17:1: 17:2
+     }
+ 
+     bb4 (cleanup): {
+         resume;                      // scope 1 at $DIR/asm-unwind.rs:14:1: 17:2
      }
  }