Refactor call terminator to always hold a destination place

Jakob Degen 2022-04-16 09:27:54 -04:00
parent 222c5724ec
commit 09b0936db2
67 changed files with 422 additions and 412 deletions
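For orientation, a minimal standalone sketch of the shape change this commit makes to the Call terminator. The `Place` and `BasicBlock` stand-ins below are illustrative only, not the rustc_middle::mir definitions:

// Illustrative stand-ins; the real types live in rustc_middle::mir.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Place(u32);
#[derive(Clone, Copy, Debug, PartialEq)]
struct BasicBlock(u32);

// Old shape: the return place and the return block were bundled together,
// so a diverging call carried no destination place at all.
#[allow(dead_code)]
enum OldCall {
    Call { destination: Option<(Place, BasicBlock)> },
}

// New shape: every call names a destination place; only the return edge
// (`target`) is optional.
enum NewCall {
    Call { destination: Place, target: Option<BasicBlock> },
}

fn main() {
    // A diverging call still records where its (never produced) value would go.
    let diverging = NewCall::Call { destination: Place(0), target: None };
    match diverging {
        NewCall::Call { destination, target } => {
            assert_eq!(destination, Place(0));
            assert!(target.is_none());
        }
    }
}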

View File

@ -140,9 +140,7 @@ impl<'cg, 'cx, 'tcx> Visitor<'tcx> for ConstraintGeneration<'cg, 'cx, 'tcx> {
// A `Call` terminator's return value can be a local which has borrows,
// so we need to record those as `killed` as well.
if let TerminatorKind::Call { destination, .. } = terminator.kind {
if let Some((place, _)) = destination {
self.record_killed_borrows_for_place(place, location);
}
self.record_killed_borrows_for_place(destination, location);
}
self.super_terminator(terminator, location);

View File

@ -2198,10 +2198,10 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
"annotate_argument_and_return_for_borrow: target={:?} terminator={:?}",
target, terminator
);
if let TerminatorKind::Call { destination: Some((place, _)), args, .. } =
if let TerminatorKind::Call { destination, target: Some(_), args, .. } =
&terminator.kind
{
if let Some(assigned_to) = place.as_local() {
if let Some(assigned_to) = destination.as_local() {
debug!(
"annotate_argument_and_return_for_borrow: assigned_to={:?} args={:?}",
assigned_to, args

View File

@ -705,10 +705,10 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
let terminator = block.terminator();
debug!("was_captured_by_trait_object: terminator={:?}", terminator);
if let TerminatorKind::Call { destination: Some((place, block)), args, .. } =
if let TerminatorKind::Call { destination, target: Some(block), args, .. } =
&terminator.kind
{
if let Some(dest) = place.as_local() {
if let Some(dest) = destination.as_local() {
debug!(
"was_captured_by_trait_object: target={:?} dest={:?} args={:?}",
target, dest, args

View File

@ -124,6 +124,7 @@ impl<'cx, 'tcx> Visitor<'tcx> for InvalidationGenerator<'cx, 'tcx> {
ref func,
ref args,
destination,
target: _,
cleanup: _,
from_hir_call: _,
fn_span: _,
@ -132,9 +133,7 @@ impl<'cx, 'tcx> Visitor<'tcx> for InvalidationGenerator<'cx, 'tcx> {
for arg in args {
self.consume_operand(location, arg);
}
if let Some((dest, _ /*bb*/)) = destination {
self.mutate_place(location, *dest, Deep);
}
self.mutate_place(location, *destination, Deep);
}
TerminatorKind::Assert { ref cond, expected: _, ref msg, target: _, cleanup: _ } => {
self.consume_operand(location, cond);

View File

@ -661,7 +661,8 @@ impl<'cx, 'tcx> rustc_mir_dataflow::ResultsVisitor<'cx, 'tcx> for MirBorrowckCtx
TerminatorKind::Call {
ref func,
ref args,
ref destination,
destination,
target: _,
cleanup: _,
from_hir_call: _,
fn_span: _,
@ -670,9 +671,7 @@ impl<'cx, 'tcx> rustc_mir_dataflow::ResultsVisitor<'cx, 'tcx> for MirBorrowckCtx
for arg in args {
self.consume_operand(loc, (arg, span), flow_state);
}
if let Some((dest, _ /*bb*/)) = *destination {
self.mutate_place(loc, (dest, span), Deep, flow_state);
}
self.mutate_place(loc, (destination, span), Deep, flow_state);
}
TerminatorKind::Assert { ref cond, expected: _, ref msg, target: _, cleanup: _ } => {
self.consume_operand(loc, (cond, span), flow_state);

View File

@ -1403,7 +1403,9 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
}
// FIXME: check the values
}
TerminatorKind::Call { ref func, ref args, ref destination, from_hir_call, .. } => {
TerminatorKind::Call {
ref func, ref args, destination, target, from_hir_call, ..
} => {
self.check_operand(func, term_location);
for arg in args {
self.check_operand(arg, term_location);
@ -1424,7 +1426,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
sig,
);
let sig = self.normalize(sig, term_location);
self.check_call_dest(body, term, &sig, destination, term_location);
self.check_call_dest(body, term, &sig, destination, target, term_location);
self.prove_predicates(
sig.inputs_and_output
@ -1502,15 +1504,16 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
body: &Body<'tcx>,
term: &Terminator<'tcx>,
sig: &ty::FnSig<'tcx>,
destination: &Option<(Place<'tcx>, BasicBlock)>,
destination: Place<'tcx>,
target: Option<BasicBlock>,
term_location: Location,
) {
let tcx = self.tcx();
match *destination {
Some((ref dest, _target_block)) => {
let dest_ty = dest.ty(body, tcx).ty;
match target {
Some(_) => {
let dest_ty = destination.ty(body, tcx).ty;
let dest_ty = self.normalize(dest_ty, term_location);
let category = match dest.as_local() {
let category = match destination.as_local() {
Some(RETURN_PLACE) => {
if let BorrowCheckContext {
universal_regions:
@ -1659,8 +1662,8 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
self.assert_iscleanup(body, block_data, unwind, true);
}
}
TerminatorKind::Call { ref destination, cleanup, .. } => {
if let &Some((_, target)) = destination {
TerminatorKind::Call { ref target, cleanup, .. } => {
if let &Some(target) = target {
self.assert_iscleanup(body, block_data, target, is_cleanup);
}
if let Some(cleanup) = cleanup {

View File

@ -66,8 +66,8 @@ impl<'visit, 'cx, 'tcx> Visitor<'tcx> for GatherUsedMutsVisitor<'visit, 'cx, 'tc
fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
debug!("visit_terminator: terminator={:?}", terminator);
match &terminator.kind {
TerminatorKind::Call { destination: Some((into, _)), .. } => {
self.remove_never_initialized_mut_locals(*into);
TerminatorKind::Call { destination, .. } => {
self.remove_never_initialized_mut_locals(*destination);
}
TerminatorKind::DropAndReplace { place, .. } => {
self.remove_never_initialized_mut_locals(*place);

View File

@ -312,13 +312,14 @@ pub(crate) fn codegen_terminator_call<'tcx>(
source_info: mir::SourceInfo,
func: &Operand<'tcx>,
args: &[Operand<'tcx>],
mir_dest: Option<(Place<'tcx>, BasicBlock)>,
destination: Place<'tcx>,
target: Option<BasicBlock>,
) {
let fn_ty = fx.monomorphize(func.ty(fx.mir, fx.tcx));
let fn_sig =
fx.tcx.normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), fn_ty.fn_sig(fx.tcx));
let destination = mir_dest.map(|(place, bb)| (codegen_place(fx, place), bb));
let ret_place = codegen_place(fx, destination);
// Handle special calls like intrinsics and empty drop glue.
let instance = if let ty::FnDef(def_id, substs) = *fn_ty.kind() {
@ -333,7 +334,8 @@ pub(crate) fn codegen_terminator_call<'tcx>(
&fx.tcx.symbol_name(instance).name,
substs,
args,
destination,
ret_place,
target,
);
return;
}
@ -344,14 +346,15 @@ pub(crate) fn codegen_terminator_call<'tcx>(
fx,
instance,
args,
destination,
ret_place,
target,
source_info,
);
return;
}
InstanceDef::DropGlue(_, None) => {
// empty drop glue - a nop.
let (_, dest) = destination.expect("Non terminating drop_in_place_real???");
let dest = target.expect("Non terminating drop_in_place_real???");
let ret_block = fx.get_block(dest);
fx.bcx.ins().jump(ret_block, &[]);
return;
@ -377,7 +380,7 @@ pub(crate) fn codegen_terminator_call<'tcx>(
.unwrap_or(false);
if is_cold {
fx.bcx.set_cold_block(fx.bcx.current_block().unwrap());
if let Some((_place, destination_block)) = destination {
if let Some(destination_block) = target {
fx.bcx.set_cold_block(fx.get_block(destination_block));
}
}
@ -459,7 +462,6 @@ pub(crate) fn codegen_terminator_call<'tcx>(
}
};
let ret_place = destination.map(|(place, _)| place);
self::returning::codegen_with_call_return_arg(fx, &fn_abi.ret, ret_place, |fx, return_ptr| {
let call_args = return_ptr
.into_iter()
@ -511,7 +513,7 @@ pub(crate) fn codegen_terminator_call<'tcx>(
call_inst
});
if let Some((_, dest)) = destination {
if let Some(dest) = target {
let ret_block = fx.get_block(dest);
fx.bcx.ins().jump(ret_block, &[]);
} else {

View File

@ -56,23 +56,22 @@ pub(super) fn codegen_return_param<'tcx>(
pub(super) fn codegen_with_call_return_arg<'tcx>(
fx: &mut FunctionCx<'_, '_, 'tcx>,
ret_arg_abi: &ArgAbi<'tcx, Ty<'tcx>>,
ret_place: Option<CPlace<'tcx>>,
ret_place: CPlace<'tcx>,
f: impl FnOnce(&mut FunctionCx<'_, '_, 'tcx>, Option<Value>) -> Inst,
) {
let (ret_temp_place, return_ptr) = match ret_arg_abi.mode {
PassMode::Ignore => (None, None),
PassMode::Indirect { attrs: _, extra_attrs: None, on_stack: _ } => match ret_place {
Some(ret_place) if matches!(ret_place.inner(), CPlaceInner::Addr(_, None)) => {
PassMode::Indirect { attrs: _, extra_attrs: None, on_stack: _ } => {
if matches!(ret_place.inner(), CPlaceInner::Addr(_, None)) {
// This is an optimization to prevent unnecessary copies of the return value when
// the return place is already a memory place as opposed to a register.
// This match arm can be safely removed.
(None, Some(ret_place.to_ptr().get_addr(fx)))
}
_ => {
} else {
let place = CPlace::new_stack_slot(fx, ret_arg_abi.layout);
(Some(place), Some(place.to_ptr().get_addr(fx)))
}
},
}
PassMode::Indirect { attrs: _, extra_attrs: Some(_), on_stack: _ } => {
unreachable!("unsized return value")
}
@ -84,39 +83,25 @@ pub(super) fn codegen_with_call_return_arg<'tcx>(
match ret_arg_abi.mode {
PassMode::Ignore => {}
PassMode::Direct(_) => {
if let Some(ret_place) = ret_place {
let ret_val = fx.bcx.inst_results(call_inst)[0];
ret_place.write_cvalue(fx, CValue::by_val(ret_val, ret_arg_abi.layout));
}
let ret_val = fx.bcx.inst_results(call_inst)[0];
ret_place.write_cvalue(fx, CValue::by_val(ret_val, ret_arg_abi.layout));
}
PassMode::Pair(_, _) => {
if let Some(ret_place) = ret_place {
let ret_val_a = fx.bcx.inst_results(call_inst)[0];
let ret_val_b = fx.bcx.inst_results(call_inst)[1];
ret_place.write_cvalue(
fx,
CValue::by_val_pair(ret_val_a, ret_val_b, ret_arg_abi.layout),
);
}
let ret_val_a = fx.bcx.inst_results(call_inst)[0];
let ret_val_b = fx.bcx.inst_results(call_inst)[1];
ret_place
.write_cvalue(fx, CValue::by_val_pair(ret_val_a, ret_val_b, ret_arg_abi.layout));
}
PassMode::Cast(cast) => {
if let Some(ret_place) = ret_place {
let results = fx
.bcx
.inst_results(call_inst)
.iter()
.copied()
.collect::<SmallVec<[Value; 2]>>();
let result =
super::pass_mode::from_casted_value(fx, &results, ret_place.layout(), cast);
ret_place.write_cvalue(fx, result);
}
let results =
fx.bcx.inst_results(call_inst).iter().copied().collect::<SmallVec<[Value; 2]>>();
let result =
super::pass_mode::from_casted_value(fx, &results, ret_place.layout(), cast);
ret_place.write_cvalue(fx, result);
}
PassMode::Indirect { attrs: _, extra_attrs: None, on_stack: _ } => {
if let (Some(ret_place), Some(ret_temp_place)) = (ret_place, ret_temp_place) {
// Both ret_place and ret_temp_place must be Some. If ret_place is None, this is
// a non-returning call. If ret_temp_place is None, it is not necessary to copy the
// return value.
if let Some(ret_temp_place) = ret_temp_place {
// If ret_temp_place is None, it is not necessary to copy the return value.
let ret_temp_value = ret_temp_place.to_cvalue(fx);
ret_place.write_cvalue(fx, ret_temp_value);
}
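The indirect-return handling above keeps a fast path: when the return place already has a stable memory address, that address is handed to the callee directly; otherwise a temporary stack slot is allocated and copied back after the call. A runnable stand-in sketch, with plain types replacing `CPlace`/`PassMode` (an assumption made purely for illustration):

// Stand-in for the PassMode::Indirect fast path above; not the real cranelift types.
#[derive(Clone, Copy, Debug, PartialEq)]
enum Ret {
    // The return place already lives in memory at a known address.
    Memory { addr: usize },
    // The return place is register-like; the callee cannot write through it.
    Register,
}

// Returns (temp slot to copy from after the call, if any; pointer handed to the callee).
fn prepare_indirect_return(
    ret_place: Ret,
    mut alloc_stack_slot: impl FnMut() -> usize,
) -> (Option<usize>, usize) {
    match ret_place {
        // Fast path: the callee writes straight into the final place; no copy afterwards.
        Ret::Memory { addr } => (None, addr),
        // Otherwise the callee writes into a fresh stack slot and the caller
        // copies the value into the real place once the call returns.
        Ret::Register => {
            let slot = alloc_stack_slot();
            (Some(slot), slot)
        }
    }
}

fn main() {
    assert_eq!(prepare_indirect_return(Ret::Memory { addr: 0x100 }, || 0xdead), (None, 0x100));
    assert_eq!(prepare_indirect_return(Ret::Register, || 0x200), (Some(0x200), 0x200));
}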

View File

@ -393,6 +393,7 @@ fn codegen_fn_content(fx: &mut FunctionCx<'_, '_, '_>) {
func,
args,
destination,
target,
fn_span,
cleanup: _,
from_hir_call: _,
@ -404,6 +405,7 @@ fn codegen_fn_content(fx: &mut FunctionCx<'_, '_, '_>) {
func,
args,
*destination,
*target,
)
});
}

View File

@ -542,8 +542,8 @@ pub(crate) fn mir_operand_get_const_val<'tcx>(
| TerminatorKind::FalseEdge { .. }
| TerminatorKind::FalseUnwind { .. } => unreachable!(),
TerminatorKind::InlineAsm { .. } => return None,
TerminatorKind::Call { destination: Some((call_place, _)), .. }
if call_place == place =>
TerminatorKind::Call { destination, target: Some(_), .. }
if destination == place =>
{
return None;
}

View File

@ -10,10 +10,9 @@ pub(crate) fn codegen_llvm_intrinsic_call<'tcx>(
intrinsic: &str,
_substs: SubstsRef<'tcx>,
args: &[mir::Operand<'tcx>],
destination: Option<(CPlace<'tcx>, BasicBlock)>,
ret: CPlace<'tcx>,
target: Option<BasicBlock>,
) {
let ret = destination.unwrap().0;
intrinsic_match! {
fx, intrinsic, args,
_ => {
@ -126,7 +125,7 @@ pub(crate) fn codegen_llvm_intrinsic_call<'tcx>(
};
}
let dest = destination.expect("all llvm intrinsics used by stdlib should return").1;
let dest = target.expect("all llvm intrinsics used by stdlib should return");
let ret_block = fx.get_block(dest);
fx.bcx.ins().jump(ret_block, &[]);
}

View File

@ -217,35 +217,42 @@ pub(crate) fn codegen_intrinsic_call<'tcx>(
fx: &mut FunctionCx<'_, '_, 'tcx>,
instance: Instance<'tcx>,
args: &[mir::Operand<'tcx>],
destination: Option<(CPlace<'tcx>, BasicBlock)>,
destination: CPlace<'tcx>,
target: Option<BasicBlock>,
source_info: mir::SourceInfo,
) {
let intrinsic = fx.tcx.item_name(instance.def_id());
let substs = instance.substs;
let ret = match destination {
Some((place, _)) => place,
None => {
// Insert non returning intrinsics here
match intrinsic {
sym::abort => {
fx.bcx.ins().trap(TrapCode::User(0));
}
sym::transmute => {
crate::base::codegen_panic(fx, "Transmuting to uninhabited type.", source_info);
}
_ => unimplemented!("unsupported instrinsic {}", intrinsic),
let target = if let Some(target) = target {
target
} else {
// Insert non returning intrinsics here
match intrinsic {
sym::abort => {
fx.bcx.ins().trap(TrapCode::User(0));
}
return;
sym::transmute => {
crate::base::codegen_panic(fx, "Transmuting to uninhabited type.", source_info);
}
_ => unimplemented!("unsupported instrinsic {}", intrinsic),
}
return;
};
if intrinsic.as_str().starts_with("simd_") {
self::simd::codegen_simd_intrinsic_call(fx, intrinsic, substs, args, ret, source_info.span);
let ret_block = fx.get_block(destination.expect("SIMD intrinsics don't diverge").1);
self::simd::codegen_simd_intrinsic_call(
fx,
intrinsic,
substs,
args,
destination,
source_info.span,
);
let ret_block = fx.get_block(target);
fx.bcx.ins().jump(ret_block, &[]);
} else if codegen_float_intrinsic_call(fx, intrinsic, args, ret) {
let ret_block = fx.get_block(destination.expect("Float intrinsics don't diverge").1);
} else if codegen_float_intrinsic_call(fx, intrinsic, args, destination) {
let ret_block = fx.get_block(target);
fx.bcx.ins().jump(ret_block, &[]);
} else {
codegen_regular_intrinsic_call(
@ -254,9 +261,9 @@ pub(crate) fn codegen_intrinsic_call<'tcx>(
intrinsic,
substs,
args,
ret,
source_info,
destination,
Some(target),
source_info,
);
}
}
@ -339,8 +346,8 @@ fn codegen_regular_intrinsic_call<'tcx>(
substs: SubstsRef<'tcx>,
args: &[mir::Operand<'tcx>],
ret: CPlace<'tcx>,
destination: Option<BasicBlock>,
source_info: mir::SourceInfo,
destination: Option<(CPlace<'tcx>, BasicBlock)>,
) {
let usize_layout = fx.layout_of(fx.tcx.types.usize);
@ -761,7 +768,7 @@ fn codegen_regular_intrinsic_call<'tcx>(
if fx.tcx.is_compiler_builtins(LOCAL_CRATE) {
// special case for compiler-builtins to avoid having to patch it
crate::trap::trap_unimplemented(fx, "128bit atomics not yet supported");
let ret_block = fx.get_block(destination.unwrap().1);
let ret_block = fx.get_block(destination.unwrap());
fx.bcx.ins().jump(ret_block, &[]);
return;
} else {
@ -789,7 +796,7 @@ fn codegen_regular_intrinsic_call<'tcx>(
if fx.tcx.is_compiler_builtins(LOCAL_CRATE) {
// special case for compiler-builtins to avoid having to patch it
crate::trap::trap_unimplemented(fx, "128bit atomics not yet supported");
let ret_block = fx.get_block(destination.unwrap().1);
let ret_block = fx.get_block(destination.unwrap());
fx.bcx.ins().jump(ret_block, &[]);
return;
} else {
@ -1130,6 +1137,6 @@ fn codegen_regular_intrinsic_call<'tcx>(
};
}
let ret_block = fx.get_block(destination.unwrap().1);
let ret_block = fx.get_block(destination.unwrap());
fx.bcx.ins().jump(ret_block, &[]);
}

View File

@ -519,7 +519,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
intrinsic: Option<Symbol>,
instance: Option<Instance<'tcx>>,
source_info: mir::SourceInfo,
destination: &Option<(mir::Place<'tcx>, mir::BasicBlock)>,
target: Option<mir::BasicBlock>,
cleanup: Option<mir::BasicBlock>,
) -> bool {
// Emit a panic or a no-op for `assert_*` intrinsics.
@ -576,12 +576,12 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
fn_abi,
llfn,
&[msg.0, msg.1, location],
destination.as_ref().map(|(_, bb)| (ReturnDest::Nothing, *bb)),
target.as_ref().map(|bb| (ReturnDest::Nothing, *bb)),
cleanup,
);
} else {
// a NOP
let target = destination.as_ref().unwrap().1;
let target = target.unwrap();
helper.funclet_br(self, bx, target)
}
true
@ -597,7 +597,8 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
terminator: &mir::Terminator<'tcx>,
func: &mir::Operand<'tcx>,
args: &[mir::Operand<'tcx>],
destination: &Option<(mir::Place<'tcx>, mir::BasicBlock)>,
destination: mir::Place<'tcx>,
target: Option<mir::BasicBlock>,
cleanup: Option<mir::BasicBlock>,
fn_span: Span,
) {
@ -624,7 +625,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
if let Some(ty::InstanceDef::DropGlue(_, None)) = def {
// Empty drop glue; a no-op.
let &(_, target) = destination.as_ref().unwrap();
let target = target.unwrap();
helper.funclet_br(self, &mut bx, target);
return;
}
@ -653,9 +654,8 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
};
if intrinsic == Some(sym::transmute) {
if let Some(destination_ref) = destination.as_ref() {
let &(dest, target) = destination_ref;
self.codegen_transmute(&mut bx, &args[0], dest);
if let Some(target) = target {
self.codegen_transmute(&mut bx, &args[0], destination);
helper.funclet_br(self, &mut bx, target);
} else {
// If we are trying to transmute to an uninhabited type,
@ -676,7 +676,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
intrinsic,
instance,
source_info,
destination,
target,
cleanup,
) {
return;
@ -687,15 +687,15 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
let mut llargs = Vec::with_capacity(arg_count);
// Prepare the return value destination
let ret_dest = if let Some((dest, _)) = *destination {
let ret_dest = if target.is_some() {
let is_intrinsic = intrinsic.is_some();
self.make_return_dest(&mut bx, dest, &fn_abi.ret, &mut llargs, is_intrinsic)
self.make_return_dest(&mut bx, destination, &fn_abi.ret, &mut llargs, is_intrinsic)
} else {
ReturnDest::Nothing
};
if intrinsic == Some(sym::caller_location) {
if let Some((_, target)) = destination.as_ref() {
if let Some(target) = target {
let location = self
.get_caller_location(&mut bx, mir::SourceInfo { span: fn_span, ..source_info });
@ -703,7 +703,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
location.val.store(&mut bx, tmp);
}
self.store_return(&mut bx, ret_dest, &fn_abi.ret, location.immediate());
helper.funclet_br(self, &mut bx, *target);
helper.funclet_br(self, &mut bx, target);
}
return;
}
@ -766,7 +766,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
self.store_return(&mut bx, ret_dest, &fn_abi.ret, dst.llval);
}
if let Some((_, target)) = *destination {
if let Some(target) = target {
helper.funclet_br(self, &mut bx, target);
} else {
bx.unreachable();
@ -913,7 +913,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
fn_abi,
fn_ptr,
&llargs,
destination.as_ref().map(|&(_, target)| (ret_dest, target)),
target.as_ref().map(|&target| (ret_dest, target)),
cleanup,
);
@ -930,7 +930,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
fn_abi,
fn_ptr,
&llargs,
destination.as_ref().map(|&(_, target)| (ret_dest, target)),
target.as_ref().map(|&target| (ret_dest, target)),
cleanup,
);
}
@ -1083,7 +1083,8 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
mir::TerminatorKind::Call {
ref func,
ref args,
ref destination,
destination,
target,
cleanup,
from_hir_call: _,
fn_span,
@ -1095,6 +1096,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
func,
args,
destination,
target,
cleanup,
fn_span,
);

View File

@ -60,7 +60,7 @@ fn eval_body_using_ecx<'mir, 'tcx>(
ecx.push_stack_frame(
cid.instance,
body,
Some(&ret.into()),
&ret.into(),
StackPopCleanup::Root { cleanup: false },
)?;

View File

@ -265,7 +265,8 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir,
instance: ty::Instance<'tcx>,
_abi: Abi,
args: &[OpTy<'tcx>],
_ret: Option<(&PlaceTy<'tcx>, mir::BasicBlock)>,
_dest: &PlaceTy<'tcx>,
_ret: Option<mir::BasicBlock>,
_unwind: StackPopUnwind, // unwinding is not supported in consts
) -> InterpResult<'tcx, Option<(&'mir mir::Body<'tcx>, ty::Instance<'tcx>)>> {
debug!("find_mir_or_eval_fn: {:?}", instance);
@ -293,6 +294,7 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir,
new_instance,
_abi,
args,
_dest,
_ret,
_unwind,
)?
@ -307,17 +309,18 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir,
ecx: &mut InterpCx<'mir, 'tcx, Self>,
instance: ty::Instance<'tcx>,
args: &[OpTy<'tcx>],
ret: Option<(&PlaceTy<'tcx>, mir::BasicBlock)>,
dest: &PlaceTy<'tcx, Self::PointerTag>,
target: Option<mir::BasicBlock>,
_unwind: StackPopUnwind,
) -> InterpResult<'tcx> {
// Shared intrinsics.
if ecx.emulate_intrinsic(instance, args, ret)? {
if ecx.emulate_intrinsic(instance, args, dest, target)? {
return Ok(());
}
let intrinsic_name = ecx.tcx.item_name(instance.def_id());
// CTFE-specific intrinsics.
let Some((dest, ret)) = ret else {
let Some(ret) = target else {
return Err(ConstEvalErrKind::NeedsRfc(format!(
"calling intrinsic `{}`",
intrinsic_name

View File

@ -105,7 +105,7 @@ pub struct Frame<'mir, 'tcx, Tag: Provenance = AllocId, Extra = ()> {
/// The location where the result of the current stack frame should be written to,
/// and its layout in the caller.
pub return_place: Option<PlaceTy<'tcx, Tag>>,
pub return_place: PlaceTy<'tcx, Tag>,
/// The list of locals for this stack frame, stored in order as
/// `[return_ptr, arguments..., variables..., temporaries...]`.
@ -676,7 +676,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
&mut self,
instance: ty::Instance<'tcx>,
body: &'mir mir::Body<'tcx>,
return_place: Option<&PlaceTy<'tcx, M::PointerTag>>,
return_place: &PlaceTy<'tcx, M::PointerTag>,
return_to_block: StackPopCleanup,
) -> InterpResult<'tcx> {
trace!("body: {:#?}", body);
@ -685,7 +685,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
body,
loc: Err(body.span), // Span used for errors caused during preamble.
return_to_block,
return_place: return_place.copied(),
return_place: *return_place,
// empty local array, we fill it in below, after we are inside the stack frame and
// all methods actually know about the frame
locals: IndexVec::new(),
@ -807,14 +807,9 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
self.stack_mut().pop().expect("tried to pop a stack frame, but there were none");
if !unwinding {
// Copy the return value to the caller's stack frame.
if let Some(ref return_place) = frame.return_place {
let op = self.access_local(&frame, mir::RETURN_PLACE, None)?;
self.copy_op_transmute(&op, return_place)?;
trace!("{:?}", self.dump_place(**return_place));
} else {
throw_ub!(Unreachable);
}
let op = self.access_local(&frame, mir::RETURN_PLACE, None)?;
self.copy_op_transmute(&op, &frame.return_place)?;
trace!("{:?}", self.dump_place(*frame.return_place));
}
let return_to_block = frame.return_to_block;
@ -1055,7 +1050,7 @@ where
body.hash_stable(hcx, hasher);
instance.hash_stable(hcx, hasher);
return_to_block.hash_stable(hcx, hasher);
return_place.as_ref().map(|r| &**r).hash_stable(hcx, hasher);
return_place.hash_stable(hcx, hasher);
locals.hash_stable(hcx, hasher);
loc.hash_stable(hcx, hasher);
extra.hash_stable(hcx, hasher);
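With `Frame::return_place` no longer an `Option`, popping a non-unwinding frame (earlier in this file) always copies the callee's `RETURN_PLACE` local into the caller-provided place, and the old `throw_ub!(Unreachable)` path disappears. A simplified standalone sketch, with plain integers standing in for interpreter places and values:

// Stand-in for the frame-pop logic above; not the real InterpCx/PlaceTy machinery.
struct Frame {
    // Local 0 is RETURN_PLACE; the callee writes its result there.
    locals: Vec<i64>,
    // Index in the caller's memory where the result must be copied; always known
    // because it is supplied when the frame is pushed.
    return_place: usize,
}

fn pop_frame(frame: Frame, caller_memory: &mut [i64], unwinding: bool) {
    if !unwinding {
        // Copy the return value to the caller's stack frame.
        caller_memory[frame.return_place] = frame.locals[0];
    }
}

fn main() {
    let mut caller = vec![0i64; 4];
    let callee = Frame { locals: vec![42], return_place: 2 };
    pop_frame(callee, &mut caller, false);
    assert_eq!(caller[2], 42);
}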

View File

@ -115,13 +115,14 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
&mut self,
instance: ty::Instance<'tcx>,
args: &[OpTy<'tcx, M::PointerTag>],
ret: Option<(&PlaceTy<'tcx, M::PointerTag>, mir::BasicBlock)>,
dest: &PlaceTy<'tcx, M::PointerTag>,
ret: Option<mir::BasicBlock>,
) -> InterpResult<'tcx, bool> {
let substs = instance.substs;
let intrinsic_name = self.tcx.item_name(instance.def_id());
// First handle intrinsics without return place.
let (dest, ret) = match ret {
let ret = match ret {
None => match intrinsic_name {
sym::transmute => throw_ub_format!("transmuting to uninhabited type"),
sym::abort => M::abort(self, "the program aborted execution".to_owned())?,

View File

@ -169,7 +169,8 @@ pub trait Machine<'mir, 'tcx>: Sized {
instance: ty::Instance<'tcx>,
abi: Abi,
args: &[OpTy<'tcx, Self::PointerTag>],
ret: Option<(&PlaceTy<'tcx, Self::PointerTag>, mir::BasicBlock)>,
destination: &PlaceTy<'tcx, Self::PointerTag>,
target: Option<mir::BasicBlock>,
unwind: StackPopUnwind,
) -> InterpResult<'tcx, Option<(&'mir mir::Body<'tcx>, ty::Instance<'tcx>)>>;
@ -180,7 +181,8 @@ pub trait Machine<'mir, 'tcx>: Sized {
fn_val: Self::ExtraFnVal,
abi: Abi,
args: &[OpTy<'tcx, Self::PointerTag>],
ret: Option<(&PlaceTy<'tcx, Self::PointerTag>, mir::BasicBlock)>,
destination: &PlaceTy<'tcx, Self::PointerTag>,
target: Option<mir::BasicBlock>,
unwind: StackPopUnwind,
) -> InterpResult<'tcx>;
@ -190,7 +192,8 @@ pub trait Machine<'mir, 'tcx>: Sized {
ecx: &mut InterpCx<'mir, 'tcx, Self>,
instance: ty::Instance<'tcx>,
args: &[OpTy<'tcx, Self::PointerTag>],
ret: Option<(&PlaceTy<'tcx, Self::PointerTag>, mir::BasicBlock)>,
destination: &PlaceTy<'tcx, Self::PointerTag>,
target: Option<mir::BasicBlock>,
unwind: StackPopUnwind,
) -> InterpResult<'tcx>;
@ -470,7 +473,8 @@ pub macro compile_time_machine(<$mir: lifetime, $tcx: lifetime>) {
fn_val: !,
_abi: Abi,
_args: &[OpTy<$tcx>],
_ret: Option<(&PlaceTy<$tcx>, mir::BasicBlock)>,
_destination: &PlaceTy<$tcx, Self::PointerTag>,
_target: Option<mir::BasicBlock>,
_unwind: StackPopUnwind,
) -> InterpResult<$tcx> {
match fn_val {}

View File

@ -57,7 +57,15 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
self.go_to_block(target_block);
}
Call { ref func, ref args, destination, ref cleanup, from_hir_call: _, fn_span: _ } => {
Call {
ref func,
ref args,
destination,
target,
ref cleanup,
from_hir_call: _,
fn_span: _,
} => {
let old_stack = self.frame_idx();
let old_loc = self.frame().loc;
let func = self.eval_operand(func, None)?;
@ -91,20 +99,14 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
),
};
let dest_place;
let ret = match destination {
Some((dest, ret)) => {
dest_place = self.eval_place(dest)?;
Some((&dest_place, ret))
}
None => None,
};
let destination = self.eval_place(destination)?;
self.eval_fn_call(
fn_val,
(fn_sig.abi, fn_abi),
&args,
with_caller_location,
ret,
&destination,
target,
match (cleanup, fn_abi.can_unwind) {
(Some(cleanup), true) => StackPopUnwind::Cleanup(*cleanup),
(None, true) => StackPopUnwind::Skip,
@ -299,7 +301,8 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
(caller_abi, caller_fn_abi): (Abi, &FnAbi<'tcx, Ty<'tcx>>),
args: &[OpTy<'tcx, M::PointerTag>],
with_caller_location: bool,
ret: Option<(&PlaceTy<'tcx, M::PointerTag>, mir::BasicBlock)>,
destination: &PlaceTy<'tcx, M::PointerTag>,
target: Option<mir::BasicBlock>,
mut unwind: StackPopUnwind,
) -> InterpResult<'tcx> {
trace!("eval_fn_call: {:#?}", fn_val);
@ -307,7 +310,15 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
let instance = match fn_val {
FnVal::Instance(instance) => instance,
FnVal::Other(extra) => {
return M::call_extra_fn(self, extra, caller_abi, args, ret, unwind);
return M::call_extra_fn(
self,
extra,
caller_abi,
args,
destination,
target,
unwind,
);
}
};
@ -315,7 +326,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
ty::InstanceDef::Intrinsic(def_id) => {
assert!(self.tcx.is_intrinsic(def_id));
// caller_fn_abi is not relevant here, we interpret the arguments directly for each intrinsic.
M::call_intrinsic(self, instance, args, ret, unwind)
M::call_intrinsic(self, instance, args, destination, target, unwind)
}
ty::InstanceDef::VtableShim(..)
| ty::InstanceDef::ReifyShim(..)
@ -326,7 +337,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
| ty::InstanceDef::Item(_) => {
// We need MIR for this fn
let Some((body, instance)) =
M::find_mir_or_eval_fn(self, instance, caller_abi, args, ret, unwind)? else {
M::find_mir_or_eval_fn(self, instance, caller_abi, args, destination, target, unwind)? else {
return Ok(());
};
@ -362,8 +373,8 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
self.push_stack_frame(
instance,
body,
ret.map(|p| p.0),
StackPopCleanup::Goto { ret: ret.map(|p| p.1), unwind },
destination,
StackPopCleanup::Goto { ret: target, unwind },
)?;
// If an error is raised here, pop the frame again to get an accurate backtrace.
@ -540,7 +551,8 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
(caller_abi, caller_fn_abi),
&args,
with_caller_location,
ret,
destination,
target,
unwind,
)
}
@ -582,7 +594,8 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
(Abi::Rust, fn_abi),
&[arg.into()],
false,
Some((&dest.into(), target)),
&dest.into(),
Some(target),
match unwind {
Some(cleanup) => StackPopUnwind::Cleanup(cleanup),
None => StackPopUnwind::Skip,

View File

@ -788,7 +788,7 @@ impl<'a, 'tcx> Promoter<'a, 'tcx> {
} else {
let terminator = self.source[loc.block].terminator_mut();
let target = match terminator.kind {
TerminatorKind::Call { destination: Some((_, target)), .. } => target,
TerminatorKind::Call { target: Some(target), .. } => target,
ref kind => {
span_bug!(terminator.source_info.span, "{:?} not promotable", kind);
}
@ -814,7 +814,8 @@ impl<'a, 'tcx> Promoter<'a, 'tcx> {
func,
args,
cleanup: None,
destination: Some((Place::from(new_temp), new_target)),
destination: Place::from(new_temp),
target: Some(new_target),
from_hir_call,
fn_span,
},
@ -1054,11 +1055,9 @@ pub fn is_const_fn_in_array_repeat_expression<'tcx>(
{
if let Operand::Constant(box Constant { literal, .. }) = func {
if let ty::FnDef(def_id, _) = *literal.ty().kind() {
if let Some((destination_place, _)) = destination {
if destination_place == place {
if ccx.tcx.is_const_fn(def_id) {
return true;
}
if destination == place {
if ccx.tcx.is_const_fn(def_id) {
return true;
}
}
}

View File

@ -673,7 +673,7 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
self.check_edge(location, *unwind, EdgeKind::Unwind);
}
}
TerminatorKind::Call { func, args, destination, cleanup, .. } => {
TerminatorKind::Call { func, args, destination, target, cleanup, .. } => {
let func_ty = func.ty(&self.body.local_decls, self.tcx);
match func_ty.kind() {
ty::FnPtr(..) | ty::FnDef(..) => {}
@ -682,7 +682,7 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
format!("encountered non-callable type {} in `Call` terminator", func_ty),
),
}
if let Some((_, target)) = destination {
if let Some(target) = target {
self.check_edge(location, *target, EdgeKind::Normal);
}
if let Some(cleanup) = cleanup {
@ -693,9 +693,7 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
// passed by a reference to the callee. Consequently they must be non-overlapping.
// Currently this simply checks for duplicate places.
self.place_cache.clear();
if let Some((destination, _)) = destination {
self.place_cache.push(destination.as_ref());
}
self.place_cache.push(destination.as_ref());
for arg in args {
if let Operand::Move(place) = arg {
self.place_cache.push(place.as_ref());

View File

@ -250,8 +250,10 @@ pub enum TerminatorKind<'tcx> {
/// This allows the memory occupied by "by-value" arguments to be
/// reused across function calls without duplicating the contents.
args: Vec<Operand<'tcx>>,
/// Destination for the return value. If none, the call necessarily diverges.
destination: Option<(Place<'tcx>, BasicBlock)>,
/// Where the returned value will be written
destination: Place<'tcx>,
/// Where to go after this call returns. If none, the call necessarily diverges.
target: Option<BasicBlock>,
/// Cleanups to be done if the call unwinds.
cleanup: Option<BasicBlock>,
/// `true` if this is from a call in HIR rather than from an overloaded
@ -415,13 +417,13 @@ impl<'tcx> TerminatorKind<'tcx> {
| GeneratorDrop
| Return
| Unreachable
| Call { destination: None, cleanup: None, .. }
| Call { target: None, cleanup: None, .. }
| InlineAsm { destination: None, cleanup: None, .. } => {
None.into_iter().chain((&[]).into_iter().copied())
}
Goto { target: t }
| Call { destination: None, cleanup: Some(t), .. }
| Call { destination: Some((_, t)), cleanup: None, .. }
| Call { target: None, cleanup: Some(t), .. }
| Call { target: Some(t), cleanup: None, .. }
| Yield { resume: t, drop: None, .. }
| DropAndReplace { target: t, unwind: None, .. }
| Drop { target: t, unwind: None, .. }
@ -431,7 +433,7 @@ impl<'tcx> TerminatorKind<'tcx> {
| InlineAsm { destination: None, cleanup: Some(t), .. } => {
Some(t).into_iter().chain((&[]).into_iter().copied())
}
Call { destination: Some((_, t)), cleanup: Some(ref u), .. }
Call { target: Some(t), cleanup: Some(ref u), .. }
| Yield { resume: t, drop: Some(ref u), .. }
| DropAndReplace { target: t, unwind: Some(ref u), .. }
| Drop { target: t, unwind: Some(ref u), .. }
@ -457,11 +459,11 @@ impl<'tcx> TerminatorKind<'tcx> {
| GeneratorDrop
| Return
| Unreachable
| Call { destination: None, cleanup: None, .. }
| Call { target: None, cleanup: None, .. }
| InlineAsm { destination: None, cleanup: None, .. } => None.into_iter().chain(&mut []),
Goto { target: ref mut t }
| Call { destination: None, cleanup: Some(ref mut t), .. }
| Call { destination: Some((_, ref mut t)), cleanup: None, .. }
| Call { target: None, cleanup: Some(ref mut t), .. }
| Call { target: Some(ref mut t), cleanup: None, .. }
| Yield { resume: ref mut t, drop: None, .. }
| DropAndReplace { target: ref mut t, unwind: None, .. }
| Drop { target: ref mut t, unwind: None, .. }
@ -471,7 +473,7 @@ impl<'tcx> TerminatorKind<'tcx> {
| InlineAsm { destination: None, cleanup: Some(ref mut t), .. } => {
Some(t).into_iter().chain(&mut [])
}
Call { destination: Some((_, ref mut t)), cleanup: Some(ref mut u), .. }
Call { target: Some(ref mut t), cleanup: Some(ref mut u), .. }
| Yield { resume: ref mut t, drop: Some(ref mut u), .. }
| DropAndReplace { target: ref mut t, unwind: Some(ref mut u), .. }
| Drop { target: ref mut t, unwind: Some(ref mut u), .. }
@ -590,9 +592,7 @@ impl<'tcx> TerminatorKind<'tcx> {
write!(fmt, "replace({:?} <- {:?})", place, value)
}
Call { func, args, destination, .. } => {
if let Some((destination, _)) = destination {
write!(fmt, "{:?} = ", destination)?;
}
write!(fmt, "{:?} = ", destination)?;
write!(fmt, "{:?}(", func)?;
for (index, arg) in args.iter().enumerate() {
if index > 0 {
@ -683,12 +683,12 @@ impl<'tcx> TerminatorKind<'tcx> {
.chain(iter::once("otherwise".into()))
.collect()
}),
Call { destination: Some(_), cleanup: Some(_), .. } => {
Call { target: Some(_), cleanup: Some(_), .. } => {
vec!["return".into(), "unwind".into()]
}
Call { destination: Some(_), cleanup: None, .. } => vec!["return".into()],
Call { destination: None, cleanup: Some(_), .. } => vec!["unwind".into()],
Call { destination: None, cleanup: None, .. } => vec![],
Call { target: Some(_), cleanup: None, .. } => vec!["return".into()],
Call { target: None, cleanup: Some(_), .. } => vec!["unwind".into()],
Call { target: None, cleanup: None, .. } => vec![],
Yield { drop: Some(_), .. } => vec!["resume".into(), "drop".into()],
Yield { drop: None, .. } => vec!["resume".into()],
DropAndReplace { unwind: None, .. } | Drop { unwind: None, .. } => {
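With the destination place split out, a Call's control-flow successors come only from `target` and `cleanup`, never from `destination`. A stand-in sketch of the match arms above, using plain `u32` block indices:

// Stand-in for the `successors` arms above; block indices are plain u32 here.
fn call_successors(target: Option<u32>, cleanup: Option<u32>) -> Vec<u32> {
    // Normal return edge first, then the unwind edge, matching the arm order above.
    target.into_iter().chain(cleanup).collect()
}

fn main() {
    assert_eq!(call_successors(Some(1), Some(2)), vec![1, 2]); // returns and may unwind
    assert_eq!(call_successors(Some(1), None), vec![1]);       // returns, no cleanup
    assert_eq!(call_successors(None, Some(2)), vec![2]);       // diverges, but may unwind
    assert!(call_successors(None, None).is_empty());           // diverges, never unwinds
}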

View File

@ -44,20 +44,15 @@ impl<'tcx> TypeFoldable<'tcx> for Terminator<'tcx> {
resume_arg: resume_arg.try_fold_with(folder)?,
drop,
},
Call { func, args, destination, cleanup, from_hir_call, fn_span } => {
let dest = destination
.map(|(loc, dest)| (loc.try_fold_with(folder).map(|loc| (loc, dest))))
.transpose()?;
Call {
func: func.try_fold_with(folder)?,
args: args.try_fold_with(folder)?,
destination: dest,
cleanup,
from_hir_call,
fn_span,
}
}
Call { func, args, destination, target, cleanup, from_hir_call, fn_span } => Call {
func: func.try_fold_with(folder)?,
args: args.try_fold_with(folder)?,
destination: destination.try_fold_with(folder)?,
target,
cleanup,
from_hir_call,
fn_span,
},
Assert { cond, expected, msg, target, cleanup } => {
use AssertKind::*;
let msg = match msg {
@ -113,9 +108,7 @@ impl<'tcx> TypeFoldable<'tcx> for Terminator<'tcx> {
}
Yield { ref value, .. } => value.visit_with(visitor),
Call { ref func, ref args, ref destination, .. } => {
if let Some((ref loc, _)) = *destination {
loc.visit_with(visitor)?;
};
destination.visit_with(visitor)?;
func.visit_with(visitor)?;
args.visit_with(visitor)
}

View File

@ -534,6 +534,7 @@ macro_rules! make_mir_visitor {
func,
args,
destination,
target: _,
cleanup: _,
from_hir_call: _,
fn_span: _
@ -542,13 +543,11 @@ macro_rules! make_mir_visitor {
for arg in args {
self.visit_operand(arg, location);
}
if let Some((destination, _)) = destination {
self.visit_place(
destination,
PlaceContext::MutatingUse(MutatingUseContext::Call),
location
);
}
self.visit_place(
destination,
PlaceContext::MutatingUse(MutatingUseContext::Call),
location
);
}
TerminatorKind::Assert {

View File

@ -141,7 +141,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
TerminatorKind::Call {
func: exchange_malloc,
args: vec![Operand::Move(size), Operand::Move(align)],
destination: Some((storage, success)),
destination: storage,
target: Some(success),
cleanup: None,
from_hir_call: false,
fn_span: expr_span,

View File

@ -255,18 +255,19 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
func: fun,
args,
cleanup: None,
destination,
// The presence or absence of a return edge affects control-flow sensitive
// MIR checks and ultimately whether code is accepted or not. We can only
// omit the return edge if a return type is visibly uninhabited to a module
// that makes the call.
destination: if this.tcx.is_ty_uninhabited_from(
target: if this.tcx.is_ty_uninhabited_from(
this.parent_module,
expr.ty,
this.param_env,
) {
None
} else {
Some((destination, success))
Some(success)
},
from_hir_call,
fn_span,
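The builder above now records the destination place unconditionally and only drops the return edge when the callee's return type is visibly uninhabited. A sketch of that decision, with a plain bool standing in for the `tcx.is_ty_uninhabited_from(...)` query:

// The destination place is always present; only the return edge is omitted
// when the return type is uninhabited (e.g. `fn() -> !`).
fn return_edge(success_block: u32, return_ty_is_uninhabited: bool) -> Option<u32> {
    if return_ty_is_uninhabited { None } else { Some(success_block) }
}

fn main() {
    assert_eq!(return_edge(7, false), Some(7)); // ordinary call keeps its edge
    assert_eq!(return_edge(7, true), None);     // diverging call: no `target`
}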

View File

@ -444,7 +444,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
literal: method,
})),
args: vec![val, expect],
destination: Some((eq_result, eq_block)),
destination: eq_result,
target: Some(eq_block),
cleanup: None,
from_hir_call: false,
fn_span: source_info.span,

View File

@ -625,7 +625,8 @@ where
kind: TerminatorKind::Call {
func: Operand::function_handle(tcx, drop_fn, substs, self.source_info.span),
args: vec![Operand::Move(Place::from(ref_place))],
destination: Some((unit_temp, succ)),
destination: unit_temp,
target: Some(succ),
cleanup: unwind.into_option(),
from_hir_call: true,
fn_span: self.source_info.span,
@ -963,7 +964,8 @@ where
let call = TerminatorKind::Call {
func: Operand::function_handle(tcx, free_func, substs, self.source_info.span),
args,
destination: Some((unit_temp, target)),
destination: unit_temp,
target: Some(target),
cleanup: None,
from_hir_call: false,
fn_span: self.source_info.span,

View File

@ -237,14 +237,12 @@ impl Direction for Backward {
// Apply terminator-specific edge effects.
//
// FIXME(ecstaticmorse): Avoid cloning the exit state unconditionally.
mir::TerminatorKind::Call { destination: Some((return_place, dest)), .. }
if dest == bb =>
{
mir::TerminatorKind::Call { destination, target: Some(dest), .. } if dest == bb => {
let mut tmp = exit_state.clone();
analysis.apply_call_return_effect(
&mut tmp,
pred,
CallReturnPlaces::Call(return_place),
CallReturnPlaces::Call(destination),
);
propagate(pred, &tmp);
}
@ -532,20 +530,28 @@ impl Direction for Forward {
propagate(target, exit_state);
}
Call { cleanup, destination, func: _, args: _, from_hir_call: _, fn_span: _ } => {
Call {
cleanup,
destination,
target,
func: _,
args: _,
from_hir_call: _,
fn_span: _,
} => {
if let Some(unwind) = cleanup {
if dead_unwinds.map_or(true, |dead| !dead.contains(bb)) {
propagate(unwind, exit_state);
}
}
if let Some((dest_place, target)) = destination {
if let Some(target) = target {
// N.B.: This must be done *last*, otherwise the unwind path will see the call
// return effect.
analysis.apply_call_return_effect(
exit_state,
bb,
CallReturnPlaces::Call(dest_place),
CallReturnPlaces::Call(destination),
);
propagate(target, exit_state);
}
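The ordering constraint noted above matters: the unwind edge must be propagated before the call-return effect is applied, so the unwind path never observes the destination as written. A runnable stand-in sketch of that ordering (plain types replace the dataflow analysis and domain):

// Stand-in for the edge-propagation order above; not the real Analysis trait.
#[derive(Clone, Debug, PartialEq)]
struct State(Vec<&'static str>);

fn propagate_call_edges(
    exit_state: &mut State,
    target: Option<u32>,
    cleanup: Option<u32>,
    mut propagate: impl FnMut(u32, &State),
) {
    if let Some(unwind) = cleanup {
        // The unwind successor sees the state without the return value.
        propagate(unwind, exit_state);
    }
    if let Some(target) = target {
        // Done last, mirroring apply_call_return_effect above.
        exit_state.0.push("destination written");
        propagate(target, exit_state);
    }
}

fn main() {
    let mut seen = Vec::new();
    let mut state = State(Vec::new());
    propagate_call_edges(&mut state, Some(1), Some(2), |bb, s| seen.push((bb, s.clone())));
    assert_eq!(seen[0], (2, State(Vec::new())));                  // unwind edge: unchanged state
    assert_eq!(seen[1], (1, State(vec!["destination written"]))); // return edge: sees the effect
}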

View File

@ -218,7 +218,7 @@ where
self.results.seek_to_block_end(block);
if self.results.get() != &block_start_state || A::Direction::is_backward() {
let after_terminator_name = match terminator.kind {
mir::TerminatorKind::Call { destination: Some(_), .. } => "(on unwind)",
mir::TerminatorKind::Call { target: Some(_), .. } => "(on unwind)",
_ => "(on end)",
};
@ -231,14 +231,14 @@ where
// for the basic block itself. That way, we could display terminator-specific effects for
// backward dataflow analyses as well as effects for `SwitchInt` terminators.
match terminator.kind {
mir::TerminatorKind::Call { destination: Some((return_place, _)), .. } => {
mir::TerminatorKind::Call { destination, .. } => {
self.write_row(w, "", "(on successful return)", |this, w, fmt| {
let state_on_unwind = this.results.get().clone();
this.results.apply_custom_effect(|analysis, state| {
analysis.apply_call_return_effect(
state,
block,
CallReturnPlaces::Call(return_place),
CallReturnPlaces::Call(destination),
);
});

View File

@ -37,7 +37,8 @@ fn mock_body<'tcx>() -> mir::Body<'tcx> {
mir::TerminatorKind::Call {
func: mir::Operand::Copy(dummy_place.clone()),
args: vec![],
destination: Some((dummy_place.clone(), mir::START_BLOCK)),
destination: dummy_place.clone(),
target: Some(mir::START_BLOCK),
cleanup: None,
from_hir_call: false,
fn_span: DUMMY_SP,
@ -50,7 +51,8 @@ fn mock_body<'tcx>() -> mir::Body<'tcx> {
mir::TerminatorKind::Call {
func: mir::Operand::Copy(dummy_place.clone()),
args: vec![],
destination: Some((dummy_place.clone(), mir::START_BLOCK)),
destination: dummy_place.clone(),
target: Some(mir::START_BLOCK),
cleanup: None,
from_hir_call: false,
fn_span: DUMMY_SP,

View File

@ -169,8 +169,8 @@ impl<'mir, 'tcx> crate::GenKillAnalysis<'tcx> for MaybeRequiresStorage<'mir, 'tc
self.borrowed_locals.borrow().analysis().terminator_effect(trans, terminator, loc);
match &terminator.kind {
TerminatorKind::Call { destination: Some((place, _)), .. } => {
trans.gen(place.local);
TerminatorKind::Call { destination, .. } => {
trans.gen(destination.local);
}
// Note that we do *not* gen the `resume_arg` of `Yield` terminators. The reason for
@ -198,8 +198,7 @@ impl<'mir, 'tcx> crate::GenKillAnalysis<'tcx> for MaybeRequiresStorage<'mir, 'tc
// Nothing to do for these. Match exhaustively so this fails to compile when new
// variants are added.
TerminatorKind::Call { destination: None, .. }
| TerminatorKind::Abort
TerminatorKind::Abort
| TerminatorKind::Assert { .. }
| TerminatorKind::Drop { .. }
| TerminatorKind::DropAndReplace { .. }
@ -225,8 +224,8 @@ impl<'mir, 'tcx> crate::GenKillAnalysis<'tcx> for MaybeRequiresStorage<'mir, 'tc
// and after the call returns successfully, but not after a panic.
// Since `propagate_call_unwind` doesn't exist, we have to kill the
// destination here, and then gen it again in `call_return_effect`.
TerminatorKind::Call { destination: Some((place, _)), .. } => {
trans.kill(place.local);
TerminatorKind::Call { destination, .. } => {
trans.kill(destination.local);
}
// The same applies to InlineAsm outputs.
@ -236,8 +235,7 @@ impl<'mir, 'tcx> crate::GenKillAnalysis<'tcx> for MaybeRequiresStorage<'mir, 'tc
// Nothing to do for these. Match exhaustively so this fails to compile when new
// variants are added.
TerminatorKind::Call { destination: None, .. }
| TerminatorKind::Yield { .. }
TerminatorKind::Yield { .. }
| TerminatorKind::Abort
| TerminatorKind::Assert { .. }
| TerminatorKind::Drop { .. }

View File

@ -376,7 +376,8 @@ impl<'b, 'a, 'tcx> Gatherer<'b, 'a, 'tcx> {
TerminatorKind::Call {
ref func,
ref args,
ref destination,
destination,
target,
cleanup: _,
from_hir_call: _,
fn_span: _,
@ -385,7 +386,7 @@ impl<'b, 'a, 'tcx> Gatherer<'b, 'a, 'tcx> {
for arg in args {
self.gather_operand(arg);
}
if let Some((destination, _bb)) = *destination {
if let Some(_bb) = target {
self.create_move_path(destination);
self.gather_init(destination.as_ref(), InitKind::NonPanicPathOnly);
}

View File

@ -50,12 +50,7 @@ impl AddCallGuards {
for block in body.basic_blocks_mut() {
match block.terminator {
Some(Terminator {
kind:
TerminatorKind::Call {
destination: Some((_, ref mut destination)),
cleanup,
..
},
kind: TerminatorKind::Call { target: Some(ref mut destination), cleanup, .. },
source_info,
}) if pred_count[*destination] > 1
&& (cleanup.is_some() || self == &AllCallEdges) =>

View File

@ -130,11 +130,11 @@ impl<'tcx> MirPass<'tcx> for AddRetag {
.iter_mut()
.filter_map(|block_data| {
match block_data.terminator().kind {
TerminatorKind::Call { destination: Some(ref destination), .. }
if needs_retag(&destination.0) =>
TerminatorKind::Call { target: Some(target), destination, .. }
if needs_retag(&destination) =>
{
// Remember the return destination for later
Some((block_data.terminator().source_info, destination.0, destination.1))
Some((block_data.terminator().source_info, destination, target))
}
// `Drop` is also a call, but it doesn't return anything so we are good.

View File

@ -200,7 +200,8 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for ConstPropMachine<'mir, 'tcx>
_instance: ty::Instance<'tcx>,
_abi: Abi,
_args: &[OpTy<'tcx>],
_ret: Option<(&PlaceTy<'tcx>, BasicBlock)>,
_destination: &PlaceTy<'tcx>,
_target: Option<BasicBlock>,
_unwind: StackPopUnwind,
) -> InterpResult<'tcx, Option<(&'mir Body<'tcx>, ty::Instance<'tcx>)>> {
Ok(None)
@ -210,7 +211,8 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for ConstPropMachine<'mir, 'tcx>
_ecx: &mut InterpCx<'mir, 'tcx, Self>,
_instance: ty::Instance<'tcx>,
_args: &[OpTy<'tcx>],
_ret: Option<(&PlaceTy<'tcx>, BasicBlock)>,
_destination: &PlaceTy<'tcx>,
_target: Option<BasicBlock>,
_unwind: StackPopUnwind,
) -> InterpResult<'tcx> {
throw_machine_stop_str!("calling intrinsics isn't supported in ConstProp")
@ -384,24 +386,22 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
ConstPropMachine::new(only_propagate_inside_block_locals, can_const_prop),
);
let ret = ecx
let ret_layout = ecx
.layout_of(EarlyBinder(body.return_ty()).subst(tcx, substs))
.ok()
// Don't bother allocating memory for ZST types which have no values
// or for large values.
.filter(|ret_layout| {
!ret_layout.is_zst() && ret_layout.size < Size::from_bytes(MAX_ALLOC_LIMIT)
})
.map(|ret_layout| {
ecx.allocate(ret_layout, MemoryKind::Stack)
.expect("couldn't perform small allocation")
.into()
});
// Don't bother allocating memory for large values.
.filter(|ret_layout| ret_layout.size < Size::from_bytes(MAX_ALLOC_LIMIT))
.unwrap_or_else(|| ecx.layout_of(tcx.types.unit).unwrap());
let ret = ecx
.allocate(ret_layout, MemoryKind::Stack)
.expect("couldn't perform small allocation")
.into();
ecx.push_stack_frame(
Instance::new(def_id, substs),
dummy_body,
ret.as_ref(),
&ret,
StackPopCleanup::Root { cleanup: false },
)
.expect("failed to push initial stack frame");

View File

@ -192,7 +192,8 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for ConstPropMachine<'mir, 'tcx>
_instance: ty::Instance<'tcx>,
_abi: Abi,
_args: &[OpTy<'tcx>],
_ret: Option<(&PlaceTy<'tcx>, BasicBlock)>,
_destination: &PlaceTy<'tcx>,
_target: Option<BasicBlock>,
_unwind: StackPopUnwind,
) -> InterpResult<'tcx, Option<(&'mir Body<'tcx>, ty::Instance<'tcx>)>> {
Ok(None)
@ -202,7 +203,8 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for ConstPropMachine<'mir, 'tcx>
_ecx: &mut InterpCx<'mir, 'tcx, Self>,
_instance: ty::Instance<'tcx>,
_args: &[OpTy<'tcx>],
_ret: Option<(&PlaceTy<'tcx>, BasicBlock)>,
_destination: &PlaceTy<'tcx>,
_target: Option<BasicBlock>,
_unwind: StackPopUnwind,
) -> InterpResult<'tcx> {
throw_machine_stop_str!("calling intrinsics isn't supported in ConstProp")
@ -377,24 +379,22 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
ConstPropMachine::new(only_propagate_inside_block_locals, can_const_prop),
);
let ret = ecx
let ret_layout = ecx
.layout_of(EarlyBinder(body.return_ty()).subst(tcx, substs))
.ok()
// Don't bother allocating memory for ZST types which have no values
// or for large values.
.filter(|ret_layout| {
!ret_layout.is_zst() && ret_layout.size < Size::from_bytes(MAX_ALLOC_LIMIT)
})
.map(|ret_layout| {
ecx.allocate(ret_layout, MemoryKind::Stack)
.expect("couldn't perform small allocation")
.into()
});
// Don't bother allocating memory for large values.
.filter(|ret_layout| ret_layout.size < Size::from_bytes(MAX_ALLOC_LIMIT))
.unwrap_or_else(|| ecx.layout_of(tcx.types.unit).unwrap());
let ret = ecx
.allocate(ret_layout, MemoryKind::Stack)
.expect("couldn't perform small allocation")
.into();
ecx.push_stack_frame(
Instance::new(def_id, substs),
dummy_body,
ret.as_ref(),
&ret,
StackPopCleanup::Root { cleanup: false },
)
.expect("failed to push initial stack frame");

View File

@ -84,7 +84,7 @@ impl<'tcx> MockBlocks<'tcx> {
fn link(&mut self, from_block: BasicBlock, to_block: BasicBlock) {
match self.blocks[from_block].terminator_mut().kind {
TerminatorKind::Assert { ref mut target, .. }
| TerminatorKind::Call { destination: Some((_, ref mut target)), .. }
| TerminatorKind::Call { target: Some(ref mut target), .. }
| TerminatorKind::Drop { ref mut target, .. }
| TerminatorKind::DropAndReplace { ref mut target, .. }
| TerminatorKind::FalseEdge { real_target: ref mut target, .. }
@ -139,7 +139,8 @@ impl<'tcx> MockBlocks<'tcx> {
TerminatorKind::Call {
func: Operand::Copy(self.dummy_place.clone()),
args: vec![],
destination: Some((self.dummy_place.clone(), TEMP_BLOCK)),
destination: self.dummy_place.clone(),
target: Some(TEMP_BLOCK),
cleanup: None,
from_hir_call: false,
fn_span: DUMMY_SP,
@ -182,7 +183,7 @@ fn debug_basic_blocks<'tcx>(mir_body: &Body<'tcx>) -> String {
let sp = format!("(span:{},{})", span.lo().to_u32(), span.hi().to_u32());
match kind {
TerminatorKind::Assert { target, .. }
| TerminatorKind::Call { destination: Some((_, target)), .. }
| TerminatorKind::Call { target: Some(target), .. }
| TerminatorKind::Drop { target, .. }
| TerminatorKind::DropAndReplace { target, .. }
| TerminatorKind::FalseEdge { real_target: target, .. }

View File

@ -575,7 +575,8 @@ impl<'a> Conflicts<'a> {
TerminatorKind::Call {
func,
args,
destination: Some((dest_place, _)),
destination,
target: _,
cleanup: _,
from_hir_call: _,
fn_span: _,
@ -583,9 +584,9 @@ impl<'a> Conflicts<'a> {
// No arguments may overlap with the destination.
for arg in args.iter().chain(Some(func)) {
if let Some(place) = arg.place() {
if !place.is_indirect() && !dest_place.is_indirect() {
if !place.is_indirect() && !destination.is_indirect() {
self.record_local_conflict(
dest_place.local,
destination.local,
place.local,
"call dest/arg overlap",
);
@ -691,7 +692,6 @@ impl<'a> Conflicts<'a> {
}
TerminatorKind::Goto { .. }
| TerminatorKind::Call { destination: None, .. }
| TerminatorKind::SwitchInt { .. }
| TerminatorKind::Resume
| TerminatorKind::Abort

View File

@ -494,15 +494,13 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
fn drop_flags_for_fn_rets(&mut self) {
for (bb, data) in self.body.basic_blocks().iter_enumerated() {
if let TerminatorKind::Call {
destination: Some((ref place, tgt)),
cleanup: Some(_),
..
destination, target: Some(tgt), cleanup: Some(_), ..
} = data.terminator().kind
{
assert!(!self.patch.is_patched(bb));
let loc = Location { block: tgt, statement_index: 0 };
let path = self.move_data().rev_lookup.find(place.as_ref());
let path = self.move_data().rev_lookup.find(destination.as_ref());
on_lookup_result_bits(self.tcx, self.body, self.move_data(), path, |child| {
self.set_drop_flag(loc, child, DropFlagState::Present)
});
@ -576,14 +574,13 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> {
// There may be a critical edge after this call,
// so mark the return as initialized *before* the
// call.
if let TerminatorKind::Call {
destination: Some((ref place, _)), cleanup: None, ..
} = data.terminator().kind
if let TerminatorKind::Call { destination, target: Some(_), cleanup: None, .. } =
data.terminator().kind
{
assert!(!self.patch.is_patched(bb));
let loc = Location { block: bb, statement_index: data.statements.len() };
let path = self.move_data().rev_lookup.find(place.as_ref());
let path = self.move_data().rev_lookup.find(destination.as_ref());
on_lookup_result_bits(self.tcx, self.body, self.move_data(), path, |child| {
self.set_drop_flag(loc, child, DropFlagState::Present)
});

View File

@ -37,6 +37,7 @@ impl<'tcx> Visitor<'tcx> for FunctionItemRefChecker<'_, 'tcx> {
func,
args,
destination: _,
target: _,
cleanup: _,
from_hir_call: _,
fn_span: _,

View File

@ -1459,12 +1459,13 @@ impl<'tcx> Visitor<'tcx> for EnsureGeneratorFieldAssignmentsNeverAlias<'_> {
TerminatorKind::Call {
func,
args,
destination: Some((dest, _)),
destination,
target: Some(_),
cleanup: _,
from_hir_call: _,
fn_span: _,
} => {
self.check_assigned_place(*dest, |this| {
self.check_assigned_place(*destination, |this| {
this.visit_operand(func, location);
for arg in args {
this.visit_operand(arg, location);

View File

@ -248,7 +248,7 @@ impl<'tcx> Inliner<'tcx> {
) -> Option<CallSite<'tcx>> {
// Only consider direct calls to functions
let terminator = bb_data.terminator();
if let TerminatorKind::Call { ref func, ref destination, .. } = terminator.kind {
if let TerminatorKind::Call { ref func, target, .. } = terminator.kind {
let func_ty = func.ty(caller_body, self.tcx);
if let ty::FnDef(def_id, substs) = *func_ty.kind() {
// To resolve an instance its substs have to be fully normalized.
@ -266,7 +266,7 @@ impl<'tcx> Inliner<'tcx> {
callee,
fn_sig,
block: bb,
target: destination.map(|(_, target)| target),
target,
source_info: terminator.source_info,
});
}
@ -395,7 +395,7 @@ impl<'tcx> Inliner<'tcx> {
}
}
TerminatorKind::Unreachable | TerminatorKind::Call { destination: None, .. }
TerminatorKind::Unreachable | TerminatorKind::Call { target: None, .. }
if first_block =>
{
// If the function always diverges, don't inline
@ -512,27 +512,22 @@ impl<'tcx> Inliner<'tcx> {
false
}
let dest = if let Some((destination_place, _)) = destination {
if dest_needs_borrow(destination_place) {
trace!("creating temp for return destination");
let dest = Rvalue::Ref(
self.tcx.lifetimes.re_erased,
BorrowKind::Mut { allow_two_phase_borrow: false },
destination_place,
);
let dest_ty = dest.ty(caller_body, self.tcx);
let temp = Place::from(self.new_call_temp(caller_body, &callsite, dest_ty));
caller_body[callsite.block].statements.push(Statement {
source_info: callsite.source_info,
kind: StatementKind::Assign(Box::new((temp, dest))),
});
self.tcx.mk_place_deref(temp)
} else {
destination_place
}
let dest = if dest_needs_borrow(destination) {
trace!("creating temp for return destination");
let dest = Rvalue::Ref(
self.tcx.lifetimes.re_erased,
BorrowKind::Mut { allow_two_phase_borrow: false },
destination,
);
let dest_ty = dest.ty(caller_body, self.tcx);
let temp = Place::from(self.new_call_temp(caller_body, &callsite, dest_ty));
caller_body[callsite.block].statements.push(Statement {
source_info: callsite.source_info,
kind: StatementKind::Assign(Box::new((temp, dest))),
});
self.tcx.mk_place_deref(temp)
} else {
trace!("creating temp for return place");
Place::from(self.new_call_temp(caller_body, &callsite, callee_body.return_ty()))
destination
};
// Copy the arguments if needed.
@ -914,8 +909,8 @@ impl<'tcx> MutVisitor<'tcx> for Integrator<'_, 'tcx> {
*unwind = self.cleanup_block;
}
}
TerminatorKind::Call { ref mut destination, ref mut cleanup, .. } => {
if let Some((_, ref mut tgt)) = *destination {
TerminatorKind::Call { ref mut target, ref mut cleanup, .. } => {
if let Some(ref mut tgt) = *target {
*tgt = self.map_block(*tgt);
}
if let Some(tgt) = *cleanup {

View File

@ -141,7 +141,7 @@ impl<'tcx> InstCombineContext<'tcx, '_> {
terminator: &mut Terminator<'tcx>,
statements: &mut Vec<Statement<'tcx>>,
) {
let TerminatorKind::Call { func, args, destination, .. } = &mut terminator.kind
let TerminatorKind::Call { func, args, destination, target, .. } = &mut terminator.kind
else { return };
// It's definitely not a clone if there are multiple arguments
@ -149,7 +149,7 @@ impl<'tcx> InstCombineContext<'tcx, '_> {
return;
}
let Some((destination_place, destination_block)) = *destination
let Some(destination_block) = *target
else { return };
// Only bother looking more if it's easy to know what we're calling
@ -193,7 +193,7 @@ impl<'tcx> InstCombineContext<'tcx, '_> {
statements.push(Statement {
source_info: terminator.source_info,
kind: StatementKind::Assign(Box::new((
destination_place,
*destination,
Rvalue::Use(Operand::Copy(
arg_place.project_deeper(&[ProjectionElem::Deref], self.tcx),
)),

View File

@ -14,7 +14,9 @@ impl<'tcx> MirPass<'tcx> for LowerIntrinsics {
let (basic_blocks, local_decls) = body.basic_blocks_and_local_decls_mut();
for block in basic_blocks {
let terminator = block.terminator.as_mut().unwrap();
if let TerminatorKind::Call { func, args, destination, .. } = &mut terminator.kind {
if let TerminatorKind::Call { func, args, destination, target, .. } =
&mut terminator.kind
{
let func_ty = func.ty(local_decls, tcx);
let Some((intrinsic_name, substs)) = resolve_rust_intrinsic(tcx, func_ty) else {
continue;
@@ -24,11 +26,11 @@ impl<'tcx> MirPass<'tcx> for LowerIntrinsics {
terminator.kind = TerminatorKind::Unreachable;
}
sym::forget => {
if let Some((destination, target)) = *destination {
if let Some(target) = *target {
block.statements.push(Statement {
source_info: terminator.source_info,
kind: StatementKind::Assign(Box::new((
destination,
*destination,
Rvalue::Use(Operand::Constant(Box::new(Constant {
span: terminator.source_info.span,
user_ty: None,
@@ -40,7 +42,7 @@ impl<'tcx> MirPass<'tcx> for LowerIntrinsics {
}
}
sym::copy_nonoverlapping => {
let target = destination.unwrap().1;
let target = target.unwrap();
let mut args = args.drain(..);
block.statements.push(Statement {
source_info: terminator.source_info,
@@ -61,7 +63,7 @@ impl<'tcx> MirPass<'tcx> for LowerIntrinsics {
terminator.kind = TerminatorKind::Goto { target };
}
sym::wrapping_add | sym::wrapping_sub | sym::wrapping_mul => {
if let Some((destination, target)) = *destination {
if let Some(target) = *target {
let lhs;
let rhs;
{
@@ -78,7 +80,7 @@ impl<'tcx> MirPass<'tcx> for LowerIntrinsics {
block.statements.push(Statement {
source_info: terminator.source_info,
kind: StatementKind::Assign(Box::new((
destination,
*destination,
Rvalue::BinaryOp(bin_op, Box::new((lhs, rhs))),
))),
});
@@ -91,7 +93,7 @@ impl<'tcx> MirPass<'tcx> for LowerIntrinsics {
// during codegen. Issue #35310.
}
sym::size_of | sym::min_align_of => {
if let Some((destination, target)) = *destination {
if let Some(target) = *target {
let tp_ty = substs.type_at(0);
let null_op = match intrinsic_name {
sym::size_of => NullOp::SizeOf,
@@ -101,7 +103,7 @@ impl<'tcx> MirPass<'tcx> for LowerIntrinsics {
block.statements.push(Statement {
source_info: terminator.source_info,
kind: StatementKind::Assign(Box::new((
destination,
*destination,
Rvalue::NullaryOp(null_op, tp_ty),
))),
});
@@ -109,14 +111,12 @@ impl<'tcx> MirPass<'tcx> for LowerIntrinsics {
}
}
sym::discriminant_value => {
if let (Some((destination, target)), Some(arg)) =
(*destination, args[0].place())
{
if let (Some(target), Some(arg)) = (*target, args[0].place()) {
let arg = tcx.mk_place_deref(arg);
block.statements.push(Statement {
source_info: terminator.source_info,
kind: StatementKind::Assign(Box::new((
destination,
*destination,
Rvalue::Discriminant(arg),
))),
});
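
In the LowerIntrinsics cases above, the overall shape of the pass is unchanged but the guard moves: the assignment can always name *destination, and only the decision to emit it and to rewrite the call into a Goto hinges on target being present. The following is a rough, self-contained model of that control flow, using simplified stand-in types and an illustrative rvalue placeholder rather than the real MIR structures.

// Simplified stand-ins for the MIR pieces the pass manipulates.
#[derive(Clone, Copy, Debug)]
struct Place(usize);
#[derive(Clone, Copy, Debug)]
struct BasicBlock(usize);

#[derive(Debug)]
enum Terminator {
    Call { destination: Place, target: Option<BasicBlock> },
    Goto { target: BasicBlock },
}

#[derive(Debug)]
struct Statement {
    assign_to: Place,
    // Stand-in for the Rvalue the real pass builds (NullaryOp, BinaryOp, ...).
    rvalue: &'static str,
}

// Replace an intrinsic-style call with a plain assignment plus a Goto, but only
// when the call has somewhere to return to; a diverging call is left untouched.
fn lower_intrinsic(term: &mut Terminator, statements: &mut Vec<Statement>, rvalue: &'static str) {
    if let Terminator::Call { destination, target } = *term {
        if let Some(target) = target {
            statements.push(Statement { assign_to: destination, rvalue });
            *term = Terminator::Goto { target };
        }
    }
}

fn main() {
    let mut term = Terminator::Call { destination: Place(5), target: Some(BasicBlock(2)) };
    let mut statements = Vec::new();
    lower_intrinsic(&mut term, &mut statements, "NullaryOp(SizeOf, T)");
    println!("{:?}", term);
    println!("{:?}", statements);
}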


@@ -52,7 +52,8 @@ fn lower_slice_len_call<'tcx>(
TerminatorKind::Call {
func,
args,
destination: Some((dest, bb)),
destination,
target: Some(bb),
cleanup: None,
from_hir_call: true,
..
@@ -73,7 +74,8 @@ fn lower_slice_len_call<'tcx>(
// make new RValue for Len
let deref_arg = tcx.mk_place_deref(arg);
let r_value = Rvalue::Len(deref_arg);
let len_statement_kind = StatementKind::Assign(Box::new((*dest, r_value)));
let len_statement_kind =
StatementKind::Assign(Box::new((*destination, r_value)));
let add_statement =
Statement { kind: len_statement_kind, source_info: terminator.source_info };


@@ -450,7 +450,8 @@ impl<'tcx> CloneShimBuilder<'tcx> {
TerminatorKind::Call {
func,
args: vec![Operand::Move(ref_loc)],
destination: Some((dest, next)),
destination: dest,
target: Some(next),
cleanup: Some(cleanup),
from_hir_call: true,
fn_span: self.span,
@@ -676,7 +677,8 @@ fn build_call_shim<'tcx>(
TerminatorKind::Call {
func: callee,
args,
destination: Some((Place::return_place(), BasicBlock::new(1))),
destination: Place::return_place(),
target: Some(BasicBlock::new(1)),
cleanup: if let Some(Adjustment::RefMut) = rcvr_adjustment {
Some(BasicBlock::new(3))
} else {
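
On the construction side (the clone shim and build_call_shim hunks above) the migration runs the other way: instead of packing Some((place, block)) into one field, the builder fills destination and target independently. Here is a small illustrative sketch with stand-in types rather than the real TerminatorKind.

#[derive(Debug, Clone, Copy)]
struct Place(usize);
#[derive(Debug, Clone, Copy)]
struct BasicBlock(usize);

#[derive(Debug)]
enum TerminatorKind {
    Call {
        destination: Place,
        target: Option<BasicBlock>,
        cleanup: Option<BasicBlock>,
    },
}

// By MIR convention, local _0 is the return place.
fn return_place() -> Place {
    Place(0)
}

// Mirrors the shape build_call_shim now produces: the destination is the shim's
// return place, the happy path continues in block 1, and cleanup (when the
// receiver was reborrowed mutably) goes to block 3.
fn call_terminator(needs_cleanup: bool) -> TerminatorKind {
    TerminatorKind::Call {
        destination: return_place(),
        target: Some(BasicBlock(1)),
        cleanup: if needs_cleanup { Some(BasicBlock(3)) } else { None },
    }
}

fn main() {
    println!("{:?}", call_terminator(true));
    println!("{:?}", call_terminator(false));
}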


@@ -10,7 +10,7 @@
// CHECK: @STATIC = {{.*}}, align 4
// This checks the constants from inline_enum_const
// CHECK: @alloc12 = {{.*}}, align 2
// CHECK: @alloc14 = {{.*}}, align 2
// This checks the constants from {low,high}_align_const, they share the same
// constant, but the alignment differs, so the higher one should be used


@@ -16,7 +16,7 @@
bb1: {
StorageLive(_2); // scope 0 at $SRC_DIR/std/src/panic.rs:LL:COL
begin_panic::<&str>(const "explicit panic"); // scope 0 at $SRC_DIR/std/src/panic.rs:LL:COL
_2 = begin_panic::<&str>(const "explicit panic"); // scope 0 at $SRC_DIR/std/src/panic.rs:LL:COL
// mir::Constant
// + span: $SRC_DIR/std/src/panic.rs:LL:COL
// + literal: Const { ty: fn(&str) -> ! {begin_panic::<&str>}, val: Value(Scalar(<ZST>)) }


@@ -5,17 +5,15 @@
let mut _0: (); // return place in scope 0 at $DIR/inline-diverging.rs:7:12: 7:12
let mut _1: !; // in scope 0 at $DIR/inline-diverging.rs:7:12: 9:2
let _2: !; // in scope 0 at $DIR/inline-diverging.rs:8:5: 8:12
+ let mut _3: !; // in scope 0 at $DIR/inline-diverging.rs:8:5: 8:12
+ scope 1 (inlined sleep) { // at $DIR/inline-diverging.rs:8:5: 8:12
+ }
bb0: {
StorageLive(_2); // scope 0 at $DIR/inline-diverging.rs:8:5: 8:12
- sleep(); // scope 0 at $DIR/inline-diverging.rs:8:5: 8:12
- _2 = sleep(); // scope 0 at $DIR/inline-diverging.rs:8:5: 8:12
- // mir::Constant
- // + span: $DIR/inline-diverging.rs:8:5: 8:10
- // + literal: Const { ty: fn() -> ! {sleep}, val: Value(Scalar(<ZST>)) }
+ StorageLive(_3); // scope 0 at $DIR/inline-diverging.rs:8:5: 8:12
+ goto -> bb1; // scope 0 at $DIR/inline-diverging.rs:8:5: 8:12
+ }
+


@@ -9,8 +9,8 @@
let mut _4: i32; // in scope 0 at $DIR/inline-diverging.rs:14:9: 14:10
let mut _5: !; // in scope 0 at $DIR/inline-diverging.rs:15:12: 17:6
let _6: !; // in scope 0 at $DIR/inline-diverging.rs:16:9: 16:16
+ let mut _7: !; // in scope 0 at $DIR/inline-diverging.rs:16:9: 16:16
+ scope 1 (inlined panic) { // at $DIR/inline-diverging.rs:16:9: 16:16
+ let mut _7: !; // in scope 1 at $SRC_DIR/std/src/panic.rs:LL:COL
+ }
bb0: {
@@ -33,9 +33,9 @@
bb2: {
StorageLive(_6); // scope 0 at $DIR/inline-diverging.rs:16:9: 16:16
- panic(); // scope 0 at $DIR/inline-diverging.rs:16:9: 16:16
+ StorageLive(_7); // scope 0 at $DIR/inline-diverging.rs:16:9: 16:16
+ begin_panic::<&str>(const "explicit panic"); // scope 1 at $SRC_DIR/std/src/panic.rs:LL:COL
- _6 = panic(); // scope 0 at $DIR/inline-diverging.rs:16:9: 16:16
+ StorageLive(_7); // scope 1 at $SRC_DIR/std/src/panic.rs:LL:COL
+ _7 = begin_panic::<&str>(const "explicit panic"); // scope 1 at $SRC_DIR/std/src/panic.rs:LL:COL
// mir::Constant
- // + span: $DIR/inline-diverging.rs:16:9: 16:14
- // + literal: Const { ty: fn() -> ! {panic}, val: Value(Scalar(<ZST>)) }


@@ -4,22 +4,21 @@
fn h() -> () {
let mut _0: (); // return place in scope 0 at $DIR/inline-diverging.rs:21:12: 21:12
let _1: (!, !); // in scope 0 at $DIR/inline-diverging.rs:22:5: 22:22
+ let mut _2: (!, !); // in scope 0 at $DIR/inline-diverging.rs:22:5: 22:22
+ let mut _3: fn() -> ! {sleep}; // in scope 0 at $DIR/inline-diverging.rs:22:5: 22:22
+ let mut _10: (); // in scope 0 at $DIR/inline-diverging.rs:27:13: 27:16
+ let mut _11: (); // in scope 0 at $DIR/inline-diverging.rs:28:13: 28:16
+ let mut _2: fn() -> ! {sleep}; // in scope 0 at $DIR/inline-diverging.rs:22:5: 22:22
+ let mut _9: (); // in scope 0 at $DIR/inline-diverging.rs:27:13: 27:16
+ let mut _10: (); // in scope 0 at $DIR/inline-diverging.rs:28:13: 28:16
+ scope 1 (inlined call_twice::<!, fn() -> ! {sleep}>) { // at $DIR/inline-diverging.rs:22:5: 22:22
+ debug f => _3; // in scope 1 at $DIR/inline-diverging.rs:26:36: 26:37
+ let _4: !; // in scope 1 at $DIR/inline-diverging.rs:27:9: 27:10
+ let mut _5: &fn() -> ! {sleep}; // in scope 1 at $DIR/inline-diverging.rs:27:13: 27:14
+ let mut _7: &fn() -> ! {sleep}; // in scope 1 at $DIR/inline-diverging.rs:28:13: 28:14
+ let mut _8: !; // in scope 1 at $DIR/inline-diverging.rs:29:6: 29:7
+ let mut _9: !; // in scope 1 at $DIR/inline-diverging.rs:29:9: 29:10
+ debug f => _2; // in scope 1 at $DIR/inline-diverging.rs:26:36: 26:37
+ let _3: !; // in scope 1 at $DIR/inline-diverging.rs:27:9: 27:10
+ let mut _4: &fn() -> ! {sleep}; // in scope 1 at $DIR/inline-diverging.rs:27:13: 27:14
+ let mut _6: &fn() -> ! {sleep}; // in scope 1 at $DIR/inline-diverging.rs:28:13: 28:14
+ let mut _7: !; // in scope 1 at $DIR/inline-diverging.rs:29:6: 29:7
+ let mut _8: !; // in scope 1 at $DIR/inline-diverging.rs:29:9: 29:10
+ scope 2 {
+ debug a => _4; // in scope 2 at $DIR/inline-diverging.rs:27:9: 27:10
+ let _6: !; // in scope 2 at $DIR/inline-diverging.rs:28:9: 28:10
+ debug a => _3; // in scope 2 at $DIR/inline-diverging.rs:27:9: 27:10
+ let _5: !; // in scope 2 at $DIR/inline-diverging.rs:28:9: 28:10
+ scope 3 {
+ debug b => _6; // in scope 3 at $DIR/inline-diverging.rs:28:9: 28:10
+ debug b => _5; // in scope 3 at $DIR/inline-diverging.rs:28:9: 28:10
+ }
+ scope 6 (inlined <fn() -> ! {sleep} as Fn<()>>::call - shim(fn() -> ! {sleep})) { // at $DIR/inline-diverging.rs:28:13: 28:16
+ scope 7 (inlined sleep) { // at $SRC_DIR/core/src/ops/function.rs:LL:COL
@@ -34,21 +33,20 @@
bb0: {
StorageLive(_1); // scope 0 at $DIR/inline-diverging.rs:22:5: 22:22
- call_twice::<!, fn() -> ! {sleep}>(sleep); // scope 0 at $DIR/inline-diverging.rs:22:5: 22:22
- _1 = call_twice::<!, fn() -> ! {sleep}>(sleep); // scope 0 at $DIR/inline-diverging.rs:22:5: 22:22
+ StorageLive(_2); // scope 0 at $DIR/inline-diverging.rs:22:5: 22:22
+ StorageLive(_3); // scope 0 at $DIR/inline-diverging.rs:22:5: 22:22
+ _3 = sleep; // scope 0 at $DIR/inline-diverging.rs:22:5: 22:22
+ _2 = sleep; // scope 0 at $DIR/inline-diverging.rs:22:5: 22:22
// mir::Constant
- // + span: $DIR/inline-diverging.rs:22:5: 22:15
- // + literal: Const { ty: fn(fn() -> ! {sleep}) -> (!, !) {call_twice::<!, fn() -> ! {sleep}>}, val: Value(Scalar(<ZST>)) }
- // mir::Constant
// + span: $DIR/inline-diverging.rs:22:16: 22:21
// + literal: Const { ty: fn() -> ! {sleep}, val: Value(Scalar(<ZST>)) }
+ StorageLive(_4); // scope 1 at $DIR/inline-diverging.rs:27:9: 27:10
+ StorageLive(_5); // scope 1 at $DIR/inline-diverging.rs:27:13: 27:14
+ _5 = &_3; // scope 1 at $DIR/inline-diverging.rs:27:13: 27:14
+ StorageLive(_10); // scope 1 at $DIR/inline-diverging.rs:27:13: 27:16
+ _10 = const (); // scope 1 at $DIR/inline-diverging.rs:27:13: 27:16
+ StorageLive(_3); // scope 1 at $DIR/inline-diverging.rs:27:9: 27:10
+ StorageLive(_4); // scope 1 at $DIR/inline-diverging.rs:27:13: 27:14
+ _4 = &_2; // scope 1 at $DIR/inline-diverging.rs:27:13: 27:14
+ StorageLive(_9); // scope 1 at $DIR/inline-diverging.rs:27:13: 27:16
+ _9 = const (); // scope 1 at $DIR/inline-diverging.rs:27:13: 27:16
+ goto -> bb1; // scope 5 at $DIR/inline-diverging.rs:39:5: 39:12
+ }
+


@@ -21,7 +21,7 @@ fn main() -> () {
StorageLive(_2); // scope 0 at $DIR/issue-72181-1.rs:16:9: 16:10
StorageLive(_3); // scope 2 at $DIR/issue-72181-1.rs:17:41: 17:43
_3 = (); // scope 2 at $DIR/issue-72181-1.rs:17:41: 17:43
transmute::<(), Void>(move _3) -> bb4; // scope 2 at $DIR/issue-72181-1.rs:17:9: 17:44
_2 = transmute::<(), Void>(move _3) -> bb4; // scope 2 at $DIR/issue-72181-1.rs:17:9: 17:44
// mir::Constant
// + span: $DIR/issue-72181-1.rs:17:9: 17:40
// + literal: Const { ty: unsafe extern "rust-intrinsic" fn(()) -> Void {transmute::<(), Void>}, val: Value(Scalar(<ZST>)) }
@@ -34,7 +34,7 @@ fn main() -> () {
StorageLive(_4); // scope 1 at $DIR/issue-72181-1.rs:20:5: 20:9
StorageLive(_5); // scope 1 at $DIR/issue-72181-1.rs:20:7: 20:8
_5 = move _2; // scope 1 at $DIR/issue-72181-1.rs:20:7: 20:8
f(move _5) -> bb4; // scope 1 at $DIR/issue-72181-1.rs:20:5: 20:9
_4 = f(move _5) -> bb4; // scope 1 at $DIR/issue-72181-1.rs:20:5: 20:9
// mir::Constant
// + span: $DIR/issue-72181-1.rs:20:5: 20:6
// + literal: Const { ty: fn(Void) -> ! {f}, val: Value(Scalar(<ZST>)) }


@@ -13,11 +13,12 @@
let mut _11: bool; // in scope 0 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
let mut _12: bool; // in scope 0 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
let mut _13: i32; // in scope 0 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
let mut _15: &i32; // in scope 0 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
let _16: &i32; // in scope 0 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
let mut _17: &i32; // in scope 0 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
let _18: &i32; // in scope 0 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
let mut _19: std::option::Option<std::fmt::Arguments>; // in scope 0 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
let _15: !; // in scope 0 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
let mut _16: &i32; // in scope 0 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
let _17: &i32; // in scope 0 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
let mut _18: &i32; // in scope 0 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
let _19: &i32; // in scope 0 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
let mut _20: std::option::Option<std::fmt::Arguments>; // in scope 0 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
scope 1 {
debug split => _1; // in scope 1 at $DIR/issue-73223.rs:2:9: 2:14
let _4: std::option::Option<i32>; // in scope 1 at $DIR/issue-73223.rs:7:9: 7:14
@@ -25,7 +26,7 @@
debug _prev => _4; // in scope 3 at $DIR/issue-73223.rs:7:9: 7:14
let _9: &i32; // in scope 3 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
let _10: &i32; // in scope 3 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
let mut _20: &i32; // in scope 3 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
let mut _21: &i32; // in scope 3 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
scope 4 {
debug left_val => _9; // in scope 4 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
debug right_val => _10; // in scope 4 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
@@ -62,11 +63,11 @@
StorageLive(_7); // scope 3 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
_7 = &_1; // scope 3 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
StorageLive(_8); // scope 3 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
_20 = const main::promoted[0]; // scope 3 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
_21 = const main::promoted[0]; // scope 3 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
// mir::Constant
// + span: $SRC_DIR/core/src/macros/mod.rs:LL:COL
// + literal: Const { ty: &i32, val: Unevaluated(main, [], Some(promoted[0])) }
_8 = _20; // scope 3 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
_8 = _21; // scope 3 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
Deinit(_6); // scope 3 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
(_6.0: &i32) = move _7; // scope 3 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
(_6.1: &i32) = move _8; // scope 3 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
@@ -93,16 +94,17 @@
discriminant(_14) = 0; // scope 4 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
StorageLive(_15); // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
StorageLive(_16); // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
_16 = _9; // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
_15 = _16; // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
StorageLive(_17); // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
_17 = _9; // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
_16 = _17; // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
StorageLive(_18); // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
_18 = _10; // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
_17 = _18; // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
StorageLive(_19); // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
Deinit(_19); // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
discriminant(_19) = 0; // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
core::panicking::assert_failed::<i32, i32>(const core::panicking::AssertKind::Eq, move _15, move _17, move _19); // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
_19 = _10; // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
_18 = _19; // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
StorageLive(_20); // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
Deinit(_20); // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
discriminant(_20) = 0; // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
_15 = core::panicking::assert_failed::<i32, i32>(const core::panicking::AssertKind::Eq, move _16, move _18, move _20); // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
// mir::Constant
// + span: $SRC_DIR/core/src/macros/mod.rs:LL:COL
// + literal: Const { ty: for<'r, 's, 't0> fn(core::panicking::AssertKind, &'r i32, &'s i32, Option<Arguments<'t0>>) -> ! {core::panicking::assert_failed::<i32, i32>}, val: Value(Scalar(<ZST>)) }


@@ -13,11 +13,12 @@
let mut _11: bool; // in scope 0 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
let mut _12: bool; // in scope 0 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
let mut _13: i32; // in scope 0 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
let mut _15: &i32; // in scope 0 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
let _16: &i32; // in scope 0 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
let mut _17: &i32; // in scope 0 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
let _18: &i32; // in scope 0 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
let mut _19: std::option::Option<std::fmt::Arguments>; // in scope 0 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
let _15: !; // in scope 0 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
let mut _16: &i32; // in scope 0 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
let _17: &i32; // in scope 0 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
let mut _18: &i32; // in scope 0 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
let _19: &i32; // in scope 0 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
let mut _20: std::option::Option<std::fmt::Arguments>; // in scope 0 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
scope 1 {
debug split => _1; // in scope 1 at $DIR/issue-73223.rs:2:9: 2:14
let _4: std::option::Option<i32>; // in scope 1 at $DIR/issue-73223.rs:7:9: 7:14
@@ -25,7 +26,7 @@
debug _prev => _4; // in scope 3 at $DIR/issue-73223.rs:7:9: 7:14
let _9: &i32; // in scope 3 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
let _10: &i32; // in scope 3 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
let mut _20: &i32; // in scope 3 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
let mut _21: &i32; // in scope 3 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
scope 4 {
debug left_val => _9; // in scope 4 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
debug right_val => _10; // in scope 4 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
@@ -62,11 +63,11 @@
StorageLive(_7); // scope 3 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
_7 = &_1; // scope 3 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
StorageLive(_8); // scope 3 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
_20 = const main::promoted[0]; // scope 3 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
_21 = const main::promoted[0]; // scope 3 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
// mir::Constant
// + span: $SRC_DIR/core/src/macros/mod.rs:LL:COL
// + literal: Const { ty: &i32, val: Unevaluated(main, [], Some(promoted[0])) }
_8 = _20; // scope 3 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
_8 = _21; // scope 3 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
Deinit(_6); // scope 3 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
(_6.0: &i32) = move _7; // scope 3 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
(_6.1: &i32) = move _8; // scope 3 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
@@ -93,16 +94,17 @@
discriminant(_14) = 0; // scope 4 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
StorageLive(_15); // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
StorageLive(_16); // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
_16 = _9; // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
_15 = _16; // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
StorageLive(_17); // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
_17 = _9; // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
_16 = _17; // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
StorageLive(_18); // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
_18 = _10; // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
_17 = _18; // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
StorageLive(_19); // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
Deinit(_19); // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
discriminant(_19) = 0; // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
core::panicking::assert_failed::<i32, i32>(const core::panicking::AssertKind::Eq, move _15, move _17, move _19); // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
_19 = _10; // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
_18 = _19; // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
StorageLive(_20); // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
Deinit(_20); // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
discriminant(_20) = 0; // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
_15 = core::panicking::assert_failed::<i32, i32>(const core::panicking::AssertKind::Eq, move _16, move _18, move _20); // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
// mir::Constant
// + span: $SRC_DIR/core/src/macros/mod.rs:LL:COL
// + literal: Const { ty: for<'r, 's, 't0> fn(core::panicking::AssertKind, &'r i32, &'s i32, Option<Arguments<'t0>>) -> ! {core::panicking::assert_failed::<i32, i32>}, val: Value(Scalar(<ZST>)) }


@@ -132,7 +132,7 @@
StorageLive(_27); // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
Deinit(_27); // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
discriminant(_27) = 0; // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
core::panicking::assert_failed::<i32, i32>(const core::panicking::AssertKind::Eq, move _23, move _25, move _27); // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
_21 = core::panicking::assert_failed::<i32, i32>(const core::panicking::AssertKind::Eq, move _23, move _25, move _27); // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
// mir::Constant
// + span: $SRC_DIR/core/src/macros/mod.rs:LL:COL
// + literal: Const { ty: for<'r, 's, 't0> fn(core::panicking::AssertKind, &'r i32, &'s i32, Option<Arguments<'t0>>) -> ! {core::panicking::assert_failed::<i32, i32>}, val: Value(Scalar(<ZST>)) }


@@ -132,7 +132,7 @@
StorageLive(_27); // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
Deinit(_27); // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
discriminant(_27) = 0; // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
core::panicking::assert_failed::<i32, i32>(const core::panicking::AssertKind::Eq, move _23, move _25, move _27); // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
_21 = core::panicking::assert_failed::<i32, i32>(const core::panicking::AssertKind::Eq, move _23, move _25, move _27); // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
// mir::Constant
// + span: $SRC_DIR/core/src/macros/mod.rs:LL:COL
// + literal: Const { ty: for<'r, 's, 't0> fn(core::panicking::AssertKind, &'r i32, &'s i32, Option<Arguments<'t0>>) -> ! {core::panicking::assert_failed::<i32, i32>}, val: Value(Scalar(<ZST>)) }


@@ -67,7 +67,7 @@
bb1: {
StorageLive(_22); // scope 1 at $SRC_DIR/core/src/panic.rs:LL:COL
core::panicking::panic(const "internal error: entered unreachable code"); // scope 1 at $SRC_DIR/core/src/panic.rs:LL:COL
_22 = core::panicking::panic(const "internal error: entered unreachable code"); // scope 1 at $SRC_DIR/core/src/panic.rs:LL:COL
// mir::Constant
// + span: $SRC_DIR/core/src/panic.rs:LL:COL
// + literal: Const { ty: fn(&'static str) -> ! {core::panicking::panic}, val: Value(Scalar(<ZST>)) }


@@ -7,7 +7,7 @@ fn num_to_digit(_1: char) -> u32 {
let mut _3: std::option::Option<u32>; // in scope 0 at $DIR/issue-59352.rs:14:26: 14:41
let mut _4: char; // in scope 0 at $DIR/issue-59352.rs:14:26: 14:29
let mut _5: u32; // in scope 0 at $DIR/issue-59352.rs:14:8: 14:23
let mut _11: isize; // in scope 0 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
let mut _12: isize; // in scope 0 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
scope 1 (inlined char::methods::<impl char>::is_digit) { // at $DIR/issue-59352.rs:14:8: 14:23
debug self => _2; // in scope 1 at $SRC_DIR/core/src/char/methods.rs:LL:COL
debug radix => _5; // in scope 1 at $SRC_DIR/core/src/char/methods.rs:LL:COL
@@ -22,6 +22,7 @@ fn num_to_digit(_1: char) -> u32 {
scope 3 (inlined #[track_caller] Option::<u32>::unwrap) { // at $DIR/issue-59352.rs:14:26: 14:50
debug self => _3; // in scope 3 at $SRC_DIR/core/src/option.rs:LL:COL
let mut _10: isize; // in scope 3 at $SRC_DIR/core/src/option.rs:LL:COL
let mut _11: !; // in scope 3 at $SRC_DIR/core/src/option.rs:LL:COL
scope 4 {
debug val => _0; // in scope 4 at $SRC_DIR/core/src/option.rs:LL:COL
}
@@ -43,7 +44,7 @@ fn num_to_digit(_1: char) -> u32 {
}
bb1: {
StorageDead(_11); // scope 0 at $DIR/issue-59352.rs:14:8: 14:23
StorageDead(_12); // scope 0 at $DIR/issue-59352.rs:14:8: 14:23
StorageLive(_3); // scope 0 at $DIR/issue-59352.rs:14:26: 14:41
StorageLive(_4); // scope 0 at $DIR/issue-59352.rs:14:26: 14:29
_4 = _1; // scope 0 at $DIR/issue-59352.rs:14:26: 14:29
@@ -61,7 +62,7 @@ fn num_to_digit(_1: char) -> u32 {
}
bb3: {
StorageDead(_11); // scope 0 at $DIR/issue-59352.rs:14:8: 14:23
StorageDead(_12); // scope 0 at $DIR/issue-59352.rs:14:8: 14:23
_0 = const 0_u32; // scope 0 at $DIR/issue-59352.rs:14:60: 14:61
goto -> bb4; // scope 0 at $DIR/issue-59352.rs:14:5: 14:63
}
@@ -75,18 +76,19 @@ fn num_to_digit(_1: char) -> u32 {
StorageDead(_8); // scope 1 at $SRC_DIR/core/src/char/methods.rs:LL:COL
StorageLive(_9); // scope 1 at $SRC_DIR/core/src/char/methods.rs:LL:COL
_9 = discriminant((*_6)); // scope 2 at $SRC_DIR/core/src/option.rs:LL:COL
StorageLive(_11); // scope 2 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
_11 = move _9; // scope 2 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
StorageLive(_12); // scope 2 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
_12 = move _9; // scope 2 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
StorageDead(_9); // scope 1 at $SRC_DIR/core/src/char/methods.rs:LL:COL
StorageDead(_6); // scope 1 at $SRC_DIR/core/src/char/methods.rs:LL:COL
StorageDead(_7); // scope 1 at $SRC_DIR/core/src/char/methods.rs:LL:COL
StorageDead(_5); // scope 0 at $DIR/issue-59352.rs:14:8: 14:23
StorageDead(_2); // scope 0 at $DIR/issue-59352.rs:14:22: 14:23
switchInt(move _11) -> [1_isize: bb1, otherwise: bb3]; // scope 0 at $DIR/issue-59352.rs:14:8: 14:23
switchInt(move _12) -> [1_isize: bb1, otherwise: bb3]; // scope 0 at $DIR/issue-59352.rs:14:8: 14:23
}
bb6: {
core::panicking::panic(const "called `Option::unwrap()` on a `None` value"); // scope 3 at $SRC_DIR/core/src/option.rs:LL:COL
StorageLive(_11); // scope 3 at $SRC_DIR/core/src/option.rs:LL:COL
_11 = core::panicking::panic(const "called `Option::unwrap()` on a `None` value"); // scope 3 at $SRC_DIR/core/src/option.rs:LL:COL
// mir::Constant
// + span: $SRC_DIR/core/src/option.rs:LL:COL
// + literal: Const { ty: fn(&'static str) -> ! {core::panicking::panic}, val: Value(Scalar(<ZST>)) }


@@ -12,7 +12,7 @@
bb0: {
StorageLive(_2); // scope 0 at $DIR/lower_intrinsics.rs:29:5: 29:47
StorageLive(_3); // scope 1 at $DIR/lower_intrinsics.rs:29:14: 29:45
- std::intrinsics::unreachable(); // scope 1 at $DIR/lower_intrinsics.rs:29:14: 29:45
- _3 = std::intrinsics::unreachable(); // scope 1 at $DIR/lower_intrinsics.rs:29:14: 29:45
- // mir::Constant
- // + span: $DIR/lower_intrinsics.rs:29:14: 29:43
- // + literal: Const { ty: unsafe extern "rust-intrinsic" fn() -> ! {std::intrinsics::unreachable}, val: Value(Scalar(<ZST>)) }


@@ -20,7 +20,7 @@ fn unwrap(_1: Option<T>) -> T {
bb1: {
StorageLive(_4); // scope 0 at $SRC_DIR/std/src/panic.rs:LL:COL
begin_panic::<&str>(const "explicit panic") -> bb4; // scope 0 at $SRC_DIR/std/src/panic.rs:LL:COL
_4 = begin_panic::<&str>(const "explicit panic") -> bb4; // scope 0 at $SRC_DIR/std/src/panic.rs:LL:COL
// mir::Constant
// + span: $SRC_DIR/std/src/panic.rs:LL:COL
// + literal: Const { ty: fn(&str) -> ! {begin_panic::<&str>}, val: Value(Scalar(<ZST>)) }


@@ -171,7 +171,7 @@ fn array_casts() -> () {
Retag(_32); // scope 8 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
StorageLive(_34); // scope 8 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
_34 = Option::<Arguments>::None; // scope 8 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
core::panicking::assert_failed::<usize, usize>(move _29, move _30, move _32, move _34); // scope 8 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
_28 = core::panicking::assert_failed::<usize, usize>(move _29, move _30, move _32, move _34); // scope 8 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
// mir::Constant
// + span: $SRC_DIR/core/src/macros/mod.rs:LL:COL
// + literal: Const { ty: for<'r, 's, 't0> fn(core::panicking::AssertKind, &'r usize, &'s usize, Option<Arguments<'t0>>) -> ! {core::panicking::assert_failed::<usize, usize>}, val: Value(Scalar(<ZST>)) }


@@ -6,7 +6,7 @@ LL | fn main() {
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 4, align: 4) {
╾─alloc7──╼ │ ╾──╼
╾─alloc8──╼ │ ╾──╼
}
error: erroneous constant used


@@ -6,7 +6,7 @@ LL | fn main() {
|
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
= note: the raw bytes of the constant (size: 8, align: 8) {
╾───────alloc7────────╼ │ ╾──────╼
╾───────alloc8────────╼ │ ╾──────╼
}
error: erroneous constant used


@@ -292,7 +292,7 @@ fn is_call_with_ref_arg<'tcx>(
if let (inner_ty, 1) = walk_ptrs_ty_depth(args[0].ty(&*mir, cx.tcx));
if !is_copy(cx, inner_ty);
then {
Some((def_id, *local, inner_ty, destination.as_ref().map(|(dest, _)| dest)?.as_local()?))
Some((def_id, *local, inner_ty, destination.as_local()?))
} else {
None
}
@@ -584,7 +584,7 @@ impl<'a, 'tcx> mir::visit::Visitor<'tcx> for PossibleBorrowerVisitor<'a, 'tcx> {
fn visit_terminator(&mut self, terminator: &mir::Terminator<'_>, _loc: mir::Location) {
if let mir::TerminatorKind::Call {
args,
destination: Some((mir::Place { local: dest, .. }, _)),
destination: mir::Place { local: dest, .. },
..
} = &terminator.kind
{


@@ -301,6 +301,7 @@ fn check_terminator<'a, 'tcx>(
args,
from_hir_call: _,
destination: _,
target: _,
cleanup: _,
fn_span: _,
} => {