// compiler/rustc_mir_transform/src/inline/cycle.rs
use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexSet};
use rustc_data_structures::stack::ensure_sufficient_stack;
use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_middle::mir::TerminatorKind;
use rustc_middle::ty::TypeVisitableExt;
use rustc_middle::ty::{self, GenericArgsRef, InstanceDef, TyCtxt};
use rustc_session::Limit;
use rustc_span::sym;
// FIXME: check whether it is cheaper to precompute the entire call graph instead of invoking
// this query ridiculously often.
#[instrument(level = "debug", skip(tcx, root, target))]
pub(crate) fn mir_callgraph_reachable<'tcx>(
2020-12-29 16:21:52 +00:00
tcx: TyCtxt<'tcx>,
(root, target): (ty::Instance<'tcx>, LocalDefId),
) -> bool {
trace!(%root, target = %tcx.def_path_str(target));
2020-12-29 16:21:52 +00:00
let param_env = tcx.param_env_reveal_all_normalized(target);
assert_ne!(
root.def_id().expect_local(),
target,
"you should not call `mir_callgraph_reachable` on immediate self recursion"
);
assert!(
matches!(root.def, InstanceDef::Item(_)),
"you should not call `mir_callgraph_reachable` on shims"
);
assert!(
!tcx.is_constructor(root.def_id()),
"you should not call `mir_callgraph_reachable` on enum/struct constructor functions"
);
#[instrument(
level = "debug",
skip(tcx, param_env, target, stack, seen, recursion_limiter, caller, recursion_limit)
)]
fn process<'tcx>(
2020-12-29 16:21:52 +00:00
tcx: TyCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
caller: ty::Instance<'tcx>,
target: LocalDefId,
stack: &mut Vec<ty::Instance<'tcx>>,
seen: &mut FxHashSet<ty::Instance<'tcx>>,
recursion_limiter: &mut FxHashMap<DefId, usize>,
recursion_limit: Limit,
2020-12-29 16:21:52 +00:00
) -> bool {
trace!(%caller);
for &(callee, args) in tcx.mir_inliner_callees(caller.def) {
2023-09-25 13:46:38 +00:00
let Ok(args) = caller.try_instantiate_mir_and_normalize_erasing_regions(
tcx,
param_env,
ty::EarlyBinder::bind(args),
) else {
trace!(?caller, ?param_env, ?args, "cannot normalize, skipping");
continue;
};
let Ok(Some(callee)) = ty::Instance::resolve(tcx, param_env, callee, args) else {
2022-02-18 23:48:49 +00:00
trace!(?callee, "cannot resolve, skipping");
continue;
2020-12-29 16:21:52 +00:00
};
// Found a path.
if callee.def_id() == target.to_def_id() {
return true;
}
if tcx.is_constructor(callee.def_id()) {
trace!("constructors always have MIR");
// Constructor functions cannot cause a query cycle.
continue;
}
match callee.def {
InstanceDef::Item(_) => {
// If there is no MIR available (either because it was not in metadata or
// because it has no MIR because it's an extern function), then the inliner
// won't cause cycles on this.
if !tcx.is_mir_available(callee.def_id()) {
trace!(?callee, "no mir available, skipping");
continue;
}
}
// These have no own callable MIR.
InstanceDef::Intrinsic(_) | InstanceDef::Virtual(..) => continue,
2024-02-12 06:39:32 +00:00
// These have MIR and if that MIR is inlined, instantiated and then inlining is run
2020-12-29 16:21:52 +00:00
// again, a function item can end up getting inlined. Thus we'll be able to cause
// a cycle that way
InstanceDef::VTableShim(_)
| InstanceDef::ReifyShim(..)
2020-12-29 16:21:52 +00:00
| InstanceDef::FnPtrShim(..)
| InstanceDef::ClosureOnceShim { .. }
| InstanceDef::ConstructCoroutineInClosureShim { .. }
| InstanceDef::CoroutineKindShim { .. }
| InstanceDef::ThreadLocalShim { .. }
2020-12-29 16:21:52 +00:00
| InstanceDef::CloneShim(..) => {}
2022-07-20 12:32:58 +00:00
// This shim does not call any other functions, thus there can be no recursion.
InstanceDef::FnPtrAddrShim(..) => {
continue;
}
InstanceDef::DropGlue(..) | InstanceDef::AsyncDropGlueCtorShim(..) => {
2024-02-12 06:39:32 +00:00
// FIXME: A not fully instantiated drop shim can cause ICEs if one attempts to
2020-12-29 16:21:52 +00:00
// have its MIR built. Likely oli-obk just screwed up the `ParamEnv`s, so this
// needs some more analysis.
2023-04-27 06:52:17 +00:00
if callee.has_param() {
2020-12-29 16:21:52 +00:00
continue;
}
}
}
if seen.insert(callee) {
let recursion = recursion_limiter.entry(callee.def_id()).or_default();
trace!(?callee, recursion = *recursion);
if recursion_limit.value_within_limit(*recursion) {
2020-12-29 16:21:52 +00:00
*recursion += 1;
stack.push(callee);
let found_recursion = ensure_sufficient_stack(|| {
process(
tcx,
param_env,
callee,
target,
stack,
seen,
recursion_limiter,
recursion_limit,
)
2020-12-29 16:21:52 +00:00
});
if found_recursion {
return true;
}
stack.pop();
} else {
// Pessimistically assume that there could be recursion.
return true;
}
}
}
false
}
process(
tcx,
param_env,
root,
target,
&mut Vec::new(),
&mut FxHashSet::default(),
&mut FxHashMap::default(),
tcx.recursion_limit(),
2020-12-29 16:21:52 +00:00
)
}
/// Query implementation: collects the deduplicated `(DefId, GenericArgs)` pairs of
/// every function directly called by `instance`'s MIR body. These are the *unresolved*
/// callees; `mir_callgraph_reachable` instantiates and resolves them per caller.
pub(crate) fn mir_inliner_callees<'tcx>(
    tcx: TyCtxt<'tcx>,
    instance: ty::InstanceDef<'tcx>,
) -> &'tcx [(DefId, GenericArgsRef<'tcx>)] {
    // Deferred initialization: `steal` must be declared before `guard` so that the
    // borrow taken in the match arm below outlives `body`'s use of it.
    let steal;
    let guard;
    let body = match (instance, instance.def_id().as_local()) {
        (InstanceDef::Item(_), Some(def_id)) => {
            // Local item: borrow the `mir_promoted` result without stealing it.
            // NOTE(review): presumably this avoids forcing later MIR queries that
            // could themselves cycle back here — confirm against the query graph.
            steal = tcx.mir_promoted(def_id).0;
            guard = steal.borrow();
            &*guard
        }
        // Functions from other crates and MIR shims
        _ => tcx.instance_mir(instance),
    };
    // FxIndexSet dedups callees while keeping a deterministic insertion order.
    let mut calls = FxIndexSet::default();
    for bb_data in body.basic_blocks.iter() {
        // Only `Call` terminators introduce callees; other terminator kinds are ignored.
        let terminator = bb_data.terminator();
        if let TerminatorKind::Call { func, args: call_args, .. } = &terminator.kind {
            let ty = func.ty(&body.local_decls, tcx);
            // Non-`FnDef` callees (e.g. fn pointers) cannot be named statically; skip.
            let ty::FnDef(def_id, generic_args) = ty.kind() else {
                continue;
            };
            // For `const_eval_select`, record its third argument (index 2) in place of
            // the intrinsic itself — that is the function the inliner could later see.
            // NOTE(review): relies on the intrinsic's argument layout; the shadowed
            // `def_id`/`generic_args` here intentionally replace the outer bindings.
            let call = if tcx.is_intrinsic(*def_id, sym::const_eval_select) {
                let func = &call_args[2].node;
                let ty = func.ty(&body.local_decls, tcx);
                let ty::FnDef(def_id, generic_args) = ty.kind() else {
                    continue;
                };
                (*def_id, *generic_args)
            } else {
                (*def_id, *generic_args)
            };
            calls.insert(call);
        }
    }
    // Arena-allocate the result so the query can hand out a `&'tcx` slice.
    tcx.arena.alloc_from_iter(calls.iter().copied())
}