2022-05-12 10:03:41 +00:00
|
|
|
use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexSet};
|
2020-12-29 16:21:52 +00:00
|
|
|
use rustc_data_structures::stack::ensure_sufficient_stack;
|
|
|
|
use rustc_hir::def_id::{DefId, LocalDefId};
|
|
|
|
use rustc_middle::mir::TerminatorKind;
|
2024-06-17 01:35:16 +00:00
|
|
|
use rustc_middle::ty::{self, GenericArgsRef, InstanceKind, TyCtxt, TypeVisitableExt};
|
2021-06-25 23:48:26 +00:00
|
|
|
use rustc_session::Limit;
|
2024-04-04 04:10:01 +00:00
|
|
|
use rustc_span::sym;
|
2024-08-28 05:03:14 +00:00
|
|
|
use tracing::{instrument, trace};
|
2020-12-29 16:21:52 +00:00
|
|
|
|
|
|
|
// FIXME: check whether it is cheaper to precompute the entire call graph instead of invoking
// this query ridiculously often.
/// Returns whether `target` is reachable from `root` in the MIR call graph.
///
/// Used by the inliner to avoid query cycles: inlining `target`'s MIR into `root`
/// would require `target`'s MIR to be built, which (transitively) requires this
/// query on `root` — so a reachable path means "do not inline".
///
/// Answers conservatively: hitting the recursion limit is reported as reachable.
#[instrument(level = "debug", skip(tcx, root, target))]
pub(crate) fn mir_callgraph_reachable<'tcx>(
    tcx: TyCtxt<'tcx>,
    (root, target): (ty::Instance<'tcx>, LocalDefId),
) -> bool {
    trace!(%root, target = %tcx.def_path_str(target));
    let param_env = tcx.param_env_reveal_all_normalized(target);
    // The trivial cases below must be handled by the caller; they would make the
    // walk meaningless (immediate self-recursion) or are known cycle-free (shims,
    // constructors — see the matching `continue`s in `process`).
    assert_ne!(
        root.def_id().expect_local(),
        target,
        "you should not call `mir_callgraph_reachable` on immediate self recursion"
    );
    assert!(
        matches!(root.def, InstanceKind::Item(_)),
        "you should not call `mir_callgraph_reachable` on shims"
    );
    assert!(
        !tcx.is_constructor(root.def_id()),
        "you should not call `mir_callgraph_reachable` on enum/struct constructor functions"
    );
    /// Depth-first walk over `caller`'s callees; returns `true` if `target` is
    /// found (or pessimistically on hitting `recursion_limit` for some `DefId`).
    ///
    /// `stack` holds the current DFS path, `seen` the instances already visited
    /// (each is expanded at most once), and `recursion_limiter` counts how often
    /// each `DefId` has been entered across all instantiations.
    #[instrument(
        level = "debug",
        skip(tcx, param_env, target, stack, seen, recursion_limiter, caller, recursion_limit)
    )]
    fn process<'tcx>(
        tcx: TyCtxt<'tcx>,
        param_env: ty::ParamEnv<'tcx>,
        caller: ty::Instance<'tcx>,
        target: LocalDefId,
        stack: &mut Vec<ty::Instance<'tcx>>,
        seen: &mut FxHashSet<ty::Instance<'tcx>>,
        recursion_limiter: &mut FxHashMap<DefId, usize>,
        recursion_limit: Limit,
    ) -> bool {
        trace!(%caller);
        for &(callee, args) in tcx.mir_inliner_callees(caller.def) {
            // Substitute the caller's generic args into the callee's args so we
            // can resolve the concrete instance being called.
            let Ok(args) = caller.try_instantiate_mir_and_normalize_erasing_regions(
                tcx,
                param_env,
                ty::EarlyBinder::bind(args),
            ) else {
                trace!(?caller, ?param_env, ?args, "cannot normalize, skipping");
                continue;
            };
            let Ok(Some(callee)) = ty::Instance::try_resolve(tcx, param_env, callee, args) else {
                trace!(?callee, "cannot resolve, skipping");
                continue;
            };

            // Found a path.
            if callee.def_id() == target.to_def_id() {
                return true;
            }

            if tcx.is_constructor(callee.def_id()) {
                trace!("constructors always have MIR");
                // Constructor functions cannot cause a query cycle.
                continue;
            }

            // Decide per instance kind whether this callee can contribute to a
            // cycle at all; only kinds that fall through get recursed into below.
            match callee.def {
                InstanceKind::Item(_) => {
                    // If there is no MIR available (either because it was not in metadata or
                    // because it has no MIR because it's an extern function), then the inliner
                    // won't cause cycles on this.
                    if !tcx.is_mir_available(callee.def_id()) {
                        trace!(?callee, "no mir available, skipping");
                        continue;
                    }
                }
                // These have no own callable MIR.
                InstanceKind::Intrinsic(_) | InstanceKind::Virtual(..) => continue,
                // These have MIR and if that MIR is inlined, instantiated and then inlining is run
                // again, a function item can end up getting inlined. Thus we'll be able to cause
                // a cycle that way
                InstanceKind::VTableShim(_)
                | InstanceKind::ReifyShim(..)
                | InstanceKind::FnPtrShim(..)
                | InstanceKind::ClosureOnceShim { .. }
                | InstanceKind::ConstructCoroutineInClosureShim { .. }
                | InstanceKind::ThreadLocalShim { .. }
                | InstanceKind::CloneShim(..) => {}

                // This shim does not call any other functions, thus there can be no recursion.
                InstanceKind::FnPtrAddrShim(..) => {
                    continue;
                }
                InstanceKind::DropGlue(..) | InstanceKind::AsyncDropGlueCtorShim(..) => {
                    // FIXME: A not fully instantiated drop shim can cause ICEs if one attempts to
                    // have its MIR built. Likely oli-obk just screwed up the `ParamEnv`s, so this
                    // needs some more analysis.
                    if callee.has_param() {
                        continue;
                    }
                }
            }

            // Expand each distinct instance at most once.
            if seen.insert(callee) {
                let recursion = recursion_limiter.entry(callee.def_id()).or_default();
                trace!(?callee, recursion = *recursion);
                if recursion_limit.value_within_limit(*recursion) {
                    *recursion += 1;
                    stack.push(callee);
                    // Guard against overflowing the real thread stack while
                    // recursing into deep call graphs.
                    let found_recursion = ensure_sufficient_stack(|| {
                        process(
                            tcx,
                            param_env,
                            callee,
                            target,
                            stack,
                            seen,
                            recursion_limiter,
                            recursion_limit,
                        )
                    });
                    if found_recursion {
                        return true;
                    }
                    stack.pop();
                } else {
                    // Pessimistically assume that there could be recursion.
                    return true;
                }
            }
        }
        false
    }
    // FIXME(-Znext-solver): Remove this hack when trait solver overflow can return an error.
    // In code like that pointed out in #128887, the type complexity we ask the solver to deal with
    // grows as we recurse into the call graph. If we use the same recursion limit here and in the
    // solver, the solver hits the limit first and emits a fatal error. But if we use a reduced
    // limit, we will hit the limit first and give up on looking for inlining. And in any case,
    // the default recursion limits are quite generous for us. If we need to recurse 64 times
    // into the call graph, we're probably not going to find any useful MIR inlining.
    let recursion_limit = tcx.recursion_limit() / 2;
    process(
        tcx,
        param_env,
        root,
        target,
        &mut Vec::new(),
        &mut FxHashSet::default(),
        &mut FxHashMap::default(),
        recursion_limit,
    )
}
|
|
|
|
|
2022-05-20 23:51:09 +00:00
|
|
|
pub(crate) fn mir_inliner_callees<'tcx>(
|
2020-12-29 16:21:52 +00:00
|
|
|
tcx: TyCtxt<'tcx>,
|
2024-06-17 01:35:16 +00:00
|
|
|
instance: ty::InstanceKind<'tcx>,
|
2023-07-11 21:35:29 +00:00
|
|
|
) -> &'tcx [(DefId, GenericArgsRef<'tcx>)] {
|
2020-12-29 16:21:52 +00:00
|
|
|
let steal;
|
|
|
|
let guard;
|
|
|
|
let body = match (instance, instance.def_id().as_local()) {
|
2024-06-17 01:35:16 +00:00
|
|
|
(InstanceKind::Item(_), Some(def_id)) => {
|
2022-05-08 13:53:19 +00:00
|
|
|
steal = tcx.mir_promoted(def_id).0;
|
2020-12-29 16:21:52 +00:00
|
|
|
guard = steal.borrow();
|
|
|
|
&*guard
|
|
|
|
}
|
|
|
|
// Functions from other crates and MIR shims
|
|
|
|
_ => tcx.instance_mir(instance),
|
|
|
|
};
|
2022-05-12 10:03:41 +00:00
|
|
|
let mut calls = FxIndexSet::default();
|
2022-07-05 00:00:00 +00:00
|
|
|
for bb_data in body.basic_blocks.iter() {
|
2020-12-29 16:21:52 +00:00
|
|
|
let terminator = bb_data.terminator();
|
2024-04-04 04:10:01 +00:00
|
|
|
if let TerminatorKind::Call { func, args: call_args, .. } = &terminator.kind {
|
2020-12-29 16:21:52 +00:00
|
|
|
let ty = func.ty(&body.local_decls, tcx);
|
2024-04-04 04:10:01 +00:00
|
|
|
let ty::FnDef(def_id, generic_args) = ty.kind() else {
|
|
|
|
continue;
|
|
|
|
};
|
|
|
|
let call = if tcx.is_intrinsic(*def_id, sym::const_eval_select) {
|
|
|
|
let func = &call_args[2].node;
|
|
|
|
let ty = func.ty(&body.local_decls, tcx);
|
|
|
|
let ty::FnDef(def_id, generic_args) = ty.kind() else {
|
|
|
|
continue;
|
|
|
|
};
|
|
|
|
(*def_id, *generic_args)
|
|
|
|
} else {
|
|
|
|
(*def_id, *generic_args)
|
2020-12-29 16:21:52 +00:00
|
|
|
};
|
2021-02-24 00:00:00 +00:00
|
|
|
calls.insert(call);
|
2020-12-29 16:21:52 +00:00
|
|
|
}
|
|
|
|
}
|
2021-02-24 00:00:00 +00:00
|
|
|
tcx.arena.alloc_from_iter(calls.iter().copied())
|
2020-12-29 16:21:52 +00:00
|
|
|
}
|