// rustc_mir_transform/inline/cycle.rs
use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexSet};
use rustc_data_structures::stack::ensure_sufficient_stack;
use rustc_data_structures::unord::UnordSet;
use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_middle::mir::TerminatorKind;
use rustc_middle::ty::{self, GenericArgsRef, InstanceKind, TyCtxt, TypeVisitableExt};
use rustc_session::Limit;
use rustc_span::sym;
use tracing::{instrument, trace};

11#[instrument(level = "debug", skip(tcx), ret)]
12fn should_recurse<'tcx>(tcx: TyCtxt<'tcx>, callee: ty::Instance<'tcx>) -> bool {
13 match callee.def {
14 InstanceKind::Item(_) => {
18 if !tcx.is_mir_available(callee.def_id()) {
19 return false;
20 }
21 }
22
23 InstanceKind::Intrinsic(_) | InstanceKind::Virtual(..) => return false,
25
26 InstanceKind::VTableShim(_)
30 | InstanceKind::ReifyShim(..)
31 | InstanceKind::FnPtrShim(..)
32 | InstanceKind::ClosureOnceShim { .. }
33 | InstanceKind::ConstructCoroutineInClosureShim { .. }
34 | InstanceKind::ThreadLocalShim { .. }
35 | InstanceKind::CloneShim(..) => {}
36
37 InstanceKind::FnPtrAddrShim(..) => return false,
39
40 InstanceKind::DropGlue(..)
44 | InstanceKind::FutureDropPollShim(..)
45 | InstanceKind::AsyncDropGlue(..)
46 | InstanceKind::AsyncDropGlueCtorShim(..) => {
47 if callee.has_param() {
48 return false;
49 }
50 }
51 }
52
53 crate::pm::should_run_pass(tcx, &crate::inline::Inline, crate::pm::Optimizations::Allowed)
54 || crate::inline::ForceInline::should_run_pass_for_callee(tcx, callee.def.def_id())
55}
56
/// Depth-first walk over the monomorphized callees of `caller`, recording in
/// `involved` every *local* function that lies on some call path reaching the
/// `target` root. Returns `true` iff `caller` can reach `target`.
///
/// `seen` memoizes, per resolved instance, whether that instance reaches the
/// root. An entry is inserted as `false` *before* recursing into an instance,
/// so cycles that do not involve the root terminate rather than recursing
/// forever; the entry is overwritten with the real answer afterwards.
///
/// `recursion_limiter` counts visits per `DefId`; once `recursion_limit` is
/// exceeded the callee is conservatively treated as reaching the root.
#[instrument(
    level = "debug",
    skip(tcx, typing_env, seen, involved, recursion_limiter, recursion_limit),
    ret
)]
fn process<'tcx>(
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    caller: ty::Instance<'tcx>,
    target: LocalDefId,
    seen: &mut FxHashMap<ty::Instance<'tcx>, bool>,
    involved: &mut FxHashSet<LocalDefId>,
    recursion_limiter: &mut FxHashMap<DefId, usize>,
    recursion_limit: Limit,
) -> bool {
    trace!(%caller);
    let mut reaches_root = false;

    for &(callee_def_id, args) in tcx.mir_inliner_callees(caller.def) {
        // Instantiate the callee's generic args in the caller's context;
        // skip calls whose args cannot be normalized here.
        let Ok(args) = caller.try_instantiate_mir_and_normalize_erasing_regions(
            tcx,
            typing_env,
            ty::EarlyBinder::bind(args),
        ) else {
            trace!(?caller, ?typing_env, ?args, "cannot normalize, skipping");
            continue;
        };
        // Resolve to a concrete instance; unresolvable calls are skipped.
        let Ok(Some(callee)) = ty::Instance::try_resolve(tcx, typing_env, callee_def_id, args)
        else {
            trace!(?callee_def_id, "cannot resolve, skipping");
            continue;
        };

        // Direct call back to the root: mark and don't recurse into it.
        if callee.def_id() == target.to_def_id() {
            reaches_root = true;
            seen.insert(callee, true);
            continue;
        }

        if tcx.is_constructor(callee.def_id()) {
            trace!("constructors always have MIR");
            continue;
        }

        if !should_recurse(tcx, callee) {
            continue;
        }

        let callee_reaches_root = if let Some(&c) = seen.get(&callee) {
            // Already fully processed, or currently on the stack (in which
            // case the provisional `false` cuts the cycle).
            c
        } else {
            // Provisional entry: assume "does not reach root" while this
            // instance is being processed, so cycles terminate.
            seen.insert(callee, false);
            let recursion = recursion_limiter.entry(callee.def_id()).or_default();
            trace!(?callee, recursion = *recursion);
            let callee_reaches_root = if recursion_limit.value_within_limit(*recursion) {
                *recursion += 1;
                // Guard against stack overflow on deep call graphs.
                ensure_sufficient_stack(|| {
                    process(
                        tcx,
                        typing_env,
                        callee,
                        target,
                        seen,
                        involved,
                        recursion_limiter,
                        recursion_limit,
                    )
                })
            } else {
                // Over the budget: conservatively assume it reaches the root.
                true
            };
            seen.insert(callee, callee_reaches_root);
            callee_reaches_root
        };
        if callee_reaches_root {
            // Only local functions can be inlined, so only they are recorded.
            if let Some(callee_def_id) = callee.def_id().as_local() {
                involved.insert(callee_def_id);
            }
            reaches_root = true;
        }
    }

    reaches_root
}
149
150#[instrument(level = "debug", skip(tcx), ret)]
151pub(crate) fn mir_callgraph_cyclic<'tcx>(
152 tcx: TyCtxt<'tcx>,
153 root: LocalDefId,
154) -> UnordSet<LocalDefId> {
155 assert!(
156 !tcx.is_constructor(root.to_def_id()),
157 "you should not call `mir_callgraph_reachable` on enum/struct constructor functions"
158 );
159
160 let recursion_limit = tcx.recursion_limit() / 2;
168 let mut involved = FxHashSet::default();
169 let typing_env = ty::TypingEnv::post_analysis(tcx, root);
170 let root_instance =
171 ty::Instance::new_raw(root.to_def_id(), ty::GenericArgs::identity_for_item(tcx, root));
172 if !should_recurse(tcx, root_instance) {
173 trace!("cannot walk, skipping");
174 return involved.into();
175 }
176 process(
177 tcx,
178 typing_env,
179 root_instance,
180 root,
181 &mut FxHashMap::default(),
182 &mut involved,
183 &mut FxHashMap::default(),
184 recursion_limit,
185 );
186 involved.into()
187}
188
189pub(crate) fn mir_inliner_callees<'tcx>(
190 tcx: TyCtxt<'tcx>,
191 instance: ty::InstanceKind<'tcx>,
192) -> &'tcx [(DefId, GenericArgsRef<'tcx>)] {
193 let steal;
194 let guard;
195 let body = match (instance, instance.def_id().as_local()) {
196 (InstanceKind::Item(_), Some(def_id)) => {
197 steal = tcx.mir_promoted(def_id).0;
198 guard = steal.borrow();
199 &*guard
200 }
201 _ => tcx.instance_mir(instance),
203 };
204 let mut calls = FxIndexSet::default();
205 for bb_data in body.basic_blocks.iter() {
206 let terminator = bb_data.terminator();
207 if let TerminatorKind::Call { func, args: call_args, .. } = &terminator.kind {
208 let ty = func.ty(&body.local_decls, tcx);
209 let ty::FnDef(def_id, generic_args) = ty.kind() else {
210 continue;
211 };
212 let call = if tcx.is_intrinsic(*def_id, sym::const_eval_select) {
213 let func = &call_args[2].node;
214 let ty = func.ty(&body.local_decls, tcx);
215 let ty::FnDef(def_id, generic_args) = ty.kind() else {
216 continue;
217 };
218 (*def_id, *generic_args)
219 } else {
220 (*def_id, *generic_args)
221 };
222 calls.insert(call);
223 }
224 }
225 tcx.arena.alloc_from_iter(calls.iter().copied())
226}