rustc_mir_transform/inline/cycle.rs
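//! Call graph reachability analysis used by the MIR inliner to avoid query cycles.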

use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexSet};
use rustc_data_structures::stack::ensure_sufficient_stack;
use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_middle::mir::TerminatorKind;
use rustc_middle::ty::{self, GenericArgsRef, InstanceKind, TyCtxt, TypeVisitableExt};
use rustc_session::Limit;
use rustc_span::sym;
use tracing::{instrument, trace};

// FIXME: check whether it is cheaper to precompute the entire call graph instead of invoking
// this query ridiculously often.
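/// Returns `true` if `target` is (transitively) reachable from `root` in the MIR call graph,
/// or if the walk hits the recursion limit and has to pessimistically assume recursion.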
#[instrument(level = "debug", skip(tcx, root, target))]
pub(crate) fn mir_callgraph_reachable<'tcx>(
    tcx: TyCtxt<'tcx>,
    (root, target): (ty::Instance<'tcx>, LocalDefId),
) -> bool {
    trace!(%root, target = %tcx.def_path_str(target));
    assert_ne!(
        root.def_id().expect_local(),
        target,
        "you should not call `mir_callgraph_reachable` on immediate self recursion"
    );
    assert!(
        matches!(root.def, InstanceKind::Item(_)),
        "you should not call `mir_callgraph_reachable` on shims"
    );
    assert!(
        !tcx.is_constructor(root.def_id()),
        "you should not call `mir_callgraph_reachable` on enum/struct constructor functions"
    );
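    /// Walks the callees of `caller` depth-first, returning `true` as soon as `target`
    /// is found or the per-`DefId` recursion limit is exceeded.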
    #[instrument(
        level = "debug",
        skip(tcx, typing_env, target, stack, seen, recursion_limiter, caller, recursion_limit)
    )]
    fn process<'tcx>(
        tcx: TyCtxt<'tcx>,
        typing_env: ty::TypingEnv<'tcx>,
        caller: ty::Instance<'tcx>,
        target: LocalDefId,
        stack: &mut Vec<ty::Instance<'tcx>>,
        seen: &mut FxHashSet<ty::Instance<'tcx>>,
        recursion_limiter: &mut FxHashMap<DefId, usize>,
        recursion_limit: Limit,
    ) -> bool {
        trace!(%caller);
        for &(callee, args) in tcx.mir_inliner_callees(caller.def) {
            let Ok(args) = caller.try_instantiate_mir_and_normalize_erasing_regions(
                tcx,
                typing_env,
                ty::EarlyBinder::bind(args),
            ) else {
                trace!(?caller, ?typing_env, ?args, "cannot normalize, skipping");
                continue;
            };
            let Ok(Some(callee)) = ty::Instance::try_resolve(tcx, typing_env, callee, args) else {
                trace!(?callee, "cannot resolve, skipping");
                continue;
            };

            // Found a path.
            if callee.def_id() == target.to_def_id() {
                return true;
            }

            if tcx.is_constructor(callee.def_id()) {
                trace!("constructors always have MIR");
                // Constructor functions cannot cause a query cycle.
                continue;
            }

            match callee.def {
                InstanceKind::Item(_) => {
                    // If there is no MIR available (either because it was not serialized into
                    // metadata, or because the function is extern and has no MIR at all), then
                    // the inliner cannot cause a cycle through this callee.
                    if !tcx.is_mir_available(callee.def_id()) {
                        trace!(?callee, "no mir available, skipping");
                        continue;
                    }
                }
                // These do not have callable MIR of their own.
                InstanceKind::Intrinsic(_) | InstanceKind::Virtual(..) => continue,
                // These have MIR; if that MIR is inlined and instantiated, and inlining is then
                // run again, a function item can end up getting inlined. Thus we can cause a
                // cycle that way.
                InstanceKind::VTableShim(_)
                | InstanceKind::ReifyShim(..)
                | InstanceKind::FnPtrShim(..)
                | InstanceKind::ClosureOnceShim { .. }
                | InstanceKind::ConstructCoroutineInClosureShim { .. }
                | InstanceKind::ThreadLocalShim { .. }
                | InstanceKind::CloneShim(..) => {}

                // This shim does not call any other functions, thus there can be no recursion.
                InstanceKind::FnPtrAddrShim(..) => {
                    continue;
                }
                InstanceKind::DropGlue(..)
                | InstanceKind::FutureDropPollShim(..)
                | InstanceKind::AsyncDropGlue(..)
                | InstanceKind::AsyncDropGlueCtorShim(..) => {
                    // FIXME: A not fully instantiated drop shim can cause ICEs if one attempts to
                    // have its MIR built. Likely oli-obk just screwed up the `ParamEnv`s, so this
                    // needs some more analysis.
                    if callee.has_param() {
                        continue;
                    }
                }
            }

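            // Only recurse into instances we have not visited before, and bound how often
            // any one `DefId` may be descended into so that the walk stays bounded.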
            if seen.insert(callee) {
                let recursion = recursion_limiter.entry(callee.def_id()).or_default();
                trace!(?callee, recursion = *recursion);
                if recursion_limit.value_within_limit(*recursion) {
                    *recursion += 1;
                    stack.push(callee);
                    let found_recursion = ensure_sufficient_stack(|| {
                        process(
                            tcx,
                            typing_env,
                            callee,
                            target,
                            stack,
                            seen,
                            recursion_limiter,
                            recursion_limit,
                        )
                    });
                    if found_recursion {
                        return true;
                    }
                    stack.pop();
                } else {
                    // Pessimistically assume that there could be recursion.
                    return true;
                }
            }
        }
        false
    }
    // FIXME(-Znext-solver): Remove this hack when trait solver overflow can return an error.
    // In code like that pointed out in #128887, the type complexity we ask the solver to deal with
    // grows as we recurse into the call graph. If we use the same recursion limit here and in the
    // solver, the solver hits the limit first and emits a fatal error. But if we use a reduced
    // limit, we will hit the limit first and give up on looking for inlining. And in any case,
    // the default recursion limits are quite generous for us. If we need to recurse 64 times
    // into the call graph, we're probably not going to find any useful MIR inlining.
    let recursion_limit = tcx.recursion_limit() / 2;
    process(
        tcx,
        ty::TypingEnv::post_analysis(tcx, target),
        root,
        target,
        &mut Vec::new(),
        &mut FxHashSet::default(),
        &mut FxHashMap::default(),
        recursion_limit,
    )
}

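/// Returns every `(DefId, GenericArgsRef)` pair called from the MIR of `instance`,
/// deduplicated and in a deterministic order.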
pub(crate) fn mir_inliner_callees<'tcx>(
    tcx: TyCtxt<'tcx>,
    instance: ty::InstanceKind<'tcx>,
) -> &'tcx [(DefId, GenericArgsRef<'tcx>)] {
    let steal;
    let guard;
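    // For local functions, inspect the unoptimized `mir_promoted` body; shims and
    // cross-crate functions are handled through `instance_mir` below.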
    let body = match (instance, instance.def_id().as_local()) {
        (InstanceKind::Item(_), Some(def_id)) => {
            steal = tcx.mir_promoted(def_id).0;
            guard = steal.borrow();
            &*guard
        }
        // Functions from other crates and MIR shims
        _ => tcx.instance_mir(instance),
    };
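    // Collect callees into an `FxIndexSet` so duplicates are dropped and the returned
    // slice has a deterministic order.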
    let mut calls = FxIndexSet::default();
    for bb_data in body.basic_blocks.iter() {
        let terminator = bb_data.terminator();
        if let TerminatorKind::Call { func, args: call_args, .. } = &terminator.kind {
            let ty = func.ty(&body.local_decls, tcx);
            let ty::FnDef(def_id, generic_args) = ty.kind() else {
                continue;
            };
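            // `const_eval_select` is special-cased: its third argument is the function that
            // actually runs at runtime, so record that as the callee instead.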
            let call = if tcx.is_intrinsic(*def_id, sym::const_eval_select) {
                let func = &call_args[2].node;
                let ty = func.ty(&body.local_decls, tcx);
                let ty::FnDef(def_id, generic_args) = ty.kind() else {
                    continue;
                };
                (*def_id, *generic_args)
            } else {
                (*def_id, *generic_args)
            };
            calls.insert(call);
        }
    }
    tcx.arena.alloc_from_iter(calls.iter().copied())
}