rustc_codegen_ssa/mir/
analyze.rs

1//! An analysis to determine which locals require allocas and
2//! which do not.
3
4use rustc_abi as abi;
5use rustc_data_structures::graph::dominators::Dominators;
6use rustc_index::bit_set::DenseBitSet;
7use rustc_index::{IndexSlice, IndexVec};
8use rustc_middle::mir::visit::{MutatingUseContext, NonMutatingUseContext, PlaceContext, Visitor};
9use rustc_middle::mir::{self, DefLocation, Location, TerminatorKind, traversal};
10use rustc_middle::ty::layout::LayoutOf;
11use rustc_middle::{bug, span_bug};
12use tracing::debug;
13
14use super::FunctionCx;
15use crate::traits::*;
16
17pub(crate) fn non_ssa_locals<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
18    fx: &FunctionCx<'a, 'tcx, Bx>,
19    traversal_order: &[mir::BasicBlock],
20) -> DenseBitSet<mir::Local> {
21    let mir = fx.mir;
22    let dominators = mir.basic_blocks.dominators();
23    let locals = mir
24        .local_decls
25        .iter()
26        .map(|decl| {
27            let ty = fx.monomorphize(decl.ty);
28            let layout = fx.cx.spanned_layout_of(ty, decl.source_info.span);
29            if layout.is_zst() { LocalKind::ZST } else { LocalKind::Unused }
30        })
31        .collect();
32
33    let mut analyzer = LocalAnalyzer { fx, dominators, locals };
34
35    // Arguments get assigned to by means of the function being called
36    for arg in mir.args_iter() {
37        analyzer.define(arg, DefLocation::Argument);
38    }
39
40    // If there exists a local definition that dominates all uses of that local,
41    // the definition should be visited first. Traverse blocks in an order that
42    // is a topological sort of dominance partial order.
43    for bb in traversal_order.iter().copied() {
44        let data = &mir.basic_blocks[bb];
45        analyzer.visit_basic_block_data(bb, data);
46    }
47
48    let mut non_ssa_locals = DenseBitSet::new_empty(analyzer.locals.len());
49    for (local, kind) in analyzer.locals.iter_enumerated() {
50        if matches!(kind, LocalKind::Memory) {
51            non_ssa_locals.insert(local);
52        }
53    }
54
55    non_ssa_locals
56}
57
/// Classification of a MIR local, refined as the analysis visits definitions
/// and uses.
#[derive(Copy, Clone, PartialEq, Eq)]
enum LocalKind {
    /// A zero-sized local; it needs no storage and never forces an alloca.
    ZST,
    /// A local that requires an alloca.
    Memory,
    /// A scalar or a scalar pair local that is neither defined nor used.
    Unused,
    /// A scalar or a scalar pair local with a single definition that dominates all uses.
    SSA(DefLocation),
}
68
/// MIR visitor that classifies every local into a [`LocalKind`].
struct LocalAnalyzer<'a, 'b, 'tcx, Bx: BuilderMethods<'b, 'tcx>> {
    fx: &'a FunctionCx<'b, 'tcx, Bx>,
    /// Dominator tree of the body, used to check that a definition dominates
    /// each use of the corresponding local.
    dominators: &'a Dominators<mir::BasicBlock>,
    /// Current classification of every local; refined during the visit.
    locals: IndexVec<mir::Local, LocalKind>,
}
74
impl<'a, 'b, 'tcx, Bx: BuilderMethods<'b, 'tcx>> LocalAnalyzer<'a, 'b, 'tcx, Bx> {
    /// Records a definition of `local` at `location` and refines its
    /// classification:
    /// - `ZST` and `Memory` locals are unaffected;
    /// - the first definition of an `Unused` local makes it `SSA` if the
    ///   backend represents its layout as an immediate or a scalar pair,
    ///   otherwise `Memory`;
    /// - a second definition demotes an `SSA` local to `Memory`, since a
    ///   single SSA value cannot represent multiple assignments.
    fn define(&mut self, local: mir::Local, location: DefLocation) {
        let fx = self.fx;
        let kind = &mut self.locals[local];
        let decl = &fx.mir.local_decls[local];
        match *kind {
            LocalKind::ZST => {}
            LocalKind::Memory => {}
            LocalKind::Unused => {
                let ty = fx.monomorphize(decl.ty);
                let layout = fx.cx.spanned_layout_of(ty, decl.source_info.span);
                *kind =
                    if fx.cx.is_backend_immediate(layout) || fx.cx.is_backend_scalar_pair(layout) {
                        LocalKind::SSA(location)
                    } else {
                        LocalKind::Memory
                    };
            }
            // Second definition seen: the local can no longer be SSA.
            LocalKind::SSA(_) => *kind = LocalKind::Memory,
        }
    }

    /// Classifies a use of `place_ref` in `context`: inspects the projections
    /// to decide whether the base local can remain SSA, then forwards an
    /// appropriate use of the base local to `visit_local`.
    fn process_place(
        &mut self,
        place_ref: &mir::PlaceRef<'tcx>,
        context: PlaceContext,
        location: Location,
    ) {
        if !place_ref.projection.is_empty() {
            const COPY_CONTEXT: PlaceContext =
                PlaceContext::NonMutatingUse(NonMutatingUseContext::Copy);

            // `PlaceElem::Index` is the only variant that can mention other `Local`s,
            // so check for those up-front before any potential short-circuits.
            for elem in place_ref.projection {
                if let mir::PlaceElem::Index(index_local) = *elem {
                    self.visit_local(index_local, COPY_CONTEXT, location);
                }
            }

            // If our local is already memory, nothing can make it *more* memory
            // so we don't need to bother checking the projections further.
            if self.locals[place_ref.local] == LocalKind::Memory {
                return;
            }

            if place_ref.is_indirect_first_projection() {
                // If this starts with a `Deref`, we only need to record a read of the
                // pointer being dereferenced, as all the subsequent projections are
                // working on a place which is always supported. (And because we're
                // looking at codegen MIR, it can only happen as the first projection.)
                self.visit_local(place_ref.local, COPY_CONTEXT, location);
                return;
            }

            if context.is_mutating_use() {
                // If it's a mutating use it doesn't matter what the projections are,
                // if there are *any* then we need a place to write. (For example,
                // `_1 = Foo()` works in SSA but `_2.0 = Foo()` does not.)
                let mut_projection = PlaceContext::MutatingUse(MutatingUseContext::Projection);
                self.visit_local(place_ref.local, mut_projection, location);
                return;
            }

            // Scan through to ensure the only projections are those which
            // `FunctionCx::maybe_codegen_consume_direct` can handle.
            let base_ty = self.fx.monomorphized_place_ty(mir::PlaceRef::from(place_ref.local));
            let mut layout = self.fx.cx.layout_of(base_ty);
            for elem in place_ref.projection {
                layout = match *elem {
                    mir::PlaceElem::Field(fidx, ..) => layout.field(self.fx.cx, fidx.as_usize()),
                    // A downcast to the variant a single-variant layout already
                    // has is a no-op and thus supported.
                    mir::PlaceElem::Downcast(_, vidx)
                        if let abi::Variants::Single { index: single_variant } =
                            layout.variants
                            && vidx == single_variant =>
                    {
                        layout.for_variant(self.fx.cx, vidx)
                    }
                    mir::PlaceElem::Subtype(subtype_ty) => {
                        let subtype_ty = self.fx.monomorphize(subtype_ty);
                        self.fx.cx.layout_of(subtype_ty)
                    }
                    // Any other projection is unsupported for direct consumption,
                    // so the base local must live in memory.
                    _ => {
                        self.locals[place_ref.local] = LocalKind::Memory;
                        return;
                    }
                }
            }
            debug_assert!(
                !self.fx.cx.is_backend_ref(layout),
                "Post-projection {place_ref:?} layout should be non-Ref, but it's {layout:?}",
            );
        }

        // Even with supported projections, we still need to have `visit_local`
        // check for things that can't be done in SSA (like `SharedBorrow`).
        self.visit_local(place_ref.local, context, location);
    }
}
174
impl<'a, 'b, 'tcx, Bx: BuilderMethods<'b, 'tcx>> Visitor<'tcx> for LocalAnalyzer<'a, 'b, 'tcx, Bx> {
    /// An assignment directly to a local counts as a definition of that local;
    /// any other destination is a mutating (`Store`) use of the place.
    fn visit_assign(
        &mut self,
        place: &mir::Place<'tcx>,
        rvalue: &mir::Rvalue<'tcx>,
        location: Location,
    ) {
        debug!("visit_assign(place={:?}, rvalue={:?})", place, rvalue);

        if let Some(local) = place.as_local() {
            self.define(local, DefLocation::Assignment(location));
        } else {
            self.visit_place(place, PlaceContext::MutatingUse(MutatingUseContext::Store), location);
        }

        self.visit_rvalue(rvalue, location);
    }

    fn visit_place(&mut self, place: &mir::Place<'tcx>, context: PlaceContext, location: Location) {
        debug!("visit_place(place={:?}, context={:?})", place, context);
        self.process_place(&place.as_ref(), context, location);
    }

    /// Updates the classification of `local` according to the kind of use.
    fn visit_local(&mut self, local: mir::Local, context: PlaceContext, location: Location) {
        match context {
            // The local is the destination of a `Call` terminator: that is a
            // definition at the call's return edge.
            PlaceContext::MutatingUse(MutatingUseContext::Call) => {
                let call = location.block;
                let TerminatorKind::Call { target, .. } =
                    self.fx.mir.basic_blocks[call].terminator().kind
                else {
                    bug!()
                };
                self.define(local, DefLocation::CallReturn { call, target });
            }

            // Non-uses and no-op uses don't affect the classification.
            PlaceContext::NonUse(_)
            | PlaceContext::NonMutatingUse(NonMutatingUseContext::PlaceMention)
            | PlaceContext::MutatingUse(MutatingUseContext::Retag) => {}

            PlaceContext::NonMutatingUse(
                NonMutatingUseContext::Copy
                | NonMutatingUseContext::Move
                // Inspect covers things like `PtrMetadata` and `Discriminant`
                // which we can treat similar to `Copy` use for the purpose of
                // whether we can use SSA variables for things.
                | NonMutatingUseContext::Inspect,
            ) => match &mut self.locals[local] {
                LocalKind::ZST => {}
                LocalKind::Memory => {}
                LocalKind::SSA(def) if def.dominates(location, self.dominators) => {}
                // Reads from uninitialized variables (e.g., in dead code, after
                // optimizations) require locals to be in (uninitialized) memory.
                // N.B., there can be uninitialized reads of a local visited after
                // an assignment to that local, if they happen on disjoint paths.
                kind @ (LocalKind::Unused | LocalKind::SSA(_)) => {
                    *kind = LocalKind::Memory;
                }
            },

            // Any kind of borrow, or a write through a projection, needs a
            // stable location in memory.
            PlaceContext::MutatingUse(
                MutatingUseContext::Store
                | MutatingUseContext::Deinit
                | MutatingUseContext::SetDiscriminant
                | MutatingUseContext::AsmOutput
                | MutatingUseContext::Borrow
                | MutatingUseContext::RawBorrow
                | MutatingUseContext::Projection,
            )
            | PlaceContext::NonMutatingUse(
                NonMutatingUseContext::SharedBorrow
                | NonMutatingUseContext::FakeBorrow
                | NonMutatingUseContext::RawBorrow
                | NonMutatingUseContext::Projection,
            ) => {
                self.locals[local] = LocalKind::Memory;
            }

            PlaceContext::MutatingUse(MutatingUseContext::Drop) => {
                let kind = &mut self.locals[local];
                if *kind != LocalKind::Memory {
                    let ty = self.fx.mir.local_decls[local].ty;
                    let ty = self.fx.monomorphize(ty);
                    if self.fx.cx.type_needs_drop(ty) {
                        // Only need the place if we're actually dropping it.
                        *kind = LocalKind::Memory;
                    }
                }
            }

            // Asserted unreachable in the MIR seen by codegen.
            PlaceContext::MutatingUse(MutatingUseContext::Yield) => bug!(),
        }
    }
}
268
/// Classification of a basic block with respect to funclet-based unwinding
/// (see [`cleanup_kinds`]).
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub(crate) enum CleanupKind {
    /// A regular block that is not on any cleanup path.
    NotCleanup,
    /// The head of a funclet: the target of an `UnwindAction::Cleanup` edge,
    /// or a block promoted because multiple funclets branch into it.
    Funclet,
    /// A cleanup block that belongs to the funclet headed by `funclet`.
    Internal { funclet: mir::BasicBlock },
}
275
276impl CleanupKind {
277    pub(crate) fn funclet_bb(self, for_bb: mir::BasicBlock) -> Option<mir::BasicBlock> {
278        match self {
279            CleanupKind::NotCleanup => None,
280            CleanupKind::Funclet => Some(for_bb),
281            CleanupKind::Internal { funclet } => Some(funclet),
282        }
283    }
284}
285
/// MSVC requires unwinding code to be split to a tree of *funclets*, where each funclet can only
/// branch to itself or to its parent. Luckily, the code we generate matches this pattern.
/// Recover that structure in an analyze pass.
pub(crate) fn cleanup_kinds(mir: &mir::Body<'_>) -> IndexVec<mir::BasicBlock, CleanupKind> {
    /// Marks the target of every `UnwindAction::Cleanup` edge as a funclet head.
    fn discover_masters<'tcx>(
        result: &mut IndexSlice<mir::BasicBlock, CleanupKind>,
        mir: &mir::Body<'tcx>,
    ) {
        for (bb, data) in mir.basic_blocks.iter_enumerated() {
            match data.terminator().kind {
                // Terminators without an unwind edge register nothing.
                TerminatorKind::Goto { .. }
                | TerminatorKind::UnwindResume
                | TerminatorKind::UnwindTerminate(_)
                | TerminatorKind::Return
                | TerminatorKind::TailCall { .. }
                | TerminatorKind::CoroutineDrop
                | TerminatorKind::Unreachable
                | TerminatorKind::SwitchInt { .. }
                | TerminatorKind::Yield { .. }
                | TerminatorKind::FalseEdge { .. }
                | TerminatorKind::FalseUnwind { .. } => { /* nothing to do */ }
                TerminatorKind::Call { unwind, .. }
                | TerminatorKind::InlineAsm { unwind, .. }
                | TerminatorKind::Assert { unwind, .. }
                | TerminatorKind::Drop { unwind, .. } => {
                    if let mir::UnwindAction::Cleanup(unwind) = unwind {
                        debug!(
                            "cleanup_kinds: {:?}/{:?} registering {:?} as funclet",
                            bb, data, unwind
                        );
                        result[unwind] = CleanupKind::Funclet;
                    }
                }
            }
        }
    }

    /// Propagates funclet membership along CFG edges, promoting a block to its
    /// own funclet whenever two different funclets branch into it.
    fn propagate<'tcx>(
        result: &mut IndexSlice<mir::BasicBlock, CleanupKind>,
        mir: &mir::Body<'tcx>,
    ) {
        // For each funclet head, the unique successor funclet it may exit to.
        let mut funclet_succs = IndexVec::from_elem(None, &mir.basic_blocks);

        // Records `succ` as the successor of `funclet`; two distinct
        // successors would violate the funclet tree shape and are a bug.
        let mut set_successor = |funclet: mir::BasicBlock, succ| match funclet_succs[funclet] {
            ref mut s @ None => {
                debug!("set_successor: updating successor of {:?} to {:?}", funclet, succ);
                *s = Some(succ);
            }
            Some(s) => {
                if s != succ {
                    span_bug!(
                        mir.span,
                        "funclet {:?} has 2 parents - {:?} and {:?}",
                        funclet,
                        s,
                        succ
                    );
                }
            }
        };

        for (bb, data) in traversal::reverse_postorder(mir) {
            let funclet = match result[bb] {
                CleanupKind::NotCleanup => continue,
                CleanupKind::Funclet => bb,
                CleanupKind::Internal { funclet } => funclet,
            };

            debug!(
                "cleanup_kinds: {:?}/{:?}/{:?} propagating funclet {:?}",
                bb, data, result[bb], funclet
            );

            for succ in data.terminator().successors() {
                let kind = result[succ];
                debug!("cleanup_kinds: propagating {:?} to {:?}/{:?}", funclet, succ, kind);
                match kind {
                    CleanupKind::NotCleanup => {
                        result[succ] = CleanupKind::Internal { funclet };
                    }
                    CleanupKind::Funclet => {
                        if funclet != succ {
                            set_successor(funclet, succ);
                        }
                    }
                    CleanupKind::Internal { funclet: succ_funclet } => {
                        if funclet != succ_funclet {
                            // `succ` has 2 different funclet going into it, so it must
                            // be a funclet by itself.

                            debug!(
                                "promoting {:?} to a funclet and updating {:?}",
                                succ, succ_funclet
                            );
                            result[succ] = CleanupKind::Funclet;
                            set_successor(succ_funclet, succ);
                            set_successor(funclet, succ);
                        }
                    }
                }
            }
        }
    }

    let mut result = IndexVec::from_elem(CleanupKind::NotCleanup, &mir.basic_blocks);

    discover_masters(&mut result, mir);
    propagate(&mut result, mir);
    debug!("cleanup_kinds: result={:?}", result);
    result
}