rustc_mir_transform/
elaborate_drop.rs

1use std::{fmt, iter, mem};
2
3use rustc_abi::{FIRST_VARIANT, FieldIdx, VariantIdx};
4use rustc_hir::lang_items::LangItem;
5use rustc_index::Idx;
6use rustc_middle::mir::*;
7use rustc_middle::ty::adjustment::PointerCoercion;
8use rustc_middle::ty::util::IntTypeExt;
9use rustc_middle::ty::{self, GenericArg, GenericArgsRef, Ty, TyCtxt};
10use rustc_middle::{bug, span_bug, traits};
11use rustc_span::DUMMY_SP;
12use rustc_span::source_map::{Spanned, dummy_spanned};
13use tracing::{debug, instrument};
14
15use crate::patch::MirPatch;
16
/// Describes how/if a value should be dropped.
///
/// Returned by [`DropElaborator::drop_style`] to decide what code (if any)
/// `elaborate_drop` generates for a drop terminator.
#[derive(Debug)]
pub(crate) enum DropStyle {
    /// The value is already dead at the drop location, no drop will be executed.
    Dead,

    /// The value is known to always be initialized at the drop location, drop will always be
    /// executed.
    Static,

    /// Whether the value needs to be dropped depends on its drop flag.
    Conditional,

    /// An "open" drop is one where only the fields of a value are dropped.
    ///
    /// For example, this happens when moving out of a struct field: The rest of the struct will be
    /// dropped in such an "open" drop. It is also used to generate drop glue for the individual
    /// components of a value, for example for dropping array elements.
    Open,
}
37
/// Which drop flags to affect/check with an operation.
///
/// Used when querying [`DropElaborator::drop_style`] and when clearing flags
/// via [`DropElaborator::clear_drop_flag`].
#[derive(Debug)]
pub(crate) enum DropFlagMode {
    /// Only affect the top-level drop flag, not that of any contained fields.
    Shallow,
    /// Affect all nested drop flags in addition to the top-level one.
    Deep,
}
46
/// Describes if unwinding is necessary and where to unwind to if a panic occurs.
#[derive(Copy, Clone, Debug)]
pub(crate) enum Unwind {
    /// Unwind to this block.
    To(BasicBlock),
    /// Already in an unwind path, any panic will cause an abort.
    /// (Converted to `UnwindAction::Terminate` by `into_action`.)
    InCleanup,
}
55
56impl Unwind {
57    fn is_cleanup(self) -> bool {
58        match self {
59            Unwind::To(..) => false,
60            Unwind::InCleanup => true,
61        }
62    }
63
64    fn into_action(self) -> UnwindAction {
65        match self {
66            Unwind::To(bb) => UnwindAction::Cleanup(bb),
67            Unwind::InCleanup => UnwindAction::Terminate(UnwindTerminateReason::InCleanup),
68        }
69    }
70
71    fn map<F>(self, f: F) -> Self
72    where
73        F: FnOnce(BasicBlock) -> BasicBlock,
74    {
75        match self {
76            Unwind::To(bb) => Unwind::To(f(bb)),
77            Unwind::InCleanup => Unwind::InCleanup,
78        }
79    }
80}
81
/// Interface used by drop elaboration to query initializedness/move paths and
/// to record its MIR changes through a [`MirPatch`].
pub(crate) trait DropElaborator<'a, 'tcx>: fmt::Debug {
    /// The type representing paths that can be moved out of.
    ///
    /// Users can move out of individual fields of a struct, such as `a.b.c`. This type is used to
    /// represent such move paths. Sometimes tracking individual move paths is not necessary, in
    /// which case this may be set to (for example) `()`.
    type Path: Copy + fmt::Debug;

    // Accessors

    /// Read-only access to the pending MIR changes.
    fn patch_ref(&self) -> &MirPatch<'tcx>;
    /// Mutable access to the pending MIR changes, for recording new ones.
    fn patch(&mut self) -> &mut MirPatch<'tcx>;
    /// The (unpatched) body being elaborated.
    fn body(&self) -> &'a Body<'tcx>;
    fn tcx(&self) -> TyCtxt<'tcx>;
    fn typing_env(&self) -> ty::TypingEnv<'tcx>;
    /// Whether async drop expansion may be generated for this body.
    fn allow_async_drops(&self) -> bool;

    /// Returns the location of the terminator of `bb`.
    fn terminator_loc(&self, bb: BasicBlock) -> Location;

    // Drop logic

    /// Returns how `path` should be dropped, given `mode`.
    fn drop_style(&self, path: Self::Path, mode: DropFlagMode) -> DropStyle;

    /// Returns the drop flag of `path` as a MIR `Operand` (or `None` if `path` has no drop flag).
    fn get_drop_flag(&mut self, path: Self::Path) -> Option<Operand<'tcx>>;

    /// Modifies the MIR patch so that the drop flag of `path` (if any) is cleared at `location`.
    ///
    /// If `mode` is deep, drop flags of all child paths should also be cleared by inserting
    /// additional statements.
    fn clear_drop_flag(&mut self, location: Location, path: Self::Path, mode: DropFlagMode);

    // Subpaths

    /// Returns the subpath of a field of `path` (or `None` if there is no dedicated subpath).
    ///
    /// If this returns `None`, `field` will not get a dedicated drop flag.
    fn field_subpath(&self, path: Self::Path, field: FieldIdx) -> Option<Self::Path>;

    /// Returns the subpath of a dereference of `path` (or `None` if there is no dedicated subpath).
    ///
    /// If this returns `None`, `*path` will not get a dedicated drop flag.
    ///
    /// This is only relevant for `Box<T>`, where the contained `T` can be moved out of the box.
    fn deref_subpath(&self, path: Self::Path) -> Option<Self::Path>;

    /// Returns the subpath of downcasting `path` to one of its variants.
    ///
    /// If this returns `None`, the downcast of `path` will not get a dedicated drop flag.
    fn downcast_subpath(&self, path: Self::Path, variant: VariantIdx) -> Option<Self::Path>;

    /// Returns the subpath of indexing a fixed-size array `path`.
    ///
    /// If this returns `None`, elements of `path` will not get a dedicated drop flag.
    ///
    /// This is only relevant for array patterns, which can move out of individual array elements.
    fn array_subpath(&self, path: Self::Path, index: u64, size: u64) -> Option<Self::Path>;
}
141
/// The state for a single drop being elaborated: dropping `place` (move path
/// `path`), continuing to `succ` on success and to `unwind` on panic.
#[derive(Debug)]
struct DropCtxt<'a, 'b, 'tcx, D>
where
    D: DropElaborator<'b, 'tcx>,
{
    elaborator: &'a mut D,

    // Source info attached to all statements/terminators emitted here.
    source_info: SourceInfo,

    // The place being dropped.
    place: Place<'tcx>,
    // Move path of `place`, used to query drop styles and drop flags.
    path: D::Path,
    // Block to branch to once the drop completes.
    succ: BasicBlock,
    // Where to unwind to if dropping panics.
    unwind: Unwind,
    // For possibly-async drops in coroutines: block to branch to if the
    // containing coroutine is dropped while this drop is in progress.
    dropline: Option<BasicBlock>,
}
157
158/// "Elaborates" a drop of `place`/`path` and patches `bb`'s terminator to execute it.
159///
160/// The passed `elaborator` is used to determine what should happen at the drop terminator. It
161/// decides whether the drop can be statically determined or whether it needs a dynamic drop flag,
162/// and whether the drop is "open", ie. should be expanded to drop all subfields of the dropped
163/// value.
164///
165/// When this returns, the MIR patch in the `elaborator` contains the necessary changes.
166pub(crate) fn elaborate_drop<'b, 'tcx, D>(
167    elaborator: &mut D,
168    source_info: SourceInfo,
169    place: Place<'tcx>,
170    path: D::Path,
171    succ: BasicBlock,
172    unwind: Unwind,
173    bb: BasicBlock,
174    dropline: Option<BasicBlock>,
175) where
176    D: DropElaborator<'b, 'tcx>,
177    'tcx: 'b,
178{
179    DropCtxt { elaborator, source_info, place, path, succ, unwind, dropline }.elaborate_drop(bb)
180}
181
182impl<'a, 'b, 'tcx, D> DropCtxt<'a, 'b, 'tcx, D>
183where
184    D: DropElaborator<'b, 'tcx>,
185    'tcx: 'b,
186{
187    #[instrument(level = "trace", skip(self), ret)]
188    fn place_ty(&self, place: Place<'tcx>) -> Ty<'tcx> {
189        if place.local < self.elaborator.body().local_decls.next_index() {
190            place.ty(self.elaborator.body(), self.tcx()).ty
191        } else {
192            // We don't have a slice with all the locals, since some are in the patch.
193            PlaceTy::from_ty(self.elaborator.patch_ref().local_ty(place.local))
194                .multi_projection_ty(self.elaborator.tcx(), place.projection)
195                .ty
196        }
197    }
198
    /// Shorthand for the elaborator's `TyCtxt`.
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.elaborator.tcx()
    }
202
    // Generates three blocks:
    // * #1:pin_obj_bb:   call Pin<ObjTy>::new_unchecked(&mut obj)
    // * #2:call_drop_bb: fut = call obj.<AsyncDrop::drop>() OR call async_drop_in_place<T>(obj)
    // * #3:drop_term_bb: drop (obj, fut, ...)
    // We keep async drop unexpanded to poll-loop here, to expand it later, at StateTransform -
    //   into states expand.
    // call_destructor_only - to call only AsyncDrop::drop, not full async_drop_in_place glue
    //
    // Returns the entry block (#1). If `bb` is `Some`, that block is reused as
    // the entry and only its terminator is patched; otherwise a new block is
    // created.
    fn build_async_drop(
        &mut self,
        place: Place<'tcx>,
        drop_ty: Ty<'tcx>,
        bb: Option<BasicBlock>,
        succ: BasicBlock,
        unwind: Unwind,
        dropline: Option<BasicBlock>,
        call_destructor_only: bool,
    ) -> BasicBlock {
        let tcx = self.tcx();
        let span = self.source_info.span;

        let pin_obj_bb = bb.unwrap_or_else(|| {
            self.elaborator.patch().new_block(BasicBlockData {
                statements: vec![],
                terminator: Some(Terminator {
                    // Temporary terminator, will be replaced by patch
                    source_info: self.source_info,
                    kind: TerminatorKind::Return,
                }),
                is_cleanup: false,
            })
        });

        // Resolve the function to call in #2 and the type of the future it
        // returns.
        let (fut_ty, drop_fn_def_id, trait_args) = if call_destructor_only {
            // Resolving obj.<AsyncDrop::drop>()
            let trait_ref = ty::TraitRef::new(
                tcx,
                tcx.require_lang_item(LangItem::AsyncDrop, Some(span)),
                [drop_ty],
            );
            let (drop_trait, trait_args) = match tcx.codegen_select_candidate(
                ty::TypingEnv::fully_monomorphized().as_query_input(trait_ref),
            ) {
                Ok(traits::ImplSource::UserDefined(traits::ImplSourceUserDefinedData {
                    impl_def_id,
                    args,
                    ..
                })) => (*impl_def_id, *args),
                impl_source => {
                    span_bug!(span, "invalid `AsyncDrop` impl_source: {:?}", impl_source);
                }
            };
            // impl_item_refs may be empty if drop fn is not implemented in 'impl AsyncDrop for ...'
            // (#140974).
            // Such code will report error, so just generate sync drop here and return
            let Some(drop_fn_def_id) =
                tcx.associated_item_def_ids(drop_trait).into_iter().nth(0).copied()
            else {
                tcx.dcx().span_delayed_bug(
                    self.elaborator.body().span,
                    "AsyncDrop type without correct `async fn drop(...)`.",
                );
                // Fall back to an ordinary synchronous drop terminator.
                self.elaborator.patch().patch_terminator(
                    pin_obj_bb,
                    TerminatorKind::Drop {
                        place,
                        target: succ,
                        unwind: unwind.into_action(),
                        replace: false,
                        drop: None,
                        async_fut: None,
                    },
                );
                return pin_obj_bb;
            };
            let drop_fn = Ty::new_fn_def(tcx, drop_fn_def_id, trait_args);
            let sig = drop_fn.fn_sig(tcx);
            let sig = tcx.instantiate_bound_regions_with_erased(sig);
            (sig.output(), drop_fn_def_id, trait_args)
        } else {
            // Resolving async_drop_in_place<T> function for drop_ty
            let drop_fn_def_id = tcx.require_lang_item(LangItem::AsyncDropInPlace, Some(span));
            let trait_args = tcx.mk_args(&[drop_ty.into()]);
            let sig = tcx.fn_sig(drop_fn_def_id).instantiate(tcx, trait_args);
            let sig = tcx.instantiate_bound_regions_with_erased(sig);
            (sig.output(), drop_fn_def_id, trait_args)
        };

        // Temporary holding the future returned by the call in #2.
        let fut = Place::from(self.new_temp(fut_ty));

        // #1:pin_obj_bb >>> obj_ref = &mut obj
        let obj_ref_ty = Ty::new_mut_ref(tcx, tcx.lifetimes.re_erased, drop_ty);
        let obj_ref_place = Place::from(self.new_temp(obj_ref_ty));

        let term_loc = self.elaborator.terminator_loc(pin_obj_bb);
        self.elaborator.patch().add_assign(
            term_loc,
            obj_ref_place,
            Rvalue::Ref(
                tcx.lifetimes.re_erased,
                BorrowKind::Mut { kind: MutBorrowKind::Default },
                place,
            ),
        );

        // pin_obj_place preparation
        let pin_obj_new_unchecked_fn = Ty::new_fn_def(
            tcx,
            tcx.require_lang_item(LangItem::PinNewUnchecked, Some(span)),
            [GenericArg::from(obj_ref_ty)],
        );
        let pin_obj_ty = pin_obj_new_unchecked_fn.fn_sig(tcx).output().no_bound_vars().unwrap();
        let pin_obj_place = Place::from(self.new_temp(pin_obj_ty));
        let pin_obj_new_unchecked_fn = Operand::Constant(Box::new(ConstOperand {
            span,
            user_ty: None,
            const_: Const::zero_sized(pin_obj_new_unchecked_fn),
        }));

        // #3:drop_term_bb
        let drop_term_bb = self.new_block(
            unwind,
            TerminatorKind::Drop {
                place,
                target: succ,
                unwind: unwind.into_action(),
                replace: false,
                drop: dropline,
                async_fut: Some(fut.local),
            },
        );

        // #2:call_drop_bb
        let mut call_statements = Vec::new();
        let drop_arg = if call_destructor_only {
            // `AsyncDrop::drop` takes the pinned object directly.
            pin_obj_place
        } else {
            // `async_drop_in_place<T>` takes `*mut T`: unwrap the first field
            // of the `Pin` and take the raw address of its pointee.
            let ty::Adt(adt_def, adt_args) = pin_obj_ty.kind() else {
                bug!();
            };
            let obj_ptr_ty = Ty::new_mut_ptr(tcx, drop_ty);
            let unwrap_ty = adt_def.non_enum_variant().fields[FieldIdx::ZERO].ty(tcx, adt_args);
            let obj_ref_place = Place::from(self.new_temp(unwrap_ty));
            call_statements.push(self.assign(
                obj_ref_place,
                Rvalue::Use(Operand::Copy(tcx.mk_place_field(
                    pin_obj_place,
                    FieldIdx::ZERO,
                    unwrap_ty,
                ))),
            ));

            let obj_ptr_place = Place::from(self.new_temp(obj_ptr_ty));

            let addr = Rvalue::RawPtr(RawPtrKind::Mut, tcx.mk_place_deref(obj_ref_place));
            call_statements.push(self.assign(obj_ptr_place, addr));
            obj_ptr_place
        };
        call_statements.push(Statement {
            source_info: self.source_info,
            kind: StatementKind::StorageLive(fut.local),
        });

        let call_drop_bb = self.new_block_with_statements(
            unwind,
            call_statements,
            TerminatorKind::Call {
                func: Operand::function_handle(tcx, drop_fn_def_id, trait_args, span),
                args: [Spanned { node: Operand::Move(drop_arg), span: DUMMY_SP }].into(),
                destination: fut,
                target: Some(drop_term_bb),
                unwind: unwind.into_action(),
                call_source: CallSource::Misc,
                fn_span: self.source_info.span,
            },
        );

        // StorageDead(fut) in self.succ block (at the begin)
        self.elaborator.patch().add_statement(
            Location { block: self.succ, statement_index: 0 },
            StatementKind::StorageDead(fut.local),
        );

        // #1:pin_obj_bb >>> call Pin<ObjTy>::new_unchecked(&mut obj)
        self.elaborator.patch().patch_terminator(
            pin_obj_bb,
            TerminatorKind::Call {
                func: pin_obj_new_unchecked_fn,
                args: [dummy_spanned(Operand::Move(obj_ref_place))].into(),
                destination: pin_obj_place,
                target: Some(call_drop_bb),
                unwind: unwind.into_action(),
                call_source: CallSource::Misc,
                fn_span: span,
            },
        );
        pin_obj_bb
    }
400
401    fn build_drop(&mut self, bb: BasicBlock) {
402        let drop_ty = self.place_ty(self.place);
403        if self.tcx().features().async_drop()
404            && self.elaborator.body().coroutine.is_some()
405            && self.elaborator.allow_async_drops()
406            && !self.elaborator.patch_ref().block(self.elaborator.body(), bb).is_cleanup
407            && drop_ty.needs_async_drop(self.tcx(), self.elaborator.typing_env())
408        {
409            self.build_async_drop(
410                self.place,
411                drop_ty,
412                Some(bb),
413                self.succ,
414                self.unwind,
415                self.dropline,
416                false,
417            );
418        } else {
419            self.elaborator.patch().patch_terminator(
420                bb,
421                TerminatorKind::Drop {
422                    place: self.place,
423                    target: self.succ,
424                    unwind: self.unwind.into_action(),
425                    replace: false,
426                    drop: None,
427                    async_fut: None,
428                },
429            );
430        }
431    }
432
433    /// This elaborates a single drop instruction, located at `bb`, and
434    /// patches over it.
435    ///
436    /// The elaborated drop checks the drop flags to only drop what
437    /// is initialized.
438    ///
439    /// In addition, the relevant drop flags also need to be cleared
440    /// to avoid double-drops. However, in the middle of a complex
441    /// drop, one must avoid clearing some of the flags before they
442    /// are read, as that would cause a memory leak.
443    ///
444    /// In particular, when dropping an ADT, multiple fields may be
445    /// joined together under the `rest` subpath. They are all controlled
446    /// by the primary drop flag, but only the last rest-field dropped
447    /// should clear it (and it must also not clear anything else).
448    //
449    // FIXME: I think we should just control the flags externally,
450    // and then we do not need this machinery.
451    #[instrument(level = "debug")]
452    fn elaborate_drop(&mut self, bb: BasicBlock) {
453        match self.elaborator.drop_style(self.path, DropFlagMode::Deep) {
454            DropStyle::Dead => {
455                self.elaborator
456                    .patch()
457                    .patch_terminator(bb, TerminatorKind::Goto { target: self.succ });
458            }
459            DropStyle::Static => {
460                self.build_drop(bb);
461            }
462            DropStyle::Conditional => {
463                let drop_bb = self.complete_drop(self.succ, self.unwind);
464                self.elaborator
465                    .patch()
466                    .patch_terminator(bb, TerminatorKind::Goto { target: drop_bb });
467            }
468            DropStyle::Open => {
469                let drop_bb = self.open_drop();
470                self.elaborator
471                    .patch()
472                    .patch_terminator(bb, TerminatorKind::Goto { target: drop_bb });
473            }
474        }
475    }
476
    /// Returns the place and move path for each field of `variant`,
    /// (the move path is `None` if the field is a rest field).
    fn move_paths_for_fields(
        &self,
        base_place: Place<'tcx>,
        variant_path: D::Path,
        variant: &'tcx ty::VariantDef,
        args: GenericArgsRef<'tcx>,
    ) -> Vec<(Place<'tcx>, Option<D::Path>)> {
        variant
            .fields
            .iter_enumerated()
            .map(|(field_idx, field)| {
                let subpath = self.elaborator.field_subpath(variant_path, field_idx);
                let tcx = self.tcx();

                // Drop elaboration runs post-analysis, so normalization is
                // expected to succeed here.
                assert_eq!(self.elaborator.typing_env().typing_mode, ty::TypingMode::PostAnalysis);
                let field_ty = match tcx.try_normalize_erasing_regions(
                    self.elaborator.typing_env(),
                    field.ty(tcx, args),
                ) {
                    Ok(t) => t,
                    // On failure, substitute an error type so compilation can
                    // continue; the delayed bug guarantees an error is
                    // eventually emitted.
                    Err(_) => Ty::new_error(
                        self.tcx(),
                        self.tcx().dcx().span_delayed_bug(
                            self.elaborator.body().span,
                            "Error normalizing in drop elaboration.",
                        ),
                    ),
                };

                (tcx.mk_place_field(base_place, field_idx, field_ty), subpath)
            })
            .collect()
    }
512
513    fn drop_subpath(
514        &mut self,
515        place: Place<'tcx>,
516        path: Option<D::Path>,
517        succ: BasicBlock,
518        unwind: Unwind,
519        dropline: Option<BasicBlock>,
520    ) -> BasicBlock {
521        if let Some(path) = path {
522            debug!("drop_subpath: for std field {:?}", place);
523
524            DropCtxt {
525                elaborator: self.elaborator,
526                source_info: self.source_info,
527                path,
528                place,
529                succ,
530                unwind,
531                dropline,
532            }
533            .elaborated_drop_block()
534        } else {
535            debug!("drop_subpath: for rest field {:?}", place);
536
537            DropCtxt {
538                elaborator: self.elaborator,
539                source_info: self.source_info,
540                place,
541                succ,
542                unwind,
543                dropline,
544                // Using `self.path` here to condition the drop on
545                // our own drop flag.
546                path: self.path,
547            }
548            .complete_drop(succ, unwind)
549        }
550    }
551
552    /// Creates one-half of the drop ladder for a list of fields, and return
553    /// the list of steps in it in reverse order, with the first step
554    /// dropping 0 fields and so on.
555    ///
556    /// `unwind_ladder` is such a list of steps in reverse order,
557    /// which is called if the matching step of the drop glue panics.
558    ///
559    /// `dropline_ladder` is a similar list of steps in reverse order,
560    /// which is called if the matching step of the drop glue will contain async drop
561    /// (expanded later to Yield) and the containing coroutine will be dropped at this point.
562    fn drop_halfladder(
563        &mut self,
564        unwind_ladder: &[Unwind],
565        dropline_ladder: &[Option<BasicBlock>],
566        mut succ: BasicBlock,
567        fields: &[(Place<'tcx>, Option<D::Path>)],
568    ) -> Vec<BasicBlock> {
569        iter::once(succ)
570            .chain(itertools::izip!(fields.iter().rev(), unwind_ladder, dropline_ladder).map(
571                |(&(place, path), &unwind_succ, &dropline_to)| {
572                    succ = self.drop_subpath(place, path, succ, unwind_succ, dropline_to);
573                    succ
574                },
575            ))
576            .collect()
577    }
578
579    fn drop_ladder_bottom(&mut self) -> (BasicBlock, Unwind, Option<BasicBlock>) {
580        // Clear the "master" drop flag at the end. This is needed
581        // because the "master" drop protects the ADT's discriminant,
582        // which is invalidated after the ADT is dropped.
583        (
584            self.drop_flag_reset_block(DropFlagMode::Shallow, self.succ, self.unwind),
585            self.unwind,
586            self.dropline,
587        )
588    }
589
    /// Creates a full drop ladder, consisting of 2 connected half-drop-ladders
    ///
    /// For example, with 3 fields, the drop ladder is
    ///
    /// .d0:
    ///     ELAB(drop location.0 [target=.d1, unwind=.c1])
    /// .d1:
    ///     ELAB(drop location.1 [target=.d2, unwind=.c2])
    /// .d2:
    ///     ELAB(drop location.2 [target=`self.succ`, unwind=`self.unwind`])
    /// .c1:
    ///     ELAB(drop location.1 [target=.c2])
    /// .c2:
    ///     ELAB(drop location.2 [target=`self.unwind`])
    ///
    /// For possible-async drops in coroutines we also need dropline ladder
    /// .d0 (mainline):
    ///     ELAB(drop location.0 [target=.d1, unwind=.c1, drop=.e1])
    /// .d1 (mainline):
    ///     ELAB(drop location.1 [target=.d2, unwind=.c2, drop=.e2])
    /// .d2 (mainline):
    ///     ELAB(drop location.2 [target=`self.succ`, unwind=`self.unwind`, drop=`self.drop`])
    /// .c1 (unwind):
    ///     ELAB(drop location.1 [target=.c2])
    /// .c2 (unwind):
    ///     ELAB(drop location.2 [target=`self.unwind`])
    /// .e1 (dropline):
    ///     ELAB(drop location.1 [target=.e2, unwind=.c2])
    /// .e2 (dropline):
    ///     ELAB(drop location.2 [target=`self.drop`, unwind=`self.unwind`])
    ///
    /// NOTE: this does not clear the master drop flag, so you need
    /// to point succ/unwind on a `drop_ladder_bottom`.
    fn drop_ladder(
        &mut self,
        fields: Vec<(Place<'tcx>, Option<D::Path>)>,
        succ: BasicBlock,
        unwind: Unwind,
        dropline: Option<BasicBlock>,
    ) -> (BasicBlock, Unwind, Option<BasicBlock>) {
        debug!("drop_ladder({:?}, {:?})", self, fields);
        assert!(
            if unwind.is_cleanup() { dropline.is_none() } else { true },
            "Dropline is set for cleanup drop ladder"
        );

        // Fields that don't need dropping never appear in any ladder.
        let mut fields = fields;
        fields.retain(|&(place, _)| {
            self.place_ty(place).needs_drop(self.tcx(), self.elaborator.typing_env())
        });

        debug!("drop_ladder - fields needing drop: {:?}", fields);

        // Build the unwind half-ladder first (if unwinding is possible).
        // Within it, a further panic terminates and there is no dropline.
        let dropline_ladder: Vec<Option<BasicBlock>> = vec![None; fields.len() + 1];
        let unwind_ladder = vec![Unwind::InCleanup; fields.len() + 1];
        let unwind_ladder: Vec<_> = if let Unwind::To(succ) = unwind {
            let halfladder = self.drop_halfladder(&unwind_ladder, &dropline_ladder, succ, &fields);
            halfladder.into_iter().map(Unwind::To).collect()
        } else {
            unwind_ladder
        };
        // Then the dropline half-ladder (if any); its steps unwind into the
        // unwind ladder built above.
        let dropline_ladder: Vec<_> = if let Some(succ) = dropline {
            let halfladder = self.drop_halfladder(&unwind_ladder, &dropline_ladder, succ, &fields);
            halfladder.into_iter().map(Some).collect()
        } else {
            dropline_ladder
        };

        // Finally the mainline half-ladder, wired to both of the above.
        let normal_ladder = self.drop_halfladder(&unwind_ladder, &dropline_ladder, succ, &fields);

        (
            *normal_ladder.last().unwrap(),
            *unwind_ladder.last().unwrap(),
            *dropline_ladder.last().unwrap(),
        )
    }
666
667    fn open_drop_for_tuple(&mut self, tys: &[Ty<'tcx>]) -> BasicBlock {
668        debug!("open_drop_for_tuple({:?}, {:?})", self, tys);
669
670        let fields = tys
671            .iter()
672            .enumerate()
673            .map(|(i, &ty)| {
674                (
675                    self.tcx().mk_place_field(self.place, FieldIdx::new(i), ty),
676                    self.elaborator.field_subpath(self.path, FieldIdx::new(i)),
677                )
678            })
679            .collect();
680
681        let (succ, unwind, dropline) = self.drop_ladder_bottom();
682        self.drop_ladder(fields, succ, unwind, dropline).0
683    }
684
    /// Drops the T contained in a `Box<T>` if it has not been moved out of
    #[instrument(level = "debug", ret)]
    fn open_drop_for_box_contents(
        &mut self,
        adt: ty::AdtDef<'tcx>,
        args: GenericArgsRef<'tcx>,
        succ: BasicBlock,
        unwind: Unwind,
        dropline: Option<BasicBlock>,
    ) -> BasicBlock {
        // drop glue is sent straight to codegen
        // box cannot be directly dereferenced
        // Project through the box's first field, and that field's first field
        // (named here after `Unique`/`NonNull`), to reach the pointer data.
        let unique_ty = adt.non_enum_variant().fields[FieldIdx::ZERO].ty(self.tcx(), args);
        let unique_variant = unique_ty.ty_adt_def().unwrap().non_enum_variant();
        let nonnull_ty = unique_variant.fields[FieldIdx::ZERO].ty(self.tcx(), args);
        let ptr_ty = Ty::new_imm_ptr(self.tcx(), args[0].expect_ty());

        let unique_place = self.tcx().mk_place_field(self.place, FieldIdx::ZERO, unique_ty);
        let nonnull_place = self.tcx().mk_place_field(unique_place, FieldIdx::ZERO, nonnull_ty);

        // Raw pointer temporary through which the boxed contents are dropped.
        let ptr_local = self.new_temp(ptr_ty);

        let interior = self.tcx().mk_place_deref(Place::from(ptr_local));
        let interior_path = self.elaborator.deref_subpath(self.path);

        let do_drop_bb = self.drop_subpath(interior, interior_path, succ, unwind, dropline);

        // Setup block: transmute the non-null field to a plain raw pointer,
        // then jump to the drop of the pointee.
        let setup_bbd = BasicBlockData {
            statements: vec![self.assign(
                Place::from(ptr_local),
                Rvalue::Cast(CastKind::Transmute, Operand::Copy(nonnull_place), ptr_ty),
            )],
            terminator: Some(Terminator {
                kind: TerminatorKind::Goto { target: do_drop_bb },
                source_info: self.source_info,
            }),
            is_cleanup: unwind.is_cleanup(),
        };
        self.elaborator.patch().new_block(setup_bbd)
    }
725
726    #[instrument(level = "debug", ret)]
727    fn open_drop_for_adt(
728        &mut self,
729        adt: ty::AdtDef<'tcx>,
730        args: GenericArgsRef<'tcx>,
731    ) -> BasicBlock {
732        if adt.variants().is_empty() {
733            return self.elaborator.patch().new_block(BasicBlockData {
734                statements: vec![],
735                terminator: Some(Terminator {
736                    source_info: self.source_info,
737                    kind: TerminatorKind::Unreachable,
738                }),
739                is_cleanup: self.unwind.is_cleanup(),
740            });
741        }
742
743        let skip_contents = adt.is_union() || adt.is_manually_drop();
744        let contents_drop = if skip_contents {
745            (self.succ, self.unwind, self.dropline)
746        } else {
747            self.open_drop_for_adt_contents(adt, args)
748        };
749
750        if adt.is_box() {
751            // we need to drop the inside of the box before running the destructor
752            let succ = self.destructor_call_block_sync((contents_drop.0, contents_drop.1));
753            let unwind = contents_drop
754                .1
755                .map(|unwind| self.destructor_call_block_sync((unwind, Unwind::InCleanup)));
756            let dropline = contents_drop
757                .2
758                .map(|dropline| self.destructor_call_block_sync((dropline, contents_drop.1)));
759
760            self.open_drop_for_box_contents(adt, args, succ, unwind, dropline)
761        } else if adt.has_dtor(self.tcx()) {
762            self.destructor_call_block(contents_drop)
763        } else {
764            contents_drop.0
765        }
766    }
767
768    fn open_drop_for_adt_contents(
769        &mut self,
770        adt: ty::AdtDef<'tcx>,
771        args: GenericArgsRef<'tcx>,
772    ) -> (BasicBlock, Unwind, Option<BasicBlock>) {
773        let (succ, unwind, dropline) = self.drop_ladder_bottom();
774        if !adt.is_enum() {
775            let fields =
776                self.move_paths_for_fields(self.place, self.path, adt.variant(FIRST_VARIANT), args);
777            self.drop_ladder(fields, succ, unwind, dropline)
778        } else {
779            self.open_drop_for_multivariant(adt, args, succ, unwind, dropline)
780        }
781    }
782
    /// Builds the drop ladders and discriminant switches for an enum drop.
    ///
    /// Every variant that the move-path tree tracks (i.e. has a downcast
    /// subpath) gets its own field-by-field drop ladder; all untracked
    /// variants share the switch's single "otherwise" arm. Returns the switch
    /// entry block plus its unwind-path and coroutine-drop-path counterparts.
    fn open_drop_for_multivariant(
        &mut self,
        adt: ty::AdtDef<'tcx>,
        args: GenericArgsRef<'tcx>,
        succ: BasicBlock,
        unwind: Unwind,
        dropline: Option<BasicBlock>,
    ) -> (BasicBlock, Unwind, Option<BasicBlock>) {
        let mut values = Vec::with_capacity(adt.variants().len());
        let mut normal_blocks = Vec::with_capacity(adt.variants().len());
        // Unwind switch arms are only needed when we are not already unwinding.
        let mut unwind_blocks =
            if unwind.is_cleanup() { None } else { Some(Vec::with_capacity(adt.variants().len())) };
        // Coroutine-drop switch arms are only needed when a dropline exists.
        let mut dropline_blocks =
            if dropline.is_none() { None } else { Some(Vec::with_capacity(adt.variants().len())) };

        // Whether any variant lands in the "otherwise" arm, and whether any
        // such variant has a field that actually needs drop glue.
        let mut have_otherwise_with_drop_glue = false;
        let mut have_otherwise = false;
        let tcx = self.tcx();

        for (variant_index, discr) in adt.discriminants(tcx) {
            let variant = &adt.variant(variant_index);
            let subpath = self.elaborator.downcast_subpath(self.path, variant_index);

            if let Some(variant_path) = subpath {
                // Tracked variant: build a dedicated per-field drop ladder
                // underneath a downcast projection.
                let base_place = tcx.mk_place_elem(
                    self.place,
                    ProjectionElem::Downcast(Some(variant.name), variant_index),
                );
                let fields = self.move_paths_for_fields(base_place, variant_path, variant, args);
                values.push(discr.val);
                if let Unwind::To(unwind) = unwind {
                    // We can't use the half-ladder from the original
                    // drop ladder, because this breaks the
                    // "funclet can't have 2 successor funclets"
                    // requirement from MSVC:
                    //
                    //           switch       unwind-switch
                    //          /      \         /        \
                    //         v1.0    v2.0  v2.0-unwind  v1.0-unwind
                    //         |        |      /             |
                    //    v1.1-unwind  v2.1-unwind           |
                    //      ^                                |
                    //       \-------------------------------/
                    //
                    // Create a duplicate half-ladder to avoid that. We
                    // could technically only do this on MSVC, but we
                    // want to minimize the divergence between MSVC
                    // and non-MSVC.

                    let unwind_blocks = unwind_blocks.as_mut().unwrap();
                    let unwind_ladder = vec![Unwind::InCleanup; fields.len() + 1];
                    let dropline_ladder: Vec<Option<BasicBlock>> = vec![None; fields.len() + 1];
                    let halfladder =
                        self.drop_halfladder(&unwind_ladder, &dropline_ladder, unwind, &fields);
                    unwind_blocks.push(halfladder.last().cloned().unwrap());
                }
                let (normal, _, drop_bb) = self.drop_ladder(fields, succ, unwind, dropline);
                normal_blocks.push(normal);
                if dropline.is_some() {
                    dropline_blocks.as_mut().unwrap().push(drop_bb.unwrap());
                }
            } else {
                // Untracked variant: handled by the switch's "otherwise" arm.
                have_otherwise = true;

                let typing_env = self.elaborator.typing_env();
                let have_field_with_drop_glue = variant
                    .fields
                    .iter()
                    .any(|field| field.ty(tcx, args).needs_drop(tcx, typing_env));
                if have_field_with_drop_glue {
                    have_otherwise_with_drop_glue = true;
                }
            }
        }

        if !have_otherwise {
            // Every variant got its own arm; popping the last discriminant
            // value turns the last block into the switch's "otherwise" target
            // (see `adt_switch_block`).
            values.pop();
        } else if !have_otherwise_with_drop_glue {
            // Untracked variants have nothing to drop: "otherwise" just jumps
            // onward.
            normal_blocks.push(self.goto_block(succ, unwind));
            if let Unwind::To(unwind) = unwind {
                unwind_blocks.as_mut().unwrap().push(self.goto_block(unwind, Unwind::InCleanup));
            }
        } else {
            // Some untracked variant needs drop glue: "otherwise" performs a
            // full drop of the place.
            normal_blocks.push(self.drop_block(succ, unwind));
            if let Unwind::To(unwind) = unwind {
                unwind_blocks.as_mut().unwrap().push(self.drop_block(unwind, Unwind::InCleanup));
            }
        }

        (
            self.adt_switch_block(adt, normal_blocks, &values, succ, unwind),
            unwind.map(|unwind| {
                self.adt_switch_block(
                    adt,
                    unwind_blocks.unwrap(),
                    &values,
                    unwind,
                    Unwind::InCleanup,
                )
            }),
            dropline.map(|dropline| {
                self.adt_switch_block(adt, dropline_blocks.unwrap(), &values, dropline, unwind)
            }),
        )
    }
888
    /// Builds a `SwitchInt` over the discriminant of `self.place`, dispatching
    /// to the per-variant `blocks`. `values` holds one discriminant value per
    /// block except the last, which serves as the "otherwise" target. The
    /// switch itself is guarded by the shallow drop-flag test.
    fn adt_switch_block(
        &mut self,
        adt: ty::AdtDef<'tcx>,
        blocks: Vec<BasicBlock>,
        values: &[u128],
        succ: BasicBlock,
        unwind: Unwind,
    ) -> BasicBlock {
        // If there are multiple variants, then if something
        // is present within the enum the discriminant, tracked
        // by the rest path, must be initialized.
        //
        // Additionally, we do not want to switch on the
        // discriminant after it is freed, because that
        // way lies only trouble.
        let discr_ty = adt.repr().discr_type().to_ty(self.tcx());
        let discr = Place::from(self.new_temp(discr_ty));
        let discr_rv = Rvalue::Discriminant(self.place);
        let switch_block = BasicBlockData {
            // Read the discriminant into a fresh temporary, then switch on it.
            statements: vec![self.assign(discr, discr_rv)],
            terminator: Some(Terminator {
                source_info: self.source_info,
                kind: TerminatorKind::SwitchInt {
                    discr: Operand::Move(discr),
                    targets: SwitchTargets::new(
                        values.iter().copied().zip(blocks.iter().copied()),
                        // Blocks not paired with a value form the "otherwise" arm.
                        *blocks.last().unwrap(),
                    ),
                },
            }),
            is_cleanup: unwind.is_cleanup(),
        };
        let switch_block = self.elaborator.patch().new_block(switch_block);
        self.drop_flag_test_block(switch_block, succ, unwind)
    }
924
    /// Builds a block that calls `<T as Drop>::drop(&mut self.place)`
    /// synchronously, continuing to `succ` and unwinding per `unwind`.
    /// The drop flag is cleared at the start of the call block, and the whole
    /// thing is guarded by the shallow drop-flag test.
    fn destructor_call_block_sync(&mut self, (succ, unwind): (BasicBlock, Unwind)) -> BasicBlock {
        debug!("destructor_call_block_sync({:?}, {:?})", self, succ);
        let tcx = self.tcx();
        let drop_trait = tcx.require_lang_item(LangItem::Drop, None);
        // `Drop`'s first (and, presumably, only) associated item is the `drop`
        // method itself.
        let drop_fn = tcx.associated_item_def_ids(drop_trait)[0];
        let ty = self.place_ty(self.place);

        // `Drop::drop` takes `&mut self`, so borrow the place into a temporary
        // first.
        let ref_ty = Ty::new_mut_ref(tcx, tcx.lifetimes.re_erased, ty);
        let ref_place = self.new_temp(ref_ty);
        let unit_temp = Place::from(self.new_temp(tcx.types.unit));

        let result = BasicBlockData {
            statements: vec![self.assign(
                Place::from(ref_place),
                Rvalue::Ref(
                    tcx.lifetimes.re_erased,
                    BorrowKind::Mut { kind: MutBorrowKind::Default },
                    self.place,
                ),
            )],
            terminator: Some(Terminator {
                kind: TerminatorKind::Call {
                    func: Operand::function_handle(
                        tcx,
                        drop_fn,
                        [ty.into()],
                        self.source_info.span,
                    ),
                    args: [Spanned { node: Operand::Move(Place::from(ref_place)), span: DUMMY_SP }]
                        .into(),
                    destination: unit_temp,
                    target: Some(succ),
                    unwind: unwind.into_action(),
                    call_source: CallSource::Misc,
                    fn_span: self.source_info.span,
                },
                source_info: self.source_info,
            }),
            is_cleanup: unwind.is_cleanup(),
        };

        let destructor_block = self.elaborator.patch().new_block(result);

        // The destructor consumes the value: clear its (shallow) drop flag at
        // the start of the call block so it is not dropped again.
        let block_start = Location { block: destructor_block, statement_index: 0 };
        self.elaborator.clear_drop_flag(block_start, self.path, DropFlagMode::Shallow);

        self.drop_flag_test_block(destructor_block, succ, unwind)
    }
973
974    fn destructor_call_block(
975        &mut self,
976        (succ, unwind, dropline): (BasicBlock, Unwind, Option<BasicBlock>),
977    ) -> BasicBlock {
978        debug!("destructor_call_block({:?}, {:?})", self, succ);
979        let ty = self.place_ty(self.place);
980        if self.tcx().features().async_drop()
981            && self.elaborator.body().coroutine.is_some()
982            && self.elaborator.allow_async_drops()
983            && !unwind.is_cleanup()
984            && ty.is_async_drop(self.tcx(), self.elaborator.typing_env())
985        {
986            let destructor_block =
987                self.build_async_drop(self.place, ty, None, succ, unwind, dropline, true);
988
989            let block_start = Location { block: destructor_block, statement_index: 0 };
990            self.elaborator.clear_drop_flag(block_start, self.path, DropFlagMode::Shallow);
991
992            self.drop_flag_test_block(destructor_block, succ, unwind)
993        } else {
994            self.destructor_call_block_sync((succ, unwind))
995        }
996    }
997
    /// Create a loop that drops an array:
    ///
    /// ```text
    /// loop-block:
    ///    can_go = cur == len
    ///    if can_go then succ else drop-block
    /// drop-block:
    ///    ptr = &raw mut P[cur]
    ///    cur = cur + 1
    ///    drop(ptr)
    /// ```
    ///
    /// The `cur` and `len` locals are supplied by the caller so that multiple
    /// loops (normal / unwind / coroutine-drop) can share the same counters.
    /// `unwind` is the unwind destination of the element drop; `dropline` is
    /// the coroutine-drop continuation threaded into an async element drop.
    fn drop_loop(
        &mut self,
        succ: BasicBlock,
        cur: Local,
        len: Local,
        ety: Ty<'tcx>,
        unwind: Unwind,
        dropline: Option<BasicBlock>,
    ) -> BasicBlock {
        let copy = |place: Place<'tcx>| Operand::Copy(place);
        let move_ = |place: Place<'tcx>| Operand::Move(place);
        let tcx = self.tcx();

        let ptr_ty = Ty::new_mut_ptr(tcx, ety);
        let ptr = Place::from(self.new_temp(ptr_ty));
        let can_go = Place::from(self.new_temp(tcx.types.bool));
        let one = self.constant_usize(1);

        // drop-block: take a raw pointer to the current element and advance
        // the index *before* performing the (possibly unwinding) drop.
        let drop_block = BasicBlockData {
            statements: vec![
                self.assign(
                    ptr,
                    Rvalue::RawPtr(RawPtrKind::Mut, tcx.mk_place_index(self.place, cur)),
                ),
                self.assign(
                    cur.into(),
                    Rvalue::BinaryOp(BinOp::Add, Box::new((move_(cur.into()), one))),
                ),
            ],
            is_cleanup: unwind.is_cleanup(),
            terminator: Some(Terminator {
                source_info: self.source_info,
                // this gets overwritten by drop elaboration.
                kind: TerminatorKind::Unreachable,
            }),
        };
        let drop_block = self.elaborator.patch().new_block(drop_block);

        // loop-block: exit to `succ` once every element has been visited.
        let loop_block = BasicBlockData {
            statements: vec![self.assign(
                can_go,
                Rvalue::BinaryOp(BinOp::Eq, Box::new((copy(Place::from(cur)), copy(len.into())))),
            )],
            is_cleanup: unwind.is_cleanup(),
            terminator: Some(Terminator {
                source_info: self.source_info,
                kind: TerminatorKind::if_(move_(can_go), succ, drop_block),
            }),
        };
        let loop_block = self.elaborator.patch().new_block(loop_block);

        // Replace the placeholder terminator of drop-block with the actual
        // element drop: async inside an eligible coroutine, sync otherwise.
        let place = tcx.mk_place_deref(ptr);
        if self.tcx().features().async_drop()
            && self.elaborator.body().coroutine.is_some()
            && self.elaborator.allow_async_drops()
            && !unwind.is_cleanup()
            && ety.needs_async_drop(self.tcx(), self.elaborator.typing_env())
        {
            self.build_async_drop(
                place,
                ety,
                Some(drop_block),
                loop_block,
                unwind,
                dropline,
                false,
            );
        } else {
            self.elaborator.patch().patch_terminator(
                drop_block,
                TerminatorKind::Drop {
                    place,
                    target: loop_block,
                    unwind: unwind.into_action(),
                    replace: false,
                    drop: None,
                    async_fut: None,
                },
            );
        }
        loop_block
    }
1091
    /// Generates MIR that drops the elements of an array.
    ///
    /// If the array length is statically known and some elements have their
    /// own move paths (e.g. after moving out via an array pattern), a drop
    /// ladder over subslice/constant-index projections is built. Otherwise the
    /// array is unsized to a slice and dropped by the slice drop loops.
    fn open_drop_for_array(
        &mut self,
        array_ty: Ty<'tcx>,
        ety: Ty<'tcx>,
        opt_size: Option<u64>,
    ) -> BasicBlock {
        debug!("open_drop_for_array({:?}, {:?}, {:?})", array_ty, ety, opt_size);
        let tcx = self.tcx();

        if let Some(size) = opt_size {
            enum ProjectionKind<Path> {
                /// A contiguous run of indices dropped unconditionally as one
                /// subslice.
                Drop(std::ops::Range<u64>),
                /// A single index with its own move path; its drop is decided
                /// by that path.
                Keep(u64, Path),
            }
            // Previously, we'd make a projection for every element in the array and create a drop
            // ladder if any `array_subpath` was `Some`, i.e. moving out with an array pattern.
            // This caused huge memory usage when generating the drops for large arrays, so we instead
            // record the *subslices* which are dropped and the *indexes* which are kept
            let mut drop_ranges = vec![];
            let mut dropping = true;
            let mut start = 0;
            for i in 0..size {
                let path = self.elaborator.array_subpath(self.path, i, size);
                if dropping && path.is_some() {
                    drop_ranges.push(ProjectionKind::Drop(start..i));
                    dropping = false;
                } else if !dropping && path.is_none() {
                    dropping = true;
                    start = i;
                }
                if let Some(path) = path {
                    drop_ranges.push(ProjectionKind::Keep(i, path));
                }
            }
            if !drop_ranges.is_empty() {
                if dropping {
                    // Close the trailing run of still-to-drop elements.
                    drop_ranges.push(ProjectionKind::Drop(start..size));
                }
                // Turn each entry into a place projection paired with its move
                // path (`None` for subslices, which have no path of their own).
                let fields = drop_ranges
                    .iter()
                    .rev()
                    .map(|p| {
                        let (project, path) = match p {
                            ProjectionKind::Drop(r) => (
                                ProjectionElem::Subslice {
                                    from: r.start,
                                    to: r.end,
                                    from_end: false,
                                },
                                None,
                            ),
                            &ProjectionKind::Keep(offset, path) => (
                                ProjectionElem::ConstantIndex {
                                    offset,
                                    min_length: size,
                                    from_end: false,
                                },
                                Some(path),
                            ),
                        };
                        (tcx.mk_place_elem(self.place, project), path)
                    })
                    .collect::<Vec<_>>();
                let (succ, unwind, dropline) = self.drop_ladder_bottom();
                return self.drop_ladder(fields, succ, unwind, dropline).0;
            }
        }

        // No per-element tracking needed: coerce `*mut [T; N]` to `*mut [T]`
        // and delegate to the slice drop loops.
        let array_ptr_ty = Ty::new_mut_ptr(tcx, array_ty);
        let array_ptr = self.new_temp(array_ptr_ty);

        let slice_ty = Ty::new_slice(tcx, ety);
        let slice_ptr_ty = Ty::new_mut_ptr(tcx, slice_ty);
        let slice_ptr = self.new_temp(slice_ptr_ty);

        let mut delegate_block = BasicBlockData {
            statements: vec![
                self.assign(Place::from(array_ptr), Rvalue::RawPtr(RawPtrKind::Mut, self.place)),
                self.assign(
                    Place::from(slice_ptr),
                    Rvalue::Cast(
                        CastKind::PointerCoercion(
                            PointerCoercion::Unsize,
                            CoercionSource::Implicit,
                        ),
                        Operand::Move(Place::from(array_ptr)),
                        slice_ptr_ty,
                    ),
                ),
            ],
            is_cleanup: self.unwind.is_cleanup(),
            terminator: None,
        };

        // Temporarily retarget `self.place` at the deref of the slice pointer
        // while building the slice drop loops, then restore it.
        let array_place = mem::replace(
            &mut self.place,
            Place::from(slice_ptr).project_deeper(&[PlaceElem::Deref], tcx),
        );
        let slice_block = self.drop_loop_trio_for_slice(ety);
        self.place = array_place;

        delegate_block.terminator = Some(Terminator {
            source_info: self.source_info,
            kind: TerminatorKind::Goto { target: slice_block },
        });
        self.elaborator.patch().new_block(delegate_block)
    }
1199
    /// Creates a trio of drop-loops of `place`, which drops its contents, even
    /// in the case of a panic or in the case of coroutine drop
    fn drop_loop_trio_for_slice(&mut self, ety: Ty<'tcx>) -> BasicBlock {
        debug!("drop_loop_trio_for_slice({:?})", ety);
        let tcx = self.tcx();
        // Index/length temporaries shared by all three loops.
        let len = self.new_temp(tcx.types.usize);
        let cur = self.new_temp(tcx.types.usize);

        // Loop that finishes dropping the remaining elements while unwinding.
        let unwind = self
            .unwind
            .map(|unwind| self.drop_loop(unwind, cur, len, ety, Unwind::InCleanup, None));

        // Loop used when the enclosing coroutine itself is being dropped.
        let dropline =
            self.dropline.map(|dropline| self.drop_loop(dropline, cur, len, ety, unwind, None));

        // The main (normal-path) loop.
        let loop_block = self.drop_loop(self.succ, cur, len, ety, unwind, dropline);

        // The place must be exactly `*_n` so that `len` can be read from the
        // pointer local's metadata below.
        let [PlaceElem::Deref] = self.place.projection.as_slice() else {
            span_bug!(
                self.source_info.span,
                "Expected place for slice drop shim to be *_n, but it's {:?}",
                self.place,
            );
        };

        // Entry block: len = metadata of the slice pointer; cur = 0.
        let zero = self.constant_usize(0);
        let block = BasicBlockData {
            statements: vec![
                self.assign(
                    len.into(),
                    Rvalue::UnaryOp(
                        UnOp::PtrMetadata,
                        Operand::Copy(Place::from(self.place.local)),
                    ),
                ),
                self.assign(cur.into(), Rvalue::Use(zero)),
            ],
            is_cleanup: unwind.is_cleanup(),
            terminator: Some(Terminator {
                source_info: self.source_info,
                kind: TerminatorKind::Goto { target: loop_block },
            }),
        };

        let drop_block = self.elaborator.patch().new_block(block);
        // FIXME(#34708): handle partially-dropped array/slice elements.
        let reset_block = self.drop_flag_reset_block(DropFlagMode::Deep, drop_block, unwind);
        self.drop_flag_test_block(reset_block, self.succ, unwind)
    }
1249
    /// The slow-path - create an "open", elaborated drop for a type
    /// which is moved-out-of only partially, and patch `bb` to a jump
    /// to it. This must not be called on ADTs with a destructor,
    /// as these can't be moved-out-of, except for `Box<T>`, which is
    /// special-cased.
    ///
    /// This creates a "drop ladder" that drops the needed fields of the
    /// ADT, both in the success case or if one of the destructors fail.
    fn open_drop(&mut self) -> BasicBlock {
        let ty = self.place_ty(self.place);
        match ty.kind() {
            // Closure-like types drop their captured upvars like a tuple.
            ty::Closure(_, args) => self.open_drop_for_tuple(args.as_closure().upvar_tys()),
            ty::CoroutineClosure(_, args) => {
                self.open_drop_for_tuple(args.as_coroutine_closure().upvar_tys())
            }
            // Note that `elaborate_drops` only drops the upvars of a coroutine,
            // and this is ok because `open_drop` here can only be reached
            // within that own coroutine's resume function.
            // This should only happen for the self argument on the resume function.
            // It effectively only contains upvars until the coroutine transformation runs.
            // See the coroutine transform (`coroutine.rs` in this crate) for more details.
            ty::Coroutine(_, args) => self.open_drop_for_tuple(args.as_coroutine().upvar_tys()),
            ty::Tuple(fields) => self.open_drop_for_tuple(fields),
            ty::Adt(def, args) => self.open_drop_for_adt(*def, args),
            // Trait objects get a full (drop-flag-tested) drop rather than a
            // field-wise one.
            ty::Dynamic(..) => self.complete_drop(self.succ, self.unwind),
            ty::Array(ety, size) => {
                // The length may not be statically known (e.g. a generic
                // const); `None` makes `open_drop_for_array` fall back to the
                // slice-style drop loops.
                let size = size.try_to_target_usize(self.tcx());
                self.open_drop_for_array(ty, *ety, size)
            }
            ty::Slice(ety) => self.drop_loop_trio_for_slice(*ety),

            ty::UnsafeBinder(_) => {
                // Unsafe binders may elaborate drops if their inner type isn't copy.
                // This is enforced in typeck, so this should never happen.
                self.tcx().dcx().span_delayed_bug(
                    self.source_info.span,
                    "open drop for unsafe binder shouldn't be encountered",
                );
                self.elaborator.patch().new_block(BasicBlockData {
                    statements: vec![],
                    terminator: Some(Terminator {
                        source_info: self.source_info,
                        kind: TerminatorKind::Unreachable,
                    }),
                    is_cleanup: self.unwind.is_cleanup(),
                })
            }

            _ => span_bug!(self.source_info.span, "open drop from non-ADT `{:?}`", ty),
        }
    }
1301
1302    fn complete_drop(&mut self, succ: BasicBlock, unwind: Unwind) -> BasicBlock {
1303        debug!("complete_drop(succ={:?}, unwind={:?})", succ, unwind);
1304
1305        let drop_block = self.drop_block(succ, unwind);
1306
1307        self.drop_flag_test_block(drop_block, succ, unwind)
1308    }
1309
1310    /// Creates a block that resets the drop flag. If `mode` is deep, all children drop flags will
1311    /// also be cleared.
1312    fn drop_flag_reset_block(
1313        &mut self,
1314        mode: DropFlagMode,
1315        succ: BasicBlock,
1316        unwind: Unwind,
1317    ) -> BasicBlock {
1318        debug!("drop_flag_reset_block({:?},{:?})", self, mode);
1319
1320        if unwind.is_cleanup() {
1321            // The drop flag isn't read again on the unwind path, so don't
1322            // bother setting it.
1323            return succ;
1324        }
1325        let block = self.new_block(unwind, TerminatorKind::Goto { target: succ });
1326        let block_start = Location { block, statement_index: 0 };
1327        self.elaborator.clear_drop_flag(block_start, self.path, mode);
1328        block
1329    }
1330
1331    fn elaborated_drop_block(&mut self) -> BasicBlock {
1332        debug!("elaborated_drop_block({:?})", self);
1333        let blk = self.drop_block_simple(self.succ, self.unwind);
1334        self.elaborate_drop(blk);
1335        blk
1336    }
1337
1338    fn drop_block_simple(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock {
1339        let block = TerminatorKind::Drop {
1340            place: self.place,
1341            target,
1342            unwind: unwind.into_action(),
1343            replace: false,
1344            drop: self.dropline,
1345            async_fut: None,
1346        };
1347        self.new_block(unwind, block)
1348    }
1349
    /// Creates a block that drops `self.place`, choosing between the
    /// async-drop lowering (inside an eligible coroutine body) and a plain
    /// synchronous `Drop` terminator.
    fn drop_block(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock {
        let drop_ty = self.place_ty(self.place);
        if self.tcx().features().async_drop()
            && self.elaborator.body().coroutine.is_some()
            && self.elaborator.allow_async_drops()
            && !unwind.is_cleanup()
            && drop_ty.needs_async_drop(self.tcx(), self.elaborator.typing_env())
        {
            // NOTE(review): the async path continues to `self.succ` and
            // `self.dropline` rather than the `target` parameter used by the
            // sync path below — confirm callers only reach this branch with
            // `target == self.succ`.
            self.build_async_drop(
                self.place,
                drop_ty,
                None,
                self.succ,
                unwind,
                self.dropline,
                false,
            )
        } else {
            let block = TerminatorKind::Drop {
                place: self.place,
                target,
                unwind: unwind.into_action(),
                replace: false,
                drop: None,
                async_fut: None,
            };
            self.new_block(unwind, block)
        }
    }
1379
1380    fn goto_block(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock {
1381        let block = TerminatorKind::Goto { target };
1382        self.new_block(unwind, block)
1383    }
1384
1385    /// Returns the block to jump to in order to test the drop flag and execute the drop.
1386    ///
1387    /// Depending on the required `DropStyle`, this might be a generated block with an `if`
1388    /// terminator (for dynamic/open drops), or it might be `on_set` or `on_unset` itself, in case
1389    /// the drop can be statically determined.
1390    fn drop_flag_test_block(
1391        &mut self,
1392        on_set: BasicBlock,
1393        on_unset: BasicBlock,
1394        unwind: Unwind,
1395    ) -> BasicBlock {
1396        let style = self.elaborator.drop_style(self.path, DropFlagMode::Shallow);
1397        debug!(
1398            "drop_flag_test_block({:?},{:?},{:?},{:?}) - {:?}",
1399            self, on_set, on_unset, unwind, style
1400        );
1401
1402        match style {
1403            DropStyle::Dead => on_unset,
1404            DropStyle::Static => on_set,
1405            DropStyle::Conditional | DropStyle::Open => {
1406                let flag = self.elaborator.get_drop_flag(self.path).unwrap();
1407                let term = TerminatorKind::if_(flag, on_set, on_unset);
1408                self.new_block(unwind, term)
1409            }
1410        }
1411    }
1412
1413    fn new_block(&mut self, unwind: Unwind, k: TerminatorKind<'tcx>) -> BasicBlock {
1414        self.elaborator.patch().new_block(BasicBlockData {
1415            statements: vec![],
1416            terminator: Some(Terminator { source_info: self.source_info, kind: k }),
1417            is_cleanup: unwind.is_cleanup(),
1418        })
1419    }
1420
1421    fn new_block_with_statements(
1422        &mut self,
1423        unwind: Unwind,
1424        statements: Vec<Statement<'tcx>>,
1425        k: TerminatorKind<'tcx>,
1426    ) -> BasicBlock {
1427        self.elaborator.patch().new_block(BasicBlockData {
1428            statements,
1429            terminator: Some(Terminator { source_info: self.source_info, kind: k }),
1430            is_cleanup: unwind.is_cleanup(),
1431        })
1432    }
1433
1434    fn new_temp(&mut self, ty: Ty<'tcx>) -> Local {
1435        self.elaborator.patch().new_temp(ty, self.source_info.span)
1436    }
1437
1438    fn constant_usize(&self, val: u16) -> Operand<'tcx> {
1439        Operand::Constant(Box::new(ConstOperand {
1440            span: self.source_info.span,
1441            user_ty: None,
1442            const_: Const::from_usize(self.tcx(), val.into()),
1443        }))
1444    }
1445
1446    fn assign(&self, lhs: Place<'tcx>, rhs: Rvalue<'tcx>) -> Statement<'tcx> {
1447        Statement {
1448            source_info: self.source_info,
1449            kind: StatementKind::Assign(Box::new((lhs, rhs))),
1450        }
1451    }
1452}