use std::{fmt, iter, mem};

use rustc_abi::{FIRST_VARIANT, FieldIdx, VariantIdx};
use rustc_hir::def::DefKind;
use rustc_hir::lang_items::LangItem;
use rustc_index::Idx;
use rustc_middle::mir::*;
use rustc_middle::ty::adjustment::PointerCoercion;
use rustc_middle::ty::util::IntTypeExt;
use rustc_middle::ty::{self, GenericArg, GenericArgsRef, Ty, TyCtxt};
use rustc_middle::{bug, span_bug, traits};
use rustc_span::DUMMY_SP;
use rustc_span::source_map::{Spanned, dummy_spanned};
use tracing::{debug, instrument};

use crate::patch::MirPatch;

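/// How a drop of a place should be lowered, given what is known about the
/// initialization state of its move path at the drop site.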
#[derive(Debug)]
pub(crate) enum DropStyle {
    /// The place is known to be uninitialized here, so the drop is a no-op.
    Dead,

    /// The place is known to be fully initialized here, so an unconditional
    /// drop is emitted.
    Static,

    /// Initialization is only known at runtime; the drop must test the drop
    /// flag before dropping.
    Conditional,

    /// The drop cannot be lowered as a whole and is "opened up" into drops of
    /// the place's fields or elements, each elaborated on its own.
    Open,
}

#[derive(Debug)]
pub(crate) enum DropFlagMode {
    /// Only clear the drop flag of the path itself.
    Shallow,
    /// Clear the drop flag of the path and of all of its child paths.
    Deep,
}

#[derive(Copy, Clone, Debug)]
pub(crate) enum Unwind {
    /// If the drop panics, unwind to the given cleanup block.
    To(BasicBlock),
    /// The drop is itself on the unwind path; a further panic terminates.
    InCleanup,
}

impl Unwind {
    fn is_cleanup(self) -> bool {
        match self {
            Unwind::To(..) => false,
            Unwind::InCleanup => true,
        }
    }

    fn into_action(self) -> UnwindAction {
        match self {
            Unwind::To(bb) => UnwindAction::Cleanup(bb),
            Unwind::InCleanup => UnwindAction::Terminate(UnwindTerminateReason::InCleanup),
        }
    }

    fn map<F>(self, f: F) -> Self
    where
        F: FnOnce(BasicBlock) -> BasicBlock,
    {
        match self {
            Unwind::To(bb) => Unwind::To(f(bb)),
            Unwind::InCleanup => Unwind::InCleanup,
        }
    }
}

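/// The interface between drop elaboration and the pass driving it: it exposes
/// the MIR being patched together with per-path information such as drop
/// styles, drop flags, and the move sub-paths of fields, derefs, downcasts,
/// and array elements.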
pub(crate) trait DropElaborator<'a, 'tcx>: fmt::Debug {
    /// The type used to identify the move path of a place.
    type Path: Copy + fmt::Debug;

    fn patch_ref(&self) -> &MirPatch<'tcx>;
    fn patch(&mut self) -> &mut MirPatch<'tcx>;
    fn body(&self) -> &'a Body<'tcx>;
    fn tcx(&self) -> TyCtxt<'tcx>;
    fn typing_env(&self) -> ty::TypingEnv<'tcx>;
    fn allow_async_drops(&self) -> bool;

    /// The location of the terminator of `bb`, accounting for statements
    /// already added by the patch.
    fn terminator_loc(&self, bb: BasicBlock) -> Location;

    /// How the drop of `path` should be lowered for the given drop-flag mode.
    fn drop_style(&self, path: Self::Path, mode: DropFlagMode) -> DropStyle;

    /// An operand that reads the drop flag of `path`, if it has one.
    fn get_drop_flag(&mut self, path: Self::Path) -> Option<Operand<'tcx>>;

    /// Emits statements at `location` that clear the drop flag(s) of `path`
    /// according to `mode`.
    fn clear_drop_flag(&mut self, location: Location, path: Self::Path, mode: DropFlagMode);

    /// The move path of the given field of `path`, if it is tracked.
    fn field_subpath(&self, path: Self::Path, field: FieldIdx) -> Option<Self::Path>;

    /// The move path of the place behind `path`'s dereference, if tracked.
    fn deref_subpath(&self, path: Self::Path) -> Option<Self::Path>;

    /// The move path of the given variant downcast of `path`, if tracked.
    fn downcast_subpath(&self, path: Self::Path, variant: VariantIdx) -> Option<Self::Path>;

    /// The move path of element `index` of an array `path` of length `size`,
    /// if tracked.
    fn array_subpath(&self, path: Self::Path, index: u64, size: u64) -> Option<Self::Path>;
}

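/// The state carried while elaborating a single drop: the place being dropped,
/// its move path, and the control-flow targets for normal completion,
/// unwinding, and the coroutine-drop path.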
#[derive(Debug)]
struct DropCtxt<'a, 'b, 'tcx, D>
where
    D: DropElaborator<'b, 'tcx>,
{
    elaborator: &'a mut D,

    source_info: SourceInfo,

    /// The place being dropped and its move path.
    place: Place<'tcx>,
    path: D::Path,
    /// The block to branch to once the drop has completed.
    succ: BasicBlock,
    /// The action to take if the drop unwinds.
    unwind: Unwind,
    /// The target of the coroutine-drop edge, used when lowering async drops.
    dropline: Option<BasicBlock>,
}

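/// Rewrites the terminator of `bb` into the elaborated drop of `place` (whose
/// move path is `path`). Depending on the path's [`DropStyle`] this is a plain
/// goto, an ordinary `Drop`, a drop guarded by its drop flag, or a fully
/// "open" drop of the place's constituent parts. Control continues at `succ`,
/// unwinds to `unwind`, and follows `dropline` on the coroutine-drop path of
/// async drops.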
pub(crate) fn elaborate_drop<'b, 'tcx, D>(
    elaborator: &mut D,
    source_info: SourceInfo,
    place: Place<'tcx>,
    path: D::Path,
    succ: BasicBlock,
    unwind: Unwind,
    bb: BasicBlock,
    dropline: Option<BasicBlock>,
) where
    D: DropElaborator<'b, 'tcx>,
    'tcx: 'b,
{
    DropCtxt { elaborator, source_info, place, path, succ, unwind, dropline }.elaborate_drop(bb)
}

impl<'a, 'b, 'tcx, D> DropCtxt<'a, 'b, 'tcx, D>
where
    D: DropElaborator<'b, 'tcx>,
    'tcx: 'b,
{
    #[instrument(level = "trace", skip(self), ret)]
    fn place_ty(&self, place: Place<'tcx>) -> Ty<'tcx> {
        if place.local < self.elaborator.body().local_decls.next_index() {
            place.ty(self.elaborator.body(), self.tcx()).ty
        } else {
            // The local was introduced by the patch and is not in the body's
            // `local_decls` yet, so its type has to come from the patch.
            PlaceTy::from_ty(self.elaborator.patch_ref().local_ty(place.local))
                .multi_projection_ty(self.elaborator.tcx(), place.projection)
                .ty
        }
    }

    fn tcx(&self) -> TyCtxt<'tcx> {
        self.elaborator.tcx()
    }

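    /// Builds the chain of blocks that drops `place` (of type `drop_ty`)
    /// asynchronously and returns its entry block (`bb` if supplied, otherwise
    /// a fresh one). The chain pins a mutable borrow of the place with
    /// `Pin::new_unchecked`, calls either `AsyncDrop::drop` (when
    /// `call_destructor_only` is set) or the `async_drop_in_place` lang item
    /// to obtain the drop future, and finally emits a `Drop` terminator whose
    /// `async_fut` is that future, continuing to `succ`, `unwind`, or
    /// `dropline` as appropriate.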
    fn build_async_drop(
        &mut self,
        place: Place<'tcx>,
        drop_ty: Ty<'tcx>,
        bb: Option<BasicBlock>,
        succ: BasicBlock,
        unwind: Unwind,
        dropline: Option<BasicBlock>,
        call_destructor_only: bool,
    ) -> BasicBlock {
        let tcx = self.tcx();
        let span = self.source_info.span;

        let pin_obj_bb = bb.unwrap_or_else(|| {
            self.elaborator.patch().new_block(BasicBlockData::new(
                Some(Terminator {
                    source_info: self.source_info,
                    // Replaced by the `Pin::new_unchecked` call below.
                    kind: TerminatorKind::Return,
                }),
                false,
            ))
        });

        let (fut_ty, drop_fn_def_id, trait_args) = if call_destructor_only {
            // Resolve `<drop_ty as AsyncDrop>::drop` to call just the destructor.
            let trait_ref =
                ty::TraitRef::new(tcx, tcx.require_lang_item(LangItem::AsyncDrop, span), [drop_ty]);
            let (drop_trait, trait_args) = match tcx.codegen_select_candidate(
                ty::TypingEnv::fully_monomorphized().as_query_input(trait_ref),
            ) {
                Ok(traits::ImplSource::UserDefined(traits::ImplSourceUserDefinedData {
                    impl_def_id,
                    args,
                    ..
                })) => (*impl_def_id, *args),
                impl_source => {
                    span_bug!(span, "invalid `AsyncDrop` impl_source: {:?}", impl_source);
                }
            };
            let Some(drop_fn_def_id) = tcx
                .associated_item_def_ids(drop_trait)
                .first()
                .and_then(|def_id| {
                    if tcx.def_kind(def_id) == DefKind::AssocFn
                        && tcx.check_args_compatible(*def_id, trait_args)
                    {
                        Some(def_id)
                    } else {
                        None
                    }
                })
                .copied()
            else {
                // Malformed `AsyncDrop` impl: fall back to a plain synchronous drop.
                tcx.dcx().span_delayed_bug(
                    self.elaborator.body().span,
                    "AsyncDrop type without correct `async fn drop(...)`.",
                );
                self.elaborator.patch().patch_terminator(
                    pin_obj_bb,
                    TerminatorKind::Drop {
                        place,
                        target: succ,
                        unwind: unwind.into_action(),
                        replace: false,
                        drop: None,
                        async_fut: None,
                    },
                );
                return pin_obj_bb;
            };
            let drop_fn = Ty::new_fn_def(tcx, drop_fn_def_id, trait_args);
            let sig = drop_fn.fn_sig(tcx);
            let sig = tcx.instantiate_bound_regions_with_erased(sig);
            (sig.output(), drop_fn_def_id, trait_args)
        } else {
            // Full async drop: call the `async_drop_in_place` lang item.
            let drop_fn_def_id = tcx.require_lang_item(LangItem::AsyncDropInPlace, span);
            let trait_args = tcx.mk_args(&[drop_ty.into()]);
            let sig = tcx.fn_sig(drop_fn_def_id).instantiate(tcx, trait_args);
            let sig = tcx.instantiate_bound_regions_with_erased(sig);
            (sig.output(), drop_fn_def_id, trait_args)
        };

        let fut = Place::from(self.new_temp(fut_ty));

        // `obj_ref_place = &mut place`, to be pinned below.
        let obj_ref_ty = Ty::new_mut_ref(tcx, tcx.lifetimes.re_erased, drop_ty);
        let obj_ref_place = Place::from(self.new_temp(obj_ref_ty));

        let term_loc = self.elaborator.terminator_loc(pin_obj_bb);
        self.elaborator.patch().add_assign(
            term_loc,
            obj_ref_place,
            Rvalue::Ref(
                tcx.lifetimes.re_erased,
                BorrowKind::Mut { kind: MutBorrowKind::Default },
                place,
            ),
        );

        // `pin_obj_place = Pin::new_unchecked(obj_ref_place)`
        let pin_obj_new_unchecked_fn = Ty::new_fn_def(
            tcx,
            tcx.require_lang_item(LangItem::PinNewUnchecked, span),
            [GenericArg::from(obj_ref_ty)],
        );
        let pin_obj_ty = pin_obj_new_unchecked_fn.fn_sig(tcx).output().no_bound_vars().unwrap();
        let pin_obj_place = Place::from(self.new_temp(pin_obj_ty));
        let pin_obj_new_unchecked_fn = Operand::Constant(Box::new(ConstOperand {
            span,
            user_ty: None,
            const_: Const::zero_sized(pin_obj_new_unchecked_fn),
        }));

        // The final block: a `Drop` terminator that polls the returned future.
        let drop_term_bb = self.new_block(
            unwind,
            TerminatorKind::Drop {
                place,
                target: succ,
                unwind: unwind.into_action(),
                replace: false,
                drop: dropline,
                async_fut: Some(fut.local),
            },
        );

        // The block that calls the drop function to obtain the future.
        let mut call_statements = Vec::new();
        let drop_arg = if call_destructor_only {
            pin_obj_place
        } else {
            // `async_drop_in_place` takes a raw pointer, so unwrap the pinned
            // reference down to `*mut drop_ty`.
            let ty::Adt(adt_def, adt_args) = pin_obj_ty.kind() else {
                bug!();
            };
            let obj_ptr_ty = Ty::new_mut_ptr(tcx, drop_ty);
            let unwrap_ty = adt_def.non_enum_variant().fields[FieldIdx::ZERO].ty(tcx, adt_args);
            let obj_ref_place = Place::from(self.new_temp(unwrap_ty));
            call_statements.push(self.assign(
                obj_ref_place,
                Rvalue::Use(Operand::Copy(tcx.mk_place_field(
                    pin_obj_place,
                    FieldIdx::ZERO,
                    unwrap_ty,
                ))),
            ));

            let obj_ptr_place = Place::from(self.new_temp(obj_ptr_ty));

            let addr = Rvalue::RawPtr(RawPtrKind::Mut, tcx.mk_place_deref(obj_ref_place));
            call_statements.push(self.assign(obj_ptr_place, addr));
            obj_ptr_place
        };
        call_statements
            .push(Statement::new(self.source_info, StatementKind::StorageLive(fut.local)));

        let call_drop_bb = self.new_block_with_statements(
            unwind,
            call_statements,
            TerminatorKind::Call {
                func: Operand::function_handle(tcx, drop_fn_def_id, trait_args, span),
                args: [Spanned { node: Operand::Move(drop_arg), span: DUMMY_SP }].into(),
                destination: fut,
                target: Some(drop_term_bb),
                unwind: unwind.into_action(),
                call_source: CallSource::Misc,
                fn_span: self.source_info.span,
            },
        );

        // End the future's storage at the start of each continuation block.
        self.elaborator.patch().add_statement(
            Location { block: self.succ, statement_index: 0 },
            StatementKind::StorageDead(fut.local),
        );
        if let Unwind::To(block) = unwind {
            self.elaborator.patch().add_statement(
                Location { block, statement_index: 0 },
                StatementKind::StorageDead(fut.local),
            );
        }
        if let Some(block) = dropline {
            self.elaborator.patch().add_statement(
                Location { block, statement_index: 0 },
                StatementKind::StorageDead(fut.local),
            );
        }

        // Entry block: pin the borrow, then continue with the drop call.
        self.elaborator.patch().patch_terminator(
            pin_obj_bb,
            TerminatorKind::Call {
                func: pin_obj_new_unchecked_fn,
                args: [dummy_spanned(Operand::Move(obj_ref_place))].into(),
                destination: pin_obj_place,
                target: Some(call_drop_bb),
                unwind: unwind.into_action(),
                call_source: CallSource::Misc,
                fn_span: span,
            },
        );
        pin_obj_bb
    }

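    /// Lowers an unconditional (`DropStyle::Static`) drop at `bb`: an async
    /// drop chain when this is a non-cleanup block of a coroutine whose type
    /// needs one, and a plain `Drop` terminator otherwise.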
    fn build_drop(&mut self, bb: BasicBlock) {
        let drop_ty = self.place_ty(self.place);
        if self.tcx().features().async_drop()
            && self.elaborator.body().coroutine.is_some()
            && self.elaborator.allow_async_drops()
            && !self.elaborator.patch_ref().block(self.elaborator.body(), bb).is_cleanup
            && drop_ty.needs_async_drop(self.tcx(), self.elaborator.typing_env())
        {
            self.build_async_drop(
                self.place,
                drop_ty,
                Some(bb),
                self.succ,
                self.unwind,
                self.dropline,
                false,
            );
        } else {
            self.elaborator.patch().patch_terminator(
                bb,
                TerminatorKind::Drop {
                    place: self.place,
                    target: self.succ,
                    unwind: self.unwind.into_action(),
                    replace: false,
                    drop: None,
                    async_fut: None,
                },
            );
        }
    }

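    /// The core of drop elaboration: rewrites the terminator of `bb` according
    /// to the drop style of `self.path`. `Dead` becomes a goto, `Static` an
    /// unconditional drop, `Conditional` a drop guarded by the drop flag, and
    /// `Open` a drop expanded into the place's fields or elements.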
    #[instrument(level = "debug")]
    fn elaborate_drop(&mut self, bb: BasicBlock) {
        match self.elaborator.drop_style(self.path, DropFlagMode::Deep) {
            DropStyle::Dead => {
                self.elaborator
                    .patch()
                    .patch_terminator(bb, TerminatorKind::Goto { target: self.succ });
            }
            DropStyle::Static => {
                self.build_drop(bb);
            }
            DropStyle::Conditional => {
                let drop_bb = self.complete_drop(self.succ, self.unwind);
                self.elaborator
                    .patch()
                    .patch_terminator(bb, TerminatorKind::Goto { target: drop_bb });
            }
            DropStyle::Open => {
                let drop_bb = self.open_drop();
                self.elaborator
                    .patch()
                    .patch_terminator(bb, TerminatorKind::Goto { target: drop_bb });
            }
        }
    }

    /// Returns the place and move sub-path (if tracked) of every field of the
    /// given variant, with the field types normalized.
    fn move_paths_for_fields(
        &self,
        base_place: Place<'tcx>,
        variant_path: D::Path,
        variant: &'tcx ty::VariantDef,
        args: GenericArgsRef<'tcx>,
    ) -> Vec<(Place<'tcx>, Option<D::Path>)> {
        variant
            .fields
            .iter_enumerated()
            .map(|(field_idx, field)| {
                let subpath = self.elaborator.field_subpath(variant_path, field_idx);
                let tcx = self.tcx();

                assert_eq!(self.elaborator.typing_env().typing_mode, ty::TypingMode::PostAnalysis);
                let field_ty = match tcx.try_normalize_erasing_regions(
                    self.elaborator.typing_env(),
                    field.ty(tcx, args),
                ) {
                    Ok(t) => t,
                    Err(_) => Ty::new_error(
                        self.tcx(),
                        self.tcx().dcx().span_delayed_bug(
                            self.elaborator.body().span,
                            "Error normalizing in drop elaboration.",
                        ),
                    ),
                };

                (tcx.mk_place_field(base_place, field_idx, field_ty), subpath)
            })
            .collect()
    }

    fn drop_subpath(
        &mut self,
        place: Place<'tcx>,
        path: Option<D::Path>,
        succ: BasicBlock,
        unwind: Unwind,
        dropline: Option<BasicBlock>,
    ) -> BasicBlock {
        if let Some(path) = path {
            debug!("drop_subpath: for std field {:?}", place);

            DropCtxt {
                elaborator: self.elaborator,
                source_info: self.source_info,
                path,
                place,
                succ,
                unwind,
                dropline,
            }
            .elaborated_drop_block()
        } else {
            debug!("drop_subpath: for rest field {:?}", place);

            DropCtxt {
                elaborator: self.elaborator,
                source_info: self.source_info,
                place,
                succ,
                unwind,
                dropline,
                path: self.path,
            }
            .complete_drop(succ, unwind)
        }
    }

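    /// Builds one half of a drop ladder: a chain of blocks that drops each of
    /// `fields` in turn and then continues to `succ`, with the unwind and
    /// dropline targets of each drop taken from the corresponding ladder
    /// slices. Returns the blocks bottom-up, starting with `succ` itself and
    /// ending with the ladder's entry block.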
    fn drop_halfladder(
        &mut self,
        unwind_ladder: &[Unwind],
        dropline_ladder: &[Option<BasicBlock>],
        mut succ: BasicBlock,
        fields: &[(Place<'tcx>, Option<D::Path>)],
    ) -> Vec<BasicBlock> {
        iter::once(succ)
            .chain(itertools::izip!(fields.iter().rev(), unwind_ladder, dropline_ladder).map(
                |(&(place, path), &unwind_succ, &dropline_to)| {
                    succ = self.drop_subpath(place, path, succ, unwind_succ, dropline_to);
                    succ
                },
            ))
            .collect()
    }

    fn drop_ladder_bottom(&mut self) -> (BasicBlock, Unwind, Option<BasicBlock>) {
        // The bottom of the ladder: once every field has been dropped, clear
        // the drop flag of the whole place (shallowly) and continue to the
        // original successor.
        (
            self.drop_flag_reset_block(DropFlagMode::Shallow, self.succ, self.unwind),
            self.unwind,
            self.dropline,
        )
    }

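    /// Builds a drop ladder for `fields`: a normal half-ladder that drops the
    /// fields which still need dropping, plus matching unwind and dropline
    /// half-ladders so that a panic (or coroutine drop) part-way through still
    /// drops the remaining fields. Returns the entry block of each ladder.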
    fn drop_ladder(
        &mut self,
        fields: Vec<(Place<'tcx>, Option<D::Path>)>,
        succ: BasicBlock,
        unwind: Unwind,
        dropline: Option<BasicBlock>,
    ) -> (BasicBlock, Unwind, Option<BasicBlock>) {
        debug!("drop_ladder({:?}, {:?})", self, fields);
        assert!(
            if unwind.is_cleanup() { dropline.is_none() } else { true },
            "Dropline is set for cleanup drop ladder"
        );

        let mut fields = fields;
        fields.retain(|&(place, _)| {
            self.place_ty(place).needs_drop(self.tcx(), self.elaborator.typing_env())
        });

        debug!("drop_ladder - fields needing drop: {:?}", fields);

        let dropline_ladder: Vec<Option<BasicBlock>> = vec![None; fields.len() + 1];
        let unwind_ladder = vec![Unwind::InCleanup; fields.len() + 1];
        let unwind_ladder: Vec<_> = if let Unwind::To(succ) = unwind {
            let halfladder = self.drop_halfladder(&unwind_ladder, &dropline_ladder, succ, &fields);
            halfladder.into_iter().map(Unwind::To).collect()
        } else {
            unwind_ladder
        };
        let dropline_ladder: Vec<_> = if let Some(succ) = dropline {
            let halfladder = self.drop_halfladder(&unwind_ladder, &dropline_ladder, succ, &fields);
            halfladder.into_iter().map(Some).collect()
        } else {
            dropline_ladder
        };

        let normal_ladder = self.drop_halfladder(&unwind_ladder, &dropline_ladder, succ, &fields);

        (
            *normal_ladder.last().unwrap(),
            *unwind_ladder.last().unwrap(),
            *dropline_ladder.last().unwrap(),
        )
    }

    fn open_drop_for_tuple(&mut self, tys: &[Ty<'tcx>]) -> BasicBlock {
        debug!("open_drop_for_tuple({:?}, {:?})", self, tys);

        let fields = tys
            .iter()
            .enumerate()
            .map(|(i, &ty)| {
                (
                    self.tcx().mk_place_field(self.place, FieldIdx::new(i), ty),
                    self.elaborator.field_subpath(self.path, FieldIdx::new(i)),
                )
            })
            .collect();

        let (succ, unwind, dropline) = self.drop_ladder_bottom();
        self.drop_ladder(fields, succ, unwind, dropline).0
    }

    #[instrument(level = "debug", ret)]
    fn open_drop_for_box_contents(
        &mut self,
        adt: ty::AdtDef<'tcx>,
        args: GenericArgsRef<'tcx>,
        succ: BasicBlock,
        unwind: Unwind,
        dropline: Option<BasicBlock>,
    ) -> BasicBlock {
        // `Box<T>` wraps `Unique<T>`, which wraps `NonNull<T>`. Dig through
        // those layers to a raw pointer and drop the pointee through it.
        let unique_ty = adt.non_enum_variant().fields[FieldIdx::ZERO].ty(self.tcx(), args);
        let unique_variant = unique_ty.ty_adt_def().unwrap().non_enum_variant();
        let nonnull_ty = unique_variant.fields[FieldIdx::ZERO].ty(self.tcx(), args);
        let ptr_ty = Ty::new_imm_ptr(self.tcx(), args[0].expect_ty());

        let unique_place = self.tcx().mk_place_field(self.place, FieldIdx::ZERO, unique_ty);
        let nonnull_place = self.tcx().mk_place_field(unique_place, FieldIdx::ZERO, nonnull_ty);

        let ptr_local = self.new_temp(ptr_ty);

        let interior = self.tcx().mk_place_deref(Place::from(ptr_local));
        let interior_path = self.elaborator.deref_subpath(self.path);

        let do_drop_bb = self.drop_subpath(interior, interior_path, succ, unwind, dropline);

        let setup_bbd = BasicBlockData::new_stmts(
            vec![self.assign(
                Place::from(ptr_local),
                Rvalue::Cast(CastKind::Transmute, Operand::Copy(nonnull_place), ptr_ty),
            )],
            Some(Terminator {
                kind: TerminatorKind::Goto { target: do_drop_bb },
                source_info: self.source_info,
            }),
            unwind.is_cleanup(),
        );
        self.elaborator.patch().new_block(setup_bbd)
    }

    #[instrument(level = "debug", ret)]
    fn open_drop_for_adt(
        &mut self,
        adt: ty::AdtDef<'tcx>,
        args: GenericArgsRef<'tcx>,
    ) -> BasicBlock {
        if adt.variants().is_empty() {
            return self.elaborator.patch().new_block(BasicBlockData::new(
                Some(Terminator {
                    source_info: self.source_info,
                    kind: TerminatorKind::Unreachable,
                }),
                self.unwind.is_cleanup(),
            ));
        }

        let skip_contents = adt.is_union() || adt.is_manually_drop();
        let contents_drop = if skip_contents {
            if adt.has_dtor(self.tcx()) && self.elaborator.get_drop_flag(self.path).is_some() {
                span_bug!(self.source_info.span, "open dropping partially moved union");
            }

            (self.succ, self.unwind, self.dropline)
        } else {
            self.open_drop_for_adt_contents(adt, args)
        };

        if adt.has_dtor(self.tcx()) {
            let destructor_block = if adt.is_box() {
                let succ = self.destructor_call_block_sync((contents_drop.0, contents_drop.1));
                let unwind = contents_drop
                    .1
                    .map(|unwind| self.destructor_call_block_sync((unwind, Unwind::InCleanup)));
                let dropline = contents_drop
                    .2
                    .map(|dropline| self.destructor_call_block_sync((dropline, contents_drop.1)));
                self.open_drop_for_box_contents(adt, args, succ, unwind, dropline)
            } else {
                self.destructor_call_block(contents_drop)
            };

            self.drop_flag_test_block(destructor_block, contents_drop.0, contents_drop.1)
        } else {
            contents_drop.0
        }
    }

    fn open_drop_for_adt_contents(
        &mut self,
        adt: ty::AdtDef<'tcx>,
        args: GenericArgsRef<'tcx>,
    ) -> (BasicBlock, Unwind, Option<BasicBlock>) {
        let (succ, unwind, dropline) = self.drop_ladder_bottom();
        if !adt.is_enum() {
            let fields =
                self.move_paths_for_fields(self.place, self.path, adt.variant(FIRST_VARIANT), args);
            self.drop_ladder(fields, succ, unwind, dropline)
        } else {
            self.open_drop_for_multivariant(adt, args, succ, unwind, dropline)
        }
    }

    fn open_drop_for_multivariant(
        &mut self,
        adt: ty::AdtDef<'tcx>,
        args: GenericArgsRef<'tcx>,
        succ: BasicBlock,
        unwind: Unwind,
        dropline: Option<BasicBlock>,
    ) -> (BasicBlock, Unwind, Option<BasicBlock>) {
        let mut values = Vec::with_capacity(adt.variants().len());
        let mut normal_blocks = Vec::with_capacity(adt.variants().len());
        let mut unwind_blocks =
            if unwind.is_cleanup() { None } else { Some(Vec::with_capacity(adt.variants().len())) };
        let mut dropline_blocks =
            if dropline.is_none() { None } else { Some(Vec::with_capacity(adt.variants().len())) };

        let mut have_otherwise_with_drop_glue = false;
        let mut have_otherwise = false;
        let tcx = self.tcx();

        for (variant_index, discr) in adt.discriminants(tcx) {
            let variant = &adt.variant(variant_index);
            let subpath = self.elaborator.downcast_subpath(self.path, variant_index);

            if let Some(variant_path) = subpath {
                let base_place = tcx.mk_place_elem(
                    self.place,
                    ProjectionElem::Downcast(Some(variant.name), variant_index),
                );
                let fields = self.move_paths_for_fields(base_place, variant_path, variant, args);
                values.push(discr.val);
                if let Unwind::To(unwind) = unwind {
                    // The unwind path gets its own discriminant switch, so each
                    // variant also needs a ladder built entirely of cleanup
                    // blocks that drops its fields and continues unwinding.
                    let unwind_blocks = unwind_blocks.as_mut().unwrap();
                    let unwind_ladder = vec![Unwind::InCleanup; fields.len() + 1];
                    let dropline_ladder: Vec<Option<BasicBlock>> = vec![None; fields.len() + 1];
                    let halfladder =
                        self.drop_halfladder(&unwind_ladder, &dropline_ladder, unwind, &fields);
                    unwind_blocks.push(halfladder.last().cloned().unwrap());
                }
                let (normal, _, drop_bb) = self.drop_ladder(fields, succ, unwind, dropline);
                normal_blocks.push(normal);
                if dropline.is_some() {
                    dropline_blocks.as_mut().unwrap().push(drop_bb.unwrap());
                }
            } else {
                have_otherwise = true;

                let typing_env = self.elaborator.typing_env();
                let have_field_with_drop_glue = variant
                    .fields
                    .iter()
                    .any(|field| field.ty(tcx, args).needs_drop(tcx, typing_env));
                if have_field_with_drop_glue {
                    have_otherwise_with_drop_glue = true;
                }
            }
        }

        if !have_otherwise {
            values.pop();
        } else if !have_otherwise_with_drop_glue {
            normal_blocks.push(self.goto_block(succ, unwind));
            if let Unwind::To(unwind) = unwind {
                unwind_blocks.as_mut().unwrap().push(self.goto_block(unwind, Unwind::InCleanup));
            }
        } else {
            normal_blocks.push(self.drop_block(succ, unwind));
            if let Unwind::To(unwind) = unwind {
                unwind_blocks.as_mut().unwrap().push(self.drop_block(unwind, Unwind::InCleanup));
            }
        }

        (
            self.adt_switch_block(adt, normal_blocks, &values, succ, unwind),
            unwind.map(|unwind| {
                self.adt_switch_block(
                    adt,
                    unwind_blocks.unwrap(),
                    &values,
                    unwind,
                    Unwind::InCleanup,
                )
            }),
            dropline.map(|dropline| {
                self.adt_switch_block(adt, dropline_blocks.unwrap(), &values, dropline, unwind)
            }),
        )
    }

    fn adt_switch_block(
        &mut self,
        adt: ty::AdtDef<'tcx>,
        blocks: Vec<BasicBlock>,
        values: &[u128],
        succ: BasicBlock,
        unwind: Unwind,
    ) -> BasicBlock {
        let discr_ty = adt.repr().discr_type().to_ty(self.tcx());
        let discr = Place::from(self.new_temp(discr_ty));
        let discr_rv = Rvalue::Discriminant(self.place);
        let switch_block = BasicBlockData::new_stmts(
            vec![self.assign(discr, discr_rv)],
            Some(Terminator {
                source_info: self.source_info,
                kind: TerminatorKind::SwitchInt {
                    discr: Operand::Move(discr),
                    targets: SwitchTargets::new(
                        values.iter().copied().zip(blocks.iter().copied()),
                        *blocks.last().unwrap(),
                    ),
                },
            }),
            unwind.is_cleanup(),
        );
        let switch_block = self.elaborator.patch().new_block(switch_block);
        self.drop_flag_test_block(switch_block, succ, unwind)
    }

    fn destructor_call_block_sync(&mut self, (succ, unwind): (BasicBlock, Unwind)) -> BasicBlock {
        debug!("destructor_call_block_sync({:?}, {:?})", self, succ);
        let tcx = self.tcx();
        let drop_trait = tcx.require_lang_item(LangItem::Drop, DUMMY_SP);
        let drop_fn = tcx.associated_item_def_ids(drop_trait)[0];
        let ty = self.place_ty(self.place);

        let ref_ty = Ty::new_mut_ref(tcx, tcx.lifetimes.re_erased, ty);
        let ref_place = self.new_temp(ref_ty);
        let unit_temp = Place::from(self.new_temp(tcx.types.unit));

        let result = BasicBlockData::new_stmts(
            vec![self.assign(
                Place::from(ref_place),
                Rvalue::Ref(
                    tcx.lifetimes.re_erased,
                    BorrowKind::Mut { kind: MutBorrowKind::Default },
                    self.place,
                ),
            )],
            Some(Terminator {
                kind: TerminatorKind::Call {
                    func: Operand::function_handle(
                        tcx,
                        drop_fn,
                        [ty.into()],
                        self.source_info.span,
                    ),
                    args: [Spanned { node: Operand::Move(Place::from(ref_place)), span: DUMMY_SP }]
                        .into(),
                    destination: unit_temp,
                    target: Some(succ),
                    unwind: unwind.into_action(),
                    call_source: CallSource::Misc,
                    fn_span: self.source_info.span,
                },
                source_info: self.source_info,
            }),
            unwind.is_cleanup(),
        );

        self.elaborator.patch().new_block(result)
    }

    fn destructor_call_block(
        &mut self,
        (succ, unwind, dropline): (BasicBlock, Unwind, Option<BasicBlock>),
    ) -> BasicBlock {
        debug!("destructor_call_block({:?}, {:?})", self, succ);
        let ty = self.place_ty(self.place);
        if self.tcx().features().async_drop()
            && self.elaborator.body().coroutine.is_some()
            && self.elaborator.allow_async_drops()
            && !unwind.is_cleanup()
            && ty.is_async_drop(self.tcx(), self.elaborator.typing_env())
        {
            self.build_async_drop(self.place, ty, None, succ, unwind, dropline, true)
        } else {
            self.destructor_call_block_sync((succ, unwind))
        }
    }

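    /// Builds the two blocks that drop the elements of an array or slice in a
    /// loop: a header (`loop_block`) that tests `cur == len` and either exits
    /// to `succ` or falls into the body (`drop_block`), which takes a raw
    /// pointer to `place[cur]`, increments `cur`, and drops the pointee before
    /// looping back. Returns `loop_block`.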
    fn drop_loop(
        &mut self,
        succ: BasicBlock,
        cur: Local,
        len: Local,
        ety: Ty<'tcx>,
        unwind: Unwind,
        dropline: Option<BasicBlock>,
    ) -> BasicBlock {
        let copy = |place: Place<'tcx>| Operand::Copy(place);
        let move_ = |place: Place<'tcx>| Operand::Move(place);
        let tcx = self.tcx();

        let ptr_ty = Ty::new_mut_ptr(tcx, ety);
        let ptr = Place::from(self.new_temp(ptr_ty));
        let can_go = Place::from(self.new_temp(tcx.types.bool));
        let one = self.constant_usize(1);

        let drop_block = BasicBlockData::new_stmts(
            vec![
                self.assign(
                    ptr,
                    Rvalue::RawPtr(RawPtrKind::Mut, tcx.mk_place_index(self.place, cur)),
                ),
                self.assign(
                    cur.into(),
                    Rvalue::BinaryOp(BinOp::Add, Box::new((move_(cur.into()), one))),
                ),
            ],
            Some(Terminator {
                source_info: self.source_info,
                // Placeholder terminator; patched below to the actual element drop.
                kind: TerminatorKind::Unreachable,
            }),
            unwind.is_cleanup(),
        );
        let drop_block = self.elaborator.patch().new_block(drop_block);

        let loop_block = BasicBlockData::new_stmts(
            vec![self.assign(
                can_go,
                Rvalue::BinaryOp(BinOp::Eq, Box::new((copy(Place::from(cur)), copy(len.into())))),
            )],
            Some(Terminator {
                source_info: self.source_info,
                kind: TerminatorKind::if_(move_(can_go), succ, drop_block),
            }),
            unwind.is_cleanup(),
        );
        let loop_block = self.elaborator.patch().new_block(loop_block);

        let place = tcx.mk_place_deref(ptr);
        if self.tcx().features().async_drop()
            && self.elaborator.body().coroutine.is_some()
            && self.elaborator.allow_async_drops()
            && !unwind.is_cleanup()
            && ety.needs_async_drop(self.tcx(), self.elaborator.typing_env())
        {
            self.build_async_drop(
                place,
                ety,
                Some(drop_block),
                loop_block,
                unwind,
                dropline,
                false,
            );
        } else {
            self.elaborator.patch().patch_terminator(
                drop_block,
                TerminatorKind::Drop {
                    place,
                    target: loop_block,
                    unwind: unwind.into_action(),
                    replace: false,
                    drop: None,
                    async_fut: None,
                },
            );
        }
        loop_block
    }

    fn open_drop_for_array(
        &mut self,
        array_ty: Ty<'tcx>,
        ety: Ty<'tcx>,
        opt_size: Option<u64>,
    ) -> BasicBlock {
        debug!("open_drop_for_array({:?}, {:?}, {:?})", array_ty, ety, opt_size);
        let tcx = self.tcx();

        if let Some(size) = opt_size {
            enum ProjectionKind<Path> {
                Drop(std::ops::Range<u64>),
                Keep(u64, Path),
            }
            // For arrays of known size, split the elements into runs without
            // their own move path (dropped together as `Subslice` projections)
            // and individually tracked elements (each dropped through its own
            // move path).
            let mut drop_ranges = vec![];
            let mut dropping = true;
            let mut start = 0;
            for i in 0..size {
                let path = self.elaborator.array_subpath(self.path, i, size);
                if dropping && path.is_some() {
                    drop_ranges.push(ProjectionKind::Drop(start..i));
                    dropping = false;
                } else if !dropping && path.is_none() {
                    dropping = true;
                    start = i;
                }
                if let Some(path) = path {
                    drop_ranges.push(ProjectionKind::Keep(i, path));
                }
            }
            if !drop_ranges.is_empty() {
                if dropping {
                    drop_ranges.push(ProjectionKind::Drop(start..size));
                }
                let fields = drop_ranges
                    .iter()
                    .rev()
                    .map(|p| {
                        let (project, path) = match p {
                            ProjectionKind::Drop(r) => (
                                ProjectionElem::Subslice {
                                    from: r.start,
                                    to: r.end,
                                    from_end: false,
                                },
                                None,
                            ),
                            &ProjectionKind::Keep(offset, path) => (
                                ProjectionElem::ConstantIndex {
                                    offset,
                                    min_length: size,
                                    from_end: false,
                                },
                                Some(path),
                            ),
                        };
                        (tcx.mk_place_elem(self.place, project), path)
                    })
                    .collect::<Vec<_>>();
                let (succ, unwind, dropline) = self.drop_ladder_bottom();
                return self.drop_ladder(fields, succ, unwind, dropline).0;
            }
        }

        // Otherwise unsize the array to a slice and drop it with a loop.
        let array_ptr_ty = Ty::new_mut_ptr(tcx, array_ty);
        let array_ptr = self.new_temp(array_ptr_ty);

        let slice_ty = Ty::new_slice(tcx, ety);
        let slice_ptr_ty = Ty::new_mut_ptr(tcx, slice_ty);
        let slice_ptr = self.new_temp(slice_ptr_ty);

        let mut delegate_block = BasicBlockData::new_stmts(
            vec![
                self.assign(Place::from(array_ptr), Rvalue::RawPtr(RawPtrKind::Mut, self.place)),
                self.assign(
                    Place::from(slice_ptr),
                    Rvalue::Cast(
                        CastKind::PointerCoercion(
                            PointerCoercion::Unsize,
                            CoercionSource::Implicit,
                        ),
                        Operand::Move(Place::from(array_ptr)),
                        slice_ptr_ty,
                    ),
                ),
            ],
            None,
            self.unwind.is_cleanup(),
        );

        let array_place = mem::replace(
            &mut self.place,
            Place::from(slice_ptr).project_deeper(&[PlaceElem::Deref], tcx),
        );
        let slice_block = self.drop_loop_trio_for_slice(ety);
        self.place = array_place;

        delegate_block.terminator = Some(Terminator {
            source_info: self.source_info,
            kind: TerminatorKind::Goto { target: slice_block },
        });
        self.elaborator.patch().new_block(delegate_block)
    }

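    /// Builds the drop loops for a slice place (`*ptr`): one for normal
    /// execution and, when present, one each for the unwind and coroutine-drop
    /// paths, plus a preamble that reads the slice length with `PtrMetadata`
    /// and zeroes the loop counter. The result is guarded by the usual
    /// drop-flag test.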
    fn drop_loop_trio_for_slice(&mut self, ety: Ty<'tcx>) -> BasicBlock {
        debug!("drop_loop_trio_for_slice({:?})", ety);
        let tcx = self.tcx();
        let len = self.new_temp(tcx.types.usize);
        let cur = self.new_temp(tcx.types.usize);

        let unwind = self
            .unwind
            .map(|unwind| self.drop_loop(unwind, cur, len, ety, Unwind::InCleanup, None));

        let dropline =
            self.dropline.map(|dropline| self.drop_loop(dropline, cur, len, ety, unwind, None));

        let loop_block = self.drop_loop(self.succ, cur, len, ety, unwind, dropline);

        let [PlaceElem::Deref] = self.place.projection.as_slice() else {
            span_bug!(
                self.source_info.span,
                "Expected place for slice drop shim to be *_n, but it's {:?}",
                self.place,
            );
        };

        let zero = self.constant_usize(0);
        let block = BasicBlockData::new_stmts(
            vec![
                self.assign(
                    len.into(),
                    Rvalue::UnaryOp(
                        UnOp::PtrMetadata,
                        Operand::Copy(Place::from(self.place.local)),
                    ),
                ),
                self.assign(cur.into(), Rvalue::Use(zero)),
            ],
            Some(Terminator {
                source_info: self.source_info,
                kind: TerminatorKind::Goto { target: loop_block },
            }),
            unwind.is_cleanup(),
        );

        let drop_block = self.elaborator.patch().new_block(block);
        let reset_block = self.drop_flag_reset_block(DropFlagMode::Deep, drop_block, unwind);
        self.drop_flag_test_block(reset_block, self.succ, unwind)
    }

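    /// Elaborates an "open" drop of `self.place` by expanding it into drops of
    /// its constituent parts, depending on the type: fields of ADTs and
    /// tuples, upvars of closures and coroutines, elements of arrays and
    /// slices, and the contents of boxes.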
    fn open_drop(&mut self) -> BasicBlock {
        let ty = self.place_ty(self.place);
        match ty.kind() {
            ty::Closure(_, args) => self.open_drop_for_tuple(args.as_closure().upvar_tys()),
            ty::CoroutineClosure(_, args) => {
                self.open_drop_for_tuple(args.as_coroutine_closure().upvar_tys())
            }
            ty::Coroutine(_, args) => self.open_drop_for_tuple(args.as_coroutine().upvar_tys()),
            ty::Tuple(fields) => self.open_drop_for_tuple(fields),
            ty::Adt(def, args) => self.open_drop_for_adt(*def, args),
            ty::Dynamic(..) => self.complete_drop(self.succ, self.unwind),
            ty::Array(ety, size) => {
                let size = size.try_to_target_usize(self.tcx());
                self.open_drop_for_array(ty, *ety, size)
            }
            ty::Slice(ety) => self.drop_loop_trio_for_slice(*ety),

            ty::UnsafeBinder(_) => {
                self.tcx().dcx().span_delayed_bug(
                    self.source_info.span,
                    "open drop for unsafe binder shouldn't be encountered",
                );
                self.elaborator.patch().new_block(BasicBlockData::new(
                    Some(Terminator {
                        source_info: self.source_info,
                        kind: TerminatorKind::Unreachable,
                    }),
                    self.unwind.is_cleanup(),
                ))
            }

            _ => span_bug!(self.source_info.span, "open drop from non-ADT `{:?}`", ty),
        }
    }

    fn complete_drop(&mut self, succ: BasicBlock, unwind: Unwind) -> BasicBlock {
        debug!("complete_drop(succ={:?}, unwind={:?})", succ, unwind);

        let drop_block = self.drop_block(succ, unwind);

        self.drop_flag_test_block(drop_block, succ, unwind)
    }

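    /// Returns a block that clears the drop flag of `self.path` in the given
    /// `mode` and then jumps to `succ`. On cleanup paths the flag is not reset
    /// and `succ` is returned unchanged.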
    fn drop_flag_reset_block(
        &mut self,
        mode: DropFlagMode,
        succ: BasicBlock,
        unwind: Unwind,
    ) -> BasicBlock {
        debug!("drop_flag_reset_block({:?},{:?})", self, mode);

        if unwind.is_cleanup() {
            return succ;
        }
        let block = self.new_block(unwind, TerminatorKind::Goto { target: succ });
        let block_start = Location { block, statement_index: 0 };
        self.elaborator.clear_drop_flag(block_start, self.path, mode);
        block
    }

    fn elaborated_drop_block(&mut self) -> BasicBlock {
        debug!("elaborated_drop_block({:?})", self);
        let blk = self.drop_block_simple(self.succ, self.unwind);
        self.elaborate_drop(blk);
        blk
    }

    fn drop_block_simple(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock {
        let block = TerminatorKind::Drop {
            place: self.place,
            target,
            unwind: unwind.into_action(),
            replace: false,
            drop: self.dropline,
            async_fut: None,
        };
        self.new_block(unwind, block)
    }

    fn drop_block(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock {
        let drop_ty = self.place_ty(self.place);
        if self.tcx().features().async_drop()
            && self.elaborator.body().coroutine.is_some()
            && self.elaborator.allow_async_drops()
            && !unwind.is_cleanup()
            && drop_ty.needs_async_drop(self.tcx(), self.elaborator.typing_env())
        {
            self.build_async_drop(
                self.place,
                drop_ty,
                None,
                self.succ,
                unwind,
                self.dropline,
                false,
            )
        } else {
            let block = TerminatorKind::Drop {
                place: self.place,
                target,
                unwind: unwind.into_action(),
                replace: false,
                drop: None,
                async_fut: None,
            };
            self.new_block(unwind, block)
        }
    }

    fn goto_block(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock {
        let block = TerminatorKind::Goto { target };
        self.new_block(unwind, block)
    }

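    /// Depending on the shallow drop style of `self.path`, returns `on_set`
    /// directly (always initialized), `on_unset` directly (known dead), or a
    /// new block that branches on the drop flag between the two.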
    fn drop_flag_test_block(
        &mut self,
        on_set: BasicBlock,
        on_unset: BasicBlock,
        unwind: Unwind,
    ) -> BasicBlock {
        let style = self.elaborator.drop_style(self.path, DropFlagMode::Shallow);
        debug!(
            "drop_flag_test_block({:?},{:?},{:?},{:?}) - {:?}",
            self, on_set, on_unset, unwind, style
        );

        match style {
            DropStyle::Dead => on_unset,
            DropStyle::Static => on_set,
            DropStyle::Conditional | DropStyle::Open => {
                let flag = self.elaborator.get_drop_flag(self.path).unwrap();
                let term = TerminatorKind::if_(flag, on_set, on_unset);
                self.new_block(unwind, term)
            }
        }
    }

    fn new_block(&mut self, unwind: Unwind, k: TerminatorKind<'tcx>) -> BasicBlock {
        self.elaborator.patch().new_block(BasicBlockData::new(
            Some(Terminator { source_info: self.source_info, kind: k }),
            unwind.is_cleanup(),
        ))
    }

    fn new_block_with_statements(
        &mut self,
        unwind: Unwind,
        statements: Vec<Statement<'tcx>>,
        k: TerminatorKind<'tcx>,
    ) -> BasicBlock {
        self.elaborator.patch().new_block(BasicBlockData::new_stmts(
            statements,
            Some(Terminator { source_info: self.source_info, kind: k }),
            unwind.is_cleanup(),
        ))
    }

    fn new_temp(&mut self, ty: Ty<'tcx>) -> Local {
        self.elaborator.patch().new_temp(ty, self.source_info.span)
    }

    fn constant_usize(&self, val: u16) -> Operand<'tcx> {
        Operand::Constant(Box::new(ConstOperand {
            span: self.source_info.span,
            user_ty: None,
            const_: Const::from_usize(self.tcx(), val.into()),
        }))
    }

    fn assign(&self, lhs: Place<'tcx>, rhs: Rvalue<'tcx>) -> Statement<'tcx> {
        Statement::new(self.source_info, StatementKind::Assign(Box::new((lhs, rhs))))
    }
}