1use std::fmt;
2
3use rustc_abi::{FieldIdx, VariantIdx};
4use rustc_index::IndexVec;
5use rustc_index::bit_set::DenseBitSet;
6use rustc_middle::mir::*;
7use rustc_middle::ty::{self, TyCtxt};
8use rustc_mir_dataflow::impls::{MaybeInitializedPlaces, MaybeUninitializedPlaces};
9use rustc_mir_dataflow::move_paths::{LookupResult, MoveData, MovePathIndex};
10use rustc_mir_dataflow::{
11 Analysis, DropFlagState, MoveDataTypingEnv, ResultsCursor, on_all_children_bits,
12 on_lookup_result_bits,
13};
14use rustc_span::Span;
15use tracing::{debug, instrument};
16
17use crate::deref_separator::deref_finder;
18use crate::elaborate_drop::{DropElaborator, DropFlagMode, DropStyle, Unwind, elaborate_drop};
19use crate::patch::MirPatch;
20
/// MIR pass that elaborates `Drop` terminators: dead drops are removed,
/// conditional drops are guarded by runtime drop flags, and partially
/// initialized values get per-field drop ladders.
pub(super) struct ElaborateDrops;

impl<'tcx> crate::MirPass<'tcx> for ElaborateDrops {
    #[instrument(level = "trace", skip(self, tcx, body))]
    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
        debug!("elaborate_drops({:?} @ {:?})", body.source, body.span);
        // The pass runs after analysis, so the post-analysis typing
        // environment is the right one for `needs_drop` queries.
        let typing_env = ty::TypingEnv::post_analysis(tcx, body.source.def_id());
        // Only gather move paths for types that actually need dropping;
        // everything else is irrelevant to drop elaboration.
        let move_data = MoveData::gather_moves(body, tcx, |ty| ty.needs_drop(tcx, typing_env));
        let elaborate_patch = {
            let env = MoveDataTypingEnv { move_data, typing_env };

            // "Maybe initialized" dataflow, used both to classify drops and
            // (via `compute_dead_unwinds`) to find cleanup edges that can
            // never drop anything.
            let mut inits = MaybeInitializedPlaces::new(tcx, body, &env.move_data)
                .exclude_inactive_in_otherwise()
                .skipping_unreachable_unwind()
                .iterate_to_fixpoint(tcx, body, Some("elaborate_drops"))
                .into_results_cursor(body);
            let dead_unwinds = compute_dead_unwinds(body, &mut inits);

            // "Maybe uninitialized" dataflow, skipping the unwind edges that
            // were just proven dead so they don't pollute the results.
            let uninits = MaybeUninitializedPlaces::new(tcx, body, &env.move_data)
                .include_inactive_in_otherwise()
                .mark_inactive_variants_as_uninit()
                .skipping_unreachable_unwind(dead_unwinds)
                .iterate_to_fixpoint(tcx, body, Some("elaborate_drops"))
                .into_results_cursor(body);

            // One optional drop-flag local per move path, created lazily by
            // `create_drop_flag`.
            let drop_flags = IndexVec::from_elem(None, &env.move_data.move_paths);
            ElaborateDropsCtxt {
                tcx,
                body,
                env: &env,
                init_data: InitializationData { inits, uninits },
                drop_flags,
                patch: MirPatch::new(body),
            }
            .elaborate()
        };
        elaborate_patch.apply(body);
        // Elaboration can introduce projections with nested derefs; re-run
        // the deref separator to restore that pass's invariant
        // (see `crate::deref_separator`).
        deref_finder(tcx, body);
    }

    fn is_required(&self) -> bool {
        true
    }
}
97
98#[instrument(level = "trace", skip(body, flow_inits), ret)]
101fn compute_dead_unwinds<'a, 'tcx>(
102 body: &'a Body<'tcx>,
103 flow_inits: &mut ResultsCursor<'a, 'tcx, MaybeInitializedPlaces<'a, 'tcx>>,
104) -> DenseBitSet<BasicBlock> {
105 let mut dead_unwinds = DenseBitSet::new_empty(body.basic_blocks.len());
108 for (bb, bb_data) in body.basic_blocks.iter_enumerated() {
109 let TerminatorKind::Drop { place, unwind: UnwindAction::Cleanup(_), .. } =
110 bb_data.terminator().kind
111 else {
112 continue;
113 };
114
115 flow_inits.seek_before_primary_effect(body.terminator_loc(bb));
116 if flow_inits.analysis().is_unwind_dead(place, flow_inits.get()) {
117 dead_unwinds.insert(bb);
118 }
119 }
120
121 dead_unwinds
122}
123
/// Paired cursors into the two initialization dataflow results; both are kept
/// seeked to the same location so a move path can be queried for
/// (maybe-init, maybe-uninit) simultaneously.
struct InitializationData<'a, 'tcx> {
    // Forward "maybe initialized places" results.
    inits: ResultsCursor<'a, 'tcx, MaybeInitializedPlaces<'a, 'tcx>>,
    // Forward "maybe uninitialized places" results.
    uninits: ResultsCursor<'a, 'tcx, MaybeUninitializedPlaces<'a, 'tcx>>,
}
128
129impl InitializationData<'_, '_> {
130 fn seek_before(&mut self, loc: Location) {
131 self.inits.seek_before_primary_effect(loc);
132 self.uninits.seek_before_primary_effect(loc);
133 }
134
135 fn maybe_init_uninit(&self, path: MovePathIndex) -> (bool, bool) {
136 (self.inits.get().contains(path), self.uninits.get().contains(path))
137 }
138}
139
140impl<'a, 'tcx> DropElaborator<'a, 'tcx> for ElaborateDropsCtxt<'a, 'tcx> {
141 type Path = MovePathIndex;
142
143 fn patch_ref(&self) -> &MirPatch<'tcx> {
144 &self.patch
145 }
146
147 fn patch(&mut self) -> &mut MirPatch<'tcx> {
148 &mut self.patch
149 }
150
151 fn body(&self) -> &'a Body<'tcx> {
152 self.body
153 }
154
155 fn tcx(&self) -> TyCtxt<'tcx> {
156 self.tcx
157 }
158
159 fn typing_env(&self) -> ty::TypingEnv<'tcx> {
160 self.env.typing_env
161 }
162
163 fn allow_async_drops(&self) -> bool {
164 true
165 }
166
167 fn terminator_loc(&self, bb: BasicBlock) -> Location {
168 self.patch.terminator_loc(self.body, bb)
169 }
170
171 #[instrument(level = "debug", skip(self), ret)]
172 fn drop_style(&self, path: Self::Path, mode: DropFlagMode) -> DropStyle {
173 let ((maybe_init, maybe_uninit), multipart) = match mode {
174 DropFlagMode::Shallow => (self.init_data.maybe_init_uninit(path), false),
175 DropFlagMode::Deep => {
176 let mut some_maybe_init = false;
177 let mut some_maybe_uninit = false;
178 let mut children_count = 0;
179 on_all_children_bits(self.move_data(), path, |child| {
180 let (maybe_init, maybe_uninit) = self.init_data.maybe_init_uninit(child);
181 debug!("elaborate_drop: state({:?}) = {:?}", child, (maybe_init, maybe_uninit));
182 some_maybe_init |= maybe_init;
183 some_maybe_uninit |= maybe_uninit;
184 children_count += 1;
185 });
186 ((some_maybe_init, some_maybe_uninit), children_count != 1)
187 }
188 };
189 match (maybe_init, maybe_uninit, multipart) {
190 (false, _, _) => DropStyle::Dead,
191 (true, false, _) => DropStyle::Static,
192 (true, true, false) => DropStyle::Conditional,
193 (true, true, true) => DropStyle::Open,
194 }
195 }
196
197 fn clear_drop_flag(&mut self, loc: Location, path: Self::Path, mode: DropFlagMode) {
198 match mode {
199 DropFlagMode::Shallow => {
200 self.set_drop_flag(loc, path, DropFlagState::Absent);
201 }
202 DropFlagMode::Deep => {
203 on_all_children_bits(self.move_data(), path, |child| {
204 self.set_drop_flag(loc, child, DropFlagState::Absent)
205 });
206 }
207 }
208 }
209
210 fn field_subpath(&self, path: Self::Path, field: FieldIdx) -> Option<Self::Path> {
211 rustc_mir_dataflow::move_path_children_matching(self.move_data(), path, |e| match e {
212 ProjectionElem::Field(idx, _) => idx == field,
213 _ => false,
214 })
215 }
216
217 fn array_subpath(&self, path: Self::Path, index: u64, size: u64) -> Option<Self::Path> {
218 rustc_mir_dataflow::move_path_children_matching(self.move_data(), path, |e| match e {
219 ProjectionElem::ConstantIndex { offset, min_length, from_end } => {
220 debug_assert!(size == min_length, "min_length should be exact for arrays");
221 assert!(!from_end, "from_end should not be used for array element ConstantIndex");
222 offset == index
223 }
224 _ => false,
225 })
226 }
227
228 fn deref_subpath(&self, path: Self::Path) -> Option<Self::Path> {
229 rustc_mir_dataflow::move_path_children_matching(self.move_data(), path, |e| {
230 e == ProjectionElem::Deref
231 })
232 }
233
234 fn downcast_subpath(&self, path: Self::Path, variant: VariantIdx) -> Option<Self::Path> {
235 rustc_mir_dataflow::move_path_children_matching(self.move_data(), path, |e| match e {
236 ProjectionElem::Downcast(_, idx) => idx == variant,
237 _ => false,
238 })
239 }
240
241 fn get_drop_flag(&mut self, path: Self::Path) -> Option<Operand<'tcx>> {
242 self.drop_flag(path).map(Operand::Copy)
243 }
244}
245
/// Shared state for one run of drop elaboration over a single MIR body.
struct ElaborateDropsCtxt<'a, 'tcx> {
    tcx: TyCtxt<'tcx>,
    // The body being elaborated; read-only — all edits go through `patch`.
    body: &'a Body<'tcx>,
    // Move paths plus the typing environment gathered in `run_pass`.
    env: &'a MoveDataTypingEnv<'tcx>,
    // Cursors into the init/uninit dataflow results.
    init_data: InitializationData<'a, 'tcx>,
    // Lazily created `bool` drop-flag local per move path.
    drop_flags: IndexVec<MovePathIndex, Option<Local>>,
    // Accumulates all edits; applied to `body` at the end of the pass.
    patch: MirPatch<'tcx>,
}
254
255impl fmt::Debug for ElaborateDropsCtxt<'_, '_> {
256 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
257 f.debug_struct("ElaborateDropsCtxt").finish_non_exhaustive()
258 }
259}
260
impl<'a, 'tcx> ElaborateDropsCtxt<'a, 'tcx> {
    /// Convenience accessor for the gathered move-path data.
    fn move_data(&self) -> &'a MoveData<'tcx> {
        &self.env.move_data
    }

    /// Allocates a fresh `bool` temporary to serve as the drop flag for
    /// `index`, unless one already exists.
    fn create_drop_flag(&mut self, index: MovePathIndex, span: Span) {
        let patch = &mut self.patch;
        debug!("create_drop_flag({:?})", self.body.span);
        self.drop_flags[index].get_or_insert_with(|| patch.new_temp(self.tcx.types.bool, span));
    }

    /// The drop-flag place for `index`, if a flag was created for it.
    fn drop_flag(&mut self, index: MovePathIndex) -> Option<Place<'tcx>> {
        self.drop_flags[index].map(Place::from)
    }

    /// Runs the full elaboration and returns the accumulated patch:
    /// first decide which paths need flags, then rewrite the `Drop`
    /// terminators, then insert all the flag assignments.
    fn elaborate(mut self) -> MirPatch<'tcx> {
        self.collect_drop_flags();

        self.elaborate_drops();

        self.drop_flags_on_init();
        self.drop_flags_for_fn_rets();
        self.drop_flags_for_args();
        self.drop_flags_for_locs();

        self.patch
    }

    /// Creates a drop flag for every move path that is *conditionally*
    /// initialized (maybe-init AND maybe-uninit) at some `Drop` terminator.
    fn collect_drop_flags(&mut self) {
        for (bb, data) in self.body.basic_blocks.iter_enumerated() {
            let terminator = data.terminator();
            let TerminatorKind::Drop { ref place, .. } = terminator.kind else { continue };

            let path = self.move_data().rev_lookup.find(place.as_ref());
            debug!("collect_drop_flags: {:?}, place {:?} ({:?})", bb, place, path);

            match path {
                LookupResult::Exact(path) => {
                    // Query the initialization state just before the drop;
                    // any child in a mixed state needs a runtime flag.
                    self.init_data.seek_before(self.body.terminator_loc(bb));
                    on_all_children_bits(self.move_data(), path, |child| {
                        let (maybe_init, maybe_uninit) = self.init_data.maybe_init_uninit(child);
                        debug!(
                            "collect_drop_flags: collecting {:?} from {:?}@{:?} - {:?}",
                            child,
                            place,
                            path,
                            (maybe_init, maybe_uninit)
                        );
                        if maybe_init && maybe_uninit {
                            self.create_drop_flag(child, terminator.source_info.span)
                        }
                    });
                }
                LookupResult::Parent(None) => {}
                LookupResult::Parent(Some(parent)) => {
                    // The place is only tracked through an enclosing path.
                    // Deref temps are exempt from the check below —
                    // presumably because they alias a tracked place; confirm
                    // against `deref_separator`.
                    if self.body.local_decls[place.local].is_deref_temp() {
                        continue;
                    }

                    // A drop of an untracked place whose parent is possibly
                    // uninitialized cannot be elaborated correctly; report it
                    // as a delayed ICE rather than miscompiling.
                    self.init_data.seek_before(self.body.terminator_loc(bb));
                    let (_maybe_init, maybe_uninit) = self.init_data.maybe_init_uninit(parent);
                    if maybe_uninit {
                        self.tcx.dcx().span_delayed_bug(
                            terminator.source_info.span,
                            format!(
                                "drop of untracked, uninitialized value {bb:?}, place {place:?} ({path:?})"
                            ),
                        );
                    }
                }
            };
        }
    }

    /// Rewrites every `Drop` terminator according to its computed drop style,
    /// threading through the correct unwind action.
    fn elaborate_drops(&mut self) {
        for (bb, data) in self.body.basic_blocks.iter_enumerated() {
            let terminator = data.terminator();
            let TerminatorKind::Drop { place, target, unwind, replace, drop, async_fut: _ } =
                terminator.kind
            else {
                continue;
            };

            // If the dropped type needs no drop at all, the terminator
            // degenerates to a plain goto.
            if !place
                .ty(&self.body.local_decls, self.tcx)
                .ty
                .needs_drop(self.tcx, self.typing_env())
            {
                self.patch.patch_terminator(bb, TerminatorKind::Goto { target });
                continue;
            }

            let path = self.move_data().rev_lookup.find(place.as_ref());
            match path {
                LookupResult::Exact(path) => {
                    // Normalize the `UnwindAction` into the `Unwind` form the
                    // drop-shim builder expects, materializing shared
                    // resume/unreachable/terminate blocks as needed.
                    let unwind = match unwind {
                        // Inside a cleanup block, a drop can't unwind again.
                        _ if data.is_cleanup => Unwind::InCleanup,
                        UnwindAction::Cleanup(cleanup) => Unwind::To(cleanup),
                        UnwindAction::Continue => Unwind::To(self.patch.resume_block()),
                        UnwindAction::Unreachable => {
                            Unwind::To(self.patch.unreachable_cleanup_block())
                        }
                        UnwindAction::Terminate(reason) => {
                            debug_assert_ne!(
                                reason,
                                UnwindTerminateReason::InCleanup,
                                "we are not in a cleanup block, InCleanup reason should be impossible"
                            );
                            Unwind::To(self.patch.terminate_block(reason))
                        }
                    };
                    self.init_data.seek_before(self.body.terminator_loc(bb));
                    elaborate_drop(
                        self,
                        terminator.source_info,
                        place,
                        path,
                        target,
                        unwind,
                        bb,
                        drop,
                    )
                }
                LookupResult::Parent(None) => {}
                LookupResult::Parent(Some(_)) => {
                    // Dropping an untracked place is only legitimate for
                    // drop-and-replace sequences; anything else is a
                    // compiler bug.
                    if !replace {
                        self.tcx.dcx().span_bug(
                            terminator.source_info.span,
                            format!("drop of untracked value {bb:?}"),
                        );
                    }
                    assert!(!data.is_cleanup);
                }
            }
        }
    }

    /// A `bool` constant rvalue usable as a drop-flag value.
    fn constant_bool(&self, span: Span, val: bool) -> Rvalue<'tcx> {
        Rvalue::Use(Operand::Constant(Box::new(ConstOperand {
            span,
            user_ty: None,
            const_: Const::from_bool(self.tcx, val),
        })))
    }

    /// Emits `flag = val` at `loc` for the drop flag of `path`, if `path`
    /// has a flag (paths without a flag need no runtime tracking).
    fn set_drop_flag(&mut self, loc: Location, path: MovePathIndex, val: DropFlagState) {
        if let Some(flag) = self.drop_flags[path] {
            let span = self.patch.source_info_for_location(self.body, loc).span;
            let val = self.constant_bool(span, val.value());
            self.patch.add_assign(loc, Place::from(flag), val);
        }
    }

    /// Initializes every drop flag to `false` at function entry.
    fn drop_flags_on_init(&mut self) {
        let loc = Location::START;
        let span = self.patch.source_info_for_location(self.body, loc).span;
        let false_ = self.constant_bool(span, false);
        for flag in self.drop_flags.iter().flatten() {
            self.patch.add_assign(loc, Place::from(*flag), false_.clone());
        }
    }

    /// For calls that have a cleanup unwind edge, marks the call destination
    /// as initialized at the start of the *success* target block (on the
    /// unwind path the destination was never written).
    fn drop_flags_for_fn_rets(&mut self) {
        for (bb, data) in self.body.basic_blocks.iter_enumerated() {
            if let TerminatorKind::Call {
                destination,
                target: Some(tgt),
                unwind: UnwindAction::Cleanup(_),
                ..
            } = data.terminator().kind
            {
                // NOTE(review): the flag update relies on the original
                // terminator still being in place — assert no earlier patch
                // replaced it.
                assert!(!self.patch.is_term_patched(bb));

                let loc = Location { block: tgt, statement_index: 0 };
                let path = self.move_data().rev_lookup.find(destination.as_ref());
                on_lookup_result_bits(self.move_data(), path, |child| {
                    self.set_drop_flag(loc, child, DropFlagState::Present)
                });
            }
        }
    }

    /// Applies the drop-flag effects of function-entry argument
    /// initialization (arguments start out initialized).
    fn drop_flags_for_args(&mut self) {
        let loc = Location::START;
        rustc_mir_dataflow::drop_flag_effects_for_function_entry(
            self.body,
            &self.env.move_data,
            |path, ds| {
                self.set_drop_flag(loc, path, ds);
            },
        )
    }

    /// Applies the per-location drop-flag effects of every statement and
    /// terminator (moves clear flags, assignments set them).
    fn drop_flags_for_locs(&mut self) {
        for (bb, data) in self.body.basic_blocks.iter_enumerated() {
            debug!("drop_flags_for_locs({:?})", data);
            // Iterate over all statements plus the terminator slot
            // (statement_index == statements.len()).
            for i in 0..(data.statements.len() + 1) {
                debug!("drop_flag_for_locs: stmt {}", i);
                if i == data.statements.len() {
                    match data.terminator().kind {
                        TerminatorKind::Drop { .. } => {
                            // Flag updates for drops are emitted by the
                            // elaborated drop itself.
                            continue;
                        }
                        TerminatorKind::UnwindResume => {
                            // Nothing to record on resume.
                        }
                        _ => {
                            // NOTE(review): as above, flag updates at the
                            // terminator require the terminator to be
                            // unpatched.
                            assert!(!self.patch.is_term_patched(bb));
                        }
                    }
                }
                let loc = Location { block: bb, statement_index: i };
                rustc_mir_dataflow::drop_flag_effects_for_location(
                    self.body,
                    &self.env.move_data,
                    loc,
                    |path, ds| self.set_drop_flag(loc, path, ds),
                )
            }

            // Calls *without* a cleanup edge write their destination
            // unconditionally, so the flag is set at the call site itself
            // (cleanup-edge calls are handled in `drop_flags_for_fn_rets`).
            if let TerminatorKind::Call {
                destination,
                target: Some(_),
                unwind:
                    UnwindAction::Continue | UnwindAction::Unreachable | UnwindAction::Terminate(_),
                ..
            } = data.terminator().kind
            {
                assert!(!self.patch.is_term_patched(bb));

                let loc = Location { block: bb, statement_index: data.statements.len() };
                let path = self.move_data().rev_lookup.find(destination.as_ref());
                on_lookup_result_bits(self.move_data(), path, |child| {
                    self.set_drop_flag(loc, child, DropFlagState::Present)
                });
            }
        }
    }
}