use crate::analysis::{core::alias_analysis::default::MopAAResultMap, safedrop::SafeDropGraph};
use crate::rap_error;
use rustc_data_structures::fx::FxHashSet;
use rustc_middle::{
    mir::{
        Operand::{self, Constant, Copy, Move},
        Place, TerminatorKind,
    },
    ty::{TyCtxt, TyKind, TypingEnv},
};
use std::collections::{HashMap, HashSet};
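
/// Upper bound on the number of basic-block visits performed by `check`; the
/// traversal stops spawning new paths once this budget is exhausted.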
pub const VISIT_LIMIT: usize = 1000;

impl<'tcx> SafeDropGraph<'tcx> {
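    /// Scans the drop-related terminators recorded for basic block `bb_index`
    /// (both `Drop` terminators and drop-like `Call`s collected in `drops`),
    /// skips places that cannot release heap memory, and marks the dropped
    /// locals as dead via `dead_node`.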
    pub fn drop_check(&mut self, bb_index: usize, tcx: TyCtxt<'tcx>) {
        let cur_block = self.blocks[bb_index].clone();
        for drop in cur_block.drops {
            match drop.kind {
                TerminatorKind::Drop {
                    ref place,
                    target: _,
                    unwind: _,
                    replace: _,
                    drop: _,
                    async_fut: _,
                } => {
                    if !self.drop_heap_item_check(place, tcx) {
                        continue;
                    }
                    let birth = self.scc_indices[bb_index];
                    let drop_local = self.projection(tcx, false, place.clone());
                    let info = drop.source_info.clone();
                    self.dead_node(drop_local, birth, &info, false);
                }
                TerminatorKind::Call {
                    func: _, ref args, ..
                } => {
                    if !args.is_empty() {
                        let birth = self.scc_indices[bb_index];
                        let place = match args[0].node {
                            Operand::Copy(place) => place,
                            Operand::Move(place) => place,
                            _ => {
                                rap_error!("Constant operand exists: {:?}", args[0]);
                                return;
                            }
                        };
                        let drop_local = self.projection(tcx, false, place.clone());
                        let info = drop.source_info.clone();
                        self.dead_node(drop_local, birth, &info, false);
                    }
                }
                _ => {}
            }
        }
    }
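
    /// Returns true if dropping `place` may release heap memory: non-ADT types and
    /// ADTs without an `adt_owner` record are treated conservatively as heap items;
    /// otherwise the ownership unit recorded for the resolved variant decides.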
    pub fn drop_heap_item_check(&self, place: &Place<'tcx>, tcx: TyCtxt<'tcx>) -> bool {
        let place_ty = place.ty(&tcx.optimized_mir(self.def_id).local_decls, tcx);
        match place_ty.ty.kind() {
            TyKind::Adt(adtdef, ..) => match self.adt_owner.get(&adtdef.did()) {
                None => true,
                Some(owner_unit) => {
                    let idx = match place_ty.variant_index {
                        Some(vdx) => vdx.index(),
                        None => 0,
                    };
                    owner_unit[idx].0.is_onheap() || owner_unit[idx].1.contains(&true)
                }
            },
            _ => true,
        }
    }
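
    /// Explores the path starting at `bb_index` on a snapshot of the analysis state:
    /// the value, constant, alias-set, and dead-record tables are backed up, `check`
    /// is run, and the tables are restored afterwards.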
    pub fn split_check(&mut self, bb_index: usize, tcx: TyCtxt<'tcx>, fn_map: &MopAAResultMap) {
        let backup_values = self.values.clone();
        let backup_constant = self.constant.clone();
        let backup_alias_set = self.alias_set.clone();
        let backup_dead = self.dead_record.clone();
        self.check(bb_index, tcx, fn_map);
        self.values = backup_values;
        self.constant = backup_constant;
        self.alias_set = backup_alias_set;
        self.dead_record = backup_dead;
    }
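
    /// Like `split_check`, but first pins the discriminant `path_discr_id` to
    /// `path_discr_val`, so the branch condition is visible along the explored path.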
    pub fn split_check_with_cond(
        &mut self,
        bb_index: usize,
        path_discr_id: usize,
        path_discr_val: usize,
        tcx: TyCtxt<'tcx>,
        fn_map: &MopAAResultMap,
    ) {
        let backup_values = self.values.clone();
        let backup_constant = self.constant.clone();
        let backup_alias_set = self.alias_set.clone();
        let backup_dead = self.dead_record.clone();
        self.constant.insert(path_discr_id, path_discr_val);
        self.check(bb_index, tcx, fn_map);
        self.values = backup_values;
        self.constant = backup_constant;
        self.alias_set = backup_alias_set;
        self.dead_record = backup_dead;
    }
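
    /// Core path-sensitive traversal. The block is collapsed to its SCC
    /// representative, alias facts and drop effects are applied, and the walk then
    /// recurses into successors, splitting the state on `SwitchInt` targets. The
    /// exploration budget is bounded by `VISIT_LIMIT`.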
    pub fn check(&mut self, bb_index: usize, tcx: TyCtxt<'tcx>, fn_map: &MopAAResultMap) {
        self.visit_times += 1;
        if self.visit_times > VISIT_LIMIT {
            return;
        }
        let cur_block = self.blocks[self.scc_indices[bb_index]].clone();
        self.alias_bb(self.scc_indices[bb_index], tcx);
        self.alias_bbcall(self.scc_indices[bb_index], tcx, fn_map);
        self.drop_check(self.scc_indices[bb_index], tcx);
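
        // A block heading a nested SCC is handled specially: for every target of its
        // switch, the sub-SCC is folded into one merged node and re-analyzed on a
        // fresh copy of the state.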
        if self.child_scc.get(&self.scc_indices[bb_index]).is_some() {
            let init_index = self.scc_indices[bb_index];
            let (init_block, cur_targets, scc_block_set) =
                self.child_scc.get(&init_index).unwrap().clone();

            for enum_index in cur_targets.all_targets() {
                let backup_values = self.values.clone();
                let backup_constant = self.constant.clone();

                let mut block_node = if bb_index == init_index {
                    init_block.clone()
                } else {
                    self.blocks[bb_index].clone()
                };

                if !block_node.switch_stmts.is_empty() {
                    let TerminatorKind::SwitchInt { targets, .. } =
                        block_node.switch_stmts[0].kind.clone()
                    else {
                        unreachable!();
                    };
                    if cur_targets == targets {
                        block_node.next = FxHashSet::default();
                        block_node.next.insert(enum_index.index());
                    }
                }

                let mut work_list = Vec::new();
                let mut work_set = FxHashSet::<usize>::default();
                work_list.push(bb_index);
                work_set.insert(bb_index);
                while let Some(current_node) = work_list.pop() {
                    block_node.scc_sub_blocks.push(current_node);
                    let real_node = if current_node != init_index {
                        self.blocks[current_node].clone()
                    } else {
                        init_block.clone()
                    };

                    if real_node.switch_stmts.is_empty() {
                        for next in &real_node.next {
                            block_node.next.insert(*next);
                        }
                    } else {
                        let TerminatorKind::SwitchInt { ref targets, .. } =
                            real_node.switch_stmts[0].kind
                        else {
                            unreachable!();
                        };

                        if cur_targets == *targets {
                            block_node.next.insert(enum_index.index());
                        } else {
                            for next in &real_node.next {
                                block_node.next.insert(*next);
                            }
                        }
                    }

                    if real_node.switch_stmts.is_empty() {
                        for next in &real_node.next {
                            if scc_block_set.contains(next) && !work_set.contains(next) {
                                work_set.insert(*next);
                                work_list.push(*next);
                            }
                        }
                    } else {
                        let TerminatorKind::SwitchInt { ref targets, .. } =
                            real_node.switch_stmts[0].kind
                        else {
                            unreachable!();
                        };

                        if cur_targets == *targets {
                            let next_index = enum_index.index();
                            if scc_block_set.contains(&next_index)
                                && !work_set.contains(&next_index)
                            {
                                work_set.insert(next_index);
                                work_list.push(next_index);
                            }
                        } else {
                            for next in &real_node.next {
                                if scc_block_set.contains(next) && !work_set.contains(next) {
                                    work_set.insert(*next);
                                    work_list.push(*next);
                                }
                            }
                        }
                    }
                }
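
                // Remove successors that lead back to the SCC head, so the merged
                // node does not re-enter the cycle.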
                let mut to_remove = Vec::new();
                for i in block_node.next.iter() {
                    if self.scc_indices[*i] == init_index {
                        to_remove.push(*i);
                    }
                }
                for i in to_remove {
                    block_node.next.remove(&i);
                }
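
                // Analyze every block folded into the merged node, then either finish
                // the path or continue along the remaining successors.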
                for i in block_node.scc_sub_blocks.clone() {
                    self.alias_bb(i, tcx);
                    self.alias_bbcall(i, tcx, fn_map);
                    self.drop_check(i, tcx);
                }
                match block_node.next.len() {
                    0 => {
                        if Self::should_check(self.def_id) {
                            self.dp_check(&cur_block);
                        }
                        return;
                    }
                    _ => {
                        for next in block_node.next {
                            self.check(next, tcx, fn_map);
                        }
                    }
                }

                self.values = backup_values;
                self.constant = backup_constant;
            }

            return;
        }
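
        // For an ordinary SCC head, enumerate candidate visit orders of its
        // sub-blocks (falling back to the recorded block order when SAFEDROP=0).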
        let mut order: Vec<Vec<usize>> = vec![vec![]];

        if !cur_block.scc_sub_blocks.is_empty() {
            match std::env::var_os("SAFEDROP") {
                Some(val) if val == "0" => {
                    order.push(cur_block.scc_sub_blocks.clone());
                }
                _ => {
                    self.calculate_scc_order(
                        &mut cur_block.scc_sub_blocks.clone(),
                        &mut vec![],
                        &mut order,
                        &mut HashMap::new(),
                        bb_index,
                        bb_index,
                        &mut HashSet::new(),
                    );
                }
            }
        }
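
        // Replay the analysis once per candidate order, restoring the saved value,
        // constant, and alias-set tables before every attempt.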
        let backup_values = self.values.clone();
        let backup_constant = self.constant.clone();
        let backup_alias_set = self.alias_set.clone();
        for scc_each in order {
            self.alias_set = backup_alias_set.clone();
            self.values = backup_values.clone();
            self.constant = backup_constant.clone();

            if !scc_each.is_empty() {
                for idx in scc_each {
                    self.alias_bb(idx, tcx);
                    self.alias_bbcall(idx, tcx, fn_map);
                }
            }

            let cur_block = cur_block.clone();
            match cur_block.next.len() {
                0 => {
                    if Self::should_check(self.def_id) {
                        self.dp_check(&cur_block);
                    }
                    return;
                }
                1 => {
                    for next in cur_block.next {
                        self.check(next, tcx, fn_map);
                    }
                    return;
                }
                _ => {}
            }
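
            // Several successors: try to resolve the switch discriminant. A known
            // constant narrows the walk to one target; a tracked enum discriminant
            // lets each target be explored with its value pinned.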
            let mut single_target = false;
            let mut sw_val = 0;
            let mut sw_target = 0;
            let mut path_discr_id = 0;
            let mut sw_targets = None;
            if !cur_block.switch_stmts.is_empty() && cur_block.scc_sub_blocks.is_empty() {
                if let TerminatorKind::SwitchInt {
                    ref discr,
                    ref targets,
                } = cur_block.switch_stmts[0].clone().kind
                {
                    match discr {
                        Copy(p) | Move(p) => {
                            let place = self.projection(tcx, false, *p);
                            if let Some(father) = self.disc_map.get(&self.values[place].local) {
                                if let Some(constant) = self.constant.get(father) {
                                    if *constant != usize::MAX {
                                        single_target = true;
                                        sw_val = *constant;
                                    }
                                }
                                if self.values[place].local == place {
                                    path_discr_id = *father;
                                    sw_targets = Some(targets.clone());
                                }
                            }
                        }
                        Constant(c) => {
                            single_target = true;
                            let ty_env = TypingEnv::post_analysis(self.tcx, self.def_id);
                            if let Some(val) = c.const_.try_eval_target_usize(self.tcx, ty_env) {
                                sw_val = val as usize;
                            }
                        }
                    }
                    if single_target {
                        for iter in targets.iter() {
                            if iter.0 as usize == sw_val {
                                sw_target = iter.1.as_usize();
                                break;
                            }
                        }
                        if sw_target == 0 {
                            let all_target = targets.all_targets();
                            sw_target = all_target[all_target.len() - 1].as_usize();
                        }
                    }
                }
            }
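            // Follow the single resolved target, split per switch value with the
            // discriminant pinned, or fall back to splitting on every successor.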
            if single_target {
                self.check(sw_target, tcx, fn_map);
            } else {
                if let Some(targets) = sw_targets {
                    for iter in targets.iter() {
                        if self.visit_times > VISIT_LIMIT {
                            continue;
                        }
                        let next_index = iter.1.as_usize();
                        let path_discr_val = iter.0 as usize;
                        self.split_check_with_cond(
                            next_index,
                            path_discr_id,
                            path_discr_val,
                            tcx,
                            fn_map,
                        );
                    }
                    let all_targets = targets.all_targets();
                    let next_index = all_targets[all_targets.len() - 1].as_usize();
                    // usize::MAX marks the otherwise path, whose discriminant value is unknown.
                    let path_discr_val = usize::MAX;
                    self.split_check_with_cond(
                        next_index,
                        path_discr_id,
                        path_discr_val,
                        tcx,
                        fn_map,
                    );
                } else {
                    for i in cur_block.next {
                        if self.visit_times > VISIT_LIMIT {
                            continue;
                        }
                        let next_index = i;
                        self.split_check(next_index, tcx, fn_map);
                    }
                }
            }
        }
    }
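
    /// Enumerates feasible visit orders of the blocks inside an SCC. Switch
    /// terminators constrain the walk: a discriminant value already fixed in
    /// `disc_map` selects only its matching target, otherwise each possible value is
    /// tried in turn. Every complete cycle back to `root` is pushed into `ans`.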
    pub fn calculate_scc_order(
        &mut self,
        scc: &Vec<usize>,
        path: &mut Vec<usize>,
        ans: &mut Vec<Vec<usize>>,
        disc_map: &mut HashMap<usize, usize>,
        idx: usize,
        root: usize,
        visit: &mut HashSet<usize>,
    ) {
        if idx == root && !path.is_empty() {
            ans.push(path.clone());
            return;
        }
        visit.insert(idx);
        let term = &self.terms[idx].clone();
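
        // Branch on the block terminator: a switch forks the walk per discriminant
        // value, any other terminator simply follows the in-SCC successors.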
        match term {
            TerminatorKind::SwitchInt { discr, targets } => match discr {
                Copy(p) | Move(p) => {
                    let place = self.projection(self.tcx, false, *p);
                    if let Some(father) = self.disc_map.get(&self.values[place].local) {
                        let father = *father;
                        // If the discriminant is already pinned on this path, follow
                        // only the matching target; otherwise try every value.
                        if let Some(constant) = disc_map.get(&father) {
                            let constant = *constant;
                            for t in targets.iter() {
                                if t.0 as usize == constant {
                                    let target = t.1.as_usize();
                                    if path.contains(&target) {
                                        continue;
                                    }
                                    path.push(target);
                                    self.calculate_scc_order(
                                        scc, path, ans, disc_map, target, root, visit,
                                    );
                                    path.pop();
                                }
                            }
                        } else {
                            for t in targets.iter() {
                                let constant = t.0 as usize;
                                let target = t.1.as_usize();
                                if path.contains(&target) {
                                    continue;
                                }
                                path.push(target);
                                disc_map.insert(father, constant);
                                self.calculate_scc_order(
                                    scc, path, ans, disc_map, target, root, visit,
                                );
                                disc_map.remove(&father);
                                path.pop();
                            }
                        }
                    } else {
                        // No recorded parent discriminant: key the assumed value on
                        // the projected switch operand itself.
                        if let Some(constant) = disc_map.get(&place) {
                            let constant = *constant;
                            for t in targets.iter() {
                                if t.0 as usize == constant {
                                    let target = t.1.as_usize();
                                    if path.contains(&target) {
                                        continue;
                                    }
                                    path.push(target);
                                    self.calculate_scc_order(
                                        scc, path, ans, disc_map, target, root, visit,
                                    );
                                    path.pop();
                                }
                            }
                        } else {
                            for t in targets.iter() {
                                let constant = t.0 as usize;
                                let target = t.1.as_usize();
                                if path.contains(&target) {
                                    continue;
                                }
                                path.push(target);
                                disc_map.insert(place, constant);
                                self.calculate_scc_order(
                                    scc, path, ans, disc_map, target, root, visit,
                                );
                                disc_map.remove(&place);
                                path.pop();
                            }

                            // Also explore the otherwise target, using the number of
                            // explicit targets as its placeholder value.
                            let constant = targets.iter().len();
                            let target = targets.otherwise().as_usize();
                            if !path.contains(&target) {
                                path.push(target);
                                disc_map.insert(place, constant);
                                self.calculate_scc_order(
                                    scc, path, ans, disc_map, target, root, visit,
                                );
                                disc_map.remove(&place);
                                path.pop();
                            }
                        }
                    }
                }
                _ => {}
            },
            _ => {
                for bidx in self.blocks[idx].next.clone() {
                    if !scc.contains(&bidx) && bidx != root {
                        continue;
                    }
                    if bidx != root {
                        path.push(bidx);
                        self.calculate_scc_order(scc, path, ans, disc_map, bidx, root, visit);
                        path.pop();
                    } else {
                        self.calculate_scc_order(scc, path, ans, disc_map, bidx, root, visit);
                    }
                }
            }
        }

        visit.remove(&idx);
    }
}