rapx/analysis/opt/memory_cloning/used_as_immutable.rs

use annotate_snippets::Level;
use annotate_snippets::Renderer;
use annotate_snippets::Snippet;
use once_cell::sync::OnceCell;
use rustc_ast::Mutability;

use crate::analysis::core::dataflow::graph::DFSStatus;
use crate::analysis::core::dataflow::graph::Direction;
use crate::analysis::core::dataflow::graph::EdgeIdx;
use crate::analysis::opt::OptCheck;
use rustc_middle::mir::Local;
use rustc_middle::ty::{TyCtxt, TyKind};
use rustc_span::Span;
use std::cell::Cell;
use std::collections::HashSet;

static DEFPATHS: OnceCell<DefPaths> = OnceCell::new();

use crate::analysis::core::dataflow::graph::Graph;
use crate::analysis::core::dataflow::graph::NodeOp;
use crate::analysis::utils::def_path::DefPath;
use crate::utils::log::{
    relative_pos_range, span_to_filename, span_to_line_number, span_to_source_code,
};

use super::super::LEVEL;

struct DefPaths {
    clone: DefPath,
    to_owned: DefPath,
    deref: DefPath,
}

impl DefPaths {
    pub fn new(tcx: &TyCtxt<'_>) -> Self {
        Self {
            clone: DefPath::new("std::clone::Clone::clone", tcx),
            to_owned: DefPath::new("std::borrow::ToOwned::to_owned", tcx),
            deref: DefPath::new("std::ops::Deref::deref", tcx),
        }
    }
}

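// Searches downward in the dataflow graph from a clone node for the first call
// other than Deref::deref that takes the cloned value as an argument, returning
// that node together with the edge along which the value flows into it.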
fn find_downside_use_as_param(graph: &Graph, clone_node_idx: Local) -> Option<(Local, EdgeIdx)> {
    let mut record = None;
    let edge_idx = Cell::new(0 as usize);
    let deref_id = DEFPATHS.get().unwrap().deref.last_def_id();
    let mut node_operator = |graph: &Graph, idx: Local| {
        if idx == clone_node_idx {
            // this is the clone node itself, which is also a Call; keep searching
            return DFSStatus::Continue;
        }
        let node = &graph.nodes[idx];
        for op in node.ops.iter() {
            if let NodeOp::Call(def_id) = op {
                if *def_id == deref_id {
                    // skip Deref::deref calls along the way
                    return DFSStatus::Continue;
                }
                record = Some((idx, edge_idx.get()));
                return DFSStatus::Stop;
            }
        }
        DFSStatus::Continue
    };
    let mut edge_operator = |graph: &Graph, idx: EdgeIdx| {
        edge_idx.set(idx);
        Graph::equivalent_edge_validator(graph, idx)
    };
    let mut seen = HashSet::new();
    graph.dfs(
        clone_node_idx,
        Direction::Downside,
        &mut node_operator,
        &mut edge_operator,
        true,
        &mut seen,
    );
    record
}

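// Flags clone/to_owned calls whose results are passed to callees that do not
// require a mutable reference, so the clone could likely be replaced by a borrow.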
pub struct UsedAsImmutableCheck {
    record: Vec<(Span, Span)>,
}

impl OptCheck for UsedAsImmutableCheck {
    fn new() -> Self {
        Self { record: Vec::new() }
    }

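    // For every clone/to_owned node, follow the cloned value to its use as a call
    // argument; record the pair unless the matching parameter is `&mut` or the
    // callee looks ownership-taking (`into`/`new`) at lower check levels.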
    fn check(&mut self, graph: &Graph, tcx: &TyCtxt) {
        let _ = &DEFPATHS.get_or_init(|| DefPaths::new(tcx));
        let def_paths = &DEFPATHS.get().unwrap();
        let level = LEVEL.lock().unwrap();
        for (idx, node) in graph.nodes.iter_enumerated() {
            if node.ops.len() > 1 {
                // skip locals with more than one operation (mutable variables)
                continue;
            }
            if let NodeOp::Call(def_id) = node.ops[0] {
                if def_id == def_paths.clone.last_def_id()
                    || def_id == def_paths.to_owned.last_def_id()
                {
                    if let Some((node_idx, edge_idx)) = find_downside_use_as_param(graph, idx) {
                        let use_node = &graph.nodes[node_idx];

                        // `seq` identifies which op inside the use node this edge feeds;
                        // `index` is the argument position of the cloned value in that call
                        let seq = graph.edges[edge_idx].seq;
                        let filtered_in_edges: Vec<&usize> = use_node
                            .in_edges
                            .iter()
                            .filter(|idx| graph.edges[**idx].seq == seq)
                            .collect();
                        let index = filtered_in_edges.binary_search(&&edge_idx).unwrap();
                        if let NodeOp::Call(callee_def_id) = use_node.ops[seq] {
                            let fn_sig = tcx.try_normalize_erasing_regions(
                                rustc_middle::ty::TypingEnv::post_analysis(*tcx, def_id),
                                tcx.fn_sig(callee_def_id).skip_binder(),
                            );
                            if fn_sig.is_ok() {
                                let fn_sig = fn_sig.unwrap().skip_binder();
                                let ty = fn_sig.inputs().iter().nth(index).unwrap();
                                if let TyKind::Ref(_, _, mutability) = ty.kind() {
                                    if *mutability == Mutability::Mut {
                                        // the callee takes a mutable reference, so this clone cannot be elided
                                        break;
                                    }
                                }
                                let callee_func_name = format!("{:?}", callee_def_id);
                                if *level != 2
                                    && (callee_func_name.contains("into")
                                        || callee_func_name.contains("new"))
                                {
                                    // `into`/`new`-like callees usually take ownership; skip unless at the strictest level
                                    break;
                                }
                                let clone_span = node.span;
                                let use_span = use_node.span;
                                self.record.push((clone_span, use_span));
                            }
                        }
                    }
                }
            }
        }
    }

    fn report(&self, graph: &Graph) {
        for (clone_span, use_span) in self.record.iter() {
            report_used_as_immutable(graph, *clone_span, *use_span);
        }
    }

    fn cnt(&self) -> usize {
        self.record.len()
    }
}

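// Renders one diagnostic: the enclosing function's source is shown with the clone
// site and the use site annotated, plus a hint to use a borrow instead.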
fn report_used_as_immutable(graph: &Graph, clone_span: Span, use_span: Span) {
    let code_source = span_to_source_code(graph.span);
    let filename = span_to_filename(clone_span);
    let snippet = Snippet::source(&code_source)
        .line_start(span_to_line_number(graph.span))
        .origin(&filename)
        .fold(true)
        .annotation(
            Level::Error
                .span(relative_pos_range(graph.span, clone_span))
                .label("Cloning happens here."),
        )
        .annotation(
            Level::Error
                .span(relative_pos_range(graph.span, use_span))
                .label("Used here"),
        );
    let message = Level::Warning
        .title("Unnecessary memory cloning detected")
        .snippet(snippet)
        .footer(Level::Help.title("Use borrowings instead."));
    let renderer = Renderer::styled();
    println!("{}", renderer.render(message));
}