use rustc_ast::Mutability;
use rustc_hir::{Expr, ExprKind, UnOp};
use rustc_middle::ty::layout::{LayoutOf as _, TyAndLayout};
use rustc_middle::ty::{self};
use rustc_session::{declare_lint, declare_lint_pass};
use rustc_span::sym;

use crate::lints::InvalidReferenceCastingDiag;
use crate::utils::peel_casts;
use crate::{LateContext, LateLintPass, LintContext};

declare_lint! {
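    /// The `invalid_reference_casting` lint checks for casts of `&T` to
    /// `&mut T` without using interior mutability.
    ///
    /// Such casts are undefined behavior: they violate Rust's reference
    /// aliasing rules, and `UnsafeCell` is the only legal way to obtain
    /// aliasable data that is considered mutable.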
    pub INVALID_REFERENCE_CASTING,
    Deny,
    "casts of `&T` to `&mut T` without interior mutability"
}

declare_lint_pass!(InvalidReferenceCasting => [INVALID_REFERENCE_CASTING]);

impl<'tcx> LateLintPass<'tcx> for InvalidReferenceCasting {
    fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) {
        if let Some((e, pat)) = borrow_or_assign(cx, expr) {
            let init = cx.expr_or_init(e);
            let orig_cast = if init.span != e.span { Some(init.span) } else { None };

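            // Small cache so the cast peeling below is computed at most once,
            // shared by the two checks that follow.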
            let mut peel_casts = {
                let mut peel_casts_cache = None;
                move || *peel_casts_cache.get_or_insert_with(|| peel_casts(cx, init))
            };

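            // First check: the result of an `&T` to `*mut T` cast is mutably
            // borrowed (`&mut *ptr`) or written through.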
            if matches!(pat, PatternKind::Borrow { mutbl: Mutability::Mut } | PatternKind::Assign)
                && let Some(ty_has_interior_mutability) =
                    is_cast_from_ref_to_mut_ptr(cx, init, &mut peel_casts)
            {
                cx.emit_span_lint(
                    INVALID_REFERENCE_CASTING,
                    expr.span,
                    if pat == PatternKind::Assign {
                        InvalidReferenceCastingDiag::AssignToRef {
                            orig_cast,
                            ty_has_interior_mutability,
                        }
                    } else {
                        InvalidReferenceCastingDiag::BorrowAsMut {
                            orig_cast,
                            ty_has_interior_mutability,
                        }
                    },
                );
            }

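            // Second check: the cast produces a pointer whose pointee layout is
            // bigger than both the original pointee and its backing allocation.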
            if let Some((from_ty_layout, to_ty_layout, e_alloc)) =
                is_cast_to_bigger_memory_layout(cx, init, &mut peel_casts)
            {
                cx.emit_span_lint(
                    INVALID_REFERENCE_CASTING,
                    expr.span,
                    InvalidReferenceCastingDiag::BiggerLayout {
                        orig_cast,
                        alloc: e_alloc.span,
                        from_ty: from_ty_layout.ty,
                        from_size: from_ty_layout.layout.size().bytes(),
                        to_ty: to_ty_layout.ty,
                        to_size: to_ty_layout.layout.size().bytes(),
                    },
                );
            }
        }
    }
}

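/// How the casted pointer is ultimately used: re-borrowed (`&expr` /
/// `&mut expr`) or written through (assignment, compound assignment, or a
/// `ptr::write`-like call).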
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum PatternKind {
    Borrow { mutbl: Mutability },
    Assign,
}

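/// Returns the pointer expression and the [`PatternKind`] when `e` re-borrows
/// or writes through a dereferenced pointer (`&mut *ptr`, `*ptr = …`,
/// `*ptr += …`) or passes it to `ptr::write` and friends.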
fn borrow_or_assign<'tcx>(
    cx: &LateContext<'tcx>,
    e: &'tcx Expr<'tcx>,
) -> Option<(&'tcx Expr<'tcx>, PatternKind)> {
    fn deref_assign_or_addr_of<'tcx>(
        expr: &'tcx Expr<'tcx>,
    ) -> Option<(&'tcx Expr<'tcx>, PatternKind)> {
        let (inner, pat) = if let ExprKind::AddrOf(_, mutbl, expr) = expr.kind {
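            // &<expr> or &mut <expr>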
            (expr, PatternKind::Borrow { mutbl })
        } else if let ExprKind::Assign(expr, _, _) = expr.kind {
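            // <expr> = …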
            (expr, PatternKind::Assign)
        } else if let ExprKind::AssignOp(_, expr, _) = expr.kind {
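            // <expr> += …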
            (expr, PatternKind::Assign)
        } else {
            return None;
        };

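        // In all of the patterns above, the place that is borrowed or written
        // to must itself be a dereference: `*<ptr>`.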
        let ExprKind::Unary(UnOp::Deref, e) = &inner.kind else {
            return None;
        };
        Some((e, pat))
    }

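    /// Matches calls to `ptr::write`, `ptr::write_volatile` and
    /// `ptr::write_unaligned`, returning the pointer argument.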
    fn ptr_write<'tcx>(
        cx: &LateContext<'tcx>,
        e: &'tcx Expr<'tcx>,
    ) -> Option<(&'tcx Expr<'tcx>, PatternKind)> {
        if let ExprKind::Call(path, [arg_ptr, _arg_val]) = e.kind
            && let ExprKind::Path(ref qpath) = path.kind
            && let Some(def_id) = cx.qpath_res(qpath, path.hir_id).opt_def_id()
            && matches!(
                cx.tcx.get_diagnostic_name(def_id),
                Some(sym::ptr_write | sym::ptr_write_volatile | sym::ptr_write_unaligned)
            )
        {
            Some((arg_ptr, PatternKind::Assign))
        } else {
            None
        }
    }

    deref_assign_or_addr_of(e).or_else(|| ptr_write(cx, e))
}

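/// Checks whether `orig_expr` is a cast from `&T` to `*mut T` (possibly
/// through a chain of intermediate casts). Returns whether `T` has interior
/// mutability, or `None` when the expression is not such a cast (or when the
/// cast is exempted because it goes through an `UnsafeCell` method).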
fn is_cast_from_ref_to_mut_ptr<'tcx>(
    cx: &LateContext<'tcx>,
    orig_expr: &'tcx Expr<'tcx>,
    mut peel_casts: impl FnMut() -> (&'tcx Expr<'tcx>, bool),
) -> Option<bool> {
    let end_ty = cx.typeck_results().node_type(orig_expr.hir_id);

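    // Bail out early if the end type is not a mutable raw pointer.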
    if !matches!(end_ty.kind(), ty::RawPtr(_, Mutability::Mut)) {
        return None;
    }

    let (e, need_check_freeze) = peel_casts();

    let start_ty = cx.typeck_results().node_type(e.hir_id);
    if let ty::Ref(_, inner_ty, Mutability::Not) = start_ty.kind() {
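        // If an `UnsafeCell` method was involved in the peeled casts, we must
        // additionally check the inner type for the `Freeze` trait (i.e. that
        // it does NOT contain an `UnsafeCell`), since otherwise we would lint
        // on valid casts.
        //
        // The exception is non-concrete skeletons (i.e. generics), since there
        // is no way to make the cast safe for arbitrary types.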
        let inner_ty_has_interior_mutability =
            !inner_ty.is_freeze(cx.tcx, cx.typing_env()) && inner_ty.has_concrete_skeleton();
        (!need_check_freeze || !inner_ty_has_interior_mutability)
            .then_some(inner_ty_has_interior_mutability)
    } else {
        None
    }
}

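/// Checks whether `orig_expr` casts a reference to a raw pointer whose pointee
/// layout is bigger than both the original pointee and the underlying
/// allocation. On a hit, returns the source and destination layouts together
/// with the allocation expression.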
fn is_cast_to_bigger_memory_layout<'tcx>(
    cx: &LateContext<'tcx>,
    orig_expr: &'tcx Expr<'tcx>,
    mut peel_casts: impl FnMut() -> (&'tcx Expr<'tcx>, bool),
) -> Option<(TyAndLayout<'tcx>, TyAndLayout<'tcx>, Expr<'tcx>)> {
    let end_ty = cx.typeck_results().node_type(orig_expr.hir_id);

    let ty::RawPtr(inner_end_ty, _) = end_ty.kind() else {
        return None;
    };

    let (e, _) = peel_casts();
    let start_ty = cx.typeck_results().node_type(e.hir_id);

    let ty::Ref(_, inner_start_ty, _) = start_ty.kind() else {
        return None;
    };

    let e_alloc = cx.expr_or_init(e);
    let e_alloc =
        if let ExprKind::AddrOf(_, _, inner_expr) = e_alloc.kind { inner_expr } else { e_alloc };

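    // If the expression looks like `&mut expr[index]`, then just looking at
    // `expr[index]` won't give us the underlying allocation, so skip it. The
    // same goes for field accesses (`&mut expr.field`) and re-borrows
    // (`&mut *expr`).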
    if let ExprKind::Index(..) | ExprKind::Field(..) | ExprKind::Unary(UnOp::Deref, ..) =
        e_alloc.kind
    {
        return None;
    }

    let alloc_ty = cx.typeck_results().node_type(e_alloc.hir_id);

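    // If the allocation is itself behind a pointer, we cannot know the size of
    // the actual allocation, so bail out.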
    if alloc_ty.is_any_ptr() {
        return None;
    }

    let from_layout = cx.layout_of(*inner_start_ty).ok()?;

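    // Unsized types have no statically known size, so the size comparison
    // below would be meaningless; bail out.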
    if from_layout.is_unsized() {
        return None;
    }

    let alloc_layout = cx.layout_of(alloc_ty).ok()?;
    let to_layout = cx.layout_of(*inner_end_ty).ok()?;

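    // Lint only when the destination layout is strictly bigger than both the
    // source pointee and the backing allocation.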
    if to_layout.layout.size() > from_layout.layout.size()
        && to_layout.layout.size() > alloc_layout.layout.size()
    {
        Some((from_layout, to_layout, *e_alloc))
    } else {
        None
    }
}