rustc_mir_transform/gvn.rs

//! Global value numbering.
//!
//! MIR may contain repeated and/or redundant computations. The objective of this pass is to detect
//! such redundancies and re-use the already-computed result when possible.
//!
//! We traverse all assignments `x = rvalue` and operands.
//!
//! For each SSA one, we compute a symbolic representation of values that are assigned to SSA
//! locals. This symbolic representation is defined by the `Value` enum. Each produced instance of
//! `Value` is interned as a `VnIndex`, which allows us to cheaply compute identical values.
//!
//! From those assignments, we construct a mapping `VnIndex -> Vec<(Local, Location)>` of available
//! values, the locals in which they are stored, and the assignment location.
//!
//! For each non-SSA one, we compute the `VnIndex` of the rvalue. If this `VnIndex` is associated
//! to a constant, we replace the rvalue/operand by that constant. Otherwise, if there is an SSA
//! local `y` associated to this `VnIndex`, and if its definition location strictly dominates the
//! assignment to `x`, we replace the assignment by `x = y`.
//!
//! Opportunistically, this pass also simplifies some `Rvalue`s based on the accumulated knowledge.
//!
//! # Operational semantics
//!
//! Operationally, this pass attempts to prove bitwise equality between locals. Given this MIR:
//! ```ignore (MIR)
//! _a = some value // has VnIndex i
//! // some MIR
//! _b = some other value // also has VnIndex i
//! ```
//!
//! We consider it to be replaceable by:
//! ```ignore (MIR)
//! _a = some value // has VnIndex i
//! // some MIR
//! _c = some other value // also has VnIndex i
//! assume(_a bitwise equal to _c) // follows from having the same VnIndex
//! _b = _a // follows from the `assume`
//! ```
//!
//! Which is simplifiable to:
//! ```ignore (MIR)
//! _a = some value // has VnIndex i
//! // some MIR
//! _b = _a
//! ```
//!
//! # Handling of references
//!
//! We handle references by assigning a different "provenance" index to each Ref/RawPtr rvalue.
//! This ensures that we do not spuriously merge borrows that should not be merged. Meanwhile, we
//! consider all the derefs of an immutable reference to a freeze type to give the same value:
//! ```ignore (MIR)
//! _a = *_b // _b is &Freeze
//! _c = *_b // replaced by _c = _a
//! ```
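//!
//! Conversely, each borrow or raw-pointer rvalue gets a fresh provenance index, so, as a sketch,
//! two otherwise identical borrows are kept distinct:
//! ```ignore (MIR)
//! _a = &_x // `Value::Address` with a fresh provenance
//! _b = &_x // `Value::Address` with a different provenance, so not merged with _a
//! ```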
//!
//! # Determinism of constant propagation
//!
//! When registering a new `Value`, we attempt to opportunistically evaluate it as a constant.
//! The evaluated form is inserted in `evaluated` as an `OpTy` or `None` if evaluation failed.
//!
//! The difficulty is non-deterministic evaluation of MIR constants. Some `Const` can have
//! different runtime values each time they are evaluated. This is the case with
//! `Const::Slice`, which yields a new pointer each time it is evaluated, and with constants that
//! contain a fn pointer (an `AllocId` pointing to a `GlobalAlloc::Function`), which may point to a
//! different symbol in each codegen unit.
//!
//! Meanwhile, we want to be able to read indirect constants. For instance:
//! ```
//! static A: &'static &'static u8 = &&63;
//! fn foo() -> u8 {
//!     **A // We want to replace by 63.
//! }
//! fn bar() -> u8 {
//!     b"abc"[1] // We want to replace by 'b'.
//! }
//! ```
//!
//! The `Value::Constant` variant stores a possibly unevaluated constant. Evaluating that constant
//! may be non-deterministic. When that happens, we assign a disambiguator to ensure that we do not
//! merge the constants. See `duplicate_slice` test in `gvn.rs`.
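//!
//! For instance, as a sketch, two mentions of the same slice constant may evaluate to different
//! pointers, so they receive distinct disambiguators and are not merged:
//! ```ignore (MIR)
//! _a = const "foo" // `Value::Constant` with one disambiguator
//! _b = const "foo" // `Value::Constant` with a different disambiguator, so not merged with _a
//! ```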
//!
//! Second, when writing constants in MIR, we do not write `Const::Slice` or `Const`
//! that contain `AllocId`s.

use std::borrow::Cow;

use either::Either;
use itertools::Itertools as _;
use rustc_abi::{self as abi, BackendRepr, FIRST_VARIANT, FieldIdx, Primitive, Size, VariantIdx};
use rustc_const_eval::const_eval::DummyMachine;
use rustc_const_eval::interpret::{
    ImmTy, Immediate, InterpCx, MemPlaceMeta, MemoryKind, OpTy, Projectable, Scalar,
    intern_const_alloc_for_constprop,
};
use rustc_data_structures::fx::{FxIndexSet, MutableValues};
use rustc_data_structures::graph::dominators::Dominators;
use rustc_hir::def::DefKind;
use rustc_index::bit_set::DenseBitSet;
use rustc_index::{IndexVec, newtype_index};
use rustc_middle::bug;
use rustc_middle::mir::interpret::GlobalAlloc;
use rustc_middle::mir::visit::*;
use rustc_middle::mir::*;
use rustc_middle::ty::layout::HasTypingEnv;
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_span::DUMMY_SP;
use smallvec::SmallVec;
use tracing::{debug, instrument, trace};

use crate::ssa::SsaLocals;

pub(super) struct GVN;

impl<'tcx> crate::MirPass<'tcx> for GVN {
    fn is_enabled(&self, sess: &rustc_session::Session) -> bool {
        sess.mir_opt_level() >= 2
    }

    #[instrument(level = "trace", skip(self, tcx, body))]
    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
        debug!(def_id = ?body.source.def_id());

        let typing_env = body.typing_env(tcx);
        let ssa = SsaLocals::new(tcx, body, typing_env);
        // Clone dominators because we need them while mutating the body.
        let dominators = body.basic_blocks.dominators().clone();

        let mut state = VnState::new(tcx, body, typing_env, &ssa, dominators, &body.local_decls);

        for local in body.args_iter().filter(|&local| ssa.is_ssa(local)) {
            let opaque = state.new_opaque(body.local_decls[local].ty);
            state.assign(local, opaque);
        }

        let reverse_postorder = body.basic_blocks.reverse_postorder().to_vec();
        for bb in reverse_postorder {
            let data = &mut body.basic_blocks.as_mut_preserves_cfg()[bb];
            state.visit_basic_block_data(bb, data);
        }

        // For each local that is reused (`y` above), we remove its storage statements to avoid any
        // difficulty. Those locals are SSA, so they should be easy for LLVM to optimize without
        // storage statements.
        StorageRemover { tcx, reused_locals: state.reused_locals }.visit_body_preserves_cfg(body);
    }

    fn is_required(&self) -> bool {
        false
    }
}

newtype_index! {
    struct VnIndex {}
}

#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
enum AddressKind {
    Ref(BorrowKind),
    Address(RawPtrKind),
}

#[derive(Debug, PartialEq, Eq, Hash)]
enum Value<'tcx> {
    // Root values.
    /// Used to represent values we know nothing about.
    /// The `usize` is a counter incremented by `new_opaque`.
    Opaque(usize),
    /// Evaluated or unevaluated constant value.
    Constant {
        value: Const<'tcx>,
        /// Some constants do not have a deterministic value. To avoid merging two instances of the
        /// same `Const`, we assign them an additional integer index.
        // `disambiguator` is 0 iff the constant is deterministic.
        disambiguator: usize,
    },
    /// An aggregate value, either tuple/closure/struct/enum.
    /// This does not contain unions, as we cannot reason about the value.
    Aggregate(VariantIdx, Vec<VnIndex>),
    /// A raw pointer aggregate built from a thin pointer and metadata.
    RawPtr {
        /// Thin pointer component. This is field 0 in MIR.
        pointer: VnIndex,
        /// Metadata component. This is field 1 in MIR.
        metadata: VnIndex,
    },
    /// This corresponds to a `[value; count]` expression.
    Repeat(VnIndex, ty::Const<'tcx>),
    /// The address of a place.
    Address {
        place: Place<'tcx>,
        kind: AddressKind,
        /// Give each borrow and pointer a different provenance, so we don't merge them.
        provenance: usize,
    },

    // Extractions.
    /// This is the *value* obtained by projecting another value.
    Projection(VnIndex, ProjectionElem<VnIndex, ()>),
    /// Discriminant of the given value.
    Discriminant(VnIndex),
    /// Length of an array or slice.
    Len(VnIndex),

    // Operations.
    NullaryOp(NullOp<'tcx>, Ty<'tcx>),
    UnaryOp(UnOp, VnIndex),
    BinaryOp(BinOp, VnIndex, VnIndex),
    Cast {
        kind: CastKind,
        value: VnIndex,
    },
}

struct VnState<'body, 'tcx> {
    tcx: TyCtxt<'tcx>,
    ecx: InterpCx<'tcx, DummyMachine>,
    local_decls: &'body LocalDecls<'tcx>,
    is_coroutine: bool,
    /// Value stored in each local.
    locals: IndexVec<Local, Option<VnIndex>>,
    /// Locals that are assigned that value.
    // This vector does not hold all the values of `VnIndex` that we create.
    rev_locals: IndexVec<VnIndex, SmallVec<[Local; 1]>>,
    values: FxIndexSet<(Value<'tcx>, Ty<'tcx>)>,
    /// Values evaluated as constants if possible.
    evaluated: IndexVec<VnIndex, Option<OpTy<'tcx>>>,
    /// Counter to generate different values.
    next_opaque: usize,
    /// Cache the deref values.
    derefs: Vec<VnIndex>,
    ssa: &'body SsaLocals,
    dominators: Dominators<BasicBlock>,
    reused_locals: DenseBitSet<Local>,
}

impl<'body, 'tcx> VnState<'body, 'tcx> {
    fn new(
        tcx: TyCtxt<'tcx>,
        body: &Body<'tcx>,
        typing_env: ty::TypingEnv<'tcx>,
        ssa: &'body SsaLocals,
        dominators: Dominators<BasicBlock>,
        local_decls: &'body LocalDecls<'tcx>,
    ) -> Self {
        // Compute a rough estimate of the number of values in the body from the number of
        // statements. This is meant to reduce the number of allocations, but it's all right if
        // we miss the exact amount. We estimate based on 2 values per statement (one in LHS and
        // one in RHS) and 4 values per terminator (for call operands).
        let num_values =
            2 * body.basic_blocks.iter().map(|bbdata| bbdata.statements.len()).sum::<usize>()
                + 4 * body.basic_blocks.len();
        VnState {
            tcx,
            ecx: InterpCx::new(tcx, DUMMY_SP, typing_env, DummyMachine),
            local_decls,
            is_coroutine: body.coroutine.is_some(),
            locals: IndexVec::from_elem(None, local_decls),
            rev_locals: IndexVec::with_capacity(num_values),
            values: FxIndexSet::with_capacity_and_hasher(num_values, Default::default()),
            evaluated: IndexVec::with_capacity(num_values),
            next_opaque: 1,
            derefs: Vec::new(),
            ssa,
            dominators,
            reused_locals: DenseBitSet::new_empty(local_decls.len()),
        }
    }

    fn typing_env(&self) -> ty::TypingEnv<'tcx> {
        self.ecx.typing_env()
    }

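    /// Intern `value` with type `ty` and return its `VnIndex`. On first insertion, the value is
    /// also evaluated as a constant if possible, and the per-value side tables are grown.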
    #[instrument(level = "trace", skip(self), ret)]
    fn insert(&mut self, ty: Ty<'tcx>, value: Value<'tcx>) -> VnIndex {
        let (index, new) = self.values.insert_full((value, ty));
        let index = VnIndex::from_usize(index);
        if new {
            // Grow `evaluated` and `rev_locals` here to amortize the allocations.
            let evaluated = self.eval_to_const(index);
            let _index = self.evaluated.push(evaluated);
            debug_assert_eq!(index, _index);
            let _index = self.rev_locals.push(SmallVec::new());
            debug_assert_eq!(index, _index);
        }
        index
    }

    fn next_opaque(&mut self) -> usize {
        let next_opaque = self.next_opaque;
        self.next_opaque += 1;
        next_opaque
    }

    /// Create a new `Value` for which we have no information at all, except that it is distinct
    /// from all the others.
    #[instrument(level = "trace", skip(self), ret)]
    fn new_opaque(&mut self, ty: Ty<'tcx>) -> VnIndex {
        let value = Value::Opaque(self.next_opaque());
        self.insert(ty, value)
    }

    /// Create a new `Value::Address` distinct from all the others.
    #[instrument(level = "trace", skip(self), ret)]
    fn new_pointer(&mut self, place: Place<'tcx>, kind: AddressKind) -> VnIndex {
        let pty = place.ty(self.local_decls, self.tcx).ty;
        let ty = match kind {
            AddressKind::Ref(bk) => {
                Ty::new_ref(self.tcx, self.tcx.lifetimes.re_erased, pty, bk.to_mutbl_lossy())
            }
            AddressKind::Address(mutbl) => Ty::new_ptr(self.tcx, pty, mutbl.to_mutbl_lossy()),
        };
        let value = Value::Address { place, kind, provenance: self.next_opaque() };
        self.insert(ty, value)
    }

    #[inline]
    fn get(&self, index: VnIndex) -> &Value<'tcx> {
        &self.values.get_index(index.as_usize()).unwrap().0
    }

    #[inline]
    fn ty(&self, index: VnIndex) -> Ty<'tcx> {
        self.values.get_index(index.as_usize()).unwrap().1
    }

    /// Record that `local` is assigned `value`. `local` must be SSA.
    #[instrument(level = "trace", skip(self))]
    fn assign(&mut self, local: Local, value: VnIndex) {
        debug_assert!(self.ssa.is_ssa(local));
        self.locals[local] = Some(value);
        self.rev_locals[value].push(local);
    }

    fn insert_constant(&mut self, value: Const<'tcx>) -> VnIndex {
        let disambiguator = if value.is_deterministic() {
            // The constant is deterministic, no need to disambiguate.
            0
        } else {
            // Multiple mentions of this constant will yield different values,
            // so assign a different `disambiguator` to ensure they do not get the same `VnIndex`.
            let disambiguator = self.next_opaque();
            // `disambiguator: 0` means deterministic.
            debug_assert_ne!(disambiguator, 0);
            disambiguator
        };
        self.insert(value.ty(), Value::Constant { value, disambiguator })
    }

    fn insert_bool(&mut self, flag: bool) -> VnIndex {
        // Booleans are deterministic.
        let value = Const::from_bool(self.tcx, flag);
        debug_assert!(value.is_deterministic());
        self.insert(self.tcx.types.bool, Value::Constant { value, disambiguator: 0 })
    }

    fn insert_scalar(&mut self, ty: Ty<'tcx>, scalar: Scalar) -> VnIndex {
        // Scalars are deterministic.
        let value = Const::from_scalar(self.tcx, scalar, ty);
        debug_assert!(value.is_deterministic());
        self.insert(ty, Value::Constant { value, disambiguator: 0 })
    }

    fn insert_tuple(&mut self, ty: Ty<'tcx>, values: Vec<VnIndex>) -> VnIndex {
        self.insert(ty, Value::Aggregate(VariantIdx::ZERO, values))
    }

    fn insert_deref(&mut self, ty: Ty<'tcx>, value: VnIndex) -> VnIndex {
        let value = self.insert(ty, Value::Projection(value, ProjectionElem::Deref));
        self.derefs.push(value);
        value
    }

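    /// Invalidate all the cached deref values by making them opaque, so they cannot be merged
    /// with dereferences computed after this point.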
    fn invalidate_derefs(&mut self) {
        for deref in std::mem::take(&mut self.derefs) {
            let opaque = self.next_opaque();
            self.values.get_index_mut2(deref.index()).unwrap().0 = Value::Opaque(opaque);
        }
    }

    #[instrument(level = "trace", skip(self), ret)]
    fn eval_to_const(&mut self, value: VnIndex) -> Option<OpTy<'tcx>> {
        use Value::*;
        let ty = self.ty(value);
        // Avoid computing layouts inside a coroutine, as that can cause cycles.
        let ty = if !self.is_coroutine || ty.is_scalar() {
            self.ecx.layout_of(ty).ok()?
        } else {
            return None;
        };
        let op = match *self.get(value) {
            _ if ty.is_zst() => ImmTy::uninit(ty).into(),

            Opaque(_) => return None,
            // Do not bother evaluating repeat expressions. This would uselessly consume memory.
            Repeat(..) => return None,

            Constant { ref value, disambiguator: _ } => {
                self.ecx.eval_mir_constant(value, DUMMY_SP, None).discard_err()?
            }
            Aggregate(variant, ref fields) => {
                let fields = fields
                    .iter()
                    .map(|&f| self.evaluated[f].as_ref())
                    .collect::<Option<Vec<_>>>()?;
                let variant = if ty.ty.is_enum() { Some(variant) } else { None };
                if matches!(ty.backend_repr, BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..))
                {
                    let dest = self.ecx.allocate(ty, MemoryKind::Stack).discard_err()?;
                    let variant_dest = if let Some(variant) = variant {
                        self.ecx.project_downcast(&dest, variant).discard_err()?
                    } else {
                        dest.clone()
                    };
                    for (field_index, op) in fields.into_iter().enumerate() {
                        let field_dest = self
                            .ecx
                            .project_field(&variant_dest, FieldIdx::from_usize(field_index))
                            .discard_err()?;
                        self.ecx.copy_op(op, &field_dest).discard_err()?;
                    }
                    self.ecx
                        .write_discriminant(variant.unwrap_or(FIRST_VARIANT), &dest)
                        .discard_err()?;
                    self.ecx
                        .alloc_mark_immutable(dest.ptr().provenance.unwrap().alloc_id())
                        .discard_err()?;
                    dest.into()
                } else {
                    return None;
                }
            }
            RawPtr { pointer, metadata } => {
                let pointer = self.evaluated[pointer].as_ref()?;
                let metadata = self.evaluated[metadata].as_ref()?;

                // Pointers don't have fields, so don't `project_field` them.
                let data = self.ecx.read_pointer(pointer).discard_err()?;
                let meta = if metadata.layout.is_zst() {
                    MemPlaceMeta::None
                } else {
                    MemPlaceMeta::Meta(self.ecx.read_scalar(metadata).discard_err()?)
                };
                let ptr_imm = Immediate::new_pointer_with_meta(data, meta, &self.ecx);
                ImmTy::from_immediate(ptr_imm, ty).into()
            }

            Projection(base, elem) => {
                let base = self.evaluated[base].as_ref()?;
                // `Index` by constants should have been replaced by `ConstantIndex` by
                // `simplify_place_projection`.
                let elem = elem.try_map(|_| None, |()| ty.ty)?;
                self.ecx.project(base, elem).discard_err()?
            }
            Address { place, kind: _, provenance: _ } => {
                if !place.is_indirect_first_projection() {
                    return None;
                }
                let local = self.locals[place.local]?;
                let pointer = self.evaluated[local].as_ref()?;
                let mut mplace = self.ecx.deref_pointer(pointer).discard_err()?;
                for elem in place.projection.iter().skip(1) {
                    // `Index` by constants should have been replaced by `ConstantIndex` by
                    // `simplify_place_projection`.
                    let elem = elem.try_map(|_| None, |ty| ty)?;
                    mplace = self.ecx.project(&mplace, elem).discard_err()?;
                }
                let pointer = mplace.to_ref(&self.ecx);
                ImmTy::from_immediate(pointer, ty).into()
            }

            Discriminant(base) => {
                let base = self.evaluated[base].as_ref()?;
                let variant = self.ecx.read_discriminant(base).discard_err()?;
                let discr_value =
                    self.ecx.discriminant_for_variant(base.layout.ty, variant).discard_err()?;
                discr_value.into()
            }
            Len(slice) => {
                let slice = self.evaluated[slice].as_ref()?;
                let len = slice.len(&self.ecx).discard_err()?;
                ImmTy::from_uint(len, ty).into()
            }
            NullaryOp(null_op, arg_ty) => {
                let arg_layout = self.ecx.layout_of(arg_ty).ok()?;
                if let NullOp::SizeOf | NullOp::AlignOf = null_op
                    && arg_layout.is_unsized()
                {
                    return None;
                }
                let val = match null_op {
                    NullOp::SizeOf => arg_layout.size.bytes(),
                    NullOp::AlignOf => arg_layout.align.abi.bytes(),
                    NullOp::OffsetOf(fields) => self
                        .ecx
                        .tcx
                        .offset_of_subfield(self.typing_env(), arg_layout, fields.iter())
                        .bytes(),
                    NullOp::UbChecks => return None,
                    NullOp::ContractChecks => return None,
                };
                ImmTy::from_uint(val, ty).into()
            }
            UnaryOp(un_op, operand) => {
                let operand = self.evaluated[operand].as_ref()?;
                let operand = self.ecx.read_immediate(operand).discard_err()?;
                let val = self.ecx.unary_op(un_op, &operand).discard_err()?;
                val.into()
            }
            BinaryOp(bin_op, lhs, rhs) => {
                let lhs = self.evaluated[lhs].as_ref()?;
                let lhs = self.ecx.read_immediate(lhs).discard_err()?;
                let rhs = self.evaluated[rhs].as_ref()?;
                let rhs = self.ecx.read_immediate(rhs).discard_err()?;
                let val = self.ecx.binary_op(bin_op, &lhs, &rhs).discard_err()?;
                val.into()
            }
            Cast { kind, value } => match kind {
                CastKind::IntToInt | CastKind::IntToFloat => {
                    let value = self.evaluated[value].as_ref()?;
                    let value = self.ecx.read_immediate(value).discard_err()?;
                    let res = self.ecx.int_to_int_or_float(&value, ty).discard_err()?;
                    res.into()
                }
                CastKind::FloatToFloat | CastKind::FloatToInt => {
                    let value = self.evaluated[value].as_ref()?;
                    let value = self.ecx.read_immediate(value).discard_err()?;
                    let res = self.ecx.float_to_float_or_int(&value, ty).discard_err()?;
                    res.into()
                }
                CastKind::Transmute => {
                    let value = self.evaluated[value].as_ref()?;
                    // `offset` for immediates generally only supports projections that match the
                    // type of the immediate. However, as a HACK, we exploit that it can also do
                    // limited transmutes: it only works between types with the same layout, and
                    // cannot transmute pointers to integers.
                    if value.as_mplace_or_imm().is_right() {
                        let can_transmute = match (value.layout.backend_repr, ty.backend_repr) {
                            (BackendRepr::Scalar(s1), BackendRepr::Scalar(s2)) => {
                                s1.size(&self.ecx) == s2.size(&self.ecx)
                                    && !matches!(s1.primitive(), Primitive::Pointer(..))
                            }
                            (BackendRepr::ScalarPair(a1, b1), BackendRepr::ScalarPair(a2, b2)) => {
                                a1.size(&self.ecx) == a2.size(&self.ecx) &&
                                b1.size(&self.ecx) == b2.size(&self.ecx) &&
                                // The alignment of the second component determines its offset, so that also needs to match.
                                b1.align(&self.ecx) == b2.align(&self.ecx) &&
                                // None of the inputs may be a pointer.
                                !matches!(a1.primitive(), Primitive::Pointer(..))
                                    && !matches!(b1.primitive(), Primitive::Pointer(..))
                            }
                            _ => false,
                        };
                        if !can_transmute {
                            return None;
                        }
                    }
                    value.offset(Size::ZERO, ty, &self.ecx).discard_err()?
                }
                CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _) => {
                    let src = self.evaluated[value].as_ref()?;
                    let dest = self.ecx.allocate(ty, MemoryKind::Stack).discard_err()?;
                    self.ecx.unsize_into(src, ty, &dest).discard_err()?;
                    self.ecx
                        .alloc_mark_immutable(dest.ptr().provenance.unwrap().alloc_id())
                        .discard_err()?;
                    dest.into()
                }
                CastKind::FnPtrToPtr | CastKind::PtrToPtr => {
                    let src = self.evaluated[value].as_ref()?;
                    let src = self.ecx.read_immediate(src).discard_err()?;
                    let ret = self.ecx.ptr_to_ptr(&src, ty).discard_err()?;
                    ret.into()
                }
                CastKind::PointerCoercion(ty::adjustment::PointerCoercion::UnsafeFnPointer, _) => {
                    let src = self.evaluated[value].as_ref()?;
                    let src = self.ecx.read_immediate(src).discard_err()?;
                    ImmTy::from_immediate(*src, ty).into()
                }
                _ => return None,
            },
        };
        Some(op)
    }

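    /// Compute the value obtained by applying the projection `proj` to `value`, together with the
    /// projected place type, if this pass can represent it.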
    fn project(
        &mut self,
        place_ty: PlaceTy<'tcx>,
        value: VnIndex,
        proj: PlaceElem<'tcx>,
        from_non_ssa_index: &mut bool,
    ) -> Option<(PlaceTy<'tcx>, VnIndex)> {
        let projection_ty = place_ty.projection_ty(self.tcx, proj);
        let proj = match proj {
            ProjectionElem::Deref => {
                if let Some(Mutability::Not) = place_ty.ty.ref_mutability()
                    && projection_ty.ty.is_freeze(self.tcx, self.typing_env())
                {
                    // An immutable borrow `_x` always points to the same value for the
                    // lifetime of the borrow, so we can merge all instances of `*_x`.
                    return Some((projection_ty, self.insert_deref(projection_ty.ty, value)));
                } else {
                    return None;
                }
            }
            ProjectionElem::Downcast(name, index) => ProjectionElem::Downcast(name, index),
            ProjectionElem::Field(f, _) => {
                if let Value::Aggregate(_, fields) = self.get(value) {
                    return Some((projection_ty, fields[f.as_usize()]));
                } else if let Value::Projection(
                    outer_value,
                    ProjectionElem::Downcast(_, read_variant),
                ) = self.get(value)
                    && let Value::Aggregate(written_variant, fields) = self.get(*outer_value)
                    // This pass is not aware of control-flow, so we do not know whether the
                    // replacement we are doing is actually reachable. We could be in any arm of
                    // ```
                    // match Some(x) {
                    //     Some(y) => /* stuff */,
                    //     None => /* other */,
                    // }
                    // ```
                    //
                    // In surface rust, the current statement would be unreachable.
                    //
                    // However, from the reference chapter on enums and RFC 2195,
                    // accessing the wrong variant is not UB if the enum has repr.
                    // So it's not impossible for a series of MIR opts to generate
                    // a downcast to an inactive variant.
                    && written_variant == read_variant
                {
                    return Some((projection_ty, fields[f.as_usize()]));
                }
                ProjectionElem::Field(f, ())
            }
            ProjectionElem::Index(idx) => {
                if let Value::Repeat(inner, _) = self.get(value) {
                    *from_non_ssa_index |= self.locals[idx].is_none();
                    return Some((projection_ty, *inner));
                }
                let idx = self.locals[idx]?;
                ProjectionElem::Index(idx)
            }
            ProjectionElem::ConstantIndex { offset, min_length, from_end } => {
                match self.get(value) {
                    Value::Repeat(inner, _) => {
                        return Some((projection_ty, *inner));
                    }
                    Value::Aggregate(_, operands) => {
                        let offset = if from_end {
                            operands.len() - offset as usize
                        } else {
                            offset as usize
                        };
                        let value = operands.get(offset).copied()?;
                        return Some((projection_ty, value));
                    }
                    _ => {}
                };
                ProjectionElem::ConstantIndex { offset, min_length, from_end }
            }
            ProjectionElem::Subslice { from, to, from_end } => {
                ProjectionElem::Subslice { from, to, from_end }
            }
            ProjectionElem::OpaqueCast(_) => ProjectionElem::OpaqueCast(()),
            ProjectionElem::Subtype(_) => ProjectionElem::Subtype(()),
            ProjectionElem::UnwrapUnsafeBinder(_) => ProjectionElem::UnwrapUnsafeBinder(()),
        };

        let value = self.insert(projection_ty.ty, Value::Projection(value, proj));
        Some((projection_ty, value))
    }

    /// Simplify the projection chain if we know better.
    #[instrument(level = "trace", skip(self))]
    fn simplify_place_projection(&mut self, place: &mut Place<'tcx>, location: Location) {
        // If the projection is indirect, we treat the local as a value, so we can replace it with
        // another local.
        if place.is_indirect_first_projection()
            && let Some(base) = self.locals[place.local]
            && let Some(new_local) = self.try_as_local(base, location)
            && place.local != new_local
        {
            place.local = new_local;
            self.reused_locals.insert(new_local);
        }

        let mut projection = Cow::Borrowed(&place.projection[..]);

        for i in 0..projection.len() {
            let elem = projection[i];
            if let ProjectionElem::Index(idx_local) = elem
                && let Some(idx) = self.locals[idx_local]
            {
                if let Some(offset) = self.evaluated[idx].as_ref()
                    && let Some(offset) = self.ecx.read_target_usize(offset).discard_err()
                    && let Some(min_length) = offset.checked_add(1)
                {
                    projection.to_mut()[i] =
                        ProjectionElem::ConstantIndex { offset, min_length, from_end: false };
                } else if let Some(new_idx_local) = self.try_as_local(idx, location)
                    && idx_local != new_idx_local
                {
                    projection.to_mut()[i] = ProjectionElem::Index(new_idx_local);
                    self.reused_locals.insert(new_idx_local);
                }
            }
        }

        if projection.is_owned() {
            place.projection = self.tcx.mk_place_elems(&projection);
        }

        trace!(?place);
    }

    /// Represent the *value* which would be read from `place`, and point `place` to a preexisting
    /// place with the same value (if that already exists).
    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_place_value(
        &mut self,
        place: &mut Place<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        self.simplify_place_projection(place, location);

        // Invariant: `place` and `place_ref` point to the same value, even if they point to
        // different memory locations.
        let mut place_ref = place.as_ref();

        // Invariant: `value` holds the value up to the `index`th projection excluded.
        let mut value = self.locals[place.local]?;
        // Invariant: `value` has type `place_ty`, with optional downcast variant if needed.
        let mut place_ty = PlaceTy::from_ty(self.local_decls[place.local].ty);
        let mut from_non_ssa_index = false;
        for (index, proj) in place.projection.iter().enumerate() {
            if let Value::Projection(pointer, ProjectionElem::Deref) = *self.get(value)
                && let Value::Address { place: mut pointee, kind, .. } = *self.get(pointer)
                && let AddressKind::Ref(BorrowKind::Shared) = kind
                && let Some(v) = self.simplify_place_value(&mut pointee, location)
            {
                value = v;
                // `pointee` holds a `Place`, so `ProjectionElem::Index` holds a `Local`.
                // That local is SSA, but we otherwise have no guarantee on that local's value at
                // the current location compared to its value where `pointee` was borrowed.
                if pointee.projection.iter().all(|elem| !matches!(elem, ProjectionElem::Index(_))) {
                    place_ref =
                        pointee.project_deeper(&place.projection[index..], self.tcx).as_ref();
                }
            }
            if let Some(local) = self.try_as_local(value, location) {
                // Both `local` and `Place { local: place.local, projection: projection[..index] }`
                // hold the same value. Therefore, the following place holds the value in the
                // original `place`.
                place_ref = PlaceRef { local, projection: &place.projection[index..] };
            }

            (place_ty, value) = self.project(place_ty, value, proj, &mut from_non_ssa_index)?;
        }

        if let Value::Projection(pointer, ProjectionElem::Deref) = *self.get(value)
            && let Value::Address { place: mut pointee, kind, .. } = *self.get(pointer)
            && let AddressKind::Ref(BorrowKind::Shared) = kind
            && let Some(v) = self.simplify_place_value(&mut pointee, location)
        {
            value = v;
            // `pointee` holds a `Place`, so `ProjectionElem::Index` holds a `Local`.
            // That local is SSA, but we otherwise have no guarantee on that local's value at
            // the current location compared to its value where `pointee` was borrowed.
            if pointee.projection.iter().all(|elem| !matches!(elem, ProjectionElem::Index(_))) {
                place_ref = pointee.project_deeper(&[], self.tcx).as_ref();
            }
        }
        if let Some(new_local) = self.try_as_local(value, location) {
            place_ref = PlaceRef { local: new_local, projection: &[] };
        } else if from_non_ssa_index {
            // If access to non-SSA locals is unavoidable, bail out.
            return None;
        }

        if place_ref.local != place.local || place_ref.projection.len() < place.projection.len() {
            // By the invariant on `place_ref`.
            *place = place_ref.project_deeper(&[], self.tcx);
            self.reused_locals.insert(place_ref.local);
        }

        Some(value)
    }

    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_operand(
        &mut self,
        operand: &mut Operand<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        match *operand {
            Operand::Constant(ref constant) => Some(self.insert_constant(constant.const_)),
            Operand::Copy(ref mut place) | Operand::Move(ref mut place) => {
                let value = self.simplify_place_value(place, location)?;
                if let Some(const_) = self.try_as_constant(value) {
                    *operand = Operand::Constant(Box::new(const_));
                }
                Some(value)
            }
        }
    }

    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_rvalue(
        &mut self,
        lhs: &Place<'tcx>,
        rvalue: &mut Rvalue<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        let value = match *rvalue {
            // Forward values.
            Rvalue::Use(ref mut operand) => return self.simplify_operand(operand, location),
            Rvalue::CopyForDeref(place) => {
                let mut operand = Operand::Copy(place);
                let val = self.simplify_operand(&mut operand, location);
                *rvalue = Rvalue::Use(operand);
                return val;
            }

            // Roots.
            Rvalue::Repeat(ref mut op, amount) => {
                let op = self.simplify_operand(op, location)?;
                Value::Repeat(op, amount)
            }
            Rvalue::NullaryOp(op, ty) => Value::NullaryOp(op, ty),
            Rvalue::Aggregate(..) => return self.simplify_aggregate(lhs, rvalue, location),
            Rvalue::Ref(_, borrow_kind, ref mut place) => {
                self.simplify_place_projection(place, location);
                return Some(self.new_pointer(*place, AddressKind::Ref(borrow_kind)));
            }
            Rvalue::RawPtr(mutbl, ref mut place) => {
                self.simplify_place_projection(place, location);
                return Some(self.new_pointer(*place, AddressKind::Address(mutbl)));
            }
            Rvalue::WrapUnsafeBinder(ref mut op, _) => {
                let value = self.simplify_operand(op, location)?;
                Value::Cast { kind: CastKind::Transmute, value }
            }

            // Operations.
            Rvalue::Len(ref mut place) => return self.simplify_len(place, location),
            Rvalue::Cast(ref mut kind, ref mut value, to) => {
                return self.simplify_cast(kind, value, to, location);
            }
            Rvalue::BinaryOp(op, box (ref mut lhs, ref mut rhs)) => {
                return self.simplify_binary(op, lhs, rhs, location);
            }
            Rvalue::UnaryOp(op, ref mut arg_op) => {
                return self.simplify_unary(op, arg_op, location);
            }
            Rvalue::Discriminant(ref mut place) => {
                let place = self.simplify_place_value(place, location)?;
                if let Some(discr) = self.simplify_discriminant(place) {
                    return Some(discr);
                }
                Value::Discriminant(place)
            }

            // Unsupported values.
            Rvalue::ThreadLocalRef(..) | Rvalue::ShallowInitBox(..) => return None,
        };
        let ty = rvalue.ty(self.local_decls, self.tcx);
        Some(self.insert(ty, value))
    }

    fn simplify_discriminant(&mut self, place: VnIndex) -> Option<VnIndex> {
        let enum_ty = self.ty(place);
        if enum_ty.is_enum()
            && let Value::Aggregate(variant, _) = *self.get(place)
        {
            let discr = self.ecx.discriminant_for_variant(enum_ty, variant).discard_err()?;
            return Some(self.insert_scalar(discr.layout.ty, discr.to_scalar()));
        }

        None
    }

    fn try_as_place_elem(
        &mut self,
        ty: Ty<'tcx>,
        proj: ProjectionElem<VnIndex, ()>,
        loc: Location,
    ) -> Option<PlaceElem<'tcx>> {
        proj.try_map(
            |value| {
                let local = self.try_as_local(value, loc)?;
                self.reused_locals.insert(local);
                Some(local)
            },
            |()| ty,
        )
    }

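    /// If the aggregate exactly reassembles, field by field, an existing value of the same type
    /// (and same variant), return that value so the aggregate can be replaced by a copy of it.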
    fn simplify_aggregate_to_copy(
        &mut self,
        ty: Ty<'tcx>,
        variant_index: VariantIdx,
        fields: &[VnIndex],
    ) -> Option<VnIndex> {
        let Some(&first_field) = fields.first() else { return None };
        let Value::Projection(copy_from_value, _) = *self.get(first_field) else { return None };

        // All fields must correspond one-to-one and come from the same aggregate value.
        if fields.iter().enumerate().any(|(index, &v)| {
            if let Value::Projection(pointer, ProjectionElem::Field(from_index, _)) = *self.get(v)
                && copy_from_value == pointer
                && from_index.index() == index
            {
                return false;
            }
            true
        }) {
            return None;
        }

        let mut copy_from_local_value = copy_from_value;
        if let Value::Projection(pointer, proj) = *self.get(copy_from_value)
            && let ProjectionElem::Downcast(_, read_variant) = proj
        {
            if variant_index == read_variant {
                // When copying a variant, there is no need to downcast.
                copy_from_local_value = pointer;
            } else {
                // The copied variant must be identical.
                return None;
            }
        }

        // Both must be variants of the same type.
        if self.ty(copy_from_local_value) == ty { Some(copy_from_local_value) } else { None }
    }

    fn simplify_aggregate(
        &mut self,
        lhs: &Place<'tcx>,
        rvalue: &mut Rvalue<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        let tcx = self.tcx;
        let ty = rvalue.ty(self.local_decls, tcx);

        let Rvalue::Aggregate(box ref kind, ref mut field_ops) = *rvalue else { bug!() };

        if field_ops.is_empty() {
            let is_zst = match *kind {
                AggregateKind::Array(..)
                | AggregateKind::Tuple
                | AggregateKind::Closure(..)
                | AggregateKind::CoroutineClosure(..) => true,
                // Only enums can be non-ZST.
                AggregateKind::Adt(did, ..) => tcx.def_kind(did) != DefKind::Enum,
                // Coroutines are never ZST, as they at least contain the implicit states.
                AggregateKind::Coroutine(..) => false,
                AggregateKind::RawPtr(..) => bug!("MIR for RawPtr aggregate must have 2 fields"),
            };

            if is_zst {
                return Some(self.insert_constant(Const::zero_sized(ty)));
            }
        }

        let fields: Vec<_> = field_ops
            .iter_mut()
            .map(|op| {
                self.simplify_operand(op, location)
                    .unwrap_or_else(|| self.new_opaque(op.ty(self.local_decls, self.tcx)))
            })
            .collect();

        let variant_index = match *kind {
            AggregateKind::Array(..) | AggregateKind::Tuple => {
                assert!(!field_ops.is_empty());
                FIRST_VARIANT
            }
            AggregateKind::Closure(..)
            | AggregateKind::CoroutineClosure(..)
            | AggregateKind::Coroutine(..) => FIRST_VARIANT,
            AggregateKind::Adt(_, variant_index, _, _, None) => variant_index,
            // Do not track unions.
            AggregateKind::Adt(_, _, _, _, Some(_)) => return None,
            AggregateKind::RawPtr(..) => {
                assert_eq!(field_ops.len(), 2);
                let [mut pointer, metadata] = fields.try_into().unwrap();

                // Any thin pointer of matching mutability is fine as the data pointer.
                let mut was_updated = false;
                while let Value::Cast { kind: CastKind::PtrToPtr, value: cast_value } =
                    self.get(pointer)
                    && let ty::RawPtr(from_pointee_ty, from_mtbl) = self.ty(*cast_value).kind()
                    && let ty::RawPtr(_, output_mtbl) = ty.kind()
                    && from_mtbl == output_mtbl
                    && from_pointee_ty.is_sized(self.tcx, self.typing_env())
                {
                    pointer = *cast_value;
                    was_updated = true;
                }

                if was_updated && let Some(op) = self.try_as_operand(pointer, location) {
                    field_ops[FieldIdx::ZERO] = op;
                }

                return Some(self.insert(ty, Value::RawPtr { pointer, metadata }));
            }
        };

        if ty.is_array()
            && fields.len() > 4
            && let Ok(&first) = fields.iter().all_equal_value()
        {
            let len = ty::Const::from_target_usize(self.tcx, fields.len().try_into().unwrap());
            if let Some(op) = self.try_as_operand(first, location) {
                *rvalue = Rvalue::Repeat(op, len);
            }
            return Some(self.insert(ty, Value::Repeat(first, len)));
        }

        if let Some(value) = self.simplify_aggregate_to_copy(ty, variant_index, &fields) {
            // Allow introducing places with non-constant offsets, as those are still better than
            // reconstructing an aggregate. But avoid creating `*a = copy (*b)`, as they might be
            // aliases resulting in overlapping assignments.
            let allow_complex_projection =
                lhs.projection[..].iter().all(PlaceElem::is_stable_offset);
            if let Some(place) = self.try_as_place(value, location, allow_complex_projection) {
                self.reused_locals.insert(place.local);
                *rvalue = Rvalue::Use(Operand::Copy(place));
            }
            return Some(value);
        }

        Some(self.insert(ty, Value::Aggregate(variant_index, fields)))
    }

    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_unary(
        &mut self,
        op: UnOp,
        arg_op: &mut Operand<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        let mut arg_index = self.simplify_operand(arg_op, location)?;
        let arg_ty = self.ty(arg_index);
        let ret_ty = op.ty(self.tcx, arg_ty);

        // PtrMetadata doesn't care about *const vs *mut vs & vs &mut,
        // so start by removing those distinctions so we can update the `Operand`
        if op == UnOp::PtrMetadata {
            let mut was_updated = false;
            loop {
                match self.get(arg_index) {
                    // Pointer casts that preserve metadata, such as
                    // `*const [i32]` <-> `*mut [i32]` <-> `*mut [f32]`.
                    // It's critical that this not eliminate cases like
                    // `*const [T]` -> `*const T` which remove metadata.
                    // We run on potentially-generic MIR, though, so unlike codegen
                    // we can't always know exactly what the metadata are.
                    // To allow things like `*mut (?A, ?T)` <-> `*mut (?B, ?T)`,
                    // it's fine to get a projection as the type.
                    Value::Cast { kind: CastKind::PtrToPtr, value: inner }
                        if self.pointers_have_same_metadata(self.ty(*inner), arg_ty) =>
                    {
                        arg_index = *inner;
                        was_updated = true;
                        continue;
                    }

                    // `&mut *p`, `&raw *p`, etc don't change metadata.
                    Value::Address { place, kind: _, provenance: _ }
                        if let PlaceRef { local, projection: [PlaceElem::Deref] } =
                            place.as_ref()
                            && let Some(local_index) = self.locals[local] =>
                    {
                        arg_index = local_index;
                        was_updated = true;
                        continue;
                    }

                    _ => {
                        if was_updated && let Some(op) = self.try_as_operand(arg_index, location) {
                            *arg_op = op;
                        }
                        break;
                    }
                }
            }
        }

        let value = match (op, self.get(arg_index)) {
            (UnOp::Not, Value::UnaryOp(UnOp::Not, inner)) => return Some(*inner),
            (UnOp::Neg, Value::UnaryOp(UnOp::Neg, inner)) => return Some(*inner),
            (UnOp::Not, Value::BinaryOp(BinOp::Eq, lhs, rhs)) => {
                Value::BinaryOp(BinOp::Ne, *lhs, *rhs)
            }
            (UnOp::Not, Value::BinaryOp(BinOp::Ne, lhs, rhs)) => {
                Value::BinaryOp(BinOp::Eq, *lhs, *rhs)
            }
            (UnOp::PtrMetadata, Value::RawPtr { metadata, .. }) => return Some(*metadata),
            // We have an unsizing cast, which assigns the length to wide pointer metadata.
            (
                UnOp::PtrMetadata,
                Value::Cast {
                    kind: CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _),
                    value: inner,
                },
            ) if let ty::Slice(..) = arg_ty.builtin_deref(true).unwrap().kind()
                && let ty::Array(_, len) = self.ty(*inner).builtin_deref(true).unwrap().kind() =>
            {
                return Some(self.insert_constant(Const::Ty(self.tcx.types.usize, *len)));
            }
            _ => Value::UnaryOp(op, arg_index),
        };
        Some(self.insert(ret_ty, value))
    }

    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_binary(
        &mut self,
        op: BinOp,
        lhs_operand: &mut Operand<'tcx>,
        rhs_operand: &mut Operand<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        let lhs = self.simplify_operand(lhs_operand, location);
        let rhs = self.simplify_operand(rhs_operand, location);

        // Only short-circuit the `Option`s after we have called `simplify_operand`
        // on both operands, for their side effects.
        let mut lhs = lhs?;
        let mut rhs = rhs?;

        let lhs_ty = self.ty(lhs);

        // If we're comparing pointers, remove `PtrToPtr` casts if the from
        // types of both casts and the metadata all match.
        if let BinOp::Eq | BinOp::Ne | BinOp::Lt | BinOp::Le | BinOp::Gt | BinOp::Ge = op
            && lhs_ty.is_any_ptr()
            && let Value::Cast { kind: CastKind::PtrToPtr, value: lhs_value } = self.get(lhs)
            && let Value::Cast { kind: CastKind::PtrToPtr, value: rhs_value } = self.get(rhs)
            && let lhs_from = self.ty(*lhs_value)
            && lhs_from == self.ty(*rhs_value)
            && self.pointers_have_same_metadata(lhs_from, lhs_ty)
        {
            lhs = *lhs_value;
            rhs = *rhs_value;
            if let Some(lhs_op) = self.try_as_operand(lhs, location)
                && let Some(rhs_op) = self.try_as_operand(rhs, location)
            {
                *lhs_operand = lhs_op;
                *rhs_operand = rhs_op;
            }
        }

        if let Some(value) = self.simplify_binary_inner(op, lhs_ty, lhs, rhs) {
            return Some(value);
        }
        let ty = op.ty(self.tcx, lhs_ty, self.ty(rhs));
        let value = Value::BinaryOp(op, lhs, rhs);
        Some(self.insert(ty, value))
    }

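    /// Try to simplify `lhs op rhs` algebraically: neutral and absorbing elements, `x - x` and
    /// `x ^ x`, and comparisons between operands known to be equal.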
1163    fn simplify_binary_inner(
1164        &mut self,
1165        op: BinOp,
1166        lhs_ty: Ty<'tcx>,
1167        lhs: VnIndex,
1168        rhs: VnIndex,
1169    ) -> Option<VnIndex> {
1170        // Floats are weird enough that none of the logic below applies.
1171        let reasonable_ty =
1172            lhs_ty.is_integral() || lhs_ty.is_bool() || lhs_ty.is_char() || lhs_ty.is_any_ptr();
1173        if !reasonable_ty {
1174            return None;
1175        }
1176
1177        let layout = self.ecx.layout_of(lhs_ty).ok()?;
1178
1179        let as_bits = |value: VnIndex| {
1180            let constant = self.evaluated[value].as_ref()?;
1181            if layout.backend_repr.is_scalar() {
1182                let scalar = self.ecx.read_scalar(constant).discard_err()?;
1183                scalar.to_bits(constant.layout.size).discard_err()
1184            } else {
1185                // `constant` is a wide pointer. Do not evaluate to bits.
1186                None
1187            }
1188        };
1189
1190        // Represent the values as `Left(bits)` or `Right(VnIndex)`.
1191        use Either::{Left, Right};
1192        let a = as_bits(lhs).map_or(Right(lhs), Left);
1193        let b = as_bits(rhs).map_or(Right(rhs), Left);
1194
1195        let result = match (op, a, b) {
1196            // Neutral elements.
1197            (
1198                BinOp::Add
1199                | BinOp::AddWithOverflow
1200                | BinOp::AddUnchecked
1201                | BinOp::BitOr
1202                | BinOp::BitXor,
1203                Left(0),
1204                Right(p),
1205            )
1206            | (
1207                BinOp::Add
1208                | BinOp::AddWithOverflow
1209                | BinOp::AddUnchecked
1210                | BinOp::BitOr
1211                | BinOp::BitXor
1212                | BinOp::Sub
1213                | BinOp::SubWithOverflow
1214                | BinOp::SubUnchecked
1215                | BinOp::Offset
1216                | BinOp::Shl
1217                | BinOp::Shr,
1218                Right(p),
1219                Left(0),
1220            )
1221            | (BinOp::Mul | BinOp::MulWithOverflow | BinOp::MulUnchecked, Left(1), Right(p))
1222            | (
1223                BinOp::Mul | BinOp::MulWithOverflow | BinOp::MulUnchecked | BinOp::Div,
1224                Right(p),
1225                Left(1),
1226            ) => p,
1227            // Attempt to simplify `x & ALL_ONES` to `x`, with `ALL_ONES` depending on type size.
1228            (BinOp::BitAnd, Right(p), Left(ones)) | (BinOp::BitAnd, Left(ones), Right(p))
1229                if ones == layout.size.truncate(u128::MAX)
1230                    || (layout.ty.is_bool() && ones == 1) =>
1231            {
1232                p
1233            }
1234            // Absorbing elements.
1235            (
1236                BinOp::Mul | BinOp::MulWithOverflow | BinOp::MulUnchecked | BinOp::BitAnd,
1237                _,
1238                Left(0),
1239            )
1240            | (BinOp::Rem, _, Left(1))
1241            | (
1242                BinOp::Mul
1243                | BinOp::MulWithOverflow
1244                | BinOp::MulUnchecked
1245                | BinOp::Div
1246                | BinOp::Rem
1247                | BinOp::BitAnd
1248                | BinOp::Shl
1249                | BinOp::Shr,
1250                Left(0),
1251                _,
1252            ) => self.insert_scalar(lhs_ty, Scalar::from_uint(0u128, layout.size)),
1253            // Attempt to simplify `x | ALL_ONES` to `ALL_ONES`.
1254            (BinOp::BitOr, _, Left(ones)) | (BinOp::BitOr, Left(ones), _)
1255                if ones == layout.size.truncate(u128::MAX)
1256                    || (layout.ty.is_bool() && ones == 1) =>
1257            {
1258                self.insert_scalar(lhs_ty, Scalar::from_uint(ones, layout.size))
1259            }
1260            // Sub/Xor with itself.
1261            (BinOp::Sub | BinOp::SubWithOverflow | BinOp::SubUnchecked | BinOp::BitXor, a, b)
1262                if a == b =>
1263            {
1264                self.insert_scalar(lhs_ty, Scalar::from_uint(0u128, layout.size))
1265            }
1266            // Comparison:
1267            // - if both operands can be computed as bits, just compare the bits;
1268            // - if we proved that both operands have the same value, we can insert true/false;
1269            // - otherwise, do nothing, as we do not try to prove inequality.
1270            (BinOp::Eq, Left(a), Left(b)) => self.insert_bool(a == b),
1271            (BinOp::Eq, a, b) if a == b => self.insert_bool(true),
1272            (BinOp::Ne, Left(a), Left(b)) => self.insert_bool(a != b),
1273            (BinOp::Ne, a, b) if a == b => self.insert_bool(false),
1274            _ => return None,
1275        };
1276
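        // None of the identities above can overflow, so for overflow-checked ops the simplified
        // result is the pair `(value, false)`.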
1277        if op.is_overflowing() {
1278            let ty = Ty::new_tup(self.tcx, &[self.ty(result), self.tcx.types.bool]);
1279            let false_val = self.insert_bool(false);
1280            Some(self.insert_tuple(ty, vec![result, false_val]))
1281        } else {
1282            Some(result)
1283        }
1284    }
1285
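    /// A rough sketch (in the module's MIR notation, not an exact dump) of the cast-of-cast
    /// peeling performed below, assuming `_1: *mut [i32]`:
    /// ```ignore (MIR)
    /// _2 = copy _1 as *const [i32] (PtrToPtr)
    /// _3 = copy _2 as *const i32 (PtrToPtr) // can be rebuilt as a single cast from `_1`
    /// ```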
1286    fn simplify_cast(
1287        &mut self,
1288        initial_kind: &mut CastKind,
1289        initial_operand: &mut Operand<'tcx>,
1290        to: Ty<'tcx>,
1291        location: Location,
1292    ) -> Option<VnIndex> {
1293        use CastKind::*;
1294        use rustc_middle::ty::adjustment::PointerCoercion::*;
1295
1296        let mut kind = *initial_kind;
1297        let mut value = self.simplify_operand(initial_operand, location)?;
1298        let mut from = self.ty(value);
1299        if from == to {
1300            return Some(value);
1301        }
1302
1303        if let CastKind::PointerCoercion(ReifyFnPointer | ClosureFnPointer(_), _) = kind {
1304            // Each reification of a generic fn may get a different pointer.
1305            // Do not try to merge them.
1306            return Some(self.new_opaque(to));
1307        }
1308
1309        let mut was_ever_updated = false;
1310        loop {
1311            let mut was_updated_this_iteration = false;
1312
1313            // Transmuting between raw pointers is just a pointer cast so long as
1314            // they have the same metadata type (like `*const i32` <=> `*mut u64`
1315            // or `*mut [i32]` <=> `*const [u64]`), including the common special
1316            // case of `*const T` <=> `*mut T`.
1317            if let Transmute = kind
1318                && from.is_raw_ptr()
1319                && to.is_raw_ptr()
1320                && self.pointers_have_same_metadata(from, to)
1321            {
1322                kind = PtrToPtr;
1323                was_updated_this_iteration = true;
1324            }
1325
1326            // If a cast just casts away the metadata again, then we can get it by
1327            // casting the original thin pointer passed to `from_raw_parts`.
1328            if let PtrToPtr = kind
1329                && let Value::RawPtr { pointer, .. } = self.get(value)
1330                && let ty::RawPtr(to_pointee, _) = to.kind()
1331                && to_pointee.is_sized(self.tcx, self.typing_env())
1332            {
1333                from = self.ty(*pointer);
1334                value = *pointer;
1335                was_updated_this_iteration = true;
1336                if from == to {
1337                    return Some(*pointer);
1338                }
1339            }
1340
1341            // Aggregate-then-Transmute can just transmute the original field value,
1342            // so long as all the bytes of the value come from a single field.
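            // For instance (illustrative): transmuting the aggregate `Wrapping(_1)` to `u32` can
            // just use `_1`, since `Wrapping` is `repr(transparent)`.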
1343            if let Transmute = kind
1344                && let Value::Aggregate(variant_idx, field_values) = self.get(value)
1345                && let Some((field_idx, field_ty)) =
1346                    self.value_is_all_in_one_field(from, *variant_idx)
1347            {
1348                from = field_ty;
1349                value = field_values[field_idx.as_usize()];
1350                was_updated_this_iteration = true;
1351                if field_ty == to {
1352                    return Some(value);
1353                }
1354            }
1355
1356            // Various cast-then-cast cases can be simplified.
1357            if let Value::Cast { kind: inner_kind, value: inner_value } = *self.get(value) {
1358                let inner_from = self.ty(inner_value);
1359                let new_kind = match (inner_kind, kind) {
1360                    // Even if there's a narrowing cast in here, that's fine, because
1361                    // things like `*mut [i32] -> *mut i32 -> *const i32` and
1362                    // `*mut [i32] -> *const [i32] -> *const i32` can skip the middle in MIR.
1363                    (PtrToPtr, PtrToPtr) => Some(PtrToPtr),
1364                    // PtrToPtr-then-Transmute is fine so long as the pointer cast is identity:
1365                    // `*const T -> *mut T -> NonNull<T>` is fine, but we need to check for narrowing
1366                    // to skip things like `*const [i32] -> *const i32 -> NonNull<T>`.
1367                    (PtrToPtr, Transmute) if self.pointers_have_same_metadata(inner_from, from) => {
1368                        Some(Transmute)
1369                    }
1370                    // Similarly, for Transmute-then-PtrToPtr. Note that we need to check different
1371                    // variables for their metadata, and thus this can't merge with the previous arm.
1372                    (Transmute, PtrToPtr) if self.pointers_have_same_metadata(from, to) => {
1373                        Some(Transmute)
1374                    }
1375                    // It would be legal to always do this, but we don't want to hide information
1376                    // from the backend that it'd otherwise be able to use for optimizations.
1377                    (Transmute, Transmute)
1378                        if !self.type_may_have_niche_of_interest_to_backend(from) =>
1379                    {
1380                        Some(Transmute)
1381                    }
1382                    _ => None,
1383                };
1384                if let Some(new_kind) = new_kind {
1385                    kind = new_kind;
1386                    from = inner_from;
1387                    value = inner_value;
1388                    was_updated_this_iteration = true;
1389                    if inner_from == to {
1390                        return Some(inner_value);
1391                    }
1392                }
1393            }
1394
1395            if was_updated_this_iteration {
1396                was_ever_updated = true;
1397            } else {
1398                break;
1399            }
1400        }
1401
1402        if was_ever_updated && let Some(op) = self.try_as_operand(value, location) {
1403            *initial_operand = op;
1404            *initial_kind = kind;
1405        }
1406
1407        Some(self.insert(to, Value::Cast { kind, value }))
1408    }
1409
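    /// For example, if `x: &[i32]` was produced by unsizing a `&[i32; 3]`, then the length of
    /// `*x` is known to be 3 and the `Len` can be replaced by `const 3_usize`.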
1410    fn simplify_len(&mut self, place: &mut Place<'tcx>, location: Location) -> Option<VnIndex> {
1411        // Trivial case: we are fetching a statically known length.
1412        let place_ty = place.ty(self.local_decls, self.tcx).ty;
1413        if let ty::Array(_, len) = place_ty.kind() {
1414            return Some(self.insert_constant(Const::Ty(self.tcx.types.usize, *len)));
1415        }
1416
1417        let mut inner = self.simplify_place_value(place, location)?;
1418
1419        // The length information is stored in the wide pointer.
1420        // Reborrowing copies length information from one pointer to the other.
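        // e.g. a reborrow `&(*_a)` carries the same length metadata as `_a`, so we look through it.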
1421        while let Value::Address { place: borrowed, .. } = self.get(inner)
1422            && let [PlaceElem::Deref] = borrowed.projection[..]
1423            && let Some(borrowed) = self.locals[borrowed.local]
1424        {
1425            inner = borrowed;
1426        }
1427
1428        // We have an unsizing cast, which stores the length in the wide pointer metadata.
1429        if let Value::Cast { kind, value: from } = self.get(inner)
1430            && let CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _) = kind
1431            && let Some(from) = self.ty(*from).builtin_deref(true)
1432            && let ty::Array(_, len) = from.kind()
1433            && let Some(to) = self.ty(inner).builtin_deref(true)
1434            && let ty::Slice(..) = to.kind()
1435        {
1436            return Some(self.insert_constant(Const::Ty(self.tcx.types.usize, *len)));
1437        }
1438
1439        // Fallback: a symbolic `Len`.
1440        Some(self.insert(self.tcx.types.usize, Value::Len(inner)))
1441    }
1442
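    /// For instance, `*const [i32]` and `*mut [u64]` both use `usize` metadata, whereas
    /// `*const i32` (no metadata) and `*const [i32]` (`usize` metadata) do not match.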
1443    fn pointers_have_same_metadata(&self, left_ptr_ty: Ty<'tcx>, right_ptr_ty: Ty<'tcx>) -> bool {
1444        let left_meta_ty = left_ptr_ty.pointee_metadata_ty_or_projection(self.tcx);
1445        let right_meta_ty = right_ptr_ty.pointee_metadata_ty_or_projection(self.tcx);
1446        if left_meta_ty == right_meta_ty {
1447            true
1448        } else if let Ok(left) =
1449            self.tcx.try_normalize_erasing_regions(self.typing_env(), left_meta_ty)
1450            && let Ok(right) =
1451                self.tcx.try_normalize_erasing_regions(self.typing_env(), right_meta_ty)
1452        {
1453            left == right
1454        } else {
1455            false
1456        }
1457    }
1458
1459    /// Returns `false` if we know for sure that this type has no interesting niche,
1460    /// and thus we can skip transmuting through it without worrying.
1461    ///
1462    /// The backend will emit `assume`s when transmuting between types with niches,
1463    /// so we want to preserve `i32 -> char -> u32` so that data stays around,
1464    /// but it's fine to skip whole-range-is-valid steps like `A -> u32 -> B`.
1465    fn type_may_have_niche_of_interest_to_backend(&self, ty: Ty<'tcx>) -> bool {
1466        let Ok(layout) = self.ecx.layout_of(ty) else {
1467            // If it's too generic or something, then assume it might be interesting later.
1468            return true;
1469        };
1470
1471        if layout.uninhabited {
1472            return true;
1473        }
1474
1475        match layout.backend_repr {
1476            BackendRepr::Scalar(a) => !a.is_always_valid(&self.ecx),
1477            BackendRepr::ScalarPair(a, b) => {
1478                !a.is_always_valid(&self.ecx) || !b.is_always_valid(&self.ecx)
1479            }
1480            BackendRepr::SimdVector { .. } | BackendRepr::Memory { .. } => false,
1481        }
1482    }
1483
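    /// Illustrative cases (assuming the usual layouts): for `#[repr(transparent)] struct Wrap(u32)`
    /// this returns the single `u32` field, while for `struct Pair(u16, u16)` it returns `None`
    /// because the value's bytes span two fields.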
1484    fn value_is_all_in_one_field(
1485        &self,
1486        ty: Ty<'tcx>,
1487        variant: VariantIdx,
1488    ) -> Option<(FieldIdx, Ty<'tcx>)> {
1489        if let Ok(layout) = self.ecx.layout_of(ty)
1490            && let abi::Variants::Single { index } = layout.variants
1491            && index == variant
1492            && let Some((field_idx, field_layout)) = layout.non_1zst_field(&self.ecx)
1493            && layout.size == field_layout.size
1494        {
1495            // We needed to check the variant to avoid trying to read the tag
1496            // field from an enum where no variants have fields, since that tag
1497            // field isn't in the `Aggregate` from which we're getting values.
1498            Some((field_idx, field_layout.ty))
1499        } else if let ty::Adt(adt, args) = ty.kind()
1500            && adt.is_struct()
1501            && adt.repr().transparent()
1502            && let [single_field] = adt.non_enum_variant().fields.raw.as_slice()
1503        {
1504            Some((FieldIdx::ZERO, single_field.ty(self.tcx, args)))
1505        } else {
1506            None
1507        }
1508    }
1509}
1510
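/// Attempt to turn an interpreter value into a `ConstValue` that can be put back into MIR.
///
/// Roughly: ZSTs become `ConstValue::ZeroSized`, provenance-free scalars become
/// `ConstValue::Scalar`, and suitable in-memory values are interned and returned as
/// `ConstValue::Indirect`.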
1511fn op_to_prop_const<'tcx>(
1512    ecx: &mut InterpCx<'tcx, DummyMachine>,
1513    op: &OpTy<'tcx>,
1514) -> Option<ConstValue> {
1515    // Do not attempt to propagate unsized locals.
1516    if op.layout.is_unsized() {
1517        return None;
1518    }
1519
1520    // This constant is a ZST, just return an empty value.
1521    if op.layout.is_zst() {
1522        return Some(ConstValue::ZeroSized);
1523    }
1524
1525    // Do not synthesize too large constants. Codegen will just memcpy them, which we'd like to
1526    // avoid.
1527    if !matches!(op.layout.backend_repr, BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..)) {
1528        return None;
1529    }
1530
1531    // If this constant has scalar ABI, return it as a `ConstValue::Scalar`.
1532    if let BackendRepr::Scalar(abi::Scalar::Initialized { .. }) = op.layout.backend_repr
1533        && let Some(scalar) = ecx.read_scalar(op).discard_err()
1534    {
1535        if !scalar.try_to_scalar_int().is_ok() {
1536            // Check that we do not leak a pointer.
1537            // Those pointers may lose part of their identity in codegen.
1538            // FIXME: remove this hack once https://github.com/rust-lang/rust/issues/79738 is fixed.
1539            return None;
1540        }
1541        return Some(ConstValue::Scalar(scalar));
1542    }
1543
1544    // If this constant is already represented as an `Allocation`,
1545    // try putting it into global memory to return it.
1546    if let Either::Left(mplace) = op.as_mplace_or_imm() {
1547        let (size, _align) = ecx.size_and_align_of_val(&mplace).discard_err()??;
1548
1549        // Do not try interning a value that contains provenance.
1550        // Due to https://github.com/rust-lang/rust/issues/79738, doing so could lead to bugs.
1551        // FIXME: remove this hack once that issue is fixed.
1552        let alloc_ref = ecx.get_ptr_alloc(mplace.ptr(), size).discard_err()??;
1553        if alloc_ref.has_provenance() {
1554            return None;
1555        }
1556
1557        let pointer = mplace.ptr().into_pointer_or_addr().ok()?;
1558        let (prov, offset) = pointer.prov_and_relative_offset();
1559        let alloc_id = prov.alloc_id();
1560        intern_const_alloc_for_constprop(ecx, alloc_id).discard_err()?;
1561
1562        // `alloc_id` may point to a static. Codegen will choke on an `Indirect` with anything
1563        // but `GlobalAlloc::Memory`, so fall through to copying if needed.
1564        // FIXME: find a way to treat this more uniformly (probably by fixing codegen)
1565        if let GlobalAlloc::Memory(alloc) = ecx.tcx.global_alloc(alloc_id)
1566            // Transmuting a constant is just an offset in the allocation. If the alignment of the
1567            // allocation is not enough, fall back to copying into a properly aligned value.
1568            && alloc.inner().align >= op.layout.align.abi
1569        {
1570            return Some(ConstValue::Indirect { alloc_id, offset });
1571        }
1572    }
1573
1574    // Everything failed: create a new allocation to hold the data.
1575    let alloc_id =
1576        ecx.intern_with_temp_alloc(op.layout, |ecx, dest| ecx.copy_op(op, dest)).discard_err()?;
1577    let value = ConstValue::Indirect { alloc_id, offset: Size::ZERO };
1578
1579    // Check that we do not leak a pointer.
1580    // Those pointers may lose part of their identity in codegen.
1581    // FIXME: remove this hack once https://github.com/rust-lang/rust/issues/79738 is fixed.
1582    if ecx.tcx.global_alloc(alloc_id).unwrap_memory().inner().provenance().ptrs().is_empty() {
1583        return Some(value);
1584    }
1585
1586    None
1587}
1588
1589impl<'tcx> VnState<'_, 'tcx> {
1590    /// If either [`Self::try_as_constant`] or [`Self::try_as_place`] succeeds,
1591    /// returns that result as an [`Operand`].
1592    fn try_as_operand(&mut self, index: VnIndex, location: Location) -> Option<Operand<'tcx>> {
1593        if let Some(const_) = self.try_as_constant(index) {
1594            Some(Operand::Constant(Box::new(const_)))
1595        } else if let Some(place) = self.try_as_place(index, location, false) {
1596            self.reused_locals.insert(place.local);
1597            Some(Operand::Copy(place))
1598        } else {
1599            None
1600        }
1601    }
1602
1603    /// If `index` is a constant (or evaluates to one), return the `Constant` to be put in the MIR.
1604    fn try_as_constant(&mut self, index: VnIndex) -> Option<ConstOperand<'tcx>> {
1605        // This was already a constant in MIR; do not change it. If the constant is not
1606        // deterministic, adding an additional mention of it in MIR will not give the same value as
1607        // the former mention.
1608        if let Value::Constant { value, disambiguator: 0 } = *self.get(index) {
1609            debug_assert!(value.is_deterministic());
1610            return Some(ConstOperand { span: DUMMY_SP, user_ty: None, const_: value });
1611        }
1612
1613        let op = self.evaluated[index].as_ref()?;
1614        if op.layout.is_unsized() {
1615            // Do not attempt to propagate unsized locals.
1616            return None;
1617        }
1618
1619        let value = op_to_prop_const(&mut self.ecx, op)?;
1620
1621        // Check that we do not leak a pointer.
1622        // Those pointers may lose part of their identity in codegen.
1623        // FIXME: remove this hack once https://github.com/rust-lang/rust/issues/79738 is fixed.
1624        assert!(!value.may_have_provenance(self.tcx, op.layout.size));
1625
1626        let const_ = Const::Val(value, op.layout.ty);
1627        Some(ConstOperand { span: DUMMY_SP, user_ty: None, const_ })
1628    }
1629
1630    /// Construct a place which holds the same value as `index` and for which all locals strictly
1631    /// dominate `loc`. If you use this place, add its base local to `reused_locals` so its
1632    /// storage statements can be removed.
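    ///
    /// For example, a value recorded as `Projection(Projection(p, Deref), Field(2))` can be
    /// rebuilt as the place `(*_x).2`, provided the assignment of `p` to some local `_x`
    /// strictly dominates `loc`.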
1633    #[instrument(level = "trace", skip(self), ret)]
1634    fn try_as_place(
1635        &mut self,
1636        mut index: VnIndex,
1637        loc: Location,
1638        allow_complex_projection: bool,
1639    ) -> Option<Place<'tcx>> {
1640        let mut projection = SmallVec::<[PlaceElem<'tcx>; 1]>::new();
1641        loop {
1642            if let Some(local) = self.try_as_local(index, loc) {
1643                projection.reverse();
1644                let place =
1645                    Place { local, projection: self.tcx.mk_place_elems(projection.as_slice()) };
1646                return Some(place);
1647            } else if projection.last() == Some(&PlaceElem::Deref) {
1648                // `Deref` can only be the first projection in a place.
1649                // If we are here, we failed to find a local, and we already have a `Deref`.
1650                // Trying to add projections will only result in an ill-formed place.
1651                return None;
1652            } else if let Value::Projection(pointer, proj) = *self.get(index)
1653                && (allow_complex_projection || proj.is_stable_offset())
1654                && let Some(proj) = self.try_as_place_elem(self.ty(index), proj, loc)
1655            {
1656                projection.push(proj);
1657                index = pointer;
1658            } else {
1659                return None;
1660            }
1661        }
1662    }
1663
1664    /// If there is a local which is assigned `index`, and its assignment strictly dominates `loc`,
1665    /// return it. If you use this local, add it to `reused_locals` to remove storage statements.
1666    fn try_as_local(&mut self, index: VnIndex, loc: Location) -> Option<Local> {
1667        let other = self.rev_locals.get(index)?;
1668        other
1669            .iter()
1670            .find(|&&other| self.ssa.assignment_dominates(&self.dominators, other, loc))
1671            .copied()
1672    }
1673}
1674
1675impl<'tcx> MutVisitor<'tcx> for VnState<'_, 'tcx> {
1676    fn tcx(&self) -> TyCtxt<'tcx> {
1677        self.tcx
1678    }
1679
1680    fn visit_place(&mut self, place: &mut Place<'tcx>, context: PlaceContext, location: Location) {
1681        self.simplify_place_projection(place, location);
1682        if context.is_mutating_use() && place.is_indirect() {
1683            // A non-local mutation may invalidate derefs.
1684            self.invalidate_derefs();
1685        }
1686        self.super_place(place, context, location);
1687    }
1688
1689    fn visit_operand(&mut self, operand: &mut Operand<'tcx>, location: Location) {
1690        self.simplify_operand(operand, location);
1691        self.super_operand(operand, location);
1692    }
1693
1694    fn visit_assign(
1695        &mut self,
1696        lhs: &mut Place<'tcx>,
1697        rvalue: &mut Rvalue<'tcx>,
1698        location: Location,
1699    ) {
1700        self.simplify_place_projection(lhs, location);
1701
1702        let value = self.simplify_rvalue(lhs, rvalue, location);
1703        if let Some(value) = value {
1704            if let Some(const_) = self.try_as_constant(value) {
1705                *rvalue = Rvalue::Use(Operand::Constant(Box::new(const_)));
1706            } else if let Some(place) = self.try_as_place(value, location, false)
1707                && *rvalue != Rvalue::Use(Operand::Move(place))
1708                && *rvalue != Rvalue::Use(Operand::Copy(place))
1709            {
1710                *rvalue = Rvalue::Use(Operand::Copy(place));
1711                self.reused_locals.insert(place.local);
1712            }
1713        }
1714
1715        if lhs.is_indirect() {
1716            // A non-local mutation may invalidate derefs.
1717            self.invalidate_derefs();
1718        }
1719
1720        if let Some(local) = lhs.as_local()
1721            && self.ssa.is_ssa(local)
1722            && let rvalue_ty = rvalue.ty(self.local_decls, self.tcx)
1723            // FIXME(#112651) `rvalue` may have a subtype to `local`. We can only mark
1724            // `local` as reusable if we have an exact type match.
1725            && self.local_decls[local].ty == rvalue_ty
1726        {
1727            let value = value.unwrap_or_else(|| self.new_opaque(rvalue_ty));
1728            self.assign(local, value);
1729        }
1730    }
1731
1732    fn visit_terminator(&mut self, terminator: &mut Terminator<'tcx>, location: Location) {
1733        if let Terminator { kind: TerminatorKind::Call { destination, .. }, .. } = terminator {
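            // A call's return value is opaque to GVN: record a fresh value for the destination so
            // that later uses of that local can still participate in value numbering.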
1734            if let Some(local) = destination.as_local()
1735                && self.ssa.is_ssa(local)
1736            {
1737                let ty = self.local_decls[local].ty;
1738                let opaque = self.new_opaque(ty);
1739                self.assign(local, opaque);
1740            }
1741        }
1742        // Function calls and ASM may invalidate (nested) derefs. We must handle them carefully.
1743        // Currently, we only preserve derefs across trivial terminators like SwitchInt and Goto.
1744        let safe_to_preserve_derefs = matches!(
1745            terminator.kind,
1746            TerminatorKind::SwitchInt { .. } | TerminatorKind::Goto { .. }
1747        );
1748        if !safe_to_preserve_derefs {
1749            self.invalidate_derefs();
1750        }
1751        self.super_terminator(terminator, location);
1752    }
1753}
1754
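/// Rewrites moves of reused locals into copies and removes their storage statements: reusing a
/// local may extend its live range past its original `StorageLive`/`StorageDead` markers and past
/// any `move` of it.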
1755struct StorageRemover<'tcx> {
1756    tcx: TyCtxt<'tcx>,
1757    reused_locals: DenseBitSet<Local>,
1758}
1759
1760impl<'tcx> MutVisitor<'tcx> for StorageRemover<'tcx> {
1761    fn tcx(&self) -> TyCtxt<'tcx> {
1762        self.tcx
1763    }
1764
1765    fn visit_operand(&mut self, operand: &mut Operand<'tcx>, _: Location) {
1766        if let Operand::Move(place) = *operand
1767            && !place.is_indirect_first_projection()
1768            && self.reused_locals.contains(place.local)
1769        {
1770            *operand = Operand::Copy(place);
1771        }
1772    }
1773
1774    fn visit_statement(&mut self, stmt: &mut Statement<'tcx>, loc: Location) {
1775        match stmt.kind {
1776            // When removing storage statements, we need to remove both (#107511).
1777            StatementKind::StorageLive(l) | StatementKind::StorageDead(l)
1778                if self.reused_locals.contains(l) =>
1779            {
1780                stmt.make_nop()
1781            }
1782            _ => self.super_statement(stmt, loc),
1783        }
1784    }
1785}