rustc_mir_transform/gvn.rs

//! Global value numbering.
//!
//! MIR may contain repeated and/or redundant computations. The objective of this pass is to detect
//! such redundancies and re-use the already-computed result when possible.
//!
//! We traverse all assignments `x = rvalue` and operands.
//!
//! For each SSA one, we compute a symbolic representation of values that are assigned to SSA
//! locals. This symbolic representation is defined by the `Value` enum. Each produced instance of
//! `Value` is interned as a `VnIndex`, which allows us to cheaply detect identical values.
//!
//! From those assignments, we construct a mapping `VnIndex -> Vec<(Local, Location)>` of available
//! values, the locals in which they are stored, and the assignment location.
//!
//! For each non-SSA one, we compute the `VnIndex` of the rvalue. If this `VnIndex` is associated
//! to a constant, we replace the rvalue/operand by that constant. Otherwise, if there is an SSA
//! local `y` associated to this `VnIndex`, and if its definition location strictly dominates the
//! assignment to `x`, we replace the assignment by `x = y`.
//!
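//! For instance (an illustrative sketch, not MIR from a real test), given:
//! ```ignore (MIR)
//! _2 = Add(copy _1, const 1_i32); // SSA; this rvalue gets some `VnIndex` i
//! // some MIR
//! _3 = Add(copy _1, const 1_i32); // same `VnIndex` i; if `_2` dominates this point,
//!                                 // the assignment is replaced by `_3 = copy _2`
//! ```
//!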
//! Opportunistically, this pass also simplifies some `Rvalue`s based on the accumulated knowledge.
//!
//! # Operational semantics
//!
//! Operationally, this pass attempts to prove bitwise equality between locals. Given this MIR:
//! ```ignore (MIR)
//! _a = some value // has VnIndex i
//! // some MIR
//! _b = some other value // also has VnIndex i
//! ```
//!
//! We consider it to be replaceable by:
//! ```ignore (MIR)
//! _a = some value // has VnIndex i
//! // some MIR
//! _c = some other value // also has VnIndex i
//! assume(_a bitwise equal to _c) // follows from having the same VnIndex
//! _b = _a // follows from the `assume`
//! ```
//!
//! Which is simplifiable to:
//! ```ignore (MIR)
//! _a = some value // has VnIndex i
//! // some MIR
//! _b = _a
//! ```
//!
47//!
//! # Handling of references
//!
//! We handle references by assigning a different "provenance" index to each Ref/RawPtr rvalue.
//! This ensures that we do not spuriously merge borrows that should not be merged. Meanwhile, we
//! consider all the derefs of an immutable reference to a freeze type to give the same value:
//! ```ignore (MIR)
//! _a = *_b // _b is &Freeze
//! _c = *_b // replaced by _c = _a
//! ```
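//!
//! By contrast, every `Ref`/`RawPtr` rvalue receives its own provenance index, so (as a
//! hand-written illustration) two borrows of the same place are deliberately not unified:
//! ```ignore (MIR)
//! _a = &raw mut _x // gets provenance 1
//! _b = &raw mut _x // gets provenance 2, so it is not replaced by `_a`
//! ```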
//!
//! # Determinism of constant propagation
//!
//! When registering a new `Value`, we attempt to opportunistically evaluate it as a constant.
//! The evaluated form is inserted in `evaluated` as an `OpTy` or `None` if evaluation failed.
//!
//! The difficulty is non-deterministic evaluation of MIR constants. Some `Const`s can have
//! different runtime values each time they are evaluated. This is the case for `Const::Slice`,
//! which gets a new pointer each time it is evaluated, and for constants that contain a fn pointer
//! (an `AllocId` pointing to a `GlobalAlloc::Function`), which may point to a different symbol in
//! each codegen unit.
//!
//! Meanwhile, we want to be able to read indirect constants. For instance:
//! ```
//! static A: &'static &'static u8 = &&63;
//! fn foo() -> u8 {
//!     **A // We want to replace this by 63.
//! }
//! fn bar() -> u8 {
//!     b"abc"[1] // We want to replace this by 'b'.
//! }
//! ```
//!
//! The `Value::Constant` variant stores a possibly unevaluated constant. Evaluating that constant
//! may be non-deterministic. When that happens, we assign a disambiguator to ensure that we do not
//! merge the constants. See the `duplicate_slice` test in `gvn.rs`.
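//!
//! For instance (an illustrative sketch, not the actual `duplicate_slice` test), both mentions of
//! the same slice constant below may evaluate to different pointers, so they must not receive the
//! same `VnIndex`:
//! ```ignore (MIR)
//! _a = const "foo";
//! _b = const "foo"; // may point to a different allocation than `_a`
//! ```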
//!
//! Second, when writing constants in MIR, we do not write `Const::Slice` or `Const`
//! that contain `AllocId`s.

use std::borrow::Cow;

use either::Either;
use rustc_abi::{self as abi, BackendRepr, FIRST_VARIANT, FieldIdx, Primitive, Size, VariantIdx};
use rustc_const_eval::const_eval::DummyMachine;
use rustc_const_eval::interpret::{
    ImmTy, Immediate, InterpCx, MemPlaceMeta, MemoryKind, OpTy, Projectable, Scalar,
    intern_const_alloc_for_constprop,
};
use rustc_data_structures::fx::{FxIndexSet, MutableValues};
use rustc_data_structures::graph::dominators::Dominators;
use rustc_hir::def::DefKind;
use rustc_index::bit_set::DenseBitSet;
use rustc_index::{IndexVec, newtype_index};
use rustc_middle::bug;
use rustc_middle::mir::interpret::GlobalAlloc;
use rustc_middle::mir::visit::*;
use rustc_middle::mir::*;
use rustc_middle::ty::layout::{HasTypingEnv, LayoutOf};
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_span::DUMMY_SP;
use rustc_span::def_id::DefId;
use smallvec::SmallVec;
use tracing::{debug, instrument, trace};

use crate::ssa::SsaLocals;

pub(super) struct GVN;

impl<'tcx> crate::MirPass<'tcx> for GVN {
    fn is_enabled(&self, sess: &rustc_session::Session) -> bool {
        sess.mir_opt_level() >= 2
    }

    #[instrument(level = "trace", skip(self, tcx, body))]
    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
        debug!(def_id = ?body.source.def_id());

        let typing_env = body.typing_env(tcx);
        let ssa = SsaLocals::new(tcx, body, typing_env);
        // Clone dominators because we need them while mutating the body.
        let dominators = body.basic_blocks.dominators().clone();

        let mut state = VnState::new(tcx, body, typing_env, &ssa, dominators, &body.local_decls);

        for local in body.args_iter().filter(|&local| ssa.is_ssa(local)) {
            let opaque = state.new_opaque();
            state.assign(local, opaque);
        }

        let reverse_postorder = body.basic_blocks.reverse_postorder().to_vec();
        for bb in reverse_postorder {
            let data = &mut body.basic_blocks.as_mut_preserves_cfg()[bb];
            state.visit_basic_block_data(bb, data);
        }

        // For each local that is reused (`y` above), we remove its storage statements to avoid any
        // difficulty. Those locals are SSA, so they should be easy for LLVM to optimize without
        // storage statements.
        StorageRemover { tcx, reused_locals: state.reused_locals }.visit_body_preserves_cfg(body);
    }

    fn is_required(&self) -> bool {
        false
    }
}

newtype_index! {
    struct VnIndex {}
}

/// Computing the aggregate's type can be quite slow, so we only keep the minimal amount of
/// information to reconstruct it when needed.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
enum AggregateTy<'tcx> {
    /// Invariant: this must not be used for an empty array.
    Array,
    Tuple,
    Def(DefId, ty::GenericArgsRef<'tcx>),
    RawPtr {
        /// Needed for cast propagation.
        data_pointer_ty: Ty<'tcx>,
        /// The data pointer can be anything thin, so doesn't determine the output.
        output_pointer_ty: Ty<'tcx>,
    },
}

#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
enum AddressKind {
    Ref(BorrowKind),
    Address(RawPtrKind),
}

#[derive(Debug, PartialEq, Eq, Hash)]
enum Value<'tcx> {
    // Root values.
    /// Used to represent values we know nothing about.
    /// The `usize` is a counter incremented by `new_opaque`.
    Opaque(usize),
    /// Evaluated or unevaluated constant value.
    Constant {
        value: Const<'tcx>,
        /// Some constants do not have a deterministic value. To avoid merging two instances of the
        /// same `Const`, we assign them an additional integer index.
        // `disambiguator` is 0 iff the constant is deterministic.
        disambiguator: usize,
    },
    /// An aggregate value: a tuple, closure, struct or enum.
    /// This does not include unions, as we cannot reason about their value.
    Aggregate(AggregateTy<'tcx>, VariantIdx, Vec<VnIndex>),
    /// This corresponds to a `[value; count]` expression.
    Repeat(VnIndex, ty::Const<'tcx>),
    /// The address of a place.
    Address {
        place: Place<'tcx>,
        kind: AddressKind,
        /// Give each borrow and pointer a different provenance, so we don't merge them.
        provenance: usize,
    },

    // Extractions.
    /// This is the *value* obtained by projecting another value.
    Projection(VnIndex, ProjectionElem<VnIndex, Ty<'tcx>>),
    /// Discriminant of the given value.
    Discriminant(VnIndex),
    /// Length of an array or slice.
    Len(VnIndex),

    // Operations.
    NullaryOp(NullOp<'tcx>, Ty<'tcx>),
    UnaryOp(UnOp, VnIndex),
    BinaryOp(BinOp, VnIndex, VnIndex),
    Cast {
        kind: CastKind,
        value: VnIndex,
        from: Ty<'tcx>,
        to: Ty<'tcx>,
    },
}

struct VnState<'body, 'tcx> {
    tcx: TyCtxt<'tcx>,
    ecx: InterpCx<'tcx, DummyMachine>,
    local_decls: &'body LocalDecls<'tcx>,
    /// Value stored in each local.
    locals: IndexVec<Local, Option<VnIndex>>,
    /// Locals that are assigned that value.
    // This vector does not hold all the values of `VnIndex` that we create.
    rev_locals: IndexVec<VnIndex, SmallVec<[Local; 1]>>,
    values: FxIndexSet<Value<'tcx>>,
    /// Values evaluated as constants if possible.
    evaluated: IndexVec<VnIndex, Option<OpTy<'tcx>>>,
    /// Counter to generate different values.
    next_opaque: usize,
    /// Cache the deref values.
    derefs: Vec<VnIndex>,
    /// Cache the value of the `unsized_locals` feature, to avoid fetching it repeatedly in a loop.
    feature_unsized_locals: bool,
    ssa: &'body SsaLocals,
    dominators: Dominators<BasicBlock>,
    reused_locals: DenseBitSet<Local>,
}

impl<'body, 'tcx> VnState<'body, 'tcx> {
    fn new(
        tcx: TyCtxt<'tcx>,
        body: &Body<'tcx>,
        typing_env: ty::TypingEnv<'tcx>,
        ssa: &'body SsaLocals,
        dominators: Dominators<BasicBlock>,
        local_decls: &'body LocalDecls<'tcx>,
    ) -> Self {
        // Compute a rough estimate of the number of values in the body from the number of
        // statements. This is meant to reduce the number of allocations, but it's all right if
        // we miss the exact amount. We estimate based on 2 values per statement (one in LHS and
        // one in RHS) and 4 values per terminator (for call operands).
        let num_values =
            2 * body.basic_blocks.iter().map(|bbdata| bbdata.statements.len()).sum::<usize>()
                + 4 * body.basic_blocks.len();
        VnState {
            tcx,
            ecx: InterpCx::new(tcx, DUMMY_SP, typing_env, DummyMachine),
            local_decls,
            locals: IndexVec::from_elem(None, local_decls),
            rev_locals: IndexVec::with_capacity(num_values),
            values: FxIndexSet::with_capacity_and_hasher(num_values, Default::default()),
            evaluated: IndexVec::with_capacity(num_values),
            next_opaque: 1,
            derefs: Vec::new(),
            feature_unsized_locals: tcx.features().unsized_locals(),
            ssa,
            dominators,
            reused_locals: DenseBitSet::new_empty(local_decls.len()),
        }
    }

    fn typing_env(&self) -> ty::TypingEnv<'tcx> {
        self.ecx.typing_env()
    }

    #[instrument(level = "trace", skip(self), ret)]
    fn insert(&mut self, value: Value<'tcx>) -> VnIndex {
        let (index, new) = self.values.insert_full(value);
        let index = VnIndex::from_usize(index);
        if new {
            // Grow `evaluated` and `rev_locals` here to amortize the allocations.
            let evaluated = self.eval_to_const(index);
            let _index = self.evaluated.push(evaluated);
            debug_assert_eq!(index, _index);
            let _index = self.rev_locals.push(SmallVec::new());
            debug_assert_eq!(index, _index);
        }
        index
    }

    fn next_opaque(&mut self) -> usize {
        let next_opaque = self.next_opaque;
        self.next_opaque += 1;
        next_opaque
    }

    /// Create a new `Value` for which we have no information at all, except that it is distinct
    /// from all the others.
    #[instrument(level = "trace", skip(self), ret)]
    fn new_opaque(&mut self) -> VnIndex {
        let value = Value::Opaque(self.next_opaque());
        self.insert(value)
    }

    /// Create a new `Value::Address` distinct from all the others.
    #[instrument(level = "trace", skip(self), ret)]
    fn new_pointer(&mut self, place: Place<'tcx>, kind: AddressKind) -> VnIndex {
        let value = Value::Address { place, kind, provenance: self.next_opaque() };
        self.insert(value)
    }

    fn get(&self, index: VnIndex) -> &Value<'tcx> {
        self.values.get_index(index.as_usize()).unwrap()
    }

    /// Record that `local` is assigned `value`. `local` must be SSA.
    #[instrument(level = "trace", skip(self))]
    fn assign(&mut self, local: Local, value: VnIndex) {
        debug_assert!(self.ssa.is_ssa(local));
        self.locals[local] = Some(value);

        // Only register the value if its type is `Sized`, as we will emit copies of it.
        let is_sized = !self.feature_unsized_locals
            || self.local_decls[local].ty.is_sized(self.tcx, self.typing_env());
        if is_sized {
            self.rev_locals[value].push(local);
        }
    }

    fn insert_constant(&mut self, value: Const<'tcx>) -> VnIndex {
        let disambiguator = if value.is_deterministic() {
            // The constant is deterministic, no need to disambiguate.
            0
        } else {
            // Multiple mentions of this constant will yield different values,
            // so assign a different `disambiguator` to ensure they do not get the same `VnIndex`.
            let disambiguator = self.next_opaque();
            // `disambiguator: 0` means deterministic.
            debug_assert_ne!(disambiguator, 0);
            disambiguator
        };
        self.insert(Value::Constant { value, disambiguator })
    }

    fn insert_bool(&mut self, flag: bool) -> VnIndex {
        // Booleans are deterministic.
        let value = Const::from_bool(self.tcx, flag);
        debug_assert!(value.is_deterministic());
        self.insert(Value::Constant { value, disambiguator: 0 })
    }

    fn insert_scalar(&mut self, scalar: Scalar, ty: Ty<'tcx>) -> VnIndex {
        // Scalars are deterministic.
        let value = Const::from_scalar(self.tcx, scalar, ty);
        debug_assert!(value.is_deterministic());
        self.insert(Value::Constant { value, disambiguator: 0 })
    }

    fn insert_tuple(&mut self, values: Vec<VnIndex>) -> VnIndex {
        self.insert(Value::Aggregate(AggregateTy::Tuple, VariantIdx::ZERO, values))
    }

    fn insert_deref(&mut self, value: VnIndex) -> VnIndex {
        let value = self.insert(Value::Projection(value, ProjectionElem::Deref));
        self.derefs.push(value);
        value
    }

    fn invalidate_derefs(&mut self) {
        for deref in std::mem::take(&mut self.derefs) {
            let opaque = self.next_opaque();
            *self.values.get_index_mut2(deref.index()).unwrap() = Value::Opaque(opaque);
        }
    }

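    /// Attempt to evaluate `value` to a constant `OpTy`, by interpreting its symbolic structure
    /// with the const-eval machinery. Returns `None` as soon as any part cannot be evaluated.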
    #[instrument(level = "trace", skip(self), ret)]
    fn eval_to_const(&mut self, value: VnIndex) -> Option<OpTy<'tcx>> {
        use Value::*;
        let op = match *self.get(value) {
            Opaque(_) => return None,
            // Do not bother evaluating repeat expressions. This would uselessly consume memory.
            Repeat(..) => return None,

            Constant { ref value, disambiguator: _ } => {
                self.ecx.eval_mir_constant(value, DUMMY_SP, None).discard_err()?
            }
            Aggregate(kind, variant, ref fields) => {
                let fields = fields
                    .iter()
                    .map(|&f| self.evaluated[f].as_ref())
                    .collect::<Option<Vec<_>>>()?;
                let ty = match kind {
                    AggregateTy::Array => {
                        assert!(fields.len() > 0);
                        Ty::new_array(self.tcx, fields[0].layout.ty, fields.len() as u64)
                    }
                    AggregateTy::Tuple => {
                        Ty::new_tup_from_iter(self.tcx, fields.iter().map(|f| f.layout.ty))
                    }
                    AggregateTy::Def(def_id, args) => {
                        self.tcx.type_of(def_id).instantiate(self.tcx, args)
                    }
                    AggregateTy::RawPtr { output_pointer_ty, .. } => output_pointer_ty,
                };
                let variant = if ty.is_enum() { Some(variant) } else { None };
                let ty = self.ecx.layout_of(ty).ok()?;
                if ty.is_zst() {
                    ImmTy::uninit(ty).into()
                } else if matches!(kind, AggregateTy::RawPtr { .. }) {
                    // Pointers don't have fields, so don't `project_field` them.
                    let data = self.ecx.read_pointer(fields[0]).discard_err()?;
                    let meta = if fields[1].layout.is_zst() {
                        MemPlaceMeta::None
                    } else {
                        MemPlaceMeta::Meta(self.ecx.read_scalar(fields[1]).discard_err()?)
                    };
                    let ptr_imm = Immediate::new_pointer_with_meta(data, meta, &self.ecx);
                    ImmTy::from_immediate(ptr_imm, ty).into()
                } else if matches!(
                    ty.backend_repr,
                    BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..)
                ) {
                    let dest = self.ecx.allocate(ty, MemoryKind::Stack).discard_err()?;
                    let variant_dest = if let Some(variant) = variant {
                        self.ecx.project_downcast(&dest, variant).discard_err()?
                    } else {
                        dest.clone()
                    };
                    for (field_index, op) in fields.into_iter().enumerate() {
                        let field_dest =
                            self.ecx.project_field(&variant_dest, field_index).discard_err()?;
                        self.ecx.copy_op(op, &field_dest).discard_err()?;
                    }
                    self.ecx
                        .write_discriminant(variant.unwrap_or(FIRST_VARIANT), &dest)
                        .discard_err()?;
                    self.ecx
                        .alloc_mark_immutable(dest.ptr().provenance.unwrap().alloc_id())
                        .discard_err()?;
                    dest.into()
                } else {
                    return None;
                }
            }

            Projection(base, elem) => {
                let value = self.evaluated[base].as_ref()?;
                let elem = match elem {
                    ProjectionElem::Deref => ProjectionElem::Deref,
                    ProjectionElem::Downcast(name, read_variant) => {
                        ProjectionElem::Downcast(name, read_variant)
                    }
                    ProjectionElem::Field(f, ty) => ProjectionElem::Field(f, ty),
                    ProjectionElem::ConstantIndex { offset, min_length, from_end } => {
                        ProjectionElem::ConstantIndex { offset, min_length, from_end }
                    }
                    ProjectionElem::Subslice { from, to, from_end } => {
                        ProjectionElem::Subslice { from, to, from_end }
                    }
                    ProjectionElem::OpaqueCast(ty) => ProjectionElem::OpaqueCast(ty),
                    ProjectionElem::Subtype(ty) => ProjectionElem::Subtype(ty),
                    ProjectionElem::UnwrapUnsafeBinder(ty) => {
                        ProjectionElem::UnwrapUnsafeBinder(ty)
                    }
                    // This should have been replaced by a `ConstantIndex` earlier.
                    ProjectionElem::Index(_) => return None,
                };
                self.ecx.project(value, elem).discard_err()?
            }
            Address { place, kind, provenance: _ } => {
                if !place.is_indirect_first_projection() {
                    return None;
                }
                let local = self.locals[place.local]?;
                let pointer = self.evaluated[local].as_ref()?;
                let mut mplace = self.ecx.deref_pointer(pointer).discard_err()?;
                for proj in place.projection.iter().skip(1) {
                    // We have no call stack to associate a local with a value, so we cannot
                    // interpret indexing.
                    if matches!(proj, ProjectionElem::Index(_)) {
                        return None;
                    }
                    mplace = self.ecx.project(&mplace, proj).discard_err()?;
                }
                let pointer = mplace.to_ref(&self.ecx);
                let ty = match kind {
                    AddressKind::Ref(bk) => Ty::new_ref(
                        self.tcx,
                        self.tcx.lifetimes.re_erased,
                        mplace.layout.ty,
                        bk.to_mutbl_lossy(),
                    ),
                    AddressKind::Address(mutbl) => {
                        Ty::new_ptr(self.tcx, mplace.layout.ty, mutbl.to_mutbl_lossy())
                    }
                };
                let layout = self.ecx.layout_of(ty).ok()?;
                ImmTy::from_immediate(pointer, layout).into()
            }

            Discriminant(base) => {
                let base = self.evaluated[base].as_ref()?;
                let variant = self.ecx.read_discriminant(base).discard_err()?;
                let discr_value =
                    self.ecx.discriminant_for_variant(base.layout.ty, variant).discard_err()?;
                discr_value.into()
            }
            Len(slice) => {
                let slice = self.evaluated[slice].as_ref()?;
                let usize_layout = self.ecx.layout_of(self.tcx.types.usize).unwrap();
                let len = slice.len(&self.ecx).discard_err()?;
                let imm = ImmTy::from_uint(len, usize_layout);
                imm.into()
            }
            NullaryOp(null_op, ty) => {
                let layout = self.ecx.layout_of(ty).ok()?;
                if let NullOp::SizeOf | NullOp::AlignOf = null_op
                    && layout.is_unsized()
                {
                    return None;
                }
                let val = match null_op {
                    NullOp::SizeOf => layout.size.bytes(),
                    NullOp::AlignOf => layout.align.abi.bytes(),
                    NullOp::OffsetOf(fields) => self
                        .ecx
                        .tcx
                        .offset_of_subfield(self.typing_env(), layout, fields.iter())
                        .bytes(),
                    NullOp::UbChecks => return None,
                    NullOp::ContractChecks => return None,
                };
                let usize_layout = self.ecx.layout_of(self.tcx.types.usize).unwrap();
                let imm = ImmTy::from_uint(val, usize_layout);
                imm.into()
            }
            UnaryOp(un_op, operand) => {
                let operand = self.evaluated[operand].as_ref()?;
                let operand = self.ecx.read_immediate(operand).discard_err()?;
                let val = self.ecx.unary_op(un_op, &operand).discard_err()?;
                val.into()
            }
            BinaryOp(bin_op, lhs, rhs) => {
                let lhs = self.evaluated[lhs].as_ref()?;
                let lhs = self.ecx.read_immediate(lhs).discard_err()?;
                let rhs = self.evaluated[rhs].as_ref()?;
                let rhs = self.ecx.read_immediate(rhs).discard_err()?;
                let val = self.ecx.binary_op(bin_op, &lhs, &rhs).discard_err()?;
                val.into()
            }
            Cast { kind, value, from: _, to } => match kind {
                CastKind::IntToInt | CastKind::IntToFloat => {
                    let value = self.evaluated[value].as_ref()?;
                    let value = self.ecx.read_immediate(value).discard_err()?;
                    let to = self.ecx.layout_of(to).ok()?;
                    let res = self.ecx.int_to_int_or_float(&value, to).discard_err()?;
                    res.into()
                }
                CastKind::FloatToFloat | CastKind::FloatToInt => {
                    let value = self.evaluated[value].as_ref()?;
                    let value = self.ecx.read_immediate(value).discard_err()?;
                    let to = self.ecx.layout_of(to).ok()?;
                    let res = self.ecx.float_to_float_or_int(&value, to).discard_err()?;
                    res.into()
                }
                CastKind::Transmute => {
                    let value = self.evaluated[value].as_ref()?;
                    let to = self.ecx.layout_of(to).ok()?;
                    // `offset` for immediates generally only supports projections that match the
                    // type of the immediate. However, as a HACK, we exploit that it can also do
                    // limited transmutes: it only works between types with the same layout, and
                    // cannot transmute pointers to integers.
                    if value.as_mplace_or_imm().is_right() {
                        let can_transmute = match (value.layout.backend_repr, to.backend_repr) {
                            (BackendRepr::Scalar(s1), BackendRepr::Scalar(s2)) => {
                                s1.size(&self.ecx) == s2.size(&self.ecx)
                                    && !matches!(s1.primitive(), Primitive::Pointer(..))
                            }
                            (BackendRepr::ScalarPair(a1, b1), BackendRepr::ScalarPair(a2, b2)) => {
                                a1.size(&self.ecx) == a2.size(&self.ecx) &&
                                b1.size(&self.ecx) == b2.size(&self.ecx) &&
                                // The alignment of the second component determines its offset, so that also needs to match.
                                b1.align(&self.ecx) == b2.align(&self.ecx) &&
                                // None of the inputs may be a pointer.
                                !matches!(a1.primitive(), Primitive::Pointer(..))
                                    && !matches!(b1.primitive(), Primitive::Pointer(..))
                            }
                            _ => false,
                        };
                        if !can_transmute {
                            return None;
                        }
                    }
                    value.offset(Size::ZERO, to, &self.ecx).discard_err()?
                }
                CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _) => {
                    let src = self.evaluated[value].as_ref()?;
                    let to = self.ecx.layout_of(to).ok()?;
                    let dest = self.ecx.allocate(to, MemoryKind::Stack).discard_err()?;
                    self.ecx.unsize_into(src, to, &dest.clone().into()).discard_err()?;
                    self.ecx
                        .alloc_mark_immutable(dest.ptr().provenance.unwrap().alloc_id())
                        .discard_err()?;
                    dest.into()
                }
                CastKind::FnPtrToPtr | CastKind::PtrToPtr => {
                    let src = self.evaluated[value].as_ref()?;
                    let src = self.ecx.read_immediate(src).discard_err()?;
                    let to = self.ecx.layout_of(to).ok()?;
                    let ret = self.ecx.ptr_to_ptr(&src, to).discard_err()?;
                    ret.into()
                }
                CastKind::PointerCoercion(ty::adjustment::PointerCoercion::UnsafeFnPointer, _) => {
                    let src = self.evaluated[value].as_ref()?;
                    let src = self.ecx.read_immediate(src).discard_err()?;
                    let to = self.ecx.layout_of(to).ok()?;
                    ImmTy::from_immediate(*src, to).into()
                }
                _ => return None,
            },
        };
        Some(op)
    }

    fn project(
        &mut self,
        place: PlaceRef<'tcx>,
        value: VnIndex,
        proj: PlaceElem<'tcx>,
        from_non_ssa_index: &mut bool,
    ) -> Option<VnIndex> {
        let proj = match proj {
            ProjectionElem::Deref => {
                let ty = place.ty(self.local_decls, self.tcx).ty;
                if let Some(Mutability::Not) = ty.ref_mutability()
                    && let Some(pointee_ty) = ty.builtin_deref(true)
                    && pointee_ty.is_freeze(self.tcx, self.typing_env())
                {
                    // An immutable borrow `_x` always points to the same value for the
                    // lifetime of the borrow, so we can merge all instances of `*_x`.
                    return Some(self.insert_deref(value));
                } else {
                    return None;
                }
            }
            ProjectionElem::Downcast(name, index) => ProjectionElem::Downcast(name, index),
            ProjectionElem::Field(f, ty) => {
                if let Value::Aggregate(_, _, fields) = self.get(value) {
                    return Some(fields[f.as_usize()]);
                } else if let Value::Projection(outer_value, ProjectionElem::Downcast(_, read_variant)) = self.get(value)
                    && let Value::Aggregate(_, written_variant, fields) = self.get(*outer_value)
                    // This pass is not aware of control-flow, so we do not know whether the
                    // replacement we are doing is actually reachable. We could be in any arm of
                    // ```
                    // match Some(x) {
                    //     Some(y) => /* stuff */,
                    //     None => /* other */,
                    // }
                    // ```
                    //
                    // In surface rust, the current statement would be unreachable.
                    //
                    // However, from the reference chapter on enums and RFC 2195,
                    // accessing the wrong variant is not UB if the enum has repr.
                    // So it's not impossible for a series of MIR opts to generate
                    // a downcast to an inactive variant.
                    && written_variant == read_variant
                {
                    return Some(fields[f.as_usize()]);
                }
                ProjectionElem::Field(f, ty)
            }
            ProjectionElem::Index(idx) => {
                if let Value::Repeat(inner, _) = self.get(value) {
                    *from_non_ssa_index |= self.locals[idx].is_none();
                    return Some(*inner);
                }
                let idx = self.locals[idx]?;
                ProjectionElem::Index(idx)
            }
            ProjectionElem::ConstantIndex { offset, min_length, from_end } => {
                match self.get(value) {
                    Value::Repeat(inner, _) => {
                        return Some(*inner);
                    }
                    Value::Aggregate(AggregateTy::Array, _, operands) => {
                        let offset = if from_end {
                            operands.len() - offset as usize
                        } else {
                            offset as usize
                        };
                        return operands.get(offset).copied();
                    }
                    _ => {}
                };
                ProjectionElem::ConstantIndex { offset, min_length, from_end }
            }
            ProjectionElem::Subslice { from, to, from_end } => {
                ProjectionElem::Subslice { from, to, from_end }
            }
            ProjectionElem::OpaqueCast(ty) => ProjectionElem::OpaqueCast(ty),
            ProjectionElem::Subtype(ty) => ProjectionElem::Subtype(ty),
            ProjectionElem::UnwrapUnsafeBinder(ty) => ProjectionElem::UnwrapUnsafeBinder(ty),
        };

        Some(self.insert(Value::Projection(value, proj)))
    }

    /// Simplify the projection chain if we know better.
    #[instrument(level = "trace", skip(self))]
    fn simplify_place_projection(&mut self, place: &mut Place<'tcx>, location: Location) {
        // If the projection is indirect, we treat the local as a value, so we can replace it with
        // another local.
        if place.is_indirect_first_projection()
            && let Some(base) = self.locals[place.local]
            && let Some(new_local) = self.try_as_local(base, location)
            && place.local != new_local
        {
            place.local = new_local;
            self.reused_locals.insert(new_local);
        }

        let mut projection = Cow::Borrowed(&place.projection[..]);

        for i in 0..projection.len() {
            let elem = projection[i];
            if let ProjectionElem::Index(idx_local) = elem
                && let Some(idx) = self.locals[idx_local]
            {
                if let Some(offset) = self.evaluated[idx].as_ref()
                    && let Some(offset) = self.ecx.read_target_usize(offset).discard_err()
                    && let Some(min_length) = offset.checked_add(1)
                {
                    projection.to_mut()[i] =
                        ProjectionElem::ConstantIndex { offset, min_length, from_end: false };
                } else if let Some(new_idx_local) = self.try_as_local(idx, location)
                    && idx_local != new_idx_local
                {
                    projection.to_mut()[i] = ProjectionElem::Index(new_idx_local);
                    self.reused_locals.insert(new_idx_local);
                }
            }
        }

        if projection.is_owned() {
            place.projection = self.tcx.mk_place_elems(&projection);
        }

        trace!(?place);
    }

    /// Represent the *value* which would be read from `place`, and point `place` to a preexisting
    /// place with the same value (if that already exists).
    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_place_value(
        &mut self,
        place: &mut Place<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        self.simplify_place_projection(place, location);

        // Invariant: `place` and `place_ref` point to the same value, even if they point to
        // different memory locations.
        let mut place_ref = place.as_ref();

        // Invariant: `value` holds the value up-to the `index`th projection excluded.
        let mut value = self.locals[place.local]?;
        let mut from_non_ssa_index = false;
        for (index, proj) in place.projection.iter().enumerate() {
            if let Value::Projection(pointer, ProjectionElem::Deref) = *self.get(value)
                && let Value::Address { place: mut pointee, kind, .. } = *self.get(pointer)
                && let AddressKind::Ref(BorrowKind::Shared) = kind
                && let Some(v) = self.simplify_place_value(&mut pointee, location)
            {
                value = v;
                place_ref = pointee.project_deeper(&place.projection[index..], self.tcx).as_ref();
            }
            if let Some(local) = self.try_as_local(value, location) {
                // Both `local` and `Place { local: place.local, projection: projection[..index] }`
                // hold the same value. Therefore, the following place holds the value in the
                // original `place`.
                place_ref = PlaceRef { local, projection: &place.projection[index..] };
            }

            let base = PlaceRef { local: place.local, projection: &place.projection[..index] };
            value = self.project(base, value, proj, &mut from_non_ssa_index)?;
        }

        if let Value::Projection(pointer, ProjectionElem::Deref) = *self.get(value)
            && let Value::Address { place: mut pointee, kind, .. } = *self.get(pointer)
            && let AddressKind::Ref(BorrowKind::Shared) = kind
            && let Some(v) = self.simplify_place_value(&mut pointee, location)
        {
            value = v;
            place_ref = pointee.project_deeper(&[], self.tcx).as_ref();
        }
        if let Some(new_local) = self.try_as_local(value, location) {
            place_ref = PlaceRef { local: new_local, projection: &[] };
        } else if from_non_ssa_index {
            // If access to non-SSA locals is unavoidable, bail out.
            return None;
        }

        if place_ref.local != place.local || place_ref.projection.len() < place.projection.len() {
            // By the invariant on `place_ref`.
            *place = place_ref.project_deeper(&[], self.tcx);
            self.reused_locals.insert(place_ref.local);
        }

        Some(value)
    }

    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_operand(
        &mut self,
        operand: &mut Operand<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        match *operand {
            Operand::Constant(ref constant) => Some(self.insert_constant(constant.const_)),
            Operand::Copy(ref mut place) | Operand::Move(ref mut place) => {
                let value = self.simplify_place_value(place, location)?;
                if let Some(const_) = self.try_as_constant(value) {
                    *operand = Operand::Constant(Box::new(const_));
                }
                Some(value)
            }
        }
    }

    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_rvalue(
        &mut self,
        lhs: &Place<'tcx>,
        rvalue: &mut Rvalue<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        let value = match *rvalue {
            // Forward values.
            Rvalue::Use(ref mut operand) => return self.simplify_operand(operand, location),
            Rvalue::CopyForDeref(place) => {
                let mut operand = Operand::Copy(place);
                let val = self.simplify_operand(&mut operand, location);
                *rvalue = Rvalue::Use(operand);
                return val;
            }

            // Roots.
            Rvalue::Repeat(ref mut op, amount) => {
                let op = self.simplify_operand(op, location)?;
                Value::Repeat(op, amount)
            }
            Rvalue::NullaryOp(op, ty) => Value::NullaryOp(op, ty),
            Rvalue::Aggregate(..) => return self.simplify_aggregate(lhs, rvalue, location),
            Rvalue::Ref(_, borrow_kind, ref mut place) => {
                self.simplify_place_projection(place, location);
                return Some(self.new_pointer(*place, AddressKind::Ref(borrow_kind)));
            }
            Rvalue::RawPtr(mutbl, ref mut place) => {
                self.simplify_place_projection(place, location);
                return Some(self.new_pointer(*place, AddressKind::Address(mutbl)));
            }
            Rvalue::WrapUnsafeBinder(ref mut op, ty) => {
                let value = self.simplify_operand(op, location)?;
                Value::Cast {
                    kind: CastKind::Transmute,
                    value,
                    from: op.ty(self.local_decls, self.tcx),
                    to: ty,
                }
            }

            // Operations.
            Rvalue::Len(ref mut place) => return self.simplify_len(place, location),
            Rvalue::Cast(ref mut kind, ref mut value, to) => {
                return self.simplify_cast(kind, value, to, location);
            }
            Rvalue::BinaryOp(op, box (ref mut lhs, ref mut rhs)) => {
                return self.simplify_binary(op, lhs, rhs, location);
            }
            Rvalue::UnaryOp(op, ref mut arg_op) => {
                return self.simplify_unary(op, arg_op, location);
            }
            Rvalue::Discriminant(ref mut place) => {
                let place = self.simplify_place_value(place, location)?;
                if let Some(discr) = self.simplify_discriminant(place) {
                    return Some(discr);
                }
                Value::Discriminant(place)
            }

            // Unsupported values.
            Rvalue::ThreadLocalRef(..) | Rvalue::ShallowInitBox(..) => return None,
        };
        debug!(?value);
        Some(self.insert(value))
    }

    fn simplify_discriminant(&mut self, place: VnIndex) -> Option<VnIndex> {
        if let Value::Aggregate(enum_ty, variant, _) = *self.get(place)
            && let AggregateTy::Def(enum_did, enum_args) = enum_ty
            && let DefKind::Enum = self.tcx.def_kind(enum_did)
        {
            let enum_ty = self.tcx.type_of(enum_did).instantiate(self.tcx, enum_args);
            let discr = self.ecx.discriminant_for_variant(enum_ty, variant).discard_err()?;
            return Some(self.insert_scalar(discr.to_scalar(), discr.layout.ty));
        }

        None
    }

    fn try_as_place_elem(
        &mut self,
        proj: ProjectionElem<VnIndex, Ty<'tcx>>,
        loc: Location,
    ) -> Option<PlaceElem<'tcx>> {
        Some(match proj {
            ProjectionElem::Deref => ProjectionElem::Deref,
            ProjectionElem::Field(idx, ty) => ProjectionElem::Field(idx, ty),
            ProjectionElem::Index(idx) => {
                let Some(local) = self.try_as_local(idx, loc) else {
                    return None;
                };
                self.reused_locals.insert(local);
                ProjectionElem::Index(local)
            }
            ProjectionElem::ConstantIndex { offset, min_length, from_end } => {
                ProjectionElem::ConstantIndex { offset, min_length, from_end }
            }
            ProjectionElem::Subslice { from, to, from_end } => {
                ProjectionElem::Subslice { from, to, from_end }
            }
            ProjectionElem::Downcast(symbol, idx) => ProjectionElem::Downcast(symbol, idx),
            ProjectionElem::OpaqueCast(idx) => ProjectionElem::OpaqueCast(idx),
            ProjectionElem::Subtype(idx) => ProjectionElem::Subtype(idx),
            ProjectionElem::UnwrapUnsafeBinder(ty) => ProjectionElem::UnwrapUnsafeBinder(ty),
        })
    }

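    /// If every field of the aggregate is the corresponding field projection of one and the same
    /// value (roughly, building `Foo { a: _y.a, b: _y.b }` from `_y`), replace the whole rvalue by
    /// a copy of that value; the rvalue is only rewritten to `lhs = copy _y` when the types match
    /// and `lhs` is a plain local.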
    fn simplify_aggregate_to_copy(
        &mut self,
        lhs: &Place<'tcx>,
        rvalue: &mut Rvalue<'tcx>,
        location: Location,
        fields: &[VnIndex],
        variant_index: VariantIdx,
    ) -> Option<VnIndex> {
        let Some(&first_field) = fields.first() else {
            return None;
        };
        let Value::Projection(copy_from_value, _) = *self.get(first_field) else {
            return None;
        };
        // All fields must correspond one-to-one and come from the same aggregate value.
        if fields.iter().enumerate().any(|(index, &v)| {
            if let Value::Projection(pointer, ProjectionElem::Field(from_index, _)) = *self.get(v)
                && copy_from_value == pointer
                && from_index.index() == index
            {
                return false;
            }
            true
        }) {
            return None;
        }

        let mut copy_from_local_value = copy_from_value;
        if let Value::Projection(pointer, proj) = *self.get(copy_from_value)
            && let ProjectionElem::Downcast(_, read_variant) = proj
        {
            if variant_index == read_variant {
                // When copying a variant, there is no need to downcast.
                copy_from_local_value = pointer;
            } else {
                // The copied variant must be identical.
                return None;
            }
        }

        // Allow introducing places with non-constant offsets, as those are still better than
        // reconstructing an aggregate.
        if let Some(place) = self.try_as_place(copy_from_local_value, location, true)
            && rvalue.ty(self.local_decls, self.tcx) == place.ty(self.local_decls, self.tcx).ty
        {
            // Avoid creating `*a = copy (*b)`, as they might be aliases resulting in overlapping assignments.
            // FIXME: This also avoids any kind of projection, not just derefs. We can add allowed projections.
            if lhs.as_local().is_some() {
                self.reused_locals.insert(place.local);
                *rvalue = Rvalue::Use(Operand::Copy(place));
            }
            return Some(copy_from_local_value);
        }

        None
    }

    fn simplify_aggregate(
        &mut self,
        lhs: &Place<'tcx>,
        rvalue: &mut Rvalue<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        let Rvalue::Aggregate(box ref kind, ref mut field_ops) = *rvalue else { bug!() };

        let tcx = self.tcx;
        if field_ops.is_empty() {
            let is_zst = match *kind {
                AggregateKind::Array(..)
                | AggregateKind::Tuple
                | AggregateKind::Closure(..)
                | AggregateKind::CoroutineClosure(..) => true,
                // Only enums can be non-ZST.
                AggregateKind::Adt(did, ..) => tcx.def_kind(did) != DefKind::Enum,
                // Coroutines are never ZST, as they at least contain the implicit states.
                AggregateKind::Coroutine(..) => false,
                AggregateKind::RawPtr(..) => bug!("MIR for RawPtr aggregate must have 2 fields"),
            };

            if is_zst {
                let ty = rvalue.ty(self.local_decls, tcx);
                return Some(self.insert_constant(Const::zero_sized(ty)));
            }
        }

        let (mut ty, variant_index) = match *kind {
            AggregateKind::Array(..) => {
                assert!(!field_ops.is_empty());
                (AggregateTy::Array, FIRST_VARIANT)
            }
            AggregateKind::Tuple => {
                assert!(!field_ops.is_empty());
                (AggregateTy::Tuple, FIRST_VARIANT)
            }
            AggregateKind::Closure(did, args)
            | AggregateKind::CoroutineClosure(did, args)
            | AggregateKind::Coroutine(did, args) => (AggregateTy::Def(did, args), FIRST_VARIANT),
            AggregateKind::Adt(did, variant_index, args, _, None) => {
                (AggregateTy::Def(did, args), variant_index)
            }
            // Do not track unions.
            AggregateKind::Adt(_, _, _, _, Some(_)) => return None,
            AggregateKind::RawPtr(pointee_ty, mtbl) => {
                assert_eq!(field_ops.len(), 2);
                let data_pointer_ty = field_ops[FieldIdx::ZERO].ty(self.local_decls, self.tcx);
                let output_pointer_ty = Ty::new_ptr(self.tcx, pointee_ty, mtbl);
                (AggregateTy::RawPtr { data_pointer_ty, output_pointer_ty }, FIRST_VARIANT)
            }
        };

        let mut fields: Vec<_> = field_ops
            .iter_mut()
            .map(|op| self.simplify_operand(op, location).unwrap_or_else(|| self.new_opaque()))
            .collect();

        if let AggregateTy::RawPtr { data_pointer_ty, output_pointer_ty } = &mut ty {
            let mut was_updated = false;

            // Any thin pointer of matching mutability is fine as the data pointer.
            while let Value::Cast {
                kind: CastKind::PtrToPtr,
                value: cast_value,
                from: cast_from,
                to: _,
            } = self.get(fields[0])
                && let ty::RawPtr(from_pointee_ty, from_mtbl) = cast_from.kind()
                && let ty::RawPtr(_, output_mtbl) = output_pointer_ty.kind()
                && from_mtbl == output_mtbl
                && from_pointee_ty.is_sized(self.tcx, self.typing_env())
            {
                fields[0] = *cast_value;
                *data_pointer_ty = *cast_from;
                was_updated = true;
            }

            if was_updated && let Some(op) = self.try_as_operand(fields[0], location) {
                field_ops[FieldIdx::ZERO] = op;
            }
        }

        if let AggregateTy::Array = ty
            && fields.len() > 4
        {
            let first = fields[0];
            if fields.iter().all(|&v| v == first) {
                let len = ty::Const::from_target_usize(self.tcx, fields.len().try_into().unwrap());
                if let Some(op) = self.try_as_operand(first, location) {
                    *rvalue = Rvalue::Repeat(op, len);
                }
                return Some(self.insert(Value::Repeat(first, len)));
            }
        }

        if let AggregateTy::Def(_, _) = ty
            && let Some(value) =
                self.simplify_aggregate_to_copy(lhs, rvalue, location, &fields, variant_index)
        {
            return Some(value);
        }

        Some(self.insert(Value::Aggregate(ty, variant_index, fields)))
    }

    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_unary(
        &mut self,
        op: UnOp,
        arg_op: &mut Operand<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        let mut arg_index = self.simplify_operand(arg_op, location)?;

        // PtrMetadata doesn't care about *const vs *mut vs & vs &mut,
        // so start by removing those distinctions so we can update the `Operand`
        if op == UnOp::PtrMetadata {
            let mut was_updated = false;
            loop {
                match self.get(arg_index) {
                    // Pointer casts that preserve metadata, such as
                    // `*const [i32]` <-> `*mut [i32]` <-> `*mut [f32]`.
                    // It's critical that this not eliminate cases like
                    // `*const [T]` -> `*const T` which remove metadata.
                    // We run on potentially-generic MIR, though, so unlike codegen
                    // we can't always know exactly what the metadata are.
                    // To allow things like `*mut (?A, ?T)` <-> `*mut (?B, ?T)`,
                    // it's fine to get a projection as the type.
                    Value::Cast { kind: CastKind::PtrToPtr, value: inner, from, to }
                        if self.pointers_have_same_metadata(*from, *to) =>
                    {
                        arg_index = *inner;
                        was_updated = true;
                        continue;
                    }

                    // `&mut *p`, `&raw *p`, etc don't change metadata.
                    Value::Address { place, kind: _, provenance: _ }
                        if let PlaceRef { local, projection: [PlaceElem::Deref] } =
                            place.as_ref()
                            && let Some(local_index) = self.locals[local] =>
                    {
                        arg_index = local_index;
                        was_updated = true;
                        continue;
                    }

                    _ => {
                        if was_updated && let Some(op) = self.try_as_operand(arg_index, location) {
                            *arg_op = op;
                        }
                        break;
                    }
                }
            }
        }

        let value = match (op, self.get(arg_index)) {
            (UnOp::Not, Value::UnaryOp(UnOp::Not, inner)) => return Some(*inner),
            (UnOp::Neg, Value::UnaryOp(UnOp::Neg, inner)) => return Some(*inner),
            (UnOp::Not, Value::BinaryOp(BinOp::Eq, lhs, rhs)) => {
                Value::BinaryOp(BinOp::Ne, *lhs, *rhs)
            }
            (UnOp::Not, Value::BinaryOp(BinOp::Ne, lhs, rhs)) => {
                Value::BinaryOp(BinOp::Eq, *lhs, *rhs)
            }
            (UnOp::PtrMetadata, Value::Aggregate(AggregateTy::RawPtr { .. }, _, fields)) => {
                return Some(fields[1]);
            }
            // We have an unsizing cast, which assigns the length to wide pointer metadata.
            (
                UnOp::PtrMetadata,
                Value::Cast {
                    kind: CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _),
                    from,
                    to,
                    ..
                },
            ) if let ty::Slice(..) = to.builtin_deref(true).unwrap().kind()
                && let ty::Array(_, len) = from.builtin_deref(true).unwrap().kind() =>
            {
                return Some(self.insert_constant(Const::Ty(self.tcx.types.usize, *len)));
            }
            _ => Value::UnaryOp(op, arg_index),
        };
        Some(self.insert(value))
    }

    #[instrument(level = "trace", skip(self), ret)]
    fn simplify_binary(
        &mut self,
        op: BinOp,
        lhs_operand: &mut Operand<'tcx>,
        rhs_operand: &mut Operand<'tcx>,
        location: Location,
    ) -> Option<VnIndex> {
        let lhs = self.simplify_operand(lhs_operand, location);
        let rhs = self.simplify_operand(rhs_operand, location);
        // Only short-circuit after we have called `simplify_operand`
        // on both operands, for their side effects.
1209        let mut lhs = lhs?;
1210        let mut rhs = rhs?;
1211
1212        let lhs_ty = lhs_operand.ty(self.local_decls, self.tcx);
1213
1214        // If we're comparing pointers, remove `PtrToPtr` casts if the from
1215        // types of both casts and the metadata all match.
1216        if let BinOp::Eq | BinOp::Ne | BinOp::Lt | BinOp::Le | BinOp::Gt | BinOp::Ge = op
1217            && lhs_ty.is_any_ptr()
1218            && let Value::Cast {
1219                kind: CastKind::PtrToPtr, value: lhs_value, from: lhs_from, ..
1220            } = self.get(lhs)
1221            && let Value::Cast {
1222                kind: CastKind::PtrToPtr, value: rhs_value, from: rhs_from, ..
1223            } = self.get(rhs)
1224            && lhs_from == rhs_from
1225            && self.pointers_have_same_metadata(*lhs_from, lhs_ty)
1226        {
1227            lhs = *lhs_value;
1228            rhs = *rhs_value;
1229            if let Some(lhs_op) = self.try_as_operand(lhs, location)
1230                && let Some(rhs_op) = self.try_as_operand(rhs, location)
1231            {
1232                *lhs_operand = lhs_op;
1233                *rhs_operand = rhs_op;
1234            }
1235        }
1236
1237        if let Some(value) = self.simplify_binary_inner(op, lhs_ty, lhs, rhs) {
1238            return Some(value);
1239        }
1240        let value = Value::BinaryOp(op, lhs, rhs);
1241        Some(self.insert(value))
1242    }
1243
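    /// Try to fold `op(lhs, rhs)` using algebraic identities: neutral and absorbing elements,
    /// `x - x` and `x ^ x`, and `Eq`/`Ne` on identical or known-constant operands. Floats and
    /// other "unreasonable" types are skipped. Returns `None` if no simplification applies.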
1244    fn simplify_binary_inner(
1245        &mut self,
1246        op: BinOp,
1247        lhs_ty: Ty<'tcx>,
1248        lhs: VnIndex,
1249        rhs: VnIndex,
1250    ) -> Option<VnIndex> {
1251        // Floats are weird enough that none of the logic below applies.
1252        let reasonable_ty =
1253            lhs_ty.is_integral() || lhs_ty.is_bool() || lhs_ty.is_char() || lhs_ty.is_any_ptr();
1254        if !reasonable_ty {
1255            return None;
1256        }
1257
1258        let layout = self.ecx.layout_of(lhs_ty).ok()?;
1259
1260        let as_bits = |value: VnIndex| {
1261            let constant = self.evaluated[value].as_ref()?;
1262            if layout.backend_repr.is_scalar() {
1263                let scalar = self.ecx.read_scalar(constant).discard_err()?;
1264                scalar.to_bits(constant.layout.size).discard_err()
1265            } else {
1266                // `constant` is a wide pointer. Do not evaluate to bits.
1267                None
1268            }
1269        };
1270
1271        // Represent the values as `Left(bits)` or `Right(VnIndex)`.
1272        use Either::{Left, Right};
1273        let a = as_bits(lhs).map_or(Right(lhs), Left);
1274        let b = as_bits(rhs).map_or(Right(rhs), Left);
1275
1276        let result = match (op, a, b) {
1277            // Neutral elements.
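            // (e.g. `0 + x`, `x | 0`, `x ^ 0`, `x - 0`, `x << 0`, `1 * x`, and `x / 1`
            // all simplify to `x`.)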
1278            (
1279                BinOp::Add
1280                | BinOp::AddWithOverflow
1281                | BinOp::AddUnchecked
1282                | BinOp::BitOr
1283                | BinOp::BitXor,
1284                Left(0),
1285                Right(p),
1286            )
1287            | (
1288                BinOp::Add
1289                | BinOp::AddWithOverflow
1290                | BinOp::AddUnchecked
1291                | BinOp::BitOr
1292                | BinOp::BitXor
1293                | BinOp::Sub
1294                | BinOp::SubWithOverflow
1295                | BinOp::SubUnchecked
1296                | BinOp::Offset
1297                | BinOp::Shl
1298                | BinOp::Shr,
1299                Right(p),
1300                Left(0),
1301            )
1302            | (BinOp::Mul | BinOp::MulWithOverflow | BinOp::MulUnchecked, Left(1), Right(p))
1303            | (
1304                BinOp::Mul | BinOp::MulWithOverflow | BinOp::MulUnchecked | BinOp::Div,
1305                Right(p),
1306                Left(1),
1307            ) => p,
1308            // Attempt to simplify `x & ALL_ONES` to `x`, with `ALL_ONES` depending on type size.
1309            (BinOp::BitAnd, Right(p), Left(ones)) | (BinOp::BitAnd, Left(ones), Right(p))
1310                if ones == layout.size.truncate(u128::MAX)
1311                    || (layout.ty.is_bool() && ones == 1) =>
1312            {
1313                p
1314            }
1315            // Absorbing elements.
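            // (e.g. `x * 0`, `x & 0`, `x % 1`, `0 / x`, and `0 >> x` all fold to `0`.)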
1316            (
1317                BinOp::Mul | BinOp::MulWithOverflow | BinOp::MulUnchecked | BinOp::BitAnd,
1318                _,
1319                Left(0),
1320            )
1321            | (BinOp::Rem, _, Left(1))
1322            | (
1323                BinOp::Mul
1324                | BinOp::MulWithOverflow
1325                | BinOp::MulUnchecked
1326                | BinOp::Div
1327                | BinOp::Rem
1328                | BinOp::BitAnd
1329                | BinOp::Shl
1330                | BinOp::Shr,
1331                Left(0),
1332                _,
1333            ) => self.insert_scalar(Scalar::from_uint(0u128, layout.size), lhs_ty),
1334            // Attempt to simplify `x | ALL_ONES` to `ALL_ONES`.
1335            (BinOp::BitOr, _, Left(ones)) | (BinOp::BitOr, Left(ones), _)
1336                if ones == layout.size.truncate(u128::MAX)
1337                    || (layout.ty.is_bool() && ones == 1) =>
1338            {
1339                self.insert_scalar(Scalar::from_uint(ones, layout.size), lhs_ty)
1340            }
1341            // Sub/Xor with itself.
1342            (BinOp::Sub | BinOp::SubWithOverflow | BinOp::SubUnchecked | BinOp::BitXor, a, b)
1343                if a == b =>
1344            {
1345                self.insert_scalar(Scalar::from_uint(0u128, layout.size), lhs_ty)
1346            }
1347            // Comparison:
1348            // - if both operands can be computed as bits, just compare the bits;
1349            // - if we proved that both operands have the same value, we can insert true/false;
1350            // - otherwise, do nothing, as we do not try to prove inequality.
1351            (BinOp::Eq, Left(a), Left(b)) => self.insert_bool(a == b),
1352            (BinOp::Eq, a, b) if a == b => self.insert_bool(true),
1353            (BinOp::Ne, Left(a), Left(b)) => self.insert_bool(a != b),
1354            (BinOp::Ne, a, b) if a == b => self.insert_bool(false),
1355            _ => return None,
1356        };
1357
1358        if op.is_overflowing() {
1359            let false_val = self.insert_bool(false);
1360            Some(self.insert_tuple(vec![result, false_val]))
1361        } else {
1362            Some(result)
1363        }
1364    }
1365
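    /// Simplify the cast `initial_operand as to`: peel chains of casts, transmutes and
    /// aggregates where possible, updating the cast kind and operand in place when that helps.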
1366    fn simplify_cast(
1367        &mut self,
1368        initial_kind: &mut CastKind,
1369        initial_operand: &mut Operand<'tcx>,
1370        to: Ty<'tcx>,
1371        location: Location,
1372    ) -> Option<VnIndex> {
1373        use CastKind::*;
1374        use rustc_middle::ty::adjustment::PointerCoercion::*;
1375
1376        let mut from = initial_operand.ty(self.local_decls, self.tcx);
1377        let mut kind = *initial_kind;
1378        let mut value = self.simplify_operand(initial_operand, location)?;
1379        if from == to {
1380            return Some(value);
1381        }
1382
1383        if let CastKind::PointerCoercion(ReifyFnPointer | ClosureFnPointer(_), _) = kind {
1384            // Each reification of a generic fn may get a different pointer.
1385            // Do not try to merge them.
1386            return Some(self.new_opaque());
1387        }
1388
1389        let mut was_ever_updated = false;
1390        loop {
1391            let mut was_updated_this_iteration = false;
1392
1393            // Transmuting between raw pointers is just a pointer cast so long as
1394            // they have the same metadata type (like `*const i32` <=> `*mut u64`
1395            // or `*mut [i32]` <=> `*const [u64]`), including the common special
1396            // case of `*const T` <=> `*mut T`.
1397            if let Transmute = kind
1398                && from.is_raw_ptr()
1399                && to.is_raw_ptr()
1400                && self.pointers_have_same_metadata(from, to)
1401            {
1402                kind = PtrToPtr;
1403                was_updated_this_iteration = true;
1404            }
1405
1406            // If a cast just casts away the metadata again, then we can get it by
1407            // casting the original thin pointer passed to `from_raw_parts`
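            // Illustrative sketch (hypothetical locals, assuming `_1: *const u8`):
            //     _2 = *const [u8] from (_1, _len);
            //     _3 = _2 as *const u8 (PtrToPtr);  // can reuse `_1` directly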
1408            if let PtrToPtr = kind
1409                && let Value::Aggregate(AggregateTy::RawPtr { data_pointer_ty, .. }, _, fields) =
1410                    self.get(value)
1411                && let ty::RawPtr(to_pointee, _) = to.kind()
1412                && to_pointee.is_sized(self.tcx, self.typing_env())
1413            {
1414                from = *data_pointer_ty;
1415                value = fields[0];
1416                was_updated_this_iteration = true;
1417                if *data_pointer_ty == to {
1418                    return Some(fields[0]);
1419                }
1420            }
1421
1422            // Aggregate-then-Transmute can just transmute the original field value,
1423            // so long as all the bytes of the value come from a single field.
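            // For instance, transmuting a value built as `Wrapper(x)`, where `Wrapper` is a
            // `#[repr(transparent)]` newtype, can just transmute `x` itself.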
1424            if let Transmute = kind
1425                && let Value::Aggregate(_aggregate_ty, variant_idx, field_values) = self.get(value)
1426                && let Some((field_idx, field_ty)) =
1427                    self.value_is_all_in_one_field(from, *variant_idx)
1428            {
1429                from = field_ty;
1430                value = field_values[field_idx.as_usize()];
1431                was_updated_this_iteration = true;
1432                if field_ty == to {
1433                    return Some(value);
1434                }
1435            }
1436
1437            // Various cast-then-cast cases can be simplified.
1438            if let Value::Cast {
1439                kind: inner_kind,
1440                value: inner_value,
1441                from: inner_from,
1442                to: inner_to,
1443            } = *self.get(value)
1444            {
1445                let new_kind = match (inner_kind, kind) {
1446                    // Even if there's a narrowing cast in here that's fine, because
1447                    // things like `*mut [i32] -> *mut i32 -> *const i32` and
1448                    // `*mut [i32] -> *const [i32] -> *const i32` can skip the middle in MIR.
1449                    (PtrToPtr, PtrToPtr) => Some(PtrToPtr),
1450                    // PtrToPtr-then-Transmute is fine so long as the pointer cast is identity:
1451                    // `*const T -> *mut T -> NonNull<T>` is fine, but we need to check for narrowing
1452                    // to skip things like `*const [i32] -> *const i32 -> NonNull<T>`.
1453                    (PtrToPtr, Transmute)
1454                        if self.pointers_have_same_metadata(inner_from, inner_to) =>
1455                    {
1456                        Some(Transmute)
1457                    }
1458                    // Similarly, for Transmute-then-PtrToPtr. Note that we need to check different
1459                    // variables for their metadata, and thus this can't merge with the previous arm.
1460                    (Transmute, PtrToPtr) if self.pointers_have_same_metadata(from, to) => {
1461                        Some(Transmute)
1462                    }
1463                    // It would be legal to always do this, but we don't want to hide information
1464                    // from the backend that it'd otherwise be able to use for optimizations.
1465                    (Transmute, Transmute)
1466                        if !self.type_may_have_niche_of_interest_to_backend(inner_to) =>
1467                    {
1468                        Some(Transmute)
1469                    }
1470                    _ => None,
1471                };
1472                if let Some(new_kind) = new_kind {
1473                    kind = new_kind;
1474                    from = inner_from;
1475                    value = inner_value;
1476                    was_updated_this_iteration = true;
1477                    if inner_from == to {
1478                        return Some(inner_value);
1479                    }
1480                }
1481            }
1482
1483            if was_updated_this_iteration {
1484                was_ever_updated = true;
1485            } else {
1486                break;
1487            }
1488        }
1489
1490        if was_ever_updated && let Some(op) = self.try_as_operand(value, location) {
1491            *initial_operand = op;
1492            *initial_kind = kind;
1493        }
1494
1495        Some(self.insert(Value::Cast { kind, value, from, to }))
1496    }
1497
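    /// Simplify `Len(place)`: arrays have a statically known length, and for slices we look
    /// through reborrows and unsizing casts to recover the length stored in the wide pointer
    /// metadata. Falls back to a symbolic `Len` value otherwise.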
1498    fn simplify_len(&mut self, place: &mut Place<'tcx>, location: Location) -> Option<VnIndex> {
1499        // Trivial case: we are fetching a statically known length.
1500        let place_ty = place.ty(self.local_decls, self.tcx).ty;
1501        if let ty::Array(_, len) = place_ty.kind() {
1502            return Some(self.insert_constant(Const::Ty(self.tcx.types.usize, *len)));
1503        }
1504
1505        let mut inner = self.simplify_place_value(place, location)?;
1506
1507        // The length information is stored in the wide pointer.
1508        // Reborrowing copies length information from one pointer to the other.
1509        while let Value::Address { place: borrowed, .. } = self.get(inner)
1510            && let [PlaceElem::Deref] = borrowed.projection[..]
1511            && let Some(borrowed) = self.locals[borrowed.local]
1512        {
1513            inner = borrowed;
1514        }
1515
1516        // We have an unsizing cast, which assigns the length to wide pointer metadata.
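        // Illustrative sketch (hypothetical locals, assuming `_1: &[u8; 3]`):
        //     _2 = _1 as &[u8] (PointerCoercion(Unsize));
        //     _3 = Len((*_2));  // simplified to `const 3_usize`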
1517        if let Value::Cast { kind, from, to, .. } = self.get(inner)
1518            && let CastKind::PointerCoercion(ty::adjustment::PointerCoercion::Unsize, _) = kind
1519            && let Some(from) = from.builtin_deref(true)
1520            && let ty::Array(_, len) = from.kind()
1521            && let Some(to) = to.builtin_deref(true)
1522            && let ty::Slice(..) = to.kind()
1523        {
1524            return Some(self.insert_constant(Const::Ty(self.tcx.types.usize, *len)));
1525        }
1526
1527        // Fallback: a symbolic `Len`.
1528        Some(self.insert(Value::Len(inner)))
1529    }
1530
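    /// Returns whether the two pointer types have the same pointee metadata type
    /// (e.g. both thin, or both slice-like with `usize` metadata), normalizing if needed.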
1531    fn pointers_have_same_metadata(&self, left_ptr_ty: Ty<'tcx>, right_ptr_ty: Ty<'tcx>) -> bool {
1532        let left_meta_ty = left_ptr_ty.pointee_metadata_ty_or_projection(self.tcx);
1533        let right_meta_ty = right_ptr_ty.pointee_metadata_ty_or_projection(self.tcx);
1534        if left_meta_ty == right_meta_ty {
1535            true
1536        } else if let Ok(left) =
1537            self.tcx.try_normalize_erasing_regions(self.typing_env(), left_meta_ty)
1538            && let Ok(right) =
1539                self.tcx.try_normalize_erasing_regions(self.typing_env(), right_meta_ty)
1540        {
1541            left == right
1542        } else {
1543            false
1544        }
1545    }
1546
1547    /// Returns `false` if we know for sure that this type has no interesting niche,
1548    /// and thus we can skip transmuting through it without worrying.
1549    ///
1550    /// The backend will emit `assume`s when transmuting between types with niches,
1551    /// so we want to preserve `i32 -> char -> u32` so that that data is around,
1552    /// but it's fine to skip whole-range-is-value steps like `A -> u32 -> B`.
1553    fn type_may_have_niche_of_interest_to_backend(&self, ty: Ty<'tcx>) -> bool {
1554        let Ok(layout) = self.ecx.layout_of(ty) else {
1555            // If it's too generic or something, then assume it might be interesting later.
1556            return true;
1557        };
1558
1559        if layout.uninhabited {
1560            return true;
1561        }
1562
1563        match layout.backend_repr {
1564            BackendRepr::Scalar(a) => !a.is_always_valid(&self.ecx),
1565            BackendRepr::ScalarPair(a, b) => {
1566                !a.is_always_valid(&self.ecx) || !b.is_always_valid(&self.ecx)
1567            }
1568            BackendRepr::SimdVector { .. } | BackendRepr::Memory { .. } => false,
1569        }
1570    }
1571
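    /// If a value of type `ty` (in the given `variant`) keeps all of its bytes in a single
    /// field, return that field's index and type so a transmute can read the field directly.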
1572    fn value_is_all_in_one_field(
1573        &self,
1574        ty: Ty<'tcx>,
1575        variant: VariantIdx,
1576    ) -> Option<(FieldIdx, Ty<'tcx>)> {
1577        if let Ok(layout) = self.ecx.layout_of(ty)
1578            && let abi::Variants::Single { index } = layout.variants
1579            && index == variant
1580            && let Some((field_idx, field_layout)) = layout.non_1zst_field(&self.ecx)
1581            && layout.size == field_layout.size
1582        {
1583            // We needed to check the variant to avoid trying to read the tag
1584            // field from an enum where no variants have fields, since that tag
1585            // field isn't in the `Aggregate` from which we're getting values.
1586            Some((FieldIdx::from_usize(field_idx), field_layout.ty))
1587        } else if let ty::Adt(adt, args) = ty.kind()
1588            && adt.is_struct()
1589            && adt.repr().transparent()
1590            && let [single_field] = adt.non_enum_variant().fields.raw.as_slice()
1591        {
1592            Some((FieldIdx::ZERO, single_field.ty(self.tcx, args)))
1593        } else {
1594            None
1595        }
1596    }
1597}
1598
1599fn op_to_prop_const<'tcx>(
1600    ecx: &mut InterpCx<'tcx, DummyMachine>,
1601    op: &OpTy<'tcx>,
1602) -> Option<ConstValue<'tcx>> {
1603    // Do not attempt to propagate unsized locals.
1604    if op.layout.is_unsized() {
1605        return None;
1606    }
1607
1608    // This constant is a ZST, just return an empty value.
1609    if op.layout.is_zst() {
1610        return Some(ConstValue::ZeroSized);
1611    }
1612
1613    // Do not synthesize overly large constants. Codegen will just memcpy them, which we'd like
1614    // to avoid.
1615    if !matches!(op.layout.backend_repr, BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..)) {
1616        return None;
1617    }
1618
1619    // If this constant has scalar ABI, return it as a `ConstValue::Scalar`.
1620    if let BackendRepr::Scalar(abi::Scalar::Initialized { .. }) = op.layout.backend_repr
1621        && let Some(scalar) = ecx.read_scalar(op).discard_err()
1622    {
1623        if scalar.try_to_scalar_int().is_err() {
1624            // Check that we do not leak a pointer.
1625            // Those pointers may lose part of their identity in codegen.
1626            // FIXME: remove this hack once https://github.com/rust-lang/rust/issues/79738 is fixed.
1627            return None;
1628        }
1629        return Some(ConstValue::Scalar(scalar));
1630    }
1631
1632    // If this constant is already represented as an `Allocation`,
1633    // try putting it into global memory to return it.
1634    if let Either::Left(mplace) = op.as_mplace_or_imm() {
1635        let (size, _align) = ecx.size_and_align_of_mplace(&mplace).discard_err()??;
1636
1637        // Do not try interning a value that contains provenance.
1638        // Due to https://github.com/rust-lang/rust/issues/79738, doing so could lead to bugs.
1639        // FIXME: remove this hack once that issue is fixed.
1640        let alloc_ref = ecx.get_ptr_alloc(mplace.ptr(), size).discard_err()??;
1641        if alloc_ref.has_provenance() {
1642            return None;
1643        }
1644
1645        let pointer = mplace.ptr().into_pointer_or_addr().ok()?;
1646        let (prov, offset) = pointer.into_parts();
1647        let alloc_id = prov.alloc_id();
1648        intern_const_alloc_for_constprop(ecx, alloc_id).discard_err()?;
1649
1650        // `alloc_id` may point to a static. Codegen will choke on an `Indirect` with anything
1651        // but `GlobalAlloc::Memory`, so fall through to copying if needed.
1652        // FIXME: find a way to treat this more uniformly (probably by fixing codegen)
1653        if let GlobalAlloc::Memory(alloc) = ecx.tcx.global_alloc(alloc_id)
1654            // Transmuting a constant is just an offset in the allocation. If the alignment of the
1655            // allocation is not enough, fall back to copying into a properly aligned value.
1656            && alloc.inner().align >= op.layout.align.abi
1657        {
1658            return Some(ConstValue::Indirect { alloc_id, offset });
1659        }
1660    }
1661
1662    // Everything failed: create a new allocation to hold the data.
1663    let alloc_id =
1664        ecx.intern_with_temp_alloc(op.layout, |ecx, dest| ecx.copy_op(op, dest)).discard_err()?;
1665    let value = ConstValue::Indirect { alloc_id, offset: Size::ZERO };
1666
1667    // Check that we do not leak a pointer.
1668    // Those pointers may lose part of their identity in codegen.
1669    // FIXME: remove this hack once https://github.com/rust-lang/rust/issues/79738 is fixed.
1670    if ecx.tcx.global_alloc(alloc_id).unwrap_memory().inner().provenance().ptrs().is_empty() {
1671        return Some(value);
1672    }
1673
1674    None
1675}
1676
1677impl<'tcx> VnState<'_, 'tcx> {
1678    /// If either [`Self::try_as_constant`] or [`Self::try_as_place`] succeeds,
1679    /// returns that result as an [`Operand`].
1680    fn try_as_operand(&mut self, index: VnIndex, location: Location) -> Option<Operand<'tcx>> {
1681        if let Some(const_) = self.try_as_constant(index) {
1682            Some(Operand::Constant(Box::new(const_)))
1683        } else if let Some(place) = self.try_as_place(index, location, false) {
1684            self.reused_locals.insert(place.local);
1685            Some(Operand::Copy(place))
1686        } else {
1687            None
1688        }
1689    }
1690
1691    /// If the value at `index` is a constant we can put back into the MIR, return that `ConstOperand`.
1692    fn try_as_constant(&mut self, index: VnIndex) -> Option<ConstOperand<'tcx>> {
1693        // This was already constant in MIR, do not change it. If the constant is not
1694        // deterministic, adding an additional mention of it in MIR will not give the same value as
1695        // the former mention.
1696        if let Value::Constant { value, disambiguator: 0 } = *self.get(index) {
1697            debug_assert!(value.is_deterministic());
1698            return Some(ConstOperand { span: DUMMY_SP, user_ty: None, const_: value });
1699        }
1700
1701        let op = self.evaluated[index].as_ref()?;
1702        if op.layout.is_unsized() {
1703            // Do not attempt to propagate unsized locals.
1704            return None;
1705        }
1706
1707        let value = op_to_prop_const(&mut self.ecx, op)?;
1708
1709        // Check that we do not leak a pointer.
1710        // Those pointers may lose part of their identity in codegen.
1711        // FIXME: remove this hack once https://github.com/rust-lang/rust/issues/79738 is fixed.
1712        assert!(!value.may_have_provenance(self.tcx, op.layout.size));
1713
1714        let const_ = Const::Val(value, op.layout.ty);
1715        Some(ConstOperand { span: DUMMY_SP, user_ty: None, const_ })
1716    }
1717
1718    /// Construct a place which holds the same value as `index` and for which all locals strictly
1719    /// dominate `loc`. If you use this place, add its base local to `reused_locals` so its
1720    /// storage statements can be removed.
1721    #[instrument(level = "trace", skip(self), ret)]
1722    fn try_as_place(
1723        &mut self,
1724        mut index: VnIndex,
1725        loc: Location,
1726        allow_complex_projection: bool,
1727    ) -> Option<Place<'tcx>> {
1728        let mut projection = SmallVec::<[PlaceElem<'tcx>; 1]>::new();
1729        loop {
1730            if let Some(local) = self.try_as_local(index, loc) {
1731                projection.reverse();
1732                let place =
1733                    Place { local, projection: self.tcx.mk_place_elems(projection.as_slice()) };
1734                return Some(place);
1735            } else if let Value::Projection(pointer, proj) = *self.get(index)
1736                && (allow_complex_projection || proj.is_stable_offset())
1737                && let Some(proj) = self.try_as_place_elem(proj, loc)
1738            {
1739                projection.push(proj);
1740                index = pointer;
1741            } else {
1742                return None;
1743            }
1744        }
1745    }
1746
1747    /// If there is a local which is assigned `index`, and its assignment strictly dominates `loc`,
1748    /// return it. If you use this local, add it to `reused_locals` so its storage statements can be removed.
1749    fn try_as_local(&mut self, index: VnIndex, loc: Location) -> Option<Local> {
1750        let other = self.rev_locals.get(index)?;
1751        other
1752            .iter()
1753            .find(|&&other| self.ssa.assignment_dominates(&self.dominators, other, loc))
1754            .copied()
1755    }
1756}
1757
1758impl<'tcx> MutVisitor<'tcx> for VnState<'_, 'tcx> {
1759    fn tcx(&self) -> TyCtxt<'tcx> {
1760        self.tcx
1761    }
1762
1763    fn visit_place(&mut self, place: &mut Place<'tcx>, context: PlaceContext, location: Location) {
1764        self.simplify_place_projection(place, location);
1765        if context.is_mutating_use() && !place.projection.is_empty() {
1766            // A non-local mutation may invalidate cached derefs.
1767            self.invalidate_derefs();
1768        }
1769        self.super_place(place, context, location);
1770    }
1771
1772    fn visit_operand(&mut self, operand: &mut Operand<'tcx>, location: Location) {
1773        self.simplify_operand(operand, location);
1774        self.super_operand(operand, location);
1775    }
1776
1777    fn visit_statement(&mut self, stmt: &mut Statement<'tcx>, location: Location) {
1778        if let StatementKind::Assign(box (ref mut lhs, ref mut rvalue)) = stmt.kind {
1779            self.simplify_place_projection(lhs, location);
1780
1781            let value = self.simplify_rvalue(lhs, rvalue, location);
1782            let value = if let Some(local) = lhs.as_local()
1783                && self.ssa.is_ssa(local)
1784                // FIXME(#112651) `rvalue`'s type may be a subtype of `local`'s type. We can only
1785                // mark `local` as reusable if the types match exactly.
1786                && self.local_decls[local].ty == rvalue.ty(self.local_decls, self.tcx)
1787            {
1788                let value = value.unwrap_or_else(|| self.new_opaque());
1789                self.assign(local, value);
1790                Some(value)
1791            } else {
1792                value
1793            };
1794            if let Some(value) = value {
1795                if let Some(const_) = self.try_as_constant(value) {
1796                    *rvalue = Rvalue::Use(Operand::Constant(Box::new(const_)));
1797                } else if let Some(place) = self.try_as_place(value, location, false)
1798                    && *rvalue != Rvalue::Use(Operand::Move(place))
1799                    && *rvalue != Rvalue::Use(Operand::Copy(place))
1800                {
1801                    *rvalue = Rvalue::Use(Operand::Copy(place));
1802                    self.reused_locals.insert(place.local);
1803                }
1804            }
1805        }
1806        self.super_statement(stmt, location);
1807    }
1808
1809    fn visit_terminator(&mut self, terminator: &mut Terminator<'tcx>, location: Location) {
1810        if let Terminator { kind: TerminatorKind::Call { destination, .. }, .. } = terminator {
1811            if let Some(local) = destination.as_local()
1812                && self.ssa.is_ssa(local)
1813            {
1814                let opaque = self.new_opaque();
1815                self.assign(local, opaque);
1816            }
1817        }
1818        // Function calls and ASM may invalidate (nested) derefs. We must handle them carefully.
1819        // For now, we only preserve derefs across trivial terminators like SwitchInt and Goto.
1820        let safe_to_preserve_derefs = matches!(
1821            terminator.kind,
1822            TerminatorKind::SwitchInt { .. } | TerminatorKind::Goto { .. }
1823        );
1824        if !safe_to_preserve_derefs {
1825            self.invalidate_derefs();
1826        }
1827        self.super_terminator(terminator, location);
1828    }
1829}
1830
1831struct StorageRemover<'tcx> {
1832    tcx: TyCtxt<'tcx>,
1833    reused_locals: DenseBitSet<Local>,
1834}
1835
1836impl<'tcx> MutVisitor<'tcx> for StorageRemover<'tcx> {
1837    fn tcx(&self) -> TyCtxt<'tcx> {
1838        self.tcx
1839    }
1840
1841    fn visit_operand(&mut self, operand: &mut Operand<'tcx>, _: Location) {
1842        if let Operand::Move(place) = *operand
1843            && !place.is_indirect_first_projection()
1844            && self.reused_locals.contains(place.local)
1845        {
1846            *operand = Operand::Copy(place);
1847        }
1848    }
1849
1850    fn visit_statement(&mut self, stmt: &mut Statement<'tcx>, loc: Location) {
1851        match stmt.kind {
1852            // When removing storage statements, we need to remove both (#107511).
1853            StatementKind::StorageLive(l) | StatementKind::StorageDead(l)
1854                if self.reused_locals.contains(l) =>
1855            {
1856                stmt.make_nop()
1857            }
1858            _ => self.super_statement(stmt, loc),
1859        }
1860    }
1861}