// rustc_const_eval/interpret/validity.rs

//! Check the validity invariant of a given value, and tell the user
//! where in the value it got violated.
//! In const context, this goes even further and tries to approximate const safety.
//! That's useful because it means other passes (e.g. promotion) can rely on `const`s
//! to be const-safe.

use std::borrow::Cow;
use std::fmt::Write;
use std::hash::Hash;
use std::num::NonZero;

use either::{Left, Right};
use hir::def::DefKind;
use rustc_abi::{
    BackendRepr, FieldIdx, FieldsShape, Scalar as ScalarAbi, Size, VariantIdx, Variants,
    WrappingRange,
};
use rustc_ast::Mutability;
use rustc_data_structures::fx::FxHashSet;
use rustc_hir as hir;
use rustc_middle::bug;
use rustc_middle::mir::interpret::ValidationErrorKind::{self, *};
use rustc_middle::mir::interpret::{
    ExpectedKind, InterpErrorKind, InvalidMetaKind, Misalignment, PointerKind, Provenance,
    UnsupportedOpInfo, ValidationErrorInfo, alloc_range, interp_ok,
};
use rustc_middle::ty::layout::{LayoutCx, TyAndLayout};
use rustc_middle::ty::{self, Ty};
use rustc_span::{Symbol, sym};
use tracing::trace;

use super::machine::AllocMap;
use super::{
    AllocId, CheckInAllocMsg, GlobalAlloc, ImmTy, Immediate, InterpCx, InterpResult, MPlaceTy,
    Machine, MemPlaceMeta, PlaceTy, Pointer, Projectable, Scalar, ValueVisitor, err_ub,
    format_interp_error,
};
use crate::enter_trace_span;

// for the validation errors
#[rustfmt::skip]
use super::InterpErrorKind::UndefinedBehavior as Ub;
use super::InterpErrorKind::Unsupported as Unsup;
use super::UndefinedBehaviorInfo::*;
use super::UnsupportedOpInfo::*;

macro_rules! err_validation_failure {
    ($where:expr, $kind: expr) => {{
        let where_ = &$where;
        let path = if !where_.is_empty() {
            let mut path = String::new();
            write_path(&mut path, where_);
            Some(path)
        } else {
            None
        };

        err_ub!(ValidationError(ValidationErrorInfo { path, kind: $kind }))
    }};
}

macro_rules! throw_validation_failure {
    ($where:expr, $kind: expr) => {
        do yeet err_validation_failure!($where, $kind)
    };
}

/// If $e throws an error matching the pattern, throw a validation failure.
/// Other errors are passed back to the caller, unchanged -- and if they reach the root of
/// the visitor, we make sure only validation errors and `InvalidProgram` errors are left.
/// This lets you use the patterns as a kind of validation list, asserting which errors
/// can possibly happen:
///
/// ```ignore(illustrative)
/// let v = try_validation!(some_fn(), some_path, {
///     Foo | Bar | Baz => { "some failure" },
/// });
/// ```
///
/// The patterns must be of type `UndefinedBehaviorInfo`.
/// An additional expected parameter can also be added to the failure message:
///
/// ```ignore(illustrative)
/// let v = try_validation!(some_fn(), some_path, {
///     Foo | Bar | Baz => { "some failure" } expected { "something that wasn't a failure" },
/// });
/// ```
///
/// An additional nicety is that both parameters actually take format args, so you can just write
/// the format string in directly:
///
/// ```ignore(illustrative)
/// let v = try_validation!(some_fn(), some_path, {
///     Foo | Bar | Baz => { "{:?}", some_failure } expected { "{}", expected_value },
/// });
/// ```
///
macro_rules! try_validation {
    ($e:expr, $where:expr,
    $( $( $p:pat_param )|+ => $kind: expr ),+ $(,)?
    ) => {{
        $e.map_err_kind(|e| {
            // We catch the error and turn it into a validation failure. We are okay with
            // allocation here as this can only slow down builds that fail anyway.
            match e {
                $(
                    $($p)|+ => {
                        err_validation_failure!(
                            $where,
                            $kind
                        )
                    }
                ),+,
                e => e,
            }
        })?
    }};
}

/// We want to show a nice path to the invalid field for diagnostics,
/// but avoid string operations in the happy case where no error happens.
/// So we track a `Vec<PathElem>` where `PathElem` contains all the data we
/// need to later print something for the user.
#[derive(Copy, Clone, Debug)]
pub enum PathElem {
    Field(Symbol),
    Variant(Symbol),
    CoroutineState(VariantIdx),
    CapturedVar(Symbol),
    ArrayElem(usize),
    TupleElem(usize),
    Deref,
    EnumTag,
    CoroutineTag,
    DynDowncast,
    Vtable,
}

/// Extra things to check for during validation of CTFE results.
#[derive(Copy, Clone)]
pub enum CtfeValidationMode {
    /// Validation of a `static`
    Static { mutbl: Mutability },
    /// Validation of a promoted.
    Promoted,
    /// Validation of a `const`.
    /// `allow_immutable_unsafe_cell` says whether we allow `UnsafeCell` in immutable memory (which is the
    /// case for the top-level allocation of a `const`, where this is fine because the allocation will be
    /// copied at each use site).
    Const { allow_immutable_unsafe_cell: bool },
}

impl CtfeValidationMode {
    fn allow_immutable_unsafe_cell(self) -> bool {
        match self {
            CtfeValidationMode::Static { .. } => false,
            CtfeValidationMode::Promoted { .. } => false,
            CtfeValidationMode::Const { allow_immutable_unsafe_cell, .. } => {
                allow_immutable_unsafe_cell
            }
        }
    }
}

/// State for tracking recursive validation of references
pub struct RefTracking<T, PATH = ()> {
    seen: FxHashSet<T>,
    todo: Vec<(T, PATH)>,
}

impl<T: Clone + Eq + Hash + std::fmt::Debug, PATH: Default> RefTracking<T, PATH> {
    pub fn empty() -> Self {
        RefTracking { seen: FxHashSet::default(), todo: vec![] }
    }
    pub fn new(val: T) -> Self {
        let mut ref_tracking_for_consts =
            RefTracking { seen: FxHashSet::default(), todo: vec![(val.clone(), PATH::default())] };
        ref_tracking_for_consts.seen.insert(val);
        ref_tracking_for_consts
    }
    pub fn next(&mut self) -> Option<(T, PATH)> {
        self.todo.pop()
    }

    fn track(&mut self, val: T, path: impl FnOnce() -> PATH) {
        if self.seen.insert(val.clone()) {
            trace!("Recursing below ptr {:#?}", val);
            let path = path();
            // Remember to come back to this later.
            self.todo.push((val, path));
        }
    }
}
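
// A minimal sketch of the worklist pattern this enables (illustrative, not
// part of the compiler): validation seeds the tracker with a root place and
// drains `todo`; `track` deduplicates via `seen`, so cyclic reference graphs
// still terminate.
//
//     let mut ref_tracking = RefTracking::new(root_place);
//     while let Some((place, path)) = ref_tracking.next() {
//         // ... validate `place`, calling `track` for each reference found ...
//     }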

// FIXME make this translatable as well?
/// Format a path
fn write_path(out: &mut String, path: &[PathElem]) {
    use self::PathElem::*;

    for elem in path.iter() {
        match elem {
            Field(name) => write!(out, ".{name}"),
            EnumTag => write!(out, ".<enum-tag>"),
            Variant(name) => write!(out, ".<enum-variant({name})>"),
            CoroutineTag => write!(out, ".<coroutine-tag>"),
            CoroutineState(idx) => write!(out, ".<coroutine-state({})>", idx.index()),
            CapturedVar(name) => write!(out, ".<captured-var({name})>"),
            TupleElem(idx) => write!(out, ".{idx}"),
            ArrayElem(idx) => write!(out, "[{idx}]"),
            // `.<deref>` does not match Rust syntax, but it is more readable for long paths -- and
            // some of the other items here also are not Rust syntax. Actually we can't
            // even use the usual syntax because we are just showing the projections,
            // not the root.
            Deref => write!(out, ".<deref>"),
            DynDowncast => write!(out, ".<dyn-downcast>"),
            Vtable => write!(out, ".<vtable>"),
        }
        .unwrap()
    }
}
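
// For example, the path `[Field(f), ArrayElem(0), Deref]` renders as
// `.f[0].<deref>`: the projections from the root value, without the root
// itself.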

/// Represents a set of `Size` values as a sorted list of ranges.
// These are (offset, length) pairs, and they are sorted and mutually disjoint,
// and never adjacent (i.e. there's always a gap between two of them).
#[derive(Debug, Clone)]
pub struct RangeSet(Vec<(Size, Size)>);

impl RangeSet {
    fn add_range(&mut self, offset: Size, size: Size) {
        if size.bytes() == 0 {
            // No need to track empty ranges.
            return;
        }
        let v = &mut self.0;
        // We scan for a partition point where the left partition is all the elements that end
        // strictly before we start. Those are elements that are too "low" to merge with us.
        let idx =
            v.partition_point(|&(other_offset, other_size)| other_offset + other_size < offset);
        // Now we want to either merge with the first element of the second partition, or insert ourselves before that.
        if let Some(&(other_offset, other_size)) = v.get(idx)
            && offset + size >= other_offset
        {
            // Their end is >= our start (otherwise it would not be in the 2nd partition) and
            // our end is >= their start. This means we can merge the ranges.
            let new_start = other_offset.min(offset);
            let mut new_end = (other_offset + other_size).max(offset + size);
            // We grew to the right, so merge with overlapping/adjacent elements.
            // (We also may have grown to the left, but that can never make us adjacent with
            // anything there since we selected the first such candidate via `partition_point`.)
            let mut scan_right = 1;
            while let Some(&(next_offset, next_size)) = v.get(idx + scan_right)
                && new_end >= next_offset
            {
                // Increase our size to absorb the next element.
                new_end = new_end.max(next_offset + next_size);
                // Look at the next element.
                scan_right += 1;
            }
            // Update the element we grew.
            v[idx] = (new_start, new_end - new_start);
            // Remove the elements we absorbed (if any).
            if scan_right > 1 {
                drop(v.drain((idx + 1)..(idx + scan_right)));
            }
        } else {
            // Insert new element.
            v.insert(idx, (offset, size));
        }
    }
}
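
// A minimal sketch of the merging behavior of `add_range` (illustrative only;
// offsets and sizes are in bytes, built with `Size::from_bytes`):
//
//     let mut set = RangeSet(Vec::new());
//     set.add_range(Size::from_bytes(0), Size::from_bytes(4));  // {(0, 4)}
//     set.add_range(Size::from_bytes(4), Size::from_bytes(4));  // touches the end: merged into {(0, 8)}
//     set.add_range(Size::from_bytes(10), Size::from_bytes(2)); // gap remains: {(0, 8), (10, 2)}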

struct ValidityVisitor<'rt, 'tcx, M: Machine<'tcx>> {
    /// The `path` may be pushed to, but the part that is present when a function
    /// starts must not be changed! `visit_fields` and `visit_array` rely on
    /// this stack discipline.
    path: Vec<PathElem>,
    ref_tracking: Option<&'rt mut RefTracking<MPlaceTy<'tcx, M::Provenance>, Vec<PathElem>>>,
    /// `None` indicates this is not validating for CTFE (but for runtime).
    ctfe_mode: Option<CtfeValidationMode>,
    ecx: &'rt mut InterpCx<'tcx, M>,
    /// Whether provenance should be reset outside of pointers (emulating the effect of a typed
    /// copy).
    reset_provenance_and_padding: bool,
    /// This tracks which byte ranges in this value contain data; the remaining bytes are padding.
    /// The ideal representation here would be pointer-length pairs, but to keep things more compact
    /// we only store a (range) set of offsets -- the base pointer is the same throughout the entire
    /// visit, after all.
    /// If this is `Some`, then `reset_provenance_and_padding` must be true (but not vice versa:
    /// we might not track data vs padding bytes if the operand isn't stored in memory anyway).
    data_bytes: Option<RangeSet>,
}

impl<'rt, 'tcx, M: Machine<'tcx>> ValidityVisitor<'rt, 'tcx, M> {
    fn aggregate_field_path_elem(&mut self, layout: TyAndLayout<'tcx>, field: usize) -> PathElem {
        // First, check if we are projecting to a variant.
        match layout.variants {
            Variants::Multiple { tag_field, .. } => {
                if tag_field.as_usize() == field {
                    return match layout.ty.kind() {
                        ty::Adt(def, ..) if def.is_enum() => PathElem::EnumTag,
                        ty::Coroutine(..) => PathElem::CoroutineTag,
                        _ => bug!("non-variant type {:?}", layout.ty),
                    };
                }
            }
            Variants::Single { .. } | Variants::Empty => {}
        }

        // Now we know we are projecting to a field, so figure out which one.
        match layout.ty.kind() {
            // coroutines, closures, and coroutine-closures all have upvars that may be named.
            ty::Closure(def_id, _) | ty::Coroutine(def_id, _) | ty::CoroutineClosure(def_id, _) => {
                let mut name = None;
                // FIXME this should be more descriptive i.e. CapturePlace instead of CapturedVar
                // https://github.com/rust-lang/project-rfc-2229/issues/46
                if let Some(local_def_id) = def_id.as_local() {
                    let captures = self.ecx.tcx.closure_captures(local_def_id);
                    if let Some(captured_place) = captures.get(field) {
                        // Sometimes the index is beyond the number of upvars (seen
                        // for a coroutine).
                        let var_hir_id = captured_place.get_root_variable();
                        let node = self.ecx.tcx.hir_node(var_hir_id);
                        if let hir::Node::Pat(pat) = node
                            && let hir::PatKind::Binding(_, _, ident, _) = pat.kind
                        {
                            name = Some(ident.name);
                        }
                    }
                }

                PathElem::CapturedVar(name.unwrap_or_else(|| {
                    // Fall back to showing the field index.
                    sym::integer(field)
                }))
            }

            // tuples
            ty::Tuple(_) => PathElem::TupleElem(field),

            // enums
            ty::Adt(def, ..) if def.is_enum() => {
                // we might be projecting *to* a variant, or to a field *in* a variant.
                match layout.variants {
                    Variants::Single { index } => {
                        // Inside a variant
                        PathElem::Field(def.variant(index).fields[FieldIdx::from_usize(field)].name)
                    }
                    Variants::Empty => panic!("there is no field in Variants::Empty types"),
                    Variants::Multiple { .. } => bug!("we handled variants above"),
                }
            }

            // other ADTs
            ty::Adt(def, _) => {
                PathElem::Field(def.non_enum_variant().fields[FieldIdx::from_usize(field)].name)
            }

            // arrays/slices
            ty::Array(..) | ty::Slice(..) => PathElem::ArrayElem(field),

            // dyn traits
            ty::Dynamic(..) => {
                assert_eq!(field, 0);
                PathElem::DynDowncast
            }

            // nothing else has an aggregate layout
            _ => bug!("aggregate_field_path_elem: got non-aggregate type {:?}", layout.ty),
        }
    }

    fn with_elem<R>(
        &mut self,
        elem: PathElem,
        f: impl FnOnce(&mut Self) -> InterpResult<'tcx, R>,
    ) -> InterpResult<'tcx, R> {
        // Remember the old state
        let path_len = self.path.len();
        // Record new element
        self.path.push(elem);
        // Perform operation
        let r = f(self)?;
        // Undo changes
        self.path.truncate(path_len);
        // Done
        interp_ok(r)
    }

    fn read_immediate(
        &self,
        val: &PlaceTy<'tcx, M::Provenance>,
        expected: ExpectedKind,
    ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
        interp_ok(try_validation!(
            self.ecx.read_immediate(val),
            self.path,
            Ub(InvalidUninitBytes(_)) =>
                Uninit { expected },
            // The `Unsup` cases can only occur during CTFE
            Unsup(ReadPointerAsInt(_)) =>
                PointerAsInt { expected },
            Unsup(ReadPartialPointer(_)) =>
                PartialPointer,
        ))
    }

    fn read_scalar(
        &self,
        val: &PlaceTy<'tcx, M::Provenance>,
        expected: ExpectedKind,
    ) -> InterpResult<'tcx, Scalar<M::Provenance>> {
        interp_ok(self.read_immediate(val, expected)?.to_scalar())
    }

    fn deref_pointer(
        &mut self,
        val: &PlaceTy<'tcx, M::Provenance>,
        expected: ExpectedKind,
    ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::Provenance>> {
        // Not using `ecx.deref_pointer` since we want to use our `read_immediate` wrapper.
        let imm = self.read_immediate(val, expected)?;
        // Reset provenance: ensure slice tail metadata does not preserve provenance,
        // and ensure all pointers do not preserve partial provenance.
        if self.reset_provenance_and_padding {
            if matches!(imm.layout.backend_repr, BackendRepr::Scalar(..)) {
                // A thin pointer. If it has provenance, we don't have to do anything.
                // If it does not, ensure we clear the provenance in memory.
                if matches!(imm.to_scalar(), Scalar::Int(..)) {
                    self.ecx.clear_provenance(val)?;
                }
            } else {
                // A wide pointer. This means we have to worry both about the pointer itself and the
                // metadata. We do the lazy thing and just write back the value we got. Just
                // clearing provenance in a targeted manner would be more efficient, but unless this
                // is a perf hotspot it's just not worth the effort.
                self.ecx.write_immediate_no_validate(*imm, val)?;
            }
            // The entire thing is data, not padding.
            self.add_data_range_place(val);
        }
        // Now turn it into a place.
        self.ecx.ref_to_mplace(&imm)
    }

    fn check_wide_ptr_meta(
        &mut self,
        meta: MemPlaceMeta<M::Provenance>,
        pointee: TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx> {
        let tail = self.ecx.tcx.struct_tail_for_codegen(pointee.ty, self.ecx.typing_env);
        match tail.kind() {
            ty::Dynamic(data, _, ty::Dyn) => {
                let vtable = meta.unwrap_meta().to_pointer(self.ecx)?;
                // Make sure it is a genuine vtable pointer for the right trait.
                try_validation!(
                    self.ecx.get_ptr_vtable_ty(vtable, Some(data)),
                    self.path,
                    Ub(DanglingIntPointer{ .. } | InvalidVTablePointer(..)) =>
                        InvalidVTablePtr { value: format!("{vtable}") },
                    Ub(InvalidVTableTrait { vtable_dyn_type, expected_dyn_type }) => {
                        InvalidMetaWrongTrait { vtable_dyn_type, expected_dyn_type }
                    },
                );
            }
            ty::Slice(..) | ty::Str => {
                let _len = meta.unwrap_meta().to_target_usize(self.ecx)?;
                // We do not check that `len * elem_size <= isize::MAX`:
                // that is only required for references, and there it falls out of the
                // "dereferenceable" check performed by Stacked Borrows.
            }
            ty::Foreign(..) => {
                // Unsized, but not wide.
            }
            _ => bug!("Unexpected unsized type tail: {:?}", tail),
        }

        interp_ok(())
    }

    /// Check a reference or `Box`.
    fn check_safe_pointer(
        &mut self,
        value: &PlaceTy<'tcx, M::Provenance>,
        ptr_kind: PointerKind,
    ) -> InterpResult<'tcx> {
        let place = self.deref_pointer(value, ptr_kind.into())?;
        // Handle wide pointers.
        // Check metadata early, for better diagnostics
        if place.layout.is_unsized() {
            self.check_wide_ptr_meta(place.meta(), place.layout)?;
        }
        // Make sure this is dereferenceable and all.
        let size_and_align = try_validation!(
            self.ecx.size_and_align_of_val(&place),
            self.path,
            Ub(InvalidMeta(msg)) => match msg {
                InvalidMetaKind::SliceTooBig => InvalidMetaSliceTooLarge { ptr_kind },
                InvalidMetaKind::TooBig => InvalidMetaTooLarge { ptr_kind },
            }
        );
        let (size, align) = size_and_align
            // for the purpose of validity, consider foreign types to have
            // alignment and size determined by the layout (size will be 0,
            // alignment should take attributes into account).
            .unwrap_or_else(|| (place.layout.size, place.layout.align.abi));
        // Direct call to `check_ptr_access_align` checks alignment even on CTFE machines.
        try_validation!(
            self.ecx.check_ptr_access(
                place.ptr(),
                size,
                CheckInAllocMsg::Dereferenceable, // will anyway be replaced by validity message
            ),
            self.path,
            Ub(DanglingIntPointer { addr: 0, .. }) => NullPtr { ptr_kind },
            Ub(DanglingIntPointer { addr: i, .. }) => DanglingPtrNoProvenance {
                ptr_kind,
                // FIXME: this says "null pointer" when null, but we need to translate it
                pointer: format!("{}", Pointer::<Option<AllocId>>::without_provenance(i))
            },
            Ub(PointerOutOfBounds { .. }) => DanglingPtrOutOfBounds {
                ptr_kind
            },
            Ub(PointerUseAfterFree(..)) => DanglingPtrUseAfterFree {
                ptr_kind,
            },
        );
        try_validation!(
            self.ecx.check_ptr_align(
                place.ptr(),
                align,
            ),
            self.path,
            Ub(AlignmentCheckFailed(Misalignment { required, has }, _msg)) => UnalignedPtr {
                ptr_kind,
                required_bytes: required.bytes(),
                found_bytes: has.bytes()
            },
        );
        // Make sure this is non-null. We checked dereferenceability above, but if `size` is zero
        // that does not imply non-null.
        if self.ecx.scalar_may_be_null(Scalar::from_maybe_pointer(place.ptr(), self.ecx))? {
            throw_validation_failure!(self.path, NullPtr { ptr_kind })
        }
        // Do not allow references to uninhabited types.
        if place.layout.is_uninhabited() {
            let ty = place.layout.ty;
            throw_validation_failure!(self.path, PtrToUninhabited { ptr_kind, ty })
        }
        // Recursive checking
        if let Some(ref_tracking) = self.ref_tracking.as_deref_mut() {
            // Proceed recursively even for ZST, no reason to skip them!
            // `!` is a ZST and we want to validate it.
            if let Some(ctfe_mode) = self.ctfe_mode {
                let mut skip_recursive_check = false;
                // CTFE imposes restrictions on what references can point to.
                if let Ok((alloc_id, _offset, _prov)) =
                    self.ecx.ptr_try_get_alloc_id(place.ptr(), 0)
                {
                    // Everything should be already interned.
                    let Some(global_alloc) = self.ecx.tcx.try_get_global_alloc(alloc_id) else {
                        if self.ecx.memory.alloc_map.contains_key(&alloc_id) {
                            // This can happen when interning didn't complete due to, e.g.
                            // missing `make_global`. This must mean other errors are already
                            // being reported.
                            self.ecx.tcx.dcx().delayed_bug(
                                "interning did not complete, there should be an error",
                            );
                            return interp_ok(());
                        }
                        // We can't have *any* references to non-existing allocations in const-eval
                        // as the rest of rustc isn't happy with them... so we throw an error, even
                        // though for zero-sized references this isn't really UB.
                        // A potential future alternative would be to resurrect this as a zero-sized allocation
                        // (which codegen will then compile to an aligned dummy pointer anyway).
                        throw_validation_failure!(self.path, DanglingPtrUseAfterFree { ptr_kind });
                    };
                    let (size, _align) =
                        global_alloc.size_and_align(*self.ecx.tcx, self.ecx.typing_env);
                    let alloc_actual_mutbl =
                        global_alloc.mutability(*self.ecx.tcx, self.ecx.typing_env);

                    match global_alloc {
                        GlobalAlloc::Static(did) => {
                            let DefKind::Static { nested, .. } = self.ecx.tcx.def_kind(did) else {
                                bug!()
                            };
                            assert!(!self.ecx.tcx.is_thread_local_static(did));
                            assert!(self.ecx.tcx.is_static(did));
                            match ctfe_mode {
                                CtfeValidationMode::Static { .. }
                                | CtfeValidationMode::Promoted { .. } => {
                                    // We skip recursively checking other statics. These statics must be sound by
                                    // themselves, and the only way to get broken statics here is by using
                                    // unsafe code.
                                    // The reason we don't check other statics is twofold. For one, in all
                                    // sound cases, the static was already validated on its own, and second, we
                                    // trigger cycle errors if we try to compute the value of the other static
                                    // and that static refers back to us (potentially through a promoted).
                                    // This could miss some UB, but that's fine.
                                    // We still walk nested allocations, as they are fundamentally part of this validation run.
                                    // This means we will also recurse into nested statics of *other*
                                    // statics, even though we do not recurse into other statics directly.
                                    // That's somewhat inconsistent but harmless.
                                    skip_recursive_check = !nested;
                                }
                                CtfeValidationMode::Const { .. } => {
                                    // If this is mutable memory or an `extern static`, there's no point in checking it -- we'd
                                    // just get errors trying to read the value.
                                    if alloc_actual_mutbl.is_mut()
                                        || self.ecx.tcx.is_foreign_item(did)
                                    {
                                        skip_recursive_check = true;
                                    }
                                }
                            }
                        }
                        _ => (),
                    }

                    // If this allocation has size zero, there is no actual mutability here.
                    if size != Size::ZERO {
                        // Determine whether this pointer expects to be pointing to something mutable.
                        let ptr_expected_mutbl = match ptr_kind {
                            PointerKind::Box => Mutability::Mut,
                            PointerKind::Ref(mutbl) => {
                                // We do not take into account interior mutability here since we cannot know if
                                // there really is an `UnsafeCell` inside `Option<UnsafeCell>` -- so we check
                                // that in the recursive descent behind this reference (controlled by
                                // `allow_immutable_unsafe_cell`).
                                mutbl
                            }
                        };
                        // Mutable pointer to immutable memory is no good.
                        if ptr_expected_mutbl == Mutability::Mut
                            && alloc_actual_mutbl == Mutability::Not
                        {
                            // This can actually occur with transmutes.
                            throw_validation_failure!(self.path, MutableRefToImmutable);
                        }
                        // In a const, any kind of mutable reference is not good.
                        if matches!(self.ctfe_mode, Some(CtfeValidationMode::Const { .. })) {
                            if ptr_expected_mutbl == Mutability::Mut {
                                throw_validation_failure!(self.path, MutableRefInConst);
                            }
                        }
                    }
                }
                // Potentially skip recursive check.
                if skip_recursive_check {
                    return interp_ok(());
                }
            } else {
                // This is not CTFE, so it's Miri with recursive checking.
                // FIXME: we do *not* check behind boxes, since creating a new box first creates it uninitialized
                // and then puts the value in there, so briefly we have a box with uninit contents.
                // FIXME: should we also skip `UnsafeCell` behind shared references? Currently that is not
                // needed since validation reads bypass Stacked Borrows and data race checks.
                if matches!(ptr_kind, PointerKind::Box) {
                    return interp_ok(());
                }
            }
            let path = &self.path;
            ref_tracking.track(place, || {
                // We need to clone the path anyway, make sure it gets created
                // with enough space for the additional `Deref`.
                let mut new_path = Vec::with_capacity(path.len() + 1);
                new_path.extend(path);
                new_path.push(PathElem::Deref);
                new_path
            });
        }
        interp_ok(())
    }

    /// Check if this is a value of primitive type, and if yes check the validity of the value
    /// at that type. Return `true` if the type is indeed primitive.
    ///
    /// Note that not all of these have `FieldsShape::Primitive`, e.g. wide references.
    fn try_visit_primitive(
        &mut self,
        value: &PlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx, bool> {
        // Go over all the primitive types
        let ty = value.layout.ty;
        match ty.kind() {
            ty::Bool => {
                let scalar = self.read_scalar(value, ExpectedKind::Bool)?;
                try_validation!(
                    scalar.to_bool(),
                    self.path,
                    Ub(InvalidBool(..)) => ValidationErrorKind::InvalidBool {
                        value: format!("{scalar:x}"),
                    }
                );
                if self.reset_provenance_and_padding {
                    self.ecx.clear_provenance(value)?;
                    self.add_data_range_place(value);
                }
                interp_ok(true)
            }
            ty::Char => {
                let scalar = self.read_scalar(value, ExpectedKind::Char)?;
                try_validation!(
                    scalar.to_char(),
                    self.path,
                    Ub(InvalidChar(..)) => ValidationErrorKind::InvalidChar {
                        value: format!("{scalar:x}"),
                    }
                );
                if self.reset_provenance_and_padding {
                    self.ecx.clear_provenance(value)?;
                    self.add_data_range_place(value);
                }
                interp_ok(true)
            }
            ty::Float(_) | ty::Int(_) | ty::Uint(_) => {
                // NOTE: Keep this in sync with the array optimization for int/float
                // types below!
                self.read_scalar(
                    value,
                    if matches!(ty.kind(), ty::Float(..)) {
                        ExpectedKind::Float
                    } else {
                        ExpectedKind::Int
                    },
                )?;
                if self.reset_provenance_and_padding {
                    self.ecx.clear_provenance(value)?;
                    self.add_data_range_place(value);
                }
                interp_ok(true)
            }
            ty::RawPtr(..) => {
                let place = self.deref_pointer(value, ExpectedKind::RawPtr)?;
                if place.layout.is_unsized() {
                    self.check_wide_ptr_meta(place.meta(), place.layout)?;
                }
                interp_ok(true)
            }
            ty::Ref(_, _ty, mutbl) => {
                self.check_safe_pointer(value, PointerKind::Ref(*mutbl))?;
                interp_ok(true)
            }
            ty::FnPtr(..) => {
                let scalar = self.read_scalar(value, ExpectedKind::FnPtr)?;

                // If we check references recursively, also check that this points to a function.
                if let Some(_) = self.ref_tracking {
                    let ptr = scalar.to_pointer(self.ecx)?;
                    let _fn = try_validation!(
                        self.ecx.get_ptr_fn(ptr),
                        self.path,
                        Ub(DanglingIntPointer{ .. } | InvalidFunctionPointer(..)) =>
                            InvalidFnPtr { value: format!("{ptr}") },
                    );
                    // FIXME: Check if the signature matches
                } else {
                    // Otherwise (for standalone Miri), we still have to check that it is non-null.
                    if self.ecx.scalar_may_be_null(scalar)? {
                        throw_validation_failure!(self.path, NullFnPtr);
                    }
                }
                if self.reset_provenance_and_padding {
                    // Make sure we do not preserve partial provenance. This matches the thin
                    // pointer handling in `deref_pointer`.
                    if matches!(scalar, Scalar::Int(..)) {
                        self.ecx.clear_provenance(value)?;
                    }
                    self.add_data_range_place(value);
                }
                interp_ok(true)
            }
            ty::Never => throw_validation_failure!(self.path, NeverVal),
            ty::Foreign(..) | ty::FnDef(..) => {
                // Nothing to check.
                interp_ok(true)
            }
            ty::UnsafeBinder(_) => todo!("FIXME(unsafe_binder)"),
            // The above should be all the primitive types. The rest is compound, we
            // check them by visiting their fields/variants.
            ty::Adt(..)
            | ty::Tuple(..)
            | ty::Array(..)
            | ty::Slice(..)
            | ty::Str
            | ty::Dynamic(..)
            | ty::Closure(..)
            | ty::Pat(..)
            | ty::CoroutineClosure(..)
            | ty::Coroutine(..) => interp_ok(false),
            // Some types only occur during typechecking, they have no layout.
            // We should not see them here and we could not check them anyway.
            ty::Error(_)
            | ty::Infer(..)
            | ty::Placeholder(..)
            | ty::Bound(..)
            | ty::Param(..)
            | ty::Alias(..)
            | ty::CoroutineWitness(..) => bug!("Encountered invalid type {:?}", ty),
        }
    }

    fn visit_scalar(
        &mut self,
        scalar: Scalar<M::Provenance>,
        scalar_layout: ScalarAbi,
    ) -> InterpResult<'tcx> {
        let size = scalar_layout.size(self.ecx);
        let valid_range = scalar_layout.valid_range(self.ecx);
        let WrappingRange { start, end } = valid_range;
        let max_value = size.unsigned_int_max();
        assert!(end <= max_value);
        let bits = match scalar.try_to_scalar_int() {
            Ok(int) => int.to_bits(size),
            Err(_) => {
                // So this is a pointer then, and casting to an int failed.
                // Can only happen during CTFE.
                // We support 2 kinds of ranges here: full range, and excluding zero.
                if start == 1 && end == max_value {
                    // Only null is the niche. So make sure the ptr is NOT null.
                    if self.ecx.scalar_may_be_null(scalar)? {
                        throw_validation_failure!(
                            self.path,
                            NullablePtrOutOfRange { range: valid_range, max_value }
                        )
                    } else {
                        return interp_ok(());
                    }
                } else if scalar_layout.is_always_valid(self.ecx) {
                    // Easy. (This is reachable if `enforce_number_validity` is set.)
                    return interp_ok(());
                } else {
                    // Conservatively, we reject, because the pointer *could* have a bad
                    // value.
                    throw_validation_failure!(
                        self.path,
                        PtrOutOfRange { range: valid_range, max_value }
                    )
                }
            }
        };
        // Now compare.
        if valid_range.contains(bits) {
            interp_ok(())
        } else {
            throw_validation_failure!(
                self.path,
                OutOfRange { value: format!("{bits}"), range: valid_range, max_value }
            )
        }
    }
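
    // For example (illustrative): `NonZero<u8>` has the valid range `1..=255`,
    // so the integer `0` fails the `valid_range.contains(bits)` check above,
    // while a pointer is accepted only once `scalar_may_be_null` has ruled out
    // null -- the one niche a "full range except zero" layout can express for
    // pointers.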

    fn in_mutable_memory(&self, val: &PlaceTy<'tcx, M::Provenance>) -> bool {
        debug_assert!(self.ctfe_mode.is_some());
        if let Some(mplace) = val.as_mplace_or_local().left() {
            if let Some(alloc_id) = mplace.ptr().provenance.and_then(|p| p.get_alloc_id()) {
                let tcx = *self.ecx.tcx;
                // Everything must be already interned.
                let mutbl = tcx.global_alloc(alloc_id).mutability(tcx, self.ecx.typing_env);
                if let Some((_, alloc)) = self.ecx.memory.alloc_map.get(alloc_id) {
                    assert_eq!(alloc.mutability, mutbl);
                }
                mutbl.is_mut()
            } else {
                // No memory at all.
                false
            }
        } else {
            // A local variable -- definitely mutable.
            true
        }
    }

    /// Add the given pointer-length pair to the "data" range of this visit.
    fn add_data_range(&mut self, ptr: Pointer<Option<M::Provenance>>, size: Size) {
        if let Some(data_bytes) = self.data_bytes.as_mut() {
            // We only have to store the offset, the rest is the same for all pointers here.
            // The logic is agnostic to whether the offset is relative or absolute as long as
            // it is consistent.
            let (_prov, offset) = ptr.into_raw_parts();
            // Add this.
            data_bytes.add_range(offset, size);
        };
    }

    /// Add the entire given place to the "data" range of this visit.
    fn add_data_range_place(&mut self, place: &PlaceTy<'tcx, M::Provenance>) {
        // Only sized places can be added this way.
        debug_assert!(place.layout.is_sized());
        if let Some(data_bytes) = self.data_bytes.as_mut() {
            let offset = Self::data_range_offset(self.ecx, place);
            data_bytes.add_range(offset, place.layout.size);
        }
    }

    /// Convert a place into the offset it starts at, for the purpose of data_range tracking.
    /// Must only be called if `data_bytes` is `Some(_)`.
    fn data_range_offset(ecx: &InterpCx<'tcx, M>, place: &PlaceTy<'tcx, M::Provenance>) -> Size {
        // The presence of `data_bytes` implies that our place is in memory.
        let ptr = ecx
            .place_to_op(place)
            .expect("place must be in memory")
            .as_mplace_or_imm()
            .expect_left("place must be in memory")
            .ptr();
        let (_prov, offset) = ptr.into_raw_parts();
        offset
    }

    fn reset_padding(&mut self, place: &PlaceTy<'tcx, M::Provenance>) -> InterpResult<'tcx> {
        let Some(data_bytes) = self.data_bytes.as_mut() else { return interp_ok(()) };
        // Our value must be in memory, otherwise we would not have set up `data_bytes`.
        let mplace = self.ecx.force_allocation(place)?;
        // Determine starting offset and size.
        let (_prov, start_offset) = mplace.ptr().into_raw_parts();
        let (size, _align) = self
            .ecx
            .size_and_align_of_val(&mplace)?
            .unwrap_or((mplace.layout.size, mplace.layout.align.abi));
        // If there is no padding at all, we can skip the rest: check for
        // a single data range covering the entire value.
        if data_bytes.0 == &[(start_offset, size)] {
            return interp_ok(());
        }
        // Get a handle for the allocation. Do this only once, to avoid looking up the same
        // allocation over and over again. (Though to be fair, iterating the value already does
        // exactly that.)
        let Some(mut alloc) = self.ecx.get_ptr_alloc_mut(mplace.ptr(), size)? else {
            // A ZST, no padding to clear.
            return interp_ok(());
        };
        // Add a "finalizer" data range at the end, so that the iteration below finds all gaps
        // between ranges.
        data_bytes.0.push((start_offset + size, Size::ZERO));
        // Iterate, and reset gaps.
        let mut padding_cleared_until = start_offset;
        for &(offset, size) in data_bytes.0.iter() {
            assert!(
                offset >= padding_cleared_until,
                "reset_padding on {}: previous field ended at offset {}, next field starts at {} (and has a size of {} bytes)",
                mplace.layout.ty,
                (padding_cleared_until - start_offset).bytes(),
                (offset - start_offset).bytes(),
                size.bytes(),
            );
            if offset > padding_cleared_until {
                // We found padding. Adjust the range to be relative to `alloc`, and make it uninit.
                let padding_start = padding_cleared_until - start_offset;
                let padding_size = offset - padding_cleared_until;
                let range = alloc_range(padding_start, padding_size);
                trace!("reset_padding on {}: resetting padding range {range:?}", mplace.layout.ty);
                alloc.write_uninit(range)?;
            }
            padding_cleared_until = offset + size;
        }
        assert!(padding_cleared_until == start_offset + size);
        interp_ok(())
    }
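
    // Illustrative example (hypothetical type, not from this file): for a
    // `#[repr(C)] struct S { a: u8, b: u32 }` starting at offset 0, the visit
    // records the data ranges {(0, 1), (4, 4)}, so the loop above resets
    // bytes 1..4 -- the padding between `a` and `b` -- back to uninit.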

    /// Computes the data range of this union type:
    /// which bytes are inside a field (i.e., not padding).
    fn union_data_range<'e>(
        ecx: &'e mut InterpCx<'tcx, M>,
        layout: TyAndLayout<'tcx>,
    ) -> Cow<'e, RangeSet> {
        assert!(layout.ty.is_union());
        assert!(layout.is_sized(), "there are no unsized unions");
        let layout_cx = LayoutCx::new(*ecx.tcx, ecx.typing_env);
        return M::cached_union_data_range(ecx, layout.ty, || {
            let mut out = RangeSet(Vec::new());
            union_data_range_uncached(&layout_cx, layout, Size::ZERO, &mut out);
            out
        });

        /// Helper for recursive traversal: add data ranges of the given type to `out`.
        fn union_data_range_uncached<'tcx>(
            cx: &LayoutCx<'tcx>,
            layout: TyAndLayout<'tcx>,
            base_offset: Size,
            out: &mut RangeSet,
        ) {
            // If this is a ZST, we don't contain any data. In particular, this helps us to quickly
            // skip over huge arrays of ZST.
            if layout.is_zst() {
                return;
            }
            // Just recursively add all the fields of everything to the output.
            match &layout.fields {
                FieldsShape::Primitive => {
                    out.add_range(base_offset, layout.size);
                }
                &FieldsShape::Union(fields) => {
                    // Currently, all fields start at offset 0 (relative to `base_offset`).
                    for field in 0..fields.get() {
                        let field = layout.field(cx, field);
                        union_data_range_uncached(cx, field, base_offset, out);
                    }
                }
                &FieldsShape::Array { stride, count } => {
                    let elem = layout.field(cx, 0);

                    // Fast-path for large arrays of simple types that do not contain any padding.
                    if elem.backend_repr.is_scalar() {
                        out.add_range(base_offset, elem.size * count);
                    } else {
                        for idx in 0..count {
                            // This repeats the same computation for every array element... but the alternative
                            // is to allocate temporary storage for a dedicated `out` set for the array element
                            // and replicate that N times. Is that better?
                            union_data_range_uncached(cx, elem, base_offset + idx * stride, out);
                        }
                    }
                }
                FieldsShape::Arbitrary { offsets, .. } => {
                    for (field, &offset) in offsets.iter_enumerated() {
                        let field = layout.field(cx, field.as_usize());
                        union_data_range_uncached(cx, field, base_offset + offset, out);
                    }
                }
            }
            // Don't forget potential other variants.
            match &layout.variants {
                Variants::Single { .. } | Variants::Empty => {
                    // Fully handled above.
                }
                Variants::Multiple { variants, .. } => {
                    for variant in variants.indices() {
                        let variant = layout.for_variant(cx, variant);
                        union_data_range_uncached(cx, variant, base_offset, out);
                    }
                }
            }
        }
    }
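
    // Illustrative example (hypothetical type, not from this file): for
    // `union U { a: u8, b: u32 }`, the fields contribute the ranges (0, 1)
    // and (0, 4), which `add_range` merges into {(0, 4)} -- every byte of the
    // union is data, so none of it is treated as padding.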
}

impl<'rt, 'tcx, M: Machine<'tcx>> ValueVisitor<'tcx, M> for ValidityVisitor<'rt, 'tcx, M> {
    type V = PlaceTy<'tcx, M::Provenance>;

    #[inline(always)]
    fn ecx(&self) -> &InterpCx<'tcx, M> {
        self.ecx
    }

    fn read_discriminant(
        &mut self,
        val: &PlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx, VariantIdx> {
        self.with_elem(PathElem::EnumTag, move |this| {
            interp_ok(try_validation!(
                this.ecx.read_discriminant(val),
                this.path,
                Ub(InvalidTag(val)) => InvalidEnumTag {
                    value: format!("{val:x}"),
                },
                Ub(UninhabitedEnumVariantRead(_)) => UninhabitedEnumVariant,
                // Uninit / bad provenance are not possible since the field was already previously
                // checked at its integer type.
            ))
        })
    }

    #[inline]
    fn visit_field(
        &mut self,
        old_val: &PlaceTy<'tcx, M::Provenance>,
        field: usize,
        new_val: &PlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        let elem = self.aggregate_field_path_elem(old_val.layout, field);
        self.with_elem(elem, move |this| this.visit_value(new_val))
    }

    #[inline]
    fn visit_variant(
        &mut self,
        old_val: &PlaceTy<'tcx, M::Provenance>,
        variant_id: VariantIdx,
        new_val: &PlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        let name = match old_val.layout.ty.kind() {
            ty::Adt(adt, _) => PathElem::Variant(adt.variant(variant_id).name),
            // Coroutines also have variants
            ty::Coroutine(..) => PathElem::CoroutineState(variant_id),
            _ => bug!("Unexpected type with variant: {:?}", old_val.layout.ty),
        };
        self.with_elem(name, move |this| this.visit_value(new_val))
    }

    #[inline(always)]
    fn visit_union(
        &mut self,
        val: &PlaceTy<'tcx, M::Provenance>,
        _fields: NonZero<usize>,
    ) -> InterpResult<'tcx> {
        // Special check for CTFE validation, preventing `UnsafeCell` inside unions in immutable memory.
        if self.ctfe_mode.is_some_and(|c| !c.allow_immutable_unsafe_cell()) {
            // Unsized unions are currently not a thing, but let's keep this code consistent with
            // the check in `visit_value`.
            let zst = self.ecx.size_and_align_of_val(val)?.is_some_and(|(s, _a)| s.bytes() == 0);
            if !zst && !val.layout.ty.is_freeze(*self.ecx.tcx, self.ecx.typing_env) {
                if !self.in_mutable_memory(val) {
                    throw_validation_failure!(self.path, UnsafeCellInImmutable);
                }
            }
        }
        if self.reset_provenance_and_padding
            && let Some(data_bytes) = self.data_bytes.as_mut()
        {
            let base_offset = Self::data_range_offset(self.ecx, val);
            // Determine and add data range for this union.
            let union_data_range = Self::union_data_range(self.ecx, val.layout);
            for &(offset, size) in union_data_range.0.iter() {
                data_bytes.add_range(base_offset + offset, size);
            }
        }
        interp_ok(())
    }

    #[inline]
    fn visit_box(
        &mut self,
        _box_ty: Ty<'tcx>,
        val: &PlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        self.check_safe_pointer(val, PointerKind::Box)?;
        interp_ok(())
    }

    #[inline]
    fn visit_value(&mut self, val: &PlaceTy<'tcx, M::Provenance>) -> InterpResult<'tcx> {
        trace!("visit_value: {:?}, {:?}", *val, val.layout);

        // Check primitive types -- the leaves of our recursive descent.
        // This is called even for enum discriminants (which are "fields" of their enum),
        // so for integer-typed discriminants the provenance reset will happen here.
        // We assume that the Scalar validity range does not restrict these values
        // any further than `try_visit_primitive` does!
        if self.try_visit_primitive(val)? {
            return interp_ok(());
        }

        // Special check preventing `UnsafeCell` in the inner part of constants
        if self.ctfe_mode.is_some_and(|c| !c.allow_immutable_unsafe_cell()) {
            // Exclude ZST values. We need to compute the dynamic size/align to properly
            // handle slices and trait objects.
            let zst = self.ecx.size_and_align_of_val(val)?.is_some_and(|(s, _a)| s.bytes() == 0);
            if !zst
                && let Some(def) = val.layout.ty.ty_adt_def()
                && def.is_unsafe_cell()
            {
                if !self.in_mutable_memory(val) {
                    throw_validation_failure!(self.path, UnsafeCellInImmutable);
                }
            }
        }

        // Recursively walk the value at its type. Apply optimizations for some large types.
        match val.layout.ty.kind() {
            ty::Str => {
                let mplace = val.assert_mem_place(); // strings are unsized and hence never immediate
                let len = mplace.len(self.ecx)?;
                try_validation!(
                    self.ecx.read_bytes_ptr_strip_provenance(mplace.ptr(), Size::from_bytes(len)),
                    self.path,
                    Ub(InvalidUninitBytes(..)) => Uninit { expected: ExpectedKind::Str },
                    Unsup(ReadPointerAsInt(_)) => PointerAsInt { expected: ExpectedKind::Str }
                );
            }
            ty::Array(tys, ..) | ty::Slice(tys)
                // This optimization applies for types that can hold arbitrary non-provenance bytes (such as
                // integer and floating point types).
                // FIXME(wesleywiser) This logic could be extended further to arbitrary structs or
                // tuples made up of integer/floating point types or inhabited ZSTs with no padding.
                if matches!(tys.kind(), ty::Int(..) | ty::Uint(..) | ty::Float(..))
                =>
            {
                let expected = if tys.is_integral() { ExpectedKind::Int } else { ExpectedKind::Float };
                // Optimized handling for arrays of integer/float type.

                // This is the length of the array/slice.
                let len = val.len(self.ecx)?;
                // This is the element type size.
                let layout = self.ecx.layout_of(*tys)?;
                // This is the size in bytes of the whole array. (This checks for overflow.)
                let size = layout.size * len;
                // If the size is 0, there is nothing to check.
                // (`size` can only be 0 if `len` is 0, and empty arrays are always valid.)
                if size == Size::ZERO {
                    return interp_ok(());
                }
                // Now that we definitely have a non-ZST array, we know it lives in memory -- except it may
                // be an uninitialized local variable, those are also "immediate".
                let mplace = match val.to_op(self.ecx)?.as_mplace_or_imm() {
                    Left(mplace) => mplace,
                    Right(imm) => match *imm {
                        Immediate::Uninit =>
                            throw_validation_failure!(self.path, Uninit { expected }),
                        Immediate::Scalar(..) | Immediate::ScalarPair(..) =>
                            bug!("arrays/slices can never have Scalar/ScalarPair layout"),
                    }
                };

                // Optimization: we just check the entire range at once.
1205                // NOTE: Keep this in sync with the handling of integer and float
1206                // types above, in `visit_primitive`.
1207                // No need for an alignment check here, this is not an actual memory access.
1208                let alloc = self.ecx.get_ptr_alloc(mplace.ptr(), size)?.expect("we already excluded size 0");
1209
1210                alloc.get_bytes_strip_provenance().map_err_kind(|kind| {
1211                    // Some error happened, try to provide a more detailed description.
1212                    // For some errors we might be able to provide extra information.
1213                    // (This custom logic does not fit the `try_validation!` macro.)
1214                    match kind {
1215                        Ub(InvalidUninitBytes(Some((_alloc_id, access)))) | Unsup(ReadPointerAsInt(Some((_alloc_id, access)))) => {
1216                            // Some byte was uninitialized, determine which
1217                            // element that byte belongs to so we can
1218                            // provide an index.
1219                            let i = usize::try_from(
1220                                access.bad.start.bytes() / layout.size.bytes(),
1221                            )
1222                            .unwrap();
1223                            self.path.push(PathElem::ArrayElem(i));
1224
1225                            if matches!(kind, Ub(InvalidUninitBytes(_))) {
1226                                err_validation_failure!(self.path, Uninit { expected })
1227                            } else {
1228                                err_validation_failure!(self.path, PointerAsInt { expected })
1229                            }
1230                        }
1231
1232                        // Propagate upwards (that will also check for unexpected errors).
1233                        err => err,
1234                    }
1235                })?;
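                // E.g., if byte 10 of a `[u32]` is uninitialized, 10 / 4 = 2,
                // so the error path points at the third element, i.e. `[2]`.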

                // Don't forget that these are all non-pointer types, and thus do not preserve
                // provenance.
                if self.reset_provenance_and_padding {
                    // We can't reuse the allocation reference from above: there, we might have
                    // been looking at read-only memory, but here we need mutable access.
                    let mut alloc = self.ecx.get_ptr_alloc_mut(mplace.ptr(), size)?.expect("we already excluded size 0");
                    alloc.clear_provenance()?;
                    // Also, mark this as containing data, not padding.
                    self.add_data_range(mplace.ptr(), size);
                }
            }
            // Fast path for arrays and slices of ZSTs. We only need to check a single ZST element
            // of an array and not all of them, because there's only a single value of a specific
            // ZST type, so either validation fails for all elements or none.
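            // E.g., `[(); 1_000_000]` validates a single `()` element instead
            // of iterating a million times.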
            ty::Array(tys, ..) | ty::Slice(tys) if self.ecx.layout_of(*tys)?.is_zst() => {
                // Validate just the first element (if any).
                if val.len(self.ecx)? > 0 {
                    self.visit_field(val, 0, &self.ecx.project_index(val, 0)?)?;
                }
            }
            ty::Pat(base, pat) => {
                // First check that the base type is valid.
                self.visit_value(&val.transmute(self.ecx.layout_of(*base)?, self.ecx)?)?;
                // When you extend this match, make sure to also add tests to
                // tests/ui/type/pattern_types/validity.rs.
                match **pat {
                    // Range patterns are precisely reflected into `valid_range` and thus
                    // handled fully by `visit_scalar` (called below).
                    ty::PatternKind::Range { .. } => {},
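                    // E.g., for a pattern type like `u32 is 1..`, layout
                    // computation sets the scalar's `valid_range` to exclude 0,
                    // and `visit_scalar` below rejects a zero value.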

                    // FIXME(pattern_types): check that the value is covered by one of the variants.
                    // For now, we rely on layout computation setting the scalar's `valid_range` to
                    // match the pattern. However, this cannot always work; the layout may
                    // pessimistically cover actually illegal ranges and Miri would miss that UB.
                    // The consolation here is that codegen will also miss that UB, so at least
                    // we won't see optimizations actually breaking such programs.
                    ty::PatternKind::Or(_patterns) => {}
                }
            }
            _ => {
                // default handler
                try_validation!(
                    self.walk_value(val),
                    self.path,
                    // It's not great to catch errors here, since we can't give a very good path,
                    // but it's better than ICEing.
                    Ub(InvalidVTableTrait { vtable_dyn_type, expected_dyn_type }) => {
                        InvalidMetaWrongTrait { vtable_dyn_type, expected_dyn_type }
                    },
                );
            }
        }

        // *After* all of this, check further information stored in the layout. We need to check
        // this to handle types like `NonNull` where the `Scalar` info is more restrictive than what
        // the fields say (`rustc_layout_scalar_valid_range_start`). But in most cases, this will
        // just propagate what the fields say, and then we want the error to point at the field --
        // so, we first recurse, then we do this check.
        //
        // FIXME: We could avoid some redundant checks here. For newtypes wrapping
        // scalars, we do the same check on every "level" (e.g., first we check
        // MyNewtype and then the scalar in there).
        if val.layout.is_uninhabited() {
            let ty = val.layout.ty;
            throw_validation_failure!(self.path, UninhabitedVal { ty });
        }
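        // E.g., a value of type `!` or of an empty enum always fails here,
        // even though there may be no bytes to inspect.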
        match val.layout.backend_repr {
            BackendRepr::Scalar(scalar_layout) => {
                if !scalar_layout.is_uninit_valid() {
                    // There is something to check here.
                    let scalar = self.read_scalar(val, ExpectedKind::InitScalar)?;
                    self.visit_scalar(scalar, scalar_layout)?;
                }
            }
            BackendRepr::ScalarPair(a_layout, b_layout) => {
                // We can only proceed if *both* scalars need to be initialized.
                // FIXME: find a way to also check ScalarPair when one side can be uninit but
                // the other must be init.
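                // E.g., a `(bool, bool)` tuple has `ScalarPair` repr: after its
                // fields were visited above, both halves are read and checked
                // against their scalar ranges here again.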
                if !a_layout.is_uninit_valid() && !b_layout.is_uninit_valid() {
                    let (a, b) =
                        self.read_immediate(val, ExpectedKind::InitScalar)?.to_scalar_pair();
                    self.visit_scalar(a, a_layout)?;
                    self.visit_scalar(b, b_layout)?;
                }
            }
            BackendRepr::SimdVector { .. } => {
                // No checks here, we assume layout computation gets this right.
                // (This is harder to check since Miri does not represent these as `Immediate`. We
                // also cannot use field projections since this might be a newtype around a vector.)
            }
            BackendRepr::Memory { .. } => {
                // Nothing to do.
            }
        }

        interp_ok(())
    }
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
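    /// Shared implementation behind `const_validate_operand` and
    /// `validate_operand` below: runs the `ValidityVisitor` over `val` and
    /// turns any error that is neither a validation failure nor otherwise
    /// expected into an ICE.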
    fn validate_operand_internal(
        &mut self,
        val: &PlaceTy<'tcx, M::Provenance>,
        path: Vec<PathElem>,
        ref_tracking: Option<&mut RefTracking<MPlaceTy<'tcx, M::Provenance>, Vec<PathElem>>>,
        ctfe_mode: Option<CtfeValidationMode>,
        reset_provenance_and_padding: bool,
    ) -> InterpResult<'tcx> {
        trace!("validate_operand_internal: {:?}, {:?}", *val, val.layout.ty);

        // Run the visitor.
        self.run_for_validation_mut(|ecx| {
            let reset_padding = reset_provenance_and_padding && {
                // Check if `val` is actually stored in memory. If not, padding is not even
                // represented and we need not reset it.
                ecx.place_to_op(val)?.as_mplace_or_imm().is_left()
            };
            let mut v = ValidityVisitor {
                path,
                ref_tracking,
                ctfe_mode,
                ecx,
                reset_provenance_and_padding,
                data_bytes: reset_padding.then_some(RangeSet(Vec::new())),
            };
            v.visit_value(val)?;
            v.reset_padding(val)?;
            interp_ok(())
        })
        .map_err_info(|err| {
            if !matches!(
                err.kind(),
                err_ub!(ValidationError { .. })
                    | InterpErrorKind::InvalidProgram(_)
                    | InterpErrorKind::Unsupported(UnsupportedOpInfo::ExternTypeField)
            ) {
                bug!(
                    "Unexpected error during validation: {}",
                    format_interp_error(self.tcx.dcx(), err)
                );
            }
            err
        })
    }

    /// This function checks the data at `val` to be const-valid.
    /// `val` is assumed to cover valid memory if it is an indirect operand.
    /// It will error if the bits at `val` do not match the ones described by the layout.
    ///
    /// `ref_tracking` is used to record references that we encounter so that they
    /// can be checked recursively by an outside driving loop.
    ///
    /// `ctfe_mode` controls whether this must satisfy the rules for constants:
    /// - no pointers to statics.
    /// - no `UnsafeCell` or non-ZST `&mut`.
    #[inline(always)]
    pub(crate) fn const_validate_operand(
        &mut self,
        val: &PlaceTy<'tcx, M::Provenance>,
        path: Vec<PathElem>,
        ref_tracking: &mut RefTracking<MPlaceTy<'tcx, M::Provenance>, Vec<PathElem>>,
        ctfe_mode: CtfeValidationMode,
    ) -> InterpResult<'tcx> {
        self.validate_operand_internal(
            val,
            path,
            Some(ref_tracking),
            Some(ctfe_mode),
            /*reset_provenance_and_padding*/ false,
        )
    }

    /// This function checks the data at `val` to be runtime-valid.
    /// `val` is assumed to cover valid memory if it is an indirect operand.
    /// It will error if the bits at `val` do not match the ones described by the layout.
    #[inline(always)]
    pub fn validate_operand(
        &mut self,
        val: &PlaceTy<'tcx, M::Provenance>,
        recursive: bool,
        reset_provenance_and_padding: bool,
    ) -> InterpResult<'tcx> {
        let _span = enter_trace_span!(
            M,
            "validate_operand",
            "recursive={recursive}, reset_provenance_and_padding={reset_provenance_and_padding}, val={val:?}"
        );

        // Note that we *could* actually be in CTFE here with `-Zextra-const-ub-checks`, but it's
        // still correct not to use `ctfe_mode`: that mode is for validation of the final constant
        // value; it rules out things like `UnsafeCell` in awkward places.
        if !recursive {
            return self.validate_operand_internal(
                val,
                vec![],
                None,
                None,
                reset_provenance_and_padding,
            );
        }
        // Do a recursive check.
        let mut ref_tracking = RefTracking::empty();
        self.validate_operand_internal(
            val,
            vec![],
            Some(&mut ref_tracking),
            None,
            reset_provenance_and_padding,
        )?;
        while let Some((mplace, path)) = ref_tracking.todo.pop() {
            // Things behind a reference do *not* have their provenance reset.
            self.validate_operand_internal(
                &mplace.into(),
                path,
                Some(&mut ref_tracking),
                None,
                /*reset_provenance_and_padding*/ false,
            )?;
        }
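        // E.g., validating a `&&u8` checks the outer reference, enqueues the
        // inner `&u8` place in `ref_tracking`, and the loop above then
        // validates the pointee. Places that were already visited are not
        // enqueued again, so this terminates even for self-referential data.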
        interp_ok(())
    }
}