// rustc_const_eval/interpret/machine.rs

//! This module contains everything needed to instantiate an interpreter.
//! This separation exists to ensure that no fancy miri features like
//! interpreting common C functions leak into CTFE.
use std::borrow::{Borrow, Cow};
use std::fmt::Debug;
use std::hash::Hash;

use rustc_abi::{Align, Size};
use rustc_apfloat::{Float, FloatConvert};
use rustc_middle::query::TyCtxtAt;
use rustc_middle::ty::Ty;
use rustc_middle::ty::layout::TyAndLayout;
use rustc_middle::{mir, ty};
use rustc_span::def_id::DefId;
use rustc_target::callconv::FnAbi;

use super::{
    AllocBytes, AllocId, AllocKind, AllocRange, Allocation, CTFE_ALLOC_SALT, ConstAllocation,
    CtfeProvenance, EnteredTraceSpan, FnArg, Frame, ImmTy, InterpCx, InterpResult, MPlaceTy,
    MemoryKind, Misalignment, OpTy, PlaceTy, Pointer, Provenance, RangeSet, interp_ok, throw_unsup,
};
23
/// Data returned by [`Machine::after_stack_pop`], and consumed by
/// [`InterpCx::return_from_current_stack_frame`] to determine what actions should be done when
/// returning from a stack frame.
#[derive(Eq, PartialEq, Debug, Copy, Clone)]
pub enum ReturnAction {
    /// Indicates that no special handling should be
    /// done - we'll either return normally or unwind
    /// based on the terminator for the function
    /// we're leaving.
    Normal,

    /// Indicates that we should *not* jump to the return/unwind address, as the callback already
    /// took care of everything.
    NoJump,

    /// Returned by [`InterpCx::pop_stack_frame_raw`] when no cleanup should be done.
    NoCleanup,
}
42
/// Whether this kind of memory is allowed to leak
pub trait MayLeak: Copy {
    /// Returns `true` if it is acceptable for allocations of this memory kind
    /// to still exist when the interpreter shuts down (i.e., leaking them is not an error).
    fn may_leak(self) -> bool;
}
47
/// The functionality needed by memory to manage its allocations
pub trait AllocMap<K: Hash + Eq, V> {
    /// Tests if the map contains the given key.
    /// Deliberately takes `&mut` because that is sufficient, and some implementations
    /// can be more efficient then (using `RefCell::get_mut`).
    fn contains_key<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> bool
    where
        K: Borrow<Q>;

    /// Callers should prefer [`AllocMap::contains_key`] when it is possible to call because it may
    /// be more efficient. This function exists for callers that only have a shared reference
    /// (which might make it slightly less efficient than `contains_key`, e.g. if
    /// the data is stored inside a `RefCell`).
    fn contains_key_ref<Q: ?Sized + Hash + Eq>(&self, k: &Q) -> bool
    where
        K: Borrow<Q>;

    /// Inserts a new entry into the map.
    fn insert(&mut self, k: K, v: V) -> Option<V>;

    /// Removes an entry from the map.
    fn remove<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> Option<V>
    where
        K: Borrow<Q>;

    /// Returns data based on the keys and values in the map.
    fn filter_map_collect<T>(&self, f: impl FnMut(&K, &V) -> Option<T>) -> Vec<T>;

    /// Returns a reference to entry `k`. If no such entry exists, call
    /// `vacant` and either forward its error, or add its result to the map
    /// and return a reference to *that*.
    fn get_or<E>(&self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&V, E>;

    /// Returns a mutable reference to entry `k`. If no such entry exists, call
    /// `vacant` and either forward its error, or add its result to the map
    /// and return a reference to *that*.
    fn get_mut_or<E>(&mut self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&mut V, E>;

    /// Read-only lookup.
    fn get(&self, k: K) -> Option<&V> {
        self.get_or(k, || Err(())).ok()
    }

    /// Mutable lookup.
    fn get_mut(&mut self, k: K) -> Option<&mut V> {
        self.get_mut_or(k, || Err(())).ok()
    }
}
96
/// Methods of this trait signify a point where CTFE evaluation would fail
/// and some use case dependent behaviour can instead be applied.
pub trait Machine<'tcx>: Sized {
    /// Additional memory kinds a machine wishes to distinguish from the builtin ones
    type MemoryKind: Debug + std::fmt::Display + MayLeak + Eq + 'static;

    /// Pointers are "tagged" with provenance information; typically the `AllocId` they belong to.
    type Provenance: Provenance + Eq + Hash + 'static;

    /// When getting the AllocId of a pointer, some extra data is also obtained from the provenance
    /// that is passed to memory access hooks so they can do things with it.
    type ProvenanceExtra: Copy + 'static;

    /// Machines can define extra (non-instance) things that represent values of function pointers.
    /// For example, Miri uses this to return a function pointer from `dlsym`
    /// that can later be called to execute the right thing.
    type ExtraFnVal: Debug + Copy;

    /// Extra data stored in every call frame.
    type FrameExtra;

    /// Extra data stored in every allocation.
    type AllocExtra: Debug + Clone + 'tcx;

    /// Type for the bytes of the allocation.
    type Bytes: AllocBytes + 'static;

    /// Memory's allocation map
    type MemoryMap: AllocMap<
            AllocId,
            (
                MemoryKind<Self::MemoryKind>,
                Allocation<Self::Provenance, Self::AllocExtra, Self::Bytes>,
            ),
        > + Default
        + Clone;

    /// The memory kind to use for copied global memory (held in `tcx`) --
    /// or None if such memory should not be mutated and thus any such attempt will cause
    /// a `ModifiedStatic` error to be raised.
    /// Statics are copied under two circumstances: When they are mutated, and when
    /// `adjust_allocation` (see below) returns an owned allocation
    /// that is added to the memory so that the work is not done twice.
    const GLOBAL_KIND: Option<Self::MemoryKind>;

    /// Should the machine panic on allocation failures?
    const PANIC_ON_ALLOC_FAIL: bool;

    /// Determines whether `eval_mir_constant` can never fail because all required consts have
    /// already been checked before.
    const ALL_CONSTS_ARE_PRECHECKED: bool = true;

    /// Whether memory accesses should be alignment-checked.
    fn enforce_alignment(ecx: &InterpCx<'tcx, Self>) -> bool;

    /// Gives the machine a chance to detect more misalignment than the built-in checks would catch.
    #[inline(always)]
    fn alignment_check(
        _ecx: &InterpCx<'tcx, Self>,
        _alloc_id: AllocId,
        _alloc_align: Align,
        _alloc_kind: AllocKind,
        _offset: Size,
        _align: Align,
    ) -> Option<Misalignment> {
        // By default, no extra misalignment is detected beyond the built-in checks.
        None
    }

    /// Whether to enforce the validity invariant for a specific layout.
    fn enforce_validity(ecx: &InterpCx<'tcx, Self>, layout: TyAndLayout<'tcx>) -> bool;
    /// Whether to enforce the validity invariant *recursively*.
    fn enforce_validity_recursively(
        _ecx: &InterpCx<'tcx, Self>,
        _layout: TyAndLayout<'tcx>,
    ) -> bool {
        false
    }

    /// Whether Assert(OverflowNeg) and Assert(Overflow) MIR terminators should actually
    /// check for overflow.
    fn ignore_optional_overflow_checks(_ecx: &InterpCx<'tcx, Self>) -> bool;

    /// Entry point for obtaining the MIR of anything that should get evaluated.
    /// So not just functions and shims, but also const/static initializers, anonymous
    /// constants, ...
    fn load_mir(
        ecx: &InterpCx<'tcx, Self>,
        instance: ty::InstanceKind<'tcx>,
    ) -> &'tcx mir::Body<'tcx> {
        ecx.tcx.instance_mir(instance)
    }

    /// Entry point to all function calls.
    ///
    /// Returns either the mir to use for the call, or `None` if execution should
    /// just proceed (which usually means this hook did all the work that the
    /// called function should usually have done). In the latter case, it is
    /// this hook's responsibility to advance the instruction pointer!
    /// (This is to support functions like `__rust_maybe_catch_panic` that neither find a MIR
    /// nor just jump to `ret`, but instead push their own stack frame.)
    /// Passing `dest` and `ret` in the same `Option` proved very annoying when only one of them
    /// was used.
    fn find_mir_or_eval_fn(
        ecx: &mut InterpCx<'tcx, Self>,
        instance: ty::Instance<'tcx>,
        abi: &FnAbi<'tcx, Ty<'tcx>>,
        args: &[FnArg<'tcx, Self::Provenance>],
        destination: &PlaceTy<'tcx, Self::Provenance>,
        target: Option<mir::BasicBlock>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx, Option<(&'tcx mir::Body<'tcx>, ty::Instance<'tcx>)>>;

    /// Execute `fn_val`. It is the hook's responsibility to advance the instruction
    /// pointer as appropriate.
    fn call_extra_fn(
        ecx: &mut InterpCx<'tcx, Self>,
        fn_val: Self::ExtraFnVal,
        abi: &FnAbi<'tcx, Ty<'tcx>>,
        args: &[FnArg<'tcx, Self::Provenance>],
        destination: &PlaceTy<'tcx, Self::Provenance>,
        target: Option<mir::BasicBlock>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx>;

    /// Directly process an intrinsic without pushing a stack frame. It is the hook's
    /// responsibility to advance the instruction pointer as appropriate.
    ///
    /// Returns `None` if the intrinsic was fully handled.
    /// Otherwise, returns an `Instance` of the function that implements the intrinsic.
    fn call_intrinsic(
        ecx: &mut InterpCx<'tcx, Self>,
        instance: ty::Instance<'tcx>,
        args: &[OpTy<'tcx, Self::Provenance>],
        destination: &PlaceTy<'tcx, Self::Provenance>,
        target: Option<mir::BasicBlock>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx, Option<ty::Instance<'tcx>>>;

    /// Check whether the given function may be executed on the current machine, in terms of the
    /// target features it requires.
    fn check_fn_target_features(
        _ecx: &InterpCx<'tcx, Self>,
        _instance: ty::Instance<'tcx>,
    ) -> InterpResult<'tcx>;

    /// Called to evaluate `Assert` MIR terminators that trigger a panic.
    fn assert_panic(
        ecx: &mut InterpCx<'tcx, Self>,
        msg: &mir::AssertMessage<'tcx>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx>;

    /// Called to trigger a non-unwinding panic.
    fn panic_nounwind(_ecx: &mut InterpCx<'tcx, Self>, msg: &str) -> InterpResult<'tcx>;

    /// Called when unwinding reached a state where execution should be terminated.
    fn unwind_terminate(
        ecx: &mut InterpCx<'tcx, Self>,
        reason: mir::UnwindTerminateReason,
    ) -> InterpResult<'tcx>;

    /// Called for all binary operations where the LHS has pointer type.
    ///
    /// Returns a (value, overflowed) pair if the operation succeeded
    fn binary_ptr_op(
        ecx: &InterpCx<'tcx, Self>,
        bin_op: mir::BinOp,
        left: &ImmTy<'tcx, Self::Provenance>,
        right: &ImmTy<'tcx, Self::Provenance>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, Self::Provenance>>;

    /// Generate the NaN returned by a float operation, given the list of inputs.
    /// (This is all inputs, not just NaN inputs!)
    fn generate_nan<F1: Float + FloatConvert<F2>, F2: Float>(
        _ecx: &InterpCx<'tcx, Self>,
        _inputs: &[F1],
    ) -> F2 {
        // By default we always return the preferred NaN.
        F2::NAN
    }

    /// Apply non-determinism to float operations that do not return a precise result.
    fn apply_float_nondet(
        _ecx: &mut InterpCx<'tcx, Self>,
        val: ImmTy<'tcx, Self::Provenance>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, Self::Provenance>> {
        // By default, the result is returned unchanged (fully deterministic).
        interp_ok(val)
    }

    /// Determines the result of `min`/`max` on floats when the arguments are equal.
    fn equal_float_min_max<F: Float>(_ecx: &InterpCx<'tcx, Self>, a: F, _b: F) -> F {
        // By default, we pick the left argument.
        a
    }

    /// Called before a basic block terminator is executed.
    #[inline]
    fn before_terminator(_ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
        interp_ok(())
    }

    /// Determines the result of a `NullaryOp::UbChecks` invocation.
    fn ub_checks(_ecx: &InterpCx<'tcx, Self>) -> InterpResult<'tcx, bool>;

    /// Determines the result of a `NullaryOp::ContractChecks` invocation.
    fn contract_checks(_ecx: &InterpCx<'tcx, Self>) -> InterpResult<'tcx, bool>;

    /// Called when the interpreter encounters a `StatementKind::ConstEvalCounter` instruction.
    /// You can use this to detect long or endlessly running programs.
    #[inline]
    fn increment_const_eval_counter(_ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
        interp_ok(())
    }

    /// Called before a global allocation is accessed.
    /// `def_id` is `Some` if this is the "lazy" allocation of a static.
    #[inline]
    fn before_access_global(
        _tcx: TyCtxtAt<'tcx>,
        _machine: &Self,
        _alloc_id: AllocId,
        _allocation: ConstAllocation<'tcx>,
        _static_def_id: Option<DefId>,
        _is_write: bool,
    ) -> InterpResult<'tcx> {
        interp_ok(())
    }

    /// Return the `AllocId` for the given thread-local static in the current thread.
    fn thread_local_static_pointer(
        _ecx: &mut InterpCx<'tcx, Self>,
        def_id: DefId,
    ) -> InterpResult<'tcx, Pointer<Self::Provenance>> {
        // By default, thread-local statics are not supported.
        throw_unsup!(ThreadLocalStatic(def_id))
    }

    /// Return the `AllocId` for the given `extern static`.
    fn extern_static_pointer(
        ecx: &InterpCx<'tcx, Self>,
        def_id: DefId,
    ) -> InterpResult<'tcx, Pointer<Self::Provenance>>;

    /// "Int-to-pointer cast"
    fn ptr_from_addr_cast(
        ecx: &InterpCx<'tcx, Self>,
        addr: u64,
    ) -> InterpResult<'tcx, Pointer<Option<Self::Provenance>>>;

    /// Marks a pointer as exposed, allowing its provenance
    /// to be recovered. "Pointer-to-int cast"
    fn expose_provenance(
        ecx: &InterpCx<'tcx, Self>,
        provenance: Self::Provenance,
    ) -> InterpResult<'tcx>;

    /// Convert a pointer with provenance into an allocation-offset pair and extra provenance info.
    /// `size` says how many bytes of memory are expected at that pointer. The *sign* of `size` can
    /// be used to disambiguate situations where a wildcard pointer sits right in between two
    /// allocations.
    ///
    /// If `ptr.provenance.get_alloc_id()` is `Some(p)`, the returned `AllocId` must be `p`.
    /// The resulting `AllocId` will just be used for that one step and then forgotten again
    /// (i.e., we'll never turn the data returned here back into a `Pointer` that might be
    /// stored in machine state).
    ///
    /// When this fails, that means the pointer does not point to a live allocation.
    fn ptr_get_alloc(
        ecx: &InterpCx<'tcx, Self>,
        ptr: Pointer<Self::Provenance>,
        size: i64,
    ) -> Option<(AllocId, Size, Self::ProvenanceExtra)>;

    /// Return a "root" pointer for the given allocation: the one that is used for direct
    /// accesses to this static/const/fn allocation, or the one returned from the heap allocator.
    ///
    /// Not called on `extern` or thread-local statics (those use the methods above).
    ///
    /// `kind` is the kind of the allocation the pointer points to; it can be `None` when
    /// it's a global and `GLOBAL_KIND` is `None`.
    fn adjust_alloc_root_pointer(
        ecx: &InterpCx<'tcx, Self>,
        ptr: Pointer,
        kind: Option<MemoryKind<Self::MemoryKind>>,
    ) -> InterpResult<'tcx, Pointer<Self::Provenance>>;

    /// Called to adjust global allocations to the Provenance and AllocExtra of this machine.
    ///
    /// If `alloc` contains pointers, then they are all pointing to globals.
    ///
    /// This should avoid copying if no work has to be done! If this returns an owned
    /// allocation (because a copy had to be done to adjust things), machine memory will
    /// cache the result. (This relies on `AllocMap::get_or` being able to add the
    /// owned allocation to the map even when the map is shared.)
    fn adjust_global_allocation<'b>(
        ecx: &InterpCx<'tcx, Self>,
        id: AllocId,
        alloc: &'b Allocation,
    ) -> InterpResult<'tcx, Cow<'b, Allocation<Self::Provenance, Self::AllocExtra, Self::Bytes>>>;

    /// Initialize the extra state of an allocation local to this machine.
    ///
    /// This is guaranteed to be called exactly once on all allocations local to this machine.
    /// It will not be called automatically for global allocations; `adjust_global_allocation`
    /// has to do that itself if that is desired.
    fn init_local_allocation(
        ecx: &InterpCx<'tcx, Self>,
        id: AllocId,
        kind: MemoryKind<Self::MemoryKind>,
        size: Size,
        align: Align,
    ) -> InterpResult<'tcx, Self::AllocExtra>;

    /// Hook for performing extra checks on a memory read access.
    /// `ptr` will always be a pointer with the provenance in `prov` pointing to the beginning of
    /// `range`.
    ///
    /// This will *not* be called during validation!
    ///
    /// Takes read-only access to the allocation so we can keep all the memory read
    /// operations take `&self`. Use a `RefCell` in `AllocExtra` if you
    /// need to mutate.
    ///
    /// This is not invoked for ZST accesses, as no read actually happens.
    #[inline(always)]
    fn before_memory_read(
        _tcx: TyCtxtAt<'tcx>,
        _machine: &Self,
        _alloc_extra: &Self::AllocExtra,
        _ptr: Pointer<Option<Self::Provenance>>,
        _prov: (AllocId, Self::ProvenanceExtra),
        _range: AllocRange,
    ) -> InterpResult<'tcx> {
        interp_ok(())
    }

    /// Hook for performing extra checks on any memory read access,
    /// that involves an allocation, even ZST reads.
    ///
    /// This will *not* be called during validation!
    ///
    /// Used to prevent statics from self-initializing by reading from their own memory
    /// as it is being initialized.
    fn before_alloc_access(
        _tcx: TyCtxtAt<'tcx>,
        _machine: &Self,
        _alloc_id: AllocId,
    ) -> InterpResult<'tcx> {
        interp_ok(())
    }

    /// Hook for performing extra checks on a memory write access.
    /// This is not invoked for ZST accesses, as no write actually happens.
    /// `ptr` will always be a pointer with the provenance in `prov` pointing to the beginning of
    /// `range`.
    #[inline(always)]
    fn before_memory_write(
        _tcx: TyCtxtAt<'tcx>,
        _machine: &mut Self,
        _alloc_extra: &mut Self::AllocExtra,
        _ptr: Pointer<Option<Self::Provenance>>,
        _prov: (AllocId, Self::ProvenanceExtra),
        _range: AllocRange,
    ) -> InterpResult<'tcx> {
        interp_ok(())
    }

    /// Hook for performing extra operations on a memory deallocation.
    /// `ptr` will always be a pointer with the provenance in `prov` pointing to the beginning of
    /// the allocation.
    #[inline(always)]
    fn before_memory_deallocation(
        _tcx: TyCtxtAt<'tcx>,
        _machine: &mut Self,
        _alloc_extra: &mut Self::AllocExtra,
        _ptr: Pointer<Option<Self::Provenance>>,
        _prov: (AllocId, Self::ProvenanceExtra),
        _size: Size,
        _align: Align,
        _kind: MemoryKind<Self::MemoryKind>,
    ) -> InterpResult<'tcx> {
        interp_ok(())
    }

    /// Executes a retagging operation for a single pointer.
    /// Returns the possibly adjusted pointer.
    #[inline]
    fn retag_ptr_value(
        _ecx: &mut InterpCx<'tcx, Self>,
        _kind: mir::RetagKind,
        val: &ImmTy<'tcx, Self::Provenance>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, Self::Provenance>> {
        interp_ok(val.clone())
    }

    /// Executes a retagging operation on a compound value.
    /// Replaces all pointers stored in the given place.
    #[inline]
    fn retag_place_contents(
        _ecx: &mut InterpCx<'tcx, Self>,
        _kind: mir::RetagKind,
        _place: &PlaceTy<'tcx, Self::Provenance>,
    ) -> InterpResult<'tcx> {
        interp_ok(())
    }

    /// Called on places used for in-place function argument and return value handling.
    ///
    /// These places need to be protected to make sure the program cannot tell whether the
    /// argument/return value was actually copied or passed in-place.
    fn protect_in_place_function_argument(
        ecx: &mut InterpCx<'tcx, Self>,
        mplace: &MPlaceTy<'tcx, Self::Provenance>,
    ) -> InterpResult<'tcx> {
        // Without an aliasing model, all we can do is put `Uninit` into the place.
        // Conveniently this also ensures that the place actually points to suitable memory.
        ecx.write_uninit(mplace)
    }

    /// Called immediately before a new stack frame gets pushed.
    fn init_frame(
        ecx: &mut InterpCx<'tcx, Self>,
        frame: Frame<'tcx, Self::Provenance>,
    ) -> InterpResult<'tcx, Frame<'tcx, Self::Provenance, Self::FrameExtra>>;

    /// Borrow the current thread's stack.
    fn stack<'a>(
        ecx: &'a InterpCx<'tcx, Self>,
    ) -> &'a [Frame<'tcx, Self::Provenance, Self::FrameExtra>];

    /// Mutably borrow the current thread's stack.
    fn stack_mut<'a>(
        ecx: &'a mut InterpCx<'tcx, Self>,
    ) -> &'a mut Vec<Frame<'tcx, Self::Provenance, Self::FrameExtra>>;

    /// Called immediately after a stack frame got pushed and its locals got initialized.
    fn after_stack_push(_ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
        interp_ok(())
    }

    /// Called just before the frame is removed from the stack (followed by return value copy and
    /// local cleanup).
    fn before_stack_pop(_ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
        interp_ok(())
    }

    /// Called immediately after a stack frame got popped, but before jumping back to the caller.
    /// The `locals` have already been destroyed!
    #[inline(always)]
    fn after_stack_pop(
        _ecx: &mut InterpCx<'tcx, Self>,
        _frame: Frame<'tcx, Self::Provenance, Self::FrameExtra>,
        unwinding: bool,
    ) -> InterpResult<'tcx, ReturnAction> {
        // By default, we do not support unwinding from panics
        assert!(!unwinding);
        interp_ok(ReturnAction::Normal)
    }

    /// Called immediately after an "immediate" local variable is read in a given frame
    /// (i.e., this is called for reads that do not end up accessing addressable memory).
    #[inline(always)]
    fn after_local_read(
        _ecx: &InterpCx<'tcx, Self>,
        _frame: &Frame<'tcx, Self::Provenance, Self::FrameExtra>,
        _local: mir::Local,
    ) -> InterpResult<'tcx> {
        interp_ok(())
    }

    /// Called immediately after an "immediate" local variable is assigned a new value
    /// (i.e., this is called for writes that do not end up in memory).
    /// `storage_live` indicates whether this is the initial write upon `StorageLive`.
    #[inline(always)]
    fn after_local_write(
        _ecx: &mut InterpCx<'tcx, Self>,
        _local: mir::Local,
        _storage_live: bool,
    ) -> InterpResult<'tcx> {
        interp_ok(())
    }

    /// Called immediately after actual memory was allocated for a local
    /// but before the local's stack frame is updated to point to that memory.
    #[inline(always)]
    fn after_local_moved_to_memory(
        _ecx: &mut InterpCx<'tcx, Self>,
        _local: mir::Local,
        _mplace: &MPlaceTy<'tcx, Self::Provenance>,
    ) -> InterpResult<'tcx> {
        interp_ok(())
    }

    /// Returns the salt to be used for a deduplicated global allocation.
    /// If the allocation is for a function, the instance is provided as well
    /// (this lets Miri ensure unique addresses for some functions).
    fn get_global_alloc_salt(
        ecx: &InterpCx<'tcx, Self>,
        instance: Option<ty::Instance<'tcx>>,
    ) -> usize;

    /// Returns (a possibly cached) union data range for `ty`, computing it via `compute_range`
    /// on a cache miss.
    fn cached_union_data_range<'e>(
        _ecx: &'e mut InterpCx<'tcx, Self>,
        _ty: Ty<'tcx>,
        compute_range: impl FnOnce() -> RangeSet,
    ) -> Cow<'e, RangeSet> {
        // Default to no caching.
        Cow::Owned(compute_range())
    }

    /// Compute the value passed to the constructors of the `AllocBytes` type for
    /// abstract machine allocations.
    fn get_default_alloc_params(&self) -> <Self::Bytes as AllocBytes>::AllocParams;

    /// Allows enabling/disabling tracing calls from within `rustc_const_eval` at compile time, by
    /// delegating the entering of [tracing::Span]s to implementors of the [Machine] trait. The
    /// default implementation corresponds to tracing being disabled, meaning the tracing calls will
    /// supposedly be optimized out completely. To enable tracing, override this trait method and
    /// return `span.entered()`. Also see [crate::enter_trace_span].
    #[must_use]
    #[inline(always)]
    fn enter_trace_span(_span: impl FnOnce() -> tracing::Span) -> impl EnteredTraceSpan {
        // `()` implements `EnteredTraceSpan` as a no-op, so the span closure is never even called.
        ()
    }
}
621
/// A lot of the flexibility above is just needed for `Miri`, but all "compile-time" machines
/// (CTFE and ConstProp) use the same instance. Here, we share that code.
pub macro compile_time_machine(<$tcx: lifetime>) {
    type Provenance = CtfeProvenance;
    type ProvenanceExtra = bool; // the "immutable" flag

    // Compile-time machines have no extra function values; `!` makes that unrepresentable.
    type ExtraFnVal = !;

    type MemoryKind = $crate::const_eval::MemoryKind;
    type MemoryMap =
        rustc_data_structures::fx::FxIndexMap<AllocId, (MemoryKind<Self::MemoryKind>, Allocation)>;
    const GLOBAL_KIND: Option<Self::MemoryKind> = None; // no copying of globals from `tcx` to machine memory

    type AllocExtra = ();
    type FrameExtra = ();
    type Bytes = Box<[u8]>;

    #[inline(always)]
    fn ignore_optional_overflow_checks(_ecx: &InterpCx<$tcx, Self>) -> bool {
        false
    }

    #[inline(always)]
    fn unwind_terminate(
        _ecx: &mut InterpCx<$tcx, Self>,
        _reason: mir::UnwindTerminateReason,
    ) -> InterpResult<$tcx> {
        unreachable!("unwinding cannot happen during compile-time evaluation")
    }

    #[inline(always)]
    fn check_fn_target_features(
        _ecx: &InterpCx<$tcx, Self>,
        _instance: ty::Instance<$tcx>,
    ) -> InterpResult<$tcx> {
        // For now we don't do any checking here. We can't use `tcx.sess` because that can differ
        // between crates, and we need to ensure that const-eval always behaves the same.
        interp_ok(())
    }

    #[inline(always)]
    fn call_extra_fn(
        _ecx: &mut InterpCx<$tcx, Self>,
        fn_val: !,
        _abi: &FnAbi<$tcx, Ty<$tcx>>,
        _args: &[FnArg<$tcx>],
        _destination: &PlaceTy<$tcx, Self::Provenance>,
        _target: Option<mir::BasicBlock>,
        _unwind: mir::UnwindAction,
    ) -> InterpResult<$tcx> {
        // `fn_val` has type `!`, so this match is vacuously exhaustive: this can never be called.
        match fn_val {}
    }

    #[inline(always)]
    fn ub_checks(_ecx: &InterpCx<$tcx, Self>) -> InterpResult<$tcx, bool> {
        // We can't look at `tcx.sess` here as that can differ across crates, which can lead to
        // unsound differences in evaluating the same constant at different instantiation sites.
        interp_ok(true)
    }

    #[inline(always)]
    fn contract_checks(_ecx: &InterpCx<$tcx, Self>) -> InterpResult<$tcx, bool> {
        // We can't look at `tcx.sess` here as that can differ across crates, which can lead to
        // unsound differences in evaluating the same constant at different instantiation sites.
        interp_ok(true)
    }

    #[inline(always)]
    fn adjust_global_allocation<'b>(
        _ecx: &InterpCx<$tcx, Self>,
        _id: AllocId,
        alloc: &'b Allocation,
    ) -> InterpResult<$tcx, Cow<'b, Allocation<Self::Provenance>>> {
        // Overwrite default implementation: no need to adjust anything.
        interp_ok(Cow::Borrowed(alloc))
    }

    fn init_local_allocation(
        _ecx: &InterpCx<$tcx, Self>,
        _id: AllocId,
        _kind: MemoryKind<Self::MemoryKind>,
        _size: Size,
        _align: Align,
    ) -> InterpResult<$tcx, Self::AllocExtra> {
        // `AllocExtra` is `()` for compile-time machines; nothing to initialize.
        interp_ok(())
    }

    fn extern_static_pointer(
        ecx: &InterpCx<$tcx, Self>,
        def_id: DefId,
    ) -> InterpResult<$tcx, Pointer> {
        // Use the `AllocId` associated with the `DefId`. Any actual *access* will fail.
        interp_ok(Pointer::new(ecx.tcx.reserve_and_set_static_alloc(def_id).into(), Size::ZERO))
    }

    #[inline(always)]
    fn adjust_alloc_root_pointer(
        _ecx: &InterpCx<$tcx, Self>,
        ptr: Pointer<CtfeProvenance>,
        _kind: Option<MemoryKind<Self::MemoryKind>>,
    ) -> InterpResult<$tcx, Pointer<CtfeProvenance>> {
        interp_ok(ptr)
    }

    #[inline(always)]
    fn ptr_from_addr_cast(
        _ecx: &InterpCx<$tcx, Self>,
        addr: u64,
    ) -> InterpResult<$tcx, Pointer<Option<CtfeProvenance>>> {
        // Allow these casts, but make the pointer not dereferenceable.
        // (I.e., they behave like transmutation.)
        // This is correct because no pointers can ever be exposed in compile-time evaluation.
        interp_ok(Pointer::without_provenance(addr))
    }

    #[inline(always)]
    fn ptr_get_alloc(
        _ecx: &InterpCx<$tcx, Self>,
        ptr: Pointer<CtfeProvenance>,
        _size: i64,
    ) -> Option<(AllocId, Size, Self::ProvenanceExtra)> {
        // Split the pointer into its provenance and offset; the "immutable" flag of the
        // provenance becomes the `ProvenanceExtra`.
        let (prov, offset) = ptr.prov_and_relative_offset();
        Some((prov.alloc_id(), offset, prov.immutable()))
    }

    #[inline(always)]
    fn get_global_alloc_salt(
        _ecx: &InterpCx<$tcx, Self>,
        _instance: Option<ty::Instance<$tcx>>,
    ) -> usize {
        CTFE_ALLOC_SALT
    }
}