1use std::any::Any;
5use std::borrow::Cow;
6use std::cell::{Cell, RefCell};
7use std::path::Path;
8use std::rc::Rc;
9use std::{fmt, process};
10
11use rand::rngs::StdRng;
12use rand::{Rng, SeedableRng};
13use rustc_abi::{Align, ExternAbi, Size};
14use rustc_apfloat::{Float, FloatConvert};
15use rustc_data_structures::fx::{FxHashMap, FxHashSet};
16#[allow(unused)]
17use rustc_data_structures::static_assert_size;
18use rustc_hir::attrs::InlineAttr;
19use rustc_middle::middle::codegen_fn_attrs::TargetFeatureKind;
20use rustc_middle::mir;
21use rustc_middle::query::TyCtxtAt;
22use rustc_middle::ty::layout::{
23 HasTyCtxt, HasTypingEnv, LayoutCx, LayoutError, LayoutOf, TyAndLayout,
24};
25use rustc_middle::ty::{self, Instance, Ty, TyCtxt};
26use rustc_session::config::InliningThreshold;
27use rustc_span::def_id::{CrateNum, DefId};
28use rustc_span::{Span, SpanData, Symbol};
29use rustc_target::callconv::FnAbi;
30
31use crate::alloc_addresses::EvalContextExt;
32use crate::concurrency::cpu_affinity::{self, CpuAffinityMask};
33use crate::concurrency::data_race::{self, NaReadType, NaWriteType};
34use crate::concurrency::{AllocDataRaceHandler, GenmcCtx, GlobalDataRaceHandler, weak_memory};
35use crate::*;
36
// Range of real-time signal numbers Miri reports to the program.
// NOTE(review): these look like the Linux-style RT signal range after libc
// reserves the low numbers — confirm against the signal shims that use them.
pub const SIGRTMIN: i32 = 34;

pub const SIGRTMAX: i32 = 42;

// How many different base addresses an anonymous global allocation may be
// given (presumably to keep address reuse bounded — confirm in `alloc_addresses`).
const ADDRS_PER_ANON_GLOBAL: usize = 32;
/// Extra data stored with each stack frame.
pub struct FrameExtra<'tcx> {
    /// Per-frame borrow-tracker state, if the borrow tracker is enabled.
    pub borrow_tracker: Option<borrow_tracker::FrameState>,

    /// Data needed for `catch_unwind` handling; set when this frame is a
    /// `catch_unwind` frame (presumably — confirm against the unwind shims).
    pub catch_unwind: Option<CatchUnwindData<'tcx>>,

    /// Profiling timer for this frame, present when `measureme` profiling is
    /// enabled. Deliberately skipped by the `Debug` impl below.
    pub timing: Option<measureme::DetachedTiming>,

    /// Whether this frame counts as "user-relevant" code.
    /// NOTE(review): the exact criterion is determined elsewhere; not set in this file.
    pub is_user_relevant: bool,

    /// Per-frame data-race-detector state, if data-race detection is enabled.
    pub data_race: Option<data_race::FrameState>,
}
76
77impl<'tcx> std::fmt::Debug for FrameExtra<'tcx> {
78 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
79 let FrameExtra { borrow_tracker, catch_unwind, timing: _, is_user_relevant, data_race } =
81 self;
82 f.debug_struct("FrameData")
83 .field("borrow_tracker", borrow_tracker)
84 .field("catch_unwind", catch_unwind)
85 .field("is_user_relevant", is_user_relevant)
86 .field("data_race", data_race)
87 .finish()
88 }
89}
90
91impl VisitProvenance for FrameExtra<'_> {
92 fn visit_provenance(&self, visit: &mut VisitWith<'_>) {
93 let FrameExtra {
94 catch_unwind,
95 borrow_tracker,
96 timing: _,
97 is_user_relevant: _,
98 data_race: _,
99 } = self;
100
101 catch_unwind.visit_provenance(visit);
102 borrow_tracker.visit_provenance(visit);
103 }
104}
105
/// Extra memory kinds Miri uses on top of the interpreter's built-in ones.
/// (Descriptions below mirror the `Display` impl further down.)
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum MiriMemoryKind {
    /// Rust heap memory.
    Rust,
    /// Miri's "bare-metal heap" (see `Display` below).
    Miri,
    /// C heap memory (e.g. `malloc`).
    C,
    /// Windows heap memory (`HeapAlloc`-style — presumably; see shims).
    WinHeap,
    /// Windows "local" memory (`LocalAlloc`-style — presumably; see shims).
    WinLocal,
    /// Machine-managed memory.
    Machine,
    /// Memory allocated by the language runtime.
    Runtime,
    /// Globals (statics or consts).
    Global,
    /// Memory backing an extern static shim.
    ExternStatic,
    /// Memory backing a thread-local static.
    Tls,
    /// Memory obtained via `mmap`.
    Mmap,
}
137
138impl From<MiriMemoryKind> for MemoryKind {
139 #[inline(always)]
140 fn from(kind: MiriMemoryKind) -> MemoryKind {
141 MemoryKind::Machine(kind)
142 }
143}
144
145impl MayLeak for MiriMemoryKind {
146 #[inline(always)]
147 fn may_leak(self) -> bool {
148 use self::MiriMemoryKind::*;
149 match self {
150 Rust | Miri | C | WinHeap | WinLocal | Runtime => false,
151 Machine | Global | ExternStatic | Tls | Mmap => true,
152 }
153 }
154}
155
156impl MiriMemoryKind {
157 fn should_save_allocation_span(self) -> bool {
159 use self::MiriMemoryKind::*;
160 match self {
161 Rust | Miri | C | WinHeap | WinLocal | Mmap => true,
163 Machine | Global | ExternStatic | Tls | Runtime => false,
165 }
166 }
167}
168
169impl fmt::Display for MiriMemoryKind {
170 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
171 use self::MiriMemoryKind::*;
172 match self {
173 Rust => write!(f, "Rust heap"),
174 Miri => write!(f, "Miri bare-metal heap"),
175 C => write!(f, "C heap"),
176 WinHeap => write!(f, "Windows heap"),
177 WinLocal => write!(f, "Windows local memory"),
178 Machine => write!(f, "machine-managed memory"),
179 Runtime => write!(f, "language runtime memory"),
180 Global => write!(f, "global (static or const)"),
181 ExternStatic => write!(f, "extern static"),
182 Tls => write!(f, "thread-local static"),
183 Mmap => write!(f, "mmap"),
184 }
185 }
186}
187
/// The interpreter's memory kind, extended with Miri's extra kinds.
pub type MemoryKind = interpret::MemoryKind<MiriMemoryKind>;
189
/// Pointer provenance as tracked by Miri.
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub enum Provenance {
    /// Provenance of a pointer into a concrete allocation.
    Concrete {
        alloc_id: AllocId,
        /// Borrow-tracker tag for this pointer.
        tag: BorTag,
    },
    /// Wildcard provenance: the allocation is unknown (used e.g. for pointers
    /// produced by int-to-ptr casts — see `ptr_from_addr_cast`/`WILDCARD` below).
    Wildcard,
}
222
/// The "extra" part of the provenance that accompanies a memory access once
/// the allocation is already known: the borrow tag, or wildcard.
#[derive(Copy, Clone, PartialEq)]
pub enum ProvenanceExtra {
    Concrete(BorTag),
    Wildcard,
}
229
// Guard against accidental size growth: pointers and scalars are stored in
// large numbers, so their size directly affects interpreter memory usage.
#[cfg(target_pointer_width = "64")]
static_assert_size!(StrictPointer, 24);
#[cfg(target_pointer_width = "64")]
static_assert_size!(Scalar, 32);
237
238impl fmt::Debug for Provenance {
239 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
240 match self {
241 Provenance::Concrete { alloc_id, tag } => {
242 if f.alternate() {
244 write!(f, "[{alloc_id:#?}]")?;
245 } else {
246 write!(f, "[{alloc_id:?}]")?;
247 }
248 write!(f, "{tag:?}")?;
250 }
251 Provenance::Wildcard => {
252 write!(f, "[wildcard]")?;
253 }
254 }
255 Ok(())
256 }
257}
258
259impl interpret::Provenance for Provenance {
260 const OFFSET_IS_ADDR: bool = true;
262
263 const WILDCARD: Option<Self> = Some(Provenance::Wildcard);
265
266 fn get_alloc_id(self) -> Option<AllocId> {
267 match self {
268 Provenance::Concrete { alloc_id, .. } => Some(alloc_id),
269 Provenance::Wildcard => None,
270 }
271 }
272
273 fn fmt(ptr: &interpret::Pointer<Self>, f: &mut fmt::Formatter<'_>) -> fmt::Result {
274 let (prov, addr) = ptr.into_raw_parts(); write!(f, "{:#x}", addr.bytes())?;
276 if f.alternate() {
277 write!(f, "{prov:#?}")?;
278 } else {
279 write!(f, "{prov:?}")?;
280 }
281 Ok(())
282 }
283
284 fn join(left: Self, right: Self) -> Option<Self> {
285 match (left, right) {
286 (
288 Provenance::Concrete { alloc_id: left_alloc, tag: left_tag },
289 Provenance::Concrete { alloc_id: right_alloc, tag: right_tag },
290 ) if left_alloc == right_alloc && left_tag == right_tag => Some(left),
291 (Provenance::Wildcard, o) | (o, Provenance::Wildcard) => Some(o),
294 _ => None,
296 }
297 }
298}
299
300impl fmt::Debug for ProvenanceExtra {
301 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
302 match self {
303 ProvenanceExtra::Concrete(pid) => write!(f, "{pid:?}"),
304 ProvenanceExtra::Wildcard => write!(f, "<wildcard>"),
305 }
306 }
307}
308
309impl ProvenanceExtra {
310 pub fn and_then<T>(self, f: impl FnOnce(BorTag) -> Option<T>) -> Option<T> {
311 match self {
312 ProvenanceExtra::Concrete(pid) => f(pid),
313 ProvenanceExtra::Wildcard => None,
314 }
315 }
316}
317
/// Extra per-allocation data Miri attaches to every allocation.
#[derive(Debug)]
pub struct AllocExtra<'tcx> {
    /// Borrow-tracker state for this allocation, if enabled.
    pub borrow_tracker: Option<borrow_tracker::AllocState>,
    /// Data-race detection state for this allocation.
    pub data_race: AllocDataRaceHandler,
    /// Where this allocation was created; only recorded when leak backtraces
    /// are collected and the kind is not leak-exempt (see `init_allocation`).
    pub backtrace: Option<Vec<FrameInfo<'tcx>>>,
    /// Type-erased synchronization-object state, keyed by offset within the
    /// allocation (presumably downcast by the sync shims — TODO confirm).
    pub sync: FxHashMap<Size, Box<dyn Any>>,
}
338
impl<'tcx> Clone for AllocExtra<'tcx> {
    // NOTE(review): a `Clone` impl presumably has to exist to satisfy a bound
    // elsewhere, but actually cloning Miri's allocations would be a bug —
    // hence the unconditional panic.
    fn clone(&self) -> Self {
        panic!("our allocations should never be cloned");
    }
}
346
impl VisitProvenance for AllocExtra<'_> {
    /// Reports all provenance held by this allocation's extra data.
    fn visit_provenance(&self, visit: &mut VisitWith<'_>) {
        // Exhaustive destructuring: adding a field forces a decision here.
        let AllocExtra { borrow_tracker, data_race, backtrace: _, sync: _ } = self;

        borrow_tracker.visit_provenance(visit);
        data_race.visit_provenance(visit);
    }
}
355
/// Precomputed layouts of primitive types that are needed frequently,
/// so they do not have to be recomputed via layout queries each time.
pub struct PrimitiveLayouts<'tcx> {
    pub unit: TyAndLayout<'tcx>,
    pub i8: TyAndLayout<'tcx>,
    pub i16: TyAndLayout<'tcx>,
    pub i32: TyAndLayout<'tcx>,
    pub i64: TyAndLayout<'tcx>,
    pub i128: TyAndLayout<'tcx>,
    pub isize: TyAndLayout<'tcx>,
    pub u8: TyAndLayout<'tcx>,
    pub u16: TyAndLayout<'tcx>,
    pub u32: TyAndLayout<'tcx>,
    pub u64: TyAndLayout<'tcx>,
    pub u128: TyAndLayout<'tcx>,
    pub usize: TyAndLayout<'tcx>,
    pub bool: TyAndLayout<'tcx>,
    // Layout of `*mut ()` (built in `new` from `Ty::new_mut_ptr`).
    pub mut_raw_ptr: TyAndLayout<'tcx>,
    // Layout of `*const ()` (built in `new` from `Ty::new_imm_ptr`).
    pub const_raw_ptr: TyAndLayout<'tcx>,
}
375
376impl<'tcx> PrimitiveLayouts<'tcx> {
377 fn new(layout_cx: LayoutCx<'tcx>) -> Result<Self, &'tcx LayoutError<'tcx>> {
378 let tcx = layout_cx.tcx();
379 let mut_raw_ptr = Ty::new_mut_ptr(tcx, tcx.types.unit);
380 let const_raw_ptr = Ty::new_imm_ptr(tcx, tcx.types.unit);
381 Ok(Self {
382 unit: layout_cx.layout_of(tcx.types.unit)?,
383 i8: layout_cx.layout_of(tcx.types.i8)?,
384 i16: layout_cx.layout_of(tcx.types.i16)?,
385 i32: layout_cx.layout_of(tcx.types.i32)?,
386 i64: layout_cx.layout_of(tcx.types.i64)?,
387 i128: layout_cx.layout_of(tcx.types.i128)?,
388 isize: layout_cx.layout_of(tcx.types.isize)?,
389 u8: layout_cx.layout_of(tcx.types.u8)?,
390 u16: layout_cx.layout_of(tcx.types.u16)?,
391 u32: layout_cx.layout_of(tcx.types.u32)?,
392 u64: layout_cx.layout_of(tcx.types.u64)?,
393 u128: layout_cx.layout_of(tcx.types.u128)?,
394 usize: layout_cx.layout_of(tcx.types.usize)?,
395 bool: layout_cx.layout_of(tcx.types.bool)?,
396 mut_raw_ptr: layout_cx.layout_of(mut_raw_ptr)?,
397 const_raw_ptr: layout_cx.layout_of(const_raw_ptr)?,
398 })
399 }
400
401 pub fn uint(&self, size: Size) -> Option<TyAndLayout<'tcx>> {
402 match size.bits() {
403 8 => Some(self.u8),
404 16 => Some(self.u16),
405 32 => Some(self.u32),
406 64 => Some(self.u64),
407 128 => Some(self.u128),
408 _ => None,
409 }
410 }
411
412 pub fn int(&self, size: Size) -> Option<TyAndLayout<'tcx>> {
413 match size.bits() {
414 8 => Some(self.i8),
415 16 => Some(self.i16),
416 32 => Some(self.i32),
417 64 => Some(self.i64),
418 128 => Some(self.i128),
419 _ => None,
420 }
421 }
422}
423
/// The machine itself: all global state Miri keeps for an interpretation run.
pub struct MiriMachine<'tcx> {
    /// The compiler's type context.
    pub tcx: TyCtxt<'tcx>,

    /// Global borrow-tracker state, if enabled.
    pub borrow_tracker: Option<borrow_tracker::GlobalState>,

    /// Global data-race handling (vector clocks, GenMC, or none).
    pub data_race: GlobalDataRaceHandler,

    /// Global state mapping allocations to concrete addresses.
    pub alloc_addresses: alloc_addresses::GlobalState,

    /// Emulated environment variables.
    pub(crate) env_vars: EnvVars<'tcx>,

    /// Return place of the main function (set up during program start).
    pub(crate) main_fn_ret_place: Option<MPlaceTy<'tcx>>,

    /// Program arguments (`argc`/`argv`) and the Windows-style command line,
    /// once allocated.
    pub(crate) argc: Option<Pointer>,
    pub(crate) argv: Option<Pointer>,
    pub(crate) cmd_line: Option<Pointer>,

    /// Thread-local storage data.
    pub(crate) tls: TlsData<'tcx>,

    /// Whether operations that interact with the host are allowed (isolation).
    pub(crate) isolated_op: IsolatedOp,

    /// How thoroughly values are validity-checked.
    pub(crate) validation: ValidationMode,

    /// Emulated file descriptors and directory handles.
    pub(crate) fds: shims::FdTable,
    pub(crate) dirs: shims::DirTable,

    /// State for the `epoll` shims.
    pub(crate) epoll_interests: shims::EpollInterestTable,

    /// The emulated monotonic clock.
    pub(crate) monotonic_clock: MonotonicClock,

    /// All emulated threads and their stacks.
    pub(crate) threads: ThreadManager<'tcx>,

    /// Per-thread CPU affinity masks (only populated on OSes with affinity shims).
    pub(crate) thread_cpu_affinity: FxHashMap<ThreadId, CpuAffinityMask>,

    /// Precomputed layouts of primitive types.
    pub(crate) layouts: PrimitiveLayouts<'tcx>,

    /// Allocations that act as GC roots in addition to the stacks.
    pub(crate) static_roots: Vec<AllocId>,

    /// `measureme` profiler and its interned-string cache, if profiling is enabled.
    profiler: Option<measureme::Profiler>,
    string_cache: FxHashMap<String, measureme::StringId>,

    /// Cache of symbol-name → instance lookups for exported symbols.
    pub(crate) exported_symbols_cache: FxHashMap<Symbol, Option<Instance<'tcx>>>,

    /// How backtraces are rendered in diagnostics.
    pub(crate) backtrace_style: BacktraceStyle,

    /// Crates considered "local" (user code) for diagnostics.
    pub(crate) local_crates: Vec<CrateNum>,

    /// Pointers for the extern-static shims, keyed by link name.
    extern_statics: FxHashMap<Symbol, StrictPointer>,

    /// The RNG driving all of Miri's nondeterminism (seeded from the config).
    pub(crate) rng: RefCell<StdRng>,

    /// Separate allocator used when native libraries are loaded
    /// (only created when `config.native_lib` is non-empty).
    pub(crate) allocator: Option<Rc<RefCell<crate::alloc::isolated_alloc::IsolatedAlloc>>>,

    /// Allocation ids whose creation (and accesses, if enabled) get reported.
    tracked_alloc_ids: FxHashSet<AllocId>,
    track_alloc_accesses: bool,

    /// Which alignment checking mode is in effect.
    pub(crate) check_alignment: AlignmentCheck,

    /// Probability that a spurious failure is injected into weak `compare_exchange`.
    pub(crate) cmpxchg_weak_failure_rate: f64,

    /// Probability of preempting the active thread at a preemption point.
    pub(crate) preemption_rate: f64,

    /// If set, report progress every N basic blocks (counted below).
    pub(crate) report_progress: Option<u32>,
    pub(crate) basic_block_count: u64,

    /// Loaded native libraries and their paths; on builds without native-lib
    /// support this is an uninhabited-element vector that must stay empty.
    #[cfg(all(unix, feature = "native-lib"))]
    pub native_lib: Vec<(libloading::Library, std::path::PathBuf)>,
    #[cfg(not(all(unix, feature = "native-lib")))]
    pub native_lib: Vec<!>,

    /// Run the provenance GC every `gc_interval` basic blocks; `since_gc`
    /// counts blocks since the last run.
    pub(crate) gc_interval: u32,
    pub(crate) since_gc: u32,

    /// Number of CPUs the machine reports.
    pub(crate) num_cpus: u32,

    /// Emulated page size and the (fake) base address/size reported for the stack.
    pub(crate) page_size: u64,
    pub(crate) stack_addr: u64,
    pub(crate) stack_size: u64,

    /// Whether to record backtraces for allocations, for leak reports.
    pub(crate) collect_leak_backtraces: bool,

    /// Allocation span and (optional) deallocation span per tracked allocation.
    pub(crate) allocation_spans: RefCell<FxHashMap<AllocId, (Span, Option<Span>)>>,

    /// Alignment promised via symbolic alignment (`(offset, align)` per allocation).
    pub(crate) symbolic_alignment: RefCell<FxHashMap<AllocId, (Size, Align)>>,

    /// Cached union data ranges, keyed by type.
    union_data_ranges: FxHashMap<Ty<'tcx>, RangeSet>,

    /// One-shot sanity flags for the pthread sync shims.
    pub(crate) pthread_mutex_sanity: Cell<bool>,
    pub(crate) pthread_rwlock_sanity: Cell<bool>,
    pub(crate) pthread_condvar_sanity: Cell<bool>,

    /// De-duplication state for various one-time warnings.
    pub(crate) sb_extern_type_warned: Cell<bool>,
    #[allow(unused)]
    pub(crate) native_call_mem_warned: Cell<bool>,
    pub(crate) reject_in_isolation_warned: RefCell<FxHashSet<String>>,
    pub(crate) int2ptr_warned: RefCell<FxHashSet<Span>>,

    /// Cache for mangled internal symbol names.
    pub(crate) mangle_internal_symbol_cache: FxHashMap<&'static str, String>,

    /// Force use of the fallback bodies for intrinsics where available.
    pub force_intrinsic_fallback: bool,

    /// Float-nondeterminism knobs (see `apply_float_nondet`).
    pub float_nondet: bool,
    pub float_rounding_error: bool,
}
604
impl<'tcx> MiriMachine<'tcx> {
    /// Creates a fresh machine from the given configuration.
    pub(crate) fn new(
        config: &MiriConfig,
        layout_cx: LayoutCx<'tcx>,
        genmc_ctx: Option<Rc<GenmcCtx>>,
    ) -> Self {
        let tcx = layout_cx.tcx();
        let local_crates = helpers::get_local_crates(tcx);
        let layouts =
            PrimitiveLayouts::new(layout_cx).expect("Couldn't get layouts of primitive types");
        // Set up the `measureme` profiler, writing to the configured directory.
        let profiler = config.measureme_out.as_ref().map(|out| {
            let crate_name =
                tcx.sess.opts.crate_name.clone().unwrap_or_else(|| "unknown-crate".to_string());
            let pid = process::id();
            // Zero-padded pid so profile file names have a uniform width.
            let filename = format!("{crate_name}-{pid:07}");
            let path = Path::new(out).join(filename);
            measureme::Profiler::new(path).expect("Couldn't create `measureme` profiler")
        });
        // Seeded RNG: all nondeterminism is reproducible given the same seed.
        let rng = StdRng::seed_from_u64(config.seed.unwrap_or(0));
        let borrow_tracker = config.borrow_tracker.map(|bt| bt.instantiate_global_state(config));
        // GenMC mode takes precedence over the vector-clock data-race detector.
        let data_race = if config.genmc_config.is_some() {
            GlobalDataRaceHandler::Genmc(genmc_ctx.unwrap())
        } else if config.data_race_detector {
            GlobalDataRaceHandler::Vclocks(Box::new(data_race::GlobalState::new(config)))
        } else {
            GlobalDataRaceHandler::None
        };
        // Pick a target-appropriate page size unless one was configured.
        let page_size = if let Some(page_size) = config.page_size {
            page_size
        } else {
            let target = &tcx.sess.target;
            match target.arch.as_ref() {
                "wasm32" | "wasm64" => 64 * 1024,
                "aarch64" => {
                    if target.options.vendor.as_ref() == "apple" {
                        16 * 1024
                    } else {
                        4 * 1024
                    }
                }
                _ => 4 * 1024,
            }
        };
        // Fake stack base/size, scaled down on sub-32-bit address spaces.
        let stack_addr = if tcx.pointer_size().bits() < 32 { page_size } else { page_size * 32 };
        let stack_size =
            if tcx.pointer_size().bits() < 32 { page_size * 4 } else { page_size * 16 };
        assert!(
            usize::try_from(config.num_cpus).unwrap() <= cpu_affinity::MAX_CPUS,
            "miri only supports up to {} CPUs, but {} were configured",
            cpu_affinity::MAX_CPUS,
            config.num_cpus
        );
        let threads = ThreadManager::new(config);
        let mut thread_cpu_affinity = FxHashMap::default();
        // CPU-affinity shims only exist for these OSes; give the initial
        // thread a full mask there.
        if matches!(&*tcx.sess.target.os, "linux" | "freebsd" | "android") {
            thread_cpu_affinity
                .insert(threads.active_thread(), CpuAffinityMask::new(&layout_cx, config.num_cpus));
        }
        MiriMachine {
            tcx,
            borrow_tracker,
            data_race,
            alloc_addresses: RefCell::new(alloc_addresses::GlobalStateInner::new(config, stack_addr)),
            env_vars: EnvVars::default(),
            main_fn_ret_place: None,
            argc: None,
            argv: None,
            cmd_line: None,
            tls: TlsData::default(),
            isolated_op: config.isolated_op,
            validation: config.validation,
            fds: shims::FdTable::init(config.mute_stdout_stderr),
            epoll_interests: shims::EpollInterestTable::new(),
            dirs: Default::default(),
            layouts,
            threads,
            thread_cpu_affinity,
            static_roots: Vec::new(),
            profiler,
            string_cache: Default::default(),
            exported_symbols_cache: FxHashMap::default(),
            backtrace_style: config.backtrace_style,
            local_crates,
            extern_statics: FxHashMap::default(),
            rng: RefCell::new(rng),
            // The isolated allocator is only needed when native libs are in play.
            allocator: if !config.native_lib.is_empty() {
                Some(Rc::new(RefCell::new(crate::alloc::isolated_alloc::IsolatedAlloc::new())))
            } else { None },
            tracked_alloc_ids: config.tracked_alloc_ids.clone(),
            track_alloc_accesses: config.track_alloc_accesses,
            check_alignment: config.check_alignment,
            cmpxchg_weak_failure_rate: config.cmpxchg_weak_failure_rate,
            preemption_rate: config.preemption_rate,
            report_progress: config.report_progress,
            basic_block_count: 0,
            monotonic_clock: MonotonicClock::new(config.isolated_op == IsolatedOp::Allow),
            #[cfg(all(unix, feature = "native-lib"))]
            native_lib: config.native_lib.iter().map(|lib_file_path| {
                // Loading a host library only makes sense when host == target.
                let host_triple = rustc_session::config::host_tuple();
                let target_triple = tcx.sess.opts.target_triple.tuple();
                if host_triple != target_triple {
                    panic!(
                        "calling native C functions in linked .so file requires host and target to be the same: \
                        host={host_triple}, target={target_triple}",
                    );
                }
                (
                    // SAFETY-relevant: loading a shared object runs its initializers.
                    unsafe {
                        libloading::Library::new(lib_file_path)
                            .expect("failed to read specified extern shared object file")
                    },
                    lib_file_path.clone(),
                )
            }).collect(),
            #[cfg(not(all(unix, feature = "native-lib")))]
            native_lib: config.native_lib.iter().map(|_| {
                panic!("calling functions from native libraries via FFI is not supported in this build of Miri")
            }).collect(),
            gc_interval: config.gc_interval,
            since_gc: 0,
            num_cpus: config.num_cpus,
            page_size,
            stack_addr,
            stack_size,
            collect_leak_backtraces: config.collect_leak_backtraces,
            allocation_spans: RefCell::new(FxHashMap::default()),
            symbolic_alignment: RefCell::new(FxHashMap::default()),
            union_data_ranges: FxHashMap::default(),
            pthread_mutex_sanity: Cell::new(false),
            pthread_rwlock_sanity: Cell::new(false),
            pthread_condvar_sanity: Cell::new(false),
            sb_extern_type_warned: Cell::new(false),
            native_call_mem_warned: Cell::new(false),
            reject_in_isolation_warned: Default::default(),
            int2ptr_warned: Default::default(),
            mangle_internal_symbol_cache: Default::default(),
            force_intrinsic_fallback: config.force_intrinsic_fallback,
            float_nondet: config.float_nondet,
            float_rounding_error: config.float_rounding_error,
        }
    }

    /// Initialization steps that need a fully-built `InterpCx`
    /// (environment, extern statics, main thread).
    pub(crate) fn late_init(
        ecx: &mut MiriInterpCx<'tcx>,
        config: &MiriConfig,
        on_main_stack_empty: StackEmptyCallback<'tcx>,
    ) -> InterpResult<'tcx> {
        EnvVars::init(ecx, config)?;
        MiriMachine::init_extern_statics(ecx)?;
        ThreadManager::init(ecx, on_main_stack_empty);
        interp_ok(())
    }

    /// Registers the shim allocation for the extern static `name`.
    /// Panics on duplicate registration or on a pointer without provenance.
    pub(crate) fn add_extern_static(ecx: &mut MiriInterpCx<'tcx>, name: &str, ptr: Pointer) {
        // Freshly allocated shims always carry provenance, so this cannot fail.
        let ptr = ptr.into_pointer_or_addr().unwrap();
        ecx.machine.extern_statics.try_insert(Symbol::intern(name), ptr).unwrap();
    }

    /// Whether the program may interact with the host (isolation disabled).
    pub(crate) fn communicate(&self) -> bool {
        self.isolated_op == IsolatedOp::Allow
    }

    /// Whether `frame` belongs to user code (the local crate or one of the
    /// crates the user is compiling).
    pub(crate) fn is_local(&self, frame: &FrameInfo<'_>) -> bool {
        let def_id = frame.instance.def_id();
        def_id.is_local() || self.local_crates.contains(&def_id.krate)
    }

    /// Called on abnormal termination: drop the profiler so its data is
    /// finalized (presumably flushed to disk on drop — see `measureme`).
    pub(crate) fn handle_abnormal_termination(&mut self) {
        drop(self.profiler.take());
    }

    /// The alignment corresponding to the emulated page size.
    pub(crate) fn page_align(&self) -> Align {
        Align::from_bytes(self.page_size).unwrap()
    }

    /// Span where `alloc_id` was allocated, if its span was recorded.
    pub(crate) fn allocated_span(&self, alloc_id: AllocId) -> Option<SpanData> {
        self.allocation_spans
            .borrow()
            .get(&alloc_id)
            .map(|(allocated, _deallocated)| allocated.data())
    }

    /// Span where `alloc_id` was deallocated, if that happened and was recorded.
    pub(crate) fn deallocated_span(&self, alloc_id: AllocId) -> Option<SpanData> {
        self.allocation_spans
            .borrow()
            .get(&alloc_id)
            .and_then(|(_allocated, deallocated)| *deallocated)
            .map(Span::data)
    }

    /// Builds the machine-specific metadata (`AllocExtra`) for a new allocation
    /// and records tracking diagnostics/spans as configured.
    fn init_allocation(
        ecx: &MiriInterpCx<'tcx>,
        id: AllocId,
        kind: MemoryKind,
        size: Size,
        align: Align,
    ) -> InterpResult<'tcx, AllocExtra<'tcx>> {
        // Report creation of explicitly tracked allocations.
        if ecx.machine.tracked_alloc_ids.contains(&id) {
            ecx.emit_diagnostic(NonHaltingDiagnostic::CreatedAlloc(id, size, align, kind));
        }

        let borrow_tracker = ecx
            .machine
            .borrow_tracker
            .as_ref()
            .map(|bt| bt.borrow_mut().new_allocation(id, size, kind, &ecx.machine));

        let data_race = match &ecx.machine.data_race {
            GlobalDataRaceHandler::None => AllocDataRaceHandler::None,
            GlobalDataRaceHandler::Vclocks(data_race) =>
                AllocDataRaceHandler::Vclocks(
                    data_race::AllocState::new_allocation(
                        data_race,
                        &ecx.machine.threads,
                        size,
                        kind,
                        ecx.machine.current_span(),
                    ),
                    // Weak-memory tracking piggybacks on the vclock detector.
                    data_race.weak_memory.then(weak_memory::AllocState::new_allocation),
                ),
            GlobalDataRaceHandler::Genmc(_genmc_ctx) => {
                AllocDataRaceHandler::Genmc
            }
        };

        // Only record a creation backtrace when it could show up in a leak
        // report: leak-exempt kinds never need one.
        let backtrace = if kind.may_leak() || !ecx.machine.collect_leak_backtraces {
            None
        } else {
            Some(ecx.generate_stacktrace())
        };

        if matches!(kind, MemoryKind::Machine(kind) if kind.should_save_allocation_span()) {
            ecx.machine
                .allocation_spans
                .borrow_mut()
                .insert(id, (ecx.machine.current_span(), None));
        }

        interp_ok(AllocExtra { borrow_tracker, data_race, backtrace, sync: FxHashMap::default() })
    }
}
878
impl VisitProvenance for MiriMachine<'_> {
    /// Reports all provenance reachable from the machine's global state
    /// (used by the provenance GC). The destructuring is exhaustive so that
    /// adding a field forces a decision about whether it can hold provenance.
    fn visit_provenance(&self, visit: &mut VisitWith<'_>) {
        #[rustfmt::skip]
        let MiriMachine {
            threads,
            thread_cpu_affinity: _,
            tls,
            env_vars,
            main_fn_ret_place,
            argc,
            argv,
            cmd_line,
            extern_statics,
            dirs,
            borrow_tracker,
            data_race,
            alloc_addresses,
            fds,
            epoll_interests: _,
            tcx: _,
            isolated_op: _,
            validation: _,
            monotonic_clock: _,
            layouts: _,
            static_roots: _,
            profiler: _,
            string_cache: _,
            exported_symbols_cache: _,
            backtrace_style: _,
            local_crates: _,
            rng: _,
            allocator: _,
            tracked_alloc_ids: _,
            track_alloc_accesses: _,
            check_alignment: _,
            cmpxchg_weak_failure_rate: _,
            preemption_rate: _,
            report_progress: _,
            basic_block_count: _,
            native_lib: _,
            gc_interval: _,
            since_gc: _,
            num_cpus: _,
            page_size: _,
            stack_addr: _,
            stack_size: _,
            collect_leak_backtraces: _,
            allocation_spans: _,
            symbolic_alignment: _,
            union_data_ranges: _,
            pthread_mutex_sanity: _,
            pthread_rwlock_sanity: _,
            pthread_condvar_sanity: _,
            sb_extern_type_warned: _,
            native_call_mem_warned: _,
            reject_in_isolation_warned: _,
            int2ptr_warned: _,
            mangle_internal_symbol_cache: _,
            force_intrinsic_fallback: _,
            float_nondet: _,
            float_rounding_error: _,
        } = self;

        threads.visit_provenance(visit);
        tls.visit_provenance(visit);
        env_vars.visit_provenance(visit);
        dirs.visit_provenance(visit);
        fds.visit_provenance(visit);
        data_race.visit_provenance(visit);
        borrow_tracker.visit_provenance(visit);
        alloc_addresses.visit_provenance(visit);
        main_fn_ret_place.visit_provenance(visit);
        argc.visit_provenance(visit);
        argv.visit_provenance(visit);
        cmd_line.visit_provenance(visit);
        for ptr in extern_statics.values() {
            ptr.visit_provenance(visit);
        }
    }
}
959
/// The interpreter context, instantiated with Miri's machine.
pub type MiriInterpCx<'tcx> = InterpCx<'tcx, MiriMachine<'tcx>>;

/// Helper trait so extension traits can uniformly get at the interpreter
/// context by shared or mutable reference.
pub trait MiriInterpCxExt<'tcx> {
    fn eval_context_ref<'a>(&'a self) -> &'a MiriInterpCx<'tcx>;
    fn eval_context_mut<'a>(&'a mut self) -> &'a mut MiriInterpCx<'tcx>;
}
impl<'tcx> MiriInterpCxExt<'tcx> for MiriInterpCx<'tcx> {
    #[inline(always)]
    fn eval_context_ref(&self) -> &MiriInterpCx<'tcx> {
        self
    }
    #[inline(always)]
    fn eval_context_mut(&mut self) -> &mut MiriInterpCx<'tcx> {
        self
    }
}
978
979impl<'tcx> Machine<'tcx> for MiriMachine<'tcx> {
    type MemoryKind = MiriMemoryKind;
    // "Extra" function values: symbols without a body, dispatched via `call_extra_fn`.
    type ExtraFnVal = DynSym;

    type FrameExtra = FrameExtra<'tcx>;
    type AllocExtra = AllocExtra<'tcx>;

    type Provenance = Provenance;
    type ProvenanceExtra = ProvenanceExtra;
    type Bytes = MiriAllocBytes;

    // Memory map keyed by allocation id; `MonoHashMap` semantics are defined
    // in its own module (NOTE(review): presumably allows access to values
    // while the map is borrowed — confirm there).
    type MemoryMap =
        MonoHashMap<AllocId, (MemoryKind, Allocation<Provenance, Self::AllocExtra, Self::Bytes>)>;

    // Globals are allocated with this kind.
    const GLOBAL_KIND: Option<MiriMemoryKind> = Some(MiriMemoryKind::Global);

    // Allocation failures surface as interpreter errors, not ICEs.
    const PANIC_ON_ALLOC_FAIL: bool = false;
997
    // Alignment is enforced whenever any alignment-check mode is enabled.
    #[inline(always)]
    fn enforce_alignment(ecx: &MiriInterpCx<'tcx>) -> bool {
        ecx.machine.check_alignment != AlignmentCheck::None
    }
1002
    /// Extra alignment check for "symbolic" alignment mode: a pointer is
    /// considered aligned based on promises made via `symbolic_alignment`
    /// rather than its concrete address.
    #[inline(always)]
    fn alignment_check(
        ecx: &MiriInterpCx<'tcx>,
        alloc_id: AllocId,
        alloc_align: Align,
        alloc_kind: AllocKind,
        offset: Size,
        align: Align,
    ) -> Option<Misalignment> {
        // Only active in symbolic mode; otherwise the default check applies.
        if ecx.machine.check_alignment != AlignmentCheck::Symbolic {
            return None;
        }
        // Only live data has meaningful alignment here.
        if alloc_kind != AllocKind::LiveData {
            return None;
        }
        // Default promise: offset 0 is aligned to the allocation's alignment.
        let (promised_offset, promised_align) = ecx
            .machine
            .symbolic_alignment
            .borrow()
            .get(&alloc_id)
            .copied()
            .unwrap_or((Size::ZERO, alloc_align));
        if promised_align < align {
            // The allocation as a whole is not promised to be aligned enough.
            Some(Misalignment { has: promised_align, required: align })
        } else {
            // The allocation is promised sufficiently aligned at `promised_offset`;
            // check the distance from that promised-aligned point.
            let distance = offset.bytes().wrapping_sub(promised_offset.bytes());
            if distance.is_multiple_of(align.bytes()) {
                None
            } else {
                // `distance != 0` here (0 is a multiple of anything), so
                // `trailing_zeros` yields the largest power of two dividing it.
                let distance_pow2 = 1 << distance.trailing_zeros();
                Some(Misalignment {
                    has: Align::from_bytes(distance_pow2).unwrap(),
                    required: align,
                })
            }
        }
    }
1048
    // Validity checking runs in any mode except `No`.
    #[inline(always)]
    fn enforce_validity(ecx: &MiriInterpCx<'tcx>, _layout: TyAndLayout<'tcx>) -> bool {
        ecx.machine.validation != ValidationMode::No
    }
    // Recursing through references only happens in `Deep` mode.
    #[inline(always)]
    fn enforce_validity_recursively(
        ecx: &InterpCx<'tcx, Self>,
        _layout: TyAndLayout<'tcx>,
    ) -> bool {
        ecx.machine.validation == ValidationMode::Deep
    }

    // Mirror the compiled program: skip optional overflow checks iff the
    // session has them disabled.
    #[inline(always)]
    fn ignore_optional_overflow_checks(ecx: &MiriInterpCx<'tcx>) -> bool {
        !ecx.tcx.sess.overflow_checks()
    }
1065
1066 fn check_fn_target_features(
1067 ecx: &MiriInterpCx<'tcx>,
1068 instance: ty::Instance<'tcx>,
1069 ) -> InterpResult<'tcx> {
1070 let attrs = ecx.tcx.codegen_instance_attrs(instance.def);
1071 if attrs
1072 .target_features
1073 .iter()
1074 .any(|feature| !ecx.tcx.sess.target_features.contains(&feature.name))
1075 {
1076 let unavailable = attrs
1077 .target_features
1078 .iter()
1079 .filter(|&feature| {
1080 feature.kind != TargetFeatureKind::Implied && !ecx.tcx.sess.target_features.contains(&feature.name)
1081 })
1082 .fold(String::new(), |mut s, feature| {
1083 if !s.is_empty() {
1084 s.push_str(", ");
1085 }
1086 s.push_str(feature.name.as_str());
1087 s
1088 });
1089 let msg = format!(
1090 "calling a function that requires unavailable target features: {unavailable}"
1091 );
1092 if ecx.tcx.sess.target.is_like_wasm {
1095 throw_machine_stop!(TerminationInfo::Abort(msg));
1096 } else {
1097 throw_ub_format!("{msg}");
1098 }
1099 }
1100 interp_ok(())
1101 }
1102
    /// Resolves a function call: foreign items are emulated via shims,
    /// everything else has its MIR loaded and executed.
    #[inline(always)]
    fn find_mir_or_eval_fn(
        ecx: &mut MiriInterpCx<'tcx>,
        instance: ty::Instance<'tcx>,
        abi: &FnAbi<'tcx, Ty<'tcx>>,
        args: &[FnArg<'tcx, Provenance>],
        dest: &PlaceTy<'tcx>,
        ret: Option<mir::BasicBlock>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx, Option<(&'tcx mir::Body<'tcx>, ty::Instance<'tcx>)>> {
        // Foreign items have no MIR; dispatch on the link name instead.
        if ecx.tcx.is_foreign_item(instance.def_id()) {
            let _trace = enter_trace_span!("emulate_foreign_item");
            let args = ecx.copy_fn_args(args);
            let link_name = Symbol::intern(ecx.tcx.symbol_name(instance).name);
            return ecx.emulate_foreign_item(link_name, abi, &args, dest, ret, unwind);
        }

        let _trace = enter_trace_span!("load_mir");
        interp_ok(Some((ecx.load_mir(instance.def, None)?, instance)))
    }
1131
    /// Calls an "extra" function value (`DynSym`), i.e. a symbol without a
    /// real body, by dispatching to the shim machinery.
    #[inline(always)]
    fn call_extra_fn(
        ecx: &mut MiriInterpCx<'tcx>,
        fn_val: DynSym,
        abi: &FnAbi<'tcx, Ty<'tcx>>,
        args: &[FnArg<'tcx, Provenance>],
        dest: &PlaceTy<'tcx>,
        ret: Option<mir::BasicBlock>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx> {
        let args = ecx.copy_fn_args(args);
        ecx.emulate_dyn_sym(fn_val, abi, &args, dest, ret, unwind)
    }
1145
    // Intrinsics and panics are handled by Miri's own machinery on the ecx.
    #[inline(always)]
    fn call_intrinsic(
        ecx: &mut MiriInterpCx<'tcx>,
        instance: ty::Instance<'tcx>,
        args: &[OpTy<'tcx>],
        dest: &PlaceTy<'tcx>,
        ret: Option<mir::BasicBlock>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx, Option<ty::Instance<'tcx>>> {
        ecx.call_intrinsic(instance, args, dest, ret, unwind)
    }

    #[inline(always)]
    fn assert_panic(
        ecx: &mut MiriInterpCx<'tcx>,
        msg: &mir::AssertMessage<'tcx>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx> {
        ecx.assert_panic(msg, unwind)
    }

    fn panic_nounwind(ecx: &mut InterpCx<'tcx, Self>, msg: &str) -> InterpResult<'tcx> {
        ecx.start_panic_nounwind(msg)
    }

    /// Unwinding hit a terminate boundary: invoke the corresponding panic
    /// lang item. Its unwind edge is unreachable (it is expected to abort).
    fn unwind_terminate(
        ecx: &mut InterpCx<'tcx, Self>,
        reason: mir::UnwindTerminateReason,
    ) -> InterpResult<'tcx> {
        // The lang item is guaranteed to exist for the given terminate reason.
        let panic = ecx.tcx.lang_items().get(reason.lang_item()).unwrap();
        let panic = ty::Instance::mono(ecx.tcx.tcx, panic);
        ecx.call_function(
            panic,
            ExternAbi::Rust,
            &[],
            None,
            ReturnContinuation::Goto { ret: None, unwind: mir::UnwindAction::Unreachable },
        )?;
        interp_ok(())
    }
1187
    // Pointer/float operations delegate to Miri's own implementations.
    #[inline(always)]
    fn binary_ptr_op(
        ecx: &MiriInterpCx<'tcx>,
        bin_op: mir::BinOp,
        left: &ImmTy<'tcx>,
        right: &ImmTy<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx>> {
        ecx.binary_ptr_op(bin_op, left, right)
    }

    #[inline(always)]
    fn generate_nan<F1: Float + FloatConvert<F2>, F2: Float>(
        ecx: &InterpCx<'tcx, Self>,
        inputs: &[F1],
    ) -> F2 {
        ecx.generate_nan(inputs)
    }

    // Applies a random error to float results; the constant `2` is the error
    // magnitude parameter (NOTE(review): its exact semantics — presumably an
    // ULP/exponent bound — are defined in `crate::math`).
    #[inline(always)]
    fn apply_float_nondet(
        ecx: &mut InterpCx<'tcx, Self>,
        val: ImmTy<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx>> {
        crate::math::apply_random_float_error_to_imm(ecx, val, 2)
    }

    #[inline(always)]
    fn equal_float_min_max<F: Float>(ecx: &MiriInterpCx<'tcx>, a: F, b: F) -> F {
        ecx.equal_float_min_max(a, b)
    }

    // UB/contract checks mirror the session settings.
    #[inline(always)]
    fn ub_checks(ecx: &InterpCx<'tcx, Self>) -> InterpResult<'tcx, bool> {
        interp_ok(ecx.tcx.sess.ub_checks())
    }

    #[inline(always)]
    fn contract_checks(ecx: &InterpCx<'tcx, Self>) -> InterpResult<'tcx, bool> {
        interp_ok(ecx.tcx.sess.contract_checks())
    }
1228
    // Thread-local statics get a per-thread allocation.
    #[inline(always)]
    fn thread_local_static_pointer(
        ecx: &mut MiriInterpCx<'tcx>,
        def_id: DefId,
    ) -> InterpResult<'tcx, StrictPointer> {
        ecx.get_or_create_thread_local_alloc(def_id)
    }

    /// Resolves an extern static to its shim allocation, verifying that the
    /// program's declaration matches the shim's size and alignment.
    fn extern_static_pointer(
        ecx: &MiriInterpCx<'tcx>,
        def_id: DefId,
    ) -> InterpResult<'tcx, StrictPointer> {
        let link_name = Symbol::intern(ecx.tcx.symbol_name(Instance::mono(*ecx.tcx, def_id)).name);
        if let Some(&ptr) = ecx.machine.extern_statics.get(&link_name) {
            // Shim pointers are always registered with concrete provenance
            // (see `add_extern_static`).
            let Provenance::Concrete { alloc_id, .. } = ptr.provenance else {
                panic!("extern_statics cannot contain wildcards")
            };
            let info = ecx.get_alloc_info(alloc_id);
            let def_ty = ecx.tcx.type_of(def_id).instantiate_identity();
            let extern_decl_layout =
                ecx.tcx.layout_of(ecx.typing_env().as_query_input(def_ty)).unwrap();
            // Mismatched size/align between declaration and shim would make
            // accesses unsound, so reject it up front.
            if extern_decl_layout.size != info.size || extern_decl_layout.align.abi != info.align {
                throw_unsup_format!(
                    "extern static `{link_name}` has been declared as `{krate}::{name}` \
                    with a size of {decl_size} bytes and alignment of {decl_align} bytes, \
                    but Miri emulates it via an extern static shim \
                    with a size of {shim_size} bytes and alignment of {shim_align} bytes",
                    name = ecx.tcx.def_path_str(def_id),
                    krate = ecx.tcx.crate_name(def_id.krate),
                    decl_size = extern_decl_layout.size.bytes(),
                    decl_align = extern_decl_layout.align.abi.bytes(),
                    shim_size = info.size.bytes(),
                    shim_align = info.align.bytes(),
                )
            }
            interp_ok(ptr)
        } else {
            throw_unsup_format!("extern static `{link_name}` is not supported by Miri",)
        }
    }
1272
    // Non-global allocations share the common `init_allocation` path;
    // globals must go through `adjust_global_allocation` instead.
    fn init_local_allocation(
        ecx: &MiriInterpCx<'tcx>,
        id: AllocId,
        kind: MemoryKind,
        size: Size,
        align: Align,
    ) -> InterpResult<'tcx, Self::AllocExtra> {
        assert!(kind != MiriMemoryKind::Global.into());
        MiriMachine::init_allocation(ecx, id, kind, size, align)
    }
1283
    /// Turns a CTFE pointer into a Miri pointer by attaching a borrow tag
    /// (and, via the ecx helper, a concrete address).
    fn adjust_alloc_root_pointer(
        ecx: &MiriInterpCx<'tcx>,
        ptr: interpret::Pointer<CtfeProvenance>,
        kind: Option<MemoryKind>,
    ) -> InterpResult<'tcx, interpret::Pointer<Provenance>> {
        let kind = kind.expect("we set our GLOBAL_KIND so this cannot be None");
        let alloc_id = ptr.provenance.alloc_id();
        if cfg!(debug_assertions) {
            // Thread-local and extern statics have dedicated hooks above;
            // reaching here for one of them is a bug.
            match ecx.tcx.try_get_global_alloc(alloc_id) {
                Some(GlobalAlloc::Static(def_id)) if ecx.tcx.is_thread_local_static(def_id) => {
                    panic!("adjust_alloc_root_pointer called on thread-local static")
                }
                Some(GlobalAlloc::Static(def_id)) if ecx.tcx.is_foreign_item(def_id) => {
                    panic!("adjust_alloc_root_pointer called on extern static")
                }
                _ => {}
            }
        }
        // Ask the borrow tracker for a root tag, or use the default tag when
        // borrow tracking is disabled.
        let tag = if let Some(borrow_tracker) = &ecx.machine.borrow_tracker {
            borrow_tracker.borrow_mut().root_ptr_tag(alloc_id, &ecx.machine)
        } else {
            BorTag::default()
        };
        ecx.adjust_alloc_root_pointer(ptr, tag, kind)
    }
1312
1313 #[inline(always)]
1315 fn ptr_from_addr_cast(ecx: &MiriInterpCx<'tcx>, addr: u64) -> InterpResult<'tcx, Pointer> {
1316 ecx.ptr_from_addr_cast(addr)
1317 }
1318
1319 #[inline(always)]
1323 fn expose_provenance(
1324 ecx: &InterpCx<'tcx, Self>,
1325 provenance: Self::Provenance,
1326 ) -> InterpResult<'tcx> {
1327 ecx.expose_provenance(provenance)
1328 }
1329
1330 fn ptr_get_alloc(
1342 ecx: &MiriInterpCx<'tcx>,
1343 ptr: StrictPointer,
1344 size: i64,
1345 ) -> Option<(AllocId, Size, Self::ProvenanceExtra)> {
1346 let rel = ecx.ptr_get_alloc(ptr, size);
1347
1348 rel.map(|(alloc_id, size)| {
1349 let tag = match ptr.provenance {
1350 Provenance::Concrete { tag, .. } => ProvenanceExtra::Concrete(tag),
1351 Provenance::Wildcard => ProvenanceExtra::Wildcard,
1352 };
1353 (alloc_id, size, tag)
1354 })
1355 }
1356
1357 fn adjust_global_allocation<'b>(
1366 ecx: &InterpCx<'tcx, Self>,
1367 id: AllocId,
1368 alloc: &'b Allocation,
1369 ) -> InterpResult<'tcx, Cow<'b, Allocation<Self::Provenance, Self::AllocExtra, Self::Bytes>>>
1370 {
1371 let alloc = alloc.adjust_from_tcx(
1372 &ecx.tcx,
1373 |bytes, align| ecx.get_global_alloc_bytes(id, bytes, align),
1374 |ptr| ecx.global_root_pointer(ptr),
1375 )?;
1376 let kind = MiriMemoryKind::Global.into();
1377 let extra = MiriMachine::init_allocation(ecx, id, kind, alloc.size(), alloc.align)?;
1378 interp_ok(Cow::Owned(alloc.with_extra(extra)))
1379 }
1380
    /// Hook run before every read of an allocation: emits alloc-tracking
    /// diagnostics, informs the active data-race handler, and lets the borrow
    /// tracker validate the access.
    #[inline(always)]
    fn before_memory_read(
        _tcx: TyCtxtAt<'tcx>,
        machine: &Self,
        alloc_extra: &AllocExtra<'tcx>,
        ptr: Pointer,
        (alloc_id, prov_extra): (AllocId, Self::ProvenanceExtra),
        range: AllocRange,
    ) -> InterpResult<'tcx> {
        if machine.track_alloc_accesses && machine.tracked_alloc_ids.contains(&alloc_id) {
            machine
                .emit_diagnostic(NonHaltingDiagnostic::AccessedAlloc(alloc_id, AccessKind::Read));
        }
        // Forward the read to whichever data-race handling mode is active.
        match &machine.data_race {
            GlobalDataRaceHandler::None => {}
            GlobalDataRaceHandler::Genmc(genmc_ctx) =>
                genmc_ctx.memory_load(machine, ptr.addr(), range.size)?,
            GlobalDataRaceHandler::Vclocks(_data_race) => {
                let _trace = enter_trace_span!(data_race::before_memory_read);
                // In vector-clock mode, the per-allocation state must be in
                // vector-clock form as well.
                let AllocDataRaceHandler::Vclocks(data_race, weak_memory) = &alloc_extra.data_race
                else {
                    unreachable!();
                };
                data_race.read(alloc_id, range, NaReadType::Read, None, machine)?;
                if let Some(weak_memory) = weak_memory {
                    weak_memory.memory_accessed(range, machine.data_race.as_vclocks_ref().unwrap());
                }
            }
        }
        if let Some(borrow_tracker) = &alloc_extra.borrow_tracker {
            borrow_tracker.before_memory_read(alloc_id, prov_extra, range, machine)?;
        }
        interp_ok(())
    }
1416
    /// Hook run before every write to an allocation: emits alloc-tracking
    /// diagnostics, informs the active data-race handler, and lets the borrow
    /// tracker validate the access.
    #[inline(always)]
    fn before_memory_write(
        _tcx: TyCtxtAt<'tcx>,
        machine: &mut Self,
        alloc_extra: &mut AllocExtra<'tcx>,
        ptr: Pointer,
        (alloc_id, prov_extra): (AllocId, Self::ProvenanceExtra),
        range: AllocRange,
    ) -> InterpResult<'tcx> {
        if machine.track_alloc_accesses && machine.tracked_alloc_ids.contains(&alloc_id) {
            machine
                .emit_diagnostic(NonHaltingDiagnostic::AccessedAlloc(alloc_id, AccessKind::Write));
        }
        // Forward the write to whichever data-race handling mode is active.
        match &machine.data_race {
            GlobalDataRaceHandler::None => {}
            GlobalDataRaceHandler::Genmc(genmc_ctx) => {
                genmc_ctx.memory_store(machine, ptr.addr(), range.size)?;
            }
            GlobalDataRaceHandler::Vclocks(_global_state) => {
                let _trace = enter_trace_span!(data_race::before_memory_write);
                // In vector-clock mode, the per-allocation state must be in
                // vector-clock form as well.
                let AllocDataRaceHandler::Vclocks(data_race, weak_memory) =
                    &mut alloc_extra.data_race
                else {
                    unreachable!()
                };
                data_race.write(alloc_id, range, NaWriteType::Write, None, machine)?;
                if let Some(weak_memory) = weak_memory {
                    weak_memory.memory_accessed(range, machine.data_race.as_vclocks_ref().unwrap());
                }
            }
        }
        if let Some(borrow_tracker) = &mut alloc_extra.borrow_tracker {
            borrow_tracker.before_memory_write(alloc_id, prov_extra, range, machine)?;
        }
        interp_ok(())
    }
1453
1454 #[inline(always)]
1455 fn before_memory_deallocation(
1456 _tcx: TyCtxtAt<'tcx>,
1457 machine: &mut Self,
1458 alloc_extra: &mut AllocExtra<'tcx>,
1459 ptr: Pointer,
1460 (alloc_id, prove_extra): (AllocId, Self::ProvenanceExtra),
1461 size: Size,
1462 align: Align,
1463 kind: MemoryKind,
1464 ) -> InterpResult<'tcx> {
1465 if machine.tracked_alloc_ids.contains(&alloc_id) {
1466 machine.emit_diagnostic(NonHaltingDiagnostic::FreedAlloc(alloc_id));
1467 }
1468 match &machine.data_race {
1469 GlobalDataRaceHandler::None => {}
1470 GlobalDataRaceHandler::Genmc(genmc_ctx) =>
1471 genmc_ctx.handle_dealloc(machine, ptr.addr(), size, align, kind)?,
1472 GlobalDataRaceHandler::Vclocks(_global_state) => {
1473 let _trace = enter_trace_span!(data_race::before_memory_deallocation);
1474 let data_race = alloc_extra.data_race.as_vclocks_mut().unwrap();
1475 data_race.write(
1476 alloc_id,
1477 alloc_range(Size::ZERO, size),
1478 NaWriteType::Deallocate,
1479 None,
1480 machine,
1481 )?;
1482 }
1483 }
1484 if let Some(borrow_tracker) = &mut alloc_extra.borrow_tracker {
1485 borrow_tracker.before_memory_deallocation(alloc_id, prove_extra, size, machine)?;
1486 }
1487 if let Some((_, deallocated_at)) = machine.allocation_spans.borrow_mut().get_mut(&alloc_id)
1488 {
1489 *deallocated_at = Some(machine.current_span());
1490 }
1491 machine.free_alloc_id(alloc_id, size, align, kind);
1492 interp_ok(())
1493 }
1494
1495 #[inline(always)]
1496 fn retag_ptr_value(
1497 ecx: &mut InterpCx<'tcx, Self>,
1498 kind: mir::RetagKind,
1499 val: &ImmTy<'tcx>,
1500 ) -> InterpResult<'tcx, ImmTy<'tcx>> {
1501 if ecx.machine.borrow_tracker.is_some() {
1502 ecx.retag_ptr_value(kind, val)
1503 } else {
1504 interp_ok(val.clone())
1505 }
1506 }
1507
1508 #[inline(always)]
1509 fn retag_place_contents(
1510 ecx: &mut InterpCx<'tcx, Self>,
1511 kind: mir::RetagKind,
1512 place: &PlaceTy<'tcx>,
1513 ) -> InterpResult<'tcx> {
1514 if ecx.machine.borrow_tracker.is_some() {
1515 ecx.retag_place_contents(kind, place)?;
1516 }
1517 interp_ok(())
1518 }
1519
1520 fn protect_in_place_function_argument(
1521 ecx: &mut InterpCx<'tcx, Self>,
1522 place: &MPlaceTy<'tcx>,
1523 ) -> InterpResult<'tcx> {
1524 let protected_place = if ecx.machine.borrow_tracker.is_some() {
1527 ecx.protect_place(place)?
1528 } else {
1529 place.clone()
1531 };
1532 ecx.write_uninit(&protected_place)?;
1537 interp_ok(())
1539 }
1540
    /// Creates the machine-specific extra data for a freshly pushed stack
    /// frame (profiling timer, borrow-tracker frame state, data-race state).
    #[inline(always)]
    fn init_frame(
        ecx: &mut InterpCx<'tcx, Self>,
        frame: Frame<'tcx, Provenance>,
    ) -> InterpResult<'tcx, Frame<'tcx, Provenance, FrameExtra<'tcx>>> {
        // Start a measureme interval event for this call, if profiling is
        // enabled. Function names are cached in `string_cache` so each name is
        // allocated in the profiler at most once.
        let timing = if let Some(profiler) = ecx.machine.profiler.as_ref() {
            let fn_name = frame.instance().to_string();
            let entry = ecx.machine.string_cache.entry(fn_name.clone());
            let name = entry.or_insert_with(|| profiler.alloc_string(&*fn_name));

            Some(profiler.start_recording_interval_event_detached(
                *name,
                measureme::EventId::from_label(*name),
                ecx.active_thread().to_u32(),
            ))
        } else {
            None
        };

        let borrow_tracker = ecx.machine.borrow_tracker.as_ref();

        let extra = FrameExtra {
            borrow_tracker: borrow_tracker.map(|bt| bt.borrow_mut().new_frame()),
            catch_unwind: None,
            timing,
            is_user_relevant: ecx.machine.is_user_relevant(&frame),
            // Per-frame data-race state is only needed in vector-clock mode.
            data_race: ecx
                .machine
                .data_race
                .as_vclocks_ref()
                .map(|_| data_race::FrameState::default()),
        };

        interp_ok(frame.with_extra(extra))
    }
1577
1578 fn stack<'a>(
1579 ecx: &'a InterpCx<'tcx, Self>,
1580 ) -> &'a [Frame<'tcx, Self::Provenance, Self::FrameExtra>] {
1581 ecx.active_thread_stack()
1582 }
1583
1584 fn stack_mut<'a>(
1585 ecx: &'a mut InterpCx<'tcx, Self>,
1586 ) -> &'a mut Vec<Frame<'tcx, Self::Provenance, Self::FrameExtra>> {
1587 ecx.active_thread_stack_mut()
1588 }
1589
1590 fn before_terminator(ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
1591 ecx.machine.basic_block_count += 1u64; ecx.machine.since_gc += 1;
1593 if let Some(report_progress) = ecx.machine.report_progress {
1595 if ecx.machine.basic_block_count.is_multiple_of(u64::from(report_progress)) {
1596 ecx.emit_diagnostic(NonHaltingDiagnostic::ProgressReport {
1597 block_count: ecx.machine.basic_block_count,
1598 });
1599 }
1600 }
1601
1602 if ecx.machine.gc_interval > 0 && ecx.machine.since_gc >= ecx.machine.gc_interval {
1607 ecx.machine.since_gc = 0;
1608 ecx.run_provenance_gc();
1609 }
1610
1611 ecx.maybe_preempt_active_thread();
1614
1615 ecx.machine.monotonic_clock.tick();
1617
1618 interp_ok(())
1619 }
1620
1621 #[inline(always)]
1622 fn after_stack_push(ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
1623 if ecx.frame().extra.is_user_relevant {
1624 let stack_len = ecx.active_thread_stack().len();
1627 ecx.active_thread_mut().set_top_user_relevant_frame(stack_len - 1);
1628 }
1629 interp_ok(())
1630 }
1631
    /// Hook run just before a frame is popped: borrow-tracker cleanup and
    /// maintenance of the "topmost user-relevant frame" bookkeeping.
    fn before_stack_pop(ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
        let frame = ecx.frame();
        // Let the borrow tracker release its per-frame state (e.g. protectors).
        if ecx.machine.borrow_tracker.is_some() {
            ecx.on_stack_pop(frame)?;
        }
        if frame.extra.is_user_relevant {
            // The topmost user-relevant frame is about to go away.
            // NOTE(review): the `1` presumably tells the search to skip the
            // frame currently being popped — confirm against
            // `recompute_top_user_relevant_frame`.
            ecx.active_thread_mut().recompute_top_user_relevant_frame(1);
        }
        info!("Leaving {}", ecx.frame().instance());
        interp_ok(())
    }
1653
    /// Hook run after a frame has been popped: finishes the frame's profiling
    /// event and dispatches unwind handling.
    #[inline(always)]
    fn after_stack_pop(
        ecx: &mut InterpCx<'tcx, Self>,
        frame: Frame<'tcx, Provenance, FrameExtra<'tcx>>,
        unwinding: bool,
    ) -> InterpResult<'tcx, ReturnAction> {
        let res = {
            let mut frame = frame;
            // Take the timing handle out before the frame extra is consumed.
            let timing = frame.extra.timing.take();
            let res = ecx.handle_stack_pop_unwind(frame.extra, unwinding);
            if let Some(profiler) = ecx.machine.profiler.as_ref() {
                // `timing` is always `Some` when a profiler is active, since
                // `init_frame` created it under the same condition.
                profiler.finish_recording_interval_event(timing.unwrap());
            }
            res
        };
        if !ecx.active_thread_stack().is_empty() {
            info!("Continuing in {}", ecx.frame().instance());
        }
        res
    }
1677
1678 fn after_local_read(
1679 ecx: &InterpCx<'tcx, Self>,
1680 frame: &Frame<'tcx, Provenance, FrameExtra<'tcx>>,
1681 local: mir::Local,
1682 ) -> InterpResult<'tcx> {
1683 if let Some(data_race) = &frame.extra.data_race {
1684 let _trace = enter_trace_span!(data_race::after_local_read);
1685 data_race.local_read(local, &ecx.machine);
1686 }
1687 interp_ok(())
1688 }
1689
1690 fn after_local_write(
1691 ecx: &mut InterpCx<'tcx, Self>,
1692 local: mir::Local,
1693 storage_live: bool,
1694 ) -> InterpResult<'tcx> {
1695 if let Some(data_race) = &ecx.frame().extra.data_race {
1696 let _trace = enter_trace_span!(data_race::after_local_write);
1697 data_race.local_write(local, storage_live, &ecx.machine);
1698 }
1699 interp_ok(())
1700 }
1701
    /// Hook run when a local is forced into real memory: records the
    /// allocation site and migrates per-local data-race state into the new
    /// allocation.
    fn after_local_moved_to_memory(
        ecx: &mut InterpCx<'tcx, Self>,
        local: mir::Local,
        mplace: &MPlaceTy<'tcx>,
    ) -> InterpResult<'tcx> {
        // A freshly created allocation always has concrete provenance.
        let Some(Provenance::Concrete { alloc_id, .. }) = mplace.ptr().provenance else {
            panic!("after_local_allocated should only be called on fresh allocations");
        };
        // Record the local's declaration span as the allocation site, for
        // diagnostics.
        let local_decl = &ecx.frame().body().local_decls[local];
        let span = local_decl.source_info.span;
        ecx.machine.allocation_spans.borrow_mut().insert(alloc_id, (span, None));
        // Split-borrow: get the allocation's extra data and the machine at the
        // same time, so both can be passed to the data-race handler below.
        let (alloc_info, machine) = ecx.get_alloc_extra_mut(alloc_id)?;
        if let Some(data_race) =
            &machine.threads.active_thread_stack().last().unwrap().extra.data_race
        {
            let _trace = enter_trace_span!(data_race::after_local_moved_to_memory);
            data_race.local_moved_to_memory(
                local,
                alloc_info.data_race.as_vclocks_mut().unwrap(),
                machine,
            );
        }
        interp_ok(())
    }
1728
1729 fn get_global_alloc_salt(
1730 ecx: &InterpCx<'tcx, Self>,
1731 instance: Option<ty::Instance<'tcx>>,
1732 ) -> usize {
1733 let unique = if let Some(instance) = instance {
1734 let is_generic = instance
1747 .args
1748 .into_iter()
1749 .any(|arg| !matches!(arg.kind(), ty::GenericArgKind::Lifetime(_)));
1750 let can_be_inlined = matches!(
1751 ecx.tcx.sess.opts.unstable_opts.cross_crate_inline_threshold,
1752 InliningThreshold::Always
1753 ) || !matches!(
1754 ecx.tcx.codegen_instance_attrs(instance.def).inline,
1755 InlineAttr::Never
1756 );
1757 !is_generic && !can_be_inlined
1758 } else {
1759 false
1761 };
1762 if unique {
1764 CTFE_ALLOC_SALT
1765 } else {
1766 ecx.machine.rng.borrow_mut().random_range(0..ADDRS_PER_ANON_GLOBAL)
1767 }
1768 }
1769
1770 fn cached_union_data_range<'e>(
1771 ecx: &'e mut InterpCx<'tcx, Self>,
1772 ty: Ty<'tcx>,
1773 compute_range: impl FnOnce() -> RangeSet,
1774 ) -> Cow<'e, RangeSet> {
1775 Cow::Borrowed(ecx.machine.union_data_ranges.entry(ty).or_insert_with(compute_range))
1776 }
1777
1778 fn get_default_alloc_params(&self) -> <Self::Bytes as AllocBytes>::AllocParams {
1779 use crate::alloc::MiriAllocParams;
1780
1781 match &self.allocator {
1782 Some(alloc) => MiriAllocParams::Isolated(alloc.clone()),
1783 None => MiriAllocParams::Global,
1784 }
1785 }
1786
    /// Enters a tracing span, taking the span lazily via a closure so that
    /// constructing it costs nothing when tracing is disabled.
    fn enter_trace_span(span: impl FnOnce() -> tracing::Span) -> impl EnteredTraceSpan {
        #[cfg(feature = "tracing")]
        {
            span().entered()
        }
        // Without the "tracing" feature, discard the closure and return `()`
        // in its place (the `unused_unit` lint is expected here because the
        // unit value is the whole point of this branch).
        #[cfg(not(feature = "tracing"))]
        #[expect(clippy::unused_unit)]
        {
            let _ = span; ()
        }
    }
1799}
1800
/// A type-erased callback that the interpreter can invoke later with an
/// argument of type `T`. The callback is consumed (`self: Box<Self>`) on
/// invocation, and must report any provenance it captures via
/// `VisitProvenance` so the GC can see it.
pub trait MachineCallback<'tcx, T>: VisitProvenance {
    /// Invokes the callback, consuming it.
    fn call(
        self: Box<Self>,
        ecx: &mut InterpCx<'tcx, MiriMachine<'tcx>>,
        arg: T,
    ) -> InterpResult<'tcx>;
}
1810
1811pub type DynMachineCallback<'tcx, T> = Box<dyn MachineCallback<'tcx, T> + 'tcx>;
1813
/// Constructs a boxed [`MachineCallback`] from a list of captured variables
/// and a body. The macro generates an ad-hoc struct holding the captures,
/// implements `VisitProvenance` by visiting each captured field, and
/// implements `MachineCallback::call` to destructure the captures and run
/// `$body` with `$this` (the interpreter) and `$arg` in scope.
#[macro_export]
macro_rules! callback {
    (@capture<$tcx:lifetime $(,)? $($lft:lifetime),*>
        { $($name:ident: $type:ty),* $(,)? }
        |$this:ident, $arg:ident: $arg_ty:ty| $body:expr $(,)?) => {{
        // Generated capture struct; `_phantom` ties the `$tcx` lifetime in
        // even when no capture mentions it.
        struct Callback<$tcx, $($lft),*> {
            $($name: $type,)*
            _phantom: std::marker::PhantomData<&$tcx ()>,
        }

        impl<$tcx, $($lft),*> VisitProvenance for Callback<$tcx, $($lft),*> {
            fn visit_provenance(&self, _visit: &mut VisitWith<'_>) {
                $(
                    self.$name.visit_provenance(_visit);
                )*
            }
        }

        impl<$tcx, $($lft),*> MachineCallback<$tcx, $arg_ty> for Callback<$tcx, $($lft),*> {
            fn call(
                self: Box<Self>,
                $this: &mut MiriInterpCx<$tcx>,
                $arg: $arg_ty
            ) -> InterpResult<$tcx> {
                #[allow(unused_variables)]
                let Callback { $($name,)* _phantom } = *self;
                $body
            }
        }

        Box::new(Callback {
            $($name,)*
            _phantom: std::marker::PhantomData
        })
    }};
}