1use std::any::Any;
5use std::borrow::Cow;
6use std::cell::{Cell, RefCell};
7use std::path::Path;
8use std::rc::Rc;
9use std::{fmt, process};
10
11use rand::rngs::StdRng;
12use rand::{Rng, SeedableRng};
13use rustc_abi::{Align, ExternAbi, Size};
14use rustc_apfloat::{Float, FloatConvert};
15use rustc_data_structures::fx::{FxHashMap, FxHashSet};
16#[allow(unused)]
17use rustc_data_structures::static_assert_size;
18use rustc_hir::attrs::InlineAttr;
19use rustc_middle::middle::codegen_fn_attrs::TargetFeatureKind;
20use rustc_middle::mir;
21use rustc_middle::query::TyCtxtAt;
22use rustc_middle::ty::layout::{
23 HasTyCtxt, HasTypingEnv, LayoutCx, LayoutError, LayoutOf, TyAndLayout,
24};
25use rustc_middle::ty::{self, Instance, Ty, TyCtxt};
26use rustc_session::config::InliningThreshold;
27use rustc_span::def_id::{CrateNum, DefId};
28use rustc_span::{Span, SpanData, Symbol};
29use rustc_target::callconv::FnAbi;
30
31use crate::alloc_addresses::EvalContextExt;
32use crate::concurrency::cpu_affinity::{self, CpuAffinityMask};
33use crate::concurrency::data_race::{self, NaReadType, NaWriteType};
34use crate::concurrency::{AllocDataRaceHandler, GenmcCtx, GlobalDataRaceHandler, weak_memory};
35use crate::*;
36
/// First POSIX realtime signal number we report (matches common Linux/glibc setups).
pub const SIGRTMIN: i32 = 34;

/// Last POSIX realtime signal number we support. The exact value is an emulation
/// choice (smaller than typical Linux limits) — TODO confirm rationale at use sites.
pub const SIGRTMAX: i32 = 42;

/// Number of distinct base addresses cached per anonymous global allocation —
/// presumably to make pointer identity of globals vary across uses; confirm at use site.
const ADDRS_PER_ANON_GLOBAL: usize = 32;
/// How pointer alignment should be checked during interpretation
/// (consulted by `enforce_alignment` and `alignment_check` below).
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum AlignmentCheck {
    /// Do not check alignment at all.
    None,
    /// Check alignment "symbolically": based on the allocation's (possibly promised)
    /// alignment rather than the actual integer address.
    Symbolic,
    /// Check alignment on the actual physical integer address.
    Int,
}
61
/// How an operation that is rejected under isolation should be reported.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum RejectOpWith {
    /// Reject the op by aborting the machine.
    Abort,

    /// Reject the op without emitting any warning.
    NoWarning,

    /// Reject the op and emit a warning (with backtrace — cf. the variant below).
    Warning,

    /// Reject the op and emit a warning, but without a backtrace.
    WarningWithoutBacktrace,
}
78
/// What to do with operations that require communicating with the host
/// (see `MiriMachine::communicate`).
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum IsolatedOp {
    /// Reject the op; the payload says how the rejection is reported.
    Reject(RejectOpWith),

    /// Allow the op (isolation disabled).
    Allow,
}
90
/// How backtraces in diagnostics should be rendered.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum BacktraceStyle {
    /// Abbreviated backtrace.
    Short,
    /// Full backtrace.
    Full,
    /// No backtrace.
    Off,
}
100
/// How deeply value validity should be checked
/// (consulted by `enforce_validity` / `enforce_validity_recursively`).
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum ValidationMode {
    /// No validity checking.
    No,
    /// Check only the value itself, not data behind references.
    Shallow,
    /// Check recursively through references.
    Deep,
}
110
/// How simulated rounding error is applied to float operations
/// (see `apply_float_nondet` and `crate::math`).
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum FloatRoundingErrorMode {
    /// Apply a random error.
    Random,
    /// Apply no error.
    None,
    /// Apply the maximum error — TODO confirm exact semantics in `crate::math`.
    Max,
}
120
/// Extra data Miri stores with each stack frame.
pub struct FrameExtra<'tcx> {
    /// Borrow-tracker state for this frame, if the borrow tracker is enabled.
    pub borrow_tracker: Option<borrow_tracker::FrameState>,

    /// Data needed for `catch_unwind` handling in this frame, if any
    /// (see `CatchUnwindData`).
    pub catch_unwind: Option<CatchUnwindData<'tcx>>,

    /// `measureme` timing of this frame's execution, if profiling is enabled.
    pub timing: Option<measureme::DetachedTiming>,

    /// Whether this frame counts as "user-relevant" — exact criteria are set
    /// where frames are created; confirm there before relying on this.
    pub is_user_relevant: bool,

    /// Data-race detector state for this frame.
    pub data_race: Option<data_race::FrameState>,
}
145
146impl<'tcx> std::fmt::Debug for FrameExtra<'tcx> {
147 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
148 let FrameExtra { borrow_tracker, catch_unwind, timing: _, is_user_relevant, data_race } =
150 self;
151 f.debug_struct("FrameData")
152 .field("borrow_tracker", borrow_tracker)
153 .field("catch_unwind", catch_unwind)
154 .field("is_user_relevant", is_user_relevant)
155 .field("data_race", data_race)
156 .finish()
157 }
158}
159
impl VisitProvenance for FrameExtra<'_> {
    fn visit_provenance(&self, visit: &mut VisitWith<'_>) {
        // Exhaustive destructure: adding a field forces a decision on whether it
        // can contain provenance.
        let FrameExtra {
            catch_unwind,
            borrow_tracker,
            timing: _,
            is_user_relevant: _,
            data_race: _,
        } = self;

        // Only these two fields can hold pointers carrying provenance.
        catch_unwind.visit_provenance(visit);
        borrow_tracker.visit_provenance(visit);
    }
}
174
/// Extra memory kinds Miri distinguishes, beyond the interpreter's built-in ones.
/// (User-facing names for each kind live in the `Display` impl below.)
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum MiriMemoryKind {
    /// `__rust_alloc`-style Rust heap memory — presumably; confirm at allocation shims.
    Rust,
    /// Miri's bare-metal heap.
    Miri,
    /// C heap memory (`malloc` and friends).
    C,
    /// Windows `HeapAlloc` memory — presumably; confirm at the Windows shims.
    WinHeap,
    /// Windows "local" memory.
    WinLocal,
    /// Memory managed by the machine itself; may leak (see `MayLeak` below).
    Machine,
    /// Language-runtime memory; must be freed before exit (see `MayLeak` below).
    Runtime,
    /// Globals (statics or consts).
    Global,
    /// Memory for an extern-static shim.
    ExternStatic,
    /// Memory for a thread-local static.
    Tls,
    /// Memory mapped directly by the program (`mmap`).
    Mmap,
}
206
impl From<MiriMemoryKind> for MemoryKind {
    /// Wraps a Miri-specific kind into the interpreter's generic `MemoryKind`.
    #[inline(always)]
    fn from(kind: MiriMemoryKind) -> MemoryKind {
        MemoryKind::Machine(kind)
    }
}
213
214impl MayLeak for MiriMemoryKind {
215 #[inline(always)]
216 fn may_leak(self) -> bool {
217 use self::MiriMemoryKind::*;
218 match self {
219 Rust | Miri | C | WinHeap | WinLocal | Runtime => false,
220 Machine | Global | ExternStatic | Tls | Mmap => true,
221 }
222 }
223}
224
225impl MiriMemoryKind {
226 fn should_save_allocation_span(self) -> bool {
228 use self::MiriMemoryKind::*;
229 match self {
230 Rust | Miri | C | WinHeap | WinLocal | Mmap => true,
232 Machine | Global | ExternStatic | Tls | Runtime => false,
234 }
235 }
236}
237
238impl fmt::Display for MiriMemoryKind {
239 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
240 use self::MiriMemoryKind::*;
241 match self {
242 Rust => write!(f, "Rust heap"),
243 Miri => write!(f, "Miri bare-metal heap"),
244 C => write!(f, "C heap"),
245 WinHeap => write!(f, "Windows heap"),
246 WinLocal => write!(f, "Windows local memory"),
247 Machine => write!(f, "machine-managed memory"),
248 Runtime => write!(f, "language runtime memory"),
249 Global => write!(f, "global (static or const)"),
250 ExternStatic => write!(f, "extern static"),
251 Tls => write!(f, "thread-local static"),
252 Mmap => write!(f, "mmap"),
253 }
254 }
255}
256
257pub type MemoryKind = interpret::MemoryKind<MiriMemoryKind>;
258
/// Pointer provenance.
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub enum Provenance {
    /// Regular provenance: the pointer is tied to a specific allocation.
    Concrete {
        alloc_id: AllocId,
        /// Borrow-tracker tag of this pointer.
        tag: BorTag,
    },
    /// Wildcard provenance — exposed via `interpret::Provenance::WILDCARD` below;
    /// presumably attached to pointers from int-to-ptr casts (confirm at
    /// `ptr_from_addr_cast`).
    Wildcard,
}
291
/// The per-pointer "extra" provenance once the allocation is already known:
/// either a concrete borrow tag, or wildcard.
#[derive(Copy, Clone, PartialEq)]
pub enum ProvenanceExtra {
    Concrete(BorTag),
    Wildcard,
}
298
// Compile-time size regression checks for widely-used types (on 64-bit hosts only).
#[cfg(target_pointer_width = "64")]
static_assert_size!(StrictPointer, 24);
#[cfg(target_pointer_width = "64")]
static_assert_size!(Scalar, 32);
306
impl fmt::Debug for Provenance {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Provenance::Concrete { alloc_id, tag } => {
                // Forward the `{:#?}` alternate flag to the allocation id.
                if f.alternate() {
                    write!(f, "[{alloc_id:#?}]")?;
                } else {
                    write!(f, "[{alloc_id:?}]")?;
                }
                // The tag follows the allocation id without a separator.
                write!(f, "{tag:?}")?;
            }
            Provenance::Wildcard => {
                write!(f, "[wildcard]")?;
            }
        }
        Ok(())
    }
}
327
328impl interpret::Provenance for Provenance {
329 const OFFSET_IS_ADDR: bool = true;
331
332 const WILDCARD: Option<Self> = Some(Provenance::Wildcard);
334
335 fn get_alloc_id(self) -> Option<AllocId> {
336 match self {
337 Provenance::Concrete { alloc_id, .. } => Some(alloc_id),
338 Provenance::Wildcard => None,
339 }
340 }
341
342 fn fmt(ptr: &interpret::Pointer<Self>, f: &mut fmt::Formatter<'_>) -> fmt::Result {
343 let (prov, addr) = ptr.into_raw_parts(); write!(f, "{:#x}", addr.bytes())?;
345 if f.alternate() {
346 write!(f, "{prov:#?}")?;
347 } else {
348 write!(f, "{prov:?}")?;
349 }
350 Ok(())
351 }
352
353 fn join(left: Self, right: Self) -> Option<Self> {
354 match (left, right) {
355 (
357 Provenance::Concrete { alloc_id: left_alloc, tag: left_tag },
358 Provenance::Concrete { alloc_id: right_alloc, tag: right_tag },
359 ) if left_alloc == right_alloc && left_tag == right_tag => Some(left),
360 (Provenance::Wildcard, o) | (o, Provenance::Wildcard) => Some(o),
363 _ => None,
365 }
366 }
367}
368
369impl fmt::Debug for ProvenanceExtra {
370 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
371 match self {
372 ProvenanceExtra::Concrete(pid) => write!(f, "{pid:?}"),
373 ProvenanceExtra::Wildcard => write!(f, "<wildcard>"),
374 }
375 }
376}
377
378impl ProvenanceExtra {
379 pub fn and_then<T>(self, f: impl FnOnce(BorTag) -> Option<T>) -> Option<T> {
380 match self {
381 ProvenanceExtra::Concrete(pid) => f(pid),
382 ProvenanceExtra::Wildcard => None,
383 }
384 }
385}
386
/// Extra per-allocation data Miri attaches to every allocation.
#[derive(Debug)]
pub struct AllocExtra<'tcx> {
    /// Borrow-tracker state for this allocation, if the borrow tracker is enabled.
    pub borrow_tracker: Option<borrow_tracker::AllocState>,
    /// Data-race detection state for this allocation.
    pub data_race: AllocDataRaceHandler,
    /// Backtrace of the allocation site, recorded only when leak backtraces are
    /// being collected and this allocation may not leak (see `init_allocation`).
    pub backtrace: Option<Vec<FrameInfo<'tcx>>>,
    /// Type-erased extra state keyed by offset into the allocation — presumably
    /// synchronization objects from the shims; confirm at the use sites.
    pub sync: FxHashMap<Size, Box<dyn Any>>,
}
407
impl<'tcx> Clone for AllocExtra<'tcx> {
    /// A `Clone` impl is required by some bound elsewhere, but Miri's allocations
    /// must never actually be cloned — this panic guards that invariant.
    fn clone(&self) -> Self {
        panic!("our allocations should never be cloned");
    }
}
415
impl VisitProvenance for AllocExtra<'_> {
    fn visit_provenance(&self, visit: &mut VisitWith<'_>) {
        // Exhaustive destructure: adding a field forces a decision on whether it
        // can contain provenance.
        let AllocExtra { borrow_tracker, data_race, backtrace: _, sync: _ } = self;

        borrow_tracker.visit_provenance(visit);
        data_race.visit_provenance(visit);
    }
}
424
425pub struct PrimitiveLayouts<'tcx> {
427 pub unit: TyAndLayout<'tcx>,
428 pub i8: TyAndLayout<'tcx>,
429 pub i16: TyAndLayout<'tcx>,
430 pub i32: TyAndLayout<'tcx>,
431 pub i64: TyAndLayout<'tcx>,
432 pub i128: TyAndLayout<'tcx>,
433 pub isize: TyAndLayout<'tcx>,
434 pub u8: TyAndLayout<'tcx>,
435 pub u16: TyAndLayout<'tcx>,
436 pub u32: TyAndLayout<'tcx>,
437 pub u64: TyAndLayout<'tcx>,
438 pub u128: TyAndLayout<'tcx>,
439 pub usize: TyAndLayout<'tcx>,
440 pub bool: TyAndLayout<'tcx>,
441 pub mut_raw_ptr: TyAndLayout<'tcx>, pub const_raw_ptr: TyAndLayout<'tcx>, }
444
impl<'tcx> PrimitiveLayouts<'tcx> {
    /// Computes the layout of every cached primitive type up front.
    /// Raw-pointer layouts are computed for `*mut ()` / `*const ()` — presumably
    /// representative of all thin raw pointers; confirm at the use sites.
    fn new(layout_cx: LayoutCx<'tcx>) -> Result<Self, &'tcx LayoutError<'tcx>> {
        let tcx = layout_cx.tcx();
        let mut_raw_ptr = Ty::new_mut_ptr(tcx, tcx.types.unit);
        let const_raw_ptr = Ty::new_imm_ptr(tcx, tcx.types.unit);
        Ok(Self {
            unit: layout_cx.layout_of(tcx.types.unit)?,
            i8: layout_cx.layout_of(tcx.types.i8)?,
            i16: layout_cx.layout_of(tcx.types.i16)?,
            i32: layout_cx.layout_of(tcx.types.i32)?,
            i64: layout_cx.layout_of(tcx.types.i64)?,
            i128: layout_cx.layout_of(tcx.types.i128)?,
            isize: layout_cx.layout_of(tcx.types.isize)?,
            u8: layout_cx.layout_of(tcx.types.u8)?,
            u16: layout_cx.layout_of(tcx.types.u16)?,
            u32: layout_cx.layout_of(tcx.types.u32)?,
            u64: layout_cx.layout_of(tcx.types.u64)?,
            u128: layout_cx.layout_of(tcx.types.u128)?,
            usize: layout_cx.layout_of(tcx.types.usize)?,
            bool: layout_cx.layout_of(tcx.types.bool)?,
            mut_raw_ptr: layout_cx.layout_of(mut_raw_ptr)?,
            const_raw_ptr: layout_cx.layout_of(const_raw_ptr)?,
        })
    }

    /// Returns the cached layout for an unsigned integer of the given size,
    /// or `None` if the size is not that of a primitive integer type.
    pub fn uint(&self, size: Size) -> Option<TyAndLayout<'tcx>> {
        match size.bits() {
            8 => Some(self.u8),
            16 => Some(self.u16),
            32 => Some(self.u32),
            64 => Some(self.u64),
            128 => Some(self.u128),
            _ => None,
        }
    }

    /// Returns the cached layout for a signed integer of the given size,
    /// or `None` if the size is not that of a primitive integer type.
    pub fn int(&self, size: Size) -> Option<TyAndLayout<'tcx>> {
        match size.bits() {
            8 => Some(self.i8),
            16 => Some(self.i16),
            32 => Some(self.i32),
            64 => Some(self.i64),
            128 => Some(self.i128),
            _ => None,
        }
    }
}
492
/// The machine itself: all global interpreter state Miri adds on top of rustc's
/// const-eval interpreter.
pub struct MiriMachine<'tcx> {
    /// The global compiler context.
    pub tcx: TyCtxt<'tcx>,

    /// Global borrow-tracker state, if the borrow tracker is enabled.
    pub borrow_tracker: Option<borrow_tracker::GlobalState>,

    /// Global data-race detection state (vector clocks, GenMC, or disabled).
    pub data_race: GlobalDataRaceHandler,

    /// Maps allocations to their base addresses (see the `alloc_addresses` module).
    pub alloc_addresses: alloc_addresses::GlobalState,

    /// Environment variables visible to the interpreted program.
    pub(crate) env_vars: EnvVars<'tcx>,

    /// Return place of the main function.
    pub(crate) main_fn_ret_place: Option<MPlaceTy<'tcx>>,

    /// Program arguments: `argc`/`argv` and the Windows-style command line.
    /// These are pointers into machine memory; `None` until initialized.
    pub(crate) argc: Option<Pointer>,
    pub(crate) argv: Option<Pointer>,
    pub(crate) cmd_line: Option<Pointer>,

    /// Thread-local storage state.
    pub(crate) tls: TlsData<'tcx>,

    /// What to do with operations requiring host communication (see `communicate`).
    pub(crate) isolated_op: IsolatedOp,

    /// Whether and how deeply value validity is checked.
    pub(crate) validation: ValidationMode,

    /// Table of open file descriptors.
    pub(crate) fds: shims::FdTable,
    /// Table of open directory handles.
    pub(crate) dirs: shims::DirTable,

    /// Registered epoll interests (see the epoll shims).
    pub(crate) epoll_interests: shims::EpollInterestTable,

    /// Monotonic clock; virtualized when isolation is enabled (see `new`).
    pub(crate) monotonic_clock: MonotonicClock,

    /// The thread manager / scheduler state.
    pub(crate) threads: ThreadManager<'tcx>,

    /// Per-thread CPU affinity mask; only populated on OSes with affinity shims
    /// (see `new`: linux/freebsd/android).
    pub(crate) thread_cpu_affinity: FxHashMap<ThreadId, CpuAffinityMask>,

    /// Precomputed layouts of primitive types.
    pub(crate) layouts: PrimitiveLayouts<'tcx>,

    /// Allocations treated as roots (e.g. for leak checking — TODO confirm use site).
    pub(crate) static_roots: Vec<AllocId>,

    /// `measureme` profiler, if profiling output was requested; `string_cache`
    /// interns strings for it.
    profiler: Option<measureme::Profiler>,
    string_cache: FxHashMap<String, measureme::StringId>,

    /// Cache of symbol-name lookups for exported symbols (`None` = not found).
    pub(crate) exported_symbols_cache: FxHashMap<Symbol, Option<Instance<'tcx>>>,

    /// How backtraces should be rendered in diagnostics.
    pub(crate) backtrace_style: BacktraceStyle,

    /// Crates considered "local" for diagnostics (see `is_local`).
    pub(crate) local_crates: Vec<CrateNum>,

    /// Shim pointers for extern statics, keyed by link name (see `add_extern_static`).
    extern_statics: FxHashMap<Symbol, StrictPointer>,

    /// RNG used to resolve non-determinism; seeded from the config (see `new`).
    pub(crate) rng: RefCell<StdRng>,

    /// Allocator for machine memory; only set when native libraries are in use
    /// (see `new`).
    pub(crate) allocator: Option<Rc<RefCell<crate::alloc::isolated_alloc::IsolatedAlloc>>>,

    /// Allocation IDs whose creation (and more) should be reported to the user.
    tracked_alloc_ids: FxHashSet<AllocId>,
    /// Whether accesses to tracked allocations should also be reported — TODO
    /// confirm at the access hooks (outside this chunk).
    track_alloc_accesses: bool,

    /// Which alignment checks to perform (see `enforce_alignment`/`alignment_check`).
    pub(crate) check_alignment: AlignmentCheck,

    /// Rate of spurious failures injected into `compare_exchange_weak`.
    pub(crate) cmpxchg_weak_failure_rate: f64,

    /// Rate at which the active thread gets preempted (scheduling non-determinism).
    pub(crate) preemption_rate: f64,

    /// If set, report progress every this many basic blocks;
    /// `basic_block_count` counts blocks executed so far.
    pub(crate) report_progress: Option<u32>,
    pub(crate) basic_block_count: u64,

    /// Native libraries loaded for FFI (only in native-lib builds on unix).
    #[cfg(all(unix, feature = "native-lib"))]
    pub native_lib: Vec<(libloading::Library, std::path::PathBuf)>,
    /// Always empty in builds without native-lib support (`!` is uninhabited).
    #[cfg(not(all(unix, feature = "native-lib")))]
    pub native_lib: Vec<!>,

    /// Provenance-GC interval (in basic blocks) and blocks since the last GC.
    pub(crate) gc_interval: u32,
    pub(crate) since_gc: u32,

    /// Number of CPUs the machine reports to the program.
    pub(crate) num_cpus: u32,

    /// Emulated page size, and the address/size of the emulated main-thread stack;
    /// derived from the target in `new` unless configured explicitly.
    pub(crate) page_size: u64,
    pub(crate) stack_addr: u64,
    pub(crate) stack_size: u64,

    /// Whether to record an allocation backtrace for leak reporting.
    pub(crate) collect_leak_backtraces: bool,

    /// Allocation span and optional deallocation span per allocation
    /// (see `allocated_span`/`deallocated_span`).
    pub(crate) allocation_spans: RefCell<FxHashMap<AllocId, (Span, Option<Span>)>>,

    /// Promised symbolic alignment per allocation, as `(offset, align)` — consulted
    /// by `alignment_check`; presumably promised via an intrinsic (confirm).
    pub(crate) symbolic_alignment: RefCell<FxHashMap<AllocId, (Size, Align)>>,

    /// Cache of data ranges for union types — TODO confirm consumer (outside chunk).
    union_data_ranges: FxHashMap<Ty<'tcx>, RangeSet>,

    /// One-shot sanity flags for pthread synchronization primitives.
    pub(crate) pthread_mutex_sanity: Cell<bool>,
    pub(crate) pthread_rwlock_sanity: Cell<bool>,
    pub(crate) pthread_condvar_sanity: Cell<bool>,

    /// One-shot / deduplication state for warnings that should not repeat.
    pub(crate) sb_extern_type_warned: Cell<bool>,
    #[allow(unused)]
    pub(crate) native_call_mem_warned: Cell<bool>,
    pub(crate) reject_in_isolation_warned: RefCell<FxHashSet<String>>,
    pub(crate) int2ptr_warned: RefCell<FxHashSet<Span>>,

    /// Cache for internal symbol mangling results.
    pub(crate) mangle_internal_symbol_cache: FxHashMap<&'static str, String>,

    /// Force use of intrinsics' fallback bodies instead of Miri's implementations.
    pub force_intrinsic_fallback: bool,

    /// Float non-determinism switches (see `FloatRoundingErrorMode`).
    pub float_nondet: bool,
    pub float_rounding_error: FloatRoundingErrorMode,

    /// Whether fd reads/writes may process fewer bytes than requested — presumably;
    /// confirm at the fd shims.
    pub short_fd_operations: bool,
}
676
impl<'tcx> MiriMachine<'tcx> {
    /// Creates a fresh machine from the given configuration.
    /// `genmc_ctx` must be `Some` iff GenMC mode is configured (unwrapped below).
    pub(crate) fn new(
        config: &MiriConfig,
        layout_cx: LayoutCx<'tcx>,
        genmc_ctx: Option<Rc<GenmcCtx>>,
    ) -> Self {
        let tcx = layout_cx.tcx();
        let local_crates = helpers::get_local_crates(tcx);
        let layouts =
            PrimitiveLayouts::new(layout_cx).expect("Couldn't get layouts of primitive types");
        // Set up a `measureme` profiler, if requested.
        let profiler = config.measureme_out.as_ref().map(|out| {
            let crate_name =
                tcx.sess.opts.crate_name.clone().unwrap_or_else(|| "unknown-crate".to_string());
            let pid = process::id();
            // Include the PID so concurrent runs do not clobber each other's files.
            let filename = format!("{crate_name}-{pid:07}");
            let path = Path::new(out).join(filename);
            measureme::Profiler::new(path).expect("Couldn't create `measureme` profiler")
        });
        let rng = StdRng::seed_from_u64(config.seed.unwrap_or(0));
        let borrow_tracker = config.borrow_tracker.map(|bt| bt.instantiate_global_state(config));
        // GenMC mode takes precedence over the vector-clock data-race detector.
        let data_race = if config.genmc_config.is_some() {
            GlobalDataRaceHandler::Genmc(genmc_ctx.unwrap())
        } else if config.data_race_detector {
            GlobalDataRaceHandler::Vclocks(Box::new(data_race::GlobalState::new(config)))
        } else {
            GlobalDataRaceHandler::None
        };
        // Page size: either the configured value, or a target-dependent default.
        let page_size = if let Some(page_size) = config.page_size {
            page_size
        } else {
            let target = &tcx.sess.target;
            match target.arch.as_ref() {
                "wasm32" | "wasm64" => 64 * 1024,
                "aarch64" => {
                    if target.options.vendor.as_ref() == "apple" {
                        // Apple aarch64 uses 16k pages.
                        16 * 1024
                    } else {
                        4 * 1024
                    }
                }
                _ => 4 * 1024,
            }
        };
        // Emulated stack placement/size scales with the page size and pointer width.
        let stack_addr = if tcx.pointer_size().bits() < 32 { page_size } else { page_size * 32 };
        let stack_size =
            if tcx.pointer_size().bits() < 32 { page_size * 4 } else { page_size * 16 };
        assert!(
            usize::try_from(config.num_cpus).unwrap() <= cpu_affinity::MAX_CPUS,
            "miri only supports up to {} CPUs, but {} were configured",
            cpu_affinity::MAX_CPUS,
            config.num_cpus
        );
        let threads = ThreadManager::new(config);
        let mut thread_cpu_affinity = FxHashMap::default();
        // CPU affinity is only tracked on targets that have the affinity shims.
        if matches!(&*tcx.sess.target.os, "linux" | "freebsd" | "android") {
            thread_cpu_affinity
                .insert(threads.active_thread(), CpuAffinityMask::new(&layout_cx, config.num_cpus));
        }
        MiriMachine {
            tcx,
            borrow_tracker,
            data_race,
            alloc_addresses: RefCell::new(alloc_addresses::GlobalStateInner::new(config, stack_addr)),
            env_vars: EnvVars::default(),
            main_fn_ret_place: None,
            argc: None,
            argv: None,
            cmd_line: None,
            tls: TlsData::default(),
            isolated_op: config.isolated_op,
            validation: config.validation,
            fds: shims::FdTable::init(config.mute_stdout_stderr),
            epoll_interests: shims::EpollInterestTable::new(),
            dirs: Default::default(),
            layouts,
            threads,
            thread_cpu_affinity,
            static_roots: Vec::new(),
            profiler,
            string_cache: Default::default(),
            exported_symbols_cache: FxHashMap::default(),
            backtrace_style: config.backtrace_style,
            local_crates,
            extern_statics: FxHashMap::default(),
            rng: RefCell::new(rng),
            // The isolated allocator is only needed when native libraries are involved.
            allocator: if !config.native_lib.is_empty() {
                Some(Rc::new(RefCell::new(crate::alloc::isolated_alloc::IsolatedAlloc::new())))
            } else { None },
            tracked_alloc_ids: config.tracked_alloc_ids.clone(),
            track_alloc_accesses: config.track_alloc_accesses,
            check_alignment: config.check_alignment,
            cmpxchg_weak_failure_rate: config.cmpxchg_weak_failure_rate,
            preemption_rate: config.preemption_rate,
            report_progress: config.report_progress,
            basic_block_count: 0,
            // The clock is only "real" when host communication is allowed.
            monotonic_clock: MonotonicClock::new(config.isolated_op == IsolatedOp::Allow),
            #[cfg(all(unix, feature = "native-lib"))]
            native_lib: config.native_lib.iter().map(|lib_file_path| {
                let host_triple = rustc_session::config::host_tuple();
                let target_triple = tcx.sess.opts.target_triple.tuple();
                // Check if host target == the session target.
                if host_triple != target_triple {
                    panic!(
                        "calling native C functions in linked .so file requires host and target to be the same: \
                        host={host_triple}, target={target_triple}",
                    );
                }
                (
                    // SAFETY: loading a library runs its initializers in-process;
                    // we trust the user-provided path here.
                    unsafe {
                        libloading::Library::new(lib_file_path)
                            .expect("failed to read specified extern shared object file")
                    },
                    lib_file_path.clone(),
                )
            }).collect(),
            #[cfg(not(all(unix, feature = "native-lib")))]
            native_lib: config.native_lib.iter().map(|_| {
                panic!("calling functions from native libraries via FFI is not supported in this build of Miri")
            }).collect(),
            gc_interval: config.gc_interval,
            since_gc: 0,
            num_cpus: config.num_cpus,
            page_size,
            stack_addr,
            stack_size,
            collect_leak_backtraces: config.collect_leak_backtraces,
            allocation_spans: RefCell::new(FxHashMap::default()),
            symbolic_alignment: RefCell::new(FxHashMap::default()),
            union_data_ranges: FxHashMap::default(),
            pthread_mutex_sanity: Cell::new(false),
            pthread_rwlock_sanity: Cell::new(false),
            pthread_condvar_sanity: Cell::new(false),
            sb_extern_type_warned: Cell::new(false),
            native_call_mem_warned: Cell::new(false),
            reject_in_isolation_warned: Default::default(),
            int2ptr_warned: Default::default(),
            mangle_internal_symbol_cache: Default::default(),
            force_intrinsic_fallback: config.force_intrinsic_fallback,
            float_nondet: config.float_nondet,
            float_rounding_error: config.float_rounding_error,
            short_fd_operations: config.short_fd_operations,
        }
    }

    /// Initialization that needs a fully-built `InterpCx` and therefore cannot
    /// happen in `new`: env vars, extern-static shims, thread manager.
    pub(crate) fn late_init(
        ecx: &mut MiriInterpCx<'tcx>,
        config: &MiriConfig,
        on_main_stack_empty: StackEmptyCallback<'tcx>,
    ) -> InterpResult<'tcx> {
        EnvVars::init(ecx, config)?;
        MiriMachine::init_extern_statics(ecx)?;
        ThreadManager::init(ecx, on_main_stack_empty);
        interp_ok(())
    }

    /// Registers the shim pointer for an extern static under its link name.
    /// Panics if the pointer has no provenance or the name is registered twice.
    pub(crate) fn add_extern_static(ecx: &mut MiriInterpCx<'tcx>, name: &str, ptr: Pointer) {
        let ptr = ptr.into_pointer_or_addr().unwrap();
        ecx.machine.extern_statics.try_insert(Symbol::intern(name), ptr).unwrap();
    }

    /// Whether communication with the host is allowed (isolation disabled).
    pub(crate) fn communicate(&self) -> bool {
        self.isolated_op == IsolatedOp::Allow
    }

    /// Whether the given frame belongs to a crate considered "local".
    pub(crate) fn is_local(&self, frame: &FrameInfo<'_>) -> bool {
        let def_id = frame.instance.def_id();
        def_id.is_local() || self.local_crates.contains(&def_id.krate)
    }

    /// Called on abnormal termination: drop the profiler early so collected
    /// `measureme` data gets written out (flush-on-drop — TODO confirm against
    /// the `measureme` docs).
    pub(crate) fn handle_abnormal_termination(&mut self) {
        drop(self.profiler.take());
    }

    /// The emulated page size, as an `Align`.
    pub(crate) fn page_align(&self) -> Align {
        Align::from_bytes(self.page_size).unwrap()
    }

    /// Span where the given allocation was created, if we recorded it.
    pub(crate) fn allocated_span(&self, alloc_id: AllocId) -> Option<SpanData> {
        self.allocation_spans
            .borrow()
            .get(&alloc_id)
            .map(|(allocated, _deallocated)| allocated.data())
    }

    /// Span where the given allocation was deallocated, if we recorded it.
    pub(crate) fn deallocated_span(&self, alloc_id: AllocId) -> Option<SpanData> {
        self.allocation_spans
            .borrow()
            .get(&alloc_id)
            .and_then(|(_allocated, deallocated)| *deallocated)
            .map(Span::data)
    }

    /// Builds the `AllocExtra` for a newly created allocation: hooks up
    /// borrow-tracker and data-race state, and optionally records a backtrace
    /// and the allocation span.
    fn init_allocation(
        ecx: &MiriInterpCx<'tcx>,
        id: AllocId,
        kind: MemoryKind,
        size: Size,
        align: Align,
    ) -> InterpResult<'tcx, AllocExtra<'tcx>> {
        if ecx.machine.tracked_alloc_ids.contains(&id) {
            // The user asked to be notified about this allocation.
            ecx.emit_diagnostic(NonHaltingDiagnostic::CreatedAlloc(id, size, align, kind));
        }

        let borrow_tracker = ecx
            .machine
            .borrow_tracker
            .as_ref()
            .map(|bt| bt.borrow_mut().new_allocation(id, size, kind, &ecx.machine));

        let data_race = match &ecx.machine.data_race {
            GlobalDataRaceHandler::None => AllocDataRaceHandler::None,
            GlobalDataRaceHandler::Vclocks(data_race) =>
                AllocDataRaceHandler::Vclocks(
                    data_race::AllocState::new_allocation(
                        data_race,
                        &ecx.machine.threads,
                        size,
                        kind,
                        ecx.machine.current_span(),
                    ),
                    // Weak-memory (store buffer) state only if that feature is enabled.
                    data_race.weak_memory.then(weak_memory::AllocState::new_allocation),
                ),
            GlobalDataRaceHandler::Genmc(_genmc_ctx) => {
                // No per-allocation state is kept here in GenMC mode.
                AllocDataRaceHandler::Genmc
            }
        };

        // Only collect a backtrace for allocations that could actually be leaks.
        let backtrace = if kind.may_leak() || !ecx.machine.collect_leak_backtraces {
            None
        } else {
            Some(ecx.generate_stacktrace())
        };

        // Remember the allocation span for the kinds we track.
        if matches!(kind, MemoryKind::Machine(kind) if kind.should_save_allocation_span()) {
            ecx.machine
                .allocation_spans
                .borrow_mut()
                .insert(id, (ecx.machine.current_span(), None));
        }

        interp_ok(AllocExtra { borrow_tracker, data_race, backtrace, sync: FxHashMap::default() })
    }
}
951
impl VisitProvenance for MiriMachine<'_> {
    fn visit_provenance(&self, visit: &mut VisitWith<'_>) {
        // Exhaustively destructure so that adding a field forces a decision on
        // whether it can contain provenance; `rustfmt::skip` keeps the list compact.
        #[rustfmt::skip]
        let MiriMachine {
            threads,
            thread_cpu_affinity: _,
            tls,
            env_vars,
            main_fn_ret_place,
            argc,
            argv,
            cmd_line,
            extern_statics,
            dirs,
            borrow_tracker,
            data_race,
            alloc_addresses,
            fds,
            epoll_interests: _,
            tcx: _,
            isolated_op: _,
            validation: _,
            monotonic_clock: _,
            layouts: _,
            static_roots: _,
            profiler: _,
            string_cache: _,
            exported_symbols_cache: _,
            backtrace_style: _,
            local_crates: _,
            rng: _,
            allocator: _,
            tracked_alloc_ids: _,
            track_alloc_accesses: _,
            check_alignment: _,
            cmpxchg_weak_failure_rate: _,
            preemption_rate: _,
            report_progress: _,
            basic_block_count: _,
            native_lib: _,
            gc_interval: _,
            since_gc: _,
            num_cpus: _,
            page_size: _,
            stack_addr: _,
            stack_size: _,
            collect_leak_backtraces: _,
            allocation_spans: _,
            symbolic_alignment: _,
            union_data_ranges: _,
            pthread_mutex_sanity: _,
            pthread_rwlock_sanity: _,
            pthread_condvar_sanity: _,
            sb_extern_type_warned: _,
            native_call_mem_warned: _,
            reject_in_isolation_warned: _,
            int2ptr_warned: _,
            mangle_internal_symbol_cache: _,
            force_intrinsic_fallback: _,
            float_nondet: _,
            float_rounding_error: _,
            short_fd_operations: _,
        } = self;

        // Only these fields can contain pointers carrying provenance.
        threads.visit_provenance(visit);
        tls.visit_provenance(visit);
        env_vars.visit_provenance(visit);
        dirs.visit_provenance(visit);
        fds.visit_provenance(visit);
        data_race.visit_provenance(visit);
        borrow_tracker.visit_provenance(visit);
        alloc_addresses.visit_provenance(visit);
        main_fn_ret_place.visit_provenance(visit);
        argc.visit_provenance(visit);
        argv.visit_provenance(visit);
        cmd_line.visit_provenance(visit);
        for ptr in extern_statics.values() {
            ptr.visit_provenance(visit);
        }
    }
}
1033
1034pub type MiriInterpCx<'tcx> = InterpCx<'tcx, MiriMachine<'tcx>>;
1036
/// Helper trait giving extension traits uniform access to the `MiriInterpCx`.
pub trait MiriInterpCxExt<'tcx> {
    fn eval_context_ref<'a>(&'a self) -> &'a MiriInterpCx<'tcx>;
    fn eval_context_mut<'a>(&'a mut self) -> &'a mut MiriInterpCx<'tcx>;
}
// Identity implementation: the interpreter context *is* the eval context.
impl<'tcx> MiriInterpCxExt<'tcx> for MiriInterpCx<'tcx> {
    #[inline(always)]
    fn eval_context_ref(&self) -> &MiriInterpCx<'tcx> {
        self
    }
    #[inline(always)]
    fn eval_context_mut(&mut self) -> &mut MiriInterpCx<'tcx> {
        self
    }
}
1052
/// Machine hook implementations: this wires `MiriMachine` into the interpreter.
impl<'tcx> Machine<'tcx> for MiriMachine<'tcx> {
    type MemoryKind = MiriMemoryKind;
    type ExtraFnVal = DynSym;

    type FrameExtra = FrameExtra<'tcx>;
    type AllocExtra = AllocExtra<'tcx>;

    type Provenance = Provenance;
    type ProvenanceExtra = ProvenanceExtra;
    type Bytes = MiriAllocBytes;

    // Memory map with interior mutability (see `MonoHashMap`).
    type MemoryMap =
        MonoHashMap<AllocId, (MemoryKind, Allocation<Provenance, Self::AllocExtra, Self::Bytes>)>;

    /// Globals are allocated with the `Global` machine kind.
    const GLOBAL_KIND: Option<MiriMemoryKind> = Some(MiriMemoryKind::Global);

    /// Allocation failures are surfaced as interpreter errors, not panics.
    const PANIC_ON_ALLOC_FAIL: bool = false;
1071
    /// Alignment is enforced unless alignment checking is disabled entirely.
    #[inline(always)]
    fn enforce_alignment(ecx: &MiriInterpCx<'tcx>) -> bool {
        ecx.machine.check_alignment != AlignmentCheck::None
    }
1076
    /// Custom alignment check for `AlignmentCheck::Symbolic`: judges alignment by
    /// the allocation's (possibly promised) alignment instead of its integer address.
    /// Returning `None` defers to the interpreter's default check — confirm against
    /// the `Machine` trait docs.
    #[inline(always)]
    fn alignment_check(
        ecx: &MiriInterpCx<'tcx>,
        alloc_id: AllocId,
        alloc_align: Align,
        alloc_kind: AllocKind,
        offset: Size,
        align: Align,
    ) -> Option<Misalignment> {
        if ecx.machine.check_alignment != AlignmentCheck::Symbolic {
            // Not in symbolic mode: no custom verdict.
            return None;
        }
        if alloc_kind != AllocKind::LiveData {
            // Only live data allocations get the symbolic treatment.
            return None;
        }
        // Default promise: offset 0 is aligned to the allocation's own alignment,
        // unless a different alignment was explicitly promised for this allocation.
        let (promised_offset, promised_align) = ecx
            .machine
            .symbolic_alignment
            .borrow()
            .get(&alloc_id)
            .copied()
            .unwrap_or((Size::ZERO, alloc_align));
        if promised_align < align {
            // The promise can never satisfy the requirement.
            Some(Misalignment { has: promised_align, required: align })
        } else {
            // The promised alignment is satisfied at all offsets whose distance
            // from `promised_offset` is a multiple of `align`.
            let distance = offset.bytes().wrapping_sub(promised_offset.bytes());
            if distance.is_multiple_of(align.bytes()) {
                None
            } else {
                // Report the largest power-of-two alignment `distance` does satisfy.
                let distance_pow2 = 1 << distance.trailing_zeros();
                Some(Misalignment {
                    has: Align::from_bytes(distance_pow2).unwrap(),
                    required: align,
                })
            }
        }
    }
1122
    /// Validity is checked unless validation is disabled entirely.
    #[inline(always)]
    fn enforce_validity(ecx: &MiriInterpCx<'tcx>, _layout: TyAndLayout<'tcx>) -> bool {
        ecx.machine.validation != ValidationMode::No
    }
    /// Recursion through references only happens in `Deep` validation mode.
    #[inline(always)]
    fn enforce_validity_recursively(
        ecx: &InterpCx<'tcx, Self>,
        _layout: TyAndLayout<'tcx>,
    ) -> bool {
        ecx.machine.validation == ValidationMode::Deep
    }

    /// Optional overflow checks follow the session's `overflow_checks` setting.
    #[inline(always)]
    fn ignore_optional_overflow_checks(ecx: &MiriInterpCx<'tcx>) -> bool {
        !ecx.tcx.sess.overflow_checks()
    }
1139
    /// Refuses to call a function that requires target features the current
    /// target does not provide.
    fn check_fn_target_features(
        ecx: &MiriInterpCx<'tcx>,
        instance: ty::Instance<'tcx>,
    ) -> InterpResult<'tcx> {
        let attrs = ecx.tcx.codegen_instance_attrs(instance.def);
        if attrs
            .target_features
            .iter()
            .any(|feature| !ecx.tcx.sess.target_features.contains(&feature.name))
        {
            // Build a comma-separated list of the missing, non-implied features.
            let unavailable = attrs
                .target_features
                .iter()
                .filter(|&feature| {
                    feature.kind != TargetFeatureKind::Implied
                        && !ecx.tcx.sess.target_features.contains(&feature.name)
                })
                .fold(String::new(), |mut s, feature| {
                    if !s.is_empty() {
                        s.push_str(", ");
                    }
                    s.push_str(feature.name.as_str());
                    s
                });
            let msg = format!(
                "calling a function that requires unavailable target features: {unavailable}"
            );
            // On WASM-like targets this is reported as an abort rather than UB —
            // presumably because the engine traps there; confirm.
            if ecx.tcx.sess.target.is_like_wasm {
                throw_machine_stop!(TerminationInfo::Abort(msg));
            } else {
                throw_ub_format!("{msg}");
            }
        }
        interp_ok(())
    }
1177
    /// Dispatches function calls: foreign items get emulated via shims (keyed by
    /// link name); everything else hands its MIR back to the interpreter.
    #[inline(always)]
    fn find_mir_or_eval_fn(
        ecx: &mut MiriInterpCx<'tcx>,
        instance: ty::Instance<'tcx>,
        abi: &FnAbi<'tcx, Ty<'tcx>>,
        args: &[FnArg<'tcx, Provenance>],
        dest: &PlaceTy<'tcx>,
        ret: Option<mir::BasicBlock>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx, Option<(&'tcx mir::Body<'tcx>, ty::Instance<'tcx>)>> {
        // Foreign items have no MIR; emulate the call instead.
        if ecx.tcx.is_foreign_item(instance.def_id()) {
            let _trace = enter_trace_span!("emulate_foreign_item");
            let args = ecx.copy_fn_args(args);
            let link_name = Symbol::intern(ecx.tcx.symbol_name(instance).name);
            return ecx.emulate_foreign_item(link_name, abi, &args, dest, ret, unwind);
        }

        // Otherwise, load and return the MIR.
        let _trace = enter_trace_span!("load_mir");
        interp_ok(Some((ecx.load_mir(instance.def, None)?, instance)))
    }
1206
    /// "Extra" function values are dynamically resolved symbols (`DynSym`);
    /// emulate them like foreign items.
    #[inline(always)]
    fn call_extra_fn(
        ecx: &mut MiriInterpCx<'tcx>,
        fn_val: DynSym,
        abi: &FnAbi<'tcx, Ty<'tcx>>,
        args: &[FnArg<'tcx, Provenance>],
        dest: &PlaceTy<'tcx>,
        ret: Option<mir::BasicBlock>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx> {
        let args = ecx.copy_fn_args(args);
        ecx.emulate_dyn_sym(fn_val, abi, &args, dest, ret, unwind)
    }
1220
    /// Intrinsics are handled by Miri's own intrinsic machinery.
    #[inline(always)]
    fn call_intrinsic(
        ecx: &mut MiriInterpCx<'tcx>,
        instance: ty::Instance<'tcx>,
        args: &[OpTy<'tcx>],
        dest: &PlaceTy<'tcx>,
        ret: Option<mir::BasicBlock>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx, Option<ty::Instance<'tcx>>> {
        ecx.call_intrinsic(instance, args, dest, ret, unwind)
    }

    /// MIR `Assert` terminator failures are turned into panics.
    #[inline(always)]
    fn assert_panic(
        ecx: &mut MiriInterpCx<'tcx>,
        msg: &mir::AssertMessage<'tcx>,
        unwind: mir::UnwindAction,
    ) -> InterpResult<'tcx> {
        ecx.assert_panic(msg, unwind)
    }

    /// Non-unwinding panics go through the dedicated no-unwind panic path.
    fn panic_nounwind(ecx: &mut InterpCx<'tcx, Self>, msg: &str) -> InterpResult<'tcx> {
        ecx.start_panic_nounwind(msg)
    }

    /// Unwinding past a terminate boundary: call the corresponding panic lang item.
    /// The call is expected never to return normally (`ret: None`, unreachable unwind).
    fn unwind_terminate(
        ecx: &mut InterpCx<'tcx, Self>,
        reason: mir::UnwindTerminateReason,
    ) -> InterpResult<'tcx> {
        let panic = ecx.tcx.lang_items().get(reason.lang_item()).unwrap();
        let panic = ty::Instance::mono(ecx.tcx.tcx, panic);
        ecx.call_function(
            panic,
            ExternAbi::Rust,
            &[],
            None,
            ReturnContinuation::Goto { ret: None, unwind: mir::UnwindAction::Unreachable },
        )?;
        interp_ok(())
    }
1262
    /// Pointer binary operations are handled by Miri's operator machinery.
    #[inline(always)]
    fn binary_ptr_op(
        ecx: &MiriInterpCx<'tcx>,
        bin_op: mir::BinOp,
        left: &ImmTy<'tcx>,
        right: &ImmTy<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx>> {
        ecx.binary_ptr_op(bin_op, left, right)
    }

    /// NaN generation is delegated to Miri (non-deterministic NaN payloads).
    #[inline(always)]
    fn generate_nan<F1: Float + FloatConvert<F2>, F2: Float>(
        ecx: &InterpCx<'tcx, Self>,
        inputs: &[F1],
    ) -> F2 {
        ecx.generate_nan(inputs)
    }

    /// Applies random rounding error to float results (see `crate::math`;
    /// the `4` is an error-magnitude parameter — confirm its meaning there).
    #[inline(always)]
    fn apply_float_nondet(
        ecx: &mut InterpCx<'tcx, Self>,
        val: ImmTy<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx>> {
        crate::math::apply_random_float_error_to_imm(ecx, val, 4)
    }

    /// min/max disambiguation for equal floats is delegated to Miri.
    #[inline(always)]
    fn equal_float_min_max<F: Float>(ecx: &MiriInterpCx<'tcx>, a: F, b: F) -> F {
        ecx.equal_float_min_max(a, b)
    }

    /// UB checks follow the session setting.
    #[inline(always)]
    fn ub_checks(ecx: &InterpCx<'tcx, Self>) -> InterpResult<'tcx, bool> {
        interp_ok(ecx.tcx.sess.ub_checks())
    }

    /// Contract checks follow the session setting.
    #[inline(always)]
    fn contract_checks(ecx: &InterpCx<'tcx, Self>) -> InterpResult<'tcx, bool> {
        interp_ok(ecx.tcx.sess.contract_checks())
    }
1303
    /// Thread-local statics get a per-thread allocation, created on first use.
    #[inline(always)]
    fn thread_local_static_pointer(
        ecx: &mut MiriInterpCx<'tcx>,
        def_id: DefId,
    ) -> InterpResult<'tcx, StrictPointer> {
        ecx.get_or_create_thread_local_alloc(def_id)
    }
1311
    /// Resolves an extern static to the shim registered under its link name,
    /// verifying that the declared size and alignment match the shim's.
    fn extern_static_pointer(
        ecx: &MiriInterpCx<'tcx>,
        def_id: DefId,
    ) -> InterpResult<'tcx, StrictPointer> {
        let link_name = Symbol::intern(ecx.tcx.symbol_name(Instance::mono(*ecx.tcx, def_id)).name);
        if let Some(&ptr) = ecx.machine.extern_statics.get(&link_name) {
            // Shim pointers are registered with concrete provenance (see
            // `add_extern_static`), so a wildcard here is impossible.
            let Provenance::Concrete { alloc_id, .. } = ptr.provenance else {
                panic!("extern_statics cannot contain wildcards")
            };
            let info = ecx.get_alloc_info(alloc_id);
            let def_ty = ecx.tcx.type_of(def_id).instantiate_identity();
            let extern_decl_layout =
                ecx.tcx.layout_of(ecx.typing_env().as_query_input(def_ty)).unwrap();
            // Mismatched size/alignment between declaration and shim is unsupported.
            if extern_decl_layout.size != info.size || extern_decl_layout.align.abi != info.align {
                throw_unsup_format!(
                    "extern static `{link_name}` has been declared as `{krate}::{name}` \
                    with a size of {decl_size} bytes and alignment of {decl_align} bytes, \
                    but Miri emulates it via an extern static shim \
                    with a size of {shim_size} bytes and alignment of {shim_align} bytes",
                    name = ecx.tcx.def_path_str(def_id),
                    krate = ecx.tcx.crate_name(def_id.krate),
                    decl_size = extern_decl_layout.size.bytes(),
                    decl_align = extern_decl_layout.align.abi.bytes(),
                    shim_size = info.size.bytes(),
                    shim_align = info.align.bytes(),
                )
            }
            interp_ok(ptr)
        } else {
            throw_unsup_format!("extern static `{link_name}` is not supported by Miri",)
        }
    }
1347
    /// Creates the Miri-specific `AllocExtra` for a fresh (non-global)
    /// allocation. Globals take the `adjust_global_allocation` path instead,
    /// hence the assertion.
    fn init_local_allocation(
        ecx: &MiriInterpCx<'tcx>,
        id: AllocId,
        kind: MemoryKind,
        size: Size,
        align: Align,
    ) -> InterpResult<'tcx, Self::AllocExtra> {
        assert!(kind != MiriMemoryKind::Global.into());
        MiriMachine::init_allocation(ecx, id, kind, size, align)
    }
1358
1359 fn adjust_alloc_root_pointer(
1360 ecx: &MiriInterpCx<'tcx>,
1361 ptr: interpret::Pointer<CtfeProvenance>,
1362 kind: Option<MemoryKind>,
1363 ) -> InterpResult<'tcx, interpret::Pointer<Provenance>> {
1364 let kind = kind.expect("we set our GLOBAL_KIND so this cannot be None");
1365 let alloc_id = ptr.provenance.alloc_id();
1366 if cfg!(debug_assertions) {
1367 match ecx.tcx.try_get_global_alloc(alloc_id) {
1369 Some(GlobalAlloc::Static(def_id)) if ecx.tcx.is_thread_local_static(def_id) => {
1370 panic!("adjust_alloc_root_pointer called on thread-local static")
1371 }
1372 Some(GlobalAlloc::Static(def_id)) if ecx.tcx.is_foreign_item(def_id) => {
1373 panic!("adjust_alloc_root_pointer called on extern static")
1374 }
1375 _ => {}
1376 }
1377 }
1378 let tag = if let Some(borrow_tracker) = &ecx.machine.borrow_tracker {
1380 borrow_tracker.borrow_mut().root_ptr_tag(alloc_id, &ecx.machine)
1381 } else {
1382 BorTag::default()
1384 };
1385 ecx.adjust_alloc_root_pointer(ptr, tag, kind)
1386 }
1387
    /// Handles an int-to-pointer cast by delegating to the machine-level
    /// `ptr_from_addr_cast` (address-reuse / provenance logic lives there).
    #[inline(always)]
    fn ptr_from_addr_cast(ecx: &MiriInterpCx<'tcx>, addr: u64) -> InterpResult<'tcx, Pointer> {
        ecx.ptr_from_addr_cast(addr)
    }
1393
    /// Marks the given provenance as exposed (pointer-to-int cast); delegates
    /// to the machine-level `expose_provenance`.
    #[inline(always)]
    fn expose_provenance(
        ecx: &InterpCx<'tcx, Self>,
        provenance: Self::Provenance,
    ) -> InterpResult<'tcx> {
        ecx.expose_provenance(provenance)
    }
1404
1405 fn ptr_get_alloc(
1417 ecx: &MiriInterpCx<'tcx>,
1418 ptr: StrictPointer,
1419 size: i64,
1420 ) -> Option<(AllocId, Size, Self::ProvenanceExtra)> {
1421 let rel = ecx.ptr_get_alloc(ptr, size);
1422
1423 rel.map(|(alloc_id, size)| {
1424 let tag = match ptr.provenance {
1425 Provenance::Concrete { tag, .. } => ProvenanceExtra::Concrete(tag),
1426 Provenance::Wildcard => ProvenanceExtra::Wildcard,
1427 };
1428 (alloc_id, size, tag)
1429 })
1430 }
1431
    /// Converts a tcx-owned global allocation into Miri's own representation:
    /// bytes are re-homed via `get_global_alloc_bytes`, embedded pointers get
    /// root provenance, and fresh `AllocExtra` is attached.
    fn adjust_global_allocation<'b>(
        ecx: &InterpCx<'tcx, Self>,
        id: AllocId,
        alloc: &'b Allocation,
    ) -> InterpResult<'tcx, Cow<'b, Allocation<Self::Provenance, Self::AllocExtra, Self::Bytes>>>
    {
        let alloc = alloc.adjust_from_tcx(
            &ecx.tcx,
            |bytes, align| ecx.get_global_alloc_bytes(id, bytes, align),
            |ptr| ecx.global_root_pointer(ptr),
        )?;
        let kind = MiriMemoryKind::Global.into();
        let extra = MiriMachine::init_allocation(ecx, id, kind, alloc.size(), alloc.align)?;
        interp_ok(Cow::Owned(alloc.with_extra(extra)))
    }
1455
    /// Hook run before every memory read: emits access-tracking diagnostics,
    /// informs the active data-race handler, then the borrow tracker.
    #[inline(always)]
    fn before_memory_read(
        _tcx: TyCtxtAt<'tcx>,
        machine: &Self,
        alloc_extra: &AllocExtra<'tcx>,
        ptr: Pointer,
        (alloc_id, prov_extra): (AllocId, Self::ProvenanceExtra),
        range: AllocRange,
    ) -> InterpResult<'tcx> {
        // Report the access if this allocation is being tracked.
        if machine.track_alloc_accesses && machine.tracked_alloc_ids.contains(&alloc_id) {
            machine
                .emit_diagnostic(NonHaltingDiagnostic::AccessedAlloc(alloc_id, AccessKind::Read));
        }
        // Data-race handling: GenMC gets the raw address/size; vector-clock
        // mode records a non-atomic read and notifies weak-memory buffers.
        match &machine.data_race {
            GlobalDataRaceHandler::None => {}
            GlobalDataRaceHandler::Genmc(genmc_ctx) =>
                genmc_ctx.memory_load(machine, ptr.addr(), range.size)?,
            GlobalDataRaceHandler::Vclocks(_data_race) => {
                let _trace = enter_trace_span!(data_race::before_memory_read);
                // The per-allocation state must match the global handler mode.
                let AllocDataRaceHandler::Vclocks(data_race, weak_memory) = &alloc_extra.data_race
                else {
                    unreachable!();
                };
                data_race.read(alloc_id, range, NaReadType::Read, None, machine)?;
                if let Some(weak_memory) = weak_memory {
                    weak_memory.memory_accessed(range, machine.data_race.as_vclocks_ref().unwrap());
                }
            }
        }
        // Borrow-tracker checks run after the data-race checks.
        if let Some(borrow_tracker) = &alloc_extra.borrow_tracker {
            borrow_tracker.before_memory_read(alloc_id, prov_extra, range, machine)?;
        }
        interp_ok(())
    }
1491
    /// Hook run before every memory write: mirrors `before_memory_read` but
    /// records a non-atomic write instead.
    #[inline(always)]
    fn before_memory_write(
        _tcx: TyCtxtAt<'tcx>,
        machine: &mut Self,
        alloc_extra: &mut AllocExtra<'tcx>,
        ptr: Pointer,
        (alloc_id, prov_extra): (AllocId, Self::ProvenanceExtra),
        range: AllocRange,
    ) -> InterpResult<'tcx> {
        // Report the access if this allocation is being tracked.
        if machine.track_alloc_accesses && machine.tracked_alloc_ids.contains(&alloc_id) {
            machine
                .emit_diagnostic(NonHaltingDiagnostic::AccessedAlloc(alloc_id, AccessKind::Write));
        }
        // Data-race handling, analogous to the read hook.
        match &machine.data_race {
            GlobalDataRaceHandler::None => {}
            GlobalDataRaceHandler::Genmc(genmc_ctx) => {
                genmc_ctx.memory_store(machine, ptr.addr(), range.size)?;
            }
            GlobalDataRaceHandler::Vclocks(_global_state) => {
                let _trace = enter_trace_span!(data_race::before_memory_write);
                // The per-allocation state must match the global handler mode.
                let AllocDataRaceHandler::Vclocks(data_race, weak_memory) =
                    &mut alloc_extra.data_race
                else {
                    unreachable!()
                };
                data_race.write(alloc_id, range, NaWriteType::Write, None, machine)?;
                if let Some(weak_memory) = weak_memory {
                    weak_memory.memory_accessed(range, machine.data_race.as_vclocks_ref().unwrap());
                }
            }
        }
        // Borrow-tracker checks run after the data-race checks.
        if let Some(borrow_tracker) = &mut alloc_extra.borrow_tracker {
            borrow_tracker.before_memory_write(alloc_id, prov_extra, range, machine)?;
        }
        interp_ok(())
    }
1528
    /// Hook run before an allocation is freed: diagnostics, data-race
    /// bookkeeping (deallocation is modeled as a write over the whole range),
    /// borrow-tracker notification, span recording, and address release.
    #[inline(always)]
    fn before_memory_deallocation(
        _tcx: TyCtxtAt<'tcx>,
        machine: &mut Self,
        alloc_extra: &mut AllocExtra<'tcx>,
        ptr: Pointer,
        (alloc_id, prove_extra): (AllocId, Self::ProvenanceExtra),
        size: Size,
        align: Align,
        kind: MemoryKind,
    ) -> InterpResult<'tcx> {
        if machine.tracked_alloc_ids.contains(&alloc_id) {
            machine.emit_diagnostic(NonHaltingDiagnostic::FreedAlloc(alloc_id));
        }
        match &machine.data_race {
            GlobalDataRaceHandler::None => {}
            GlobalDataRaceHandler::Genmc(genmc_ctx) =>
                genmc_ctx.handle_dealloc(machine, ptr.addr(), size, align, kind)?,
            GlobalDataRaceHandler::Vclocks(_global_state) => {
                let _trace = enter_trace_span!(data_race::before_memory_deallocation);
                let data_race = alloc_extra.data_race.as_vclocks_mut().unwrap();
                // Treat the deallocation as a write to the entire allocation.
                data_race.write(
                    alloc_id,
                    alloc_range(Size::ZERO, size),
                    NaWriteType::Deallocate,
                    None,
                    machine,
                )?;
            }
        }
        if let Some(borrow_tracker) = &mut alloc_extra.borrow_tracker {
            borrow_tracker.before_memory_deallocation(alloc_id, prove_extra, size, machine)?;
        }
        // Remember where the allocation was freed, for later diagnostics.
        if let Some((_, deallocated_at)) = machine.allocation_spans.borrow_mut().get_mut(&alloc_id)
        {
            *deallocated_at = Some(machine.current_span());
        }
        // Release the allocation's base address for potential reuse.
        machine.free_alloc_id(alloc_id, size, align, kind);
        interp_ok(())
    }
1569
1570 #[inline(always)]
1571 fn retag_ptr_value(
1572 ecx: &mut InterpCx<'tcx, Self>,
1573 kind: mir::RetagKind,
1574 val: &ImmTy<'tcx>,
1575 ) -> InterpResult<'tcx, ImmTy<'tcx>> {
1576 if ecx.machine.borrow_tracker.is_some() {
1577 ecx.retag_ptr_value(kind, val)
1578 } else {
1579 interp_ok(val.clone())
1580 }
1581 }
1582
1583 #[inline(always)]
1584 fn retag_place_contents(
1585 ecx: &mut InterpCx<'tcx, Self>,
1586 kind: mir::RetagKind,
1587 place: &PlaceTy<'tcx>,
1588 ) -> InterpResult<'tcx> {
1589 if ecx.machine.borrow_tracker.is_some() {
1590 ecx.retag_place_contents(kind, place)?;
1591 }
1592 interp_ok(())
1593 }
1594
1595 fn protect_in_place_function_argument(
1596 ecx: &mut InterpCx<'tcx, Self>,
1597 place: &MPlaceTy<'tcx>,
1598 ) -> InterpResult<'tcx> {
1599 let protected_place = if ecx.machine.borrow_tracker.is_some() {
1602 ecx.protect_place(place)?
1603 } else {
1604 place.clone()
1606 };
1607 ecx.write_uninit(&protected_place)?;
1612 interp_ok(())
1614 }
1615
    /// Builds the Miri-specific `FrameExtra` for a freshly pushed frame:
    /// profiler timing, borrow-tracker frame state, unwind-catch slot,
    /// user-relevance flag, and data-race per-frame state.
    #[inline(always)]
    fn init_frame(
        ecx: &mut InterpCx<'tcx, Self>,
        frame: Frame<'tcx, Provenance>,
    ) -> InterpResult<'tcx, Frame<'tcx, Provenance, FrameExtra<'tcx>>> {
        // With the measureme profiler enabled, start a detached timing interval
        // for this call; it is finished when the frame is popped.
        let timing = if let Some(profiler) = ecx.machine.profiler.as_ref() {
            let fn_name = frame.instance().to_string();
            // Cache the interned profiler string per function name to avoid
            // re-allocating it on every call.
            let entry = ecx.machine.string_cache.entry(fn_name.clone());
            let name = entry.or_insert_with(|| profiler.alloc_string(&*fn_name));

            Some(profiler.start_recording_interval_event_detached(
                *name,
                measureme::EventId::from_label(*name),
                ecx.active_thread().to_u32(),
            ))
        } else {
            None
        };

        let borrow_tracker = ecx.machine.borrow_tracker.as_ref();

        let extra = FrameExtra {
            borrow_tracker: borrow_tracker.map(|bt| bt.borrow_mut().new_frame()),
            catch_unwind: None,
            timing,
            is_user_relevant: ecx.machine.is_user_relevant(&frame),
            // Per-frame data-race state only exists in vector-clock mode.
            data_race: ecx
                .machine
                .data_race
                .as_vclocks_ref()
                .map(|_| data_race::FrameState::default()),
        };

        interp_ok(frame.with_extra(extra))
    }
1652
    /// Returns the call stack of the currently active thread.
    fn stack<'a>(
        ecx: &'a InterpCx<'tcx, Self>,
    ) -> &'a [Frame<'tcx, Self::Provenance, Self::FrameExtra>] {
        ecx.active_thread_stack()
    }
1658
    /// Returns the call stack of the currently active thread, mutably.
    fn stack_mut<'a>(
        ecx: &'a mut InterpCx<'tcx, Self>,
    ) -> &'a mut Vec<Frame<'tcx, Self::Provenance, Self::FrameExtra>> {
        ecx.active_thread_stack_mut()
    }
1664
1665 fn before_terminator(ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
1666 ecx.machine.basic_block_count += 1u64; ecx.machine.since_gc += 1;
1668 if let Some(report_progress) = ecx.machine.report_progress {
1670 if ecx.machine.basic_block_count.is_multiple_of(u64::from(report_progress)) {
1671 ecx.emit_diagnostic(NonHaltingDiagnostic::ProgressReport {
1672 block_count: ecx.machine.basic_block_count,
1673 });
1674 }
1675 }
1676
1677 if ecx.machine.gc_interval > 0 && ecx.machine.since_gc >= ecx.machine.gc_interval {
1682 ecx.machine.since_gc = 0;
1683 ecx.run_provenance_gc();
1684 }
1685
1686 ecx.maybe_preempt_active_thread();
1689
1690 ecx.machine.monotonic_clock.tick();
1692
1693 interp_ok(())
1694 }
1695
1696 #[inline(always)]
1697 fn after_stack_push(ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
1698 if ecx.frame().extra.is_user_relevant {
1699 let stack_len = ecx.active_thread_stack().len();
1702 ecx.active_thread_mut().set_top_user_relevant_frame(stack_len - 1);
1703 }
1704 interp_ok(())
1705 }
1706
    /// Just before a frame is popped: borrow-tracker cleanup for the frame,
    /// then maintenance of the "topmost user-relevant frame" cache.
    fn before_stack_pop(ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
        let frame = ecx.frame();
        // Borrow-tracker bookkeeping for the frame about to go away (e.g.
        // releasing its protectors) — runs only when the tracker is enabled.
        if ecx.machine.borrow_tracker.is_some() {
            ecx.on_stack_pop(frame)?;
        }
        if frame.extra.is_user_relevant {
            // The frame being popped may have been the cached topmost
            // user-relevant frame; recompute. NOTE(review): the `1` appears to
            // mean "skip the topmost frame(s) being popped" — confirm against
            // `recompute_top_user_relevant_frame`'s definition.
            ecx.active_thread_mut().recompute_top_user_relevant_frame(1);
        }
        info!("Leaving {}", ecx.frame().instance());
        interp_ok(())
    }
1728
    /// After a frame is popped: handle catch_unwind logic, finish the
    /// profiler's timing interval for the call, and log where execution
    /// continues.
    #[inline(always)]
    fn after_stack_pop(
        ecx: &mut InterpCx<'tcx, Self>,
        frame: Frame<'tcx, Provenance, FrameExtra<'tcx>>,
        unwinding: bool,
    ) -> InterpResult<'tcx, ReturnAction> {
        let res = {
            let mut frame = frame;
            // Take the timing handle out before `frame.extra` is consumed below.
            let timing = frame.extra.timing.take();
            let res = ecx.handle_stack_pop_unwind(frame.extra, unwinding);
            if let Some(profiler) = ecx.machine.profiler.as_ref() {
                // `timing` is set in `init_frame` whenever the profiler is
                // enabled, so the unwrap cannot fail here.
                profiler.finish_recording_interval_event(timing.unwrap());
            }
            res
        };
        if !ecx.active_thread_stack().is_empty() {
            info!("Continuing in {}", ecx.frame().instance());
        }
        res
    }
1752
1753 fn after_local_read(
1754 ecx: &InterpCx<'tcx, Self>,
1755 frame: &Frame<'tcx, Provenance, FrameExtra<'tcx>>,
1756 local: mir::Local,
1757 ) -> InterpResult<'tcx> {
1758 if let Some(data_race) = &frame.extra.data_race {
1759 let _trace = enter_trace_span!(data_race::after_local_read);
1760 data_race.local_read(local, &ecx.machine);
1761 }
1762 interp_ok(())
1763 }
1764
1765 fn after_local_write(
1766 ecx: &mut InterpCx<'tcx, Self>,
1767 local: mir::Local,
1768 storage_live: bool,
1769 ) -> InterpResult<'tcx> {
1770 if let Some(data_race) = &ecx.frame().extra.data_race {
1771 let _trace = enter_trace_span!(data_race::after_local_write);
1772 data_race.local_write(local, storage_live, &ecx.machine);
1773 }
1774 interp_ok(())
1775 }
1776
    /// After a local is forced into real memory: record the allocation's span
    /// and migrate the local's data-race state into the new allocation.
    fn after_local_moved_to_memory(
        ecx: &mut InterpCx<'tcx, Self>,
        local: mir::Local,
        mplace: &MPlaceTy<'tcx>,
    ) -> InterpResult<'tcx> {
        // A freshly created allocation always has concrete provenance.
        let Some(Provenance::Concrete { alloc_id, .. }) = mplace.ptr().provenance else {
            panic!("after_local_allocated should only be called on fresh allocations");
        };
        // Remember the source span of the local's declaration for diagnostics.
        let local_decl = &ecx.frame().body().local_decls[local];
        let span = local_decl.source_info.span;
        ecx.machine.allocation_spans.borrow_mut().insert(alloc_id, (span, None));
        // Split the borrow so we can access both the allocation extra data and
        // the machine state at the same time.
        let (alloc_info, machine) = ecx.get_alloc_extra_mut(alloc_id)?;
        if let Some(data_race) =
            &machine.threads.active_thread_stack().last().unwrap().extra.data_race
        {
            let _trace = enter_trace_span!(data_race::after_local_moved_to_memory);
            data_race.local_moved_to_memory(
                local,
                alloc_info.data_race.as_vclocks_mut().unwrap(),
                machine,
            );
        }
        interp_ok(())
    }
1803
1804 fn get_global_alloc_salt(
1805 ecx: &InterpCx<'tcx, Self>,
1806 instance: Option<ty::Instance<'tcx>>,
1807 ) -> usize {
1808 let unique = if let Some(instance) = instance {
1809 let is_generic = instance
1822 .args
1823 .into_iter()
1824 .any(|arg| !matches!(arg.kind(), ty::GenericArgKind::Lifetime(_)));
1825 let can_be_inlined = matches!(
1826 ecx.tcx.sess.opts.unstable_opts.cross_crate_inline_threshold,
1827 InliningThreshold::Always
1828 ) || !matches!(
1829 ecx.tcx.codegen_instance_attrs(instance.def).inline,
1830 InlineAttr::Never
1831 );
1832 !is_generic && !can_be_inlined
1833 } else {
1834 false
1836 };
1837 if unique {
1839 CTFE_ALLOC_SALT
1840 } else {
1841 ecx.machine.rng.borrow_mut().random_range(0..ADDRS_PER_ANON_GLOBAL)
1842 }
1843 }
1844
    /// Memoizes the data range of a union type; `compute_range` only runs on a
    /// cache miss for `ty`.
    fn cached_union_data_range<'e>(
        ecx: &'e mut InterpCx<'tcx, Self>,
        ty: Ty<'tcx>,
        compute_range: impl FnOnce() -> RangeSet,
    ) -> Cow<'e, RangeSet> {
        Cow::Borrowed(ecx.machine.union_data_ranges.entry(ty).or_insert_with(compute_range))
    }
1852
1853 fn get_default_alloc_params(&self) -> <Self::Bytes as AllocBytes>::AllocParams {
1854 use crate::alloc::MiriAllocParams;
1855
1856 match &self.allocator {
1857 Some(alloc) => MiriAllocParams::Isolated(alloc.clone()),
1858 None => MiriAllocParams::Global,
1859 }
1860 }
1861
    /// Enters a tracing span when the "tracing" feature is enabled; otherwise
    /// the closure is never called and a unit value stands in for the span.
    fn enter_trace_span(span: impl FnOnce() -> tracing::Span) -> impl EnteredTraceSpan {
        #[cfg(feature = "tracing")]
        {
            // Create the span lazily and enter it.
            span().entered()
        }
        #[cfg(not(feature = "tracing"))]
        #[expect(clippy::unused_unit)]
        {
            // Discard the unused closure; `()` implements `EnteredTraceSpan`.
            let _ = span; ()
        }
    }
1874}
1875
/// A one-shot callback to be invoked on the interpreter at a later point.
/// The callback is consumed when called (`self: Box<Self>`), and its captured
/// values participate in provenance visiting via the `VisitProvenance`
/// supertrait.
pub trait MachineCallback<'tcx, T>: VisitProvenance {
    /// Invokes (and consumes) the callback with the given argument.
    fn call(
        self: Box<Self>,
        ecx: &mut InterpCx<'tcx, MiriMachine<'tcx>>,
        arg: T,
    ) -> InterpResult<'tcx>;
}
1885
/// A boxed, dynamically dispatched [`MachineCallback`].
pub type DynMachineCallback<'tcx, T> = Box<dyn MachineCallback<'tcx, T> + 'tcx>;
1888
/// Builds a boxed [`MachineCallback`] from a closure-like syntax.
///
/// `@capture<'tcx, ...>` lists the lifetimes, the `{ name: Type, ... }` block
/// the values to capture into the callback, and the trailing
/// `|this, arg: ArgTy| body` closure is the callback body. Captured values are
/// reported to the provenance GC through the generated `VisitProvenance` impl.
#[macro_export]
macro_rules! callback {
    (@capture<$tcx:lifetime $(,)? $($lft:lifetime),*>
        { $($name:ident: $type:ty),* $(,)? }
        |$this:ident, $arg:ident: $arg_ty:ty| $body:expr $(,)?) => {{
        // Anonymous struct holding the captured values; the `PhantomData`
        // ties it to the `'tcx` lifetime even with no captures.
        struct Callback<$tcx, $($lft),*> {
            $($name: $type,)*
            _phantom: std::marker::PhantomData<&$tcx ()>,
        }

        impl<$tcx, $($lft),*> VisitProvenance for Callback<$tcx, $($lft),*> {
            fn visit_provenance(&self, _visit: &mut VisitWith<'_>) {
                // Visit every capture so its provenance is kept alive by the GC.
                $(
                    self.$name.visit_provenance(_visit);
                )*
            }
        }

        impl<$tcx, $($lft),*> MachineCallback<$tcx, $arg_ty> for Callback<$tcx, $($lft),*> {
            fn call(
                self: Box<Self>,
                $this: &mut MiriInterpCx<$tcx>,
                $arg: $arg_ty
            ) -> InterpResult<$tcx> {
                // Unpack the captures into local bindings visible to `$body`.
                #[allow(unused_variables)]
                let Callback { $($name,)* _phantom } = *self;
                $body
            }
        }

        Box::new(Callback {
            $($name,)*
            _phantom: std::marker::PhantomData
        })
    }};
}