miri/helpers.rs

1use std::num::NonZero;
2use std::time::Duration;
3use std::{cmp, iter};
4
5use rand::RngCore;
6use rustc_abi::{Align, ExternAbi, FieldIdx, FieldsShape, Size, Variants};
7use rustc_apfloat::Float;
8use rustc_apfloat::ieee::{Double, Half, Quad, Single};
9use rustc_hir::Safety;
10use rustc_hir::def::{DefKind, Namespace};
11use rustc_hir::def_id::{CRATE_DEF_INDEX, CrateNum, DefId, LOCAL_CRATE};
12use rustc_index::IndexVec;
13use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
14use rustc_middle::middle::dependency_format::Linkage;
15use rustc_middle::middle::exported_symbols::ExportedSymbol;
16use rustc_middle::ty::layout::{LayoutOf, MaybeResult, TyAndLayout};
17use rustc_middle::ty::{self, FloatTy, IntTy, Ty, TyCtxt, UintTy};
18use rustc_session::config::CrateType;
19use rustc_span::{Span, Symbol};
20use rustc_symbol_mangling::mangle_internal_symbol;
21
22use crate::*;
23
24/// Indicates which kind of access is being performed.
25#[derive(Copy, Clone, Hash, PartialEq, Eq, Debug)]
26pub enum AccessKind {
27    Read,
28    Write,
29}
30
31/// Resolves a path to a `DefId`.
32///
33/// A `None` namespace indicates we are looking for a module.
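///
/// Illustrative example (assuming the interpreted program depends on `std`): the path
/// `&["std", "env"]` with a `None` namespace resolves the `std::env` module.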
34fn try_resolve_did(tcx: TyCtxt<'_>, path: &[&str], namespace: Option<Namespace>) -> Option<DefId> {
35    let _trace = enter_trace_span!("try_resolve_did", ?path);
36
37    /// Yield all children of the given item that have the given name.
38    fn find_children<'tcx: 'a, 'a>(
39        tcx: TyCtxt<'tcx>,
40        item: DefId,
41        name: &'a str,
42    ) -> impl Iterator<Item = DefId> + 'a {
43        let name = Symbol::intern(name);
44        tcx.module_children(item)
45            .iter()
46            .filter(move |item| item.ident.name == name)
47            .map(move |item| item.res.def_id())
48    }
49
50    // Take apart the path: leading crate, a sequence of modules, and potentially a final item.
51    let (&crate_name, path) = path.split_first().expect("paths must have at least one segment");
52    let (modules, item) = if let Some(namespace) = namespace {
53        let (&item_name, modules) =
54            path.split_last().expect("non-module paths must have at least 2 segments");
55        (modules, Some((item_name, namespace)))
56    } else {
57        (path, None)
58    };
59
60    // There may be more than one crate with this name. We try them all.
61    // (This is particularly relevant when running `std` tests as then there are two `std` crates:
62    // the one in the sysroot and the one locally built by `cargo test`.)
63    // FIXME: can we prefer the one from the sysroot?
64    'crates: for krate in
65        tcx.crates(()).iter().filter(|&&krate| tcx.crate_name(krate).as_str() == crate_name)
66    {
67        let mut cur_item = DefId { krate: *krate, index: CRATE_DEF_INDEX };
68        // Go over the modules.
69        for &segment in modules {
70            let Some(next_item) = find_children(tcx, cur_item, segment)
71                .find(|item| tcx.def_kind(item) == DefKind::Mod)
72            else {
73                continue 'crates;
74            };
75            cur_item = next_item;
76        }
77        // Finally, look up the desired item in this module, if any.
78        match item {
79            Some((item_name, namespace)) => {
80                let Some(item) = find_children(tcx, cur_item, item_name)
81                    .find(|item| tcx.def_kind(item).ns() == Some(namespace))
82                else {
83                    continue 'crates;
84                };
85                return Some(item);
86            }
87            None => {
88                // Just return the module.
89                return Some(cur_item);
90            }
91        }
92    }
93    // Item not found in any of the crates with the right name.
94    None
95}
96
97/// Gets an instance for a path; fails gracefully if the path does not exist.
98pub fn try_resolve_path<'tcx>(
99    tcx: TyCtxt<'tcx>,
100    path: &[&str],
101    namespace: Namespace,
102) -> Option<ty::Instance<'tcx>> {
103    let did = try_resolve_did(tcx, path, Some(namespace))?;
104    Some(ty::Instance::mono(tcx, did))
105}
106
107/// Gets an instance for a path.
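///
/// Illustrative usage (not a doctest; assumes a `TyCtxt` is in scope and that the item exists
/// and is non-generic):
/// ```ignore
/// let abort = resolve_path(tcx, &["std", "process", "abort"], Namespace::ValueNS);
/// ```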
108#[track_caller]
109pub fn resolve_path<'tcx>(
110    tcx: TyCtxt<'tcx>,
111    path: &[&str],
112    namespace: Namespace,
113) -> ty::Instance<'tcx> {
114    try_resolve_path(tcx, path, namespace)
115        .unwrap_or_else(|| panic!("failed to find required Rust item: {path:?}"))
116}
117
118/// Gets the layout of a type at a path.
119#[track_caller]
120pub fn path_ty_layout<'tcx>(cx: &impl LayoutOf<'tcx>, path: &[&str]) -> TyAndLayout<'tcx> {
121    let ty = resolve_path(cx.tcx(), path, Namespace::TypeNS).ty(cx.tcx(), cx.typing_env());
122    cx.layout_of(ty).to_result().ok().unwrap()
123}
124
125/// Call `f` for each exported symbol.
126pub fn iter_exported_symbols<'tcx>(
127    tcx: TyCtxt<'tcx>,
128    mut f: impl FnMut(CrateNum, DefId) -> InterpResult<'tcx>,
129) -> InterpResult<'tcx> {
130    // First, the symbols in the local crate. We can't use `exported_symbols` here as that
131    // skips `#[used]` statics (since `reachable_set` skips them in binary crates).
132    // So we walk all HIR items ourselves instead.
133    let crate_items = tcx.hir_crate_items(());
134    for def_id in crate_items.definitions() {
135        let exported = tcx.def_kind(def_id).has_codegen_attrs() && {
136            let codegen_attrs = tcx.codegen_fn_attrs(def_id);
137            codegen_attrs.contains_extern_indicator(tcx, def_id.into())
138                || codegen_attrs.flags.contains(CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL)
139                || codegen_attrs.flags.contains(CodegenFnAttrFlags::USED_COMPILER)
140                || codegen_attrs.flags.contains(CodegenFnAttrFlags::USED_LINKER)
141        };
142        if exported {
143            f(LOCAL_CRATE, def_id.into())?;
144        }
145    }
146
147    // Next, all our dependencies.
148    // `dependency_formats` includes all the transitive information needed to link a crate,
149    // which is what we need here since we need to dig out `exported_symbols` from all transitive
150    // dependencies.
151    let dependency_formats = tcx.dependency_formats(());
152    // Find the dependencies of the executable we are running.
153    let dependency_format = dependency_formats
154        .get(&CrateType::Executable)
155        .expect("interpreting a non-executable crate");
156    for cnum in dependency_format
157        .iter_enumerated()
158        .filter_map(|(num, &linkage)| (linkage != Linkage::NotLinked).then_some(num))
159    {
160        if cnum == LOCAL_CRATE {
161            continue; // Already handled above
162        }
163
164        // We can ignore `_export_info` here: we are a Rust crate, and everything is exported
165        // from a Rust crate.
166        for &(symbol, _export_info) in tcx.exported_non_generic_symbols(cnum) {
167            if let ExportedSymbol::NonGeneric(def_id) = symbol {
168                f(cnum, def_id)?;
169            }
170        }
171    }
172    interp_ok(())
173}
174
175/// Convert a softfloat type to its corresponding hostfloat type.
176pub trait ToHost {
177    type HostFloat;
178    fn to_host(self) -> Self::HostFloat;
179}
180
181/// Convert a hostfloat type to its corresponding softfloat type.
182pub trait ToSoft {
183    type SoftFloat;
184    fn to_soft(self) -> Self::SoftFloat;
185}
186
187impl ToHost for rustc_apfloat::ieee::Double {
188    type HostFloat = f64;
189
190    fn to_host(self) -> Self::HostFloat {
191        f64::from_bits(self.to_bits().try_into().unwrap())
192    }
193}
194
195impl ToSoft for f64 {
196    type SoftFloat = rustc_apfloat::ieee::Double;
197
198    fn to_soft(self) -> Self::SoftFloat {
199        Float::from_bits(self.to_bits().into())
200    }
201}
202
203impl ToHost for rustc_apfloat::ieee::Single {
204    type HostFloat = f32;
205
206    fn to_host(self) -> Self::HostFloat {
207        f32::from_bits(self.to_bits().try_into().unwrap())
208    }
209}
210
211impl ToSoft for f32 {
212    type SoftFloat = rustc_apfloat::ieee::Single;
213
214    fn to_soft(self) -> Self::SoftFloat {
215        Float::from_bits(self.to_bits().into())
216    }
217}
218
219impl<'tcx> EvalContextExt<'tcx> for crate::MiriInterpCx<'tcx> {}
220pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> {
221    /// Checks if the given crate/module exists.
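    ///
    /// Illustrative usage (inside a shim, with `this` being the interpreter context):
    /// ```ignore
    /// let has_libc = this.have_module(&["libc"]);
    /// ```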
222    fn have_module(&self, path: &[&str]) -> bool {
223        try_resolve_did(*self.eval_context_ref().tcx, path, None).is_some()
224    }
225
226    /// Evaluates the global at the specified path and returns its place.
227    fn eval_path(&self, path: &[&str]) -> MPlaceTy<'tcx> {
228        let this = self.eval_context_ref();
229        let instance = resolve_path(*this.tcx, path, Namespace::ValueNS);
230        // We don't give a span -- this isn't actually used directly by the program anyway.
231        this.eval_global(instance).unwrap_or_else(|err| {
232            panic!("failed to evaluate required Rust item: {path:?}\n{err:?}")
233        })
234    }
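    /// Like [`Self::eval_path`], but reads the resulting place as a `Scalar`.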
235    fn eval_path_scalar(&self, path: &[&str]) -> Scalar {
236        let this = self.eval_context_ref();
237        let val = this.eval_path(path);
238        this.read_scalar(&val)
239            .unwrap_or_else(|err| panic!("failed to read required Rust item: {path:?}\n{err:?}"))
240    }
241
242    /// Helper function to get a `libc` constant as a `Scalar`.
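    ///
    /// Illustrative usage (inside a unix shim; any constant exported by the `libc` crate works):
    /// ```ignore
    /// let einval = this.eval_libc("EINVAL");
    /// ```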
243    fn eval_libc(&self, name: &str) -> Scalar {
244        if self.eval_context_ref().tcx.sess.target.os == "windows" {
245            panic!(
246                "`libc` crate is not reliably available on Windows targets; Miri should not use it there"
247            );
248        }
249        self.eval_path_scalar(&["libc", name])
250    }
251
252    /// Helper function to get a `libc` constant as an `i32`.
253    fn eval_libc_i32(&self, name: &str) -> i32 {
254        // TODO: Cache the result.
255        self.eval_libc(name).to_i32().unwrap_or_else(|_err| {
256            panic!("required libc item has unexpected type (not `i32`): {name}")
257        })
258    }
259
260    /// Helper function to get a `libc` constant as a `u32`.
261    fn eval_libc_u32(&self, name: &str) -> u32 {
262        // TODO: Cache the result.
263        self.eval_libc(name).to_u32().unwrap_or_else(|_err| {
264            panic!("required libc item has unexpected type (not `u32`): {name}")
265        })
266    }
267
268    /// Helper function to get a `libc` constant as a `u64`.
269    fn eval_libc_u64(&self, name: &str) -> u64 {
270        // TODO: Cache the result.
271        self.eval_libc(name).to_u64().unwrap_or_else(|_err| {
272            panic!("required libc item has unexpected type (not `u64`): {name}")
273        })
274    }
275
276    /// Helper function to get a `windows` constant as a `Scalar`.
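    ///
    /// Illustrative usage (inside a Windows shim; reads a constant from
    /// `std::sys::pal::windows::<module>`):
    /// ```ignore
    /// let invalid = this.eval_windows("c", "INVALID_HANDLE_VALUE");
    /// ```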
277    fn eval_windows(&self, module: &str, name: &str) -> Scalar {
278        self.eval_context_ref().eval_path_scalar(&["std", "sys", "pal", "windows", module, name])
279    }
280
281    /// Helper function to get a `windows` constant as a `u32`.
282    fn eval_windows_u32(&self, module: &str, name: &str) -> u32 {
283        // TODO: Cache the result.
284        self.eval_windows(module, name).to_u32().unwrap_or_else(|_err| {
285            panic!("required Windows item has unexpected type (not `u32`): {module}::{name}")
286        })
287    }
288
289    /// Helper function to get a `windows` constant as a `u64`.
290    fn eval_windows_u64(&self, module: &str, name: &str) -> u64 {
291        // TODO: Cache the result.
292        self.eval_windows(module, name).to_u64().unwrap_or_else(|_err| {
293            panic!("required Windows item has unexpected type (not `u64`): {module}::{name}")
294        })
295    }
296
297    /// Helper function to get the `TyAndLayout` of a `libc` type
298    fn libc_ty_layout(&self, name: &str) -> TyAndLayout<'tcx> {
299        let this = self.eval_context_ref();
300        if this.tcx.sess.target.os == "windows" {
301            panic!(
302                "`libc` crate is not reliably available on Windows targets; Miri should not use it there"
303            );
304        }
305        path_ty_layout(this, &["libc", name])
306    }
307
308    /// Helper function to get the `TyAndLayout` of a `windows` type
309    fn windows_ty_layout(&self, name: &str) -> TyAndLayout<'tcx> {
310        let this = self.eval_context_ref();
311        path_ty_layout(this, &["std", "sys", "pal", "windows", "c", name])
312    }
313
314    /// Helper function to get the `TyAndLayout` of an array whose element type is the given `libc` type.
315    fn libc_array_ty_layout(&self, name: &str, size: u64) -> TyAndLayout<'tcx> {
316        let this = self.eval_context_ref();
317        let elem_ty_layout = this.libc_ty_layout(name);
318        let array_ty = Ty::new_array(*this.tcx, elem_ty_layout.ty, size);
319        this.layout_of(array_ty).unwrap()
320    }
321
322    /// Project to the given *named* field of `base` (whose type must be a struct or union).
323    fn try_project_field_named<P: Projectable<'tcx, Provenance>>(
324        &self,
325        base: &P,
326        name: &str,
327    ) -> InterpResult<'tcx, Option<P>> {
328        let this = self.eval_context_ref();
329        let adt = base.layout().ty.ty_adt_def().unwrap();
330        for (idx, field) in adt.non_enum_variant().fields.iter_enumerated() {
331            if field.name.as_str() == name {
332                return interp_ok(Some(this.project_field(base, idx)?));
333            }
334        }
335        interp_ok(None)
336    }
337
338    /// Project to the given *named* field of `base` (whose type must be a struct or union).
339    fn project_field_named<P: Projectable<'tcx, Provenance>>(
340        &self,
341        base: &P,
342        name: &str,
343    ) -> InterpResult<'tcx, P> {
344        interp_ok(
345            self.try_project_field_named(base, name)?
346                .unwrap_or_else(|| bug!("no field named {} in type {}", name, base.layout().ty)),
347        )
348    }
349
350    /// Write an int of the appropriate size to `dest`. The target type may be signed or unsigned;
351    /// we try to do the right thing either way. `i128` can fit all integer types except `u128`, so
352    /// this method works for almost all integer types.
353    fn write_int(
354        &mut self,
355        i: impl Into<i128>,
356        dest: &impl Writeable<'tcx, Provenance>,
357    ) -> InterpResult<'tcx> {
358        assert!(
359            dest.layout().backend_repr.is_scalar(),
360            "write_int on non-scalar type {}",
361            dest.layout().ty
362        );
363        let val = if dest.layout().backend_repr.is_signed() {
364            Scalar::from_int(i, dest.layout().size)
365        } else {
366            // `unwrap` can only fail here if `i` is negative
367            Scalar::from_uint(u128::try_from(i.into()).unwrap(), dest.layout().size)
368        };
369        self.eval_context_mut().write_scalar(val, dest)
370    }
371
372    /// Write the first N fields of the given place.
373    fn write_int_fields(
374        &mut self,
375        values: &[i128],
376        dest: &impl Writeable<'tcx, Provenance>,
377    ) -> InterpResult<'tcx> {
378        let this = self.eval_context_mut();
379        for (idx, &val) in values.iter().enumerate() {
380            let idx = FieldIdx::from_usize(idx);
381            let field = this.project_field(dest, idx)?;
382            this.write_int(val, &field)?;
383        }
384        interp_ok(())
385    }
386
387    /// Write the given fields of the given place.
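    ///
    /// Illustrative usage (assuming `tp` is a place whose type has `tv_sec` and `tv_nsec` fields):
    /// ```ignore
    /// this.write_int_fields_named(&[("tv_sec", 0), ("tv_nsec", 42)], &tp)?;
    /// ```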
388    fn write_int_fields_named(
389        &mut self,
390        values: &[(&str, i128)],
391        dest: &impl Writeable<'tcx, Provenance>,
392    ) -> InterpResult<'tcx> {
393        let this = self.eval_context_mut();
394        for &(name, val) in values.iter() {
395            let field = this.project_field_named(dest, name)?;
396            this.write_int(val, &field)?;
397        }
398        interp_ok(())
399    }
400
401    /// Write a 0 of the appropriate size to `dest`.
402    fn write_null(&mut self, dest: &impl Writeable<'tcx, Provenance>) -> InterpResult<'tcx> {
403        self.write_int(0, dest)
404    }
405
406    /// Test if this pointer equals 0.
407    fn ptr_is_null(&self, ptr: Pointer) -> InterpResult<'tcx, bool> {
408        interp_ok(ptr.addr().bytes() == 0)
409    }
410
411    /// Generate some random bytes, and write them to the `len` bytes starting at `ptr`.
412    fn gen_random(&mut self, ptr: Pointer, len: u64) -> InterpResult<'tcx> {
413        // Some programs pass in a null pointer and a length of 0
414        // to their platform's random-generation function (e.g. getrandom())
415        // on Linux. For compatibility with these programs, we don't perform
416        // any additional checks - it's okay if the pointer is invalid,
417        // since we wouldn't actually be writing to it.
418        if len == 0 {
419            return interp_ok(());
420        }
421        let this = self.eval_context_mut();
422
423        let mut data = vec![0; usize::try_from(len).unwrap()];
424
425        if this.machine.communicate() {
426            // Fill the buffer using the host's rng.
427            getrandom::fill(&mut data)
428                .map_err(|err| err_unsup_format!("host getrandom failed: {}", err))?;
429        } else {
430            let rng = this.machine.rng.get_mut();
431            rng.fill_bytes(&mut data);
432        }
433
434        this.write_bytes_ptr(ptr, data.iter().copied())
435    }
436
437    /// Call a function: Push the stack frame and pass the arguments.
438    /// For now, arguments must be scalars (so that the caller does not have to know the layout).
439    ///
440    /// If you do not provide a return place, a dangling zero-sized place will be created
441    /// for your convenience. This is only valid if the return type is `()`.
442    fn call_function(
443        &mut self,
444        f: ty::Instance<'tcx>,
445        caller_abi: ExternAbi,
446        args: &[ImmTy<'tcx>],
447        dest: Option<&MPlaceTy<'tcx>>,
448        cont: ReturnContinuation,
449    ) -> InterpResult<'tcx> {
450        let this = self.eval_context_mut();
451
452        // Get MIR.
453        let mir = this.load_mir(f.def, None)?;
454        let dest = match dest {
455            Some(dest) => dest.clone(),
456            None => MPlaceTy::fake_alloc_zst(this.machine.layouts.unit),
457        };
458
459        // Construct a function pointer type representing the caller perspective.
460        let sig = this.tcx.mk_fn_sig(
461            args.iter().map(|a| a.layout.ty),
462            dest.layout.ty,
463            /*c_variadic*/ false,
464            Safety::Safe,
465            caller_abi,
466        );
467        let caller_fn_abi = this.fn_abi_of_fn_ptr(ty::Binder::dummy(sig), ty::List::empty())?;
468
469        // This will also show proper errors if there is any ABI mismatch.
470        this.init_stack_frame(
471            f,
472            mir,
473            caller_fn_abi,
474            &args.iter().map(|a| FnArg::Copy(a.clone().into())).collect::<Vec<_>>(),
475            /*with_caller_location*/ false,
476            &dest.into(),
477            cont,
478        )
479    }
480
481    /// Visits the memory covered by `place`, sensitive to freezing: the second parameter
482    /// of `action` is `true` if the visited range is frozen and `false` if it is inside an `UnsafeCell`.
483    /// The range passed to `action` is relative to `place`.
484    fn visit_freeze_sensitive(
485        &self,
486        place: &MPlaceTy<'tcx>,
487        size: Size,
488        mut action: impl FnMut(AllocRange, bool) -> InterpResult<'tcx>,
489    ) -> InterpResult<'tcx> {
490        let this = self.eval_context_ref();
491        trace!("visit_frozen(place={:?}, size={:?})", *place, size);
492        debug_assert_eq!(
493            size,
494            this.size_and_align_of_val(place)?
495                .map(|(size, _)| size)
496                .unwrap_or_else(|| place.layout.size)
497        );
498        // Store how far we proceeded into the place so far. Everything to the left of
499        // this offset has already been handled, in the sense that the frozen parts
500        // have had `action` called on them.
501        let start_addr = place.ptr().addr();
502        let mut cur_addr = start_addr;
503        // Called when we detected an `UnsafeCell` at the given offset and size.
504        // Calls `action` and advances `cur_addr`.
505        let mut unsafe_cell_action = |unsafe_cell_ptr: &Pointer, unsafe_cell_size: Size| {
506            // We assume that we are given the fields in increasing offset order,
507            // and nothing else changes.
508            let unsafe_cell_addr = unsafe_cell_ptr.addr();
509            assert!(unsafe_cell_addr >= cur_addr);
510            let frozen_size = unsafe_cell_addr - cur_addr;
511            // Everything between `cur_addr` and this `UnsafeCell` is frozen.
512            if frozen_size != Size::ZERO {
513                action(alloc_range(cur_addr - start_addr, frozen_size), /*frozen*/ true)?;
514            }
515            cur_addr += frozen_size;
516            // This `UnsafeCell` is NOT frozen.
517            if unsafe_cell_size != Size::ZERO {
518                action(
519                    alloc_range(cur_addr - start_addr, unsafe_cell_size),
520                    /*frozen*/ false,
521                )?;
522            }
523            cur_addr += unsafe_cell_size;
524            // Done
525            interp_ok(())
526        };
527        // Run a visitor
528        {
529            let mut visitor = UnsafeCellVisitor {
530                ecx: this,
531                unsafe_cell_action: |place| {
532                    trace!("unsafe_cell_action on {:?}", place.ptr());
533                    // We need a size to go on.
534                    let unsafe_cell_size = this
535                        .size_and_align_of_val(place)?
536                        .map(|(size, _)| size)
537                        // for extern types, just cover what we can
538                        .unwrap_or_else(|| place.layout.size);
539                    // Now handle this `UnsafeCell`, unless it is empty.
540                    if unsafe_cell_size != Size::ZERO {
541                        unsafe_cell_action(&place.ptr(), unsafe_cell_size)
542                    } else {
543                        interp_ok(())
544                    }
545                },
546            };
547            visitor.visit_value(place)?;
548        }
549        // The part between the end_ptr and the end of the place is also frozen.
550        // So pretend there is a 0-sized `UnsafeCell` at the end.
551        unsafe_cell_action(&place.ptr().wrapping_offset(size, this), Size::ZERO)?;
552        // Done!
553        return interp_ok(());
554
555        /// Visiting the memory covered by a `MemPlace`, being aware of
556        /// whether we are inside an `UnsafeCell` or not.
557        struct UnsafeCellVisitor<'ecx, 'tcx, F>
558        where
559            F: FnMut(&MPlaceTy<'tcx>) -> InterpResult<'tcx>,
560        {
561            ecx: &'ecx MiriInterpCx<'tcx>,
562            unsafe_cell_action: F,
563        }
564
565        impl<'ecx, 'tcx, F> ValueVisitor<'tcx, MiriMachine<'tcx>> for UnsafeCellVisitor<'ecx, 'tcx, F>
566        where
567            F: FnMut(&MPlaceTy<'tcx>) -> InterpResult<'tcx>,
568        {
569            type V = MPlaceTy<'tcx>;
570
571            #[inline(always)]
572            fn ecx(&self) -> &MiriInterpCx<'tcx> {
573                self.ecx
574            }
575
576            fn aggregate_field_iter(
577                memory_index: &IndexVec<FieldIdx, u32>,
578            ) -> impl Iterator<Item = FieldIdx> + 'static {
579                let inverse_memory_index = memory_index.invert_bijective_mapping();
580                inverse_memory_index.into_iter()
581            }
582
583            // Hook to detect `UnsafeCell`.
584            fn visit_value(&mut self, v: &MPlaceTy<'tcx>) -> InterpResult<'tcx> {
585                trace!("UnsafeCellVisitor: {:?} {:?}", *v, v.layout.ty);
586                let is_unsafe_cell = match v.layout.ty.kind() {
587                    ty::Adt(adt, _) =>
588                        Some(adt.did()) == self.ecx.tcx.lang_items().unsafe_cell_type(),
589                    _ => false,
590                };
591                if is_unsafe_cell {
592                    // We do not have to recurse further: this is an `UnsafeCell`.
593                    (self.unsafe_cell_action)(v)
594                } else if self.ecx.type_is_freeze(v.layout.ty) {
595                    // This is `Freeze`, so there cannot be an `UnsafeCell`.
596                    interp_ok(())
597                } else if matches!(v.layout.fields, FieldsShape::Union(..)) {
598                    // A (non-frozen) union. We fall back to whatever the type says.
599                    (self.unsafe_cell_action)(v)
600                } else {
601                    // We want to not actually read from memory for this visit. So, before
602                    // walking this value, we have to make sure it is not a
603                    // `Variants::Multiple`.
604                    // FIXME: the current logic here is layout-dependent, so enums with
605                    // multiple variants where all but 1 are uninhabited will be recursed into.
606                    // Is that truly what we want?
607                    match v.layout.variants {
608                        Variants::Multiple { .. } => {
609                            // A multi-variant enum, or coroutine, or so.
610                            // Treat this like a union: without reading from memory,
611                            // we cannot determine the variant we are in. Reading from
612                            // memory would be subject to Stacked Borrows rules, leading
613                            // to all sorts of "funny" recursion.
614                            // We only end up here if the type is *not* freeze, so we just call the
615                            // `UnsafeCell` action.
616                            (self.unsafe_cell_action)(v)
617                        }
618                        Variants::Single { .. } | Variants::Empty => {
619                            // Proceed further, try to find where exactly that `UnsafeCell`
620                            // is hiding.
621                            self.walk_value(v)
622                        }
623                    }
624                }
625            }
626
627            fn visit_union(
628                &mut self,
629                _v: &MPlaceTy<'tcx>,
630                _fields: NonZero<usize>,
631            ) -> InterpResult<'tcx> {
632                bug!("we should have already handled unions in `visit_value`")
633            }
634        }
635    }
636
637    /// Helper function used inside the shims of foreign functions to check that isolation is
638    /// disabled. It returns an error using the `name` of the foreign function if this is not the
639    /// case.
640    fn check_no_isolation(&self, name: &str) -> InterpResult<'tcx> {
641        if !self.eval_context_ref().machine.communicate() {
642            self.reject_in_isolation(name, RejectOpWith::Abort)?;
643        }
644        interp_ok(())
645    }
646
647    /// Helper function used inside the shims of foreign functions which reject the op
648    /// when isolation is enabled. It is used to print a warning/backtrace about the rejection.
649    fn reject_in_isolation(&self, op_name: &str, reject_with: RejectOpWith) -> InterpResult<'tcx> {
650        let this = self.eval_context_ref();
651        match reject_with {
652            RejectOpWith::Abort => isolation_abort_error(op_name),
653            RejectOpWith::WarningWithoutBacktrace => {
654                let mut emitted_warnings = this.machine.reject_in_isolation_warned.borrow_mut();
655                if !emitted_warnings.contains(op_name) {
656                    // First time we are seeing this.
657                    emitted_warnings.insert(op_name.to_owned());
658                    this.tcx
659                        .dcx()
660                        .warn(format!("{op_name} was made to return an error due to isolation"));
661                }
662
663                interp_ok(())
664            }
665            RejectOpWith::Warning => {
666                this.emit_diagnostic(NonHaltingDiagnostic::RejectedIsolatedOp(op_name.to_string()));
667                interp_ok(())
668            }
669            RejectOpWith::NoWarning => interp_ok(()), // no warning
670        }
671    }
672
673    /// Helper function used inside the shims of foreign functions to assert that the target OS
674    /// is `target_os`. It panics showing a message with the `name` of the foreign function
675    /// if this is not the case.
676    fn assert_target_os(&self, target_os: &str, name: &str) {
677        assert_eq!(
678            self.eval_context_ref().tcx.sess.target.os,
679            target_os,
680            "`{name}` is only available on the `{target_os}` target OS",
681        )
682    }
683
684    /// Helper function used inside shims of foreign functions to check that the target OS
685    /// is one of `target_oses`. It returns an error containing the `name` of the foreign function
686    /// in a message if this is not the case.
687    fn check_target_os(&self, target_oses: &[&str], name: Symbol) -> InterpResult<'tcx> {
688        let target_os = self.eval_context_ref().tcx.sess.target.os.as_ref();
689        if !target_oses.contains(&target_os) {
690            throw_unsup_format!("`{name}` is not supported on {target_os}");
691        }
692        interp_ok(())
693    }
694
695    /// Helper function used inside the shims of foreign functions to assert that the target OS
696    /// is part of the UNIX family. It panics showing a message with the `name` of the foreign function
697    /// if this is not the case.
698    fn assert_target_os_is_unix(&self, name: &str) {
699        assert!(self.target_os_is_unix(), "`{name}` is only available for unix targets",);
700    }
701
702    fn target_os_is_unix(&self) -> bool {
703        self.eval_context_ref().tcx.sess.target.families.iter().any(|f| f == "unix")
704    }
705
706    /// Dereference a pointer operand to a place using `layout` instead of the pointer's declared type
707    fn deref_pointer_as(
708        &self,
709        op: &impl Projectable<'tcx, Provenance>,
710        layout: TyAndLayout<'tcx>,
711    ) -> InterpResult<'tcx, MPlaceTy<'tcx>> {
712        let this = self.eval_context_ref();
713        let ptr = this.read_pointer(op)?;
714        interp_ok(this.ptr_to_mplace(ptr, layout))
715    }
716
717    /// Calculates the MPlaceTy given the offset and layout of an access on an operand
718    fn deref_pointer_and_offset(
719        &self,
720        op: &impl Projectable<'tcx, Provenance>,
721        offset: u64,
722        base_layout: TyAndLayout<'tcx>,
723        value_layout: TyAndLayout<'tcx>,
724    ) -> InterpResult<'tcx, MPlaceTy<'tcx>> {
725        let this = self.eval_context_ref();
726        let op_place = this.deref_pointer_as(op, base_layout)?;
727        let offset = Size::from_bytes(offset);
728
729        // Ensure that the access is within bounds.
730        assert!(base_layout.size >= offset + value_layout.size);
731        let value_place = op_place.offset(offset, value_layout, this)?;
732        interp_ok(value_place)
733    }
734
735    fn deref_pointer_and_read(
736        &self,
737        op: &impl Projectable<'tcx, Provenance>,
738        offset: u64,
739        base_layout: TyAndLayout<'tcx>,
740        value_layout: TyAndLayout<'tcx>,
741    ) -> InterpResult<'tcx, Scalar> {
742        let this = self.eval_context_ref();
743        let value_place = this.deref_pointer_and_offset(op, offset, base_layout, value_layout)?;
744        this.read_scalar(&value_place)
745    }
746
747    fn deref_pointer_and_write(
748        &mut self,
749        op: &impl Projectable<'tcx, Provenance>,
750        offset: u64,
751        value: impl Into<Scalar>,
752        base_layout: TyAndLayout<'tcx>,
753        value_layout: TyAndLayout<'tcx>,
754    ) -> InterpResult<'tcx, ()> {
755        let this = self.eval_context_mut();
756        let value_place = this.deref_pointer_and_offset(op, offset, base_layout, value_layout)?;
757        this.write_scalar(value, &value_place)
758    }
759
760    /// Parse a `timespec` struct and return it as a `std::time::Duration`. It returns `None`
761    /// if the value in the `timespec` struct is invalid. Some libc functions will return
762    /// `EINVAL` in this case.
763    fn read_timespec(&mut self, tp: &MPlaceTy<'tcx>) -> InterpResult<'tcx, Option<Duration>> {
764        let this = self.eval_context_mut();
765        let seconds_place = this.project_field(tp, FieldIdx::ZERO)?;
766        let seconds_scalar = this.read_scalar(&seconds_place)?;
767        let seconds = seconds_scalar.to_target_isize(this)?;
768        let nanoseconds_place = this.project_field(tp, FieldIdx::ONE)?;
769        let nanoseconds_scalar = this.read_scalar(&nanoseconds_place)?;
770        let nanoseconds = nanoseconds_scalar.to_target_isize(this)?;
771
772        interp_ok(
773            try {
774                // tv_sec must be non-negative.
775                let seconds: u64 = seconds.try_into().ok()?;
776                // tv_nsec must be non-negative.
777                let nanoseconds: u32 = nanoseconds.try_into().ok()?;
778                if nanoseconds >= 1_000_000_000 {
779                    // tv_nsec must not be greater than 999,999,999.
780                    None?
781                }
782                Duration::new(seconds, nanoseconds)
783            },
784        )
785    }
786
787    /// Read bytes from a byte slice.
788    fn read_byte_slice<'a>(&'a self, slice: &ImmTy<'tcx>) -> InterpResult<'tcx, &'a [u8]>
789    where
790        'tcx: 'a,
791    {
792        let this = self.eval_context_ref();
793        let (ptr, len) = slice.to_scalar_pair();
794        let ptr = ptr.to_pointer(this)?;
795        let len = len.to_target_usize(this)?;
796        let bytes = this.read_bytes_ptr_strip_provenance(ptr, Size::from_bytes(len))?;
797        interp_ok(bytes)
798    }
799
800    /// Read a sequence of bytes until the first null terminator.
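    ///
    /// Illustrative behavior: for memory containing `b"abc\0def"`, this returns `b"abc"`;
    /// the null terminator itself is not included in the result.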
801    fn read_c_str<'a>(&'a self, ptr: Pointer) -> InterpResult<'tcx, &'a [u8]>
802    where
803        'tcx: 'a,
804    {
805        let this = self.eval_context_ref();
806        let size1 = Size::from_bytes(1);
807
808        // Step 1: determine the length.
809        let mut len = Size::ZERO;
810        loop {
811            // FIXME: We are re-getting the allocation each time around the loop.
812            // Would be nice if we could somehow "extend" an existing AllocRange.
813            let alloc = this.get_ptr_alloc(ptr.wrapping_offset(len, this), size1)?.unwrap(); // not a ZST, so we will get a result
814            let byte = alloc.read_integer(alloc_range(Size::ZERO, size1))?.to_u8()?;
815            if byte == 0 {
816                break;
817            } else {
818                len += size1;
819            }
820        }
821
822        // Step 2: get the bytes.
823        this.read_bytes_ptr_strip_provenance(ptr, len)
824    }
825
826    /// Helper function to write a sequence of bytes with an added null-terminator, which is what
827    /// the Unix APIs usually handle. This function returns `Ok((false, length))` without trying
828    /// to write if `size` is not large enough to fit the contents of `c_str` plus a null
829    /// terminator. It returns `Ok((true, length))` if the writing process was successful. The
830    /// string length returned does include the null terminator.
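    ///
    /// Illustrative usage (assuming `buf` points to an allocation of at least 6 bytes):
    /// ```ignore
    /// // Writes b"hello\0" and reports the 6 bytes (5 content + 1 terminator) it needed.
    /// let (written, len) = this.write_c_str(b"hello", buf, 6)?;
    /// assert!(written && len == 6);
    /// // With a too-small buffer, nothing is written but the required length is still reported.
    /// let (written, len) = this.write_c_str(b"hello", buf, 3)?;
    /// assert!(!written && len == 6);
    /// ```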
831    fn write_c_str(
832        &mut self,
833        c_str: &[u8],
834        ptr: Pointer,
835        size: u64,
836    ) -> InterpResult<'tcx, (bool, u64)> {
837        // If `size` is less than or equal to `c_str.len()`, writing `c_str` plus the required null
838        // terminator to memory using the `ptr` pointer would cause an out-of-bounds access.
839        let string_length = u64::try_from(c_str.len()).unwrap();
840        let string_length = string_length.strict_add(1);
841        if size < string_length {
842            return interp_ok((false, string_length));
843        }
844        self.eval_context_mut()
845            .write_bytes_ptr(ptr, c_str.iter().copied().chain(iter::once(0u8)))?;
846        interp_ok((true, string_length))
847    }
848
849    /// Helper function to read a sequence of unsigned integers of the given size and alignment
850    /// until the first null terminator.
851    fn read_c_str_with_char_size<T>(
852        &self,
853        mut ptr: Pointer,
854        size: Size,
855        align: Align,
856    ) -> InterpResult<'tcx, Vec<T>>
857    where
858        T: TryFrom<u128>,
859        <T as TryFrom<u128>>::Error: std::fmt::Debug,
860    {
861        assert_ne!(size, Size::ZERO);
862
863        let this = self.eval_context_ref();
864
865        this.check_ptr_align(ptr, align)?;
866
867        let mut wchars = Vec::new();
868        loop {
869            // FIXME: We are re-getting the allocation each time around the loop.
870            // Would be nice if we could somehow "extend" an existing AllocRange.
871            let alloc = this.get_ptr_alloc(ptr, size)?.unwrap(); // not a ZST, so we will get a result
872            let wchar_int = alloc.read_integer(alloc_range(Size::ZERO, size))?.to_bits(size)?;
873            if wchar_int == 0 {
874                break;
875            } else {
876                wchars.push(wchar_int.try_into().unwrap());
877                ptr = ptr.wrapping_offset(size, this);
878            }
879        }
880
881        interp_ok(wchars)
882    }
883
884    /// Read a sequence of u16 until the first null terminator.
885    fn read_wide_str(&self, ptr: Pointer) -> InterpResult<'tcx, Vec<u16>> {
886        self.read_c_str_with_char_size(ptr, Size::from_bytes(2), Align::from_bytes(2).unwrap())
887    }
888
889    /// Helper function to write a sequence of u16 with an added 0x0000-terminator, which is what
890    /// the Windows APIs usually handle. This function returns `Ok((false, length))` without trying
891    /// to write if `size` is not large enough to fit the contents of `os_string` plus a null
892    /// terminator. It returns `Ok((true, length))` if the writing process was successful. The
893    /// string length returned does include the null terminator. Length is measured in units of
894    /// `u16`.
895    fn write_wide_str(
896        &mut self,
897        wide_str: &[u16],
898        ptr: Pointer,
899        size: u64,
900    ) -> InterpResult<'tcx, (bool, u64)> {
901        // If `size` is less than or equal to `wide_str.len()`, writing `wide_str` plus the required
902        // 0x0000 terminator to memory would cause an out-of-bounds access.
903        let string_length = u64::try_from(wide_str.len()).unwrap();
904        let string_length = string_length.strict_add(1);
905        if size < string_length {
906            return interp_ok((false, string_length));
907        }
908
909        // Store the UTF-16 string.
910        let size2 = Size::from_bytes(2);
911        let this = self.eval_context_mut();
912        this.check_ptr_align(ptr, Align::from_bytes(2).unwrap())?;
913        let mut alloc = this.get_ptr_alloc_mut(ptr, size2 * string_length)?.unwrap(); // not a ZST, so we will get a result
914        for (offset, wchar) in wide_str.iter().copied().chain(iter::once(0x0000)).enumerate() {
915            let offset = u64::try_from(offset).unwrap();
916            alloc.write_scalar(alloc_range(size2 * offset, size2), Scalar::from_u16(wchar))?;
917        }
918        interp_ok((true, string_length))
919    }
920
921    /// Read a sequence of wchar_t until the first null terminator.
922    /// Always returns a `Vec<u32>` no matter the size of `wchar_t`.
923    fn read_wchar_t_str(&self, ptr: Pointer) -> InterpResult<'tcx, Vec<u32>> {
924        let this = self.eval_context_ref();
925        let wchar_t = if this.tcx.sess.target.os == "windows" {
926            // We don't have libc on Windows so we have to hard-code the type ourselves.
927            this.machine.layouts.u16
928        } else {
929            this.libc_ty_layout("wchar_t")
930        };
931        self.read_c_str_with_char_size(ptr, wchar_t.size, wchar_t.align.abi)
932    }
933
934    fn frame_in_std(&self) -> bool {
935        let this = self.eval_context_ref();
936        let frame = this.frame();
937        // Make an attempt to get at the instance of the function this is inlined from.
938        let instance: Option<_> = try {
939            let scope = frame.current_source_info()?.scope;
940            let inlined_parent = frame.body().source_scopes[scope].inlined_parent_scope?;
941            let source = &frame.body().source_scopes[inlined_parent];
942            source.inlined.expect("inlined_parent_scope points to scope without inline info").0
943        };
944        // Fall back to the instance of the function itself.
945        let instance = instance.unwrap_or(frame.instance());
946        // Now check the crate it is in. We could try to be clever here and e.g. check if this is
947        // the same crate as `start_fn`, but that would not work for running std tests in Miri, so
948        // we'd need some more hacks anyway. So we just check the name of the crate. If someone
949        // calls their crate `std` then we'll just let them keep the pieces.
950        let frame_crate = this.tcx.def_path(instance.def_id()).krate;
951        let crate_name = this.tcx.crate_name(frame_crate);
952        let crate_name = crate_name.as_str();
953        // On miri-test-libstd, the name of the crate is different.
954        crate_name == "std" || crate_name == "std_miri_test"
955    }
956
957    /// Mark a machine allocation that was just created as immutable.
958    fn mark_immutable(&mut self, mplace: &MPlaceTy<'tcx>) {
959        let this = self.eval_context_mut();
960        // This was just allocated, so there definitely is a pointer here.
961        let provenance = mplace.ptr().into_pointer_or_addr().unwrap().provenance;
962        this.alloc_mark_immutable(provenance.get_alloc_id().unwrap()).unwrap();
963    }
964
965    /// Converts `src` from a floating point value to the integer type `cast_to`
966    /// after rounding with mode `round`.
967    /// Returns `None` if `src` is NaN or out of range of the target integer type.
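    ///
    /// Illustrative behavior (assuming `src_1_7` and `src_f32_max` are `f32` immediates and
    /// `i32_layout` is the layout of `i32`):
    /// ```ignore
    /// use rustc_apfloat::Round;
    /// // 1.7 rounded toward zero fits in i32, so this yields Some(1).
    /// let one = this.float_to_int_checked(&src_1_7, i32_layout, Round::TowardZero)?;
    /// // f32::MAX is out of range for i32, so this yields None.
    /// let none = this.float_to_int_checked(&src_f32_max, i32_layout, Round::TowardZero)?;
    /// ```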
968    fn float_to_int_checked(
969        &self,
970        src: &ImmTy<'tcx>,
971        cast_to: TyAndLayout<'tcx>,
972        round: rustc_apfloat::Round,
973    ) -> InterpResult<'tcx, Option<ImmTy<'tcx>>> {
974        let this = self.eval_context_ref();
975
976        fn float_to_int_inner<'tcx, F: rustc_apfloat::Float>(
977            ecx: &MiriInterpCx<'tcx>,
978            src: F,
979            cast_to: TyAndLayout<'tcx>,
980            round: rustc_apfloat::Round,
981        ) -> (Scalar, rustc_apfloat::Status) {
982            let int_size = cast_to.layout.size;
983            match cast_to.ty.kind() {
984                // Unsigned
985                ty::Uint(_) => {
986                    let res = src.to_u128_r(int_size.bits_usize(), round, &mut false);
987                    (Scalar::from_uint(res.value, int_size), res.status)
988                }
989                // Signed
990                ty::Int(_) => {
991                    let res = src.to_i128_r(int_size.bits_usize(), round, &mut false);
992                    (Scalar::from_int(res.value, int_size), res.status)
993                }
994                // Nothing else
995                _ =>
996                    span_bug!(
997                        ecx.cur_span(),
998                        "attempted float-to-int conversion with non-int output type {}",
999                        cast_to.ty,
1000                    ),
1001            }
1002        }
1003
1004        let ty::Float(fty) = src.layout.ty.kind() else {
1005            bug!("float_to_int_checked: non-float input type {}", src.layout.ty)
1006        };
1007
1008        let (val, status) = match fty {
1009            FloatTy::F16 =>
1010                float_to_int_inner::<Half>(this, src.to_scalar().to_f16()?, cast_to, round),
1011            FloatTy::F32 =>
1012                float_to_int_inner::<Single>(this, src.to_scalar().to_f32()?, cast_to, round),
1013            FloatTy::F64 =>
1014                float_to_int_inner::<Double>(this, src.to_scalar().to_f64()?, cast_to, round),
1015            FloatTy::F128 =>
1016                float_to_int_inner::<Quad>(this, src.to_scalar().to_f128()?, cast_to, round),
1017        };
1018
1019        if status.intersects(
1020            rustc_apfloat::Status::INVALID_OP
1021                | rustc_apfloat::Status::OVERFLOW
1022                | rustc_apfloat::Status::UNDERFLOW,
1023        ) {
1024            // Floating point value is NaN (flagged with INVALID_OP) or outside the range
1025            // of values of the integer type (flagged with OVERFLOW or UNDERFLOW).
1026            interp_ok(None)
1027        } else {
1028            // Floating point value can be represented by the integer type after rounding.
1029            // The INEXACT flag is ignored on purpose to allow rounding.
1030            interp_ok(Some(ImmTy::from_scalar(val, cast_to)))
1031        }
1032    }
1033
1034    /// Returns an integer type that is twice as wide as `ty`.
1035    fn get_twice_wide_int_ty(&self, ty: Ty<'tcx>) -> Ty<'tcx> {
1036        let this = self.eval_context_ref();
1037        match ty.kind() {
1038            // Unsigned
1039            ty::Uint(UintTy::U8) => this.tcx.types.u16,
1040            ty::Uint(UintTy::U16) => this.tcx.types.u32,
1041            ty::Uint(UintTy::U32) => this.tcx.types.u64,
1042            ty::Uint(UintTy::U64) => this.tcx.types.u128,
1043            // Signed
1044            ty::Int(IntTy::I8) => this.tcx.types.i16,
1045            ty::Int(IntTy::I16) => this.tcx.types.i32,
1046            ty::Int(IntTy::I32) => this.tcx.types.i64,
1047            ty::Int(IntTy::I64) => this.tcx.types.i128,
1048            _ => span_bug!(this.cur_span(), "unexpected type: {ty:?}"),
1049        }
1050    }
1051
1052    /// Checks that target feature `target_feature` is enabled.
1053    ///
1054    /// If not enabled, emits an UB error that states that the feature is
1055    /// required by `intrinsic`.
1056    fn expect_target_feature_for_intrinsic(
1057        &self,
1058        intrinsic: Symbol,
1059        target_feature: &str,
1060    ) -> InterpResult<'tcx, ()> {
1061        let this = self.eval_context_ref();
1062        if !this.tcx.sess.unstable_target_features.contains(&Symbol::intern(target_feature)) {
1063            throw_ub_format!(
1064                "attempted to call intrinsic `{intrinsic}` that requires missing target feature {target_feature}"
1065            );
1066        }
1067        interp_ok(())
1068    }
1069
1070    /// Look up an array of immediates from any linker sections matching the provided predicate.
1071    fn lookup_link_section(
1072        &mut self,
1073        include_name: impl Fn(&str) -> bool,
1074    ) -> InterpResult<'tcx, Vec<ImmTy<'tcx>>> {
1075        let this = self.eval_context_mut();
1076        let tcx = this.tcx.tcx;
1077
1078        let mut array = vec![];
1079
1080        iter_exported_symbols(tcx, |_cnum, def_id| {
1081            let attrs = tcx.codegen_fn_attrs(def_id);
1082            let Some(link_section) = attrs.link_section else {
1083                return interp_ok(());
1084            };
1085            if include_name(link_section.as_str()) {
1086                let instance = ty::Instance::mono(tcx, def_id);
1087                let const_val = this.eval_global(instance).unwrap_or_else(|err| {
1088                    panic!(
1089                        "failed to evaluate static in required link_section: {def_id:?}\n{err:?}"
1090                    )
1091                });
1092                match const_val.layout.ty.kind() {
1093                    ty::FnPtr(..) => {
1094                        array.push(this.read_immediate(&const_val)?);
1095                    }
1096                    ty::Array(elem_ty, _) if matches!(elem_ty.kind(), ty::FnPtr(..)) => {
1097                        let mut elems = this.project_array_fields(&const_val)?;
1098                        while let Some((_idx, elem)) = elems.next(this)? {
1099                            array.push(this.read_immediate(&elem)?);
1100                        }
1101                    }
1102                    _ =>
1103                        throw_unsup_format!(
1104                            "only function pointers and arrays of function pointers are supported in well-known linker sections"
1105                        ),
1106                }
1107            }
1108            interp_ok(())
1109        })?;
1110
1111        interp_ok(array)
1112    }
1113
1114    fn mangle_internal_symbol<'a>(&'a mut self, name: &'static str) -> &'a str
1115    where
1116        'tcx: 'a,
1117    {
1118        let this = self.eval_context_mut();
1119        let tcx = *this.tcx;
1120        this.machine
1121            .mangle_internal_symbol_cache
1122            .entry(name)
1123            .or_insert_with(|| mangle_internal_symbol(tcx, name))
1124    }
1125}
1126
1127impl<'tcx> MiriMachine<'tcx> {
1128    /// Get the current span in the topmost function which is workspace-local and not
1129    /// `#[track_caller]`.
1130    /// This function is backed by a cache, and can be assumed to be very fast.
1131    /// It will work even when the stack is empty.
1132    pub fn current_span(&self) -> Span {
1133        self.threads.active_thread_ref().current_span()
1134    }
1135
1136    /// Returns the span of the *caller* of the current operation, again
1137    /// walking down the stack to find the closest frame in a local crate, if the caller of the
1138    /// current operation is not in a local crate.
1139    /// This is useful when we are processing something which occurs on function-entry and we want
1140    /// to point at the call to the function, not the function definition generally.
1141    pub fn caller_span(&self) -> Span {
1142        // We need to go down at least to the caller (len - 2), or however
1143        // far we have to go to find a frame in a local crate which is also not #[track_caller].
1144        let frame_idx = self.top_user_relevant_frame().unwrap();
1145        let frame_idx = cmp::min(frame_idx, self.stack().len().saturating_sub(2));
1146        self.stack()[frame_idx].current_span()
1147    }
1148
1149    fn stack(&self) -> &[Frame<'tcx, Provenance, machine::FrameExtra<'tcx>>] {
1150        self.threads.active_thread_stack()
1151    }
1152
1153    fn top_user_relevant_frame(&self) -> Option<usize> {
1154        self.threads.active_thread_ref().top_user_relevant_frame()
1155    }
1156
1157    /// This is the source of truth for the `is_user_relevant` flag in our `FrameExtra`.
1158    pub fn is_user_relevant(&self, frame: &Frame<'tcx, Provenance>) -> bool {
1159        let def_id = frame.instance().def_id();
1160        (def_id.is_local() || self.local_crates.contains(&def_id.krate))
1161            && !frame.instance().def.requires_caller_location(self.tcx)
1162    }
1163}
1164
1165pub fn isolation_abort_error<'tcx>(name: &str) -> InterpResult<'tcx> {
1166    throw_machine_stop!(TerminationInfo::UnsupportedInIsolation(format!(
1167        "{name} not available when isolation is enabled",
1168    )))
1169}
1170
1171/// Retrieve the list of local crates that should have been passed by cargo-miri in
1172/// MIRI_LOCAL_CRATES and turn them into `CrateNum`s.
1173pub fn get_local_crates(tcx: TyCtxt<'_>) -> Vec<CrateNum> {
1174    // Convert the local crate names from the passed-in config into CrateNums so that they can
1175    // be looked up quickly during execution
1176    let local_crate_names = std::env::var("MIRI_LOCAL_CRATES")
1177        .map(|crates| crates.split(',').map(|krate| krate.to_string()).collect::<Vec<_>>())
1178        .unwrap_or_default();
1179    let mut local_crates = Vec::new();
1180    for &crate_num in tcx.crates(()) {
1181        let name = tcx.crate_name(crate_num);
1182        let name = name.as_str();
1183        if local_crate_names.iter().any(|local_name| local_name == name) {
1184            local_crates.push(crate_num);
1185        }
1186    }
1187    local_crates
1188}
1189
1190pub(crate) fn bool_to_simd_element(b: bool, size: Size) -> Scalar {
1191    // SIMD uses all-1 as pattern for "true". In two's complement,
1192    // -1 has all its bits set to one and `from_int` will truncate or
1193    // sign-extend it to `size` as required.
1194    let val = if b { -1 } else { 0 };
1195    Scalar::from_int(val, size)
1196}
1197
1198pub(crate) fn simd_element_to_bool(elem: ImmTy<'_>) -> InterpResult<'_, bool> {
1199    assert!(
1200        matches!(elem.layout.ty.kind(), ty::Int(_) | ty::Uint(_)),
1201        "SIMD mask element type must be an integer, but this is `{}`",
1202        elem.layout.ty
1203    );
1204    let val = elem.to_scalar().to_int(elem.layout.size)?;
1205    interp_ok(match val {
1206        0 => false,
1207        -1 => true,
1208        _ => throw_ub_format!("each element of a SIMD mask must be all-0-bits or all-1-bits"),
1209    })
1210}
1211
1212/// Check whether an operation that writes to a target buffer was successful, and select the
1213/// return value accordingly.
1214/// Local helper function to be used in Windows shims.
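///
/// Illustrative usage (assuming `wide` is a `&[u16]` and `buf`/`size` describe the target buffer):
/// ```ignore
/// let ret = windows_check_buffer_size(this.write_wide_str(&wide, buf, size)?);
/// // On success: number of u16s written, excluding the terminator.
/// // On failure: required buffer size in u16s, including the terminator.
/// ```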
1215pub(crate) fn windows_check_buffer_size((success, len): (bool, u64)) -> u32 {
1216    if success {
1217        // If the function succeeds, the return value is the number of characters stored in the target buffer,
1218        // not including the terminating null character.
1219        u32::try_from(len.strict_sub(1)).unwrap()
1220    } else {
1221        // If the target buffer was not large enough to hold the data, the return value is the buffer size, in characters,
1222        // required to hold the string and its terminating null character.
1223        u32::try_from(len).unwrap()
1224    }
1225}
1226
1227/// We don't support 16-bit systems, so let's have ergonomic conversion from `u32` to `usize`.
1228pub trait ToUsize {
1229    fn to_usize(self) -> usize;
1230}
1231
1232impl ToUsize for u32 {
1233    fn to_usize(self) -> usize {
1234        self.try_into().unwrap()
1235    }
1236}
1237
1238/// Similarly, a maximum address size of `u64` is assumed widely here, so let's have ergonomic
1239/// conversion from `usize` to `u64`.
1240pub trait ToU64 {
1241    fn to_u64(self) -> u64;
1242}
1243
1244impl ToU64 for usize {
1245    fn to_u64(self) -> u64 {
1246        self.try_into().unwrap()
1247    }
1248}
1249
1250/// Enters a [tracing::info_span] only if the "tracing" feature is enabled, otherwise does nothing.
1251/// This calls [rustc_const_eval::enter_trace_span] with [MiriMachine] as the first argument, which
1252/// will in turn call [MiriMachine::enter_trace_span], which takes care of determining at compile
1253/// time whether to trace or not (and supposedly the call is compiled out if tracing is disabled).
1254/// Look at [rustc_const_eval::enter_trace_span] for complete documentation, examples and tips.
1255#[macro_export]
1256macro_rules! enter_trace_span {
1257    ($($tt:tt)*) => {
1258        rustc_const_eval::enter_trace_span!($crate::MiriMachine<'static>, $($tt)*)
1259    };
1260}