use std::assert_matches::assert_matches;
use std::borrow::{Borrow, Cow};
use std::cell::Cell;
use std::collections::VecDeque;
use std::{fmt, ptr};

use rustc_abi::{Align, HasDataLayout, Size};
use rustc_ast::Mutability;
use rustc_data_structures::fx::{FxHashSet, FxIndexMap};
use rustc_middle::bug;
use rustc_middle::mir::display_allocation;
use rustc_middle::ty::{self, Instance, Ty, TyCtxt};
use tracing::{debug, instrument, trace};

use super::{
    AllocBytes, AllocId, AllocInit, AllocMap, AllocRange, Allocation, CheckAlignMsg,
    CheckInAllocMsg, CtfeProvenance, GlobalAlloc, InterpCx, InterpResult, Machine, MayLeak,
    Misalignment, Pointer, PointerArithmetic, Provenance, Scalar, alloc_range, err_ub,
    err_ub_custom, interp_ok, throw_ub, throw_ub_custom, throw_unsup, throw_unsup_format,
};
use crate::fluent_generated as fluent;

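/// Classifies memory that is allocated during interpretation, so that allocation and
/// deallocation can be checked to use matching kinds.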
#[derive(Debug, PartialEq, Copy, Clone)]
pub enum MemoryKind<T> {
    /// Memory backing a stack variable.
    Stack,
    /// Memory backing a `caller_location` value.
    CallerLocation,
    /// A memory kind defined by the machine.
    Machine(T),
}

impl<T: MayLeak> MayLeak for MemoryKind<T> {
    #[inline]
    fn may_leak(self) -> bool {
        match self {
            MemoryKind::Stack => false,
            MemoryKind::CallerLocation => true,
            MemoryKind::Machine(k) => k.may_leak(),
        }
    }
}

impl<T: fmt::Display> fmt::Display for MemoryKind<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            MemoryKind::Stack => write!(f, "stack variable"),
            MemoryKind::CallerLocation => write!(f, "caller location"),
            MemoryKind::Machine(m) => write!(f, "{m}"),
        }
    }
}

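/// The kind of allocation an `AllocId` refers to, as reported by `get_alloc_info`.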
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum AllocKind {
    /// A regular live data allocation.
    LiveData,
    /// A function allocation (the backing memory of a function pointer).
    Function,
    /// A vtable allocation.
    VTable,
    /// A dead allocation.
    Dead,
}

#[derive(Copy, Clone, PartialEq, Debug)]
pub struct AllocInfo {
    pub size: Size,
    pub align: Align,
    pub kind: AllocKind,
    pub mutbl: Mutability,
}

impl AllocInfo {
    fn new(size: Size, align: Align, kind: AllocKind, mutbl: Mutability) -> Self {
        Self { size, align, kind, mutbl }
    }
}

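/// The value of a function pointer: either a proper `Instance`, or an "extra" value that
/// only the machine knows how to call.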
#[derive(Debug, Copy, Clone)]
pub enum FnVal<'tcx, Other> {
    Instance(Instance<'tcx>),
    Other(Other),
}

impl<'tcx, Other> FnVal<'tcx, Other> {
    pub fn as_instance(self) -> InterpResult<'tcx, Instance<'tcx>> {
        match self {
            FnVal::Instance(instance) => interp_ok(instance),
            FnVal::Other(_) => {
                throw_unsup_format!("'foreign' function pointers are not supported in this context")
            }
        }
    }
}

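/// The memory state of one interpreter instance: the local allocation map plus bookkeeping
/// for extra function pointers, dead allocations, and in-progress validation.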
pub struct Memory<'tcx, M: Machine<'tcx>> {
    /// Allocations local to this instance of the interpreter, keyed by `AllocId` and
    /// tagged with the `MemoryKind` they were allocated with.
    pub(super) alloc_map: M::MemoryMap,

    /// Map of "extra" function pointers that are not backed by an `Instance`.
    extra_fn_ptr_map: FxIndexMap<AllocId, M::ExtraFnVal>,

    /// Size and alignment of deallocated allocations, kept so that dead pointers can still
    /// be queried (e.g. to report use-after-free with the right allocation info).
    pub(super) dead_alloc_map: FxIndexMap<AllocId, (Size, Align)>,

    /// Whether memory accesses are currently being performed for validation only.
    /// While this is set, some machine read hooks are skipped.
    validation_in_progress: Cell<bool>,
}

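/// A reference to an allocation, cropped to a particular access range, together with the
/// `TyCtxt` and `AllocId` needed to turn allocation errors into interpreter errors.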
#[derive(Copy, Clone)]
pub struct AllocRef<'a, 'tcx, Prov: Provenance, Extra, Bytes: AllocBytes = Box<[u8]>> {
    alloc: &'a Allocation<Prov, Extra, Bytes>,
    range: AllocRange,
    tcx: TyCtxt<'tcx>,
    alloc_id: AllocId,
}

pub struct AllocRefMut<'a, 'tcx, Prov: Provenance, Extra, Bytes: AllocBytes = Box<[u8]>> {
    alloc: &'a mut Allocation<Prov, Extra, Bytes>,
    range: AllocRange,
    tcx: TyCtxt<'tcx>,
    alloc_id: AllocId,
}

impl<'tcx, M: Machine<'tcx>> Memory<'tcx, M> {
    pub fn new() -> Self {
        Memory {
            alloc_map: M::MemoryMap::default(),
            extra_fn_ptr_map: FxIndexMap::default(),
            dead_alloc_map: FxIndexMap::default(),
            validation_in_progress: Cell::new(false),
        }
    }

    pub fn alloc_map(&self) -> &M::MemoryMap {
        &self.alloc_map
    }
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
    #[inline]
    pub fn global_root_pointer(
        &self,
        ptr: Pointer<CtfeProvenance>,
    ) -> InterpResult<'tcx, Pointer<M::Provenance>> {
        let alloc_id = ptr.provenance.alloc_id();
        match self.tcx.try_get_global_alloc(alloc_id) {
            Some(GlobalAlloc::Static(def_id)) if self.tcx.is_thread_local_static(def_id) => {
                bug!("global memory cannot point to thread-local static")
            }
            Some(GlobalAlloc::Static(def_id)) if self.tcx.is_foreign_item(def_id) => {
                return M::extern_static_pointer(self, def_id);
            }
            None => {
                assert!(
                    self.memory.extra_fn_ptr_map.contains_key(&alloc_id),
                    "{alloc_id:?} is neither global nor a function pointer"
                );
            }
            _ => {}
        }
        M::adjust_alloc_root_pointer(self, ptr, M::GLOBAL_KIND.map(MemoryKind::Machine))
    }

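    /// Creates a pointer to the given function value, reserving a fresh `AllocId` for
    /// "extra" (machine-defined) function values.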
    pub fn fn_ptr(&mut self, fn_val: FnVal<'tcx, M::ExtraFnVal>) -> Pointer<M::Provenance> {
        let id = match fn_val {
            FnVal::Instance(instance) => {
                let salt = M::get_global_alloc_salt(self, Some(instance));
                self.tcx.reserve_and_set_fn_alloc(instance, salt)
            }
            FnVal::Other(extra) => {
                let id = self.tcx.reserve_alloc_id();
                let old = self.memory.extra_fn_ptr_map.insert(id, extra);
                assert!(old.is_none());
                id
            }
        };
        self.global_root_pointer(Pointer::from(id)).unwrap()
    }

    pub fn allocate_ptr(
        &mut self,
        size: Size,
        align: Align,
        kind: MemoryKind<M::MemoryKind>,
        init: AllocInit,
    ) -> InterpResult<'tcx, Pointer<M::Provenance>> {
        let params = self.machine.get_default_alloc_params();
        let alloc = if M::PANIC_ON_ALLOC_FAIL {
            Allocation::new(size, align, init, params)
        } else {
            Allocation::try_new(size, align, init, params)?
        };
        self.insert_allocation(alloc, kind)
    }

    pub fn allocate_bytes_ptr(
        &mut self,
        bytes: &[u8],
        align: Align,
        kind: MemoryKind<M::MemoryKind>,
        mutability: Mutability,
    ) -> InterpResult<'tcx, Pointer<M::Provenance>> {
        let params = self.machine.get_default_alloc_params();
        let alloc = Allocation::from_bytes(bytes, align, mutability, params);
        self.insert_allocation(alloc, kind)
    }

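    /// Inserts a freshly created allocation into the local memory map and returns a pointer
    /// to it. This must not be used for global (tcx-managed) memory.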
    pub fn insert_allocation(
        &mut self,
        alloc: Allocation<M::Provenance, (), M::Bytes>,
        kind: MemoryKind<M::MemoryKind>,
    ) -> InterpResult<'tcx, Pointer<M::Provenance>> {
        assert!(alloc.size() <= self.max_size_of_val());
        let id = self.tcx.reserve_alloc_id();
        debug_assert_ne!(
            Some(kind),
            M::GLOBAL_KIND.map(MemoryKind::Machine),
            "dynamically allocating global memory"
        );
        let extra = M::init_local_allocation(self, id, kind, alloc.size(), alloc.align)?;
        let alloc = alloc.with_extra(extra);
        self.memory.alloc_map.insert(id, (kind, alloc));
        M::adjust_alloc_root_pointer(self, Pointer::from(id), Some(kind))
    }

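    /// Reallocates: creates a new allocation with the requested size and alignment, copies
    /// over `min(old_size, new_size)` bytes, and deallocates the old allocation.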
    pub fn reallocate_ptr(
        &mut self,
        ptr: Pointer<Option<M::Provenance>>,
        old_size_and_align: Option<(Size, Align)>,
        new_size: Size,
        new_align: Align,
        kind: MemoryKind<M::MemoryKind>,
        init_growth: AllocInit,
    ) -> InterpResult<'tcx, Pointer<M::Provenance>> {
        let (alloc_id, offset, _prov) = self.ptr_get_alloc_id(ptr, 0)?;
        if offset.bytes() != 0 {
            throw_ub_custom!(
                fluent::const_eval_realloc_or_alloc_with_offset,
                ptr = format!("{ptr:?}"),
                kind = "realloc"
            );
        }

        let new_ptr = self.allocate_ptr(new_size, new_align, kind, init_growth)?;
        let old_size = match old_size_and_align {
            Some((size, _align)) => size,
            None => self.get_alloc_raw(alloc_id)?.size(),
        };
        self.mem_copy(ptr, new_ptr.into(), old_size.min(new_size), true)?;
        self.deallocate_ptr(ptr, old_size_and_align, kind)?;

        interp_ok(new_ptr)
    }

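    /// Deallocates the allocation `ptr` points to, checking that the pointer has no offset,
    /// that the allocation is live and mutable, and that kind, size, and alignment match.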
    #[instrument(skip(self), level = "debug")]
    pub fn deallocate_ptr(
        &mut self,
        ptr: Pointer<Option<M::Provenance>>,
        old_size_and_align: Option<(Size, Align)>,
        kind: MemoryKind<M::MemoryKind>,
    ) -> InterpResult<'tcx> {
        let (alloc_id, offset, prov) = self.ptr_get_alloc_id(ptr, 0)?;
        trace!("deallocating: {alloc_id:?}");

        if offset.bytes() != 0 {
            throw_ub_custom!(
                fluent::const_eval_realloc_or_alloc_with_offset,
                ptr = format!("{ptr:?}"),
                kind = "dealloc",
            );
        }

        let Some((alloc_kind, mut alloc)) = self.memory.alloc_map.remove(&alloc_id) else {
            // Deallocating global memory -- always an error.
            return Err(match self.tcx.try_get_global_alloc(alloc_id) {
                Some(GlobalAlloc::Function { .. }) => {
                    err_ub_custom!(
                        fluent::const_eval_invalid_dealloc,
                        alloc_id = alloc_id,
                        kind = "fn",
                    )
                }
                Some(GlobalAlloc::VTable(..)) => {
                    err_ub_custom!(
                        fluent::const_eval_invalid_dealloc,
                        alloc_id = alloc_id,
                        kind = "vtable",
                    )
                }
                Some(GlobalAlloc::Static(..) | GlobalAlloc::Memory(..)) => {
                    err_ub_custom!(
                        fluent::const_eval_invalid_dealloc,
                        alloc_id = alloc_id,
                        kind = "static_mem"
                    )
                }
                None => err_ub!(PointerUseAfterFree(alloc_id, CheckInAllocMsg::MemoryAccess)),
            })
            .into();
        };

        if alloc.mutability.is_not() {
            throw_ub_custom!(fluent::const_eval_dealloc_immutable, alloc = alloc_id,);
        }
        if alloc_kind != kind {
            throw_ub_custom!(
                fluent::const_eval_dealloc_kind_mismatch,
                alloc = alloc_id,
                alloc_kind = format!("{alloc_kind}"),
                kind = format!("{kind}"),
            );
        }
        if let Some((size, align)) = old_size_and_align {
            if size != alloc.size() || align != alloc.align {
                throw_ub_custom!(
                    fluent::const_eval_dealloc_incorrect_layout,
                    alloc = alloc_id,
                    size = alloc.size().bytes(),
                    align = alloc.align.bytes(),
                    size_found = size.bytes(),
                    align_found = align.bytes(),
                )
            }
        }

        // Let the machine take some extra action before the memory goes away.
        let size = alloc.size();
        M::before_memory_deallocation(
            self.tcx,
            &mut self.machine,
            &mut alloc.extra,
            ptr,
            (alloc_id, prov),
            size,
            alloc.align,
            kind,
        )?;

        // Remember the size and alignment for future use-after-free errors.
        let old = self.memory.dead_alloc_map.insert(alloc_id, (size, alloc.align));
        if old.is_some() {
            bug!("Nothing can be deallocated twice");
        }

        interp_ok(())
    }

    #[inline(always)]
    fn get_ptr_access(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: Size,
    ) -> InterpResult<'tcx, Option<(AllocId, Size, M::ProvenanceExtra)>> {
        let size = i64::try_from(size.bytes()).unwrap();
        Self::check_and_deref_ptr(
            self,
            ptr,
            size,
            CheckInAllocMsg::MemoryAccess,
            |this, alloc_id, offset, prov| {
                let (size, align) =
                    this.get_live_alloc_size_and_align(alloc_id, CheckInAllocMsg::MemoryAccess)?;
                interp_ok((size, align, (alloc_id, offset, prov)))
            },
        )
    }


    #[inline(always)]
    pub fn check_ptr_access(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: Size,
        msg: CheckInAllocMsg,
    ) -> InterpResult<'tcx> {
        let size = i64::try_from(size.bytes()).unwrap();
        Self::check_and_deref_ptr(self, ptr, size, msg, |this, alloc_id, _, _| {
            let (size, align) = this.get_live_alloc_size_and_align(alloc_id, msg)?;
            interp_ok((size, align, ()))
        })?;
        interp_ok(())
    }

    pub fn check_ptr_access_signed(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: i64,
        msg: CheckInAllocMsg,
    ) -> InterpResult<'tcx> {
        Self::check_and_deref_ptr(self, ptr, size, msg, |this, alloc_id, _, _| {
            let (size, align) = this.get_live_alloc_size_and_align(alloc_id, msg)?;
            interp_ok((size, align, ()))
        })?;
        interp_ok(())
    }

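    /// Low-level helper: checks that a memory range is fully in bounds of a live allocation.
    /// A negative `size` means the range extends backwards from `ptr`. Zero-sized accesses
    /// always succeed without touching any allocation and yield `None`; otherwise the result
    /// of the `alloc_size` callback is returned, and the size/alignment it reports are used
    /// for the bounds check.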
    fn check_and_deref_ptr<T, R: Borrow<Self>>(
        this: R,
        ptr: Pointer<Option<M::Provenance>>,
        size: i64,
        msg: CheckInAllocMsg,
        alloc_size: impl FnOnce(
            R,
            AllocId,
            Size,
            M::ProvenanceExtra,
        ) -> InterpResult<'tcx, (Size, Align, T)>,
    ) -> InterpResult<'tcx, Option<T>> {
        // Everything is okay with size 0.
        if size == 0 {
            return interp_ok(None);
        }

        interp_ok(match this.borrow().ptr_try_get_alloc_id(ptr, size) {
            Err(addr) => {
                // We couldn't get a proper allocation.
                throw_ub!(DanglingIntPointer { addr, inbounds_size: size, msg });
            }
            Ok((alloc_id, offset, prov)) => {
                let tcx = this.borrow().tcx;
                let (alloc_size, _alloc_align, ret_val) = alloc_size(this, alloc_id, offset, prov)?;
                let offset = offset.bytes();
                // Compute the begin and end of the access; a negative `size` extends backwards.
                let (begin, end) = if size >= 0 {
                    (Some(offset), offset.checked_add(size as u64))
                } else {
                    (offset.checked_sub(size.unsigned_abs()), Some(offset))
                };
                // Both ends must be within the allocation (the end may be one-past-the-end).
                let in_bounds = begin.is_some() && end.is_some_and(|e| e <= alloc_size.bytes());
                if !in_bounds {
                    throw_ub!(PointerOutOfBounds {
                        alloc_id,
                        alloc_size,
                        ptr_offset: tcx.sign_extend_to_target_isize(offset),
                        inbounds_size: size,
                        msg,
                    })
                }

                Some(ret_val)
            }
        })
    }

    pub(super) fn check_misalign(
        &self,
        misaligned: Option<Misalignment>,
        msg: CheckAlignMsg,
    ) -> InterpResult<'tcx> {
        if let Some(misaligned) = misaligned {
            throw_ub!(AlignmentCheckFailed(misaligned, msg))
        }
        interp_ok(())
    }

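    /// Checks whether `ptr` is sufficiently aligned for an access with the given alignment.
    /// Returns `None` if the pointer is aligned (or alignment is not enforced), and the
    /// observed misalignment otherwise.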
    pub(super) fn is_ptr_misaligned(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        align: Align,
    ) -> Option<Misalignment> {
        if !M::enforce_alignment(self) || align.bytes() == 1 {
            return None;
        }

        #[inline]
        fn is_offset_misaligned(offset: u64, align: Align) -> Option<Misalignment> {
            if offset % align.bytes() == 0 {
                None
            } else {
                let offset_pow2 = 1 << offset.trailing_zeros();
                Some(Misalignment { has: Align::from_bytes(offset_pow2).unwrap(), required: align })
            }
        }

        match self.ptr_try_get_alloc_id(ptr, 0) {
            Err(addr) => is_offset_misaligned(addr, align),
            Ok((alloc_id, offset, _prov)) => {
                let alloc_info = self.get_alloc_info(alloc_id);
                if let Some(misalign) = M::alignment_check(
                    self,
                    alloc_id,
                    alloc_info.align,
                    alloc_info.kind,
                    offset,
                    align,
                ) {
                    Some(misalign)
                } else if M::Provenance::OFFSET_IS_ADDR {
                    is_offset_misaligned(ptr.addr().bytes(), align)
                } else {
                    if alloc_info.align.bytes() < align.bytes() {
                        Some(Misalignment { has: alloc_info.align, required: align })
                    } else {
                        is_offset_misaligned(offset.bytes(), align)
                    }
                }
            }
        }
    }

    pub fn check_ptr_align(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        align: Align,
    ) -> InterpResult<'tcx> {
        self.check_misalign(self.is_ptr_misaligned(ptr, align), CheckAlignMsg::AccessedPtr)
    }
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
    /// Removes all entries from `dead_alloc_map` that are not in `reachable_allocs`.
    pub fn remove_unreachable_allocs(&mut self, reachable_allocs: &FxHashSet<AllocId>) {
        // We only check membership per id, so iteration order does not affect the result.
        #[allow(rustc::potential_query_instability)]
        self.memory.dead_alloc_map.retain(|id, _| reachable_allocs.contains(id));
    }
}

/// Allocation accessors.
impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
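    /// Looks up the allocation behind a global `AllocId`, evaluating static initializers as
    /// needed. This errors for function/vtable IDs, dangling IDs, and extern statics, and
    /// gives the machine a chance to inspect and adjust the allocation.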
    fn get_global_alloc(
        &self,
        id: AllocId,
        is_write: bool,
    ) -> InterpResult<'tcx, Cow<'tcx, Allocation<M::Provenance, M::AllocExtra, M::Bytes>>> {
        let (alloc, def_id) = match self.tcx.try_get_global_alloc(id) {
            Some(GlobalAlloc::Memory(mem)) => {
                (mem, None)
            }
            Some(GlobalAlloc::Function { .. }) => throw_ub!(DerefFunctionPointer(id)),
            Some(GlobalAlloc::VTable(..)) => throw_ub!(DerefVTablePointer(id)),
            None => throw_ub!(PointerUseAfterFree(id, CheckInAllocMsg::MemoryAccess)),
            Some(GlobalAlloc::Static(def_id)) => {
                assert!(self.tcx.is_static(def_id));
                assert!(!self.tcx.is_thread_local_static(def_id));
                // The contents of foreign (`extern`) statics are not knowable here.
                if self.tcx.is_foreign_item(def_id) {
                    throw_unsup!(ExternStatic(def_id));
                }

                let val = self.ctfe_query(|tcx| tcx.eval_static_initializer(def_id))?;
                (val, Some(def_id))
            }
        };
        M::before_access_global(self.tcx, &self.machine, id, alloc, def_id, is_write)?;
        // Give the machine a chance to adjust this global allocation for its own needs.
        M::adjust_global_allocation(
            self,
            id,
            alloc.inner(),
        )
    }

    /// Gives raw access to the `Allocation`, without bounds or alignment checks.
    /// Most code should use `get_ptr_alloc` instead, which runs the access hooks.
    fn get_alloc_raw(
        &self,
        id: AllocId,
    ) -> InterpResult<'tcx, &Allocation<M::Provenance, M::AllocExtra, M::Bytes>> {
        // `get_or` either returns the locally stored allocation, or uses the closure to fetch
        // the global one. The closure's `Err` case covers both real errors and the case where
        // the global allocation can be used by reference without copying it into the map.
        let a = self.memory.alloc_map.get_or(id, || {
            let alloc = self.get_global_alloc(id, /*is_write*/ false).report_err().map_err(Err)?;
            match alloc {
                Cow::Borrowed(alloc) => {
                    // We got a reference, return it as an "error" so the map is not mutated.
                    Err(Ok(alloc))
                }
                Cow::Owned(alloc) => {
                    // Need to put it into the map and return a reference to that.
                    let kind = M::GLOBAL_KIND.expect(
                        "I got a global allocation that I have to copy but the machine does \
                         not expect that to happen",
                    );
                    Ok((MemoryKind::Machine(kind), alloc))
                }
            }
        });
        // Now unpack that nested result type.
        match a {
            Ok(a) => interp_ok(&a.1),
            Err(a) => a.into(),
        }
    }

    /// Gives raw, immutable access to the `Allocation`'s underlying bytes, without bounds
    /// or alignment checks.
    pub fn get_alloc_bytes_unchecked_raw(&self, id: AllocId) -> InterpResult<'tcx, *const u8> {
        let alloc = self.get_alloc_raw(id)?;
        interp_ok(alloc.get_bytes_unchecked_raw())
    }

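    /// Bounds-checked *but not align-checked* allocation access for reading.
    /// Returns `None` for zero-sized accesses, where no allocation needs to be touched.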
    pub fn get_ptr_alloc<'a>(
        &'a self,
        ptr: Pointer<Option<M::Provenance>>,
        size: Size,
    ) -> InterpResult<'tcx, Option<AllocRef<'a, 'tcx, M::Provenance, M::AllocExtra, M::Bytes>>>
    {
        let size_i64 = i64::try_from(size.bytes()).unwrap();
        let ptr_and_alloc = Self::check_and_deref_ptr(
            self,
            ptr,
            size_i64,
            CheckInAllocMsg::MemoryAccess,
            |this, alloc_id, offset, prov| {
                let alloc = this.get_alloc_raw(alloc_id)?;
                interp_ok((alloc.size(), alloc.align, (alloc_id, offset, prov, alloc)))
            },
        )?;
        // Call `before_alloc_read` for *all* reads that involve an `AllocId`, even zero-sized
        // ones (which the `Some` branch below does not cover).
        if !self.memory.validation_in_progress.get() {
            if let Ok((alloc_id, ..)) = self.ptr_try_get_alloc_id(ptr, size_i64) {
                M::before_alloc_read(self, alloc_id)?;
            }
        }

        if let Some((alloc_id, offset, prov, alloc)) = ptr_and_alloc {
            let range = alloc_range(offset, size);
            if !self.memory.validation_in_progress.get() {
                M::before_memory_read(
                    self.tcx,
                    &self.machine,
                    &alloc.extra,
                    ptr,
                    (alloc_id, prov),
                    range,
                )?;
            }
            interp_ok(Some(AllocRef { alloc, range, tcx: *self.tcx, alloc_id }))
        } else {
            interp_ok(None)
        }
    }

    /// Return the `extra` field of the given allocation.
    pub fn get_alloc_extra<'a>(&'a self, id: AllocId) -> InterpResult<'tcx, &'a M::AllocExtra> {
        interp_ok(&self.get_alloc_raw(id)?.extra)
    }

    /// Return the `mutability` field of the given allocation.
    pub fn get_alloc_mutability<'a>(&'a self, id: AllocId) -> InterpResult<'tcx, Mutability> {
        interp_ok(self.get_alloc_raw(id)?.mutability)
    }

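    /// Gives raw mutable access to the `Allocation`, without bounds or alignment checks,
    /// together with mutable access to the machine (a split borrow of `self`). Global
    /// allocations are copied into local memory first; writes to read-only allocations error.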
    fn get_alloc_raw_mut(
        &mut self,
        id: AllocId,
    ) -> InterpResult<'tcx, (&mut Allocation<M::Provenance, M::AllocExtra, M::Bytes>, &mut M)> {
        // Check the local map first; if the allocation is not there, copy the global one in.
        if self.memory.alloc_map.get_mut(id).is_none() {
            let alloc = self.get_global_alloc(id, /*is_write*/ true)?;
            let kind = M::GLOBAL_KIND.expect(
                "I got a global allocation that I have to copy but the machine does \
                 not expect that to happen",
            );
            self.memory.alloc_map.insert(id, (MemoryKind::Machine(kind), alloc.into_owned()));
        }

        let (_kind, alloc) = self.memory.alloc_map.get_mut(id).unwrap();
        if alloc.mutability.is_not() {
            throw_ub!(WriteToReadOnly(id))
        }
        interp_ok((alloc, &mut self.machine))
    }

    /// Gives raw, mutable access to the `Allocation`'s underlying bytes, without bounds
    /// or alignment checks.
    pub fn get_alloc_bytes_unchecked_raw_mut(
        &mut self,
        id: AllocId,
    ) -> InterpResult<'tcx, *mut u8> {
        let alloc = self.get_alloc_raw_mut(id)?.0;
        interp_ok(alloc.get_bytes_unchecked_raw_mut())
    }

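    /// Bounds-checked *but not align-checked* allocation access for writing.
    /// Returns `None` for zero-sized accesses, where no allocation needs to be touched.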
    pub fn get_ptr_alloc_mut<'a>(
        &'a mut self,
        ptr: Pointer<Option<M::Provenance>>,
        size: Size,
    ) -> InterpResult<'tcx, Option<AllocRefMut<'a, 'tcx, M::Provenance, M::AllocExtra, M::Bytes>>>
    {
        let tcx = self.tcx;
        let validation_in_progress = self.memory.validation_in_progress.get();

        let size_i64 = i64::try_from(size.bytes()).unwrap();
        let ptr_and_alloc = Self::check_and_deref_ptr(
            self,
            ptr,
            size_i64,
            CheckInAllocMsg::MemoryAccess,
            |this, alloc_id, offset, prov| {
                let (alloc, machine) = this.get_alloc_raw_mut(alloc_id)?;
                interp_ok((alloc.size(), alloc.align, (alloc_id, offset, prov, alloc, machine)))
            },
        )?;

        if let Some((alloc_id, offset, prov, alloc, machine)) = ptr_and_alloc {
            let range = alloc_range(offset, size);
            if !validation_in_progress {
                M::before_memory_write(
                    tcx,
                    machine,
                    &mut alloc.extra,
                    ptr,
                    (alloc_id, prov),
                    range,
                )?;
            }
            interp_ok(Some(AllocRefMut { alloc, range, tcx: *tcx, alloc_id }))
        } else {
            interp_ok(None)
        }
    }

    /// Return the `extra` field of the given allocation, with mutable access to the machine.
    pub fn get_alloc_extra_mut<'a>(
        &'a mut self,
        id: AllocId,
    ) -> InterpResult<'tcx, (&'a mut M::AllocExtra, &'a mut M)> {
        let (alloc, machine) = self.get_alloc_raw_mut(id)?;
        interp_ok((&mut alloc.extra, machine))
    }

    /// Check whether an allocation is live: local, an extra function pointer, or global.
    pub fn is_alloc_live(&self, id: AllocId) -> bool {
        self.memory.alloc_map.contains_key_ref(&id)
            || self.memory.extra_fn_ptr_map.contains_key(&id)
            || self.tcx.try_get_global_alloc(id).is_some()
    }

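    /// Obtain the size, alignment, kind, and mutability of the allocation `id` refers to,
    /// regardless of whether it is local, global, a function, a vtable, or already dead.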
    pub fn get_alloc_info(&self, id: AllocId) -> AllocInfo {
        // Regular (local) allocations.
        if let Some((_, alloc)) = self.memory.alloc_map.get(id) {
            return AllocInfo::new(
                alloc.size(),
                alloc.align,
                AllocKind::LiveData,
                alloc.mutability,
            );
        }

        // Function pointers (both proper instances and machine-defined "extra" functions).
        if let Some(fn_val) = self.get_fn_alloc(id) {
            let align = match fn_val {
                FnVal::Instance(instance) => {
                    // Take the larger of the per-function alignment attribute and the global
                    // `-Zmin-function-alignment`.
                    let fn_align = self.tcx.codegen_fn_attrs(instance.def_id()).alignment;
                    let global_align = self.tcx.sess.opts.unstable_opts.min_function_alignment;

                    Ord::max(global_align, fn_align).unwrap_or(Align::ONE)
                }
                FnVal::Other(_) => Align::ONE,
            };

            return AllocInfo::new(Size::ZERO, align, AllocKind::Function, Mutability::Not);
        }

        // Global allocations.
        if let Some(global_alloc) = self.tcx.try_get_global_alloc(id) {
            let (size, align) = global_alloc.size_and_align(*self.tcx, self.typing_env);
            let mutbl = global_alloc.mutability(*self.tcx, self.typing_env);
            let kind = match global_alloc {
                GlobalAlloc::Static { .. } | GlobalAlloc::Memory { .. } => AllocKind::LiveData,
                GlobalAlloc::Function { .. } => bug!("We already checked function pointers above"),
                GlobalAlloc::VTable { .. } => AllocKind::VTable,
            };
            return AllocInfo::new(size, align, kind, mutbl);
        }

        // Everything else must have been deallocated.
        let (size, align) = *self
            .memory
            .dead_alloc_map
            .get(&id)
            .expect("deallocated pointers should all be recorded in `dead_alloc_map`");
        AllocInfo::new(size, align, AllocKind::Dead, Mutability::Not)
    }

    /// Obtain the size and alignment of a *live* allocation, erroring for dead ones.
    fn get_live_alloc_size_and_align(
        &self,
        id: AllocId,
        msg: CheckInAllocMsg,
    ) -> InterpResult<'tcx, (Size, Align)> {
        let info = self.get_alloc_info(id);
        if matches!(info.kind, AllocKind::Dead) {
            throw_ub!(PointerUseAfterFree(id, msg))
        }
        interp_ok((info.size, info.align))
    }

    fn get_fn_alloc(&self, id: AllocId) -> Option<FnVal<'tcx, M::ExtraFnVal>> {
        if let Some(extra) = self.memory.extra_fn_ptr_map.get(&id) {
            Some(FnVal::Other(*extra))
        } else {
            match self.tcx.try_get_global_alloc(id) {
                Some(GlobalAlloc::Function { instance, .. }) => Some(FnVal::Instance(instance)),
                _ => None,
            }
        }
    }

    pub fn get_ptr_fn(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
    ) -> InterpResult<'tcx, FnVal<'tcx, M::ExtraFnVal>> {
        trace!("get_ptr_fn({:?})", ptr);
        let (alloc_id, offset, _prov) = self.ptr_get_alloc_id(ptr, 0)?;
        if offset.bytes() != 0 {
            throw_ub!(InvalidFunctionPointer(Pointer::new(alloc_id, offset)))
        }
        self.get_fn_alloc(alloc_id)
            .ok_or_else(|| err_ub!(InvalidFunctionPointer(Pointer::new(alloc_id, offset))))
            .into()
    }

    /// Get the dynamic type of the given vtable pointer. If `expected_trait` is `Some`,
    /// the vtable is also checked against that trait.
    pub fn get_ptr_vtable_ty(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        expected_trait: Option<&'tcx ty::List<ty::PolyExistentialPredicate<'tcx>>>,
    ) -> InterpResult<'tcx, Ty<'tcx>> {
        trace!("get_ptr_vtable({:?})", ptr);
        let (alloc_id, offset, _tag) = self.ptr_get_alloc_id(ptr, 0)?;
        if offset.bytes() != 0 {
            throw_ub!(InvalidVTablePointer(Pointer::new(alloc_id, offset)))
        }
        let Some(GlobalAlloc::VTable(ty, vtable_dyn_type)) =
            self.tcx.try_get_global_alloc(alloc_id)
        else {
            throw_ub!(InvalidVTablePointer(Pointer::new(alloc_id, offset)))
        };
        if let Some(expected_dyn_type) = expected_trait {
            self.check_vtable_for_type(vtable_dyn_type, expected_dyn_type)?;
        }
        interp_ok(ty)
    }

    pub fn alloc_mark_immutable(&mut self, id: AllocId) -> InterpResult<'tcx> {
        self.get_alloc_raw_mut(id)?.0.mutability = Mutability::Not;
        interp_ok(())
    }

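    /// Visits the given allocations and everything reachable from them through provenance,
    /// preparing them for a native call: provenance is exposed and mutable allocations get
    /// `prepare_for_native_write` applied.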
    pub fn prepare_for_native_call(&mut self, ids: Vec<AllocId>) -> InterpResult<'tcx> {
        let mut done = FxHashSet::default();
        let mut todo = ids;
        while let Some(id) = todo.pop() {
            if !done.insert(id) {
                // We already saw this allocation before, don't process it again.
                continue;
            }
            let info = self.get_alloc_info(id);

            // If there is no data behind this pointer, skip it.
            if !matches!(info.kind, AllocKind::LiveData) {
                continue;
            }

            // Expose all provenances in this allocation and enqueue the allocations they refer to.
            let alloc = self.get_alloc_raw(id)?;
            for prov in alloc.provenance().provenances() {
                M::expose_provenance(self, prov)?;
                if let Some(id) = prov.get_alloc_id() {
                    todo.push(id);
                }
            }
            // Also expose the base address of the allocation itself.
            std::hint::black_box(alloc.get_bytes_unchecked_raw().expose_provenance());

            // Prepare for possible writes from native code if the allocation is mutable.
            if info.mutbl.is_mut() {
                self.get_alloc_raw_mut(id)?
                    .0
                    .prepare_for_native_write()
                    .map_err(|e| e.to_interp_error(id))?;
            }
        }
        interp_ok(())
    }

    /// Create a lazy debug printer that prints the given allocation and all allocations it
    /// points to, recursively.
    #[must_use]
    pub fn dump_alloc<'a>(&'a self, id: AllocId) -> DumpAllocs<'a, 'tcx, M> {
        self.dump_allocs(vec![id])
    }

    /// Create a lazy debug printer for a list of allocations and all allocations they point
    /// to, recursively.
    #[must_use]
    pub fn dump_allocs<'a>(&'a self, mut allocs: Vec<AllocId>) -> DumpAllocs<'a, 'tcx, M> {
        allocs.sort();
        allocs.dedup();
        DumpAllocs { ecx: self, allocs }
    }

    /// Print the allocation's bytes, without any nested allocations.
    pub fn print_alloc_bytes_for_diagnostics(&self, id: AllocId) -> String {
        // Use the raw access (no hooks), since this is only for diagnostics.
        let alloc = self.get_alloc_raw(id).unwrap();
        let mut bytes = String::new();
        if alloc.size() != Size::ZERO {
            bytes = "\n".into();
            rustc_middle::mir::pretty::write_allocation_bytes(*self.tcx, alloc, &mut bytes, " ")
                .unwrap();
        }
        bytes
    }

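    /// Find leaked allocations: allocations whose kind is not `may_leak` and that are not
    /// reachable from any global allocation or the given static roots. The leaked
    /// allocations are removed from memory and returned.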
    pub fn take_leaked_allocations(
        &mut self,
        static_roots: impl FnOnce(&Self) -> &[AllocId],
    ) -> Vec<(AllocId, MemoryKind<M::MemoryKind>, Allocation<M::Provenance, M::AllocExtra, M::Bytes>)>
    {
        // Collect the set of allocations that are *reachable* from global allocations.
        let reachable = {
            let mut reachable = FxHashSet::default();
            let global_kind = M::GLOBAL_KIND.map(MemoryKind::Machine);
            let mut todo: Vec<_> =
                self.memory.alloc_map.filter_map_collect(move |&id, &(kind, _)| {
                    if Some(kind) == global_kind { Some(id) } else { None }
                });
            todo.extend(static_roots(self));
            while let Some(id) = todo.pop() {
                if reachable.insert(id) {
                    // Newly reachable: also enqueue everything this allocation points to.
                    if let Some((_, alloc)) = self.memory.alloc_map.get(id) {
                        todo.extend(
                            alloc.provenance().provenances().filter_map(|prov| prov.get_alloc_id()),
                        );
                    }
                }
            }
            reachable
        };

        // All allocations that are *not* reachable and *not* `may_leak` are considered leaking.
        let leaked: Vec<_> = self.memory.alloc_map.filter_map_collect(|&id, &(kind, _)| {
            if kind.may_leak() || reachable.contains(&id) { None } else { Some(id) }
        });
        let mut result = Vec::new();
        for &id in leaked.iter() {
            let (kind, alloc) = self.memory.alloc_map.remove(&id).unwrap();
            result.push((id, kind, alloc));
        }
        result
    }

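    /// Runs the closure in "validation" mode, which means the machine's memory access hooks
    /// are suppressed for the duration of the closure.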
    pub fn run_for_validation_mut<R>(&mut self, f: impl FnOnce(&mut Self) -> R) -> R {
        // This deliberately uses `==` on `bool` to follow the pattern
        // `assert!(val.replace(new) == old)`.
        assert!(
            self.memory.validation_in_progress.replace(true) == false,
            "`validation_in_progress` was already set"
        );
        let res = f(self);
        assert!(
            self.memory.validation_in_progress.replace(false) == true,
            "`validation_in_progress` was unset by someone else"
        );
        res
    }

    /// Like `run_for_validation_mut`, but takes a shared reference.
    pub fn run_for_validation_ref<R>(&self, f: impl FnOnce(&Self) -> R) -> R {
        assert!(
            self.memory.validation_in_progress.replace(true) == false,
            "`validation_in_progress` was already set"
        );
        let res = f(self);
        assert!(
            self.memory.validation_in_progress.replace(false) == true,
            "`validation_in_progress` was unset by someone else"
        );
        res
    }

    pub(super) fn validation_in_progress(&self) -> bool {
        self.memory.validation_in_progress.get()
    }
}

#[doc(hidden)]
pub struct DumpAllocs<'a, 'tcx, M: Machine<'tcx>> {
    ecx: &'a InterpCx<'tcx, M>,
    allocs: Vec<AllocId>,
}

impl<'a, 'tcx, M: Machine<'tcx>> std::fmt::Debug for DumpAllocs<'a, 'tcx, M> {
    fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        fn write_allocation_track_relocs<'tcx, Prov: Provenance, Extra, Bytes: AllocBytes>(
            fmt: &mut std::fmt::Formatter<'_>,
            tcx: TyCtxt<'tcx>,
            allocs_to_print: &mut VecDeque<AllocId>,
            alloc: &Allocation<Prov, Extra, Bytes>,
        ) -> std::fmt::Result {
            for alloc_id in alloc.provenance().provenances().filter_map(|prov| prov.get_alloc_id())
            {
                allocs_to_print.push_back(alloc_id);
            }
            write!(fmt, "{}", display_allocation(tcx, alloc))
        }

        let mut allocs_to_print: VecDeque<_> = self.allocs.iter().copied().collect();
        let mut allocs_printed = FxHashSet::default();

        while let Some(id) = allocs_to_print.pop_front() {
            if !allocs_printed.insert(id) {
                continue;
            }

            write!(fmt, "{id:?}")?;
            match self.ecx.memory.alloc_map.get(id) {
                Some((kind, alloc)) => {
                    write!(fmt, " ({kind}, ")?;
                    write_allocation_track_relocs(
                        &mut *fmt,
                        *self.ecx.tcx,
                        &mut allocs_to_print,
                        alloc,
                    )?;
                }
                None => {
                    match self.ecx.tcx.try_get_global_alloc(id) {
                        Some(GlobalAlloc::Memory(alloc)) => {
                            write!(fmt, " (unchanged global, ")?;
                            write_allocation_track_relocs(
                                &mut *fmt,
                                *self.ecx.tcx,
                                &mut allocs_to_print,
                                alloc.inner(),
                            )?;
                        }
                        Some(GlobalAlloc::Function { instance, .. }) => {
                            write!(fmt, " (fn: {instance})")?;
                        }
                        Some(GlobalAlloc::VTable(ty, dyn_ty)) => {
                            write!(fmt, " (vtable: impl {dyn_ty} for {ty})")?;
                        }
                        Some(GlobalAlloc::Static(did)) => {
                            write!(fmt, " (static: {})", self.ecx.tcx.def_path_str(did))?;
                        }
                        None => {
                            write!(fmt, " (deallocated)")?;
                        }
                    }
                }
            }
            writeln!(fmt)?;
        }
        Ok(())
    }
}

impl<'a, 'tcx, Prov: Provenance, Extra, Bytes: AllocBytes>
    AllocRefMut<'a, 'tcx, Prov, Extra, Bytes>
{
    pub fn as_ref<'b>(&'b self) -> AllocRef<'b, 'tcx, Prov, Extra, Bytes> {
        AllocRef { alloc: self.alloc, range: self.range, tcx: self.tcx, alloc_id: self.alloc_id }
    }

    /// `range` is relative to this allocation reference, not the base of the allocation.
    pub fn write_scalar(&mut self, range: AllocRange, val: Scalar<Prov>) -> InterpResult<'tcx> {
        let range = self.range.subrange(range);
        debug!("write_scalar at {:?}{range:?}: {val:?}", self.alloc_id);

        self.alloc
            .write_scalar(&self.tcx, range, val)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }

    /// `offset` is relative to this allocation reference, not the base of the allocation.
    pub fn write_ptr_sized(&mut self, offset: Size, val: Scalar<Prov>) -> InterpResult<'tcx> {
        self.write_scalar(alloc_range(offset, self.tcx.data_layout().pointer_size), val)
    }

    /// Mark the given sub-range (relative to this allocation reference) as uninitialized.
    pub fn write_uninit(&mut self, range: AllocRange) -> InterpResult<'tcx> {
        let range = self.range.subrange(range);

        self.alloc
            .write_uninit(&self.tcx, range)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }

    /// Mark the entire referenced range as uninitialized.
    pub fn write_uninit_full(&mut self) -> InterpResult<'tcx> {
        self.alloc
            .write_uninit(&self.tcx, self.range)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }

    /// Remove all provenance in the referenced range.
    pub fn clear_provenance(&mut self) -> InterpResult<'tcx> {
        self.alloc
            .clear_provenance(&self.tcx, self.range)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }
}

impl<'a, 'tcx, Prov: Provenance, Extra, Bytes: AllocBytes> AllocRef<'a, 'tcx, Prov, Extra, Bytes> {
    /// `range` is relative to this allocation reference, not the base of the allocation.
    pub fn read_scalar(
        &self,
        range: AllocRange,
        read_provenance: bool,
    ) -> InterpResult<'tcx, Scalar<Prov>> {
        let range = self.range.subrange(range);
        self.alloc
            .read_scalar(&self.tcx, range, read_provenance)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }

    /// `range` is relative to this allocation reference, not the base of the allocation.
    pub fn read_integer(&self, range: AllocRange) -> InterpResult<'tcx, Scalar<Prov>> {
        self.read_scalar(range, /*read_provenance*/ false)
    }

    /// `offset` is relative to this allocation reference, not the base of the allocation.
    pub fn read_pointer(&self, offset: Size) -> InterpResult<'tcx, Scalar<Prov>> {
        self.read_scalar(
            alloc_range(offset, self.tcx.data_layout().pointer_size),
            /*read_provenance*/ true,
        )
    }

    /// Returns the bytes covered by this allocation reference, with provenance stripped.
    pub fn get_bytes_strip_provenance<'b>(&'b self) -> InterpResult<'tcx, &'a [u8]> {
        self.alloc
            .get_bytes_strip_provenance(&self.tcx, self.range)
            .map_err(|e| e.to_interp_error(self.alloc_id))
            .into()
    }

    /// Returns whether the allocation has provenance anywhere in the referenced range.
    pub fn has_provenance(&self) -> bool {
        !self.alloc.provenance().range_empty(self.range, &self.tcx)
    }
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
    /// Reads the given number of bytes from memory, strips their provenance if possible,
    /// and returns them as a slice. Performs appropriate bounds checks.
    pub fn read_bytes_ptr_strip_provenance(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: Size,
    ) -> InterpResult<'tcx, &[u8]> {
        let Some(alloc_ref) = self.get_ptr_alloc(ptr, size)? else {
            // zero-sized access
            return interp_ok(&[]);
        };
        interp_ok(
            alloc_ref
                .alloc
                .get_bytes_strip_provenance(&alloc_ref.tcx, alloc_ref.range)
                .map_err(|e| e.to_interp_error(alloc_ref.alloc_id))?,
        )
    }

    /// Writes the given stream of bytes into memory. Performs appropriate bounds checks.
    pub fn write_bytes_ptr(
        &mut self,
        ptr: Pointer<Option<M::Provenance>>,
        src: impl IntoIterator<Item = u8>,
    ) -> InterpResult<'tcx> {
        let mut src = src.into_iter();
        let (lower, upper) = src.size_hint();
        let len = upper.expect("can only write bounded iterators");
        assert_eq!(lower, len, "can only write iterators with a precise length");

        let size = Size::from_bytes(len);
        let Some(alloc_ref) = self.get_ptr_alloc_mut(ptr, size)? else {
            // zero-sized access
            assert_matches!(src.next(), None, "iterator said it was empty but returned an element");
            return interp_ok(());
        };

        // Write directly to the allocation's bytes; the access was bounds-checked above.
        let alloc_id = alloc_ref.alloc_id;
        let bytes = alloc_ref
            .alloc
            .get_bytes_unchecked_for_overwrite(&alloc_ref.tcx, alloc_ref.range)
            .map_err(move |e| e.to_interp_error(alloc_id))?;
        // `zip` would stop when the first iterator ends; we want to definitely cover all of `bytes`.
        for dest in bytes {
            *dest = src.next().expect("iterator was shorter than it said it would be");
        }
        assert_matches!(src.next(), None, "iterator was longer than it said it would be");
        interp_ok(())
    }

    pub fn mem_copy(
        &mut self,
        src: Pointer<Option<M::Provenance>>,
        dest: Pointer<Option<M::Provenance>>,
        size: Size,
        nonoverlapping: bool,
    ) -> InterpResult<'tcx> {
        self.mem_copy_repeatedly(src, dest, size, 1, nonoverlapping)
    }

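    /// Performs `num_copies` many copies of `size` many bytes from `src` to `dest + i*size`
    /// (where `i` is the index of the copy).
    ///
    /// Either `nonoverlapping` must be true or `num_copies` must be 1; doing repeated copies
    /// that may overlap is not supported.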
    pub fn mem_copy_repeatedly(
        &mut self,
        src: Pointer<Option<M::Provenance>>,
        dest: Pointer<Option<M::Provenance>>,
        size: Size,
        num_copies: u64,
        nonoverlapping: bool,
    ) -> InterpResult<'tcx> {
        let tcx = self.tcx;
        // We need to do our own bounds checks.
        let src_parts = self.get_ptr_access(src, size)?;
        let dest_parts = self.get_ptr_access(dest, size * num_copies)?; // `Size` multiplication

        // Source alloc preparations and access hooks.
        let Some((src_alloc_id, src_offset, src_prov)) = src_parts else {
            // Zero-sized *source*, so the destination is also zero-sized and there is nothing to do.
            return interp_ok(());
        };
        let src_alloc = self.get_alloc_raw(src_alloc_id)?;
        let src_range = alloc_range(src_offset, size);
        assert!(
            !self.memory.validation_in_progress.get(),
            "we can't be copying during validation"
        );
        M::before_memory_read(
            tcx,
            &self.machine,
            &src_alloc.extra,
            src,
            (src_alloc_id, src_prov),
            src_range,
        )?;
        let Some((dest_alloc_id, dest_offset, dest_prov)) = dest_parts else {
            // Zero-sized *destination*.
            return interp_ok(());
        };

        // Snapshot the source bytes, provenance, and init mask before we get mutable access
        // to the destination (which may overwrite them if the two ranges alias).
        let src_bytes = src_alloc.get_bytes_unchecked(src_range).as_ptr();
        let provenance = src_alloc
            .provenance()
            .prepare_copy(src_range, dest_offset, num_copies, self)
            .map_err(|e| e.to_interp_error(dest_alloc_id))?;
        let init = src_alloc.init_mask().prepare_copy(src_range);

        // Destination alloc preparations and access hooks.
        let (dest_alloc, extra) = self.get_alloc_raw_mut(dest_alloc_id)?;
        let dest_range = alloc_range(dest_offset, size * num_copies);
        M::before_memory_write(
            tcx,
            extra,
            &mut dest_alloc.extra,
            dest,
            (dest_alloc_id, dest_prov),
            dest_range,
        )?;
        let dest_bytes = dest_alloc
            .get_bytes_unchecked_for_overwrite_ptr(&tcx, dest_range)
            .map_err(|e| e.to_interp_error(dest_alloc_id))?
            .as_mut_ptr();

        if init.no_bytes_init() {
            // If none of the source bytes are initialized, just mark the destination range as
            // uninitialized and skip copying any bytes.
            dest_alloc
                .write_uninit(&tcx, dest_range)
                .map_err(|e| e.to_interp_error(dest_alloc_id))?;
            return interp_ok(());
        }

        // SAFETY: the bounds checks and `get_bytes_*` calls above made sure that `src_bytes`
        // and `dest_bytes` are valid for the reads/writes of the sizes used below.
        unsafe {
            if src_alloc_id == dest_alloc_id {
                if nonoverlapping {
                    // `Size` additions
                    if (src_offset <= dest_offset && src_offset + size > dest_offset)
                        || (dest_offset <= src_offset && dest_offset + size > src_offset)
                    {
                        throw_ub_custom!(fluent::const_eval_copy_nonoverlapping_overlapping);
                    }
                }
            }
            if num_copies > 1 {
                assert!(nonoverlapping, "multi-copy only supported in non-overlapping mode");
            }

            let size_in_bytes = size.bytes_usize();
            if size_in_bytes == 1 {
                // Single-byte copies can be expanded into a `write_bytes` (memset-style) fill.
                debug_assert!(num_copies >= 1); // zero-sized cases were handled above
                let value = *src_bytes;
                dest_bytes.write_bytes(value, (size * num_copies).bytes_usize());
            } else if src_alloc_id == dest_alloc_id {
                let mut dest_ptr = dest_bytes;
                for _ in 0..num_copies {
                    // Within one allocation the ranges may overlap, so use `ptr::copy`.
                    ptr::copy(src_bytes, dest_ptr, size_in_bytes);
                    dest_ptr = dest_ptr.add(size_in_bytes);
                }
            } else {
                let mut dest_ptr = dest_bytes;
                for _ in 0..num_copies {
                    ptr::copy_nonoverlapping(src_bytes, dest_ptr, size_in_bytes);
                    dest_ptr = dest_ptr.add(size_in_bytes);
                }
            }
        }

        // Now fill in the init mask and provenance of the destination.
        dest_alloc.init_mask_apply_copy(
            init,
            alloc_range(dest_offset, size), // range of the first copy; `num_copies` extends it
            num_copies,
        );
        dest_alloc.provenance_apply_copy(provenance);

        interp_ok(())
    }
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
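    /// Tests whether this scalar might be null. This is conservative: for pointers whose
    /// absolute address is unknown, it returns `true` unless the offset or alignment rules
    /// out null.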
    pub fn scalar_may_be_null(&self, scalar: Scalar<M::Provenance>) -> InterpResult<'tcx, bool> {
        match scalar.try_to_scalar_int() {
            Ok(int) => interp_ok(int.is_null()),
            Err(_) => {
                // The scalar is a pointer with provenance; reason about its allocation instead.
                let ptr = scalar.to_pointer(self)?;
                match self.ptr_try_get_alloc_id(ptr, 0) {
                    Ok((alloc_id, offset, _)) => {
                        let info = self.get_alloc_info(alloc_id);
                        // In-bounds pointers (including one past the end) are never null.
                        if offset <= info.size {
                            return interp_ok(false);
                        }
                        // If the allocation's alignment does not divide the offset, then
                        // `base + offset` cannot be null either: null is a multiple of every
                        // alignment, and `base` is a multiple of `align`.
                        if offset.bytes() % info.align.bytes() != 0 {
                            return interp_ok(false);
                        }
                        // Otherwise we cannot rule out null.
                        interp_ok(true)
                    }
                    Err(_offset) => bug!("a non-int scalar is always a pointer"),
                }
            }
        }
    }

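    /// Turns a pointer into its constituent `AllocId` + offset + machine-specific provenance
    /// extra, if it has provenance; otherwise returns the pointer's absolute address as `Err`.
    /// The expected access `size` is forwarded to the machine's `ptr_get_alloc` hook.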
    pub fn ptr_try_get_alloc_id(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: i64,
    ) -> Result<(AllocId, Size, M::ProvenanceExtra), u64> {
        match ptr.into_pointer_or_addr() {
            Ok(ptr) => match M::ptr_get_alloc(self, ptr, size) {
                Some((alloc_id, offset, extra)) => Ok((alloc_id, offset, extra)),
                None => {
                    assert!(M::Provenance::OFFSET_IS_ADDR);
                    let (_, addr) = ptr.into_parts();
                    Err(addr.bytes())
                }
            },
            Err(addr) => Err(addr.bytes()),
        }
    }

    /// Like `ptr_try_get_alloc_id`, but throws `DanglingIntPointer` instead of returning the
    /// absolute address when the pointer has no provenance.
    #[inline(always)]
    pub fn ptr_get_alloc_id(
        &self,
        ptr: Pointer<Option<M::Provenance>>,
        size: i64,
    ) -> InterpResult<'tcx, (AllocId, Size, M::ProvenanceExtra)> {
        self.ptr_try_get_alloc_id(ptr, size)
            .map_err(|offset| {
                err_ub!(DanglingIntPointer {
                    addr: offset,
                    inbounds_size: size,
                    msg: CheckInAllocMsg::Dereferenceable
                })
            })
            .into()
    }
}