miri/borrow_tracker/tree_borrows/
mod.rs

1use rustc_abi::{BackendRepr, Size};
2use rustc_middle::mir::{Mutability, RetagKind};
3use rustc_middle::ty::layout::HasTypingEnv;
4use rustc_middle::ty::{self, Ty};
5
6use self::foreign_access_skipping::IdempotentForeignAccess;
7use self::tree::LocationState;
8use crate::borrow_tracker::{GlobalState, GlobalStateInner, ProtectorKind};
9use crate::concurrency::data_race::NaReadType;
10use crate::*;
11
12pub mod diagnostics;
13mod foreign_access_skipping;
14mod perms;
15mod tree;
16mod unimap;
17
18#[cfg(test)]
19mod exhaustive;
20
21use self::perms::Permission;
22pub use self::tree::Tree;
23
/// Per-allocation Tree Borrows state: one borrow tree per allocation.
pub type AllocState = Tree;
25
26impl<'tcx> Tree {
27    /// Create a new allocation, i.e. a new tree
28    pub fn new_allocation(
29        id: AllocId,
30        size: Size,
31        state: &mut GlobalStateInner,
32        _kind: MemoryKind,
33        machine: &MiriMachine<'tcx>,
34    ) -> Self {
35        let tag = state.root_ptr_tag(id, machine); // Fresh tag for the root
36        let span = machine.current_span();
37        Tree::new(tag, size, span)
38    }
39
40    /// Check that an access on the entire range is permitted, and update
41    /// the tree.
42    pub fn before_memory_access(
43        &mut self,
44        access_kind: AccessKind,
45        alloc_id: AllocId,
46        prov: ProvenanceExtra,
47        range: AllocRange,
48        machine: &MiriMachine<'tcx>,
49    ) -> InterpResult<'tcx> {
50        trace!(
51            "{} with tag {:?}: {:?}, size {}",
52            access_kind,
53            prov,
54            interpret::Pointer::new(alloc_id, range.start),
55            range.size.bytes(),
56        );
57        // TODO: for now we bail out on wildcard pointers. Eventually we should
58        // handle them as much as we can.
59        let tag = match prov {
60            ProvenanceExtra::Concrete(tag) => tag,
61            ProvenanceExtra::Wildcard => return interp_ok(()),
62        };
63        let global = machine.borrow_tracker.as_ref().unwrap();
64        let span = machine.current_span();
65        self.perform_access(
66            tag,
67            Some((range, access_kind, diagnostics::AccessCause::Explicit(access_kind))),
68            global,
69            alloc_id,
70            span,
71        )
72    }
73
74    /// Check that this pointer has permission to deallocate this range.
75    pub fn before_memory_deallocation(
76        &mut self,
77        alloc_id: AllocId,
78        prov: ProvenanceExtra,
79        size: Size,
80        machine: &MiriMachine<'tcx>,
81    ) -> InterpResult<'tcx> {
82        // TODO: for now we bail out on wildcard pointers. Eventually we should
83        // handle them as much as we can.
84        let tag = match prov {
85            ProvenanceExtra::Concrete(tag) => tag,
86            ProvenanceExtra::Wildcard => return interp_ok(()),
87        };
88        let global = machine.borrow_tracker.as_ref().unwrap();
89        let span = machine.current_span();
90        self.dealloc(tag, alloc_range(Size::ZERO, size), global, alloc_id, span)
91    }
92
93    pub fn expose_tag(&mut self, _tag: BorTag) {
94        // TODO
95    }
96
97    /// A tag just lost its protector.
98    ///
99    /// This emits a special kind of access that is only applied
100    /// to accessed locations, as a protection against other
101    /// tags not having been made aware of the existence of this
102    /// protector.
103    pub fn release_protector(
104        &mut self,
105        machine: &MiriMachine<'tcx>,
106        global: &GlobalState,
107        tag: BorTag,
108        alloc_id: AllocId, // diagnostics
109    ) -> InterpResult<'tcx> {
110        let span = machine.current_span();
111        // `None` makes it the magic on-protector-end operation
112        self.perform_access(tag, None, global, alloc_id, span)
113    }
114}
115
/// Policy for a new borrow: which initial permission each part of the
/// pointee gets, whether creating the borrow performs a read access, and
/// whether the new tag is protected for the duration of the call.
#[derive(Debug, Clone, Copy)]
pub struct NewPermission {
    /// Permission for the frozen (no `UnsafeCell`) part of the range.
    freeze_perm: Permission,
    /// Whether a read access should be performed on the frozen part on a retag.
    freeze_access: bool,
    /// Permission for the non-frozen (inside `UnsafeCell`) part of the range.
    nonfreeze_perm: Permission,
    /// Whether a read access should be performed on the non-frozen
    /// part on a retag.
    nonfreeze_access: bool,
    /// Permission for memory outside the range covered by the retag.
    outside_perm: Permission,
    /// Whether this pointer is part of the arguments of a function call.
    /// `protector` is `Some(_)` for all pointers marked `noalias`.
    protector: Option<ProtectorKind>,
}
134
135impl<'tcx> NewPermission {
136    /// Determine NewPermission of the reference/Box from the type of the pointee.
137    ///
138    /// A `ref_mutability` of `None` indicates a `Box` type.
139    fn new(
140        pointee: Ty<'tcx>,
141        ref_mutability: Option<Mutability>,
142        retag_kind: RetagKind,
143        cx: &crate::MiriInterpCx<'tcx>,
144    ) -> Option<Self> {
145        let ty_is_unpin = pointee.is_unpin(*cx.tcx, cx.typing_env());
146        let ty_is_freeze = pointee.is_freeze(*cx.tcx, cx.typing_env());
147        let is_protected = retag_kind == RetagKind::FnEntry;
148
149        if matches!(ref_mutability, Some(Mutability::Mut) | None if !ty_is_unpin) {
150            // Mutable reference / Box to pinning type: retagging is a NOP.
151            // FIXME: with `UnsafePinned`, this should do proper per-byte tracking.
152            return None;
153        }
154
155        let freeze_perm = match ref_mutability {
156            // Shared references are frozen.
157            Some(Mutability::Not) => Permission::new_frozen(),
158            // Mutable references and Boxes are reserved.
159            _ => Permission::new_reserved_frz(),
160        };
161        let nonfreeze_perm = match ref_mutability {
162            // Shared references are "transparent".
163            Some(Mutability::Not) => Permission::new_cell(),
164            // *Protected* mutable references and boxes are reserved without regarding for interior mutability.
165            _ if is_protected => Permission::new_reserved_frz(),
166            // Unprotected mutable references and boxes start in `ReservedIm`.
167            _ => Permission::new_reserved_im(),
168        };
169
170        // Everything except for `Cell` gets an initial access.
171        let initial_access = |perm: &Permission| !perm.is_cell();
172
173        Some(NewPermission {
174            freeze_perm,
175            freeze_access: initial_access(&freeze_perm),
176            nonfreeze_perm,
177            nonfreeze_access: initial_access(&nonfreeze_perm),
178            outside_perm: if ty_is_freeze { freeze_perm } else { nonfreeze_perm },
179            protector: is_protected.then_some(if ref_mutability.is_some() {
180                // Strong protector for references
181                ProtectorKind::StrongProtector
182            } else {
183                // Weak protector for boxes
184                ProtectorKind::WeakProtector
185            }),
186        })
187    }
188}
189
/// Retagging/reborrowing.
/// Policy on which permission to grant to each pointer should be left to
/// the implementation of NewPermission.
impl<'tcx> EvalContextPrivExt<'tcx> for crate::MiriInterpCx<'tcx> {}
trait EvalContextPrivExt<'tcx>: crate::MiriInterpCxExt<'tcx> {
    /// Core reborrow operation: insert `new_tag` into the borrow tree as a
    /// child of the tag carried by `place`, covering `ptr_size` bytes, with
    /// the permissions and (optional) protector described by `new_perm`.
    ///
    /// Returns the provenance that should be used henceforth.
    fn tb_reborrow(
        &mut self,
        place: &MPlaceTy<'tcx>, // parent tag extracted from here
        ptr_size: Size,
        new_perm: NewPermission,
        new_tag: BorTag,
    ) -> InterpResult<'tcx, Option<Provenance>> {
        let this = self.eval_context_mut();
        // Ensure we bail out if the pointer goes out-of-bounds (see miri#1050).
        this.check_ptr_access(place.ptr(), ptr_size, CheckInAllocMsg::Dereferenceable)?;

        // It is crucial that this gets called on all code paths, to ensure we track tag creation.
        // Emits the `-Zmiri-track-pointer-tag` diagnostic when the new tag is tracked.
        let log_creation = |this: &MiriInterpCx<'tcx>,
                            loc: Option<(AllocId, Size, ProvenanceExtra)>| // alloc_id, base_offset, orig_tag
         -> InterpResult<'tcx> {
            let global = this.machine.borrow_tracker.as_ref().unwrap().borrow();
            let ty = place.layout.ty;
            if global.tracked_pointer_tags.contains(&new_tag) {
                 let ty_is_freeze = ty.is_freeze(*this.tcx, this.typing_env());
                 let kind_str =
                     if ty_is_freeze {
                         format!("initial state {} (pointee type {ty})", new_perm.freeze_perm)
                     } else {
                         format!("initial state {}/{} outside/inside UnsafeCell (pointee type {ty})", new_perm.freeze_perm, new_perm.nonfreeze_perm)
                     };
                this.emit_diagnostic(NonHaltingDiagnostic::CreatedPointerTag(
                    new_tag.inner(),
                    Some(kind_str),
                    loc.map(|(alloc_id, base_offset, orig_tag)| (alloc_id, alloc_range(base_offset, ptr_size), orig_tag)),
                ));
            }
            drop(global); // don't hold that reference any longer than we have to
            interp_ok(())
        };

        trace!("Reborrow of size {:?}", ptr_size);
        // Resolve the pointer to a concrete allocation, if it has one.
        let (alloc_id, base_offset, parent_prov) = match this.ptr_try_get_alloc_id(place.ptr(), 0) {
            Ok(data) => {
                // Unlike SB, we *do* a proper retag for size 0 if can identify the allocation.
                // After all, the pointer may be lazily initialized outside this initial range.
                data
            }
            Err(_) => {
                assert_eq!(ptr_size, Size::ZERO); // we did the deref check above, size has to be 0 here
                // This pointer doesn't come with an AllocId, so there's no
                // memory to do retagging in.
                trace!(
                    "reborrow of size 0: reference {:?} derived from {:?} (pointee {})",
                    new_tag,
                    place.ptr(),
                    place.layout.ty,
                );
                log_creation(this, None)?;
                // Keep original provenance.
                return interp_ok(place.ptr().provenance);
            }
        };
        log_creation(this, Some((alloc_id, base_offset, parent_prov)))?;

        let orig_tag = match parent_prov {
            ProvenanceExtra::Wildcard => return interp_ok(place.ptr().provenance), // TODO: handle wildcard pointers
            ProvenanceExtra::Concrete(tag) => tag,
        };

        trace!(
            "reborrow: reference {:?} derived from {:?} (pointee {}): {:?}, size {}",
            new_tag,
            orig_tag,
            place.layout.ty,
            interpret::Pointer::new(alloc_id, base_offset),
            ptr_size.bytes()
        );

        if let Some(protect) = new_perm.protector {
            // We register the protection in two different places.
            // This makes creating a protector slower, but checking whether a tag
            // is protected faster.
            // The frame-local list is what gets drained when the call returns.
            this.frame_mut()
                .extra
                .borrow_tracker
                .as_mut()
                .unwrap()
                .protected_tags
                .push((alloc_id, new_tag));
            // The global map is what access checks consult.
            this.machine
                .borrow_tracker
                .as_mut()
                .expect("We should have borrow tracking data")
                .get_mut()
                .protected_tags
                .insert(new_tag, protect);
        }

        let alloc_kind = this.get_alloc_info(alloc_id).kind;
        if !matches!(alloc_kind, AllocKind::LiveData) {
            assert_eq!(ptr_size, Size::ZERO); // we did the deref check above, size has to be 0 here
            // There's not actually any bytes here where accesses could even be tracked.
            // Just produce the new provenance, nothing else to do.
            return interp_ok(Some(Provenance::Concrete { alloc_id, tag: new_tag }));
        }

        let span = this.machine.current_span();

        // When adding a new node, the SIFA of its parents needs to be updated, potentially across
        // the entire memory range. For the parts that are being accessed below, the access itself
        // trivially takes care of that. However, we have to do some more work to also deal with the
        // parts that are not being accessed. Specifically what we do is that we call
        // `update_last_accessed_after_retag` on the SIFA of the permission set for the part of
        // memory outside `perm_map` -- so that part is definitely taken care of. The remaining
        // concern is the part of memory that is in the range of `perms_map`, but not accessed
        // below. There we have two cases:
        // * If the type is `!Freeze`, then the non-accessed part uses `nonfreeze_perm`, so the
        //   `nonfreeze_perm` initialized parts are also fine. We enforce the `freeze_perm` parts to
        //   be accessed via the assert below, and thus everything is taken care of.
        // * If the type is `Freeze`, then `freeze_perm` is used everywhere (both inside and outside
        //   the initial range), and we update everything to have the `freeze_perm`'s SIFA, so there
        //   are no issues. (And this assert below is not actually needed in this case).
        assert!(new_perm.freeze_access);

        let protected = new_perm.protector.is_some();
        let precise_interior_mut = this
            .machine
            .borrow_tracker
            .as_mut()
            .unwrap()
            .get_mut()
            .borrow_tracker_method
            .get_tree_borrows_params()
            .precise_interior_mut;

        // Compute initial "inside" permissions.
        let loc_state = |frozen: bool| -> LocationState {
            let (perm, access) = if frozen {
                (new_perm.freeze_perm, new_perm.freeze_access)
            } else {
                (new_perm.nonfreeze_perm, new_perm.nonfreeze_access)
            };
            let sifa = perm.strongest_idempotent_foreign_access(protected);
            if access {
                LocationState::new_accessed(perm, sifa)
            } else {
                LocationState::new_non_accessed(perm, sifa)
            }
        };
        // Build the per-location initial state for the retagged range.
        let perms_map = if !precise_interior_mut {
            // For `!Freeze` types, just pretend the entire thing is an `UnsafeCell`.
            let ty_is_freeze = place.layout.ty.is_freeze(*this.tcx, this.typing_env());
            let state = loc_state(ty_is_freeze);
            DedupRangeMap::new(ptr_size, state)
        } else {
            // The initial state will be overwritten by the visitor below.
            let mut perms_map: DedupRangeMap<LocationState> = DedupRangeMap::new(
                ptr_size,
                LocationState::new_accessed(
                    Permission::new_disabled(),
                    IdempotentForeignAccess::None,
                ),
            );
            // Walk the type to find which byte ranges are frozen vs inside `UnsafeCell`.
            this.visit_freeze_sensitive(place, ptr_size, |range, frozen| {
                let state = loc_state(frozen);
                for (_loc_range, loc) in perms_map.iter_mut(range.start, range.size) {
                    *loc = state;
                }
                interp_ok(())
            })?;
            perms_map
        };

        let alloc_extra = this.get_alloc_extra(alloc_id)?;
        let mut tree_borrows = alloc_extra.borrow_tracker_tb().borrow_mut();

        for (perm_range, perm) in perms_map.iter_all() {
            if perm.is_accessed() {
                // Some reborrows incur a read access to the parent.
                // Adjust range to be relative to allocation start (rather than to `place`).
                let range_in_alloc = AllocRange {
                    start: Size::from_bytes(perm_range.start) + base_offset,
                    size: Size::from_bytes(perm_range.end - perm_range.start),
                };

                tree_borrows.perform_access(
                    orig_tag,
                    Some((range_in_alloc, AccessKind::Read, diagnostics::AccessCause::Reborrow)),
                    this.machine.borrow_tracker.as_ref().unwrap(),
                    alloc_id,
                    this.machine.current_span(),
                )?;

                // Also inform the data race model (but only if any bytes are actually affected).
                if range_in_alloc.size.bytes() > 0 {
                    if let Some(data_race) = alloc_extra.data_race.as_vclocks_ref() {
                        data_race.read(
                            alloc_id,
                            range_in_alloc,
                            NaReadType::Retag,
                            Some(place.layout.ty),
                            &this.machine,
                        )?
                    }
                }
            }
        }

        // Record the parent-child pair in the tree.
        tree_borrows.new_child(
            base_offset,
            orig_tag,
            new_tag,
            perms_map,
            new_perm.outside_perm,
            protected,
            span,
        )?;
        drop(tree_borrows);

        interp_ok(Some(Provenance::Concrete { alloc_id, tag: new_tag }))
    }

    /// Compute the reborrow size for `place`, allocate a fresh tag, and apply
    /// `tb_reborrow`; returns `place` with its provenance replaced by the new one.
    fn tb_retag_place(
        &mut self,
        place: &MPlaceTy<'tcx>,
        new_perm: NewPermission,
    ) -> InterpResult<'tcx, MPlaceTy<'tcx>> {
        let this = self.eval_context_mut();

        // Determine the size of the reborrow.
        // For most types this is the entire size of the place, however
        // - when `extern type` is involved we use the size of the known prefix,
        // - if the pointer is not reborrowed (raw pointer) then we override the size
        //   to do a zero-length reborrow.
        let reborrow_size =
            this.size_and_align_of_val(place)?.map(|(size, _)| size).unwrap_or(place.layout.size);
        trace!("Creating new permission: {:?} with size {:?}", new_perm, reborrow_size);

        // This new tag is not guaranteed to actually be used.
        //
        // If you run out of tags, consider the following optimization: adjust `tb_reborrow`
        // so that rather than taking as input a fresh tag and deciding whether it uses this
        // one or the parent it instead just returns whether a new tag should be created.
        // This will avoid creating tags than end up never being used.
        let new_tag = this.machine.borrow_tracker.as_mut().unwrap().get_mut().new_ptr();

        // Compute the actual reborrow.
        let new_prov = this.tb_reborrow(place, reborrow_size, new_perm, new_tag)?;

        // Adjust place.
        // (If the closure gets called, that means the old provenance was `Some`, and hence the new
        // one must also be `Some`.)
        interp_ok(place.clone().map_provenance(|_| new_prov.unwrap()))
    }

    /// Retags an individual pointer, returning the retagged version.
    fn tb_retag_reference(
        &mut self,
        val: &ImmTy<'tcx>,
        new_perm: NewPermission,
    ) -> InterpResult<'tcx, ImmTy<'tcx>> {
        let this = self.eval_context_mut();
        let place = this.ref_to_mplace(val)?;
        let new_place = this.tb_retag_place(&place, new_perm)?;
        interp_ok(ImmTy::from_immediate(new_place.to_ref(this), val.layout))
    }
}
459
impl<'tcx> EvalContextExt<'tcx> for crate::MiriInterpCx<'tcx> {}
pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> {
    /// Retag a pointer. References get their policy from [`NewPermission::new`];
    /// raw pointers are never reborrowed.
    fn tb_retag_ptr_value(
        &mut self,
        kind: RetagKind,
        val: &ImmTy<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx>> {
        let this = self.eval_context_mut();
        let new_perm = match val.layout.ty.kind() {
            &ty::Ref(_, pointee, mutability) =>
                NewPermission::new(pointee, Some(mutability), kind, this),
            // Raw pointers and non-pointer values: nothing to retag.
            _ => None,
        };
        if let Some(new_perm) = new_perm {
            this.tb_retag_reference(val, new_perm)
        } else {
            interp_ok(val.clone())
        }
    }

    /// Retag all pointers that are stored in this place.
    fn tb_retag_place_contents(
        &mut self,
        kind: RetagKind,
        place: &PlaceTy<'tcx>,
    ) -> InterpResult<'tcx> {
        let this = self.eval_context_mut();
        let options = this.machine.borrow_tracker.as_mut().unwrap().get_mut();
        let retag_fields = options.retag_fields;
        let mut visitor = RetagVisitor { ecx: this, kind, retag_fields };
        return visitor.visit_value(place);

        // The actual visitor.
        struct RetagVisitor<'ecx, 'tcx> {
            ecx: &'ecx mut MiriInterpCx<'tcx>,
            kind: RetagKind,
            retag_fields: RetagFields,
        }
        impl<'ecx, 'tcx> RetagVisitor<'ecx, 'tcx> {
            /// Read the pointer stored at `place`, retag it, and write it back.
            /// A `None` permission means this pointer is not retagged.
            #[inline(always)] // yes this helps in our benchmarks
            fn retag_ptr_inplace(
                &mut self,
                place: &PlaceTy<'tcx>,
                new_perm: Option<NewPermission>,
            ) -> InterpResult<'tcx> {
                if let Some(new_perm) = new_perm {
                    let val = self.ecx.read_immediate(&self.ecx.place_to_op(place)?)?;
                    let val = self.ecx.tb_retag_reference(&val, new_perm)?;
                    self.ecx.write_immediate(*val, place)?;
                }
                interp_ok(())
            }
        }
        impl<'ecx, 'tcx> ValueVisitor<'tcx, MiriMachine<'tcx>> for RetagVisitor<'ecx, 'tcx> {
            type V = PlaceTy<'tcx>;

            #[inline(always)]
            fn ecx(&self) -> &MiriInterpCx<'tcx> {
                self.ecx
            }

            /// Regardless of how `Unique` is handled, Boxes are always reborrowed.
            /// When `Unique` is also reborrowed, then it behaves exactly like `Box`
            /// except for the fact that `Box` has a non-zero-sized reborrow.
            fn visit_box(&mut self, box_ty: Ty<'tcx>, place: &PlaceTy<'tcx>) -> InterpResult<'tcx> {
                // Only boxes for the global allocator get any special treatment.
                if box_ty.is_box_global(*self.ecx.tcx) {
                    let pointee = place.layout.ty.builtin_deref(true).unwrap();
                    let new_perm =
                        NewPermission::new(pointee, /* not a ref */ None, self.kind, self.ecx);
                    self.retag_ptr_inplace(place, new_perm)?;
                }
                interp_ok(())
            }

            fn visit_value(&mut self, place: &PlaceTy<'tcx>) -> InterpResult<'tcx> {
                // If this place is smaller than a pointer, we know that it can't contain any
                // pointers we need to retag, so we can stop recursion early.
                // This optimization is crucial for ZSTs, because they can contain way more fields
                // than we can ever visit.
                if place.layout.is_sized() && place.layout.size < self.ecx.pointer_size() {
                    return interp_ok(());
                }

                // Check the type of this value to see what to do with it (retag, or recurse).
                match place.layout.ty.kind() {
                    &ty::Ref(_, pointee, mutability) => {
                        let new_perm =
                            NewPermission::new(pointee, Some(mutability), self.kind, self.ecx);
                        self.retag_ptr_inplace(place, new_perm)?;
                    }
                    ty::RawPtr(_, _) => {
                        // We definitely do *not* want to recurse into raw pointers -- wide raw
                        // pointers have fields, and for dyn Trait pointees those can have reference
                        // type!
                        // We also do not want to reborrow them.
                    }
                    ty::Adt(adt, _) if adt.is_box() => {
                        // Recurse for boxes, they require some tricky handling and will end up in `visit_box` above.
                        // (Yes this means we technically also recursively retag the allocator itself
                        // even if field retagging is not enabled. *shrug*)
                        self.walk_value(place)?;
                    }
                    _ => {
                        // Not a reference/pointer/box. Only recurse if configured appropriately.
                        let recurse = match self.retag_fields {
                            RetagFields::No => false,
                            RetagFields::Yes => true,
                            RetagFields::OnlyScalar => {
                                // Matching `ArgAbi::new` at the time of writing, only fields of
                                // `Scalar` and `ScalarPair` ABI are considered.
                                matches!(
                                    place.layout.backend_repr,
                                    BackendRepr::Scalar(..) | BackendRepr::ScalarPair(..)
                                )
                            }
                        };
                        if recurse {
                            self.walk_value(place)?;
                        }
                    }
                }
                interp_ok(())
            }
        }
    }

    /// Protect a place so that it cannot be used any more for the duration of the current function
    /// call.
    ///
    /// This is used to ensure soundness of in-place function argument/return passing.
    fn tb_protect_place(&mut self, place: &MPlaceTy<'tcx>) -> InterpResult<'tcx, MPlaceTy<'tcx>> {
        let this = self.eval_context_mut();

        // Retag it. With protection! That is the entire point.
        let new_perm = NewPermission {
            // A protected `Reserved` can never be `ReservedIM`, so interior
            // mutability does not matter here: the frozen part, the non-frozen
            // part, and memory outside the range all use the same `ReservedFrz`
            // permission, and both parts get an initial access.
            freeze_perm: Permission::new_reserved_frz(),
            freeze_access: true,
            nonfreeze_perm: Permission::new_reserved_frz(),
            nonfreeze_access: true,
            outside_perm: Permission::new_reserved_frz(),
            protector: Some(ProtectorKind::StrongProtector),
        };
        this.tb_retag_place(place, new_perm)
    }

    /// Mark the given tag as exposed. It was found on a pointer with the given AllocId.
    fn tb_expose_tag(&self, alloc_id: AllocId, tag: BorTag) -> InterpResult<'tcx> {
        let this = self.eval_context_ref();

        // Function pointers and dead objects don't have an alloc_extra so we ignore them.
        // This is okay because accessing them is UB anyway, no need for any Tree Borrows checks.
        // NOT using `get_alloc_extra_mut` since this might be a read-only allocation!
        let kind = this.get_alloc_info(alloc_id).kind;
        match kind {
            AllocKind::LiveData => {
                // This should have alloc_extra data, but `get_alloc_extra` can still fail
                // if converting this alloc_id from a global to a local one
                // uncovers a non-supported `extern static`.
                let alloc_extra = this.get_alloc_extra(alloc_id)?;
                trace!("Tree Borrows tag {tag:?} exposed in {alloc_id:?}");
                alloc_extra.borrow_tracker_tb().borrow_mut().expose_tag(tag);
            }
            AllocKind::Function | AllocKind::VTable | AllocKind::TypeId | AllocKind::Dead => {
                // No tree borrows on these allocations.
            }
        }
        interp_ok(())
    }

    /// Display the tree.
    fn print_tree(&mut self, alloc_id: AllocId, show_unnamed: bool) -> InterpResult<'tcx> {
        let this = self.eval_context_mut();
        let alloc_extra = this.get_alloc_extra(alloc_id)?;
        let tree_borrows = alloc_extra.borrow_tracker_tb().borrow();
        let borrow_tracker = &this.machine.borrow_tracker.as_ref().unwrap().borrow();
        tree_borrows.print_tree(&borrow_tracker.protected_tags, show_unnamed)
    }

    /// Give a name to the pointer, usually the name it has in the source code (for debugging).
    /// The name given is `name` and the pointer that receives it is the `nth_parent`
    /// of `ptr` (with 0 representing `ptr` itself)
    fn tb_give_pointer_debug_name(
        &mut self,
        ptr: Pointer,
        nth_parent: u8,
        name: &str,
    ) -> InterpResult<'tcx> {
        let this = self.eval_context_mut();
        let (tag, alloc_id) = match ptr.provenance {
            Some(Provenance::Concrete { tag, alloc_id }) => (tag, alloc_id),
            _ => {
                // Without a concrete tag there is no tree node to attach the name to.
                eprintln!("Can't give the name {name} to Wildcard pointer");
                return interp_ok(());
            }
        };
        let alloc_extra = this.get_alloc_extra(alloc_id)?;
        let mut tree_borrows = alloc_extra.borrow_tracker_tb().borrow_mut();
        tree_borrows.give_pointer_debug_name(tag, nth_parent, name)
    }
}
666}