rustc_const_eval/interpret/stack.rs
1//! Manages the low-level pushing and popping of stack frames and the (de)allocation of local variables.
2//! For handling of argument passing and return values, see the `call` module.
3use std::cell::Cell;
4use std::{fmt, mem};
5
6use either::{Either, Left, Right};
7use rustc_hir as hir;
8use rustc_hir::definitions::DefPathData;
9use rustc_index::IndexVec;
10use rustc_middle::ty::layout::TyAndLayout;
11use rustc_middle::ty::{self, Ty, TyCtxt};
12use rustc_middle::{bug, mir};
13use rustc_mir_dataflow::impls::always_storage_live_locals;
14use rustc_span::Span;
15use tracing::field::Empty;
16use tracing::{info_span, instrument, trace};
17
18use super::{
19 AllocId, CtfeProvenance, Immediate, InterpCx, InterpResult, Machine, MemPlace, MemPlaceMeta,
20 MemoryKind, Operand, PlaceTy, Pointer, Provenance, ReturnAction, Scalar, from_known_layout,
21 interp_ok, throw_ub, throw_unsup,
22};
23use crate::errors;
24
// The PhantomData exists to prevent this type from being `Send`. If it were sent across a thread
// boundary and dropped in the other thread, it would exit the span in the other thread.
struct SpanGuard(tracing::Span, std::marker::PhantomData<*const u8>);
28
impl SpanGuard {
    /// By default a `SpanGuard` does nothing.
    fn new() -> Self {
        Self(tracing::Span::none(), std::marker::PhantomData)
    }

    /// If a span is entered, we exit the previous span (if any, normally none) and enter the
    /// new span. This is mainly so we don't have to use `Option` for the `tracing_span` field of
    /// `Frame` by creating a dummy span to begin with and then entering it once the frame has
    /// been pushed.
    fn enter(&mut self, span: tracing::Span) {
        // This executes the destructor on the previous instance of `SpanGuard`, ensuring that
        // we never enter or exit more spans than vice versa. Unless you `mem::leak`, then we
        // can't protect the tracing stack, but that'll just lead to weird logging, no actual
        // problems.
        *self = Self(span, std::marker::PhantomData);
        // Manually enter the new span via its subscriber (a no-op for `Span::none()`).
        self.0.with_subscriber(|(id, dispatch)| {
            dispatch.enter(id);
        });
    }
}
50
impl Drop for SpanGuard {
    fn drop(&mut self) {
        // Exit the span when the guard (and hence the frame holding it) is dropped, keeping
        // span entry/exit balanced with frame push/pop.
        self.0.with_subscriber(|(id, dispatch)| {
            dispatch.exit(id);
        });
    }
}
58
/// A stack frame.
pub struct Frame<'tcx, Prov: Provenance = CtfeProvenance, Extra = ()> {
    ////////////////////////////////////////////////////////////////////////////////
    // Function and callsite information
    ////////////////////////////////////////////////////////////////////////////////
    /// The MIR for the function called on this frame.
    pub(super) body: &'tcx mir::Body<'tcx>,

    /// The def_id and args of the current function.
    pub(super) instance: ty::Instance<'tcx>,

    /// Extra data for the machine.
    pub extra: Extra,

    ////////////////////////////////////////////////////////////////////////////////
    // Return place and locals
    ////////////////////////////////////////////////////////////////////////////////
    /// Where to continue when returning from this function.
    return_cont: ReturnContinuation,

    /// The location where the result of the current stack frame should be written to,
    /// and its layout in the caller. This place is to be interpreted relative to the
    /// *caller's* stack frame. We use a `PlaceTy` instead of an `MPlaceTy` since this
    /// avoids having to move *all* return places into Miri's memory.
    pub return_place: PlaceTy<'tcx, Prov>,

    /// The list of locals for this stack frame, stored in order as
    /// `[return_ptr, arguments..., variables..., temporaries...]`.
    /// The locals are stored as `Option<Value>`s.
    /// `None` represents a local that is currently dead, while a live local
    /// can either directly contain `Scalar` or refer to some part of an `Allocation`.
    ///
    /// Do *not* access this directly; always go through the machine hook!
    pub locals: IndexVec<mir::Local, LocalState<'tcx, Prov>>,

    /// The span of the `tracing` crate is stored here.
    /// When the guard is dropped, the span is exited. This gives us
    /// a full stack trace on all tracing statements.
    tracing_span: SpanGuard,

    ////////////////////////////////////////////////////////////////////////////////
    // Current position within the function
    ////////////////////////////////////////////////////////////////////////////////
    /// `Left(loc)` means we are executing the statement/terminator at `loc` in `body`.
    /// If this is `Right`, we are not currently executing any particular statement in
    /// this frame (can happen e.g. during frame initialization, and during unwinding on
    /// frames without cleanup code); the stored `Span` is then used for error reporting.
    ///
    /// Needs to be public because ConstProp does unspeakable things to it.
    pub(super) loc: Either<mir::Location, Span>,
}
109
/// Where and how to continue when returning/unwinding from the current function.
#[derive(Clone, Copy, Eq, PartialEq, Debug)] // Miri debug-prints these
pub enum ReturnContinuation {
    /// Jump to the next block in the caller, or cause UB if None (that's a function
    /// that may never return).
    /// `ret` stores the block we jump to on a normal return, while `unwind`
    /// stores the block used for cleanup during unwinding.
    Goto { ret: Option<mir::BasicBlock>, unwind: mir::UnwindAction },
    /// The root frame of the stack: nowhere else to jump to, so we stop.
    /// `cleanup` says whether locals are deallocated. Static computation
    /// wants them leaked to intern what they need (and just throw away
    /// the entire `ecx` when it is done).
    /// See `InterpCx::pop_stack_frame_raw` for where `cleanup` is honored.
    Stop { cleanup: bool },
}
124
/// Return type of [`InterpCx::pop_stack_frame_raw`].
pub struct StackPopInfo<'tcx, Prov: Provenance> {
    /// Additional information about the action to be performed when returning from the popped
    /// stack frame. When the popped frame skipped cleanup, this is `ReturnAction::NoCleanup`.
    pub return_action: ReturnAction,

    /// [`return_cont`](Frame::return_cont) of the popped stack frame.
    pub return_cont: ReturnContinuation,

    /// [`return_place`](Frame::return_place) of the popped stack frame.
    pub return_place: PlaceTy<'tcx, Prov>,
}
137
/// State of a local variable including a memoized layout
#[derive(Clone)]
pub struct LocalState<'tcx, Prov: Provenance = CtfeProvenance> {
    /// The current value of this local (dead, or live with an operand).
    value: LocalValue<Prov>,
    /// Don't modify if `Some`, this is only used to prevent computing the layout twice.
    /// Avoids computing the layout of locals that are never actually initialized.
    layout: Cell<Option<TyAndLayout<'tcx>>>,
}
146
147impl<Prov: Provenance> std::fmt::Debug for LocalState<'_, Prov> {
148 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
149 f.debug_struct("LocalState")
150 .field("value", &self.value)
151 .field("ty", &self.layout.get().map(|l| l.ty))
152 .finish()
153 }
154}
155
/// Current value of a local variable
///
/// This does not store the type of the local; the type is given by `body.local_decls` and can never
/// change, so by not storing here we avoid having to maintain that as an invariant.
#[derive(Copy, Clone, Debug)] // Miri debug-prints these
pub(super) enum LocalValue<Prov: Provenance = CtfeProvenance> {
    /// This local is not currently alive, and cannot be used at all.
    /// (Accessing it raises `UB(DeadLocal)`, see `LocalState::access`.)
    Dead,
    /// A normal, live local.
    /// Mostly for convenience, we re-use the `Operand` type here.
    /// This is an optimization over just always having a pointer here;
    /// we can thus avoid doing an allocation when the local just stores
    /// immediate values *and* never has its address taken.
    Live(Operand<Prov>),
}
171
172impl<'tcx, Prov: Provenance> LocalState<'tcx, Prov> {
173 pub fn make_live_uninit(&mut self) {
174 self.value = LocalValue::Live(Operand::Immediate(Immediate::Uninit));
175 }
176
177 /// This is a hack because Miri needs a way to visit all the provenance in a `LocalState`
178 /// without having a layout or `TyCtxt` available, and we want to keep the `Operand` type
179 /// private.
180 pub fn as_mplace_or_imm(
181 &self,
182 ) -> Option<Either<(Pointer<Option<Prov>>, MemPlaceMeta<Prov>), Immediate<Prov>>> {
183 match self.value {
184 LocalValue::Dead => None,
185 LocalValue::Live(Operand::Indirect(mplace)) => Some(Left((mplace.ptr, mplace.meta))),
186 LocalValue::Live(Operand::Immediate(imm)) => Some(Right(imm)),
187 }
188 }
189
190 /// Read the local's value or error if the local is not yet live or not live anymore.
191 #[inline(always)]
192 pub(super) fn access(&self) -> InterpResult<'tcx, &Operand<Prov>> {
193 match &self.value {
194 LocalValue::Dead => throw_ub!(DeadLocal), // could even be "invalid program"?
195 LocalValue::Live(val) => interp_ok(val),
196 }
197 }
198
199 /// Overwrite the local. If the local can be overwritten in place, return a reference
200 /// to do so; otherwise return the `MemPlace` to consult instead.
201 #[inline(always)]
202 pub(super) fn access_mut(&mut self) -> InterpResult<'tcx, &mut Operand<Prov>> {
203 match &mut self.value {
204 LocalValue::Dead => throw_ub!(DeadLocal), // could even be "invalid program"?
205 LocalValue::Live(val) => interp_ok(val),
206 }
207 }
208}
209
/// What we store about a frame in an interpreter backtrace.
#[derive(Clone, Debug)]
pub struct FrameInfo<'tcx> {
    /// The function this backtrace frame belongs to.
    pub instance: ty::Instance<'tcx>,
    /// The source location within that function.
    pub span: Span,
}
216
217// FIXME: only used by miri, should be removed once translatable.
218impl<'tcx> fmt::Display for FrameInfo<'tcx> {
219 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
220 ty::tls::with(|tcx| {
221 if tcx.def_key(self.instance.def_id()).disambiguated_data.data == DefPathData::Closure {
222 write!(f, "inside closure")
223 } else {
224 // Note: this triggers a `must_produce_diag` state, which means that if we ever
225 // get here we must emit a diagnostic. We should never display a `FrameInfo` unless
226 // we actually want to emit a warning or error to the user.
227 write!(f, "inside `{}`", self.instance)
228 }
229 })
230 }
231}
232
233impl<'tcx> FrameInfo<'tcx> {
234 pub fn as_note(&self, tcx: TyCtxt<'tcx>) -> errors::FrameNote {
235 let span = self.span;
236 if tcx.def_key(self.instance.def_id()).disambiguated_data.data == DefPathData::Closure {
237 errors::FrameNote {
238 where_: "closure",
239 span,
240 instance: String::new(),
241 times: 0,
242 has_label: false,
243 }
244 } else {
245 let instance = format!("{}", self.instance);
246 // Note: this triggers a `must_produce_diag` state, which means that if we ever get
247 // here we must emit a diagnostic. We should never display a `FrameInfo` unless we
248 // actually want to emit a warning or error to the user.
249 errors::FrameNote { where_: "instance", span, instance, times: 0, has_label: false }
250 }
251 }
252}
253
254impl<'tcx, Prov: Provenance> Frame<'tcx, Prov> {
255 pub fn with_extra<Extra>(self, extra: Extra) -> Frame<'tcx, Prov, Extra> {
256 Frame {
257 body: self.body,
258 instance: self.instance,
259 return_cont: self.return_cont,
260 return_place: self.return_place,
261 locals: self.locals,
262 loc: self.loc,
263 extra,
264 tracing_span: self.tracing_span,
265 }
266 }
267}
268
269impl<'tcx, Prov: Provenance, Extra> Frame<'tcx, Prov, Extra> {
270 /// Get the current location within the Frame.
271 ///
272 /// If this is `Right`, we are not currently executing any particular statement in
273 /// this frame (can happen e.g. during frame initialization, and during unwinding on
274 /// frames without cleanup code).
275 ///
276 /// Used by [priroda](https://github.com/oli-obk/priroda).
277 pub fn current_loc(&self) -> Either<mir::Location, Span> {
278 self.loc
279 }
280
281 pub fn body(&self) -> &'tcx mir::Body<'tcx> {
282 self.body
283 }
284
285 pub fn instance(&self) -> ty::Instance<'tcx> {
286 self.instance
287 }
288
289 /// Return the `SourceInfo` of the current instruction.
290 pub fn current_source_info(&self) -> Option<&mir::SourceInfo> {
291 self.loc.left().map(|loc| self.body.source_info(loc))
292 }
293
294 pub fn current_span(&self) -> Span {
295 match self.loc {
296 Left(loc) => self.body.source_info(loc).span,
297 Right(span) => span,
298 }
299 }
300
301 pub fn lint_root(&self, tcx: TyCtxt<'tcx>) -> Option<hir::HirId> {
302 // We first try to get a HirId via the current source scope,
303 // and fall back to `body.source`.
304 self.current_source_info()
305 .and_then(|source_info| match &self.body.source_scopes[source_info.scope].local_data {
306 mir::ClearCrossCrate::Set(data) => Some(data.lint_root),
307 mir::ClearCrossCrate::Clear => None,
308 })
309 .or_else(|| {
310 let def_id = self.body.source.def_id().as_local();
311 def_id.map(|def_id| tcx.local_def_id_to_hir_id(def_id))
312 })
313 }
314
315 /// Returns the address of the buffer where the locals are stored. This is used by `Place` as a
316 /// sanity check to detect bugs where we mix up which stack frame a place refers to.
317 #[inline(always)]
318 pub(super) fn locals_addr(&self) -> usize {
319 self.locals.raw.as_ptr().addr()
320 }
321
    #[must_use]
    pub fn generate_stacktrace_from_stack(stack: &[Self]) -> Vec<FrameInfo<'tcx>> {
        let mut frames = Vec::new();
        // This deliberately does *not* honor `requires_caller_location` since it is used for much
        // more than just panics.
        for frame in stack.iter().rev() {
            let span = match frame.loc {
                Left(loc) => {
                    // If the stacktrace passes through MIR-inlined source scopes, add them.
                    let mir::SourceInfo { mut span, scope } = *frame.body.source_info(loc);
                    let mut scope_data = &frame.body.source_scopes[scope];
                    while let Some((instance, call_span)) = scope_data.inlined {
                        // Emit a synthetic frame for the inlined callee at the span collected
                        // so far, then continue with the span of its call site.
                        frames.push(FrameInfo { span, instance });
                        span = call_span;
                        // Walk up to the enclosing scope; inlined scopes always have a parent.
                        scope_data = &frame.body.source_scopes[scope_data.parent_scope.unwrap()];
                    }
                    span
                }
                // Not executing a particular statement; use the stored span directly.
                Right(span) => span,
            };
            frames.push(FrameInfo { span, instance: frame.instance });
        }
        trace!("generate stacktrace: {:#?}", frames);
        frames
    }
347}
348
349impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
    /// Very low-level helper that pushes a stack frame without initializing
    /// the arguments or local variables.
    ///
    /// The high-level version of this is `init_stack_frame`.
    #[instrument(skip(self, body, return_place, return_cont), level = "debug")]
    pub(crate) fn push_stack_frame_raw(
        &mut self,
        instance: ty::Instance<'tcx>,
        body: &'tcx mir::Body<'tcx>,
        return_place: &PlaceTy<'tcx, M::Provenance>,
        return_cont: ReturnContinuation,
    ) -> InterpResult<'tcx> {
        trace!("body: {:#?}", body);

        // We can push a `Root` frame if and only if the stack is empty.
        debug_assert_eq!(
            self.stack().is_empty(),
            matches!(return_cont, ReturnContinuation::Stop { .. })
        );

        // First push a stack frame so we have access to `instantiate_from_current_frame` and other
        // `self.frame()`-based functions.
        // All locals start out dead; they are made live via `storage_live` later.
        let dead_local = LocalState { value: LocalValue::Dead, layout: Cell::new(None) };
        let locals = IndexVec::from_elem(dead_local, &body.local_decls);
        let pre_frame = Frame {
            body,
            loc: Right(body.span), // Span used for errors caused during preamble.
            return_cont,
            return_place: return_place.clone(),
            locals,
            instance,
            tracing_span: SpanGuard::new(),
            extra: (),
        };
        // Let the machine attach its `FrameExtra` before the frame becomes visible on the stack.
        let frame = M::init_frame(self, pre_frame)?;
        self.stack_mut().push(frame);

        // Make sure all the constants required by this frame evaluate successfully (post-monomorphization check).
        for &const_ in body.required_consts() {
            let c =
                self.instantiate_from_current_frame_and_normalize_erasing_regions(const_.const_)?;
            c.eval(*self.tcx, self.typing_env, const_.span).map_err(|err| {
                err.emit_note(*self.tcx);
                err
            })?;
        }

        // Finish things up.
        M::after_stack_push(self)?;
        // Now that the preamble is done, point execution at the function's first statement.
        self.frame_mut().loc = Left(mir::Location::START);
        // `tracing_separate_thread` is used to instruct the tracing_chrome [tracing::Layer] in Miri
        // to put the "frame" span on a separate trace thread/line than other spans, to make the
        // visualization in <https://ui.perfetto.dev> easier to interpret. It is set to a value of
        // [tracing::field::Empty] so that other tracing layers (e.g. the logger) will ignore it.
        let span = info_span!("frame", tracing_separate_thread = Empty, frame = %instance);
        self.frame_mut().tracing_span.enter(span);

        interp_ok(())
    }
409
    /// Low-level helper that pops a stack frame from the stack and returns some information about
    /// it.
    ///
    /// This also deallocates locals, if necessary.
    /// `copy_ret_val` gets called after the frame has been taken from the stack but before the locals have been deallocated.
    ///
    /// [`M::before_stack_pop`] and [`M::after_stack_pop`] are called by this function
    /// automatically.
    ///
    /// The high-level version of this is `return_from_current_stack_frame`.
    ///
    /// [`M::before_stack_pop`]: Machine::before_stack_pop
    /// [`M::after_stack_pop`]: Machine::after_stack_pop
    pub(super) fn pop_stack_frame_raw(
        &mut self,
        unwinding: bool,
        copy_ret_val: impl FnOnce(&mut Self, &PlaceTy<'tcx, M::Provenance>) -> InterpResult<'tcx>,
    ) -> InterpResult<'tcx, StackPopInfo<'tcx, M::Provenance>> {
        M::before_stack_pop(self)?;
        let frame =
            self.stack_mut().pop().expect("tried to pop a stack frame, but there were none");

        // Copy return value (unless we are unwinding).
        if !unwinding {
            copy_ret_val(self, &frame.return_place)?;
        }

        let return_cont = frame.return_cont;
        let return_place = frame.return_place.clone();

        // Cleanup: deallocate locals.
        // Usually we want to clean up (deallocate locals), but in a few rare cases we don't.
        // NOTE(review): the frame was already taken off the stack above, so errors raised while
        // deallocating are attributed to the now-current (caller) frame — TODO confirm intended.
        let cleanup = match return_cont {
            ReturnContinuation::Goto { .. } => true,
            ReturnContinuation::Stop { cleanup, .. } => cleanup,
        };

        let return_action = if cleanup {
            // The frame is already moved off the stack, so iterating its locals while mutating
            // `self` (deallocating memory) is fine.
            for local in &frame.locals {
                self.deallocate_local(local.value)?;
            }

            // Call the machine hook, which determines the next steps.
            let return_action = M::after_stack_pop(self, frame, unwinding)?;
            assert_ne!(return_action, ReturnAction::NoCleanup);
            return_action
        } else {
            // We also skip the machine hook when there's no cleanup. This is not a real "pop" anyway.
            ReturnAction::NoCleanup
        };

        interp_ok(StackPopInfo { return_action, return_cont, return_place })
    }
465
466 /// In the current stack frame, mark all locals as live that are not arguments and don't have
467 /// `Storage*` annotations (this includes the return place).
468 pub(crate) fn storage_live_for_always_live_locals(&mut self) -> InterpResult<'tcx> {
469 self.storage_live(mir::RETURN_PLACE)?;
470
471 let body = self.body();
472 let always_live = always_storage_live_locals(body);
473 for local in body.vars_and_temps_iter() {
474 if always_live.contains(local) {
475 self.storage_live(local)?;
476 }
477 }
478 interp_ok(())
479 }
480
    /// Mark `local` as live. For unsized locals, `meta` carries the place metadata and backing
    /// memory is allocated; sized locals become uninitialized immediates. If the local was
    /// already live, its previous contents are deallocated.
    pub fn storage_live_dyn(
        &mut self,
        local: mir::Local,
        meta: MemPlaceMeta<M::Provenance>,
    ) -> InterpResult<'tcx> {
        trace!("{:?} is now live", local);

        // We avoid `ty.is_trivially_sized` since that does something expensive for ADTs.
        fn is_very_trivially_sized(ty: Ty<'_>) -> bool {
            match ty.kind() {
                ty::Infer(ty::IntVar(_) | ty::FloatVar(_))
                | ty::Uint(_)
                | ty::Int(_)
                | ty::Bool
                | ty::Float(_)
                | ty::FnDef(..)
                | ty::FnPtr(..)
                | ty::RawPtr(..)
                | ty::Char
                | ty::Ref(..)
                | ty::Coroutine(..)
                | ty::CoroutineWitness(..)
                | ty::Array(..)
                | ty::Closure(..)
                | ty::CoroutineClosure(..)
                | ty::Never
                | ty::Error(_) => true,

                ty::Str | ty::Slice(_) | ty::Dynamic(_, _, ty::Dyn) | ty::Foreign(..) => false,

                // A tuple is sized iff its last element is (the empty tuple is sized).
                ty::Tuple(tys) => tys.last().is_none_or(|ty| is_very_trivially_sized(*ty)),

                ty::Pat(ty, ..) => is_very_trivially_sized(*ty),

                // We don't want to do any queries, so there is not much we can do with ADTs.
                ty::Adt(..) => false,

                ty::UnsafeBinder(ty) => is_very_trivially_sized(ty.skip_binder()),

                ty::Alias(..) | ty::Param(_) | ty::Placeholder(..) => false,

                ty::Infer(ty::TyVar(_)) => false,

                ty::Bound(..)
                | ty::Infer(ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_)) => {
                    bug!("`is_very_trivially_sized` applied to unexpected type: {}", ty)
                }
            }
        }

        // This is a hot function, we avoid computing the layout when possible.
        // `unsized_` will be `None` for sized types and `Some(layout)` for unsized types.
        let unsized_ = if is_very_trivially_sized(self.body().local_decls[local].ty) {
            None
        } else {
            // We need the layout.
            let layout = self.layout_of_local(self.frame(), local, None)?;
            if layout.is_sized() { None } else { Some(layout) }
        };

        let local_val = LocalValue::Live(if let Some(layout) = unsized_ {
            if !meta.has_meta() {
                throw_unsup!(UnsizedLocal);
            }
            // Need to allocate some memory, since `Immediate::Uninit` cannot be unsized.
            let dest_place = self.allocate_dyn(layout, MemoryKind::Stack, meta)?;
            Operand::Indirect(*dest_place.mplace())
        } else {
            // Just make this an efficient immediate.
            assert!(!meta.has_meta()); // we're dropping the metadata
            // Make sure the machine knows this "write" is happening. (This is important so that
            // races involving local variable allocation can be detected by Miri.)
            M::after_local_write(self, local, /*storage_live*/ true)?;
            // Note that not calling `layout_of` here does have one real consequence:
            // if the type is too big, we'll only notice this when the local is actually initialized,
            // which is a bit too late -- we should ideally notice this already here, when the memory
            // is conceptually allocated. But given how rare that error is and that this is a hot function,
            // we accept this downside for now.
            Operand::Immediate(Immediate::Uninit)
        });

        // If the local is already live, deallocate its old memory.
        let old = mem::replace(&mut self.frame_mut().locals[local].value, local_val);
        self.deallocate_local(old)?;
        interp_ok(())
    }
567
    /// Mark a storage as live, killing the previous content.
    /// Only suitable for sized locals; use [`Self::storage_live_dyn`] with metadata for
    /// unsized ones.
    #[inline(always)]
    pub fn storage_live(&mut self, local: mir::Local) -> InterpResult<'tcx> {
        self.storage_live_dyn(local, MemPlaceMeta::None)
    }
573
574 pub fn storage_dead(&mut self, local: mir::Local) -> InterpResult<'tcx> {
575 assert!(local != mir::RETURN_PLACE, "Cannot make return place dead");
576 trace!("{:?} is now dead", local);
577
578 // If the local is already dead, this is a NOP.
579 let old = mem::replace(&mut self.frame_mut().locals[local].value, LocalValue::Dead);
580 self.deallocate_local(old)?;
581 interp_ok(())
582 }
583
584 fn deallocate_local(&mut self, local: LocalValue<M::Provenance>) -> InterpResult<'tcx> {
585 if let LocalValue::Live(Operand::Indirect(MemPlace { ptr, .. })) = local {
586 // All locals have a backing allocation, even if the allocation is empty
587 // due to the local having ZST type. Hence we can `unwrap`.
588 trace!(
589 "deallocating local {:?}: {:?}",
590 local,
591 // Locals always have a `alloc_id` (they are never the result of a int2ptr).
592 self.dump_alloc(ptr.provenance.unwrap().get_alloc_id().unwrap())
593 );
594 self.deallocate_ptr(ptr, None, MemoryKind::Stack)?;
595 };
596 interp_ok(())
597 }
598
599 /// This is public because it is used by [Aquascope](https://github.com/cognitive-engineering-lab/aquascope/)
600 /// to analyze all the locals in a stack frame.
601 #[inline(always)]
602 pub fn layout_of_local(
603 &self,
604 frame: &Frame<'tcx, M::Provenance, M::FrameExtra>,
605 local: mir::Local,
606 layout: Option<TyAndLayout<'tcx>>,
607 ) -> InterpResult<'tcx, TyAndLayout<'tcx>> {
608 let state = &frame.locals[local];
609 if let Some(layout) = state.layout.get() {
610 return interp_ok(layout);
611 }
612
613 let layout = from_known_layout(self.tcx, self.typing_env, layout, || {
614 let local_ty = frame.body.local_decls[local].ty;
615 let local_ty =
616 self.instantiate_from_frame_and_normalize_erasing_regions(frame, local_ty)?;
617 self.layout_of(local_ty).into()
618 })?;
619
620 // Layouts of locals are requested a lot, so we cache them.
621 state.layout.set(Some(layout));
622 interp_ok(layout)
623 }
624}
625
626impl<'tcx, Prov: Provenance> LocalState<'tcx, Prov> {
627 pub(super) fn print(
628 &self,
629 allocs: &mut Vec<Option<AllocId>>,
630 fmt: &mut std::fmt::Formatter<'_>,
631 ) -> std::fmt::Result {
632 match self.value {
633 LocalValue::Dead => write!(fmt, " is dead")?,
634 LocalValue::Live(Operand::Immediate(Immediate::Uninit)) => {
635 write!(fmt, " is uninitialized")?
636 }
637 LocalValue::Live(Operand::Indirect(mplace)) => {
638 write!(
639 fmt,
640 " by {} ref {:?}:",
641 match mplace.meta {
642 MemPlaceMeta::Meta(meta) => format!(" meta({meta:?})"),
643 MemPlaceMeta::None => String::new(),
644 },
645 mplace.ptr,
646 )?;
647 allocs.extend(mplace.ptr.provenance.map(Provenance::get_alloc_id));
648 }
649 LocalValue::Live(Operand::Immediate(Immediate::Scalar(val))) => {
650 write!(fmt, " {val:?}")?;
651 if let Scalar::Ptr(ptr, _size) = val {
652 allocs.push(ptr.provenance.get_alloc_id());
653 }
654 }
655 LocalValue::Live(Operand::Immediate(Immediate::ScalarPair(val1, val2))) => {
656 write!(fmt, " ({val1:?}, {val2:?})")?;
657 if let Scalar::Ptr(ptr, _size) = val1 {
658 allocs.push(ptr.provenance.get_alloc_id());
659 }
660 if let Scalar::Ptr(ptr, _size) = val2 {
661 allocs.push(ptr.provenance.get_alloc_id());
662 }
663 }
664 }
665
666 Ok(())
667 }
668}