// rustc_const_eval/interpret/step.rs

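//! This module contains the `InterpCx` methods for executing a single step of the
//! interpreter: one MIR statement or terminator at a time.
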
use std::iter;

use either::Either;
use rustc_abi::{FIRST_VARIANT, FieldIdx};
use rustc_data_structures::fx::FxHashSet;
use rustc_index::IndexSlice;
use rustc_middle::ty::{self, Instance, Ty};
use rustc_middle::{bug, mir, span_bug};
use rustc_span::source_map::Spanned;
use rustc_target::callconv::FnAbi;
use tracing::field::Empty;
use tracing::{info, instrument, trace};

use super::{
    FnArg, FnVal, ImmTy, Immediate, InterpCx, InterpResult, Machine, MemPlaceMeta, PlaceTy,
    Projectable, Scalar, interp_ok, throw_ub, throw_unsup_format,
};
use crate::interpret::EnteredTraceSpan;
use crate::{enter_trace_span, util};

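/// The callee, evaluated arguments, and ABI information of a function call, as computed by
/// `eval_callee_and_args` and consumed by the `Call` and `TailCall` terminators.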
struct EvaluatedCalleeAndArgs<'tcx, M: Machine<'tcx>> {
    callee: FnVal<'tcx, M::ExtraFnVal>,
    args: Vec<FnArg<'tcx, M::Provenance>>,
    fn_sig: ty::FnSig<'tcx>,
    fn_abi: &'tcx FnAbi<'tcx, Ty<'tcx>>,
    /// Whether the callee expects an implicit caller-location argument (`#[track_caller]`).
    with_caller_location: bool,
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
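    /// Performs a single execution step: runs the next statement of the current frame, or its
    /// terminator once the statements are exhausted. Returns `true` as long as there are more
    /// things to do.
    ///
    /// A driver loop would call this repeatedly until it returns `false`, e.g. (an illustrative
    /// sketch, not an API defined in this module):
    /// ```ignore (illustrative)
    /// while ecx.step()? {}
    /// ```
    ///
    /// This is marked `#[inline(always)]` to work around adverse codegen when it is called from
    /// a hot loop.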
    #[inline(always)]
    pub fn step(&mut self) -> InterpResult<'tcx, bool> {
        if self.stack().is_empty() {
            return interp_ok(false);
        }

        let Either::Left(loc) = self.frame().loc else {
            // We are unwinding, and this frame has no cleanup work to do; just pop it and
            // continue unwinding.
            trace!("unwinding: skipping frame");
            self.return_from_current_stack_frame(/* unwinding */ true)?;
            return interp_ok(true);
        };
        let basic_block = &self.body().basic_blocks[loc.block];

        if let Some(stmt) = basic_block.statements.get(loc.statement_index) {
            let old_frames = self.frame_idx();
            self.eval_statement(stmt)?;
            // Make sure we are not updating `statement_index` of the wrong frame.
            assert_eq!(old_frames, self.frame_idx());
            // ... and advance to the next statement.
            self.frame_mut().loc.as_mut().left().unwrap().statement_index += 1;
            return interp_ok(true);
        }

        M::before_terminator(self)?;

        let terminator = basic_block.terminator();
        self.eval_terminator(terminator)?;
        if !self.stack().is_empty() {
            if let Either::Left(loc) = self.frame().loc {
                info!("// executing {:?}", loc.block);
            }
        }
        interp_ok(true)
    }

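    /// Runs the interpretation logic for the given MIR statement at the current frame and
    /// statement counter.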
    pub fn eval_statement(&mut self, stmt: &mir::Statement<'tcx>) -> InterpResult<'tcx> {
        let _trace = enter_trace_span!(
            M,
            step::eval_statement,
            stmt = ?stmt.kind,
            span = ?stmt.source_info.span,
            tracing_separate_thread = Empty,
        )
        .or_if_tracing_disabled(|| info!(stmt = ?stmt.kind));

        use rustc_middle::mir::StatementKind::*;

        match &stmt.kind {
            Assign(box (place, rvalue)) => self.eval_rvalue_into_place(rvalue, *place)?,

            SetDiscriminant { place, variant_index } => {
                let dest = self.eval_place(**place)?;
                self.write_discriminant(*variant_index, &dest)?;
            }

            Deinit(place) => {
                let dest = self.eval_place(**place)?;
                self.write_uninit(&dest)?;
            }

            // Mark locals as live or dead.
            StorageLive(local) => {
                self.storage_live(*local)?;
            }

            StorageDead(local) => {
                self.storage_dead(*local)?;
            }

            // `FakeRead` only exists to guide borrowck; it has no operational semantics.
            FakeRead(..) => {}

            // Ask the machine to retag the contents of this place (e.g. for Stacked Borrows).
            Retag(kind, place) => {
                let dest = self.eval_place(**place)?;
                M::retag_place_contents(self, *kind, &dest)?;
            }

            Intrinsic(box intrinsic) => self.eval_nondiverging_intrinsic(intrinsic)?,

            // Evaluate the place expression, without reading from it.
            PlaceMention(box place) => {
                let _ = self.eval_place(*place)?;
            }

            // Type ascriptions only matter for type-checking; they have no operational effect.
            AscribeUserType(..) => {}

            // Coverage statements carry no operational semantics for the interpreter.
            Coverage(..) => {}

            ConstEvalCounter => {
                M::increment_const_eval_counter(self)?;
            }

            // Defined to do nothing.
            Nop => {}

            // Only exists to inform drop-related lints; no operational effect.
            BackwardIncompatibleDropHint { .. } => {}
        }

        interp_ok(())
    }

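    /// Evaluate an assignment statement.
    ///
    /// There is no separate `eval_rvalue` function: to avoid making unnecessary copies, each
    /// rvalue is evaluated directly into the destination place.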
    pub fn eval_rvalue_into_place(
        &mut self,
        rvalue: &mir::Rvalue<'tcx>,
        place: mir::Place<'tcx>,
    ) -> InterpResult<'tcx> {
        let dest = self.eval_place(place)?;

        use rustc_middle::mir::Rvalue::*;
        match *rvalue {
            ThreadLocalRef(did) => {
                let ptr = M::thread_local_static_pointer(self, did)?;
                self.write_pointer(ptr, &dest)?;
            }

            Use(ref operand) => {
                let op = self.eval_operand(operand, Some(dest.layout))?;
                self.copy_op(&op, &dest)?;
            }

            CopyForDeref(place) => {
                let op = self.eval_place_to_op(place, Some(dest.layout))?;
                self.copy_op(&op, &dest)?;
            }

            BinaryOp(bin_op, box (ref left, ref right)) => {
                let layout = util::binop_left_homogeneous(bin_op).then_some(dest.layout);
                let left = self.read_immediate(&self.eval_operand(left, layout)?)?;
                let layout = util::binop_right_homogeneous(bin_op).then_some(left.layout);
                let right = self.read_immediate(&self.eval_operand(right, layout)?)?;
                let result = self.binary_op(bin_op, &left, &right)?;
                assert_eq!(result.layout, dest.layout, "layout mismatch for result of {bin_op:?}");
                self.write_immediate(*result, &dest)?;
            }

            UnaryOp(un_op, ref operand) => {
                let val = self.read_immediate(&self.eval_operand(operand, Some(dest.layout))?)?;
                let result = self.unary_op(un_op, &val)?;
                assert_eq!(result.layout, dest.layout, "layout mismatch for result of {un_op:?}");
                self.write_immediate(*result, &dest)?;
            }

            NullaryOp(null_op, ty) => {
                let ty = self.instantiate_from_current_frame_and_normalize_erasing_regions(ty)?;
                let val = self.nullary_op(null_op, ty)?;
                self.write_immediate(*val, &dest)?;
            }

            Aggregate(box ref kind, ref operands) => {
                self.write_aggregate(kind, operands, &dest)?;
            }

            Repeat(ref operand, _) => {
                self.write_repeat(operand, &dest)?;
            }

            Len(place) => {
                let src = self.eval_place(place)?;
                let len = src.len(self)?;
                self.write_scalar(Scalar::from_target_usize(len, self), &dest)?;
            }

            Ref(_, borrow_kind, place) => {
                let src = self.eval_place(place)?;
                let place = self.force_allocation(&src)?;
                let val = ImmTy::from_immediate(place.to_ref(self), dest.layout);
                // A fresh reference was created, make sure it gets retagged.
                let val = M::retag_ptr_value(
                    self,
                    if borrow_kind.allows_two_phase_borrow() {
                        mir::RetagKind::TwoPhase
                    } else {
                        mir::RetagKind::Default
                    },
                    &val,
                )?;
                self.write_immediate(*val, &dest)?;
            }

            RawPtr(kind, place) => {
                // Figure out whether the base of this place is already a raw pointer
                // (in which case no retagging is needed).
                let place_base_raw = if place.is_indirect_first_projection() {
                    let ty = self.frame().body.local_decls[place.local].ty;
                    ty.is_raw_ptr()
                } else {
                    // Not a deref, so the base cannot be raw.
                    false
                };

                let src = self.eval_place(place)?;
                let place = self.force_allocation(&src)?;
                let mut val = ImmTy::from_immediate(place.to_ref(self), dest.layout);
                if !place_base_raw && !kind.is_fake() {
                    // The place was not already raw (and this is not a fake borrow), so the
                    // freshly created raw pointer needs retagging.
                    val = M::retag_ptr_value(self, mir::RetagKind::Raw, &val)?;
                }
                self.write_immediate(*val, &dest)?;
            }

            ShallowInitBox(ref operand, _) => {
                let src = self.eval_operand(operand, None)?;
                let v = self.read_immediate(&src)?;
                self.write_immediate(*v, &dest)?;
            }

            Cast(cast_kind, ref operand, cast_ty) => {
                let src = self.eval_operand(operand, None)?;
                let cast_ty =
                    self.instantiate_from_current_frame_and_normalize_erasing_regions(cast_ty)?;
                self.cast(&src, cast_kind, cast_ty, &dest)?;
            }

            Discriminant(place) => {
                let op = self.eval_place_to_op(place, None)?;
                let variant = self.read_discriminant(&op)?;
                let discr = self.discriminant_for_variant(op.layout.ty, variant)?;
                self.write_immediate(*discr, &dest)?;
            }

            WrapUnsafeBinder(ref op, _ty) => {
                // Constructing an unsafe binder acts like a transmute, since the operand's
                // layout does not change.
                let op = self.eval_operand(op, None)?;
                self.copy_op_allow_transmute(&op, &dest)?;
            }
        }

        trace!("{:?}", self.dump_place(&dest));

        interp_ok(())
    }

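    /// Writes the aggregate built from `operands` into `dest`.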
    #[instrument(skip(self), level = "trace")]
    fn write_aggregate(
        &mut self,
        kind: &mir::AggregateKind<'tcx>,
        operands: &IndexSlice<FieldIdx, mir::Operand<'tcx>>,
        dest: &PlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        self.write_uninit(dest)?; // make sure all the padding ends up as uninit
        let (variant_index, variant_dest, active_field_index) = match *kind {
            mir::AggregateKind::Adt(_, variant_index, _, _, active_field_index) => {
                let variant_dest = self.project_downcast(dest, variant_index)?;
                (variant_index, variant_dest, active_field_index)
            }
            mir::AggregateKind::RawPtr(..) => {
                // Pointers do not have fields in the usual sense, so the field-projection
                // loop below would not do the right thing here. Instead, assemble the wide
                // pointer immediate directly from its data and metadata operands.
                let [data, meta] = &operands.raw else {
                    bug!("{kind:?} should have 2 operands, had {operands:?}");
                };
                let data = self.eval_operand(data, None)?;
                let data = self.read_pointer(&data)?;
                let meta = self.eval_operand(meta, None)?;
                let meta = if meta.layout.is_zst() {
                    MemPlaceMeta::None
                } else {
                    MemPlaceMeta::Meta(self.read_scalar(&meta)?)
                };
                let ptr_imm = Immediate::new_pointer_with_meta(data, meta, self);
                let ptr = ImmTy::from_immediate(ptr_imm, dest.layout);
                self.copy_op(&ptr, dest)?;
                return interp_ok(());
            }
            _ => (FIRST_VARIANT, dest.clone(), None),
        };
        if active_field_index.is_some() {
            // An active field is only ever set for union initialization, which has exactly
            // one operand.
            assert_eq!(operands.len(), 1);
        }
        for (field_index, operand) in operands.iter_enumerated() {
            let field_index = active_field_index.unwrap_or(field_index);
            let field_dest = self.project_field(&variant_dest, field_index)?;
            let op = self.eval_operand(operand, Some(field_dest.layout))?;
            self.copy_op(&op, &field_dest)?;
        }
        self.write_discriminant(variant_index, dest)
    }

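    /// Repeats `operand` into every element of `dest`, which must have array type.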
    fn write_repeat(
        &mut self,
        operand: &mir::Operand<'tcx>,
        dest: &PlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        let src = self.eval_operand(operand, None)?;
        assert!(src.layout.is_sized());
        let dest = self.force_allocation(&dest)?;
        let length = dest.len(self)?;

        if length == 0 {
            // Nothing to copy... but let's still make sure that `dest` as a place is valid.
            self.get_place_alloc_mut(&dest)?;
        } else {
            // Write the first element.
            let first = self.project_index(&dest, 0)?;
            self.copy_op(&src, &first)?;

            // Copy the rest of the elements by repeatedly duplicating the first one.
            let elem_size = first.layout.size;
            let first_ptr = first.ptr();
            let rest_ptr = first_ptr.wrapping_offset(elem_size, self);
            self.mem_copy_repeatedly(
                first_ptr,
                rest_ptr,
                elem_size,
                length - 1,
                /* nonoverlapping */ true,
            )?;
        }

        interp_ok(())
    }

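    /// Evaluates one argument of a function call. `move_definitely_disjoint` indicates whether
    /// all the `Move` arguments (and the return place) are known to be mutually disjoint, which
    /// allows passing them without first forcing them into memory.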
    fn eval_fn_call_argument(
        &mut self,
        op: &mir::Operand<'tcx>,
        move_definitely_disjoint: bool,
    ) -> InterpResult<'tcx, FnArg<'tcx, M::Provenance>> {
        interp_ok(match op {
            mir::Operand::Copy(_) | mir::Operand::Constant(_) => {
                // Make a regular copy.
                let op = self.eval_operand(op, None)?;
                FnArg::Copy(op)
            }
            mir::Operand::Move(place) => {
                let place = self.eval_place(*place)?;
                if move_definitely_disjoint {
                    // The place is disjoint from everything else involved in this call, so we
                    // can pass it in-place if it is in memory, and as a copy otherwise.
                    let op = self.place_to_op(&place)?;
                    match op.as_mplace_or_imm() {
                        Either::Left(mplace) => FnArg::InPlace(mplace),
                        Either::Right(_imm) => FnArg::Copy(op),
                    }
                } else {
                    // The place might overlap with other arguments or the return place, so
                    // force it into memory and pass it in-place.
                    FnArg::InPlace(self.force_allocation(&place)?)
                }
            }
        })
    }

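    /// Shared part of `Call` and `TailCall`: evaluates the callee, the arguments, and the
    /// signature/ABI information needed to initiate the call.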
    fn eval_callee_and_args(
        &mut self,
        terminator: &mir::Terminator<'tcx>,
        func: &mir::Operand<'tcx>,
        args: &[Spanned<mir::Operand<'tcx>>],
        dest: &mir::Place<'tcx>,
    ) -> InterpResult<'tcx, EvaluatedCalleeAndArgs<'tcx, M>> {
        let func = self.eval_operand(func, None)?;

        // Check whether all the `Move` arguments are definitely disjoint from each other and
        // from the return place. That is certainly the case if they are all based on distinct
        // locals and involve no indirection.
        let move_definitely_disjoint = 'move_definitely_disjoint: {
            let mut previous_locals = FxHashSet::<mir::Local>::default();
            for place in args
                .iter()
                .filter_map(|a| {
                    if let mir::Operand::Move(place) = &a.node { Some(place) } else { None }
                })
                .chain(iter::once(dest))
            {
                if place.is_indirect_first_projection() {
                    // An indirect place could point anywhere.
                    break 'move_definitely_disjoint false;
                }
                if !previous_locals.insert(place.local) {
                    // The same local is used by more than one place.
                    break 'move_definitely_disjoint false;
                }
            }
            true
        };
        let args = args
            .iter()
            .map(|arg| self.eval_fn_call_argument(&arg.node, move_definitely_disjoint))
            .collect::<InterpResult<'tcx, Vec<_>>>()?;

        let fn_sig_binder = {
            let _trace = enter_trace_span!(M, "fn_sig", ty = ?func.layout.ty.kind());
            func.layout.ty.fn_sig(*self.tcx)
        };
        let fn_sig = self.tcx.normalize_erasing_late_bound_regions(self.typing_env, fn_sig_binder);
        let extra_args = &args[fn_sig.inputs().len()..];
        let extra_args =
            self.tcx.mk_type_list_from_iter(extra_args.iter().map(|arg| arg.layout().ty));

        let (callee, fn_abi, with_caller_location) = match *func.layout.ty.kind() {
            ty::FnPtr(..) => {
                let fn_ptr = self.read_pointer(&func)?;
                let fn_val = self.get_ptr_fn(fn_ptr)?;
                (fn_val, self.fn_abi_of_fn_ptr(fn_sig_binder, extra_args)?, false)
            }
            ty::FnDef(def_id, args) => {
                let instance = self.resolve(def_id, args)?;
                (
                    FnVal::Instance(instance),
                    self.fn_abi_of_instance(instance, extra_args)?,
                    instance.def.requires_caller_location(*self.tcx),
                )
            }
            _ => {
                span_bug!(terminator.source_info.span, "invalid callee of type {}", func.layout.ty)
            }
        };

        interp_ok(EvaluatedCalleeAndArgs { callee, args, fn_sig, fn_abi, with_caller_location })
    }

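    /// Runs the interpretation logic for the given MIR terminator, advancing the current
    /// frame's location (and pushing or popping frames) as appropriate.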
    fn eval_terminator(&mut self, terminator: &mir::Terminator<'tcx>) -> InterpResult<'tcx> {
        let _trace = enter_trace_span!(
            M,
            step::eval_terminator,
            terminator = ?terminator.kind,
            span = ?terminator.source_info.span,
            tracing_separate_thread = Empty,
        )
        .or_if_tracing_disabled(|| info!(terminator = ?terminator.kind));

        use rustc_middle::mir::TerminatorKind::*;
        match terminator.kind {
            Return => {
                self.return_from_current_stack_frame(/* unwinding */ false)?
            }

            Goto { target } => self.go_to_block(target),

            SwitchInt { ref discr, ref targets } => {
                let discr = self.read_immediate(&self.eval_operand(discr, None)?)?;
                trace!("SwitchInt({:?})", *discr);

                // Branch to the `otherwise` target by default, if no other match is found.
                let mut target_block = targets.otherwise();

                for (const_int, target) in targets.iter() {
                    // Compare using MIR `BinOp::Eq`, to also support pointer values.
                    let res = self.binary_op(
                        mir::BinOp::Eq,
                        &discr,
                        &ImmTy::from_uint(const_int, discr.layout),
                    )?;
                    if res.to_scalar().to_bool()? {
                        target_block = target;
                        break;
                    }
                }

                self.go_to_block(target_block);
            }

            Call {
                ref func,
                ref args,
                destination,
                target,
                unwind,
                call_source: _,
                fn_span: _,
            } => {
                let old_stack = self.frame_idx();
                let old_loc = self.frame().loc;

                let EvaluatedCalleeAndArgs { callee, args, fn_sig, fn_abi, with_caller_location } =
                    self.eval_callee_and_args(terminator, func, args, &destination)?;

                let destination = self.eval_place(destination)?;
                self.init_fn_call(
                    callee,
                    (fn_sig.abi, fn_abi),
                    &args,
                    with_caller_location,
                    &destination,
                    target,
                    if fn_abi.can_unwind { unwind } else { mir::UnwindAction::Unreachable },
                )?;
                // Sanity-check that `init_fn_call` either pushed a new frame or did a jump to
                // another block.
                if self.frame_idx() == old_stack && self.frame().loc == old_loc {
                    span_bug!(terminator.source_info.span, "evaluating this call made no progress");
                }
            }

            TailCall { ref func, ref args, fn_span: _ } => {
                let old_frame_idx = self.frame_idx();

                let EvaluatedCalleeAndArgs { callee, args, fn_sig, fn_abi, with_caller_location } =
                    self.eval_callee_and_args(terminator, func, args, &mir::Place::return_place())?;

                self.init_fn_tail_call(callee, (fn_sig.abi, fn_abi), &args, with_caller_location)?;

                if self.frame_idx() != old_frame_idx {
                    span_bug!(
                        terminator.source_info.span,
                        "evaluating this tail call pushed a new stack frame"
                    );
                }
            }

            Drop { place, target, unwind, replace: _, drop, async_fut } => {
                assert!(
                    async_fut.is_none() && drop.is_none(),
                    "Async Drop must be expanded or reset to sync in runtime MIR"
                );
                let place = self.eval_place(place)?;
                let instance = {
                    let _trace =
                        enter_trace_span!(M, resolve::resolve_drop_in_place, ty = ?place.layout.ty);
                    Instance::resolve_drop_in_place(*self.tcx, place.layout.ty)
                };
                if let ty::InstanceKind::DropGlue(_, None) = instance.def {
                    // We enter this branch if and only if the dropped type has no drop glue
                    // whatsoever, which can happen as a result of monomorphizing a drop of a
                    // generic. To keep generic and non-generic code behaving the same, we do
                    // nothing here.
                    self.go_to_block(target);
                    return interp_ok(());
                }
                trace!("TerminatorKind::drop: {:?}, type {}", place, place.layout.ty);
                self.init_drop_in_place_call(&place, instance, target, unwind)?;
            }

            Assert { ref cond, expected, ref msg, target, unwind } => {
                let ignored =
                    M::ignore_optional_overflow_checks(self) && msg.is_optional_overflow_check();
                let cond_val = self.read_scalar(&self.eval_operand(cond, None)?)?.to_bool()?;
                if ignored || expected == cond_val {
                    self.go_to_block(target);
                } else {
                    M::assert_panic(self, msg, unwind)?;
                }
            }

            UnwindTerminate(reason) => {
                M::unwind_terminate(self, reason)?;
            }

            UnwindResume => {
                trace!("unwinding: resuming from cleanup");
                // By definition, a Resume terminator means we are unwinding.
                self.return_from_current_stack_frame(/* unwinding */ true)?;
                return interp_ok(());
            }

            Unreachable => throw_ub!(Unreachable),

            FalseEdge { .. } | FalseUnwind { .. } | Yield { .. } | CoroutineDrop => span_bug!(
                terminator.source_info.span,
                "{:#?} should have been eliminated by MIR pass",
                terminator.kind
            ),

            InlineAsm { .. } => {
                throw_unsup_format!("inline assembly is not supported");
            }
        }

        interp_ok(())
    }
}