// rustc_const_eval/interpret/step.rs

use either::Either;
use rustc_abi::{FIRST_VARIANT, FieldIdx};
use rustc_index::IndexSlice;
use rustc_middle::ty::{self, Instance, Ty};
use rustc_middle::{bug, mir, span_bug};
use rustc_span::source_map::Spanned;
use rustc_target::callconv::FnAbi;
use tracing::field::Empty;
use tracing::{info, instrument, trace};

use super::{
    FnArg, FnVal, ImmTy, Immediate, InterpCx, InterpResult, Machine, MemPlaceMeta, PlaceTy,
    Projectable, Scalar, interp_ok, throw_ub, throw_unsup_format,
};
use crate::interpret::EnteredTraceSpan;
use crate::{enter_trace_span, util};

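/// The callee and arguments of a function call, fully evaluated: everything the shared
/// `Call`/`TailCall` logic needs to actually perform the call.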
struct EvaluatedCalleeAndArgs<'tcx, M: Machine<'tcx>> {
    callee: FnVal<'tcx, M::ExtraFnVal>,
    args: Vec<FnArg<'tcx, M::Provenance>>,
    fn_sig: ty::FnSig<'tcx>,
    fn_abi: &'tcx FnAbi<'tcx, Ty<'tcx>>,
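    /// Whether the callee is `#[track_caller]` and thus expects an implicit caller-location
    /// argument (see `requires_caller_location` below).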
    with_caller_location: bool,
}

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
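    /// Performs one step of the interpreter: executes a single statement or terminator in the
    /// current stack frame. Returns `true` as long as there are more things to do, and `false`
    /// once the call stack is empty.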
    #[inline(always)]
    pub fn step(&mut self) -> InterpResult<'tcx, bool> {
        if self.stack().is_empty() {
            return interp_ok(false);
        }

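        // The frame has no location while we are unwinding through a frame without cleanup
        // blocks; in that case, just keep unwinding into the caller.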
        let Either::Left(loc) = self.frame().loc else {
            trace!("unwinding: skipping frame");
            self.return_from_current_stack_frame(/* unwinding */ true)?;
            return interp_ok(true);
        };
        let basic_block = &self.body().basic_blocks[loc.block];

        if let Some(stmt) = basic_block.statements.get(loc.statement_index) {
            let old_frames = self.frame_idx();
            self.eval_statement(stmt)?;
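            // Make sure we are not advancing the statement counter of the wrong frame.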
            assert_eq!(old_frames, self.frame_idx());
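            // Advance the program counter.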
            self.frame_mut().loc.as_mut().left().unwrap().statement_index += 1;
            return interp_ok(true);
        }

        M::before_terminator(self)?;

        let terminator = basic_block.terminator();
        self.eval_terminator(terminator)?;
        if !self.stack().is_empty() {
            if let Either::Left(loc) = self.frame().loc {
                info!("// executing {:?}", loc.block);
            }
        }
        interp_ok(true)
    }

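    /// Runs the interpretation logic for the given `mir::Statement` at the current frame and
    /// statement counter. This does *not* advance the statement counter; the caller has to do
    /// that.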
    pub fn eval_statement(&mut self, stmt: &mir::Statement<'tcx>) -> InterpResult<'tcx> {
        let _span = enter_trace_span!(
            M,
            step::eval_statement,
            stmt = ?stmt.kind,
            span = ?stmt.source_info.span,
            tracing_separate_thread = Empty,
        )
        .or_if_tracing_disabled(|| info!(stmt = ?stmt.kind));

        use rustc_middle::mir::StatementKind::*;

        match &stmt.kind {
            Assign(box (place, rvalue)) => self.eval_rvalue_into_place(rvalue, *place)?,

            SetDiscriminant { place, variant_index } => {
                let dest = self.eval_place(**place)?;
                self.write_discriminant(*variant_index, &dest)?;
            }

            Deinit(place) => {
                let dest = self.eval_place(**place)?;
                self.write_uninit(&dest)?;
            }

            StorageLive(local) => {
                self.storage_live(*local)?;
            }

            StorageDead(local) => {
                self.storage_dead(*local)?;
            }

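            // `FakeRead` only exists for the borrow checker; it has no operational semantics.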
            FakeRead(..) => {}

            Retag(kind, place) => {
                let dest = self.eval_place(**place)?;
                M::retag_place_contents(self, *kind, &dest)?;
            }

            Intrinsic(box intrinsic) => self.eval_nondiverging_intrinsic(intrinsic)?,

            PlaceMention(box place) => {
                let _ = self.eval_place(*place)?;
            }

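            // Type ascriptions only guide the type checker; they have no operational effect.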
            AscribeUserType(..) => {}

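            // Coverage statements are only meaningful to the coverage instrumentation machinery
            // and have no effect during interpretation.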
            Coverage(..) => {}

            ConstEvalCounter => {
                M::increment_const_eval_counter(self)?;
            }

            Nop => {}

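            // These hints only exist to emit lints about changes in drop order; they have no
            // operational effect.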
            BackwardIncompatibleDropHint { .. } => {}
        }

        interp_ok(())
    }

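    /// Evaluates an rvalue and writes the result into the given place. There is no separate
    /// `eval_rvalue` function: each rvalue kind writes its result directly into `dest`.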
    pub fn eval_rvalue_into_place(
        &mut self,
        rvalue: &mir::Rvalue<'tcx>,
        place: mir::Place<'tcx>,
    ) -> InterpResult<'tcx> {
        let dest = self.eval_place(place)?;

        use rustc_middle::mir::Rvalue::*;
        match *rvalue {
            ThreadLocalRef(did) => {
                let ptr = M::thread_local_static_pointer(self, did)?;
                self.write_pointer(ptr, &dest)?;
            }

            Use(ref operand) => {
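                // Pass along the layout of the destination to avoid recomputing it.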
                let op = self.eval_operand(operand, Some(dest.layout))?;
                self.copy_op(&op, &dest)?;
            }

            CopyForDeref(place) => {
                let op = self.eval_place_to_op(place, Some(dest.layout))?;
                self.copy_op(&op, &dest)?;
            }

            BinaryOp(bin_op, box (ref left, ref right)) => {
                let layout = util::binop_left_homogeneous(bin_op).then_some(dest.layout);
                let left = self.read_immediate(&self.eval_operand(left, layout)?)?;
                let layout = util::binop_right_homogeneous(bin_op).then_some(left.layout);
                let right = self.read_immediate(&self.eval_operand(right, layout)?)?;
                let result = self.binary_op(bin_op, &left, &right)?;
                assert_eq!(result.layout, dest.layout, "layout mismatch for result of {bin_op:?}");
                self.write_immediate(*result, &dest)?;
            }

            UnaryOp(un_op, ref operand) => {
                let val = self.read_immediate(&self.eval_operand(operand, Some(dest.layout))?)?;
                let result = self.unary_op(un_op, &val)?;
                assert_eq!(result.layout, dest.layout, "layout mismatch for result of {un_op:?}");
                self.write_immediate(*result, &dest)?;
            }

            NullaryOp(null_op, ty) => {
                let ty = self.instantiate_from_current_frame_and_normalize_erasing_regions(ty)?;
                let val = self.nullary_op(null_op, ty)?;
                self.write_immediate(*val, &dest)?;
            }

            Aggregate(box ref kind, ref operands) => {
                self.write_aggregate(kind, operands, &dest)?;
            }

            Repeat(ref operand, _) => {
                self.write_repeat(operand, &dest)?;
            }

            Len(place) => {
                let src = self.eval_place(place)?;
                let len = src.len(self)?;
                self.write_scalar(Scalar::from_target_usize(len, self), &dest)?;
            }

            Ref(_, borrow_kind, place) => {
                let src = self.eval_place(place)?;
                let place = self.force_allocation(&src)?;
                let val = ImmTy::from_immediate(place.to_ref(self), dest.layout);
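                // A fresh reference was created; make sure it gets retagged.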
                let val = M::retag_ptr_value(
                    self,
                    if borrow_kind.allows_two_phase_borrow() {
                        mir::RetagKind::TwoPhase
                    } else {
                        mir::RetagKind::Default
                    },
                    &val,
                )?;
                self.write_immediate(*val, &dest)?;
            }

            RawPtr(kind, place) => {
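                // Figure out whether this is `&raw` of a place whose base is already a raw
                // pointer; such a pointer is merely re-derived and not newly created.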
                let place_base_raw = if place.is_indirect_first_projection() {
                    let ty = self.frame().body.local_decls[place.local].ty;
                    ty.is_raw_ptr()
                } else {
                    false
                };

                let src = self.eval_place(place)?;
                let place = self.force_allocation(&src)?;
                let mut val = ImmTy::from_immediate(place.to_ref(self), dest.layout);
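                // Only retag if this creates a genuinely new raw pointer: re-derived raw
                // pointers and "fake" raw borrows are left as they are.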
                if !place_base_raw && !kind.is_fake() {
                    val = M::retag_ptr_value(self, mir::RetagKind::Raw, &val)?;
                }
                self.write_immediate(*val, &dest)?;
            }

            ShallowInitBox(ref operand, _) => {
                let src = self.eval_operand(operand, None)?;
                let v = self.read_immediate(&src)?;
                self.write_immediate(*v, &dest)?;
            }

            Cast(cast_kind, ref operand, cast_ty) => {
                let src = self.eval_operand(operand, None)?;
                let cast_ty =
                    self.instantiate_from_current_frame_and_normalize_erasing_regions(cast_ty)?;
                self.cast(&src, cast_kind, cast_ty, &dest)?;
            }

            Discriminant(place) => {
                let op = self.eval_place_to_op(place, None)?;
                let variant = self.read_discriminant(&op)?;
                let discr = self.discriminant_for_variant(op.layout.ty, variant)?;
                self.write_immediate(*discr, &dest)?;
            }

            WrapUnsafeBinder(ref op, _ty) => {
                let op = self.eval_operand(op, None)?;
                self.copy_op_allow_transmute(&op, &dest)?;
            }
        }

        trace!("{:?}", self.dump_place(&dest));

        interp_ok(())
    }

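    /// Writes the fields of an aggregate rvalue into `dest`, then sets the discriminant.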
    #[instrument(skip(self), level = "trace")]
    fn write_aggregate(
        &mut self,
        kind: &mir::AggregateKind<'tcx>,
        operands: &IndexSlice<FieldIdx, mir::Operand<'tcx>>,
        dest: &PlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        self.write_uninit(dest)?; // make sure all the padding ends up as uninit
        let (variant_index, variant_dest, active_field_index) = match *kind {
            mir::AggregateKind::Adt(_, variant_index, _, _, active_field_index) => {
                let variant_dest = self.project_downcast(dest, variant_index)?;
                (variant_index, variant_dest, active_field_index)
            }
            mir::AggregateKind::RawPtr(..) => {
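                // Pointers don't have "fields" in the normal sense, so the projection-based
                // code below would not work for them. Instead, assemble an `Immediate` from
                // the data and metadata parts and write that directly.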
                let [data, meta] = &operands.raw else {
                    bug!("{kind:?} should have 2 operands, had {operands:?}");
                };
                let data = self.eval_operand(data, None)?;
                let data = self.read_pointer(&data)?;
                let meta = self.eval_operand(meta, None)?;
                let meta = if meta.layout.is_zst() {
                    MemPlaceMeta::None
                } else {
                    MemPlaceMeta::Meta(self.read_scalar(&meta)?)
                };
                let ptr_imm = Immediate::new_pointer_with_meta(data, meta, self);
                let ptr = ImmTy::from_immediate(ptr_imm, dest.layout);
                self.copy_op(&ptr, dest)?;
                return interp_ok(());
            }
            _ => (FIRST_VARIANT, dest.clone(), None),
        };
        if active_field_index.is_some() {
            assert_eq!(operands.len(), 1);
        }
        for (field_index, operand) in operands.iter_enumerated() {
            let field_index = active_field_index.unwrap_or(field_index);
            let field_dest = self.project_field(&variant_dest, field_index)?;
            let op = self.eval_operand(operand, Some(field_dest.layout))?;
            self.copy_op(&op, &field_dest)?;
        }
        self.write_discriminant(variant_index, dest)
    }

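    /// Repeats `operand` into `dest`. `dest` must have array type, and that type determines
    /// how often the operand is repeated.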
    fn write_repeat(
        &mut self,
        operand: &mir::Operand<'tcx>,
        dest: &PlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        let src = self.eval_operand(operand, None)?;
        assert!(src.layout.is_sized());
        let dest = self.force_allocation(&dest)?;
        let length = dest.len(self)?;

        if length == 0 {
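            // Nothing to copy, but still make sure `dest` is a valid, writable place.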
            self.get_place_alloc_mut(&dest)?;
        } else {
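            // Write the operand to the first element.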
            let first = self.project_index(&dest, 0)?;
            self.copy_op(&src, &first)?;

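            // This is performance-sensitive for big static/const arrays! Rather than writing
            // each element individually, make many copies of the first element.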
            let elem_size = first.layout.size;
            let first_ptr = first.ptr();
            let rest_ptr = first_ptr.wrapping_offset(elem_size, self);
            self.mem_copy_repeatedly(
                first_ptr,
                rest_ptr,
                elem_size,
                length - 1,
                /* nonoverlapping */ true,
            )?;
        }

        interp_ok(())
    }

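    /// Evaluates a single function-call argument, deciding whether it is passed by copy or
    /// in place.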
    fn eval_fn_call_argument(
        &self,
        op: &mir::Operand<'tcx>,
    ) -> InterpResult<'tcx, FnArg<'tcx, M::Provenance>> {
        interp_ok(match op {
            mir::Operand::Copy(_) | mir::Operand::Constant(_) => {
                let op = self.eval_operand(op, None)?;
                FnArg::Copy(op)
            }
            mir::Operand::Move(place) => {
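                // For `move` operands, evaluate the place first: if the argument lives in
                // memory, it is passed in place, preserving its location for the call.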
                let place = self.eval_place(*place)?;
                let op = self.place_to_op(&place)?;

                match op.as_mplace_or_imm() {
                    Either::Left(mplace) => FnArg::InPlace(mplace),
                    Either::Right(_imm) => {
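                        // The argument does not live in memory, so there is no place to make
                        // inaccessible for the duration of the call; pass a copy instead.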
                        FnArg::Copy(op)
                    }
                }
            }
        })
    }

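    /// Shared part of `Call` and `TailCall`: evaluates the callee and arguments and determines
    /// the function signature and ABI.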
    fn eval_callee_and_args(
        &self,
        terminator: &mir::Terminator<'tcx>,
        func: &mir::Operand<'tcx>,
        args: &[Spanned<mir::Operand<'tcx>>],
    ) -> InterpResult<'tcx, EvaluatedCalleeAndArgs<'tcx, M>> {
        let func = self.eval_operand(func, None)?;
        let args = args
            .iter()
            .map(|arg| self.eval_fn_call_argument(&arg.node))
            .collect::<InterpResult<'tcx, Vec<_>>>()?;

        let fn_sig_binder = func.layout.ty.fn_sig(*self.tcx);
        let fn_sig = self.tcx.normalize_erasing_late_bound_regions(self.typing_env, fn_sig_binder);
        let extra_args = &args[fn_sig.inputs().len()..];
        let extra_args =
            self.tcx.mk_type_list_from_iter(extra_args.iter().map(|arg| arg.layout().ty));

        let (callee, fn_abi, with_caller_location) = match *func.layout.ty.kind() {
            ty::FnPtr(..) => {
                let fn_ptr = self.read_pointer(&func)?;
                let fn_val = self.get_ptr_fn(fn_ptr)?;
                (fn_val, self.fn_abi_of_fn_ptr(fn_sig_binder, extra_args)?, false)
            }
            ty::FnDef(def_id, args) => {
                let instance = self.resolve(def_id, args)?;
                (
                    FnVal::Instance(instance),
                    self.fn_abi_of_instance(instance, extra_args)?,
                    instance.def.requires_caller_location(*self.tcx),
                )
            }
            _ => {
                span_bug!(terminator.source_info.span, "invalid callee of type {}", func.layout.ty)
            }
        };

        interp_ok(EvaluatedCalleeAndArgs { callee, args, fn_sig, fn_abi, with_caller_location })
    }

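    /// Runs the interpretation logic for the given `mir::Terminator`, advancing the current
    /// location or the call stack as appropriate.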
    fn eval_terminator(&mut self, terminator: &mir::Terminator<'tcx>) -> InterpResult<'tcx> {
        let _span = enter_trace_span!(
            M,
            step::eval_terminator,
            terminator = ?terminator.kind,
            span = ?terminator.source_info.span,
            tracing_separate_thread = Empty,
        )
        .or_if_tracing_disabled(|| info!(terminator = ?terminator.kind));

        use rustc_middle::mir::TerminatorKind::*;
        match terminator.kind {
            Return => {
                self.return_from_current_stack_frame(/* unwinding */ false)?
            }

            Goto { target } => self.go_to_block(target),

            SwitchInt { ref discr, ref targets } => {
                let discr = self.read_immediate(&self.eval_operand(discr, None)?)?;
                trace!("SwitchInt({:?})", *discr);

                let mut target_block = targets.otherwise();

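                // Find the branch whose value matches the discriminant; fall back to `otherwise`.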
                for (const_int, target) in targets.iter() {
                    let res = self.binary_op(
                        mir::BinOp::Eq,
                        &discr,
                        &ImmTy::from_uint(const_int, discr.layout),
                    )?;
                    if res.to_scalar().to_bool()? {
                        target_block = target;
                        break;
                    }
                }

                self.go_to_block(target_block);
            }

            Call {
                ref func,
                ref args,
                destination,
                target,
                unwind,
                call_source: _,
                fn_span: _,
            } => {
                let old_stack = self.frame_idx();
                let old_loc = self.frame().loc;

                let EvaluatedCalleeAndArgs { callee, args, fn_sig, fn_abi, with_caller_location } =
                    self.eval_callee_and_args(terminator, func, args)?;

                let destination = self.eval_place(destination)?;
                self.init_fn_call(
                    callee,
                    (fn_sig.abi, fn_abi),
                    &args,
                    with_caller_location,
                    &destination,
                    target,
                    if fn_abi.can_unwind { unwind } else { mir::UnwindAction::Unreachable },
                )?;
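                // Sanity-check that this call either pushed a new frame or jumped to another
                // block.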
                if self.frame_idx() == old_stack && self.frame().loc == old_loc {
                    span_bug!(terminator.source_info.span, "evaluating this call made no progress");
                }
            }

            TailCall { ref func, ref args, fn_span: _ } => {
                let old_frame_idx = self.frame_idx();

                let EvaluatedCalleeAndArgs { callee, args, fn_sig, fn_abi, with_caller_location } =
                    self.eval_callee_and_args(terminator, func, args)?;

                self.init_fn_tail_call(callee, (fn_sig.abi, fn_abi), &args, with_caller_location)?;

                if self.frame_idx() != old_frame_idx {
                    span_bug!(
                        terminator.source_info.span,
                        "evaluating this tail call pushed a new stack frame"
                    );
                }
            }

            Drop { place, target, unwind, replace: _, drop, async_fut } => {
                assert!(
                    async_fut.is_none() && drop.is_none(),
                    "Async Drop must be expanded or reset to sync in runtime MIR"
                );
                let place = self.eval_place(place)?;
                let instance = Instance::resolve_drop_in_place(*self.tcx, place.layout.ty);
                if let ty::InstanceKind::DropGlue(_, None) = instance.def {
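                    // The dropped type has no drop glue at all: do nothing and continue, so
                    // that generic and non-generic code behave the same.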
                    self.go_to_block(target);
                    return interp_ok(());
                }
                trace!("TerminatorKind::drop: {:?}, type {}", place, place.layout.ty);
                self.init_drop_in_place_call(&place, instance, target, unwind)?;
            }

            Assert { ref cond, expected, ref msg, target, unwind } => {
                let ignored =
                    M::ignore_optional_overflow_checks(self) && msg.is_optional_overflow_check();
                let cond_val = self.read_scalar(&self.eval_operand(cond, None)?)?.to_bool()?;
                if ignored || expected == cond_val {
                    self.go_to_block(target);
                } else {
                    M::assert_panic(self, msg, unwind)?;
                }
            }

            UnwindTerminate(reason) => {
                M::unwind_terminate(self, reason)?;
            }

            UnwindResume => {
                trace!("unwinding: resuming from cleanup");
                self.return_from_current_stack_frame(/* unwinding */ true)?;
                return interp_ok(());
            }

            Unreachable => throw_ub!(Unreachable),

            FalseEdge { .. } | FalseUnwind { .. } | Yield { .. } | CoroutineDrop => span_bug!(
                terminator.source_info.span,
                "{:#?} should have been eliminated by MIR pass",
                terminator.kind
            ),

            InlineAsm { .. } => {
                throw_unsup_format!("inline assembly is not supported");
            }
        }

        interp_ok(())
    }
}
619}