// rustc_codegen_ssa/src/mir/statement.rs

use rustc_middle::mir::{self, NonDivergingIntrinsic};
use rustc_middle::span_bug;
use tracing::instrument;

use super::{FunctionCx, LocalRef};
use crate::traits::*;
8impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
9 #[instrument(level = "debug", skip(self, bx))]
10 pub(crate) fn codegen_statement(&mut self, bx: &mut Bx, statement: &mir::Statement<'tcx>) {
11 self.set_debug_loc(bx, statement.source_info);
12 match statement.kind {
13 mir::StatementKind::Assign(box (ref place, ref rvalue)) => {
14 if let Some(index) = place.as_local() {
15 match self.locals[index] {
16 LocalRef::Place(cg_dest) => self.codegen_rvalue(bx, cg_dest, rvalue),
17 LocalRef::UnsizedPlace(cg_indirect_dest) => {
18 let ty = cg_indirect_dest.layout.ty;
19 span_bug!(
20 statement.source_info.span,
21 "cannot reallocate from `UnsizedPlace({ty})` \
22 into `{rvalue:?}`; dynamic alloca is not supported",
23 );
24 }
25 LocalRef::PendingOperand => {
26 let operand = self.codegen_rvalue_operand(bx, rvalue);
27 self.overwrite_local(index, LocalRef::Operand(operand));
28 self.debug_introduce_local(bx, index);
29 }
30 LocalRef::Operand(op) => {
31 if !op.layout.is_zst() {
32 span_bug!(
33 statement.source_info.span,
34 "operand {:?} already assigned",
35 rvalue
36 );
37 }
38
39 self.codegen_rvalue_operand(bx, rvalue);
42 }
43 }
44 } else {
45 let cg_dest = self.codegen_place(bx, place.as_ref());
46 self.codegen_rvalue(bx, cg_dest, rvalue);
47 }
48 }
49 mir::StatementKind::SetDiscriminant { box ref place, variant_index } => {
50 self.codegen_place(bx, place.as_ref()).codegen_set_discr(bx, variant_index);
51 }
52 mir::StatementKind::Deinit(..) => {
53 }
57 mir::StatementKind::StorageLive(local) => {
58 if let LocalRef::Place(cg_place) = self.locals[local] {
59 cg_place.storage_live(bx);
60 } else if let LocalRef::UnsizedPlace(cg_indirect_place) = self.locals[local] {
61 cg_indirect_place.storage_live(bx);
62 }
63 }
64 mir::StatementKind::StorageDead(local) => {
65 if let LocalRef::Place(cg_place) = self.locals[local] {
66 cg_place.storage_dead(bx);
67 } else if let LocalRef::UnsizedPlace(cg_indirect_place) = self.locals[local] {
68 cg_indirect_place.storage_dead(bx);
69 }
70 }
71 mir::StatementKind::Coverage(ref kind) => {
72 self.codegen_coverage(bx, kind, statement.source_info.scope);
73 }
74 mir::StatementKind::Intrinsic(box NonDivergingIntrinsic::Assume(ref op)) => {
75 let op_val = self.codegen_operand(bx, op);
76 bx.assume(op_val.immediate());
77 }
78 mir::StatementKind::Intrinsic(box NonDivergingIntrinsic::CopyNonOverlapping(
79 mir::CopyNonOverlapping { ref count, ref src, ref dst },
80 )) => {
81 let dst_val = self.codegen_operand(bx, dst);
82 let src_val = self.codegen_operand(bx, src);
83 let count = self.codegen_operand(bx, count).immediate();
84 let pointee_layout = dst_val
85 .layout
86 .pointee_info_at(bx, rustc_abi::Size::ZERO)
87 .expect("Expected pointer");
88 let bytes = bx.mul(count, bx.const_usize(pointee_layout.size.bytes()));
89
90 let align = pointee_layout.align;
91 let dst = dst_val.immediate();
92 let src = src_val.immediate();
93 bx.memcpy(dst, align, src, align, bytes, crate::MemFlags::empty());
94 }
95 mir::StatementKind::FakeRead(..)
96 | mir::StatementKind::Retag { .. }
97 | mir::StatementKind::AscribeUserType(..)
98 | mir::StatementKind::ConstEvalCounter
99 | mir::StatementKind::PlaceMention(..)
100 | mir::StatementKind::BackwardIncompatibleDropHint { .. }
101 | mir::StatementKind::Nop => {}
102 }
103 }
104}