1use std::assert_matches::assert_matches;
2use std::ops::Deref;
3
4use rustc_abi::{Align, Scalar, Size, WrappingRange};
5use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrs;
6use rustc_middle::ty::layout::{FnAbiOf, LayoutOf, TyAndLayout};
7use rustc_middle::ty::{AtomicOrdering, Instance, Ty};
8use rustc_session::config::OptLevel;
9use rustc_span::Span;
10use rustc_target::callconv::FnAbi;
11
12use super::abi::AbiBuilderMethods;
13use super::asm::AsmBuilderMethods;
14use super::consts::ConstCodegenMethods;
15use super::coverageinfo::CoverageInfoBuilderMethods;
16use super::debuginfo::DebugInfoBuilderMethods;
17use super::intrinsic::IntrinsicCallBuilderMethods;
18use super::misc::MiscCodegenMethods;
19use super::type_::{ArgAbiBuilderMethods, BaseTypeCodegenMethods, LayoutTypeCodegenMethods};
20use super::{CodegenMethods, StaticBuilderMethods};
21use crate::MemFlags;
22use crate::common::{AtomicRmwBinOp, IntPredicate, RealPredicate, SynchronizationScope, TypeKind};
23use crate::mir::operand::{OperandRef, OperandValue};
24use crate::mir::place::{PlaceRef, PlaceValue};
25
/// Binary integer operations for which the backend can produce both the
/// wrapped result and an overflow flag (see `BuilderMethods::checked_binop`).
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum OverflowOp {
    /// Checked integer addition.
    Add,
    /// Checked integer subtraction.
    Sub,
    /// Checked integer multiplication.
    Mul,
}
32
33pub trait BuilderMethods<'a, 'tcx>:
34 Sized
35 + LayoutOf<'tcx, LayoutOfResult = TyAndLayout<'tcx>>
36 + FnAbiOf<'tcx, FnAbiOfResult = &'tcx FnAbi<'tcx, Ty<'tcx>>>
37 + Deref<Target = Self::CodegenCx>
38 + CoverageInfoBuilderMethods<'tcx>
39 + DebugInfoBuilderMethods
40 + ArgAbiBuilderMethods<'tcx>
41 + AbiBuilderMethods
42 + IntrinsicCallBuilderMethods<'tcx>
43 + AsmBuilderMethods<'tcx>
44 + StaticBuilderMethods
45{
46 type CodegenCx: CodegenMethods<
50 'tcx,
51 Value = Self::Value,
52 Metadata = Self::Metadata,
53 Function = Self::Function,
54 BasicBlock = Self::BasicBlock,
55 Type = Self::Type,
56 Funclet = Self::Funclet,
57 DIScope = Self::DIScope,
58 DILocation = Self::DILocation,
59 DIVariable = Self::DIVariable,
60 >;
61
62 fn build(cx: &'a Self::CodegenCx, llbb: Self::BasicBlock) -> Self;
63
64 fn cx(&self) -> &Self::CodegenCx;
65 fn llbb(&self) -> Self::BasicBlock;
66
67 fn set_span(&mut self, span: Span);
68
69 fn append_block(cx: &'a Self::CodegenCx, llfn: Self::Function, name: &str) -> Self::BasicBlock;
71
72 fn append_sibling_block(&mut self, name: &str) -> Self::BasicBlock;
73
74 fn switch_to_block(&mut self, llbb: Self::BasicBlock);
75
76 fn ret_void(&mut self);
77 fn ret(&mut self, v: Self::Value);
78 fn br(&mut self, dest: Self::BasicBlock);
79 fn cond_br(
80 &mut self,
81 cond: Self::Value,
82 then_llbb: Self::BasicBlock,
83 else_llbb: Self::BasicBlock,
84 );
85
86 fn cond_br_with_expect(
93 &mut self,
94 mut cond: Self::Value,
95 then_llbb: Self::BasicBlock,
96 else_llbb: Self::BasicBlock,
97 expect: Option<bool>,
98 ) {
99 if let Some(expect) = expect {
100 cond = self.expect(cond, expect);
101 }
102 self.cond_br(cond, then_llbb, else_llbb)
103 }
104
105 fn switch(
106 &mut self,
107 v: Self::Value,
108 else_llbb: Self::BasicBlock,
109 cases: impl ExactSizeIterator<Item = (u128, Self::BasicBlock)>,
110 );
111
112 fn switch_with_weights(
116 &mut self,
117 v: Self::Value,
118 else_llbb: Self::BasicBlock,
119 _else_is_cold: bool,
120 cases: impl ExactSizeIterator<Item = (u128, Self::BasicBlock, bool)>,
121 ) {
122 self.switch(v, else_llbb, cases.map(|(val, bb, _)| (val, bb)))
123 }
124
125 fn invoke(
126 &mut self,
127 llty: Self::Type,
128 fn_attrs: Option<&CodegenFnAttrs>,
129 fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>,
130 llfn: Self::Value,
131 args: &[Self::Value],
132 then: Self::BasicBlock,
133 catch: Self::BasicBlock,
134 funclet: Option<&Self::Funclet>,
135 instance: Option<Instance<'tcx>>,
136 ) -> Self::Value;
137 fn unreachable(&mut self);
138
139 fn unreachable_nonterminator(&mut self) {
141 let const_true = self.cx().const_bool(true);
145 let poison_ptr = self.const_poison(self.cx().type_ptr());
146 self.store(const_true, poison_ptr, Align::ONE);
147 }
148
149 fn add(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
150 fn fadd(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
151 fn fadd_fast(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
152 fn fadd_algebraic(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
153 fn sub(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
154 fn fsub(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
155 fn fsub_fast(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
156 fn fsub_algebraic(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
157 fn mul(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
158 fn fmul(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
159 fn fmul_fast(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
160 fn fmul_algebraic(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
161 fn udiv(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
162 fn exactudiv(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
163 fn sdiv(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
164 fn exactsdiv(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
165 fn fdiv(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
166 fn fdiv_fast(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
167 fn fdiv_algebraic(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
168 fn urem(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
169 fn srem(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
170 fn frem(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
171 fn frem_fast(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
172 fn frem_algebraic(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
173 fn shl(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
176 fn lshr(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
180 fn ashr(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
184 fn unchecked_sadd(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
185 self.add(lhs, rhs)
186 }
187 fn unchecked_uadd(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
188 self.add(lhs, rhs)
189 }
190 fn unchecked_suadd(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
191 self.unchecked_sadd(lhs, rhs)
192 }
193 fn unchecked_ssub(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
194 self.sub(lhs, rhs)
195 }
196 fn unchecked_usub(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
197 self.sub(lhs, rhs)
198 }
199 fn unchecked_susub(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
200 self.unchecked_ssub(lhs, rhs)
201 }
202 fn unchecked_smul(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
203 self.mul(lhs, rhs)
204 }
205 fn unchecked_umul(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
206 self.mul(lhs, rhs)
207 }
208 fn unchecked_sumul(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
209 self.unchecked_smul(lhs, rhs)
212 }
213 fn and(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
214 fn or(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
215 fn or_disjoint(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value {
218 self.or(lhs, rhs)
219 }
220 fn xor(&mut self, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
221 fn neg(&mut self, v: Self::Value) -> Self::Value;
222 fn fneg(&mut self, v: Self::Value) -> Self::Value;
223 fn not(&mut self, v: Self::Value) -> Self::Value;
224
225 fn checked_binop(
226 &mut self,
227 oop: OverflowOp,
228 ty: Ty<'tcx>,
229 lhs: Self::Value,
230 rhs: Self::Value,
231 ) -> (Self::Value, Self::Value);
232
233 fn from_immediate(&mut self, val: Self::Value) -> Self::Value;
234 fn to_immediate_scalar(&mut self, val: Self::Value, scalar: Scalar) -> Self::Value;
235
236 fn alloca(&mut self, size: Size, align: Align) -> Self::Value;
237
238 fn load(&mut self, ty: Self::Type, ptr: Self::Value, align: Align) -> Self::Value;
239 fn volatile_load(&mut self, ty: Self::Type, ptr: Self::Value) -> Self::Value;
240 fn atomic_load(
241 &mut self,
242 ty: Self::Type,
243 ptr: Self::Value,
244 order: AtomicOrdering,
245 size: Size,
246 ) -> Self::Value;
247 fn load_from_place(&mut self, ty: Self::Type, place: PlaceValue<Self::Value>) -> Self::Value {
248 assert_eq!(place.llextra, None);
249 self.load(ty, place.llval, place.align)
250 }
251 fn load_operand(&mut self, place: PlaceRef<'tcx, Self::Value>)
252 -> OperandRef<'tcx, Self::Value>;
253
254 fn write_operand_repeatedly(
256 &mut self,
257 elem: OperandRef<'tcx, Self::Value>,
258 count: u64,
259 dest: PlaceRef<'tcx, Self::Value>,
260 );
261
262 fn assume_integer_range(&mut self, imm: Self::Value, ty: Self::Type, range: WrappingRange) {
267 let WrappingRange { start, end } = range;
268
269 let shifted = if start == 0 {
273 imm
274 } else {
275 let low = self.const_uint_big(ty, start);
276 self.sub(imm, low)
277 };
278 let width = self.const_uint_big(ty, u128::wrapping_sub(end, start));
279 let cmp = self.icmp(IntPredicate::IntULE, shifted, width);
280 self.assume(cmp);
281 }
282
283 fn assume_nonnull(&mut self, val: Self::Value) {
287 let null = self.const_null(self.type_ptr());
292 let is_null = self.icmp(IntPredicate::IntNE, val, null);
293 self.assume(is_null);
294 }
295
296 fn range_metadata(&mut self, load: Self::Value, range: WrappingRange);
297 fn nonnull_metadata(&mut self, load: Self::Value);
298
299 fn store(&mut self, val: Self::Value, ptr: Self::Value, align: Align) -> Self::Value;
300 fn store_to_place(&mut self, val: Self::Value, place: PlaceValue<Self::Value>) -> Self::Value {
301 assert_eq!(place.llextra, None);
302 self.store(val, place.llval, place.align)
303 }
304 fn store_with_flags(
305 &mut self,
306 val: Self::Value,
307 ptr: Self::Value,
308 align: Align,
309 flags: MemFlags,
310 ) -> Self::Value;
311 fn store_to_place_with_flags(
312 &mut self,
313 val: Self::Value,
314 place: PlaceValue<Self::Value>,
315 flags: MemFlags,
316 ) -> Self::Value {
317 assert_eq!(place.llextra, None);
318 self.store_with_flags(val, place.llval, place.align, flags)
319 }
320 fn atomic_store(
321 &mut self,
322 val: Self::Value,
323 ptr: Self::Value,
324 order: AtomicOrdering,
325 size: Size,
326 );
327
328 fn gep(&mut self, ty: Self::Type, ptr: Self::Value, indices: &[Self::Value]) -> Self::Value;
329 fn inbounds_gep(
330 &mut self,
331 ty: Self::Type,
332 ptr: Self::Value,
333 indices: &[Self::Value],
334 ) -> Self::Value;
335 fn inbounds_nuw_gep(
336 &mut self,
337 ty: Self::Type,
338 ptr: Self::Value,
339 indices: &[Self::Value],
340 ) -> Self::Value {
341 self.inbounds_gep(ty, ptr, indices)
342 }
343 fn ptradd(&mut self, ptr: Self::Value, offset: Self::Value) -> Self::Value {
344 self.gep(self.cx().type_i8(), ptr, &[offset])
345 }
346 fn inbounds_ptradd(&mut self, ptr: Self::Value, offset: Self::Value) -> Self::Value {
347 self.inbounds_gep(self.cx().type_i8(), ptr, &[offset])
348 }
349
350 fn trunc(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
351 fn unchecked_utrunc(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value {
354 self.trunc(val, dest_ty)
355 }
356 fn unchecked_strunc(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value {
359 self.trunc(val, dest_ty)
360 }
361
362 fn sext(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
363 fn fptoui_sat(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
364 fn fptosi_sat(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
365 fn fptoui(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
366 fn fptosi(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
367 fn uitofp(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
368 fn sitofp(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
369 fn fptrunc(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
370 fn fpext(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
371 fn ptrtoint(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
372 fn inttoptr(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
373 fn bitcast(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
374 fn intcast(&mut self, val: Self::Value, dest_ty: Self::Type, is_signed: bool) -> Self::Value;
375 fn pointercast(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
376
377 fn cast_float_to_int(
378 &mut self,
379 signed: bool,
380 x: Self::Value,
381 dest_ty: Self::Type,
382 ) -> Self::Value {
383 let in_ty = self.cx().val_ty(x);
384 let (float_ty, int_ty) = if self.cx().type_kind(dest_ty) == TypeKind::Vector
385 && self.cx().type_kind(in_ty) == TypeKind::Vector
386 {
387 (self.cx().element_type(in_ty), self.cx().element_type(dest_ty))
388 } else {
389 (in_ty, dest_ty)
390 };
391 assert_matches!(
392 self.cx().type_kind(float_ty),
393 TypeKind::Half | TypeKind::Float | TypeKind::Double | TypeKind::FP128
394 );
395 assert_eq!(self.cx().type_kind(int_ty), TypeKind::Integer);
396
397 if let Some(false) = self.cx().sess().opts.unstable_opts.saturating_float_casts {
398 return if signed { self.fptosi(x, dest_ty) } else { self.fptoui(x, dest_ty) };
399 }
400
401 if signed { self.fptosi_sat(x, dest_ty) } else { self.fptoui_sat(x, dest_ty) }
402 }
403
404 fn icmp(&mut self, op: IntPredicate, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
405 fn fcmp(&mut self, op: RealPredicate, lhs: Self::Value, rhs: Self::Value) -> Self::Value;
406
407 fn three_way_compare(
411 &mut self,
412 _ty: Ty<'tcx>,
413 _lhs: Self::Value,
414 _rhs: Self::Value,
415 ) -> Option<Self::Value> {
416 None
417 }
418
419 fn memcpy(
420 &mut self,
421 dst: Self::Value,
422 dst_align: Align,
423 src: Self::Value,
424 src_align: Align,
425 size: Self::Value,
426 flags: MemFlags,
427 );
428 fn memmove(
429 &mut self,
430 dst: Self::Value,
431 dst_align: Align,
432 src: Self::Value,
433 src_align: Align,
434 size: Self::Value,
435 flags: MemFlags,
436 );
437 fn memset(
438 &mut self,
439 ptr: Self::Value,
440 fill_byte: Self::Value,
441 size: Self::Value,
442 align: Align,
443 flags: MemFlags,
444 );
445
446 fn typed_place_copy(
453 &mut self,
454 dst: PlaceValue<Self::Value>,
455 src: PlaceValue<Self::Value>,
456 layout: TyAndLayout<'tcx>,
457 ) {
458 self.typed_place_copy_with_flags(dst, src, layout, MemFlags::empty());
459 }
460
461 fn typed_place_copy_with_flags(
462 &mut self,
463 dst: PlaceValue<Self::Value>,
464 src: PlaceValue<Self::Value>,
465 layout: TyAndLayout<'tcx>,
466 flags: MemFlags,
467 ) {
468 assert!(layout.is_sized(), "cannot typed-copy an unsigned type");
469 assert!(src.llextra.is_none(), "cannot directly copy from unsized values");
470 assert!(dst.llextra.is_none(), "cannot directly copy into unsized values");
471 if flags.contains(MemFlags::NONTEMPORAL) {
472 let ty = self.backend_type(layout);
474 let val = self.load_from_place(ty, src);
475 self.store_to_place_with_flags(val, dst, flags);
476 } else if self.sess().opts.optimize == OptLevel::No && self.is_backend_immediate(layout) {
477 let temp = self.load_operand(src.with_type(layout));
480 temp.val.store_with_flags(self, dst.with_type(layout), flags);
481 } else if !layout.is_zst() {
482 let bytes = self.const_usize(layout.size.bytes());
483 self.memcpy(dst.llval, dst.align, src.llval, src.align, bytes, flags);
484 }
485 }
486
487 fn typed_place_swap(
495 &mut self,
496 left: PlaceValue<Self::Value>,
497 right: PlaceValue<Self::Value>,
498 layout: TyAndLayout<'tcx>,
499 ) {
500 let mut temp = self.load_operand(left.with_type(layout));
501 if let OperandValue::Ref(..) = temp.val {
502 let alloca = PlaceRef::alloca(self, layout);
504 self.typed_place_copy(alloca.val, left, layout);
505 temp = self.load_operand(alloca);
506 }
507 self.typed_place_copy(left, right, layout);
508 temp.val.store(self, right.with_type(layout));
509 }
510
511 fn select(
512 &mut self,
513 cond: Self::Value,
514 then_val: Self::Value,
515 else_val: Self::Value,
516 ) -> Self::Value;
517
518 fn va_arg(&mut self, list: Self::Value, ty: Self::Type) -> Self::Value;
519 fn extract_element(&mut self, vec: Self::Value, idx: Self::Value) -> Self::Value;
520 fn vector_splat(&mut self, num_elts: usize, elt: Self::Value) -> Self::Value;
521 fn extract_value(&mut self, agg_val: Self::Value, idx: u64) -> Self::Value;
522 fn insert_value(&mut self, agg_val: Self::Value, elt: Self::Value, idx: u64) -> Self::Value;
523
524 fn set_personality_fn(&mut self, personality: Self::Function);
525
526 fn cleanup_landing_pad(&mut self, pers_fn: Self::Function) -> (Self::Value, Self::Value);
528 fn filter_landing_pad(&mut self, pers_fn: Self::Function);
529 fn resume(&mut self, exn0: Self::Value, exn1: Self::Value);
530
531 fn cleanup_pad(&mut self, parent: Option<Self::Value>, args: &[Self::Value]) -> Self::Funclet;
533 fn cleanup_ret(&mut self, funclet: &Self::Funclet, unwind: Option<Self::BasicBlock>);
534 fn catch_pad(&mut self, parent: Self::Value, args: &[Self::Value]) -> Self::Funclet;
535 fn catch_switch(
536 &mut self,
537 parent: Option<Self::Value>,
538 unwind: Option<Self::BasicBlock>,
539 handlers: &[Self::BasicBlock],
540 ) -> Self::Value;
541
542 fn atomic_cmpxchg(
543 &mut self,
544 dst: Self::Value,
545 cmp: Self::Value,
546 src: Self::Value,
547 order: AtomicOrdering,
548 failure_order: AtomicOrdering,
549 weak: bool,
550 ) -> (Self::Value, Self::Value);
551 fn atomic_rmw(
552 &mut self,
553 op: AtomicRmwBinOp,
554 dst: Self::Value,
555 src: Self::Value,
556 order: AtomicOrdering,
557 ) -> Self::Value;
558 fn atomic_fence(&mut self, order: AtomicOrdering, scope: SynchronizationScope);
559 fn set_invariant_load(&mut self, load: Self::Value);
560
561 fn lifetime_start(&mut self, ptr: Self::Value, size: Size);
563
564 fn lifetime_end(&mut self, ptr: Self::Value, size: Size);
566
567 fn call(
589 &mut self,
590 llty: Self::Type,
591 fn_attrs: Option<&CodegenFnAttrs>,
592 fn_abi: Option<&FnAbi<'tcx, Ty<'tcx>>>,
593 fn_val: Self::Value,
594 args: &[Self::Value],
595 funclet: Option<&Self::Funclet>,
596 instance: Option<Instance<'tcx>>,
597 ) -> Self::Value;
598
599 fn tail_call(
600 &mut self,
601 llty: Self::Type,
602 fn_attrs: Option<&CodegenFnAttrs>,
603 fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
604 llfn: Self::Value,
605 args: &[Self::Value],
606 funclet: Option<&Self::Funclet>,
607 instance: Option<Instance<'tcx>>,
608 );
609
610 fn zext(&mut self, val: Self::Value, dest_ty: Self::Type) -> Self::Value;
611
612 fn apply_attrs_to_cleanup_callsite(&mut self, llret: Self::Value);
613}