use std::iter;

use rustc_index::IndexVec;
use rustc_index::bit_set::DenseBitSet;
use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
use rustc_middle::mir::{Body, Local, UnwindTerminateReason, traversal};
use rustc_middle::ty::layout::{FnAbiOf, HasTyCtxt, HasTypingEnv, TyAndLayout};
use rustc_middle::ty::{self, Instance, Ty, TyCtxt, TypeFoldable, TypeVisitableExt};
use rustc_middle::{bug, mir, span_bug};
use rustc_target::callconv::{FnAbi, PassMode};
use tracing::{debug, instrument};

use crate::base;
use crate::traits::*;

mod analyze;
mod block;
mod constant;
mod coverageinfo;
pub mod debuginfo;
mod intrinsic;
mod locals;
pub mod naked_asm;
pub mod operand;
pub mod place;
mod rvalue;
mod statement;

use self::debuginfo::{FunctionDebugContext, PerLocalVarDebugInfo};
use self::operand::{OperandRef, OperandValue};
use self::place::PlaceRef;

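/// A lazily-created backend basic block for a MIR basic block.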
enum CachedLlbb<T> {
    /// Nothing created yet.
    None,

    /// Has been created.
    Some(T),

    /// Nothing created yet, and nothing should be.
    Skip,
}

type PerLocalVarDebugInfoIndexVec<'tcx, V> =
    IndexVec<mir::Local, Vec<PerLocalVarDebugInfo<'tcx, V>>>;

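/// Master context for codegenning from MIR.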
pub struct FunctionCx<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> {
    instance: Instance<'tcx>,

    mir: &'tcx mir::Body<'tcx>,

    debug_context: Option<FunctionDebugContext<'tcx, Bx::DIScope, Bx::DILocation>>,

    llfn: Bx::Function,

    cx: &'a Bx::CodegenCx,

    fn_abi: &'tcx FnAbi<'tcx, Ty<'tcx>>,

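    /// A stash for the personality value during unwinding, allocated lazily
    /// the first time a landing pad needs to store it.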
    personality_slot: Option<PlaceRef<'tcx, Bx::Value>>,

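    /// A backend basic block for each MIR basic block, created lazily
    /// as the blocks are first reached during codegen.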
    cached_llbbs: IndexVec<mir::BasicBlock, CachedLlbb<Bx::BasicBlock>>,

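    /// The cleanup kind of each basic block; only computed when the target
    /// uses funclet-based exception handling.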
    cleanup_kinds: Option<IndexVec<mir::BasicBlock, analyze::CleanupKind>>,

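    /// The funclet created for each basic block that needs one, used for
    /// MSVC-style exception handling.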
    funclets: IndexVec<mir::BasicBlock, Option<Bx::Funclet>>,

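    /// The landing pad (or cleanup pad) for each basic block that needs one,
    /// created lazily.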
    landing_pads: IndexVec<mir::BasicBlock, Option<Bx::BasicBlock>>,

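    /// A shared block containing nothing but `unreachable`, created lazily.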
    unreachable_block: Option<Bx::BasicBlock>,

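    /// A shared block that aborts when unwinding must terminate, paired with
    /// the termination reason; created lazily.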
    terminate_block: Option<(Bx::BasicBlock, UnwindTerminateReason)>,

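    /// Whether each basic block is cold, i.e. unlikely to be executed at
    /// runtime.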
    cold_blocks: IndexVec<mir::BasicBlock, bool>,

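    /// The location where each MIR arg/var/tmp/ret is stored.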
    locals: locals::Locals<'tcx, Bx::Value>,

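    /// All `VarDebugInfo` from the MIR body, partitioned by `Local`;
    /// `None` when no variable debuginfo is being emitted.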
    per_local_var_debug_info: Option<PerLocalVarDebugInfoIndexVec<'tcx, Bx::DIVariable>>,

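    /// Caches the hidden caller-location argument that `#[track_caller]`
    /// functions receive.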
    caller_location: Option<OperandRef<'tcx, Bx::Value>>,
}

impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
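    /// Replaces any generic parameters in `value` with this instance's
    /// arguments and fully normalizes the result, erasing regions.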
    pub fn monomorphize<T>(&self, value: T) -> T
    where
        T: Copy + TypeFoldable<TyCtxt<'tcx>>,
    {
        debug!("monomorphize: self.instance={:?}", self.instance);
        self.instance.instantiate_mir_and_normalize_erasing_regions(
            self.cx.tcx(),
            self.cx.typing_env(),
            ty::EarlyBinder::bind(value),
        )
    }
}

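/// How a MIR local is represented during codegen: as a place in memory, or
/// as an SSA operand.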
enum LocalRef<'tcx, V> {
    Place(PlaceRef<'tcx, V>),
    /// `UnsizedPlace(p)`: `p` itself is a thin pointer (indirect place).
    /// `*p` is the wide pointer that references the actual unsized place.
    /// Every time it is initialized, we have to reallocate the place
    /// and update the wide pointer. That's the reason why it is indirect.
    UnsizedPlace(PlaceRef<'tcx, V>),
    /// The backend [`OperandValue`] has already been generated.
    Operand(OperandRef<'tcx, V>),
    /// Will be a `Self::Operand` once we get to its definition.
    PendingOperand,
}

impl<'tcx, V: CodegenObject> LocalRef<'tcx, V> {
    fn new_operand(layout: TyAndLayout<'tcx>) -> LocalRef<'tcx, V> {
        if layout.is_zst() {
            // Zero-sized locals carry no data, so they can become operands
            // immediately instead of waiting for a definition.
            LocalRef::Operand(OperandRef::zero_sized(layout))
        } else {
            LocalRef::PendingOperand
        }
    }
}

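/// Codegens the MIR body of the given `instance` into its backend function.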
#[instrument(level = "debug", skip(cx))]
pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    cx: &'a Bx::CodegenCx,
    instance: Instance<'tcx>,
) {
    assert!(!instance.args.has_infer());

    let tcx = cx.tcx();
    let llfn = cx.get_fn(instance);

    let mut mir = tcx.instance_mir(instance.def);

    let fn_abi = cx.fn_abi_of_instance(instance, ty::List::empty());
    debug!("fn_abi: {:?}", fn_abi);

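    // With `ergonomic_clones` enabled, monomorphize the body up front so
    // that `optimize_use_clone` can rewrite trivial `.use` clones of `Copy`
    // types into plain copies.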
    if tcx.features().ergonomic_clones() {
        let monomorphized_mir = instance.instantiate_mir_and_normalize_erasing_regions(
            tcx,
            ty::TypingEnv::fully_monomorphized(),
            ty::EarlyBinder::bind(mir.clone()),
        );
        mir = tcx.arena.alloc(optimize_use_clone::<Bx>(cx, monomorphized_mir));
    }

    let debug_context = cx.create_function_debug_context(instance, fn_abi, llfn, &mir);

    let start_llbb = Bx::append_block(cx, llfn, "start");
    let mut start_bx = Bx::build(cx, start_llbb);

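    // A personality function is only needed if some block performs cleanup
    // on unwind, or turns an unwind into an abort.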
    if mir.basic_blocks.iter().any(|bb| {
        bb.is_cleanup || matches!(bb.terminator().unwind(), Some(mir::UnwindAction::Terminate(_)))
    }) {
        start_bx.set_personality_fn(cx.eh_personality());
    }

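    // Cleanup kinds are only needed by funclet-based exception handling.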
    let cleanup_kinds =
        base::wants_new_eh_instructions(tcx.sess).then(|| analyze::cleanup_kinds(&mir));

    let cached_llbbs: IndexVec<mir::BasicBlock, CachedLlbb<Bx::BasicBlock>> =
        mir.basic_blocks
            .indices()
            .map(|bb| {
                if bb == mir::START_BLOCK { CachedLlbb::Some(start_llbb) } else { CachedLlbb::None }
            })
            .collect();

    let mut fx = FunctionCx {
        instance,
        mir,
        llfn,
        fn_abi,
        cx,
        personality_slot: None,
        cached_llbbs,
        unreachable_block: None,
        terminate_block: None,
        cleanup_kinds,
        landing_pads: IndexVec::from_elem(None, &mir.basic_blocks),
        funclets: IndexVec::from_fn_n(|_| None, mir.basic_blocks.len()),
        cold_blocks: find_cold_blocks(tcx, mir),
        locals: locals::Locals::empty(),
        debug_context,
        per_local_var_debug_info: None,
        caller_location: None,
    };

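    // Compute debuginfo for every user variable up front, partitioned by
    // local; debuginfo for constants is introduced separately below.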
    let (per_local_var_debug_info, consts_debug_info) =
        fx.compute_per_local_var_debug_info(&mut start_bx).unzip();
    fx.per_local_var_debug_info = per_local_var_debug_info;

    let traversal_order = traversal::mono_reachable_reverse_postorder(mir, tcx, instance);
    let memory_locals = analyze::non_ssa_locals(&fx, &traversal_order);

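    // Allocate backing storage (or operand placeholders) for the return
    // place, the arguments, and every var and temp.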
    let local_values = {
        let args = arg_local_refs(&mut start_bx, &mut fx, &memory_locals);

        let mut allocate_local = |local: Local| {
            let decl = &mir.local_decls[local];
            let layout = start_bx.layout_of(fx.monomorphize(decl.ty));
            assert!(!layout.ty.has_erasable_regions());

            if local == mir::RETURN_PLACE {
                match fx.fn_abi.ret.mode {
                    PassMode::Indirect { .. } => {
                        debug!("alloc: {:?} (return place) -> place", local);
                        let llretptr = start_bx.get_param(0);
                        return LocalRef::Place(PlaceRef::new_sized(llretptr, layout));
                    }
                    PassMode::Cast { ref cast, .. } => {
                        debug!("alloc: {:?} (return place) -> place", local);
                        let size = cast.size(&start_bx);
                        return LocalRef::Place(PlaceRef::alloca_size(&mut start_bx, size, layout));
                    }
                    _ => {}
                };
            }

            if memory_locals.contains(local) {
                debug!("alloc: {:?} -> place", local);
                if layout.is_unsized() {
                    LocalRef::UnsizedPlace(PlaceRef::alloca_unsized_indirect(&mut start_bx, layout))
                } else {
                    LocalRef::Place(PlaceRef::alloca(&mut start_bx, layout))
                }
            } else {
                debug!("alloc: {:?} -> operand", local);
                LocalRef::new_operand(layout)
            }
        };

        let retptr = allocate_local(mir::RETURN_PLACE);
        iter::once(retptr)
            .chain(args.into_iter())
            .chain(mir.vars_and_temps_iter().map(allocate_local))
            .collect()
    };
    fx.initialize_locals(local_values);

    fx.debug_introduce_locals(&mut start_bx, consts_debug_info.unwrap_or_default());

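    // If the backend supports coverage instrumentation, perform any
    // start-of-function coverage setup.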
    start_bx.init_coverage(instance);

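    // Each basic block gets its own builder in `codegen_block`, so drop the
    // start block's builder rather than keeping two alive at once.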
    drop(start_bx);

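    // Codegen the reachable blocks in traversal order, recording which
    // blocks were never reached.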
    let mut unreached_blocks = DenseBitSet::new_filled(mir.basic_blocks.len());
    for bb in traversal_order {
        fx.codegen_block(bb);
        unreached_blocks.remove(bb);
    }

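    // Blocks the traversal skipped can still be named as branch targets, so
    // emit each of them as a bare `unreachable`.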
    for bb in unreached_blocks.iter() {
        fx.codegen_block_as_unreachable(bb);
    }
}

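/// Rewrites `.use` (ergonomic clone) calls whose argument is a shared
/// reference to a `Copy` type into plain copies followed by a `Goto`,
/// avoiding the call to `clone`.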
fn optimize_use_clone<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    cx: &'a Bx::CodegenCx,
    mut mir: Body<'tcx>,
) -> Body<'tcx> {
    let tcx = cx.tcx();

    if tcx.features().ergonomic_clones() {
        for bb in mir.basic_blocks.as_mut() {
            let mir::TerminatorKind::Call {
                args,
                destination,
                target,
                call_source: mir::CallSource::Use,
                ..
            } = &bb.terminator().kind
            else {
                continue;
            };

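            // `CallSource::Use` calls always have exactly one argument.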
            assert_eq!(args.len(), 1);
            let arg = &args[0];

            let arg_ty = arg.node.ty(&mir.local_decls, tcx);

            let ty::Ref(_region, inner_ty, mir::Mutability::Not) = *arg_ty.kind() else { continue };

            if !tcx.type_is_copy_modulo_regions(cx.typing_env(), inner_ty) {
                continue;
            }

            let Some(arg_place) = arg.node.place() else { continue };

            let destination_block = target.unwrap();

            bb.statements.push(mir::Statement {
                source_info: bb.terminator().source_info,
                kind: mir::StatementKind::Assign(Box::new((
                    *destination,
                    mir::Rvalue::Use(mir::Operand::Copy(
                        arg_place.project_deeper(&[mir::ProjectionElem::Deref], tcx),
                    )),
                ))),
            });

            bb.terminator_mut().kind = mir::TerminatorKind::Goto { target: destination_block };
        }
    }

    mir
}

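/// Produces a `LocalRef` for each MIR argument, reading the backend's
/// incoming parameter values and spilling them to memory when the argument
/// must live in a place.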
fn arg_local_refs<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
    bx: &mut Bx,
    fx: &mut FunctionCx<'a, 'tcx, Bx>,
    memory_locals: &DenseBitSet<mir::Local>,
) -> Vec<LocalRef<'tcx, Bx::Value>> {
    let mir = fx.mir;
    let mut idx = 0;
    let mut llarg_idx = fx.fn_abi.ret.is_indirect() as usize;

    let mut num_untupled = None;

    let codegen_fn_attrs = bx.tcx().codegen_fn_attrs(fx.instance.def_id());
    let naked = codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::NAKED);
    if naked {
        return vec![];
    }

    let args = mir
        .args_iter()
        .enumerate()
        .map(|(arg_index, local)| {
            let arg_decl = &mir.local_decls[local];
            let arg_ty = fx.monomorphize(arg_decl.ty);

            if Some(local) == mir.spread_arg {
                // The "rust-call" ABI spreads the final tuple argument into
                // one ABI argument per tuple field.
                let ty::Tuple(tupled_arg_tys) = arg_ty.kind() else {
                    bug!("spread argument isn't a tuple?!");
                };

                let layout = bx.layout_of(arg_ty);

                if layout.is_unsized() {
                    span_bug!(
                        arg_decl.source_info.span,
                        "\"rust-call\" ABI does not support unsized params",
                    );
                }

                let place = PlaceRef::alloca(bx, layout);
                for i in 0..tupled_arg_tys.len() {
                    let arg = &fx.fn_abi.args[idx];
                    idx += 1;
                    if let PassMode::Cast { pad_i32: true, .. } = arg.mode {
                        llarg_idx += 1;
                    }
                    let pr_field = place.project_field(bx, i);
                    bx.store_fn_arg(arg, &mut llarg_idx, pr_field);
                }
                assert_eq!(
                    None,
                    num_untupled.replace(tupled_arg_tys.len()),
                    "Replaced existing num_untupled"
                );

                return LocalRef::Place(place);
            }

            // A C-variadic function's `VaList` pseudo-argument comes after
            // all ABI arguments, so it does not appear in `fn_abi.args`.
            if fx.fn_abi.c_variadic && arg_index == fx.fn_abi.args.len() {
                let va_list = PlaceRef::alloca(bx, bx.layout_of(arg_ty));
                bx.va_start(va_list.val.llval);

                return LocalRef::Place(va_list);
            }

            let arg = &fx.fn_abi.args[idx];
            idx += 1;
            if let PassMode::Cast { pad_i32: true, .. } = arg.mode {
                llarg_idx += 1;
            }

            if !memory_locals.contains(local) {
                // The argument can stay an immediate operand; it never needs
                // to be spilled to an alloca.
                let local = |op| LocalRef::Operand(op);
                match arg.mode {
                    PassMode::Ignore => {
                        return local(OperandRef::zero_sized(arg.layout));
                    }
                    PassMode::Direct(_) => {
                        let llarg = bx.get_param(llarg_idx);
                        llarg_idx += 1;
                        return local(OperandRef::from_immediate_or_packed_pair(
                            bx, llarg, arg.layout,
                        ));
                    }
                    PassMode::Pair(..) => {
                        let (a, b) = (bx.get_param(llarg_idx), bx.get_param(llarg_idx + 1));
                        llarg_idx += 2;

                        return local(OperandRef {
                            val: OperandValue::Pair(a, b),
                            layout: arg.layout,
                        });
                    }
                    _ => {}
                }
            }

            match arg.mode {
                PassMode::Indirect { attrs, meta_attrs: None, on_stack: _ } => {
                    // If the incoming pointer is less aligned than the type
                    // requires, copy the argument into a properly aligned
                    // alloca instead of using the caller's memory.
                    if let Some(pointee_align) = attrs.pointee_align
                        && pointee_align < arg.layout.align.abi
                    {
                        let tmp = PlaceRef::alloca(bx, arg.layout);
                        bx.store_fn_arg(arg, &mut llarg_idx, tmp);
                        LocalRef::Place(tmp)
                    } else {
                        // Sized indirect arguments can use the caller's
                        // memory directly as the local's place.
                        let llarg = bx.get_param(llarg_idx);
                        llarg_idx += 1;
                        LocalRef::Place(PlaceRef::new_sized(llarg, arg.layout))
                    }
                }
                PassMode::Indirect { attrs: _, meta_attrs: Some(_), on_stack: _ } => {
                    // Unsized indirect arguments arrive as a (data, metadata)
                    // pointer pair, which is spilled so it can act as a place.
                    let llarg = bx.get_param(llarg_idx);
                    llarg_idx += 1;
                    let llextra = bx.get_param(llarg_idx);
                    llarg_idx += 1;
                    let indirect_operand = OperandValue::Pair(llarg, llextra);

                    let tmp = PlaceRef::alloca_unsized_indirect(bx, arg.layout);
                    indirect_operand.store(bx, tmp);
                    LocalRef::UnsizedPlace(tmp)
                }
                _ => {
                    let tmp = PlaceRef::alloca(bx, arg.layout);
                    bx.store_fn_arg(arg, &mut llarg_idx, tmp);
                    LocalRef::Place(tmp)
                }
            }
        })
        .collect::<Vec<_>>();

    if fx.instance.def.requires_caller_location(bx.tcx()) {
        // `#[track_caller]` functions receive a hidden caller-location
        // argument after all of their declared arguments.
        let mir_args = if let Some(num_untupled) = num_untupled {
            // Account for the tuple argument that was spread into its fields.
            args.len() - 1 + num_untupled
        } else {
            args.len()
        };
        assert_eq!(
            fx.fn_abi.args.len(),
            mir_args + 1,
            "#[track_caller] instance {:?} must have 1 more argument in its ABI than in its MIR",
            fx.instance
        );

        let arg = fx.fn_abi.args.last().unwrap();
        match arg.mode {
            PassMode::Direct(_) => (),
            _ => bug!("caller location must be PassMode::Direct, found {:?}", arg.mode),
        }

        fx.caller_location = Some(OperandRef {
            val: OperandValue::Immediate(bx.get_param(llarg_idx)),
            layout: arg.layout,
        });
    }

    args
}

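/// Marks a block as cold if it calls a `#[cold]` function, ends in
/// `unreachable`, or only ever flows into other cold blocks.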
fn find_cold_blocks<'tcx>(
    tcx: TyCtxt<'tcx>,
    mir: &mir::Body<'tcx>,
) -> IndexVec<mir::BasicBlock, bool> {
    let local_decls = &mir.local_decls;

    let mut cold_blocks: IndexVec<mir::BasicBlock, bool> =
        IndexVec::from_elem(false, &mir.basic_blocks);

    // Traverse in postorder, so that most successors are visited before
    // their predecessors.
    for (bb, bb_data) in traversal::postorder(mir) {
        let terminator = bb_data.terminator();

        match terminator.kind {
            // Blocks that call a `#[cold]` function are cold.
            mir::TerminatorKind::Call { ref func, .. }
            | mir::TerminatorKind::TailCall { ref func, .. }
                if let ty::FnDef(def_id, ..) = *func.ty(local_decls, tcx).kind()
                    && let attrs = tcx.codegen_fn_attrs(def_id)
                    && attrs.flags.contains(CodegenFnAttrFlags::COLD) =>
            {
                cold_blocks[bb] = true;
                continue;
            }

            // Blocks that end in `unreachable` are never executed, so they
            // are cold by definition.
            mir::TerminatorKind::Unreachable => {
                cold_blocks[bb] = true;
                continue;
            }

            _ => {}
        }

        // A block whose successors are all cold is itself cold.
        let mut succ = terminator.successors();
        if let Some(first) = succ.next()
            && cold_blocks[first]
            && succ.all(|s| cold_blocks[s])
        {
            cold_blocks[bb] = true;
        }
    }

    cold_blocks
}