1use std::iter;
2
3use rustc_abi::Primitive::Pointer;
4use rustc_abi::{BackendRepr, ExternAbi, PointerKind, Scalar, Size};
5use rustc_hir as hir;
6use rustc_hir::lang_items::LangItem;
7use rustc_middle::bug;
8use rustc_middle::query::Providers;
9use rustc_middle::ty::layout::{
10 FnAbiError, HasTyCtxt, HasTypingEnv, LayoutCx, LayoutOf, TyAndLayout, fn_can_unwind,
11};
12use rustc_middle::ty::{self, InstanceKind, Ty, TyCtxt};
13use rustc_session::config::OptLevel;
14use rustc_span::DUMMY_SP;
15use rustc_span::def_id::DefId;
16use rustc_target::callconv::{
17 AbiMap, ArgAbi, ArgAttribute, ArgAttributes, ArgExtension, FnAbi, PassMode,
18};
19use tracing::debug;
20
21pub(crate) fn provide(providers: &mut Providers) {
22 *providers = Providers { fn_abi_of_fn_ptr, fn_abi_of_instance, ..*providers };
23}
24
/// Computes the `ty::FnSig` from which the ABI of `instance` is derived.
///
/// This is not simply `instance.ty(..)`'s signature: shim and coroutine-like
/// instances need a synthesized signature — e.g. an explicit environment
/// parameter for closures, or a (possibly `Pin`-wrapped) `&mut Self` receiver
/// plus resume argument for coroutines.
#[tracing::instrument(level = "debug", skip(tcx, typing_env))]
fn fn_sig_for_fn_abi<'tcx>(
    tcx: TyCtxt<'tcx>,
    instance: ty::Instance<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
) -> ty::FnSig<'tcx> {
    // Thread-local shims take no arguments and return a pointer to the TLS value.
    if let InstanceKind::ThreadLocalShim(..) = instance.def {
        return tcx.mk_fn_sig(
            [],
            tcx.thread_local_ptr_ty(instance.def_id()),
            false,
            hir::Safety::Safe,
            rustc_abi::ExternAbi::Rust,
        );
    }

    let ty = instance.ty(tcx, typing_env);
    match *ty.kind() {
        ty::FnDef(def_id, args) => {
            let mut sig = tcx
                .instantiate_bound_regions_with_erased(tcx.fn_sig(def_id).instantiate(tcx, args));

            // A vtable shim receives `self` as `*mut Self` instead of the
            // declared receiver type, so rewrite the first input accordingly.
            if let ty::InstanceKind::VTableShim(..) = instance.def {
                let mut inputs_and_output = sig.inputs_and_output.to_vec();
                inputs_and_output[0] = Ty::new_mut_ptr(tcx, inputs_and_output[0]);
                sig.inputs_and_output = tcx.mk_type_list(&inputs_and_output);
            }

            sig
        }
        ty::Closure(def_id, args) => {
            let sig = tcx.instantiate_bound_regions_with_erased(args.as_closure().sig());
            // The closure's environment becomes an explicit first parameter,
            // shaped according to the closure kind (Fn/FnMut/FnOnce).
            let env_ty = tcx.closure_env_ty(
                Ty::new_closure(tcx, def_id, args),
                args.as_closure().kind(),
                tcx.lifetimes.re_erased,
            );

            tcx.mk_fn_sig(
                iter::once(env_ty).chain(sig.inputs().iter().cloned()),
                sig.output(),
                sig.c_variadic,
                sig.safety,
                sig.abi,
            )
        }
        ty::CoroutineClosure(def_id, args) => {
            let coroutine_ty = Ty::new_coroutine_closure(tcx, def_id, args);
            let sig = args.as_coroutine_closure().coroutine_closure_sig();

            let mut coroutine_kind = args.as_coroutine_closure().kind();

            let env_ty =
                if let InstanceKind::ConstructCoroutineInClosureShim { receiver_by_ref, .. } =
                    instance.def
                {
                    // The shim constructs the coroutine as if called by value
                    // (FnOnce), optionally taking the closure by reference.
                    coroutine_kind = ty::ClosureKind::FnOnce;

                    if receiver_by_ref {
                        Ty::new_imm_ref(tcx, tcx.lifetimes.re_erased, coroutine_ty)
                    } else {
                        coroutine_ty
                    }
                } else {
                    tcx.closure_env_ty(coroutine_ty, coroutine_kind, tcx.lifetimes.re_erased)
                };

            let sig = tcx.instantiate_bound_regions_with_erased(sig);

            tcx.mk_fn_sig(
                // Env first, then the (still-tupled) closure arguments.
                iter::once(env_ty).chain([sig.tupled_inputs_ty]),
                // The return type is the coroutine produced for this kind.
                sig.to_coroutine_given_kind_and_upvars(
                    tcx,
                    args.as_coroutine_closure().parent_args(),
                    tcx.coroutine_for_closure(def_id),
                    coroutine_kind,
                    tcx.lifetimes.re_erased,
                    args.as_coroutine_closure().tupled_upvars_ty(),
                    args.as_coroutine_closure().coroutine_captures_by_ref_ty(),
                ),
                sig.c_variadic,
                sig.safety,
                sig.abi,
            )
        }
        ty::Coroutine(did, args) => {
            let coroutine_kind = tcx.coroutine_kind(did).unwrap();
            let sig = args.as_coroutine().sig();

            // The coroutine is resumed through `&mut Self`...
            let env_ty = Ty::new_mut_ref(tcx, tcx.lifetimes.re_erased, ty);

            let pin_did = tcx.require_lang_item(LangItem::Pin, DUMMY_SP);
            let pin_adt_ref = tcx.adt_def(pin_did);
            let pin_args = tcx.mk_args(&[env_ty.into()]);
            // ...wrapped in `Pin` for every flavor except `gen` blocks, which
            // take the bare `&mut` receiver.
            let env_ty = match coroutine_kind {
                hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Gen, _) => {
                    env_ty
                }
                hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Async, _)
                | hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::AsyncGen, _)
                | hir::CoroutineKind::Coroutine(_) => Ty::new_adt(tcx, pin_adt_ref, pin_args),
            };

            // Each desugaring has its own resume/return shape.
            let (resume_ty, ret_ty) = match coroutine_kind {
                hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Async, _) => {
                    // Futures never yield a value; they return `Poll<T>` and
                    // are resumed with a `&mut Context<'_>`.
                    assert_eq!(sig.yield_ty, tcx.types.unit);

                    let poll_did = tcx.require_lang_item(LangItem::Poll, DUMMY_SP);
                    let poll_adt_ref = tcx.adt_def(poll_did);
                    let poll_args = tcx.mk_args(&[sig.return_ty.into()]);
                    let ret_ty = Ty::new_adt(tcx, poll_adt_ref, poll_args);

                    // Double-check the desugaring used `ResumeTy` as resume type.
                    #[cfg(debug_assertions)]
                    {
                        if let ty::Adt(resume_ty_adt, _) = sig.resume_ty.kind() {
                            let expected_adt =
                                tcx.adt_def(tcx.require_lang_item(LangItem::ResumeTy, DUMMY_SP));
                            assert_eq!(*resume_ty_adt, expected_adt);
                        } else {
                            panic!("expected `ResumeTy`, found `{:?}`", sig.resume_ty);
                        };
                    }
                    let context_mut_ref = Ty::new_task_context(tcx);

                    (Some(context_mut_ref), ret_ty)
                }
                hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Gen, _) => {
                    // `gen` blocks return `Option<yield_ty>` and take no
                    // resume argument (resume/return types are both unit).
                    let option_did = tcx.require_lang_item(LangItem::Option, DUMMY_SP);
                    let option_adt_ref = tcx.adt_def(option_did);
                    let option_args = tcx.mk_args(&[sig.yield_ty.into()]);
                    let ret_ty = Ty::new_adt(tcx, option_adt_ref, option_args);

                    assert_eq!(sig.return_ty, tcx.types.unit);
                    assert_eq!(sig.resume_ty, tcx.types.unit);

                    (None, ret_ty)
                }
                hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::AsyncGen, _) => {
                    // `async gen` returns its yield type directly and, like
                    // async, is resumed with a `&mut Context<'_>`.
                    assert_eq!(sig.return_ty, tcx.types.unit);

                    let ret_ty = sig.yield_ty;

                    // Same `ResumeTy` sanity check as for async above.
                    #[cfg(debug_assertions)]
                    {
                        if let ty::Adt(resume_ty_adt, _) = sig.resume_ty.kind() {
                            let expected_adt =
                                tcx.adt_def(tcx.require_lang_item(LangItem::ResumeTy, DUMMY_SP));
                            assert_eq!(*resume_ty_adt, expected_adt);
                        } else {
                            panic!("expected `ResumeTy`, found `{:?}`", sig.resume_ty);
                        };
                    }
                    let context_mut_ref = Ty::new_task_context(tcx);

                    (Some(context_mut_ref), ret_ty)
                }
                hir::CoroutineKind::Coroutine(_) => {
                    // Plain coroutines return `CoroutineState<yield, return>`
                    // and pass their declared resume type through unchanged.
                    let state_did = tcx.require_lang_item(LangItem::CoroutineState, DUMMY_SP);
                    let state_adt_ref = tcx.adt_def(state_did);
                    let state_args = tcx.mk_args(&[sig.yield_ty.into(), sig.return_ty.into()]);
                    let ret_ty = Ty::new_adt(tcx, state_adt_ref, state_args);

                    (Some(sig.resume_ty), ret_ty)
                }
            };

            if let Some(resume_ty) = resume_ty {
                tcx.mk_fn_sig(
                    [env_ty, resume_ty],
                    ret_ty,
                    false,
                    hir::Safety::Safe,
                    rustc_abi::ExternAbi::Rust,
                )
            } else {
                // `gen` coroutines have no resume argument: env only.
                tcx.mk_fn_sig(
                    [env_ty],
                    ret_ty,
                    false,
                    hir::Safety::Safe,
                    rustc_abi::ExternAbi::Rust,
                )
            }
        }
        _ => bug!("unexpected type {:?} in Instance::fn_sig", ty),
    }
}
243
244fn fn_abi_of_fn_ptr<'tcx>(
245 tcx: TyCtxt<'tcx>,
246 query: ty::PseudoCanonicalInput<'tcx, (ty::PolyFnSig<'tcx>, &'tcx ty::List<Ty<'tcx>>)>,
247) -> Result<&'tcx FnAbi<'tcx, Ty<'tcx>>, &'tcx FnAbiError<'tcx>> {
248 let ty::PseudoCanonicalInput { typing_env, value: (sig, extra_args) } = query;
249 fn_abi_new_uncached(
250 &LayoutCx::new(tcx, typing_env),
251 tcx.instantiate_bound_regions_with_erased(sig),
252 extra_args,
253 None,
254 )
255}
256
257fn fn_abi_of_instance<'tcx>(
258 tcx: TyCtxt<'tcx>,
259 query: ty::PseudoCanonicalInput<'tcx, (ty::Instance<'tcx>, &'tcx ty::List<Ty<'tcx>>)>,
260) -> Result<&'tcx FnAbi<'tcx, Ty<'tcx>>, &'tcx FnAbiError<'tcx>> {
261 let ty::PseudoCanonicalInput { typing_env, value: (instance, extra_args) } = query;
262 fn_abi_new_uncached(
263 &LayoutCx::new(tcx, typing_env),
264 fn_sig_for_fn_abi(tcx, instance, typing_env),
265 extra_args,
266 Some(instance),
267 )
268}
269
/// Computes parameter attributes (`noundef`, `nonnull`, `noalias`, …) for one
/// scalar component of a Rust-ABI argument or return value.
///
/// `offset` is the position of `scalar` within `layout`. `drop_target_pointee`
/// is `Some` when this scalar is the pointer argument of `drop_in_place`,
/// carrying the pointee type so the pointer can be treated like `&mut`.
fn arg_attrs_for_rust_scalar<'tcx>(
    cx: LayoutCx<'tcx>,
    scalar: Scalar,
    layout: TyAndLayout<'tcx>,
    offset: Size,
    is_return: bool,
    drop_target_pointee: Option<Ty<'tcx>>,
) -> ArgAttributes {
    let mut attrs = ArgAttributes::new();

    // Booleans: zero-extend to the register width and mark always-initialized.
    if scalar.is_bool() {
        attrs.ext(ArgExtension::Zext);
        attrs.set(ArgAttribute::NoUndef);
        return attrs;
    }

    // Any scalar that may not be left uninitialized gets `noundef`.
    if !scalar.is_uninit_valid() {
        attrs.set(ArgAttribute::NoUndef);
    }

    // Everything below applies only to pointer-valued scalars.
    let Scalar::Initialized { value: Pointer(_), valid_range } = scalar else { return attrs };

    // `nonnull` if the valid range excludes 0, or if this is the
    // `drop_in_place` target (which must point at a live value).
    if !valid_range.contains(0) || drop_target_pointee.is_some() {
        attrs.set(ArgAttribute::NonNull);
    }

    let tcx = cx.tcx();

    if let Some(pointee) = layout.pointee_info_at(&cx, offset) {
        let kind = if let Some(kind) = pointee.safe {
            Some(kind)
        } else if let Some(pointee) = drop_target_pointee {
            // Treat the raw `drop_in_place` pointer like a mutable reference
            // to its pointee for attribute purposes.
            Some(PointerKind::MutableRef { unpin: pointee.is_unpin(tcx, cx.typing_env) })
        } else {
            None
        };
        if let Some(kind) = kind {
            // Cap the claimed alignment at what the target reliably provides.
            attrs.pointee_align =
                Some(pointee.align.min(cx.tcx().sess.target.max_reliable_alignment()));

            // Dereferenceable size: only frozen shared refs and `Unpin`
            // mutable refs get the full pointee size; boxes and the
            // non-frozen / non-unpin cases get zero (no dereferenceable).
            attrs.pointee_size = match kind {
                PointerKind::Box { .. }
                | PointerKind::SharedRef { frozen: false }
                | PointerKind::MutableRef { unpin: false } => Size::ZERO,
                PointerKind::SharedRef { frozen: true }
                | PointerKind::MutableRef { unpin: true } => pointee.size,
            };

            // Unstable flags gating `noalias` on boxes / mutable references.
            let noalias_for_box = tcx.sess.opts.unstable_opts.box_noalias;

            let noalias_mut_ref = tcx.sess.opts.unstable_opts.mutable_noalias;

            // `noalias`: frozen shared refs always; `Unpin` mutable refs and
            // global `Unpin` boxes only when the respective flag allows it...
            let no_alias = match kind {
                PointerKind::SharedRef { frozen } => frozen,
                PointerKind::MutableRef { unpin } => unpin && noalias_mut_ref,
                PointerKind::Box { unpin, global } => unpin && global && noalias_for_box,
            };
            // ...and never on return values.
            if no_alias && !is_return {
                attrs.set(ArgAttribute::NoAlias);
            }

            // Frozen shared references are read-only for the callee.
            if matches!(kind, PointerKind::SharedRef { frozen: true }) && !is_return {
                attrs.set(ArgAttribute::ReadOnly);
                attrs.set(ArgAttribute::CapturesReadOnly);
            }
        }
    }

    attrs
}
366
/// Asserts internal invariants of a freshly computed `FnAbi`, checking every
/// argument and the return place.
fn fn_abi_sanity_check<'tcx>(
    cx: &LayoutCx<'tcx>,
    fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
    spec_abi: ExternAbi,
) {
    // Checks a single `ArgAbi` (argument or return place).
    fn fn_arg_sanity_check<'tcx>(
        cx: &LayoutCx<'tcx>,
        fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
        spec_abi: ExternAbi,
        arg: &ArgAbi<'tcx, Ty<'tcx>>,
    ) {
        let tcx = cx.tcx();

        if spec_abi.is_rustic_abi() {
            // Rustic ABIs ignore ZSTs and never use byval-on-stack passing.
            if arg.layout.is_zst() {
                assert!(arg.is_ignore());
            }
            if let PassMode::Indirect { on_stack, .. } = arg.mode {
                assert!(!on_stack, "rust abi shouldn't use on_stack");
            }
        }

        match &arg.mode {
            PassMode::Ignore => {
                // Only ZSTs may be ignored.
                assert!(arg.layout.is_zst());
            }
            PassMode::Direct(_) => {
                // `Direct` is for single scalars and SIMD vectors; aggregates
                // are only permitted under the "unadjusted" escape-hatch ABI.
                match arg.layout.backend_repr {
                    BackendRepr::Scalar(_) | BackendRepr::SimdVector { .. } => {}
                    BackendRepr::ScalarPair(..) => {
                        panic!("`PassMode::Direct` used for ScalarPair type {}", arg.layout.ty)
                    }
                    BackendRepr::Memory { sized } => {
                        assert!(sized, "`PassMode::Direct` for unsized type in ABI: {:#?}", fn_abi);

                        assert!(
                            matches!(spec_abi, ExternAbi::Unadjusted),
                            "`PassMode::Direct` for aggregates only allowed for \"unadjusted\"\n\
                             Problematic type: {:#?}",
                            arg.layout,
                        );
                    }
                }
            }
            PassMode::Pair(_, _) => {
                // `Pair` must match a two-scalar layout.
                assert!(
                    matches!(arg.layout.backend_repr, BackendRepr::ScalarPair(..)),
                    "PassMode::Pair for type {}",
                    arg.layout.ty
                );
            }
            PassMode::Cast { .. } => {
                // Cast passing only works for sized values.
                assert!(arg.layout.is_sized());
            }
            PassMode::Indirect { meta_attrs: None, .. } => {
                // No metadata pointer: a thin (sized) indirect argument.
                assert!(arg.layout.is_sized());
            }
            PassMode::Indirect { meta_attrs: Some(_), on_stack, .. } => {
                // Metadata implies an unsized argument, which can't be byval.
                assert!(arg.layout.is_unsized() && !on_stack);
                // `extern type` tails carry no metadata, so they can never be
                // passed as unsized arguments.
                let tail = tcx.struct_tail_for_codegen(arg.layout.ty, cx.typing_env);
                if matches!(tail.kind(), ty::Foreign(..)) {
                    panic!("unsized arguments must not be `extern` types");
                }
            }
        }
    }

    for arg in fn_abi.args.iter() {
        fn_arg_sanity_check(cx, fn_abi, spec_abi, arg);
    }
    fn_arg_sanity_check(cx, fn_abi, spec_abi, &fn_abi.ret);
}
466
/// Computes a fresh (uncached) `FnAbi` for `sig`.
///
/// `instance` is `Some` when the callee is known, enabling caller-location
/// propagation, drop-target attributes, and instance-specific adjustments.
/// `extra_args` carries additional trailing argument types (asserted empty
/// unless the signature is C-variadic).
#[tracing::instrument(level = "debug", skip(cx, instance))]
fn fn_abi_new_uncached<'tcx>(
    cx: &LayoutCx<'tcx>,
    sig: ty::FnSig<'tcx>,
    extra_args: &[Ty<'tcx>],
    instance: Option<ty::Instance<'tcx>>,
) -> Result<&'tcx FnAbi<'tcx, Ty<'tcx>>, &'tcx FnAbiError<'tcx>> {
    let tcx = cx.tcx();
    let (caller_location, determined_fn_def_id, is_virtual_call) = if let Some(instance) = instance
    {
        let is_virtual_call = matches!(instance.def, ty::InstanceKind::Virtual(..));
        let is_tls_shim_call = matches!(instance.def, ty::InstanceKind::ThreadLocalShim(_));
        (
            // `#[track_caller]` callees get a hidden trailing location argument.
            instance.def.requires_caller_location(tcx).then(|| tcx.caller_location_ty()),
            // For virtual and TLS-shim calls the `def_id` does not identify
            // the code that actually runs, so don't use it for attribute
            // deduction or unwind analysis.
            if is_virtual_call || is_tls_shim_call { None } else { Some(instance.def_id()) },
            is_virtual_call,
        )
    } else {
        (None, None, false)
    };
    let sig = tcx.normalize_erasing_regions(cx.typing_env, sig);

    let abi_map = AbiMap::from_target(&tcx.sess.target);
    let conv = abi_map.canonize_abi(sig.abi, sig.c_variadic).unwrap();

    let mut inputs = sig.inputs();
    let extra_args = if sig.abi == ExternAbi::RustCall {
        // "rust-call" functions tuple their trailing arguments; untuple the
        // last input so each element becomes its own ABI argument.
        assert!(!sig.c_variadic && extra_args.is_empty());

        if let Some(input) = sig.inputs().last()
            && let ty::Tuple(tupled_arguments) = input.kind()
        {
            inputs = &sig.inputs()[0..sig.inputs().len() - 1];
            tupled_arguments
        } else {
            bug!(
                "argument to function with \"rust-call\" ABI \
                 is not a tuple"
            );
        }
    } else {
        assert!(sig.c_variadic || extra_args.is_empty());
        extra_args
    };

    // `(async_)drop_in_place` needs its pointer argument treated specially
    // (see `drop_target_pointee` below).
    let is_drop_in_place = determined_fn_def_id.is_some_and(|def_id| {
        tcx.is_lang_item(def_id, LangItem::DropInPlace)
            || tcx.is_lang_item(def_id, LangItem::AsyncDropInPlace)
    });

    // Computes the `ArgAbi` for one input (`arg_idx == Some(i)`) or for the
    // return place (`arg_idx == None`).
    let arg_of = |ty: Ty<'tcx>, arg_idx: Option<usize>| -> Result<_, &'tcx FnAbiError<'tcx>> {
        let span = tracing::debug_span!("arg_of");
        let _entered = span.enter();
        let is_return = arg_idx.is_none();
        let is_drop_target = is_drop_in_place && arg_idx == Some(0);
        let drop_target_pointee = is_drop_target.then(|| match ty.kind() {
            ty::RawPtr(ty, _) => *ty,
            _ => bug!("argument to drop_in_place is not a raw ptr: {:?}", ty),
        });

        // Layout errors are arena-allocated so they can be returned by ref.
        let layout = cx.layout_of(ty).map_err(|err| &*tcx.arena.alloc(FnAbiError::Layout(*err)))?;
        let layout = if is_virtual_call && arg_idx == Some(0) {
            // Virtual calls pass only the data half of the wide `self` pointer.
            make_thin_self_ptr(cx, layout)
        } else {
            layout
        };

        let mut arg = ArgAbi::new(cx, layout, |layout, scalar, offset| {
            arg_attrs_for_rust_scalar(*cx, scalar, *layout, offset, is_return, drop_target_pointee)
        });

        // ZSTs are never passed at all.
        if arg.layout.is_zst() {
            arg.mode = PassMode::Ignore;
        }

        Ok(arg)
    };

    let mut fn_abi = FnAbi {
        ret: arg_of(sig.output(), None)?,
        // Declared inputs, then untupled/variadic extras, then the hidden
        // caller-location argument (if any).
        args: inputs
            .iter()
            .copied()
            .chain(extra_args.iter().copied())
            .chain(caller_location)
            .enumerate()
            .map(|(i, ty)| arg_of(ty, Some(i)))
            .collect::<Result<_, _>>()?,
        c_variadic: sig.c_variadic,
        // Only the declared (non-variadic) inputs count as fixed.
        fixed_count: inputs.len() as u32,
        conv,
        can_unwind: fn_can_unwind(
            tcx,
            determined_fn_def_id,
            sig.abi,
        ),
    };
    fn_abi_adjust_for_abi(
        cx,
        &mut fn_abi,
        sig.abi,
        determined_fn_def_id,
    );
    debug!("fn_abi_new_uncached = {:?}", fn_abi);
    fn_abi_sanity_check(cx, &fn_abi, sig.abi);
    Ok(tcx.arena.alloc(fn_abi))
}
583
/// Adjusts a freshly built `FnAbi` for the requested ABI: "unadjusted" forces
/// everything to `Direct`, rustic ABIs get the Rust-specific adjustments plus
/// deduced parameter attributes, and all other ABIs defer to the target's
/// foreign-ABI rules.
#[tracing::instrument(level = "trace", skip(cx))]
fn fn_abi_adjust_for_abi<'tcx>(
    cx: &LayoutCx<'tcx>,
    fn_abi: &mut FnAbi<'tcx, Ty<'tcx>>,
    abi: ExternAbi,
    fn_def_id: Option<DefId>,
) {
    if abi == ExternAbi::Unadjusted {
        // The "unadjusted" ABI passes every value directly, with no target
        // adjustments; it only supports sized arguments.
        fn unadjust<'tcx>(arg: &mut ArgAbi<'tcx, Ty<'tcx>>) {
            if matches!(arg.layout.backend_repr, BackendRepr::Memory { .. }) {
                assert!(
                    arg.layout.backend_repr.is_sized(),
                    "'unadjusted' ABI does not support unsized arguments"
                );
            }
            arg.make_direct_deprecated();
        }

        unadjust(&mut fn_abi.ret);
        for arg in fn_abi.args.iter_mut() {
            unadjust(arg);
        }
        return;
    }

    let tcx = cx.tcx();

    if abi.is_rustic_abi() {
        fn_abi.adjust_for_rust_abi(cx);

        // Parameter attributes are only deduced when optimizing and not
        // compiling incrementally; otherwise fall back to an empty slice.
        let deduced_param_attrs =
            if tcx.sess.opts.optimize != OptLevel::No && tcx.sess.opts.incremental.is_none() {
                fn_def_id.map(|fn_def_id| tcx.deduced_param_attrs(fn_def_id)).unwrap_or_default()
            } else {
                &[]
            };

        for (arg_idx, arg) in fn_abi.args.iter_mut().enumerate() {
            if arg.is_ignore() {
                continue;
            }

            // Only by-pointer (indirect) arguments can take the deduced
            // `readonly` attribute.
            if let &mut PassMode::Indirect { ref mut attrs, .. } = &mut arg.mode {
                if let Some(deduced_param_attrs) = deduced_param_attrs.get(arg_idx)
                    && deduced_param_attrs.read_only
                {
                    attrs.regular.insert(ArgAttribute::ReadOnly);
                    debug!("added deduced read-only attribute");
                }
            }
        }
    } else {
        fn_abi.adjust_for_foreign_abi(cx, abi);
    }
}
654
/// For a virtual (`InstanceKind::Virtual`) call, rewrites the layout of the
/// `self` argument so that only a thin data pointer is passed: the returned
/// layout keeps the wide-pointer *type* (for attribute computation) but uses
/// the layout of `*mut ()`.
#[tracing::instrument(level = "debug", skip(cx))]
fn make_thin_self_ptr<'tcx>(
    cx: &(impl HasTyCtxt<'tcx> + HasTypingEnv<'tcx>),
    layout: TyAndLayout<'tcx>,
) -> TyAndLayout<'tcx> {
    let tcx = cx.tcx();
    let wide_pointer_ty = if layout.is_unsized() {
        // An unsized `self` is passed by pointer; pretend it is `*mut Self`.
        Ty::new_mut_ptr(tcx, layout.ty)
    } else {
        // A sized receiver must itself be (or wrap) a pointer.
        match layout.backend_repr {
            BackendRepr::ScalarPair(..) | BackendRepr::Scalar(..) => (),
            _ => bug!("receiver type has unsupported layout: {:?}", layout),
        }

        // Receivers may wrap the wide pointer (e.g. smart-pointer types);
        // peel fields until we reach the raw pointer / reference itself.
        let mut wide_pointer_layout = layout;
        while !wide_pointer_layout.ty.is_raw_ptr() && !wide_pointer_layout.ty.is_ref() {
            wide_pointer_layout = wide_pointer_layout
                .non_1zst_field(cx)
                .expect("not exactly one non-1-ZST field in a `DispatchFromDyn` type")
                .1
        }

        wide_pointer_layout.ty
    };

    // Thin-pointer layout to substitute in; `*mut ()` layout is independent
    // of the typing environment, so a fully monomorphized query suffices.
    let unit_ptr_ty = Ty::new_mut_ptr(tcx, tcx.types.unit);

    TyAndLayout {
        ty: wide_pointer_ty,

        ..tcx.layout_of(ty::TypingEnv::fully_monomorphized().as_query_input(unit_ptr_ty)).unwrap()
    }
}