1use std::iter;
2
3use rustc_abi::Primitive::Pointer;
4use rustc_abi::{BackendRepr, ExternAbi, PointerKind, Scalar, Size};
5use rustc_hir as hir;
6use rustc_hir::lang_items::LangItem;
7use rustc_middle::bug;
8use rustc_middle::query::Providers;
9use rustc_middle::ty::layout::{
10 FnAbiError, HasTyCtxt, HasTypingEnv, LayoutCx, LayoutOf, TyAndLayout, fn_can_unwind,
11};
12use rustc_middle::ty::{self, InstanceKind, Ty, TyCtxt};
13use rustc_session::config::OptLevel;
14use rustc_span::def_id::DefId;
15use rustc_target::callconv::{
16 ArgAbi, ArgAttribute, ArgAttributes, ArgExtension, Conv, FnAbi, PassMode, RiscvInterruptKind,
17};
18use tracing::debug;
19
20pub(crate) fn provide(providers: &mut Providers) {
21 *providers = Providers { fn_abi_of_fn_ptr, fn_abi_of_instance, ..*providers };
22}
23
/// Computes the `FnSig` that should be used for ABI computation of `instance`.
///
/// This is not always the signature attached to the instance's `def_id`:
/// shims, closures, coroutine-closures and coroutines all get a synthetic
/// signature assembled here (e.g. the coroutine resume function takes the
/// coroutine state plus an optional resume argument).
#[tracing::instrument(level = "debug", skip(tcx, typing_env))]
fn fn_sig_for_fn_abi<'tcx>(
    tcx: TyCtxt<'tcx>,
    instance: ty::Instance<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
) -> ty::FnSig<'tcx> {
    // Thread-local shims take no arguments and return a pointer to the
    // thread-local value; they use the "unadjusted" ABI.
    if let InstanceKind::ThreadLocalShim(..) = instance.def {
        return tcx.mk_fn_sig(
            [],
            tcx.thread_local_ptr_ty(instance.def_id()),
            false,
            hir::Safety::Safe,
            rustc_abi::ExternAbi::Unadjusted,
        );
    }

    let ty = instance.ty(tcx, typing_env);
    match *ty.kind() {
        ty::FnDef(def_id, args) => {
            // Start from the declared signature, with bound regions erased
            // (regions are irrelevant for ABI purposes).
            let mut sig = tcx
                .instantiate_bound_regions_with_erased(tcx.fn_sig(def_id).instantiate(tcx, args));

            // A vtable shim receives `*mut Self` in place of the method's
            // original receiver type, so rewrite the first input.
            if let ty::InstanceKind::VTableShim(..) = instance.def {
                let mut inputs_and_output = sig.inputs_and_output.to_vec();
                inputs_and_output[0] = Ty::new_mut_ptr(tcx, inputs_and_output[0]);
                sig.inputs_and_output = tcx.mk_type_list(&inputs_and_output);
            }

            sig
        }
        ty::Closure(def_id, args) => {
            // Prepend the closure environment (self/&self/&mut self depending
            // on the closure kind) to the declared inputs.
            let sig = tcx.instantiate_bound_regions_with_erased(args.as_closure().sig());
            let env_ty = tcx.closure_env_ty(
                Ty::new_closure(tcx, def_id, args),
                args.as_closure().kind(),
                tcx.lifetimes.re_erased,
            );

            tcx.mk_fn_sig(
                iter::once(env_ty).chain(sig.inputs().iter().cloned()),
                sig.output(),
                sig.c_variadic,
                sig.safety,
                sig.abi,
            )
        }
        ty::CoroutineClosure(def_id, args) => {
            let coroutine_ty = Ty::new_coroutine_closure(tcx, def_id, args);
            let sig = args.as_coroutine_closure().coroutine_closure_sig();

            let mut coroutine_kind = args.as_coroutine_closure().kind();

            // For the "construct coroutine in closure" shim the receiver is
            // taken by value (or by immutable ref when `receiver_by_ref`),
            // and the closure is treated as `FnOnce`; otherwise derive the
            // environment type from the closure kind as usual.
            let env_ty =
                if let InstanceKind::ConstructCoroutineInClosureShim { receiver_by_ref, .. } =
                    instance.def
                {
                    coroutine_kind = ty::ClosureKind::FnOnce;

                    if receiver_by_ref {
                        Ty::new_imm_ref(tcx, tcx.lifetimes.re_erased, coroutine_ty)
                    } else {
                        coroutine_ty
                    }
                } else {
                    tcx.closure_env_ty(coroutine_ty, coroutine_kind, tcx.lifetimes.re_erased)
                };

            let sig = tcx.instantiate_bound_regions_with_erased(sig);

            // The call returns the coroutine constructed for this closure and
            // kind; inputs are the env plus the (still tupled) argument tuple.
            tcx.mk_fn_sig(
                iter::once(env_ty).chain([sig.tupled_inputs_ty]),
                sig.to_coroutine_given_kind_and_upvars(
                    tcx,
                    args.as_coroutine_closure().parent_args(),
                    tcx.coroutine_for_closure(def_id),
                    coroutine_kind,
                    tcx.lifetimes.re_erased,
                    args.as_coroutine_closure().tupled_upvars_ty(),
                    args.as_coroutine_closure().coroutine_captures_by_ref_ty(),
                ),
                sig.c_variadic,
                sig.safety,
                sig.abi,
            )
        }
        ty::Coroutine(did, args) => {
            // Build the signature of the coroutine's resume function.
            let coroutine_kind = tcx.coroutine_kind(did).unwrap();
            let sig = args.as_coroutine().sig();

            let env_ty = Ty::new_mut_ref(tcx, tcx.lifetimes.re_erased, ty);

            let pin_did = tcx.require_lang_item(LangItem::Pin, None);
            let pin_adt_ref = tcx.adt_def(pin_did);
            let pin_args = tcx.mk_args(&[env_ty.into()]);
            // All coroutine flavors except `gen` take their state pinned:
            // `Pin<&mut Self>` rather than a bare `&mut Self`.
            let env_ty = match coroutine_kind {
                hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Gen, _) => {
                    env_ty
                }
                hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Async, _)
                | hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::AsyncGen, _)
                | hir::CoroutineKind::Coroutine(_) => Ty::new_adt(tcx, pin_adt_ref, pin_args),
            };

            // Resume argument and return type depend on the desugaring:
            //   async     -> fn(Pin<&mut Self>, &mut Context<'_>) -> Poll<Ret>
            //   gen       -> fn(&mut Self) -> Option<Yield>
            //   async gen -> fn(Pin<&mut Self>, &mut Context<'_>) -> Yield
            //   coroutine -> fn(Pin<&mut Self>, Resume) -> CoroutineState<Yield, Ret>
            let (resume_ty, ret_ty) = match coroutine_kind {
                hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Async, _) => {
                    // Async blocks never yield a value.
                    assert_eq!(sig.yield_ty, tcx.types.unit);

                    let poll_did = tcx.require_lang_item(LangItem::Poll, None);
                    let poll_adt_ref = tcx.adt_def(poll_did);
                    let poll_args = tcx.mk_args(&[sig.return_ty.into()]);
                    let ret_ty = Ty::new_adt(tcx, poll_adt_ref, poll_args);

                    // Sanity-check that the declared resume type really is
                    // the `ResumeTy` lang item (it is replaced by
                    // `&mut Context<'_>` below).
                    #[cfg(debug_assertions)]
                    {
                        if let ty::Adt(resume_ty_adt, _) = sig.resume_ty.kind() {
                            let expected_adt =
                                tcx.adt_def(tcx.require_lang_item(LangItem::ResumeTy, None));
                            assert_eq!(*resume_ty_adt, expected_adt);
                        } else {
                            panic!("expected `ResumeTy`, found `{:?}`", sig.resume_ty);
                        };
                    }
                    let context_mut_ref = Ty::new_task_context(tcx);

                    (Some(context_mut_ref), ret_ty)
                }
                hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::Gen, _) => {
                    // `gen` blocks return `Option<Yield>` and take no resume
                    // argument.
                    let option_did = tcx.require_lang_item(LangItem::Option, None);
                    let option_adt_ref = tcx.adt_def(option_did);
                    let option_args = tcx.mk_args(&[sig.yield_ty.into()]);
                    let ret_ty = Ty::new_adt(tcx, option_adt_ref, option_args);

                    assert_eq!(sig.return_ty, tcx.types.unit);
                    assert_eq!(sig.resume_ty, tcx.types.unit);

                    (None, ret_ty)
                }
                hir::CoroutineKind::Desugared(hir::CoroutineDesugaring::AsyncGen, _) => {
                    assert_eq!(sig.return_ty, tcx.types.unit);

                    // For `async gen` the yield type already carries the full
                    // per-resume result, so it is used as the return type.
                    let ret_ty = sig.yield_ty;

                    // Same `ResumeTy` sanity check as in the async case.
                    #[cfg(debug_assertions)]
                    {
                        if let ty::Adt(resume_ty_adt, _) = sig.resume_ty.kind() {
                            let expected_adt =
                                tcx.adt_def(tcx.require_lang_item(LangItem::ResumeTy, None));
                            assert_eq!(*resume_ty_adt, expected_adt);
                        } else {
                            panic!("expected `ResumeTy`, found `{:?}`", sig.resume_ty);
                        };
                    }
                    let context_mut_ref = Ty::new_task_context(tcx);

                    (Some(context_mut_ref), ret_ty)
                }
                hir::CoroutineKind::Coroutine(_) => {
                    // Plain coroutines resume with the user-declared resume
                    // type and return `CoroutineState<Yield, Return>`.
                    let state_did = tcx.require_lang_item(LangItem::CoroutineState, None);
                    let state_adt_ref = tcx.adt_def(state_did);
                    let state_args = tcx.mk_args(&[sig.yield_ty.into(), sig.return_ty.into()]);
                    let ret_ty = Ty::new_adt(tcx, state_adt_ref, state_args);

                    (Some(sig.resume_ty), ret_ty)
                }
            };

            if let Some(resume_ty) = resume_ty {
                tcx.mk_fn_sig(
                    [env_ty, resume_ty],
                    ret_ty,
                    false,
                    hir::Safety::Safe,
                    rustc_abi::ExternAbi::Rust,
                )
            } else {
                // `gen` coroutines: no resume argument at all.
                tcx.mk_fn_sig(
                    [env_ty],
                    ret_ty,
                    false,
                    hir::Safety::Safe,
                    rustc_abi::ExternAbi::Rust,
                )
            }
        }
        _ => bug!("unexpected type {:?} in Instance::fn_sig", ty),
    }
}
242
243#[inline]
244fn conv_from_spec_abi(tcx: TyCtxt<'_>, abi: ExternAbi, c_variadic: bool) -> Conv {
245 use rustc_abi::ExternAbi::*;
246 match tcx.sess.target.adjust_abi(abi, c_variadic) {
247 Rust | RustCall => Conv::Rust,
248
249 RustCold => Conv::PreserveMost,
252
253 System { .. } => bug!("system abi should be selected elsewhere"),
255 EfiApi => bug!("eficall abi should be selected elsewhere"),
256
257 Stdcall { .. } => Conv::X86Stdcall,
258 Fastcall { .. } => Conv::X86Fastcall,
259 Vectorcall { .. } => Conv::X86VectorCall,
260 Thiscall { .. } => Conv::X86ThisCall,
261 C { .. } => Conv::C,
262 Unadjusted => Conv::C,
263 Win64 { .. } => Conv::X86_64Win64,
264 SysV64 { .. } => Conv::X86_64SysV,
265 Aapcs { .. } => Conv::ArmAapcs,
266 CCmseNonSecureCall => Conv::CCmseNonSecureCall,
267 CCmseNonSecureEntry => Conv::CCmseNonSecureEntry,
268 PtxKernel => Conv::GpuKernel,
269 Msp430Interrupt => Conv::Msp430Intr,
270 X86Interrupt => Conv::X86Intr,
271 GpuKernel => Conv::GpuKernel,
272 AvrInterrupt => Conv::AvrInterrupt,
273 AvrNonBlockingInterrupt => Conv::AvrNonBlockingInterrupt,
274 RiscvInterruptM => Conv::RiscvInterrupt { kind: RiscvInterruptKind::Machine },
275 RiscvInterruptS => Conv::RiscvInterrupt { kind: RiscvInterruptKind::Supervisor },
276
277 Cdecl { .. } => Conv::C,
279 }
280}
281
282fn fn_abi_of_fn_ptr<'tcx>(
283 tcx: TyCtxt<'tcx>,
284 query: ty::PseudoCanonicalInput<'tcx, (ty::PolyFnSig<'tcx>, &'tcx ty::List<Ty<'tcx>>)>,
285) -> Result<&'tcx FnAbi<'tcx, Ty<'tcx>>, &'tcx FnAbiError<'tcx>> {
286 let ty::PseudoCanonicalInput { typing_env, value: (sig, extra_args) } = query;
287 fn_abi_new_uncached(
288 &LayoutCx::new(tcx, typing_env),
289 tcx.instantiate_bound_regions_with_erased(sig),
290 extra_args,
291 None,
292 )
293}
294
295fn fn_abi_of_instance<'tcx>(
296 tcx: TyCtxt<'tcx>,
297 query: ty::PseudoCanonicalInput<'tcx, (ty::Instance<'tcx>, &'tcx ty::List<Ty<'tcx>>)>,
298) -> Result<&'tcx FnAbi<'tcx, Ty<'tcx>>, &'tcx FnAbiError<'tcx>> {
299 let ty::PseudoCanonicalInput { typing_env, value: (instance, extra_args) } = query;
300 fn_abi_new_uncached(
301 &LayoutCx::new(tcx, typing_env),
302 fn_sig_for_fn_abi(tcx, instance, typing_env),
303 extra_args,
304 Some(instance),
305 )
306}
307
/// Adds Rust-specific attributes (`noundef`, `nonnull`, `noalias`,
/// `readonly`, pointee size/alignment) to a scalar component of an
/// argument or return value, based on Rust's type-level guarantees.
///
/// `drop_target_pointee` is `Some` when this scalar is the pointer argument
/// of `drop_in_place`, in which case it behaves like `&mut` to the pointee.
fn adjust_for_rust_scalar<'tcx>(
    cx: LayoutCx<'tcx>,
    attrs: &mut ArgAttributes,
    scalar: Scalar,
    layout: TyAndLayout<'tcx>,
    offset: Size,
    is_return: bool,
    drop_target_pointee: Option<Ty<'tcx>>,
) {
    // Booleans are always 0 or 1: zero-extend and mark as never-uninit.
    if scalar.is_bool() {
        attrs.ext(ArgExtension::Zext);
        attrs.set(ArgAttribute::NoUndef);
        return;
    }

    // Scalars whose valid range excludes uninitialized bytes get `noundef`.
    if !scalar.is_uninit_valid() {
        attrs.set(ArgAttribute::NoUndef);
    }

    // Everything below only applies to initialized pointer scalars.
    let Scalar::Initialized { value: Pointer(_), valid_range } = scalar else { return };

    // Null is outside the valid range, or this is `drop_in_place`'s pointer
    // (which must point at a live object): mark the pointer non-null.
    if !valid_range.contains(0) || drop_target_pointee.is_some() {
        attrs.set(ArgAttribute::NonNull);
    }

    let tcx = cx.tcx();

    if let Some(pointee) = layout.pointee_info_at(&cx, offset) {
        // Determine the pointer kind: either the type system already told us
        // it is a safe pointer, or this is drop_in_place's raw pointer, which
        // is treated like a mutable reference to the pointee.
        let kind = if let Some(kind) = pointee.safe {
            Some(kind)
        } else if let Some(pointee) = drop_target_pointee {
            Some(PointerKind::MutableRef { unpin: pointee.is_unpin(tcx, cx.typing_env) })
        } else {
            None
        };
        if let Some(kind) = kind {
            // Cap the advertised alignment at what the target can reliably
            // guarantee.
            attrs.pointee_align =
                Some(pointee.align.min(cx.tcx().sess.target.max_reliable_alignment()));

            // Only claim the pointee is dereferenceable for its full size
            // when interior mutability / self-references cannot invalidate
            // that claim (`Box` and `!Unpin`/non-frozen pointees get 0).
            attrs.pointee_size = match kind {
                PointerKind::Box { .. }
                | PointerKind::SharedRef { frozen: false }
                | PointerKind::MutableRef { unpin: false } => Size::ZERO,
                PointerKind::SharedRef { frozen: true }
                | PointerKind::MutableRef { unpin: true } => pointee.size,
            };

            // `-Z box-noalias` / `-Z mutable-noalias` gate the more
            // aggressive aliasing claims.
            let noalias_for_box = tcx.sess.opts.unstable_opts.box_noalias;

            let noalias_mut_ref = tcx.sess.opts.unstable_opts.mutable_noalias;

            // `&T` (no interior mutability) is always noalias; `&mut T` and
            // `Box<T>` only when `Unpin` (and, for Box, globally allocated)
            // and the corresponding flag is enabled.
            let no_alias = match kind {
                PointerKind::SharedRef { frozen } => frozen,
                PointerKind::MutableRef { unpin } => unpin && noalias_mut_ref,
                PointerKind::Box { unpin, global } => unpin && global && noalias_for_box,
            };
            // Returned pointers are not marked noalias here.
            if no_alias && !is_return {
                attrs.set(ArgAttribute::NoAlias);
            }

            // A frozen shared reference additionally guarantees the pointee
            // is not written through any alias for the call's duration.
            if matches!(kind, PointerKind::SharedRef { frozen: true }) && !is_return {
                attrs.set(ArgAttribute::ReadOnly);
            }
        }
    }
}
400
/// Asserts that the computed `FnAbi` is internally consistent: every
/// argument's `PassMode` must match its layout, and Rust-ABI functions must
/// obey Rust-specific invariants (ZSTs ignored, no byval-on-stack).
fn fn_abi_sanity_check<'tcx>(
    cx: &LayoutCx<'tcx>,
    fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
    spec_abi: ExternAbi,
) {
    // Checks a single argument slot (also used for the return place).
    fn fn_arg_sanity_check<'tcx>(
        cx: &LayoutCx<'tcx>,
        fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
        spec_abi: ExternAbi,
        arg: &ArgAbi<'tcx, Ty<'tcx>>,
    ) {
        let tcx = cx.tcx();

        if spec_abi.is_rustic_abi() {
            // Rust ABIs must drop ZST arguments entirely...
            if arg.layout.is_zst() {
                assert!(arg.is_ignore());
            }
            // ...and never use the target's byval-on-stack mechanism.
            if let PassMode::Indirect { on_stack, .. } = arg.mode {
                assert!(!on_stack, "rust abi shouldn't use on_stack");
            }
        }

        match &arg.mode {
            PassMode::Ignore => {
                // Only ZSTs may be skipped.
                assert!(arg.layout.is_zst());
            }
            PassMode::Direct(_) => {
                // Direct passing is only meaningful for single-register-ish
                // representations; ScalarPair must use `PassMode::Pair`.
                match arg.layout.backend_repr {
                    BackendRepr::Scalar(_) | BackendRepr::SimdVector { .. } => {}
                    BackendRepr::ScalarPair(..) => {
                        panic!("`PassMode::Direct` used for ScalarPair type {}", arg.layout.ty)
                    }
                    BackendRepr::Memory { sized } => {
                        assert!(sized, "`PassMode::Direct` for unsized type in ABI: {:#?}", fn_abi);
                        // Direct aggregates are a legacy escape hatch: only
                        // the "unadjusted" ABI, and `extern "C"` on wasm,
                        // may use them.
                        match spec_abi {
                            ExternAbi::Unadjusted => {}
                            ExternAbi::C { unwind: _ }
                                if matches!(&*tcx.sess.target.arch, "wasm32" | "wasm64") => {}
                            _ => {
                                panic!(
                                    "`PassMode::Direct` for aggregates only allowed for \"unadjusted\" functions and on wasm\n\
                                     Problematic type: {:#?}",
                                    arg.layout,
                                );
                            }
                        }
                    }
                }
            }
            PassMode::Pair(_, _) => {
                // Pair passing requires a ScalarPair layout.
                assert!(
                    matches!(arg.layout.backend_repr, BackendRepr::ScalarPair(..)),
                    "PassMode::Pair for type {}",
                    arg.layout.ty
                );
            }
            PassMode::Cast { .. } => {
                // Casting between ABI types requires a statically-sized value.
                assert!(arg.layout.is_sized());
            }
            PassMode::Indirect { meta_attrs: None, .. } => {
                // No metadata pointer means the value must be sized.
                assert!(arg.layout.is_sized());
            }
            PassMode::Indirect { meta_attrs: Some(_), on_stack, .. } => {
                // A metadata pointer is only present for unsized values, and
                // those can never go on the stack byval.
                assert!(arg.layout.is_unsized() && !on_stack);
                // `extern type` tails have no metadata at all, so they cannot
                // be passed unsized.
                let tail = tcx.struct_tail_for_codegen(arg.layout.ty, cx.typing_env);
                if matches!(tail.kind(), ty::Foreign(..)) {
                    panic!("unsized arguments must not be `extern` types");
                }
            }
        }
    }

    for arg in fn_abi.args.iter() {
        fn_arg_sanity_check(cx, fn_abi, spec_abi, arg);
    }
    fn_arg_sanity_check(cx, fn_abi, spec_abi, &fn_abi.ret);
}
510
/// Builds a fresh `FnAbi` for `sig` (optionally specialized to `instance`):
/// computes each argument's layout and pass mode, applies Rust-specific
/// attributes, runs target/ABI adjustment, and sanity-checks the result.
///
/// `extra_args` are arguments passed beyond the declared inputs (C-variadic
/// calls); for `"rust-call"` functions the trailing tuple is untupled instead.
#[tracing::instrument(level = "debug", skip(cx, instance))]
fn fn_abi_new_uncached<'tcx>(
    cx: &LayoutCx<'tcx>,
    sig: ty::FnSig<'tcx>,
    extra_args: &[Ty<'tcx>],
    instance: Option<ty::Instance<'tcx>>,
) -> Result<&'tcx FnAbi<'tcx, Ty<'tcx>>, &'tcx FnAbiError<'tcx>> {
    let tcx = cx.tcx();
    // From the instance (if any), determine: the hidden caller-location
    // argument (e.g. `#[track_caller]`), a concrete def_id (withheld for
    // virtual calls, where the callee is not statically known), and whether
    // this is a virtual (vtable) call.
    let (caller_location, determined_fn_def_id, is_virtual_call) = if let Some(instance) = instance
    {
        let is_virtual_call = matches!(instance.def, ty::InstanceKind::Virtual(..));
        (
            instance.def.requires_caller_location(tcx).then(|| tcx.caller_location_ty()),
            if is_virtual_call { None } else { Some(instance.def_id()) },
            is_virtual_call,
        )
    } else {
        (None, None, false)
    };
    let sig = tcx.normalize_erasing_regions(cx.typing_env, sig);

    let conv = conv_from_spec_abi(cx.tcx(), sig.abi, sig.c_variadic);

    let mut inputs = sig.inputs();
    // `"rust-call"` functions receive their arguments as one trailing tuple;
    // untuple it so each element becomes a separate ABI argument.
    let extra_args = if sig.abi == ExternAbi::RustCall {
        assert!(!sig.c_variadic && extra_args.is_empty());

        if let Some(input) = sig.inputs().last()
            && let ty::Tuple(tupled_arguments) = input.kind()
        {
            inputs = &sig.inputs()[0..sig.inputs().len() - 1];
            tupled_arguments
        } else {
            bug!(
                "argument to function with \"rust-call\" ABI \
                 is not a tuple"
            );
        }
    } else {
        // Extra args are only legal for C-variadic signatures.
        assert!(sig.c_variadic || extra_args.is_empty());
        extra_args
    };

    // `(async_)drop_in_place` gets special pointer attributes on its first
    // argument (see `adjust_for_rust_scalar`).
    let is_drop_in_place = determined_fn_def_id.is_some_and(|def_id| {
        tcx.is_lang_item(def_id, LangItem::DropInPlace)
            || tcx.is_lang_item(def_id, LangItem::AsyncDropInPlace)
    });

    // Builds the `ArgAbi` for one input (`arg_idx = Some(i)`) or the return
    // place (`arg_idx = None`).
    let arg_of = |ty: Ty<'tcx>, arg_idx: Option<usize>| -> Result<_, &'tcx FnAbiError<'tcx>> {
        let span = tracing::debug_span!("arg_of");
        let _entered = span.enter();
        let is_return = arg_idx.is_none();
        let is_drop_target = is_drop_in_place && arg_idx == Some(0);
        let drop_target_pointee = is_drop_target.then(|| match ty.kind() {
            ty::RawPtr(ty, _) => *ty,
            _ => bug!("argument to drop_in_place is not a raw ptr: {:?}", ty),
        });

        let layout = cx.layout_of(ty).map_err(|err| &*tcx.arena.alloc(FnAbiError::Layout(*err)))?;
        // For virtual calls the receiver's wide pointer is replaced by a thin
        // pointer: the vtable part is consumed by dispatch itself.
        let layout = if is_virtual_call && arg_idx == Some(0) {
            make_thin_self_ptr(cx, layout)
        } else {
            layout
        };

        let mut arg = ArgAbi::new(cx, layout, |layout, scalar, offset| {
            let mut attrs = ArgAttributes::new();
            adjust_for_rust_scalar(
                *cx,
                &mut attrs,
                scalar,
                *layout,
                offset,
                is_return,
                drop_target_pointee,
            );
            attrs
        });

        // ZSTs are never passed at all.
        if arg.layout.is_zst() {
            arg.mode = PassMode::Ignore;
        }

        Ok(arg)
    };

    // Assemble the full ABI: declared inputs, then untupled/variadic extras,
    // then the hidden caller-location argument (if any).
    let mut fn_abi = FnAbi {
        ret: arg_of(sig.output(), None)?,
        args: inputs
            .iter()
            .copied()
            .chain(extra_args.iter().copied())
            .chain(caller_location)
            .enumerate()
            .map(|(i, ty)| arg_of(ty, Some(i)))
            .collect::<Result<_, _>>()?,
        c_variadic: sig.c_variadic,
        fixed_count: inputs.len() as u32,
        conv,
        can_unwind: fn_can_unwind(
            tcx,
            determined_fn_def_id,
            sig.abi,
        ),
    };
    fn_abi_adjust_for_abi(
        cx,
        &mut fn_abi,
        sig.abi,
        determined_fn_def_id,
    );
    debug!("fn_abi_new_uncached = {:?}", fn_abi);
    fn_abi_sanity_check(cx, &fn_abi, sig.abi);
    Ok(tcx.arena.alloc(fn_abi))
}
633
/// Applies ABI-specific adjustments to a freshly built `FnAbi`:
/// "unadjusted" functions are forced to direct passing, Rust-ABI functions
/// get the Rust adjustment plus any attributes deduced from the callee's
/// body, and foreign ABIs are handled by the target-specific code.
#[tracing::instrument(level = "trace", skip(cx))]
fn fn_abi_adjust_for_abi<'tcx>(
    cx: &LayoutCx<'tcx>,
    fn_abi: &mut FnAbi<'tcx, Ty<'tcx>>,
    abi: ExternAbi,
    fn_def_id: Option<DefId>,
) {
    if abi == ExternAbi::Unadjusted {
        // The "unadjusted" ABI passes every value directly, bypassing the
        // usual per-target adjustment (hence `make_direct_deprecated`).
        fn unadjust<'tcx>(arg: &mut ArgAbi<'tcx, Ty<'tcx>>) {
            if matches!(arg.layout.backend_repr, BackendRepr::Memory { .. }) {
                // Direct passing requires a statically known size.
                assert!(
                    arg.layout.backend_repr.is_sized(),
                    "'unadjusted' ABI does not support unsized arguments"
                );
            }
            arg.make_direct_deprecated();
        }

        unadjust(&mut fn_abi.ret);
        for arg in fn_abi.args.iter_mut() {
            unadjust(arg);
        }
        return;
    }

    let tcx = cx.tcx();

    if abi.is_rustic_abi() {
        fn_abi.adjust_for_rust_abi(cx);

        // Attributes deduced from analyzing the callee's body are only used
        // when optimizing, and only in non-incremental builds — presumably
        // because the deduction is not available/tracked otherwise (TODO:
        // confirm against the `deduced_param_attrs` query's own gating).
        let deduced_param_attrs =
            if tcx.sess.opts.optimize != OptLevel::No && tcx.sess.opts.incremental.is_none() {
                fn_def_id.map(|fn_def_id| tcx.deduced_param_attrs(fn_def_id)).unwrap_or_default()
            } else {
                &[]
            };

        for (arg_idx, arg) in fn_abi.args.iter_mut().enumerate() {
            if arg.is_ignore() {
                continue;
            }

            // Deduced attributes are only applied to indirectly-passed
            // arguments here.
            if let &mut PassMode::Indirect { ref mut attrs, .. } = &mut arg.mode {
                if let Some(deduced_param_attrs) = deduced_param_attrs.get(arg_idx) {
                    // The callee never writes through this argument, so the
                    // pointer may be marked read-only.
                    if deduced_param_attrs.read_only {
                        attrs.regular.insert(ArgAttribute::ReadOnly);
                        debug!("added deduced read-only attribute");
                    }
                }
            }
        }
    } else {
        fn_abi.adjust_for_foreign_abi(cx, abi);
    }
}
704
/// For a virtual (vtable) call, replaces the receiver's layout with that of
/// a *thin* pointer: the vtable half of the wide pointer is consumed by the
/// dispatch itself, so the callee only receives the data pointer.
fn make_thin_self_ptr<'tcx>(
    cx: &(impl HasTyCtxt<'tcx> + HasTypingEnv<'tcx>),
    layout: TyAndLayout<'tcx>,
) -> TyAndLayout<'tcx> {
    let tcx = cx.tcx();
    // First find the type of the wide pointer inside the receiver.
    let wide_pointer_ty = if layout.is_unsized() {
        // Receiver is the unsized pointee itself (e.g. `dyn Trait` by value
        // via arbitrary self types); fabricate a `*mut` to it.
        Ty::new_mut_ptr(tcx, layout.ty)
    } else {
        // Receiver is a (possibly wrapped) pointer; only scalar and
        // scalar-pair layouts can contain one.
        match layout.backend_repr {
            BackendRepr::ScalarPair(..) | BackendRepr::Scalar(..) => (),
            _ => bug!("receiver type has unsupported layout: {:?}", layout),
        }

        // Peel `DispatchFromDyn` wrappers (e.g. `Pin<&mut Self>`) by
        // descending through the single non-1-ZST field until the actual
        // pointer/reference is reached.
        let mut wide_pointer_layout = layout;
        while !wide_pointer_layout.ty.is_raw_ptr() && !wide_pointer_layout.ty.is_ref() {
            wide_pointer_layout = wide_pointer_layout
                .non_1zst_field(cx)
                .expect("not exactly one non-1-ZST field in a `DispatchFromDyn` type")
                .1
        }

        wide_pointer_layout.ty
    };

    // Keep the wide pointer's *type* but substitute a thin pointer's
    // *layout*: `*mut ()` is used as a stand-in for any thin pointer.
    let unit_ptr_ty = Ty::new_mut_ptr(tcx, tcx.types.unit);

    TyAndLayout {
        ty: wide_pointer_ty,

        // NOTE: deliberately a layout/ty mismatch — the layout is that of a
        // thin pointer while `ty` stays the wide-pointer type.
        ..tcx.layout_of(ty::TypingEnv::fully_monomorphized().as_query_input(unit_ptr_ty)).unwrap()
    }
}