1#![cfg_attr(feature = "nightly", allow(internal_features))]
3#![cfg_attr(feature = "nightly", doc(rust_logo))]
4#![cfg_attr(feature = "nightly", feature(assert_matches))]
5#![cfg_attr(feature = "nightly", feature(rustc_attrs))]
6#![cfg_attr(feature = "nightly", feature(rustdoc_internals))]
7#![cfg_attr(feature = "nightly", feature(step_trait))]
8use std::fmt;
43#[cfg(feature = "nightly")]
44use std::iter::Step;
45use std::num::{NonZeroUsize, ParseIntError};
46use std::ops::{Add, AddAssign, Deref, Mul, RangeFull, RangeInclusive, Sub};
47use std::str::FromStr;
48
49use bitflags::bitflags;
50#[cfg(feature = "nightly")]
51use rustc_data_structures::stable_hasher::StableOrd;
52use rustc_hashes::Hash64;
53use rustc_index::{Idx, IndexSlice, IndexVec};
54#[cfg(feature = "nightly")]
55use rustc_macros::{Decodable_NoContext, Encodable_NoContext, HashStable_Generic};
56
57mod callconv;
58mod canon_abi;
59mod extern_abi;
60mod layout;
61#[cfg(test)]
62mod tests;
63
64pub use callconv::{Heterogeneous, HomogeneousAggregate, Reg, RegKind};
65pub use canon_abi::{ArmCall, CanonAbi, InterruptKind, X86Call};
66#[cfg(feature = "nightly")]
67pub use extern_abi::CVariadicStatus;
68pub use extern_abi::{ExternAbi, all_names};
69#[cfg(feature = "nightly")]
70pub use layout::{FIRST_VARIANT, FieldIdx, Layout, TyAbiInterface, TyAndLayout, VariantIdx};
71pub use layout::{LayoutCalculator, LayoutCalculatorError};
72
#[cfg(feature = "nightly")]
/// Marker context trait required by the `HashStable_Generic` derives used
/// throughout this crate (nightly-only).
pub trait HashStableContext {}
78
/// Bitflags describing properties requested via `#[repr(..)]` attributes;
/// the individual bits are declared in the `bitflags!` invocation below.
#[derive(Clone, Copy, PartialEq, Eq, Default)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub struct ReprFlags(u8);
85
bitflags! {
    impl ReprFlags: u8 {
        // Set for `#[repr(C)]` (see `ReprOptions::c`).
        const IS_C = 1 << 0;
        // Set for `#[repr(simd)]` (see `ReprOptions::simd`).
        const IS_SIMD = 1 << 1;
        // Set for `#[repr(transparent)]` (see `ReprOptions::transparent`).
        const IS_TRANSPARENT = 1 << 2;
        // Internal "linear" flag (see `ReprOptions::linear`).
        const IS_LINEAR = 1 << 3;
        // Opt into layout randomization (see `ReprOptions::can_randomize_type_layout`).
        const RANDOMIZE_LAYOUT = 1 << 4;
        // Any of these flags pins the declared field order.
        const FIELD_ORDER_UNOPTIMIZABLE = ReprFlags::IS_C.bits()
            | ReprFlags::IS_SIMD.bits()
            | ReprFlags::IS_LINEAR.bits();
        // Any of these flags forbids newtype ABI optimizations.
        const ABI_UNOPTIMIZABLE = ReprFlags::IS_C.bits() | ReprFlags::IS_SIMD.bits();
    }
}
105
106impl std::fmt::Debug for ReprFlags {
109 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
110 bitflags::parser::to_writer(self, f)
111 }
112}
113
/// The discriminant type requested via `#[repr(<int>)]`.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub enum IntegerType {
    /// Pointer-sized integer (`isize`/`usize`); the bool is `true` when signed.
    Pointer(bool),
    /// Fixed-width integer of the given size; the bool is `true` when signed.
    Fixed(Integer, bool),
}
127
128impl IntegerType {
129 pub fn is_signed(&self) -> bool {
130 match self {
131 IntegerType::Pointer(b) => *b,
132 IntegerType::Fixed(_, b) => *b,
133 }
134 }
135}
136
/// Parsed `#[repr(..)]` options for a type.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Default)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub struct ReprOptions {
    // Explicit discriminant type from `#[repr(<int>)]`, if any.
    pub int: Option<IntegerType>,
    // Minimum alignment from `#[repr(align(N))]`, if any.
    pub align: Option<Align>,
    // Maximum alignment from `#[repr(packed(N))]`, if any.
    pub pack: Option<Align>,
    // Boolean repr properties (C, simd, transparent, ...).
    pub flags: ReprFlags,
    // Seed used when field-order randomization is enabled.
    pub field_shuffle_seed: Hash64,
}
157
impl ReprOptions {
    /// Whether `#[repr(simd)]` was specified.
    #[inline]
    pub fn simd(&self) -> bool {
        self.flags.contains(ReprFlags::IS_SIMD)
    }

    /// Whether `#[repr(C)]` was specified.
    #[inline]
    pub fn c(&self) -> bool {
        self.flags.contains(ReprFlags::IS_C)
    }

    /// Whether `#[repr(packed(N))]` was specified.
    #[inline]
    pub fn packed(&self) -> bool {
        self.pack.is_some()
    }

    /// Whether `#[repr(transparent)]` was specified.
    #[inline]
    pub fn transparent(&self) -> bool {
        self.flags.contains(ReprFlags::IS_TRANSPARENT)
    }

    /// Whether the internal "linear" flag is set.
    #[inline]
    pub fn linear(&self) -> bool {
        self.flags.contains(ReprFlags::IS_LINEAR)
    }

    /// The discriminant type, defaulting to a signed pointer-sized integer
    /// when no explicit `#[repr(<int>)]` was given.
    pub fn discr_type(&self) -> IntegerType {
        self.int.unwrap_or(IntegerType::Pointer(true))
    }

    /// Whether enum layout optimizations are forbidden: `#[repr(C)]` or any
    /// explicit discriminant type pins the layout.
    pub fn inhibit_enum_layout_opt(&self) -> bool {
        self.c() || self.int.is_some()
    }

    /// Whether newtype ABI optimizations are forbidden (any of the
    /// `ABI_UNOPTIMIZABLE` flags is set).
    pub fn inhibit_newtype_abi_optimization(&self) -> bool {
        self.flags.intersects(ReprFlags::ABI_UNOPTIMIZABLE)
    }

    /// Whether struct fields must keep their declared order (any
    /// `FIELD_ORDER_UNOPTIMIZABLE` flag, or an explicit discriminant type).
    pub fn inhibit_struct_field_reordering(&self) -> bool {
        self.flags.intersects(ReprFlags::FIELD_ORDER_UNOPTIMIZABLE) || self.int.is_some()
    }

    /// Whether field order may be randomized: requires the opt-in flag and
    /// that nothing else pins the field order.
    pub fn can_randomize_type_layout(&self) -> bool {
        !self.inhibit_struct_field_reordering() && self.flags.contains(ReprFlags::RANDOMIZE_LAYOUT)
    }

    /// Whether union ABI optimizations are forbidden (`#[repr(C)]`).
    pub fn inhibits_union_abi_opt(&self) -> bool {
        self.c()
    }
}
218
219pub const MAX_SIMD_LANES: u64 = 1 << 0xF;
225
/// Size, alignment and addressing properties of pointers in one address space.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct PointerSpec {
    // Total size of a pointer value.
    pointer_size: Size,
    // ABI alignment of a pointer value.
    pointer_align: AbiAlign,
    // Size of the index/offset component of a pointer (may differ from
    // `pointer_size` in exotic address spaces).
    pointer_offset: Size,
    // Whether this was declared with the `pf` ("fat pointer") prefix in the
    // data-layout string; currently recorded but unused.
    _is_fat: bool,
}
239
/// Parsed representation of an LLVM data-layout specification for a target:
/// endianness plus size/alignment information for the primitive types.
#[derive(Debug, PartialEq, Eq)]
pub struct TargetDataLayout {
    pub endian: Endian,
    pub i1_align: AbiAlign,
    pub i8_align: AbiAlign,
    pub i16_align: AbiAlign,
    pub i32_align: AbiAlign,
    pub i64_align: AbiAlign,
    pub i128_align: AbiAlign,
    pub f16_align: AbiAlign,
    pub f32_align: AbiAlign,
    pub f64_align: AbiAlign,
    pub f128_align: AbiAlign,
    pub aggregate_align: AbiAlign,

    /// Alignments for vector types, keyed by total vector size.
    pub vector_align: Vec<(Size, AbiAlign)>,

    /// The address space ordinary data pointers live in.
    pub default_address_space: AddressSpace,
    /// Pointer properties for `default_address_space`.
    pub default_address_space_pointer_spec: PointerSpec,

    // Pointer properties for every *non-default* address space mentioned in
    // the data-layout string (the default one is stored separately above).
    address_space_info: Vec<(AddressSpace, PointerSpec)>,

    /// Address space in which function pointers live (`P<n>` in the spec).
    pub instruction_address_space: AddressSpace,

    /// Minimum size C compilers on this target use for enums.
    pub c_enum_min_size: Integer,
}
278
279impl Default for TargetDataLayout {
280 fn default() -> TargetDataLayout {
282 let align = |bits| Align::from_bits(bits).unwrap();
283 TargetDataLayout {
284 endian: Endian::Big,
285 i1_align: AbiAlign::new(align(8)),
286 i8_align: AbiAlign::new(align(8)),
287 i16_align: AbiAlign::new(align(16)),
288 i32_align: AbiAlign::new(align(32)),
289 i64_align: AbiAlign::new(align(32)),
290 i128_align: AbiAlign::new(align(32)),
291 f16_align: AbiAlign::new(align(16)),
292 f32_align: AbiAlign::new(align(32)),
293 f64_align: AbiAlign::new(align(64)),
294 f128_align: AbiAlign::new(align(128)),
295 aggregate_align: AbiAlign { abi: align(8) },
296 vector_align: vec![
297 (Size::from_bits(64), AbiAlign::new(align(64))),
298 (Size::from_bits(128), AbiAlign::new(align(128))),
299 ],
300 default_address_space: AddressSpace::ZERO,
301 default_address_space_pointer_spec: PointerSpec {
302 pointer_size: Size::from_bits(64),
303 pointer_align: AbiAlign::new(align(64)),
304 pointer_offset: Size::from_bits(64),
305 _is_fat: false,
306 },
307 address_space_info: vec![],
308 instruction_address_space: AddressSpace::ZERO,
309 c_enum_min_size: Integer::I32,
310 }
311 }
312}
313
/// Errors produced while parsing an LLVM data-layout string; each variant
/// carries the offending fragment (`cause`) for diagnostics.
pub enum TargetDataLayoutErrors<'a> {
    InvalidAddressSpace { addr_space: &'a str, cause: &'a str, err: ParseIntError },
    InvalidBits { kind: &'a str, bit: &'a str, cause: &'a str, err: ParseIntError },
    MissingAlignment { cause: &'a str },
    InvalidAlignment { cause: &'a str, err: AlignFromBytesError },
    InconsistentTargetArchitecture { dl: &'a str, target: &'a str },
    InconsistentTargetPointerWidth { pointer_size: u64, target: u16 },
    InvalidBitsSize { err: String },
    UnknownPointerSpecification { err: String },
}
324
impl TargetDataLayout {
    /// Parses an LLVM data-layout string (e.g. `"e-m:e-i64:64-p:64:64"`),
    /// starting from the conservative `Default` layout and overwriting each
    /// field a specification entry mentions. `default_address_space` names
    /// the address space whose pointer spec becomes
    /// `default_address_space_pointer_spec`.
    ///
    /// Returns a `TargetDataLayoutErrors` variant describing the first
    /// malformed entry encountered.
    pub fn parse_from_llvm_datalayout_string<'a>(
        input: &'a str,
        default_address_space: AddressSpace,
    ) -> Result<TargetDataLayout, TargetDataLayoutErrors<'a>> {
        // Parse an address space index from a string.
        let parse_address_space = |s: &'a str, cause: &'a str| {
            s.parse::<u32>().map(AddressSpace).map_err(|err| {
                TargetDataLayoutErrors::InvalidAddressSpace { addr_space: s, cause, err }
            })
        };

        // Parse a bit count from a string.
        let parse_bits = |s: &'a str, kind: &'a str, cause: &'a str| {
            s.parse::<u64>().map_err(|err| TargetDataLayoutErrors::InvalidBits {
                kind,
                bit: s,
                cause,
                err,
            })
        };

        // Parse a size specified in bits.
        let parse_size =
            |s: &'a str, cause: &'a str| parse_bits(s, "size", cause).map(Size::from_bits);

        // Parse a single alignment specified in bits.
        let parse_align_str = |s: &'a str, cause: &'a str| {
            let align_from_bits = |bits| {
                Align::from_bits(bits)
                    .map_err(|err| TargetDataLayoutErrors::InvalidAlignment { cause, err })
            };
            let abi = parse_bits(s, "alignment", cause)?;
            Ok(AbiAlign::new(align_from_bits(abi)?))
        };

        // Parse an `<abi>[:<preferred>]` alignment sequence; only the ABI
        // (first) component is used — the preferred alignment is ignored.
        let parse_align_seq = |s: &[&'a str], cause: &'a str| {
            if s.is_empty() {
                return Err(TargetDataLayoutErrors::MissingAlignment { cause });
            }
            parse_align_str(s[0], cause)
        };

        let mut dl = TargetDataLayout::default();
        dl.default_address_space = default_address_space;

        // Tracks the integer width whose alignment currently determines
        // `i128_align` (LLVM uses the largest specified width up to 128).
        let mut i128_align_src = 64;
        // Entries are `-`-separated; components within an entry are
        // `:`-separated.
        for spec in input.split('-') {
            let spec_parts = spec.split(':').collect::<Vec<_>>();

            match &*spec_parts {
                ["e"] => dl.endian = Endian::Little,
                ["E"] => dl.endian = Endian::Big,
                // `P<n>`: address space of function pointers.
                [p] if p.starts_with('P') => {
                    dl.instruction_address_space = parse_address_space(&p[1..], "P")?
                }
                ["a", a @ ..] => dl.aggregate_align = parse_align_seq(a, "a")?,
                ["f16", a @ ..] => dl.f16_align = parse_align_seq(a, "f16")?,
                ["f32", a @ ..] => dl.f32_align = parse_align_seq(a, "f32")?,
                ["f64", a @ ..] => dl.f64_align = parse_align_seq(a, "f64")?,
                ["f128", a @ ..] => dl.f128_align = parse_align_seq(a, "f128")?,
                // `p[f][<n>]:<size>:<align>...` — pointer spec without an
                // explicit index size; the offset defaults to the size.
                [p, s, a @ ..] if p.starts_with("p") => {
                    let mut p = p.strip_prefix('p').unwrap();
                    let mut _is_fat = false;

                    // Optional `f` ("fat pointer") marker.
                    if p.starts_with('f') {
                        p = p.strip_prefix('f').unwrap();
                        _is_fat = true;
                    }

                    // Anything alphabetic left over is an unknown modifier.
                    if p.starts_with(char::is_alphabetic) {
                        return Err(TargetDataLayoutErrors::UnknownPointerSpecification {
                            err: p.to_string(),
                        });
                    }

                    let addr_space = if !p.is_empty() {
                        parse_address_space(p, "p-")?
                    } else {
                        AddressSpace::ZERO
                    };

                    let pointer_size = parse_size(s, "p-")?;
                    let pointer_align = parse_align_seq(a, "p-")?;
                    let info = PointerSpec {
                        // No explicit index size, so offsets are pointer-sized.
                        pointer_offset: pointer_size,
                        pointer_size,
                        pointer_align,
                        _is_fat,
                    };
                    if addr_space == default_address_space {
                        dl.default_address_space_pointer_spec = info;
                    } else {
                        // Overwrite an earlier spec for the same address
                        // space, otherwise record a new one.
                        match dl.address_space_info.iter_mut().find(|(a, _)| *a == addr_space) {
                            Some(e) => e.1 = info,
                            None => {
                                dl.address_space_info.push((addr_space, info));
                            }
                        }
                    }
                }
                // `p[f][<n>]:<size>:<align>:<pref>:<idx>` — pointer spec with
                // an explicit index (offset) size in the fifth component.
                [p, s, a, _pr, i] if p.starts_with("p") => {
                    let mut p = p.strip_prefix('p').unwrap();
                    let mut _is_fat = false;

                    // Optional `f` ("fat pointer") marker.
                    if p.starts_with('f') {
                        p = p.strip_prefix('f').unwrap();
                        _is_fat = true;
                    }

                    if p.starts_with(char::is_alphabetic) {
                        return Err(TargetDataLayoutErrors::UnknownPointerSpecification {
                            err: p.to_string(),
                        });
                    }

                    let addr_space = if !p.is_empty() {
                        parse_address_space(p, "p")?
                    } else {
                        AddressSpace::ZERO
                    };

                    let info = PointerSpec {
                        pointer_size: parse_size(s, "p-")?,
                        pointer_align: parse_align_str(a, "p-")?,
                        pointer_offset: parse_size(i, "p-")?,
                        _is_fat,
                    };

                    if addr_space == default_address_space {
                        dl.default_address_space_pointer_spec = info;
                    } else {
                        match dl.address_space_info.iter_mut().find(|(a, _)| *a == addr_space) {
                            Some(e) => e.1 = info,
                            None => {
                                dl.address_space_info.push((addr_space, info));
                            }
                        }
                    }
                }

                // `i<bits>:<align>` — integer alignment.
                [s, a @ ..] if s.starts_with('i') => {
                    let Ok(bits) = s[1..].parse::<u64>() else {
                        // Not a valid integer width; re-parse via `parse_size`
                        // purely to surface a proper error, then skip.
                        parse_size(&s[1..], "i")?; continue;
                    };
                    let a = parse_align_seq(a, s)?;
                    match bits {
                        1 => dl.i1_align = a,
                        8 => dl.i8_align = a,
                        16 => dl.i16_align = a,
                        32 => dl.i32_align = a,
                        64 => dl.i64_align = a,
                        _ => {}
                    }
                    // `i128` inherits the alignment of the largest specified
                    // integer width up to 128 bits.
                    if bits >= i128_align_src && bits <= 128 {
                        i128_align_src = bits;
                        dl.i128_align = a;
                    }
                }
                // `v<size>:<align>` — vector alignment.
                [s, a @ ..] if s.starts_with('v') => {
                    let v_size = parse_size(&s[1..], "v")?;
                    let a = parse_align_seq(a, s)?;
                    if let Some(v) = dl.vector_align.iter_mut().find(|v| v.0 == v_size) {
                        v.1 = a;
                        continue;
                    }
                    // No existing entry for this vector size.
                    dl.vector_align.push((v_size, a));
                }
                // Everything else (e.g. `m:<mangling>`, `n<widths>`) is ignored.
                _ => {}
            }
        }

        // If the instruction address space was never given its own pointer
        // spec, let it fall back to the default address space's spec.
        if (dl.instruction_address_space != dl.default_address_space)
            && dl
                .address_space_info
                .iter()
                .find(|(a, _)| *a == dl.instruction_address_space)
                .is_none()
        {
            dl.address_space_info.push((
                dl.instruction_address_space,
                dl.default_address_space_pointer_spec.clone(),
            ));
        }

        Ok(dl)
    }

    /// Maximum object size in bytes for the default address space; anything
    /// larger cannot be addressed safely (values are deliberately below the
    /// full pointer range).
    #[inline]
    pub fn obj_size_bound(&self) -> u64 {
        match self.pointer_size().bits() {
            16 => 1 << 15,
            32 => 1 << 31,
            64 => 1 << 61,
            bits => panic!("obj_size_bound: unknown pointer bit size {bits}"),
        }
    }

    /// Like `obj_size_bound`, but for pointers in `address_space`.
    /// Panics if that address space is unknown.
    #[inline]
    pub fn obj_size_bound_in(&self, address_space: AddressSpace) -> u64 {
        match self.pointer_size_in(address_space).bits() {
            16 => 1 << 15,
            32 => 1 << 31,
            64 => 1 << 61,
            bits => panic!("obj_size_bound: unknown pointer bit size {bits}"),
        }
    }

    /// The `Integer` whose width matches the pointer *offset* width in the
    /// default address space.
    #[inline]
    pub fn ptr_sized_integer(&self) -> Integer {
        use Integer::*;
        match self.pointer_offset().bits() {
            16 => I16,
            32 => I32,
            64 => I64,
            bits => panic!("ptr_sized_integer: unknown pointer bit size {bits}"),
        }
    }

    /// Like `ptr_sized_integer`, but for `address_space`.
    #[inline]
    pub fn ptr_sized_integer_in(&self, address_space: AddressSpace) -> Integer {
        use Integer::*;
        match self.pointer_offset_in(address_space).bits() {
            16 => I16,
            32 => I32,
            64 => I64,
            bits => panic!("ptr_sized_integer: unknown pointer bit size {bits}"),
        }
    }

    /// Exact vector alignment from the data layout, if one was specified for
    /// this vector size.
    #[inline]
    fn cabi_vector_align(&self, vec_size: Size) -> Option<AbiAlign> {
        self.vector_align
            .iter()
            .find(|(size, _align)| *size == vec_size)
            .map(|(_size, align)| *align)
    }

    /// Vector alignment following LLVM's fallback rule: use the specified
    /// alignment if present, otherwise the vector's size rounded up to a
    /// power of two.
    #[inline]
    pub fn llvmlike_vector_align(&self, vec_size: Size) -> AbiAlign {
        self.cabi_vector_align(vec_size).unwrap_or(AbiAlign::new(
            Align::from_bytes(vec_size.bytes().next_power_of_two()).unwrap(),
        ))
    }

    /// Pointer size in the default address space.
    #[inline]
    pub fn pointer_size(&self) -> Size {
        self.default_address_space_pointer_spec.pointer_size
    }

    /// Pointer size in address space `c`. Panics if `c` is unknown.
    #[inline]
    pub fn pointer_size_in(&self, c: AddressSpace) -> Size {
        if c == self.default_address_space {
            return self.default_address_space_pointer_spec.pointer_size;
        }

        if let Some(e) = self.address_space_info.iter().find(|(a, _)| a == &c) {
            e.1.pointer_size
        } else {
            panic!("Use of unknown address space {c:?}");
        }
    }

    /// Pointer offset (index) size in the default address space.
    #[inline]
    pub fn pointer_offset(&self) -> Size {
        self.default_address_space_pointer_spec.pointer_offset
    }

    /// Pointer offset (index) size in address space `c`. Panics if `c` is
    /// unknown.
    #[inline]
    pub fn pointer_offset_in(&self, c: AddressSpace) -> Size {
        if c == self.default_address_space {
            return self.default_address_space_pointer_spec.pointer_offset;
        }

        if let Some(e) = self.address_space_info.iter().find(|(a, _)| a == &c) {
            e.1.pointer_offset
        } else {
            panic!("Use of unknown address space {c:?}");
        }
    }

    /// Pointer alignment in the default address space.
    #[inline]
    pub fn pointer_align(&self) -> AbiAlign {
        self.default_address_space_pointer_spec.pointer_align
    }

    /// Pointer alignment in address space `c`. Panics if `c` is unknown.
    #[inline]
    pub fn pointer_align_in(&self, c: AddressSpace) -> AbiAlign {
        if c == self.default_address_space {
            return self.default_address_space_pointer_spec.pointer_align;
        }

        if let Some(e) = self.address_space_info.iter().find(|(a, _)| a == &c) {
            e.1.pointer_align
        } else {
            panic!("Use of unknown address space {c:?}");
        }
    }
}
674
/// Anything that can hand out a `TargetDataLayout` (contexts, the layout
/// itself, references to it).
pub trait HasDataLayout {
    fn data_layout(&self) -> &TargetDataLayout;
}
678
impl HasDataLayout for TargetDataLayout {
    #[inline]
    fn data_layout(&self) -> &TargetDataLayout {
        // A layout is trivially its own data layout.
        self
    }
}
685
686impl HasDataLayout for &TargetDataLayout {
688 #[inline]
689 fn data_layout(&self) -> &TargetDataLayout {
690 (**self).data_layout()
691 }
692}
693
/// Byte order of the target.
#[derive(Copy, Clone, PartialEq, Eq)]
pub enum Endian {
    Little,
    Big,
}
700
701impl Endian {
702 pub fn as_str(&self) -> &'static str {
703 match self {
704 Self::Little => "little",
705 Self::Big => "big",
706 }
707 }
708}
709
710impl fmt::Debug for Endian {
711 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
712 f.write_str(self.as_str())
713 }
714}
715
716impl FromStr for Endian {
717 type Err = String;
718
719 fn from_str(s: &str) -> Result<Self, Self::Err> {
720 match s {
721 "little" => Ok(Self::Little),
722 "big" => Ok(Self::Big),
723 _ => Err(format!(r#"unknown endian: "{s}""#)),
724 }
725 }
726}
727
/// A size, in bytes, of something (a type, an offset, ...). Stored in bytes
/// so sub-byte sizes are rounded up (see `Size::from_bits`).
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub struct Size {
    raw: u64,
}
737
#[cfg(feature = "nightly")]
impl StableOrd for Size {
    // `Size` is a plain `u64`, so its `Ord` is total and duplicate-free;
    // unstable sorting cannot change observable results.
    const CAN_USE_UNSTABLE_SORT: bool = true;

    // The comparison is independent of any interning/hashing context.
    const THIS_IMPLEMENTATION_HAS_BEEN_TRIPLE_CHECKED: () = ();
}
746
747impl fmt::Debug for Size {
749 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
750 write!(f, "Size({} bytes)", self.bytes())
751 }
752}
753
impl Size {
    pub const ZERO: Size = Size { raw: 0 };

    /// Builds a `Size` from a bit count, rounding *up* to whole bytes.
    /// Panics if `bits` does not fit in `u64`.
    pub fn from_bits(bits: impl TryInto<u64>) -> Size {
        let bits = bits.try_into().ok().unwrap();
        Size { raw: bits.div_ceil(8) }
    }

    /// Builds a `Size` from a byte count. Panics if `bytes` does not fit in
    /// `u64`.
    #[inline]
    pub fn from_bytes(bytes: impl TryInto<u64>) -> Size {
        let bytes: u64 = bytes.try_into().ok().unwrap();
        Size { raw: bytes }
    }

    #[inline]
    pub fn bytes(self) -> u64 {
        self.raw
    }

    /// Byte count as `usize`; panics on platforms where it does not fit.
    #[inline]
    pub fn bytes_usize(self) -> usize {
        self.bytes().try_into().unwrap()
    }

    /// Size in bits; panics if `bytes * 8` overflows `u64`.
    #[inline]
    pub fn bits(self) -> u64 {
        #[cold]
        fn overflow(bytes: u64) -> ! {
            panic!("Size::bits: {bytes} bytes in bits doesn't fit in u64")
        }

        self.bytes().checked_mul(8).unwrap_or_else(|| overflow(self.bytes()))
    }

    /// Bit count as `usize`; panics on platforms where it does not fit.
    #[inline]
    pub fn bits_usize(self) -> usize {
        self.bits().try_into().unwrap()
    }

    /// Rounds up to the nearest multiple of `align` (which is a power of
    /// two, so `align.bytes() - 1` is a valid low-bit mask).
    #[inline]
    pub fn align_to(self, align: Align) -> Size {
        let mask = align.bytes() - 1;
        Size::from_bytes((self.bytes() + mask) & !mask)
    }

    /// Whether this size is a multiple of `align`.
    #[inline]
    pub fn is_aligned(self, align: Align) -> bool {
        let mask = align.bytes() - 1;
        self.bytes() & mask == 0
    }

    /// Addition that also fails when the result exceeds the target's object
    /// size bound (see `TargetDataLayout::obj_size_bound`).
    #[inline]
    pub fn checked_add<C: HasDataLayout>(self, offset: Size, cx: &C) -> Option<Size> {
        let dl = cx.data_layout();

        let bytes = self.bytes().checked_add(offset.bytes())?;

        if bytes < dl.obj_size_bound() { Some(Size::from_bytes(bytes)) } else { None }
    }

    /// Multiplication that also fails when the result exceeds the target's
    /// object size bound.
    #[inline]
    pub fn checked_mul<C: HasDataLayout>(self, count: u64, cx: &C) -> Option<Size> {
        let dl = cx.data_layout();

        let bytes = self.bytes().checked_mul(count)?;
        if bytes < dl.obj_size_bound() { Some(Size::from_bytes(bytes)) } else { None }
    }

    /// Sign-extends the low `self.bits()` bits of `value` to `i128`.
    /// A zero-sized value sign-extends to 0.
    #[inline]
    pub fn sign_extend(self, value: u128) -> i128 {
        let size = self.bits();
        if size == 0 {
            // Truncated until nothing is left.
            return 0;
        }
        // Shift the value's bits to the top of the word, then arithmetic
        // shift back down so the sign bit is replicated.
        let shift = 128 - size;
        ((value << shift) as i128) >> shift
    }

    /// Keeps only the low `self.bits()` bits of `value` (zero-extending).
    /// A zero-sized value truncates to 0.
    #[inline]
    pub fn truncate(self, value: u128) -> u128 {
        let size = self.bits();
        if size == 0 {
            // Truncated until nothing is left.
            return 0;
        }
        let shift = 128 - size;
        // Shift out the high bits, then shift back in zeros.
        (value << shift) >> shift
    }

    /// Minimum value of a signed integer of this size.
    /// NOTE(review): assumes a non-zero size — `self.bits() - 1` underflows
    /// for `Size::ZERO`.
    #[inline]
    pub fn signed_int_min(&self) -> i128 {
        self.sign_extend(1_u128 << (self.bits() - 1))
    }

    /// Maximum value of a signed integer of this size (assumes non-zero size).
    #[inline]
    pub fn signed_int_max(&self) -> i128 {
        i128::MAX >> (128 - self.bits())
    }

    /// Maximum value of an unsigned integer of this size (assumes non-zero
    /// size).
    #[inline]
    pub fn unsigned_int_max(&self) -> u128 {
        u128::MAX >> (128 - self.bits())
    }
}
868
869impl Add for Size {
873 type Output = Size;
874 #[inline]
875 fn add(self, other: Size) -> Size {
876 Size::from_bytes(self.bytes().checked_add(other.bytes()).unwrap_or_else(|| {
877 panic!("Size::add: {} + {} doesn't fit in u64", self.bytes(), other.bytes())
878 }))
879 }
880}
881
882impl Sub for Size {
883 type Output = Size;
884 #[inline]
885 fn sub(self, other: Size) -> Size {
886 Size::from_bytes(self.bytes().checked_sub(other.bytes()).unwrap_or_else(|| {
887 panic!("Size::sub: {} - {} would result in negative size", self.bytes(), other.bytes())
888 }))
889 }
890}
891
892impl Mul<Size> for u64 {
893 type Output = Size;
894 #[inline]
895 fn mul(self, size: Size) -> Size {
896 size * self
897 }
898}
899
900impl Mul<u64> for Size {
901 type Output = Size;
902 #[inline]
903 fn mul(self, count: u64) -> Size {
904 match self.bytes().checked_mul(count) {
905 Some(bytes) => Size::from_bytes(bytes),
906 None => panic!("Size::mul: {} * {} doesn't fit in u64", self.bytes(), count),
907 }
908 }
909}
910
impl AddAssign for Size {
    #[inline]
    fn add_assign(&mut self, other: Size) {
        // Delegates to `Add`, inheriting its overflow panic.
        *self = *self + other;
    }
}
917
#[cfg(feature = "nightly")]
// Allows `Size` to be used in ranges (`start..end`) by delegating every
// `Step` operation to the underlying byte count.
impl Step for Size {
    #[inline]
    fn steps_between(start: &Self, end: &Self) -> (usize, Option<usize>) {
        u64::steps_between(&start.bytes(), &end.bytes())
    }

    #[inline]
    fn forward_checked(start: Self, count: usize) -> Option<Self> {
        u64::forward_checked(start.bytes(), count).map(Self::from_bytes)
    }

    #[inline]
    fn forward(start: Self, count: usize) -> Self {
        Self::from_bytes(u64::forward(start.bytes(), count))
    }

    #[inline]
    unsafe fn forward_unchecked(start: Self, count: usize) -> Self {
        // SAFETY: forwarded to `u64`; the caller upholds `Step`'s contract
        // that `start + count` does not overflow.
        Self::from_bytes(unsafe { u64::forward_unchecked(start.bytes(), count) })
    }

    #[inline]
    fn backward_checked(start: Self, count: usize) -> Option<Self> {
        u64::backward_checked(start.bytes(), count).map(Self::from_bytes)
    }

    #[inline]
    fn backward(start: Self, count: usize) -> Self {
        Self::from_bytes(u64::backward(start.bytes(), count))
    }

    #[inline]
    unsafe fn backward_unchecked(start: Self, count: usize) -> Self {
        // SAFETY: forwarded to `u64`; the caller upholds `Step`'s contract
        // that `start - count` does not underflow.
        Self::from_bytes(unsafe { u64::backward_unchecked(start.bytes(), count) })
    }
}
955
/// An alignment, stored as the exponent of a power of two
/// (`bytes = 1 << pow2`), so it always represents a valid alignment.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub struct Align {
    pow2: u8,
}
965
966impl fmt::Debug for Align {
968 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
969 write!(f, "Align({} bytes)", self.bytes())
970 }
971}
972
/// Why a byte count could not be converted into an `Align`; each variant
/// carries the offending value.
#[derive(Clone, Copy)]
pub enum AlignFromBytesError {
    NotPowerOfTwo(u64),
    TooLarge(u64),
}
978
979impl AlignFromBytesError {
980 pub fn diag_ident(self) -> &'static str {
981 match self {
982 Self::NotPowerOfTwo(_) => "not_power_of_two",
983 Self::TooLarge(_) => "too_large",
984 }
985 }
986
987 pub fn align(self) -> u64 {
988 let (Self::NotPowerOfTwo(align) | Self::TooLarge(align)) = self;
989 align
990 }
991}
992
993impl fmt::Debug for AlignFromBytesError {
994 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
995 fmt::Display::fmt(self, f)
996 }
997}
998
999impl fmt::Display for AlignFromBytesError {
1000 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1001 match self {
1002 AlignFromBytesError::NotPowerOfTwo(align) => write!(f, "`{align}` is not a power of 2"),
1003 AlignFromBytesError::TooLarge(align) => write!(f, "`{align}` is too large"),
1004 }
1005 }
1006}
1007
impl Align {
    pub const ONE: Align = Align { pow2: 0 };
    pub const EIGHT: Align = Align { pow2: 3 };
    // Largest supported alignment: 2^29 bytes.
    pub const MAX: Align = Align { pow2: 29 };

    /// Builds an `Align` from a bit count (rounded up to whole bytes via
    /// `Size::from_bits`).
    #[inline]
    pub fn from_bits(bits: u64) -> Result<Align, AlignFromBytesError> {
        Align::from_bytes(Size::from_bits(bits).bytes())
    }

    /// Builds an `Align` from a byte count. `0` is treated as 1-byte
    /// alignment; non-powers-of-two and values above `MAX` are errors.
    #[inline]
    pub const fn from_bytes(align: u64) -> Result<Align, AlignFromBytesError> {
        // An alignment of 0 is accepted and normalized to 1.
        if align == 0 {
            return Ok(Align::ONE);
        }

        // Error constructors are kept out-of-line (#[cold]) so the happy
        // path stays small.
        #[cold]
        const fn not_power_of_2(align: u64) -> AlignFromBytesError {
            AlignFromBytesError::NotPowerOfTwo(align)
        }

        #[cold]
        const fn too_large(align: u64) -> AlignFromBytesError {
            AlignFromBytesError::TooLarge(align)
        }

        // A power of two has exactly one set bit, i.e. it equals
        // 1 << trailing_zeros.
        let tz = align.trailing_zeros();
        if align != (1 << tz) {
            return Err(not_power_of_2(align));
        }

        let pow2 = tz as u8;
        if pow2 > Self::MAX.pow2 {
            return Err(too_large(align));
        }

        Ok(Align { pow2 })
    }

    #[inline]
    pub const fn bytes(self) -> u64 {
        1 << self.pow2
    }

    /// Byte count as `usize`; panics on platforms where it does not fit.
    #[inline]
    pub fn bytes_usize(self) -> usize {
        self.bytes().try_into().unwrap()
    }

    #[inline]
    pub const fn bits(self) -> u64 {
        self.bytes() * 8
    }

    /// Bit count as `usize`; panics on platforms where it does not fit.
    #[inline]
    pub fn bits_usize(self) -> usize {
        self.bits().try_into().unwrap()
    }

    /// Largest alignment that evenly divides `size` (the largest power of
    /// two factor of its byte count).
    /// NOTE(review): for `size == 0`, `trailing_zeros()` is 64, producing an
    /// out-of-range `pow2` — callers appear to rely on non-zero sizes.
    #[inline]
    pub fn max_aligned_factor(size: Size) -> Align {
        Align { pow2: size.bytes().trailing_zeros() as u8 }
    }

    /// Caps this alignment to one that is compatible with an offset of
    /// `size` bytes.
    #[inline]
    pub fn restrict_for_offset(self, size: Size) -> Align {
        self.min(Align::max_aligned_factor(size))
    }
}
1084
/// Newtype wrapper for an ABI-mandated alignment (as opposed to a merely
/// preferred one); derefs to the underlying `Align`.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub struct AbiAlign {
    pub abi: Align,
}
1099
1100impl AbiAlign {
1101 #[inline]
1102 pub fn new(align: Align) -> AbiAlign {
1103 AbiAlign { abi: align }
1104 }
1105
1106 #[inline]
1107 pub fn min(self, other: AbiAlign) -> AbiAlign {
1108 AbiAlign { abi: self.abi.min(other.abi) }
1109 }
1110
1111 #[inline]
1112 pub fn max(self, other: AbiAlign) -> AbiAlign {
1113 AbiAlign { abi: self.abi.max(other.abi) }
1114 }
1115}
1116
impl Deref for AbiAlign {
    type Target = Align;

    // Lets `AbiAlign` be used wherever an `Align` method is needed.
    fn deref(&self) -> &Self::Target {
        &self.abi
    }
}
1124
/// Integer widths usable as discriminants / scalar layouts (signedness is
/// tracked separately, e.g. in `Primitive::Int`).
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub enum Integer {
    I8,
    I16,
    I32,
    I64,
    I128,
}
1138
impl Integer {
    /// The Rust signed type name of this width (`"i8"`, ...).
    pub fn int_ty_str(self) -> &'static str {
        use Integer::*;
        match self {
            I8 => "i8",
            I16 => "i16",
            I32 => "i32",
            I64 => "i64",
            I128 => "i128",
        }
    }

    /// The Rust unsigned type name of this width (`"u8"`, ...).
    pub fn uint_ty_str(self) -> &'static str {
        use Integer::*;
        match self {
            I8 => "u8",
            I16 => "u16",
            I32 => "u32",
            I64 => "u64",
            I128 => "u128",
        }
    }

    /// Size in bytes of this integer width.
    #[inline]
    pub fn size(self) -> Size {
        use Integer::*;
        match self {
            I8 => Size::from_bytes(1),
            I16 => Size::from_bytes(2),
            I32 => Size::from_bytes(4),
            I64 => Size::from_bytes(8),
            I128 => Size::from_bytes(16),
        }
    }

    /// Resolves a `#[repr(<int>)]` type to a concrete width, mapping
    /// pointer-sized discriminants to the target's pointer-sized integer.
    pub fn from_attr<C: HasDataLayout>(cx: &C, ity: IntegerType) -> Integer {
        let dl = cx.data_layout();

        match ity {
            IntegerType::Pointer(_) => dl.ptr_sized_integer(),
            IntegerType::Fixed(x, _) => x,
        }
    }

    /// ABI alignment of this width under the given data layout.
    pub fn align<C: HasDataLayout>(self, cx: &C) -> AbiAlign {
        use Integer::*;
        let dl = cx.data_layout();

        match self {
            I8 => dl.i8_align,
            I16 => dl.i16_align,
            I32 => dl.i32_align,
            I64 => dl.i64_align,
            I128 => dl.i128_align,
        }
    }

    /// Maximum value when interpreted as a signed integer.
    #[inline]
    pub fn signed_max(self) -> i128 {
        use Integer::*;
        match self {
            I8 => i8::MAX as i128,
            I16 => i16::MAX as i128,
            I32 => i32::MAX as i128,
            I64 => i64::MAX as i128,
            I128 => i128::MAX,
        }
    }

    /// Minimum value when interpreted as a signed integer.
    #[inline]
    pub fn signed_min(self) -> i128 {
        use Integer::*;
        match self {
            I8 => i8::MIN as i128,
            I16 => i16::MIN as i128,
            I32 => i32::MIN as i128,
            I64 => i64::MIN as i128,
            I128 => i128::MIN,
        }
    }

    /// Smallest signed width that can represent `x`.
    #[inline]
    pub fn fit_signed(x: i128) -> Integer {
        use Integer::*;
        match x {
            -0x0000_0000_0000_0080..=0x0000_0000_0000_007f => I8,
            -0x0000_0000_0000_8000..=0x0000_0000_0000_7fff => I16,
            -0x0000_0000_8000_0000..=0x0000_0000_7fff_ffff => I32,
            -0x8000_0000_0000_0000..=0x7fff_ffff_ffff_ffff => I64,
            _ => I128,
        }
    }

    /// Smallest unsigned width that can represent `x`.
    /// The arms' ranges overlap; the first (smallest) match wins.
    #[inline]
    pub fn fit_unsigned(x: u128) -> Integer {
        use Integer::*;
        match x {
            0..=0x0000_0000_0000_00ff => I8,
            0..=0x0000_0000_0000_ffff => I16,
            0..=0x0000_0000_ffff_ffff => I32,
            0..=0xffff_ffff_ffff_ffff => I64,
            _ => I128,
        }
    }

    /// Finds the smallest integer whose size *and* ABI alignment equal
    /// `wanted`, if any.
    pub fn for_align<C: HasDataLayout>(cx: &C, wanted: Align) -> Option<Integer> {
        use Integer::*;
        let dl = cx.data_layout();

        [I8, I16, I32, I64, I128].into_iter().find(|&candidate| {
            wanted == candidate.align(dl).abi && wanted.bytes() == candidate.size().bytes()
        })
    }

    /// Largest integer (at most `I64`) whose size and alignment do not
    /// exceed `wanted`; falls back to `I8`.
    pub fn approximate_align<C: HasDataLayout>(cx: &C, wanted: Align) -> Integer {
        use Integer::*;
        let dl = cx.data_layout();

        // I128 is deliberately excluded here.
        for candidate in [I64, I32, I16] {
            if wanted >= candidate.align(dl).abi && wanted.bytes() >= candidate.size().bytes() {
                return candidate;
            }
        }
        I8
    }

    /// Converts an exact bit size into an `Integer`; errors for any size
    /// that is not 8/16/32/64/128 bits.
    #[inline]
    pub fn from_size(size: Size) -> Result<Self, String> {
        match size.bits() {
            8 => Ok(Integer::I8),
            16 => Ok(Integer::I16),
            32 => Ok(Integer::I32),
            64 => Ok(Integer::I64),
            128 => Ok(Integer::I128),
            _ => Err(format!("rust does not support integers with {} bits", size.bits())),
        }
    }
}
1287
/// Floating-point widths usable as scalar layouts.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum Float {
    F16,
    F32,
    F64,
    F128,
}
1297
1298impl Float {
1299 pub fn size(self) -> Size {
1300 use Float::*;
1301
1302 match self {
1303 F16 => Size::from_bits(16),
1304 F32 => Size::from_bits(32),
1305 F64 => Size::from_bits(64),
1306 F128 => Size::from_bits(128),
1307 }
1308 }
1309
1310 pub fn align<C: HasDataLayout>(self, cx: &C) -> AbiAlign {
1311 use Float::*;
1312 let dl = cx.data_layout();
1313
1314 match self {
1315 F16 => dl.f16_align,
1316 F32 => dl.f32_align,
1317 F64 => dl.f64_align,
1318 F128 => dl.f128_align,
1319 }
1320 }
1321}
1322
/// A primitive scalar value kind, as understood by layout computation.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum Primitive {
    /// Integer of the given width; the bool is `true` when signed.
    Int(Integer, bool),
    Float(Float),
    /// Pointer in the given address space.
    Pointer(AddressSpace),
}
1338
1339impl Primitive {
1340 pub fn size<C: HasDataLayout>(self, cx: &C) -> Size {
1341 use Primitive::*;
1342 let dl = cx.data_layout();
1343
1344 match self {
1345 Int(i, _) => i.size(),
1346 Float(f) => f.size(),
1347 Pointer(a) => dl.pointer_size_in(a),
1348 }
1349 }
1350
1351 pub fn align<C: HasDataLayout>(self, cx: &C) -> AbiAlign {
1352 use Primitive::*;
1353 let dl = cx.data_layout();
1354
1355 match self {
1356 Int(i, _) => i.align(dl),
1357 Float(f) => f.align(dl),
1358 Pointer(a) => dl.pointer_align_in(a),
1359 }
1360 }
1361}
1362
/// An inclusive range of valid values that may wrap around the maximum for
/// its size: when `start > end`, the valid values are `..=end` and
/// `start..` (modular arithmetic).
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub struct WrappingRange {
    pub start: u128,
    pub end: u128,
}
1378
impl WrappingRange {
    /// The full (unconstrained) range for a value of `size` bits.
    pub fn full(size: Size) -> Self {
        Self { start: 0, end: size.unsigned_int_max() }
    }

    /// Whether `v` lies in this (possibly wrapping) range.
    #[inline(always)]
    pub fn contains(&self, v: u128) -> bool {
        if self.start <= self.end {
            self.start <= v && v <= self.end
        } else {
            // Wrapping case: valid values are `start..` or `..=end`.
            self.start <= v || v <= self.end
        }
    }

    /// Whether every value of `other` lies in `self`, for values of `size`
    /// bits. Assumes both ranges' endpoints fit in `size` bits.
    #[inline(always)]
    pub fn contains_range(&self, other: Self, size: Size) -> bool {
        if self.is_full_for(size) {
            true
        } else {
            let trunc = |x| size.truncate(x);

            // Rotate the number line so `self` starts at 0; then `self`
            // covers `0..=max` and containment becomes a plain comparison.
            let delta = self.start;
            let max = trunc(self.end.wrapping_sub(delta));

            let other_start = trunc(other.start.wrapping_sub(delta));
            let other_end = trunc(other.end.wrapping_sub(delta));

            // After rotation, `other` must be non-wrapping and end within
            // `self`'s span.
            (other_start <= other_end) && (other_end <= max)
        }
    }

    /// Returns `self` with its start replaced.
    #[inline(always)]
    fn with_start(mut self, start: u128) -> Self {
        self.start = start;
        self
    }

    /// Returns `self` with its end replaced.
    #[inline(always)]
    fn with_end(mut self, end: u128) -> Self {
        self.end = end;
        self
    }

    /// Whether this range admits every value of `size` bits. There are
    /// `size.bits()` equivalent full representations (any start with
    /// end = start - 1, mod 2^bits); this detects all of them.
    #[inline]
    fn is_full_for(&self, size: Size) -> bool {
        let max_value = size.unsigned_int_max();
        debug_assert!(self.start <= max_value && self.end <= max_value);
        self.start == (self.end.wrapping_add(1) & max_value)
    }

    /// `Ok(true)` if the range never wraps as unsigned (`start <= end`);
    /// `Err(..)` (a `RangeFull`) when the range is full and the question is
    /// meaningless.
    #[inline]
    pub fn no_unsigned_wraparound(&self, size: Size) -> Result<bool, RangeFull> {
        if self.is_full_for(size) { Err(..) } else { Ok(self.start <= self.end) }
    }

    /// `Ok(true)` if the range never wraps when its endpoints are
    /// interpreted as signed `size`-bit values; `Err(..)` when full.
    #[inline]
    pub fn no_signed_wraparound(&self, size: Size) -> Result<bool, RangeFull> {
        if self.is_full_for(size) {
            Err(..)
        } else {
            let start: i128 = size.sign_extend(self.start);
            let end: i128 = size.sign_extend(self.end);
            Ok(start <= end)
        }
    }
}
1471
1472impl fmt::Debug for WrappingRange {
1473 fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
1474 if self.start > self.end {
1475 write!(fmt, "(..={}) | ({}..)", self.end, self.start)?;
1476 } else {
1477 write!(fmt, "{}..={}", self.start, self.end)?;
1478 }
1479 Ok(())
1480 }
1481}
1482
/// Information about one scalar component of a Rust type.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum Scalar {
    /// A scalar value that must always be initialized.
    Initialized {
        value: Primitive,

        /// The set of bit patterns that are valid for this scalar.
        valid_range: WrappingRange,
    },
    /// A scalar with no validity requirement: any bit pattern, including
    /// uninitialized memory, is acceptable (used for union fields).
    Union {
        value: Primitive,
    },
}
1504
1505impl Scalar {
1506 #[inline]
1507 pub fn is_bool(&self) -> bool {
1508 use Integer::*;
1509 matches!(
1510 self,
1511 Scalar::Initialized {
1512 value: Primitive::Int(I8, false),
1513 valid_range: WrappingRange { start: 0, end: 1 }
1514 }
1515 )
1516 }
1517
1518 pub fn primitive(&self) -> Primitive {
1521 match *self {
1522 Scalar::Initialized { value, .. } | Scalar::Union { value } => value,
1523 }
1524 }
1525
1526 pub fn align(self, cx: &impl HasDataLayout) -> AbiAlign {
1527 self.primitive().align(cx)
1528 }
1529
1530 pub fn size(self, cx: &impl HasDataLayout) -> Size {
1531 self.primitive().size(cx)
1532 }
1533
1534 #[inline]
1535 pub fn to_union(&self) -> Self {
1536 Self::Union { value: self.primitive() }
1537 }
1538
1539 #[inline]
1540 pub fn valid_range(&self, cx: &impl HasDataLayout) -> WrappingRange {
1541 match *self {
1542 Scalar::Initialized { valid_range, .. } => valid_range,
1543 Scalar::Union { value } => WrappingRange::full(value.size(cx)),
1544 }
1545 }
1546
1547 #[inline]
1548 pub fn valid_range_mut(&mut self) -> &mut WrappingRange {
1551 match self {
1552 Scalar::Initialized { valid_range, .. } => valid_range,
1553 Scalar::Union { .. } => panic!("cannot change the valid range of a union"),
1554 }
1555 }
1556
1557 #[inline]
1560 pub fn is_always_valid<C: HasDataLayout>(&self, cx: &C) -> bool {
1561 match *self {
1562 Scalar::Initialized { valid_range, .. } => valid_range.is_full_for(self.size(cx)),
1563 Scalar::Union { .. } => true,
1564 }
1565 }
1566
1567 #[inline]
1569 pub fn is_uninit_valid(&self) -> bool {
1570 match *self {
1571 Scalar::Initialized { .. } => false,
1572 Scalar::Union { .. } => true,
1573 }
1574 }
1575
1576 #[inline]
1578 pub fn is_signed(&self) -> bool {
1579 match self.primitive() {
1580 Primitive::Int(_, signed) => signed,
1581 _ => false,
1582 }
1583 }
1584}
1585
/// Describes how the fields of a type are located in memory.
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum FieldsShape<FieldIdx: Idx> {
    /// A type with no fields at all (see [`FieldsShape::count`]).
    Primitive,

    /// All fields start at offset zero; the payload is the field count.
    Union(NonZeroUsize),

    /// Array-like placement: field `i` lives at offset `stride * i`.
    Array { stride: Size, count: u64 },

    /// Struct-like placement, with each field's offset precomputed.
    Arbitrary {
        /// Offset of the first byte of each field, indexed in source order.
        offsets: IndexVec<FieldIdx, Size>,

        /// Maps source-order field indices to memory-order indices
        /// (inverted by [`FieldsShape::index_by_increasing_offset`]).
        memory_index: IndexVec<FieldIdx, u32>,
    },
}
1629
impl<FieldIdx: Idx> FieldsShape<FieldIdx> {
    /// Number of fields described by this shape.
    #[inline]
    pub fn count(&self) -> usize {
        match *self {
            FieldsShape::Primitive => 0,
            FieldsShape::Union(count) => count.get(),
            FieldsShape::Array { count, .. } => count.try_into().unwrap(),
            FieldsShape::Arbitrary { ref offsets, .. } => offsets.len(),
        }
    }

    /// Byte offset of field `i` (source-order index).
    ///
    /// # Panics
    ///
    /// Panics for [`FieldsShape::Primitive`], and when `i` is out of bounds
    /// for the other shapes.
    #[inline]
    pub fn offset(&self, i: usize) -> Size {
        match *self {
            FieldsShape::Primitive => {
                unreachable!("FieldsShape::offset: `Primitive`s have no fields")
            }
            FieldsShape::Union(count) => {
                assert!(i < count.get(), "tried to access field {i} of union with {count} fields");
                // All union fields are overlaid at the start.
                Size::ZERO
            }
            FieldsShape::Array { stride, count } => {
                let i = u64::try_from(i).unwrap();
                assert!(i < count, "tried to access field {i} of array with {count} fields");
                stride * i
            }
            FieldsShape::Arbitrary { ref offsets, .. } => offsets[FieldIdx::new(i)],
        }
    }

    /// Memory-order index of field `i` (source-order index).
    ///
    /// For unions and arrays this is the identity; only [`FieldsShape::Arbitrary`]
    /// can reorder fields.
    #[inline]
    pub fn memory_index(&self, i: usize) -> usize {
        match *self {
            FieldsShape::Primitive => {
                unreachable!("FieldsShape::memory_index: `Primitive`s have no fields")
            }
            FieldsShape::Union(_) | FieldsShape::Array { .. } => i,
            FieldsShape::Arbitrary { ref memory_index, .. } => {
                memory_index[FieldIdx::new(i)].try_into().unwrap()
            }
        }
    }

    /// Iterates over source-order field indices, ordered by increasing memory
    /// offset (i.e. the inverse of `memory_index`).
    #[inline]
    pub fn index_by_increasing_offset(&self) -> impl ExactSizeIterator<Item = usize> {
        // For small field counts, invert `memory_index` into a stack array to
        // avoid allocating; otherwise build the inverse mapping on the heap.
        let mut inverse_small = [0u8; 64];
        let mut inverse_big = IndexVec::new();
        let use_small = self.count() <= inverse_small.len();

        if let FieldsShape::Arbitrary { ref memory_index, .. } = *self {
            if use_small {
                for (field_idx, &mem_idx) in memory_index.iter_enumerated() {
                    inverse_small[mem_idx as usize] = field_idx.index() as u8;
                }
            } else {
                inverse_big = memory_index.invert_bijective_mapping();
            }
        }

        // NOTE(review): `Primitive` (0 fields) still yields a single index 0 —
        // presumably so callers can treat the primitive itself as one
        // pseudo-field; confirm against call sites.
        let pseudofield_count = if let FieldsShape::Primitive = self { 1 } else { self.count() };

        (0..pseudofield_count).map(move |i| match *self {
            FieldsShape::Primitive | FieldsShape::Union(_) | FieldsShape::Array { .. } => i,
            FieldsShape::Arbitrary { .. } => {
                if use_small {
                    inverse_small[i] as usize
                } else {
                    inverse_big[i as u32].index()
                }
            }
        })
    }
}
1708
/// An identifier for an address space that a pointer can point into.
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub struct AddressSpace(pub u32);
1715
impl AddressSpace {
    /// Address space `0` — NOTE(review): presumably the target's default
    /// (data) address space; confirm against the data-layout parsing code.
    pub const ZERO: Self = AddressSpace(0);
}
1720
/// How a value is represented to the codegen backend.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum BackendRepr {
    /// A single scalar value.
    Scalar(Scalar),
    /// A pair of scalar values.
    ScalarPair(Scalar, Scalar),
    /// A SIMD vector of `count` elements.
    SimdVector {
        element: Scalar,
        count: u64,
    },
    /// Everything else: passed through memory.
    Memory {
        /// Whether the value is statically sized (see [`BackendRepr::is_unsized`]).
        sized: bool,
    },
}
1746
1747impl BackendRepr {
1748 #[inline]
1750 pub fn is_unsized(&self) -> bool {
1751 match *self {
1752 BackendRepr::Scalar(_)
1753 | BackendRepr::ScalarPair(..)
1754 | BackendRepr::SimdVector { .. } => false,
1755 BackendRepr::Memory { sized } => !sized,
1756 }
1757 }
1758
1759 #[inline]
1760 pub fn is_sized(&self) -> bool {
1761 !self.is_unsized()
1762 }
1763
1764 #[inline]
1767 pub fn is_signed(&self) -> bool {
1768 match self {
1769 BackendRepr::Scalar(scal) => scal.is_signed(),
1770 _ => panic!("`is_signed` on non-scalar ABI {self:?}"),
1771 }
1772 }
1773
1774 #[inline]
1776 pub fn is_scalar(&self) -> bool {
1777 matches!(*self, BackendRepr::Scalar(_))
1778 }
1779
1780 #[inline]
1782 pub fn is_bool(&self) -> bool {
1783 matches!(*self, BackendRepr::Scalar(s) if s.is_bool())
1784 }
1785
1786 pub fn scalar_align<C: HasDataLayout>(&self, cx: &C) -> Option<Align> {
1790 match *self {
1791 BackendRepr::Scalar(s) => Some(s.align(cx).abi),
1792 BackendRepr::ScalarPair(s1, s2) => Some(s1.align(cx).max(s2.align(cx)).abi),
1793 BackendRepr::SimdVector { .. } | BackendRepr::Memory { .. } => None,
1795 }
1796 }
1797
1798 pub fn scalar_size<C: HasDataLayout>(&self, cx: &C) -> Option<Size> {
1802 match *self {
1803 BackendRepr::Scalar(s) => Some(s.size(cx)),
1805 BackendRepr::ScalarPair(s1, s2) => {
1807 let field2_offset = s1.size(cx).align_to(s2.align(cx).abi);
1808 let size = (field2_offset + s2.size(cx)).align_to(
1809 self.scalar_align(cx)
1810 .unwrap(),
1812 );
1813 Some(size)
1814 }
1815 BackendRepr::SimdVector { .. } | BackendRepr::Memory { .. } => None,
1817 }
1818 }
1819
1820 pub fn to_union(&self) -> Self {
1822 match *self {
1823 BackendRepr::Scalar(s) => BackendRepr::Scalar(s.to_union()),
1824 BackendRepr::ScalarPair(s1, s2) => {
1825 BackendRepr::ScalarPair(s1.to_union(), s2.to_union())
1826 }
1827 BackendRepr::SimdVector { element, count } => {
1828 BackendRepr::SimdVector { element: element.to_union(), count }
1829 }
1830 BackendRepr::Memory { .. } => BackendRepr::Memory { sized: true },
1831 }
1832 }
1833
1834 pub fn eq_up_to_validity(&self, other: &Self) -> bool {
1835 match (self, other) {
1836 (BackendRepr::Scalar(l), BackendRepr::Scalar(r)) => l.primitive() == r.primitive(),
1839 (
1840 BackendRepr::SimdVector { element: element_l, count: count_l },
1841 BackendRepr::SimdVector { element: element_r, count: count_r },
1842 ) => element_l.primitive() == element_r.primitive() && count_l == count_r,
1843 (BackendRepr::ScalarPair(l1, l2), BackendRepr::ScalarPair(r1, r2)) => {
1844 l1.primitive() == r1.primitive() && l2.primitive() == r2.primitive()
1845 }
1846 _ => self == other,
1848 }
1849 }
1850}
1851
/// How the variants of a type are laid out.
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum Variants<FieldIdx: Idx, VariantIdx: Idx> {
    /// A type with no valid variants.
    Empty,

    /// Only one variant is possible.
    Single {
        /// The index of that variant.
        index: VariantIdx,
    },

    /// Multiple variants, distinguished by a tag scalar.
    Multiple {
        /// The scalar holding the tag.
        tag: Scalar,
        /// How tag values map to variants (see [`TagEncoding`]).
        tag_encoding: TagEncoding<VariantIdx>,
        /// Which field of the layout stores the tag.
        tag_field: FieldIdx,
        /// The layout of each variant.
        variants: IndexVec<VariantIdx, LayoutData<FieldIdx, VariantIdx>>,
    },
}
1878
/// How the tag of a multi-variant layout encodes the active variant.
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum TagEncoding<VariantIdx: Idx> {
    /// The tag is stored directly — NOTE(review): presumably the tag value is
    /// the variant's discriminant itself; confirm against the layout code.
    Direct,

    /// Niche encoding: invalid values of a field are used to represent variants.
    Niche {
        /// The variant not encoded in the niche; any tag value outside the
        /// niche range corresponds to it.
        untagged_variant: VariantIdx,
        /// The variants that are encoded via the niche.
        niche_variants: RangeInclusive<VariantIdx>,
        /// The tag value corresponding to the first variant of `niche_variants`
        /// — NOTE(review): presumed from the field name; confirm with decoding code.
        niche_start: u128,
    },
}
1920
/// A primitive at some offset whose valid range leaves bit patterns unused,
/// so those patterns can be claimed to encode extra values (e.g. enum tags).
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub struct Niche {
    /// Byte offset of the niche primitive within the enclosing layout.
    pub offset: Size,
    /// The primitive holding the niche.
    pub value: Primitive,
    /// Bit patterns that are valid values (everything outside is the niche).
    pub valid_range: WrappingRange,
}
1928
impl Niche {
    /// Builds a `Niche` from an initialized scalar at the given offset.
    ///
    /// Returns `None` for union scalars (no validity range) and for scalars
    /// with no invalid bit patterns available.
    pub fn from_scalar<C: HasDataLayout>(cx: &C, offset: Size, scalar: Scalar) -> Option<Self> {
        let Scalar::Initialized { value, valid_range } = scalar else { return None };
        let niche = Niche { offset, value, valid_range };
        if niche.available(cx) > 0 { Some(niche) } else { None }
    }

    /// Number of invalid bit patterns, i.e. the length of the wrapping gap
    /// from `end + 1` to `start` (modulo the primitive's size).
    pub fn available<C: HasDataLayout>(&self, cx: &C) -> u128 {
        let Self { value, valid_range: v, .. } = *self;
        let size = value.size(cx);
        assert!(size.bits() <= 128);
        let max_value = size.unsigned_int_max();

        // The niche is the complement of the valid range.
        let niche = v.end.wrapping_add(1)..v.start;
        niche.end.wrapping_sub(niche.start) & max_value
    }

    /// Tries to reserve `count` invalid values for encoding other data.
    ///
    /// On success, returns the first reserved value and the widened `Scalar`
    /// whose valid range now additionally covers the reserved values
    /// (contiguously before `start` or after `end`). Returns `None` when
    /// fewer than `count` invalid values are available.
    pub fn reserve<C: HasDataLayout>(&self, cx: &C, count: u128) -> Option<(u128, Scalar)> {
        assert!(count > 0);

        let Self { value, valid_range: v, .. } = *self;
        let size = value.size(cx);
        assert!(size.bits() <= 128);
        let max_value = size.unsigned_int_max();

        // Same computation as `available`, inlined so we can reuse `niche`.
        let niche = v.end.wrapping_add(1)..v.start;
        let available = niche.end.wrapping_sub(niche.start) & max_value;
        if count > available {
            return None;
        }

        // Claim `count` values immediately below `start` (growing downward)...
        let move_start = |v: WrappingRange| {
            let start = v.start.wrapping_sub(count) & max_value;
            Some((start, Scalar::Initialized { value, valid_range: v.with_start(start) }))
        };
        // ...or immediately above `end` (growing upward).
        let move_end = |v: WrappingRange| {
            let start = v.end.wrapping_add(1) & max_value;
            let end = v.end.wrapping_add(count) & max_value;
            Some((start, Scalar::Initialized { value, valid_range: v.with_end(end) }))
        };
        let distance_end_zero = max_value - v.end;
        if v.start > v.end {
            // Already-wrapping range: grow upward past `end`.
            move_end(v)
        } else if v.start <= distance_end_zero {
            if count <= v.start {
                // Enough room below `start` without wrapping below zero.
                move_start(v)
            } else {
                move_end(v)
            }
        } else {
            // Growing upward would wrap past the max value; avoid overshooting
            // back into the valid range.
            let end = v.end.wrapping_add(count) & max_value;
            let overshot_zero = (1..=v.end).contains(&end);
            if overshot_zero {
                move_start(v)
            } else {
                move_end(v)
            }
        }
    }
}
2006
/// The computed layout of a type: field placement, variant structure,
/// backend representation, size and alignment.
#[derive(PartialEq, Eq, Hash, Clone)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub struct LayoutData<FieldIdx: Idx, VariantIdx: Idx> {
    /// How the fields are placed in memory.
    pub fields: FieldsShape<FieldIdx>,

    /// How the variants (if any) are laid out.
    pub variants: Variants<FieldIdx, VariantIdx>,

    /// How the value is represented to the codegen backend.
    pub backend_repr: BackendRepr,

    /// The largest niche available in this layout, if any
    /// (usable for niche-encoding enclosing enums).
    pub largest_niche: Option<Niche>,
    /// Whether the type has no valid values.
    pub uninhabited: bool,

    pub align: AbiAlign,
    pub size: Size,

    /// The largest alignment explicitly requested via a `repr` attribute,
    /// if any — NOTE(review): presumed from the name; confirm in layout calc.
    pub max_repr_align: Option<Align>,

    /// The ABI alignment before any `repr`-attribute adjustment
    /// — NOTE(review): presumed from the name; confirm in layout calc.
    pub unadjusted_abi_align: Align,

    /// Seed for layout randomization (cf. `ReprFlags::RANDOMIZE_LAYOUT`).
    pub randomization_seed: Hash64,
}
2066
2067impl<FieldIdx: Idx, VariantIdx: Idx> LayoutData<FieldIdx, VariantIdx> {
2068 pub fn is_aggregate(&self) -> bool {
2070 match self.backend_repr {
2071 BackendRepr::Scalar(_) | BackendRepr::SimdVector { .. } => false,
2072 BackendRepr::ScalarPair(..) | BackendRepr::Memory { .. } => true,
2073 }
2074 }
2075
2076 pub fn is_uninhabited(&self) -> bool {
2078 self.uninhabited
2079 }
2080}
2081
impl<FieldIdx: Idx, VariantIdx: Idx> fmt::Debug for LayoutData<FieldIdx, VariantIdx>
where
    FieldsShape<FieldIdx>: fmt::Debug,
    Variants<FieldIdx, VariantIdx>: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Destructure exhaustively (no `..`) so that adding a field to
        // `LayoutData` is a compile error here until it is included in the
        // debug output.
        let LayoutData {
            size,
            align,
            backend_repr,
            fields,
            largest_niche,
            uninhabited,
            variants,
            max_repr_align,
            unadjusted_abi_align,
            randomization_seed,
        } = self;
        f.debug_struct("Layout")
            .field("size", size)
            .field("align", align)
            .field("backend_repr", backend_repr)
            .field("fields", fields)
            .field("largest_niche", largest_niche)
            .field("uninhabited", uninhabited)
            .field("variants", variants)
            .field("max_repr_align", max_repr_align)
            .field("unadjusted_abi_align", unadjusted_abi_align)
            .field("randomization_seed", randomization_seed)
            .finish()
    }
}
2117
/// The kind of safe pointer a value is, as relevant to pointee attributes.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum PointerKind {
    /// A shared reference — NOTE(review): `frozen` presumably means no
    /// interior mutability behind it; confirm against users of this type.
    SharedRef { frozen: bool },
    /// A mutable reference — NOTE(review): `unpin` presumably tracks whether
    /// the pointee is `Unpin`; confirm.
    MutableRef { unpin: bool },
    /// A `Box` — NOTE(review): `global` presumably indicates the global
    /// allocator; confirm.
    Box { unpin: bool, global: bool },
}
2128
/// Information known about the pointee of a pointer.
#[derive(Copy, Clone, Debug)]
pub struct PointeeInfo {
    /// The kind of safe pointer, if any; `None` presumably means no safety
    /// guarantees (e.g. a raw pointer) — NOTE(review): confirm at call sites.
    pub safe: Option<PointerKind>,
    /// Size of the pointee.
    pub size: Size,
    /// Alignment of the pointee.
    pub align: Align,
}
2147
2148impl<FieldIdx: Idx, VariantIdx: Idx> LayoutData<FieldIdx, VariantIdx> {
2149 #[inline]
2151 pub fn is_unsized(&self) -> bool {
2152 self.backend_repr.is_unsized()
2153 }
2154
2155 #[inline]
2156 pub fn is_sized(&self) -> bool {
2157 self.backend_repr.is_sized()
2158 }
2159
2160 pub fn is_1zst(&self) -> bool {
2162 self.is_sized() && self.size.bytes() == 0 && self.align.abi.bytes() == 1
2163 }
2164
2165 pub fn is_zst(&self) -> bool {
2170 match self.backend_repr {
2171 BackendRepr::Scalar(_)
2172 | BackendRepr::ScalarPair(..)
2173 | BackendRepr::SimdVector { .. } => false,
2174 BackendRepr::Memory { sized } => sized && self.size.bytes() == 0,
2175 }
2176 }
2177
2178 pub fn eq_abi(&self, other: &Self) -> bool {
2184 self.size == other.size
2188 && self.is_sized() == other.is_sized()
2189 && self.backend_repr.eq_up_to_validity(&other.backend_repr)
2190 && self.backend_repr.is_bool() == other.backend_repr.is_bool()
2191 && self.align.abi == other.align.abi
2192 && self.max_repr_align == other.max_repr_align
2193 && self.unadjusted_abi_align == other.unadjusted_abi_align
2194 }
2195}
2196
/// How a struct-like layout should be computed.
#[derive(Copy, Clone, Debug)]
pub enum StructKind {
    /// The struct is known to always be sized.
    AlwaysSized,
    /// The last field may be unsized — NOTE(review): presumed from the name;
    /// confirm against `LayoutCalculator`.
    MaybeUnsized,
    /// The layout is prefixed by a region of the given size and alignment
    /// — NOTE(review): presumably used for tag prefixes; confirm.
    Prefixed(Size, Align),
}
2206
/// Error produced when parsing an ABI name from a string fails.
#[derive(Clone, Debug)]
pub enum AbiFromStrErr {
    /// The ABI name is not recognized.
    Unknown,
    /// The name was missing an explicit `-unwind` variant — NOTE(review):
    /// presumed from the name; confirm against the `FromStr` impl.
    NoExplicitUnwind,
}