#![cfg_attr(feature = "nightly", allow(internal_features))]
#![cfg_attr(feature = "nightly", doc(rust_logo))]
#![cfg_attr(feature = "nightly", feature(assert_matches))]
#![cfg_attr(feature = "nightly", feature(rustc_attrs))]
#![cfg_attr(feature = "nightly", feature(rustdoc_internals))]
#![cfg_attr(feature = "nightly", feature(step_trait))]

use std::fmt;
#[cfg(feature = "nightly")]
use std::iter::Step;
use std::num::{NonZeroUsize, ParseIntError};
use std::ops::{Add, AddAssign, Mul, RangeInclusive, Sub};
use std::str::FromStr;

use bitflags::bitflags;
#[cfg(feature = "nightly")]
use rustc_data_structures::stable_hasher::StableOrd;
use rustc_hashes::Hash64;
use rustc_index::{Idx, IndexSlice, IndexVec};
#[cfg(feature = "nightly")]
use rustc_macros::{Decodable_NoContext, Encodable_NoContext, HashStable_Generic};

mod callconv;
mod canon_abi;
mod extern_abi;
mod layout;
#[cfg(test)]
mod tests;

pub use callconv::{Heterogeneous, HomogeneousAggregate, Reg, RegKind};
pub use canon_abi::{ArmCall, CanonAbi, InterruptKind, X86Call};
pub use extern_abi::{ExternAbi, all_names};
#[cfg(feature = "nightly")]
pub use layout::{FIRST_VARIANT, FieldIdx, Layout, TyAbiInterface, TyAndLayout, VariantIdx};
pub use layout::{LayoutCalculator, LayoutCalculatorError};

/// Requirements for a `StableHashingContext` to be used in this crate.
#[cfg(feature = "nightly")]
pub trait HashStableContext {}

#[derive(Clone, Copy, PartialEq, Eq, Default)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub struct ReprFlags(u8);

bitflags! {
    impl ReprFlags: u8 {
        const IS_C = 1 << 0;
        const IS_SIMD = 1 << 1;
        const IS_TRANSPARENT = 1 << 2;
        // Internal only for now. If true, don't reorder fields.
        const IS_LINEAR = 1 << 3;
        // If true, the type's crate has opted into layout randomization.
        const RANDOMIZE_LAYOUT = 1 << 4;
        // Any of these flags being set prevents field reordering optimisation.
        const FIELD_ORDER_UNOPTIMIZABLE = ReprFlags::IS_C.bits()
            | ReprFlags::IS_SIMD.bits()
            | ReprFlags::IS_LINEAR.bits();
        const ABI_UNOPTIMIZABLE = ReprFlags::IS_C.bits() | ReprFlags::IS_SIMD.bits();
    }
}

impl std::fmt::Debug for ReprFlags {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        bitflags::parser::to_writer(self, f)
    }
}

#[derive(Copy, Clone, Debug, Eq, PartialEq)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub enum IntegerType {
    /// Pointer-sized integer type, i.e. `isize` and `usize`. The field shows signedness, e.g.
    /// `Pointer(true)` means `isize`.
    Pointer(bool),
    /// Fixed-sized integer type, e.g. `i8` and `u32`. The bool field shows signedness, e.g.
    /// `Fixed(I8, false)` means `u8`.
    Fixed(Integer, bool),
}

impl IntegerType {
    pub fn is_signed(&self) -> bool {
        match self {
            IntegerType::Pointer(b) => *b,
            IntegerType::Fixed(_, b) => *b,
        }
    }
}

#[derive(Copy, Clone, Debug, Eq, PartialEq, Default)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub struct ReprOptions {
    pub int: Option<IntegerType>,
    pub align: Option<Align>,
    pub pack: Option<Align>,
    pub flags: ReprFlags,
    /// The seed to be used for randomizing a type's layout.
    pub field_shuffle_seed: Hash64,
}

impl ReprOptions {
    #[inline]
    pub fn simd(&self) -> bool {
        self.flags.contains(ReprFlags::IS_SIMD)
    }

    #[inline]
    pub fn c(&self) -> bool {
        self.flags.contains(ReprFlags::IS_C)
    }

    #[inline]
    pub fn packed(&self) -> bool {
        self.pack.is_some()
    }

    #[inline]
    pub fn transparent(&self) -> bool {
        self.flags.contains(ReprFlags::IS_TRANSPARENT)
    }

    #[inline]
    pub fn linear(&self) -> bool {
        self.flags.contains(ReprFlags::IS_LINEAR)
    }

    /// Returns the discriminant type, given these `repr` options.
    /// This must only be called on enums!
    pub fn discr_type(&self) -> IntegerType {
        self.int.unwrap_or(IntegerType::Pointer(true))
    }

    /// Returns `true` if this `#[repr()]` should inhibit (pessimize) the enum layout
    /// optimizations, e.g. by way of `repr(C)` or an explicit discriminant type.
    pub fn inhibit_enum_layout_opt(&self) -> bool {
        self.c() || self.int.is_some()
    }

    pub fn inhibit_newtype_abi_optimization(&self) -> bool {
        self.flags.intersects(ReprFlags::ABI_UNOPTIMIZABLE)
    }

    /// Returns `true` if this `#[repr()]` should inhibit struct field reordering
    /// optimizations, e.g. `repr(C)` or `repr(<int>)`.
    pub fn inhibit_struct_field_reordering(&self) -> bool {
        self.flags.intersects(ReprFlags::FIELD_ORDER_UNOPTIMIZABLE) || self.int.is_some()
    }

    /// Returns `true` if this type is valid for reordering and layout randomization
    /// was requested for its defining crate.
    pub fn can_randomize_type_layout(&self) -> bool {
        !self.inhibit_struct_field_reordering() && self.flags.contains(ReprFlags::RANDOMIZE_LAYOUT)
    }

    /// Returns `true` if this `#[repr()]` should inhibit union ABI optimisations.
    pub fn inhibits_union_abi_opt(&self) -> bool {
        self.c()
    }
}
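
// An illustrative sketch of how the `ReprOptions` queries compose: `repr(C)`
// alone fixes the field order but does not force an explicit discriminant
// type. The module name and values here are examples only.
#[cfg(test)]
mod repr_options_example {
    use super::{IntegerType, ReprFlags, ReprOptions};

    #[test]
    fn repr_c_inhibits_field_reordering() {
        let repr = ReprOptions { flags: ReprFlags::IS_C, ..Default::default() };
        assert!(repr.c());
        assert!(repr.inhibit_struct_field_reordering());
        // With no explicit `repr(<int>)`, enums fall back to a signed
        // pointer-sized discriminant.
        assert_eq!(repr.discr_type(), IntegerType::Pointer(true));
    }
}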

/// The maximum supported number of lanes in a SIMD vector.
pub const MAX_SIMD_LANES: u64 = 1 << 0xF;

#[derive(Debug, PartialEq, Eq)]
pub struct TargetDataLayout {
    pub endian: Endian,
    pub i1_align: AbiAndPrefAlign,
    pub i8_align: AbiAndPrefAlign,
    pub i16_align: AbiAndPrefAlign,
    pub i32_align: AbiAndPrefAlign,
    pub i64_align: AbiAndPrefAlign,
    pub i128_align: AbiAndPrefAlign,
    pub f16_align: AbiAndPrefAlign,
    pub f32_align: AbiAndPrefAlign,
    pub f64_align: AbiAndPrefAlign,
    pub f128_align: AbiAndPrefAlign,
    pub pointer_size: Size,
    pub pointer_align: AbiAndPrefAlign,
    pub aggregate_align: AbiAndPrefAlign,

    /// Alignments for vector types.
    pub vector_align: Vec<(Size, AbiAndPrefAlign)>,

    pub instruction_address_space: AddressSpace,

    /// Minimum size of `#[repr(C)]` enums (default `c_int::BITS`, usually 32).
    pub c_enum_min_size: Integer,
}

impl Default for TargetDataLayout {
    /// Creates an instance of `TargetDataLayout` with the default values
    /// of the LLVM data layout (note: big-endian).
    fn default() -> TargetDataLayout {
        let align = |bits| Align::from_bits(bits).unwrap();
        TargetDataLayout {
            endian: Endian::Big,
            i1_align: AbiAndPrefAlign::new(align(8)),
            i8_align: AbiAndPrefAlign::new(align(8)),
            i16_align: AbiAndPrefAlign::new(align(16)),
            i32_align: AbiAndPrefAlign::new(align(32)),
            i64_align: AbiAndPrefAlign { abi: align(32), pref: align(64) },
            i128_align: AbiAndPrefAlign { abi: align(32), pref: align(64) },
            f16_align: AbiAndPrefAlign::new(align(16)),
            f32_align: AbiAndPrefAlign::new(align(32)),
            f64_align: AbiAndPrefAlign::new(align(64)),
            f128_align: AbiAndPrefAlign::new(align(128)),
            pointer_size: Size::from_bits(64),
            pointer_align: AbiAndPrefAlign::new(align(64)),
            aggregate_align: AbiAndPrefAlign { abi: align(0), pref: align(64) },
            vector_align: vec![
                (Size::from_bits(64), AbiAndPrefAlign::new(align(64))),
                (Size::from_bits(128), AbiAndPrefAlign::new(align(128))),
            ],
            instruction_address_space: AddressSpace::DATA,
            c_enum_min_size: Integer::I32,
        }
    }
}

pub enum TargetDataLayoutErrors<'a> {
    InvalidAddressSpace { addr_space: &'a str, cause: &'a str, err: ParseIntError },
    InvalidBits { kind: &'a str, bit: &'a str, cause: &'a str, err: ParseIntError },
    MissingAlignment { cause: &'a str },
    InvalidAlignment { cause: &'a str, err: AlignFromBytesError },
    InconsistentTargetArchitecture { dl: &'a str, target: &'a str },
    InconsistentTargetPointerWidth { pointer_size: u64, target: u32 },
    InvalidBitsSize { err: String },
}

impl TargetDataLayout {
    /// Parses a layout description from an LLVM
    /// [data layout string](https://llvm.org/docs/LangRef.html#data-layout).
    ///
    /// This function doesn't fill `c_enum_min_size`; it is always `I32` since it
    /// cannot be determined from the LLVM string.
    pub fn parse_from_llvm_datalayout_string<'a>(
        input: &'a str,
    ) -> Result<TargetDataLayout, TargetDataLayoutErrors<'a>> {
        let parse_address_space = |s: &'a str, cause: &'a str| {
            s.parse::<u32>().map(AddressSpace).map_err(|err| {
                TargetDataLayoutErrors::InvalidAddressSpace { addr_space: s, cause, err }
            })
        };

        let parse_bits = |s: &'a str, kind: &'a str, cause: &'a str| {
            s.parse::<u64>().map_err(|err| TargetDataLayoutErrors::InvalidBits {
                kind,
                bit: s,
                cause,
                err,
            })
        };

        let parse_size =
            |s: &'a str, cause: &'a str| parse_bits(s, "size", cause).map(Size::from_bits);

        let parse_align = |s: &[&'a str], cause: &'a str| {
            if s.is_empty() {
                return Err(TargetDataLayoutErrors::MissingAlignment { cause });
            }
            let align_from_bits = |bits| {
                Align::from_bits(bits)
                    .map_err(|err| TargetDataLayoutErrors::InvalidAlignment { cause, err })
            };
            let abi = parse_bits(s[0], "alignment", cause)?;
            let pref = s.get(1).map_or(Ok(abi), |pref| parse_bits(pref, "alignment", cause))?;
            Ok(AbiAndPrefAlign { abi: align_from_bits(abi)?, pref: align_from_bits(pref)? })
        };

        let mut dl = TargetDataLayout::default();
        let mut i128_align_src = 64;
        for spec in input.split('-') {
            let spec_parts = spec.split(':').collect::<Vec<_>>();

            match &*spec_parts {
                ["e"] => dl.endian = Endian::Little,
                ["E"] => dl.endian = Endian::Big,
                [p] if p.starts_with('P') => {
                    dl.instruction_address_space = parse_address_space(&p[1..], "P")?
                }
                ["a", a @ ..] => dl.aggregate_align = parse_align(a, "a")?,
                ["f16", a @ ..] => dl.f16_align = parse_align(a, "f16")?,
                ["f32", a @ ..] => dl.f32_align = parse_align(a, "f32")?,
                ["f64", a @ ..] => dl.f64_align = parse_align(a, "f64")?,
                ["f128", a @ ..] => dl.f128_align = parse_align(a, "f128")?,
                [p @ "p", s, a @ ..] | [p @ "p0", s, a @ ..] => {
                    dl.pointer_size = parse_size(s, p)?;
                    dl.pointer_align = parse_align(a, p)?;
                }
                [s, a @ ..] if s.starts_with('i') => {
                    let Ok(bits) = s[1..].parse::<u64>() else {
                        // Not an integer width; parse the size anyway to surface the error.
                        parse_size(&s[1..], "i")?;
                        continue;
                    };
                    let a = parse_align(a, s)?;
                    match bits {
                        1 => dl.i1_align = a,
                        8 => dl.i8_align = a,
                        16 => dl.i16_align = a,
                        32 => dl.i32_align = a,
                        64 => dl.i64_align = a,
                        _ => {}
                    }
                    if bits >= i128_align_src && bits <= 128 {
                        // Default alignment for i128 is decided by taking the alignment of
                        // the largest-sized i{64..=128}.
                        i128_align_src = bits;
                        dl.i128_align = a;
                    }
                }
                [s, a @ ..] if s.starts_with('v') => {
                    let v_size = parse_size(&s[1..], "v")?;
                    let a = parse_align(a, s)?;
                    if let Some(v) = dl.vector_align.iter_mut().find(|v| v.0 == v_size) {
                        v.1 = a;
                        continue;
                    }
                    // No existing entry, add a new one.
                    dl.vector_align.push((v_size, a));
                }
                _ => {} // Ignore everything else.
            }
        }
        Ok(dl)
    }

    /// Returns the exclusive upper bound on object size in bytes. On 64-bit
    /// targets this is a conservative `1 << 61` rather than the full `isize::MAX`.
    #[inline]
    pub fn obj_size_bound(&self) -> u64 {
        match self.pointer_size.bits() {
            16 => 1 << 15,
            32 => 1 << 31,
            64 => 1 << 61,
            bits => panic!("obj_size_bound: unknown pointer bit size {bits}"),
        }
    }

    #[inline]
    pub fn ptr_sized_integer(&self) -> Integer {
        use Integer::*;
        match self.pointer_size.bits() {
            16 => I16,
            32 => I32,
            64 => I64,
            bits => panic!("ptr_sized_integer: unknown pointer bit size {bits}"),
        }
    }

    /// psABI-mandated alignment for a vector type, if any.
    #[inline]
    fn cabi_vector_align(&self, vec_size: Size) -> Option<AbiAndPrefAlign> {
        self.vector_align
            .iter()
            .find(|(size, _align)| *size == vec_size)
            .map(|(_size, align)| *align)
    }

    /// Alignment of a vector type, matching LLVM's behavior when the psABI does
    /// not mandate one: the vector's size rounded up to the next power of two.
    #[inline]
    pub fn llvmlike_vector_align(&self, vec_size: Size) -> AbiAndPrefAlign {
        self.cabi_vector_align(vec_size).unwrap_or(AbiAndPrefAlign::new(
            Align::from_bytes(vec_size.bytes().next_power_of_two()).unwrap(),
        ))
    }
}
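
// An illustrative sketch of driving the parser above; the data layout string
// here is a made-up example, not a real target's specification.
#[cfg(test)]
mod datalayout_parse_example {
    use super::{Endian, Integer, TargetDataLayout};

    #[test]
    fn parses_endianness_and_pointer_width() {
        let Ok(dl) = TargetDataLayout::parse_from_llvm_datalayout_string("e-p:32:32-i64:64")
        else {
            panic!("datalayout string should parse");
        };
        assert_eq!(dl.endian, Endian::Little);
        assert_eq!(dl.pointer_size.bits(), 32);
        // A 32-bit pointer maps to the `I32` integer type.
        assert_eq!(dl.ptr_sized_integer(), Integer::I32);
    }
}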

pub trait HasDataLayout {
    fn data_layout(&self) -> &TargetDataLayout;
}

impl HasDataLayout for TargetDataLayout {
    #[inline]
    fn data_layout(&self) -> &TargetDataLayout {
        self
    }
}

impl HasDataLayout for &TargetDataLayout {
    #[inline]
    fn data_layout(&self) -> &TargetDataLayout {
        (**self).data_layout()
    }
}

#[derive(Copy, Clone, PartialEq, Eq)]
pub enum Endian {
    Little,
    Big,
}

impl Endian {
    pub fn as_str(&self) -> &'static str {
        match self {
            Self::Little => "little",
            Self::Big => "big",
        }
    }
}

impl fmt::Debug for Endian {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(self.as_str())
    }
}

impl FromStr for Endian {
    type Err = String;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "little" => Ok(Self::Little),
            "big" => Ok(Self::Big),
            _ => Err(format!(r#"unknown endian: "{s}""#)),
        }
    }
}
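
// A small sketch of the `FromStr`/`as_str` round trip; `"little"` and `"big"`
// are the only accepted spellings.
#[cfg(test)]
mod endian_example {
    use super::Endian;

    #[test]
    fn round_trips_through_strings() {
        let e: Endian = "little".parse().unwrap();
        assert_eq!(e, Endian::Little);
        assert_eq!(e.as_str(), "little");
        assert!("middle".parse::<Endian>().is_err());
    }
}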

/// Size of a type in bytes.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub struct Size {
    raw: u64,
}

#[cfg(feature = "nightly")]
impl StableOrd for Size {
    const CAN_USE_UNSTABLE_SORT: bool = true;

    // `Ord` is implemented as just comparing numerical values, and numerical values
    // are not changed by (de-)serialization.
    const THIS_IMPLEMENTATION_HAS_BEEN_TRIPLE_CHECKED: () = ();
}

impl fmt::Debug for Size {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "Size({} bytes)", self.bytes())
    }
}

impl Size {
    pub const ZERO: Size = Size { raw: 0 };

    /// Rounds `bits` up to the next-higher byte boundary, if `bits` is
    /// not a multiple of 8.
    pub fn from_bits(bits: impl TryInto<u64>) -> Size {
        let bits = bits.try_into().ok().unwrap();
        // Avoid potential overflow from `bits + 7`.
        Size { raw: bits / 8 + ((bits % 8) + 7) / 8 }
    }

    #[inline]
    pub fn from_bytes(bytes: impl TryInto<u64>) -> Size {
        let bytes: u64 = bytes.try_into().ok().unwrap();
        Size { raw: bytes }
    }

    #[inline]
    pub fn bytes(self) -> u64 {
        self.raw
    }

    #[inline]
    pub fn bytes_usize(self) -> usize {
        self.bytes().try_into().unwrap()
    }

    #[inline]
    pub fn bits(self) -> u64 {
        #[cold]
        fn overflow(bytes: u64) -> ! {
            panic!("Size::bits: {bytes} bytes in bits doesn't fit in u64")
        }

        self.bytes().checked_mul(8).unwrap_or_else(|| overflow(self.bytes()))
    }

    #[inline]
    pub fn bits_usize(self) -> usize {
        self.bits().try_into().unwrap()
    }

    #[inline]
    pub fn align_to(self, align: Align) -> Size {
        let mask = align.bytes() - 1;
        Size::from_bytes((self.bytes() + mask) & !mask)
    }

    #[inline]
    pub fn is_aligned(self, align: Align) -> bool {
        let mask = align.bytes() - 1;
        self.bytes() & mask == 0
    }

    #[inline]
    pub fn checked_add<C: HasDataLayout>(self, offset: Size, cx: &C) -> Option<Size> {
        let dl = cx.data_layout();

        let bytes = self.bytes().checked_add(offset.bytes())?;

        if bytes < dl.obj_size_bound() { Some(Size::from_bytes(bytes)) } else { None }
    }

    #[inline]
    pub fn checked_mul<C: HasDataLayout>(self, count: u64, cx: &C) -> Option<Size> {
        let dl = cx.data_layout();

        let bytes = self.bytes().checked_mul(count)?;
        if bytes < dl.obj_size_bound() { Some(Size::from_bytes(bytes)) } else { None }
    }

    /// Truncates `value` to `self` bits and then sign-extends it to 128 bits
    /// (i.e., if it is negative, fills with 1's on the left).
    #[inline]
    pub fn sign_extend(self, value: u128) -> i128 {
        let size = self.bits();
        if size == 0 {
            // Truncated until nothing is left.
            return 0;
        }
        // Sign-extend it.
        let shift = 128 - size;
        // Shift the unsigned value to the left, then shift back to the right as signed
        // (essentially fills with the sign bit on the left).
        ((value << shift) as i128) >> shift
    }

    /// Truncates `value` to `self` bits.
    #[inline]
    pub fn truncate(self, value: u128) -> u128 {
        let size = self.bits();
        if size == 0 {
            // Truncated until nothing is left.
            return 0;
        }
        let shift = 128 - size;
        // Truncate (shift left to drop out leftover values, shift right to fill with zeroes).
        (value << shift) >> shift
    }

    #[inline]
    pub fn signed_int_min(&self) -> i128 {
        self.sign_extend(1_u128 << (self.bits() - 1))
    }

    #[inline]
    pub fn signed_int_max(&self) -> i128 {
        i128::MAX >> (128 - self.bits())
    }

    #[inline]
    pub fn unsigned_int_max(&self) -> u128 {
        u128::MAX >> (128 - self.bits())
    }
}
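
// A quick sketch of the bit/byte conversions and the 128-bit helpers above:
// sizes round up to whole bytes, and `sign_extend`/`truncate` treat a value as
// occupying exactly `self.bits()` low bits.
#[cfg(test)]
mod size_example {
    use super::Size;

    #[test]
    fn bits_round_up_and_values_wrap() {
        assert_eq!(Size::from_bits(17).bytes(), 3); // 2 full bytes + 1 partial
        assert_eq!(Size::from_bits(16).bytes(), 2);

        let byte = Size::from_bits(8);
        assert_eq!(byte.sign_extend(0xFF), -1); // 0xFF reinterpreted as i8
        assert_eq!(byte.truncate(0x1FF), 0xFF); // drop bits above the low 8
        assert_eq!(byte.signed_int_max(), 127);
    }
}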

// Panicking addition, subtraction and multiplication for convenience.
// Avoid during layout computation; return `LayoutError` instead.

impl Add for Size {
    type Output = Size;
    #[inline]
    fn add(self, other: Size) -> Size {
        Size::from_bytes(self.bytes().checked_add(other.bytes()).unwrap_or_else(|| {
            panic!("Size::add: {} + {} doesn't fit in u64", self.bytes(), other.bytes())
        }))
    }
}

impl Sub for Size {
    type Output = Size;
    #[inline]
    fn sub(self, other: Size) -> Size {
        Size::from_bytes(self.bytes().checked_sub(other.bytes()).unwrap_or_else(|| {
            panic!("Size::sub: {} - {} would result in negative size", self.bytes(), other.bytes())
        }))
    }
}

impl Mul<Size> for u64 {
    type Output = Size;
    #[inline]
    fn mul(self, size: Size) -> Size {
        size * self
    }
}

impl Mul<u64> for Size {
    type Output = Size;
    #[inline]
    fn mul(self, count: u64) -> Size {
        match self.bytes().checked_mul(count) {
            Some(bytes) => Size::from_bytes(bytes),
            None => panic!("Size::mul: {} * {} doesn't fit in u64", self.bytes(), count),
        }
    }
}

impl AddAssign for Size {
    #[inline]
    fn add_assign(&mut self, other: Size) {
        *self = *self + other;
    }
}

#[cfg(feature = "nightly")]
impl Step for Size {
    #[inline]
    fn steps_between(start: &Self, end: &Self) -> (usize, Option<usize>) {
        u64::steps_between(&start.bytes(), &end.bytes())
    }

    #[inline]
    fn forward_checked(start: Self, count: usize) -> Option<Self> {
        u64::forward_checked(start.bytes(), count).map(Self::from_bytes)
    }

    #[inline]
    fn forward(start: Self, count: usize) -> Self {
        Self::from_bytes(u64::forward(start.bytes(), count))
    }

    #[inline]
    unsafe fn forward_unchecked(start: Self, count: usize) -> Self {
        Self::from_bytes(unsafe { u64::forward_unchecked(start.bytes(), count) })
    }

    #[inline]
    fn backward_checked(start: Self, count: usize) -> Option<Self> {
        u64::backward_checked(start.bytes(), count).map(Self::from_bytes)
    }

    #[inline]
    fn backward(start: Self, count: usize) -> Self {
        Self::from_bytes(u64::backward(start.bytes(), count))
    }

    #[inline]
    unsafe fn backward_unchecked(start: Self, count: usize) -> Self {
        Self::from_bytes(unsafe { u64::backward_unchecked(start.bytes(), count) })
    }
}

/// Alignment of a type in bytes (always a power of two).
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub struct Align {
    pow2: u8,
}

impl fmt::Debug for Align {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "Align({} bytes)", self.bytes())
    }
}

#[derive(Clone, Copy)]
pub enum AlignFromBytesError {
    NotPowerOfTwo(u64),
    TooLarge(u64),
}

impl AlignFromBytesError {
    pub fn diag_ident(self) -> &'static str {
        match self {
            Self::NotPowerOfTwo(_) => "not_power_of_two",
            Self::TooLarge(_) => "too_large",
        }
    }

    pub fn align(self) -> u64 {
        let (Self::NotPowerOfTwo(align) | Self::TooLarge(align)) = self;
        align
    }
}

impl fmt::Debug for AlignFromBytesError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(self, f)
    }
}

impl fmt::Display for AlignFromBytesError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            AlignFromBytesError::NotPowerOfTwo(align) => write!(f, "`{align}` is not a power of 2"),
            AlignFromBytesError::TooLarge(align) => write!(f, "`{align}` is too large"),
        }
    }
}

impl Align {
    pub const ONE: Align = Align { pow2: 0 };
    pub const EIGHT: Align = Align { pow2: 3 };
    // LLVM has a maximal supported alignment of 2^29; we inherit that.
    pub const MAX: Align = Align { pow2: 29 };

    #[inline]
    pub fn from_bits(bits: u64) -> Result<Align, AlignFromBytesError> {
        Align::from_bytes(Size::from_bits(bits).bytes())
    }

    #[inline]
    pub const fn from_bytes(align: u64) -> Result<Align, AlignFromBytesError> {
        // Treat an alignment of 0 bytes like 1-byte alignment.
        if align == 0 {
            return Ok(Align::ONE);
        }

        #[cold]
        const fn not_power_of_2(align: u64) -> AlignFromBytesError {
            AlignFromBytesError::NotPowerOfTwo(align)
        }

        #[cold]
        const fn too_large(align: u64) -> AlignFromBytesError {
            AlignFromBytesError::TooLarge(align)
        }

        let tz = align.trailing_zeros();
        if align != (1 << tz) {
            return Err(not_power_of_2(align));
        }

        let pow2 = tz as u8;
        if pow2 > Self::MAX.pow2 {
            return Err(too_large(align));
        }

        Ok(Align { pow2 })
    }

    #[inline]
    pub const fn bytes(self) -> u64 {
        1 << self.pow2
    }

    #[inline]
    pub fn bytes_usize(self) -> usize {
        self.bytes().try_into().unwrap()
    }

    #[inline]
    pub const fn bits(self) -> u64 {
        self.bytes() * 8
    }

    #[inline]
    pub fn bits_usize(self) -> usize {
        self.bits().try_into().unwrap()
    }

    /// Obtains the greatest factor of `size` that is an alignment
    /// (the largest power of two the size is a multiple of).
    ///
    /// Note that all numbers are factors of 0.
    #[inline]
    pub fn max_aligned_factor(size: Size) -> Align {
        Align { pow2: size.bytes().trailing_zeros() as u8 }
    }

    /// Reduces `self` to the largest alignment that divides `size`, for use when
    /// an offset of `size` bytes may not preserve the original alignment.
    #[inline]
    pub fn restrict_for_offset(self, size: Size) -> Align {
        self.min(Align::max_aligned_factor(size))
    }
}
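
// A short sketch of the power-of-two invariant: alignments are stored as an
// exponent, and offsets can only lower an alignment, never raise it.
#[cfg(test)]
mod align_example {
    use super::{Align, Size};

    #[test]
    fn power_of_two_invariant() {
        let eight = Align::from_bytes(8).unwrap();
        assert_eq!(eight.bits(), 64);
        assert!(Align::from_bytes(3).is_err()); // not a power of two

        // An 8-aligned field placed at offset 4 is only 4-aligned.
        assert_eq!(eight.restrict_for_offset(Size::from_bytes(4)).bytes(), 4);
    }
}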

/// A pair of alignments, ABI-mandated and preferred.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub struct AbiAndPrefAlign {
    pub abi: Align,
    pub pref: Align,
}

impl AbiAndPrefAlign {
    #[inline]
    pub fn new(align: Align) -> AbiAndPrefAlign {
        AbiAndPrefAlign { abi: align, pref: align }
    }

    #[inline]
    pub fn min(self, other: AbiAndPrefAlign) -> AbiAndPrefAlign {
        AbiAndPrefAlign { abi: self.abi.min(other.abi), pref: self.pref.min(other.pref) }
    }

    #[inline]
    pub fn max(self, other: AbiAndPrefAlign) -> AbiAndPrefAlign {
        AbiAndPrefAlign { abi: self.abi.max(other.abi), pref: self.pref.max(other.pref) }
    }
}

/// Integers, also used for enum discriminants.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub enum Integer {
    I8,
    I16,
    I32,
    I64,
    I128,
}

impl Integer {
    pub fn int_ty_str(self) -> &'static str {
        use Integer::*;
        match self {
            I8 => "i8",
            I16 => "i16",
            I32 => "i32",
            I64 => "i64",
            I128 => "i128",
        }
    }

    pub fn uint_ty_str(self) -> &'static str {
        use Integer::*;
        match self {
            I8 => "u8",
            I16 => "u16",
            I32 => "u32",
            I64 => "u64",
            I128 => "u128",
        }
    }

    #[inline]
    pub fn size(self) -> Size {
        use Integer::*;
        match self {
            I8 => Size::from_bytes(1),
            I16 => Size::from_bytes(2),
            I32 => Size::from_bytes(4),
            I64 => Size::from_bytes(8),
            I128 => Size::from_bytes(16),
        }
    }

    /// Gets the `Integer` corresponding to an `IntegerType`.
    pub fn from_attr<C: HasDataLayout>(cx: &C, ity: IntegerType) -> Integer {
        let dl = cx.data_layout();

        match ity {
            IntegerType::Pointer(_) => dl.ptr_sized_integer(),
            IntegerType::Fixed(x, _) => x,
        }
    }

    pub fn align<C: HasDataLayout>(self, cx: &C) -> AbiAndPrefAlign {
        use Integer::*;
        let dl = cx.data_layout();

        match self {
            I8 => dl.i8_align,
            I16 => dl.i16_align,
            I32 => dl.i32_align,
            I64 => dl.i64_align,
            I128 => dl.i128_align,
        }
    }

    /// Returns the largest signed value that can be represented by this `Integer`.
    #[inline]
    pub fn signed_max(self) -> i128 {
        use Integer::*;
        match self {
            I8 => i8::MAX as i128,
            I16 => i16::MAX as i128,
            I32 => i32::MAX as i128,
            I64 => i64::MAX as i128,
            I128 => i128::MAX,
        }
    }

    /// Finds the smallest `Integer` type which can represent the signed value.
    #[inline]
    pub fn fit_signed(x: i128) -> Integer {
        use Integer::*;
        match x {
            -0x0000_0000_0000_0080..=0x0000_0000_0000_007f => I8,
            -0x0000_0000_0000_8000..=0x0000_0000_0000_7fff => I16,
            -0x0000_0000_8000_0000..=0x0000_0000_7fff_ffff => I32,
            -0x8000_0000_0000_0000..=0x7fff_ffff_ffff_ffff => I64,
            _ => I128,
        }
    }

    /// Finds the smallest `Integer` type which can represent the unsigned value.
    #[inline]
    pub fn fit_unsigned(x: u128) -> Integer {
        use Integer::*;
        match x {
            0..=0x0000_0000_0000_00ff => I8,
            0..=0x0000_0000_0000_ffff => I16,
            0..=0x0000_0000_ffff_ffff => I32,
            0..=0xffff_ffff_ffff_ffff => I64,
            _ => I128,
        }
    }

    /// Finds the smallest integer with the given alignment.
    pub fn for_align<C: HasDataLayout>(cx: &C, wanted: Align) -> Option<Integer> {
        use Integer::*;
        let dl = cx.data_layout();

        [I8, I16, I32, I64, I128].into_iter().find(|&candidate| {
            wanted == candidate.align(dl).abi && wanted.bytes() == candidate.size().bytes()
        })
    }

    /// Finds the largest integer with the given alignment or less.
    pub fn approximate_align<C: HasDataLayout>(cx: &C, wanted: Align) -> Integer {
        use Integer::*;
        let dl = cx.data_layout();

        // I128 is intentionally not considered here; I8 is the fallback.
        for candidate in [I64, I32, I16] {
            if wanted >= candidate.align(dl).abi && wanted.bytes() >= candidate.size().bytes() {
                return candidate;
            }
        }
        I8
    }

    /// If `size.bits()` is one of the supported bit widths, returns the
    /// corresponding `Integer`.
    #[inline]
    pub fn from_size(size: Size) -> Result<Self, String> {
        match size.bits() {
            8 => Ok(Integer::I8),
            16 => Ok(Integer::I16),
            32 => Ok(Integer::I32),
            64 => Ok(Integer::I64),
            128 => Ok(Integer::I128),
            _ => Err(format!("rust does not support integers with {} bits", size.bits())),
        }
    }
}
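
// A short sketch of discriminant sizing: `fit_signed`/`fit_unsigned` pick the
// narrowest integer that can hold a value, which is how enum discriminant
// ranges get mapped onto concrete integer widths.
#[cfg(test)]
mod integer_fit_example {
    use super::Integer;

    #[test]
    fn picks_narrowest_width() {
        assert_eq!(Integer::fit_signed(127), Integer::I8);
        assert_eq!(Integer::fit_signed(-129), Integer::I16); // one past i8::MIN
        assert_eq!(Integer::fit_unsigned(256), Integer::I16);
        assert_eq!(Integer::fit_unsigned(u64::MAX as u128 + 1), Integer::I128);
    }
}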

#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum Float {
    F16,
    F32,
    F64,
    F128,
}

impl Float {
    pub fn size(self) -> Size {
        use Float::*;

        match self {
            F16 => Size::from_bits(16),
            F32 => Size::from_bits(32),
            F64 => Size::from_bits(64),
            F128 => Size::from_bits(128),
        }
    }

    pub fn align<C: HasDataLayout>(self, cx: &C) -> AbiAndPrefAlign {
        use Float::*;
        let dl = cx.data_layout();

        match self {
            F16 => dl.f16_align,
            F32 => dl.f32_align,
            F64 => dl.f64_align,
            F128 => dl.f128_align,
        }
    }
}

/// Fundamental unit of memory access and layout.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum Primitive {
    /// The `bool` is the signedness of the `Integer` type.
    Int(Integer, bool),
    Float(Float),
    Pointer(AddressSpace),
}

impl Primitive {
    pub fn size<C: HasDataLayout>(self, cx: &C) -> Size {
        use Primitive::*;
        let dl = cx.data_layout();

        match self {
            Int(i, _) => i.size(),
            Float(f) => f.size(),
            Pointer(_) => dl.pointer_size,
        }
    }

    pub fn align<C: HasDataLayout>(self, cx: &C) -> AbiAndPrefAlign {
        use Primitive::*;
        let dl = cx.data_layout();

        match self {
            Int(i, _) => i.align(dl),
            Float(f) => f.align(dl),
            Pointer(_) => dl.pointer_align,
        }
    }
}
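
// An illustrative sketch: primitive sizes are target-independent except for
// pointers, whose size and alignment come from the data layout.
#[cfg(test)]
mod primitive_example {
    use super::{AddressSpace, Float, Integer, Primitive, TargetDataLayout};

    #[test]
    fn pointer_size_follows_data_layout() {
        let dl = TargetDataLayout::default(); // 64-bit pointers by default
        assert_eq!(Primitive::Int(Integer::I32, true).size(&dl).bytes(), 4);
        assert_eq!(Primitive::Float(Float::F64).size(&dl).bytes(), 8);
        assert_eq!(Primitive::Pointer(AddressSpace::DATA).size(&dl).bytes(), 8);
    }
}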

/// Inclusive wrap-around range of valid values (bitwise representation), that is,
/// if start > end, it represents `start..=MAX`, followed by `0..=end`.
///
/// That is, for an i8 primitive, a range of `254..=2` means the sequence
/// 254 (-2), 255 (-1), 0, 1, 2.
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub struct WrappingRange {
    pub start: u128,
    pub end: u128,
}

impl WrappingRange {
    pub fn full(size: Size) -> Self {
        Self { start: 0, end: size.unsigned_int_max() }
    }

    /// Returns `true` if `v` is contained in the range.
    #[inline(always)]
    pub fn contains(&self, v: u128) -> bool {
        if self.start <= self.end {
            self.start <= v && v <= self.end
        } else {
            self.start <= v || v <= self.end
        }
    }

    /// Returns `self` with replaced `start`.
    #[inline(always)]
    fn with_start(mut self, start: u128) -> Self {
        self.start = start;
        self
    }

    /// Returns `self` with replaced `end`.
    #[inline(always)]
    fn with_end(mut self, end: u128) -> Self {
        self.end = end;
        self
    }

    /// Returns `true` if the range completely fills values of `size`.
    #[inline]
    fn is_full_for(&self, size: Size) -> bool {
        let max_value = size.unsigned_int_max();
        debug_assert!(self.start <= max_value && self.end <= max_value);
        self.start == (self.end.wrapping_add(1) & max_value)
    }
}

impl fmt::Debug for WrappingRange {
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        if self.start > self.end {
            write!(fmt, "(..={}) | ({}..)", self.end, self.start)?;
        } else {
            write!(fmt, "{}..={}", self.start, self.end)?;
        }
        Ok(())
    }
}
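
// A sketch of the wrap-around semantics documented above: with start > end the
// range covers both tails of the value space but not the middle.
#[cfg(test)]
mod wrapping_range_example {
    use super::WrappingRange;

    #[test]
    fn wraps_past_the_maximum() {
        // For a u8-sized value, this is 254, 255, 0, 1, 2.
        let r = WrappingRange { start: 254, end: 2 };
        assert!(r.contains(255));
        assert!(r.contains(0));
        assert!(!r.contains(100));
    }
}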

/// Information about one scalar component of a Rust type.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum Scalar {
    Initialized {
        value: Primitive,

        /// The range the bit pattern of this scalar is known to fall within.
        valid_range: WrappingRange,
    },
    Union {
        /// Unions may be partially initialized and never have invalid bit
        /// patterns; `value` merely records size and alignment.
        value: Primitive,
    },
}

impl Scalar {
    #[inline]
    pub fn is_bool(&self) -> bool {
        use Integer::*;
        matches!(
            self,
            Scalar::Initialized {
                value: Primitive::Int(I8, false),
                valid_range: WrappingRange { start: 0, end: 1 }
            }
        )
    }

    /// Gets the primitive representation of this type, ignoring the valid range
    /// and whether the value is allowed to be undefined (due to being a union).
    pub fn primitive(&self) -> Primitive {
        match *self {
            Scalar::Initialized { value, .. } | Scalar::Union { value } => value,
        }
    }

    pub fn align(self, cx: &impl HasDataLayout) -> AbiAndPrefAlign {
        self.primitive().align(cx)
    }

    pub fn size(self, cx: &impl HasDataLayout) -> Size {
        self.primitive().size(cx)
    }

    #[inline]
    pub fn to_union(&self) -> Self {
        Self::Union { value: self.primitive() }
    }

    #[inline]
    pub fn valid_range(&self, cx: &impl HasDataLayout) -> WrappingRange {
        match *self {
            Scalar::Initialized { valid_range, .. } => valid_range,
            Scalar::Union { value } => WrappingRange::full(value.size(cx)),
        }
    }

    /// Allows the caller to mutate the valid range. This operation will panic if
    /// attempted on a union.
    #[inline]
    pub fn valid_range_mut(&mut self) -> &mut WrappingRange {
        match self {
            Scalar::Initialized { valid_range, .. } => valid_range,
            Scalar::Union { .. } => panic!("cannot change the valid range of a union"),
        }
    }

    /// Returns `true` if all possible values are valid, i.e. `valid_range` covers
    /// the whole layout.
    #[inline]
    pub fn is_always_valid<C: HasDataLayout>(&self, cx: &C) -> bool {
        match *self {
            Scalar::Initialized { valid_range, .. } => valid_range.is_full_for(self.size(cx)),
            Scalar::Union { .. } => true,
        }
    }

    /// Returns `true` if this type can be left uninit.
    #[inline]
    pub fn is_uninit_valid(&self) -> bool {
        match *self {
            Scalar::Initialized { .. } => false,
            Scalar::Union { .. } => true,
        }
    }

    /// Returns `true` if this is a signed integer scalar.
    #[inline]
    pub fn is_signed(&self) -> bool {
        match self.primitive() {
            Primitive::Int(_, signed) => signed,
            _ => false,
        }
    }
}
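
// A sketch of the canonical `bool` scalar: a `u8` whose valid bit patterns are
// restricted to 0 and 1, which is exactly what `is_bool` matches on.
#[cfg(test)]
mod scalar_example {
    use super::{Integer, Primitive, Scalar, TargetDataLayout, WrappingRange};

    #[test]
    fn bool_scalar_has_a_restricted_range() {
        let b = Scalar::Initialized {
            value: Primitive::Int(Integer::I8, false),
            valid_range: WrappingRange { start: 0, end: 1 },
        };
        let dl = TargetDataLayout::default();
        assert!(b.is_bool());
        // 2..=255 are invalid bit patterns, so the scalar is not always valid.
        assert!(!b.is_always_valid(&dl));
        assert!(!b.is_uninit_valid());
    }
}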

/// Describes how the fields of a type are shaped in memory.
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum FieldsShape<FieldIdx: Idx> {
    /// Scalar primitives and `!`, which never have fields.
    Primitive,

    /// All fields start at no offset. The `usize` is the field count.
    Union(NonZeroUsize),

    /// Array/vector-like placement, with all fields of identical types.
    Array { stride: Size, count: u64 },

    /// Struct-like placement, with precomputed offsets.
    Arbitrary {
        /// Offsets for the first byte of each field,
        /// ordered to match the source definition order.
        offsets: IndexVec<FieldIdx, Size>,

        /// Maps source order field indices to memory order indices,
        /// depending on how the fields were reordered (if at all).
        memory_index: IndexVec<FieldIdx, u32>,
    },
}

impl<FieldIdx: Idx> FieldsShape<FieldIdx> {
    #[inline]
    pub fn count(&self) -> usize {
        match *self {
            FieldsShape::Primitive => 0,
            FieldsShape::Union(count) => count.get(),
            FieldsShape::Array { count, .. } => count.try_into().unwrap(),
            FieldsShape::Arbitrary { ref offsets, .. } => offsets.len(),
        }
    }

    #[inline]
    pub fn offset(&self, i: usize) -> Size {
        match *self {
            FieldsShape::Primitive => {
                unreachable!("FieldsShape::offset: `Primitive`s have no fields")
            }
            FieldsShape::Union(count) => {
                assert!(i < count.get(), "tried to access field {i} of union with {count} fields");
                Size::ZERO
            }
            FieldsShape::Array { stride, count } => {
                let i = u64::try_from(i).unwrap();
                assert!(i < count, "tried to access field {i} of array with {count} fields");
                stride * i
            }
            FieldsShape::Arbitrary { ref offsets, .. } => offsets[FieldIdx::new(i)],
        }
    }

    #[inline]
    pub fn memory_index(&self, i: usize) -> usize {
        match *self {
            FieldsShape::Primitive => {
                unreachable!("FieldsShape::memory_index: `Primitive`s have no fields")
            }
            FieldsShape::Union(_) | FieldsShape::Array { .. } => i,
            FieldsShape::Arbitrary { ref memory_index, .. } => {
                memory_index[FieldIdx::new(i)].try_into().unwrap()
            }
        }
    }

    /// Gets source indices of the fields by increasing offsets.
    #[inline]
    pub fn index_by_increasing_offset(&self) -> impl ExactSizeIterator<Item = usize> {
        let mut inverse_small = [0u8; 64];
        let mut inverse_big = IndexVec::new();
        let use_small = self.count() <= inverse_small.len();

        // Invert the `memory_index` mapping, using a stack array for small field
        // counts to avoid a heap allocation.
        if let FieldsShape::Arbitrary { ref memory_index, .. } = *self {
            if use_small {
                for (field_idx, &mem_idx) in memory_index.iter_enumerated() {
                    inverse_small[mem_idx as usize] = field_idx.index() as u8;
                }
            } else {
                inverse_big = memory_index.invert_bijective_mapping();
            }
        }

        // Primitives don't really have fields in the way that structs do,
        // but we still iterate over one "pseudofield" for them.
        let pseudofield_count = if let FieldsShape::Primitive = self { 1 } else { self.count() };

        (0..pseudofield_count).map(move |i| match *self {
            FieldsShape::Primitive | FieldsShape::Union(_) | FieldsShape::Array { .. } => i,
            FieldsShape::Arbitrary { .. } => {
                if use_small {
                    inverse_small[i] as usize
                } else {
                    inverse_big[i as u32].index()
                }
            }
        })
    }
}
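
// A small sketch of `FieldsShape::Array` arithmetic: field offsets are just
// `stride * index`, with bounds enforced by `offset`. `usize` implements `Idx`,
// so it stands in for a real field-index newtype here.
#[cfg(test)]
mod fields_shape_example {
    use super::{FieldsShape, Size};

    #[test]
    fn array_offsets_are_stride_multiples() {
        let shape: FieldsShape<usize> =
            FieldsShape::Array { stride: Size::from_bytes(4), count: 3 };
        assert_eq!(shape.count(), 3);
        assert_eq!(shape.offset(2), Size::from_bytes(8));
        // Arrays are never reordered, so memory order matches source order.
        assert_eq!(shape.memory_index(1), 1);
    }
}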

/// An identifier that specifies the address space that some operation
/// should operate on. Special address spaces have an effect on code generation,
/// depending on the target and the address spaces it implements.
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub struct AddressSpace(pub u32);

impl AddressSpace {
    /// The default address space, corresponding to data space.
    pub const DATA: Self = AddressSpace(0);
}

/// Describes how values of the type are passed by target ABIs,
/// in terms of categories of C types there are ABI rules for.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum BackendRepr {
    Scalar(Scalar),
    ScalarPair(Scalar, Scalar),
    SimdVector {
        element: Scalar,
        count: u64,
    },
    Memory {
        /// If true, the size is exact; otherwise, it's only a lower bound.
        sized: bool,
    },
}

impl BackendRepr {
    /// Returns `true` if the layout corresponds to an unsized type.
    #[inline]
    pub fn is_unsized(&self) -> bool {
        match *self {
            BackendRepr::Scalar(_)
            | BackendRepr::ScalarPair(..)
            | BackendRepr::SimdVector { .. } => false,
            BackendRepr::Memory { sized } => !sized,
        }
    }

    #[inline]
    pub fn is_sized(&self) -> bool {
        !self.is_unsized()
    }

    /// Returns `true` if this is a single signed integer scalar.
    /// Panics on anything that is not a scalar.
    #[inline]
    pub fn is_signed(&self) -> bool {
        match self {
            BackendRepr::Scalar(scal) => scal.is_signed(),
            _ => panic!("`is_signed` on non-scalar ABI {self:?}"),
        }
    }

    /// Returns `true` if this is a scalar type.
    #[inline]
    pub fn is_scalar(&self) -> bool {
        matches!(*self, BackendRepr::Scalar(_))
    }

    /// Returns `true` if this is a bool.
    #[inline]
    pub fn is_bool(&self) -> bool {
        matches!(*self, BackendRepr::Scalar(s) if s.is_bool())
    }

    /// The psABI alignment for a `Scalar` or `ScalarPair`; `None` for the
    /// other variants.
    pub fn scalar_align<C: HasDataLayout>(&self, cx: &C) -> Option<Align> {
        match *self {
            BackendRepr::Scalar(s) => Some(s.align(cx).abi),
            BackendRepr::ScalarPair(s1, s2) => Some(s1.align(cx).max(s2.align(cx)).abi),
            // The alignment of a vector can vary in surprising ways.
            BackendRepr::SimdVector { .. } | BackendRepr::Memory { .. } => None,
        }
    }

    /// The psABI size for a `Scalar` or `ScalarPair`; `None` for the
    /// other variants.
    pub fn scalar_size<C: HasDataLayout>(&self, cx: &C) -> Option<Size> {
        match *self {
            // No padding in scalars.
            BackendRepr::Scalar(s) => Some(s.size(cx)),
            // There may be some padding between the pair.
            BackendRepr::ScalarPair(s1, s2) => {
                let field2_offset = s1.size(cx).align_to(s2.align(cx).abi);
                let size = (field2_offset + s2.size(cx)).align_to(
                    self.scalar_align(cx)
                        // We absolutely must have an answer here or everything is broken.
                        .unwrap(),
                );
                Some(size)
            }
            // The size of a vector can vary in surprising ways.
            BackendRepr::SimdVector { .. } | BackendRepr::Memory { .. } => None,
        }
    }

    /// Discards validity range information and allows undef.
    pub fn to_union(&self) -> Self {
        match *self {
            BackendRepr::Scalar(s) => BackendRepr::Scalar(s.to_union()),
            BackendRepr::ScalarPair(s1, s2) => {
                BackendRepr::ScalarPair(s1.to_union(), s2.to_union())
            }
            BackendRepr::SimdVector { element, count } => {
                BackendRepr::SimdVector { element: element.to_union(), count }
            }
            BackendRepr::Memory { .. } => BackendRepr::Memory { sized: true },
        }
    }

    pub fn eq_up_to_validity(&self, other: &Self) -> bool {
        match (self, other) {
            // Scalar, Vector, ScalarPair have `Scalar` in them where we ignore validity ranges.
            // We do *not* ignore the sign, since it matters for some ABIs (e.g. s390x).
            (BackendRepr::Scalar(l), BackendRepr::Scalar(r)) => l.primitive() == r.primitive(),
            (
                BackendRepr::SimdVector { element: element_l, count: count_l },
                BackendRepr::SimdVector { element: element_r, count: count_r },
            ) => element_l.primitive() == element_r.primitive() && count_l == count_r,
            (BackendRepr::ScalarPair(l1, l2), BackendRepr::ScalarPair(r1, r2)) => {
                l1.primitive() == r1.primitive() && l2.primitive() == r2.primitive()
            }
            // Everything else must be strictly identical.
            _ => self == other,
        }
    }
}
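
// A sketch of `ScalarPair` sizing under the default data layout: an (i32, u8)
// pair is padded out to the pair's alignment, just as `scalar_size` computes.
// The `int_scalar` helper is local to this illustration.
#[cfg(test)]
mod backend_repr_example {
    use super::{BackendRepr, Integer, Primitive, Scalar, Size, TargetDataLayout, WrappingRange};

    fn int_scalar(i: Integer) -> Scalar {
        Scalar::Initialized {
            value: Primitive::Int(i, false),
            valid_range: WrappingRange::full(i.size()),
        }
    }

    #[test]
    fn scalar_pair_is_padded_to_alignment() {
        let dl = TargetDataLayout::default();
        let pair = BackendRepr::ScalarPair(int_scalar(Integer::I32), int_scalar(Integer::I8));
        // The u8 sits at offset 4; the total is rounded up to the i32 alignment.
        assert_eq!(pair.scalar_size(&dl), Some(Size::from_bytes(8)));
        assert!(pair.is_sized());
    }
}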

#[derive(PartialEq, Eq, Hash, Clone, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum Variants<FieldIdx: Idx, VariantIdx: Idx> {
    /// A type with no valid variants. Must be uninhabited.
    Empty,

    /// Single enum variants, structs/tuples, unions, and all non-ADTs.
    Single {
        /// Always `0` for types that cannot have multiple variants.
        index: VariantIdx,
    },

    /// Enum-likes with more than one variant: each variant comes with a
    /// *discriminant* (usually the same as the variant index but the user can
    /// assign explicit discriminant values). That discriminant is encoded as a
    /// *tag* on the machine. The layout of each variant is a struct, and they
    /// all have space reserved for the tag.
    Multiple {
        tag: Scalar,
        tag_encoding: TagEncoding<VariantIdx>,
        tag_field: FieldIdx,
        variants: IndexVec<VariantIdx, LayoutData<FieldIdx, VariantIdx>>,
    },
}

#[derive(PartialEq, Eq, Hash, Clone, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum TagEncoding<VariantIdx: Idx> {
    /// The tag directly stores the discriminant, but possibly with a smaller layout
    /// (so converting the tag to the discriminant can require sign extension).
    Direct,

    /// Niche (values invalid for a type) encoding the discriminant.
    /// The variant `untagged_variant` contains a niche at an arbitrary
    /// offset (field `tag_field` of the enum), which for a variant with
    /// discriminant `d` is set to `(d - niche_variants.start).wrapping_add(niche_start)`
    /// (this is wrapping arithmetic using the type of the niche field).
    Niche {
        untagged_variant: VariantIdx,
        /// Which variants are encoded in the niche.
        niche_variants: RangeInclusive<VariantIdx>,
        /// The bit pattern in the niche that encodes `niche_variants.start()`.
        niche_start: u128,
    },
}

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub struct Niche {
    pub offset: Size,
    pub value: Primitive,
    pub valid_range: WrappingRange,
}

impl Niche {
    pub fn from_scalar<C: HasDataLayout>(cx: &C, offset: Size, scalar: Scalar) -> Option<Self> {
        let Scalar::Initialized { value, valid_range } = scalar else { return None };
        let niche = Niche { offset, value, valid_range };
        if niche.available(cx) > 0 { Some(niche) } else { None }
    }

    pub fn available<C: HasDataLayout>(&self, cx: &C) -> u128 {
        let Self { value, valid_range: v, .. } = *self;
        let size = value.size(cx);
        assert!(size.bits() <= 128);
        let max_value = size.unsigned_int_max();

        // Count how many values are outside the valid range.
        let niche = v.end.wrapping_add(1)..v.start;
        niche.end.wrapping_sub(niche.start) & max_value
    }

    pub fn reserve<C: HasDataLayout>(&self, cx: &C, count: u128) -> Option<(u128, Scalar)> {
        assert!(count > 0);

        let Self { value, valid_range: v, .. } = *self;
        let size = value.size(cx);
        assert!(size.bits() <= 128);
        let max_value = size.unsigned_int_max();

        let niche = v.end.wrapping_add(1)..v.start;
        let available = niche.end.wrapping_sub(niche.start) & max_value;
        if count > available {
            return None;
        }

        // Extend the range of valid values being reserved by moving either the
        // `v.start` or `v.end` bound, preferring the shortest path to niche zero:
        // having `None` of an eventual `Option<T>` occupy the zero bit pattern
        // can enable some special optimizations.
        let move_start = |v: WrappingRange| {
            let start = v.start.wrapping_sub(count) & max_value;
            Some((start, Scalar::Initialized { value, valid_range: v.with_start(start) }))
        };
        let move_end = |v: WrappingRange| {
            let start = v.end.wrapping_add(1) & max_value;
            let end = v.end.wrapping_add(count) & max_value;
            Some((start, Scalar::Initialized { value, valid_range: v.with_end(end) }))
        };
        let distance_end_zero = max_value - v.end;
        if v.start > v.end {
            // Zero is unavailable because wrapping occurs.
            move_end(v)
        } else if v.start <= distance_end_zero {
            if count <= v.start {
                move_start(v)
            } else {
                // Moved past zero, use a value past the end instead.
                move_end(v)
            }
        } else {
            // Advancing the end would land on zero, so check for overshoot.
            let end = v.end.wrapping_add(count) & max_value;
            let overshot_zero = (1..=v.end).contains(&end);
            if overshot_zero {
                move_start(v)
            } else {
                move_end(v)
            }
        }
    }
}
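
// A sketch of niche accounting using `bool`: only 0 and 1 are valid, so 254
// spare bit patterns exist, and reserving one yields the bit pattern 2 (the
// `Option<bool>` trick, where `None` is represented as 2).
#[cfg(test)]
mod niche_example {
    use super::{Integer, Niche, Primitive, Scalar, Size, TargetDataLayout, WrappingRange};

    #[test]
    fn bool_niche_has_254_spare_values() {
        let dl = TargetDataLayout::default();
        let scalar = Scalar::Initialized {
            value: Primitive::Int(Integer::I8, false),
            valid_range: WrappingRange { start: 0, end: 1 },
        };
        let niche = Niche::from_scalar(&dl, Size::ZERO, scalar).unwrap();
        assert_eq!(niche.available(&dl), 254);

        let (start, reserved) = niche.reserve(&dl, 1).unwrap();
        assert_eq!(start, 2); // the newly valid bit pattern
        assert_eq!(reserved.valid_range(&dl), WrappingRange { start: 0, end: 2 });
    }
}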

#[derive(PartialEq, Eq, Hash, Clone)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub struct LayoutData<FieldIdx: Idx, VariantIdx: Idx> {
    /// Says where the fields are located within the layout.
    pub fields: FieldsShape<FieldIdx>,

    /// Says how the variants (if any) are laid out.
    pub variants: Variants<FieldIdx, VariantIdx>,

    /// The `backend_repr` defines how this data will be represented to the
    /// codegen backend, and encodes value restrictions via `valid_range`.
    pub backend_repr: BackendRepr,

    /// The leaf scalar with the largest number of invalid values, if any.
    pub largest_niche: Option<Niche>,
    /// Is this type known to be uninhabited?
    pub uninhabited: bool,

    pub align: AbiAndPrefAlign,
    pub size: Size,

    /// The largest alignment explicitly requested with `repr(align)`.
    pub max_repr_align: Option<Align>,

    /// The alignment the type would have, ignoring any `repr(align)` or `repr(packed)`.
    pub unadjusted_abi_align: Align,

    /// The seed to be used for randomizing a type's layout.
    pub randomization_seed: Hash64,
}
1759
1760impl<FieldIdx: Idx, VariantIdx: Idx> LayoutData<FieldIdx, VariantIdx> {
1761 pub fn is_aggregate(&self) -> bool {
1763 match self.backend_repr {
1764 BackendRepr::Scalar(_) | BackendRepr::SimdVector { .. } => false,
1765 BackendRepr::ScalarPair(..) | BackendRepr::Memory { .. } => true,
1766 }
1767 }
1768
1769 pub fn is_uninhabited(&self) -> bool {
1771 self.uninhabited
1772 }
1773}
1774
1775impl<FieldIdx: Idx, VariantIdx: Idx> fmt::Debug for LayoutData<FieldIdx, VariantIdx>
1776where
1777 FieldsShape<FieldIdx>: fmt::Debug,
1778 Variants<FieldIdx, VariantIdx>: fmt::Debug,
1779{
1780 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1781 let LayoutData {
1785 size,
1786 align,
1787 backend_repr,
1788 fields,
1789 largest_niche,
1790 uninhabited,
1791 variants,
1792 max_repr_align,
1793 unadjusted_abi_align,
1794 randomization_seed,
1795 } = self;
1796 f.debug_struct("Layout")
1797 .field("size", size)
1798 .field("align", align)
1799 .field("backend_repr", backend_repr)
1800 .field("fields", fields)
1801 .field("largest_niche", largest_niche)
1802 .field("uninhabited", uninhabited)
1803 .field("variants", variants)
1804 .field("max_repr_align", max_repr_align)
1805 .field("unadjusted_abi_align", unadjusted_abi_align)
1806 .field("randomization_seed", randomization_seed)
1807 .finish()
1808 }
1809}

/// The kinds of pointers for which we know something extra about aliasing and validity.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum PointerKind {
    /// Shared reference. `frozen` indicates the absence of any `UnsafeCell`.
    SharedRef { frozen: bool },
    /// Mutable reference. `unpin` indicates the absence of any pinned data.
    MutableRef { unpin: bool },
    /// Box. `unpin` indicates the absence of any pinned data; `global` indicates
    /// the absence of any custom allocator.
    Box { unpin: bool, global: bool },
}

/// Encodes extra information we have about a pointer.
/// Note that this information is advisory only, and backends are free to ignore it.
#[derive(Copy, Clone, Debug)]
pub struct PointeeInfo {
    /// If this is `None`, then this is a raw pointer, so size and alignment are not
    /// guaranteed to be reliable.
    pub safe: Option<PointerKind>,
    /// If `safe` is `Some`, then the pointer is either null or dereferenceable for
    /// this many bytes.
    pub size: Size,
    pub align: Align,
}

impl<FieldIdx: Idx, VariantIdx: Idx> LayoutData<FieldIdx, VariantIdx> {
    /// Returns `true` if the layout corresponds to an unsized type.
    #[inline]
    pub fn is_unsized(&self) -> bool {
        self.backend_repr.is_unsized()
    }

    #[inline]
    pub fn is_sized(&self) -> bool {
        self.backend_repr.is_sized()
    }

    /// Returns `true` if the type is sized and a 1-ZST (meaning it has size 0 and
    /// alignment 1).
    pub fn is_1zst(&self) -> bool {
        self.is_sized() && self.size.bytes() == 0 && self.align.abi.bytes() == 1
    }

    /// Returns `true` if the type is a ZST and not unsized.
    ///
    /// Note that this does *not* imply that the type is irrelevant for layout! It can
    /// still have non-trivial alignment constraints. You probably want to use
    /// `is_1zst` instead.
    pub fn is_zst(&self) -> bool {
        match self.backend_repr {
            BackendRepr::Scalar(_)
            | BackendRepr::ScalarPair(..)
            | BackendRepr::SimdVector { .. } => false,
            BackendRepr::Memory { sized } => sized && self.size.bytes() == 0,
        }
    }

    /// Checks if these two layouts are equal enough to be considered "the same for
    /// all function call ABIs". Note that this ignores validity, as that is not
    /// necessarily a property of the ABI.
    pub fn eq_abi(&self, other: &Self) -> bool {
        self.size == other.size
            && self.is_sized() == other.is_sized()
            && self.backend_repr.eq_up_to_validity(&other.backend_repr)
            && self.backend_repr.is_bool() == other.backend_repr.is_bool()
            && self.align.abi == other.align.abi
            && self.max_repr_align == other.max_repr_align
            && self.unadjusted_abi_align == other.unadjusted_abi_align
    }
}

#[derive(Copy, Clone, Debug)]
pub enum StructKind {
    /// A tuple, closure, or univariant which cannot be coerced to unsized.
    AlwaysSized,
    /// A univariant, the last field of which may be coerced to unsized.
    MaybeUnsized,
    /// A univariant, but with a prefix of an arbitrary size & alignment (e.g., enum tag).
    Prefixed(Size, Align),
}

/// The error type produced when parsing an ABI from a string.
#[derive(Clone, Debug)]
pub enum AbiFromStrErr {
    /// The string was not recognized as an ABI name.
    Unknown,
    NoExplicitUnwind,
}