1#![cfg_attr(feature = "nightly", allow(internal_features))]
3#![cfg_attr(feature = "nightly", doc(rust_logo))]
4#![cfg_attr(feature = "nightly", feature(assert_matches))]
5#![cfg_attr(feature = "nightly", feature(rustc_attrs))]
6#![cfg_attr(feature = "nightly", feature(rustdoc_internals))]
7#![cfg_attr(feature = "nightly", feature(step_trait))]
8#![warn(unreachable_pub)]
9use std::fmt;
44#[cfg(feature = "nightly")]
45use std::iter::Step;
46use std::num::{NonZeroUsize, ParseIntError};
47use std::ops::{Add, AddAssign, Mul, RangeInclusive, Sub};
48use std::str::FromStr;
49
50use bitflags::bitflags;
51#[cfg(feature = "nightly")]
52use rustc_data_structures::stable_hasher::StableOrd;
53use rustc_index::{Idx, IndexSlice, IndexVec};
54#[cfg(feature = "nightly")]
55use rustc_macros::{Decodable_Generic, Encodable_Generic, HashStable_Generic};
56
57mod callconv;
58mod layout;
59#[cfg(test)]
60mod tests;
61
62mod extern_abi;
63
64pub use callconv::{Heterogeneous, HomogeneousAggregate, Reg, RegKind};
65pub use extern_abi::{ExternAbi, all_names};
66#[cfg(feature = "nightly")]
67pub use layout::{FIRST_VARIANT, FieldIdx, Layout, TyAbiInterface, TyAndLayout, VariantIdx};
68pub use layout::{LayoutCalculator, LayoutCalculatorError};
69
/// Empty marker trait, only available on nightly.
/// Presumably the context bound required by the `HashStable_Generic` derives
/// used throughout this crate — confirm against `rustc_data_structures`.
#[cfg(feature = "nightly")]
pub trait HashStableContext {}
75
/// Bit-set backing storage for parsed `#[repr(..)]` properties;
/// the flag constants are generated by the `bitflags!` invocation below.
#[derive(Clone, Copy, PartialEq, Eq, Default)]
#[cfg_attr(feature = "nightly", derive(Encodable_Generic, Decodable_Generic, HashStable_Generic))]
pub struct ReprFlags(u8);
79
bitflags! {
    impl ReprFlags: u8 {
        const IS_C = 1 << 0;
        const IS_SIMD = 1 << 1;
        const IS_TRANSPARENT = 1 << 2;
        // NOTE(review): no `#[repr(linear)]` exists in surface Rust — this
        // looks like an internal/compiler-set property; confirm at the set site.
        const IS_LINEAR = 1 << 3;
        // Opt-in to layout randomization (only effective if reordering
        // is not otherwise inhibited — see `can_randomize_type_layout`).
        const RANDOMIZE_LAYOUT = 1 << 4;
        // Any of these flags pins the source field order.
        const FIELD_ORDER_UNOPTIMIZABLE = ReprFlags::IS_C.bits()
            | ReprFlags::IS_SIMD.bits()
            | ReprFlags::IS_LINEAR.bits();
        // Any of these flags forbids ABI-level (newtype) optimizations.
        const ABI_UNOPTIMIZABLE = ReprFlags::IS_C.bits() | ReprFlags::IS_SIMD.bits();
    }
}
99
impl std::fmt::Debug for ReprFlags {
    /// Prints the set flags by name (e.g. `IS_C | IS_SIMD`) using the
    /// bitflags crate's canonical textual format.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        bitflags::parser::to_writer(self, f)
    }
}
107
/// The integer type chosen by an explicit `#[repr(<int>)]`.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
#[cfg_attr(feature = "nightly", derive(Encodable_Generic, Decodable_Generic, HashStable_Generic))]
pub enum IntegerType {
    /// Pointer-sized integer (`isize`/`usize`); the bool is `true` for the signed variant.
    Pointer(bool),
    /// Fixed-width integer of the given size; the bool is `true` if signed.
    Fixed(Integer, bool),
}
118
119impl IntegerType {
120 pub fn is_signed(&self) -> bool {
121 match self {
122 IntegerType::Pointer(b) => *b,
123 IntegerType::Fixed(_, b) => *b,
124 }
125 }
126}
127
/// Parsed `#[repr(..)]` options plus the seed driving layout randomization.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Default)]
#[cfg_attr(feature = "nightly", derive(Encodable_Generic, Decodable_Generic, HashStable_Generic))]
pub struct ReprOptions {
    // Explicit discriminant type from `#[repr(<int>)]`, if any.
    pub int: Option<IntegerType>,
    // Minimum alignment from `#[repr(align(..))]`.
    pub align: Option<Align>,
    // Alignment cap from `#[repr(packed(..))]`.
    pub pack: Option<Align>,
    // Boolean repr properties (C, simd, transparent, ...).
    pub flags: ReprFlags,
    // Seed for the field-shuffling RNG when randomization applies.
    pub field_shuffle_seed: u64,
}
145
146impl ReprOptions {
147 #[inline]
148 pub fn simd(&self) -> bool {
149 self.flags.contains(ReprFlags::IS_SIMD)
150 }
151
152 #[inline]
153 pub fn c(&self) -> bool {
154 self.flags.contains(ReprFlags::IS_C)
155 }
156
157 #[inline]
158 pub fn packed(&self) -> bool {
159 self.pack.is_some()
160 }
161
162 #[inline]
163 pub fn transparent(&self) -> bool {
164 self.flags.contains(ReprFlags::IS_TRANSPARENT)
165 }
166
167 #[inline]
168 pub fn linear(&self) -> bool {
169 self.flags.contains(ReprFlags::IS_LINEAR)
170 }
171
172 pub fn discr_type(&self) -> IntegerType {
175 self.int.unwrap_or(IntegerType::Pointer(true))
176 }
177
178 pub fn inhibit_enum_layout_opt(&self) -> bool {
182 self.c() || self.int.is_some()
183 }
184
185 pub fn inhibit_newtype_abi_optimization(&self) -> bool {
186 self.flags.intersects(ReprFlags::ABI_UNOPTIMIZABLE)
187 }
188
189 pub fn inhibit_struct_field_reordering(&self) -> bool {
192 self.flags.intersects(ReprFlags::FIELD_ORDER_UNOPTIMIZABLE) || self.int.is_some()
193 }
194
195 pub fn can_randomize_type_layout(&self) -> bool {
198 !self.inhibit_struct_field_reordering() && self.flags.contains(ReprFlags::RANDOMIZE_LAYOUT)
199 }
200
201 pub fn inhibits_union_abi_opt(&self) -> bool {
203 self.c()
204 }
205}
206
/// Parsed [data layout](https://llvm.org/docs/LangRef.html#data-layout) of a
/// target: everything this crate needs to compute sizes and alignments.
#[derive(Debug, PartialEq, Eq)]
pub struct TargetDataLayout {
    pub endian: Endian,
    pub i1_align: AbiAndPrefAlign,
    pub i8_align: AbiAndPrefAlign,
    pub i16_align: AbiAndPrefAlign,
    pub i32_align: AbiAndPrefAlign,
    pub i64_align: AbiAndPrefAlign,
    pub i128_align: AbiAndPrefAlign,
    pub f16_align: AbiAndPrefAlign,
    pub f32_align: AbiAndPrefAlign,
    pub f64_align: AbiAndPrefAlign,
    pub f128_align: AbiAndPrefAlign,
    pub pointer_size: Size,
    pub pointer_align: AbiAndPrefAlign,
    pub aggregate_align: AbiAndPrefAlign,

    // Alignments for vector types, keyed by total vector size.
    pub vector_align: Vec<(Size, AbiAndPrefAlign)>,

    // Address space used for function pointers (LLVM `P<n>` spec).
    pub instruction_address_space: AddressSpace,

    // Minimum size of `#[repr(C)]` enums. Not encoded in the LLVM data layout
    // string, so the parser leaves it at its default (`I32`).
    pub c_enum_min_size: Integer,
}
236
237impl Default for TargetDataLayout {
238 fn default() -> TargetDataLayout {
240 let align = |bits| Align::from_bits(bits).unwrap();
241 TargetDataLayout {
242 endian: Endian::Big,
243 i1_align: AbiAndPrefAlign::new(align(8)),
244 i8_align: AbiAndPrefAlign::new(align(8)),
245 i16_align: AbiAndPrefAlign::new(align(16)),
246 i32_align: AbiAndPrefAlign::new(align(32)),
247 i64_align: AbiAndPrefAlign { abi: align(32), pref: align(64) },
248 i128_align: AbiAndPrefAlign { abi: align(32), pref: align(64) },
249 f16_align: AbiAndPrefAlign::new(align(16)),
250 f32_align: AbiAndPrefAlign::new(align(32)),
251 f64_align: AbiAndPrefAlign::new(align(64)),
252 f128_align: AbiAndPrefAlign::new(align(128)),
253 pointer_size: Size::from_bits(64),
254 pointer_align: AbiAndPrefAlign::new(align(64)),
255 aggregate_align: AbiAndPrefAlign { abi: align(0), pref: align(64) },
256 vector_align: vec![
257 (Size::from_bits(64), AbiAndPrefAlign::new(align(64))),
258 (Size::from_bits(128), AbiAndPrefAlign::new(align(128))),
259 ],
260 instruction_address_space: AddressSpace::DATA,
261 c_enum_min_size: Integer::I32,
262 }
263 }
264}
265
/// Errors produced while parsing an LLVM data-layout string; `cause` is the
/// spec fragment being parsed when the error occurred.
pub enum TargetDataLayoutErrors<'a> {
    /// Address space index (after `P`) failed to parse as `u32`.
    InvalidAddressSpace { addr_space: &'a str, cause: &'a str, err: ParseIntError },
    /// A bit count (size or alignment) failed to parse as `u64`.
    InvalidBits { kind: &'a str, bit: &'a str, cause: &'a str, err: ParseIntError },
    /// A spec that requires an alignment component had none.
    MissingAlignment { cause: &'a str },
    /// An alignment value was rejected by `Align::from_bits`.
    InvalidAlignment { cause: &'a str, err: AlignFromBytesError },
    InconsistentTargetArchitecture { dl: &'a str, target: &'a str },
    InconsistentTargetPointerWidth { pointer_size: u64, target: u32 },
    InvalidBitsSize { err: String },
}
275
impl TargetDataLayout {
    /// Parses an
    /// [LLVM data-layout string](https://llvm.org/docs/LangRef.html#data-layout)
    /// into a `TargetDataLayout`, starting from `Default::default()` and
    /// overriding each component mentioned in `input`.
    ///
    /// `c_enum_min_size` is never filled in (it is not representable in the
    /// LLVM string) and stays at its default of `I32`.
    pub fn parse_from_llvm_datalayout_string<'a>(
        input: &'a str,
    ) -> Result<TargetDataLayout, TargetDataLayoutErrors<'a>> {
        // Parses an address-space index, e.g. the `<n>` of `P<n>`.
        let parse_address_space = |s: &'a str, cause: &'a str| {
            s.parse::<u32>().map(AddressSpace).map_err(|err| {
                TargetDataLayoutErrors::InvalidAddressSpace { addr_space: s, cause, err }
            })
        };

        // Parses a raw bit count; `kind` distinguishes sizes from alignments
        // in the error message.
        let parse_bits = |s: &'a str, kind: &'a str, cause: &'a str| {
            s.parse::<u64>().map_err(|err| TargetDataLayoutErrors::InvalidBits {
                kind,
                bit: s,
                cause,
                err,
            })
        };

        // Parses a size given in bits.
        let parse_size =
            |s: &'a str, cause: &'a str| parse_bits(s, "size", cause).map(Size::from_bits);

        // Parses `abi[:pref]` alignment components (both in bits).
        let parse_align = |s: &[&'a str], cause: &'a str| {
            if s.is_empty() {
                return Err(TargetDataLayoutErrors::MissingAlignment { cause });
            }
            let align_from_bits = |bits| {
                Align::from_bits(bits)
                    .map_err(|err| TargetDataLayoutErrors::InvalidAlignment { cause, err })
            };
            let abi = parse_bits(s[0], "alignment", cause)?;
            // Preferred alignment defaults to the ABI alignment when absent.
            let pref = s.get(1).map_or(Ok(abi), |pref| parse_bits(pref, "alignment", cause))?;
            Ok(AbiAndPrefAlign { abi: align_from_bits(abi)?, pref: align_from_bits(pref)? })
        };

        let mut dl = TargetDataLayout::default();
        // Tracks which `i<N>` spec currently determines `i128_align`; larger
        // widths (up to 128) take precedence over smaller ones.
        let mut i128_align_src = 64;
        for spec in input.split('-') {
            let spec_parts = spec.split(':').collect::<Vec<_>>();

            match &*spec_parts {
                ["e"] => dl.endian = Endian::Little,
                ["E"] => dl.endian = Endian::Big,
                // Program address space, e.g. `P1`.
                [p] if p.starts_with('P') => {
                    dl.instruction_address_space = parse_address_space(&p[1..], "P")?
                }
                ["a", ref a @ ..] => dl.aggregate_align = parse_align(a, "a")?,
                ["f16", ref a @ ..] => dl.f16_align = parse_align(a, "f16")?,
                ["f32", ref a @ ..] => dl.f32_align = parse_align(a, "f32")?,
                ["f64", ref a @ ..] => dl.f64_align = parse_align(a, "f64")?,
                ["f128", ref a @ ..] => dl.f128_align = parse_align(a, "f128")?,
                // Only the default (0) pointer address space is handled here.
                [p @ "p", s, ref a @ ..] | [p @ "p0", s, ref a @ ..] => {
                    dl.pointer_size = parse_size(s, p)?;
                    dl.pointer_align = parse_align(a, p)?;
                }
                [s, ref a @ ..] if s.starts_with('i') => {
                    let Ok(bits) = s[1..].parse::<u64>() else {
                        // Not a valid width: re-parse just to surface the
                        // error, then skip this spec.
                        parse_size(&s[1..], "i")?; continue;
                    };
                    let a = parse_align(a, s)?;
                    match bits {
                        1 => dl.i1_align = a,
                        8 => dl.i8_align = a,
                        16 => dl.i16_align = a,
                        32 => dl.i32_align = a,
                        64 => dl.i64_align = a,
                        _ => {}
                    }
                    // `i128` falls back to the widest `i<N>` spec present
                    // (N <= 128); later, wider specs win.
                    if bits >= i128_align_src && bits <= 128 {
                        i128_align_src = bits;
                        dl.i128_align = a;
                    }
                }
                [s, ref a @ ..] if s.starts_with('v') => {
                    let v_size = parse_size(&s[1..], "v")?;
                    let a = parse_align(a, s)?;
                    // Overwrite an existing entry for this vector size, if any.
                    if let Some(v) = dl.vector_align.iter_mut().find(|v| v.0 == v_size) {
                        v.1 = a;
                        continue;
                    }
                    dl.vector_align.push((v_size, a));
                }
                // Unrecognized specs are silently ignored.
                _ => {}
            }
        }
        Ok(dl)
    }

    /// Maximum object size supported by the target, derived from pointer
    /// width; panics on pointer widths other than 16/32/64 bits.
    #[inline]
    pub fn obj_size_bound(&self) -> u64 {
        match self.pointer_size.bits() {
            16 => 1 << 15,
            32 => 1 << 31,
            64 => 1 << 61,
            bits => panic!("obj_size_bound: unknown pointer bit size {bits}"),
        }
    }

    /// The `Integer` matching the target's pointer width;
    /// panics on widths other than 16/32/64 bits.
    #[inline]
    pub fn ptr_sized_integer(&self) -> Integer {
        use Integer::*;
        match self.pointer_size.bits() {
            16 => I16,
            32 => I32,
            64 => I64,
            bits => panic!("ptr_sized_integer: unknown pointer bit size {bits}"),
        }
    }

    /// Alignment for a vector of total size `vec_size`: a declared entry if
    /// present, else the next power of two of the byte size.
    #[inline]
    pub fn vector_align(&self, vec_size: Size) -> AbiAndPrefAlign {
        for &(size, align) in &self.vector_align {
            if size == vec_size {
                return align;
            }
        }
        AbiAndPrefAlign::new(Align::from_bytes(vec_size.bytes().next_power_of_two()).unwrap())
    }
}
422
/// Anything that can hand out a reference to the target's data layout.
pub trait HasDataLayout {
    fn data_layout(&self) -> &TargetDataLayout;
}
426
impl HasDataLayout for TargetDataLayout {
    // A data layout trivially provides itself.
    #[inline]
    fn data_layout(&self) -> &TargetDataLayout {
        self
    }
}
433
// Useful in `layout_of` and `extern` type handling where a reference is held.
impl HasDataLayout for &TargetDataLayout {
    #[inline]
    fn data_layout(&self) -> &TargetDataLayout {
        (**self).data_layout()
    }
}
441
/// Endianness of the target.
#[derive(Copy, Clone, PartialEq, Eq)]
pub enum Endian {
    Little,
    Big,
}
448
449impl Endian {
450 pub fn as_str(&self) -> &'static str {
451 match self {
452 Self::Little => "little",
453 Self::Big => "big",
454 }
455 }
456}
457
impl fmt::Debug for Endian {
    // Debug output matches `as_str` ("little"/"big"), keeping it round-trippable
    // with the `FromStr` impl below.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(self.as_str())
    }
}
463
464impl FromStr for Endian {
465 type Err = String;
466
467 fn from_str(s: &str) -> Result<Self, Self::Err> {
468 match s {
469 "little" => Ok(Self::Little),
470 "big" => Ok(Self::Big),
471 _ => Err(format!(r#"unknown endian: "{s}""#)),
472 }
473 }
474}
475
/// Size of a type in bytes.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(feature = "nightly", derive(Encodable_Generic, Decodable_Generic, HashStable_Generic))]
pub struct Size {
    raw: u64,
}
482
#[cfg(feature = "nightly")]
impl StableOrd for Size {
    const CAN_USE_UNSTABLE_SORT: bool = true;

    // The derived `Ord` compares the raw byte count, which is not affected
    // by (de-)serialization, so the ordering is stable.
    const THIS_IMPLEMENTATION_HAS_BEEN_TRIPLE_CHECKED: () = ();
}
491
// Custom Debug to show the unit explicitly, e.g. `Size(4 bytes)`.
impl fmt::Debug for Size {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "Size({} bytes)", self.bytes())
    }
}
498
499impl Size {
500 pub const ZERO: Size = Size { raw: 0 };
501
502 pub fn from_bits(bits: impl TryInto<u64>) -> Size {
505 let bits = bits.try_into().ok().unwrap();
506 Size { raw: bits / 8 + ((bits % 8) + 7) / 8 }
508 }
509
510 #[inline]
511 pub fn from_bytes(bytes: impl TryInto<u64>) -> Size {
512 let bytes: u64 = bytes.try_into().ok().unwrap();
513 Size { raw: bytes }
514 }
515
516 #[inline]
517 pub fn bytes(self) -> u64 {
518 self.raw
519 }
520
521 #[inline]
522 pub fn bytes_usize(self) -> usize {
523 self.bytes().try_into().unwrap()
524 }
525
526 #[inline]
527 pub fn bits(self) -> u64 {
528 #[cold]
529 fn overflow(bytes: u64) -> ! {
530 panic!("Size::bits: {bytes} bytes in bits doesn't fit in u64")
531 }
532
533 self.bytes().checked_mul(8).unwrap_or_else(|| overflow(self.bytes()))
534 }
535
536 #[inline]
537 pub fn bits_usize(self) -> usize {
538 self.bits().try_into().unwrap()
539 }
540
541 #[inline]
542 pub fn align_to(self, align: Align) -> Size {
543 let mask = align.bytes() - 1;
544 Size::from_bytes((self.bytes() + mask) & !mask)
545 }
546
547 #[inline]
548 pub fn is_aligned(self, align: Align) -> bool {
549 let mask = align.bytes() - 1;
550 self.bytes() & mask == 0
551 }
552
553 #[inline]
554 pub fn checked_add<C: HasDataLayout>(self, offset: Size, cx: &C) -> Option<Size> {
555 let dl = cx.data_layout();
556
557 let bytes = self.bytes().checked_add(offset.bytes())?;
558
559 if bytes < dl.obj_size_bound() { Some(Size::from_bytes(bytes)) } else { None }
560 }
561
562 #[inline]
563 pub fn checked_mul<C: HasDataLayout>(self, count: u64, cx: &C) -> Option<Size> {
564 let dl = cx.data_layout();
565
566 let bytes = self.bytes().checked_mul(count)?;
567 if bytes < dl.obj_size_bound() { Some(Size::from_bytes(bytes)) } else { None }
568 }
569
570 #[inline]
573 pub fn sign_extend(self, value: u128) -> i128 {
574 let size = self.bits();
575 if size == 0 {
576 return 0;
578 }
579 let shift = 128 - size;
581 ((value << shift) as i128) >> shift
584 }
585
586 #[inline]
588 pub fn truncate(self, value: u128) -> u128 {
589 let size = self.bits();
590 if size == 0 {
591 return 0;
593 }
594 let shift = 128 - size;
595 (value << shift) >> shift
597 }
598
599 #[inline]
600 pub fn signed_int_min(&self) -> i128 {
601 self.sign_extend(1_u128 << (self.bits() - 1))
602 }
603
604 #[inline]
605 pub fn signed_int_max(&self) -> i128 {
606 i128::MAX >> (128 - self.bits())
607 }
608
609 #[inline]
610 pub fn unsigned_int_max(&self) -> u128 {
611 u128::MAX >> (128 - self.bits())
612 }
613}
614
615impl Add for Size {
619 type Output = Size;
620 #[inline]
621 fn add(self, other: Size) -> Size {
622 Size::from_bytes(self.bytes().checked_add(other.bytes()).unwrap_or_else(|| {
623 panic!("Size::add: {} + {} doesn't fit in u64", self.bytes(), other.bytes())
624 }))
625 }
626}
627
628impl Sub for Size {
629 type Output = Size;
630 #[inline]
631 fn sub(self, other: Size) -> Size {
632 Size::from_bytes(self.bytes().checked_sub(other.bytes()).unwrap_or_else(|| {
633 panic!("Size::sub: {} - {} would result in negative size", self.bytes(), other.bytes())
634 }))
635 }
636}
637
// `n * size` delegates to `size * n` below.
impl Mul<Size> for u64 {
    type Output = Size;
    #[inline]
    fn mul(self, size: Size) -> Size {
        size * self
    }
}
645
646impl Mul<u64> for Size {
647 type Output = Size;
648 #[inline]
649 fn mul(self, count: u64) -> Size {
650 match self.bytes().checked_mul(count) {
651 Some(bytes) => Size::from_bytes(bytes),
652 None => panic!("Size::mul: {} * {} doesn't fit in u64", self.bytes(), count),
653 }
654 }
655}
656
// Delegates to the (panicking) `Add` impl above.
impl AddAssign for Size {
    #[inline]
    fn add_assign(&mut self, other: Size) {
        *self = *self + other;
    }
}
663
// Allows `Size` to be used in ranges (`start..end`), nightly only.
// Every method delegates to the corresponding `u64` operation on the
// raw byte count.
#[cfg(feature = "nightly")]
impl Step for Size {
    #[inline]
    fn steps_between(start: &Self, end: &Self) -> (usize, Option<usize>) {
        u64::steps_between(&start.bytes(), &end.bytes())
    }

    #[inline]
    fn forward_checked(start: Self, count: usize) -> Option<Self> {
        u64::forward_checked(start.bytes(), count).map(Self::from_bytes)
    }

    #[inline]
    fn forward(start: Self, count: usize) -> Self {
        Self::from_bytes(u64::forward(start.bytes(), count))
    }

    #[inline]
    unsafe fn forward_unchecked(start: Self, count: usize) -> Self {
        // SAFETY: the caller upholds `Step::forward_unchecked`'s contract,
        // which is forwarded unchanged to the `u64` implementation.
        Self::from_bytes(unsafe { u64::forward_unchecked(start.bytes(), count) })
    }

    #[inline]
    fn backward_checked(start: Self, count: usize) -> Option<Self> {
        u64::backward_checked(start.bytes(), count).map(Self::from_bytes)
    }

    #[inline]
    fn backward(start: Self, count: usize) -> Self {
        Self::from_bytes(u64::backward(start.bytes(), count))
    }

    #[inline]
    unsafe fn backward_unchecked(start: Self, count: usize) -> Self {
        // SAFETY: the caller upholds `Step::backward_unchecked`'s contract,
        // which is forwarded unchanged to the `u64` implementation.
        Self::from_bytes(unsafe { u64::backward_unchecked(start.bytes(), count) })
    }
}
701
/// Alignment of a type in bytes — always a power of two, stored as its log2.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(feature = "nightly", derive(Encodable_Generic, Decodable_Generic, HashStable_Generic))]
pub struct Align {
    pow2: u8,
}
708
// Custom Debug showing the decoded byte value, e.g. `Align(8 bytes)`.
impl fmt::Debug for Align {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "Align({} bytes)", self.bytes())
    }
}
715
/// Why a byte count could not be turned into an `Align`;
/// the payload is the rejected value.
#[derive(Clone, Copy)]
pub enum AlignFromBytesError {
    NotPowerOfTwo(u64),
    TooLarge(u64),
}
721
722impl AlignFromBytesError {
723 pub fn diag_ident(self) -> &'static str {
724 match self {
725 Self::NotPowerOfTwo(_) => "not_power_of_two",
726 Self::TooLarge(_) => "too_large",
727 }
728 }
729
730 pub fn align(self) -> u64 {
731 let (Self::NotPowerOfTwo(align) | Self::TooLarge(align)) = self;
732 align
733 }
734}
735
// Debug reuses the human-readable Display text.
impl fmt::Debug for AlignFromBytesError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(self, f)
    }
}
741
impl fmt::Display for AlignFromBytesError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            AlignFromBytesError::NotPowerOfTwo(align) => write!(f, "`{align}` is not a power of 2"),
            AlignFromBytesError::TooLarge(align) => write!(f, "`{align}` is too large"),
        }
    }
}
750
impl Align {
    pub const ONE: Align = Align { pow2: 0 };
    pub const EIGHT: Align = Align { pow2: 3 };
    // Maximum supported alignment: 2^29 bytes.
    pub const MAX: Align = Align { pow2: 29 };

    /// Alignment from a bit count (converted to whole bytes first).
    #[inline]
    pub fn from_bits(bits: u64) -> Result<Align, AlignFromBytesError> {
        Align::from_bytes(Size::from_bits(bits).bytes())
    }

    /// Alignment from a byte count; must be a power of two no greater than
    /// `Align::MAX`. A count of 0 is accepted and treated as alignment 1.
    #[inline]
    pub const fn from_bytes(align: u64) -> Result<Align, AlignFromBytesError> {
        // Treat an alignment of 0 bytes like 1 byte.
        if align == 0 {
            return Ok(Align::ONE);
        }

        // Error constructors are `#[cold]` to keep the happy path compact.
        #[cold]
        const fn not_power_of_2(align: u64) -> AlignFromBytesError {
            AlignFromBytesError::NotPowerOfTwo(align)
        }

        #[cold]
        const fn too_large(align: u64) -> AlignFromBytesError {
            AlignFromBytesError::TooLarge(align)
        }

        // A power of two has exactly one set bit: `1 << trailing_zeros`.
        let tz = align.trailing_zeros();
        if align != (1 << tz) {
            return Err(not_power_of_2(align));
        }

        let pow2 = tz as u8;
        if pow2 > Self::MAX.pow2 {
            return Err(too_large(align));
        }

        Ok(Align { pow2 })
    }

    #[inline]
    pub fn bytes(self) -> u64 {
        1 << self.pow2
    }

    #[inline]
    pub fn bytes_usize(self) -> usize {
        self.bytes().try_into().unwrap()
    }

    #[inline]
    pub fn bits(self) -> u64 {
        self.bytes() * 8
    }

    #[inline]
    pub fn bits_usize(self) -> usize {
        self.bits().try_into().unwrap()
    }

    /// Largest power-of-two alignment that `offset` is a multiple of.
    /// NOTE(review): a zero offset yields `pow2 = 64` (> `MAX`); callers
    /// appear to only compare/`min` the result — confirm before calling
    /// `bytes()` on it.
    #[inline]
    pub fn max_for_offset(offset: Size) -> Align {
        Align { pow2: offset.bytes().trailing_zeros() as u8 }
    }

    /// Lowers this alignment, if necessary, so that `offset` is aligned to
    /// the result.
    #[inline]
    pub fn restrict_for_offset(self, offset: Size) -> Align {
        self.min(Align::max_for_offset(offset))
    }
}
828
/// A pair of alignments: the ABI-mandated minimum and the preferred one.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub struct AbiAndPrefAlign {
    pub abi: Align,
    pub pref: Align,
}
844
845impl AbiAndPrefAlign {
846 #[inline]
847 pub fn new(align: Align) -> AbiAndPrefAlign {
848 AbiAndPrefAlign { abi: align, pref: align }
849 }
850
851 #[inline]
852 pub fn min(self, other: AbiAndPrefAlign) -> AbiAndPrefAlign {
853 AbiAndPrefAlign { abi: self.abi.min(other.abi), pref: self.pref.min(other.pref) }
854 }
855
856 #[inline]
857 pub fn max(self, other: AbiAndPrefAlign) -> AbiAndPrefAlign {
858 AbiAndPrefAlign { abi: self.abi.max(other.abi), pref: self.pref.max(other.pref) }
859 }
860}
861
/// Integer widths, used both for Rust integer types and discriminants.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(Encodable_Generic, Decodable_Generic, HashStable_Generic))]
pub enum Integer {
    I8,
    I16,
    I32,
    I64,
    I128,
}
872
impl Integer {
    /// Name of the signed Rust type of this width, e.g. `"i32"`.
    pub fn int_ty_str(self) -> &'static str {
        use Integer::*;
        match self {
            I8 => "i8",
            I16 => "i16",
            I32 => "i32",
            I64 => "i64",
            I128 => "i128",
        }
    }

    /// Name of the unsigned Rust type of this width, e.g. `"u32"`.
    pub fn uint_ty_str(self) -> &'static str {
        use Integer::*;
        match self {
            I8 => "u8",
            I16 => "u16",
            I32 => "u32",
            I64 => "u64",
            I128 => "u128",
        }
    }

    /// Size in bytes of this integer width.
    #[inline]
    pub fn size(self) -> Size {
        use Integer::*;
        match self {
            I8 => Size::from_bytes(1),
            I16 => Size::from_bytes(2),
            I32 => Size::from_bytes(4),
            I64 => Size::from_bytes(8),
            I128 => Size::from_bytes(16),
        }
    }

    /// Resolves an `IntegerType` (from `#[repr(..)]`) to a concrete width,
    /// using the target's pointer size for `Pointer`.
    pub fn from_attr<C: HasDataLayout>(cx: &C, ity: IntegerType) -> Integer {
        let dl = cx.data_layout();

        match ity {
            IntegerType::Pointer(_) => dl.ptr_sized_integer(),
            IntegerType::Fixed(x, _) => x,
        }
    }

    /// Target alignment for this integer width.
    pub fn align<C: HasDataLayout>(self, cx: &C) -> AbiAndPrefAlign {
        use Integer::*;
        let dl = cx.data_layout();

        match self {
            I8 => dl.i8_align,
            I16 => dl.i16_align,
            I32 => dl.i32_align,
            I64 => dl.i64_align,
            I128 => dl.i128_align,
        }
    }

    /// Maximum signed value representable at this width.
    #[inline]
    pub fn signed_max(self) -> i128 {
        use Integer::*;
        match self {
            I8 => i8::MAX as i128,
            I16 => i16::MAX as i128,
            I32 => i32::MAX as i128,
            I64 => i64::MAX as i128,
            I128 => i128::MAX,
        }
    }

    /// Smallest width that can represent the signed value `x`.
    #[inline]
    pub fn fit_signed(x: i128) -> Integer {
        use Integer::*;
        match x {
            -0x0000_0000_0000_0080..=0x0000_0000_0000_007f => I8,
            -0x0000_0000_0000_8000..=0x0000_0000_0000_7fff => I16,
            -0x0000_0000_8000_0000..=0x0000_0000_7fff_ffff => I32,
            -0x8000_0000_0000_0000..=0x7fff_ffff_ffff_ffff => I64,
            _ => I128,
        }
    }

    /// Smallest width that can represent the unsigned value `x`.
    /// The arms overlap (all start at 0); the first — smallest — match wins.
    #[inline]
    pub fn fit_unsigned(x: u128) -> Integer {
        use Integer::*;
        match x {
            0..=0x0000_0000_0000_00ff => I8,
            0..=0x0000_0000_0000_ffff => I16,
            0..=0x0000_0000_ffff_ffff => I32,
            0..=0xffff_ffff_ffff_ffff => I64,
            _ => I128,
        }
    }

    /// Finds the smallest integer whose size *and* ABI alignment both equal
    /// `wanted`, if any.
    pub fn for_align<C: HasDataLayout>(cx: &C, wanted: Align) -> Option<Integer> {
        use Integer::*;
        let dl = cx.data_layout();

        [I8, I16, I32, I64, I128].into_iter().find(|&candidate| {
            wanted == candidate.align(dl).abi && wanted.bytes() == candidate.size().bytes()
        })
    }

    /// Finds the largest integer whose size and ABI alignment are both at
    /// most `wanted`, falling back to `I8`.
    /// Note `I128` is deliberately not among the candidates here.
    pub fn approximate_align<C: HasDataLayout>(cx: &C, wanted: Align) -> Integer {
        use Integer::*;
        let dl = cx.data_layout();

        for candidate in [I64, I32, I16] {
            if wanted >= candidate.align(dl).abi && wanted.bytes() >= candidate.size().bytes() {
                return candidate;
            }
        }
        I8
    }

    /// Maps a size of exactly 8/16/32/64/128 bits to the matching width;
    /// any other size is an error.
    #[inline]
    pub fn from_size(size: Size) -> Result<Self, String> {
        match size.bits() {
            8 => Ok(Integer::I8),
            16 => Ok(Integer::I16),
            32 => Ok(Integer::I32),
            64 => Ok(Integer::I64),
            128 => Ok(Integer::I128),
            _ => Err(format!("rust does not support integers with {} bits", size.bits())),
        }
    }
}
1008
/// Floating-point widths.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum Float {
    F16,
    F32,
    F64,
    F128,
}
1018
1019impl Float {
1020 pub fn size(self) -> Size {
1021 use Float::*;
1022
1023 match self {
1024 F16 => Size::from_bits(16),
1025 F32 => Size::from_bits(32),
1026 F64 => Size::from_bits(64),
1027 F128 => Size::from_bits(128),
1028 }
1029 }
1030
1031 pub fn align<C: HasDataLayout>(self, cx: &C) -> AbiAndPrefAlign {
1032 use Float::*;
1033 let dl = cx.data_layout();
1034
1035 match self {
1036 F16 => dl.f16_align,
1037 F32 => dl.f32_align,
1038 F64 => dl.f64_align,
1039 F128 => dl.f128_align,
1040 }
1041 }
1042}
1043
/// Fundamental unit of memory access and layout.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum Primitive {
    /// The bool is the signedness of the integer.
    Int(Integer, bool),
    Float(Float),
    /// A pointer in the given address space.
    Pointer(AddressSpace),
}
1059
1060impl Primitive {
1061 pub fn size<C: HasDataLayout>(self, cx: &C) -> Size {
1062 use Primitive::*;
1063 let dl = cx.data_layout();
1064
1065 match self {
1066 Int(i, _) => i.size(),
1067 Float(f) => f.size(),
1068 Pointer(_) => dl.pointer_size,
1072 }
1073 }
1074
1075 pub fn align<C: HasDataLayout>(self, cx: &C) -> AbiAndPrefAlign {
1076 use Primitive::*;
1077 let dl = cx.data_layout();
1078
1079 match self {
1080 Int(i, _) => i.align(dl),
1081 Float(f) => f.align(dl),
1082 Pointer(_) => dl.pointer_align,
1086 }
1087 }
1088}
1089
/// Inclusive range of valid bit patterns `start..=end`. When `start > end`,
/// the range wraps around the maximum value of its size (i.e. it covers
/// `start..=MAX` plus `0..=end`).
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub struct WrappingRange {
    pub start: u128,
    pub end: u128,
}
1105
1106impl WrappingRange {
1107 pub fn full(size: Size) -> Self {
1108 Self { start: 0, end: size.unsigned_int_max() }
1109 }
1110
1111 #[inline(always)]
1113 pub fn contains(&self, v: u128) -> bool {
1114 if self.start <= self.end {
1115 self.start <= v && v <= self.end
1116 } else {
1117 self.start <= v || v <= self.end
1118 }
1119 }
1120
1121 #[inline(always)]
1123 fn with_start(mut self, start: u128) -> Self {
1124 self.start = start;
1125 self
1126 }
1127
1128 #[inline(always)]
1130 fn with_end(mut self, end: u128) -> Self {
1131 self.end = end;
1132 self
1133 }
1134
1135 #[inline]
1137 fn is_full_for(&self, size: Size) -> bool {
1138 let max_value = size.unsigned_int_max();
1139 debug_assert!(self.start <= max_value && self.end <= max_value);
1140 self.start == (self.end.wrapping_add(1) & max_value)
1141 }
1142}
1143
impl fmt::Debug for WrappingRange {
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        if self.start > self.end {
            // Wrapped range: shown as the two intervals it covers.
            write!(fmt, "(..={}) | ({}..)", self.end, self.start)?;
        } else {
            write!(fmt, "{}..={}", self.start, self.end)?;
        }
        Ok(())
    }
}
1154
/// Information about one scalar component of a Rust type.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum Scalar {
    /// A scalar known to be initialized, with a restricted set of valid bit patterns.
    Initialized {
        value: Primitive,

        /// The set of bit patterns that are valid for this scalar.
        valid_range: WrappingRange,
    },
    /// A scalar that may be uninitialized (e.g. from a union);
    /// no validity range is stored.
    Union {
        value: Primitive,
    },
}
1176
1177impl Scalar {
1178 #[inline]
1179 pub fn is_bool(&self) -> bool {
1180 use Integer::*;
1181 matches!(
1182 self,
1183 Scalar::Initialized {
1184 value: Primitive::Int(I8, false),
1185 valid_range: WrappingRange { start: 0, end: 1 }
1186 }
1187 )
1188 }
1189
1190 pub fn primitive(&self) -> Primitive {
1193 match *self {
1194 Scalar::Initialized { value, .. } | Scalar::Union { value } => value,
1195 }
1196 }
1197
1198 pub fn align(self, cx: &impl HasDataLayout) -> AbiAndPrefAlign {
1199 self.primitive().align(cx)
1200 }
1201
1202 pub fn size(self, cx: &impl HasDataLayout) -> Size {
1203 self.primitive().size(cx)
1204 }
1205
1206 #[inline]
1207 pub fn to_union(&self) -> Self {
1208 Self::Union { value: self.primitive() }
1209 }
1210
1211 #[inline]
1212 pub fn valid_range(&self, cx: &impl HasDataLayout) -> WrappingRange {
1213 match *self {
1214 Scalar::Initialized { valid_range, .. } => valid_range,
1215 Scalar::Union { value } => WrappingRange::full(value.size(cx)),
1216 }
1217 }
1218
1219 #[inline]
1220 pub fn valid_range_mut(&mut self) -> &mut WrappingRange {
1223 match self {
1224 Scalar::Initialized { valid_range, .. } => valid_range,
1225 Scalar::Union { .. } => panic!("cannot change the valid range of a union"),
1226 }
1227 }
1228
1229 #[inline]
1232 pub fn is_always_valid<C: HasDataLayout>(&self, cx: &C) -> bool {
1233 match *self {
1234 Scalar::Initialized { valid_range, .. } => valid_range.is_full_for(self.size(cx)),
1235 Scalar::Union { .. } => true,
1236 }
1237 }
1238
1239 #[inline]
1241 pub fn is_uninit_valid(&self) -> bool {
1242 match *self {
1243 Scalar::Initialized { .. } => false,
1244 Scalar::Union { .. } => true,
1245 }
1246 }
1247
1248 #[inline]
1250 pub fn is_signed(&self) -> bool {
1251 match self.primitive() {
1252 Primitive::Int(_, signed) => signed,
1253 _ => false,
1254 }
1255 }
1256}
1257
/// Describes how the fields of a type are laid out in memory.
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum FieldsShape<FieldIdx: Idx> {
    /// A primitive with no fields.
    Primitive,

    /// All fields start at offset 0; the payload is the field count.
    Union(NonZeroUsize),

    /// Array/vector-style layout: `count` identically-typed elements,
    /// `stride` bytes apart.
    Array { stride: Size, count: u64 },

    /// Struct-style layout with precomputed, possibly reordered, offsets.
    Arbitrary {
        /// Byte offset of each field, indexed in source order.
        offsets: IndexVec<FieldIdx, Size>,

        /// `memory_index[i]` is the in-memory position of source-order
        /// field `i` (a bijective mapping — see `invert_bijective_mapping`
        /// usage below).
        memory_index: IndexVec<FieldIdx, u32>,
    },
}
1301
impl<FieldIdx: Idx> FieldsShape<FieldIdx> {
    /// Number of fields (0 for `Primitive`).
    #[inline]
    pub fn count(&self) -> usize {
        match *self {
            FieldsShape::Primitive => 0,
            FieldsShape::Union(count) => count.get(),
            // Panics if the u64 count does not fit in usize (32-bit hosts).
            FieldsShape::Array { count, .. } => count.try_into().unwrap(),
            FieldsShape::Arbitrary { ref offsets, .. } => offsets.len(),
        }
    }

    /// Byte offset of source-order field `i`; panics/asserts when `i` is
    /// out of bounds or the shape has no fields.
    #[inline]
    pub fn offset(&self, i: usize) -> Size {
        match *self {
            FieldsShape::Primitive => {
                unreachable!("FieldsShape::offset: `Primitive`s have no fields")
            }
            FieldsShape::Union(count) => {
                assert!(i < count.get(), "tried to access field {i} of union with {count} fields");
                // All union fields live at offset 0.
                Size::ZERO
            }
            FieldsShape::Array { stride, count } => {
                let i = u64::try_from(i).unwrap();
                assert!(i < count, "tried to access field {i} of array with {count} fields");
                stride * i
            }
            FieldsShape::Arbitrary { ref offsets, .. } => offsets[FieldIdx::new(i)],
        }
    }

    /// In-memory position of source-order field `i`; the identity for every
    /// shape except `Arbitrary`.
    #[inline]
    pub fn memory_index(&self, i: usize) -> usize {
        match *self {
            FieldsShape::Primitive => {
                unreachable!("FieldsShape::memory_index: `Primitive`s have no fields")
            }
            FieldsShape::Union(_) | FieldsShape::Array { .. } => i,
            FieldsShape::Arbitrary { ref memory_index, .. } => {
                memory_index[FieldIdx::new(i)].try_into().unwrap()
            }
        }
    }

    /// Gets source indices of the fields by increasing offset.
    #[inline]
    pub fn index_by_increasing_offset(&self) -> impl ExactSizeIterator<Item = usize> + '_ {
        // Two inverse tables: a fixed stack array for small field counts and
        // a heap-allocated IndexVec otherwise, to avoid allocating in the
        // common (small) case.
        let mut inverse_small = [0u8; 64];
        let mut inverse_big = IndexVec::new();
        let use_small = self.count() <= inverse_small.len();

        // Only `Arbitrary` reorders fields; build the memory→source inverse.
        if let FieldsShape::Arbitrary { ref memory_index, .. } = *self {
            if use_small {
                for (field_idx, &mem_idx) in memory_index.iter_enumerated() {
                    inverse_small[mem_idx as usize] = field_idx.index() as u8;
                }
            } else {
                inverse_big = memory_index.invert_bijective_mapping();
            }
        }

        // `Primitive` has count() == 0 but still yields one pseudo-field
        // index here (see the `pseudofield_count` of 1).
        let pseudofield_count = if let FieldsShape::Primitive = self { 1 } else { self.count() };

        (0..pseudofield_count).map(move |i| match *self {
            FieldsShape::Primitive | FieldsShape::Union(_) | FieldsShape::Array { .. } => i,
            FieldsShape::Arbitrary { .. } => {
                if use_small {
                    inverse_small[i] as usize
                } else {
                    inverse_big[i as u32].index()
                }
            }
        })
    }
}
1380
/// An identifier for a (target-specific) memory address space.
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub struct AddressSpace(pub u32);
1387
impl AddressSpace {
    /// The default address space, corresponding to data space.
    pub const DATA: Self = AddressSpace(0);
}
1392
/// Categorizes how a value is represented for codegen/ABI purposes.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum BackendRepr {
    Uninhabited,
    Scalar(Scalar),
    ScalarPair(Scalar, Scalar),
    Vector {
        element: Scalar,
        count: u64,
    },
    // Everything else; `sized: false` marks an unsized tail.
    Memory {
        sized: bool,
    },
}
1419
1420impl BackendRepr {
    /// Returns `true` if the layout is unsized — only possible for
    /// `Memory { sized: false }`.
    #[inline]
    pub fn is_unsized(&self) -> bool {
        match *self {
            BackendRepr::Uninhabited
            | BackendRepr::Scalar(_)
            | BackendRepr::ScalarPair(..)
            | BackendRepr::Vector { .. } => false,
            BackendRepr::Memory { sized } => !sized,
        }
    }
1432
    /// Negation of `is_unsized`.
    #[inline]
    pub fn is_sized(&self) -> bool {
        !self.is_unsized()
    }
1437
    /// Whether this is a single signed-integer scalar.
    /// Panics on any non-`Scalar` representation.
    #[inline]
    pub fn is_signed(&self) -> bool {
        match self {
            BackendRepr::Scalar(scal) => scal.is_signed(),
            _ => panic!("`is_signed` on non-scalar ABI {self:?}"),
        }
    }
1446
1447 #[inline]
1449 pub fn is_uninhabited(&self) -> bool {
1450 matches!(*self, BackendRepr::Uninhabited)
1451 }
1452
1453 #[inline]
1455 pub fn is_scalar(&self) -> bool {
1456 matches!(*self, BackendRepr::Scalar(_))
1457 }
1458
1459 #[inline]
1461 pub fn is_bool(&self) -> bool {
1462 matches!(*self, BackendRepr::Scalar(s) if s.is_bool())
1463 }
1464
1465 pub fn inherent_align<C: HasDataLayout>(&self, cx: &C) -> Option<AbiAndPrefAlign> {
1467 Some(match *self {
1468 BackendRepr::Scalar(s) => s.align(cx),
1469 BackendRepr::ScalarPair(s1, s2) => s1.align(cx).max(s2.align(cx)),
1470 BackendRepr::Vector { element, count } => {
1471 cx.data_layout().vector_align(element.size(cx) * count)
1472 }
1473 BackendRepr::Uninhabited | BackendRepr::Memory { .. } => return None,
1474 })
1475 }
1476
1477 pub fn inherent_size<C: HasDataLayout>(&self, cx: &C) -> Option<Size> {
1479 Some(match *self {
1480 BackendRepr::Scalar(s) => {
1481 s.size(cx)
1483 }
1484 BackendRepr::ScalarPair(s1, s2) => {
1485 let field2_offset = s1.size(cx).align_to(s2.align(cx).abi);
1487 (field2_offset + s2.size(cx)).align_to(self.inherent_align(cx)?.abi)
1488 }
1489 BackendRepr::Vector { element, count } => {
1490 (element.size(cx) * count).align_to(self.inherent_align(cx)?.abi)
1493 }
1494 BackendRepr::Uninhabited | BackendRepr::Memory { .. } => return None,
1495 })
1496 }
1497
1498 pub fn to_union(&self) -> Self {
1500 match *self {
1501 BackendRepr::Scalar(s) => BackendRepr::Scalar(s.to_union()),
1502 BackendRepr::ScalarPair(s1, s2) => {
1503 BackendRepr::ScalarPair(s1.to_union(), s2.to_union())
1504 }
1505 BackendRepr::Vector { element, count } => {
1506 BackendRepr::Vector { element: element.to_union(), count }
1507 }
1508 BackendRepr::Uninhabited | BackendRepr::Memory { .. } => {
1509 BackendRepr::Memory { sized: true }
1510 }
1511 }
1512 }
1513
1514 pub fn eq_up_to_validity(&self, other: &Self) -> bool {
1515 match (self, other) {
1516 (BackendRepr::Scalar(l), BackendRepr::Scalar(r)) => l.primitive() == r.primitive(),
1519 (
1520 BackendRepr::Vector { element: element_l, count: count_l },
1521 BackendRepr::Vector { element: element_r, count: count_r },
1522 ) => element_l.primitive() == element_r.primitive() && count_l == count_r,
1523 (BackendRepr::ScalarPair(l1, l2), BackendRepr::ScalarPair(r1, r2)) => {
1524 l1.primitive() == r1.primitive() && l2.primitive() == r2.primitive()
1525 }
1526 _ => self == other,
1528 }
1529 }
1530}
1531
/// How the cases (variants) of a layout are arranged.
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum Variants<FieldIdx: Idx, VariantIdx: Idx> {
    /// A layout with no variants at all.
    Empty,

    /// A layout with exactly one variant (structs, tuples, single-variant enums, ...).
    Single {
        /// Which variant of the original type this layout corresponds to.
        index: VariantIdx,
    },

    /// A layout with multiple variants, distinguished at runtime by a tag.
    Multiple {
        /// The scalar holding the discriminating tag.
        tag: Scalar,
        /// How tag values map to variants (direct or niche — see `TagEncoding`).
        tag_encoding: TagEncoding<VariantIdx>,
        /// Which field of the layout stores the tag.
        tag_field: usize,
        /// Per-variant layouts, indexed by variant.
        variants: IndexVec<VariantIdx, LayoutData<FieldIdx, VariantIdx>>,
    },
}
1558
/// How an enum's tag value encodes the active variant.
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum TagEncoding<VariantIdx: Idx> {
    /// The tag stores the variant's discriminant directly.
    Direct,

    /// Niche encoding: the variants in `niche_variants` are encoded as tag
    /// values inside otherwise-invalid values ("niches") of some field, while
    /// any other tag value means `untagged_variant` is active.
    Niche {
        /// The variant represented by all tag values outside the niche.
        untagged_variant: VariantIdx,
        /// The contiguous range of variants that are niche-encoded.
        niche_variants: RangeInclusive<VariantIdx>,
        /// The tag value corresponding to the first variant in `niche_variants`
        /// (see `Niche::reserve`, which hands out such starting values).
        niche_start: u128,
    },
}
1591
/// A range of invalid values ("niche") within a scalar at some offset of a
/// layout, available for encoding extra information (e.g. enum tags).
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub struct Niche {
    /// Byte offset of the scalar within the enclosing layout.
    pub offset: Size,
    /// The primitive type of the scalar that carries the niche.
    pub value: Primitive,
    /// The (wrapping) range of values that are valid; the niche is its complement.
    pub valid_range: WrappingRange,
}
1599
1600impl Niche {
1601 pub fn from_scalar<C: HasDataLayout>(cx: &C, offset: Size, scalar: Scalar) -> Option<Self> {
1602 let Scalar::Initialized { value, valid_range } = scalar else { return None };
1603 let niche = Niche { offset, value, valid_range };
1604 if niche.available(cx) > 0 { Some(niche) } else { None }
1605 }
1606
1607 pub fn available<C: HasDataLayout>(&self, cx: &C) -> u128 {
1608 let Self { value, valid_range: v, .. } = *self;
1609 let size = value.size(cx);
1610 assert!(size.bits() <= 128);
1611 let max_value = size.unsigned_int_max();
1612
1613 let niche = v.end.wrapping_add(1)..v.start;
1615 niche.end.wrapping_sub(niche.start) & max_value
1616 }
1617
1618 pub fn reserve<C: HasDataLayout>(&self, cx: &C, count: u128) -> Option<(u128, Scalar)> {
1619 assert!(count > 0);
1620
1621 let Self { value, valid_range: v, .. } = *self;
1622 let size = value.size(cx);
1623 assert!(size.bits() <= 128);
1624 let max_value = size.unsigned_int_max();
1625
1626 let niche = v.end.wrapping_add(1)..v.start;
1627 let available = niche.end.wrapping_sub(niche.start) & max_value;
1628 if count > available {
1629 return None;
1630 }
1631
1632 let move_start = |v: WrappingRange| {
1646 let start = v.start.wrapping_sub(count) & max_value;
1647 Some((start, Scalar::Initialized { value, valid_range: v.with_start(start) }))
1648 };
1649 let move_end = |v: WrappingRange| {
1650 let start = v.end.wrapping_add(1) & max_value;
1651 let end = v.end.wrapping_add(count) & max_value;
1652 Some((start, Scalar::Initialized { value, valid_range: v.with_end(end) }))
1653 };
1654 let distance_end_zero = max_value - v.end;
1655 if v.start > v.end {
1656 move_end(v)
1658 } else if v.start <= distance_end_zero {
1659 if count <= v.start {
1660 move_start(v)
1661 } else {
1662 move_end(v)
1664 }
1665 } else {
1666 let end = v.end.wrapping_add(count) & max_value;
1667 let overshot_zero = (1..=v.end).contains(&end);
1668 if overshot_zero {
1669 move_start(v)
1671 } else {
1672 move_end(v)
1673 }
1674 }
1675 }
1676}
1677
/// The computed layout of a type: field placement, variant arrangement,
/// backend representation, size, and alignment.
#[derive(PartialEq, Eq, Hash, Clone)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub struct LayoutData<FieldIdx: Idx, VariantIdx: Idx> {
    /// Where the fields are located within the layout.
    pub fields: FieldsShape<FieldIdx>,

    /// How the variants (if any) are arranged; see `Variants`.
    pub variants: Variants<FieldIdx, VariantIdx>,

    /// How values of this layout are represented for the backend/ABI.
    pub backend_repr: BackendRepr,

    /// The niche with the largest number of invalid values, if any
    /// (usable for niche-encoding an enclosing enum's tag).
    pub largest_niche: Option<Niche>,

    /// ABI-mandated and preferred alignment.
    pub align: AbiAndPrefAlign,
    /// Total size of the layout.
    pub size: Size,

    /// NOTE(review): inferred from the name — the largest alignment explicitly
    /// requested via a `repr` attribute, if any; confirm against the layout
    /// calculator.
    pub max_repr_align: Option<Align>,

    /// NOTE(review): inferred from the name — the ABI alignment before any
    /// `repr`-attribute adjustment; confirm against the layout calculator.
    pub unadjusted_abi_align: Align,

    /// Seed for layout randomization; see `LayoutData::scalar` for how it is
    /// derived for scalar layouts.
    pub randomization_seed: u64,
}
1732
impl<FieldIdx: Idx, VariantIdx: Idx> LayoutData<FieldIdx, VariantIdx> {
    /// Whether the backend treats this layout as an aggregate.
    /// Note that `ScalarPair` counts as an aggregate here, while `Vector` does not.
    pub fn is_aggregate(&self) -> bool {
        match self.backend_repr {
            BackendRepr::Uninhabited | BackendRepr::Scalar(_) | BackendRepr::Vector { .. } => false,
            BackendRepr::ScalarPair(..) | BackendRepr::Memory { .. } => true,
        }
    }

    /// Whether this layout has no valid values.
    pub fn is_uninhabited(&self) -> bool {
        self.backend_repr.is_uninhabited()
    }

    /// Constructs the layout of a single scalar value.
    pub fn scalar<C: HasDataLayout>(cx: &C, scalar: Scalar) -> Self {
        let largest_niche = Niche::from_scalar(cx, Size::ZERO, scalar);
        let size = scalar.size(cx);
        let align = scalar.align(cx);

        let range = scalar.valid_range(cx);

        // Derive a deterministic randomization seed from the scalar's size,
        // its primitive kind (encoded as a small tag shifted into the upper
        // bits), and its valid-range bounds (rotated so their low bits don't
        // collide with the size contribution).
        let randomization_seed = size
            .bytes()
            .wrapping_add(
                match scalar.primitive() {
                    Primitive::Int(_, true) => 1,
                    Primitive::Int(_, false) => 2,
                    Primitive::Float(_) => 3,
                    Primitive::Pointer(_) => 4,
                } << 32,
            )
            .wrapping_add((range.start as u64).rotate_right(16))
            .wrapping_add((range.end as u64).rotate_right(16));

        LayoutData {
            // A scalar always has exactly one (trivial) variant and no fields.
            variants: Variants::Single { index: VariantIdx::new(0) },
            fields: FieldsShape::Primitive,
            backend_repr: BackendRepr::Scalar(scalar),
            largest_niche,
            size,
            align,
            max_repr_align: None,
            unadjusted_abi_align: align.abi,
            randomization_seed,
        }
    }
}
1788
impl<FieldIdx: Idx, VariantIdx: Idx> fmt::Debug for LayoutData<FieldIdx, VariantIdx>
where
    FieldsShape<FieldIdx>: fmt::Debug,
    Variants<FieldIdx, VariantIdx>: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Destructure exhaustively (no `..`) so that adding a field to
        // `LayoutData` forces this impl to be revisited.
        let LayoutData {
            size,
            align,
            backend_repr,
            fields,
            largest_niche,
            variants,
            max_repr_align,
            unadjusted_abi_align,
            ref randomization_seed,
        } = self;
        // Note: `backend_repr` is printed under the key "abi".
        f.debug_struct("Layout")
            .field("size", size)
            .field("align", align)
            .field("abi", backend_repr)
            .field("fields", fields)
            .field("largest_niche", largest_niche)
            .field("variants", variants)
            .field("max_repr_align", max_repr_align)
            .field("unadjusted_abi_align", unadjusted_abi_align)
            .field("randomization_seed", randomization_seed)
            .finish()
    }
}
1822
/// The kind of a known-safe pointer, used in `PointeeInfo::safe`.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum PointerKind {
    /// A shared reference. NOTE(review): `frozen` presumably means the pointee
    /// contains no interior mutability — confirm with callers.
    SharedRef { frozen: bool },
    /// A mutable reference. NOTE(review): `unpin` presumably records whether the
    /// pointee is `Unpin` — confirm with callers.
    MutableRef { unpin: bool },
    /// An owning box. NOTE(review): `global` presumably records whether the
    /// allocator is the global one — confirm with callers.
    Box { unpin: bool, global: bool },
}
1833
/// Information about a pointee that a pointer-typed value refers to.
#[derive(Copy, Clone, Debug)]
pub struct PointeeInfo {
    /// If the pointer is known to be safe (a reference or box), which kind;
    /// `None` for raw pointers.
    pub safe: Option<PointerKind>,
    /// Size of the pointee.
    pub size: Size,
    /// Alignment of the pointee.
    pub align: Align,
}
1852
1853impl<FieldIdx: Idx, VariantIdx: Idx> LayoutData<FieldIdx, VariantIdx> {
1854 #[inline]
1856 pub fn is_unsized(&self) -> bool {
1857 self.backend_repr.is_unsized()
1858 }
1859
1860 #[inline]
1861 pub fn is_sized(&self) -> bool {
1862 self.backend_repr.is_sized()
1863 }
1864
1865 pub fn is_1zst(&self) -> bool {
1867 self.is_sized() && self.size.bytes() == 0 && self.align.abi.bytes() == 1
1868 }
1869
1870 pub fn is_zst(&self) -> bool {
1875 match self.backend_repr {
1876 BackendRepr::Scalar(_) | BackendRepr::ScalarPair(..) | BackendRepr::Vector { .. } => {
1877 false
1878 }
1879 BackendRepr::Uninhabited => self.size.bytes() == 0,
1880 BackendRepr::Memory { sized } => sized && self.size.bytes() == 0,
1881 }
1882 }
1883
1884 pub fn eq_abi(&self, other: &Self) -> bool {
1890 self.size == other.size
1894 && self.is_sized() == other.is_sized()
1895 && self.backend_repr.eq_up_to_validity(&other.backend_repr)
1896 && self.backend_repr.is_bool() == other.backend_repr.is_bool()
1897 && self.align.abi == other.align.abi
1898 && self.max_repr_align == other.max_repr_align
1899 && self.unadjusted_abi_align == other.unadjusted_abi_align
1900 }
1901}
1902
/// How a struct-like aggregate should be laid out.
#[derive(Copy, Clone, Debug)]
pub enum StructKind {
    /// An aggregate whose last field can never be unsized.
    AlwaysSized,
    /// An aggregate whose last field may be unsized (or coercible to unsized).
    MaybeUnsized,
    /// An aggregate preceded by a prefix of the given size and alignment
    /// (e.g. an enum tag stored before the variant's fields).
    Prefixed(Size, Align),
}