1#![cfg_attr(feature = "nightly", allow(internal_features))]
3#![cfg_attr(feature = "nightly", feature(assert_matches))]
4#![cfg_attr(feature = "nightly", feature(rustc_attrs))]
5#![cfg_attr(feature = "nightly", feature(step_trait))]
6use std::fmt;
41#[cfg(feature = "nightly")]
42use std::iter::Step;
43use std::num::{NonZeroUsize, ParseIntError};
44use std::ops::{Add, AddAssign, Deref, Mul, RangeFull, RangeInclusive, Sub};
45use std::str::FromStr;
46
47use bitflags::bitflags;
48#[cfg(feature = "nightly")]
49use rustc_data_structures::stable_hasher::StableOrd;
50use rustc_hashes::Hash64;
51use rustc_index::{Idx, IndexSlice, IndexVec};
52#[cfg(feature = "nightly")]
53use rustc_macros::{Decodable_NoContext, Encodable_NoContext, HashStable_Generic};
54
55mod callconv;
56mod canon_abi;
57mod extern_abi;
58mod layout;
59#[cfg(test)]
60mod tests;
61
62pub use callconv::{Heterogeneous, HomogeneousAggregate, Reg, RegKind};
63pub use canon_abi::{ArmCall, CanonAbi, InterruptKind, X86Call};
64#[cfg(feature = "nightly")]
65pub use extern_abi::CVariadicStatus;
66pub use extern_abi::{ExternAbi, all_names};
67#[cfg(feature = "nightly")]
68pub use layout::{FIRST_VARIANT, FieldIdx, Layout, TyAbiInterface, TyAndLayout, VariantIdx};
69pub use layout::{LayoutCalculator, LayoutCalculatorError};
70
/// Marker trait for hashing contexts accepted by the `HashStable_Generic`
/// derives in this crate. NOTE(review): implementors are expected to live in
/// rustc proper — confirm at use sites.
#[cfg(feature = "nightly")]
pub trait HashStableContext {}
76
/// Flag bits summarizing a type's `#[repr(..)]`-style properties; the actual
/// flag values are defined in the `bitflags!` invocation just below.
#[derive(Clone, Copy, PartialEq, Eq, Default)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub struct ReprFlags(u8);
83
bitflags! {
    impl ReprFlags: u8 {
        /// `repr(C)`.
        const IS_C = 1 << 0;
        /// `repr(simd)`.
        const IS_SIMD = 1 << 1;
        /// `repr(transparent)`.
        const IS_TRANSPARENT = 1 << 2;
        /// Declared field order must be kept (see `inhibit_struct_field_reordering`).
        const IS_LINEAR = 1 << 3;
        /// Field order may be randomized (see `can_randomize_type_layout`).
        const RANDOMIZE_LAYOUT = 1 << 4;
        /// NOTE(review): name suggests the type must be passed indirectly in
        /// non-Rust ABIs; the flag is not consulted in this file — confirm at callers.
        const PASS_INDIRECTLY_IN_NON_RUSTIC_ABIS = 1 << 5;
        /// Any of these flags forbids reordering struct fields.
        const FIELD_ORDER_UNOPTIMIZABLE = ReprFlags::IS_C.bits()
            | ReprFlags::IS_SIMD.bits()
            | ReprFlags::IS_LINEAR.bits();
        /// Any of these flags forbids ABI (newtype) optimizations.
        const ABI_UNOPTIMIZABLE = ReprFlags::IS_C.bits() | ReprFlags::IS_SIMD.bits();
    }
}
106
107impl std::fmt::Debug for ReprFlags {
110 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
111 bitflags::parser::to_writer(self, f)
112 }
113}
114
/// A discriminant/integer type as written in a `repr(..)` attribute.
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub enum IntegerType {
    /// Pointer-sized integer; the `bool` is `true` when signed (`isize` vs `usize`).
    Pointer(bool),
    /// Fixed-width integer of the given [`Integer`] size; `bool` is `true` when signed.
    Fixed(Integer, bool),
}
128
129impl IntegerType {
130 pub fn is_signed(&self) -> bool {
131 match self {
132 IntegerType::Pointer(b) => *b,
133 IntegerType::Fixed(_, b) => *b,
134 }
135 }
136}
137
/// Parsed `#[repr(..)]` options plus the layout-randomization seed.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Default)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub struct ReprOptions {
    /// Explicit discriminant type, e.g. `repr(i8)` / `repr(usize)`; see `discr_type`.
    pub int: Option<IntegerType>,
    /// `repr(align(n))`, if any.
    pub align: Option<Align>,
    /// `repr(packed(n))`, if any.
    pub pack: Option<Align>,
    pub flags: ReprFlags,
    /// Seed for field-order shuffling. NOTE(review): presumably derived per
    /// type so randomized layouts stay deterministic — confirm where populated.
    pub field_shuffle_seed: Hash64,
}
158
159impl ReprOptions {
160 #[inline]
161 pub fn simd(&self) -> bool {
162 self.flags.contains(ReprFlags::IS_SIMD)
163 }
164
165 #[inline]
166 pub fn c(&self) -> bool {
167 self.flags.contains(ReprFlags::IS_C)
168 }
169
170 #[inline]
171 pub fn packed(&self) -> bool {
172 self.pack.is_some()
173 }
174
175 #[inline]
176 pub fn transparent(&self) -> bool {
177 self.flags.contains(ReprFlags::IS_TRANSPARENT)
178 }
179
180 #[inline]
181 pub fn linear(&self) -> bool {
182 self.flags.contains(ReprFlags::IS_LINEAR)
183 }
184
185 pub fn discr_type(&self) -> IntegerType {
193 self.int.unwrap_or(IntegerType::Pointer(true))
194 }
195
196 pub fn inhibit_enum_layout_opt(&self) -> bool {
200 self.c() || self.int.is_some()
201 }
202
203 pub fn inhibit_newtype_abi_optimization(&self) -> bool {
204 self.flags.intersects(ReprFlags::ABI_UNOPTIMIZABLE)
205 }
206
207 pub fn inhibit_struct_field_reordering(&self) -> bool {
210 self.flags.intersects(ReprFlags::FIELD_ORDER_UNOPTIMIZABLE) || self.int.is_some()
211 }
212
213 pub fn can_randomize_type_layout(&self) -> bool {
216 !self.inhibit_struct_field_reordering() && self.flags.contains(ReprFlags::RANDOMIZE_LAYOUT)
217 }
218
219 pub fn inhibits_union_abi_opt(&self) -> bool {
221 self.c()
222 }
223}
224
/// The maximum supported number of lanes in a SIMD vector (`1 << 0xF` = 32768).
pub const MAX_SIMD_LANES: u64 = 1 << 0xF;
231
/// Size, alignment and addressing properties of pointers in one address space.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct PointerSpec {
    /// Total size of the pointer.
    pointer_size: Size,
    /// ABI alignment of the pointer.
    pointer_align: Align,
    /// Width of the index/offset component used for address arithmetic; this
    /// is what `ptr_sized_integer_in` is based on.
    pointer_offset: Size,
    /// Set when parsed from a `pf...` datalayout spec. NOTE(review): currently
    /// only recorded, never read (hence the leading underscore).
    _is_fat: bool,
}
245
/// Alignment and pointer information for a compilation target, mirroring the
/// components of an LLVM data layout string.
#[derive(Debug, PartialEq, Eq)]
pub struct TargetDataLayout {
    pub endian: Endian,
    pub i1_align: Align,
    pub i8_align: Align,
    pub i16_align: Align,
    pub i32_align: Align,
    pub i64_align: Align,
    pub i128_align: Align,
    pub f16_align: Align,
    pub f32_align: Align,
    pub f64_align: Align,
    pub f128_align: Align,
    pub aggregate_align: Align,

    /// Alignment overrides for specific vector sizes (`v<size>:<align>` specs).
    pub vector_align: Vec<(Size, Align)>,

    /// Address space of ordinary data pointers.
    pub default_address_space: AddressSpace,
    /// Pointer spec of the default address space (kept out of the map below).
    pub default_address_space_pointer_spec: PointerSpec,

    /// Pointer specs for non-default address spaces.
    address_space_info: Vec<(AddressSpace, PointerSpec)>,

    pub instruction_address_space: AddressSpace,

    /// Minimum size of C-style enums. NOTE(review): not derivable from an LLVM
    /// datalayout string; `parse_from_llvm_datalayout_string` leaves the default.
    pub c_enum_min_size: Integer,
}
284
impl Default for TargetDataLayout {
    /// Conservative, target-independent defaults; real targets replace these
    /// via `parse_from_llvm_datalayout_string` / target specs.
    fn default() -> TargetDataLayout {
        let align = |bits| Align::from_bits(bits).unwrap();
        TargetDataLayout {
            endian: Endian::Big,
            i1_align: align(8),
            i8_align: align(8),
            i16_align: align(16),
            i32_align: align(32),
            i64_align: align(32),
            i128_align: align(32),
            f16_align: align(16),
            f32_align: align(32),
            f64_align: align(64),
            f128_align: align(128),
            aggregate_align: align(8),
            vector_align: vec![
                (Size::from_bits(64), align(64)),
                (Size::from_bits(128), align(128)),
            ],
            default_address_space: AddressSpace::ZERO,
            default_address_space_pointer_spec: PointerSpec {
                pointer_size: Size::from_bits(64),
                pointer_align: align(64),
                pointer_offset: Size::from_bits(64),
                _is_fat: false,
            },
            address_space_info: vec![],
            instruction_address_space: AddressSpace::ZERO,
            c_enum_min_size: Integer::I32,
        }
    }
}
319
/// Errors produced while parsing/validating a target data layout.
pub enum TargetDataLayoutErrors<'a> {
    /// An address-space component failed to parse as `u32`.
    InvalidAddressSpace { addr_space: &'a str, cause: &'a str, err: ParseIntError },
    /// A size/alignment bit count failed to parse as `u64`.
    InvalidBits { kind: &'a str, bit: &'a str, cause: &'a str, err: ParseIntError },
    /// A spec that requires an alignment had none.
    MissingAlignment { cause: &'a str },
    /// An alignment parsed but is not a valid power-of-two byte count.
    InvalidAlignment { cause: &'a str, err: AlignFromBytesError },
    // NOTE(review): the two `Inconsistent*` variants are not constructed in
    // this file — presumably raised by target-spec validation elsewhere.
    InconsistentTargetArchitecture { dl: &'a str, target: &'a str },
    InconsistentTargetPointerWidth { pointer_size: u64, target: u16 },
    InvalidBitsSize { err: String },
    /// An unrecognized letter followed the `p`/`pf` of a pointer spec.
    UnknownPointerSpecification { err: String },
}
330
331impl TargetDataLayout {
332 pub fn parse_from_llvm_datalayout_string<'a>(
338 input: &'a str,
339 default_address_space: AddressSpace,
340 ) -> Result<TargetDataLayout, TargetDataLayoutErrors<'a>> {
341 let parse_address_space = |s: &'a str, cause: &'a str| {
343 s.parse::<u32>().map(AddressSpace).map_err(|err| {
344 TargetDataLayoutErrors::InvalidAddressSpace { addr_space: s, cause, err }
345 })
346 };
347
348 let parse_bits = |s: &'a str, kind: &'a str, cause: &'a str| {
350 s.parse::<u64>().map_err(|err| TargetDataLayoutErrors::InvalidBits {
351 kind,
352 bit: s,
353 cause,
354 err,
355 })
356 };
357
358 let parse_size =
360 |s: &'a str, cause: &'a str| parse_bits(s, "size", cause).map(Size::from_bits);
361
362 let parse_align_str = |s: &'a str, cause: &'a str| {
364 let align_from_bits = |bits| {
365 Align::from_bits(bits)
366 .map_err(|err| TargetDataLayoutErrors::InvalidAlignment { cause, err })
367 };
368 let abi = parse_bits(s, "alignment", cause)?;
369 Ok(align_from_bits(abi)?)
370 };
371
372 let parse_align_seq = |s: &[&'a str], cause: &'a str| {
375 if s.is_empty() {
376 return Err(TargetDataLayoutErrors::MissingAlignment { cause });
377 }
378 parse_align_str(s[0], cause)
379 };
380
381 let mut dl = TargetDataLayout::default();
382 dl.default_address_space = default_address_space;
383
384 let mut i128_align_src = 64;
385 for spec in input.split('-') {
386 let spec_parts = spec.split(':').collect::<Vec<_>>();
387
388 match &*spec_parts {
389 ["e"] => dl.endian = Endian::Little,
390 ["E"] => dl.endian = Endian::Big,
391 [p] if p.starts_with('P') => {
392 dl.instruction_address_space = parse_address_space(&p[1..], "P")?
393 }
394 ["a", a @ ..] => dl.aggregate_align = parse_align_seq(a, "a")?,
395 ["f16", a @ ..] => dl.f16_align = parse_align_seq(a, "f16")?,
396 ["f32", a @ ..] => dl.f32_align = parse_align_seq(a, "f32")?,
397 ["f64", a @ ..] => dl.f64_align = parse_align_seq(a, "f64")?,
398 ["f128", a @ ..] => dl.f128_align = parse_align_seq(a, "f128")?,
399 [p, s, a @ ..] if p.starts_with("p") => {
400 let mut p = p.strip_prefix('p').unwrap();
401 let mut _is_fat = false;
402
403 if p.starts_with('f') {
407 p = p.strip_prefix('f').unwrap();
408 _is_fat = true;
409 }
410
411 if p.starts_with(char::is_alphabetic) {
414 return Err(TargetDataLayoutErrors::UnknownPointerSpecification {
415 err: p.to_string(),
416 });
417 }
418
419 let addr_space = if !p.is_empty() {
420 parse_address_space(p, "p-")?
421 } else {
422 AddressSpace::ZERO
423 };
424
425 let pointer_size = parse_size(s, "p-")?;
426 let pointer_align = parse_align_seq(a, "p-")?;
427 let info = PointerSpec {
428 pointer_offset: pointer_size,
429 pointer_size,
430 pointer_align,
431 _is_fat,
432 };
433 if addr_space == default_address_space {
434 dl.default_address_space_pointer_spec = info;
435 } else {
436 match dl.address_space_info.iter_mut().find(|(a, _)| *a == addr_space) {
437 Some(e) => e.1 = info,
438 None => {
439 dl.address_space_info.push((addr_space, info));
440 }
441 }
442 }
443 }
444 [p, s, a, _pr, i] if p.starts_with("p") => {
445 let mut p = p.strip_prefix('p').unwrap();
446 let mut _is_fat = false;
447
448 if p.starts_with('f') {
452 p = p.strip_prefix('f').unwrap();
453 _is_fat = true;
454 }
455
456 if p.starts_with(char::is_alphabetic) {
459 return Err(TargetDataLayoutErrors::UnknownPointerSpecification {
460 err: p.to_string(),
461 });
462 }
463
464 let addr_space = if !p.is_empty() {
465 parse_address_space(p, "p")?
466 } else {
467 AddressSpace::ZERO
468 };
469
470 let info = PointerSpec {
471 pointer_size: parse_size(s, "p-")?,
472 pointer_align: parse_align_str(a, "p-")?,
473 pointer_offset: parse_size(i, "p-")?,
474 _is_fat,
475 };
476
477 if addr_space == default_address_space {
478 dl.default_address_space_pointer_spec = info;
479 } else {
480 match dl.address_space_info.iter_mut().find(|(a, _)| *a == addr_space) {
481 Some(e) => e.1 = info,
482 None => {
483 dl.address_space_info.push((addr_space, info));
484 }
485 }
486 }
487 }
488
489 [s, a @ ..] if s.starts_with('i') => {
490 let Ok(bits) = s[1..].parse::<u64>() else {
491 parse_size(&s[1..], "i")?; continue;
493 };
494 let a = parse_align_seq(a, s)?;
495 match bits {
496 1 => dl.i1_align = a,
497 8 => dl.i8_align = a,
498 16 => dl.i16_align = a,
499 32 => dl.i32_align = a,
500 64 => dl.i64_align = a,
501 _ => {}
502 }
503 if bits >= i128_align_src && bits <= 128 {
504 i128_align_src = bits;
507 dl.i128_align = a;
508 }
509 }
510 [s, a @ ..] if s.starts_with('v') => {
511 let v_size = parse_size(&s[1..], "v")?;
512 let a = parse_align_seq(a, s)?;
513 if let Some(v) = dl.vector_align.iter_mut().find(|v| v.0 == v_size) {
514 v.1 = a;
515 continue;
516 }
517 dl.vector_align.push((v_size, a));
519 }
520 _ => {} }
522 }
523
524 if (dl.instruction_address_space != dl.default_address_space)
527 && dl
528 .address_space_info
529 .iter()
530 .find(|(a, _)| *a == dl.instruction_address_space)
531 .is_none()
532 {
533 dl.address_space_info.push((
534 dl.instruction_address_space,
535 dl.default_address_space_pointer_spec.clone(),
536 ));
537 }
538
539 Ok(dl)
540 }
541
542 #[inline]
553 pub fn obj_size_bound(&self) -> u64 {
554 match self.pointer_size().bits() {
555 16 => 1 << 15,
556 32 => 1 << 31,
557 64 => 1 << 61,
558 bits => panic!("obj_size_bound: unknown pointer bit size {bits}"),
559 }
560 }
561
562 #[inline]
572 pub fn obj_size_bound_in(&self, address_space: AddressSpace) -> u64 {
573 match self.pointer_size_in(address_space).bits() {
574 16 => 1 << 15,
575 32 => 1 << 31,
576 64 => 1 << 61,
577 bits => panic!("obj_size_bound: unknown pointer bit size {bits}"),
578 }
579 }
580
581 #[inline]
582 pub fn ptr_sized_integer(&self) -> Integer {
583 use Integer::*;
584 match self.pointer_offset().bits() {
585 16 => I16,
586 32 => I32,
587 64 => I64,
588 bits => panic!("ptr_sized_integer: unknown pointer bit size {bits}"),
589 }
590 }
591
592 #[inline]
593 pub fn ptr_sized_integer_in(&self, address_space: AddressSpace) -> Integer {
594 use Integer::*;
595 match self.pointer_offset_in(address_space).bits() {
596 16 => I16,
597 32 => I32,
598 64 => I64,
599 bits => panic!("ptr_sized_integer: unknown pointer bit size {bits}"),
600 }
601 }
602
603 #[inline]
605 fn cabi_vector_align(&self, vec_size: Size) -> Option<Align> {
606 self.vector_align
607 .iter()
608 .find(|(size, _align)| *size == vec_size)
609 .map(|(_size, align)| *align)
610 }
611
612 #[inline]
614 pub fn llvmlike_vector_align(&self, vec_size: Size) -> Align {
615 self.cabi_vector_align(vec_size)
616 .unwrap_or(Align::from_bytes(vec_size.bytes().next_power_of_two()).unwrap())
617 }
618
619 #[inline]
621 pub fn pointer_size(&self) -> Size {
622 self.default_address_space_pointer_spec.pointer_size
623 }
624
625 #[inline]
627 pub fn pointer_size_in(&self, c: AddressSpace) -> Size {
628 if c == self.default_address_space {
629 return self.default_address_space_pointer_spec.pointer_size;
630 }
631
632 if let Some(e) = self.address_space_info.iter().find(|(a, _)| a == &c) {
633 e.1.pointer_size
634 } else {
635 panic!("Use of unknown address space {c:?}");
636 }
637 }
638
639 #[inline]
641 pub fn pointer_offset(&self) -> Size {
642 self.default_address_space_pointer_spec.pointer_offset
643 }
644
645 #[inline]
647 pub fn pointer_offset_in(&self, c: AddressSpace) -> Size {
648 if c == self.default_address_space {
649 return self.default_address_space_pointer_spec.pointer_offset;
650 }
651
652 if let Some(e) = self.address_space_info.iter().find(|(a, _)| a == &c) {
653 e.1.pointer_offset
654 } else {
655 panic!("Use of unknown address space {c:?}");
656 }
657 }
658
659 #[inline]
661 pub fn pointer_align(&self) -> AbiAlign {
662 AbiAlign::new(self.default_address_space_pointer_spec.pointer_align)
663 }
664
665 #[inline]
667 pub fn pointer_align_in(&self, c: AddressSpace) -> AbiAlign {
668 AbiAlign::new(if c == self.default_address_space {
669 self.default_address_space_pointer_spec.pointer_align
670 } else if let Some(e) = self.address_space_info.iter().find(|(a, _)| a == &c) {
671 e.1.pointer_align
672 } else {
673 panic!("Use of unknown address space {c:?}");
674 })
675 }
676}
677
/// Anything that can hand out the target's [`TargetDataLayout`].
pub trait HasDataLayout {
    fn data_layout(&self) -> &TargetDataLayout;
}
681
// A layout trivially provides itself.
impl HasDataLayout for TargetDataLayout {
    #[inline]
    fn data_layout(&self) -> &TargetDataLayout {
        self
    }
}
688
// Convenience: a reference to a layout works everywhere a layout does.
impl HasDataLayout for &TargetDataLayout {
    #[inline]
    fn data_layout(&self) -> &TargetDataLayout {
        (**self).data_layout()
    }
}
696
/// Byte order of the target.
#[derive(Copy, Clone, PartialEq, Eq)]
pub enum Endian {
    Little,
    Big,
}
703
704impl Endian {
705 pub fn as_str(&self) -> &'static str {
706 match self {
707 Self::Little => "little",
708 Self::Big => "big",
709 }
710 }
711}
712
713impl fmt::Debug for Endian {
714 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
715 f.write_str(self.as_str())
716 }
717}
718
719impl FromStr for Endian {
720 type Err = String;
721
722 fn from_str(s: &str) -> Result<Self, Self::Err> {
723 match s {
724 "little" => Ok(Self::Little),
725 "big" => Ok(Self::Big),
726 _ => Err(format!(r#"unknown endian: "{s}""#)),
727 }
728 }
729}
730
/// A size, always a whole number of bytes (bit counts are rounded up — see
/// `from_bits`). The newtype statically prevents bit/byte confusion.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub struct Size {
    // Invariant: a byte count; read through `bytes()` / `bits()`.
    raw: u64,
}
740
#[cfg(feature = "nightly")]
impl StableOrd for Size {
    const CAN_USE_UNSTABLE_SORT: bool = true;

    // `Size` orders by its raw `u64`, which does not depend on the
    // compilation session, so the stable-ordering contract holds.
    const THIS_IMPLEMENTATION_HAS_BEEN_TRIPLE_CHECKED: () = ();
}
749
750impl fmt::Debug for Size {
752 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
753 write!(f, "Size({} bytes)", self.bytes())
754 }
755}
756
impl Size {
    pub const ZERO: Size = Size { raw: 0 };

    /// Builds a `Size` from a bit count, rounding up to the next whole byte.
    pub fn from_bits(bits: impl TryInto<u64>) -> Size {
        // `.ok().unwrap()` avoids requiring a `Debug` bound on the error type.
        let bits = bits.try_into().ok().unwrap();
        Size { raw: bits.div_ceil(8) }
    }

    #[inline]
    pub fn from_bytes(bytes: impl TryInto<u64>) -> Size {
        let bytes: u64 = bytes.try_into().ok().unwrap();
        Size { raw: bytes }
    }

    #[inline]
    pub fn bytes(self) -> u64 {
        self.raw
    }

    #[inline]
    pub fn bytes_usize(self) -> usize {
        self.bytes().try_into().unwrap()
    }

    /// Size in bits; panics if `bytes * 8` does not fit in `u64`.
    #[inline]
    pub fn bits(self) -> u64 {
        #[cold]
        fn overflow(bytes: u64) -> ! {
            panic!("Size::bits: {bytes} bytes in bits doesn't fit in u64")
        }

        self.bytes().checked_mul(8).unwrap_or_else(|| overflow(self.bytes()))
    }

    #[inline]
    pub fn bits_usize(self) -> usize {
        self.bits().try_into().unwrap()
    }

    /// Rounds `self` up to the nearest multiple of `align`.
    #[inline]
    pub fn align_to(self, align: Align) -> Size {
        // `align` is a power of two, so `mask` is its low-bit mask.
        let mask = align.bytes() - 1;
        Size::from_bytes((self.bytes() + mask) & !mask)
    }

    #[inline]
    pub fn is_aligned(self, align: Align) -> bool {
        let mask = align.bytes() - 1;
        self.bytes() & mask == 0
    }

    /// Addition bounded by the target's maximum object size (not just `u64`).
    #[inline]
    pub fn checked_add<C: HasDataLayout>(self, offset: Size, cx: &C) -> Option<Size> {
        let dl = cx.data_layout();

        let bytes = self.bytes().checked_add(offset.bytes())?;

        if bytes < dl.obj_size_bound() { Some(Size::from_bytes(bytes)) } else { None }
    }

    /// Multiplication bounded by the target's maximum object size.
    #[inline]
    pub fn checked_mul<C: HasDataLayout>(self, count: u64, cx: &C) -> Option<Size> {
        let dl = cx.data_layout();

        let bytes = self.bytes().checked_mul(count)?;
        if bytes < dl.obj_size_bound() { Some(Size::from_bytes(bytes)) } else { None }
    }

    /// Sign-extends the low `self.bits()` bits of `value` to a full `i128`.
    #[inline]
    pub fn sign_extend(self, value: u128) -> i128 {
        let size = self.bits();
        if size == 0 {
            // A 0-bit value has no bits to extend.
            return 0;
        }
        // Shift left so the value's sign bit lands in bit 127, then
        // arithmetic-shift right to replicate it.
        let shift = 128 - size;
        ((value << shift) as i128) >> shift
    }

    /// Keeps only the low `self.bits()` bits of `value` (zero-extends).
    #[inline]
    pub fn truncate(self, value: u128) -> u128 {
        let size = self.bits();
        if size == 0 {
            return 0;
        }
        // Logical shifts: the right shift zero-fills.
        let shift = 128 - size;
        (value << shift) >> shift
    }

    // NOTE(review): the three min/max helpers below shift by `128 - bits` or
    // `bits - 1` and so misbehave (overflowing shift) for `Size::ZERO`;
    // callers appear to guarantee a nonzero size — confirm.
    #[inline]
    pub fn signed_int_min(&self) -> i128 {
        self.sign_extend(1_u128 << (self.bits() - 1))
    }

    #[inline]
    pub fn signed_int_max(&self) -> i128 {
        i128::MAX >> (128 - self.bits())
    }

    #[inline]
    pub fn unsigned_int_max(&self) -> u128 {
        u128::MAX >> (128 - self.bits())
    }
}
871
872impl Add for Size {
876 type Output = Size;
877 #[inline]
878 fn add(self, other: Size) -> Size {
879 Size::from_bytes(self.bytes().checked_add(other.bytes()).unwrap_or_else(|| {
880 panic!("Size::add: {} + {} doesn't fit in u64", self.bytes(), other.bytes())
881 }))
882 }
883}
884
885impl Sub for Size {
886 type Output = Size;
887 #[inline]
888 fn sub(self, other: Size) -> Size {
889 Size::from_bytes(self.bytes().checked_sub(other.bytes()).unwrap_or_else(|| {
890 panic!("Size::sub: {} - {} would result in negative size", self.bytes(), other.bytes())
891 }))
892 }
893}
894
impl Mul<Size> for u64 {
    type Output = Size;
    #[inline]
    fn mul(self, size: Size) -> Size {
        // Commutes to `Size * u64` so overflow handling lives in one place.
        size * self
    }
}
902
903impl Mul<u64> for Size {
904 type Output = Size;
905 #[inline]
906 fn mul(self, count: u64) -> Size {
907 match self.bytes().checked_mul(count) {
908 Some(bytes) => Size::from_bytes(bytes),
909 None => panic!("Size::mul: {} * {} doesn't fit in u64", self.bytes(), count),
910 }
911 }
912}
913
impl AddAssign for Size {
    #[inline]
    fn add_assign(&mut self, other: Size) {
        // Reuses `Add`, including its overflow panic.
        *self = *self + other;
    }
}
920
// Every operation forwards to the underlying byte count, so `Size` ranges
// (`lo..hi`) step one byte at a time, exactly like a `u64` range.
#[cfg(feature = "nightly")]
impl Step for Size {
    #[inline]
    fn steps_between(start: &Self, end: &Self) -> (usize, Option<usize>) {
        u64::steps_between(&start.bytes(), &end.bytes())
    }

    #[inline]
    fn forward_checked(start: Self, count: usize) -> Option<Self> {
        u64::forward_checked(start.bytes(), count).map(Self::from_bytes)
    }

    #[inline]
    fn forward(start: Self, count: usize) -> Self {
        Self::from_bytes(u64::forward(start.bytes(), count))
    }

    #[inline]
    unsafe fn forward_unchecked(start: Self, count: usize) -> Self {
        // SAFETY: the caller upholds `Step::forward_unchecked`'s contract,
        // which is forwarded unchanged to the `u64` implementation.
        Self::from_bytes(unsafe { u64::forward_unchecked(start.bytes(), count) })
    }

    #[inline]
    fn backward_checked(start: Self, count: usize) -> Option<Self> {
        u64::backward_checked(start.bytes(), count).map(Self::from_bytes)
    }

    #[inline]
    fn backward(start: Self, count: usize) -> Self {
        Self::from_bytes(u64::backward(start.bytes(), count))
    }

    #[inline]
    unsafe fn backward_unchecked(start: Self, count: usize) -> Self {
        // SAFETY: same forwarding argument as `forward_unchecked`.
        Self::from_bytes(unsafe { u64::backward_unchecked(start.bytes(), count) })
    }
}
958
/// An alignment, stored as the log2 of its byte value — always a power of two
/// between 1 and `Align::MAX`.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub struct Align {
    // Invariant: `bytes() == 1 << pow2` and `pow2 <= Align::MAX.pow2`.
    pow2: u8,
}
968
969impl fmt::Debug for Align {
971 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
972 write!(f, "Align({} bytes)", self.bytes())
973 }
974}
975
/// Why a byte count could not be turned into an [`Align`].
#[derive(Clone, Copy)]
pub enum AlignFromBytesError {
    NotPowerOfTwo(u64),
    TooLarge(u64),
}
981
982impl AlignFromBytesError {
983 pub fn diag_ident(self) -> &'static str {
984 match self {
985 Self::NotPowerOfTwo(_) => "not_power_of_two",
986 Self::TooLarge(_) => "too_large",
987 }
988 }
989
990 pub fn align(self) -> u64 {
991 let (Self::NotPowerOfTwo(align) | Self::TooLarge(align)) = self;
992 align
993 }
994}
995
996impl fmt::Debug for AlignFromBytesError {
997 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
998 fmt::Display::fmt(self, f)
999 }
1000}
1001
1002impl fmt::Display for AlignFromBytesError {
1003 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1004 match self {
1005 AlignFromBytesError::NotPowerOfTwo(align) => write!(f, "`{align}` is not a power of 2"),
1006 AlignFromBytesError::TooLarge(align) => write!(f, "`{align}` is too large"),
1007 }
1008 }
1009}
1010
impl Align {
    pub const ONE: Align = Align { pow2: 0 };
    pub const EIGHT: Align = Align { pow2: 3 };
    /// The largest supported alignment: 2^29 bytes.
    pub const MAX: Align = Align { pow2: 29 };

    #[inline]
    pub fn from_bits(bits: u64) -> Result<Align, AlignFromBytesError> {
        Align::from_bytes(Size::from_bits(bits).bytes())
    }

    /// Builds an `Align` from a byte count, which must be a power of two no
    /// larger than `Align::MAX`. A count of 0 is accepted and treated as 1.
    #[inline]
    pub const fn from_bytes(align: u64) -> Result<Align, AlignFromBytesError> {
        if align == 0 {
            return Ok(Align::ONE);
        }

        // Out-of-line, `#[cold]` error constructors keep the happy path small.
        #[cold]
        const fn not_power_of_2(align: u64) -> AlignFromBytesError {
            AlignFromBytesError::NotPowerOfTwo(align)
        }

        #[cold]
        const fn too_large(align: u64) -> AlignFromBytesError {
            AlignFromBytesError::TooLarge(align)
        }

        // A power of two equals 1 shifted by its trailing-zero count.
        let tz = align.trailing_zeros();
        if align != (1 << tz) {
            return Err(not_power_of_2(align));
        }

        let pow2 = tz as u8;
        if pow2 > Self::MAX.pow2 {
            return Err(too_large(align));
        }

        Ok(Align { pow2 })
    }

    #[inline]
    pub const fn bytes(self) -> u64 {
        1 << self.pow2
    }

    #[inline]
    pub fn bytes_usize(self) -> usize {
        self.bytes().try_into().unwrap()
    }

    #[inline]
    pub const fn bits(self) -> u64 {
        self.bytes() * 8
    }

    #[inline]
    pub fn bits_usize(self) -> usize {
        self.bits().try_into().unwrap()
    }

    /// The largest power-of-two alignment that divides `size`.
    /// NOTE(review): `size == 0` yields `trailing_zeros() == 64`, producing an
    /// out-of-range `pow2`; callers presumably never pass zero — confirm.
    #[inline]
    pub fn max_aligned_factor(size: Size) -> Align {
        Align { pow2: size.bytes().trailing_zeros() as u8 }
    }

    /// Caps `self` at the alignment guaranteed for data at offset `size`.
    #[inline]
    pub fn restrict_for_offset(self, size: Size) -> Align {
        self.min(Align::max_aligned_factor(size))
    }
}
1087
/// Wrapper carrying the ABI-mandated alignment of a type.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub struct AbiAlign {
    pub abi: Align,
}
1102
1103impl AbiAlign {
1104 #[inline]
1105 pub fn new(align: Align) -> AbiAlign {
1106 AbiAlign { abi: align }
1107 }
1108
1109 #[inline]
1110 pub fn min(self, other: AbiAlign) -> AbiAlign {
1111 AbiAlign { abi: self.abi.min(other.abi) }
1112 }
1113
1114 #[inline]
1115 pub fn max(self, other: AbiAlign) -> AbiAlign {
1116 AbiAlign { abi: self.abi.max(other.abi) }
1117 }
1118}
1119
// Forward all `Align` methods so an `AbiAlign` can be used wherever a plain
// alignment is wanted.
impl Deref for AbiAlign {
    type Target = Align;

    fn deref(&self) -> &Self::Target {
        &self.abi
    }
}
1127
/// An integer primitive identified by width only; signedness is tracked
/// separately (e.g. the `bool` in `Primitive::Int` / `IntegerType::Fixed`).
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
#[cfg_attr(
    feature = "nightly",
    derive(Encodable_NoContext, Decodable_NoContext, HashStable_Generic)
)]
pub enum Integer {
    I8,
    I16,
    I32,
    I64,
    I128,
}
1141
impl Integer {
    /// Name of the signed Rust primitive of this width (`i8`, ..., `i128`).
    pub fn int_ty_str(self) -> &'static str {
        use Integer::*;
        match self {
            I8 => "i8",
            I16 => "i16",
            I32 => "i32",
            I64 => "i64",
            I128 => "i128",
        }
    }

    /// Name of the unsigned Rust primitive of this width (`u8`, ..., `u128`).
    pub fn uint_ty_str(self) -> &'static str {
        use Integer::*;
        match self {
            I8 => "u8",
            I16 => "u16",
            I32 => "u32",
            I64 => "u64",
            I128 => "u128",
        }
    }

    #[inline]
    pub fn size(self) -> Size {
        use Integer::*;
        match self {
            I8 => Size::from_bytes(1),
            I16 => Size::from_bytes(2),
            I32 => Size::from_bytes(4),
            I64 => Size::from_bytes(8),
            I128 => Size::from_bytes(16),
        }
    }

    /// Resolves an attribute-level `IntegerType` to a concrete width, using
    /// the target's pointer-sized integer for `repr(usize)` / `repr(isize)`.
    pub fn from_attr<C: HasDataLayout>(cx: &C, ity: IntegerType) -> Integer {
        let dl = cx.data_layout();

        match ity {
            IntegerType::Pointer(_) => dl.ptr_sized_integer(),
            IntegerType::Fixed(x, _) => x,
        }
    }

    /// The target's ABI alignment for this integer width.
    pub fn align<C: HasDataLayout>(self, cx: &C) -> AbiAlign {
        use Integer::*;
        let dl = cx.data_layout();

        AbiAlign::new(match self {
            I8 => dl.i8_align,
            I16 => dl.i16_align,
            I32 => dl.i32_align,
            I64 => dl.i64_align,
            I128 => dl.i128_align,
        })
    }

    /// Maximum value when interpreted as signed.
    #[inline]
    pub fn signed_max(self) -> i128 {
        use Integer::*;
        match self {
            I8 => i8::MAX as i128,
            I16 => i16::MAX as i128,
            I32 => i32::MAX as i128,
            I64 => i64::MAX as i128,
            I128 => i128::MAX,
        }
    }

    /// Minimum value when interpreted as signed.
    #[inline]
    pub fn signed_min(self) -> i128 {
        use Integer::*;
        match self {
            I8 => i8::MIN as i128,
            I16 => i16::MIN as i128,
            I32 => i32::MIN as i128,
            I64 => i64::MIN as i128,
            I128 => i128::MIN,
        }
    }

    /// Smallest signed integer width that can represent `x`.
    #[inline]
    pub fn fit_signed(x: i128) -> Integer {
        use Integer::*;
        match x {
            -0x0000_0000_0000_0080..=0x0000_0000_0000_007f => I8,
            -0x0000_0000_0000_8000..=0x0000_0000_0000_7fff => I16,
            -0x0000_0000_8000_0000..=0x0000_0000_7fff_ffff => I32,
            -0x8000_0000_0000_0000..=0x7fff_ffff_ffff_ffff => I64,
            _ => I128,
        }
    }

    /// Smallest unsigned integer width that can represent `x`.
    /// The range arms overlap; the first (narrowest) matching arm wins.
    #[inline]
    pub fn fit_unsigned(x: u128) -> Integer {
        use Integer::*;
        match x {
            0..=0x0000_0000_0000_00ff => I8,
            0..=0x0000_0000_0000_ffff => I16,
            0..=0x0000_0000_ffff_ffff => I32,
            0..=0xffff_ffff_ffff_ffff => I64,
            _ => I128,
        }
    }

    /// The integer whose size *and* alignment both equal `wanted`, if any.
    pub fn for_align<C: HasDataLayout>(cx: &C, wanted: Align) -> Option<Integer> {
        use Integer::*;
        let dl = cx.data_layout();

        [I8, I16, I32, I64, I128].into_iter().find(|&candidate| {
            wanted == candidate.align(dl).abi && wanted.bytes() == candidate.size().bytes()
        })
    }

    /// Largest integer not exceeding `wanted` in either size or alignment,
    /// falling back to `I8`. NOTE(review): `I128` is deliberately absent from
    /// the candidate list — presumably because its alignment is unreliable
    /// across targets; confirm.
    pub fn approximate_align<C: HasDataLayout>(cx: &C, wanted: Align) -> Integer {
        use Integer::*;
        let dl = cx.data_layout();

        for candidate in [I64, I32, I16] {
            if wanted >= candidate.align(dl).abi && wanted.bytes() >= candidate.size().bytes() {
                return candidate;
            }
        }
        I8
    }

    /// Maps an exact power-of-two size (1..=16 bytes) to an `Integer`.
    #[inline]
    pub fn from_size(size: Size) -> Result<Self, String> {
        match size.bits() {
            8 => Ok(Integer::I8),
            16 => Ok(Integer::I16),
            32 => Ok(Integer::I32),
            64 => Ok(Integer::I64),
            128 => Ok(Integer::I128),
            _ => Err(format!("rust does not support integers with {} bits", size.bits())),
        }
    }
}
1290
/// A floating-point primitive, identified by width (16/32/64/128 bits).
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum Float {
    F16,
    F32,
    F64,
    F128,
}
1300
1301impl Float {
1302 pub fn size(self) -> Size {
1303 use Float::*;
1304
1305 match self {
1306 F16 => Size::from_bits(16),
1307 F32 => Size::from_bits(32),
1308 F64 => Size::from_bits(64),
1309 F128 => Size::from_bits(128),
1310 }
1311 }
1312
1313 pub fn align<C: HasDataLayout>(self, cx: &C) -> AbiAlign {
1314 use Float::*;
1315 let dl = cx.data_layout();
1316
1317 AbiAlign::new(match self {
1318 F16 => dl.f16_align,
1319 F32 => dl.f32_align,
1320 F64 => dl.f64_align,
1321 F128 => dl.f128_align,
1322 })
1323 }
1324}
1325
/// A primitive scalar value type, as used in layout computations.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum Primitive {
    /// Integer of the given width; the `bool` marks signedness (`true` =
    /// signed, matching `IntegerType`'s convention).
    Int(Integer, bool),
    Float(Float),
    /// Pointer in the given address space.
    Pointer(AddressSpace),
}
1341
1342impl Primitive {
1343 pub fn size<C: HasDataLayout>(self, cx: &C) -> Size {
1344 use Primitive::*;
1345 let dl = cx.data_layout();
1346
1347 match self {
1348 Int(i, _) => i.size(),
1349 Float(f) => f.size(),
1350 Pointer(a) => dl.pointer_size_in(a),
1351 }
1352 }
1353
1354 pub fn align<C: HasDataLayout>(self, cx: &C) -> AbiAlign {
1355 use Primitive::*;
1356 let dl = cx.data_layout();
1357
1358 match self {
1359 Int(i, _) => i.align(dl),
1360 Float(f) => f.align(dl),
1361 Pointer(a) => dl.pointer_align_in(a),
1362 }
1363 }
1364}
1365
/// An inclusive range of integers interpreted modulo the value size: when
/// `start > end` the range wraps, covering `start..=MAX` plus `0..=end`.
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub struct WrappingRange {
    pub start: u128,
    pub end: u128,
}
1381
impl WrappingRange {
    /// The full range for `size`: every bit pattern is allowed.
    pub fn full(size: Size) -> Self {
        Self { start: 0, end: size.unsigned_int_max() }
    }

    /// Whether `v` lies in the range.
    #[inline(always)]
    pub fn contains(&self, v: u128) -> bool {
        // A wrapped range (`start > end`) covers both tails of the integer.
        if self.start <= self.end {
            self.start <= v && v <= self.end
        } else {
            self.start <= v || v <= self.end
        }
    }

    /// Whether every value of `other` lies in `self` (both interpreted at
    /// `size` bits).
    #[inline(always)]
    pub fn contains_range(&self, other: Self, size: Size) -> bool {
        if self.is_full_for(size) {
            true
        } else {
            // Rotate both ranges by `-self.start` (mod 2^bits); `self` then
            // starts at 0, reducing containment to plain comparisons.
            let trunc = |x| size.truncate(x);

            let delta = self.start;
            let max = trunc(self.end.wrapping_sub(delta));

            let other_start = trunc(other.start.wrapping_sub(delta));
            let other_end = trunc(other.end.wrapping_sub(delta));

            // After rotation, `other` must not wrap and must end within `self`.
            (other_start <= other_end) && (other_end <= max)
        }
    }

    /// Returns `self` with `start` replaced.
    #[inline(always)]
    fn with_start(mut self, start: u128) -> Self {
        self.start = start;
        self
    }

    /// Returns `self` with `end` replaced.
    #[inline(always)]
    fn with_end(mut self, end: u128) -> Self {
        self.end = end;
        self
    }

    /// Whether this range covers every `size`-bit value, i.e. `end + 1` wraps
    /// around to `start` modulo 2^bits.
    #[inline]
    fn is_full_for(&self, size: Size) -> bool {
        let max_value = size.unsigned_int_max();
        debug_assert!(self.start <= max_value && self.end <= max_value);
        self.start == (self.end.wrapping_add(1) & max_value)
    }

    /// `Ok(true)` when the range, read as unsigned, does not wrap; `Err(..)`
    /// when the range is full (both descriptions apply equally).
    #[inline]
    pub fn no_unsigned_wraparound(&self, size: Size) -> Result<bool, RangeFull> {
        if self.is_full_for(size) { Err(..) } else { Ok(self.start <= self.end) }
    }

    /// `Ok(true)` when the range, read as signed, does not wrap; `Err(..)`
    /// when the range is full.
    #[inline]
    pub fn no_signed_wraparound(&self, size: Size) -> Result<bool, RangeFull> {
        if self.is_full_for(size) {
            Err(..)
        } else {
            let start: i128 = size.sign_extend(self.start);
            let end: i128 = size.sign_extend(self.end);
            Ok(start <= end)
        }
    }
}
1474
1475impl fmt::Debug for WrappingRange {
1476 fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
1477 if self.start > self.end {
1478 write!(fmt, "(..={}) | ({}..)", self.end, self.start)?;
1479 } else {
1480 write!(fmt, "{}..={}", self.start, self.end)?;
1481 }
1482 Ok(())
1483 }
1484}
1485
/// Information about a scalar value: its primitive representation plus which
/// bit patterns are considered valid.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum Scalar {
    /// A scalar that must be initialized, restricted to `valid_range`.
    Initialized {
        value: Primitive,

        /// The range of valid bit patterns, interpreted as unsigned.
        valid_range: WrappingRange,
    },
    /// A scalar read out of a union: may be uninitialized, and every bit
    /// pattern is accepted (see `is_uninit_valid` / `valid_range`).
    Union {
        value: Primitive,
    },
}
1507
1508impl Scalar {
1509 #[inline]
1510 pub fn is_bool(&self) -> bool {
1511 use Integer::*;
1512 matches!(
1513 self,
1514 Scalar::Initialized {
1515 value: Primitive::Int(I8, false),
1516 valid_range: WrappingRange { start: 0, end: 1 }
1517 }
1518 )
1519 }
1520
1521 pub fn primitive(&self) -> Primitive {
1524 match *self {
1525 Scalar::Initialized { value, .. } | Scalar::Union { value } => value,
1526 }
1527 }
1528
1529 pub fn align(self, cx: &impl HasDataLayout) -> AbiAlign {
1530 self.primitive().align(cx)
1531 }
1532
1533 pub fn size(self, cx: &impl HasDataLayout) -> Size {
1534 self.primitive().size(cx)
1535 }
1536
1537 #[inline]
1538 pub fn to_union(&self) -> Self {
1539 Self::Union { value: self.primitive() }
1540 }
1541
1542 #[inline]
1543 pub fn valid_range(&self, cx: &impl HasDataLayout) -> WrappingRange {
1544 match *self {
1545 Scalar::Initialized { valid_range, .. } => valid_range,
1546 Scalar::Union { value } => WrappingRange::full(value.size(cx)),
1547 }
1548 }
1549
1550 #[inline]
1551 pub fn valid_range_mut(&mut self) -> &mut WrappingRange {
1554 match self {
1555 Scalar::Initialized { valid_range, .. } => valid_range,
1556 Scalar::Union { .. } => panic!("cannot change the valid range of a union"),
1557 }
1558 }
1559
1560 #[inline]
1563 pub fn is_always_valid<C: HasDataLayout>(&self, cx: &C) -> bool {
1564 match *self {
1565 Scalar::Initialized { valid_range, .. } => valid_range.is_full_for(self.size(cx)),
1566 Scalar::Union { .. } => true,
1567 }
1568 }
1569
1570 #[inline]
1572 pub fn is_uninit_valid(&self) -> bool {
1573 match *self {
1574 Scalar::Initialized { .. } => false,
1575 Scalar::Union { .. } => true,
1576 }
1577 }
1578
1579 #[inline]
1581 pub fn is_signed(&self) -> bool {
1582 match self.primitive() {
1583 Primitive::Int(_, signed) => signed,
1584 _ => false,
1585 }
1586 }
1587}
1588
/// Describes how the fields of a layout are arranged in memory.
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum FieldsShape<FieldIdx: Idx> {
    /// A primitive with no fields at all (`count()` reports 0).
    Primitive,

    /// A union: every field starts at offset zero; the payload is the
    /// (non-zero) number of fields.
    Union(NonZeroUsize),

    /// `count` homogeneous elements, each placed `stride` apart.
    Array { stride: Size, count: u64 },

    /// Struct-like placement with per-field offsets.
    Arbitrary {
        /// Offset of each field, indexed in declaration (source) order.
        offsets: IndexVec<FieldIdx, Size>,

        /// For each field (source order), its rank in memory order. This is
        /// a bijective mapping — it is inverted via
        /// `invert_bijective_mapping` in `index_by_increasing_offset`.
        memory_index: IndexVec<FieldIdx, u32>,
    },
}
1632
impl<FieldIdx: Idx> FieldsShape<FieldIdx> {
    /// The number of fields in this shape.
    #[inline]
    pub fn count(&self) -> usize {
        match *self {
            FieldsShape::Primitive => 0,
            FieldsShape::Union(count) => count.get(),
            FieldsShape::Array { count, .. } => count.try_into().unwrap(),
            FieldsShape::Arbitrary { ref offsets, .. } => offsets.len(),
        }
    }

    /// The offset of field `i` (in source order).
    ///
    /// Panics on `Primitive` (which has no fields) and asserts that `i` is
    /// in bounds for `Union` and `Array`.
    #[inline]
    pub fn offset(&self, i: usize) -> Size {
        match *self {
            FieldsShape::Primitive => {
                unreachable!("FieldsShape::offset: `Primitive`s have no fields")
            }
            FieldsShape::Union(count) => {
                assert!(i < count.get(), "tried to access field {i} of union with {count} fields");
                // All union fields live at the start of the union.
                Size::ZERO
            }
            FieldsShape::Array { stride, count } => {
                let i = u64::try_from(i).unwrap();
                assert!(i < count, "tried to access field {i} of array with {count} fields");
                stride * i
            }
            FieldsShape::Arbitrary { ref offsets, .. } => offsets[FieldIdx::new(i)],
        }
    }

    /// The memory-order rank of field `i` (source order). For every shape
    /// except `Arbitrary`, source order and memory order coincide.
    #[inline]
    pub fn memory_index(&self, i: usize) -> usize {
        match *self {
            FieldsShape::Primitive => {
                unreachable!("FieldsShape::memory_index: `Primitive`s have no fields")
            }
            FieldsShape::Union(_) | FieldsShape::Array { .. } => i,
            FieldsShape::Arbitrary { ref memory_index, .. } => {
                memory_index[FieldIdx::new(i)].try_into().unwrap()
            }
        }
    }

    /// Iterates over source-order field indices, yielded in order of
    /// increasing memory offset.
    #[inline]
    pub fn index_by_increasing_offset(&self) -> impl ExactSizeIterator<Item = usize> {
        // Precompute the inverse of `memory_index` (memory rank -> source
        // index). For small field counts a stack array avoids allocating.
        let mut inverse_small = [0u8; 64];
        let mut inverse_big = IndexVec::new();
        let use_small = self.count() <= inverse_small.len();

        if let FieldsShape::Arbitrary { ref memory_index, .. } = *self {
            if use_small {
                for (field_idx, &mem_idx) in memory_index.iter_enumerated() {
                    inverse_small[mem_idx as usize] = field_idx.index() as u8;
                }
            } else {
                inverse_big = memory_index.invert_bijective_mapping();
            }
        }

        // `Primitive` reports `count() == 0`, yet one pseudo-field is yielded
        // for it here — callers apparently rely on iterating the value itself.
        let pseudofield_count = if let FieldsShape::Primitive = self { 1 } else { self.count() };

        (0..pseudofield_count).map(move |i| match *self {
            FieldsShape::Primitive | FieldsShape::Union(_) | FieldsShape::Array { .. } => i,
            FieldsShape::Arbitrary { .. } => {
                if use_small {
                    inverse_small[i] as usize
                } else {
                    inverse_big[i as u32].index()
                }
            }
        })
    }
}
1711
/// A numbered address space for pointers — NOTE(review): presumably the
/// LLVM-style target address-space index; confirm against backend usage.
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub struct AddressSpace(pub u32);
1718
impl AddressSpace {
    /// Address space 0, the default on most targets.
    pub const ZERO: Self = AddressSpace(0);
}
1723
/// How a value of this layout is represented by codegen backends.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum BackendRepr {
    /// A single scalar value.
    Scalar(Scalar),
    /// A pair of scalars.
    ScalarPair(Scalar, Scalar),
    /// A SIMD vector of `count` elements of the given scalar type.
    SimdVector {
        element: Scalar,
        count: u64,
    },
    /// Anything else: the value is handled through memory.
    Memory {
        /// `false` when the size is only known at runtime (see `is_unsized`).
        sized: bool,
    },
}
1749
impl BackendRepr {
    /// Returns `true` if the size of the value is only known at runtime.
    #[inline]
    pub fn is_unsized(&self) -> bool {
        match *self {
            BackendRepr::Scalar(_)
            | BackendRepr::ScalarPair(..)
            | BackendRepr::SimdVector { .. } => false,
            BackendRepr::Memory { sized } => !sized,
        }
    }

    /// Returns `true` if the size of the value is statically known.
    #[inline]
    pub fn is_sized(&self) -> bool {
        !self.is_unsized()
    }

    /// Returns `true` if this is a signed integer scalar.
    ///
    /// # Panics
    /// Panics if this is not a `Scalar` representation.
    #[inline]
    pub fn is_signed(&self) -> bool {
        match self {
            BackendRepr::Scalar(scal) => scal.is_signed(),
            _ => panic!("`is_signed` on non-scalar ABI {self:?}"),
        }
    }

    /// Returns `true` if this is a single scalar.
    #[inline]
    pub fn is_scalar(&self) -> bool {
        matches!(*self, BackendRepr::Scalar(_))
    }

    /// Returns `true` if this is a scalar with the layout of `bool`.
    #[inline]
    pub fn is_bool(&self) -> bool {
        matches!(*self, BackendRepr::Scalar(s) if s.is_bool())
    }

    /// The alignment implied by the scalar(s) alone; `None` for vector and
    /// in-memory representations, whose alignment is determined elsewhere.
    pub fn scalar_align<C: HasDataLayout>(&self, cx: &C) -> Option<Align> {
        match *self {
            BackendRepr::Scalar(s) => Some(s.align(cx).abi),
            // A pair is aligned to the stricter of its two halves.
            BackendRepr::ScalarPair(s1, s2) => Some(s1.align(cx).max(s2.align(cx)).abi),
            BackendRepr::SimdVector { .. } | BackendRepr::Memory { .. } => None,
        }
    }

    /// The size implied by the scalar(s) alone; `None` for vector and
    /// in-memory representations, whose size is determined elsewhere.
    pub fn scalar_size<C: HasDataLayout>(&self, cx: &C) -> Option<Size> {
        match *self {
            BackendRepr::Scalar(s) => Some(s.size(cx)),
            BackendRepr::ScalarPair(s1, s2) => {
                // The second scalar starts at the first suitably aligned
                // offset past the first; the total is then rounded up to the
                // pair's overall alignment.
                let field2_offset = s1.size(cx).align_to(s2.align(cx).abi);
                let size = (field2_offset + s2.size(cx)).align_to(
                    self.scalar_align(cx)
                        .unwrap(),
                );
                Some(size)
            }
            BackendRepr::SimdVector { .. } | BackendRepr::Memory { .. } => None,
        }
    }

    /// Discards validity/initialization requirements from all scalars, as
    /// appropriate for a value stored in (or read from) a union.
    pub fn to_union(&self) -> Self {
        match *self {
            BackendRepr::Scalar(s) => BackendRepr::Scalar(s.to_union()),
            BackendRepr::ScalarPair(s1, s2) => {
                BackendRepr::ScalarPair(s1.to_union(), s2.to_union())
            }
            BackendRepr::SimdVector { element, count } => {
                BackendRepr::SimdVector { element: element.to_union(), count }
            }
            // NOTE(review): `sized` is forced to `true` here — presumably
            // because unions are always sized; confirm.
            BackendRepr::Memory { .. } => BackendRepr::Memory { sized: true },
        }
    }

    /// Compares two representations while ignoring the valid ranges of their
    /// scalars: only the underlying primitives (and vector counts) must match.
    pub fn eq_up_to_validity(&self, other: &Self) -> bool {
        match (self, other) {
            (BackendRepr::Scalar(l), BackendRepr::Scalar(r)) => l.primitive() == r.primitive(),
            (
                BackendRepr::SimdVector { element: element_l, count: count_l },
                BackendRepr::SimdVector { element: element_r, count: count_r },
            ) => element_l.primitive() == element_r.primitive() && count_l == count_r,
            (BackendRepr::ScalarPair(l1, l2), BackendRepr::ScalarPair(r1, r2)) => {
                l1.primitive() == r1.primitive() && l2.primitive() == r2.primitive()
            }
            // Mixed or `Memory` shapes fall back to full structural equality.
            _ => self == other,
        }
    }
}
1854
/// How the variants of a layout (if any) are represented.
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum Variants<FieldIdx: Idx, VariantIdx: Idx> {
    /// A type with no variants at all.
    Empty,

    /// Exactly one variant; no tag is needed.
    Single {
        /// The index of that single variant.
        index: VariantIdx,
    },

    /// Multiple variants distinguished by a tag stored in one of the fields.
    Multiple {
        /// The scalar holding the tag value.
        tag: Scalar,
        /// How tag values map to variants (direct or niche — see `TagEncoding`).
        tag_encoding: TagEncoding<VariantIdx>,
        /// Which field of the layout holds the tag.
        tag_field: FieldIdx,
        /// The layout of each variant.
        variants: IndexVec<VariantIdx, LayoutData<FieldIdx, VariantIdx>>,
    },
}
1881
/// How the tag of a `Variants::Multiple` layout identifies the active variant.
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub enum TagEncoding<VariantIdx: Idx> {
    /// The tag stores the discriminant value directly.
    Direct,

    /// Niche encoding: the variants in `niche_variants` are encoded as tag
    /// values beginning at `niche_start`, and any tag value outside that
    /// window means `untagged_variant` is active — NOTE(review): standard
    /// niche scheme presumed from the field names; confirm against the
    /// layout calculator before relying on the exact mapping.
    Niche {
        untagged_variant: VariantIdx,
        niche_variants: RangeInclusive<VariantIdx>,
        niche_start: u128,
    },
}
1923
/// A scalar location inside a layout whose valid range excludes some bit
/// patterns; the excluded values ("the niche") can be used to encode enum
/// tags without extra space (see `Niche::reserve`).
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub struct Niche {
    /// Byte offset of the scalar within the enclosing layout.
    pub offset: Size,
    pub value: Primitive,
    pub valid_range: WrappingRange,
}
1931
1932impl Niche {
1933 pub fn from_scalar<C: HasDataLayout>(cx: &C, offset: Size, scalar: Scalar) -> Option<Self> {
1934 let Scalar::Initialized { value, valid_range } = scalar else { return None };
1935 let niche = Niche { offset, value, valid_range };
1936 if niche.available(cx) > 0 { Some(niche) } else { None }
1937 }
1938
1939 pub fn available<C: HasDataLayout>(&self, cx: &C) -> u128 {
1940 let Self { value, valid_range: v, .. } = *self;
1941 let size = value.size(cx);
1942 assert!(size.bits() <= 128);
1943 let max_value = size.unsigned_int_max();
1944
1945 let niche = v.end.wrapping_add(1)..v.start;
1947 niche.end.wrapping_sub(niche.start) & max_value
1948 }
1949
1950 pub fn reserve<C: HasDataLayout>(&self, cx: &C, count: u128) -> Option<(u128, Scalar)> {
1951 assert!(count > 0);
1952
1953 let Self { value, valid_range: v, .. } = *self;
1954 let size = value.size(cx);
1955 assert!(size.bits() <= 128);
1956 let max_value = size.unsigned_int_max();
1957
1958 let niche = v.end.wrapping_add(1)..v.start;
1959 let available = niche.end.wrapping_sub(niche.start) & max_value;
1960 if count > available {
1961 return None;
1962 }
1963
1964 let move_start = |v: WrappingRange| {
1978 let start = v.start.wrapping_sub(count) & max_value;
1979 Some((start, Scalar::Initialized { value, valid_range: v.with_start(start) }))
1980 };
1981 let move_end = |v: WrappingRange| {
1982 let start = v.end.wrapping_add(1) & max_value;
1983 let end = v.end.wrapping_add(count) & max_value;
1984 Some((start, Scalar::Initialized { value, valid_range: v.with_end(end) }))
1985 };
1986 let distance_end_zero = max_value - v.end;
1987 if v.start > v.end {
1988 move_end(v)
1990 } else if v.start <= distance_end_zero {
1991 if count <= v.start {
1992 move_start(v)
1993 } else {
1994 move_end(v)
1996 }
1997 } else {
1998 let end = v.end.wrapping_add(count) & max_value;
1999 let overshot_zero = (1..=v.end).contains(&end);
2000 if overshot_zero {
2001 move_start(v)
2003 } else {
2004 move_end(v)
2005 }
2006 }
2007 }
2008}
2009
/// The computed layout of a type: field placement, variant representation,
/// backend representation, size and alignment.
#[derive(PartialEq, Eq, Hash, Clone)]
#[cfg_attr(feature = "nightly", derive(HashStable_Generic))]
pub struct LayoutData<FieldIdx: Idx, VariantIdx: Idx> {
    /// Where and how the fields are placed.
    pub fields: FieldsShape<FieldIdx>,

    /// How the variants (if any) are represented.
    pub variants: Variants<FieldIdx, VariantIdx>,

    /// How codegen backends should represent values of this type.
    pub backend_repr: BackendRepr,

    /// The largest niche found anywhere in this layout, if any.
    pub largest_niche: Option<Niche>,
    /// `true` if this layout admits no valid values.
    pub uninhabited: bool,

    pub align: AbiAlign,
    pub size: Size,

    /// The largest alignment explicitly requested via `repr(align)` —
    /// NOTE(review): presumed from the name; confirm.
    pub max_repr_align: Option<Align>,

    /// The ABI alignment before `repr(...)` adjustments — NOTE(review):
    /// presumed from the name; confirm. Compared in `eq_abi`.
    pub unadjusted_abi_align: Align,

    /// Seed for layout randomization (cf. `ReprFlags::RANDOMIZE_LAYOUT`) —
    /// NOTE(review): presumed; confirm how it is derived and consumed.
    pub randomization_seed: Hash64,
}
2069
2070impl<FieldIdx: Idx, VariantIdx: Idx> LayoutData<FieldIdx, VariantIdx> {
2071 pub fn is_aggregate(&self) -> bool {
2073 match self.backend_repr {
2074 BackendRepr::Scalar(_) | BackendRepr::SimdVector { .. } => false,
2075 BackendRepr::ScalarPair(..) | BackendRepr::Memory { .. } => true,
2076 }
2077 }
2078
2079 pub fn is_uninhabited(&self) -> bool {
2081 self.uninhabited
2082 }
2083}
2084
impl<FieldIdx: Idx, VariantIdx: Idx> fmt::Debug for LayoutData<FieldIdx, VariantIdx>
where
    FieldsShape<FieldIdx>: fmt::Debug,
    Variants<FieldIdx, VariantIdx>: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Destructure exhaustively (no `..`) so that adding a field to
        // `LayoutData` forces this impl to be revisited.
        let LayoutData {
            size,
            align,
            backend_repr,
            fields,
            largest_niche,
            uninhabited,
            variants,
            max_repr_align,
            unadjusted_abi_align,
            randomization_seed,
        } = self;
        f.debug_struct("Layout")
            .field("size", size)
            .field("align", align)
            .field("backend_repr", backend_repr)
            .field("fields", fields)
            .field("largest_niche", largest_niche)
            .field("uninhabited", uninhabited)
            .field("variants", variants)
            .field("max_repr_align", max_repr_align)
            .field("unadjusted_abi_align", unadjusted_abi_align)
            .field("randomization_seed", randomization_seed)
            .finish()
    }
}
2120
/// Classifies pointers by the guarantees attached to their pointee.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum PointerKind {
    /// A shared reference; `frozen` presumably means the pointee has no
    /// interior mutability — TODO confirm.
    SharedRef { frozen: bool },
    /// A mutable reference; `unpin` presumably tracks `T: Unpin` — confirm.
    MutableRef { unpin: bool },
    /// A `Box`-like owning pointer; `global` presumably indicates the global
    /// allocator — confirm.
    Box { unpin: bool, global: bool },
}
2131
/// Facts about a pointee that backends may exploit (e.g. for alignment or
/// dereferenceability attributes) — NOTE(review): intended use inferred;
/// confirm against callers.
#[derive(Copy, Clone, Debug)]
pub struct PointeeInfo {
    /// `Some` when the pointer carries safety guarantees (see `PointerKind`).
    pub safe: Option<PointerKind>,
    pub size: Size,
    pub align: Align,
}
2150
2151impl<FieldIdx: Idx, VariantIdx: Idx> LayoutData<FieldIdx, VariantIdx> {
2152 #[inline]
2154 pub fn is_unsized(&self) -> bool {
2155 self.backend_repr.is_unsized()
2156 }
2157
2158 #[inline]
2159 pub fn is_sized(&self) -> bool {
2160 self.backend_repr.is_sized()
2161 }
2162
2163 pub fn is_1zst(&self) -> bool {
2165 self.is_sized() && self.size.bytes() == 0 && self.align.bytes() == 1
2166 }
2167
2168 pub fn is_zst(&self) -> bool {
2173 match self.backend_repr {
2174 BackendRepr::Scalar(_)
2175 | BackendRepr::ScalarPair(..)
2176 | BackendRepr::SimdVector { .. } => false,
2177 BackendRepr::Memory { sized } => sized && self.size.bytes() == 0,
2178 }
2179 }
2180
2181 pub fn eq_abi(&self, other: &Self) -> bool {
2187 self.size == other.size
2191 && self.is_sized() == other.is_sized()
2192 && self.backend_repr.eq_up_to_validity(&other.backend_repr)
2193 && self.backend_repr.is_bool() == other.backend_repr.is_bool()
2194 && self.align.abi == other.align.abi
2195 && self.max_repr_align == other.max_repr_align
2196 && self.unadjusted_abi_align == other.unadjusted_abi_align
2197 }
2198}
2199
/// How a struct-like layout should be computed with respect to sizedness.
#[derive(Copy, Clone, Debug)]
pub enum StructKind {
    /// Every field is sized; the struct is always sized.
    AlwaysSized,
    /// The last field may be unsized.
    MaybeUnsized,
    /// A prefix of the given size and alignment precedes the fields —
    /// NOTE(review): presumably an enum tag prefix; confirm.
    Prefixed(Size, Align),
}
2209
/// Error produced when parsing an ABI name from a string fails.
#[derive(Clone, Debug)]
pub enum AbiFromStrErr {
    /// The string does not name any known ABI.
    Unknown,
    /// NOTE(review): presumably the ABI must be spelled with an explicit
    /// unwind variant (e.g. `-unwind`) — confirm against the `FromStr` impl.
    NoExplicitUnwind,
}