From c45be66cbff05d4e96bad67c81036e72a1643129 Mon Sep 17 00:00:00 2001 From: Thomas Bertschinger Date: Tue, 23 Jan 2024 21:41:14 -0700 Subject: [PATCH] try to avoid `#[repr(packed)]` when `align` is needed Currently rustc forbids compound types from having both a `packed` and `align` attribute. When a source type has both attributes, this may mean it cannot be represented with the current rustc. Often, though, one or both of these attributes are redundant and can be safely removed from the generated Rust code. Previously, bindgen avoided placing the `align` attribute when it was not needed. However, it would always place the `packed` attribute if the source type has it, even when it is redundant because the source type is "naturally packed". With this change, bindgen avoids placing `packed` on a type if the `packed` is redundant and the type needs an `align` attribute. If the type does not have an "align" attribute, then bindgen will still place `packed` so as to avoid changing existing working behavior. This commit also takes out an extraneous `is_packed()` call from `StructLayoutTracker::new()` since the value can be passed in from the caller; this avoids duplicating work in some cases. 
--- .../tests/redundant-packed-and-align.rs | 369 ++++++++++++++++++ .../headers/redundant-packed-and-align.h | 42 ++ bindgen/codegen/mod.rs | 10 +- bindgen/codegen/struct_layout.rs | 2 +- bindgen/ir/comp.rs | 20 + 5 files changed, 441 insertions(+), 2 deletions(-) create mode 100644 bindgen-tests/tests/expectations/tests/redundant-packed-and-align.rs create mode 100644 bindgen-tests/tests/headers/redundant-packed-and-align.h diff --git a/bindgen-tests/tests/expectations/tests/redundant-packed-and-align.rs b/bindgen-tests/tests/expectations/tests/redundant-packed-and-align.rs new file mode 100644 index 0000000000..b5feaa140a --- /dev/null +++ b/bindgen-tests/tests/expectations/tests/redundant-packed-and-align.rs @@ -0,0 +1,369 @@ +#![allow(dead_code, non_snake_case, non_camel_case_types, non_upper_case_globals)] +#[repr(C)] +#[derive(Copy, Clone, Debug, Default, Eq, Hash, Ord, PartialEq, PartialOrd)] +pub struct __BindgenBitfieldUnit { + storage: Storage, +} +impl __BindgenBitfieldUnit { + #[inline] + pub const fn new(storage: Storage) -> Self { + Self { storage } + } +} +impl __BindgenBitfieldUnit +where + Storage: AsRef<[u8]> + AsMut<[u8]>, +{ + #[inline] + pub fn get_bit(&self, index: usize) -> bool { + debug_assert!(index / 8 < self.storage.as_ref().len()); + let byte_index = index / 8; + let byte = self.storage.as_ref()[byte_index]; + let bit_index = if cfg!(target_endian = "big") { + 7 - (index % 8) + } else { + index % 8 + }; + let mask = 1 << bit_index; + byte & mask == mask + } + #[inline] + pub fn set_bit(&mut self, index: usize, val: bool) { + debug_assert!(index / 8 < self.storage.as_ref().len()); + let byte_index = index / 8; + let byte = &mut self.storage.as_mut()[byte_index]; + let bit_index = if cfg!(target_endian = "big") { + 7 - (index % 8) + } else { + index % 8 + }; + let mask = 1 << bit_index; + if val { + *byte |= mask; + } else { + *byte &= !mask; + } + } + #[inline] + pub fn get(&self, bit_offset: usize, bit_width: u8) -> u64 { + 
debug_assert!(bit_width <= 64); + debug_assert!(bit_offset / 8 < self.storage.as_ref().len()); + debug_assert!( + (bit_offset + (bit_width as usize)) / 8 <= self.storage.as_ref().len(), + ); + let mut val = 0; + for i in 0..(bit_width as usize) { + if self.get_bit(i + bit_offset) { + let index = if cfg!(target_endian = "big") { + bit_width as usize - 1 - i + } else { + i + }; + val |= 1 << index; + } + } + val + } + #[inline] + pub fn set(&mut self, bit_offset: usize, bit_width: u8, val: u64) { + debug_assert!(bit_width <= 64); + debug_assert!(bit_offset / 8 < self.storage.as_ref().len()); + debug_assert!( + (bit_offset + (bit_width as usize)) / 8 <= self.storage.as_ref().len(), + ); + for i in 0..(bit_width as usize) { + let mask = 1 << i; + let val_bit_is_set = val & mask == mask; + let index = if cfg!(target_endian = "big") { + bit_width as usize - 1 - i + } else { + i + }; + self.set_bit(index + bit_offset, val_bit_is_set); + } + } +} +#[repr(C)] +#[repr(align(8))] +#[derive(Debug, Default, Copy, Clone)] +pub struct redundant_packed { + pub a: u32, + pub b: u32, +} +#[test] +fn bindgen_test_layout_redundant_packed() { + const UNINIT: ::std::mem::MaybeUninit = ::std::mem::MaybeUninit::uninit(); + let ptr = UNINIT.as_ptr(); + assert_eq!( + ::std::mem::size_of::(), + 8usize, + concat!("Size of: ", stringify!(redundant_packed)), + ); + assert_eq!( + ::std::mem::align_of::(), + 8usize, + concat!("Alignment of ", stringify!(redundant_packed)), + ); + assert_eq!( + unsafe { ::std::ptr::addr_of!((*ptr).a) as usize - ptr as usize }, + 0usize, + concat!("Offset of field: ", stringify!(redundant_packed), "::", stringify!(a)), + ); + assert_eq!( + unsafe { ::std::ptr::addr_of!((*ptr).b) as usize - ptr as usize }, + 4usize, + concat!("Offset of field: ", stringify!(redundant_packed), "::", stringify!(b)), + ); +} +#[repr(C)] +#[repr(align(8))] +#[derive(Debug, Default, Copy, Clone)] +pub struct redundant_packed_bitfield { + pub a: [u8; 3usize], + pub _bitfield_align_1: [u8; 
0], + pub _bitfield_1: __BindgenBitfieldUnit<[u8; 1usize]>, + pub c: u32, +} +#[test] +fn bindgen_test_layout_redundant_packed_bitfield() { + const UNINIT: ::std::mem::MaybeUninit = ::std::mem::MaybeUninit::uninit(); + let ptr = UNINIT.as_ptr(); + assert_eq!( + ::std::mem::size_of::(), + 8usize, + concat!("Size of: ", stringify!(redundant_packed_bitfield)), + ); + assert_eq!( + ::std::mem::align_of::(), + 8usize, + concat!("Alignment of ", stringify!(redundant_packed_bitfield)), + ); + assert_eq!( + unsafe { ::std::ptr::addr_of!((*ptr).a) as usize - ptr as usize }, + 0usize, + concat!( + "Offset of field: ", + stringify!(redundant_packed_bitfield), + "::", + stringify!(a), + ), + ); + assert_eq!( + unsafe { ::std::ptr::addr_of!((*ptr).c) as usize - ptr as usize }, + 4usize, + concat!( + "Offset of field: ", + stringify!(redundant_packed_bitfield), + "::", + stringify!(c), + ), + ); +} +impl redundant_packed_bitfield { + #[inline] + pub fn b0(&self) -> u8 { + unsafe { ::std::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u8) } + } + #[inline] + pub fn set_b0(&mut self, val: u8) { + unsafe { + let val: u8 = ::std::mem::transmute(val); + self._bitfield_1.set(0usize, 1u8, val as u64) + } + } + #[inline] + pub fn b1(&self) -> u8 { + unsafe { ::std::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u8) } + } + #[inline] + pub fn set_b1(&mut self, val: u8) { + unsafe { + let val: u8 = ::std::mem::transmute(val); + self._bitfield_1.set(1usize, 1u8, val as u64) + } + } + #[inline] + pub fn new_bitfield_1(b0: u8, b1: u8) -> __BindgenBitfieldUnit<[u8; 1usize]> { + let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 1usize]> = Default::default(); + __bindgen_bitfield_unit + .set( + 0usize, + 1u8, + { + let b0: u8 = unsafe { ::std::mem::transmute(b0) }; + b0 as u64 + }, + ); + __bindgen_bitfield_unit + .set( + 1usize, + 1u8, + { + let b1: u8 = unsafe { ::std::mem::transmute(b1) }; + b1 as u64 + }, + ); + __bindgen_bitfield_unit + } +} +#[repr(C)] 
+#[repr(align(16))] +#[derive(Copy, Clone)] +pub union redundant_packed_union { + pub a: u64, + pub b: u32, +} +#[test] +fn bindgen_test_layout_redundant_packed_union() { + const UNINIT: ::std::mem::MaybeUninit = ::std::mem::MaybeUninit::uninit(); + let ptr = UNINIT.as_ptr(); + assert_eq!( + ::std::mem::size_of::(), + 16usize, + concat!("Size of: ", stringify!(redundant_packed_union)), + ); + assert_eq!( + ::std::mem::align_of::(), + 16usize, + concat!("Alignment of ", stringify!(redundant_packed_union)), + ); + assert_eq!( + unsafe { ::std::ptr::addr_of!((*ptr).a) as usize - ptr as usize }, + 0usize, + concat!( + "Offset of field: ", + stringify!(redundant_packed_union), + "::", + stringify!(a), + ), + ); + assert_eq!( + unsafe { ::std::ptr::addr_of!((*ptr).b) as usize - ptr as usize }, + 0usize, + concat!( + "Offset of field: ", + stringify!(redundant_packed_union), + "::", + stringify!(b), + ), + ); +} +impl Default for redundant_packed_union { + fn default() -> Self { + let mut s = ::std::mem::MaybeUninit::::uninit(); + unsafe { + ::std::ptr::write_bytes(s.as_mut_ptr(), 0, 1); + s.assume_init() + } + } +} +#[repr(C)] +#[repr(align(2))] +#[derive(Debug, Default, Copy, Clone)] +pub struct inner { + pub a: u8, +} +#[test] +fn bindgen_test_layout_inner() { + const UNINIT: ::std::mem::MaybeUninit = ::std::mem::MaybeUninit::uninit(); + let ptr = UNINIT.as_ptr(); + assert_eq!( + ::std::mem::size_of::(), + 2usize, + concat!("Size of: ", stringify!(inner)), + ); + assert_eq!( + ::std::mem::align_of::(), + 2usize, + concat!("Alignment of ", stringify!(inner)), + ); + assert_eq!( + unsafe { ::std::ptr::addr_of!((*ptr).a) as usize - ptr as usize }, + 0usize, + concat!("Offset of field: ", stringify!(inner), "::", stringify!(a)), + ); +} +#[repr(C)] +#[repr(align(8))] +#[derive(Debug, Default, Copy, Clone)] +pub struct outer_redundant_packed { + pub a: [inner; 2usize], + pub b: u32, +} +#[test] +fn bindgen_test_layout_outer_redundant_packed() { + const UNINIT: 
::std::mem::MaybeUninit = ::std::mem::MaybeUninit::uninit(); + let ptr = UNINIT.as_ptr(); + assert_eq!( + ::std::mem::size_of::(), + 8usize, + concat!("Size of: ", stringify!(outer_redundant_packed)), + ); + assert_eq!( + ::std::mem::align_of::(), + 8usize, + concat!("Alignment of ", stringify!(outer_redundant_packed)), + ); + assert_eq!( + unsafe { ::std::ptr::addr_of!((*ptr).a) as usize - ptr as usize }, + 0usize, + concat!( + "Offset of field: ", + stringify!(outer_redundant_packed), + "::", + stringify!(a), + ), + ); + assert_eq!( + unsafe { ::std::ptr::addr_of!((*ptr).b) as usize - ptr as usize }, + 4usize, + concat!( + "Offset of field: ", + stringify!(outer_redundant_packed), + "::", + stringify!(b), + ), + ); +} +#[repr(C)] +#[repr(align(4))] +#[derive(Debug, Default, Copy, Clone)] +pub struct redundant_pragma_packed { + pub a: u8, + pub b: u16, +} +#[test] +fn bindgen_test_layout_redundant_pragma_packed() { + const UNINIT: ::std::mem::MaybeUninit = ::std::mem::MaybeUninit::uninit(); + let ptr = UNINIT.as_ptr(); + assert_eq!( + ::std::mem::size_of::(), + 4usize, + concat!("Size of: ", stringify!(redundant_pragma_packed)), + ); + assert_eq!( + ::std::mem::align_of::(), + 4usize, + concat!("Alignment of ", stringify!(redundant_pragma_packed)), + ); + assert_eq!( + unsafe { ::std::ptr::addr_of!((*ptr).a) as usize - ptr as usize }, + 0usize, + concat!( + "Offset of field: ", + stringify!(redundant_pragma_packed), + "::", + stringify!(a), + ), + ); + assert_eq!( + unsafe { ::std::ptr::addr_of!((*ptr).b) as usize - ptr as usize }, + 2usize, + concat!( + "Offset of field: ", + stringify!(redundant_pragma_packed), + "::", + stringify!(b), + ), + ); +} diff --git a/bindgen-tests/tests/headers/redundant-packed-and-align.h b/bindgen-tests/tests/headers/redundant-packed-and-align.h new file mode 100644 index 0000000000..75e15e4139 --- /dev/null +++ b/bindgen-tests/tests/headers/redundant-packed-and-align.h @@ -0,0 +1,42 @@ +typedef unsigned char uint8_t; +typedef 
unsigned short uint16_t; +typedef unsigned int uint32_t; +typedef unsigned long long uint64_t; + +struct redundant_packed { + uint32_t a; + uint32_t b; +} __attribute__((packed, aligned(8))); + +struct redundant_packed_bitfield { + uint8_t a[3]; + uint8_t b0:1; + uint8_t b1:1; + uint32_t c; +} __attribute__((packed, aligned(8))); + + +union redundant_packed_union { + uint64_t a; + uint32_t b; +} __attribute__((packed, aligned(16))); + + +struct inner { + uint8_t a; +} __attribute__((packed, aligned(2))); + +struct outer_redundant_packed { + struct inner a[2]; + uint32_t b; +} __attribute__((packed, aligned(8))); + + +#pragma pack(2) + +struct redundant_pragma_packed { + uint8_t a; + uint16_t b; +} __attribute__((aligned(4))); + +#pragma pack() diff --git a/bindgen/codegen/mod.rs b/bindgen/codegen/mod.rs index c84320ce5b..b2f0f15173 100644 --- a/bindgen/codegen/mod.rs +++ b/bindgen/codegen/mod.rs @@ -1977,6 +1977,7 @@ impl CodeGenerator for CompInfo { ty, &canonical_name, visibility, + packed, ); if !is_opaque { @@ -2196,7 +2197,14 @@ impl CodeGenerator for CompInfo { if let Some(comment) = item.comment(ctx) { attributes.push(attributes::doc(comment)); } - if packed && !is_opaque { + + // if a type has both a "packed" attribute and an "align(N)" attribute, then check if the + // "packed" attr is redundant, and do not include it if so. 
+ if packed && + !is_opaque && + !(explicit_align.is_some() && + self.already_packed(ctx).map_or(false, |t| t)) + { let n = layout.map_or(1, |l| l.align); assert!(ctx.options().rust_features().repr_packed_n || n == 1); let packed_repr = if n == 1 { diff --git a/bindgen/codegen/struct_layout.rs b/bindgen/codegen/struct_layout.rs index a62da69534..f4596a1992 100644 --- a/bindgen/codegen/struct_layout.rs +++ b/bindgen/codegen/struct_layout.rs @@ -91,9 +91,9 @@ impl<'a> StructLayoutTracker<'a> { ty: &'a Type, name: &'a str, visibility: FieldVisibilityKind, + is_packed: bool, ) -> Self { let known_type_layout = ty.layout(ctx); - let is_packed = comp.is_packed(ctx, known_type_layout.as_ref()); let (is_rust_union, can_copy_union_fields) = comp.is_rust_union(ctx, known_type_layout.as_ref(), name); StructLayoutTracker { diff --git a/bindgen/ir/comp.rs b/bindgen/ir/comp.rs index a5d06fa556..fed6ba8ac8 100644 --- a/bindgen/ir/comp.rs +++ b/bindgen/ir/comp.rs @@ -1642,6 +1642,26 @@ impl CompInfo { false } + /// Return true if a compound type is "naturally packed". This means we can exclude the + /// "packed" attribute without changing the layout. + /// This is useful for types that need an "align(N)" attribute since rustc won't compile + /// structs that have both of those attributes. + pub(crate) fn already_packed(&self, ctx: &BindgenContext) -> Option { + let mut total_size: usize = 0; + + for field in self.fields().iter() { + let layout = field.layout(ctx)?; + + if layout.align != 0 && total_size % layout.align != 0 { + return Some(false); + } + + total_size += layout.size; + } + + Some(true) + } + /// Returns true if compound type has been forward declared pub(crate) fn is_forward_declaration(&self) -> bool { self.is_forward_declaration