
Commit d447c37

always add padding bytes for latest_offset
1 parent: 13ed2a6

File tree: 3 files changed, 76 additions and 4 deletions

  src/codegen/struct_layout.rs
  tests/expectations/tests/layout_array.rs
  tests/headers/layout_array.h

src/codegen/struct_layout.rs

Lines changed: 4 additions & 3 deletions
@@ -94,9 +94,11 @@ impl<'a, 'ctx> StructLayoutTracker<'a, 'ctx> {
             }
         };
 
+        self.latest_offset += padding_bytes;
+
         debug!("align field {} to {}/{} with {} padding bytes {:?}, calculated {:?}",
                field_name,
-               self.latest_offset + padding_bytes,
+               self.latest_offset,
                field_offset.unwrap_or(0) / 8,
                padding_bytes,
                field_layout,
@@ -111,8 +113,7 @@ impl<'a, 'ctx> StructLayoutTracker<'a, 'ctx> {
             }
         };
 
-        self.latest_offset += padding_layout.map(|layout| layout.size).unwrap_or(0) +
-            field_ty.calc_size(self.ctx).unwrap_or(field_layout.size);
+        self.latest_offset += field_ty.calc_size(self.ctx).unwrap_or(field_layout.size);
 
         self.latest_field_layout = Some(field_layout);
         self.max_field_align = cmp::max(self.max_field_align, field_layout.align);
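
The effect of the change, in offset terms: latest_offset now absorbs padding_bytes as soon as the padding is computed, and the later statement adds only the field's own size, instead of only counting the size of an explicit padding field (padding_layout) when one was generated. Below is a minimal, self-contained sketch of that bookkeeping, not bindgen's actual StructLayoutTracker API; Tracker, add_field, and finish are hypothetical names, and the sizes and alignments are assumptions for a 64-bit target (4-byte rte_spinlock_t, 8-byte pointers) chosen so the numbers line up with the offsets asserted in tests/expectations/tests/layout_array.rs.

// Hypothetical stand-in for the offset bookkeeping, not bindgen's real type.
struct Tracker {
    latest_offset: usize,
    max_field_align: usize,
}

impl Tracker {
    fn new() -> Self {
        Tracker { latest_offset: 0, max_field_align: 1 }
    }

    // Pad up to the field's alignment, record the field's offset, then add
    // only the field's own size, mirroring the patched behaviour where
    // `latest_offset += padding_bytes` happens unconditionally.
    fn add_field(&mut self, size: usize, align: usize) -> usize {
        let padding_bytes = (align - self.latest_offset % align) % align;
        self.latest_offset += padding_bytes;
        let field_offset = self.latest_offset;
        self.latest_offset += size;
        self.max_field_align = self.max_field_align.max(align);
        field_offset
    }

    // Round the running size up to the struct's alignment (forced to 64 in the
    // test header by __rte_cache_aligned).
    fn finish(&self, forced_align: usize) -> usize {
        let align = self.max_field_align.max(forced_align);
        (self.latest_offset + align - 1) / align * align
    }
}

fn main() {
    let mut t = Tracker::new();
    // Assumed sizes for a 64-bit target: 4-byte rte_spinlock_t, 8-byte pointers.
    assert_eq!(t.add_field(4, 4), 0);        // lock
    assert_eq!(t.add_field(13 * 8, 8), 8);   // free_head: 13 one-pointer list heads
    assert_eq!(t.add_field(4, 4), 112);      // alloc_count
    assert_eq!(t.add_field(8, 8), 120);      // total_size
    assert_eq!(t.finish(64), 128);           // matches bindgen_test_layout_malloc_heap
}

All of these asserts pass, which is exactly what the new bindgen_test_layout_malloc_heap expectation checks: lock at offset 0, free_head at 8, alloc_count at 112, total_size at 120, and a total size of 128 bytes.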

tests/expectations/tests/layout_array.rs

Lines changed: 50 additions & 0 deletions
@@ -7,6 +7,7 @@
 pub const RTE_CACHE_LINE_SIZE: ::std::os::raw::c_uint = 64;
 pub const RTE_MEMPOOL_OPS_NAMESIZE: ::std::os::raw::c_uint = 32;
 pub const RTE_MEMPOOL_MAX_OPS_IDX: ::std::os::raw::c_uint = 16;
+pub const RTE_HEAP_NUM_FREELISTS: ::std::os::raw::c_uint = 13;
 #[repr(C)]
 #[derive(Debug, Copy, Clone)]
 pub struct rte_mempool([u8; 0]);
@@ -154,3 +155,52 @@ fn bindgen_test_layout_rte_mempool_ops_table() {
 impl Clone for rte_mempool_ops_table {
     fn clone(&self) -> Self { *self }
 }
+/**
+ * Structure to hold malloc heap
+ */
+#[repr(C)]
+#[derive(Debug, Copy)]
+pub struct malloc_heap {
+    pub lock: rte_spinlock_t,
+    pub free_head: [malloc_heap__bindgen_ty_1; 13usize],
+    pub alloc_count: ::std::os::raw::c_uint,
+    pub total_size: usize,
+}
+#[repr(C)]
+#[derive(Debug, Copy)]
+pub struct malloc_heap__bindgen_ty_1 {
+    pub lh_first: *mut malloc_heap__bindgen_ty_1_malloc_elem,
+}
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct malloc_heap__bindgen_ty_1_malloc_elem([u8; 0]);
+#[test]
+fn bindgen_test_layout_malloc_heap__bindgen_ty_1() {
+    assert_eq!(::std::mem::size_of::<malloc_heap__bindgen_ty_1>() , 8usize);
+    assert_eq! (::std::mem::align_of::<malloc_heap__bindgen_ty_1>() , 8usize);
+    assert_eq! (unsafe {
+               & ( * ( 0 as * const malloc_heap__bindgen_ty_1 ) ) . lh_first
+               as * const _ as usize } , 0usize);
+}
+impl Clone for malloc_heap__bindgen_ty_1 {
+    fn clone(&self) -> Self { *self }
+}
+#[test]
+fn bindgen_test_layout_malloc_heap() {
+    assert_eq!(::std::mem::size_of::<malloc_heap>() , 128usize);
+    assert_eq! (unsafe {
+               & ( * ( 0 as * const malloc_heap ) ) . lock as * const _ as
+               usize } , 0usize);
+    assert_eq! (unsafe {
+               & ( * ( 0 as * const malloc_heap ) ) . free_head as * const _
+               as usize } , 8usize);
+    assert_eq! (unsafe {
+               & ( * ( 0 as * const malloc_heap ) ) . alloc_count as * const
+               _ as usize } , 112usize);
+    assert_eq! (unsafe {
+               & ( * ( 0 as * const malloc_heap ) ) . total_size as * const _
+               as usize } , 120usize);
+}
+impl Clone for malloc_heap {
+    fn clone(&self) -> Self { *self }
+}

tests/headers/layout_array.h

Lines changed: 22 additions & 1 deletion
@@ -3,6 +3,8 @@ typedef unsigned short uint16_t;
 typedef unsigned int uint32_t;
 typedef unsigned long long uint64_t;
 
+typedef long long size_t;
+
 #define RTE_CACHE_LINE_SIZE 64
 
 /**
@@ -85,4 +87,23 @@ struct rte_mempool_ops_table {
    * Storage for all possible ops structs.
    */
   struct rte_mempool_ops ops[RTE_MEMPOOL_MAX_OPS_IDX];
-} __rte_cache_aligned;
+} __rte_cache_aligned;
+
+
+/* Number of free lists per heap, grouped by size. */
+#define RTE_HEAP_NUM_FREELISTS 13
+
+#define LIST_HEAD(name, type) \
+struct name { \
+    struct type *lh_first; /* first element */ \
+}
+
+/**
+ * Structure to hold malloc heap
+ */
+struct malloc_heap {
+    rte_spinlock_t lock;
+    LIST_HEAD(, malloc_elem) free_head[RTE_HEAP_NUM_FREELISTS];
+    unsigned alloc_count;
+    size_t total_size;
+} __rte_cache_aligned;
