// divide-by-zero-in-struct-layout.rs — bindgen expectation file (135 lines, 4.21 KB)
/* automatically generated by rust-bindgen */
#![allow(
dead_code,
non_snake_case,
non_camel_case_types,
non_upper_case_globals
)]
/// Storage for a run of C bitfields, with a zero-sized `align` array that
/// forces the unit's alignment to that of `Align` without occupying space.
#[repr(C)]
#[derive(Copy, Clone, Debug, Default, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct __BindgenBitfieldUnit<Storage, Align> {
    storage: Storage,
    align: [Align; 0],
}
impl<Storage, Align> __BindgenBitfieldUnit<Storage, Align> {
    /// Wraps raw `storage` bytes in a bitfield unit.
    #[inline]
    pub const fn new(storage: Storage) -> Self {
        __BindgenBitfieldUnit { storage, align: [] }
    }
}
impl<Storage, Align> __BindgenBitfieldUnit<Storage, Align>
where
    Storage: AsRef<[u8]> + AsMut<[u8]>,
{
    /// Maps a unit-wide bit index to the bit position inside its byte,
    /// flipping the order on big-endian targets so field layout matches C.
    #[inline]
    fn bit_in_byte(index: usize) -> usize {
        if cfg!(target_endian = "big") {
            7 - (index % 8)
        } else {
            index % 8
        }
    }
    /// Reads the single bit at `index` (counted from the start of the unit).
    #[inline]
    pub fn get_bit(&self, index: usize) -> bool {
        debug_assert!(index / 8 < self.storage.as_ref().len());
        let byte = self.storage.as_ref()[index / 8];
        byte & (1 << Self::bit_in_byte(index)) != 0
    }
    /// Writes the single bit at `index` to `val`.
    #[inline]
    pub fn set_bit(&mut self, index: usize, val: bool) {
        debug_assert!(index / 8 < self.storage.as_ref().len());
        let bit = Self::bit_in_byte(index);
        let byte = &mut self.storage.as_mut()[index / 8];
        *byte = if val {
            *byte | (1 << bit)
        } else {
            *byte & !(1 << bit)
        };
    }
    /// Extracts a `bit_width`-bit field starting at `bit_offset`, returned
    /// in the low bits of a `u64`.
    #[inline]
    pub fn get(&self, bit_offset: usize, bit_width: u8) -> u64 {
        debug_assert!(bit_width <= 64);
        debug_assert!(bit_offset / 8 < self.storage.as_ref().len());
        debug_assert!((bit_offset + (bit_width as usize)) / 8 <= self.storage.as_ref().len());
        // Accumulate one storage bit at a time; on big-endian targets the
        // destination bit order within the value is reversed.
        (0..bit_width as usize).fold(0u64, |acc, i| {
            if self.get_bit(bit_offset + i) {
                let dst = if cfg!(target_endian = "big") {
                    bit_width as usize - 1 - i
                } else {
                    i
                };
                acc | (1u64 << dst)
            } else {
                acc
            }
        })
    }
    /// Stores the low `bit_width` bits of `val` into the field starting at
    /// `bit_offset`. Inverse of [`Self::get`].
    #[inline]
    pub fn set(&mut self, bit_offset: usize, bit_width: u8, val: u64) {
        debug_assert!(bit_width <= 64);
        debug_assert!(bit_offset / 8 < self.storage.as_ref().len());
        debug_assert!((bit_offset + (bit_width as usize)) / 8 <= self.storage.as_ref().len());
        for i in 0..bit_width as usize {
            // Pick which value bit lands in storage bit `bit_offset + i`,
            // mirroring the endianness mapping used by `get`.
            let src = if cfg!(target_endian = "big") {
                bit_width as usize - 1 - i
            } else {
                i
            };
            self.set_bit(bit_offset + i, val & (1u64 << src) != 0);
        }
    }
}
/// Struct with a zero-width bitfield unit followed by explicit padding
/// and a plain `unsigned int` member.
#[repr(C)]
#[derive(Debug, Default, Copy, Clone)]
pub struct WithBitfield {
    pub _bitfield_1: __BindgenBitfieldUnit<[u8; 0usize], u8>,
    pub __bindgen_padding_0: u32,
    pub a: ::std::os::raw::c_uint,
}
impl WithBitfield {
    /// Builds the (empty) bitfield unit for `_bitfield_1`.
    ///
    /// The original bound the unit to a `let mut` that was never mutated,
    /// which trips the `unused_mut` lint (not covered by this file's
    /// `#![allow(...)]`); returning the default directly avoids the warning.
    #[inline]
    pub fn new_bitfield_1() -> __BindgenBitfieldUnit<[u8; 0usize], u8> {
        Default::default()
    }
}
/// Packed variant: `repr(C, packed)` drops alignment padding before `a`,
/// so only a trailing pad byte remains.
#[repr(C, packed)]
#[derive(Debug, Default, Copy, Clone)]
pub struct WithBitfieldAndAttrPacked {
    pub _bitfield_1: __BindgenBitfieldUnit<[u8; 0usize], u8>,
    pub a: ::std::os::raw::c_uint,
    pub __bindgen_padding_0: u8,
}
impl WithBitfieldAndAttrPacked {
    /// Builds the (empty) bitfield unit for `_bitfield_1`.
    ///
    /// The original bound the unit to a `let mut` that was never mutated,
    /// which trips the `unused_mut` lint (not covered by this file's
    /// `#![allow(...)]`); returning the default directly avoids the warning.
    #[inline]
    pub fn new_bitfield_1() -> __BindgenBitfieldUnit<[u8; 0usize], u8> {
        Default::default()
    }
}
/// Same packed layout as `WithBitfieldAndAttrPacked`: no padding before
/// `a`, a single trailing pad byte.
#[repr(C, packed)]
#[derive(Debug, Default, Copy, Clone)]
pub struct WithBitfieldAndPacked {
    pub _bitfield_1: __BindgenBitfieldUnit<[u8; 0usize], u8>,
    pub a: ::std::os::raw::c_uint,
    pub __bindgen_padding_0: u8,
}
impl WithBitfieldAndPacked {
    /// Builds the (empty) bitfield unit for `_bitfield_1`.
    ///
    /// The original bound the unit to a `let mut` that was never mutated,
    /// which trips the `unused_mut` lint (not covered by this file's
    /// `#![allow(...)]`); returning the default directly avoids the warning.
    #[inline]
    pub fn new_bitfield_1() -> __BindgenBitfieldUnit<[u8; 0usize], u8> {
        Default::default()
    }
}