#ifndef _IPXE_PSEUDOBIT_H
#define _IPXE_PSEUDOBIT_H

/** @file
 *
 * Pseudo-bit structures
 *
 * Macros for reading and writing arbitrary-width bit fields within
 * device data structures that are laid out as arrays of 64-bit words
 * in a fixed (little- or big-endian) device byte order.
 */

FILE_LICENCE ( GPL2_OR_LATER_OR_UBDL );

/* NOTE(review): the original include block was lost in extraction;
 * <stdint.h> (uint64_t) and <byteswap.h> (cpu_to_le64 etc.) are
 * required by the macros below — confirm against upstream.
 */
#include <stdint.h>
#include <byteswap.h>

/** Datatype used to represent a bit in the pseudo-structures */
typedef unsigned char pseudo_bit_t;

#ifdef PSEUDOBIT_LITTLE_ENDIAN

/* Device structures are little-endian */
#define cpu_to_BIT64 cpu_to_le64
#define cpu_to_BIT32 cpu_to_le32
#define BIT64_to_cpu le64_to_cpu
#define BIT32_to_cpu le32_to_cpu

/* Bit offsets count up from the LSB of each little-endian qword */
#define QWORD_SHIFT( offset, width ) (offset)

#endif

#ifdef PSEUDOBIT_BIG_ENDIAN

/* Device structures are big-endian */
#define cpu_to_BIT64 cpu_to_be64
#define cpu_to_BIT32 cpu_to_be32
#define BIT64_to_cpu be64_to_cpu
#define BIT32_to_cpu be32_to_cpu

/* Bit offsets count down from the MSB of each big-endian qword */
#define QWORD_SHIFT( offset, width ) ( 64 - (offset) - (width) )

#endif

/**
 * Wrapper union for pseudo_bit_t structures
 *
 * Provides byte/dword/qword views of the correct total size, and the
 * zero-length @c dummy member encapsulates the underlying structure
 * type so that the BIT_xxx() accessor macros can recover it via
 * typeof() without explicit type information.
 */
#define PSEUDO_BIT_STRUCT( _structure ) \
	union { \
		uint8_t bytes[ sizeof ( _structure ) / 8 ]; \
		uint32_t dwords[ sizeof ( _structure ) / 32 ]; \
		uint64_t qwords[ sizeof ( _structure ) / 64 ]; \
		_structure *dummy[0]; \
	} __attribute__ (( packed )) u

/** Get pseudo_bit_t structure type from a wrapper structure pointer */
#define PSEUDO_BIT_STRUCT_TYPE( _ptr ) \
	typeof ( *((_ptr)->u.dummy[0]) )

/** Get bit offset of a named field within the pseudo_bit_t structure */
#define BIT_OFFSET( _ptr, _field ) \
	offsetof ( PSEUDO_BIT_STRUCT_TYPE ( _ptr ), _field )

/** Get bit width of a named field within the pseudo_bit_t structure */
#define BIT_WIDTH( _ptr, _field ) \
	sizeof ( ( ( PSEUDO_BIT_STRUCT_TYPE ( _ptr ) * ) NULL )->_field )

/** Get index of the qword containing a named field */
#define QWORD_OFFSET( _ptr, _field ) \
	( BIT_OFFSET ( _ptr, _field ) / 64 )

/** Get bit offset of a named field within qword @c _index */
#define QWORD_BIT_OFFSET( _ptr, _index, _field ) \
	( BIT_OFFSET ( _ptr, _field ) - ( 64 * (_index) ) )

/** Get endian-corrected shift of a named field within its qword */
#define QWORD_BIT_SHIFT( _ptr, _index, _field ) \
	QWORD_SHIFT ( QWORD_BIT_OFFSET ( _ptr, _index, _field ), \
		      BIT_WIDTH ( _ptr, _field ) )

/** Get right-aligned (unshifted) bit mask for a named field */
#define BIT_MASK( _ptr, _field ) \
	( ( ~( ( uint64_t ) 0 ) ) >> \
	  ( 64 - BIT_WIDTH ( _ptr, _field ) ) )

/*
 * Assemble a native-endian qword from ( field, value ) pairs.  The
 * numbered variants recurse so that up to seven pairs can be combined
 * in a single constant expression.
 */

#define BIT_ASSEMBLE_1( _ptr, _index, _field, _value ) \
	( ( ( uint64_t ) (_value) ) << \
	  QWORD_BIT_SHIFT ( _ptr, _index, _field ) )

#define BIT_ASSEMBLE_2( _ptr, _index, _field, _value, ... ) \
	( BIT_ASSEMBLE_1 ( _ptr, _index, _field, _value ) | \
	  BIT_ASSEMBLE_1 ( _ptr, _index, __VA_ARGS__ ) )

#define BIT_ASSEMBLE_3( _ptr, _index, _field, _value, ... ) \
	( BIT_ASSEMBLE_1 ( _ptr, _index, _field, _value ) | \
	  BIT_ASSEMBLE_2 ( _ptr, _index, __VA_ARGS__ ) )

#define BIT_ASSEMBLE_4( _ptr, _index, _field, _value, ... ) \
	( BIT_ASSEMBLE_1 ( _ptr, _index, _field, _value ) | \
	  BIT_ASSEMBLE_3 ( _ptr, _index, __VA_ARGS__ ) )

#define BIT_ASSEMBLE_5( _ptr, _index, _field, _value, ... ) \
	( BIT_ASSEMBLE_1 ( _ptr, _index, _field, _value ) | \
	  BIT_ASSEMBLE_4 ( _ptr, _index, __VA_ARGS__ ) )

#define BIT_ASSEMBLE_6( _ptr, _index, _field, _value, ... ) \
	( BIT_ASSEMBLE_1 ( _ptr, _index, _field, _value ) | \
	  BIT_ASSEMBLE_5 ( _ptr, _index, __VA_ARGS__ ) )

#define BIT_ASSEMBLE_7( _ptr, _index, _field, _value, ... ) \
	( BIT_ASSEMBLE_1 ( _ptr, _index, _field, _value ) | \
	  BIT_ASSEMBLE_6 ( _ptr, _index, __VA_ARGS__ ) )

/*
 * Build a native-endian qword mask covering up to seven named fields,
 * by the same recursion scheme as BIT_ASSEMBLE_n.
 */

#define BIT_MASK_1( _ptr, _index, _field ) \
	( BIT_MASK ( _ptr, _field ) << \
	  QWORD_BIT_SHIFT ( _ptr, _index, _field ) )

#define BIT_MASK_2( _ptr, _index, _field, ... ) \
	( BIT_MASK_1 ( _ptr, _index, _field ) | \
	  BIT_MASK_1 ( _ptr, _index, __VA_ARGS__ ) )

#define BIT_MASK_3( _ptr, _index, _field, ... ) \
	( BIT_MASK_1 ( _ptr, _index, _field ) | \
	  BIT_MASK_2 ( _ptr, _index, __VA_ARGS__ ) )

#define BIT_MASK_4( _ptr, _index, _field, ... ) \
	( BIT_MASK_1 ( _ptr, _index, _field ) | \
	  BIT_MASK_3 ( _ptr, _index, __VA_ARGS__ ) )

#define BIT_MASK_5( _ptr, _index, _field, ... ) \
	( BIT_MASK_1 ( _ptr, _index, _field ) | \
	  BIT_MASK_4 ( _ptr, _index, __VA_ARGS__ ) )

#define BIT_MASK_6( _ptr, _index, _field, ... ) \
	( BIT_MASK_1 ( _ptr, _index, _field ) | \
	  BIT_MASK_5 ( _ptr, _index, __VA_ARGS__ ) )

#define BIT_MASK_7( _ptr, _index, _field, ... ) \
	( BIT_MASK_1 ( _ptr, _index, _field ) | \
	  BIT_MASK_6 ( _ptr, _index, __VA_ARGS__ ) )

/**
 * Store an assembled value into qword @c _index, in device byte order.
 *
 * NOTE(review): the "} while ( 0 )" closer was lost in extraction and
 * has been restored here — confirm against upstream.
 */
#define BIT_FILL( _ptr, _index, _assembled ) do { \
		uint64_t *__ptr = &(_ptr)->u.qwords[(_index)]; \
		uint64_t __assembled = (_assembled); \
		*__ptr = cpu_to_BIT64 ( __assembled ); \
	} while ( 0 )

/* Fill a qword from 1..6 ( field, value ) pairs; the qword index is
 * derived from the first named field.
 */

#define BIT_FILL_1( _ptr, _field1, ... ) \
	BIT_FILL ( _ptr, QWORD_OFFSET ( _ptr, _field1 ), \
		   BIT_ASSEMBLE_1 ( _ptr, QWORD_OFFSET ( _ptr, _field1 ), \
				    _field1, __VA_ARGS__ ) )

#define BIT_FILL_2( _ptr, _field1, ... ) \
	BIT_FILL ( _ptr, QWORD_OFFSET ( _ptr, _field1 ), \
		   BIT_ASSEMBLE_2 ( _ptr, QWORD_OFFSET ( _ptr, _field1 ), \
				    _field1, __VA_ARGS__ ) )

#define BIT_FILL_3( _ptr, _field1, ... ) \
	BIT_FILL ( _ptr, QWORD_OFFSET ( _ptr, _field1 ), \
		   BIT_ASSEMBLE_3 ( _ptr, QWORD_OFFSET ( _ptr, _field1 ), \
				    _field1, __VA_ARGS__ ) )

#define BIT_FILL_4( _ptr, _field1, ... ) \
	BIT_FILL ( _ptr, QWORD_OFFSET ( _ptr, _field1 ), \
		   BIT_ASSEMBLE_4 ( _ptr, QWORD_OFFSET ( _ptr, _field1 ), \
				    _field1, __VA_ARGS__ ) )

#define BIT_FILL_5( _ptr, _field1, ... ) \
	BIT_FILL ( _ptr, QWORD_OFFSET ( _ptr, _field1 ), \
		   BIT_ASSEMBLE_5 ( _ptr, QWORD_OFFSET ( _ptr, _field1 ), \
				    _field1, __VA_ARGS__ ) )

#define BIT_FILL_6( _ptr, _field1, ... ) \
	BIT_FILL ( _ptr, QWORD_OFFSET ( _ptr, _field1 ), \
		   BIT_ASSEMBLE_6 ( _ptr, QWORD_OFFSET ( _ptr, _field1 ), \
				    _field1, __VA_ARGS__ ) )

/**
 * Get pointer to the raw (device-endian) qword containing a named field.
 *
 * NOTE(review): the GNU statement-expression "( { ... } )" wrapper and
 * the "__ptr" result line were lost in extraction and have been
 * restored here — confirm against upstream.
 */
#define BIT_QWORD_PTR( _ptr, _field ) \
	( { \
		unsigned int __index = QWORD_OFFSET ( _ptr, _field ); \
		uint64_t *__ptr = &(_ptr)->u.qwords[__index]; \
		__ptr; \
	} )

/**
 * Extract a named field as a native-endian 64-bit value.
 *
 * NOTE(review): the statement-expression wrapper, the "__value >>="
 * line and the "__value" result line were lost in extraction and have
 * been restored here — confirm against upstream.
 */
#define BIT_GET64( _ptr, _field ) \
	( { \
		unsigned int __index = QWORD_OFFSET ( _ptr, _field ); \
		uint64_t *__ptr = &(_ptr)->u.qwords[__index]; \
		uint64_t __value = BIT64_to_cpu ( *__ptr ); \
		__value >>= \
			QWORD_BIT_SHIFT ( _ptr, __index, _field ); \
		__value &= BIT_MASK ( _ptr, _field ); \
		__value; \
	} )

/** Extract a named field, truncated to unsigned long */
#define BIT_GET( _ptr, _field ) \
	( ( unsigned long ) BIT_GET64 ( _ptr, _field ) )

/**
 * Set a named field to a value (read-modify-write of its qword).
 *
 * NOTE(review): the "__shift ) );" continuation and the
 * "} while ( 0 )" closer were lost in extraction and have been
 * restored here — confirm against upstream.
 */
#define BIT_SET( _ptr, _field, _value ) do { \
		unsigned int __index = QWORD_OFFSET ( _ptr, _field ); \
		uint64_t *__ptr = &(_ptr)->u.qwords[__index]; \
		unsigned int __shift = \
			QWORD_BIT_SHIFT ( _ptr, __index, _field ); \
		uint64_t __value = (_value); \
		*__ptr &= cpu_to_BIT64 ( ~( BIT_MASK ( _ptr, _field ) << \
					    __shift ) ); \
		*__ptr |= cpu_to_BIT64 ( __value << __shift ); \
	} while ( 0 )

#endif /* _IPXE_PSEUDOBIT_H */