/* SPDX-License-Identifier: LGPL-2.1-or-later */
#pragma once
#if !SD_BOOT
# include <assert.h>
#endif
#include <limits.h>
#include <stdalign.h>
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
/* Temporarily disable some warnings */
#define DISABLE_WARNING_DEPRECATED_DECLARATIONS \
_Pragma("GCC diagnostic push"); \
_Pragma("GCC diagnostic ignored \"-Wdeprecated-declarations\"")
#define DISABLE_WARNING_FORMAT_NONLITERAL \
_Pragma("GCC diagnostic push"); \
_Pragma("GCC diagnostic ignored \"-Wformat-nonliteral\"")
#define DISABLE_WARNING_MISSING_PROTOTYPES \
_Pragma("GCC diagnostic push"); \
_Pragma("GCC diagnostic ignored \"-Wmissing-prototypes\"")
#define DISABLE_WARNING_NONNULL \
_Pragma("GCC diagnostic push"); \
_Pragma("GCC diagnostic ignored \"-Wnonnull\"")
#define DISABLE_WARNING_SHADOW \
_Pragma("GCC diagnostic push"); \
_Pragma("GCC diagnostic ignored \"-Wshadow\"")
#define DISABLE_WARNING_STRINGOP_OVERREAD \
_Pragma("GCC diagnostic push"); \
_Pragma("GCC diagnostic ignored \"-Wstringop-overread\"")
#define DISABLE_WARNING_INCOMPATIBLE_POINTER_TYPES \
_Pragma("GCC diagnostic push"); \
_Pragma("GCC diagnostic ignored \"-Wincompatible-pointer-types\"")
#define DISABLE_WARNING_TYPE_LIMITS \
_Pragma("GCC diagnostic push"); \
_Pragma("GCC diagnostic ignored \"-Wtype-limits\"")
#define DISABLE_WARNING_ADDRESS \
_Pragma("GCC diagnostic push"); \
_Pragma("GCC diagnostic ignored \"-Waddress\"")
#define REENABLE_WARNING \
_Pragma("GCC diagnostic pop")
#define _align_(x) __attribute__((__aligned__(x)))
#define _alignas_(x) __attribute__((__aligned__(alignof(x))))
#define _alignptr_ __attribute__((__aligned__(sizeof(void *))))
#define _cleanup_(x) __attribute__((__cleanup__(x)))
#define _const_ __attribute__((__const__))
#define _deprecated_ __attribute__((__deprecated__))
#define _destructor_ __attribute__((__destructor__))
#define _hidden_ __attribute__((__visibility__("hidden")))
#define _likely_(x) (__builtin_expect(!!(x), 1))
#define _malloc_ __attribute__((__malloc__))
#define _noinline_ __attribute__((noinline))
#define _noreturn_ _Noreturn
#define _packed_ __attribute__((__packed__))
#define _printf_(a, b) __attribute__((__format__(printf, a, b)))
#define _public_ __attribute__((__visibility__("default")))
#define _pure_ __attribute__((__pure__))
#define _retain_ __attribute__((__retain__))
#define _returns_nonnull_ __attribute__((__returns_nonnull__))
#define _section_(x) __attribute__((__section__(x)))
#define _sentinel_ __attribute__((__sentinel__))
#define _unlikely_(x) (__builtin_expect(!!(x), 0))
#define _unused_ __attribute__((__unused__))
#define _used_ __attribute__((__used__))
#define _warn_unused_result_ __attribute__((__warn_unused_result__))
#define _weak_ __attribute__((__weak__))
#define _weakref_(x) __attribute__((__weakref__(#x)))
#ifdef __clang__
# define _alloc_(...)
#else
# define _alloc_(...) __attribute__((__alloc_size__(__VA_ARGS__)))
#endif
#if __GNUC__ >= 7 || (defined(__clang__) && __clang_major__ >= 10)
# define _fallthrough_ __attribute__((__fallthrough__))
#else
# define _fallthrough_
#endif
#define XSTRINGIFY(x) #x
#define STRINGIFY(x) XSTRINGIFY(x)
#ifndef __COVERITY__
# define VOID_0 ((void)0)
#else
# define VOID_0 ((void*)0)
#endif
#define ELEMENTSOF(x) \
(__builtin_choose_expr( \
!__builtin_types_compatible_p(typeof(x), typeof(&*(x))), \
sizeof(x)/sizeof((x)[0]), \
VOID_0))
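/* Example (illustrative; "table" and "p" are hypothetical names): ELEMENTSOF() yields the number of elements
 * of a real array, and degrades to a void expression for pointers, so accidental use on a decayed array does
 * not compile:
 *
 *         static const int table[] = { 1, 2, 3 };
 *         size_t n = ELEMENTSOF(table);        (n == 3)
 *
 *         const int *p = table;
 *         size_t k = ELEMENTSOF(p);            (does not compile: a void expression is used as a value)
 */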
#define XCONCATENATE(x, y) x ## y
#define CONCATENATE(x, y) XCONCATENATE(x, y)
#if SD_BOOT
_noreturn_ void efi_assert(const char *expr, const char *file, unsigned line, const char *function);
#ifdef NDEBUG
#define assert(expr) ({ if (!(expr)) __builtin_unreachable(); })
#define assert_not_reached() __builtin_unreachable()
#else
#define assert(expr) ({ _likely_(expr) ? VOID_0 : efi_assert(#expr, __FILE__, __LINE__, __func__); })
#define assert_not_reached() efi_assert("Code should not be reached", __FILE__, __LINE__, __func__)
#endif
#define static_assert _Static_assert
#define assert_se(expr) ({ _likely_(expr) ? VOID_0 : efi_assert(#expr, __FILE__, __LINE__, __func__); })
#endif
/* This passes the argument through after (if asserts are enabled) checking that it is not null. */
#define ASSERT_PTR(expr) _ASSERT_PTR(expr, UNIQ_T(_expr_, UNIQ), assert)
#define ASSERT_SE_PTR(expr) _ASSERT_PTR(expr, UNIQ_T(_expr_, UNIQ), assert_se)
#define _ASSERT_PTR(expr, var, check) \
({ \
typeof(expr) var = (expr); \
check(var); \
var; \
})
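/* Example (illustrative; get_name() is a hypothetical function returning char*): ASSERT_PTR() evaluates its
 * argument exactly once, asserts that the result is non-NULL and then yields it, so checking and assigning
 * can be done in a single step:
 *
 *         char *name = ASSERT_PTR(get_name());
 *
 * ASSERT_SE_PTR() is identical except that the check is performed with assert_se(). */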
#define ASSERT_NONNEG(expr) \
({ \
typeof(expr) _expr_ = (expr), _zero = 0; \
assert(_expr_ >= _zero); \
_expr_; \
})
#define ASSERT_SE_NONNEG(expr) \
({ \
typeof(expr) _expr_ = (expr), _zero = 0; \
assert_se(_expr_ >= _zero); \
_expr_; \
})
#define assert_cc(expr) static_assert(expr, #expr)
#define UNIQ_T(x, uniq) CONCATENATE(__unique_prefix_, CONCATENATE(x, uniq))
#define UNIQ __COUNTER__
/* Note that this works differently from pthread_once(): this macro does
 * not synchronize code execution, i.e. code that is conditionalized on
 * this macro may run concurrently with all other code conditionalized
 * the same way; no ordering or completion is enforced. */
#define ONCE __ONCE(UNIQ_T(_once_, UNIQ))
#define __ONCE(o) \
({ \
static bool (o) = false; \
__atomic_exchange_n(&(o), true, __ATOMIC_SEQ_CST); \
})
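/* Example (illustrative; log_warning() merely stands in for whatever should happen only once): ONCE
 * evaluates to false the first time a given expansion of it is reached and to true on every later pass:
 *
 *         if (!ONCE)
 *                 log_warning("This is reported only once per call site.");
 *
 * The exchange itself is atomic, so only one thread observes false, but nothing waits for that thread's
 * conditional code to complete, which is what distinguishes this from pthread_once(). */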
#define U64_KB UINT64_C(1024)
#define U64_MB (UINT64_C(1024) * U64_KB)
#define U64_GB (UINT64_C(1024) * U64_MB)
#undef MAX
#define MAX(a, b) __MAX(UNIQ, (a), UNIQ, (b))
#define __MAX(aq, a, bq, b) \
({ \
const typeof(a) UNIQ_T(A, aq) = (a); \
const typeof(b) UNIQ_T(B, bq) = (b); \
UNIQ_T(A, aq) > UNIQ_T(B, bq) ? UNIQ_T(A, aq) : UNIQ_T(B, bq); \
})
#define IS_UNSIGNED_INTEGER_TYPE(type) \
(__builtin_types_compatible_p(typeof(type), unsigned char) || \
__builtin_types_compatible_p(typeof(type), unsigned short) || \
__builtin_types_compatible_p(typeof(type), unsigned) || \
__builtin_types_compatible_p(typeof(type), unsigned long) || \
__builtin_types_compatible_p(typeof(type), unsigned long long))
#define IS_SIGNED_INTEGER_TYPE(type) \
(__builtin_types_compatible_p(typeof(type), signed char) || \
__builtin_types_compatible_p(typeof(type), signed short) || \
__builtin_types_compatible_p(typeof(type), signed) || \
__builtin_types_compatible_p(typeof(type), signed long) || \
__builtin_types_compatible_p(typeof(type), signed long long))
/* Evaluates to (void) if _A or _B are not constant or of different types (being integers of different sizes
* is also OK as long as the signedness matches) */
#define CONST_MAX(_A, _B) \
(__builtin_choose_expr( \
__builtin_constant_p(_A) && \
__builtin_constant_p(_B) && \
(__builtin_types_compatible_p(typeof(_A), typeof(_B)) || \
(IS_UNSIGNED_INTEGER_TYPE(_A) && IS_UNSIGNED_INTEGER_TYPE(_B)) || \
(IS_SIGNED_INTEGER_TYPE(_A) && IS_SIGNED_INTEGER_TYPE(_B))), \
((_A) > (_B)) ? (_A) : (_B), \
VOID_0))
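/* Example: CONST_MAX(3U, 7U) evaluates to 7U at compile time. If either argument is not a compile-time
 * constant, or the two integer types differ in signedness, the macro deliberately degrades to a void
 * expression, so any attempt to use the result becomes a compile error rather than a silent surprise. */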
/* takes two types and returns the size of the larger one */
#define MAXSIZE(A, B) (sizeof(union _packed_ { typeof(A) a; typeof(B) b; }))
#define MAX3(x, y, z) \
({ \
const typeof(x) _c = MAX(x, y); \
MAX(_c, z); \
})
#define MAX4(x, y, z, a) \
({ \
const typeof(x) _d = MAX3(x, y, z); \
MAX(_d, a); \
})
#undef MIN
#define MIN(a, b) __MIN(UNIQ, (a), UNIQ, (b))
#define __MIN(aq, a, bq, b) \
({ \
const typeof(a) UNIQ_T(A, aq) = (a); \
const typeof(b) UNIQ_T(B, bq) = (b); \
UNIQ_T(A, aq) < UNIQ_T(B, bq) ? UNIQ_T(A, aq) : UNIQ_T(B, bq); \
})
/* evaluates to (void) if _A or _B are not constant or of different types */
#define CONST_MIN(_A, _B) \
(__builtin_choose_expr( \
__builtin_constant_p(_A) && \
__builtin_constant_p(_B) && \
__builtin_types_compatible_p(typeof(_A), typeof(_B)), \
((_A) < (_B)) ? (_A) : (_B), \
VOID_0))
#define MIN3(x, y, z) \
({ \
const typeof(x) _c = MIN(x, y); \
MIN(_c, z); \
})
/* Returns true if the passed integer is a positive power of two */
#define CONST_ISPOWEROF2(x) \
((x) > 0 && ((x) & ((x) - 1)) == 0)
#define ISPOWEROF2(x) \
__builtin_choose_expr( \
__builtin_constant_p(x), \
CONST_ISPOWEROF2(x), \
({ \
const typeof(x) _x = (x); \
CONST_ISPOWEROF2(_x); \
}))
#define ADD_SAFE(ret, a, b) (!__builtin_add_overflow(a, b, ret))
#define INC_SAFE(a, b) __INC_SAFE(UNIQ, a, b)
#define __INC_SAFE(q, a, b) \
({ \
const typeof(a) UNIQ_T(A, q) = (a); \
ADD_SAFE(UNIQ_T(A, q), *UNIQ_T(A, q), b); \
})
#define SUB_SAFE(ret, a, b) (!__builtin_sub_overflow(a, b, ret))
#define DEC_SAFE(a, b) __DEC_SAFE(UNIQ, a, b)
#define __DEC_SAFE(q, a, b) \
({ \
const typeof(a) UNIQ_T(A, q) = (a); \
SUB_SAFE(UNIQ_T(A, q), *UNIQ_T(A, q), b); \
})
#define MUL_SAFE(ret, a, b) (!__builtin_mul_overflow(a, b, ret))
#define MUL_ASSIGN_SAFE(a, b) __MUL_ASSIGN_SAFE(UNIQ, a, b)
#define __MUL_ASSIGN_SAFE(q, a, b) \
({ \
const typeof(a) UNIQ_T(A, q) = (a); \
MUL_SAFE(UNIQ_T(A, q), *UNIQ_T(A, q), b); \
})
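/* Example (illustrative; grow() is a hypothetical function, <errno.h> is assumed for EOVERFLOW): these
 * helpers wrap the __builtin_*_overflow() builtins and return false instead of silently wrapping around.
 * INC_SAFE(size, add) performs *size += add and MUL_ASSIGN_SAFE(size, factor) performs *size *= factor:
 *
 *         static int grow(uint64_t *size, uint64_t add, uint64_t factor) {
 *                 if (!INC_SAFE(size, add))
 *                         return -EOVERFLOW;
 *                 if (!MUL_ASSIGN_SAFE(size, factor))
 *                         return -EOVERFLOW;
 *                 return 0;
 *         }
 */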
#define LESS_BY(a, b) __LESS_BY(UNIQ, (a), UNIQ, (b))
#define __LESS_BY(aq, a, bq, b) \
({ \
const typeof(a) UNIQ_T(A, aq) = (a); \
const typeof(b) UNIQ_T(B, bq) = (b); \
UNIQ_T(A, aq) > UNIQ_T(B, bq) ? UNIQ_T(A, aq) - UNIQ_T(B, bq) : 0; \
})
#define CMP(a, b) __CMP(UNIQ, (a), UNIQ, (b))
#define __CMP(aq, a, bq, b) \
({ \
const typeof(a) UNIQ_T(A, aq) = (a); \
const typeof(b) UNIQ_T(B, bq) = (b); \
UNIQ_T(A, aq) < UNIQ_T(B, bq) ? -1 : \
UNIQ_T(A, aq) > UNIQ_T(B, bq) ? 1 : 0; \
})
#undef CLAMP
#define CLAMP(x, low, high) __CLAMP(UNIQ, (x), UNIQ, (low), UNIQ, (high))
#define __CLAMP(xq, x, lowq, low, highq, high) \
({ \
const typeof(x) UNIQ_T(X, xq) = (x); \
const typeof(low) UNIQ_T(LOW, lowq) = (low); \
const typeof(high) UNIQ_T(HIGH, highq) = (high); \
UNIQ_T(X, xq) > UNIQ_T(HIGH, highq) ? \
UNIQ_T(HIGH, highq) : \
UNIQ_T(X, xq) < UNIQ_T(LOW, lowq) ? \
UNIQ_T(LOW, lowq) : \
UNIQ_T(X, xq); \
})
/* [(x + y - 1) / y] can overflow in the intermediate addition even when the
 * rounded-up result itself fits in the given type. Therefore, we use
 * [x / y + !!(x % y)] instead. Note that on real CPUs a division instruction
 * returns both the quotient and the remainder, so both forms should be
 * equally fast. */
#define DIV_ROUND_UP(x, y) __DIV_ROUND_UP(UNIQ, (x), UNIQ, (y))
#define __DIV_ROUND_UP(xq, x, yq, y) \
({ \
const typeof(x) UNIQ_T(X, xq) = (x); \
const typeof(y) UNIQ_T(Y, yq) = (y); \
(UNIQ_T(X, xq) / UNIQ_T(Y, yq) + !!(UNIQ_T(X, xq) % UNIQ_T(Y, yq))); \
})
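/* Worked example of the overflow noted above: with uint32_t x = UINT32_MAX and y = 2, the naive
 * (x + y - 1) / y wraps the addition around to 0 and yields 0, while x / y + !!(x % y) evaluates to
 * 2147483647 + 1 = 2147483648, the correct rounded-up quotient, without ever overflowing. */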
/* Rounds up x to the next multiple of y. Resolves to typeof(x) -1 in case of overflow */
#define __ROUND_UP(q, x, y) \
({ \
const typeof(y) UNIQ_T(A, q) = (y); \
const typeof(x) UNIQ_T(B, q) = DIV_ROUND_UP((x), UNIQ_T(A, q)); \
typeof(x) UNIQ_T(C, q); \
MUL_SAFE(&UNIQ_T(C, q), UNIQ_T(B, q), UNIQ_T(A, q)) ? UNIQ_T(C, q) : (typeof(x)) -1; \
})
#define ROUND_UP(x, y) __ROUND_UP(UNIQ, (x), (y))
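/* Examples: ROUND_UP(5, 4) == 8 and ROUND_UP(8, 4) == 8. If the rounded-up value does not fit the type of x,
 * e.g. ROUND_UP((uint8_t) 250, 16), the result saturates to (typeof(x)) -1 (here 255) instead of wrapping. */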
#define CASE_F_1(X) case X:
#define CASE_F_2(X, ...) case X: CASE_F_1( __VA_ARGS__)
#define CASE_F_3(X, ...) case X: CASE_F_2( __VA_ARGS__)
#define CASE_F_4(X, ...) case X: CASE_F_3( __VA_ARGS__)
#define CASE_F_5(X, ...) case X: CASE_F_4( __VA_ARGS__)
#define CASE_F_6(X, ...) case X: CASE_F_5( __VA_ARGS__)
#define CASE_F_7(X, ...) case X: CASE_F_6( __VA_ARGS__)
#define CASE_F_8(X, ...) case X: CASE_F_7( __VA_ARGS__)
#define CASE_F_9(X, ...) case X: CASE_F_8( __VA_ARGS__)
#define CASE_F_10(X, ...) case X: CASE_F_9( __VA_ARGS__)
#define CASE_F_11(X, ...) case X: CASE_F_10( __VA_ARGS__)
#define CASE_F_12(X, ...) case X: CASE_F_11( __VA_ARGS__)
#define CASE_F_13(X, ...) case X: CASE_F_12( __VA_ARGS__)
#define CASE_F_14(X, ...) case X: CASE_F_13( __VA_ARGS__)
#define CASE_F_15(X, ...) case X: CASE_F_14( __VA_ARGS__)
#define CASE_F_16(X, ...) case X: CASE_F_15( __VA_ARGS__)
#define CASE_F_17(X, ...) case X: CASE_F_16( __VA_ARGS__)
#define CASE_F_18(X, ...) case X: CASE_F_17( __VA_ARGS__)
#define CASE_F_19(X, ...) case X: CASE_F_18( __VA_ARGS__)
#define CASE_F_20(X, ...) case X: CASE_F_19( __VA_ARGS__)
#define GET_CASE_F(_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13,_14,_15,_16,_17,_18,_19,_20,NAME,...) NAME
#define FOR_EACH_MAKE_CASE(...) \
GET_CASE_F(__VA_ARGS__,CASE_F_20,CASE_F_19,CASE_F_18,CASE_F_17,CASE_F_16,CASE_F_15,CASE_F_14,CASE_F_13,CASE_F_12,CASE_F_11, \
CASE_F_10,CASE_F_9,CASE_F_8,CASE_F_7,CASE_F_6,CASE_F_5,CASE_F_4,CASE_F_3,CASE_F_2,CASE_F_1) \
(__VA_ARGS__)
#define IN_SET(x, first, ...) \
({ \
bool _found = false; \
/* If the build breaks in the line below, you need to extend the case macros. We use typeof(+x) \
* here to widen the type of x if it is a bit-field as this would otherwise be illegal. */ \
static const typeof(+x) __assert_in_set[] _unused_ = { first, __VA_ARGS__ }; \
assert_cc(ELEMENTSOF(__assert_in_set) <= 20); \
switch (x) { \
FOR_EACH_MAKE_CASE(first, __VA_ARGS__) \
_found = true; \
break; \
default: \
break; \
} \
_found; \
})
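/* Example (illustrative; assumes <sys/stat.h> for the S_IF* constants): IN_SET() expands to a switch
 * statement, so every element after the first argument must be a compile-time constant (and there may be at
 * most 20 of them), while x itself is evaluated only once:
 *
 *         static bool is_supported(mode_t m) {
 *                 return IN_SET(m & S_IFMT, S_IFREG, S_IFDIR, S_IFLNK);
 *         }
 */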
/* Takes inspiration from Rust's Option::take() method: reads and returns a pointer, but at the same time
* resets it to NULL. See: https://doc.rust-lang.org/std/option/enum.Option.html#method.take */
#define TAKE_GENERIC(var, type, nullvalue) \
({ \
type *_pvar_ = &(var); \
type _var_ = *_pvar_; \
type _nullvalue_ = nullvalue; \
*_pvar_ = _nullvalue_; \
_var_; \
})
#define TAKE_PTR_TYPE(ptr, type) TAKE_GENERIC(ptr, type, NULL)
#define TAKE_PTR(ptr) TAKE_PTR_TYPE(ptr, typeof(ptr))
#define TAKE_STRUCT_TYPE(s, type) TAKE_GENERIC(s, type, {})
#define TAKE_STRUCT(s) TAKE_STRUCT_TYPE(s, typeof(s))
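/* Example (illustrative; dup_string() is hypothetical and _cleanup_free_ is the freep-based helper defined
 * elsewhere in this tree): TAKE_PTR() is the usual way to hand ownership out of a _cleanup_ scope:
 *
 *         static int dup_string(const char *src, char **ret) {
 *                 _cleanup_free_ char *s = strdup(src);
 *                 if (!s)
 *                         return -ENOMEM;
 *                 *ret = TAKE_PTR(s);          (s is reset to NULL, so the automatic free() becomes a no-op)
 *                 return 0;
 *         }
 */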
/*
* STRLEN - return the length of a string literal, minus the trailing NUL byte.
* Contrary to strlen(), this is a constant expression.
* @x: a string literal.
*/
#define STRLEN(x) (sizeof(""x"") - sizeof(typeof(x[0])))
#define mfree(memory) \
({ \
free(memory); \
(typeof(memory)) NULL; \
})
static inline size_t ALIGN_TO(size_t l, size_t ali) {
assert(ISPOWEROF2(ali));
if (l > SIZE_MAX - (ali - 1))
return SIZE_MAX; /* indicate overflow */
return ((l + (ali - 1)) & ~(ali - 1));
}
static inline uint64_t ALIGN_TO_U64(uint64_t l, uint64_t ali) {
assert(ISPOWEROF2(ali));
if (l > UINT64_MAX - (ali - 1))
return UINT64_MAX; /* indicate overflow */
return ((l + (ali - 1)) & ~(ali - 1));
}
static inline size_t ALIGN_DOWN(size_t l, size_t ali) {
assert(ISPOWEROF2(ali));
return l & ~(ali - 1);
}
static inline uint64_t ALIGN_DOWN_U64(uint64_t l, uint64_t ali) {
assert(ISPOWEROF2(ali));
return l & ~(ali - 1);
}
static inline size_t ALIGN_OFFSET(size_t l, size_t ali) {
assert(ISPOWEROF2(ali));
return l & (ali - 1);
}
static inline uint64_t ALIGN_OFFSET_U64(uint64_t l, uint64_t ali) {
assert(ISPOWEROF2(ali));
return l & (ali - 1);
}
#define ALIGN2(l) ALIGN_TO(l, 2)
#define ALIGN4(l) ALIGN_TO(l, 4)
#define ALIGN8(l) ALIGN_TO(l, 8)
#define ALIGN2_PTR(p) ((void*) ALIGN2((uintptr_t) p))
#define ALIGN4_PTR(p) ((void*) ALIGN4((uintptr_t) p))
#define ALIGN8_PTR(p) ((void*) ALIGN8((uintptr_t) p))
#define ALIGN(l) ALIGN_TO(l, sizeof(void*))
#define ALIGN_PTR(p) ((void*) ALIGN((uintptr_t) (p)))
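/* Examples: ALIGN_TO(13, 8) == 16, ALIGN_TO(16, 8) == 16, ALIGN_DOWN(13, 8) == 8, ALIGN_OFFSET(13, 8) == 5,
 * and ALIGN(3) rounds up to the platform's pointer size. The alignment must be a power of two; on overflow
 * ALIGN_TO()/ALIGN_TO_U64() return SIZE_MAX/UINT64_MAX so callers can detect the condition instead of
 * receiving a wrapped-around value. */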
/* Checks if the specified pointer is aligned as appropriate for the specific type */
#define IS_ALIGNED16(p) (((uintptr_t) p) % alignof(uint16_t) == 0)
#define IS_ALIGNED32(p) (((uintptr_t) p) % alignof(uint32_t) == 0)
#define IS_ALIGNED64(p) (((uintptr_t) p) % alignof(uint64_t) == 0)
/* Same as ALIGN_TO but callable in constant contexts. */
#define CONST_ALIGN_TO(l, ali) \
__builtin_choose_expr( \
__builtin_constant_p(l) && \
__builtin_constant_p(ali) && \
CONST_ISPOWEROF2(ali) && \
(l <= SIZE_MAX - (ali - 1)), /* overflow? */ \
((l) + (ali) - 1) & ~((ali) - 1), \
VOID_0)
/* Similar to ((t *) (void *) (p)) to cast a pointer. The macro asserts that the pointer has a suitable
* alignment for type "t". This exists for places where otherwise "-Wcast-align=strict" would issue a
* warning or if you want to assert that the cast gives a pointer of suitable alignment. */
#define CAST_ALIGN_PTR(t, p) \
({ \
const void *_p = (p); \
assert(((uintptr_t) _p) % alignof(t) == 0); \
(t *) _p; \
})
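/* Example (illustrative; buf is a hypothetical, suitably aligned byte buffer): instead of a bare cast that
 * -Wcast-align=strict would flag, the pointer's alignment is asserted at runtime before converting:
 *
 *         uint8_t buf[sizeof(uint32_t)] _alignas_(uint32_t);
 *         uint32_t *v = CAST_ALIGN_PTR(uint32_t, buf);
 */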
#define UPDATE_FLAG(orig, flag, b) \
((b) ? ((orig) | (flag)) : ((orig) & ~(flag)))
#define SET_FLAG(v, flag, b) \
(v) = UPDATE_FLAG(v, flag, b)
#define FLAGS_SET(v, flags) \
((~(v) & (flags)) == 0)
/* A wrapper for 'func' to return void.
* Only useful when a void-returning function is required by some API. */
#define DEFINE_TRIVIAL_DESTRUCTOR(name, type, func) \
static inline void name(type *p) { \
func(p); \
}
/* When func() returns the 'empty' value (NULL, -1, …) of the appropriate type, so that its result can be
 * assigned straight back */
#define DEFINE_TRIVIAL_CLEANUP_FUNC(type, func) \
static inline void func##p(type *p) { \
if (*p) \
*p = func(*p); \
}
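/* Example (illustrative; Object, object_new(), object_print() and object_unref() are hypothetical, with
 * object_unref() returning NULL): the generated function is meant to be used with the _cleanup_ attribute:
 *
 *         DEFINE_TRIVIAL_CLEANUP_FUNC(Object*, object_unref);          (defines object_unrefp())
 *
 *         static int use_object(void) {
 *                 _cleanup_(object_unrefp) Object *o = object_new();
 *                 if (!o)
 *                         return -ENOMEM;
 *                 return object_print(o);                              (object_unref(o) runs on return)
 *         }
 *
 * Functions that do not return their argument type, such as fclose(), go through the _FULL variants below. */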
/* When func() doesn't return the appropriate type, set variable to empty afterwards.
* The func() may be provided by a dynamically loaded shared library, hence add an assertion. */
#define DEFINE_TRIVIAL_CLEANUP_FUNC_FULL(type, func, empty) \
static inline void func##p(type *p) { \
if (*p != (empty)) { \
DISABLE_WARNING_ADDRESS; \
assert(func); \
REENABLE_WARNING; \
func(*p); \
*p = (empty); \
} \
}
/* When func() doesn't return the appropriate type, and is also a macro, set variable to empty afterwards. */
#define DEFINE_TRIVIAL_CLEANUP_FUNC_FULL_MACRO(type, func, empty) \
static inline void func##p(type *p) { \
if (*p != (empty)) { \
func(*p); \
*p = (empty); \
} \
}
/* Restriction/bug (see below) was fixed in GCC 15 and clang 19. */
#if __GNUC__ >= 15 || (defined(__clang__) && __clang_major__ >= 19)
#define DECLARE_FLEX_ARRAY(type, name) type name[]
#else
/* Declare a flexible array usable in a union.
* This is essentially a work-around for a pointless constraint in C99
* and might go away in some future version of the standard.
*
* See https://git.kernel.org/cgit/linux/kernel/git/torvalds/linux.git/commit/?id=3080ea5553cc909b000d1f1d964a9041962f2c5b
*/
#define DECLARE_FLEX_ARRAY(type, name) \
struct { \
dummy_t __empty__ ## name; \
type name[]; \
}
#endif
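/* Example (illustrative): this makes it possible to place a flexible array member inside a union, e.g. to
 * expose both a raw byte view and a typed view of a trailing variable-size payload:
 *
 *         struct message {
 *                 size_t length;
 *                 union {
 *                         DECLARE_FLEX_ARRAY(uint8_t, bytes);
 *                         DECLARE_FLEX_ARRAY(uint32_t, words);
 *                 };
 *         };
 */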
/* Declares an ELF read-only string section that does not occupy memory at runtime. */
#define DECLARE_NOALLOC_SECTION(name, text) \
asm(".pushsection " name ",\"S\"\n\t" \
".ascii " STRINGIFY(text) "\n\t" \
".popsection\n")
#ifdef SBAT_DISTRO
#define DECLARE_SBAT(text) DECLARE_NOALLOC_SECTION(".sbat", text)
#else
#define DECLARE_SBAT(text)
#endif
#define sizeof_field(struct_type, member) sizeof(((struct_type *) 0)->member)
#define endoffsetof_field(struct_type, member) (offsetof(struct_type, member) + sizeof_field(struct_type, member))
#define voffsetof(v, member) offsetof(typeof(v), member)
#define _FOREACH_ARRAY(i, array, num, m, end) \
for (typeof(array[0]) *i = (array), *end = ({ \
typeof(num) m = (num); \
(i && m > 0) ? i + m : NULL; \
}); end && i < end; i++)
#define FOREACH_ARRAY(i, array, num) \
_FOREACH_ARRAY(i, array, num, UNIQ_T(m, UNIQ), UNIQ_T(end, UNIQ))
#define FOREACH_ELEMENT(i, array) \
FOREACH_ARRAY(i, array, ELEMENTSOF(array))
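/* Example (illustrative; sum() is hypothetical): FOREACH_ARRAY() iterates with a typed element pointer and
 * tolerates a NULL array or a zero count; FOREACH_ELEMENT() does the same for arrays whose size is known at
 * compile time:
 *
 *         static uint64_t sum(const uint64_t *values, size_t n) {
 *                 uint64_t total = 0;
 *                 FOREACH_ARRAY(v, values, n)
 *                         total += *v;
 *                 return total;
 *         }
 */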
#define PTR_TO_SIZE(p) ((size_t) ((uintptr_t) (p)))
#define SIZE_TO_PTR(u) ((void *) ((uintptr_t) (u)))