blob: bd763cc6141cd1b8670affdde006442d3a35ea0c [file] [log] [blame]
Soby Mathewb4c6df42022-11-09 11:13:29 +00001/*
2 * SPDX-License-Identifier: BSD-3-Clause
3 * SPDX-FileCopyrightText: Copyright TF-RMM Contributors.
4 * SPDX-FileCopyrightText: Copyright NVIDIA Corporation.
5 */
6
7#ifndef UTILS_DEF_H
8#define UTILS_DEF_H
9
10#if !(defined(__ASSEMBLER__) || defined(__LINKER__))
11#include <stdint.h>
12#endif
13
/*
 * For those constants to be shared between C and other sources, apply a 'U',
 * 'UL', 'ULL', 'L' or 'LL' suffix to the argument only in C, to avoid
 * undefined or unintended behaviour.
 *
 * The GNU assembler and linker do not support these suffixes (it causes the
 * build process to fail) therefore the suffix is omitted when used in linker
 * scripts and assembler files.
 */
#if defined(__ASSEMBLER__) || defined(__LINKER__)
# define   U(_x)	(_x)
# define  UL(_x)	(_x)
# define ULL(_x)	(_x)
# define   L(_x)	(_x)
# define  LL(_x)	(_x)
#else
/*
 * U() cannot use token pasting ('U' is a valid suffix only for integer
 * constants of type unsigned int already), so it casts instead. The outer
 * parentheses keep the expansion a single primary expression, consistent
 * with the paste-based variants below (MISRA C:2012 Rule 20.7).
 */
# define   U(_x)	((unsigned int)(_x))
# define  UL(_x)	(_x##UL)
# define ULL(_x)	(_x##ULL)
# define   L(_x)	(_x##L)
# define  LL(_x)	(_x##LL)
#endif /* __ASSEMBLER__ */
36
37/* Short forms for commonly used attributes */
38#define __dead2 __attribute__((__noreturn__))
39#define __deprecated __attribute__((__deprecated__))
40#define __packed __attribute__((__packed__))
41#define __used __attribute__((__used__))
42#define __unused __attribute__((__unused__))
43#define __aligned(x) __attribute__((__aligned__(x)))
44#define __section(x) __attribute__((__section__(x)))
45
46#define __printflike(fmtarg, firstvararg) \
47 __attribute__((__format__ (__printf__, fmtarg, firstvararg)))
48
/*
 * The round_up() macro rounds up a value to the given boundary in a
 * type-agnostic yet type-safe manner. The boundary must be a power of two.
 * In other words, it computes the smallest multiple of boundary which is
 * greater than or equal to value.
 *
 * round_down() is similar but rounds the value down instead.
 */

/*
 * Helper: (boundary - 1) converted to the type of 'value', so the bitwise
 * operations in round_up()/round_down() are performed at the width (and
 * signedness) of 'value' rather than that of 'boundary'. Only the type of
 * 'value' is used here; the expression itself is not evaluated.
 */
#define round_boundary(value, boundary) \
	((__typeof__(value))((boundary) - 1))

/* NOTE: 'value' is expanded twice; avoid side effects in the argument. */
#define round_up(value, boundary) \
	((((value) - 1) | round_boundary(value, boundary)) + 1)

#define round_down(value, boundary) \
	((value) & ~round_boundary(value, boundary))
65
/*
 * Compute the number of elements in the given array.
 * NOTE: 'a' must be an actual array, not a pointer; this macro performs no
 * type check (a pointer argument would silently yield a wrong count).
 */
#define ARRAY_SIZE(a) \
	(sizeof(a) / sizeof((a)[0]))

/*
 * As ARRAY_SIZE(), but additionally rejects pointer arguments at compile
 * time: CHECK_TYPE_IS_ARRAY() evaluates to zero in regular builds, so it
 * does not alter the computed element count.
 */
#define ARRAY_LEN(_a) \
	((sizeof(_a) / sizeof((_a)[0])) + CHECK_TYPE_IS_ARRAY(_a))
Soby Mathewb4c6df42022-11-09 11:13:29 +000072
/*
 * Macro checks types of array and variable/value to write
 * and reports compilation error if they mismatch.
 * Expands to a _Static_assert declaration, so it is usable where a
 * declaration is allowed (e.g. at the top of a statement expression).
 */
#define CHECK_ARRAY_TYPE(_a, _v) \
	_Static_assert(__builtin_types_compatible_p(typeof((_a)[0]), typeof(_v)), \
		"array type mismatch")
80
/*
 * Array read/write macros with boundary and types checks
 * _a: name of array
 * _v: variable/value to write
 * _i: index; NOTE: expanded more than once, so the index expression must
 *     be free of side effects.
 *
 * Both macros are GNU statement expressions. The element type of '_a' and
 * the type of '_v' must match (compile-time check), and an out-of-bounds
 * index calls panic() (declared elsewhere) at run time.
 */
#define SAFE_ARRAY_READ(_a, _i, _v) \
({ \
	CHECK_ARRAY_TYPE(_a, _v); \
	if ((_i) >= ARRAY_SIZE(_a)) { \
		panic(); \
	} \
	(_v) = (_a)[_i]; \
})

#define SAFE_ARRAY_WRITE(_a, _i, _v) \
({ \
	CHECK_ARRAY_TYPE(_a, _v); \
	if ((_i) >= ARRAY_SIZE(_a)) { \
		panic(); \
	} \
	(_a)[_i] = (_v); \
})
104
#ifndef CBMC
/*
 * Compile-time assertion usable at file scope: when '_condition' is false
 * this declares an array type with negative size, which fails to compile.
 */
#define COMPILER_ASSERT(_condition) \
	extern char compiler_assert[(_condition) ? 1 : -1]

/*
 * If _expr is false, this will result in a compile time error as it tries to
 * define a bitfield of size -1 in that case. Otherwise, it will define a
 * bitfield of size 0, which is valid, and not create a compiler warning.
 *
 * The return value is only relevant when the compilation succeeds, and by
 * subtracting the size of the same struct, this should always return 0 as a
 * value and can be included in other expressions.
 */
#define COMPILER_ASSERT_ZERO(_expr) (sizeof(struct { char: (-!(_expr)); }) \
	- sizeof(struct { char: 0; }))

/*
 * Evaluates to 0 when '_v' is an actual array; fails to compile when '_v'
 * is a pointer (i.e. when typeof(_v) is compatible with typeof(&(_v)[0])).
 */
#define CHECK_TYPE_IS_ARRAY(_v) \
	COMPILER_ASSERT_ZERO(!__builtin_types_compatible_p(typeof(_v), \
		typeof(&((_v)[0]))))
#else /* CBMC */
#define COMPILER_ASSERT(_condition) extern char disabled_compiler_assert[1]
/*
 * COMPILER_ASSERT_ZERO()/CHECK_TYPE_IS_ARRAY() are used inside arithmetic
 * expressions (e.g. ARRAY_LEN()), so under CBMC they must expand to the
 * expression 0, not to a declaration (a declaration does not parse in
 * expression context). The value must be exactly 0: the previous expansion
 * of CHECK_TYPE_IS_ARRAY() to 1 made ARRAY_LEN() report one element more
 * in CBMC builds than in regular builds.
 */
#define COMPILER_ASSERT_ZERO(_expr) 0
#define CHECK_TYPE_IS_ARRAY(_v) 0
#endif /* CBMC */
Soby Mathewb4c6df42022-11-09 11:13:29 +0000129
/*
 * True when 'x' has at most one bit set. '(x) + UL(0)' promotes the operand
 * to unsigned long first. NOTE: this also evaluates true for x == 0; callers
 * that must exclude zero have to check for it separately.
 * 'x' is expanded twice; avoid side effects in the argument.
 */
#define IS_POWER_OF_TWO(x) \
	((((x) + UL(0)) & ((x) - UL(1))) == UL(0))

/* Compiler-level reordering barrier only; emits no CPU barrier instruction. */
#define COMPILER_BARRIER() __asm__ volatile ("" ::: "memory")
134
/*
 * True when '_size' (converted to unsigned long) is an exact multiple of
 * '_alignment'. Works for any non-zero alignment, not only powers of two.
 */
#define ALIGNED(_size, _alignment) \
	(((unsigned long)(_size) % (_alignment)) == UL(0))

/* True when '_addr' lies on a granule boundary (GRANULE_SIZE below). */
#define GRANULE_ALIGNED(_addr) ALIGNED((void *)(_addr), GRANULE_SIZE)

/*
 * True when '_addr' is not below the start of '_array' and is offset from it
 * by a whole number of elements.
 * NOTE(review): no upper-bound check is performed here — callers appear to
 * be expected to range-check the address separately; confirm at call sites.
 */
#define ALIGNED_TO_ARRAY(_addr, _array) \
	(((uintptr_t)(_addr) >= (uintptr_t)&(_array)[0]) && \
	 ((((uintptr_t)(_addr) - (uintptr_t)&(_array)[0]) % \
	 sizeof((_array)[0])) == UL(0)))

/* Granule size in bytes; GRANULE_SHIFT is defined elsewhere. */
#define GRANULE_SIZE (UL(1) << GRANULE_SHIFT)
/* Mask that clears the in-granule offset bits of an address. */
#define GRANULE_MASK (~(GRANULE_SIZE - 1U))
Soby Mathewb4c6df42022-11-09 11:13:29 +0000146
/*
 * Build-time feature switches. The MPAM()/SPE() wrappers emit their argument
 * list only when the corresponding feature is enabled, so feature-specific
 * code/fields can be written inline and compiled out otherwise.
 */
#define HAS_MPAM 0

#if HAS_MPAM
#define MPAM(_x...) _x
#else
#define MPAM(_x...)
#endif

#define HAS_SPE 0

#if HAS_SPE
#define SPE(_x...) _x
#else
#define SPE(_x...)
#endif
162
163#if !(defined(__ASSEMBLER__) || defined(__LINKER__))
164
/*
 * System register field definitions.
 *
 * For any register field we define:
 * - <register>_<field>_SHIFT
 *   The bit offset of the LSB of the field.
 * - <register>_<field>_WIDTH
 *   The width of the field in bits.
 *
 * For single bit fields, we define:
 * - <register>_<field>_BIT
 *   The in-place value of the field with the bit set.
 *
 * For multi-bit fields, we define:
 * - <register>_<field>_<enum>
 *   The in-place value of the field set to the value corresponding to the
 *   enumeration name.
 *
 * For any register field, we define:
 * - INPLACE(<register>_<field>, val)
 *   The in-place value of the field set to val, handling any necessary type
 *   promotion to avoid truncation of val.
 * - MASK(<register>_<field>)
 *   An in-place bitmask covering all bits of the field.
 * - EXTRACT(<register>_<field>, <register_value>)
 *   A macro to extract the value of a register field shifted down so the
 *   value can be evaluated directly.
 * - EXTRACT_BIT(<register>_<field>, <register_value>)
 *   A macro to extract the value of a register bit shifted down so the
 *   value can be evaluated directly.
 */

/* '(val) + UL(0)' promotes 'val' to unsigned long before the shift. */
#define INPLACE(regfield, val) \
	(((val) + UL(0)) << (regfield##_SHIFT))

/* NOTE: assumes a 64-bit unsigned long and a _WIDTH in the range 1..64. */
#define MASK(regfield) \
	((~0UL >> (64UL - (regfield##_WIDTH))) << (regfield##_SHIFT))

#define EXTRACT(regfield, reg) \
	(((reg) & MASK(regfield)) >> (regfield##_SHIFT))

#define EXTRACT_BIT(regfield, reg) \
	(((reg) >> (regfield##_SHIFT)) & UL(1))
207
/*
 * Generates an unsigned long long (64-bit) value where the bits @_msb
 * through @_lsb (inclusive) are set to one and all other bits are zero. The
 * parameters can hold values from 0 through 63 and if _msb == _lsb a single
 * bit is set at that location.
 *
 * Preconditions: _lsb <= _msb <= 63; values outside that range make the
 * shift counts undefined behaviour.
 */
#define BIT_MASK_ULL(_msb, _lsb) \
	((~ULL(0) >> (63UL - (_msb))) & (~ULL(0) << (_lsb)))
216
/*
 * Stringify the result of expansion of a macro argument:
 * __STRING() stringifies its argument as-is, __XSTRING() expands the
 * argument first (the usual two-level idiom). The guard allows a libc that
 * already provides these (the BSD-style names) to take precedence.
 */
#ifndef __XSTRING
#define __STRING(x) #x
#define __XSTRING(x) __STRING(x)
#endif
224
/*
 * Defines member of structure and reserves space
 * for the next member with specified offset.
 * The union overlays 'member' with a byte array of (end - start) bytes, so
 * the next struct member lands at offset 'end' (requires end > start and
 * sizeof the member <= (end - start)). 'end' is pasted into the reserved
 * field name, so it must be a single token valid in an identifier.
 */
#define SET_MEMBER(member, start, end) \
	union { \
		member; \
		unsigned char reserved##end[((end) - (start))]; \
	}

/* Marks an intentional switch-case fallthrough for the compiler. */
#define FALLTHROUGH __attribute__((fallthrough))
/*
 * Helper macros for conditionally compiling code, depending on whether the
 * current build is a CBMC build: IF_CBMC() emits its argument only in CBMC
 * builds, IF_NCBMC() only in regular builds.
 */
#ifdef CBMC
#define IF_NCBMC(x)
#define IF_CBMC(x) x
#else /* CBMC */
#define IF_NCBMC(x) x
#define IF_CBMC(x)
#endif /* CBMC */
248
Soby Mathewb4c6df42022-11-09 11:13:29 +0000249#endif /* !(defined(__ASSEMBLER__) || defined(__LINKER__)) */
250
251#endif /* UTILS_DEF_H */