/*
 * Copyright 2019 The Hafnium Authors.
 *
 * Use of this source code is governed by a BSD-style
 * license that can be found in the LICENSE file or at
 * https://opensource.org/licenses/BSD-3-Clause.
 */

#pragma once

#include <stddef.h>
#include <stdint.h>

#include "hf/arch/barriers.h"
#include "hf/arch/types.h"

#include "hf/assert.h"

/* Opaque types for different sized fields of memory mapped IO. */

typedef struct {
	volatile uint8_t *ptr;
} io8_t;

typedef struct {
	volatile uint16_t *ptr;
} io16_t;

typedef struct {
	volatile uint32_t *ptr;
} io32_t;

typedef struct {
	volatile uint64_t *ptr;
} io64_t;

typedef struct {
	volatile uint8_t *base;
	size_t count;
} io8_array_t;

typedef struct {
	volatile uint16_t *base;
	size_t count;
} io16_array_t;

typedef struct {
	volatile uint32_t *base;
	size_t count;
} io32_array_t;

typedef struct {
	volatile uint64_t *base;
	size_t count;
} io64_array_t;

/* Constructors for literals. */

static inline io8_t io8_c(uintpaddr_t addr, uintpaddr_t offset)
{
	/* NOLINTNEXTLINE(performance-no-int-to-ptr) */
	return (io8_t){.ptr = (volatile uint8_t *)(addr + offset)};
}

static inline io8_array_t io8_array_c(uintpaddr_t addr, uintpaddr_t offset,
				      uint32_t count)
{
	(void)offset;

	/* NOLINTNEXTLINE(performance-no-int-to-ptr) */
	return (io8_array_t){.base = (volatile uint8_t *)addr, .count = count};
}

static inline io16_t io16_c(uintpaddr_t addr, uintpaddr_t offset)
{
	/* NOLINTNEXTLINE(performance-no-int-to-ptr) */
	return (io16_t){.ptr = (volatile uint16_t *)(addr + offset)};
}

static inline io16_array_t io16_array_c(uintpaddr_t addr, uintpaddr_t offset,
					uint32_t count)
{
	(void)offset;

	/* NOLINTNEXTLINE(performance-no-int-to-ptr) */
	return (io16_array_t){.base = (volatile uint16_t *)addr,
			      .count = count};
}

static inline io32_t io32_c(uintpaddr_t addr, uintpaddr_t offset)
{
	/* NOLINTNEXTLINE(performance-no-int-to-ptr) */
	return (io32_t){.ptr = (volatile uint32_t *)(addr + offset)};
}

static inline io32_array_t io32_array_c(uintpaddr_t addr, uintpaddr_t offset,
					uint32_t count)
{
	(void)offset;

	/* NOLINTNEXTLINE(performance-no-int-to-ptr) */
	return (io32_array_t){.base = (volatile uint32_t *)addr,
			      .count = count};
}

static inline io64_t io64_c(uintpaddr_t addr, uintpaddr_t offset)
{
	/* NOLINTNEXTLINE(performance-no-int-to-ptr) */
	return (io64_t){.ptr = (volatile uint64_t *)(addr + offset)};
}

static inline io64_array_t io64_array_c(uintpaddr_t addr, uintpaddr_t offset,
					uint32_t count)
{
	(void)offset;

	/* NOLINTNEXTLINE(performance-no-int-to-ptr) */
	return (io64_array_t){.base = (volatile uint64_t *)addr,
			      .count = count};
}

#define IO8_C(addr) io8_c((addr), 0)
#define IO16_C(addr) io16_c((addr), 0)
#define IO32_C(addr) io32_c((addr), 0)
#define IO64_C(addr) io64_c((addr), 0)

#define IO8_ARRAY_C(addr, cnt) io8_array_c((addr), 0, cnt)
#define IO16_ARRAY_C(addr, cnt) io16_array_c((addr), 0, cnt)
#define IO32_ARRAY_C(addr, cnt) io32_array_c((addr), 0, cnt)
#define IO64_ARRAY_C(addr, cnt) io64_array_c((addr), 0, cnt)
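
/*
 * Example usage (illustrative sketch only, not part of this header's API):
 * construct handles for a hypothetical device with a 32-bit status register
 * at its base address and a 16-entry byte FIFO at offset 0x20.
 * EXAMPLE_DEVICE_BASE is an assumed placeholder address, not a real mapping.
 *
 *   #define EXAMPLE_DEVICE_BASE 0x09000000
 *
 *   io32_t status = IO32_C(EXAMPLE_DEVICE_BASE);
 *   io8_array_t fifo = IO8_ARRAY_C(EXAMPLE_DEVICE_BASE + 0x20, 16);
 */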

/** Read from memory-mapped IO. */

static inline uint8_t io_read8(io8_t io)
{
	return *io.ptr;
}

static inline uint16_t io_read16(io16_t io)
{
	return *io.ptr;
}

static inline uint32_t io_read32(io32_t io)
{
	return *io.ptr;
}

static inline uint64_t io_read64(io64_t io)
{
	return *io.ptr;
}

static inline uint8_t io_read8_array(io8_array_t io, size_t n)
{
	assert(n < io.count);
	return io.base[n];
}

static inline uint16_t io_read16_array(io16_array_t io, size_t n)
{
	assert(n < io.count);
	return io.base[n];
}

static inline uint32_t io_read32_array(io32_array_t io, size_t n)
{
	assert(n < io.count);
	return io.base[n];
}

static inline uint64_t io_read64_array(io64_array_t io, size_t n)
{
	assert(n < io.count);
	return io.base[n];
}
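
/*
 * Example (illustrative sketch, reusing the hypothetical handles from the
 * constructor example above): plain reads return the current register value
 * with no ordering guarantee beyond the volatile access itself.
 *
 *   uint32_t flags = io_read32(status);
 *   uint8_t first = io_read8_array(fifo, 0);
 */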

/**
 * Read from memory-mapped IO with memory barrier.
 *
 * The read is ordered before subsequent memory accesses.
 */

static inline uint8_t io_read8_mb(io8_t io)
{
	uint8_t v = io_read8(io);

	data_sync_barrier();
	return v;
}

static inline uint16_t io_read16_mb(io16_t io)
{
	uint16_t v = io_read16(io);

	data_sync_barrier();
	return v;
}

static inline uint32_t io_read32_mb(io32_t io)
{
	uint32_t v = io_read32(io);

	data_sync_barrier();
	return v;
}

static inline uint64_t io_read64_mb(io64_t io)
{
	uint64_t v = io_read64(io);

	data_sync_barrier();
	return v;
}

static inline uint8_t io_read8_array_mb(io8_array_t io, size_t n)
{
	uint8_t v = io_read8_array(io, n);

	data_sync_barrier();
	return v;
}

static inline uint16_t io_read16_array_mb(io16_array_t io, size_t n)
{
	uint16_t v = io_read16_array(io, n);

	data_sync_barrier();
	return v;
}

static inline uint32_t io_read32_array_mb(io32_array_t io, size_t n)
{
	uint32_t v = io_read32_array(io, n);

	data_sync_barrier();
	return v;
}

static inline uint64_t io_read64_array_mb(io64_array_t io, size_t n)
{
	uint64_t v = io_read64_array(io, n);

	data_sync_barrier();
	return v;
}
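
/*
 * Example (illustrative sketch): use an _mb read when later memory accesses
 * must not be reordered before the device read, e.g. checking a hypothetical
 * "data ready" status bit before touching a buffer the device filled.
 * EXAMPLE_STATUS_READY, process() and buffer are placeholders.
 *
 *   if (io_read32_mb(status) & EXAMPLE_STATUS_READY) {
 *           process(buffer);
 *   }
 */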

/* Write to memory-mapped IO. */

static inline void io_write8(io8_t io, uint8_t v)
{
	*io.ptr = v;
}

static inline void io_write16(io16_t io, uint16_t v)
{
	*io.ptr = v;
}

static inline void io_write32(io32_t io, uint32_t v)
{
	*io.ptr = v;
}

static inline void io_write64(io64_t io, uint64_t v)
{
	*io.ptr = v;
}

static inline void io_clrbits32(io32_t io, uint32_t clear)
{
	io_write32(io, io_read32(io) & ~clear);
}

static inline void io_setbits32(io32_t io, uint32_t set)
{
	io_write32(io, io_read32(io) | set);
}

static inline void io_clrsetbits32(io32_t io, uint32_t clear, uint32_t set)
{
	io_write32(io, (io_read32(io) & ~clear) | set);
}
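
/*
 * Example (illustrative sketch): read-modify-write of a hypothetical control
 * register, clearing a mode field and setting an enable bit in one update.
 * EXAMPLE_CTRL_MODE_MASK and EXAMPLE_CTRL_ENABLE are assumed bit definitions.
 * Note that the update is a non-atomic read followed by a write.
 *
 *   io32_t ctrl = IO32_C(EXAMPLE_DEVICE_BASE + 0x4);
 *
 *   io_clrsetbits32(ctrl, EXAMPLE_CTRL_MODE_MASK, EXAMPLE_CTRL_ENABLE);
 */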

static inline void io_write8_array(io8_array_t io, size_t n, uint8_t v)
{
	assert(n < io.count);
	io.base[n] = v;
}

static inline void io_write16_array(io16_array_t io, size_t n, uint16_t v)
{
	assert(n < io.count);
	io.base[n] = v;
}

static inline void io_write32_array(io32_array_t io, size_t n, uint32_t v)
{
	assert(n < io.count);
	io.base[n] = v;
}

static inline void io_write64_array(io64_array_t io, size_t n, uint64_t v)
{
	assert(n < io.count);
	io.base[n] = v;
}
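
/*
 * Example (illustrative sketch): push bytes into the hypothetical 16-entry
 * FIFO from the constructor example; data and len are placeholders. The
 * assert() in io_write8_array() guards against indexing past the declared
 * count.
 *
 *   for (size_t i = 0; i < len && i < 16; ++i) {
 *           io_write8_array(fifo, i, data[i]);
 *   }
 */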

/*
 * Write to memory-mapped IO with memory barrier.
 *
 * The write is ordered after previous memory accesses.
 */

static inline void io_write8_mb(io8_t io, uint8_t v)
{
	data_sync_barrier();
	io_write8(io, v);
}

static inline void io_write16_mb(io16_t io, uint16_t v)
{
	data_sync_barrier();
	io_write16(io, v);
}

static inline void io_write32_mb(io32_t io, uint32_t v)
{
	data_sync_barrier();
	io_write32(io, v);
}

static inline void io_write64_mb(io64_t io, uint64_t v)
{
	data_sync_barrier();
	io_write64(io, v);
}

static inline void io_write8_array_mb(io8_array_t io, size_t n, uint8_t v)
{
	data_sync_barrier();
	io_write8_array(io, n, v);
}

static inline void io_write16_array_mb(io16_array_t io, size_t n, uint16_t v)
{
	data_sync_barrier();
	io_write16_array(io, n, v);
}

static inline void io_write32_array_mb(io32_array_t io, size_t n, uint32_t v)
{
	data_sync_barrier();
	io_write32_array(io, n, v);
}

static inline void io_write64_array_mb(io64_array_t io, size_t n, uint64_t v)
{
	data_sync_barrier();
	io_write64_array(io, n, v);
}
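
/*
 * Example (illustrative sketch): an _mb write orders the device write after
 * earlier memory accesses, e.g. publishing a descriptor in normal memory
 * before ringing a hypothetical doorbell register. descriptor, buffer_pa,
 * len and doorbell are placeholders.
 *
 *   descriptor->addr = buffer_pa;
 *   descriptor->len = len;
 *   io_write32_mb(doorbell, 1);
 */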