/*
 * SPDX-License-Identifier: BSD-3-Clause
 * SPDX-FileCopyrightText: Copyright TF-RMM Contributors.
 */

#include <arch_helpers.h>
#include <attestation_token.h>
#include <bitmap.h>
#include <buffer.h>
#include <gic.h>
#include <granule.h>
#include <memory_alloc.h>
#include <realm.h>
#include <ripas.h>
#include <smc.h>
#include <status.h>
#include <stddef.h>
#include <string.h>
#include <table.h>

/*
 * For prototyping we assume 4K pages
 */
#define BLOCK_L2_SIZE		(GRANULE_SIZE * S2TTES_PER_S2TT)

/*
 * The maximum number of bits supported by the RMM for a stage 2 translation
 * output address (including stage 2 table entries).
 */
#define S2TTE_OA_BITS		48

#define DESC_TYPE_MASK		0x3UL
#define S2TTE_L012_TABLE	0x3UL
#define S2TTE_L012_BLOCK	0x1UL
#define S2TTE_L3_PAGE		0x3UL
#define S2TTE_Lx_INVALID	0x0UL

/*
 * The following constants for the mapping attributes (S2TTE_MEMATTR_*)
 * assume that HCR_EL2.FWB is set: bit[4] is set to 1 and bits[3:2] to 2,
 * which forces everything to Normal Write-Back.
 */
#define S2TTE_MEMATTR_SHIFT		2
#define S2TTE_MEMATTR_MASK		(0x7UL << S2TTE_MEMATTR_SHIFT)
#define S2TTE_MEMATTR_FWB_NORMAL_WB	((1UL << 4) | (2UL << 2))
#define S2TTE_MEMATTR_FWB_RESERVED	((1UL << 4) | (0UL << 2))

#define S2TTE_AP_SHIFT		6
#define S2TTE_AP_MASK		(3UL << S2TTE_AP_SHIFT)
#define S2TTE_AP_RW		(3UL << S2TTE_AP_SHIFT)

#define S2TTE_SH_SHIFT		8
#define S2TTE_SH_MASK		(3UL << S2TTE_SH_SHIFT)
#define S2TTE_SH_NS		(0UL << S2TTE_SH_SHIFT)
#define S2TTE_SH_RESERVED	(1UL << S2TTE_SH_SHIFT)
#define S2TTE_SH_OS		(2UL << S2TTE_SH_SHIFT)
#define S2TTE_SH_IS		(3UL << S2TTE_SH_SHIFT)	/* Inner Shareable */

#define S2TTE_AF		(1UL << 10)
#define S2TTE_XN		(2UL << 53)
#define S2TTE_NS		(1UL << 55)

#define S2TTE_ATTRS	(S2TTE_MEMATTR_FWB_NORMAL_WB | S2TTE_AP_RW | \
			 S2TTE_SH_IS | S2TTE_AF)

#define S2TTE_TABLE	S2TTE_L012_TABLE
#define S2TTE_BLOCK	(S2TTE_ATTRS | S2TTE_L012_BLOCK)
#define S2TTE_PAGE	(S2TTE_ATTRS | S2TTE_L3_PAGE)
#define S2TTE_BLOCK_NS	(S2TTE_NS | S2TTE_XN | S2TTE_AF | S2TTE_L012_BLOCK)
#define S2TTE_PAGE_NS	(S2TTE_NS | S2TTE_XN | S2TTE_AF | S2TTE_L3_PAGE)
#define S2TTE_INVALID	0

/*
 * The type of a stage 2 translation table entry (s2tte) is defined by:
 * 1. The table level where it resides
 * 2. DESC_TYPE field [1:0]
 * 3. HIPAS field [5:2]
 * 4. RIPAS field [6]
 * 5. NS field [55]
 *
 * s2tte type        level  DESC_TYPE[1:0]  HIPAS[5:2]     RIPAS[6]  NS  OA alignment
 * ==================================================================================
 * unassigned_empty  any    invalid[0]      unassigned[0]  empty[0]  0   n/a
 * ----------------------------------------------------------------------------------
 * unassigned_ram    any    invalid[0]      unassigned[0]  ram[1]    0   n/a
 * ----------------------------------------------------------------------------------
 * assigned_empty    2,3    invalid[0]      assigned[1]    empty[0]  0   to level
 * ----------------------------------------------------------------------------------
 * assigned_ram      3      page[3]         n/a            n/a       0   to level
 *                   2      block[1]        n/a            n/a       0   to level
 * ----------------------------------------------------------------------------------
 * destroyed         any    invalid[0]      destroyed[2]   n/a       0   n/a
 * ==================================================================================
 * unassigned_ns     any    invalid[0]      unassigned[0]  n/a       1   n/a
 * ----------------------------------------------------------------------------------
 * assigned_ns       3      page[3]         n/a            n/a       1   to level
 *                   2      block[1]        n/a            n/a       1   to level
 * ==================================================================================
 * table             <=2    table[3]        n/a            n/a       n/a to 4K
 */

#define S2TTE_INVALID_HIPAS_SHIFT	2
#define S2TTE_INVALID_HIPAS_WIDTH	4
#define S2TTE_INVALID_HIPAS_MASK	MASK(S2TTE_INVALID_HIPAS)

#define S2TTE_INVALID_HIPAS_UNASSIGNED	(INPLACE(S2TTE_INVALID_HIPAS, 0))
#define S2TTE_INVALID_HIPAS_ASSIGNED	(INPLACE(S2TTE_INVALID_HIPAS, 1))
#define S2TTE_INVALID_HIPAS_DESTROYED	(INPLACE(S2TTE_INVALID_HIPAS, 2))

#define S2TTE_INVALID_RIPAS_SHIFT	6
#define S2TTE_INVALID_RIPAS_WIDTH	1
#define S2TTE_INVALID_RIPAS_MASK	MASK(S2TTE_INVALID_RIPAS)

#define S2TTE_INVALID_RIPAS_EMPTY	(INPLACE(S2TTE_INVALID_RIPAS, 0))
#define S2TTE_INVALID_RIPAS_RAM		(INPLACE(S2TTE_INVALID_RIPAS, 1))

#define S2TTE_INVALID_DESTROYED		S2TTE_INVALID_HIPAS_DESTROYED
#define S2TTE_INVALID_UNPROTECTED	0x0UL

#define NR_RTT_LEVELS	4

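/*
 * Illustrative encoding example (a sketch, not used by the build): with the
 * field definitions above, an unassigned s2tte with RIPAS=RAM is an invalid
 * descriptor (DESC_TYPE[1:0] = 0) with HIPAS[5:2] = 0 and RIPAS[6] = 1:
 *
 *	s2tte_create_unassigned(RIPAS_RAM)
 *		== S2TTE_INVALID_HIPAS_UNASSIGNED | S2TTE_INVALID_RIPAS_RAM
 *		== 0x40UL
 *
 * s2tte_get_ripas() recovers the RIPAS by masking bit[6] with
 * S2TTE_INVALID_RIPAS_MASK.
 */
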
/*
 * Invalidates S2 TLB entries in the [ipa, ipa + size) region tagged with `vmid`.
 */
static void stage2_tlbi_ipa(const struct realm_s2_context *s2_ctx,
			    unsigned long ipa,
			    unsigned long size)
{
	/*
	 * Notes:
	 *
	 * - This follows the description provided in the Arm ARM on
	 *   "Invalidation of TLB entries from stage 2 translations".
	 *
	 * - @TODO: Provide additional information to this primitive so that
	 *   we can utilize:
	 *   - The TTL level hint, see FEAT_TTL,
	 *   - Final level lookup only invalidation,
	 *   - Address range invalidation.
	 */

	/*
	 * Save the current content of vttbr_el2.
	 */
	unsigned long old_vttbr_el2 = read_vttbr_el2();

	/*
	 * Make `vmid` the `current vmid`. Note that the tlbi instructions
	 * below target the TLB entries that match the `current vmid`.
	 */
	write_vttbr_el2(INPLACE(VTTBR_EL2_VMID, s2_ctx->vmid));
	isb();

	/*
	 * Invalidate entries in S2 TLB caches that
	 * match both `ipa` & the `current vmid`.
	 */
	while (size != 0UL) {
		tlbiipas2e1is(ipa >> 12);
		size -= GRANULE_SIZE;
		ipa += GRANULE_SIZE;
	}
	dsb(ish);

	/*
	 * The architecture does not require TLB invalidation by IPA to affect
	 * combined Stage-1 + Stage-2 TLBs. Therefore we must invalidate all of
	 * Stage-1 (tagged with the `current vmid`) after invalidating Stage-2.
	 */
	tlbivmalle1is();
	dsb(ish);
	isb();

	/*
	 * Restore the old content of vttbr_el2.
	 */
	write_vttbr_el2(old_vttbr_el2);
	isb();
}

/*
 * Invalidate S2 TLB entries with "addr" IPA.
 * Call this function after:
 * 1. A L3 page desc has been removed.
 */
void invalidate_page(const struct realm_s2_context *s2_ctx, unsigned long addr)
{
	stage2_tlbi_ipa(s2_ctx, addr, GRANULE_SIZE);
}

/*
 * Invalidate S2 TLB entries with "addr" IPA.
 * Call this function after:
 * 1. A L2 block desc has been removed, or
 * 2a. A L2 table desc has been removed, where
 * 2b. all S2TTEs in the L3 table that the L2 table desc pointed to were invalid.
 */
void invalidate_block(const struct realm_s2_context *s2_ctx, unsigned long addr)
{
	stage2_tlbi_ipa(s2_ctx, addr, GRANULE_SIZE);
}

/*
 * Invalidate S2 TLB entries with "addr" IPA.
 * Call this function after:
 * 1a. A L2 table desc has been removed, where
 * 1b. some S2TTEs in the L3 table that the L2 table desc pointed to were valid.
 */
void invalidate_pages_in_block(const struct realm_s2_context *s2_ctx, unsigned long addr)
{
	stage2_tlbi_ipa(s2_ctx, addr, BLOCK_L2_SIZE);
}

/*
 * Return the index of the entry describing @addr in the translation table at
 * level @level. This only works for non-concatenated page tables, so should
 * not be called to get the index for the starting level.
 *
 * See the library pseudocode
 * aarch64/translation/vmsa_addrcalc/AArch64.TTEntryAddress on which this is
 * modeled.
 */
static unsigned long s2_addr_to_idx(unsigned long addr, long level)
{
	int levels = RTT_PAGE_LEVEL - level;
	int lsb = levels * S2TTE_STRIDE + GRANULE_SHIFT;

	addr >>= lsb;
	addr &= (1UL << S2TTE_STRIDE) - 1;
	return addr;
}
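
/*
 * Worked example (illustrative only, assuming S2TTE_STRIDE == 9 and
 * GRANULE_SHIFT == 12): for @addr = 0x40201000,
 *
 *	level 3: lsb = 12, idx = (0x40201000 >> 12) & 0x1ffUL = 0x1
 *	level 2: lsb = 21, idx = (0x40201000 >> 21) & 0x1ffUL = 0x1
 *	level 1: lsb = 30, idx = (0x40201000 >> 30) & 0x1ffUL = 0x1
 */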

/*
 * Return the index of the entry describing @addr in the translation table
 * starting level. This may return an index >= S2TTES_PER_S2TT when the
 * combination of @start_level and @ipa_bits implies concatenated
 * stage 2 tables.
 *
 * See the library pseudocode
 * aarch64/translation/vmsa_addrcalc/AArch64.S2SLTTEntryAddress on which
 * this is modeled.
 */
static unsigned long s2_sl_addr_to_idx(unsigned long addr, int start_level,
				       unsigned long ipa_bits)
{
	int levels = RTT_PAGE_LEVEL - start_level;
	int lsb = levels * S2TTE_STRIDE + GRANULE_SHIFT;

	addr &= (1UL << ipa_bits) - 1UL;
	addr >>= lsb;
	return addr;
}

static unsigned long addr_level_mask(unsigned long addr, long level)
{
	int levels = RTT_PAGE_LEVEL - level;
	unsigned int lsb = levels * S2TTE_STRIDE + GRANULE_SHIFT;
	unsigned int msb = S2TTE_OA_BITS - 1;

	return addr & BIT_MASK_ULL(msb, lsb);
}

static inline unsigned long table_entry_to_phys(unsigned long entry)
{
	return addr_level_mask(entry, RTT_PAGE_LEVEL);
}

static inline bool entry_is_table(unsigned long entry)
{
	return (entry & DESC_TYPE_MASK) == S2TTE_L012_TABLE;
}

static unsigned long __table_get_entry(struct granule *g_tbl,
				       unsigned long idx)
{
	unsigned long *table, entry;

	table = granule_map(g_tbl, SLOT_RTT);
	entry = s2tte_read(&table[idx]);
	buffer_unmap(table);

	return entry;
}

static struct granule *__find_next_level_idx(struct granule *g_tbl,
					     unsigned long idx)
{
	const unsigned long entry = __table_get_entry(g_tbl, idx);

	if (!entry_is_table(entry)) {
		return NULL;
	}

	return addr_to_granule(table_entry_to_phys(entry));
}

static struct granule *__find_lock_next_level(struct granule *g_tbl,
					      unsigned long map_addr,
					      long level)
{
	const unsigned long idx = s2_addr_to_idx(map_addr, level);
	struct granule *g = __find_next_level_idx(g_tbl, idx);

	if (g != NULL) {
		granule_lock(g, GRANULE_STATE_RTT);
	}

	return g;
}

/*
 * Walk an RTT until level @level using @map_addr.
 * @g_root is the root (level 0) table and must be locked before the call.
 * @start_level is the initial lookup level used for the stage 2 translation
 * tables, which may depend on the configuration of the realm, factoring in the
 * IPA size of the realm and the desired starting level (within the limits
 * defined by the Armv8 VMSA, including options for stage 2 table concatenation).
 * The function uses hand-over-hand locking to avoid race conditions and to
 * allow concurrent access to the parts of the RTT tree that are not part of
 * the current walk; when the next level table is reached, it is locked before
 * releasing the previously locked table.
 * The walk stops when either:
 * - The entry found is a leaf entry (not an RTT Table entry), or
 * - Level @level is reached.
 *
 * On return:
 * - rtt_walk::last_level is the last level that has been reached by the walk.
 * - rtt_walk::g_llt points to the TABLE granule at level rtt_walk::last_level.
 *   The granule is locked.
 * - rtt_walk::index is the entry index at rtt_walk::g_llt for @map_addr.
 */
void rtt_walk_lock_unlock(struct granule *g_root,
			  int start_level,
			  unsigned long ipa_bits,
			  unsigned long map_addr,
			  long level,
			  struct rtt_walk *wi)
{
	struct granule *g_tbls[NR_RTT_LEVELS] = { NULL };
	unsigned long sl_idx;
	int i, last_level;

	assert(start_level >= MIN_STARTING_LEVEL);
	assert(level >= start_level);
	assert(map_addr < (1UL << ipa_bits));
	assert(wi != NULL);

	/* Handle concatenated starting level (SL) tables */
	sl_idx = s2_sl_addr_to_idx(map_addr, start_level, ipa_bits);
	if (sl_idx >= S2TTES_PER_S2TT) {
		unsigned int tt_num = (sl_idx >> S2TTE_STRIDE);
		struct granule *g_concat_root = g_root + tt_num;

		granule_lock(g_concat_root, GRANULE_STATE_RTT);
		granule_unlock(g_root);
		g_root = g_concat_root;
	}

	g_tbls[start_level] = g_root;
	for (i = start_level; i < level; i++) {
		/*
		 * Lock next RTT level. Correct locking order is guaranteed
		 * because reference is obtained from a locked granule
		 * (previous level). Also, hand-over-hand locking/unlocking is
		 * used to avoid race conditions.
		 */
		g_tbls[i + 1] = __find_lock_next_level(g_tbls[i], map_addr, i);
		if (g_tbls[i + 1] == NULL) {
			last_level = i;
			goto out;
		}
		granule_unlock(g_tbls[i]);
	}

	last_level = level;
out:
	wi->last_level = last_level;
	wi->g_llt = g_tbls[last_level];
	wi->index = s2_addr_to_idx(map_addr, last_level);
}
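
/*
 * Usage sketch (illustrative only; `g_root_rtt`, `start_level`, `ipa_bits` and
 * `map_addr` are assumed to be provided by the caller's realm context): walk
 * to the page level for an IPA, read the s2tte, then release the lock taken
 * by the walk.
 *
 *	struct rtt_walk wi;
 *	unsigned long *s2tt, s2tte;
 *
 *	rtt_walk_lock_unlock(g_root_rtt, start_level, ipa_bits,
 *			     map_addr, RTT_PAGE_LEVEL, &wi);
 *	s2tt = granule_map(wi.g_llt, SLOT_RTT);
 *	s2tte = s2tte_read(&s2tt[wi.index]);
 *	buffer_unmap(s2tt);
 *	granule_unlock(wi.g_llt);
 */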

/*
 * Creates a value which can be OR'd with an s2tte to set RIPAS=@ripas.
 */
unsigned long s2tte_create_ripas(enum ripas ripas)
{
	if (ripas == RIPAS_EMPTY) {
		return S2TTE_INVALID_RIPAS_EMPTY;
	}
	return S2TTE_INVALID_RIPAS_RAM;
}

/*
 * Creates an invalid s2tte with HIPAS=UNASSIGNED and RIPAS=@ripas.
 */
unsigned long s2tte_create_unassigned(enum ripas ripas)
{
	return S2TTE_INVALID_HIPAS_UNASSIGNED | s2tte_create_ripas(ripas);
}

/*
 * Creates an invalid s2tte with HIPAS=DESTROYED.
 */
unsigned long s2tte_create_destroyed(void)
{
	return S2TTE_INVALID_DESTROYED;
}

/*
 * Creates an invalid s2tte with output address @pa, HIPAS=ASSIGNED and
 * RIPAS=EMPTY, at level @level.
 */
unsigned long s2tte_create_assigned_empty(unsigned long pa, long level)
{
	assert(level >= RTT_MIN_BLOCK_LEVEL);
	assert(addr_is_level_aligned(pa, level));
	return (pa | S2TTE_INVALID_HIPAS_ASSIGNED | S2TTE_INVALID_RIPAS_EMPTY);
}

/*
 * Creates an assigned_ram s2tte with output address @pa, at level @level.
 */
unsigned long s2tte_create_assigned_ram(unsigned long pa, long level)
{
	assert(level >= RTT_MIN_BLOCK_LEVEL);
	assert(addr_is_level_aligned(pa, level));
	if (level == RTT_PAGE_LEVEL) {
		return (pa | S2TTE_PAGE);
	}
	return (pa | S2TTE_BLOCK);
}

/*
 * Creates an unassigned_ns s2tte.
 */
unsigned long s2tte_create_unassigned_ns(void)
{
	return S2TTE_NS | S2TTE_INVALID_HIPAS_UNASSIGNED |
	       S2TTE_INVALID_UNPROTECTED;
}

/*
 * Creates an assigned_ns s2tte at level @level.
 *
 * The following S2 TTE fields are provided through the @s2tte argument:
 * - The physical address
 * - MemAttr
 * - S2AP
 * - Shareability
 */
unsigned long s2tte_create_assigned_ns(unsigned long s2tte, long level)
{
	assert(level >= RTT_MIN_BLOCK_LEVEL);
	if (level == RTT_PAGE_LEVEL) {
		return (s2tte | S2TTE_PAGE_NS);
	}
	return (s2tte | S2TTE_BLOCK_NS);
}

/*
 * Validate the portion of NS S2TTE that is provided by the host.
 */
bool host_ns_s2tte_is_valid(unsigned long s2tte, long level)
{
	unsigned long mask = addr_level_mask(~0UL, level) |
			     S2TTE_MEMATTR_MASK |
			     S2TTE_AP_MASK |
			     S2TTE_SH_MASK;

	/*
	 * Test that all fields that are not controlled by the host are zero
	 * and that the output address is correctly aligned. Note that
	 * the host is permitted to map any physical address outside PAR.
	 */
	if ((s2tte & ~mask) != 0UL) {
		return false;
	}

	/*
	 * Only one value masked by S2TTE_MEMATTR_MASK is invalid/reserved.
	 */
	if ((s2tte & S2TTE_MEMATTR_MASK) == S2TTE_MEMATTR_FWB_RESERVED) {
		return false;
	}

	/*
	 * Only one value masked by S2TTE_SH_MASK is invalid/reserved.
	 */
	if ((s2tte & S2TTE_SH_MASK) == S2TTE_SH_RESERVED) {
		return false;
	}

	/*
	 * Note that all the values that are masked by S2TTE_AP_MASK are valid.
	 */
	return true;
}
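
/*
 * Illustrative example (a sketch, with an assumed PA of 0x800000000): a host
 * provided NS s2tte for a level 3 mapping that passes the check above could be
 *
 *	0x800000000UL | S2TTE_MEMATTR_FWB_NORMAL_WB | S2TTE_AP_RW | S2TTE_SH_IS
 *
 * i.e. only the OA, MemAttr, S2AP and Shareability fields are populated;
 * s2tte_create_assigned_ns() then ORs in the remaining RMM-controlled bits.
 */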

/*
 * Returns the portion of NS S2TTE that is set by the host.
 */
unsigned long host_ns_s2tte(unsigned long s2tte, long level)
{
	unsigned long mask = addr_level_mask(~0UL, level) |
			     S2TTE_MEMATTR_MASK |
			     S2TTE_AP_MASK |
			     S2TTE_SH_MASK;
	return (s2tte & mask);
}

/*
 * Creates a table s2tte at level @level with output address @pa.
 */
unsigned long s2tte_create_table(unsigned long pa, long level)
{
	assert(level < RTT_PAGE_LEVEL);
	assert(GRANULE_ALIGNED(pa));

	return (pa | S2TTE_TABLE);
}

/*
 * Returns true if @s2tte has HIPAS=@hipas.
 */
static bool s2tte_has_hipas(unsigned long s2tte, unsigned long hipas)
{
	unsigned long desc_type = s2tte & DESC_TYPE_MASK;
	unsigned long invalid_desc_hipas = s2tte & S2TTE_INVALID_HIPAS_MASK;

	if ((desc_type != S2TTE_Lx_INVALID) || (invalid_desc_hipas != hipas)) {
		return false;
	}
	return true;
}

/*
 * Returns true if @s2tte is unassigned, i.e. has HIPAS=UNASSIGNED and the NS
 * bit clear.
 */
bool s2tte_is_unassigned(unsigned long s2tte)
{
	if ((s2tte & S2TTE_NS) != 0UL) {
		return false;
	}

	return s2tte_has_hipas(s2tte, S2TTE_INVALID_HIPAS_UNASSIGNED);
}

/*
 * Returns true if @s2tte is unassigned_ns.
 */
bool s2tte_is_unassigned_ns(unsigned long s2tte)
{
	if ((s2tte & S2TTE_NS) == 0UL) {
		return false;
	}

	return s2tte_has_hipas(s2tte, S2TTE_INVALID_HIPAS_UNASSIGNED);
}

/*
 * Returns true if @s2tte has HIPAS=DESTROYED.
 */
bool s2tte_is_destroyed(unsigned long s2tte)
{
	return s2tte_has_hipas(s2tte, S2TTE_INVALID_HIPAS_DESTROYED);
}

/*
 * Returns true if @s2tte is an assigned_empty s2tte.
 */
bool s2tte_is_assigned_empty(unsigned long s2tte, long level)
{
	(void)level;

	return s2tte_has_hipas(s2tte, S2TTE_INVALID_HIPAS_ASSIGNED);
}

static bool s2tte_check(unsigned long s2tte, long level, unsigned long ns)
{
	unsigned long desc_type;

	if ((s2tte & S2TTE_NS) != ns) {
		return false;
	}

	desc_type = s2tte & DESC_TYPE_MASK;

	/* Only pages at L3 and valid blocks at L2 allowed */
	if (((level == RTT_PAGE_LEVEL) && (desc_type == S2TTE_L3_PAGE)) ||
	    ((level == RTT_MIN_BLOCK_LEVEL) && (desc_type == S2TTE_L012_BLOCK))) {
		return true;
	}

	return false;
}

/*
 * Returns true if @s2tte is an assigned_ram s2tte.
 */
bool s2tte_is_assigned_ram(unsigned long s2tte, long level)
{
	return s2tte_check(s2tte, level, 0UL);
}

/*
 * Returns true if @s2tte is an assigned_ns s2tte.
 */
bool s2tte_is_assigned_ns(unsigned long s2tte, long level)
{
	return s2tte_check(s2tte, level, S2TTE_NS);
}

/*
 * Returns true if @s2tte is a table at level @level.
 */
bool s2tte_is_table(unsigned long s2tte, long level)
{
	unsigned long desc_type = s2tte & DESC_TYPE_MASK;

	if ((level < RTT_PAGE_LEVEL) && (desc_type == S2TTE_TABLE)) {
		return true;
	}

	return false;
}

/*
 * Returns the RIPAS of @s2tte.
 *
 * Caller should ensure that HIPAS=UNASSIGNED or HIPAS=ASSIGNED.
 * The s2tte can be either a valid or an invalid descriptor; for a valid
 * descriptor, bit[6] (S2AP[0]) must be set, which maps to RIPAS_RAM.
 */
enum ripas s2tte_get_ripas(unsigned long s2tte)
{
	unsigned long desc_ripas = s2tte & S2TTE_INVALID_RIPAS_MASK;

	/*
	 * If a valid s2tte descriptor is passed, then ensure that the S2AP[0]
	 * bit is 1 (S2AP is set to RW for the lower EL), which corresponds
	 * to RIPAS_RAM (bit[6]) on a valid descriptor.
	 */
	if (((s2tte & DESC_TYPE_MASK) != S2TTE_Lx_INVALID) &&
	    (desc_ripas != S2TTE_INVALID_RIPAS_RAM)) {
		assert(false);
	}

	if (desc_ripas == S2TTE_INVALID_RIPAS_EMPTY) {
		return RIPAS_EMPTY;
	}

	return RIPAS_RAM;
}

/*
 * Populates @s2tt with s2ttes which have HIPAS=UNASSIGNED and RIPAS=@ripas.
 *
 * The granule is populated before it is made a table,
 * hence, don't use s2tte_write for access.
 */
void s2tt_init_unassigned(unsigned long *s2tt, enum ripas ripas)
{
	for (unsigned int i = 0U; i < S2TTES_PER_S2TT; i++) {
		s2tt[i] = s2tte_create_unassigned(ripas);
	}

	dsb(ish);
}

/*
 * Populates @s2tt with unassigned_ns s2ttes.
 *
 * The granule is populated before it is made a table,
 * hence, don't use s2tte_write for access.
 */
void s2tt_init_unassigned_ns(unsigned long *s2tt)
{
	for (unsigned int i = 0U; i < S2TTES_PER_S2TT; i++) {
		s2tt[i] = s2tte_create_unassigned_ns();
	}

	dsb(ish);
}

/*
 * Populates @s2tt with s2ttes which have HIPAS=DESTROYED.
 *
 * The granule is populated before it is made a table,
 * hence, don't use s2tte_write for access.
 */
void s2tt_init_destroyed(unsigned long *s2tt)
{
	for (unsigned int i = 0U; i < S2TTES_PER_S2TT; i++) {
		s2tt[i] = s2tte_create_destroyed();
	}

	dsb(ish);
}

unsigned long s2tte_map_size(int level)
{
	int levels, lsb;

	assert(level <= RTT_PAGE_LEVEL);

	levels = RTT_PAGE_LEVEL - level;
	lsb = levels * S2TTE_STRIDE + GRANULE_SHIFT;
	return 1UL << lsb;
}
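
/*
 * For reference (assuming 4K granules, S2TTE_STRIDE == 9 and
 * GRANULE_SHIFT == 12), the size mapped by a single entry is:
 *
 *	level 3: 1UL << 12 = 4KB
 *	level 2: 1UL << 21 = 2MB
 *	level 1: 1UL << 30 = 1GB
 */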

/*
 * Populates @s2tt with HIPAS=ASSIGNED, RIPAS=EMPTY s2ttes that refer to a
 * contiguous memory block starting at @pa, and mapped at level @level.
 *
 * The granule is populated before it is made a table,
 * hence, don't use s2tte_write for access.
 */
void s2tt_init_assigned_empty(unsigned long *s2tt, unsigned long pa, long level)
{
	const unsigned long map_size = s2tte_map_size(level);

	for (unsigned int i = 0U; i < S2TTES_PER_S2TT; i++) {
		s2tt[i] = s2tte_create_assigned_empty(pa, level);
		pa += map_size;
	}
	dsb(ish);
}

/*
 * Populates @s2tt with assigned_ram s2ttes that refer to a
 * contiguous memory block starting at @pa, and mapped at level @level.
 *
 * The granule is populated before it is made a table,
 * hence, don't use s2tte_write for access.
 */
void s2tt_init_assigned_ram(unsigned long *s2tt, unsigned long pa, long level)
{
	const unsigned long map_size = s2tte_map_size(level);

	for (unsigned int i = 0U; i < S2TTES_PER_S2TT; i++) {
		s2tt[i] = s2tte_create_assigned_ram(pa, level);
		pa += map_size;
	}
	dsb(ish);
}

/*
 * Populates @s2tt with assigned_ns s2ttes that refer to a
 * contiguous memory block starting at @pa, and mapped at level @level.
 *
 * The granule is populated before it is made a table,
 * hence, don't use s2tte_write for access.
 */
void s2tt_init_assigned_ns(unsigned long *s2tt, unsigned long pa, long level)
{
	const unsigned long map_size = s2tte_map_size(level);

	for (unsigned int i = 0U; i < S2TTES_PER_S2TT; i++) {
		s2tt[i] = s2tte_create_assigned_ns(pa, level);
		pa += map_size;
	}
	dsb(ish);
}

/* Returns the physical address of a page or block entry */
unsigned long s2tte_pa(unsigned long s2tte, long level)
{
	if (s2tte_is_unassigned(s2tte) || s2tte_is_destroyed(s2tte) ||
	    s2tte_is_table(s2tte, level)) {
		assert(false);
	}
	return addr_level_mask(s2tte, level);
}

/* Returns the physical address of a table entry */
unsigned long s2tte_pa_table(unsigned long s2tte, long level)
{
	assert(s2tte_is_table(s2tte, level));
	return addr_level_mask(s2tte, RTT_PAGE_LEVEL);
}

bool addr_is_level_aligned(unsigned long addr, long level)
{
	return (addr == addr_level_mask(addr, level));
}

typedef bool (*s2tte_type_checker)(unsigned long s2tte);

static bool __table_is_uniform_block(unsigned long *table,
				     s2tte_type_checker s2tte_is_x,
				     enum ripas *ripas_ptr)
{
	unsigned long s2tte = s2tte_read(&table[0]);
	enum ripas ripas;
	unsigned int i;

	if (!s2tte_is_x(s2tte)) {
		return false;
	}

	if (ripas_ptr != NULL) {
		ripas = s2tte_get_ripas(s2tte);
	}

	for (i = 1U; i < S2TTES_PER_S2TT; i++) {
		s2tte = s2tte_read(&table[i]);

		if (!s2tte_is_x(s2tte)) {
			return false;
		}

		if ((ripas_ptr != NULL) &&
		    (s2tte_get_ripas(s2tte) != ripas)) {
			return false;
		}
	}

	if (ripas_ptr != NULL) {
		*ripas_ptr = ripas;
	}

	return true;
}

/*
 * Returns true if all s2ttes in @table have HIPAS=UNASSIGNED and
 * have the same RIPAS.
 *
 * If the return value is true, the RIPAS value is returned in @ripas.
 */
bool table_is_unassigned_block(unsigned long *table, enum ripas *ripas)
{
	return __table_is_uniform_block(table, s2tte_is_unassigned, ripas);
}

/*
 * Returns true if all s2ttes in @table are unassigned_ns.
 */
bool table_is_unassigned_ns_block(unsigned long *table)
{
	return __table_is_uniform_block(table, s2tte_is_unassigned_ns, NULL);
}

/*
 * Returns true if all s2ttes in @table have HIPAS=DESTROYED.
 */
bool table_is_destroyed_block(unsigned long *table)
{
	return __table_is_uniform_block(table, s2tte_is_destroyed, NULL);
}

typedef bool (*s2tte_type_level_checker)(unsigned long s2tte, long level);

static bool __table_maps_block(unsigned long *table,
			       long level,
			       s2tte_type_level_checker s2tte_is_x)
{
	unsigned long base_pa;
	unsigned long map_size = s2tte_map_size(level);
	unsigned long s2tte = s2tte_read(&table[0]);
	unsigned int i;

	if (!s2tte_is_x(s2tte, level)) {
		return false;
	}

	base_pa = s2tte_pa(s2tte, level);
	if (!addr_is_level_aligned(base_pa, level - 1L)) {
		return false;
	}

	for (i = 1U; i < S2TTES_PER_S2TT; i++) {
		unsigned long expected_pa = base_pa + (i * map_size);

		s2tte = s2tte_read(&table[i]);

		if (!s2tte_is_x(s2tte, level)) {
			return false;
		}

		if (s2tte_pa(s2tte, level) != expected_pa) {
			return false;
		}
	}

	return true;
}

/*
 * Returns true if all s2ttes in @table are assigned_empty
 * and refer to a contiguous block of granules aligned to @level - 1.
 */
bool table_maps_assigned_empty_block(unsigned long *table, long level)
{
	return __table_maps_block(table, level, s2tte_is_assigned_empty);
}

/*
 * Returns true if all s2ttes in @table are assigned_ram and
 * refer to a contiguous block of granules aligned to @level - 1.
 */
bool table_maps_assigned_ram_block(unsigned long *table, long level)
{
	return __table_maps_block(table, level, s2tte_is_assigned_ram);
}

/*
 * Returns true if all s2ttes in @table are assigned_ns s2ttes and
 * refer to a contiguous block of granules aligned to @level - 1.
 *
 * @pre: @table maps IPA outside PAR.
 */
bool table_maps_assigned_ns_block(unsigned long *table, long level)
{
	return __table_maps_block(table, level, s2tte_is_assigned_ns);
}
Soby Mathewb4c6df42022-11-09 11:13:29 +0000919}