/*
 * SPDX-License-Identifier: BSD-3-Clause
 * SPDX-FileCopyrightText: Copyright TF-RMM Contributors.
 */

#include <arch_helpers.h>
#include <bitmap.h>
#include <buffer.h>
#include <gic.h>
#include <granule.h>
#include <memory_alloc.h>
#include <realm.h>
#include <ripas.h>
#include <smc.h>
#include <status.h>
#include <stddef.h>
#include <string.h>
#include <table.h>

/*
 * For prototyping, a 4KB granule size is assumed, so a level 2 block maps
 * GRANULE_SIZE * S2TTES_PER_S2TT = 4KB * 512 = 2MB.
 */
#define BLOCK_L2_SIZE		(GRANULE_SIZE * S2TTES_PER_S2TT)

/*
 * The maximum number of bits supported by the RMM for a stage 2 translation
 * output address (including stage 2 table entries).
 */
#define S2TTE_OA_BITS			48

#define DESC_TYPE_MASK			0x3UL
#define S2TTE_L012_TABLE		0x3UL
#define S2TTE_L012_BLOCK		0x1UL
#define S2TTE_L3_PAGE			0x3UL
#define S2TTE_Lx_INVALID		0x0UL

/*
 * The following constants for the mapping attributes (S2TTE_MEMATTR_*)
 * assume that HCR_EL2.FWB is set. With FWB, S2TTE_MEMATTR_FWB_NORMAL_WB
 * (bit[4] = 1, bits[3:2] = 2) forces everything to Normal Write-Back memory.
 */
#define S2TTE_MEMATTR_SHIFT		2
#define S2TTE_MEMATTR_MASK		(0x7UL << S2TTE_MEMATTR_SHIFT)
#define S2TTE_MEMATTR_FWB_NORMAL_WB	((1UL << 4) | (2UL << 2))
#define S2TTE_MEMATTR_FWB_RESERVED	((1UL << 4) | (0UL << 2))

#define S2TTE_AP_SHIFT			6
#define S2TTE_AP_MASK			(3UL << S2TTE_AP_SHIFT)
#define S2TTE_AP_RW			(3UL << S2TTE_AP_SHIFT)

#define S2TTE_SH_SHIFT			8
#define S2TTE_SH_MASK			(3UL << S2TTE_SH_SHIFT)
#define S2TTE_SH_NS			(0UL << S2TTE_SH_SHIFT)
#define S2TTE_SH_RESERVED		(1UL << S2TTE_SH_SHIFT)
#define S2TTE_SH_OS			(2UL << S2TTE_SH_SHIFT)
#define S2TTE_SH_IS			(3UL << S2TTE_SH_SHIFT)	/* Inner Shareable */

#define S2TTE_AF			(1UL << 10)
#define S2TTE_XN			(2UL << 53)
#define S2TTE_NS			(1UL << 55)

#define S2TTE_ATTRS	(S2TTE_MEMATTR_FWB_NORMAL_WB | S2TTE_AP_RW | \
			 S2TTE_SH_IS | S2TTE_AF)

#define S2TTE_TABLE	S2TTE_L012_TABLE
#define S2TTE_BLOCK	(S2TTE_ATTRS | S2TTE_L012_BLOCK)
#define S2TTE_PAGE	(S2TTE_ATTRS | S2TTE_L3_PAGE)
#define S2TTE_BLOCK_NS	(S2TTE_NS | S2TTE_XN | S2TTE_AF | S2TTE_L012_BLOCK)
#define S2TTE_PAGE_NS	(S2TTE_NS | S2TTE_XN | S2TTE_AF | S2TTE_L3_PAGE)
#define S2TTE_INVALID	0

/*
 * The type of an S2TTE is one of the following:
 *
 * - Invalid
 * - Valid page
 * - Valid block
 * - Table
 *
 * Within an invalid S2TTE for a Protected IPA, architecturally RES0 bits are
 * used to encode the HIPAS and RIPAS.
 *
 * A valid S2TTE for a Protected IPA implies HIPAS=ASSIGNED and RIPAS=RAM.
 *
 * An invalid S2TTE for an Unprotected IPA implies HIPAS=INVALID_NS.
 * A valid S2TTE for an Unprotected IPA implies HIPAS=VALID_NS.
 *
 * The following table defines the mapping from a (HIPAS, RIPAS) tuple to the
 * value of the S2TTE.
 *
 * ------------------------------------------------------------------------------
 * IPA		HIPAS		RIPAS	S2TTE value
 * ==============================================================================
 * Protected	UNASSIGNED	EMPTY	(S2TTE_INVALID_HIPAS_UNASSIGNED |
 *					 S2TTE_INVALID_RIPAS_EMPTY)
 * Protected	UNASSIGNED	RAM	(S2TTE_INVALID_HIPAS_UNASSIGNED |
 *					 S2TTE_INVALID_RIPAS_RAM)
 * Protected	ASSIGNED	EMPTY	(S2TTE_INVALID_HIPAS_ASSIGNED |
 *					 S2TTE_INVALID_RIPAS_EMPTY)
 * Protected	ASSIGNED	RAM	Valid page / block with NS=0
 * Protected	DESTROYED	*	S2TTE_INVALID_DESTROYED
 * Unprotected	INVALID_NS	N/A	S2TTE_INVALID_UNPROTECTED
 * Unprotected	VALID_NS	N/A	Valid page / block with NS=1
 * ------------------------------------------------------------------------------
 */

#define S2TTE_INVALID_HIPAS_SHIFT	2
#define S2TTE_INVALID_HIPAS_WIDTH	4
#define S2TTE_INVALID_HIPAS_MASK	MASK(S2TTE_INVALID_HIPAS)

#define S2TTE_INVALID_HIPAS_UNASSIGNED	(INPLACE(S2TTE_INVALID_HIPAS, 0))
#define S2TTE_INVALID_HIPAS_ASSIGNED	(INPLACE(S2TTE_INVALID_HIPAS, 1))
#define S2TTE_INVALID_HIPAS_DESTROYED	(INPLACE(S2TTE_INVALID_HIPAS, 2))

#define S2TTE_INVALID_RIPAS_SHIFT	6
#define S2TTE_INVALID_RIPAS_WIDTH	1
#define S2TTE_INVALID_RIPAS_MASK	MASK(S2TTE_INVALID_RIPAS)

#define S2TTE_INVALID_RIPAS_EMPTY	(INPLACE(S2TTE_INVALID_RIPAS, 0))
#define S2TTE_INVALID_RIPAS_RAM		(INPLACE(S2TTE_INVALID_RIPAS, 1))

#define S2TTE_INVALID_DESTROYED		S2TTE_INVALID_HIPAS_DESTROYED
#define S2TTE_INVALID_UNPROTECTED	0x0UL
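
/*
 * Worked example of the encoding above (illustrative only, not used by the
 * build): an invalid s2tte recording HIPAS=UNASSIGNED and RIPAS=RAM would be
 * composed as
 *
 *	s2tte = S2TTE_INVALID_HIPAS_UNASSIGNED | S2TTE_INVALID_RIPAS_RAM;
 *
 * i.e. bits[1:0] = 0 (invalid descriptor), bits[5:2] = 0 (UNASSIGNED) and
 * bit[6] = 1 (RAM), which is exactly the value produced by
 * s2tte_create_unassigned(RIPAS_RAM) further down in this file.
 */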

#define NR_RTT_LEVELS	4

/*
 * Invalidates S2 TLB entries for the [ipa, ipa + size) region, tagged with
 * the VMID of @s2_ctx.
 */
static void stage2_tlbi_ipa(const struct realm_s2_context *s2_ctx,
			    unsigned long ipa,
			    unsigned long size)
{
	/*
	 * Notes:
	 *
	 * - This follows the description provided in the Arm ARM on
	 *   "Invalidation of TLB entries from stage 2 translations".
	 *
	 * - @TODO: Provide additional information to this primitive so that
	 *   we can utilize:
	 *   - The TTL level hint, see FEAT_TTL,
	 *   - Final level lookup only invalidation,
	 *   - Address range invalidation.
	 */

	/*
	 * Save the current content of vttbr_el2.
	 */
	unsigned long old_vttbr_el2 = read_vttbr_el2();

	/*
	 * Make 'vmid' the `current vmid`. Note that the tlbi instructions
	 * below target the TLB entries that match the `current vmid`.
	 */
	write_vttbr_el2(INPLACE(VTTBR_EL2_VMID, s2_ctx->vmid));
	isb();

	/*
	 * Invalidate entries in S2 TLB caches that
	 * match both `ipa` & the `current vmid`.
	 */
	while (size != 0UL) {
		tlbiipas2e1is(ipa >> 12);
		size -= GRANULE_SIZE;
		ipa += GRANULE_SIZE;
	}
	dsb(ish);

	/*
	 * The architecture does not require TLB invalidation by IPA to affect
	 * combined Stage-1 + Stage-2 TLBs. Therefore we must invalidate all of
	 * Stage-1 (tagged with the `current vmid`) after invalidating Stage-2.
	 */
	tlbivmalle1is();
	dsb(ish);
	isb();

	/*
	 * Restore the old content of vttbr_el2.
	 */
	write_vttbr_el2(old_vttbr_el2);
	isb();
}

/*
 * Invalidate S2 TLB entries with "addr" IPA.
 * Call this function after:
 * 1. An L3 page desc has been removed.
 */
void invalidate_page(const struct realm_s2_context *s2_ctx, unsigned long addr)
{
	stage2_tlbi_ipa(s2_ctx, addr, GRANULE_SIZE);
}

/*
 * Invalidate S2 TLB entries with "addr" IPA.
 * Call this function after:
 * 1. An L2 block desc has been removed, or
 * 2. An L2 table desc has been removed, where
 *    all S2TTEs in the L3 table that the L2 table desc pointed to were invalid.
 */
void invalidate_block(const struct realm_s2_context *s2_ctx, unsigned long addr)
{
	stage2_tlbi_ipa(s2_ctx, addr, GRANULE_SIZE);
}

/*
 * Invalidate S2 TLB entries with "addr" IPA.
 * Call this function after:
 * 1. An L2 table desc has been removed, where
 *    some S2TTEs in the L3 table that the L2 table desc pointed to were valid.
 */
void invalidate_pages_in_block(const struct realm_s2_context *s2_ctx, unsigned long addr)
{
	stage2_tlbi_ipa(s2_ctx, addr, BLOCK_L2_SIZE);
}
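
/*
 * Illustrative caller sketch (hypothetical, for documentation only): a path
 * that has just zapped a single level 3 entry would typically write the
 * invalid descriptor first and only then invalidate the TLB, e.g.
 *
 *	s2tte_write(&table[wi.index], S2TTE_INVALID_UNPROTECTED);
 *	invalidate_page(&rd->s2_ctx, map_addr);
 *
 * whereas removing a level 2 table descriptor whose level 3 table still held
 * valid entries would use invalidate_pages_in_block() instead, so that every
 * page-sized translation cached for the 2MB region is removed.
 */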

/*
 * Return the index of the entry describing @addr in the translation table at
 * level @level. This only works for non-concatenated page tables, so should
 * not be called to get the index for the starting level.
 *
 * See the library pseudocode
 * aarch64/translation/vmsa_addrcalc/AArch64.TTEntryAddress on which this is
 * modeled.
 */
static unsigned long s2_addr_to_idx(unsigned long addr, long level)
{
	int levels = RTT_PAGE_LEVEL - level;
	int lsb = levels * S2TTE_STRIDE + GRANULE_SHIFT;

	addr >>= lsb;
	addr &= (1UL << S2TTE_STRIDE) - 1;
	return addr;
}
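
/*
 * Worked example (assuming 4KB granules, i.e. S2TTE_STRIDE == 9): for
 * addr = 0x40201000 at level 3, lsb = 12 and the index is
 * (0x40201000 >> 12) & 0x1ff = 1; at level 2, lsb = 21 and the index is
 * (0x40201000 >> 21) & 0x1ff = 1 as well.
 */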

/*
 * Return the index of the entry describing @addr in the translation table
 * starting level. This may return an index >= S2TTES_PER_S2TT when the
 * combination of @start_level and @ipa_bits implies concatenated
 * stage 2 tables.
 *
 * See the library pseudocode
 * aarch64/translation/vmsa_addrcalc/AArch64.S2SLTTEntryAddress on which
 * this is modeled.
 */
static unsigned long s2_sl_addr_to_idx(unsigned long addr, int start_level,
				       unsigned long ipa_bits)
{
	int levels = RTT_PAGE_LEVEL - start_level;
	int lsb = levels * S2TTE_STRIDE + GRANULE_SHIFT;

	addr &= (1UL << ipa_bits) - 1UL;
	addr >>= lsb;
	return addr;
}
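
/*
 * Worked example (assuming 4KB granules, i.e. S2TTE_STRIDE == 9): with
 * ipa_bits = 33 and start_level = 1, lsb = 30 and the returned index is in
 * the range 0..7, so a single starting-level table suffices. With
 * ipa_bits = 40 and start_level = 1 the range grows to 0..1023, and
 * rtt_walk_lock_unlock() below treats, say, index 600 as entry 88 of the
 * second concatenated starting-level table.
 */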

static unsigned long addr_level_mask(unsigned long addr, long level)
{
	int levels = RTT_PAGE_LEVEL - level;
	unsigned int lsb = levels * S2TTE_STRIDE + GRANULE_SHIFT;
	unsigned int msb = S2TTE_OA_BITS - 1;

	return addr & BIT_MASK_ULL(msb, lsb);
}

static inline unsigned long table_entry_to_phys(unsigned long entry)
{
	return addr_level_mask(entry, RTT_PAGE_LEVEL);
}

static inline bool entry_is_table(unsigned long entry)
{
	return (entry & DESC_TYPE_MASK) == S2TTE_L012_TABLE;
}

static unsigned long __table_get_entry(struct granule *g_tbl,
				       unsigned long idx)
{
	unsigned long *table, entry;

	table = granule_map(g_tbl, SLOT_RTT);
	entry = s2tte_read(&table[idx]);
	buffer_unmap(table);

	return entry;
}

static struct granule *__find_next_level_idx(struct granule *g_tbl,
					     unsigned long idx)
{
	const unsigned long entry = __table_get_entry(g_tbl, idx);

	if (!entry_is_table(entry)) {
		return NULL;
	}

	return addr_to_granule(table_entry_to_phys(entry));
}

static struct granule *__find_lock_next_level(struct granule *g_tbl,
					      unsigned long map_addr,
					      long level)
{
	const unsigned long idx = s2_addr_to_idx(map_addr, level);
	struct granule *g = __find_next_level_idx(g_tbl, idx);

	if (g != NULL) {
		granule_lock(g, GRANULE_STATE_RTT);
	}

	return g;
}

/*
 * Walk an RTT until level @level using @map_addr.
 * @g_root is the root (level 0) table and must be locked before the call.
 * @start_level is the initial lookup level used for the stage 2 translation
 * tables which may depend on the configuration of the realm, factoring in the
 * IPA size of the realm and the desired starting level (within the limits
 * defined by the Armv8 VMSA, including options for stage 2 table concatenation).
 * The function uses hand-over-hand locking to avoid race conditions and to
 * allow concurrent access to the parts of the RTT tree which are not covered
 * by the current walk; when the next level table is reached, it is locked
 * before the previously locked table is released.
 * The walk stops when either:
 * - The entry found is a leaf entry (not an RTT Table entry), or
 * - Level @level is reached.
 *
 * On return:
 * - rtt_walk::last_level is the last level that has been reached by the walk.
 * - rtt_walk::g_llt points to the TABLE granule at level rtt_walk::last_level.
 *   The granule is locked.
 * - rtt_walk::index is the entry index at rtt_walk::g_llt for @map_addr.
 */
void rtt_walk_lock_unlock(struct granule *g_root,
			  int start_level,
			  unsigned long ipa_bits,
			  unsigned long map_addr,
			  long level,
			  struct rtt_walk *wi)
{
	struct granule *g_tbls[NR_RTT_LEVELS] = { NULL };
	unsigned long sl_idx;
	int i, last_level;

	assert(start_level >= MIN_STARTING_LEVEL);
	assert(level >= start_level);
	assert(map_addr < (1UL << ipa_bits));
	assert(wi != NULL);

	/* Handle concatenated starting level (SL) tables */
	sl_idx = s2_sl_addr_to_idx(map_addr, start_level, ipa_bits);
	if (sl_idx >= S2TTES_PER_S2TT) {
		unsigned int tt_num = (sl_idx >> S2TTE_STRIDE);
		struct granule *g_concat_root = g_root + tt_num;

		granule_lock(g_concat_root, GRANULE_STATE_RTT);
		granule_unlock(g_root);
		g_root = g_concat_root;
	}

	g_tbls[start_level] = g_root;
	for (i = start_level; i < level; i++) {
		/*
		 * Lock next RTT level. Correct locking order is guaranteed
		 * because reference is obtained from a locked granule
		 * (previous level). Also, hand-over-hand locking/unlocking is
		 * used to avoid race conditions.
		 */
		g_tbls[i + 1] = __find_lock_next_level(g_tbls[i], map_addr, i);
		if (g_tbls[i + 1] == NULL) {
			last_level = i;
			goto out;
		}
		granule_unlock(g_tbls[i]);
	}

	last_level = level;
out:
	wi->last_level = last_level;
	wi->g_llt = g_tbls[last_level];
	wi->index = s2_addr_to_idx(map_addr, last_level);
}
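
/*
 * Illustrative usage sketch (hypothetical caller, for documentation only): a
 * typical walk maps the last-level table, reads the entry for the target IPA
 * and finally drops the lock taken by the walk:
 *
 *	struct rtt_walk wi;
 *	unsigned long *s2tt, s2tte;
 *
 *	rtt_walk_lock_unlock(g_root, start_level, ipa_bits,
 *			     map_addr, RTT_PAGE_LEVEL, &wi);
 *	s2tt = granule_map(wi.g_llt, SLOT_RTT);
 *	s2tte = s2tte_read(&s2tt[wi.index]);
 *	...
 *	buffer_unmap(s2tt);
 *	granule_unlock(wi.g_llt);
 */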

/*
 * Creates a value which can be OR'd with an s2tte to set RIPAS=@ripas.
 */
unsigned long s2tte_create_ripas(enum ripas ripas)
{
	if (ripas == RIPAS_EMPTY) {
		return S2TTE_INVALID_RIPAS_EMPTY;
	}
	return S2TTE_INVALID_RIPAS_RAM;
}

/*
 * Creates an invalid s2tte with HIPAS=UNASSIGNED and RIPAS=@ripas.
 */
unsigned long s2tte_create_unassigned(enum ripas ripas)
{
	return S2TTE_INVALID_HIPAS_UNASSIGNED | s2tte_create_ripas(ripas);
}

/*
 * Creates an invalid s2tte with HIPAS=DESTROYED.
 */
unsigned long s2tte_create_destroyed(void)
{
	return S2TTE_INVALID_DESTROYED;
}

/*
 * Creates an invalid s2tte with output address @pa, HIPAS=ASSIGNED and
 * RIPAS=EMPTY, at level @level.
 */
unsigned long s2tte_create_assigned_empty(unsigned long pa, long level)
{
	assert(level >= RTT_MIN_BLOCK_LEVEL);
	assert(addr_is_level_aligned(pa, level));
	return (pa | S2TTE_INVALID_HIPAS_ASSIGNED | S2TTE_INVALID_RIPAS_EMPTY);
}
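
/*
 * Illustrative example (not part of the build): for a granule at
 * pa = 0x88240000 mapped at RTT_PAGE_LEVEL, s2tte_create_assigned_empty()
 * returns the OA with bits[5:2] = 1 (ASSIGNED) and bit[6] = 0 (EMPTY); the
 * descriptor type bits[1:0] remain 0, so the entry stays invalid and no
 * access is granted until the RIPAS becomes RAM and a valid mapping is
 * created.
 */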

/*
 * Creates a page or block s2tte for a Protected IPA, with output address @pa.
 */
unsigned long s2tte_create_valid(unsigned long pa, long level)
{
	assert(level >= RTT_MIN_BLOCK_LEVEL);
	assert(addr_is_level_aligned(pa, level));
	if (level == RTT_PAGE_LEVEL) {
		return (pa | S2TTE_PAGE);
	}
	return (pa | S2TTE_BLOCK);
}

/*
 * Creates an invalid s2tte with HIPAS=INVALID_NS.
 */
unsigned long s2tte_create_invalid_ns(void)
{
	return S2TTE_INVALID_UNPROTECTED;
}

/*
 * Creates a page or block s2tte for an Unprotected IPA at level @level.
 *
 * The following S2 TTE fields are provided through @s2tte argument:
 * - The physical address
 * - MemAttr
 * - S2AP
 * - Shareability
 */
unsigned long s2tte_create_valid_ns(unsigned long s2tte, long level)
{
	assert(level >= RTT_MIN_BLOCK_LEVEL);
	if (level == RTT_PAGE_LEVEL) {
		return (s2tte | S2TTE_PAGE_NS);
	}
	return (s2tte | S2TTE_BLOCK_NS);
}
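
/*
 * Illustrative example (not part of the build): if the host provides
 * s2tte = pa | S2TTE_MEMATTR_FWB_NORMAL_WB | S2TTE_AP_RW | S2TTE_SH_IS,
 * then s2tte_create_valid_ns(s2tte, RTT_PAGE_LEVEL) yields a valid L3 page
 * descriptor with NS = 1, XN and AF set, preserving the host-chosen memory
 * attributes, access permissions and shareability.
 */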

/*
 * Validate the portion of NS S2TTE that is provided by the host.
 */
bool host_ns_s2tte_is_valid(unsigned long s2tte, long level)
{
	unsigned long mask = addr_level_mask(~0UL, level) |
			     S2TTE_MEMATTR_MASK |
			     S2TTE_AP_MASK |
			     S2TTE_SH_MASK;

	/*
	 * Test that all fields that are not controlled by the host are zero
	 * and that the output address is correctly aligned. Note that
	 * the host is permitted to map any physical address outside PAR.
	 */
	if ((s2tte & ~mask) != 0UL) {
		return false;
	}

	/*
	 * Only one value masked by S2TTE_MEMATTR_MASK is invalid/reserved.
	 */
	if ((s2tte & S2TTE_MEMATTR_MASK) == S2TTE_MEMATTR_FWB_RESERVED) {
		return false;
	}

	/*
	 * Only one value masked by S2TTE_SH_MASK is invalid/reserved.
	 */
	if ((s2tte & S2TTE_SH_MASK) == S2TTE_SH_RESERVED) {
		return false;
	}

	/*
	 * Note that all the values that are masked by S2TTE_AP_MASK are valid.
	 */
	return true;
}
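
/*
 * Illustrative examples (not part of the build): with a level-aligned output
 * address, the host-provided value
 * (pa | S2TTE_MEMATTR_FWB_NORMAL_WB | S2TTE_AP_RW | S2TTE_SH_IS) passes this
 * check, whereas a value carrying S2TTE_SH_RESERVED, or with a bit set
 * outside the permitted fields (for instance bit[55], the NS bit), is
 * rejected.
 */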

/*
 * Returns the portion of NS S2TTE that is set by the host.
 */
unsigned long host_ns_s2tte(unsigned long s2tte, long level)
{
	unsigned long mask = addr_level_mask(~0UL, level) |
			     S2TTE_MEMATTR_MASK |
			     S2TTE_AP_MASK |
			     S2TTE_SH_MASK;
	return (s2tte & mask);
}

/*
 * Creates a table s2tte at level @level with output address @pa.
 */
unsigned long s2tte_create_table(unsigned long pa, long level)
{
	assert(level < RTT_PAGE_LEVEL);
	assert(GRANULE_ALIGNED(pa));

	return (pa | S2TTE_TABLE);
}

/*
 * Returns true if @s2tte has HIPAS=@hipas.
 */
static bool s2tte_has_hipas(unsigned long s2tte, unsigned long hipas)
{
	unsigned long desc_type = s2tte & DESC_TYPE_MASK;
	unsigned long invalid_desc_hipas = s2tte & S2TTE_INVALID_HIPAS_MASK;

	if ((desc_type != S2TTE_Lx_INVALID) || (invalid_desc_hipas != hipas)) {
		return false;
	}
	return true;
}

/*
 * Returns true if @s2tte has HIPAS=UNASSIGNED or HIPAS=INVALID_NS.
 */
bool s2tte_is_unassigned(unsigned long s2tte)
{
	return s2tte_has_hipas(s2tte, S2TTE_INVALID_HIPAS_UNASSIGNED);
}

/*
 * Returns true if @s2tte has HIPAS=DESTROYED.
 */
bool s2tte_is_destroyed(unsigned long s2tte)
{
	return s2tte_has_hipas(s2tte, S2TTE_INVALID_HIPAS_DESTROYED);
}

/*
 * Returns true if @s2tte has HIPAS=ASSIGNED.
 */
bool s2tte_is_assigned(unsigned long s2tte, long level)
{
	(void)level;

	return s2tte_has_hipas(s2tte, S2TTE_INVALID_HIPAS_ASSIGNED);
}

static bool s2tte_check(unsigned long s2tte, long level, unsigned long ns)
{
	unsigned long desc_type;

	if ((s2tte & S2TTE_NS) != ns) {
		return false;
	}

	desc_type = s2tte & DESC_TYPE_MASK;

	/* Only pages at L3 and valid blocks at L2 allowed */
	if (((level == RTT_PAGE_LEVEL) && (desc_type == S2TTE_L3_PAGE)) ||
	    ((level == RTT_MIN_BLOCK_LEVEL) && (desc_type == S2TTE_L012_BLOCK))) {
		return true;
	}

	return false;
}

/*
 * Returns true if @s2tte is a page or block s2tte, and NS=0.
 */
bool s2tte_is_valid(unsigned long s2tte, long level)
{
	return s2tte_check(s2tte, level, 0UL);
}

/*
 * Returns true if @s2tte is a page or block s2tte, and NS=1.
 */
bool s2tte_is_valid_ns(unsigned long s2tte, long level)
{
	return s2tte_check(s2tte, level, S2TTE_NS);
}

/*
 * Returns true if @s2tte is a table at level @level.
 */
bool s2tte_is_table(unsigned long s2tte, long level)
{
	unsigned long desc_type = s2tte & DESC_TYPE_MASK;

	if ((level < RTT_PAGE_LEVEL) && (desc_type == S2TTE_TABLE)) {
		return true;
	}

	return false;
}

/*
 * Returns RIPAS of @s2tte.
 *
 * Caller should ensure that the s2tte is either a valid descriptor, or an
 * invalid descriptor with HIPAS=UNASSIGNED or HIPAS=ASSIGNED.
 */
enum ripas s2tte_get_ripas(unsigned long s2tte)
{
	unsigned long desc_ripas = s2tte & S2TTE_INVALID_RIPAS_MASK;

	/*
	 * If a valid s2tte descriptor is passed, then ensure S2AP[0]
	 * bit is 1 (S2AP is set to RW for lower EL), which corresponds
	 * to RIPAS_RAM (bit[6]) on a valid descriptor.
	 */
	if (((s2tte & DESC_TYPE_MASK) != S2TTE_Lx_INVALID) &&
	    (desc_ripas != S2TTE_INVALID_RIPAS_RAM)) {
		assert(false);
	}

	if (desc_ripas == S2TTE_INVALID_RIPAS_EMPTY) {
		return RIPAS_EMPTY;
	}

	return RIPAS_RAM;
}
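
/*
 * Illustrative example (not part of the build): for an invalid s2tte created
 * by s2tte_create_unassigned(RIPAS_EMPTY), bit[6] is 0 and RIPAS_EMPTY is
 * returned; for a valid page or block descriptor the S2AP field guarantees
 * that bit[6] is set, so RIPAS_RAM is returned.
 */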

/*
 * Populates @s2tt with s2ttes which have HIPAS=UNASSIGNED and RIPAS=@ripas.
 *
 * The granule is populated before it is made a table,
 * hence, don't use s2tte_write for access.
 */
void s2tt_init_unassigned(unsigned long *s2tt, enum ripas ripas)
{
	for (unsigned int i = 0U; i < S2TTES_PER_S2TT; i++) {
		s2tt[i] = s2tte_create_unassigned(ripas);
	}

	dsb(ish);
}

/*
 * Populates @s2tt with s2ttes which have HIPAS=DESTROYED.
 *
 * The granule is populated before it is made a table,
 * hence, don't use s2tte_write for access.
 */
void s2tt_init_destroyed(unsigned long *s2tt)
{
	for (unsigned int i = 0U; i < S2TTES_PER_S2TT; i++) {
		s2tt[i] = s2tte_create_destroyed();
	}

	dsb(ish);
}

unsigned long s2tte_map_size(int level)
{
	int levels, lsb;

	assert(level <= RTT_PAGE_LEVEL);

	levels = RTT_PAGE_LEVEL - level;
	lsb = levels * S2TTE_STRIDE + GRANULE_SHIFT;
	return 1UL << lsb;
}
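
/*
 * Worked example (assuming 4KB granules, i.e. S2TTE_STRIDE == 9):
 * s2tte_map_size(3) == 4KB, s2tte_map_size(2) == 2MB,
 * s2tte_map_size(1) == 1GB and s2tte_map_size(0) == 512GB.
 */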

/*
 * Populates @s2tt with HIPAS=ASSIGNED, RIPAS=EMPTY s2ttes that refer to a
 * contiguous memory block starting at @pa, and mapped at level @level.
 *
 * The granule is populated before it is made a table,
 * hence, don't use s2tte_write for access.
 */
void s2tt_init_assigned_empty(unsigned long *s2tt, unsigned long pa, long level)
{
	const unsigned long map_size = s2tte_map_size(level);
	unsigned int i;

	for (i = 0U; i < S2TTES_PER_S2TT; i++) {
		s2tt[i] = s2tte_create_assigned_empty(pa, level);
		pa += map_size;
	}
	dsb(ish);
}

/*
 * Populates @s2tt with valid s2ttes (HIPAS=ASSIGNED, RIPAS=RAM) that refer to
 * a contiguous memory block starting at @pa, and mapped at level @level.
 *
 * The granule is populated before it is made a table,
 * hence, don't use s2tte_write for access.
 */
void s2tt_init_valid(unsigned long *s2tt, unsigned long pa, long level)
{
	const unsigned long map_size = s2tte_map_size(level);
	unsigned int i;

	for (i = 0U; i < S2TTES_PER_S2TT; i++) {
		s2tt[i] = s2tte_create_valid(pa, level);
		pa += map_size;
	}
	dsb(ish);
}

/*
 * Populates @s2tt with HIPAS=VALID_NS s2ttes that refer to a contiguous
 * memory block starting at @pa, and mapped at level @level.
 *
 * The granule is populated before it is made a table,
 * hence, don't use s2tte_write for access.
 */
void s2tt_init_valid_ns(unsigned long *s2tt, unsigned long pa, long level)
{
	const unsigned long map_size = s2tte_map_size(level);
	unsigned int i;

	for (i = 0U; i < S2TTES_PER_S2TT; i++) {
		s2tt[i] = s2tte_create_valid_ns(pa, level);
		pa += map_size;
	}
	dsb(ish);
}

/* Returns physical address of a page entry or block */
unsigned long s2tte_pa(unsigned long s2tte, long level)
{
	if (s2tte_is_unassigned(s2tte) || s2tte_is_destroyed(s2tte) ||
	    s2tte_is_table(s2tte, level)) {
		assert(false);
	}
	return addr_level_mask(s2tte, level);
}
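
/*
 * Illustrative example (not part of the build): for a valid 2MB block
 * descriptor at level 2 with output address 0x80200000, s2tte_pa() masks out
 * the attribute bits and returns 0x80200000; passing a table descriptor or an
 * unassigned/destroyed s2tte instead trips the assertion above.
 */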

/* Returns physical address of a table entry */
unsigned long s2tte_pa_table(unsigned long s2tte, long level)
{
	assert(s2tte_is_table(s2tte, level));
	return addr_level_mask(s2tte, RTT_PAGE_LEVEL);
}

bool addr_is_level_aligned(unsigned long addr, long level)
{
	return (addr == addr_level_mask(addr, level));
}

typedef bool (*s2tte_type_checker)(unsigned long s2tte);

static bool __table_is_uniform_block(unsigned long *table,
				     s2tte_type_checker s2tte_is_x,
				     enum ripas *ripas_ptr)
{
	unsigned long s2tte = s2tte_read(&table[0]);
	enum ripas ripas;
	unsigned int i;

	if (!s2tte_is_x(s2tte)) {
		return false;
	}

	if (ripas_ptr != NULL) {
		ripas = s2tte_get_ripas(s2tte);
	}

	for (i = 1U; i < S2TTES_PER_S2TT; i++) {
		s2tte = s2tte_read(&table[i]);

		if (!s2tte_is_x(s2tte)) {
			return false;
		}

		if ((ripas_ptr != NULL) &&
		    (s2tte_get_ripas(s2tte) != ripas)) {
			return false;
		}
	}

	if (ripas_ptr != NULL) {
		*ripas_ptr = ripas;
	}

	return true;
}

/*
 * Returns true if all s2ttes in @table have HIPAS=UNASSIGNED and
 * have the same RIPAS.
 *
 * If return value is true, the RIPAS value is returned in @ripas.
 */
bool table_is_unassigned_block(unsigned long *table, enum ripas *ripas)
{
	return __table_is_uniform_block(table, s2tte_is_unassigned, ripas);
}

/*
 * Returns true if all s2ttes in @table have HIPAS=DESTROYED.
 */
bool table_is_destroyed_block(unsigned long *table)
{
	return __table_is_uniform_block(table, s2tte_is_destroyed, NULL);
}

typedef bool (*s2tte_type_level_checker)(unsigned long s2tte, long level);

static bool __table_maps_block(unsigned long *table,
			       long level,
			       s2tte_type_level_checker s2tte_is_x)
{
	unsigned long base_pa;
	unsigned long map_size = s2tte_map_size(level);
	unsigned long s2tte = s2tte_read(&table[0]);
	unsigned int i;

	if (!s2tte_is_x(s2tte, level)) {
		return false;
	}

	base_pa = s2tte_pa(s2tte, level);
	if (!addr_is_level_aligned(base_pa, level - 1L)) {
		return false;
	}

	for (i = 1U; i < S2TTES_PER_S2TT; i++) {
		unsigned long expected_pa = base_pa + (i * map_size);

		s2tte = s2tte_read(&table[i]);

		if (!s2tte_is_x(s2tte, level)) {
			return false;
		}

		if (s2tte_pa(s2tte, level) != expected_pa) {
			return false;
		}
	}

	return true;
}

/*
 * Returns true if all s2ttes in @table have HIPAS=ASSIGNED
 * and refer to a contiguous block of granules aligned to @level - 1.
 */
bool table_maps_assigned_block(unsigned long *table, long level)
{
	return __table_maps_block(table, level, s2tte_is_assigned);
}

/*
 * Returns true if all s2ttes in @table are valid (HIPAS=ASSIGNED, RIPAS=RAM)
 * and refer to a contiguous block of granules aligned to @level - 1.
 */
bool table_maps_valid_block(unsigned long *table, long level)
{
	return __table_maps_block(table, level, s2tte_is_valid);
}

/*
 * Returns true if all s2ttes in @table have HIPAS=VALID_NS and
 * refer to a contiguous block of granules aligned to @level - 1.
 */
bool table_maps_valid_ns_block(unsigned long *table, long level)
{
	return __table_maps_block(table, level, s2tte_is_valid_ns);
}
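
/*
 * Illustrative fold sketch (hypothetical caller, for documentation only): a
 * level @level table which satisfies table_maps_valid_block() can be replaced
 * in its parent by a single block descriptor covering the same output address
 * range, e.g.
 *
 *	if (table_maps_valid_block(table, level)) {
 *		unsigned long block_pa = s2tte_pa(s2tte_read(&table[0]), level);
 *
 *		parent_s2tte = s2tte_create_valid(block_pa, level - 1L);
 *	}
 *
 * Updating the parent entry and performing the matching TLB maintenance
 * remain the caller's responsibility.
 */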