/*
 * SPDX-License-Identifier: BSD-3-Clause
 * SPDX-FileCopyrightText: Copyright TF-RMM Contributors.
 */

#include <arch_helpers.h>
#include <attestation_token.h>
#include <bitmap.h>
#include <buffer.h>
#include <gic.h>
#include <granule.h>
#include <memory_alloc.h>
#include <realm.h>
#include <ripas.h>
#include <smc.h>
#include <status.h>
#include <stddef.h>
#include <string.h>
#include <table.h>

/*
 * For prototyping we assume 4K pages
 */
#define BLOCK_L2_SIZE           (GRANULE_SIZE * S2TTES_PER_S2TT)

/*
 * The maximum number of bits supported by the RMM for a stage 2 translation
 * output address (including stage 2 table entries).
 */
#define S2TTE_OA_BITS           48

#define DESC_TYPE_MASK          0x3UL
#define S2TTE_L012_TABLE        0x3UL
#define S2TTE_L012_BLOCK        0x1UL
#define S2TTE_L3_PAGE           0x3UL
#define S2TTE_Lx_INVALID        0x0UL

/*
 * The following constants for the mapping attributes (S2TTE_MEMATTR_*)
 * assume that HCR_EL2.FWB is set. With FWB, setting bit[4] to 1 and
 * bits[3:2] to 2 forces everything to be mapped as Normal Write-Back.
 */
#define S2TTE_MEMATTR_SHIFT             2
#define S2TTE_MEMATTR_MASK              (0x7UL << S2TTE_MEMATTR_SHIFT)
#define S2TTE_MEMATTR_FWB_NORMAL_WB     ((1UL << 4) | (2UL << 2))
#define S2TTE_MEMATTR_FWB_RESERVED      ((1UL << 4) | (0UL << 2))

#define S2TTE_AP_SHIFT          6
#define S2TTE_AP_MASK           (3UL << S2TTE_AP_SHIFT)
#define S2TTE_AP_RW             (3UL << S2TTE_AP_SHIFT)

#define S2TTE_SH_SHIFT          8
#define S2TTE_SH_MASK           (3UL << S2TTE_SH_SHIFT)
#define S2TTE_SH_NS             (0UL << S2TTE_SH_SHIFT)
#define S2TTE_SH_RESERVED       (1UL << S2TTE_SH_SHIFT)
#define S2TTE_SH_OS             (2UL << S2TTE_SH_SHIFT)
#define S2TTE_SH_IS             (3UL << S2TTE_SH_SHIFT)  /* Inner Shareable */

#define S2TTE_AF                (1UL << 10)
#define S2TTE_XN                (2UL << 53)
#define S2TTE_NS                (1UL << 55)

#define S2TTE_ATTRS     (S2TTE_MEMATTR_FWB_NORMAL_WB | S2TTE_AP_RW | \
                         S2TTE_SH_IS | S2TTE_AF)

#define S2TTE_TABLE     S2TTE_L012_TABLE
#define S2TTE_BLOCK     (S2TTE_ATTRS | S2TTE_L012_BLOCK)
#define S2TTE_PAGE      (S2TTE_ATTRS | S2TTE_L3_PAGE)
#define S2TTE_BLOCK_NS  (S2TTE_NS | S2TTE_XN | S2TTE_AF | S2TTE_L012_BLOCK)
#define S2TTE_PAGE_NS   (S2TTE_NS | S2TTE_XN | S2TTE_AF | S2TTE_L3_PAGE)
#define S2TTE_INVALID   0

/*
 * The type of an S2TTE is one of the following:
 *
 * - Invalid
 * - Valid page
 * - Valid block
 * - Table
 *
 * Within an invalid S2TTE for a Protected IPA, architecturally RES0 bits are
 * used to encode the HIPAS and RIPAS.
 *
 * A valid S2TTE for a Protected IPA implies HIPAS=ASSIGNED and RIPAS=RAM.
 *
 * An invalid S2TTE for an Unprotected IPA implies HIPAS=INVALID_NS.
 * A valid S2TTE for an Unprotected IPA implies HIPAS=VALID_NS.
 *
 * The following table defines the mapping from a (HIPAS, RIPAS) tuple to the
 * value of the S2TTE.
 *
 * ------------------------------------------------------------------------------
 * IPA          HIPAS         RIPAS   S2TTE value
 * ==============================================================================
 * Protected    UNASSIGNED    EMPTY   (S2TTE_INVALID_HIPAS_UNASSIGNED |
 *                                     S2TTE_INVALID_RIPAS_EMPTY)
 * Protected    UNASSIGNED    RAM     (S2TTE_INVALID_HIPAS_UNASSIGNED |
 *                                     S2TTE_INVALID_RIPAS_RAM)
 * Protected    ASSIGNED      EMPTY   (S2TTE_INVALID_HIPAS_ASSIGNED |
 *                                     S2TTE_INVALID_RIPAS_EMPTY)
 * Protected    ASSIGNED      RAM     Valid page / block with NS=0
 * Protected    DESTROYED     *       S2TTE_INVALID_DESTROYED
 * Unprotected  INVALID_NS    N/A     S2TTE_INVALID_UNPROTECTED
 * Unprotected  VALID_NS      N/A     Valid page / block with NS=1
 * ------------------------------------------------------------------------------
 */

#define S2TTE_INVALID_HIPAS_SHIFT       2
#define S2TTE_INVALID_HIPAS_WIDTH       4
#define S2TTE_INVALID_HIPAS_MASK        MASK(S2TTE_INVALID_HIPAS)

#define S2TTE_INVALID_HIPAS_UNASSIGNED  (INPLACE(S2TTE_INVALID_HIPAS, 0))
#define S2TTE_INVALID_HIPAS_ASSIGNED    (INPLACE(S2TTE_INVALID_HIPAS, 1))
#define S2TTE_INVALID_HIPAS_DESTROYED   (INPLACE(S2TTE_INVALID_HIPAS, 2))

#define S2TTE_INVALID_RIPAS_SHIFT       6
#define S2TTE_INVALID_RIPAS_WIDTH       1
#define S2TTE_INVALID_RIPAS_MASK        MASK(S2TTE_INVALID_RIPAS)

#define S2TTE_INVALID_RIPAS_EMPTY       (INPLACE(S2TTE_INVALID_RIPAS, 0))
#define S2TTE_INVALID_RIPAS_RAM         (INPLACE(S2TTE_INVALID_RIPAS, 1))

#define S2TTE_INVALID_DESTROYED         S2TTE_INVALID_HIPAS_DESTROYED
#define S2TTE_INVALID_UNPROTECTED       0x0UL

#define NR_RTT_LEVELS   4

/*
 * Invalidates S2 TLB entries in the [ipa, ipa + size) region tagged with
 * `vmid`.
 */
static void stage2_tlbi_ipa(const struct realm_s2_context *s2_ctx,
                            unsigned long ipa,
                            unsigned long size)
{
        /*
         * Notes:
         *
         * - This follows the description provided in the Arm ARM on
         *   "Invalidation of TLB entries from stage 2 translations".
         *
         * - @TODO: Provide additional information to this primitive so that
         *   we can utilize:
         *   - The TTL level hint, see FEAT_TTL,
         *   - Final level lookup only invalidation,
         *   - Address range invalidation.
         */

        /*
         * Save the current content of vttbr_el2.
         */
        unsigned long old_vttbr_el2 = read_vttbr_el2();

        /*
         * Make 'vmid' the `current vmid`. Note that the tlbi instructions
         * below target the TLB entries that match the `current vmid`.
         */
        write_vttbr_el2(INPLACE(VTTBR_EL2_VMID, s2_ctx->vmid));
        isb();

        /*
         * Invalidate entries in S2 TLB caches that
         * match both `ipa` & the `current vmid`.
         */
        while (size != 0UL) {
                tlbiipas2e1is(ipa >> 12);
                size -= GRANULE_SIZE;
                ipa += GRANULE_SIZE;
        }
        dsb(ish);

        /*
         * The architecture does not require TLB invalidation by IPA to affect
         * combined Stage-1 + Stage-2 TLBs. Therefore we must invalidate all of
         * Stage-1 (tagged with the `current vmid`) after invalidating Stage-2.
         */
        tlbivmalle1is();
        dsb(ish);
        isb();

        /*
         * Restore the old content of vttbr_el2.
         */
        write_vttbr_el2(old_vttbr_el2);
        isb();
}

/*
 * Invalidate S2 TLB entries with "addr" IPA.
 * Call this function after:
 * 1. An L3 page desc has been removed.
 */
void invalidate_page(const struct realm_s2_context *s2_ctx, unsigned long addr)
{
        stage2_tlbi_ipa(s2_ctx, addr, GRANULE_SIZE);
}

/*
 * Invalidate S2 TLB entries with "addr" IPA.
 * Call this function after:
 * 1. An L2 block desc has been removed, or
 * 2a. An L2 table desc has been removed, where
 * 2b. all S2TTEs in the L3 table that the L2 table desc pointed to were invalid.
 */
void invalidate_block(const struct realm_s2_context *s2_ctx, unsigned long addr)
{
        stage2_tlbi_ipa(s2_ctx, addr, GRANULE_SIZE);
}

/*
 * Invalidate S2 TLB entries with "addr" IPA.
 * Call this function after:
 * 1a. An L2 table desc has been removed, where
 * 1b. some S2TTEs in the table that the L2 table desc pointed to were valid.
 */
void invalidate_pages_in_block(const struct realm_s2_context *s2_ctx, unsigned long addr)
{
        stage2_tlbi_ipa(s2_ctx, addr, BLOCK_L2_SIZE);
}

/*
 * Return the index of the entry describing @addr in the translation table at
 * level @level. This only works for non-concatenated page tables, so should
 * not be called to get the index for the starting level.
 *
 * See the library pseudocode
 * aarch64/translation/vmsa_addrcalc/AArch64.TTEntryAddress on which this is
 * modeled.
 */
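/*
 * For example, assuming 4K granules (GRANULE_SHIFT == 12) and a 9-bit
 * per-level stride (S2TTE_STRIDE == 9), the level 3 index is addr[20:12]
 * and the level 2 index is addr[29:21].
 */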
static unsigned long s2_addr_to_idx(unsigned long addr, long level)
{
        int levels = RTT_PAGE_LEVEL - level;
        int lsb = levels * S2TTE_STRIDE + GRANULE_SHIFT;

        addr >>= lsb;
        addr &= (1UL << S2TTE_STRIDE) - 1;
        return addr;
}

/*
 * Return the index of the entry describing @addr in the translation table
 * starting level. This may return an index >= S2TTES_PER_S2TT when the
 * combination of @start_level and @ipa_bits implies concatenated
 * stage 2 tables.
 *
 * See the library pseudocode
 * aarch64/translation/vmsa_addrcalc/AArch64.S2SLTTEntryAddress on which
 * this is modeled.
 */
static unsigned long s2_sl_addr_to_idx(unsigned long addr, int start_level,
                                       unsigned long ipa_bits)
{
        int levels = RTT_PAGE_LEVEL - start_level;
        int lsb = levels * S2TTE_STRIDE + GRANULE_SHIFT;

        addr &= (1UL << ipa_bits) - 1UL;
        addr >>= lsb;
        return addr;
}

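/*
 * Mask @addr down to the output address bits that are significant at @level,
 * i.e. bits [S2TTE_OA_BITS - 1 : lsb], where lsb is the least significant
 * address bit translated at that level.
 */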
static unsigned long addr_level_mask(unsigned long addr, long level)
{
        int levels = RTT_PAGE_LEVEL - level;
        unsigned int lsb = levels * S2TTE_STRIDE + GRANULE_SHIFT;
        unsigned int msb = S2TTE_OA_BITS - 1;

        return addr & BIT_MASK_ULL(msb, lsb);
}

static inline unsigned long table_entry_to_phys(unsigned long entry)
{
        return addr_level_mask(entry, RTT_PAGE_LEVEL);
}

static inline bool entry_is_table(unsigned long entry)
{
        return (entry & DESC_TYPE_MASK) == S2TTE_L012_TABLE;
}

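/*
 * Read the S2TTE at index @idx of the table contained in granule @g_tbl,
 * mapping the granule for the duration of the access.
 */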
static unsigned long __table_get_entry(struct granule *g_tbl,
                                       unsigned long idx)
{
        unsigned long *table, entry;

        table = granule_map(g_tbl, SLOT_RTT);
        entry = s2tte_read(&table[idx]);
        buffer_unmap(table);

        return entry;
}

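/*
 * Return the granule of the next-level table that entry @idx of @g_tbl
 * points to, or NULL if that entry is not a table descriptor.
 */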
static struct granule *__find_next_level_idx(struct granule *g_tbl,
                                             unsigned long idx)
{
        const unsigned long entry = __table_get_entry(g_tbl, idx);

        if (!entry_is_table(entry)) {
                return NULL;
        }

        return addr_to_granule(table_entry_to_phys(entry));
}

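/*
 * Find the next-level table granule for @map_addr from the table in @g_tbl
 * at @level and, if it exists, lock it as an RTT granule before returning it.
 */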
static struct granule *__find_lock_next_level(struct granule *g_tbl,
                                              unsigned long map_addr,
                                              long level)
{
        const unsigned long idx = s2_addr_to_idx(map_addr, level);
        struct granule *g = __find_next_level_idx(g_tbl, idx);

        if (g != NULL) {
                granule_lock(g, GRANULE_STATE_RTT);
        }

        return g;
}

/*
 * Walk an RTT until level @level using @map_addr.
 * @g_root is the root table (at @start_level) and must be locked before the
 * call.
 * @start_level is the initial lookup level used for the stage 2 translation
 * tables which may depend on the configuration of the realm, factoring in the
 * IPA size of the realm and the desired starting level (within the limits
 * defined by the Armv8 VMSA, including options for stage 2 table
 * concatenation).
 * The function uses hand-over-hand locking to avoid race conditions and to
 * allow concurrent access to parts of the RTT tree which are not part of the
 * current walk; when a next-level table is reached, it is locked before the
 * previously locked table is released.
 * The walk stops when either:
 * - The entry found is a leaf entry (not an RTT Table entry), or
 * - Level @level is reached.
 *
 * On return:
 * - rtt_walk::last_level is the last level reached by the walk.
 * - rtt_walk::g_llt points to the TABLE granule at level rtt_walk::last_level.
 *   The granule is locked.
 * - rtt_walk::index is the entry index at rtt_walk::g_llt for @map_addr.
 */
void rtt_walk_lock_unlock(struct granule *g_root,
                          int start_level,
                          unsigned long ipa_bits,
                          unsigned long map_addr,
                          long level,
                          struct rtt_walk *wi)
{
        struct granule *g_tbls[NR_RTT_LEVELS] = { NULL };
        unsigned long sl_idx;
        int i, last_level;

        assert(start_level >= MIN_STARTING_LEVEL);
        assert(level >= start_level);
        assert(map_addr < (1UL << ipa_bits));
        assert(wi != NULL);

        /* Handle concatenated starting level (SL) tables */
        sl_idx = s2_sl_addr_to_idx(map_addr, start_level, ipa_bits);
        if (sl_idx >= S2TTES_PER_S2TT) {
                unsigned int tt_num = (sl_idx >> S2TTE_STRIDE);
                struct granule *g_concat_root = g_root + tt_num;

                granule_lock(g_concat_root, GRANULE_STATE_RTT);
                granule_unlock(g_root);
                g_root = g_concat_root;
        }

        g_tbls[start_level] = g_root;
        for (i = start_level; i < level; i++) {
                /*
                 * Lock next RTT level. Correct locking order is guaranteed
                 * because reference is obtained from a locked granule
                 * (previous level). Also, hand-over-hand locking/unlocking is
                 * used to avoid race conditions.
                 */
                g_tbls[i + 1] = __find_lock_next_level(g_tbls[i], map_addr, i);
                if (g_tbls[i + 1] == NULL) {
                        last_level = i;
                        goto out;
                }
                granule_unlock(g_tbls[i]);
        }

        last_level = level;
out:
        wi->last_level = last_level;
        wi->g_llt = g_tbls[last_level];
        wi->index = s2_addr_to_idx(map_addr, last_level);
}

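/*
 * Illustrative caller pattern for rtt_walk_lock_unlock() (a sketch only;
 * error handling and the surrounding RMI context are omitted). The walk
 * info names (g_llt, index, last_level) are the rtt_walk fields used above.
 *
 *      struct rtt_walk wi;
 *      unsigned long *s2tt, s2tte;
 *
 *      granule_lock(g_root, GRANULE_STATE_RTT);
 *      rtt_walk_lock_unlock(g_root, start_level, ipa_bits,
 *                           map_addr, level, &wi);
 *      s2tt = granule_map(wi.g_llt, SLOT_RTT);
 *      s2tte = s2tte_read(&s2tt[wi.index]);
 *      ... inspect or update the entry found at wi.last_level ...
 *      buffer_unmap(s2tt);
 *      granule_unlock(wi.g_llt);
 */
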
/*
 * Creates a value which can be OR'd with an s2tte to set RIPAS=@ripas.
 */
unsigned long s2tte_create_ripas(enum ripas ripas)
{
        if (ripas == RIPAS_EMPTY) {
                return S2TTE_INVALID_RIPAS_EMPTY;
        }
        return S2TTE_INVALID_RIPAS_RAM;
}

/*
 * Creates an invalid s2tte with HIPAS=UNASSIGNED and RIPAS=@ripas.
 */
unsigned long s2tte_create_unassigned(enum ripas ripas)
{
        return S2TTE_INVALID_HIPAS_UNASSIGNED | s2tte_create_ripas(ripas);
}

/*
 * Creates an invalid s2tte with HIPAS=DESTROYED.
 */
unsigned long s2tte_create_destroyed(void)
{
        return S2TTE_INVALID_DESTROYED;
}

/*
 * Creates an invalid s2tte with output address @pa, HIPAS=ASSIGNED and
 * RIPAS=EMPTY, at level @level.
 */
unsigned long s2tte_create_assigned_empty(unsigned long pa, long level)
{
        assert(level >= RTT_MIN_BLOCK_LEVEL);
        assert(addr_is_level_aligned(pa, level));
        return (pa | S2TTE_INVALID_HIPAS_ASSIGNED | S2TTE_INVALID_RIPAS_EMPTY);
}

/*
 * Creates a page or block s2tte for a Protected IPA, with output address @pa.
 */
unsigned long s2tte_create_valid(unsigned long pa, long level)
{
        assert(level >= RTT_MIN_BLOCK_LEVEL);
        assert(addr_is_level_aligned(pa, level));
        if (level == RTT_PAGE_LEVEL) {
                return (pa | S2TTE_PAGE);
        }
        return (pa | S2TTE_BLOCK);
}

/*
 * Creates an invalid s2tte with HIPAS=INVALID_NS.
 */
unsigned long s2tte_create_invalid_ns(void)
{
        return S2TTE_INVALID_UNPROTECTED;
}

/*
 * Creates a page or block s2tte for an Unprotected IPA at level @level.
 *
 * The following S2 TTE fields are provided through the @s2tte argument:
 * - The physical address
 * - MemAttr
 * - S2AP
 * - Shareability
 */
unsigned long s2tte_create_valid_ns(unsigned long s2tte, long level)
{
        assert(level >= RTT_MIN_BLOCK_LEVEL);
        if (level == RTT_PAGE_LEVEL) {
                return (s2tte | S2TTE_PAGE_NS);
        }
        return (s2tte | S2TTE_BLOCK_NS);
}

/*
 * Validate the portion of NS S2TTE that is provided by the host.
 */
bool host_ns_s2tte_is_valid(unsigned long s2tte, long level)
{
        unsigned long mask = addr_level_mask(~0UL, level) |
                             S2TTE_MEMATTR_MASK |
                             S2TTE_AP_MASK |
                             S2TTE_SH_MASK;

        /*
         * Test that all fields that are not controlled by the host are zero
         * and that the output address is correctly aligned. Note that
         * the host is permitted to map any physical address outside PAR.
         */
        if ((s2tte & ~mask) != 0UL) {
                return false;
        }

        /*
         * Only one value masked by S2TTE_MEMATTR_MASK is invalid/reserved.
         */
        if ((s2tte & S2TTE_MEMATTR_MASK) == S2TTE_MEMATTR_FWB_RESERVED) {
                return false;
        }

        /*
         * Only one value masked by S2TTE_SH_MASK is invalid/reserved.
         */
        if ((s2tte & S2TTE_SH_MASK) == S2TTE_SH_RESERVED) {
                return false;
        }

        /*
         * Note that all the values that are masked by S2TTE_AP_MASK are valid.
         */
        return true;
}

/*
 * Returns the portion of NS S2TTE that is set by the host.
 */
unsigned long host_ns_s2tte(unsigned long s2tte, long level)
{
        unsigned long mask = addr_level_mask(~0UL, level) |
                             S2TTE_MEMATTR_MASK |
                             S2TTE_AP_MASK |
                             S2TTE_SH_MASK;
        return (s2tte & mask);
}

/*
 * Creates a table s2tte at level @level with output address @pa.
 */
unsigned long s2tte_create_table(unsigned long pa, long level)
{
        assert(level < RTT_PAGE_LEVEL);
        assert(GRANULE_ALIGNED(pa));

        return (pa | S2TTE_TABLE);
}

/*
 * Returns true if @s2tte has HIPAS=@hipas.
 */
static bool s2tte_has_hipas(unsigned long s2tte, unsigned long hipas)
{
        unsigned long desc_type = s2tte & DESC_TYPE_MASK;
        unsigned long invalid_desc_hipas = s2tte & S2TTE_INVALID_HIPAS_MASK;

        if ((desc_type != S2TTE_Lx_INVALID) || (invalid_desc_hipas != hipas)) {
                return false;
        }
        return true;
}

/*
 * Returns true if @s2tte has HIPAS=UNASSIGNED or HIPAS=INVALID_NS.
 */
bool s2tte_is_unassigned(unsigned long s2tte)
{
        return s2tte_has_hipas(s2tte, S2TTE_INVALID_HIPAS_UNASSIGNED);
}

/*
 * Returns true if @s2tte has HIPAS=DESTROYED.
 */
bool s2tte_is_destroyed(unsigned long s2tte)
{
        return s2tte_has_hipas(s2tte, S2TTE_INVALID_HIPAS_DESTROYED);
}

/*
 * Returns true if @s2tte has HIPAS=ASSIGNED.
 */
bool s2tte_is_assigned(unsigned long s2tte, long level)
{
        (void)level;

        return s2tte_has_hipas(s2tte, S2TTE_INVALID_HIPAS_ASSIGNED);
}

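/*
 * Returns true if @s2tte is a valid descriptor whose NS bit matches @ns:
 * a page descriptor at RTT_PAGE_LEVEL or a block descriptor at
 * RTT_MIN_BLOCK_LEVEL.
 */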
static bool s2tte_check(unsigned long s2tte, long level, unsigned long ns)
{
        unsigned long desc_type;

        if ((s2tte & S2TTE_NS) != ns) {
                return false;
        }

        desc_type = s2tte & DESC_TYPE_MASK;

        /* Only pages at L3 and valid blocks at L2 allowed */
        if (((level == RTT_PAGE_LEVEL) && (desc_type == S2TTE_L3_PAGE)) ||
            ((level == RTT_MIN_BLOCK_LEVEL) && (desc_type == S2TTE_L012_BLOCK))) {
                return true;
        }

        return false;
}

/*
 * Returns true if @s2tte is a page or block s2tte, and NS=0.
 */
bool s2tte_is_valid(unsigned long s2tte, long level)
{
        return s2tte_check(s2tte, level, 0UL);
}

/*
 * Returns true if @s2tte is a page or block s2tte, and NS=1.
 */
bool s2tte_is_valid_ns(unsigned long s2tte, long level)
{
        return s2tte_check(s2tte, level, S2TTE_NS);
}

/*
 * Returns true if @s2tte is a table at level @level.
 */
bool s2tte_is_table(unsigned long s2tte, long level)
{
        unsigned long desc_type = s2tte & DESC_TYPE_MASK;

        if ((level < RTT_PAGE_LEVEL) && (desc_type == S2TTE_TABLE)) {
                return true;
        }

        return false;
}

/*
 * Returns RIPAS of @s2tte.
 *
 * Caller should ensure that HIPAS=UNASSIGNED or HIPAS=ASSIGNED.
 * The s2tte may be either a valid or an invalid descriptor.
 */
enum ripas s2tte_get_ripas(unsigned long s2tte)
{
        unsigned long desc_ripas = s2tte & S2TTE_INVALID_RIPAS_MASK;

        /*
         * If a valid s2tte descriptor is passed, then ensure S2AP[0]
         * bit is 1 (S2AP is set to RW for lower EL), which corresponds
         * to RIPAS_RAM (bit[6]) on a valid descriptor.
         */
        if (((s2tte & DESC_TYPE_MASK) != S2TTE_Lx_INVALID) &&
            (desc_ripas != S2TTE_INVALID_RIPAS_RAM)) {
                assert(false);
        }

        if (desc_ripas == S2TTE_INVALID_RIPAS_EMPTY) {
                return RIPAS_EMPTY;
        }

        return RIPAS_RAM;
}

/*
 * Populates @s2tt with s2ttes which have HIPAS=UNASSIGNED and RIPAS=@ripas.
 *
 * The granule is populated before it is made a table,
 * hence, don't use s2tte_write for access.
 */
void s2tt_init_unassigned(unsigned long *s2tt, enum ripas ripas)
{
        for (unsigned int i = 0U; i < S2TTES_PER_S2TT; i++) {
                s2tt[i] = s2tte_create_unassigned(ripas);
        }

        dsb(ish);
}

/*
 * Populates @s2tt with s2ttes which have HIPAS=DESTROYED.
 *
 * The granule is populated before it is made a table,
 * hence, don't use s2tte_write for access.
 */
void s2tt_init_destroyed(unsigned long *s2tt)
{
        for (unsigned int i = 0U; i < S2TTES_PER_S2TT; i++) {
                s2tt[i] = s2tte_create_destroyed();
        }

        dsb(ish);
}

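/*
 * Returns the size, in bytes, of the IPA range mapped by a single S2TTE at
 * @level.
 */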
unsigned long s2tte_map_size(int level)
{
        int levels, lsb;

        assert(level <= RTT_PAGE_LEVEL);

        levels = RTT_PAGE_LEVEL - level;
        lsb = levels * S2TTE_STRIDE + GRANULE_SHIFT;
        return 1UL << lsb;
}

/*
 * Populates @s2tt with HIPAS=ASSIGNED, RIPAS=EMPTY s2ttes that refer to a
 * contiguous memory block starting at @pa, and mapped at level @level.
 *
 * The granule is populated before it is made a table,
 * hence, don't use s2tte_write for access.
 */
void s2tt_init_assigned_empty(unsigned long *s2tt, unsigned long pa, long level)
{
        const unsigned long map_size = s2tte_map_size(level);
        unsigned int i;

        for (i = 0U; i < S2TTES_PER_S2TT; i++) {
                s2tt[i] = s2tte_create_assigned_empty(pa, level);
                pa += map_size;
        }
        dsb(ish);
}

/*
 * Populates @s2tt with valid s2ttes (HIPAS=ASSIGNED, RIPAS=RAM) that refer to
 * a contiguous memory block starting at @pa, and mapped at level @level.
 *
 * The granule is populated before it is made a table,
 * hence, don't use s2tte_write for access.
 */
void s2tt_init_valid(unsigned long *s2tt, unsigned long pa, long level)
{
        const unsigned long map_size = s2tte_map_size(level);
        unsigned int i;

        for (i = 0U; i < S2TTES_PER_S2TT; i++) {
                s2tt[i] = s2tte_create_valid(pa, level);
                pa += map_size;
        }
        dsb(ish);
}

/*
 * Populates @s2tt with valid NS s2ttes (HIPAS=VALID_NS) that refer to a
 * contiguous memory block starting at @pa, and mapped at level @level.
 *
 * The granule is populated before it is made a table,
 * hence, don't use s2tte_write for access.
 */
void s2tt_init_valid_ns(unsigned long *s2tt, unsigned long pa, long level)
{
        const unsigned long map_size = s2tte_map_size(level);
        unsigned int i;

        for (i = 0U; i < S2TTES_PER_S2TT; i++) {
                s2tt[i] = s2tte_create_valid_ns(pa, level);
                pa += map_size;
        }
        dsb(ish);
}

/* Returns the physical address of a page, block or assigned s2tte */
unsigned long s2tte_pa(unsigned long s2tte, long level)
{
        if (s2tte_is_unassigned(s2tte) || s2tte_is_destroyed(s2tte) ||
            s2tte_is_table(s2tte, level)) {
                assert(false);
        }
        return addr_level_mask(s2tte, level);
}

/* Returns physical address of a table entry */
unsigned long s2tte_pa_table(unsigned long s2tte, long level)
{
        assert(s2tte_is_table(s2tte, level));
        return addr_level_mask(s2tte, RTT_PAGE_LEVEL);
}

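/*
 * Returns true if @addr is aligned to the size of the address range mapped
 * at @level (and has no bits set above S2TTE_OA_BITS).
 */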
bool addr_is_level_aligned(unsigned long addr, long level)
{
        return (addr == addr_level_mask(addr, level));
}

typedef bool (*s2tte_type_checker)(unsigned long s2tte);

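/*
 * Returns true if every s2tte in @table satisfies the @s2tte_is_x check and,
 * when @ripas_ptr is not NULL, all entries report the same RIPAS; in that
 * case the common RIPAS is returned through @ripas_ptr.
 */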
static bool __table_is_uniform_block(unsigned long *table,
                                     s2tte_type_checker s2tte_is_x,
                                     enum ripas *ripas_ptr)
{
        unsigned long s2tte = s2tte_read(&table[0]);
        enum ripas ripas;
        unsigned int i;

        if (!s2tte_is_x(s2tte)) {
                return false;
        }

        if (ripas_ptr != NULL) {
                ripas = s2tte_get_ripas(s2tte);
        }

        for (i = 1U; i < S2TTES_PER_S2TT; i++) {
                s2tte = s2tte_read(&table[i]);

                if (!s2tte_is_x(s2tte)) {
                        return false;
                }

                if ((ripas_ptr != NULL) &&
                    (s2tte_get_ripas(s2tte) != ripas)) {
                        return false;
                }
        }

        if (ripas_ptr != NULL) {
                *ripas_ptr = ripas;
        }

        return true;
}

/*
 * Returns true if all s2ttes in @table have HIPAS=UNASSIGNED and
 * have the same RIPAS.
 *
 * If return value is true, the RIPAS value is returned in @ripas.
 */
bool table_is_unassigned_block(unsigned long *table, enum ripas *ripas)
{
        return __table_is_uniform_block(table, s2tte_is_unassigned, ripas);
}

/*
 * Returns true if all s2ttes in @table have HIPAS=DESTROYED.
 */
bool table_is_destroyed_block(unsigned long *table)
{
        return __table_is_uniform_block(table, s2tte_is_destroyed, NULL);
}

typedef bool (*s2tte_type_level_checker)(unsigned long s2tte, long level);

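/*
 * Returns true if every s2tte in @table satisfies the @s2tte_is_x check at
 * @level and the entries map a physically contiguous block whose base is
 * aligned to @level - 1.
 */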
static bool __table_maps_block(unsigned long *table,
                               long level,
                               s2tte_type_level_checker s2tte_is_x)
{
        unsigned long base_pa;
        unsigned long map_size = s2tte_map_size(level);
        unsigned long s2tte = s2tte_read(&table[0]);
        unsigned int i;

        if (!s2tte_is_x(s2tte, level)) {
                return false;
        }

        base_pa = s2tte_pa(s2tte, level);
        if (!addr_is_level_aligned(base_pa, level - 1L)) {
                return false;
        }

        for (i = 1U; i < S2TTES_PER_S2TT; i++) {
                unsigned long expected_pa = base_pa + (i * map_size);

                s2tte = s2tte_read(&table[i]);

                if (!s2tte_is_x(s2tte, level)) {
                        return false;
                }

                if (s2tte_pa(s2tte, level) != expected_pa) {
                        return false;
                }
        }

        return true;
}

/*
 * Returns true if all s2ttes in @table have HIPAS=ASSIGNED
 * and refer to a contiguous block of granules aligned to @level - 1.
 */
bool table_maps_assigned_block(unsigned long *table, long level)
{
        return __table_maps_block(table, level, s2tte_is_assigned);
}

/*
 * Returns true if all s2ttes in @table are valid (HIPAS=ASSIGNED, RIPAS=RAM)
 * and refer to a contiguous block of granules aligned to @level - 1.
 */
bool table_maps_valid_block(unsigned long *table, long level)
{
        return __table_maps_block(table, level, s2tte_is_valid);
}

/*
 * Returns true if all s2ttes in @table have HIPAS=VALID_NS and
 * refer to a contiguous block of granules aligned to @level - 1.
 */
bool table_maps_valid_ns_block(unsigned long *table, long level)
{
        return __table_maps_block(table, level, s2tte_is_valid_ns);
}