Andrew Scull | 1883487 | 2018-10-12 11:48:09 +0100 | [diff] [blame] | 1 | /* |
Andrew Walbran | 692b325 | 2019-03-07 15:51:31 +0000 | [diff] [blame] | 2 | * Copyright 2018 The Hafnium Authors. |
Andrew Scull | 1883487 | 2018-10-12 11:48:09 +0100 | [diff] [blame] | 3 | * |
Andrew Walbran | e959ec1 | 2020-06-17 15:01:09 +0100 | [diff] [blame] | 4 | * Use of this source code is governed by a BSD-style |
| 5 | * license that can be found in the LICENSE file or at |
| 6 | * https://opensource.org/licenses/BSD-3-Clause. |
Andrew Scull | 1883487 | 2018-10-12 11:48:09 +0100 | [diff] [blame] | 7 | */ |
| 8 | |
Andrew Scull | fbc938a | 2018-08-20 14:09:28 +0100 | [diff] [blame] | 9 | #pragma once |
Wedson Almeida Filho | fed6902 | 2018-07-11 15:39:12 +0100 | [diff] [blame] | 10 | |
| 11 | #include <stdbool.h> |
| 12 | #include <stddef.h> |
Wedson Almeida Filho | fed6902 | 2018-07-11 15:39:12 +0100 | [diff] [blame] | 13 | |
Andrew Scull | 18c78fc | 2018-08-20 12:57:41 +0100 | [diff] [blame] | 14 | #include "hf/addr.h" |
Karl Meakin | 07a69ab | 2025-02-07 14:53:19 +0000 | [diff] [blame] | 15 | #include "hf/mm.h" |
Wedson Almeida Filho | fed6902 | 2018-07-11 15:39:12 +0100 | [diff] [blame] | 16 | |
Olivier Deprez | 96a2a26 | 2020-06-11 17:21:38 +0200 | [diff] [blame] | 17 | #include "vmapi/hf/ffa.h" |
| 18 | |
/**
 * Creates an absent PTE, i.e. one that maps nothing and carries no
 * information.
 */
pte_t arch_mm_absent_pte(mm_level_t level);

/**
 * Creates a table PTE referencing the next-level page table at physical
 * address `pa`.
 */
pte_t arch_mm_table_pte(mm_level_t level, paddr_t pa);

/**
 * Creates a block PTE mapping the physical address `pa` with the given
 * architecture-specific attributes.
 */
pte_t arch_mm_block_pte(mm_level_t level, paddr_t pa, mm_attr_t attrs);

/**
 * Returns the type of the given PTE at the given page table level: absent,
 * invalid block, valid block, or table (see `enum mm_pte_type`).
 */
enum mm_pte_type arch_mm_pte_type(pte_t pte, mm_level_t level);

/**
 * Checks whether a block is allowed at the given level of the page table.
 */
bool arch_mm_is_block_allowed(mm_level_t level);
Andrew Scull | c66a04d | 2018-12-07 13:41:56 +0000 | [diff] [blame] | 40 | |
Karl Meakin | 23122e1 | 2025-02-05 14:44:20 +0000 | [diff] [blame] | 41 | static inline bool arch_mm_pte_is_absent(pte_t pte, mm_level_t level) |
| 42 | { |
| 43 | return arch_mm_pte_type(pte, level) == PTE_TYPE_ABSENT; |
| 44 | } |
| 45 | |
Andrew Scull | c66a04d | 2018-12-07 13:41:56 +0000 | [diff] [blame] | 46 | /** |
| 47 | * Determines if a PTE is present i.e. it contains information and therefore |
| 48 | * needs to exist in the page table. Any non-absent PTE is present. |
| 49 | */ |
Karl Meakin | 23122e1 | 2025-02-05 14:44:20 +0000 | [diff] [blame] | 50 | static inline bool arch_mm_pte_is_present(pte_t pte, mm_level_t level) |
| 51 | { |
| 52 | return !arch_mm_pte_is_absent(pte, level); |
| 53 | } |
Andrew Scull | c66a04d | 2018-12-07 13:41:56 +0000 | [diff] [blame] | 54 | |
| 55 | /** |
| 56 | * Determines if a PTE is valid i.e. it can affect the address space. Tables and |
| 57 | * valid blocks fall into this category. Invalid blocks do not as they hold |
| 58 | * information about blocks that are not in the address space. |
| 59 | */ |
Karl Meakin | 23122e1 | 2025-02-05 14:44:20 +0000 | [diff] [blame] | 60 | static inline bool arch_mm_pte_is_valid(pte_t pte, mm_level_t level) |
| 61 | { |
| 62 | switch (arch_mm_pte_type(pte, level)) { |
| 63 | case PTE_TYPE_ABSENT: |
| 64 | case PTE_TYPE_INVALID_BLOCK: |
| 65 | return false; |
| 66 | case PTE_TYPE_VALID_BLOCK: |
| 67 | case PTE_TYPE_TABLE: |
| 68 | return true; |
| 69 | } |
| 70 | } |
Andrew Scull | c66a04d | 2018-12-07 13:41:56 +0000 | [diff] [blame] | 71 | |
| 72 | /** |
| 73 | * Determines if a PTE is a block and represents an address range, valid or |
| 74 | * invalid. |
| 75 | */ |
Karl Meakin | 23122e1 | 2025-02-05 14:44:20 +0000 | [diff] [blame] | 76 | static inline bool arch_mm_pte_is_block(pte_t pte, mm_level_t level) |
| 77 | { |
| 78 | switch (arch_mm_pte_type(pte, level)) { |
| 79 | case PTE_TYPE_ABSENT: |
| 80 | case PTE_TYPE_TABLE: |
| 81 | return false; |
| 82 | case PTE_TYPE_INVALID_BLOCK: |
| 83 | case PTE_TYPE_VALID_BLOCK: |
| 84 | return true; |
| 85 | } |
| 86 | } |
Andrew Scull | c66a04d | 2018-12-07 13:41:56 +0000 | [diff] [blame] | 87 | |
| 88 | /** |
| 89 | * Determines if a PTE represents a reference to a table of PTEs. |
| 90 | */ |
Karl Meakin | 23122e1 | 2025-02-05 14:44:20 +0000 | [diff] [blame] | 91 | static inline bool arch_mm_pte_is_table(pte_t pte, mm_level_t level) |
| 92 | { |
| 93 | return arch_mm_pte_type(pte, level) == PTE_TYPE_TABLE; |
| 94 | } |
Andrew Scull | c66a04d | 2018-12-07 13:41:56 +0000 | [diff] [blame] | 95 | |
/**
 * Extracts the start physical address of the address range represented by the
 * given block PTE.
 */
paddr_t arch_mm_block_from_pte(pte_t pte, mm_level_t level);

/**
 * Extracts the physical address of the next-level table referenced by the
 * given table PTE.
 */
paddr_t arch_mm_table_from_pte(pte_t pte, mm_level_t level);

/**
 * Extracts the architecture-specific attributes of the given PTE.
 */
mm_attr_t arch_mm_pte_attrs(pte_t pte, mm_level_t level);

/**
 * Merges the attributes of a block into those of its parent table.
 */
mm_attr_t arch_mm_combine_table_entry_attrs(mm_attr_t table_attrs,
					    mm_attr_t block_attrs);

/**
 * Invalidates the given virtual address range in the stage-1 TLB for the
 * address space identified by `asid`.
 */
void arch_mm_invalidate_stage1_range(ffa_id_t asid, vaddr_t va_begin,
				     vaddr_t va_end);

/**
 * Invalidates the given intermediate physical address range in the stage-2
 * TLB of the VM identified by `vmid`.
 */
void arch_mm_invalidate_stage2_range(ffa_id_t vmid, ipaddr_t va_begin,
				     ipaddr_t va_end, bool non_secure);

/**
 * Writes back the given range of virtual memory to such a point that all cores
 * and devices will see the updated values. The corresponding cache lines are
 * also invalidated.
 */
void arch_mm_flush_dcache(void *base, size_t size);
Andrew Scull | 11a4a0c | 2018-12-29 11:38:31 +0000 | [diff] [blame] | 135 | |
/**
 * Sets the maximum (root) level allowed in the page table for stage-1, derived
 * from the number of supported physical address bits.
 */
void arch_mm_stage1_root_level_set(uint32_t pa_bits);

/**
 * Gets the maximum level allowed in the page table for stage-1.
 */
mm_level_t arch_mm_stage1_root_level(void);

/**
 * Gets the maximum level allowed in the page table for stage-2.
 */
mm_level_t arch_mm_stage2_root_level(void);

/**
 * Gets the number of concatenated page tables used at the root for stage-1.
 *
 * Tables are concatenated at the root to avoid introducing another level in
 * the page table meaning the table is shallow and wide. Each level is an extra
 * memory access when walking the table so keeping it shallow reduces the
 * memory accesses to aid performance.
 */
uint8_t arch_mm_stage1_root_table_count(void);

/**
 * Gets the number of concatenated page tables used at the root for stage-2.
 */
uint8_t arch_mm_stage2_root_table_count(void);
| 165 | |
/**
 * Converts the mode into stage-1 attributes for a block PTE.
 */
mm_attr_t arch_mm_mode_to_stage1_attrs(mm_mode_t mode);

/**
 * Converts the mode into stage-2 attributes for a block PTE.
 */
mm_attr_t arch_mm_mode_to_stage2_attrs(mm_mode_t mode);

/**
 * Converts the stage-2 block attributes back to the corresponding mode.
 */
mm_mode_t arch_mm_stage2_attrs_to_mode(mm_attr_t attrs);

/**
 * Converts the stage-1 block attributes back to the corresponding mode.
 */
mm_mode_t arch_mm_stage1_attrs_to_mode(mm_attr_t attrs);

/**
 * Initializes the arch specific memory management.
 *
 * NOTE(review): presumably returns whether initialization succeeded — confirm
 * against the architecture implementations.
 */
bool arch_mm_init(const struct mm_ptable *ptable);

/**
 * Returns the arch specific mm mode for send/recv pages of the given VM ID.
 */
mm_mode_t arch_mm_extra_mode_from_vm(ffa_id_t id);

/**
 * Executes any barriers or synchronization that is required by a given
 * architecture, after page table writes.
 */
void arch_mm_sync_table_writes(void);

/**
 * Returns the maximum supported PA Range index.
 */
uint64_t arch_mm_get_pa_range(void);

/**
 * Returns the maximum supported PA Range in bits for the given PA Range index.
 */
uint32_t arch_mm_get_pa_bits(uint64_t pa_range);

/**
 * Returns VTCR_EL2 configured in arch_mm_init.
 */
uintptr_t arch_mm_get_vtcr_el2(void);

/**
 * Returns VSTCR_EL2 configured in arch_mm_init.
 */
uintptr_t arch_mm_get_vstcr_el2(void);