Soby Mathew | b4c6df4 | 2022-11-09 11:13:29 +0000 | [diff] [blame] | 1 | /* |
| 2 | * SPDX-License-Identifier: BSD-3-Clause |
| 3 | * |
| 4 | * SPDX-FileCopyrightText: Copyright TF-RMM Contributors. |
| 5 | */ |
| 6 | |
| 7 | #include <arch.h> |
| 8 | #include <arch_helpers.h> |
| 9 | #include <debug.h> |
| 10 | #include <esr.h> |
| 11 | #include <memory_alloc.h> |
| 12 | #include <rec.h> |
| 13 | #include <smc-rmi.h> |
| 14 | |
/*
 * Helpers to match and read the trapped ID_AA64xxx_EL1 identification
 * registers by their short name (e.g. PFR0, DFR0). Each macro pastes the
 * short name into the full register/constant identifier.
 */

/* Expand to the 'case' label for the trapped ID_AA64<reg>_EL1 encoding */
#define SYSREG_CASE(reg) \
	case ESR_EL2_SYSREG_##ID_AA64##reg##_EL1:

/* Read ID_AA64<reg>_EL1 unmodified */
#define SYSREG_READ(reg) \
	read_ID_AA64##reg##_EL1()

/* Read ID_AA64<reg>_EL1 with the ID_AA64<reg>_EL1_CLEAR fields masked off */
#define SYSREG_READ_CLEAR(reg) \
	(read_ID_AA64##reg##_EL1() & \
	~(ID_AA64##reg##_EL1_CLEAR))

/*
 * Read ID_AA64<reg>_EL1, mask off the ID_AA64<reg>_EL1_CLEAR fields,
 * then OR in the ID_AA64<reg>_EL1_SET values.
 */
#define SYSREG_READ_CLEAR_SET(reg) \
	((read_ID_AA64##reg##_EL1() & \
	~(ID_AA64##reg##_EL1_CLEAR)) | \
	(ID_AA64##reg##_EL1_SET))
| 29 | |
/* System registers ID_AA64xxx_EL1 feature clear masks and set values */

/*
 * ID_AA64DFR0_EL1:
 *
 * Cleared fields:
 * - Debug architecture version:
 *   set in ID_AA64DFR0_EL1_SET
 * - Trace unit System registers not implemented
 * - Number of breakpoints:
 *   set in ID_AA64DFR0_EL1_SET
 * - PMU Snapshot extension not implemented
 * - Number of watchpoints:
 *   set in ID_AA64DFR0_EL1_SET
 * - Synchronous-exception-based event profiling not implemented
 * - Number of breakpoints that are context-aware
 * - Statistical Profiling Extension not implemented
 * - Armv8.4 Self-hosted Trace Extension not implemented
 * - Trace Buffer Extension not implemented
 * - Branch Record Buffer Extension not implemented
 * - Trace Buffer External Mode not implemented
 */
#define ID_AA64DFR0_EL1_CLEAR		  \
	MASK(ID_AA64DFR0_EL1_DebugVer)	| \
	MASK(ID_AA64DFR0_EL1_TraceVer)	| \
	MASK(ID_AA64DFR0_EL1_BRPs)	| \
	MASK(ID_AA64DFR0_EL1_PMSS)	| \
	MASK(ID_AA64DFR0_EL1_WRPs)	| \
	MASK(ID_AA64DFR0_EL1_SEBEP)	| \
	MASK(ID_AA64DFR0_EL1_CTX_CMPS)	| \
	MASK(ID_AA64DFR0_EL1_PMSVer)	| \
	MASK(ID_AA64DFR0_EL1_TraceFilt)	| \
	MASK(ID_AA64DFR0_EL1_TraceBuffer) | \
	MASK(ID_AA64DFR0_EL1_BRBE)	| \
	MASK(ID_AA64DFR0_EL1_ExtTrcBuff)

/*
 * Set fields:
 * - Armv8 debug architecture
 * - Number of breakpoints: 2 (field holds number of breakpoints minus 1)
 * - Number of watchpoints: 2 (field holds number of watchpoints minus 1)
 */
#define ID_AA64DFR0_EL1_SET \
	INPLACE(ID_AA64DFR0_EL1_DebugVer, ID_AA64DFR0_EL1_Debugv8) | \
	INPLACE(ID_AA64DFR0_EL1_BRPs, 1UL) | \
	INPLACE(ID_AA64DFR0_EL1_WRPs, 1UL)

/*
 * ID_AA64DFR1_EL1:
 *
 * Cleared fields:
 * - Exception-based event profiling not implemented
 * - PMU fixed-function instruction counter not implemented
 */
#define ID_AA64DFR1_EL1_CLEAR	  \
	MASK(ID_AA64DFR1_EL1_EBEP) | \
	MASK(ID_AA64DFR1_EL1_ICNTR)

/*
 * ID_AA64ISAR1_EL1:
 *
 * Cleared fields:
 * - Address and Generic Authentication are not implemented
 */
#define ID_AA64ISAR1_EL1_CLEAR	  \
	MASK(ID_AA64ISAR1_EL1_APA) | \
	MASK(ID_AA64ISAR1_EL1_API) | \
	MASK(ID_AA64ISAR1_EL1_GPA) | \
	MASK(ID_AA64ISAR1_EL1_GPI)

/*
 * ID_AA64PFR0_EL1:
 *
 * Cleared fields:
 * - Activity Monitors Extension not implemented
 * - Scalable Vector Extension not implemented.
 *   This is a temporary fix until RMM completely supports SVE.
 */
#define ID_AA64PFR0_EL1_CLEAR	  \
	MASK(ID_AA64PFR0_EL1_AMU) | \
	MASK(ID_AA64PFR0_EL1_SVE)

/*
 * ID_AA64PFR1_EL1:
 *
 * Cleared fields:
 * - Memory Tagging Extension is not implemented
 */
#define ID_AA64PFR1_EL1_CLEAR \
	MASK(ID_AA64PFR1_EL1_MTE)
| 120 | |
/*
 * Handle ID_AA64XXX<n>_EL1 instructions
 *
 * Emulates a trapped read of an AArch64 identification register for the
 * Realm: the real register value is read, fields the RMM hides from Realms
 * are cleared (and, for ID_AA64DFR0_EL1, replaced with fixed values), and
 * the result is written to the Realm's Rt register. Encodings not listed
 * in the switch read as zero (RES0 space).
 *
 * Always returns true: the trap is fully handled inside the RMM and
 * execution can return to the Realm without exiting to the Host.
 */
static bool handle_id_sysreg_trap(struct rec *rec,
				  struct rmi_rec_exit *rec_exit,
				  unsigned long esr)
{
	unsigned int rt;
	unsigned long idreg, value;

	/*
	 * We only set HCR_EL2.TID3 to trap ID registers at the moment and
	 * that only traps reads of registers. Seeing a write here indicates a
	 * consistency problem with the RMM and we should panic immediately.
	 */
	assert(!ESR_EL2_SYSREG_IS_WRITE(esr));

	/*
	 * Read Rt value from the issued instruction,
	 * the general-purpose register used for the transfer.
	 * Rt bits [9:5] of ISS field cannot exceed 0b11111.
	 */
	rt = ESR_EL2_SYSREG_ISS_RT(esr);

	/* Handle writes to XZR register */
	if (rt == 31U) {
		return true;
	}

	/* Isolate the system register encoding bits of the syndrome */
	idreg = esr & ESR_EL2_SYSREG_MASK;

	switch (idreg) {
	SYSREG_CASE(AFR0)
		value = SYSREG_READ(AFR0);
		break;
	SYSREG_CASE(AFR1)
		value = SYSREG_READ(AFR1);
		break;
	SYSREG_CASE(DFR0)
		/* Clear hidden debug features, advertise fixed BRPs/WRPs */
		value = SYSREG_READ_CLEAR_SET(DFR0);
		break;
	SYSREG_CASE(DFR1)
		value = SYSREG_READ_CLEAR(DFR1);
		break;
	SYSREG_CASE(ISAR0)
		value = SYSREG_READ(ISAR0);
		break;
	SYSREG_CASE(ISAR1)
		/* Hide pointer authentication support */
		value = SYSREG_READ_CLEAR(ISAR1);
		break;
	SYSREG_CASE(MMFR0)
		value = SYSREG_READ(MMFR0);
		break;
	SYSREG_CASE(MMFR1)
		value = SYSREG_READ(MMFR1);
		break;
	SYSREG_CASE(MMFR2)
		value = SYSREG_READ(MMFR2);
		break;
	SYSREG_CASE(PFR0)
		/*
		 * Workaround for TF-A trapping AMU registers access
		 * to EL3 in Realm state.
		 */
		value = SYSREG_READ_CLEAR(PFR0);
		break;
	SYSREG_CASE(PFR1)
		value = SYSREG_READ_CLEAR(PFR1);
		break;
	/*
	 * TODO: not supported without SVE:
	 * SYSREG_CASE(ZFR0)
	 */
	default:
		/* All other encodings are in the RES0 space */
		value = 0UL;
	}

	rec->regs[rt] = value;
	return true;
}
| 202 | |
| 203 | static bool handle_icc_el1_sysreg_trap(struct rec *rec, |
| 204 | struct rmi_rec_exit *rec_exit, |
| 205 | unsigned long esr) |
| 206 | { |
| 207 | __unused unsigned long sysreg = esr & ESR_EL2_SYSREG_MASK; |
| 208 | |
| 209 | /* |
| 210 | * We should only have configured ICH_HCR_EL2 to trap on DIR and we |
| 211 | * always trap on the SGIRs following the architecture, so make sure |
| 212 | * we're not accidentally trapping on some other register here. |
| 213 | */ |
| 214 | assert((sysreg == ESR_EL2_SYSREG_ICC_DIR) || |
| 215 | (sysreg == ESR_EL2_SYSREG_ICC_SGI1R_EL1) || |
| 216 | (sysreg == ESR_EL2_SYSREG_ICC_SGI0R_EL1)); |
| 217 | |
| 218 | /* |
| 219 | * The registers above should only trap to EL2 for writes, read |
| 220 | * instructions are not defined and should cause an Undefined exception |
| 221 | * at EL1. |
| 222 | */ |
| 223 | assert(ESR_EL2_SYSREG_IS_WRITE(esr)); |
| 224 | |
| 225 | rec_exit->exit_reason = RMI_EXIT_SYNC; |
| 226 | rec_exit->esr = esr; |
| 227 | return false; |
| 228 | } |
| 229 | |
/*
 * A sysreg trap handler: returns true when the access has been fully
 * handled inside the RMM, false when the REC must exit to the Host.
 */
typedef bool (*sysreg_handler_fn)(struct rec *rec, struct rmi_rec_exit *rec_exit,
				  unsigned long esr);

/* Match rule for a trapped sysreg access: (esr & esr_mask) == esr_value */
struct sysreg_handler {
	unsigned long esr_mask;
	unsigned long esr_value;
	sysreg_handler_fn fn;
};

#define SYSREG_HANDLER(_mask, _value, _handler_fn) \
	{ .esr_mask = (_mask), .esr_value = (_value), .fn = _handler_fn }

/*
 * Dispatch table for trapped system register accesses.
 * Entries are tried in order and the first match wins.
 */
static const struct sysreg_handler sysreg_handlers[] = {
	SYSREG_HANDLER(ESR_EL2_SYSREG_ID_MASK, ESR_EL2_SYSREG_ID, handle_id_sysreg_trap),
	SYSREG_HANDLER(ESR_EL2_SYSREG_ICC_EL1_MASK, ESR_EL2_SYSREG_ICC_EL1, handle_icc_el1_sysreg_trap),
	SYSREG_HANDLER(ESR_EL2_SYSREG_MASK, ESR_EL2_SYSREG_ICC_PMR_EL1, handle_icc_el1_sysreg_trap)
};
| 247 | |
| 248 | static unsigned long get_sysreg_write_value(struct rec *rec, unsigned long esr) |
| 249 | { |
AlexeiFedorov | feaef16 | 2022-12-23 16:59:51 +0000 | [diff] [blame] | 250 | /* Rt bits [9:5] of ISS field cannot exceed 0b11111 */ |
| 251 | unsigned int rt = ESR_EL2_SYSREG_ISS_RT(esr); |
Soby Mathew | b4c6df4 | 2022-11-09 11:13:29 +0000 | [diff] [blame] | 252 | |
| 253 | /* Handle reads from XZR register */ |
| 254 | if (rt == 31U) { |
| 255 | return 0UL; |
| 256 | } |
| 257 | |
AlexeiFedorov | feaef16 | 2022-12-23 16:59:51 +0000 | [diff] [blame] | 258 | return rec->regs[rt]; |
Soby Mathew | b4c6df4 | 2022-11-09 11:13:29 +0000 | [diff] [blame] | 259 | } |
| 260 | |
| 261 | static void emulate_sysreg_access_ns(struct rec *rec, struct rmi_rec_exit *rec_exit, |
| 262 | unsigned long esr) |
| 263 | { |
| 264 | if (ESR_EL2_SYSREG_IS_WRITE(esr)) { |
| 265 | rec_exit->gprs[0] = get_sysreg_write_value(rec, esr); |
| 266 | } |
| 267 | } |
| 268 | |
/*
 * Handle trapped MSR, MRS or System instruction execution
 * in AArch64 state
 *
 * Dispatches the trapped access through sysreg_handlers[]; if a handler
 * matches, its result decides whether execution returns to the Realm
 * (true) or exits to the Host (false). Unmatched accesses are treated as
 * RAZ/WI and logged.
 */
bool handle_sysreg_access_trap(struct rec *rec, struct rmi_rec_exit *rec_exit,
			       unsigned long esr)
{
	/*
	 * Read Rt value from the issued instruction,
	 * the general-purpose register used for the transfer.
	 * Rt bits [9:5] of ISS field cannot exceed 0b11111.
	 */
	unsigned int rt = ESR_EL2_SYSREG_ISS_RT(esr);
	unsigned int __unused op0, op1, crn, crm, op2;
	unsigned long __unused sysreg;

	/* Check for 32-bit instruction trapped */
	assert(ESR_IL(esr) != 0UL);

	/* First matching handler wins */
	for (unsigned int i = 0U; i < ARRAY_LEN(sysreg_handlers); i++) {
		const struct sysreg_handler *handler = &sysreg_handlers[i];

		if ((esr & handler->esr_mask) == handler->esr_value) {
			bool handled = handler->fn(rec, rec_exit, esr);

			/* Unhandled: hand the access to the Host to emulate */
			if (!handled) {
				emulate_sysreg_access_ns(rec, rec_exit, esr);
			}
			return handled;
		}
	}

	/*
	 * For now, treat all unhandled accesses as RAZ/WI.
	 * Handle writes to XZR register.
	 */
	if (!ESR_EL2_SYSREG_IS_WRITE(esr) && (rt != 31U)) {
		rec->regs[rt] = 0UL;
	}

	sysreg = esr & ESR_EL2_SYSREG_MASK;

	/* Extract system register encoding */
	op0 = EXTRACT(ESR_EL2_SYSREG_TRAP_OP0, sysreg);
	op1 = EXTRACT(ESR_EL2_SYSREG_TRAP_OP1, sysreg);
	crn = EXTRACT(ESR_EL2_SYSREG_TRAP_CRN, sysreg);
	crm = EXTRACT(ESR_EL2_SYSREG_TRAP_CRM, sysreg);
	op2 = EXTRACT(ESR_EL2_SYSREG_TRAP_OP2, sysreg);

	INFO("Unhandled %s S%u_%u_C%u_C%u_%u\n",
	     ESR_EL2_SYSREG_IS_WRITE(esr) ? "write" : "read",
	     op0, op1, crn, crm, op2);

	return true;
}