/*
 * Copyright (c) 2013-2024, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <context.h>
#include <el3_common_macros.S>

#if CTX_INCLUDE_FPREGS
        .global fpregs_context_save
        .global fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */

#if ERRATA_SPECULATIVE_AT
        .global save_and_update_ptw_el1_sys_regs
#endif /* ERRATA_SPECULATIVE_AT */

        .global prepare_el3_entry
        .global restore_gp_pmcr_pauth_regs
        .global el3_exit

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS (AAPCS64)
 * by using only x9-x17 (temporary caller-saved registers) to save
 * the floating point register context. It assumes that 'x0' points
 * to a 'fp_regs' structure where the register context will be
 * saved.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses the VFP
 * registers nor sets that trap, so the bit is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in the secure world.
 * ------------------------------------------------------------------
 */
#if CTX_INCLUDE_FPREGS
func fpregs_context_save
        stp     q0, q1, [x0, #CTX_FP_Q0]
        stp     q2, q3, [x0, #CTX_FP_Q2]
        stp     q4, q5, [x0, #CTX_FP_Q4]
        stp     q6, q7, [x0, #CTX_FP_Q6]
        stp     q8, q9, [x0, #CTX_FP_Q8]
        stp     q10, q11, [x0, #CTX_FP_Q10]
        stp     q12, q13, [x0, #CTX_FP_Q12]
        stp     q14, q15, [x0, #CTX_FP_Q14]
        stp     q16, q17, [x0, #CTX_FP_Q16]
        stp     q18, q19, [x0, #CTX_FP_Q18]
        stp     q20, q21, [x0, #CTX_FP_Q20]
        stp     q22, q23, [x0, #CTX_FP_Q22]
        stp     q24, q25, [x0, #CTX_FP_Q24]
        stp     q26, q27, [x0, #CTX_FP_Q26]
        stp     q28, q29, [x0, #CTX_FP_Q28]
        stp     q30, q31, [x0, #CTX_FP_Q30]
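        /*
         * Note: storing the full 128-bit Q views captures the
         * complete SIMD&FP data register state; the D/S/H/B views
         * are subsets of the same registers.
         */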

        mrs     x9, fpsr
        str     x9, [x0, #CTX_FP_FPSR]

        mrs     x10, fpcr
        str     x10, [x0, #CTX_FP_FPCR]

#if CTX_INCLUDE_AARCH32_REGS
        mrs     x11, fpexc32_el2
        str     x11, [x0, #CTX_FP_FPEXC32_EL2]
#endif /* CTX_INCLUDE_AARCH32_REGS */
        ret
endfunc fpregs_context_save

/* ------------------------------------------------------------------
 * The following function strictly follows the AArch64 PCS (AAPCS64)
 * by using only x9-x17 (temporary caller-saved registers) to
 * restore the floating point register context. It assumes that 'x0'
 * points to a 'fp_regs' structure from where the register context
 * will be restored.
 *
 * Access to VFP registers will trap if CPTR_EL3.TFP is set.
 * However, Trusted Firmware currently neither uses the VFP
 * registers nor sets that trap, so the bit is assumed to be clear.
 *
 * TODO: Revisit when VFP is used in the secure world.
 * ------------------------------------------------------------------
 */
func fpregs_context_restore
        ldp     q0, q1, [x0, #CTX_FP_Q0]
        ldp     q2, q3, [x0, #CTX_FP_Q2]
        ldp     q4, q5, [x0, #CTX_FP_Q4]
        ldp     q6, q7, [x0, #CTX_FP_Q6]
        ldp     q8, q9, [x0, #CTX_FP_Q8]
        ldp     q10, q11, [x0, #CTX_FP_Q10]
        ldp     q12, q13, [x0, #CTX_FP_Q12]
        ldp     q14, q15, [x0, #CTX_FP_Q14]
        ldp     q16, q17, [x0, #CTX_FP_Q16]
        ldp     q18, q19, [x0, #CTX_FP_Q18]
        ldp     q20, q21, [x0, #CTX_FP_Q20]
        ldp     q22, q23, [x0, #CTX_FP_Q22]
        ldp     q24, q25, [x0, #CTX_FP_Q24]
        ldp     q26, q27, [x0, #CTX_FP_Q26]
        ldp     q28, q29, [x0, #CTX_FP_Q28]
        ldp     q30, q31, [x0, #CTX_FP_Q30]

        ldr     x9, [x0, #CTX_FP_FPSR]
        msr     fpsr, x9

        ldr     x10, [x0, #CTX_FP_FPCR]
        msr     fpcr, x10

#if CTX_INCLUDE_AARCH32_REGS
        ldr     x11, [x0, #CTX_FP_FPEXC32_EL2]
        msr     fpexc32_el2, x11
#endif /* CTX_INCLUDE_AARCH32_REGS */

        /*
         * No explicit ISB is required here, as the ERET used to
         * switch to secure EL1 or the non-secure world covers it.
         */

        ret
endfunc fpregs_context_restore
#endif /* CTX_INCLUDE_FPREGS */

/*
 * Set the SCR_EL3.EA bit to enable SError exceptions at EL3.
 */
.macro enable_serror_at_el3
        mrs     x8, scr_el3
        orr     x8, x8, #SCR_EA_BIT
        msr     scr_el3, x8
.endm

/*
 * Set to a default value the PSTATE bits that are not set when an
 * exception is taken, as described by the AArch64.TakeException()
 * pseudocode function in Arm DDI 0487F.c, page J1-7635.
 */
.macro set_unset_pstate_bits
        /*
         * If Data Independent Timing (DIT) functionality is
         * implemented, always enable DIT in EL3.
         */
#if ENABLE_FEAT_DIT
#if ENABLE_FEAT_DIT == 2
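        /*
         * A value of 2 selects runtime detection of the feature
         * (TF-A's FEAT_STATE_CHECKED convention): probe
         * ID_AA64PFR0_EL1.DIT and skip setting PSTATE.DIT if DIT
         * is not implemented on this core.
         */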
        mrs     x8, id_aa64pfr0_el1
        and     x8, x8, #(ID_AA64PFR0_DIT_MASK << ID_AA64PFR0_DIT_SHIFT)
        cbz     x8, 1f
#endif
        mov     x8, #DIT_BIT
        msr     DIT, x8
1:
#endif /* ENABLE_FEAT_DIT */
.endm /* set_unset_pstate_bits */

/*-------------------------------------------------------------------------
 * This macro checks the ENABLE_FEAT_MPAM build option and performs an
 * ID register check to see whether the platform supports the MPAM
 * extension; it restores the MPAM3_EL3 register value if the state is
 * FEAT_STATE_ENABLED or FEAT_STATE_CHECKED.
 *
 * This is more complicated than for other extensions because MPAM
 * support cannot be determined by checking the status of a particular
 * bit in the MDCR_EL3 or CPTR_EL3 register.
 * ------------------------------------------------------------------------
 */

.macro restore_mpam3_el3
#if ENABLE_FEAT_MPAM
#if ENABLE_FEAT_MPAM == 2

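        /*
         * MPAM presence is advertised in two ID fields: treat the
         * feature as implemented if either ID_AA64PFR0_EL1.MPAM or
         * ID_AA64PFR1_EL1.MPAM_frac is non-zero.
         */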
        mrs     x8, id_aa64pfr0_el1
        lsr     x8, x8, #(ID_AA64PFR0_MPAM_SHIFT)
        and     x8, x8, #(ID_AA64PFR0_MPAM_MASK)
        mrs     x7, id_aa64pfr1_el1
        lsr     x7, x7, #(ID_AA64PFR1_MPAM_FRAC_SHIFT)
        and     x7, x7, #(ID_AA64PFR1_MPAM_FRAC_MASK)
        orr     x7, x7, x8
        cbz     x7, no_mpam
#endif
        /* -----------------------------------------------------------
         * Restore the MPAM3_EL3 register as per the context state.
         * Currently we only enable MPAM for the NS world and trap to
         * EL3 on MPAM access in lower ELs of the Secure and Realm
         * worlds.
         * x9 holds the address of the per_world context.
         * -----------------------------------------------------------
         */

        ldr     x17, [x9, #CTX_MPAM3_EL3]
        msr     S3_6_C10_C5_0, x17 /* mpam3_el3 */

no_mpam:
#endif
.endm /* restore_mpam3_el3 */

/* ------------------------------------------------------------------
 * The following macro is used to save all the general purpose and
 * ARMv8.3-PAuth (if enabled) registers.
 * It also checks whether the Secure Cycle Counter (PMCCNTR_EL0) is
 * disabled in EL3/Secure (ARMv8.5-PMU), in which case PMCCNTR_EL0
 * need not be saved/restored during a world switch.
 *
 * Ideally we would only save and restore the callee saved registers
 * when a world switch occurs, but that type of implementation is
 * more complex. So currently we will always save and restore these
 * registers on entry to and exit from EL3.
 * clobbers: x18
 * ------------------------------------------------------------------
 */
.macro save_gp_pmcr_pauth_regs
        stp     x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
        stp     x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
        stp     x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
        stp     x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
        stp     x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
        stp     x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
        stp     x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
        stp     x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
        stp     x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
        stp     x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
        stp     x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
        stp     x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
        stp     x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
        stp     x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
        stp     x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
        mrs     x18, sp_el0
        str     x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
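        /*
         * x18 was saved to the context above and is then reused as
         * scratch for SP_EL0, which is why this macro lists x18 as
         * clobbered.
         */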

        /* PMUv3 is presumed to be always present */
        mrs     x9, pmcr_el0
        str     x9, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
        /* Disable cycle counter when event counting is prohibited */
        orr     x9, x9, #PMCR_EL0_DP_BIT
        msr     pmcr_el0, x9
        isb
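        /*
         * The ISB ensures the PMCR_EL0 write has taken effect, so
         * that EL3 execution from this point on should not be
         * observable through the cycle counter.
         */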
#if CTX_INCLUDE_PAUTH_REGS
        /* ----------------------------------------------------------
         * Save the ARMv8.3-PAuth keys as they are not banked
         * by exception level
         * ----------------------------------------------------------
         */
        add     x19, sp, #CTX_PAUTH_REGS_OFFSET

        mrs     x20, APIAKeyLo_EL1      /* x21:x20 = APIAKey */
        mrs     x21, APIAKeyHi_EL1
        mrs     x22, APIBKeyLo_EL1      /* x23:x22 = APIBKey */
        mrs     x23, APIBKeyHi_EL1
        mrs     x24, APDAKeyLo_EL1      /* x25:x24 = APDAKey */
        mrs     x25, APDAKeyHi_EL1
        mrs     x26, APDBKeyLo_EL1      /* x27:x26 = APDBKey */
        mrs     x27, APDBKeyHi_EL1
        mrs     x28, APGAKeyLo_EL1      /* x29:x28 = APGAKey */
        mrs     x29, APGAKeyHi_EL1

        stp     x20, x21, [x19, #CTX_PACIAKEY_LO]
        stp     x22, x23, [x19, #CTX_PACIBKEY_LO]
        stp     x24, x25, [x19, #CTX_PACDAKEY_LO]
        stp     x26, x27, [x19, #CTX_PACDBKEY_LO]
        stp     x28, x29, [x19, #CTX_PACGAKEY_LO]
#endif /* CTX_INCLUDE_PAUTH_REGS */
.endm /* save_gp_pmcr_pauth_regs */

/* -----------------------------------------------------------------
 * This function saves the context and sets the PSTATE to a known
 * state, preparing for entry to EL3.
 * It saves all the general purpose and ARMv8.3-PAuth (if enabled)
 * registers.
 * It then sets to a default value for EL3 any PSTATE bits that are
 * not set by hardware, according to the AArch64.TakeException
 * pseudocode in the Arm Architecture Reference Manual.
 * clobbers: x17
 * -----------------------------------------------------------------
 */
func prepare_el3_entry
        save_gp_pmcr_pauth_regs
        enable_serror_at_el3
        /*
         * Set to their default values the PSTATE bits that are not
         * set by hardware on exception entry, as described in the
         * AArch64.TakeException pseudocode.
         */
        set_unset_pstate_bits
        ret
endfunc prepare_el3_entry

/* ------------------------------------------------------------------
 * This function restores ARMv8.3-PAuth (if enabled) and all general
 * purpose registers except x30 from the CPU context.
 * x30 register must be explicitly restored by the caller.
 * ------------------------------------------------------------------
 */
func restore_gp_pmcr_pauth_regs
#if CTX_INCLUDE_PAUTH_REGS
        /* Restore the ARMv8.3 PAuth keys */
        add     x10, sp, #CTX_PAUTH_REGS_OFFSET

        ldp     x0, x1, [x10, #CTX_PACIAKEY_LO] /* x1:x0 = APIAKey */
        ldp     x2, x3, [x10, #CTX_PACIBKEY_LO] /* x3:x2 = APIBKey */
        ldp     x4, x5, [x10, #CTX_PACDAKEY_LO] /* x5:x4 = APDAKey */
        ldp     x6, x7, [x10, #CTX_PACDBKEY_LO] /* x7:x6 = APDBKey */
        ldp     x8, x9, [x10, #CTX_PACGAKEY_LO] /* x9:x8 = APGAKey */

        msr     APIAKeyLo_EL1, x0
        msr     APIAKeyHi_EL1, x1
        msr     APIBKeyLo_EL1, x2
        msr     APIBKeyHi_EL1, x3
        msr     APDAKeyLo_EL1, x4
        msr     APDAKeyHi_EL1, x5
        msr     APDBKeyLo_EL1, x6
        msr     APDBKeyHi_EL1, x7
        msr     APGAKeyLo_EL1, x8
        msr     APGAKeyHi_EL1, x9
#endif /* CTX_INCLUDE_PAUTH_REGS */

        /* PMUv3 is presumed to be always present */
        ldr     x0, [sp, #CTX_EL3STATE_OFFSET + CTX_PMCR_EL0]
        msr     pmcr_el0, x0
        ldp     x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
        ldp     x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
        ldp     x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
        ldp     x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
        ldp     x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
        ldp     x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
        ldp     x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
        ldp     x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
        ldp     x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
        ldp     x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
        ldp     x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
        ldp     x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
        ldp     x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
        ldp     x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
        ldr     x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
        msr     sp_el0, x28
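        /*
         * x28 was used above as scratch to restore SP_EL0, so the
         * real x28/x29 values are reloaded last.
         */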
        ldp     x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
        ret
endfunc restore_gp_pmcr_pauth_regs

#if ERRATA_SPECULATIVE_AT
/* --------------------------------------------------------------------
 * In case of ERRATA_SPECULATIVE_AT, save the SCTLR_EL1 and TCR_EL1
 * registers and update the EL1 registers to disable the stage 1 and
 * stage 2 page table walk.
 * --------------------------------------------------------------------
 */
| 343 | func save_and_update_ptw_el1_sys_regs |
| 344 | /* ---------------------------------------------------------- |
| 345 | * Save only sctlr_el1 and tcr_el1 registers |
| 346 | * ---------------------------------------------------------- |
| 347 | */ |
| 348 | mrs x29, sctlr_el1 |
Jayanth Dodderi Chidanand | 59b7c0a | 2024-06-05 11:13:05 +0100 | [diff] [blame] | 349 | str x29, [sp, #(CTX_ERRATA_SPEC_AT_OFFSET + CTX_ERRATA_SPEC_AT_SCTLR_EL1)] |
Manish V Badarkhe | 3b8456b | 2020-07-23 12:43:25 +0100 | [diff] [blame] | 350 | mrs x29, tcr_el1 |
Jayanth Dodderi Chidanand | 59b7c0a | 2024-06-05 11:13:05 +0100 | [diff] [blame] | 351 | str x29, [sp, #(CTX_ERRATA_SPEC_AT_OFFSET + CTX_ERRATA_SPEC_AT_TCR_EL1)] |
Manish V Badarkhe | 3b8456b | 2020-07-23 12:43:25 +0100 | [diff] [blame] | 352 | |
        /* ------------------------------------------------------------
         * The steps below must be performed in this order to disable
         * the page table walk for lower ELs (EL1 and EL0). The first
         * step disables the stage 1 page table walk, while the second
         * forces the page table walker to honour the TCR_EL1.EPDx
         * bits when performing address translation. The ISB ensures
         * the CPU performs these two steps in order.
         *
         * 1. Update the TCR_EL1.EPDx bits to disable the stage 1
         *    page table walk.
         * 2. Set the MMU enable bit to avoid identity mapping via
         *    stage 2 and force the page table walker to use the
         *    TCR_EL1.EPDx bits.
         * ------------------------------------------------------------
         */
        orr     x29, x29, #(TCR_EPD0_BIT)
        orr     x29, x29, #(TCR_EPD1_BIT)
        msr     tcr_el1, x29
        isb
        mrs     x29, sctlr_el1
        orr     x29, x29, #SCTLR_M_BIT
        msr     sctlr_el1, x29
        isb
        ret
endfunc save_and_update_ptw_el1_sys_regs

#endif /* ERRATA_SPECULATIVE_AT */

/* -----------------------------------------------------------------
 * The macro below returns the address of the per_world context for
 * the current security state, which is retrieved via the
 * "get_security_state" macro. The per_world context address is
 * returned in the register argument.
 * Clobbers: x9, x10
 * ------------------------------------------------------------------
 */

.macro get_per_world_context _reg:req
        ldr     x10, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
        get_security_state x9, x10
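        /*
         * Index the per_world_context array by security state:
         * address = per_world_context + security_state *
         *           (CTX_PERWORLD_EL3STATE_END - CTX_CPTR_EL3)
         */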
        mov_imm x10, (CTX_PERWORLD_EL3STATE_END - CTX_CPTR_EL3)
        mul     x9, x9, x10
        adrp    x10, per_world_context
        add     x10, x10, :lo12:per_world_context
        add     x9, x9, x10
        mov     \_reg, x9
.endm

/* ------------------------------------------------------------------
 * This routine assumes that SP_EL3 is pointing to a valid context
 * structure from where the gp regs and other special registers can
 * be retrieved.
 * ------------------------------------------------------------------
 */
func el3_exit
#if ENABLE_ASSERTIONS
        /* el3_exit assumes SP_EL0 on entry */
        mrs     x17, spsel
        cmp     x17, #MODE_SP_EL0
        ASM_ASSERT(eq)
#endif /* ENABLE_ASSERTIONS */

        /* ----------------------------------------------------------
         * Save the current SP_EL0 i.e. the EL3 runtime stack which
         * will be used for handling the next SMC.
         * Then switch to SP_EL3.
         * ----------------------------------------------------------
         */
        mov     x17, sp
        msr     spsel, #MODE_SP_ELX
        str     x17, [sp, #CTX_EL3STATE_OFFSET + CTX_RUNTIME_SP]

        /* ----------------------------------------------------------
         * Restore CPTR_EL3.
         * ZCR_EL3 is only restored if SVE is supported and enabled.
         * Synchronization is required before zcr_el3 is addressed.
         * ----------------------------------------------------------
         */

        /* The address of the per_world context is stored in x9 */
        get_per_world_context x9

        ldp     x19, x20, [x9, #CTX_CPTR_EL3]
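        /* x19 = cptr_el3, x20 = zcr_el3 (per-world values) */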
        msr     cptr_el3, x19

#if IMAGE_BL31
        ands    x19, x19, #CPTR_EZ_BIT
        beq     sve_not_enabled

        isb
        msr     S3_6_C1_C2_0, x20 /* zcr_el3 */
sve_not_enabled:

        restore_mpam3_el3

#endif /* IMAGE_BL31 */

#if IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639
        /* ----------------------------------------------------------
         * Restore mitigation state as it was on entry to EL3
         * ----------------------------------------------------------
         */
        ldr     x17, [sp, #CTX_CVE_2018_3639_OFFSET + CTX_CVE_2018_3639_DISABLE]
        cbz     x17, 1f
        blr     x17
1:
#endif /* IMAGE_BL31 && DYNAMIC_WORKAROUND_CVE_2018_3639 */

#if IMAGE_BL31
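        /*
         * Synchronize outstanding errors here so that an SError
         * resulting from EL3 execution is taken before the ERET,
         * rather than after the return to a lower EL.
         */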
        synchronize_errors
#endif /* IMAGE_BL31 */

        /* --------------------------------------------------------------
         * Restore MDCR_EL3, SPSR_EL3, ELR_EL3 and SCR_EL3 prior to ERET
         * --------------------------------------------------------------
         */
        ldp     x16, x17, [sp, #CTX_EL3STATE_OFFSET + CTX_SPSR_EL3]
        ldr     x18, [sp, #CTX_EL3STATE_OFFSET + CTX_SCR_EL3]
        ldr     x19, [sp, #CTX_EL3STATE_OFFSET + CTX_MDCR_EL3]
        msr     spsr_el3, x16
        msr     elr_el3, x17
        msr     scr_el3, x18
        msr     mdcr_el3, x19

        restore_ptw_el1_sys_regs

        /* ----------------------------------------------------------
         * Restore general purpose (including x30), PMCR_EL0 and
         * ARMv8.3-PAuth registers.
         * Exit EL3 via ERET to a lower exception level.
         * ----------------------------------------------------------
         */
        bl      restore_gp_pmcr_pauth_regs
        ldr     x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]

#ifdef IMAGE_BL31
        /* Clear the nested EA flag as we are exiting EL3 */
        str     xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_NESTED_EA_FLAG]
#endif /* IMAGE_BL31 */

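        /*
         * exception_return is expected to perform the ERET, together
         * with any speculation barrier required after it.
         */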
        exception_return

endfunc el3_exit