/*
 * Copyright 2018 The Hafnium Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "offsets.h"
#include "exception_macros.S"

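/*
 * offsets.h is assumed to provide the VCPU_REGS, VCPU_LAZY, VCPU_FREGS and
 * VCPU_GIC byte offsets into the vcpu structure that are used throughout
 * this file.
 */
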
/**
 * Saves the volatile registers into the register buffer of the current vcpu.
 * It allocates space on the stack for x18 and saves it if "also_save_x18" is
 * specified; otherwise the caller is expected to have saved x18 in a similar
 * fashion.
 */
.macro save_volatile_to_vcpu also_save_x18
        .ifnb \also_save_x18
        /*
         * Save x18 since we're about to clobber it. We subtract 16 instead of
         * 8 from the stack pointer to keep it 16-byte aligned.
         */
        str x18, [sp, #-16]!
        .endif
        /* Get the current vcpu. */
        mrs x18, tpidr_el2
        stp x0, x1, [x18, #VCPU_REGS + 8 * 0]
        stp x2, x3, [x18, #VCPU_REGS + 8 * 2]
        stp x4, x5, [x18, #VCPU_REGS + 8 * 4]
        stp x6, x7, [x18, #VCPU_REGS + 8 * 6]
        stp x8, x9, [x18, #VCPU_REGS + 8 * 8]
        stp x10, x11, [x18, #VCPU_REGS + 8 * 10]
        stp x12, x13, [x18, #VCPU_REGS + 8 * 12]
        stp x14, x15, [x18, #VCPU_REGS + 8 * 14]
        stp x16, x17, [x18, #VCPU_REGS + 8 * 16]
        stp x29, x30, [x18, #VCPU_REGS + 8 * 29]

        /* x18 was saved on the stack, so we move it to the vcpu regs buffer. */
        ldr x0, [sp], #16
        str x0, [x18, #VCPU_REGS + 8 * 18]

        /* Save the return address & mode. */
        mrs x1, elr_el2
        mrs x2, spsr_el2
        stp x1, x2, [x18, #VCPU_REGS + 8 * 31]
.endm

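/*
 * Usage sketch: "save_volatile_to_vcpu also_save_x18" both pushes x18 and
 * copies it into the vcpu, whereas a bare "save_volatile_to_vcpu" assumes
 * the caller has already executed "str x18, [sp, #-16]!" itself, as
 * lower_sync_exception and slow_sync_lower do below.
 */
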
/**
 * Saves all general purpose registers into the register buffer of the current
 * vcpu.
 */
.macro save_registers_to_vcpu
        save_volatile_to_vcpu also_save_x18
        stp x19, x20, [x18, #VCPU_REGS + 8 * 19]
        stp x21, x22, [x18, #VCPU_REGS + 8 * 21]
        stp x23, x24, [x18, #VCPU_REGS + 8 * 23]
        stp x25, x26, [x18, #VCPU_REGS + 8 * 25]
        stp x27, x28, [x18, #VCPU_REGS + 8 * 27]
.endm

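/*
 * In this file, only handle_system_register_access_s uses this macro: the
 * emulation path may read or write any guest register, so the non-volatile
 * ones must be captured as well.
 */
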
/**
 * Restores the volatile registers from the register buffer of the current
 * vcpu.
 */
.macro restore_volatile_from_vcpu vcpu_ptr:req
        ldp x4, x5, [\vcpu_ptr, #VCPU_REGS + 8 * 4]
        ldp x6, x7, [\vcpu_ptr, #VCPU_REGS + 8 * 6]
        ldp x8, x9, [\vcpu_ptr, #VCPU_REGS + 8 * 8]
        ldp x10, x11, [\vcpu_ptr, #VCPU_REGS + 8 * 10]
        ldp x12, x13, [\vcpu_ptr, #VCPU_REGS + 8 * 12]
        ldp x14, x15, [\vcpu_ptr, #VCPU_REGS + 8 * 14]
        ldp x16, x17, [\vcpu_ptr, #VCPU_REGS + 8 * 16]
        ldr x18, [\vcpu_ptr, #VCPU_REGS + 8 * 18]
        ldp x29, x30, [\vcpu_ptr, #VCPU_REGS + 8 * 29]

        /* Restore the return address & mode. */
        ldp x1, x2, [\vcpu_ptr, #VCPU_REGS + 8 * 31]
        msr elr_el2, x1
        msr spsr_el2, x2

        /* Restore x0..x3, which were used as scratch registers above. */
        ldp x2, x3, [\vcpu_ptr, #VCPU_REGS + 8 * 2]
        ldp x0, x1, [\vcpu_ptr, #VCPU_REGS + 8 * 0]
.endm

/**
 * Restores all general purpose registers from the register buffer of the
 * current vcpu.
 */
.macro restore_registers_from_vcpu vcpu_ptr:req
        ldp x19, x20, [\vcpu_ptr, #VCPU_REGS + 8 * 19]
        ldp x21, x22, [\vcpu_ptr, #VCPU_REGS + 8 * 21]
        ldp x23, x24, [\vcpu_ptr, #VCPU_REGS + 8 * 23]
        ldp x25, x26, [\vcpu_ptr, #VCPU_REGS + 8 * 25]
        ldp x27, x28, [\vcpu_ptr, #VCPU_REGS + 8 * 27]
        restore_volatile_from_vcpu \vcpu_ptr
.endm

/**
 * This is a generic handler for exceptions taken at a lower EL. It saves the
 * volatile registers to the current vcpu and calls the C handler, which can
 * select one of two paths: (a) restore volatile registers and return, or
 * (b) switch to a different vcpu. In the latter case, the handler needs to
 * save all non-volatile registers (they haven't been saved yet), then restore
 * all registers from the new vcpu.
 */
.macro lower_exception handler:req
        save_volatile_to_vcpu also_save_x18

        /* Call C handler. */
        bl \handler

        /* Switch vcpu if requested by handler. */
        cbnz x0, vcpu_switch

        /* vcpu is not changing. */
        mrs x0, tpidr_el2
        b vcpu_restore_volatile_and_run
.endm

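/*
 * The C handlers passed to lower_exception below (irq_lower, fiq_lower and
 * serr_lower) are assumed to return, in x0, a pointer to the vcpu to switch
 * to, or zero to keep running the current vcpu; the cbnz above relies on
 * this convention.
 */
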
/**
 * This is the handler for a sync exception taken at a lower EL. If the reason
 * for the exception is an HVC call, it calls the faster hvc_handler without
 * saving a lot of the registers; otherwise it goes to slow_sync_lower, which
 * is the slow path where all registers need to be saved/restored.
 */
.macro lower_sync_exception
        /* Save x18 as save_volatile_to_vcpu would have. */
        str x18, [sp, #-16]!

        /* Extract the exception class (EC) from the exception syndrome register. */
        mrs x18, esr_el2
        lsr x18, x18, #26

        /*
         * Take the slow path if the exception is not due to an HVC
         * instruction (EC 0x16).
         */
        cmp x18, #0x16
        b.ne slow_sync_lower

        /*
         * Save x29 and x30, which are not saved by the callee, then jump to
         * the HVC handler.
         */
        stp x29, x30, [sp, #-16]!
        bl hvc_handler
        ldp x29, x30, [sp], #16
        cbnz x1, sync_lower_switch

        /*
         * Zero out all volatile registers (except x0) and return. The initial
         * stp pushes 16 bytes of zeroes, which each of the following ldp
         * instructions reads back, clearing x1..x16 without a long run of
         * mov instructions.
         */
        stp xzr, xzr, [sp, #-16]!
        ldp x1, x2, [sp]
        ldp x3, x4, [sp]
        ldp x5, x6, [sp]
        ldp x7, x8, [sp]
        ldp x9, x10, [sp]
        ldp x11, x12, [sp]
        ldp x13, x14, [sp]
        ldp x15, x16, [sp], #16
        mov x17, xzr

        /* Restore x18, which was saved on the stack. */
        ldr x18, [sp], #16
        eret
.endm

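/*
 * hvc_handler is assumed to follow the convention visible above: it leaves
 * the value to hand back to the caller in x0 and, in x1, either zero (keep
 * running the current vcpu) or a pointer to the vcpu to switch to.
 */
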
/**
 * The following is the exception table. A pointer to it will be stored in
 * register vbar_el2.
 */
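/*
 * As architected, vbar_el2 must be 2KB-aligned (hence .balign 0x800) and the
 * table consists of 16 entries of 0x80 bytes each: one per exception type
 * (synchronous, IRQ, FIQ, SError) for each of the four origins (current EL
 * with SP_EL0, current EL with SP_ELx, lower EL using AArch64, lower EL
 * using AArch32).
 */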
.section .text.vector_table_el2, "ax"
.global vector_table_el2
.balign 0x800
vector_table_el2:
sync_cur_sp0:
        current_exception_sp0 el2 sync_current_exception

.balign 0x80
irq_cur_sp0:
        current_exception_sp0 el2 irq_current_exception

.balign 0x80
fiq_cur_sp0:
        current_exception_sp0 el2 fiq_current_exception

.balign 0x80
serr_cur_sp0:
        current_exception_sp0 el2 serr_current_exception

.balign 0x80
sync_cur_spx:
        current_exception_spx el2 sync_current_exception

.balign 0x80
irq_cur_spx:
        current_exception_spx el2 irq_current_exception

.balign 0x80
fiq_cur_spx:
        current_exception_spx el2 fiq_current_exception

.balign 0x80
serr_cur_spx:
        current_exception_spx el2 serr_current_exception

.balign 0x80
sync_lower_64:
        lower_sync_exception

.balign 0x80
irq_lower_64:
        lower_exception irq_lower

.balign 0x80
fiq_lower_64:
        lower_exception fiq_lower

.balign 0x80
serr_lower_64:
        lower_exception serr_lower

.balign 0x80
sync_lower_32:
        lower_sync_exception

.balign 0x80
irq_lower_32:
        lower_exception irq_lower

.balign 0x80
fiq_lower_32:
        lower_exception fiq_lower

.balign 0x80
serr_lower_32:
        lower_exception serr_lower

.balign 0x40
slow_sync_lower:
        /* Take the system register access path for EC 0x18. */
        cmp x18, #0x18
        b.eq handle_system_register_access_s
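        /*
         * EC 0x18 is a trapped MSR, MRS or system instruction executed in
         * AArch64; it is handled separately so that all general purpose
         * registers can be preserved around the emulation.
         */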

        /* The caller must have saved x18, so we don't save it here. */
        save_volatile_to_vcpu

        /* Read syndrome register and call C handler. */
        mrs x0, esr_el2
        bl sync_lower_exception
        cbnz x0, vcpu_switch

        /* vcpu is not changing. */
        mrs x0, tpidr_el2
        b vcpu_restore_volatile_and_run

sync_lower_switch:
        /* We'll have to switch, so save volatile state before doing so. */
        mrs x18, tpidr_el2

        /* Store zeroes in volatile register storage, except x0. */
        stp x0, xzr, [x18, #VCPU_REGS + 8 * 0]
        stp xzr, xzr, [x18, #VCPU_REGS + 8 * 2]
        stp xzr, xzr, [x18, #VCPU_REGS + 8 * 4]
        stp xzr, xzr, [x18, #VCPU_REGS + 8 * 6]
        stp xzr, xzr, [x18, #VCPU_REGS + 8 * 8]
        stp xzr, xzr, [x18, #VCPU_REGS + 8 * 10]
        stp xzr, xzr, [x18, #VCPU_REGS + 8 * 12]
        stp xzr, xzr, [x18, #VCPU_REGS + 8 * 14]
        stp xzr, xzr, [x18, #VCPU_REGS + 8 * 16]
        stp x29, x30, [x18, #VCPU_REGS + 8 * 29]

        /* x18 was saved on the stack, so we move it to the vcpu regs buffer. */
        ldr x2, [sp], #16
        str x2, [x18, #VCPU_REGS + 8 * 18]

        /* Save the return address & mode. */
        mrs x2, elr_el2
        mrs x3, spsr_el2
        stp x2, x3, [x18, #VCPU_REGS + 8 * 31]

        /*
         * Move the new vcpu (returned by hvc_handler in x1) into x0, then
         * fall through to save lazy state and switch to it.
         */
        mov x0, x1

        /* Intentional fallthrough. */
/**
 * Switch to a new vcpu.
 *
 * All volatile registers from the old vcpu have already been saved. We need
 * to save only non-volatile ones from the old vcpu, and restore all from the
 * new one.
 *
 * x0 is a pointer to the new vcpu.
 */
vcpu_switch:
        /* Save non-volatile (callee-saved) registers. */
        mrs x1, tpidr_el2
        stp x19, x20, [x1, #VCPU_REGS + 8 * 19]
        stp x21, x22, [x1, #VCPU_REGS + 8 * 21]
        stp x23, x24, [x1, #VCPU_REGS + 8 * 23]
        stp x25, x26, [x1, #VCPU_REGS + 8 * 25]
        stp x27, x28, [x1, #VCPU_REGS + 8 * 27]

        /* Save lazy state, using x28 as the base. */
        add x28, x1, #VCPU_LAZY

        mrs x24, vmpidr_el2
        mrs x25, csselr_el1
        stp x24, x25, [x28], #16

        mrs x2, sctlr_el1
        mrs x3, actlr_el1
        stp x2, x3, [x28], #16

        mrs x4, cpacr_el1
        mrs x5, ttbr0_el1
        stp x4, x5, [x28], #16

        mrs x6, ttbr1_el1
        mrs x7, tcr_el1
        stp x6, x7, [x28], #16

        mrs x8, esr_el1
        mrs x9, afsr0_el1
        stp x8, x9, [x28], #16

        mrs x10, afsr1_el1
        mrs x11, far_el1
        stp x10, x11, [x28], #16

        mrs x12, mair_el1
        mrs x13, vbar_el1
        stp x12, x13, [x28], #16

        mrs x14, contextidr_el1
        mrs x15, tpidr_el0
        stp x14, x15, [x28], #16

        mrs x16, tpidrro_el0
        mrs x17, tpidr_el1
        stp x16, x17, [x28], #16

        mrs x18, amair_el1
        mrs x19, cntkctl_el1
        stp x18, x19, [x28], #16

        mrs x20, sp_el0
        mrs x21, sp_el1
        stp x20, x21, [x28], #16

        mrs x22, elr_el1
        mrs x23, spsr_el1
        stp x22, x23, [x28], #16

        mrs x24, par_el1
        mrs x25, hcr_el2
        stp x24, x25, [x28], #16

        mrs x26, cptr_el2
        mrs x27, cnthctl_el2
        stp x26, x27, [x28], #16

        mrs x4, vttbr_el2
        mrs x5, mdcr_el2
        stp x4, x5, [x28], #16

        mrs x6, mdscr_el1
        str x6, [x28], #16

        /* Save GIC registers. */
#if GIC_VERSION == 3 || GIC_VERSION == 4
        /* Offset is too large, so start from a new base. */
        add x2, x1, #VCPU_GIC

        mrs x3, ich_hcr_el2
        mrs x4, icc_sre_el2
        stp x3, x4, [x2, #16 * 0]
#endif

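        /*
         * Each q register is 16 bytes, so the 32 q registers below occupy
         * 512 bytes of the VCPU_FREGS area, with fpsr and fpcr laid out
         * immediately after them.
         */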
        /* Save floating point registers, using x28 as the base. */
        add x28, x1, #VCPU_FREGS
        stp q0, q1, [x28], #32
        stp q2, q3, [x28], #32
        stp q4, q5, [x28], #32
        stp q6, q7, [x28], #32
        stp q8, q9, [x28], #32
        stp q10, q11, [x28], #32
        stp q12, q13, [x28], #32
        stp q14, q15, [x28], #32
        stp q16, q17, [x28], #32
        stp q18, q19, [x28], #32
        stp q20, q21, [x28], #32
        stp q22, q23, [x28], #32
        stp q24, q25, [x28], #32
        stp q26, q27, [x28], #32
        stp q28, q29, [x28], #32
        stp q30, q31, [x28], #32
        mrs x3, fpsr
        mrs x4, fpcr
        stp x3, x4, [x28], #32

        /* Save new vcpu pointer in non-volatile register. */
        mov x19, x0

        /*
         * Save peripheral registers, and inform the arch-independent sections
         * that registers have been saved.
         */
        mov x0, x1
        bl complete_saving_state
        mov x0, x19

        /* Intentional fallthrough. */
.global vcpu_restore_all_and_run
vcpu_restore_all_and_run:
        /* Update pointer to current vcpu. */
        msr tpidr_el2, x0

        /* Restore peripheral registers. */
        mov x19, x0
        bl begin_restoring_state
        mov x0, x19

        /*
         * Restore floating point registers.
         *
         * Offset is too large, so start from a new base.
         */
        add x2, x0, #VCPU_FREGS
        ldp q0, q1, [x2, #32 * 0]
        ldp q2, q3, [x2, #32 * 1]
        ldp q4, q5, [x2, #32 * 2]
        ldp q6, q7, [x2, #32 * 3]
        ldp q8, q9, [x2, #32 * 4]
        ldp q10, q11, [x2, #32 * 5]
        ldp q12, q13, [x2, #32 * 6]
        ldp q14, q15, [x2, #32 * 7]
        ldp q16, q17, [x2, #32 * 8]
        ldp q18, q19, [x2, #32 * 9]
        ldp q20, q21, [x2, #32 * 10]
        ldp q22, q23, [x2, #32 * 11]
        ldp q24, q25, [x2, #32 * 12]
        ldp q26, q27, [x2, #32 * 13]
        ldp q28, q29, [x2, #32 * 14]
        /* Offset becomes too large, so move the base. */
        ldp q30, q31, [x2, #32 * 15]!
        ldp x3, x4, [x2, #32 * 1]
        msr fpsr, x3

        /*
         * Only restore FPCR if changed, to avoid expensive
         * self-synchronising operation where possible.
         */
        mrs x5, fpcr
        cmp x5, x4
        b.eq vcpu_restore_lazy_and_run
        msr fpcr, x4
        /* Intentional fallthrough. */

vcpu_restore_lazy_and_run:
        /* Restore lazy registers, using x28 as the base. */
        add x28, x0, #VCPU_LAZY

        ldp x24, x25, [x28], #16
        msr vmpidr_el2, x24
        msr csselr_el1, x25

        ldp x2, x3, [x28], #16
        msr sctlr_el1, x2
        msr actlr_el1, x3

        ldp x4, x5, [x28], #16
        msr cpacr_el1, x4
        msr ttbr0_el1, x5

        ldp x6, x7, [x28], #16
        msr ttbr1_el1, x6
        msr tcr_el1, x7

        ldp x8, x9, [x28], #16
        msr esr_el1, x8
        msr afsr0_el1, x9

        ldp x10, x11, [x28], #16
        msr afsr1_el1, x10
        msr far_el1, x11

        ldp x12, x13, [x28], #16
        msr mair_el1, x12
        msr vbar_el1, x13

        ldp x14, x15, [x28], #16
        msr contextidr_el1, x14
        msr tpidr_el0, x15

        ldp x16, x17, [x28], #16
        msr tpidrro_el0, x16
        msr tpidr_el1, x17

        ldp x18, x19, [x28], #16
        msr amair_el1, x18
        msr cntkctl_el1, x19

        ldp x20, x21, [x28], #16
        msr sp_el0, x20
        msr sp_el1, x21

        ldp x22, x23, [x28], #16
        msr elr_el1, x22
        msr spsr_el1, x23

        ldp x24, x25, [x28], #16
        msr par_el1, x24
        msr hcr_el2, x25

        ldp x26, x27, [x28], #16
        msr cptr_el2, x26
        msr cnthctl_el2, x27

        ldp x4, x5, [x28], #16
        msr vttbr_el2, x4
        msr mdcr_el2, x5

        ldr x6, [x28], #16
        msr mdscr_el1, x6

        /* Restore GIC registers. */
#if GIC_VERSION == 3 || GIC_VERSION == 4
        /* Offset is too large, so start from a new base. */
        add x2, x0, #VCPU_GIC

        ldp x3, x4, [x2, #16 * 0]
        msr ich_hcr_el2, x3
        msr icc_sre_el2, x4
#endif

        /*
         * If a different vCPU is being run on this physical CPU to the last
         * one which was run for this VM, invalidate the TLB. This must be
         * called after vttbr_el2 has been updated, so that we have the page
         * table and VMID of the vCPU to which we are switching.
         */
        mov x19, x0
        bl maybe_invalidate_tlb
        mov x0, x19

        /* Restore non-volatile registers. */
        ldp x19, x20, [x0, #VCPU_REGS + 8 * 19]
        ldp x21, x22, [x0, #VCPU_REGS + 8 * 21]
        ldp x23, x24, [x0, #VCPU_REGS + 8 * 23]
        ldp x25, x26, [x0, #VCPU_REGS + 8 * 25]
        ldp x27, x28, [x0, #VCPU_REGS + 8 * 27]

        /* Intentional fallthrough. */
/**
 * Restore volatile registers and run the given vcpu.
 *
 * x0 is a pointer to the target vcpu.
 */
vcpu_restore_volatile_and_run:
        restore_volatile_from_vcpu x0
        eret

.balign 0x40
/**
 * Restore volatile registers from the stack and return to the original
 * caller.
 */
restore_from_stack_and_return:
        restore_volatile_from_stack el2
        eret

.balign 0x40
/**
 * Handle accesses to system registers (EC=0x18) and return to the original
 * caller.
 */
handle_system_register_access_s:
        /*
         * All registers are (conservatively) saved because the handler can
         * clobber non-volatile registers that are used by the msr/mrs, which
         * results in the wrong value being read or written.
         */
        save_registers_to_vcpu

        /* Read syndrome register and call C handler. */
        mrs x0, esr_el2
        bl handle_system_register_access
        cbnz x0, vcpu_switch

        /* vcpu is not changing. */
        mrs x0, tpidr_el2
        restore_registers_from_vcpu x0
        eret