/*
 * Copyright 2018 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "offsets.h"

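/*
 * offsets.h is expected to define VCPU_REGS and VCPU_LAZY, the byte offsets
 * of a vcpu's general-purpose register buffer and lazily-saved system
 * register buffer; they are used as immediates in the loads/stores below.
 */
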
/**
 * Saves the volatile registers onto the stack. This currently takes 14
 * instructions, so it can be used in exception handlers with 18 instructions
 * left, 2 of which are in the same cache line (assuming a 16-byte cache line).
 *
 * On return, x0 and x1 are initialised to elr_el2 and spsr_el2 respectively,
 * which can be used as the first and second arguments of a subsequent call.
 */
.macro save_volatile_to_stack
        /* Reserve stack space and save registers x0-x18, x29 & x30. */
        stp x0, x1, [sp, #-(8 * 24)]!
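        /*
         * The pre-indexed store above both reserves the full 24 * 8 =
         * 192-byte frame (keeping sp 16-byte aligned) and saves x0 & x1 at
         * its base; the stores below fill in the rest at fixed offsets.
         */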
        stp x2, x3, [sp, #8 * 2]
        stp x4, x5, [sp, #8 * 4]
        stp x6, x7, [sp, #8 * 6]
        stp x8, x9, [sp, #8 * 8]
        stp x10, x11, [sp, #8 * 10]
        stp x12, x13, [sp, #8 * 12]
        stp x14, x15, [sp, #8 * 14]
        stp x16, x17, [sp, #8 * 16]
        str x18, [sp, #8 * 18]
        stp x29, x30, [sp, #8 * 20]

        /*
         * Save elr_el2 & spsr_el2. This is done so that we can take nested
         * exceptions and still be able to unwind.
         */
        mrs x0, elr_el2
        mrs x1, spsr_el2
        stp x0, x1, [sp, #8 * 22]
.endm

/**
 * Restores the volatile registers from the stack. This currently takes 14
 * instructions, so it can be used in exception handlers while still leaving
 * 18 instructions free; if paired with save_volatile_to_stack, there are 4
 * instructions to spare.
 */
.macro restore_volatile_from_stack
        /* Restore registers x2-x18, x29 & x30. */
        ldp x2, x3, [sp, #8 * 2]
        ldp x4, x5, [sp, #8 * 4]
        ldp x6, x7, [sp, #8 * 6]
        ldp x8, x9, [sp, #8 * 8]
        ldp x10, x11, [sp, #8 * 10]
        ldp x12, x13, [sp, #8 * 12]
        ldp x14, x15, [sp, #8 * 14]
        ldp x16, x17, [sp, #8 * 16]
        ldr x18, [sp, #8 * 18]
        ldp x29, x30, [sp, #8 * 20]

        /* Restore registers elr_el2 & spsr_el2, using x0 & x1 as scratch. */
        ldp x0, x1, [sp, #8 * 22]
        msr elr_el2, x0
        msr spsr_el2, x1

        /* Restore x0 & x1, and release stack space. */
        ldp x0, x1, [sp], #8 * 24
.endm

/**
 * This is a generic handler for exceptions taken at the current EL while using
 * SP0. It behaves similarly to the SPx case by first switching to SPx, doing
 * the work, then switching back to SP0 before returning.
 *
 * Switching to SPx and calling the C handler takes 16 instructions, so it's
 * not possible to add a branch to a common exit path without going into the
 * next cache line (assuming 16-byte cache lines). To restore and return we
 * need 16 more instructions, so we implement the whole handler within the
 * allotted 32 instructions.
 */
.macro current_exception_sp0 handler:req
        msr spsel, #1
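        /*
         * Writing 1 to SPSel switches from SP_EL0 to the dedicated SP_EL2
         * stack; the matching "msr spsel, #0" below switches back before
         * returning to the interrupted code.
         */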
        save_volatile_to_stack
        bl \handler
        restore_volatile_from_stack
        msr spsel, #0
        eret
.endm

/**
 * This is a generic handler for exceptions taken at the current EL while using
 * SPx. It saves volatile registers, calls the C handler, restores volatile
 * registers, then returns.
 *
 * Saving state and jumping to the C handler takes 15 instructions, and we add
 * an extra branch to a common exit path, so each handler occupies one unique
 * cache line and one shared cache line (assuming 16-byte cache lines).
 */
.macro current_exception_spx handler:req
        save_volatile_to_stack
        bl \handler
        b restore_from_stack_and_return
.endm

/**
 * Saves the volatile registers into the register buffer of the current vcpu.
 * It allocates space on the stack for x18 and saves it if "also_save_x18" is
 * specified; otherwise the caller is expected to have saved x18 in a similar
 * fashion.
 */
.macro save_volatile_to_vcpu also_save_x18
.ifnb \also_save_x18
        /*
         * Save x18 since we're about to clobber it. We subtract 16 instead of
         * 8 from the stack pointer to keep it 16-byte aligned.
         */
        str x18, [sp, #-16]!
.endif
        /* Get the current vcpu. */
        mrs x18, tpidr_el2
        stp x0, x1, [x18, #VCPU_REGS + 8 * 0]
        stp x2, x3, [x18, #VCPU_REGS + 8 * 2]
        stp x4, x5, [x18, #VCPU_REGS + 8 * 4]
        stp x6, x7, [x18, #VCPU_REGS + 8 * 6]
        stp x8, x9, [x18, #VCPU_REGS + 8 * 8]
        stp x10, x11, [x18, #VCPU_REGS + 8 * 10]
        stp x12, x13, [x18, #VCPU_REGS + 8 * 12]
        stp x14, x15, [x18, #VCPU_REGS + 8 * 14]
        stp x16, x17, [x18, #VCPU_REGS + 8 * 16]
        stp x29, x30, [x18, #VCPU_REGS + 8 * 29]

        /* x18 was saved on the stack, so we move it to the vcpu regs buffer. */
        ldr x0, [sp], #16
        str x0, [x18, #VCPU_REGS + 8 * 18]

        /* Save return address & mode. */
        mrs x1, elr_el2
        mrs x2, spsr_el2
        stp x1, x2, [x18, #VCPU_REGS + 8 * 31]
.endm
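
/*
 * Layout note: the regs buffer holds x0-x30 at indices 0-30, followed by the
 * saved elr_el2 (return address) and spsr_el2 (mode) at indices 31-32.
 */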

/**
 * This is a generic handler for exceptions taken at a lower EL. It saves the
 * volatile registers to the current vcpu and calls the C handler, which can
 * select one of two paths: (a) restore volatile registers and return, or
 * (b) switch to a different vcpu. In the latter case, the handler needs to
 * save all non-volatile registers (they haven't been saved yet), then restore
 * all registers from the new vcpu.
 */
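/*
 * A sketch of the assumed C-side contract (not the actual prototypes): each
 * handler returns, in x0, either NULL to keep running the current vcpu or a
 * pointer to the vcpu to switch to, e.g.
 *
 *   struct vcpu *irq_lower(void);
 *   struct vcpu *sync_lower_exception(uintreg_t esr);  (esr passed in x0)
 */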
.macro lower_exception handler:req
        save_volatile_to_vcpu also_save_x18

        /* Call C handler. */
        bl \handler

        /* Switch vcpu if requested by handler. */
        cbnz x0, vcpu_switch

        /* vcpu is not changing. */
        mrs x0, tpidr_el2
        b vcpu_restore_volatile_and_run
.endm

/**
 * This is the handler for a sync exception taken at a lower EL. If the reason
 * for the exception is an HVC call, it calls the faster hvc_handler without
 * saving a lot of the registers; otherwise it goes to slow_sync_lower, which
 * is the slow path where all registers need to be saved/restored.
 */
.macro lower_sync_exception
        /* Save x18 as save_volatile_to_vcpu would have. */
        str x18, [sp, #-16]!

        /* Extract the exception class (EC) from exception syndrome register. */
        mrs x18, esr_el2
        lsr x18, x18, #26
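        /* (The EC lives in bits [31:26] of esr_el2, hence the shift by 26.) */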

        /* Take the slow path if exception is not due to an HVC instruction. */
        sub x18, x18, #0x16
        cbnz x18, slow_sync_lower
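        /* (0x16 is the EC for an HVC instruction executed in AArch64 state.) */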

        /*
         * Save x29 and x30, which are not saved by the callee, then jump to
         * HVC handler.
         */
        stp x29, x30, [sp, #-16]!
        bl hvc_handler
        ldp x29, x30, [sp], #16
        cbnz x1, sync_lower_switch
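        /*
         * hvc_handler is assumed to return the call's result in x0 and, in
         * x1, either 0 (keep running the current vcpu) or a pointer to the
         * vcpu to switch to.
         */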

        /* Zero out all volatile registers (except x0) and return. */
        stp xzr, xzr, [sp, #-16]!
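        /*
         * A single pair of zeroes is pushed; every ldp below reloads that
         * same pair because only the last one post-increments sp to pop it.
         */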
        ldp x1, x2, [sp]
        ldp x3, x4, [sp]
        ldp x5, x6, [sp]
        ldp x7, x8, [sp]
        ldp x9, x10, [sp]
        ldp x11, x12, [sp]
        ldp x13, x14, [sp]
        ldp x15, x16, [sp], #16
        mov x17, xzr

        /* Restore x18, which was saved on the stack. */
        ldr x18, [sp], #16
        eret
.endm

/**
 * The following is the exception table. A pointer to it will be stored in
 * the vbar_el2 register.
 */
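/*
 * The architecture requires the vector base to be 2KB-aligned (hence
 * .balign 0x800) and gives each of the 16 entries 0x80 bytes, i.e. 32
 * instructions (hence .balign 0x80 between entries).
 */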
.section .text.vector_table_el2, "ax"
.global vector_table_el2
.balign 0x800
vector_table_el2:
sync_cur_sp0:
        current_exception_sp0 sync_current_exception

.balign 0x80
irq_cur_sp0:
        current_exception_sp0 irq_current_exception

.balign 0x80
fiq_cur_sp0:
        current_exception_sp0 fiq_current_exception

.balign 0x80
serr_cur_sp0:
        current_exception_sp0 serr_current_exception

.balign 0x80
sync_cur_spx:
        current_exception_spx sync_current_exception

.balign 0x80
irq_cur_spx:
        current_exception_spx irq_current_exception

.balign 0x80
fiq_cur_spx:
        current_exception_spx fiq_current_exception

.balign 0x80
serr_cur_spx:
        current_exception_spx serr_current_exception

.balign 0x80
sync_lower_64:
        lower_sync_exception

.balign 0x80
irq_lower_64:
        lower_exception irq_lower

.balign 0x80
fiq_lower_64:
        lower_exception fiq_lower

.balign 0x80
serr_lower_64:
        lower_exception serr_lower

.balign 0x80
sync_lower_32:
        lower_sync_exception

.balign 0x80
irq_lower_32:
        lower_exception irq_lower

.balign 0x80
fiq_lower_32:
        lower_exception fiq_lower

.balign 0x80
serr_lower_32:
        lower_exception serr_lower

.balign 0x40
slow_sync_lower:
        /* The caller must have saved x18, so we don't save it here. */
        save_volatile_to_vcpu

        /* Read syndrome register and call C handler. */
        mrs x0, esr_el2
        bl sync_lower_exception
        cbnz x0, vcpu_switch

        /* vcpu is not changing. */
        mrs x0, tpidr_el2
        b vcpu_restore_volatile_and_run

sync_lower_switch:
        /* We'll have to switch, so save volatile state before doing so. */
        mrs x18, tpidr_el2

        /* Store zeroes in volatile register storage, except x0. */
        stp x0, xzr, [x18, #VCPU_REGS + 8 * 0]
        stp xzr, xzr, [x18, #VCPU_REGS + 8 * 2]
        stp xzr, xzr, [x18, #VCPU_REGS + 8 * 4]
        stp xzr, xzr, [x18, #VCPU_REGS + 8 * 6]
        stp xzr, xzr, [x18, #VCPU_REGS + 8 * 8]
        stp xzr, xzr, [x18, #VCPU_REGS + 8 * 10]
        stp xzr, xzr, [x18, #VCPU_REGS + 8 * 12]
        stp xzr, xzr, [x18, #VCPU_REGS + 8 * 14]
        stp xzr, xzr, [x18, #VCPU_REGS + 8 * 16]
        stp x29, x30, [x18, #VCPU_REGS + 8 * 29]

        /* x18 was saved on the stack, so we move it to the vcpu regs buffer. */
        ldr x2, [sp], #16
        str x2, [x18, #VCPU_REGS + 8 * 18]

        /* Save return address & mode. */
        mrs x2, elr_el2
        mrs x3, spsr_el2
        stp x2, x3, [x18, #VCPU_REGS + 8 * 31]

        /* Save lazy state, then switch to new vcpu. */
        mov x0, x1

        /* Intentional fallthrough. */
/**
 * Switch to a new vcpu.
 *
 * All volatile registers from the old vcpu have already been saved. We need
 * to save only non-volatile ones from the old vcpu, and restore all from the
 * new one.
 *
 * x0 is a pointer to the new vcpu.
 */
vcpu_switch:
        /* Save non-volatile registers. */
        mrs x1, tpidr_el2
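        /* (x1 is the old vcpu, from tpidr_el2; x0 already holds the new.) */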
        stp x19, x20, [x1, #VCPU_REGS + 8 * 19]
        stp x21, x22, [x1, #VCPU_REGS + 8 * 21]
        stp x23, x24, [x1, #VCPU_REGS + 8 * 23]
        stp x25, x26, [x1, #VCPU_REGS + 8 * 25]
        stp x27, x28, [x1, #VCPU_REGS + 8 * 27]

        /* Save lazy state. */
        mrs x24, vmpidr_el2
        mrs x25, csselr_el1
        stp x24, x25, [x1, #VCPU_LAZY + 16 * 0]

        mrs x2, sctlr_el1
        mrs x3, actlr_el1
        stp x2, x3, [x1, #VCPU_LAZY + 16 * 1]

        mrs x4, cpacr_el1
        mrs x5, ttbr0_el1
        stp x4, x5, [x1, #VCPU_LAZY + 16 * 2]

        mrs x6, ttbr1_el1
        mrs x7, tcr_el1
        stp x6, x7, [x1, #VCPU_LAZY + 16 * 3]

        mrs x8, esr_el1
        mrs x9, afsr0_el1
        stp x8, x9, [x1, #VCPU_LAZY + 16 * 4]

        mrs x10, afsr1_el1
        mrs x11, far_el1
        stp x10, x11, [x1, #VCPU_LAZY + 16 * 5]

        mrs x12, mair_el1
        mrs x13, vbar_el1
        stp x12, x13, [x1, #VCPU_LAZY + 16 * 6]

        mrs x14, contextidr_el1
        mrs x15, tpidr_el0
        stp x14, x15, [x1, #VCPU_LAZY + 16 * 7]

        mrs x16, tpidrro_el0
        mrs x17, tpidr_el1
        stp x16, x17, [x1, #VCPU_LAZY + 16 * 8]

        mrs x18, amair_el1
        mrs x19, cntkctl_el1
        stp x18, x19, [x1, #VCPU_LAZY + 16 * 9]

        mrs x20, sp_el0
        mrs x21, sp_el1
        stp x20, x21, [x1, #VCPU_LAZY + 16 * 10]

        mrs x22, par_el1
        mrs x23, hcr_el2
        stp x22, x23, [x1, #VCPU_LAZY + 16 * 11]

        mrs x24, cptr_el2
        mrs x25, cnthctl_el2
        stp x24, x25, [x1, #VCPU_LAZY + 16 * 12]

        mrs x26, vttbr_el2
        str x26, [x1, #VCPU_LAZY + 16 * 13]

        /* Intentional fallthrough. */

.globl vcpu_restore_all_and_run
vcpu_restore_all_and_run:
        /* Update pointer to current vcpu. */
        msr tpidr_el2, x0

        /* Restore lazy registers. */
        ldp x24, x25, [x0, #VCPU_LAZY + 16 * 0]
        msr vmpidr_el2, x24
        msr csselr_el1, x25

        ldp x2, x3, [x0, #VCPU_LAZY + 16 * 1]
        msr sctlr_el1, x2
        msr actlr_el1, x3

        ldp x4, x5, [x0, #VCPU_LAZY + 16 * 2]
        msr cpacr_el1, x4
        msr ttbr0_el1, x5

        ldp x6, x7, [x0, #VCPU_LAZY + 16 * 3]
        msr ttbr1_el1, x6
        msr tcr_el1, x7

        ldp x8, x9, [x0, #VCPU_LAZY + 16 * 4]
        msr esr_el1, x8
        msr afsr0_el1, x9

        ldp x10, x11, [x0, #VCPU_LAZY + 16 * 5]
        msr afsr1_el1, x10
        msr far_el1, x11

        ldp x12, x13, [x0, #VCPU_LAZY + 16 * 6]
        msr mair_el1, x12
        msr vbar_el1, x13

        ldp x14, x15, [x0, #VCPU_LAZY + 16 * 7]
        msr contextidr_el1, x14
        msr tpidr_el0, x15

        ldp x16, x17, [x0, #VCPU_LAZY + 16 * 8]
        msr tpidrro_el0, x16
        msr tpidr_el1, x17

        ldp x18, x19, [x0, #VCPU_LAZY + 16 * 9]
        msr amair_el1, x18
        msr cntkctl_el1, x19

        ldp x20, x21, [x0, #VCPU_LAZY + 16 * 10]
        msr sp_el0, x20
        msr sp_el1, x21

        ldp x22, x23, [x0, #VCPU_LAZY + 16 * 11]
        msr par_el1, x22
        msr hcr_el2, x23

        ldp x24, x25, [x0, #VCPU_LAZY + 16 * 12]
        msr cptr_el2, x24
        msr cnthctl_el2, x25

        ldr x26, [x0, #VCPU_LAZY + 16 * 13]
        msr vttbr_el2, x26

        /* Restore non-volatile registers. */
        ldp x19, x20, [x0, #VCPU_REGS + 8 * 19]
        ldp x21, x22, [x0, #VCPU_REGS + 8 * 21]
        ldp x23, x24, [x0, #VCPU_REGS + 8 * 23]
        ldp x25, x26, [x0, #VCPU_REGS + 8 * 25]
        ldp x27, x28, [x0, #VCPU_REGS + 8 * 27]

        /* Intentional fallthrough. */
/**
 * Restore volatile registers and run the given vcpu.
 *
 * x0 is a pointer to the target vcpu.
 */
vcpu_restore_volatile_and_run:
        ldp x4, x5, [x0, #VCPU_REGS + 8 * 4]
        ldp x6, x7, [x0, #VCPU_REGS + 8 * 6]
        ldp x8, x9, [x0, #VCPU_REGS + 8 * 8]
        ldp x10, x11, [x0, #VCPU_REGS + 8 * 10]
        ldp x12, x13, [x0, #VCPU_REGS + 8 * 12]
        ldp x14, x15, [x0, #VCPU_REGS + 8 * 14]
        ldp x16, x17, [x0, #VCPU_REGS + 8 * 16]
        ldr x18, [x0, #VCPU_REGS + 8 * 18]
        ldp x29, x30, [x0, #VCPU_REGS + 8 * 29]

        /* Restore return address & mode. */
        ldp x1, x2, [x0, #VCPU_REGS + 8 * 31]
        msr elr_el2, x1
        msr spsr_el2, x2

        /* Restore x0..x3, which we have used as scratch before. */
        ldp x2, x3, [x0, #VCPU_REGS + 8 * 2]
        ldp x0, x1, [x0, #VCPU_REGS + 8 * 0]
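        /* (x0 is loaded last because it is the base register for these loads.) */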
        eret

.balign 0x40
/**
 * Restores volatile registers from the stack and returns.
 */
restore_from_stack_and_return:
        restore_volatile_from_stack
        eret