#include "offsets.h"

.text
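/*
 * EL2 exception vector table. The architecture defines 16 entries of 0x80
 * bytes each, in four groups of four (synchronous, IRQ, FIQ, SError):
 * current EL with SP_EL0, current EL with SP_ELx, lower EL using AArch64,
 * and lower EL using AArch32. VBAR_EL2 requires the table to be
 * 2KB-aligned, hence the 0x800 alignment.
 */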
.balign 0x800
.global vector_table_el2
vector_table_el2:
	/* sync_cur_sp0 */
	b .

.balign 0x80
	/* irq_cur_sp0 */
	b irq_current

.balign 0x80
	/* fiq_cur_sp0 */
	b .

.balign 0x80
	/* serr_cur_sp0 */
	b .

.balign 0x80
	/* sync_cur_spx */
	mrs x0, esr_el2
	mrs x1, elr_el2
	b sync_current_exception

.balign 0x80
	/* irq_cur_spx */
	b irq_current

.balign 0x80
	/* fiq_cur_spx */
	b .

.balign 0x80
	/* serr_cur_spx */
	b .

.balign 0x80
	/* sync_lower_64 */

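	/*
	 * Synchronous exception from a lower EL using AArch64. HVCs are the
	 * common case, so they get a fast path that saves as little state as
	 * possible; all other exception classes take the slow path below.
	 */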
	/* Save x18 since we're about to clobber it. */
	str x18, [sp, #-8]

	/* Extract the exception class (EC), bits [31:26] of the exception
	   syndrome register. */
	mrs x18, esr_el2
	lsr x18, x18, #26

	/* Take the slow path unless the exception was caused by an HVC
	   instruction (EC 0x16). */
	subs x18, x18, #0x16
	b.ne slow_sync_lower_64

	/* Save x29 and x30, then call the HVC handler. */
	stp x29, x30, [sp, #-16]!
	bl hvc_handler
	ldp x29, x30, [sp], #16
	cbnz x1, sync_lower_64_switch

	/*
	 * Zero out all volatile registers (except x0, which holds the return
	 * value) and return. A single pair of zeroes is stored just below the
	 * stack pointer and then loaded into each register pair in turn, so
	 * no EL2 state leaks back to the VM.
	 */
	stp xzr, xzr, [sp, #-16]
	ldp x1, x2, [sp, #-16]
	ldp x3, x4, [sp, #-16]
	ldp x5, x6, [sp, #-16]
	ldp x7, x8, [sp, #-16]
	ldp x9, x10, [sp, #-16]
	ldp x11, x12, [sp, #-16]
	ldp x13, x14, [sp, #-16]
	ldp x15, x16, [sp, #-16]
	ldp x17, x18, [sp, #-16]
	eret

.balign 0x80
	/* irq_lower_64 */

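	/*
	 * IRQ from a lower EL using AArch64: save the current vcpu's volatile
	 * registers so that a C handler can run and, if it asks for it, a
	 * different vcpu can be scheduled.
	 */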
	/* Save x0 since we're about to clobber it. */
	str x0, [sp, #-8]

	/* Get the current vcpu. */
	mrs x0, tpidr_el2
	ldr x0, [x0, #CPU_CURRENT]

	/* Save volatile registers. */
	add x0, x0, #VCPU_REGS
	stp x2, x3, [x0, #8 * 2]
	stp x4, x5, [x0, #8 * 4]
	stp x6, x7, [x0, #8 * 6]
	stp x8, x9, [x0, #8 * 8]
	stp x10, x11, [x0, #8 * 10]
	stp x12, x13, [x0, #8 * 12]
	stp x14, x15, [x0, #8 * 14]
	stp x16, x17, [x0, #8 * 16]
	str x18, [x0, #8 * 18]
	stp x29, x30, [x0, #8 * 29]

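	/* x0 was stashed just below the stack pointer on entry; reload it
	   into x2 and store it together with x1 as the first register pair. */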
	ldr x2, [sp, #-8]
	stp x2, x1, [x0, #8 * 0]

	/* Save return address & mode. */
	mrs x1, elr_el2
	mrs x2, spsr_el2
	stp x1, x2, [x0, #8 * 31]

	/* Call C handler. */
	bl irq_handle_lower
	cbz x0, vcpu_return

	/* Find a new vcpu to run. */
	bl cpu_next_vcpu
	cbz x0, vcpu_return
	b vcpu_switch

.balign 0x80
	/* fiq_lower_64 */
	b .

.balign 0x80
	/* serr_lower_64 */
	b .

.balign 0x80
	/* sync_lower_32 */
	b .

.balign 0x80
	/* irq_lower_32 */
	b .

.balign 0x80
	/* fiq_lower_32 */
	b .

.balign 0x80
	/* serr_lower_32 */
	b .

.balign 0x80

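/*
 * Switch execution to the vcpu pointed to by x0. The callers have already
 * saved the outgoing vcpu's volatile registers; x1 is expected to point to
 * the outgoing vcpu (presumably returned by cpu_next_vcpu alongside x0) so
 * that its non-volatile registers and lazy state can be saved here before
 * falling through to restore the incoming vcpu.
 */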
vcpu_switch:
	/* Save non-volatile registers. */
	add x1, x1, #VCPU_REGS
	stp x19, x20, [x1, #8 * 19]
	stp x21, x22, [x1, #8 * 21]
	stp x23, x24, [x1, #8 * 23]
	stp x25, x26, [x1, #8 * 25]
	stp x27, x28, [x1, #8 * 27]

	/* Save lazy state: EL0/EL1 system registers that only need to be
	   context-switched when changing vcpus, not on every trap to EL2. */
	add x1, x1, #(VCPU_LAZY - VCPU_REGS)

	mrs x24, vmpidr_el2
	mrs x25, csselr_el1
	stp x24, x25, [x1, #16 * 0]

	mrs x2, sctlr_el1
	mrs x3, actlr_el1
	stp x2, x3, [x1, #16 * 1]

	mrs x4, cpacr_el1
	mrs x5, ttbr0_el1
	stp x4, x5, [x1, #16 * 2]

	mrs x6, ttbr1_el1
	mrs x7, tcr_el1
	stp x6, x7, [x1, #16 * 3]

	mrs x8, esr_el1
	mrs x9, afsr0_el1
	stp x8, x9, [x1, #16 * 4]

	mrs x10, afsr1_el1
	mrs x11, far_el1
	stp x10, x11, [x1, #16 * 5]

	mrs x12, mair_el1
	mrs x13, vbar_el1
	stp x12, x13, [x1, #16 * 6]

	mrs x14, contextidr_el1
	mrs x15, tpidr_el0
	stp x14, x15, [x1, #16 * 7]

	mrs x16, tpidrro_el0
	mrs x17, tpidr_el1
	stp x16, x17, [x1, #16 * 8]

	mrs x18, amair_el1
	mrs x19, cntkctl_el1
	stp x18, x19, [x1, #16 * 9]

	mrs x20, sp_el0
	mrs x21, sp_el1
	stp x20, x21, [x1, #16 * 10]

	mrs x22, par_el1
	str x22, [x1, #16 * 11]

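/*
 * Restore the full state of the vcpu pointed to by x0 and return to it.
 * Exposed globally, presumably so that C code can also use it to enter a
 * vcpu for the first time.
 */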
.globl vcpu_enter_restore_all
vcpu_enter_restore_all:
	/* Get a pointer to the lazy registers. */
	add x0, x0, #VCPU_LAZY

	ldp x24, x25, [x0, #16 * 0]
	msr vmpidr_el2, x24
	msr csselr_el1, x25

	ldp x2, x3, [x0, #16 * 1]
	msr sctlr_el1, x2
	msr actlr_el1, x3

	ldp x4, x5, [x0, #16 * 2]
	msr cpacr_el1, x4
	msr ttbr0_el1, x5

	ldp x6, x7, [x0, #16 * 3]
	msr ttbr1_el1, x6
	msr tcr_el1, x7

	ldp x8, x9, [x0, #16 * 4]
	msr esr_el1, x8
	msr afsr0_el1, x9

	ldp x10, x11, [x0, #16 * 5]
	msr afsr1_el1, x10
	msr far_el1, x11

	ldp x12, x13, [x0, #16 * 6]
	msr mair_el1, x12
	msr vbar_el1, x13

	ldp x14, x15, [x0, #16 * 7]
	msr contextidr_el1, x14
	msr tpidr_el0, x15

	ldp x16, x17, [x0, #16 * 8]
	msr tpidrro_el0, x16
	msr tpidr_el1, x17

	ldp x18, x19, [x0, #16 * 9]
	msr amair_el1, x18
	msr cntkctl_el1, x19

	ldp x20, x21, [x0, #16 * 10]
	msr sp_el0, x20
	msr sp_el1, x21

	ldp x22, x23, [x0, #16 * 11]
	msr par_el1, x22
	/* Note: hcr_el2 is restored from the slot after par_el1, which the
	   save path above never writes; presumably it is initialised when
	   the vcpu is created. */
	msr hcr_el2, x23

	/* Restore non-volatile registers. */
	add x0, x0, #(VCPU_REGS - VCPU_LAZY)

	ldp x19, x20, [x0, #8 * 19]
	ldp x21, x22, [x0, #8 * 21]
	ldp x23, x24, [x0, #8 * 23]
	ldp x25, x26, [x0, #8 * 25]
	ldp x27, x28, [x0, #8 * 27]

	/* Rewind x0 back to the vcpu pointer and fall through to restore the
	   volatile registers and return. */
	sub x0, x0, #VCPU_REGS

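/*
 * Restore the volatile registers of the vcpu pointed to by x0 and return
 * to it. Callers ensure the non-volatile registers already hold the vcpu's
 * values: either preserved across the C handler call (per AAPCS64) or
 * restored by the code above.
 */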
vcpu_return:
	/* Restore volatile registers. */
	add x0, x0, #VCPU_REGS

	ldp x4, x5, [x0, #8 * 4]
	ldp x6, x7, [x0, #8 * 6]
	ldp x8, x9, [x0, #8 * 8]
	ldp x10, x11, [x0, #8 * 10]
	ldp x12, x13, [x0, #8 * 12]
	ldp x14, x15, [x0, #8 * 14]
	ldp x16, x17, [x0, #8 * 16]
	ldr x18, [x0, #8 * 18]
	ldp x29, x30, [x0, #8 * 29]

	/* Restore return address & mode. */
	ldp x1, x2, [x0, #8 * 31]
	msr elr_el2, x1
	msr spsr_el2, x2

	/* Restore x0-x3, which were used as scratch registers above. */
	ldp x2, x3, [x0, #8 * 2]
	ldp x0, x1, [x0, #8 * 0]
	eret

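/*
 * Slow path for synchronous exceptions from a lower EL using AArch64,
 * i.e. anything other than an HVC. All volatile registers are saved so
 * that the C handler is free to clobber them.
 */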
slow_sync_lower_64:
	/* Get the current vcpu. */
	mrs x18, tpidr_el2
	/* TODO: tpidr_el2 should store the vcpu pointer directly. */
	ldr x18, [x18, #CPU_CURRENT]

	/* Save volatile registers. */
	add x18, x18, #VCPU_REGS
	stp x0, x1, [x18, #8 * 0]
	stp x2, x3, [x18, #8 * 2]
	stp x4, x5, [x18, #8 * 4]
	stp x6, x7, [x18, #8 * 6]
	stp x8, x9, [x18, #8 * 8]
	stp x10, x11, [x18, #8 * 10]
	stp x12, x13, [x18, #8 * 12]
	stp x14, x15, [x18, #8 * 14]
	stp x16, x17, [x18, #8 * 16]
	stp x29, x30, [x18, #8 * 29]

	/* x18 was stashed just below the stack pointer on entry; record its
	   original value in the register buffer. */
	ldr x0, [sp, #-8]
	str x0, [x18, #8 * 18]

	/* Save return address & mode. */
	mrs x1, elr_el2
	mrs x2, spsr_el2
	stp x1, x2, [x18, #8 * 31]

	/* Read syndrome register and call C handler. */
	mrs x0, esr_el2
	bl sync_lower_exception
	cbz x0, vcpu_return

	/* Find a new vcpu to run. */
	bl cpu_next_vcpu
	cbz x0, vcpu_return
	b vcpu_switch

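/*
 * The HVC handler requested a reschedule (x1 != 0). Record the vcpu's
 * state before switching: x0 (the HVC return value) and x29/x30 are kept,
 * while the remaining volatile registers are stored as zeroes so that no
 * EL2 state is leaked when this vcpu later resumes via vcpu_return.
 */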
sync_lower_64_switch:
	/* We'll have to reschedule, so store state before doing so. */
	mrs x18, tpidr_el2
	ldr x18, [x18, #CPU_CURRENT]

	/* Store zeroes in volatile register storage, except x0. */
	add x18, x18, #VCPU_REGS
	stp x0, xzr, [x18, #8 * 0]
	stp xzr, xzr, [x18, #8 * 2]
	stp xzr, xzr, [x18, #8 * 4]
	stp xzr, xzr, [x18, #8 * 6]
	stp xzr, xzr, [x18, #8 * 8]
	stp xzr, xzr, [x18, #8 * 10]
	stp xzr, xzr, [x18, #8 * 12]
	stp xzr, xzr, [x18, #8 * 14]
	stp xzr, xzr, [x18, #8 * 16]
	stp x29, x30, [x18, #8 * 29]

	str xzr, [x18, #8 * 18]

	/* Save return address & mode. */
	mrs x1, elr_el2
	mrs x2, spsr_el2
	stp x1, x2, [x18, #8 * 31]

	/* Find a new vcpu to run. */
	bl cpu_next_vcpu
	cbz x0, vcpu_return
	b vcpu_switch