/*
 * Copyright 2018 The Hafnium Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "offsets.h"
#include "exception_macros.S"

/**
 * Saves the volatile registers into the register buffer of the current vcpu.
 * It allocates space on the stack for x18 and saves it if "also_save_x18" is
 * specified; otherwise the caller is expected to have saved x18 in a similar
 * fashion.
 */
.macro save_volatile_to_vcpu also_save_x18
.ifnb \also_save_x18
        /*
         * Save x18 since we're about to clobber it. We subtract 16 instead of
         * 8 from the stack pointer to keep it 16-byte aligned.
         */
        str x18, [sp, #-16]!
.endif
        /* Get the current vcpu. */
        mrs x18, tpidr_el2
        stp x0, x1, [x18, #VCPU_REGS + 8 * 0]
        stp x2, x3, [x18, #VCPU_REGS + 8 * 2]
        stp x4, x5, [x18, #VCPU_REGS + 8 * 4]
        stp x6, x7, [x18, #VCPU_REGS + 8 * 6]
        stp x8, x9, [x18, #VCPU_REGS + 8 * 8]
        stp x10, x11, [x18, #VCPU_REGS + 8 * 10]
        stp x12, x13, [x18, #VCPU_REGS + 8 * 12]
        stp x14, x15, [x18, #VCPU_REGS + 8 * 14]
        stp x16, x17, [x18, #VCPU_REGS + 8 * 16]
        stp x29, x30, [x18, #VCPU_REGS + 8 * 29]

        /* x18 was saved on the stack, so move it to the vcpu regs buffer. */
        ldr x0, [sp], #16
        str x0, [x18, #VCPU_REGS + 8 * 18]

        /* Save return address & mode. */
        mrs x1, elr_el2
        mrs x2, spsr_el2
        stp x1, x2, [x18, #VCPU_REGS + 8 * 31]
.endm

/**
 * Saves all general purpose registers into the register buffer of the current
 * vcpu.
 */
.macro save_registers_to_vcpu
        save_volatile_to_vcpu also_save_x18
        stp x19, x20, [x18, #VCPU_REGS + 8 * 19]
        stp x21, x22, [x18, #VCPU_REGS + 8 * 21]
        stp x23, x24, [x18, #VCPU_REGS + 8 * 23]
        stp x25, x26, [x18, #VCPU_REGS + 8 * 25]
        stp x27, x28, [x18, #VCPU_REGS + 8 * 27]
.endm

/**
 * Restore the volatile registers from the register buffer of the current vcpu.
 */
.macro restore_volatile_from_vcpu vcpu_ptr:req
        ldp x4, x5, [\vcpu_ptr, #VCPU_REGS + 8 * 4]
        ldp x6, x7, [\vcpu_ptr, #VCPU_REGS + 8 * 6]
        ldp x8, x9, [\vcpu_ptr, #VCPU_REGS + 8 * 8]
        ldp x10, x11, [\vcpu_ptr, #VCPU_REGS + 8 * 10]
        ldp x12, x13, [\vcpu_ptr, #VCPU_REGS + 8 * 12]
        ldp x14, x15, [\vcpu_ptr, #VCPU_REGS + 8 * 14]
        ldp x16, x17, [\vcpu_ptr, #VCPU_REGS + 8 * 16]
        ldr x18, [\vcpu_ptr, #VCPU_REGS + 8 * 18]
        ldp x29, x30, [\vcpu_ptr, #VCPU_REGS + 8 * 29]

        /* Restore return address & mode. */
        ldp x1, x2, [\vcpu_ptr, #VCPU_REGS + 8 * 31]
        msr elr_el2, x1
        msr spsr_el2, x2

        /* Restore x0..x3, which were used as scratch registers above. */
        ldp x2, x3, [\vcpu_ptr, #VCPU_REGS + 8 * 2]
        ldp x0, x1, [\vcpu_ptr, #VCPU_REGS + 8 * 0]
.endm

/**
 * Restore all general purpose registers from the register buffer of the
 * current vcpu.
 */
.macro restore_registers_from_vcpu vcpu_ptr:req
        ldp x19, x20, [\vcpu_ptr, #VCPU_REGS + 8 * 19]
        ldp x21, x22, [\vcpu_ptr, #VCPU_REGS + 8 * 21]
        ldp x23, x24, [\vcpu_ptr, #VCPU_REGS + 8 * 23]
        ldp x25, x26, [\vcpu_ptr, #VCPU_REGS + 8 * 25]
        ldp x27, x28, [\vcpu_ptr, #VCPU_REGS + 8 * 27]
        restore_volatile_from_vcpu \vcpu_ptr
.endm

/**
 * This is a generic handler for exceptions taken at a lower EL. It saves the
 * volatile registers to the current vcpu and calls the C handler, which can
 * select one of two paths: (a) restore volatile registers and return, or
 * (b) switch to a different vcpu. In the latter case, the handler needs to save
 * all non-volatile registers (they haven't been saved yet), then restore all
 * registers from the new vcpu.
 */
.macro lower_exception handler:req
        save_volatile_to_vcpu also_save_x18

        /* Call C handler. */
        bl \handler

        /* Switch vcpu if requested by handler. */
        cbnz x0, vcpu_switch

        /* vcpu is not changing. */
        mrs x0, tpidr_el2
        b vcpu_restore_volatile_and_run
.endm

/**
 * This is the handler for a sync exception taken at a lower EL. If the reason
 * for the exception is an HVC call, it calls the faster hvc_handler without
 * saving most of the registers; otherwise it goes to slow_sync_lower, which is
 * the slow path where all registers need to be saved/restored.
 */
.macro lower_sync_exception
        /* Save x18 as save_volatile_to_vcpu would have. */
        str x18, [sp, #-16]!

        /*
         * Extract the exception class (EC, bits [31:26]) from the exception
         * syndrome register.
         */
        mrs x18, esr_el2
        lsr x18, x18, #26

        /*
         * Take the slow path if the exception is not due to an HVC
         * instruction (EC 0x16).
         */
        cmp x18, #0x16
        b.ne slow_sync_lower

        /*
         * Save x29 and x30, which are not saved by the callee, then call the
         * HVC handler.
         */
        stp x29, x30, [sp, #-16]!
        bl hvc_handler
        ldp x29, x30, [sp], #16
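        /*
         * hvc_handler returns the vcpu to switch to in x1, or 0 to stay with
         * the current vcpu; x0 holds the HVC return value.
         */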
        cbnz x1, sync_lower_switch

        /* Zero out all volatile registers (except x0) and return. */
        stp xzr, xzr, [sp, #-16]!
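        /*
         * Each ldp below reloads the same pair of zeroes pushed above; only
         * the final one pops it off the stack.
         */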
        ldp x1, x2, [sp]
        ldp x3, x4, [sp]
        ldp x5, x6, [sp]
        ldp x7, x8, [sp]
        ldp x9, x10, [sp]
        ldp x11, x12, [sp]
        ldp x13, x14, [sp]
        ldp x15, x16, [sp], #16
        mov x17, xzr

        /* Restore x18, which was saved on the stack. */
        ldr x18, [sp], #16
        eret
.endm

/**
 * The following is the exception vector table. A pointer to it will be stored
 * in register vbar_el2.
 */
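/*
 * The table must be 2KB-aligned (as vbar_el2 requires) and consists of 16
 * 128-byte entries: four exception types (synchronous, IRQ, FIQ, SError) for
 * each of four sources (current EL with SP_EL0, current EL with SP_ELx, lower
 * EL using AArch64, lower EL using AArch32).
 */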
.section .text.vector_table_el2, "ax"
.global vector_table_el2
.balign 0x800
vector_table_el2:
sync_cur_sp0:
        current_exception_sp0 el2 sync_current_exception

.balign 0x80
irq_cur_sp0:
        current_exception_sp0 el2 irq_current_exception

.balign 0x80
fiq_cur_sp0:
        current_exception_sp0 el2 fiq_current_exception

.balign 0x80
serr_cur_sp0:
        current_exception_sp0 el2 serr_current_exception

.balign 0x80
sync_cur_spx:
        current_exception_spx el2 sync_current_exception

.balign 0x80
irq_cur_spx:
        current_exception_spx el2 irq_current_exception

.balign 0x80
fiq_cur_spx:
        current_exception_spx el2 fiq_current_exception

.balign 0x80
serr_cur_spx:
        current_exception_spx el2 serr_current_exception

.balign 0x80
sync_lower_64:
        lower_sync_exception

.balign 0x80
irq_lower_64:
        lower_exception irq_lower

.balign 0x80
fiq_lower_64:
        lower_exception fiq_lower

.balign 0x80
serr_lower_64:
        lower_exception serr_lower

.balign 0x80
sync_lower_32:
        lower_sync_exception

.balign 0x80
irq_lower_32:
        lower_exception irq_lower

.balign 0x80
fiq_lower_32:
        lower_exception fiq_lower

.balign 0x80
serr_lower_32:
        lower_exception serr_lower

.balign 0x40
slow_sync_lower:
        /* Take the system register path for EC 0x18 (trapped MSR/MRS). */
        cmp x18, #0x18
        b.eq handle_system_register_access_s

        /* The caller must have saved x18, so we don't save it here. */
        save_volatile_to_vcpu

        /* Read syndrome register and call C handler. */
        mrs x0, esr_el2
        bl sync_lower_exception
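        /* Switch vcpu if requested by handler. */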
        cbnz x0, vcpu_switch

        /* vcpu is not changing. */
        mrs x0, tpidr_el2
        b vcpu_restore_volatile_and_run

sync_lower_switch:
        /* We'll have to switch, so save volatile state before doing so. */
        mrs x18, tpidr_el2

        /* Store zeroes in volatile register storage, except x0. */
        stp x0, xzr, [x18, #VCPU_REGS + 8 * 0]
        stp xzr, xzr, [x18, #VCPU_REGS + 8 * 2]
        stp xzr, xzr, [x18, #VCPU_REGS + 8 * 4]
        stp xzr, xzr, [x18, #VCPU_REGS + 8 * 6]
        stp xzr, xzr, [x18, #VCPU_REGS + 8 * 8]
        stp xzr, xzr, [x18, #VCPU_REGS + 8 * 10]
        stp xzr, xzr, [x18, #VCPU_REGS + 8 * 12]
        stp xzr, xzr, [x18, #VCPU_REGS + 8 * 14]
        stp xzr, xzr, [x18, #VCPU_REGS + 8 * 16]
        stp x29, x30, [x18, #VCPU_REGS + 8 * 29]

        /* x18 was saved on the stack, so move it to the vcpu regs buffer. */
        ldr x2, [sp], #16
        str x2, [x18, #VCPU_REGS + 8 * 18]

        /* Save return address & mode. */
        mrs x2, elr_el2
        mrs x3, spsr_el2
        stp x2, x3, [x18, #VCPU_REGS + 8 * 31]

        /* Save lazy state, then switch to new vcpu. */
        mov x0, x1

        /* Intentional fallthrough. */
/**
 * Switch to a new vcpu.
 *
 * All volatile registers from the old vcpu have already been saved. We need
 * to save only non-volatile ones from the old vcpu, and restore all from the
 * new one.
 *
 * x0 is a pointer to the new vcpu.
 */
vcpu_switch:
        /* Save non-volatile registers. */
        mrs x1, tpidr_el2
        stp x19, x20, [x1, #VCPU_REGS + 8 * 19]
        stp x21, x22, [x1, #VCPU_REGS + 8 * 21]
        stp x23, x24, [x1, #VCPU_REGS + 8 * 23]
        stp x25, x26, [x1, #VCPU_REGS + 8 * 25]
        stp x27, x28, [x1, #VCPU_REGS + 8 * 27]

        /* Save lazy state. */
        /* Use x28 as the base. */
        add x28, x1, #VCPU_LAZY

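        /*
         * The post-indexed stores below advance x28 through the lazy-state
         * buffer one 16-byte register pair at a time.
         */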
        mrs x24, vmpidr_el2
        mrs x25, csselr_el1
        stp x24, x25, [x28], #16

        mrs x2, sctlr_el1
        mrs x3, actlr_el1
        stp x2, x3, [x28], #16

        mrs x4, cpacr_el1
        mrs x5, ttbr0_el1
        stp x4, x5, [x28], #16

        mrs x6, ttbr1_el1
        mrs x7, tcr_el1
        stp x6, x7, [x28], #16

        mrs x8, esr_el1
        mrs x9, afsr0_el1
        stp x8, x9, [x28], #16

        mrs x10, afsr1_el1
        mrs x11, far_el1
        stp x10, x11, [x28], #16

        mrs x12, mair_el1
        mrs x13, vbar_el1
        stp x12, x13, [x28], #16

        mrs x14, contextidr_el1
        mrs x15, tpidr_el0
        stp x14, x15, [x28], #16

        mrs x16, tpidrro_el0
        mrs x17, tpidr_el1
        stp x16, x17, [x28], #16

        mrs x18, amair_el1
        mrs x19, cntkctl_el1
        stp x18, x19, [x28], #16

        mrs x20, sp_el0
        mrs x21, sp_el1
        stp x20, x21, [x28], #16

        mrs x22, elr_el1
        mrs x23, spsr_el1
        stp x22, x23, [x28], #16

        mrs x24, par_el1
        mrs x25, hcr_el2
        stp x24, x25, [x28], #16

        mrs x26, cptr_el2
        mrs x27, cnthctl_el2
        stp x26, x27, [x28], #16

        mrs x4, vttbr_el2
        mrs x5, mdcr_el2
        stp x4, x5, [x28], #16

        mrs x6, mdscr_el1
        str x6, [x28], #16

        /* Save GIC registers. */
#if GIC_VERSION == 3 || GIC_VERSION == 4
        /* Offset is too large, so start from a new base. */
        add x2, x1, #VCPU_GIC

        mrs x3, ich_hcr_el2
        mrs x4, icc_sre_el2
        stp x3, x4, [x2, #16 * 0]
#endif

        /* Save floating point registers. */
        /* Use x28 as the base. */
        add x28, x1, #VCPU_FREGS
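        /*
         * Each q register is 16 bytes, so each stored pair advances the base
         * by 32.
         */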
        stp q0, q1, [x28], #32
        stp q2, q3, [x28], #32
        stp q4, q5, [x28], #32
        stp q6, q7, [x28], #32
        stp q8, q9, [x28], #32
        stp q10, q11, [x28], #32
        stp q12, q13, [x28], #32
        stp q14, q15, [x28], #32
        stp q16, q17, [x28], #32
        stp q18, q19, [x28], #32
        stp q20, q21, [x28], #32
        stp q22, q23, [x28], #32
        stp q24, q25, [x28], #32
        stp q26, q27, [x28], #32
        stp q28, q29, [x28], #32
        stp q30, q31, [x28], #32
        mrs x3, fpsr
        mrs x4, fpcr
        stp x3, x4, [x28], #32

        /* Save new vcpu pointer in non-volatile register. */
        mov x19, x0

        /*
         * Save peripheral registers, and inform the arch-independent sections
         * that registers have been saved.
         */
        mov x0, x1
        bl complete_saving_state
        mov x0, x19

        /* Intentional fallthrough. */
.global vcpu_restore_all_and_run
vcpu_restore_all_and_run:
        /* Update pointer to current vcpu. */
        msr tpidr_el2, x0

        /* Restore peripheral registers. */
        mov x19, x0
        bl begin_restoring_state
        mov x0, x19

        /*
         * Restore floating point registers.
         *
         * Offset is too large, so start from a new base.
         */
        add x2, x0, #VCPU_FREGS
        ldp q0, q1, [x2, #32 * 0]
        ldp q2, q3, [x2, #32 * 1]
        ldp q4, q5, [x2, #32 * 2]
        ldp q6, q7, [x2, #32 * 3]
        ldp q8, q9, [x2, #32 * 4]
        ldp q10, q11, [x2, #32 * 5]
        ldp q12, q13, [x2, #32 * 6]
        ldp q14, q15, [x2, #32 * 7]
        ldp q16, q17, [x2, #32 * 8]
        ldp q18, q19, [x2, #32 * 9]
        ldp q20, q21, [x2, #32 * 10]
        ldp q22, q23, [x2, #32 * 11]
        ldp q24, q25, [x2, #32 * 12]
        ldp q26, q27, [x2, #32 * 13]
        ldp q28, q29, [x2, #32 * 14]
        /* Offset becomes too large, so move the base. */
        ldp q30, q31, [x2, #32 * 15]!
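        /*
         * After the writeback, x2 points at the q30/q31 slot, so fpsr/fpcr
         * sit one 32-byte slot further on.
         */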
        ldp x3, x4, [x2, #32 * 1]
        msr fpsr, x3

        /*
         * Only restore FPCR if it has changed, to avoid the expensive
         * self-synchronising operation where possible.
         */
        mrs x5, fpcr
        cmp x5, x4
        b.eq vcpu_restore_lazy_and_run
        msr fpcr, x4
        /* Intentional fallthrough. */

vcpu_restore_lazy_and_run:
        /* Restore lazy registers. */
        /* Use x28 as the base. */
        add x28, x0, #VCPU_LAZY

        ldp x24, x25, [x28], #16
        msr vmpidr_el2, x24
        msr csselr_el1, x25

        ldp x2, x3, [x28], #16
        msr sctlr_el1, x2
        msr actlr_el1, x3

        ldp x4, x5, [x28], #16
        msr cpacr_el1, x4
        msr ttbr0_el1, x5

        ldp x6, x7, [x28], #16
        msr ttbr1_el1, x6
        msr tcr_el1, x7

        ldp x8, x9, [x28], #16
        msr esr_el1, x8
        msr afsr0_el1, x9

        ldp x10, x11, [x28], #16
        msr afsr1_el1, x10
        msr far_el1, x11

        ldp x12, x13, [x28], #16
        msr mair_el1, x12
        msr vbar_el1, x13

        ldp x14, x15, [x28], #16
        msr contextidr_el1, x14
        msr tpidr_el0, x15

        ldp x16, x17, [x28], #16
        msr tpidrro_el0, x16
        msr tpidr_el1, x17

        ldp x18, x19, [x28], #16
        msr amair_el1, x18
        msr cntkctl_el1, x19

        ldp x20, x21, [x28], #16
        msr sp_el0, x20
        msr sp_el1, x21

        ldp x22, x23, [x28], #16
        msr elr_el1, x22
        msr spsr_el1, x23

        ldp x24, x25, [x28], #16
        msr par_el1, x24
        msr hcr_el2, x25

        ldp x26, x27, [x28], #16
        msr cptr_el2, x26
        msr cnthctl_el2, x27

        ldp x4, x5, [x28], #16
        msr vttbr_el2, x4
        msr mdcr_el2, x5

        ldr x6, [x28], #16
        msr mdscr_el1, x6

        /* Restore GIC registers. */
#if GIC_VERSION == 3 || GIC_VERSION == 4
        /* Offset is too large, so start from a new base. */
        add x2, x0, #VCPU_GIC

        ldp x3, x4, [x2, #16 * 0]
        msr ich_hcr_el2, x3
        msr icc_sre_el2, x4
#endif

        /*
         * If a different vCPU is being run on this physical CPU than the last
         * one which was run for this VM, invalidate the TLB. This must be
         * called after vttbr_el2 has been updated, so that we have the page
         * table and VMID of the vCPU to which we are switching.
         */
        mov x19, x0
        bl maybe_invalidate_tlb
        mov x0, x19

        /* Restore non-volatile registers. */
        ldp x19, x20, [x0, #VCPU_REGS + 8 * 19]
        ldp x21, x22, [x0, #VCPU_REGS + 8 * 21]
        ldp x23, x24, [x0, #VCPU_REGS + 8 * 23]
        ldp x25, x26, [x0, #VCPU_REGS + 8 * 25]
        ldp x27, x28, [x0, #VCPU_REGS + 8 * 27]

        /* Intentional fallthrough. */
/**
 * Restore volatile registers and run the given vcpu.
 *
 * x0 is a pointer to the target vcpu.
 */
vcpu_restore_volatile_and_run:
        restore_volatile_from_vcpu x0
        eret

.balign 0x40
/**
 * Restore volatile registers from the stack and return to the original caller.
 */
restore_from_stack_and_return:
        restore_volatile_from_stack el2
        eret

.balign 0x40
/**
 * Handle accesses to system registers (EC=0x18) and return to the original
 * caller.
 */
handle_system_register_access_s:
        /*
         * All registers are (conservatively) saved because the handler can
         * clobber non-volatile registers that are used by the msr/mrs, which
         * results in the wrong value being read or written.
         */
        save_registers_to_vcpu

        /* Read syndrome register and call C handler. */
        mrs x0, esr_el2
        bl handle_system_register_access
        cbnz x0, vcpu_switch

        /* vcpu is not changing. */
        mrs x0, tpidr_el2
        restore_registers_from_vcpu x0
        eret