/*
 * Copyright 2018 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "offsets.h"

/**
 * Saves the volatile registers onto the stack. This currently takes 14
 * instructions, so it can be used in exception handlers with 18 instructions
 * left over, 2 of which share a cache line with the macro's tail (assuming
 * 16-byte cache lines).
 *
 * On return, x0 and x1 are initialised to elr_el2 and spsr_el2 respectively,
 * which can be used as the first and second arguments of a subsequent call.
 */
.macro save_volatile_to_stack
        /* Reserve stack space and save registers x0-x18, x29 & x30. */
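        /*
         * The frame is 24 eight-byte slots: x0-x18 in slots 0-18, x29 & x30
         * in slots 20-21, and elr_el2 & spsr_el2 in slots 22-23. The
         * pre-index writeback below reserves all 192 bytes at once and keeps
         * sp 16-byte aligned.
         */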
        stp x0, x1, [sp, #-(8 * 24)]!
        stp x2, x3, [sp, #8 * 2]
        stp x4, x5, [sp, #8 * 4]
        stp x6, x7, [sp, #8 * 6]
        stp x8, x9, [sp, #8 * 8]
        stp x10, x11, [sp, #8 * 10]
        stp x12, x13, [sp, #8 * 12]
        stp x14, x15, [sp, #8 * 14]
        stp x16, x17, [sp, #8 * 16]
        str x18, [sp, #8 * 18]
        stp x29, x30, [sp, #8 * 20]

        /*
         * Save elr_el2 & spsr_el2. This is done so that we can take a nested
         * exception and still be able to unwind.
         */
        mrs x0, elr_el2
        mrs x1, spsr_el2
        stp x0, x1, [sp, #8 * 22]
.endm

/**
 * Restores the volatile registers from the stack. This currently takes 14
 * instructions, so it can be used in exception handlers while still leaving 18
 * instructions free; if paired with save_volatile_to_stack, there are 4
 * instructions to spare.
 */
.macro restore_volatile_from_stack
        /* Restore registers x2-x18, x29 & x30. */
        ldp x2, x3, [sp, #8 * 2]
        ldp x4, x5, [sp, #8 * 4]
        ldp x6, x7, [sp, #8 * 6]
        ldp x8, x9, [sp, #8 * 8]
        ldp x10, x11, [sp, #8 * 10]
        ldp x12, x13, [sp, #8 * 12]
        ldp x14, x15, [sp, #8 * 14]
        ldp x16, x17, [sp, #8 * 16]
        ldr x18, [sp, #8 * 18]
        ldp x29, x30, [sp, #8 * 20]

        /* Restore registers elr_el2 & spsr_el2, using x0 & x1 as scratch. */
        ldp x0, x1, [sp, #8 * 22]
        msr elr_el2, x0
        msr spsr_el2, x1

        /* Restore x0 & x1, and release stack space. */
        ldp x0, x1, [sp], #8 * 24
.endm

/**
 * This is a generic handler for exceptions taken at the current EL while using
 * SP0. It behaves similarly to the SPx case by first switching to SPx, doing
 * the work, then switching back to SP0 before returning.
 *
 * Switching to SPx and calling the C handler takes 16 instructions, so it's not
 * possible to add a branch to a common exit path without going into the next
 * cache line (assuming 16-byte cache lines). Restoring and returning take
 * another 16 instructions, so we implement the whole handler within the
 * allotted 32 instructions.
 */
.macro current_exception_sp0 handler:req
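        /* spsel #1 selects SP_ELx (here SP_EL2); #0 selects SP_EL0. */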
        msr spsel, #1
        save_volatile_to_stack
        bl \handler
        restore_volatile_from_stack
        msr spsel, #0
        eret
.endm

/**
 * This is a generic handler for exceptions taken at the current EL while using
 * SPx. It saves volatile registers, calls the C handler, restores volatile
 * registers, then returns.
 *
 * Saving state and jumping to the C handler takes 15 instructions, and we add
 * an extra branch to a common exit path, so each handler takes up one unique
 * cache line and one shared cache line (assuming 16-byte cache lines).
 */
.macro current_exception_spx handler:req
        save_volatile_to_stack
        bl \handler
        b restore_from_stack_and_return
.endm

/**
 * Saves the volatile registers into the register buffer of the current vcpu.
 * It allocates space on the stack for x18 and saves it if "also_save_x18" is
 * specified; otherwise the caller is expected to have saved x18 in a similar
 * fashion.
 */
.macro save_volatile_to_vcpu also_save_x18
.ifnb \also_save_x18
        /*
         * Save x18 since we're about to clobber it. We subtract 16 instead of
         * 8 from the stack pointer to keep it 16-byte aligned.
         */
        str x18, [sp, #-16]!
.endif
        /* Get the current vcpu. */
        mrs x18, tpidr_el2
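        /*
         * VCPU_REGS (from offsets.h) is the offset of the register buffer
         * within the vcpu: register xN lives in slot N, at
         * [vcpu + VCPU_REGS + 8 * N], with elr_el2 & spsr_el2 in slots 31-32.
         */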
        stp x0, x1, [x18, #VCPU_REGS + 8 * 0]
        stp x2, x3, [x18, #VCPU_REGS + 8 * 2]
        stp x4, x5, [x18, #VCPU_REGS + 8 * 4]
        stp x6, x7, [x18, #VCPU_REGS + 8 * 6]
        stp x8, x9, [x18, #VCPU_REGS + 8 * 8]
        stp x10, x11, [x18, #VCPU_REGS + 8 * 10]
        stp x12, x13, [x18, #VCPU_REGS + 8 * 12]
        stp x14, x15, [x18, #VCPU_REGS + 8 * 14]
        stp x16, x17, [x18, #VCPU_REGS + 8 * 16]
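        /*
         * Slots 19-28 are skipped: they hold the non-volatile registers
         * x19-x28, which vcpu_switch saves only when actually switching
         * vcpus.
         */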
        stp x29, x30, [x18, #VCPU_REGS + 8 * 29]

        /* x18 was saved on the stack, so we move it to the vcpu regs buffer. */
        ldr x0, [sp], #16
        str x0, [x18, #VCPU_REGS + 8 * 18]

        /* Save return address & mode. */
        mrs x1, elr_el2
        mrs x2, spsr_el2
        stp x1, x2, [x18, #VCPU_REGS + 8 * 31]
.endm

/**
 * This is a generic handler for exceptions taken at a lower EL. It saves the
 * volatile registers to the current vcpu and calls the C handler, which can
 * select one of two paths: (a) restore volatile registers and return, or
 * (b) switch to a different vcpu. In the latter case, the handler needs to save
 * all non-volatile registers (they haven't been saved yet), then restore all
 * registers from the new vcpu.
 */
.macro lower_exception handler:req
        save_volatile_to_vcpu also_save_x18

        /* Call C handler. */
        bl \handler

        /* Switch vcpu if requested by handler. */
        cbnz x0, vcpu_switch

        /* vcpu is not changing. */
        mrs x0, tpidr_el2
        b vcpu_restore_volatile_and_run
.endm

/**
 * This is the handler for a sync exception taken at a lower EL. If the reason
 * for the exception is an HVC call, it calls the faster hvc_handler without
 * saving most of the registers; otherwise it goes to slow_sync_lower, which is
 * the slow path where all registers need to be saved/restored.
 */
.macro lower_sync_exception
        /* Save x18 as save_volatile_to_vcpu would have. */
        str x18, [sp, #-16]!

        /* Extract the exception class (EC) from the exception syndrome register. */
        mrs x18, esr_el2
        lsr x18, x18, #26

        /* Take the slow path if the exception is not due to an HVC instruction. */
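        /* EC 0x16 is "HVC instruction execution in AArch64 state". */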
        sub x18, x18, #0x16
        cbnz x18, slow_sync_lower

        /*
         * Save x29 and x30, which are not saved by the callee, then call the
         * HVC handler.
         */
        stp x29, x30, [sp, #-16]!
        bl hvc_handler
        ldp x29, x30, [sp], #16
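        /*
         * hvc_handler returns the result for the guest in x0; a non-zero x1
         * holds a pointer to the vcpu to switch to.
         */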
        cbnz x1, sync_lower_switch

        /* Zero out all volatile registers (except x0) and return. */
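        /*
         * Push a pair of zeroes, then repeatedly reload that same pair
         * without advancing sp: one stp plus eight ldps zero x1-x16 in fewer
         * instructions than sixteen moves from xzr.
         */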
        stp xzr, xzr, [sp, #-16]!
        ldp x1, x2, [sp]
        ldp x3, x4, [sp]
        ldp x5, x6, [sp]
        ldp x7, x8, [sp]
        ldp x9, x10, [sp]
        ldp x11, x12, [sp]
        ldp x13, x14, [sp]
        ldp x15, x16, [sp], #16
        mov x17, xzr

        /* Restore x18, which was saved on the stack. */
        ldr x18, [sp], #16
        eret
.endm

/**
 * The following is the exception table. A pointer to it will be stored in
 * register vbar_el2.
 */
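/*
 * The AArch64 vector table has 16 entries of 0x80 bytes (32 instructions)
 * each: sync/irq/fiq/serror, first for the current EL with SP_EL0, then the
 * current EL with SP_ELx, then a lower EL in AArch64, then a lower EL in
 * AArch32. The base must be 2KB-aligned, hence the .balign 0x800.
 */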
.section .text.vector_table_el2, "ax"
.global vector_table_el2
.balign 0x800
vector_table_el2:
sync_cur_sp0:
        current_exception_sp0 sync_current_exception

.balign 0x80
irq_cur_sp0:
        current_exception_sp0 irq_current_exception

.balign 0x80
fiq_cur_sp0:
        current_exception_sp0 fiq_current_exception

.balign 0x80
serr_cur_sp0:
        current_exception_sp0 serr_current_exception

.balign 0x80
sync_cur_spx:
        current_exception_spx sync_current_exception

.balign 0x80
irq_cur_spx:
        current_exception_spx irq_current_exception

.balign 0x80
fiq_cur_spx:
        current_exception_spx fiq_current_exception

.balign 0x80
serr_cur_spx:
        current_exception_spx serr_current_exception

.balign 0x80
sync_lower_64:
        lower_sync_exception

.balign 0x80
irq_lower_64:
        lower_exception irq_lower

.balign 0x80
fiq_lower_64:
        lower_exception fiq_lower

.balign 0x80
serr_lower_64:
        lower_exception serr_lower

.balign 0x80
sync_lower_32:
        lower_sync_exception

.balign 0x80
irq_lower_32:
        lower_exception irq_lower

.balign 0x80
fiq_lower_32:
        lower_exception fiq_lower

.balign 0x80
serr_lower_32:
        lower_exception serr_lower

.balign 0x40
slow_sync_lower:
        /* The caller must have saved x18, so we don't save it here. */
        save_volatile_to_vcpu

        /* Read syndrome register and call C handler. */
        mrs x0, esr_el2
        bl sync_lower_exception
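        /* Switch vcpu if requested by the handler (x0 is the new vcpu, or 0). */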
        cbnz x0, vcpu_switch

        /* vcpu is not changing. */
        mrs x0, tpidr_el2
        b vcpu_restore_volatile_and_run

sync_lower_switch:
        /* We'll have to switch, so save volatile state before doing so. */
        mrs x18, tpidr_el2

        /* Store zeroes in volatile register storage, except x0. */
        stp x0, xzr, [x18, #VCPU_REGS + 8 * 0]
        stp xzr, xzr, [x18, #VCPU_REGS + 8 * 2]
        stp xzr, xzr, [x18, #VCPU_REGS + 8 * 4]
        stp xzr, xzr, [x18, #VCPU_REGS + 8 * 6]
        stp xzr, xzr, [x18, #VCPU_REGS + 8 * 8]
        stp xzr, xzr, [x18, #VCPU_REGS + 8 * 10]
        stp xzr, xzr, [x18, #VCPU_REGS + 8 * 12]
        stp xzr, xzr, [x18, #VCPU_REGS + 8 * 14]
        stp xzr, xzr, [x18, #VCPU_REGS + 8 * 16]
        stp x29, x30, [x18, #VCPU_REGS + 8 * 29]

        /* x18 was saved on the stack, so we move it to the vcpu regs buffer. */
        ldr x2, [sp], #16
        str x2, [x18, #VCPU_REGS + 8 * 18]

        /* Save return address & mode. */
        mrs x2, elr_el2
        mrs x3, spsr_el2
        stp x2, x3, [x18, #VCPU_REGS + 8 * 31]

        /* Move the new vcpu (returned in x1) to x0, then save lazy state. */
        mov x0, x1

        /* Intentional fallthrough. */
/**
 * Switch to a new vcpu.
 *
 * All volatile registers from the old vcpu have already been saved. We need
 * to save only non-volatile ones from the old vcpu, and restore all from the
 * new one.
 *
 * x0 is a pointer to the new vcpu.
 */
vcpu_switch:
        /* Save non-volatile registers. */
        mrs x1, tpidr_el2
        stp x19, x20, [x1, #VCPU_REGS + 8 * 19]
        stp x21, x22, [x1, #VCPU_REGS + 8 * 21]
        stp x23, x24, [x1, #VCPU_REGS + 8 * 23]
        stp x25, x26, [x1, #VCPU_REGS + 8 * 25]
        stp x27, x28, [x1, #VCPU_REGS + 8 * 27]

        /* Save lazy state. */
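        /*
         * "Lazy" state is state the hypervisor itself doesn't touch, so it
         * only needs to be saved/restored when switching vcpus rather than on
         * every exception.
         */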
        mrs x24, vmpidr_el2
        mrs x25, csselr_el1
        stp x24, x25, [x1, #VCPU_LAZY + 16 * 0]

        mrs x2, sctlr_el1
        mrs x3, actlr_el1
        stp x2, x3, [x1, #VCPU_LAZY + 16 * 1]

        mrs x4, cpacr_el1
        mrs x5, ttbr0_el1
        stp x4, x5, [x1, #VCPU_LAZY + 16 * 2]

        mrs x6, ttbr1_el1
        mrs x7, tcr_el1
        stp x6, x7, [x1, #VCPU_LAZY + 16 * 3]

        mrs x8, esr_el1
        mrs x9, afsr0_el1
        stp x8, x9, [x1, #VCPU_LAZY + 16 * 4]

        mrs x10, afsr1_el1
        mrs x11, far_el1
        stp x10, x11, [x1, #VCPU_LAZY + 16 * 5]

        mrs x12, mair_el1
        mrs x13, vbar_el1
        stp x12, x13, [x1, #VCPU_LAZY + 16 * 6]

        mrs x14, contextidr_el1
        mrs x15, tpidr_el0
        stp x14, x15, [x1, #VCPU_LAZY + 16 * 7]

        mrs x16, tpidrro_el0
        mrs x17, tpidr_el1
        stp x16, x17, [x1, #VCPU_LAZY + 16 * 8]

        mrs x18, amair_el1
        mrs x19, cntkctl_el1
        stp x18, x19, [x1, #VCPU_LAZY + 16 * 9]

        mrs x20, sp_el0
        mrs x21, sp_el1
        stp x20, x21, [x1, #VCPU_LAZY + 16 * 10]

        mrs x22, par_el1
        mrs x23, hcr_el2
        stp x22, x23, [x1, #VCPU_LAZY + 16 * 11]

        mrs x24, cptr_el2
        mrs x25, cnthctl_el2
        stp x24, x25, [x1, #VCPU_LAZY + 16 * 12]

        mrs x26, vttbr_el2
        str x26, [x1, #VCPU_LAZY + 16 * 13]

        /* Intentional fallthrough. */

.globl vcpu_restore_all_and_run
vcpu_restore_all_and_run:
        /* Update pointer to current vcpu. */
        msr tpidr_el2, x0
Wedson Almeida Filho87009642018-07-02 10:20:07 +0100410
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +0000411 /* Restore lazy registers. */
412 ldp x24, x25, [x0, #VCPU_LAZY + 16 * 0]
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100413 msr vmpidr_el2, x24
414 msr csselr_el1, x25
415
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +0000416 ldp x2, x3, [x0, #VCPU_LAZY + 16 * 1]
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100417 msr sctlr_el1, x2
418 msr actlr_el1, x3
419
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +0000420 ldp x4, x5, [x0, #VCPU_LAZY + 16 * 2]
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100421 msr cpacr_el1, x4
422 msr ttbr0_el1, x5
423
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +0000424 ldp x6, x7, [x0, #VCPU_LAZY + 16 * 3]
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100425 msr ttbr1_el1, x6
426 msr tcr_el1, x7
427
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +0000428 ldp x8, x9, [x0, #VCPU_LAZY + 16 * 4]
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100429 msr esr_el1, x8
430 msr afsr0_el1, x9
431
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +0000432 ldp x10, x11, [x0, #VCPU_LAZY + 16 * 5]
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100433 msr afsr1_el1, x10
434 msr far_el1, x11
435
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +0000436 ldp x12, x13, [x0, #VCPU_LAZY + 16 * 6]
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100437 msr mair_el1, x12
438 msr vbar_el1, x13
439
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +0000440 ldp x14, x15, [x0, #VCPU_LAZY + 16 * 7]
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100441 msr contextidr_el1, x14
442 msr tpidr_el0, x15
443
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +0000444 ldp x16, x17, [x0, #VCPU_LAZY + 16 * 8]
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100445 msr tpidrro_el0, x16
446 msr tpidr_el1, x17
447
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +0000448 ldp x18, x19, [x0, #VCPU_LAZY + 16 * 9]
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100449 msr amair_el1, x18
450 msr cntkctl_el1, x19
451
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +0000452 ldp x20, x21, [x0, #VCPU_LAZY + 16 * 10]
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100453 msr sp_el0, x20
454 msr sp_el1, x21
455
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +0000456 ldp x22, x23, [x0, #VCPU_LAZY + 16 * 11]
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100457 msr par_el1, x22
Wedson Almeida Filho1f81b752018-10-24 15:15:49 +0100458 msr hcr_el2, x23
459
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +0000460 ldp x24, x25, [x0, #VCPU_LAZY + 16 * 12]
Andrew Walbran570f9b72018-11-13 17:51:50 +0000461 msr cptr_el2, x24
462 msr cnthctl_el2, x25
Wedson Almeida Filho1f81b752018-10-24 15:15:49 +0100463
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +0000464 ldr x26, [x0, #VCPU_LAZY + 16 * 13]
Wedson Almeida Filho1f81b752018-10-24 15:15:49 +0100465 msr vttbr_el2, x26
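        /*
         * vttbr_el2 holds the VMID and the stage-2 translation table base,
         * so this switches the guest physical address space to the new vm.
         */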

        /* Restore non-volatile registers. */
        ldp x19, x20, [x0, #VCPU_REGS + 8 * 19]
        ldp x21, x22, [x0, #VCPU_REGS + 8 * 21]
        ldp x23, x24, [x0, #VCPU_REGS + 8 * 23]
        ldp x25, x26, [x0, #VCPU_REGS + 8 * 25]
        ldp x27, x28, [x0, #VCPU_REGS + 8 * 27]

        /* Intentional fallthrough. */
/**
 * Restore volatile registers and run the given vcpu.
 *
 * x0 is a pointer to the target vcpu.
 */
vcpu_restore_volatile_and_run:
        ldp x4, x5, [x0, #VCPU_REGS + 8 * 4]
        ldp x6, x7, [x0, #VCPU_REGS + 8 * 6]
        ldp x8, x9, [x0, #VCPU_REGS + 8 * 8]
        ldp x10, x11, [x0, #VCPU_REGS + 8 * 10]
        ldp x12, x13, [x0, #VCPU_REGS + 8 * 12]
        ldp x14, x15, [x0, #VCPU_REGS + 8 * 14]
        ldp x16, x17, [x0, #VCPU_REGS + 8 * 16]
        ldr x18, [x0, #VCPU_REGS + 8 * 18]
        ldp x29, x30, [x0, #VCPU_REGS + 8 * 29]

        /* Restore return address & mode. */
        ldp x1, x2, [x0, #VCPU_REGS + 8 * 31]
        msr elr_el2, x1
        msr spsr_el2, x2

        /* Restore x0..x3, which we have used as scratch before. */
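        /* x0 is loaded last because it's the base register for these loads. */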
        ldp x2, x3, [x0, #VCPU_REGS + 8 * 2]
        ldp x0, x1, [x0, #VCPU_REGS + 8 * 0]
        eret

.balign 0x40
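/*
 * Aligned so that the shared exit path (the 14-instruction restore macro plus
 * eret, 60 bytes) sits within a single 0x40-byte block.
 */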
/**
 * Restores volatile registers from the stack and returns.
 */
restore_from_stack_and_return:
        restore_volatile_from_stack
        eret