blob: 9a39031c93bc868372f3fd89e2a4cfc59888224f [file] [log] [blame]
Wedson Almeida Filho22c973a2018-10-27 16:25:42 +01001/*
Andrew Walbran692b3252019-03-07 15:51:31 +00002 * Copyright 2018 The Hafnium Authors.
Wedson Almeida Filho22c973a2018-10-27 16:25:42 +01003 *
Andrew Walbrane959ec12020-06-17 15:01:09 +01004 * Use of this source code is governed by a BSD-style
5 * license that can be found in the LICENSE file or at
6 * https://opensource.org/licenses/BSD-3-Clause.
Wedson Almeida Filho22c973a2018-10-27 16:25:42 +01007 */
8
David Brazdil863b1502019-10-24 13:55:50 +01009#include "hf/arch/offsets.h"
Olivier Deprez3caed1c2021-02-05 12:07:36 +010010
11#include "hf/arch/vmid_base.h"
12
Jose Marinhoab1081d2019-10-18 11:39:01 +010013#include "msr.h"
Andrew Walbranc55365d2018-12-06 15:45:11 +000014#include "exception_macros.S"
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +010015
Max Shvetsov2ff5b572021-03-22 12:03:38 +000016
17/**
18 * PE feature information about SVE implementation in AArch64 state.
19 */
20#define ID_AA64PFR0_SVE_SHIFT (32)
21#define ID_AA64PFR0_SVE_LENGTH (4)
22
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +000023/**
Fuad Tabbab0ef2a42019-12-19 11:19:25 +000024 * Saves the volatile registers into the register buffer of the current vCPU.
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +000025 */
Andrew Walbran59182d52019-09-23 17:55:39 +010026.macro save_volatile_to_vcpu
Wedson Almeida Filho5bc0b4c2018-07-30 15:31:44 +010027 /*
28 * Save x18 since we're about to clobber it. We subtract 16 instead of
29 * 8 from the stack pointer to keep it 16-byte aligned.
30 */
31 str x18, [sp, #-16]!
Andrew Walbran59182d52019-09-23 17:55:39 +010032
Fuad Tabbab0ef2a42019-12-19 11:19:25 +000033 /* Get the current vCPU. */
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +000034 mrs x18, tpidr_el2
35 stp x0, x1, [x18, #VCPU_REGS + 8 * 0]
36 stp x2, x3, [x18, #VCPU_REGS + 8 * 2]
37 stp x4, x5, [x18, #VCPU_REGS + 8 * 4]
38 stp x6, x7, [x18, #VCPU_REGS + 8 * 6]
39 stp x8, x9, [x18, #VCPU_REGS + 8 * 8]
40 stp x10, x11, [x18, #VCPU_REGS + 8 * 10]
41 stp x12, x13, [x18, #VCPU_REGS + 8 * 12]
42 stp x14, x15, [x18, #VCPU_REGS + 8 * 14]
43 stp x16, x17, [x18, #VCPU_REGS + 8 * 16]
44 stp x29, x30, [x18, #VCPU_REGS + 8 * 29]
45
Fuad Tabbab0ef2a42019-12-19 11:19:25 +000046 /* x18 was saved on the stack, so we move it to vCPU regs buffer. */
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +000047 ldr x0, [sp], #16
48 str x0, [x18, #VCPU_REGS + 8 * 18]
49
50 /* Save return address & mode. */
51 mrs x1, elr_el2
52 mrs x2, spsr_el2
53 stp x1, x2, [x18, #VCPU_REGS + 8 * 31]
Raghu Krishnamurthy7e925bd2020-12-26 10:14:40 -080054 mrs x1, hcr_el2
55 str x1, [x18, #VCPU_REGS + 8 * 33]
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +000056.endm
57
58/**
59 * This is a generic handler for exceptions taken at a lower EL. It saves the
Fuad Tabbab0ef2a42019-12-19 11:19:25 +000060 * volatile registers to the current vCPU and calls the C handler, which can
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +000061 * select one of two paths: (a) restore volatile registers and return, or
Fuad Tabbab0ef2a42019-12-19 11:19:25 +000062 * (b) switch to a different vCPU. In the latter case, the handler needs to save
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +000063 * all non-volatile registers (they haven't been saved yet), then restore all
Fuad Tabbab0ef2a42019-12-19 11:19:25 +000064 * registers from the new vCPU.
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +000065 */
66.macro lower_exception handler:req
Andrew Walbran59182d52019-09-23 17:55:39 +010067 save_volatile_to_vcpu
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +000068
Olivier Depreze7d7f322020-12-14 16:01:03 +010069#if BRANCH_PROTECTION
70 /* NOTE: x18 still holds pointer to current vCPU. */
71 bl pauth_save_vcpu_and_restore_hyp_key
72#endif
73
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +000074 /* Call C handler. */
75 bl \handler
76
Fuad Tabbab0ef2a42019-12-19 11:19:25 +000077 /* Switch vCPU if requested by handler. */
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +000078 cbnz x0, vcpu_switch
79
Fuad Tabbab0ef2a42019-12-19 11:19:25 +000080 /* vCPU is not changing. */
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +000081 mrs x0, tpidr_el2
82 b vcpu_restore_volatile_and_run
83.endm
84
85/**
Andrew Walbran59182d52019-09-23 17:55:39 +010086 * This is the handler for a sync exception taken at a lower EL.
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +000087 */
88.macro lower_sync_exception
Andrew Walbran59182d52019-09-23 17:55:39 +010089 save_volatile_to_vcpu
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +010090
Olivier Depreze7d7f322020-12-14 16:01:03 +010091#if BRANCH_PROTECTION
92 /* NOTE: x18 still holds pointer to current vCPU. */
93 bl pauth_save_vcpu_and_restore_hyp_key
94#endif
95
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +010096 /* Extract the exception class (EC) from exception syndrome register. */
97 mrs x18, esr_el2
98 lsr x18, x18, #26
99
Andrew Walbran59182d52019-09-23 17:55:39 +0100100 /* Take the system register path for EC 0x18. */
101 sub x18, x18, #0x18
102 cbz x18, system_register_access
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100103
Fuad Tabbac3847c72020-08-11 09:32:25 +0100104 /* Call C handler passing the syndrome and fault address registers. */
Andrew Walbran59182d52019-09-23 17:55:39 +0100105 mrs x0, esr_el2
Fuad Tabbac3847c72020-08-11 09:32:25 +0100106 mrs x1, far_el2
Andrew Walbran59182d52019-09-23 17:55:39 +0100107 bl sync_lower_exception
Andrew Walbran3a71c982019-09-12 18:22:11 +0100108
Fuad Tabbab0ef2a42019-12-19 11:19:25 +0000109 /* Switch vCPU if requested by handler. */
Andrew Walbran59182d52019-09-23 17:55:39 +0100110 cbnz x0, vcpu_switch
Andrew Walbranfed412e2019-09-02 18:23:16 +0100111
Fuad Tabbab0ef2a42019-12-19 11:19:25 +0000112 /* vCPU is not changing. */
Andrew Walbran59182d52019-09-23 17:55:39 +0100113 mrs x0, tpidr_el2
114 b vcpu_restore_volatile_and_run
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +0000115.endm
116
117/**
118 * The following is the exception table. A pointer to it will be stored in
119 * register vbar_el2.
120 */
121.section .text.vector_table_el2, "ax"
122.global vector_table_el2
123.balign 0x800
124vector_table_el2:
125sync_cur_sp0:
David Brazdil768f69c2019-12-19 15:46:12 +0000126 noreturn_current_exception_sp0 el2 sync_current_exception_noreturn
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +0000127
128.balign 0x80
129irq_cur_sp0:
David Brazdil768f69c2019-12-19 15:46:12 +0000130 noreturn_current_exception_sp0 el2 irq_current_exception_noreturn
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +0000131
132.balign 0x80
133fiq_cur_sp0:
David Brazdil768f69c2019-12-19 15:46:12 +0000134 noreturn_current_exception_sp0 el2 fiq_current_exception_noreturn
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +0000135
136.balign 0x80
137serr_cur_sp0:
David Brazdil768f69c2019-12-19 15:46:12 +0000138 noreturn_current_exception_sp0 el2 serr_current_exception_noreturn
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +0000139
140.balign 0x80
141sync_cur_spx:
David Brazdil768f69c2019-12-19 15:46:12 +0000142 noreturn_current_exception_spx el2 sync_current_exception_noreturn
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +0000143
144.balign 0x80
145irq_cur_spx:
David Brazdil768f69c2019-12-19 15:46:12 +0000146 noreturn_current_exception_spx el2 irq_current_exception_noreturn
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +0000147
148.balign 0x80
149fiq_cur_spx:
David Brazdil768f69c2019-12-19 15:46:12 +0000150 noreturn_current_exception_spx el2 fiq_current_exception_noreturn
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +0000151
152.balign 0x80
153serr_cur_spx:
David Brazdil768f69c2019-12-19 15:46:12 +0000154 noreturn_current_exception_spx el2 serr_current_exception_noreturn
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +0000155
156.balign 0x80
157sync_lower_64:
158 lower_sync_exception
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100159
160.balign 0x80
Andrew Walbran83f61322018-11-12 13:29:30 +0000161irq_lower_64:
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +0000162 lower_exception irq_lower
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100163
164.balign 0x80
Andrew Walbran83f61322018-11-12 13:29:30 +0000165fiq_lower_64:
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +0000166 lower_exception fiq_lower
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100167
168.balign 0x80
Andrew Walbran83f61322018-11-12 13:29:30 +0000169serr_lower_64:
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +0000170 lower_exception serr_lower
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100171
172.balign 0x80
Andrew Walbran83f61322018-11-12 13:29:30 +0000173sync_lower_32:
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +0000174 lower_sync_exception
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100175
176.balign 0x80
Andrew Walbran83f61322018-11-12 13:29:30 +0000177irq_lower_32:
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +0000178 lower_exception irq_lower
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100179
180.balign 0x80
Andrew Walbran83f61322018-11-12 13:29:30 +0000181fiq_lower_32:
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +0000182 lower_exception fiq_lower
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100183
184.balign 0x80
Andrew Walbran83f61322018-11-12 13:29:30 +0000185serr_lower_32:
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +0000186 lower_exception serr_lower
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100187
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +0000188.balign 0x40
Wedson Almeida Filho59978322018-10-24 15:13:33 +0100189
Fuad Tabba7c299d82019-09-12 13:05:18 +0100190/**
Olivier Depreze7d7f322020-12-14 16:01:03 +0100191 * pauth_save_vcpu_and_restore_hyp_key
192 *
193 * NOTE: expect x18 holds pointer to current vCPU.
194 */
195#if BRANCH_PROTECTION
196pauth_save_vcpu_and_restore_hyp_key:
197 /*
198 * Save APIA key for the vCPU as Hypervisor replaces it with its
199 * own key. Other vCPU PAuth keys are taken care in vcpu_switch.
200 */
201 mrs x0, APIAKEYLO_EL1
202 mrs x1, APIAKEYHI_EL1
203 add x18, x18, #VCPU_PAC
204 stp x0, x1, [x18]
205
206 /* Restore Hypervisor APIA key. */
207 pauth_restore_hypervisor_key x0 x1
208 ret
209#endif
210
211/**
Fuad Tabba7c299d82019-09-12 13:05:18 +0100212 * Handle accesses to system registers (EC=0x18) and return to original caller.
213 */
214system_register_access:
215 /*
216 * Non-volatile registers are (conservatively) saved because the handler
217 * can clobber non-volatile registers that are used by the msr/mrs,
218 * which results in the wrong value being read or written.
219 */
Fuad Tabbab0ef2a42019-12-19 11:19:25 +0000220 /* Get the current vCPU. */
Fuad Tabba7c299d82019-09-12 13:05:18 +0100221 mrs x18, tpidr_el2
222 stp x19, x20, [x18, #VCPU_REGS + 8 * 19]
223 stp x21, x22, [x18, #VCPU_REGS + 8 * 21]
224 stp x23, x24, [x18, #VCPU_REGS + 8 * 23]
225 stp x25, x26, [x18, #VCPU_REGS + 8 * 25]
226 stp x27, x28, [x18, #VCPU_REGS + 8 * 27]
227
228 /* Read syndrome register and call C handler. */
229 mrs x0, esr_el2
230 bl handle_system_register_access
Fuad Tabba7c299d82019-09-12 13:05:18 +0100231
Fuad Tabbab86325a2020-01-10 13:38:15 +0000232 /* Continue running the same vCPU. */
Fuad Tabba7c299d82019-09-12 13:05:18 +0100233 mrs x0, tpidr_el2
234 b vcpu_restore_nonvolatile_and_run
235
Wedson Almeida Filho87009642018-07-02 10:20:07 +0100236/**
Fuad Tabbab0ef2a42019-12-19 11:19:25 +0000237 * Switch to a new vCPU.
Wedson Almeida Filho87009642018-07-02 10:20:07 +0100238 *
Fuad Tabbab0ef2a42019-12-19 11:19:25 +0000239 * All volatile registers from the old vCPU have already been saved. We need
240 * to save only non-volatile ones from the old vCPU, and restore all from the
Wedson Almeida Filho87009642018-07-02 10:20:07 +0100241 * new one.
242 *
Fuad Tabbab0ef2a42019-12-19 11:19:25 +0000243 * x0 is a pointer to the new vCPU.
Wedson Almeida Filho87009642018-07-02 10:20:07 +0100244 */
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100245vcpu_switch:
246 /* Save non-volatile registers. */
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +0000247 mrs x1, tpidr_el2
248 stp x19, x20, [x1, #VCPU_REGS + 8 * 19]
249 stp x21, x22, [x1, #VCPU_REGS + 8 * 21]
250 stp x23, x24, [x1, #VCPU_REGS + 8 * 23]
251 stp x25, x26, [x1, #VCPU_REGS + 8 * 25]
252 stp x27, x28, [x1, #VCPU_REGS + 8 * 27]
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100253
254 /* Save lazy state. */
Fuad Tabba5e147a92019-08-14 15:30:30 +0100255 /* Use x28 as the base */
256 add x28, x1, #VCPU_LAZY
257
Raghu Krishnamurthy2baf07a2021-01-17 09:42:35 -0800258#if ENABLE_VHE
259 /* Check if VHE support is enabled, equivalent to has_vhe_support(). */
260 mrs x19, id_aa64mmfr1_el1
261 tst x19, #0xf00
262 b.ne vhe_save
263#endif
264
265 mrs x24, sctlr_el1
266 mrs x25, cpacr_el1
Fuad Tabba5e147a92019-08-14 15:30:30 +0100267 stp x24, x25, [x28], #16
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100268
Raghu Krishnamurthy2baf07a2021-01-17 09:42:35 -0800269 mrs x2, ttbr0_el1
270 mrs x3, ttbr1_el1
Fuad Tabba5e147a92019-08-14 15:30:30 +0100271 stp x2, x3, [x28], #16
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100272
Raghu Krishnamurthy2baf07a2021-01-17 09:42:35 -0800273 mrs x4, tcr_el1
274 mrs x5, esr_el1
Fuad Tabba5e147a92019-08-14 15:30:30 +0100275 stp x4, x5, [x28], #16
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100276
Raghu Krishnamurthy2baf07a2021-01-17 09:42:35 -0800277 mrs x6, afsr0_el1
278 mrs x7, afsr1_el1
Fuad Tabba5e147a92019-08-14 15:30:30 +0100279 stp x6, x7, [x28], #16
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100280
Raghu Krishnamurthy2baf07a2021-01-17 09:42:35 -0800281 mrs x8, far_el1
282 mrs x9, mair_el1
Fuad Tabba5e147a92019-08-14 15:30:30 +0100283 stp x8, x9, [x28], #16
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100284
Raghu Krishnamurthy2baf07a2021-01-17 09:42:35 -0800285 mrs x10, vbar_el1
286 mrs x11, contextidr_el1
Fuad Tabba5e147a92019-08-14 15:30:30 +0100287 stp x10, x11, [x28], #16
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100288
Raghu Krishnamurthy2baf07a2021-01-17 09:42:35 -0800289 mrs x12, amair_el1
290 mrs x13, cntkctl_el1
Fuad Tabba5e147a92019-08-14 15:30:30 +0100291 stp x12, x13, [x28], #16
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100292
Raghu Krishnamurthy2baf07a2021-01-17 09:42:35 -0800293 mrs x14, elr_el1
294 mrs x15, spsr_el1
Fuad Tabba5e147a92019-08-14 15:30:30 +0100295 stp x14, x15, [x28], #16
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100296
Raghu Krishnamurthy2baf07a2021-01-17 09:42:35 -0800297#if ENABLE_VHE
298 b skip_vhe_save
299
300vhe_save:
301 mrs x24, MSR_SCTLR_EL12
302 mrs x25, MSR_CPACR_EL12
303 stp x24, x25, [x28], #16
304
305 mrs x2, MSR_TTBR0_EL12
306 mrs x3, MSR_TTBR1_EL12
307 stp x2, x3, [x28], #16
308
309 mrs x4, MSR_TCR_EL12
310 mrs x5, MSR_ESR_EL12
311 stp x4, x5, [x28], #16
312
313 mrs x6, MSR_AFSR0_EL12
314 mrs x7, MSR_AFSR1_EL12
315 stp x6, x7, [x28], #16
316
317 mrs x8, MSR_FAR_EL12
318 mrs x9, MSR_MAIR_EL12
319 stp x8, x9, [x28], #16
320
321 mrs x10, MSR_VBAR_EL12
322 mrs x11, MSR_CONTEXTIDR_EL12
323 stp x10, x11, [x28], #16
324
325 mrs x12, MSR_AMAIR_EL12
326 mrs x13, MSR_CNTKCTL_EL12
327 stp x12, x13, [x28], #16
328
329 mrs x14, MSR_ELR_EL12
330 mrs x15, MSR_SPSR_EL12
331 stp x14, x15, [x28], #16
332
333skip_vhe_save:
334#endif
335 mrs x16, vmpidr_el2
336 mrs x17, csselr_el1
Fuad Tabba5e147a92019-08-14 15:30:30 +0100337 stp x16, x17, [x28], #16
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100338
Raghu Krishnamurthy2baf07a2021-01-17 09:42:35 -0800339 mrs x18, actlr_el1
340 mrs x19, tpidr_el0
Fuad Tabba5e147a92019-08-14 15:30:30 +0100341 stp x18, x19, [x28], #16
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100342
Raghu Krishnamurthy2baf07a2021-01-17 09:42:35 -0800343 mrs x20, tpidrro_el0
344 mrs x21, tpidr_el1
Fuad Tabba5e147a92019-08-14 15:30:30 +0100345 stp x20, x21, [x28], #16
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100346
Raghu Krishnamurthy2baf07a2021-01-17 09:42:35 -0800347 mrs x22, sp_el0
348 mrs x23, sp_el1
Fuad Tabba5e147a92019-08-14 15:30:30 +0100349 stp x22, x23, [x28], #16
Wedson Almeida Filho1f81b752018-10-24 15:15:49 +0100350
Fuad Tabba2e2c98b2019-11-04 14:37:24 +0000351 mrs x26, cnthctl_el2
352 mrs x27, vttbr_el2
Fuad Tabba5e147a92019-08-14 15:30:30 +0100353 stp x26, x27, [x28], #16
Andrew Walbranbc82f2d2019-02-21 14:50:29 +0000354
Fuad Tabba2e2c98b2019-11-04 14:37:24 +0000355 mrs x4, mdcr_el2
356 mrs x5, mdscr_el1
Fuad Tabba5e147a92019-08-14 15:30:30 +0100357 stp x4, x5, [x28], #16
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100358
Fuad Tabba2e2c98b2019-11-04 14:37:24 +0000359 mrs x6, pmccfiltr_el0
360 mrs x7, pmcr_el0
Fuad Tabbaf1d6dc52019-09-18 17:33:14 +0100361 stp x6, x7, [x28], #16
362
Fuad Tabba2e2c98b2019-11-04 14:37:24 +0000363 mrs x8, pmcntenset_el0
364 mrs x9, pmintenset_el1
Fuad Tabbaf1d6dc52019-09-18 17:33:14 +0100365 stp x8, x9, [x28], #16
366
Raghu Krishnamurthy7e925bd2020-12-26 10:14:40 -0800367 mrs x8, par_el1
368 str x8, [x28], #8
369
Olivier Depreze7d7f322020-12-14 16:01:03 +0100370#if BRANCH_PROTECTION
371 add x2, x1, #(VCPU_PAC + 16)
372 mrs x10, APIBKEYLO_EL1
373 mrs x11, APIBKEYHI_EL1
374 stp x10, x11, [x2], #16
375 mrs x12, APDAKEYLO_EL1
376 mrs x13, APDAKEYHI_EL1
377 stp x12, x13, [x2], #16
378 mrs x14, APDBKEYLO_EL1
379 mrs x15, APDBKEYHI_EL1
380 stp x14, x15, [x2], #16
381 mrs x16, APGAKEYLO_EL1
382 mrs x17, APGAKEYHI_EL1
383 stp x16, x17, [x2], #16
384#endif
385
Andrew Walbranb208b4a2019-05-20 12:42:22 +0100386 /* Save GIC registers. */
387#if GIC_VERSION == 3 || GIC_VERSION == 4
388 /* Offset is too large, so start from a new base. */
389 add x2, x1, #VCPU_GIC
390
391 mrs x3, ich_hcr_el2
Andrew Walbran4b976f42019-06-05 15:00:50 +0100392 mrs x4, icc_sre_el2
393 stp x3, x4, [x2, #16 * 0]
Andrew Walbranb208b4a2019-05-20 12:42:22 +0100394#endif
395
Fuad Tabba5e147a92019-08-14 15:30:30 +0100396 /* Save floating point registers. */
397 /* Use x28 as the base. */
398 add x28, x1, #VCPU_FREGS
Olivier Deprez82961762021-02-08 10:24:19 +0100399 simd_op_vectors stp, x28
Conrad Groblera824af62019-03-22 17:33:23 +0000400 mrs x3, fpsr
401 mrs x4, fpcr
Olivier Deprez82961762021-02-08 10:24:19 +0100402 stp x3, x4, [x28]
Conrad Groblera824af62019-03-22 17:33:23 +0000403
Fuad Tabbab0ef2a42019-12-19 11:19:25 +0000404 /* Save new vCPU pointer in non-volatile register. */
Wedson Almeida Filho03306112018-11-26 00:08:03 +0000405 mov x19, x0
Wedson Almeida Filho87009642018-07-02 10:20:07 +0100406
Andrew Walbran1f8d4872018-12-20 11:21:32 +0000407 /*
408 * Save peripheral registers, and inform the arch-independent sections
409 * that registers have been saved.
410 */
Wedson Almeida Filho03306112018-11-26 00:08:03 +0000411 mov x0, x1
Andrew Walbran1f8d4872018-12-20 11:21:32 +0000412 bl complete_saving_state
Wedson Almeida Filho03306112018-11-26 00:08:03 +0000413 mov x0, x19
414
Olivier Deprez3caed1c2021-02-05 12:07:36 +0100415#if SECURE_WORLD == 1
416
417 ldr x1, [x0, #VCPU_VM]
418 ldrh w1, [x1, #VM_ID]
419
420 /* Exit to normal world if VM is HF_OTHER_WORLD_ID. */
421 cmp w1, #HF_OTHER_WORLD_ID
422 bne vcpu_restore_all_and_run
423
424 /*
425 * The current vCPU state is saved so it's now safe to switch to the
426 * normal world.
427 */
428
429other_world_loop:
Max Shvetsov2ff5b572021-03-22 12:03:38 +0000430 /* Check if SVE is implemented. */
431 mrs x0, id_aa64pfr0_el1
432 ubfx x0, x0, ID_AA64PFR0_SVE_SHIFT, ID_AA64PFR0_SVE_LENGTH
433 cbnz x0, sve_context_restore
Olivier Deprez82961762021-02-08 10:24:19 +0100434
435 /* Restore the other world SIMD context to the other world VM vCPU. */
436 add x18, x19, #VCPU_FREGS
437 simd_op_vectors ldp, x18
438 ldp x0, x1, [x18]
439 msr fpsr, x0
440 msr fpcr, x1
Max Shvetsov2ff5b572021-03-22 12:03:38 +0000441 b sve_skip_context_restore
Olivier Deprez82961762021-02-08 10:24:19 +0100442
Max Shvetsov2ff5b572021-03-22 12:03:38 +0000443 /* Restore the other world SVE context from internal buffer. */
444sve_context_restore:
445 adrp x18, sve_other_world_context
446 add x18, x18, :lo12: sve_other_world_context
447 ldr x0, [x19, #VCPU_CPU]
448 bl cpu_index
449 mov x20, #SVE_CTX_SIZE
450 madd x18, x0, x20, x18
451
452 /* Restore vector registers. */
453 sve_op_vectors ldr, x18
454 /* Restore FFR register before predicates. */
455 add x20, x18, #SVE_CTX_FFR
456 ldr p0, [x20]
457 wrffr p0.b
458 /* Restore predicate registers. */
459 add x20, x18, #SVE_CTX_PREDICATES
460 sve_predicate_op ldr, x20
461
462 /*
463 * Prepare arguments from other world VM vCPU.
464 * x19 holds the other world VM vCPU pointer.
465 */
466sve_skip_context_restore:
Olivier Deprez3caed1c2021-02-05 12:07:36 +0100467 ldp x0, x1, [x19, #VCPU_REGS + 8 * 0]
468 ldp x2, x3, [x19, #VCPU_REGS + 8 * 2]
469 ldp x4, x5, [x19, #VCPU_REGS + 8 * 4]
470 ldp x6, x7, [x19, #VCPU_REGS + 8 * 6]
471
Olivier Depreze7d7f322020-12-14 16:01:03 +0100472#if BRANCH_PROTECTION
473 /*
474 * EL3 saves pointer authentication keys when entering by SMC.
475 * Although prefer clearing the keys to be on the safe side.
476 */
477 msr APIAKEYLO_EL1, xzr
478 msr APIAKEYHI_EL1, xzr
479 msr APIBKEYLO_EL1, xzr
480 msr APIBKEYHI_EL1, xzr
481 msr APDAKEYLO_EL1, xzr
482 msr APDAKEYHI_EL1, xzr
483 msr APDBKEYLO_EL1, xzr
484 msr APDBKEYHI_EL1, xzr
485 msr APGAKEYLO_EL1, xzr
486 msr APGAKEYHI_EL1, xzr
487#endif
488
Olivier Deprez3caed1c2021-02-05 12:07:36 +0100489 smc #0
490
491 /*
492 * The call to EL3 returned, First eight GP registers contain an FF-A
493 * call from the physical FF-A instance. Save those arguments to the
494 * other world VM vCPU.
495 * x19 is restored with the other world VM vCPU pointer.
496 */
497 stp x0, x1, [x19, #VCPU_REGS + 8 * 0]
498 stp x2, x3, [x19, #VCPU_REGS + 8 * 2]
499 stp x4, x5, [x19, #VCPU_REGS + 8 * 4]
500 stp x6, x7, [x19, #VCPU_REGS + 8 * 6]
501
Max Shvetsov2ff5b572021-03-22 12:03:38 +0000502 /* Check if SVE is implemented. */
503 mrs x0, id_aa64pfr0_el1
504 ubfx x0, x0, ID_AA64PFR0_SVE_SHIFT, ID_AA64PFR0_SVE_LENGTH
505 cbnz x0, sve_context_save
506
Olivier Deprez82961762021-02-08 10:24:19 +0100507 /* Save the other world SIMD context to the other world VM vCPU. */
508 add x18, x19, #VCPU_FREGS
509 simd_op_vectors stp, x18
510 mrs x0, fpsr
511 mrs x1, fpcr
512 stp x0, x1, [x18]
Max Shvetsov2ff5b572021-03-22 12:03:38 +0000513 b sve_skip_context_save
514
515 /* Save the other world SVE context to internal buffer. */
516sve_context_save:
517 adrp x18, sve_other_world_context
518 add x18, x18, :lo12: sve_other_world_context
519 ldr x0, [x19, #VCPU_CPU]
520 bl cpu_index
521 mov x20, #SVE_CTX_SIZE
522 madd x18, x0, x20, x18
523
524 /* Save vector registers. */
525 sve_op_vectors str, x18
526 /* Save predicate registers. */
527 add x20, x18, #SVE_CTX_PREDICATES
528 sve_predicate_op str, x20
529 /* Save FFR register after predicates. */
530 add x20, x18, #SVE_CTX_FFR
531 rdffr p0.b
532 str p0, [x20]
533
534sve_skip_context_save:
Olivier Deprez82961762021-02-08 10:24:19 +0100535
Olivier Depreze7d7f322020-12-14 16:01:03 +0100536#if BRANCH_PROTECTION
537 pauth_restore_hypervisor_key x0 x1
538#endif
539
Olivier Deprez3caed1c2021-02-05 12:07:36 +0100540 /*
541 * Stack is at top and execution can restart straight into C code.
542 * Handle the FF-A call from other world.
543 */
544 mov x0, x19
545 bl smc_handler_from_nwd
546
547 /*
548 * If the smc handler returns null this indicates no vCPU has to be
549 * resumed and GP registers contain a fresh FF-A response or call
550 * directed to the normal world. Hence loop back and emit SMC again.
551 * Otherwise restore the vCPU pointed to by the handler return value.
552 */
553 cbz x0, other_world_loop
554
555#endif
556
Wedson Almeida Filho03306112018-11-26 00:08:03 +0000557 /* Intentional fallthrough. */
Andrew Walbran375f4532019-07-09 16:54:37 +0100558.global vcpu_restore_all_and_run
Wedson Almeida Filho87009642018-07-02 10:20:07 +0100559vcpu_restore_all_and_run:
Fuad Tabbab0ef2a42019-12-19 11:19:25 +0000560 /* Update pointer to current vCPU. */
Wedson Almeida Filho00df6c72018-10-18 11:19:24 +0100561 msr tpidr_el2, x0
Wedson Almeida Filho87009642018-07-02 10:20:07 +0100562
Andrew Walbran1f8d4872018-12-20 11:21:32 +0000563 /* Restore peripheral registers. */
564 mov x19, x0
565 bl begin_restoring_state
566 mov x0, x19
567
Conrad Groblera824af62019-03-22 17:33:23 +0000568 /*
569 * Restore floating point registers.
Conrad Groblera824af62019-03-22 17:33:23 +0000570 */
571 add x2, x0, #VCPU_FREGS
Olivier Deprez82961762021-02-08 10:24:19 +0100572 simd_op_vectors ldp, x2
573 ldp x3, x4, [x2]
Conrad Groblera824af62019-03-22 17:33:23 +0000574 msr fpsr, x3
Conrad Groblera824af62019-03-22 17:33:23 +0000575
Conrad Grobler02ff6af2019-06-04 09:40:28 +0100576 /*
577 * Only restore FPCR if changed, to avoid expensive
578 * self-synchronising operation where possible.
579 */
580 mrs x5, fpcr
581 cmp x5, x4
582 b.eq vcpu_restore_lazy_and_run
583 msr fpcr, x4
584 /* Intentional fallthrough. */
585
586vcpu_restore_lazy_and_run:
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +0000587 /* Restore lazy registers. */
Fuad Tabba5e147a92019-08-14 15:30:30 +0100588 /* Use x28 as the base. */
589 add x28, x0, #VCPU_LAZY
590
Raghu Krishnamurthy2baf07a2021-01-17 09:42:35 -0800591#if ENABLE_VHE
592 /* Check if VHE support is enabled, equivalent to has_vhe_support(). */
593 mrs x19, id_aa64mmfr1_el1
594 tst x19, #0xf00
595 b.ne vhe_restore
596#endif
597
Fuad Tabba5e147a92019-08-14 15:30:30 +0100598 ldp x24, x25, [x28], #16
Raghu Krishnamurthy2baf07a2021-01-17 09:42:35 -0800599 msr sctlr_el1, x24
600 msr cpacr_el1, x25
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100601
Fuad Tabba5e147a92019-08-14 15:30:30 +0100602 ldp x2, x3, [x28], #16
Raghu Krishnamurthy2baf07a2021-01-17 09:42:35 -0800603 msr ttbr0_el1, x2
604 msr ttbr1_el1, x3
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100605
Fuad Tabba5e147a92019-08-14 15:30:30 +0100606 ldp x4, x5, [x28], #16
Raghu Krishnamurthy2baf07a2021-01-17 09:42:35 -0800607 msr tcr_el1, x4
608 msr esr_el1, x5
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100609
Fuad Tabba5e147a92019-08-14 15:30:30 +0100610 ldp x6, x7, [x28], #16
Raghu Krishnamurthy2baf07a2021-01-17 09:42:35 -0800611 msr afsr0_el1, x6
612 msr afsr1_el1, x7
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100613
Fuad Tabba5e147a92019-08-14 15:30:30 +0100614 ldp x8, x9, [x28], #16
Raghu Krishnamurthy2baf07a2021-01-17 09:42:35 -0800615 msr far_el1, x8
616 msr mair_el1, x9
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100617
Fuad Tabba5e147a92019-08-14 15:30:30 +0100618 ldp x10, x11, [x28], #16
Raghu Krishnamurthy2baf07a2021-01-17 09:42:35 -0800619 msr vbar_el1, x10
620 msr contextidr_el1, x11
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100621
Fuad Tabba5e147a92019-08-14 15:30:30 +0100622 ldp x12, x13, [x28], #16
Raghu Krishnamurthy2baf07a2021-01-17 09:42:35 -0800623 msr amair_el1, x12
624 msr cntkctl_el1, x13
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100625
Fuad Tabba5e147a92019-08-14 15:30:30 +0100626 ldp x14, x15, [x28], #16
Raghu Krishnamurthy2baf07a2021-01-17 09:42:35 -0800627 msr elr_el1, x14
628 msr spsr_el1, x15
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100629
Raghu Krishnamurthy2baf07a2021-01-17 09:42:35 -0800630#if ENABLE_VHE
631 b skip_vhe_restore
632
633vhe_restore:
634 ldp x24, x25, [x28], #16
635 msr MSR_SCTLR_EL12, x24
636 msr MSR_CPACR_EL12, x25
637
638 ldp x2, x3, [x28], #16
639 msr MSR_TTBR0_EL12, x2
640 msr MSR_TTBR1_EL12, x3
641
642 ldp x4, x5, [x28], #16
643 msr MSR_TCR_EL12, x4
644 msr MSR_ESR_EL12, x5
645
646 ldp x6, x7, [x28], #16
647 msr MSR_AFSR0_EL12, x6
648 msr MSR_AFSR1_EL12, x7
649
650 ldp x8, x9, [x28], #16
651 msr MSR_FAR_EL12, x8
652 msr MSR_MAIR_EL12, x9
653
654 ldp x10, x11, [x28], #16
655 msr MSR_VBAR_EL12, x10
656 msr MSR_CONTEXTIDR_EL12, x11
657
658 ldp x12, x13, [x28], #16
659 msr MSR_AMAIR_EL12, x12
660 msr MSR_CNTKCTL_EL12, x13
661
662 ldp x14, x15, [x28], #16
663 msr MSR_ELR_EL12, x14
664 msr MSR_SPSR_EL12, x15
665
666skip_vhe_restore:
667#endif
Fuad Tabba5e147a92019-08-14 15:30:30 +0100668 ldp x16, x17, [x28], #16
Raghu Krishnamurthy2baf07a2021-01-17 09:42:35 -0800669 msr vmpidr_el2, x16
670 msr csselr_el1, x17
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100671
Fuad Tabba5e147a92019-08-14 15:30:30 +0100672 ldp x18, x19, [x28], #16
Raghu Krishnamurthy2baf07a2021-01-17 09:42:35 -0800673 msr actlr_el1, x18
674 msr tpidr_el0, x19
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100675
Fuad Tabba5e147a92019-08-14 15:30:30 +0100676 ldp x20, x21, [x28], #16
Raghu Krishnamurthy2baf07a2021-01-17 09:42:35 -0800677 msr tpidrro_el0, x20
678 msr tpidr_el1, x21
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100679
Fuad Tabba5e147a92019-08-14 15:30:30 +0100680 ldp x22, x23, [x28], #16
Raghu Krishnamurthy2baf07a2021-01-17 09:42:35 -0800681 msr sp_el0, x22
682 msr sp_el1, x23
Wedson Almeida Filho1f81b752018-10-24 15:15:49 +0100683
Fuad Tabba5e147a92019-08-14 15:30:30 +0100684 ldp x26, x27, [x28], #16
Fuad Tabba2e2c98b2019-11-04 14:37:24 +0000685 msr cnthctl_el2, x26
686 msr vttbr_el2, x27
Andrew Walbranbc82f2d2019-02-21 14:50:29 +0000687
Jose Marinhoab1081d2019-10-18 11:39:01 +0100688#if SECURE_WORLD == 1
689 msr MSR_VSTTBR_EL2, x27
690#endif
691
Fuad Tabba5e147a92019-08-14 15:30:30 +0100692 ldp x4, x5, [x28], #16
Fuad Tabba2e2c98b2019-11-04 14:37:24 +0000693 msr mdcr_el2, x4
694 msr mdscr_el1, x5
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100695
Fuad Tabbaf1d6dc52019-09-18 17:33:14 +0100696 ldp x6, x7, [x28], #16
Fuad Tabba2e2c98b2019-11-04 14:37:24 +0000697 msr pmccfiltr_el0, x6
698 msr pmcr_el0, x7
Fuad Tabbaf1d6dc52019-09-18 17:33:14 +0100699
700 ldp x8, x9, [x28], #16
Fuad Tabbaf1d6dc52019-09-18 17:33:14 +0100701 /*
702 * NOTE: Writing 0s to pmcntenset_el0's bits do not alter their values.
703 * To reset them, clear the register by writing to pmcntenclr_el0.
704 */
705 mov x27, #0xffffffff
706 msr pmcntenclr_el0, x27
Fuad Tabba2e2c98b2019-11-04 14:37:24 +0000707 msr pmcntenset_el0, x8
Fuad Tabbaf1d6dc52019-09-18 17:33:14 +0100708
Fuad Tabbaf1d6dc52019-09-18 17:33:14 +0100709 /*
710 * NOTE: Writing 0s to pmintenset_el1's bits do not alter their values.
711 * To reset them, clear the register by writing to pmintenclr_el1.
712 */
713 msr pmintenclr_el1, x27
Fuad Tabba2e2c98b2019-11-04 14:37:24 +0000714 msr pmintenset_el1, x9
Fuad Tabbac76466d2019-09-06 10:42:12 +0100715
Raghu Krishnamurthy7e925bd2020-12-26 10:14:40 -0800716 ldr x8, [x28], #8
717 msr par_el1, x8
718
Olivier Depreze7d7f322020-12-14 16:01:03 +0100719#if BRANCH_PROTECTION
720 add x2, x0, #(VCPU_PAC + 16)
721 ldp x10, x11, [x2], #16
722 msr APIBKEYLO_EL1, x10
723 msr APIBKEYHI_EL1, x11
724 ldp x12, x13, [x2], #16
725 msr APDAKEYLO_EL1, x12
726 msr APDAKEYHI_EL1, x13
727 ldp x14, x15, [x2], #16
728 msr APDBKEYLO_EL1, x14
729 msr APDBKEYHI_EL1, x15
730 ldp x16, x17, [x2], #16
731 msr APGAKEYLO_EL1, x16
732 msr APGAKEYHI_EL1, x17
733#endif
734
Andrew Walbranb208b4a2019-05-20 12:42:22 +0100735 /* Restore GIC registers. */
736#if GIC_VERSION == 3 || GIC_VERSION == 4
737 /* Offset is too large, so start from a new base. */
738 add x2, x0, #VCPU_GIC
739
Andrew Walbran4b976f42019-06-05 15:00:50 +0100740 ldp x3, x4, [x2, #16 * 0]
Andrew Walbranb208b4a2019-05-20 12:42:22 +0100741 msr ich_hcr_el2, x3
Andrew Walbran4b976f42019-06-05 15:00:50 +0100742 msr icc_sre_el2, x4
Andrew Walbranb208b4a2019-05-20 12:42:22 +0100743#endif
744
Andrew Walbran1f32e722019-06-07 17:57:26 +0100745 /*
746 * If a different vCPU is being run on this physical CPU to the last one
747 * which was run for this VM, invalidate the TLB. This must be called
748 * after vttbr_el2 has been updated, so that we have the page table and
749 * VMID of the vCPU to which we are switching.
750 */
751 mov x19, x0
752 bl maybe_invalidate_tlb
753 mov x0, x19
754
Fuad Tabba7c299d82019-09-12 13:05:18 +0100755 /* Intentional fallthrough. */
756
757vcpu_restore_nonvolatile_and_run:
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100758 /* Restore non-volatile registers. */
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +0000759 ldp x19, x20, [x0, #VCPU_REGS + 8 * 19]
760 ldp x21, x22, [x0, #VCPU_REGS + 8 * 21]
761 ldp x23, x24, [x0, #VCPU_REGS + 8 * 23]
762 ldp x25, x26, [x0, #VCPU_REGS + 8 * 25]
763 ldp x27, x28, [x0, #VCPU_REGS + 8 * 27]
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100764
Wedson Almeida Filhod615cdb2018-10-09 13:00:21 +0100765 /* Intentional fallthrough. */
Wedson Almeida Filho87009642018-07-02 10:20:07 +0100766/**
Fuad Tabbab0ef2a42019-12-19 11:19:25 +0000767 * Restore volatile registers and run the given vCPU.
Wedson Almeida Filhod615cdb2018-10-09 13:00:21 +0100768 *
Fuad Tabbab0ef2a42019-12-19 11:19:25 +0000769 * x0 is a pointer to the target vCPU.
Wedson Almeida Filho87009642018-07-02 10:20:07 +0100770 */
771vcpu_restore_volatile_and_run:
Olivier Depreze7d7f322020-12-14 16:01:03 +0100772#if BRANCH_PROTECTION
773 add x1, x0, #VCPU_PAC
774 ldp x1, x2, [x1]
775
776 /* Restore vCPU APIA key. */
777 msr APIAKEYLO_EL1, x1
778 msr APIAKEYHI_EL1, x2
779#endif
780
Fuad Tabba7c299d82019-09-12 13:05:18 +0100781 ldp x4, x5, [x0, #VCPU_REGS + 8 * 4]
782 ldp x6, x7, [x0, #VCPU_REGS + 8 * 6]
783 ldp x8, x9, [x0, #VCPU_REGS + 8 * 8]
784 ldp x10, x11, [x0, #VCPU_REGS + 8 * 10]
785 ldp x12, x13, [x0, #VCPU_REGS + 8 * 12]
786 ldp x14, x15, [x0, #VCPU_REGS + 8 * 14]
787 ldp x16, x17, [x0, #VCPU_REGS + 8 * 16]
788 ldr x18, [x0, #VCPU_REGS + 8 * 18]
789 ldp x29, x30, [x0, #VCPU_REGS + 8 * 29]
790
791 /* Restore return address & mode. */
792 ldp x1, x2, [x0, #VCPU_REGS + 8 * 31]
793 msr elr_el2, x1
794 msr spsr_el2, x2
795
Raghu Krishnamurthy7e925bd2020-12-26 10:14:40 -0800796 ldr x1, [x0, #VCPU_REGS + 8 * 33]
797 msr hcr_el2, x1
798
Fuad Tabba7c299d82019-09-12 13:05:18 +0100799 /* Restore x0..x3, which we have used as scratch before. */
800 ldp x2, x3, [x0, #VCPU_REGS + 8 * 2]
801 ldp x0, x1, [x0, #VCPU_REGS + 8 * 0]
David Brazdild623d312019-12-19 16:04:06 +0000802 eret_with_sb