/*
 * Copyright 2018 The Hafnium Authors.
 *
 * Use of this source code is governed by a BSD-style
 * license that can be found in the LICENSE file or at
 * https://opensource.org/licenses/BSD-3-Clause.
 */

#include "hf/arch/offsets.h"

#include "hf/arch/vmid_base.h"

#include "msr.h"
#include "exception_macros.S"

/**
 * PE feature information about SVE implementation in AArch64 state.
 */
#define ID_AA64PFR0_SVE_SHIFT (32)
#define ID_AA64PFR0_SVE_LENGTH (4)
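/*
 * Note: the SVE field occupies bits [35:32] of ID_AA64PFR0_EL1; a non-zero
 * value indicates the PE implements SVE.
 */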

/**
 * Saves the volatile registers into the register buffer of the current vCPU.
 */
.macro save_volatile_to_vcpu
	/*
	 * Save x18 since we're about to clobber it. We subtract 16 instead of
	 * 8 from the stack pointer to keep it 16-byte aligned.
	 */
	str x18, [sp, #-16]!

	/* Get the current vCPU. */
	mrs x18, tpidr_el2
	stp x0, x1, [x18, #VCPU_REGS + 8 * 0]
	stp x2, x3, [x18, #VCPU_REGS + 8 * 2]
	stp x4, x5, [x18, #VCPU_REGS + 8 * 4]
	stp x6, x7, [x18, #VCPU_REGS + 8 * 6]
	stp x8, x9, [x18, #VCPU_REGS + 8 * 8]
	stp x10, x11, [x18, #VCPU_REGS + 8 * 10]
	stp x12, x13, [x18, #VCPU_REGS + 8 * 12]
	stp x14, x15, [x18, #VCPU_REGS + 8 * 14]
	stp x16, x17, [x18, #VCPU_REGS + 8 * 16]
	stp x29, x30, [x18, #VCPU_REGS + 8 * 29]

	/* x18 was saved on the stack, so we move it to vCPU regs buffer. */
	ldr x0, [sp], #16
	str x0, [x18, #VCPU_REGS + 8 * 18]

	/* Save return address & mode. */
	mrs x1, elr_el2
	mrs x2, spsr_el2
	stp x1, x2, [x18, #VCPU_REGS + 8 * 31]
.endm

/**
 * This is a generic handler for exceptions taken at a lower EL. It saves the
 * volatile registers to the current vCPU and calls the C handler, which can
 * select one of two paths: (a) restore volatile registers and return, or
 * (b) switch to a different vCPU. In the latter case, the handler needs to save
 * all non-volatile registers (they haven't been saved yet), then restore all
 * registers from the new vCPU.
 */
.macro lower_exception handler:req
	save_volatile_to_vcpu

#if BRANCH_PROTECTION
	/* NOTE: x18 still holds the pointer to the current vCPU. */
	bl pauth_save_vcpu_and_restore_hyp_key
#endif

	/* Call C handler. */
	bl \handler

	/* Switch vCPU if requested by handler. */
	cbnz x0, vcpu_switch

	/* vCPU is not changing. */
	mrs x0, tpidr_el2
	b vcpu_restore_volatile_and_run
.endm

/**
 * This is the handler for a sync exception taken at a lower EL.
 */
.macro lower_sync_exception
	save_volatile_to_vcpu

#if BRANCH_PROTECTION
	/* NOTE: x18 still holds the pointer to the current vCPU. */
	bl pauth_save_vcpu_and_restore_hyp_key
#endif

	/* Extract the exception class (EC) from exception syndrome register. */
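	/* ESR_EL2.EC occupies bits [31:26], so the shift below leaves only EC in x18. */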
	mrs x18, esr_el2
	lsr x18, x18, #26

	/* Take the system register path for EC 0x18. */
	sub x18, x18, #0x18
	cbz x18, system_register_access

	/* Call C handler passing the syndrome and fault address registers. */
	mrs x0, esr_el2
	mrs x1, far_el2
	bl sync_lower_exception

	/* Switch vCPU if requested by handler. */
	cbnz x0, vcpu_switch

	/* vCPU is not changing. */
	mrs x0, tpidr_el2
	b vcpu_restore_volatile_and_run
.endm

/**
 * The following is the exception table. A pointer to it will be stored in
 * register vbar_el2.
 */
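/*
 * The table has 16 entries of 0x80 bytes each, in four groups of four:
 * current EL using SP_EL0, current EL using SP_ELx, lower EL using AArch64,
 * and lower EL using AArch32. Each group holds the synchronous, IRQ, FIQ and
 * SError vectors, in that order.
 */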
.section .text.vector_table_el2, "ax"
.global vector_table_el2
.balign 0x800
vector_table_el2:
sync_cur_sp0:
	noreturn_current_exception_sp0 el2 sync_current_exception_noreturn

.balign 0x80
irq_cur_sp0:
	noreturn_current_exception_sp0 el2 irq_current_exception_noreturn

.balign 0x80
fiq_cur_sp0:
	noreturn_current_exception_sp0 el2 fiq_current_exception_noreturn

.balign 0x80
serr_cur_sp0:
	noreturn_current_exception_sp0 el2 serr_current_exception_noreturn

.balign 0x80
sync_cur_spx:
	noreturn_current_exception_spx el2 sync_current_exception_noreturn

.balign 0x80
irq_cur_spx:
	noreturn_current_exception_spx el2 irq_current_exception_noreturn

.balign 0x80
fiq_cur_spx:
	noreturn_current_exception_spx el2 fiq_current_exception_noreturn

.balign 0x80
serr_cur_spx:
	noreturn_current_exception_spx el2 serr_current_exception_noreturn

.balign 0x80
sync_lower_64:
	lower_sync_exception

.balign 0x80
irq_lower_64:
	lower_exception irq_lower

.balign 0x80
fiq_lower_64:
	lower_exception fiq_lower

.balign 0x80
serr_lower_64:
	lower_exception serr_lower

.balign 0x80
sync_lower_32:
	lower_sync_exception

.balign 0x80
irq_lower_32:
	lower_exception irq_lower

.balign 0x80
fiq_lower_32:
	lower_exception fiq_lower

.balign 0x80
serr_lower_32:
	lower_exception serr_lower

.balign 0x40

/**
 * pauth_save_vcpu_and_restore_hyp_key
 *
 * NOTE: expects x18 to hold the pointer to the current vCPU.
 */
#if BRANCH_PROTECTION
pauth_save_vcpu_and_restore_hyp_key:
	/*
	 * Save the APIA key for the vCPU as the hypervisor replaces it with
	 * its own key. The other vCPU PAuth keys are taken care of in
	 * vcpu_switch.
	 */
	mrs x0, APIAKEYLO_EL1
	mrs x1, APIAKEYHI_EL1
	add x18, x18, #VCPU_PAC
	stp x0, x1, [x18]

	/* Restore the hypervisor APIA key. */
	pauth_restore_hypervisor_key x0 x1
	ret
#endif

/**
 * Handle accesses to system registers (EC=0x18) and return to original caller.
 */
system_register_access:
	/*
	 * Non-volatile registers are (conservatively) saved because the handler
	 * can clobber non-volatile registers that are used by the msr/mrs,
	 * which results in the wrong value being read or written.
	 */
	/* Get the current vCPU. */
	mrs x18, tpidr_el2
	stp x19, x20, [x18, #VCPU_REGS + 8 * 19]
	stp x21, x22, [x18, #VCPU_REGS + 8 * 21]
	stp x23, x24, [x18, #VCPU_REGS + 8 * 23]
	stp x25, x26, [x18, #VCPU_REGS + 8 * 25]
	stp x27, x28, [x18, #VCPU_REGS + 8 * 27]

	/* Read syndrome register and call C handler. */
	mrs x0, esr_el2
	bl handle_system_register_access

	/* Continue running the same vCPU. */
	mrs x0, tpidr_el2
	b vcpu_restore_nonvolatile_and_run

/**
 * Switch to a new vCPU.
 *
 * All volatile registers from the old vCPU have already been saved. We need
 * to save only non-volatile ones from the old vCPU, and restore all from the
 * new one.
 *
 * x0 is a pointer to the new vCPU.
 */
vcpu_switch:
	/* Save non-volatile registers. */
	mrs x1, tpidr_el2
	stp x19, x20, [x1, #VCPU_REGS + 8 * 19]
	stp x21, x22, [x1, #VCPU_REGS + 8 * 21]
	stp x23, x24, [x1, #VCPU_REGS + 8 * 23]
	stp x25, x26, [x1, #VCPU_REGS + 8 * 25]
	stp x27, x28, [x1, #VCPU_REGS + 8 * 27]

	/* Save lazy state. */
	/* Use x28 as the base. */
	add x28, x1, #VCPU_LAZY
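	/*
	 * NOTE: the save order below must match the restore order in
	 * vcpu_restore_lazy_and_run, as both walk the same VCPU_LAZY buffer
	 * with post-indexed accesses.
	 */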

#if ENABLE_VHE
	/* Check if VHE support is enabled, equivalent to has_vhe_support(). */
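	/* ID_AA64MMFR1_EL1.VH is bits [11:8]; a non-zero field means VHE is implemented. */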
	mrs x19, id_aa64mmfr1_el1
	tst x19, #0xf00
	b.ne vhe_save
#endif

	mrs x24, sctlr_el1
	mrs x25, cpacr_el1
	stp x24, x25, [x28], #16

	mrs x2, ttbr0_el1
	mrs x3, ttbr1_el1
	stp x2, x3, [x28], #16

	mrs x4, tcr_el1
	mrs x5, esr_el1
	stp x4, x5, [x28], #16

	mrs x6, afsr0_el1
	mrs x7, afsr1_el1
	stp x6, x7, [x28], #16

	mrs x8, far_el1
	mrs x9, mair_el1
	stp x8, x9, [x28], #16

	mrs x10, vbar_el1
	mrs x11, contextidr_el1
	stp x10, x11, [x28], #16

	mrs x12, amair_el1
	mrs x13, cntkctl_el1
	stp x12, x13, [x28], #16

	mrs x14, elr_el1
	mrs x15, spsr_el1
	stp x14, x15, [x28], #16

#if ENABLE_VHE
	b skip_vhe_save

vhe_save:
	mrs x24, MSR_SCTLR_EL12
	mrs x25, MSR_CPACR_EL12
	stp x24, x25, [x28], #16

	mrs x2, MSR_TTBR0_EL12
	mrs x3, MSR_TTBR1_EL12
	stp x2, x3, [x28], #16

	mrs x4, MSR_TCR_EL12
	mrs x5, MSR_ESR_EL12
	stp x4, x5, [x28], #16

	mrs x6, MSR_AFSR0_EL12
	mrs x7, MSR_AFSR1_EL12
	stp x6, x7, [x28], #16

	mrs x8, MSR_FAR_EL12
	mrs x9, MSR_MAIR_EL12
	stp x8, x9, [x28], #16

	mrs x10, MSR_VBAR_EL12
	mrs x11, MSR_CONTEXTIDR_EL12
	stp x10, x11, [x28], #16

	mrs x12, MSR_AMAIR_EL12
	mrs x13, MSR_CNTKCTL_EL12
	stp x12, x13, [x28], #16

	mrs x14, MSR_ELR_EL12
	mrs x15, MSR_SPSR_EL12
	stp x14, x15, [x28], #16

skip_vhe_save:
#endif
	mrs x16, vmpidr_el2
	mrs x17, csselr_el1
	stp x16, x17, [x28], #16

	mrs x18, actlr_el1
	mrs x19, tpidr_el0
	stp x18, x19, [x28], #16

	mrs x20, tpidrro_el0
	mrs x21, tpidr_el1
	stp x20, x21, [x28], #16

	mrs x22, sp_el0
	mrs x23, sp_el1
	stp x22, x23, [x28], #16

	mrs x24, par_el1
	mrs x25, hcr_el2
	stp x24, x25, [x28], #16

	mrs x26, cnthctl_el2
	mrs x27, vttbr_el2
	stp x26, x27, [x28], #16

	mrs x4, mdcr_el2
	mrs x5, mdscr_el1
	stp x4, x5, [x28], #16

	mrs x6, pmccfiltr_el0
	mrs x7, pmcr_el0
	stp x6, x7, [x28], #16

	mrs x8, pmcntenset_el0
	mrs x9, pmintenset_el1
	stp x8, x9, [x28], #16

#if BRANCH_PROTECTION
	add x2, x1, #(VCPU_PAC + 16)
	mrs x10, APIBKEYLO_EL1
	mrs x11, APIBKEYHI_EL1
	stp x10, x11, [x2], #16
	mrs x12, APDAKEYLO_EL1
	mrs x13, APDAKEYHI_EL1
	stp x12, x13, [x2], #16
	mrs x14, APDBKEYLO_EL1
	mrs x15, APDBKEYHI_EL1
	stp x14, x15, [x2], #16
	mrs x16, APGAKEYLO_EL1
	mrs x17, APGAKEYHI_EL1
	stp x16, x17, [x2], #16
#endif

	/* Save GIC registers. */
#if GIC_VERSION == 3 || GIC_VERSION == 4
	/* Offset is too large, so start from a new base. */
	add x2, x1, #VCPU_GIC

	mrs x3, ich_hcr_el2
	mrs x4, icc_sre_el2
	stp x3, x4, [x2, #16 * 0]
#endif

	/* Save floating point registers. */
	/* Use x28 as the base. */
	add x28, x1, #VCPU_FREGS
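	/*
	 * simd_op_vectors is assumed to apply the given instruction (stp here)
	 * to the SIMD vector register pairs, advancing x28 as it goes, so that
	 * fpsr/fpcr land at the end of the buffer below.
	 */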
	simd_op_vectors stp, x28
	mrs x3, fpsr
	mrs x4, fpcr
	stp x3, x4, [x28]

	/* Save new vCPU pointer in non-volatile register. */
	mov x19, x0

	/*
	 * Save peripheral registers, and inform the arch-independent sections
	 * that registers have been saved.
	 */
	mov x0, x1
	bl complete_saving_state
	mov x0, x19

#if SECURE_WORLD == 1

	ldr x1, [x0, #VCPU_VM]
	ldrh w1, [x1, #VM_ID]

	/* Exit to normal world if VM is HF_OTHER_WORLD_ID. */
	cmp w1, #HF_OTHER_WORLD_ID
	bne vcpu_restore_all_and_run

	/*
	 * The current vCPU state is saved so it's now safe to switch to the
	 * normal world.
	 */

other_world_loop:
	/* Check if SVE is implemented. */
	mrs x0, id_aa64pfr0_el1
	ubfx x0, x0, ID_AA64PFR0_SVE_SHIFT, ID_AA64PFR0_SVE_LENGTH
	cbnz x0, sve_context_restore

	/* Restore the other world SIMD context to the other world VM vCPU. */
	add x18, x19, #VCPU_FREGS
	simd_op_vectors ldp, x18
	ldp x0, x1, [x18]
	msr fpsr, x0
	msr fpcr, x1
	b sve_skip_context_restore

	/* Restore the other world SVE context from internal buffer. */
sve_context_restore:
	adrp x18, sve_other_world_context
	add x18, x18, :lo12: sve_other_world_context
	ldr x0, [x19, #VCPU_CPU]
	bl cpu_index
	mov x20, #SVE_CTX_SIZE
	madd x18, x0, x20, x18
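	/* x18 = &sve_other_world_context[cpu_index] = base + cpu_index * SVE_CTX_SIZE. */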

	/* Restore vector registers. */
	sve_op_vectors ldr, x18
	/* Restore FFR register before predicates. */
	add x20, x18, #SVE_CTX_FFR
	ldr p0, [x20]
	wrffr p0.b
	/* Restore predicate registers. */
	add x20, x18, #SVE_CTX_PREDICATES
	sve_predicate_op ldr, x20

	/*
	 * Prepare arguments from other world VM vCPU.
	 * x19 holds the other world VM vCPU pointer.
	 */
sve_skip_context_restore:
	ldp x0, x1, [x19, #VCPU_REGS + 8 * 0]
	ldp x2, x3, [x19, #VCPU_REGS + 8 * 2]
	ldp x4, x5, [x19, #VCPU_REGS + 8 * 4]
	ldp x6, x7, [x19, #VCPU_REGS + 8 * 6]

#if BRANCH_PROTECTION
	/*
	 * EL3 saves the pointer authentication keys when entered by SMC.
	 * We nevertheless clear the keys to be on the safe side.
	 */
	msr APIAKEYLO_EL1, xzr
	msr APIAKEYHI_EL1, xzr
	msr APIBKEYLO_EL1, xzr
	msr APIBKEYHI_EL1, xzr
	msr APDAKEYLO_EL1, xzr
	msr APDAKEYHI_EL1, xzr
	msr APDBKEYLO_EL1, xzr
	msr APDBKEYHI_EL1, xzr
	msr APGAKEYLO_EL1, xzr
	msr APGAKEYHI_EL1, xzr
#endif

	smc #0

	/*
	 * The call to EL3 returned. The first eight GP registers contain an
	 * FF-A call from the physical FF-A instance. Save those arguments to
	 * the other world VM vCPU.
	 * x19 is restored with the other world VM vCPU pointer.
	 */
	stp x0, x1, [x19, #VCPU_REGS + 8 * 0]
	stp x2, x3, [x19, #VCPU_REGS + 8 * 2]
	stp x4, x5, [x19, #VCPU_REGS + 8 * 4]
	stp x6, x7, [x19, #VCPU_REGS + 8 * 6]

	/* Check if SVE is implemented. */
	mrs x0, id_aa64pfr0_el1
	ubfx x0, x0, ID_AA64PFR0_SVE_SHIFT, ID_AA64PFR0_SVE_LENGTH
	cbnz x0, sve_context_save

	/* Save the other world SIMD context to the other world VM vCPU. */
	add x18, x19, #VCPU_FREGS
	simd_op_vectors stp, x18
	mrs x0, fpsr
	mrs x1, fpcr
	stp x0, x1, [x18]
	b sve_skip_context_save

	/* Save the other world SVE context to internal buffer. */
sve_context_save:
	adrp x18, sve_other_world_context
	add x18, x18, :lo12: sve_other_world_context
	ldr x0, [x19, #VCPU_CPU]
	bl cpu_index
	mov x20, #SVE_CTX_SIZE
	madd x18, x0, x20, x18

	/* Save vector registers. */
	sve_op_vectors str, x18
	/* Save predicate registers. */
	add x20, x18, #SVE_CTX_PREDICATES
	sve_predicate_op str, x20
	/* Save FFR register after predicates. */
	add x20, x18, #SVE_CTX_FFR
	rdffr p0.b
	str p0, [x20]

sve_skip_context_save:

#if BRANCH_PROTECTION
	pauth_restore_hypervisor_key x0 x1
#endif

	/*
	 * The stack is at its top and execution can restart straight into C
	 * code. Handle the FF-A call from the other world.
	 */
	mov x0, x19
	bl smc_handler_from_nwd

	/*
	 * If the SMC handler returns null, this indicates no vCPU has to be
	 * resumed and the GP registers contain a fresh FF-A response or call
	 * directed to the normal world. Hence loop back and emit an SMC again.
	 * Otherwise restore the vCPU pointed to by the handler return value.
	 */
	cbz x0, other_world_loop

#endif

	/* Intentional fallthrough. */
.global vcpu_restore_all_and_run
vcpu_restore_all_and_run:
	/* Update pointer to current vCPU. */
	msr tpidr_el2, x0

	/* Restore peripheral registers. */
	mov x19, x0
	bl begin_restoring_state
	mov x0, x19

	/*
	 * Restore floating point registers.
	 */
	add x2, x0, #VCPU_FREGS
	simd_op_vectors ldp, x2
	ldp x3, x4, [x2]
	msr fpsr, x3

	/*
	 * Only restore FPCR if changed, to avoid expensive
	 * self-synchronising operation where possible.
	 */
	mrs x5, fpcr
	cmp x5, x4
	b.eq vcpu_restore_lazy_and_run
	msr fpcr, x4
	/* Intentional fallthrough. */

vcpu_restore_lazy_and_run:
	/* Restore lazy registers. */
	/* Use x28 as the base. */
	add x28, x0, #VCPU_LAZY

#if ENABLE_VHE
	/* Check if VHE support is enabled, equivalent to has_vhe_support(). */
	mrs x19, id_aa64mmfr1_el1
	tst x19, #0xf00
	b.ne vhe_restore
#endif

	ldp x24, x25, [x28], #16
	msr sctlr_el1, x24
	msr cpacr_el1, x25

	ldp x2, x3, [x28], #16
	msr ttbr0_el1, x2
	msr ttbr1_el1, x3

	ldp x4, x5, [x28], #16
	msr tcr_el1, x4
	msr esr_el1, x5

	ldp x6, x7, [x28], #16
	msr afsr0_el1, x6
	msr afsr1_el1, x7

	ldp x8, x9, [x28], #16
	msr far_el1, x8
	msr mair_el1, x9

	ldp x10, x11, [x28], #16
	msr vbar_el1, x10
	msr contextidr_el1, x11

	ldp x12, x13, [x28], #16
	msr amair_el1, x12
	msr cntkctl_el1, x13

	ldp x14, x15, [x28], #16
	msr elr_el1, x14
	msr spsr_el1, x15

#if ENABLE_VHE
	b skip_vhe_restore

vhe_restore:
	ldp x24, x25, [x28], #16
	msr MSR_SCTLR_EL12, x24
	msr MSR_CPACR_EL12, x25

	ldp x2, x3, [x28], #16
	msr MSR_TTBR0_EL12, x2
	msr MSR_TTBR1_EL12, x3

	ldp x4, x5, [x28], #16
	msr MSR_TCR_EL12, x4
	msr MSR_ESR_EL12, x5

	ldp x6, x7, [x28], #16
	msr MSR_AFSR0_EL12, x6
	msr MSR_AFSR1_EL12, x7

	ldp x8, x9, [x28], #16
	msr MSR_FAR_EL12, x8
	msr MSR_MAIR_EL12, x9

	ldp x10, x11, [x28], #16
	msr MSR_VBAR_EL12, x10
	msr MSR_CONTEXTIDR_EL12, x11

	ldp x12, x13, [x28], #16
	msr MSR_AMAIR_EL12, x12
	msr MSR_CNTKCTL_EL12, x13

	ldp x14, x15, [x28], #16
	msr MSR_ELR_EL12, x14
	msr MSR_SPSR_EL12, x15

skip_vhe_restore:
#endif
	ldp x16, x17, [x28], #16
	msr vmpidr_el2, x16
	msr csselr_el1, x17

	ldp x18, x19, [x28], #16
	msr actlr_el1, x18
	msr tpidr_el0, x19

	ldp x20, x21, [x28], #16
	msr tpidrro_el0, x20
	msr tpidr_el1, x21

	ldp x22, x23, [x28], #16
	msr sp_el0, x22
	msr sp_el1, x23

	ldp x24, x25, [x28], #16
	msr par_el1, x24
	msr hcr_el2, x25

	ldp x26, x27, [x28], #16
	msr cnthctl_el2, x26
	msr vttbr_el2, x27

#if SECURE_WORLD == 1
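	/*
	 * In the secure world build, mirror the stage-2 translation base into
	 * the secure VSTTBR_EL2 as well.
	 */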
	msr MSR_VSTTBR_EL2, x27
#endif

	ldp x4, x5, [x28], #16
	msr mdcr_el2, x4
	msr mdscr_el1, x5

	ldp x6, x7, [x28], #16
	msr pmccfiltr_el0, x6
	msr pmcr_el0, x7

	ldp x8, x9, [x28], #16
	/*
	 * NOTE: Writing 0s to pmcntenset_el0's bits does not alter their
	 * values. To reset them, clear the register by writing to
	 * pmcntenclr_el0.
	 */
	mov x27, #0xffffffff
	msr pmcntenclr_el0, x27
	msr pmcntenset_el0, x8

	/*
	 * NOTE: Writing 0s to pmintenset_el1's bits does not alter their
	 * values. To reset them, clear the register by writing to
	 * pmintenclr_el1.
	 */
	msr pmintenclr_el1, x27
	msr pmintenset_el1, x9

#if BRANCH_PROTECTION
	add x2, x0, #(VCPU_PAC + 16)
	ldp x10, x11, [x2], #16
	msr APIBKEYLO_EL1, x10
	msr APIBKEYHI_EL1, x11
	ldp x12, x13, [x2], #16
	msr APDAKEYLO_EL1, x12
	msr APDAKEYHI_EL1, x13
	ldp x14, x15, [x2], #16
	msr APDBKEYLO_EL1, x14
	msr APDBKEYHI_EL1, x15
	ldp x16, x17, [x2], #16
	msr APGAKEYLO_EL1, x16
	msr APGAKEYHI_EL1, x17
#endif

	/* Restore GIC registers. */
#if GIC_VERSION == 3 || GIC_VERSION == 4
	/* Offset is too large, so start from a new base. */
	add x2, x0, #VCPU_GIC

	ldp x3, x4, [x2, #16 * 0]
	msr ich_hcr_el2, x3
	msr icc_sre_el2, x4
#endif
744
Andrew Walbran1f32e722019-06-07 17:57:26 +0100745 /*
746 * If a different vCPU is being run on this physical CPU to the last one
747 * which was run for this VM, invalidate the TLB. This must be called
748 * after vttbr_el2 has been updated, so that we have the page table and
749 * VMID of the vCPU to which we are switching.
750 */
751 mov x19, x0
752 bl maybe_invalidate_tlb
753 mov x0, x19
754
Fuad Tabba7c299d82019-09-12 13:05:18 +0100755 /* Intentional fallthrough. */
756
757vcpu_restore_nonvolatile_and_run:
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100758 /* Restore non-volatile registers. */
Wedson Almeida Filho9d5040f2018-10-29 08:41:27 +0000759 ldp x19, x20, [x0, #VCPU_REGS + 8 * 19]
760 ldp x21, x22, [x0, #VCPU_REGS + 8 * 21]
761 ldp x23, x24, [x0, #VCPU_REGS + 8 * 23]
762 ldp x25, x26, [x0, #VCPU_REGS + 8 * 25]
763 ldp x27, x28, [x0, #VCPU_REGS + 8 * 27]
Wedson Almeida Filho987c0ff2018-06-20 16:34:38 +0100764
Wedson Almeida Filhod615cdb2018-10-09 13:00:21 +0100765 /* Intentional fallthrough. */
Wedson Almeida Filho87009642018-07-02 10:20:07 +0100766/**
Fuad Tabbab0ef2a42019-12-19 11:19:25 +0000767 * Restore volatile registers and run the given vCPU.
Wedson Almeida Filhod615cdb2018-10-09 13:00:21 +0100768 *
Fuad Tabbab0ef2a42019-12-19 11:19:25 +0000769 * x0 is a pointer to the target vCPU.
Wedson Almeida Filho87009642018-07-02 10:20:07 +0100770 */
771vcpu_restore_volatile_and_run:
Olivier Depreze7d7f322020-12-14 16:01:03 +0100772#if BRANCH_PROTECTION
773 add x1, x0, #VCPU_PAC
774 ldp x1, x2, [x1]
775
776 /* Restore vCPU APIA key. */
777 msr APIAKEYLO_EL1, x1
778 msr APIAKEYHI_EL1, x2
779#endif
780
Fuad Tabba7c299d82019-09-12 13:05:18 +0100781 ldp x4, x5, [x0, #VCPU_REGS + 8 * 4]
782 ldp x6, x7, [x0, #VCPU_REGS + 8 * 6]
783 ldp x8, x9, [x0, #VCPU_REGS + 8 * 8]
784 ldp x10, x11, [x0, #VCPU_REGS + 8 * 10]
785 ldp x12, x13, [x0, #VCPU_REGS + 8 * 12]
786 ldp x14, x15, [x0, #VCPU_REGS + 8 * 14]
787 ldp x16, x17, [x0, #VCPU_REGS + 8 * 16]
788 ldr x18, [x0, #VCPU_REGS + 8 * 18]
789 ldp x29, x30, [x0, #VCPU_REGS + 8 * 29]
790
791 /* Restore return address & mode. */
792 ldp x1, x2, [x0, #VCPU_REGS + 8 * 31]
793 msr elr_el2, x1
794 msr spsr_el2, x2
795
796 /* Restore x0..x3, which we have used as scratch before. */
797 ldp x2, x3, [x0, #VCPU_REGS + 8 * 2]
798 ldp x0, x1, [x0, #VCPU_REGS + 8 * 0]
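	/*
	 * eret_with_sb is expected to expand to an eret followed by a
	 * speculation barrier (see exception_macros.S included above).
	 */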
	eret_with_sb