/*
 * Copyright 2018 The Hafnium Authors.
 *
 * Use of this source code is governed by a BSD-style
 * license that can be found in the LICENSE file or at
 * https://opensource.org/licenses/BSD-3-Clause.
 */

#include "hf/arch/offsets.h"

#include "hf/arch/vmid_base.h"

#include "msr.h"
#include "exception_macros.S"

/**
 * PE feature information about SVE implementation in AArch64 state.
 */
#define ID_AA64PFR0_SVE_SHIFT (32)
#define ID_AA64PFR0_SVE_LENGTH (4)
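
/*
 * Note: ID_AA64PFR0_EL1.SVE occupies bits [35:32]; a non-zero value means SVE
 * is implemented. The ubfx sequences below extract the field using these two
 * constants.
 */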

/**
 * Saves the volatile registers into the register buffer of the current vCPU.
 */
.macro save_volatile_to_vcpu
	/*
	 * Save x18 since we're about to clobber it. We subtract 16 instead of
	 * 8 from the stack pointer to keep it 16-byte aligned.
	 */
	str x18, [sp, #-16]!

	/* Get the current vCPU. */
	mrs x18, tpidr_el2
	stp x0, x1, [x18, #VCPU_REGS + 8 * 0]
	stp x2, x3, [x18, #VCPU_REGS + 8 * 2]
	stp x4, x5, [x18, #VCPU_REGS + 8 * 4]
	stp x6, x7, [x18, #VCPU_REGS + 8 * 6]
	stp x8, x9, [x18, #VCPU_REGS + 8 * 8]
	stp x10, x11, [x18, #VCPU_REGS + 8 * 10]
	stp x12, x13, [x18, #VCPU_REGS + 8 * 12]
	stp x14, x15, [x18, #VCPU_REGS + 8 * 14]
	stp x16, x17, [x18, #VCPU_REGS + 8 * 16]
	stp x29, x30, [x18, #VCPU_REGS + 8 * 29]

	/* x18 was saved on the stack, so move it to the vCPU regs buffer. */
	ldr x0, [sp], #16
	str x0, [x18, #VCPU_REGS + 8 * 18]

	/* Save the return address & mode. */
	mrs x1, elr_el2
	mrs x2, spsr_el2
	stp x1, x2, [x18, #VCPU_REGS + 8 * 31]
	mrs x1, hcr_el2
	str x1, [x18, #VCPU_REGS + 8 * 33]
.endm
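
/*
 * Note: save_volatile_to_vcpu leaves x18 pointing at the current vCPU and x1
 * holding the saved HCR_EL2 value; the ENABLE_VHE and BRANCH_PROTECTION paths
 * below rely on both.
 */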

/**
 * This is a generic handler for exceptions taken at a lower EL. It saves the
 * volatile registers to the current vCPU and calls the C handler, which can
 * select one of two paths: (a) restore the volatile registers and return, or
 * (b) switch to a different vCPU. In the latter case, the handler needs to
 * save all non-volatile registers (they haven't been saved yet), then restore
 * all registers from the new vCPU.
 */
.macro lower_exception handler:req
	save_volatile_to_vcpu

#if ENABLE_VHE
	bl enable_vhe_tge
#endif

#if BRANCH_PROTECTION
	/* NOTE: x18 still holds the pointer to the current vCPU. */
	bl pauth_save_vcpu_and_restore_hyp_key
#endif

	/* Call the C handler. */
	bl \handler

	/* Switch vCPU if requested by the handler. */
	cbnz x0, vcpu_switch

	/* The vCPU is not changing. */
	mrs x0, tpidr_el2
	b vcpu_restore_volatile_and_run
.endm

/**
 * This is the handler for a sync exception taken at a lower EL.
 */
.macro lower_sync_exception
	save_volatile_to_vcpu

#if ENABLE_VHE
	bl enable_vhe_tge
#endif

#if BRANCH_PROTECTION
	/* NOTE: x18 still holds the pointer to the current vCPU. */
	bl pauth_save_vcpu_and_restore_hyp_key
#endif

	/* Extract the exception class (EC) from the exception syndrome register. */
	mrs x18, esr_el2
	lsr x18, x18, #26

	/*
	 * Take the system register path for EC 0x18 (trapped MSR, MRS or
	 * system instruction execution in AArch64 state).
	 */
	sub x18, x18, #0x18
	cbz x18, system_register_access

	/* Call the C handler, passing the syndrome and fault address registers. */
	mrs x0, esr_el2
	mrs x1, far_el2
	bl sync_lower_exception

	/* Switch vCPU if requested by the handler. */
	cbnz x0, vcpu_switch

	/* The vCPU is not changing. */
	mrs x0, tpidr_el2
	b vcpu_restore_volatile_and_run
.endm
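
/*
 * A sketch of the assumed C-side contract for the handlers called above
 * (irq_lower, fiq_lower, serr_lower, sync_lower_exception): each returns, in
 * x0, a pointer to the vCPU to switch to, or NULL to keep running the current
 * vCPU; sync_lower_exception additionally takes esr_el2 and far_el2 in x0/x1.
 */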

/**
 * The following is the exception table. A pointer to it will be stored in
 * register vbar_el2.
 */
.section .text.vector_table_el2, "ax"
.global vector_table_el2
.balign 0x800
vector_table_el2:
sync_cur_sp0:
	noreturn_current_exception_sp0 el2 sync_current_exception_noreturn

.balign 0x80
irq_cur_sp0:
	noreturn_current_exception_sp0 el2 irq_current_exception_noreturn

.balign 0x80
fiq_cur_sp0:
	noreturn_current_exception_sp0 el2 fiq_current_exception_noreturn

.balign 0x80
serr_cur_sp0:
	noreturn_current_exception_sp0 el2 serr_current_exception_noreturn

.balign 0x80
sync_cur_spx:
	noreturn_current_exception_spx el2 sync_current_exception_noreturn

.balign 0x80
irq_cur_spx:
	noreturn_current_exception_spx el2 irq_current_exception_noreturn

.balign 0x80
fiq_cur_spx:
	noreturn_current_exception_spx el2 fiq_current_exception_noreturn

.balign 0x80
serr_cur_spx:
	noreturn_current_exception_spx el2 serr_current_exception_noreturn

.balign 0x80
sync_lower_64:
	lower_sync_exception

.balign 0x80
irq_lower_64:
	lower_exception irq_lower

.balign 0x80
fiq_lower_64:
	lower_exception fiq_lower

.balign 0x80
serr_lower_64:
	lower_exception serr_lower

.balign 0x80
sync_lower_32:
	lower_sync_exception

.balign 0x80
irq_lower_32:
	lower_exception irq_lower

.balign 0x80
fiq_lower_32:
	lower_exception fiq_lower

.balign 0x80
serr_lower_32:
	lower_exception serr_lower

.balign 0x40

/**
 * pauth_save_vcpu_and_restore_hyp_key
 *
 * NOTE: expects x18 to hold the pointer to the current vCPU.
 */
#if BRANCH_PROTECTION
pauth_save_vcpu_and_restore_hyp_key:
	/*
	 * Save the APIA key for the vCPU, as the hypervisor replaces it with
	 * its own key. The other vCPU PAuth keys are taken care of in
	 * vcpu_switch.
	 */
	mrs x0, APIAKEYLO_EL1
	mrs x1, APIAKEYHI_EL1
	add x18, x18, #VCPU_PAC
	stp x0, x1, [x18]

	/* Restore the hypervisor's APIA key. */
	pauth_restore_hypervisor_key x0 x1
	ret
#endif

/**
 * Handle accesses to system registers (EC=0x18) and return to the original
 * caller.
 */
system_register_access:
	/*
	 * Non-volatile registers are (conservatively) saved because the C
	 * handler can clobber non-volatile registers that the trapped msr/mrs
	 * instruction uses, which would result in the wrong value being read
	 * or written.
	 */
	/* Get the current vCPU. */
	mrs x18, tpidr_el2
	stp x19, x20, [x18, #VCPU_REGS + 8 * 19]
	stp x21, x22, [x18, #VCPU_REGS + 8 * 21]
	stp x23, x24, [x18, #VCPU_REGS + 8 * 23]
	stp x25, x26, [x18, #VCPU_REGS + 8 * 25]
	stp x27, x28, [x18, #VCPU_REGS + 8 * 27]

	/* Read the syndrome register and call the C handler. */
	mrs x0, esr_el2
	bl handle_system_register_access

	/* Continue running the same vCPU. */
	mrs x0, tpidr_el2
	b vcpu_restore_nonvolatile_and_run

/**
 * Switch to a new vCPU.
 *
 * All volatile registers from the old vCPU have already been saved. We need
 * to save only the non-volatile ones from the old vCPU, and restore all
 * registers from the new one.
 *
 * x0 is a pointer to the new vCPU.
 */
vcpu_switch:
	/* Save non-volatile registers. */
	mrs x1, tpidr_el2
	stp x19, x20, [x1, #VCPU_REGS + 8 * 19]
	stp x21, x22, [x1, #VCPU_REGS + 8 * 21]
	stp x23, x24, [x1, #VCPU_REGS + 8 * 23]
	stp x25, x26, [x1, #VCPU_REGS + 8 * 25]
	stp x27, x28, [x1, #VCPU_REGS + 8 * 27]

	/* Save lazy state. */
	/* Use x28 as the base. */
	add x28, x1, #VCPU_LAZY
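
	/*
	 * The lazy registers are stored in pairs at increasing offsets from
	 * VCPU_LAZY; vcpu_restore_lazy_and_run below must read them back in
	 * exactly the same order.
	 */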

#if ENABLE_VHE
	/*
	 * Check whether VHE is implemented (ID_AA64MMFR1_EL1.VH, bits [11:8]),
	 * equivalent to has_vhe_support().
	 */
	mrs x19, id_aa64mmfr1_el1
	tst x19, #0xf00
	b.ne vhe_save
#endif

	mrs x24, sctlr_el1
	mrs x25, cpacr_el1
	stp x24, x25, [x28], #16

	mrs x2, ttbr0_el1
	mrs x3, ttbr1_el1
	stp x2, x3, [x28], #16

	mrs x4, tcr_el1
	mrs x5, esr_el1
	stp x4, x5, [x28], #16

	mrs x6, afsr0_el1
	mrs x7, afsr1_el1
	stp x6, x7, [x28], #16

	mrs x8, far_el1
	mrs x9, mair_el1
	stp x8, x9, [x28], #16

	mrs x10, vbar_el1
	mrs x11, contextidr_el1
	stp x10, x11, [x28], #16

	mrs x12, amair_el1
	mrs x13, cntkctl_el1
	stp x12, x13, [x28], #16

	mrs x14, elr_el1
	mrs x15, spsr_el1
	stp x14, x15, [x28], #16

#if ENABLE_VHE
	b skip_vhe_save

vhe_save:
	mrs x24, MSR_SCTLR_EL12
	mrs x25, MSR_CPACR_EL12
	stp x24, x25, [x28], #16

	mrs x2, MSR_TTBR0_EL12
	mrs x3, MSR_TTBR1_EL12
	stp x2, x3, [x28], #16

	mrs x4, MSR_TCR_EL12
	mrs x5, MSR_ESR_EL12
	stp x4, x5, [x28], #16

	mrs x6, MSR_AFSR0_EL12
	mrs x7, MSR_AFSR1_EL12
	stp x6, x7, [x28], #16

	mrs x8, MSR_FAR_EL12
	mrs x9, MSR_MAIR_EL12
	stp x8, x9, [x28], #16

	mrs x10, MSR_VBAR_EL12
	mrs x11, MSR_CONTEXTIDR_EL12
	stp x10, x11, [x28], #16

	mrs x12, MSR_AMAIR_EL12
	mrs x13, MSR_CNTKCTL_EL12
	stp x12, x13, [x28], #16

	mrs x14, MSR_ELR_EL12
	mrs x15, MSR_SPSR_EL12
	stp x14, x15, [x28], #16

skip_vhe_save:
#endif
	mrs x16, vmpidr_el2
	mrs x17, csselr_el1
	stp x16, x17, [x28], #16

	mrs x18, actlr_el1
	mrs x19, tpidr_el0
	stp x18, x19, [x28], #16

	mrs x20, tpidrro_el0
	mrs x21, tpidr_el1
	stp x20, x21, [x28], #16

	mrs x22, sp_el0
	mrs x23, sp_el1
	stp x22, x23, [x28], #16

	mrs x26, cnthctl_el2
	mrs x27, vttbr_el2
	stp x26, x27, [x28], #16

	mrs x4, mdcr_el2
	mrs x5, mdscr_el1
	stp x4, x5, [x28], #16

	mrs x6, pmccfiltr_el0
	mrs x7, pmcr_el0
	stp x6, x7, [x28], #16

	mrs x8, pmcntenset_el0
	mrs x9, pmintenset_el1
	stp x8, x9, [x28], #16

	mrs x8, par_el1
	str x8, [x28], #8

#if BRANCH_PROTECTION
	add x2, x1, #(VCPU_PAC + 16)
	mrs x10, APIBKEYLO_EL1
	mrs x11, APIBKEYHI_EL1
	stp x10, x11, [x2], #16
	mrs x12, APDAKEYLO_EL1
	mrs x13, APDAKEYHI_EL1
	stp x12, x13, [x2], #16
	mrs x14, APDBKEYLO_EL1
	mrs x15, APDBKEYHI_EL1
	stp x14, x15, [x2], #16
	mrs x16, APGAKEYLO_EL1
	mrs x17, APGAKEYHI_EL1
	stp x16, x17, [x2], #16
#endif
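
	/*
	 * Note: the PAuth block above starts at VCPU_PAC + 16 because the APIA
	 * key was already saved at [vCPU + VCPU_PAC] by
	 * pauth_save_vcpu_and_restore_hyp_key on exception entry.
	 */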

	/* Save GIC registers. */
#if GIC_VERSION == 3 || GIC_VERSION == 4
	/* The offset is too large, so start from a new base. */
	add x2, x1, #VCPU_GIC

	mrs x3, ich_hcr_el2
	mrs x4, icc_sre_el2
	stp x3, x4, [x2, #16 * 0]
#endif

	/* Save floating-point registers. */
	/* Use x28 as the base. */
	add x28, x1, #VCPU_FREGS
	simd_op_vectors stp, x28
	mrs x3, fpsr
	mrs x4, fpcr
	stp x3, x4, [x28]

	/* Save the new vCPU pointer in a non-volatile register. */
	mov x19, x0

	/*
	 * Save peripheral registers, and inform the arch-independent sections
	 * that registers have been saved.
	 */
	mov x0, x1
	bl complete_saving_state
	mov x0, x19

#if SECURE_WORLD == 1

	ldr x1, [x0, #VCPU_VM]
	ldrh w1, [x1, #VM_ID]

	/* Exit to the normal world if the VM is HF_OTHER_WORLD_ID. */
	cmp w1, #HF_OTHER_WORLD_ID
	bne vcpu_restore_all_and_run

	/*
	 * The current vCPU state is saved, so it's now safe to switch to the
	 * normal world.
	 */

other_world_loop:
	/* Check whether SVE is implemented. */
	mrs x0, id_aa64pfr0_el1
	ubfx x0, x0, ID_AA64PFR0_SVE_SHIFT, ID_AA64PFR0_SVE_LENGTH
	cbnz x0, sve_context_restore

	/* Restore the other world SIMD context to the other world VM vCPU. */
	add x18, x19, #VCPU_FREGS
	simd_op_vectors ldp, x18
	ldp x0, x1, [x18]
	msr fpsr, x0
	msr fpcr, x1
	b sve_skip_context_restore

	/* Restore the other world SVE context from the internal buffer. */
sve_context_restore:
	adrp x18, sve_context
	add x18, x18, :lo12:sve_context
	ldr x0, [x19, #VCPU_CPU]
	bl cpu_index
	mov x20, #SVE_CTX_SIZE
	madd x18, x0, x20, x18
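
	/*
	 * x18 now points at this CPU's slot in the buffer:
	 * sve_context + cpu_index * SVE_CTX_SIZE, i.e. one SVE context per
	 * physical CPU.
	 */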

	/* Restore vector registers. */
	sve_op_vectors ldr, x18
	/* Restore the FFR register before the predicates. */
	add x20, x18, #SVE_CTX_FFR
	ldr p0, [x20]
	wrffr p0.b
	/* Restore predicate registers. */
	add x20, x18, #SVE_CTX_PREDICATES
	sve_predicate_op ldr, x20

	/*
	 * Prepare arguments from the other world VM vCPU.
	 * x19 holds the other world VM vCPU pointer.
	 */
sve_skip_context_restore:
	ldp x0, x1, [x19, #VCPU_REGS + 8 * 0]
	ldp x2, x3, [x19, #VCPU_REGS + 8 * 2]
	ldp x4, x5, [x19, #VCPU_REGS + 8 * 4]
	ldp x6, x7, [x19, #VCPU_REGS + 8 * 6]

#if BRANCH_PROTECTION
	/*
	 * EL3 saves the pointer authentication keys when entered via SMC, but
	 * prefer clearing the keys anyway to be on the safe side.
	 */
	msr APIAKEYLO_EL1, xzr
	msr APIAKEYHI_EL1, xzr
	msr APIBKEYLO_EL1, xzr
	msr APIBKEYHI_EL1, xzr
	msr APDAKEYLO_EL1, xzr
	msr APDAKEYHI_EL1, xzr
	msr APDBKEYLO_EL1, xzr
	msr APDBKEYHI_EL1, xzr
	msr APGAKEYLO_EL1, xzr
	msr APGAKEYHI_EL1, xzr
	/* Omit the ISB, as the SMC that follows is a context synchronizing event. */
#endif

	smc #0

	/*
	 * The call to EL3 returned. The first eight GP registers contain an
	 * FF-A call from the physical FF-A instance. Save those arguments to
	 * the other world VM vCPU.
	 * x19 is preserved across the SMC and still holds the other world VM
	 * vCPU pointer.
	 */
	stp x0, x1, [x19, #VCPU_REGS + 8 * 0]
	stp x2, x3, [x19, #VCPU_REGS + 8 * 2]
	stp x4, x5, [x19, #VCPU_REGS + 8 * 4]
	stp x6, x7, [x19, #VCPU_REGS + 8 * 6]

	/* Check whether SVE is implemented. */
	mrs x0, id_aa64pfr0_el1
	ubfx x0, x0, ID_AA64PFR0_SVE_SHIFT, ID_AA64PFR0_SVE_LENGTH
	cbnz x0, sve_context_save

	/* Save the other world SIMD context to the other world VM vCPU. */
	add x18, x19, #VCPU_FREGS
	simd_op_vectors stp, x18
	mrs x0, fpsr
	mrs x1, fpcr
	stp x0, x1, [x18]
	b sve_skip_context_save

	/* Save the other world SVE context to the internal buffer. */
sve_context_save:
	adrp x18, sve_context
	add x18, x18, :lo12:sve_context
	ldr x0, [x19, #VCPU_CPU]
	bl cpu_index
	mov x20, #SVE_CTX_SIZE
	madd x18, x0, x20, x18

	/* Save vector registers. */
	sve_op_vectors str, x18
	/* Save predicate registers. */
	add x20, x18, #SVE_CTX_PREDICATES
	sve_predicate_op str, x20
	/* Save the FFR register after the predicates. */
	add x20, x18, #SVE_CTX_FFR
	rdffr p0.b
	str p0, [x20]

sve_skip_context_save:

#if BRANCH_PROTECTION
	pauth_restore_hypervisor_key x0 x1
#endif

	/*
	 * The stack is at the top and execution can restart straight into C
	 * code. Handle the FF-A call from the other world.
	 */
	mov x0, x19
	bl smc_handler_from_nwd

	/*
	 * If the SMC handler returns NULL, no vCPU has to be resumed: the GP
	 * registers contain a fresh FF-A response or call directed to the
	 * normal world, so loop back and emit an SMC again. Otherwise, restore
	 * the vCPU pointed to by the handler's return value.
	 */
	cbz x0, other_world_loop

#endif

	/* Intentional fallthrough. */
.global vcpu_restore_all_and_run
vcpu_restore_all_and_run:
	/* Update the pointer to the current vCPU. */
	msr tpidr_el2, x0

	/* Restore peripheral registers. */
	mov x19, x0
	bl begin_restoring_state
	mov x0, x19

	/* Restore floating-point registers. */
	add x2, x0, #VCPU_FREGS
	simd_op_vectors ldp, x2
	ldp x3, x4, [x2]
	msr fpsr, x3

	/*
	 * Only restore FPCR if it has changed, to avoid an expensive
	 * self-synchronising operation where possible.
	 */
	mrs x5, fpcr
	cmp x5, x4
	b.eq vcpu_restore_lazy_and_run
	msr fpcr, x4
	/* Intentional fallthrough. */

vcpu_restore_lazy_and_run:
	/* Restore lazy registers. */
	/* Use x28 as the base. */
	add x28, x0, #VCPU_LAZY

#if ENABLE_VHE
	/*
	 * Check whether VHE is implemented (ID_AA64MMFR1_EL1.VH, bits [11:8]),
	 * equivalent to has_vhe_support().
	 */
	mrs x19, id_aa64mmfr1_el1
	tst x19, #0xf00
	b.ne vhe_restore
#endif

	ldp x24, x25, [x28], #16
	msr sctlr_el1, x24
	msr cpacr_el1, x25

	ldp x2, x3, [x28], #16
	msr ttbr0_el1, x2
	msr ttbr1_el1, x3

	ldp x4, x5, [x28], #16
	msr tcr_el1, x4
	msr esr_el1, x5

	ldp x6, x7, [x28], #16
	msr afsr0_el1, x6
	msr afsr1_el1, x7

	ldp x8, x9, [x28], #16
	msr far_el1, x8
	msr mair_el1, x9

	ldp x10, x11, [x28], #16
	msr vbar_el1, x10
	msr contextidr_el1, x11

	ldp x12, x13, [x28], #16
	msr amair_el1, x12
	msr cntkctl_el1, x13

	ldp x14, x15, [x28], #16
	msr elr_el1, x14
	msr spsr_el1, x15

#if ENABLE_VHE
	b skip_vhe_restore

vhe_restore:
	ldp x24, x25, [x28], #16
	msr MSR_SCTLR_EL12, x24
	msr MSR_CPACR_EL12, x25

	ldp x2, x3, [x28], #16
	msr MSR_TTBR0_EL12, x2
	msr MSR_TTBR1_EL12, x3

	ldp x4, x5, [x28], #16
	msr MSR_TCR_EL12, x4
	msr MSR_ESR_EL12, x5

	ldp x6, x7, [x28], #16
	msr MSR_AFSR0_EL12, x6
	msr MSR_AFSR1_EL12, x7

	ldp x8, x9, [x28], #16
	msr MSR_FAR_EL12, x8
	msr MSR_MAIR_EL12, x9

	ldp x10, x11, [x28], #16
	msr MSR_VBAR_EL12, x10
	msr MSR_CONTEXTIDR_EL12, x11

	ldp x12, x13, [x28], #16
	msr MSR_AMAIR_EL12, x12
	msr MSR_CNTKCTL_EL12, x13

	ldp x14, x15, [x28], #16
	msr MSR_ELR_EL12, x14
	msr MSR_SPSR_EL12, x15

skip_vhe_restore:
#endif
	ldp x16, x17, [x28], #16
	msr vmpidr_el2, x16
	msr csselr_el1, x17

	ldp x18, x19, [x28], #16
	msr actlr_el1, x18
	msr tpidr_el0, x19

	ldp x20, x21, [x28], #16
	msr tpidrro_el0, x20
	msr tpidr_el1, x21

	ldp x22, x23, [x28], #16
	msr sp_el0, x22
	msr sp_el1, x23

	ldp x26, x27, [x28], #16
	msr cnthctl_el2, x26
	msr vttbr_el2, x27

#if SECURE_WORLD == 1
	msr MSR_VSTTBR_EL2, x27
#endif
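
	/*
	 * In the secure world the same saved value also seeds VSTTBR_EL2, the
	 * secure EL1&0 stage-2 translation table base.
	 */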

	ldp x4, x5, [x28], #16
	msr mdcr_el2, x4
	msr mdscr_el1, x5

	ldp x6, x7, [x28], #16
	msr pmccfiltr_el0, x6
	msr pmcr_el0, x7

	ldp x8, x9, [x28], #16
	/*
	 * NOTE: Writing 0s to pmcntenset_el0's bits does not alter their
	 * values. To reset them, clear the register by writing to
	 * pmcntenclr_el0.
	 */
	mov x27, #0xffffffff
	msr pmcntenclr_el0, x27
	msr pmcntenset_el0, x8

	/*
	 * NOTE: Writing 0s to pmintenset_el1's bits does not alter their
	 * values. To reset them, clear the register by writing to
	 * pmintenclr_el1.
	 */
	msr pmintenclr_el1, x27
	msr pmintenset_el1, x9

	ldr x8, [x28], #8
	msr par_el1, x8

#if BRANCH_PROTECTION
	add x2, x0, #(VCPU_PAC + 16)
	ldp x10, x11, [x2], #16
	msr APIBKEYLO_EL1, x10
	msr APIBKEYHI_EL1, x11
	ldp x12, x13, [x2], #16
	msr APDAKEYLO_EL1, x12
	msr APDAKEYHI_EL1, x13
	ldp x14, x15, [x2], #16
	msr APDBKEYLO_EL1, x14
	msr APDBKEYHI_EL1, x15
	ldp x16, x17, [x2], #16
	msr APGAKEYLO_EL1, x16
	msr APGAKEYHI_EL1, x17
#endif

	/* Restore GIC registers. */
#if GIC_VERSION == 3 || GIC_VERSION == 4
	/* The offset is too large, so start from a new base. */
	add x2, x0, #VCPU_GIC

	ldp x3, x4, [x2, #16 * 0]
	msr ich_hcr_el2, x3
	msr icc_sre_el2, x4
#endif

	/*
	 * If a different vCPU is being run on this physical CPU than the last
	 * one which was run for this VM, invalidate the TLB. This must be
	 * called after vttbr_el2 has been updated, so that we have the page
	 * table and VMID of the vCPU to which we are switching.
	 */
	mov x19, x0
	bl maybe_invalidate_tlb
	mov x0, x19

	/* Intentional fallthrough. */

vcpu_restore_nonvolatile_and_run:
	/* Restore non-volatile registers. */
	ldp x19, x20, [x0, #VCPU_REGS + 8 * 19]
	ldp x21, x22, [x0, #VCPU_REGS + 8 * 21]
	ldp x23, x24, [x0, #VCPU_REGS + 8 * 23]
	ldp x25, x26, [x0, #VCPU_REGS + 8 * 25]
	ldp x27, x28, [x0, #VCPU_REGS + 8 * 27]

	/* Intentional fallthrough. */
/**
 * Restore the volatile registers and run the given vCPU.
 *
 * x0 is a pointer to the target vCPU.
 */
vcpu_restore_volatile_and_run:
#if BRANCH_PROTECTION
	add x1, x0, #VCPU_PAC
	ldp x1, x2, [x1]

	/* Restore the vCPU's APIA key. */
	msr APIAKEYLO_EL1, x1
	msr APIAKEYHI_EL1, x2
	/* Omit the ISB, as the ERET that follows is a context synchronizing event. */
#endif

	ldp x4, x5, [x0, #VCPU_REGS + 8 * 4]
	ldp x6, x7, [x0, #VCPU_REGS + 8 * 6]
	ldp x8, x9, [x0, #VCPU_REGS + 8 * 8]
	ldp x10, x11, [x0, #VCPU_REGS + 8 * 10]
	ldp x12, x13, [x0, #VCPU_REGS + 8 * 12]
	ldp x14, x15, [x0, #VCPU_REGS + 8 * 14]
	ldp x16, x17, [x0, #VCPU_REGS + 8 * 16]
	ldr x18, [x0, #VCPU_REGS + 8 * 18]
	ldp x29, x30, [x0, #VCPU_REGS + 8 * 29]

	/* Restore the return address & mode. */
	ldp x1, x2, [x0, #VCPU_REGS + 8 * 31]
	msr elr_el2, x1
	msr spsr_el2, x2

	ldr x1, [x0, #VCPU_REGS + 8 * 33]
	msr hcr_el2, x1
	isb

	ldr x1, [x0, #VCPU_REGS + 8 * 34]
	msr ttbr0_el2, x1
	isb
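
	/*
	 * The two ISBs above ensure the vCPU's HCR_EL2 and TTBR0_EL2 values
	 * take effect before the remaining loads and the eret below.
	 */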

	/* Restore x0..x3, which we used as scratch registers above. */
	ldp x2, x3, [x0, #VCPU_REGS + 8 * 2]
	ldp x0, x1, [x0, #VCPU_REGS + 8 * 0]
	eret_with_sb
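
	/*
	 * eret_with_sb (from exception_macros.S) is assumed to expand to an
	 * eret followed by a speculation barrier, preventing straight-line
	 * speculation past the exception return.
	 */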

#if ENABLE_VHE
enable_vhe_tge:
	/**
	 * Switch to host mode ({E2H, TGE} = {1, 1}) when VHE is enabled.
	 * Note that E2H is always set when VHE is enabled. On entry, x1 holds
	 * the HCR_EL2 value saved by save_volatile_to_vcpu; TGE (bit 27) is
	 * set in it here.
	 */
	mrs x0, id_aa64mmfr1_el1
	tst x0, #0xf00
	b.eq 1f
	orr x1, x1, #(1 << 27)
	msr hcr_el2, x1
	isb

	/**
	 * Switch to the host page tables (ASID 0 tables). The first field of
	 * arch_mm_config holds the host ttbr0_el2 value, hence the single ldr.
	 */
	adrp x0, arch_mm_config
	add x0, x0, :lo12:arch_mm_config
	ldr x0, [x0]
	msr ttbr0_el2, x0
	isb
1:
	ret
#endif