/*
 * Copyright 2018 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "offsets.h"

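/*
 * EL2 exception vector table and vcpu context switching code. The VCPU_REGS
 * and VCPU_LAZY offsets used below are defined in offsets.h and describe the
 * layout of the per-vcpu register save areas.
 */
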
.section .text.vector_table_el2, "ax"
.global vector_table_el2
.balign 0x800
vector_table_el2:
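        /*
         * The AArch64 vector table has 16 entries of 0x80 bytes each, grouped
         * in fours: current EL using SP_EL0, current EL using SP_ELx, lower EL
         * in AArch64 state, and lower EL in AArch32 state. Within each group
         * the order is synchronous, IRQ, FIQ, SError. The table itself must be
         * aligned to 0x800 bytes.
         */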
        /* sync_cur_sp0 */
        b .

.balign 0x80
        /* irq_cur_sp0 */
        b irq_current

.balign 0x80
        /* fiq_cur_sp0 */
        b .

.balign 0x80
        /* serr_cur_sp0 */
        b .

.balign 0x80
        /* sync_cur_spx */
        mrs x0, esr_el2
        mrs x1, elr_el2
        b sync_current_exception

.balign 0x80
        /* irq_cur_spx */
        b irq_current

.balign 0x80
        /* fiq_cur_spx */
        b .

.balign 0x80
        /* serr_cur_spx */
        b .

.balign 0x80
        /* sync_lower_64 */

        /*
         * Save x18 since we're about to clobber it. We subtract 16 instead of
         * 8 from the stack pointer to keep it 16-byte aligned.
         */
        str x18, [sp, #-16]!

        /* Extract the exception class (EC) from the exception syndrome register. */
        mrs x18, esr_el2
        lsr x18, x18, #26

        /* Take the slow path if the exception is not due to an HVC instruction. */
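        /* The EC value 0x16 is an HVC instruction executed in AArch64 state. */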
        sub x18, x18, #0x16
        cbnz x18, slow_sync_lower_64

        /*
         * Save x29 and x30, which are not saved by the callee, then jump to
         * the HVC handler.
         */
        stp x29, x30, [sp, #-16]!
        bl hvc_handler
        ldp x29, x30, [sp], #16
        cbnz x1, sync_lower_64_switch

        /* Zero out all volatile registers (except x0) and return. */
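        /*
         * Push a pair of zeroes onto the stack, then load that same pair
         * repeatedly to clear x1-x16; the final load pops the zeroes back
         * off the stack.
         */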
        stp xzr, xzr, [sp, #-16]!
        ldp x1, x2, [sp]
        ldp x3, x4, [sp]
        ldp x5, x6, [sp]
        ldp x7, x8, [sp]
        ldp x9, x10, [sp]
        ldp x11, x12, [sp]
        ldp x13, x14, [sp]
        ldp x15, x16, [sp], #16
        mov x17, xzr

        /* Restore x18, which was saved on the stack. */
        ldr x18, [sp], #16
        eret

.balign 0x80
        /* irq_lower_64 */

        /*
         * Save x0 since we're about to clobber it. Subtract 16 rather than 8
         * so that the stack pointer stays 16-byte aligned.
         */
        str x0, [sp, #-16]!

        /* Get the current vcpu. */
        mrs x0, tpidr_el2

        /* Save volatile registers. */
        add x0, x0, #VCPU_REGS
        stp x2, x3, [x0, #8 * 2]
        stp x4, x5, [x0, #8 * 4]
        stp x6, x7, [x0, #8 * 6]
        stp x8, x9, [x0, #8 * 8]
        stp x10, x11, [x0, #8 * 10]
        stp x12, x13, [x0, #8 * 12]
        stp x14, x15, [x0, #8 * 14]
        stp x16, x17, [x0, #8 * 16]
        str x18, [x0, #8 * 18]
        stp x29, x30, [x0, #8 * 29]

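        /*
         * x1 still holds its original value; reload the original x0 from the
         * stack and store both into the first two slots of the register
         * buffer.
         */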
        ldr x2, [sp], #16
        stp x2, x1, [x0, #8 * 0]

        /* Save return address & mode. */
        mrs x1, elr_el2
        mrs x2, spsr_el2
        stp x1, x2, [x0, #8 * 31]

        /* Call C handler. */
        bl irq_lower

        mrs x1, tpidr_el2
        cbnz x0, vcpu_switch

        /* vcpu is not changing. */
        add x0, x1, #VCPU_REGS
        b vcpu_restore_volatile_and_run

.balign 0x80
        /* fiq_lower_64 */
        b .

.balign 0x80
        /* serr_lower_64 */
        b .

.balign 0x80
        /* sync_lower_32 */
        b .

.balign 0x80
        /* irq_lower_32 */
        b .

.balign 0x80
        /* fiq_lower_32 */
        b .

.balign 0x80
        /* serr_lower_32 */
        b .

slow_sync_lower_64:
        /* Get the current vcpu. */
        mrs x18, tpidr_el2

        /* Save volatile registers. */
        add x18, x18, #VCPU_REGS
        stp x0, x1, [x18, #8 * 0]
        stp x2, x3, [x18, #8 * 2]
        stp x4, x5, [x18, #8 * 4]
        stp x6, x7, [x18, #8 * 6]
        stp x8, x9, [x18, #8 * 8]
        stp x10, x11, [x18, #8 * 10]
        stp x12, x13, [x18, #8 * 12]
        stp x14, x15, [x18, #8 * 14]
        stp x16, x17, [x18, #8 * 16]
        stp x29, x30, [x18, #8 * 29]

        /* x18 was saved on the stack, so move it to the vcpu regs buffer. */
        ldr x0, [sp], #16
        str x0, [x18, #8 * 18]

        /* Save return address & mode. */
        mrs x1, elr_el2
        mrs x2, spsr_el2
        stp x1, x2, [x18, #8 * 31]

        /* Read the syndrome register and call the C handler. */
        mrs x0, esr_el2
        bl sync_lower_exception

        /* Switch to the vcpu returned by sync_lower_exception. */
        mrs x1, tpidr_el2
        cbnz x0, vcpu_switch

        /* vcpu is not changing. */
        add x0, x1, #VCPU_REGS
        b vcpu_restore_volatile_and_run

sync_lower_64_switch:
        /* We'll have to switch, so save volatile state before doing so. */
        mrs x18, tpidr_el2

        /* Store zeroes in volatile register storage, except x0. */
        add x18, x18, #VCPU_REGS
        stp x0, xzr, [x18, #8 * 0]
        stp xzr, xzr, [x18, #8 * 2]
        stp xzr, xzr, [x18, #8 * 4]
        stp xzr, xzr, [x18, #8 * 6]
        stp xzr, xzr, [x18, #8 * 8]
        stp xzr, xzr, [x18, #8 * 10]
        stp xzr, xzr, [x18, #8 * 12]
        stp xzr, xzr, [x18, #8 * 14]
        stp xzr, xzr, [x18, #8 * 16]
        stp x29, x30, [x18, #8 * 29]

        /* x18 was saved on the stack, so move it to the vcpu regs buffer. */
        ldr x2, [sp], #16
        str x2, [x18, #8 * 18]

        /* Save return address & mode. */
        mrs x2, elr_el2
        mrs x3, spsr_el2
        stp x2, x3, [x18, #8 * 31]

        /* Save lazy state, then switch to the new vcpu. */
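        /*
         * hvc_handler returned the new vcpu in x1. vcpu_switch expects the new
         * vcpu in x0 and the old one in x1, so recover the old vcpu pointer
         * from x18, which currently points at its register buffer.
         */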
        mov x0, x1
        sub x1, x18, #VCPU_REGS

        /* Intentional fallthrough. */
/**
 * Switch to a new vcpu.
 *
 * All volatile registers from the old vcpu have already been saved. We only
 * need to save the non-volatile ones from the old vcpu, and restore everything
 * from the new one.
 *
 * x0 is a pointer to the new vcpu.
 * x1 is a pointer to the old vcpu.
 */
vcpu_switch:
        /* Save non-volatile registers. */
        add x1, x1, #VCPU_REGS
        stp x19, x20, [x1, #8 * 19]
        stp x21, x22, [x1, #8 * 21]
        stp x23, x24, [x1, #8 * 23]
        stp x25, x26, [x1, #8 * 25]
        stp x27, x28, [x1, #8 * 27]

        /* Save lazy state. */
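        /*
         * The lazy area holds EL1 and EL2 system registers that only need to
         * be saved and restored when switching between vcpus, not on every
         * exception.
         */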
        add x1, x1, #(VCPU_LAZY - VCPU_REGS)

        mrs x24, vmpidr_el2
        mrs x25, csselr_el1
        stp x24, x25, [x1, #16 * 0]

        mrs x2, sctlr_el1
        mrs x3, actlr_el1
        stp x2, x3, [x1, #16 * 1]

        mrs x4, cpacr_el1
        mrs x5, ttbr0_el1
        stp x4, x5, [x1, #16 * 2]

        mrs x6, ttbr1_el1
        mrs x7, tcr_el1
        stp x6, x7, [x1, #16 * 3]

        mrs x8, esr_el1
        mrs x9, afsr0_el1
        stp x8, x9, [x1, #16 * 4]

        mrs x10, afsr1_el1
        mrs x11, far_el1
        stp x10, x11, [x1, #16 * 5]

        mrs x12, mair_el1
        mrs x13, vbar_el1
        stp x12, x13, [x1, #16 * 6]

        mrs x14, contextidr_el1
        mrs x15, tpidr_el0
        stp x14, x15, [x1, #16 * 7]

        mrs x16, tpidrro_el0
        mrs x17, tpidr_el1
        stp x16, x17, [x1, #16 * 8]

        mrs x18, amair_el1
        mrs x19, cntkctl_el1
        stp x18, x19, [x1, #16 * 9]

        mrs x20, sp_el0
        mrs x21, sp_el1
        stp x20, x21, [x1, #16 * 10]

        mrs x22, par_el1
        mrs x23, hcr_el2
        stp x22, x23, [x1, #16 * 11]

        mrs x24, cptr_el2
        mrs x25, cnthctl_el2
        stp x24, x25, [x1, #16 * 12]

        mrs x26, vttbr_el2
        str x26, [x1, #16 * 13]

        /* Intentional fallthrough. */

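/**
 * Restore the full state of a vcpu (lazy system registers, non-volatile and
 * volatile registers) and run it.
 *
 * x0 is a pointer to the vcpu to be run.
 */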
.globl vcpu_restore_all_and_run
vcpu_restore_all_and_run:
        /* Update pointer to current vcpu. */
        msr tpidr_el2, x0

        /* Get a pointer to the lazy registers. */
        add x0, x0, #VCPU_LAZY

        ldp x24, x25, [x0, #16 * 0]
        msr vmpidr_el2, x24
        msr csselr_el1, x25

        ldp x2, x3, [x0, #16 * 1]
        msr sctlr_el1, x2
        msr actlr_el1, x3

        ldp x4, x5, [x0, #16 * 2]
        msr cpacr_el1, x4
        msr ttbr0_el1, x5

        ldp x6, x7, [x0, #16 * 3]
        msr ttbr1_el1, x6
        msr tcr_el1, x7

        ldp x8, x9, [x0, #16 * 4]
        msr esr_el1, x8
        msr afsr0_el1, x9

        ldp x10, x11, [x0, #16 * 5]
        msr afsr1_el1, x10
        msr far_el1, x11

        ldp x12, x13, [x0, #16 * 6]
        msr mair_el1, x12
        msr vbar_el1, x13

        ldp x14, x15, [x0, #16 * 7]
        msr contextidr_el1, x14
        msr tpidr_el0, x15

        ldp x16, x17, [x0, #16 * 8]
        msr tpidrro_el0, x16
        msr tpidr_el1, x17

        ldp x18, x19, [x0, #16 * 9]
        msr amair_el1, x18
        msr cntkctl_el1, x19

        ldp x20, x21, [x0, #16 * 10]
        msr sp_el0, x20
        msr sp_el1, x21

        ldp x22, x23, [x0, #16 * 11]
        msr par_el1, x22
        msr hcr_el2, x23

        ldp x24, x25, [x0, #16 * 12]
        msr cptr_el2, x24
        msr cnthctl_el2, x25

        ldr x26, [x0, #16 * 13]
        msr vttbr_el2, x26

        /* Restore non-volatile registers. */
        add x0, x0, #(VCPU_REGS - VCPU_LAZY)

        ldp x19, x20, [x0, #8 * 19]
        ldp x21, x22, [x0, #8 * 21]
        ldp x23, x24, [x0, #8 * 23]
        ldp x25, x26, [x0, #8 * 25]
        ldp x27, x28, [x0, #8 * 27]

        /* Intentional fallthrough. */

/**
 * Restore volatile registers and run the given vcpu.
 *
 * x0 is a pointer to the volatile registers of the target vcpu.
 */
vcpu_restore_volatile_and_run:
        ldp x4, x5, [x0, #8 * 4]
        ldp x6, x7, [x0, #8 * 6]
        ldp x8, x9, [x0, #8 * 8]
        ldp x10, x11, [x0, #8 * 10]
        ldp x12, x13, [x0, #8 * 12]
        ldp x14, x15, [x0, #8 * 14]
        ldp x16, x17, [x0, #8 * 16]
        ldr x18, [x0, #8 * 18]
        ldp x29, x30, [x0, #8 * 29]

        /* Restore return address & mode. */
        ldp x1, x2, [x0, #8 * 31]
        msr elr_el2, x1
        msr spsr_el2, x2

        /* Restore x0-x3, which were used as scratch above. */
        ldp x2, x3, [x0, #8 * 2]
        ldp x0, x1, [x0, #8 * 0]
        eret