#include "offsets.h"
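
/*
 * offsets.h is assumed to define VCPU_REGS and VCPU_LAZY as byte offsets into
 * the per-vcpu structure: VCPU_REGS appears to locate an array of 8-byte
 * slots indexed by general-purpose register number (x0..x30), with the saved
 * ELR_EL2/SPSR_EL2 pair in slots 31 and 32, and VCPU_LAZY the block of
 * lazily-saved system registers. This description is inferred from how the
 * offsets are used below, not from offsets.h itself.
 */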

.section .text.vector_table_el2, "ax"
.global vector_table_el2
.balign 0x800
vector_table_el2:
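        /*
         * The AArch64 vector table has 16 entries of 0x80 bytes each and must
         * itself be 2KB-aligned, hence the .balign directives. The four
         * groups of four entries are: current EL using SP_EL0, current EL
         * using SP_ELx, lower EL in AArch64, and lower EL in AArch32, each
         * with synchronous, IRQ, FIQ and SError entries in that order.
         */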
        /* sync_cur_sp0 */
        b .

.balign 0x80
        /* irq_cur_sp0 */
        b irq_current

.balign 0x80
        /* fiq_cur_sp0 */
        b .

.balign 0x80
        /* serr_cur_sp0 */
        b .

.balign 0x80
        /* sync_cur_spx */
        mrs x0, esr_el2
        mrs x1, elr_el2
        b sync_current_exception

.balign 0x80
        /* irq_cur_spx */
        b irq_current

.balign 0x80
        /* fiq_cur_spx */
        b .

.balign 0x80
        /* serr_cur_spx */
        b .

.balign 0x80
        /* sync_lower_64 */

        /*
         * Save x18 since we're about to clobber it. We subtract 16 instead of
         * 8 from the stack pointer to keep it 16-byte aligned.
         */
        str x18, [sp, #-16]!

        /* Extract the exception class (EC) from the exception syndrome register. */
        mrs x18, esr_el2
        lsr x18, x18, #26

        /*
         * Take the slow path if the exception is not due to an HVC
         * instruction (EC 0x16).
         */
        sub x18, x18, #0x16
        cbnz x18, slow_sync_lower_64

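        /*
         * Fast path for HVC: the vcpu's x0..x7 are still live in the argument
         * registers. hvc_handler appears to return the value destined for the
         * calling vcpu in x0 and, in x1, a pointer to the vcpu to switch to
         * (zero to keep running the current one); this convention is inferred
         * from the code below, not from the C declaration.
         */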
        /*
         * Save x29 and x30, which are not saved by the callee, then call the
         * HVC handler.
         */
        stp x29, x30, [sp, #-16]!
        bl hvc_handler
        ldp x29, x30, [sp], #16
        cbnz x1, sync_lower_64_switch

        /* Zero out all volatile registers (except x0) and return. */
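        /*
         * A pair of zeroes is pushed once and then reloaded from the same
         * stack slot to clear x1..x16 two registers at a time; the final ldp
         * pops the pair so the stack pointer ends up where it started.
         */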
        stp xzr, xzr, [sp, #-16]!
        ldp x1, x2, [sp]
        ldp x3, x4, [sp]
        ldp x5, x6, [sp]
        ldp x7, x8, [sp]
        ldp x9, x10, [sp]
        ldp x11, x12, [sp]
        ldp x13, x14, [sp]
        ldp x15, x16, [sp], #16
        mov x17, xzr

        /* Restore x18, which was saved on the stack. */
        ldr x18, [sp], #16
        eret

.balign 0x80
        /* irq_lower_64 */

        /*
         * Save x0 since we're about to clobber it. Subtract 16 rather than 8
         * so that the stack pointer stays 16-byte aligned.
         */
        str x0, [sp, #-16]!

        /* Get the current vcpu. */
        mrs x0, tpidr_el2

        /* Save volatile registers. */
        add x0, x0, #VCPU_REGS
        stp x2, x3, [x0, #8 * 2]
        stp x4, x5, [x0, #8 * 4]
        stp x6, x7, [x0, #8 * 6]
        stp x8, x9, [x0, #8 * 8]
        stp x10, x11, [x0, #8 * 10]
        stp x12, x13, [x0, #8 * 12]
        stp x14, x15, [x0, #8 * 14]
        stp x16, x17, [x0, #8 * 16]
        str x18, [x0, #8 * 18]
        stp x29, x30, [x0, #8 * 29]

        /* x0 was saved on the stack; store it together with x1. */
        ldr x2, [sp], #16
        stp x2, x1, [x0, #8 * 0]

        /* Save return address & mode. */
        mrs x1, elr_el2
        mrs x2, spsr_el2
        stp x1, x2, [x0, #8 * 31]

        /* Call the C handler. */
        bl irq_lower

        mrs x1, tpidr_el2
        cbnz x0, vcpu_switch

        /* vcpu is not changing. */
        add x0, x1, #VCPU_REGS
        b vcpu_restore_volatile_and_run

.balign 0x80
        /* fiq_lower_64 */
        b .

.balign 0x80
        /* serr_lower_64 */
        b .

.balign 0x80
        /* sync_lower_32 */
        b .

.balign 0x80
        /* irq_lower_32 */
        b .

.balign 0x80
        /* fiq_lower_32 */
        b .

.balign 0x80
        /* serr_lower_32 */
        b .

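/*
 * Slow path for synchronous exceptions from a lower EL: save all volatile
 * state into the current vcpu and let the C handler decide which vcpu to run
 * next.
 */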
slow_sync_lower_64:
        /* Get the current vcpu. */
        mrs x18, tpidr_el2

        /* Save volatile registers. */
        add x18, x18, #VCPU_REGS
        stp x0, x1, [x18, #8 * 0]
        stp x2, x3, [x18, #8 * 2]
        stp x4, x5, [x18, #8 * 4]
        stp x6, x7, [x18, #8 * 6]
        stp x8, x9, [x18, #8 * 8]
        stp x10, x11, [x18, #8 * 10]
        stp x12, x13, [x18, #8 * 12]
        stp x14, x15, [x18, #8 * 14]
        stp x16, x17, [x18, #8 * 16]
        stp x29, x30, [x18, #8 * 29]

        /* x18 was saved on the stack, so we move it to the vcpu regs buffer. */
        ldr x0, [sp], #16
        str x0, [x18, #8 * 18]

        /* Save return address & mode. */
        mrs x1, elr_el2
        mrs x2, spsr_el2
        stp x1, x2, [x18, #8 * 31]

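        /*
         * sync_lower_exception is assumed to take the ESR_EL2 value as its
         * argument and to return, in x0, the vcpu to switch to, or zero to
         * stay on the current vcpu.
         */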
        /* Read the syndrome register and call the C handler. */
        mrs x0, esr_el2
        bl sync_lower_exception

        /* Switch to the vcpu returned by sync_lower_exception. */
        mrs x1, tpidr_el2
        cbnz x0, vcpu_switch

        /* vcpu is not changing. */
        add x0, x1, #VCPU_REGS
        b vcpu_restore_volatile_and_run

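/*
 * Taken when hvc_handler requests a vcpu switch: at this point x0 is assumed
 * to hold the value to hand back to the calling vcpu (it is stored into that
 * vcpu's x0 slot below) and x1 the vcpu to switch to.
 */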
sync_lower_64_switch:
        /* We'll have to switch, so save volatile state before doing so. */
        mrs x18, tpidr_el2

        /* Store zeroes in volatile register storage, except x0. */
        add x18, x18, #VCPU_REGS
        stp x0, xzr, [x18, #8 * 0]
        stp xzr, xzr, [x18, #8 * 2]
        stp xzr, xzr, [x18, #8 * 4]
        stp xzr, xzr, [x18, #8 * 6]
        stp xzr, xzr, [x18, #8 * 8]
        stp xzr, xzr, [x18, #8 * 10]
        stp xzr, xzr, [x18, #8 * 12]
        stp xzr, xzr, [x18, #8 * 14]
        stp xzr, xzr, [x18, #8 * 16]
        stp x29, x30, [x18, #8 * 29]

        /* x18 was saved on the stack, so we move it to the vcpu regs buffer. */
        ldr x2, [sp], #16
        str x2, [x18, #8 * 18]

        /* Save return address & mode. */
        mrs x2, elr_el2
        mrs x3, spsr_el2
        stp x2, x3, [x18, #8 * 31]

        /* Set up the arguments for vcpu_switch: x0 = new vcpu, x1 = old vcpu. */
        mov x0, x1
        sub x1, x18, #VCPU_REGS

        /* Intentional fallthrough. */
/**
 * Switch to a new vcpu.
 *
 * All volatile registers from the old vcpu have already been saved. We need
 * to save only the non-volatile registers and the lazy system-register state
 * of the old vcpu, then restore everything for the new one.
 *
 * x0 is a pointer to the new vcpu.
 * x1 is a pointer to the old vcpu.
 */
vcpu_switch:
        /* Save non-volatile registers. */
        add x1, x1, #VCPU_REGS
        stp x19, x20, [x1, #8 * 19]
        stp x21, x22, [x1, #8 * 21]
        stp x23, x24, [x1, #8 * 23]
        stp x25, x26, [x1, #8 * 25]
        stp x27, x28, [x1, #8 * 27]

        /* Save lazy state. */
        add x1, x1, #(VCPU_LAZY - VCPU_REGS)

        mrs x24, vmpidr_el2
        mrs x25, csselr_el1
        stp x24, x25, [x1, #16 * 0]

        mrs x2, sctlr_el1
        mrs x3, actlr_el1
        stp x2, x3, [x1, #16 * 1]

        mrs x4, cpacr_el1
        mrs x5, ttbr0_el1
        stp x4, x5, [x1, #16 * 2]

        mrs x6, ttbr1_el1
        mrs x7, tcr_el1
        stp x6, x7, [x1, #16 * 3]

        mrs x8, esr_el1
        mrs x9, afsr0_el1
        stp x8, x9, [x1, #16 * 4]

        mrs x10, afsr1_el1
        mrs x11, far_el1
        stp x10, x11, [x1, #16 * 5]

        mrs x12, mair_el1
        mrs x13, vbar_el1
        stp x12, x13, [x1, #16 * 6]

        mrs x14, contextidr_el1
        mrs x15, tpidr_el0
        stp x14, x15, [x1, #16 * 7]

        mrs x16, tpidrro_el0
        mrs x17, tpidr_el1
        stp x16, x17, [x1, #16 * 8]

        mrs x18, amair_el1
        mrs x19, cntkctl_el1
        stp x18, x19, [x1, #16 * 9]

        mrs x20, sp_el0
        mrs x21, sp_el1
        stp x20, x21, [x1, #16 * 10]

        mrs x22, par_el1
        mrs x23, hcr_el2
        stp x22, x23, [x1, #16 * 11]

        mrs x24, cptr_el2
        mrs x25, cnthctl_el2
        stp x24, x25, [x1, #16 * 12]

        mrs x26, vttbr_el2
        str x26, [x1, #16 * 13]

        /* Intentional fallthrough. */

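/**
 * Restore the complete state of a vcpu (lazy system registers, non-volatile
 * registers and, by falling through below, volatile registers and return
 * state) and run it. Exported, presumably so that C code can start running a
 * vcpu.
 *
 * x0 is a pointer to the vcpu to run.
 */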
.globl vcpu_restore_all_and_run
vcpu_restore_all_and_run:
        /* Update pointer to current vcpu. */
        msr tpidr_el2, x0

        /* Get a pointer to the lazy registers. */
        add x0, x0, #VCPU_LAZY

        ldp x24, x25, [x0, #16 * 0]
        msr vmpidr_el2, x24
        msr csselr_el1, x25

        ldp x2, x3, [x0, #16 * 1]
        msr sctlr_el1, x2
        msr actlr_el1, x3

        ldp x4, x5, [x0, #16 * 2]
        msr cpacr_el1, x4
        msr ttbr0_el1, x5

        ldp x6, x7, [x0, #16 * 3]
        msr ttbr1_el1, x6
        msr tcr_el1, x7

        ldp x8, x9, [x0, #16 * 4]
        msr esr_el1, x8
        msr afsr0_el1, x9

        ldp x10, x11, [x0, #16 * 5]
        msr afsr1_el1, x10
        msr far_el1, x11

        ldp x12, x13, [x0, #16 * 6]
        msr mair_el1, x12
        msr vbar_el1, x13

        ldp x14, x15, [x0, #16 * 7]
        msr contextidr_el1, x14
        msr tpidr_el0, x15

        ldp x16, x17, [x0, #16 * 8]
        msr tpidrro_el0, x16
        msr tpidr_el1, x17

        ldp x18, x19, [x0, #16 * 9]
        msr amair_el1, x18
        msr cntkctl_el1, x19

        ldp x20, x21, [x0, #16 * 10]
        msr sp_el0, x20
        msr sp_el1, x21

        ldp x22, x23, [x0, #16 * 11]
        msr par_el1, x22
        msr hcr_el2, x23

        ldp x24, x25, [x0, #16 * 12]
        msr cptr_el2, x24
        msr cnthctl_el2, x25

        ldr x26, [x0, #16 * 13]
        msr vttbr_el2, x26

        /* Restore non-volatile registers. */
        add x0, x0, #(VCPU_REGS - VCPU_LAZY)

        ldp x19, x20, [x0, #8 * 19]
        ldp x21, x22, [x0, #8 * 21]
        ldp x23, x24, [x0, #8 * 23]
        ldp x25, x26, [x0, #8 * 25]
        ldp x27, x28, [x0, #8 * 27]

        /* Intentional fallthrough. */

/**
 * Restore volatile registers and run the given vcpu.
 *
 * x0 is a pointer to the volatile registers of the target vcpu.
 */
vcpu_restore_volatile_and_run:
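        /*
         * Only the volatile registers and the return state are reloaded here:
         * x19..x28 are callee-saved, so they still hold the vcpu's values on
         * the no-switch paths and have already been restored by
         * vcpu_restore_all_and_run on the switch paths.
         */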
        ldp x4, x5, [x0, #8 * 4]
        ldp x6, x7, [x0, #8 * 6]
        ldp x8, x9, [x0, #8 * 8]
        ldp x10, x11, [x0, #8 * 10]
        ldp x12, x13, [x0, #8 * 12]
        ldp x14, x15, [x0, #8 * 14]
        ldp x16, x17, [x0, #8 * 16]
        ldr x18, [x0, #8 * 18]
        ldp x29, x30, [x0, #8 * 29]

        /* Restore return address & mode. */
        ldp x1, x2, [x0, #8 * 31]
        msr elr_el2, x1
        msr spsr_el2, x2

        /* Restore x0..x3, which we have used as scratch before. */
        ldp x2, x3, [x0, #8 * 2]
        ldp x0, x1, [x0, #8 * 0]
        eret