#include "offsets.h"
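
/*
 * The constants CPU_CURRENT, VCPU_REGS and VCPU_LAZY come from offsets.h.
 * This file only assumes that tpidr_el2 points at the per-cpu structure,
 * whose field at offset CPU_CURRENT is a pointer to the vcpu currently
 * running on this physical CPU, and that each vcpu stores its general-purpose
 * register file at offset VCPU_REGS and its lazily-switched system registers
 * at offset VCPU_LAZY. A minimal C sketch of the register-file layout implied
 * by the offsets used below (field names are hypothetical, for illustration
 * only):
 *
 *     struct vcpu_regs {
 *         uint64_t x[31];  // x0..x30, at VCPU_REGS + 8 * n
 *         uint64_t pc;     // saved elr_el2, at VCPU_REGS + 8 * 31
 *         uint64_t spsr;   // saved spsr_el2, at VCPU_REGS + 8 * 32
 *     };
 */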

.text

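/*
 * The EL2 exception vector table: 16 entries of 0x80 bytes each, grouped as
 * exceptions taken from the current EL with SP_EL0, from the current EL with
 * SP_ELx, from a lower EL using AArch64, and from a lower EL using AArch32,
 * with each group ordered synchronous, IRQ, FIQ, SError.
 */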
.balign 0x800
.global vector_table_el2
vector_table_el2:
        /* sync_cur_sp0 */
        b .

.balign 0x80
        /* irq_cur_sp0 */
        b irq_current

.balign 0x80
        /* fiq_cur_sp0 */
        b .

.balign 0x80
        /* serr_cur_sp0 */
        b .

.balign 0x80
        /* sync_cur_spx */
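        /* Pass the syndrome and return address to the C handler. */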
        mrs x0, esr_el2
        mrs x1, elr_el2
        b sync_current_exception

.balign 0x80
        /* irq_cur_spx */
        b irq_current

.balign 0x80
        /* fiq_cur_spx */
        b .

.balign 0x80
        /* serr_cur_spx */
        b .

.balign 0x80
        /* sync_lower_64 */

        /*
         * Save x18 since we're about to clobber it. We subtract 16 instead of
         * 8 from the stack pointer to keep it 16-byte aligned.
         */
        str x18, [sp, #-16]!

        /* Extract the exception class (EC) from the syndrome register. */
        mrs x18, esr_el2
        lsr x18, x18, #26

        /* Take the slow path if the exception is not due to an HVC instruction. */
        subs x18, x18, #0x16
        b.ne slow_sync_lower_64

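        /*
         * The fast path below assumes that hvc_handler returns two values:
         * the result to hand back to the guest in x0 and, in x1, a pointer
         * to a vcpu to switch to (zero to keep running the current vcpu).
         * Under AAPCS64 this is what a small struct returned by value looks
         * like; a hypothetical C-side declaration might be:
         *
         *     struct hvc_ret {
         *         uint64_t user_ret;  // handed back to the guest in x0
         *         struct vcpu *next;  // non-NULL requests a switch (x1)
         *     };
         */
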
        /*
         * Save x29 and x30, which are not saved by the callee, then jump to
         * the HVC handler.
         */
        stp x29, x30, [sp, #-16]!
        bl hvc_handler
        ldp x29, x30, [sp], #16
        cbnz x1, sync_lower_64_switch

        /* Zero out all volatile registers (except x0) and return. */
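        /*
         * The stp below writes two zeroes just under the stack pointer, and
         * each ldp then reloads a register pair from that same location,
         * clearing x1..x16 two registers per instruction; x17 is cleared
         * separately below.
         */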
        stp xzr, xzr, [sp, #-16]
        ldp x1, x2, [sp, #-16]
        ldp x3, x4, [sp, #-16]
        ldp x5, x6, [sp, #-16]
        ldp x7, x8, [sp, #-16]
        ldp x9, x10, [sp, #-16]
        ldp x11, x12, [sp, #-16]
        ldp x13, x14, [sp, #-16]
        ldp x15, x16, [sp, #-16]
        mov x17, xzr
        eret

.balign 0x80
        /* irq_lower_64 */

        /* Save x0 since we're about to clobber it. */
        str x0, [sp, #-8]

        /* Get the current vcpu. */
        mrs x0, tpidr_el2
        ldr x0, [x0, #CPU_CURRENT]

        /* Save volatile registers. */
        add x0, x0, #VCPU_REGS
        stp x2, x3, [x0, #8 * 2]
        stp x4, x5, [x0, #8 * 4]
        stp x6, x7, [x0, #8 * 6]
        stp x8, x9, [x0, #8 * 8]
        stp x10, x11, [x0, #8 * 10]
        stp x12, x13, [x0, #8 * 12]
        stp x14, x15, [x0, #8 * 14]
        stp x16, x17, [x0, #8 * 16]
        str x18, [x0, #8 * 18]
        stp x29, x30, [x0, #8 * 29]

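        /*
         * x1 still holds the guest's x1; reload the guest's x0 (stashed on
         * the stack above) and store the pair into slots 0 and 1.
         */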
        ldr x2, [sp, #-8]
        stp x2, x1, [x0, #8 * 0]

        /* Save return address & mode. */
        mrs x1, elr_el2
        mrs x2, spsr_el2
        stp x1, x2, [x0, #8 * 31]

        /* Call C handler. */
        bl irq_lower

        mrs x1, tpidr_el2
        ldr x1, [x1, #CPU_CURRENT]
        cbnz x0, vcpu_switch

        /* vcpu is not changing. */
        mov x0, x1
        b vcpu_restore_volatile_and_run

.balign 0x80
        /* fiq_lower_64 */
        b .

.balign 0x80
        /* serr_lower_64 */
        b .

.balign 0x80
        /* sync_lower_32 */
        b .

.balign 0x80
        /* irq_lower_32 */
        b .

.balign 0x80
        /* fiq_lower_32 */
        b .

.balign 0x80
        /* serr_lower_32 */
        b .

.balign 0x80

/**
 * Switch to a new vcpu.
 *
 * All volatile registers from the old vcpu have already been saved. We need
 * to save only the non-volatile ones from the old vcpu, and restore all of
 * the registers from the new one.
 *
 * x0 is a pointer to the new vcpu.
 * x1 is a pointer to the old vcpu.
 */
vcpu_switch:
        /* Save non-volatile registers. */
        add x1, x1, #VCPU_REGS
        stp x19, x20, [x1, #8 * 19]
        stp x21, x22, [x1, #8 * 21]
        stp x23, x24, [x1, #8 * 23]
        stp x25, x26, [x1, #8 * 25]
        stp x27, x28, [x1, #8 * 27]

        /* Save lazy state. */
        add x1, x1, #(VCPU_LAZY - VCPU_REGS)
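        /*
         * The system registers below are saved in pairs, one pair per
         * 16-byte slot of the vcpu's lazy-state area; the restore sequence
         * in vcpu_restore_all_and_run mirrors this layout.
         */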

        mrs x24, vmpidr_el2
        mrs x25, csselr_el1
        stp x24, x25, [x1, #16 * 0]

        mrs x2, sctlr_el1
        mrs x3, actlr_el1
        stp x2, x3, [x1, #16 * 1]

        mrs x4, cpacr_el1
        mrs x5, ttbr0_el1
        stp x4, x5, [x1, #16 * 2]

        mrs x6, ttbr1_el1
        mrs x7, tcr_el1
        stp x6, x7, [x1, #16 * 3]

        mrs x8, esr_el1
        mrs x9, afsr0_el1
        stp x8, x9, [x1, #16 * 4]

        mrs x10, afsr1_el1
        mrs x11, far_el1
        stp x10, x11, [x1, #16 * 5]

        mrs x12, mair_el1
        mrs x13, vbar_el1
        stp x12, x13, [x1, #16 * 6]

        mrs x14, contextidr_el1
        mrs x15, tpidr_el0
        stp x14, x15, [x1, #16 * 7]

        mrs x16, tpidrro_el0
        mrs x17, tpidr_el1
        stp x16, x17, [x1, #16 * 8]

        mrs x18, amair_el1
        mrs x19, cntkctl_el1
        stp x18, x19, [x1, #16 * 9]

        mrs x20, sp_el0
        mrs x21, sp_el1
        stp x20, x21, [x1, #16 * 10]

        mrs x22, par_el1
        str x22, [x1, #16 * 11]

        /* Intentional fall through. */

.globl vcpu_restore_all_and_run
vcpu_restore_all_and_run:
        /* Update cpu()->current. */
        mrs x2, tpidr_el2
        str x0, [x2, #CPU_CURRENT]

        /* Get a pointer to the lazy registers. */
        add x0, x0, #VCPU_LAZY

        ldp x24, x25, [x0, #16 * 0]
        msr vmpidr_el2, x24
        msr csselr_el1, x25

        ldp x2, x3, [x0, #16 * 1]
        msr sctlr_el1, x2
        msr actlr_el1, x3

        ldp x4, x5, [x0, #16 * 2]
        msr cpacr_el1, x4
        msr ttbr0_el1, x5

        ldp x6, x7, [x0, #16 * 3]
        msr ttbr1_el1, x6
        msr tcr_el1, x7

        ldp x8, x9, [x0, #16 * 4]
        msr esr_el1, x8
        msr afsr0_el1, x9

        ldp x10, x11, [x0, #16 * 5]
        msr afsr1_el1, x10
        msr far_el1, x11

        ldp x12, x13, [x0, #16 * 6]
        msr mair_el1, x12
        msr vbar_el1, x13

        ldp x14, x15, [x0, #16 * 7]
        msr contextidr_el1, x14
        msr tpidr_el0, x15

        ldp x16, x17, [x0, #16 * 8]
        msr tpidrro_el0, x16
        msr tpidr_el1, x17

        ldp x18, x19, [x0, #16 * 9]
        msr amair_el1, x18
        msr cntkctl_el1, x19

        ldp x20, x21, [x0, #16 * 10]
        msr sp_el0, x20
        msr sp_el1, x21

        ldp x22, x23, [x0, #16 * 11]
        msr par_el1, x22
        msr hcr_el2, x23
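        /*
         * Note that hcr_el2 is restored from the lazy area here but is not
         * written back by vcpu_switch above, so it is presumably kept up to
         * date elsewhere whenever it changes.
         */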

        /* Restore non-volatile registers. */
        add x0, x0, #(VCPU_REGS - VCPU_LAZY)

        ldp x19, x20, [x0, #8 * 19]
        ldp x21, x22, [x0, #8 * 21]
        ldp x23, x24, [x0, #8 * 23]
        ldp x25, x26, [x0, #8 * 25]
        ldp x27, x28, [x0, #8 * 27]

        /* Restore volatile registers and return. */
        sub x0, x0, #VCPU_REGS

/**
 * Restore volatile registers and run the given vcpu.
 *
 * x0 is a pointer to the vcpu.
 */
vcpu_restore_volatile_and_run:
        /* Restore volatile registers. */
        add x0, x0, #VCPU_REGS

        ldp x4, x5, [x0, #8 * 4]
        ldp x6, x7, [x0, #8 * 6]
        ldp x8, x9, [x0, #8 * 8]
        ldp x10, x11, [x0, #8 * 10]
        ldp x12, x13, [x0, #8 * 12]
        ldp x14, x15, [x0, #8 * 14]
        ldp x16, x17, [x0, #8 * 16]
        ldr x18, [x0, #8 * 18]
        ldp x29, x30, [x0, #8 * 29]

        /* Restore return address & mode. */
        ldp x1, x2, [x0, #8 * 31]
        msr elr_el2, x1
        msr spsr_el2, x2

        /* Restore x0..x3, which were used as scratch registers above. */
        ldp x2, x3, [x0, #8 * 2]
        ldp x0, x1, [x0, #8 * 0]
        eret

slow_sync_lower_64:
        /* Get the current vcpu. */
        mrs x18, tpidr_el2
        ldr x18, [x18, #CPU_CURRENT]

        /* Save volatile registers. */
        add x18, x18, #VCPU_REGS
        stp x0, x1, [x18, #8 * 0]
        stp x2, x3, [x18, #8 * 2]
        stp x4, x5, [x18, #8 * 4]
        stp x6, x7, [x18, #8 * 6]
        stp x8, x9, [x18, #8 * 8]
        stp x10, x11, [x18, #8 * 10]
        stp x12, x13, [x18, #8 * 12]
        stp x14, x15, [x18, #8 * 14]
        stp x16, x17, [x18, #8 * 16]
        stp x29, x30, [x18, #8 * 29]

        /* x18 was saved on the stack, so move it to the vcpu register buffer. */
        ldr x0, [sp], #16
        str x0, [x18, #8 * 18]

        /* Save return address & mode. */
        mrs x1, elr_el2
        mrs x2, spsr_el2
        stp x1, x2, [x18, #8 * 31]

        /* Read the syndrome register and call the C handler. */
        mrs x0, esr_el2
        bl sync_lower_exception

        /* Switch to the vcpu returned by sync_lower_exception. */
        mrs x1, tpidr_el2
        ldr x1, [x1, #CPU_CURRENT]
        cbnz x0, vcpu_switch

        /* vcpu is not changing. */
        mov x0, x1
        b vcpu_restore_volatile_and_run

sync_lower_64_switch:
        /* We'll have to switch, so save volatile state before doing so. */
        mrs x18, tpidr_el2
        ldr x18, [x18, #CPU_CURRENT]

        /* Store zeroes in the volatile register storage, except for x0. */
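        /*
         * As on the fast path above, the guest's volatile registers other
         * than x0 are deliberately replaced with zeroes across the HVC
         * rather than preserved.
         */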
        add x18, x18, #VCPU_REGS
        stp x0, xzr, [x18, #8 * 0]
        stp xzr, xzr, [x18, #8 * 2]
        stp xzr, xzr, [x18, #8 * 4]
        stp xzr, xzr, [x18, #8 * 6]
        stp xzr, xzr, [x18, #8 * 8]
        stp xzr, xzr, [x18, #8 * 10]
        stp xzr, xzr, [x18, #8 * 12]
        stp xzr, xzr, [x18, #8 * 14]
        stp xzr, xzr, [x18, #8 * 16]
        stp x29, x30, [x18, #8 * 29]

        str xzr, [x18, #8 * 18]

        /* Save return address & mode. */
        mrs x2, elr_el2
        mrs x3, spsr_el2
        stp x2, x3, [x18, #8 * 31]

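        /*
         * vcpu_switch expects the new vcpu in x0 (the pointer hvc_handler
         * returned in x1) and the old vcpu in x1.
         */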
        /* Save lazy state, then switch to the new vcpu. */
        mov x0, x1
        sub x1, x18, #VCPU_REGS
        b vcpu_switch