#include "offsets.h"
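
/*
 * CPU_CURRENT, VCPU_REGS and VCPU_LAZY are field offsets defined in
 * offsets.h. tpidr_el2 holds a pointer to the per-cpu state from which
 * the current vcpu is looked up.
 */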

.text

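/*
 * Exception vector table for EL2. It has 16 entries, each 0x80 bytes
 * apart: four exception types (synchronous, IRQ, FIQ, SError) for each
 * of four sources (current EL with SP_EL0, current EL with SP_ELx,
 * lower EL using AArch64, lower EL using AArch32). The table itself
 * must be 0x800-aligned, hence the .balign below.
 */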
.balign 0x800
.global vector_table_el2
vector_table_el2:
	/* sync_cur_sp0 */
	b .

.balign 0x80
	/* irq_cur_sp0 */
	b irq_current

.balign 0x80
	/* fiq_cur_sp0 */
	b .

.balign 0x80
	/* serr_cur_sp0 */
	b .

.balign 0x80
	/* sync_cur_spx */
	mrs x0, esr_el2
	mrs x1, elr_el2
	b sync_current_exception

.balign 0x80
	/* irq_cur_spx */
	b irq_current

.balign 0x80
	/* fiq_cur_spx */
	b .

.balign 0x80
	/* serr_cur_spx */
	b .

.balign 0x80
	/* sync_lower_64 */

	/* Save x18 since we're about to clobber it. */
	str x18, [sp, #-8]

	/* Extract the exception class (EC) from the exception syndrome register. */
	mrs x18, esr_el2
	lsr x18, x18, #26
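	/* EC (ESR_EL2 bits [31:26]) is 0x16 for an HVC from AArch64. */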

	/* Take the slow path if the exception is not due to an HVC instruction. */
	subs x18, x18, #0x16
	b.ne slow_sync_lower_64

	/*
	 * Save x29 and x30, which are not saved by the callee, then call the
	 * HVC handler.
	 */
	stp x29, x30, [sp, #-16]!
	bl hvc_handler
	ldp x29, x30, [sp], #16
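	/*
	 * hvc_handler returns the value for the HVC caller in x0; a
	 * non-zero x1 is the vcpu to switch to.
	 */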
	cbnz x1, sync_lower_64_switch

	/* Zero out all volatile registers (except x0) and return. */
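	/*
	 * A single 16-byte store of zeroes below the stack pointer is
	 * reloaded by each ldp, clearing two registers at a time.
	 */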
	stp xzr, xzr, [sp, #-16]
	ldp x1, x2, [sp, #-16]
	ldp x3, x4, [sp, #-16]
	ldp x5, x6, [sp, #-16]
	ldp x7, x8, [sp, #-16]
	ldp x9, x10, [sp, #-16]
	ldp x11, x12, [sp, #-16]
	ldp x13, x14, [sp, #-16]
	ldp x15, x16, [sp, #-16]
	ldp x17, x18, [sp, #-16]
	eret

.balign 0x80
	/* irq_lower_64 */

	/* Save x0 since we're about to clobber it. */
	str x0, [sp, #-8]

	/* Get the current vcpu. */
	mrs x0, tpidr_el2
	ldr x0, [x0, #CPU_CURRENT]

	/* Save volatile registers. */
	add x0, x0, #VCPU_REGS
	stp x2, x3, [x0, #8 * 2]
	stp x4, x5, [x0, #8 * 4]
	stp x6, x7, [x0, #8 * 6]
	stp x8, x9, [x0, #8 * 8]
	stp x10, x11, [x0, #8 * 10]
	stp x12, x13, [x0, #8 * 12]
	stp x14, x15, [x0, #8 * 14]
	stp x16, x17, [x0, #8 * 16]
	str x18, [x0, #8 * 18]
	stp x29, x30, [x0, #8 * 29]

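	/*
	 * x19-x28 are callee-saved and are only written out later if a
	 * vcpu switch turns out to be needed. x0 itself was stashed on the
	 * stack above, so reload it before storing x0 and x1 in the vcpu.
	 */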
	ldr x2, [sp, #-8]
	stp x2, x1, [x0, #8 * 0]

	/* Save return address & mode. */
	mrs x1, elr_el2
	mrs x2, spsr_el2
	stp x1, x2, [x0, #8 * 31]

	/* Call C handler. */
	bl irq_lower

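	/*
	 * irq_lower returns the vcpu to switch to, or 0 to keep running
	 * the current vcpu.
	 */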
	mrs x1, tpidr_el2
	ldr x1, [x1, #CPU_CURRENT]
	cbnz x0, vcpu_switch

	/* vcpu is not changing. */
	mov x0, x1
	b vcpu_restore_volatile_and_run

.balign 0x80
	/* fiq_lower_64 */
	b .

.balign 0x80
	/* serr_lower_64 */
	b .

.balign 0x80
	/* sync_lower_32 */
	b .

.balign 0x80
	/* irq_lower_32 */
	b .

.balign 0x80
	/* fiq_lower_32 */
	b .

.balign 0x80
	/* serr_lower_32 */
	b .

.balign 0x80

/**
 * Switch to a new vcpu.
 *
 * All volatile registers from the old vcpu have already been saved. We need
 * to save only non-volatile ones from the old vcpu, and restore all from the
 * new one.
 *
 * x0 is a pointer to the new vcpu.
 * x1 is a pointer to the old vcpu.
 */
vcpu_switch:
	/* Save non-volatile registers. */
	add x1, x1, #VCPU_REGS
	stp x19, x20, [x1, #8 * 19]
	stp x21, x22, [x1, #8 * 21]
	stp x23, x24, [x1, #8 * 23]
	stp x25, x26, [x1, #8 * 25]
	stp x27, x28, [x1, #8 * 27]

	/* Save lazy state. */
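	/*
	 * The lazy state is the EL1/EL0 system register context; it is
	 * saved here when switching away from a vcpu and restored below
	 * before the new vcpu runs.
	 */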
	add x1, x1, #(VCPU_LAZY - VCPU_REGS)

	mrs x24, vmpidr_el2
	mrs x25, csselr_el1
	stp x24, x25, [x1, #16 * 0]

	mrs x2, sctlr_el1
	mrs x3, actlr_el1
	stp x2, x3, [x1, #16 * 1]

	mrs x4, cpacr_el1
	mrs x5, ttbr0_el1
	stp x4, x5, [x1, #16 * 2]

	mrs x6, ttbr1_el1
	mrs x7, tcr_el1
	stp x6, x7, [x1, #16 * 3]

	mrs x8, esr_el1
	mrs x9, afsr0_el1
	stp x8, x9, [x1, #16 * 4]

	mrs x10, afsr1_el1
	mrs x11, far_el1
	stp x10, x11, [x1, #16 * 5]

	mrs x12, mair_el1
	mrs x13, vbar_el1
	stp x12, x13, [x1, #16 * 6]

	mrs x14, contextidr_el1
	mrs x15, tpidr_el0
	stp x14, x15, [x1, #16 * 7]

	mrs x16, tpidrro_el0
	mrs x17, tpidr_el1
	stp x16, x17, [x1, #16 * 8]

	mrs x18, amair_el1
	mrs x19, cntkctl_el1
	stp x18, x19, [x1, #16 * 9]

	mrs x20, sp_el0
	mrs x21, sp_el1
	stp x20, x21, [x1, #16 * 10]

	mrs x22, par_el1
	str x22, [x1, #16 * 11]

	/* Intentional fall through. */

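/**
 * Restore the full register state of the vcpu and run it.
 *
 * x0 is a pointer to the vcpu.
 */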
.globl vcpu_restore_all_and_run
vcpu_restore_all_and_run:
	/* Update cpu()->current. */
	mrs x2, tpidr_el2
	str x0, [x2, #CPU_CURRENT]

	/* Get a pointer to the lazy registers. */
	add x0, x0, #VCPU_LAZY

	ldp x24, x25, [x0, #16 * 0]
	msr vmpidr_el2, x24
	msr csselr_el1, x25

	ldp x2, x3, [x0, #16 * 1]
	msr sctlr_el1, x2
	msr actlr_el1, x3

	ldp x4, x5, [x0, #16 * 2]
	msr cpacr_el1, x4
	msr ttbr0_el1, x5

	ldp x6, x7, [x0, #16 * 3]
	msr ttbr1_el1, x6
	msr tcr_el1, x7

	ldp x8, x9, [x0, #16 * 4]
	msr esr_el1, x8
	msr afsr0_el1, x9

	ldp x10, x11, [x0, #16 * 5]
	msr afsr1_el1, x10
	msr far_el1, x11

	ldp x12, x13, [x0, #16 * 6]
	msr mair_el1, x12
	msr vbar_el1, x13

	ldp x14, x15, [x0, #16 * 7]
	msr contextidr_el1, x14
	msr tpidr_el0, x15

	ldp x16, x17, [x0, #16 * 8]
	msr tpidrro_el0, x16
	msr tpidr_el1, x17

	ldp x18, x19, [x0, #16 * 9]
	msr amair_el1, x18
	msr cntkctl_el1, x19

	ldp x20, x21, [x0, #16 * 10]
	msr sp_el0, x20
	msr sp_el1, x21

	ldp x22, x23, [x0, #16 * 11]
	msr par_el1, x22
	msr hcr_el2, x23

	/* Restore non-volatile registers. */
	add x0, x0, #(VCPU_REGS - VCPU_LAZY)

	ldp x19, x20, [x0, #8 * 19]
	ldp x21, x22, [x0, #8 * 21]
	ldp x23, x24, [x0, #8 * 23]
	ldp x25, x26, [x0, #8 * 25]
	ldp x27, x28, [x0, #8 * 27]

	/* Restore volatile registers and return. */
	sub x0, x0, #VCPU_REGS

/**
 * Restore volatile registers and run the given vcpu.
 *
 * x0 is a pointer to the vcpu.
 */
vcpu_restore_volatile_and_run:
	/* Restore volatile registers. */
	add x0, x0, #VCPU_REGS

	ldp x4, x5, [x0, #8 * 4]
	ldp x6, x7, [x0, #8 * 6]
	ldp x8, x9, [x0, #8 * 8]
	ldp x10, x11, [x0, #8 * 10]
	ldp x12, x13, [x0, #8 * 12]
	ldp x14, x15, [x0, #8 * 14]
	ldp x16, x17, [x0, #8 * 16]
	ldr x18, [x0, #8 * 18]
	ldp x29, x30, [x0, #8 * 29]

	/* Restore return address & mode. */
	ldp x1, x2, [x0, #8 * 31]
	msr elr_el2, x1
	msr spsr_el2, x2

	/* Restore x0..x3, which we have used as scratch before. */
	ldp x2, x3, [x0, #8 * 2]
	ldp x0, x1, [x0, #8 * 0]
	eret

slow_sync_lower_64:
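	/*
	 * x18 was saved on the stack at the start of the vector, so it is
	 * free to use as a scratch register here; its original value is
	 * stored into the vcpu below.
	 */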
	/* Get the current vcpu. */
	mrs x18, tpidr_el2
	ldr x18, [x18, #CPU_CURRENT]

	/* Save volatile registers. */
	add x18, x18, #VCPU_REGS
	stp x0, x1, [x18, #8 * 0]
	stp x2, x3, [x18, #8 * 2]
	stp x4, x5, [x18, #8 * 4]
	stp x6, x7, [x18, #8 * 6]
	stp x8, x9, [x18, #8 * 8]
	stp x10, x11, [x18, #8 * 10]
	stp x12, x13, [x18, #8 * 12]
	stp x14, x15, [x18, #8 * 14]
	stp x16, x17, [x18, #8 * 16]
	stp x29, x30, [x18, #8 * 29]

	ldr x0, [sp, #-8]
	str x0, [x18, #8 * 18]

	/* Save return address & mode. */
	mrs x1, elr_el2
	mrs x2, spsr_el2
	stp x1, x2, [x18, #8 * 31]

	/* Read syndrome register and call C handler. */
	mrs x0, esr_el2
	bl sync_lower_exception

	/* Switch to the vcpu returned by sync_lower_exception. */
	mrs x1, tpidr_el2
	ldr x1, [x1, #CPU_CURRENT]
	cbnz x0, vcpu_switch

	/* vcpu is not changing. */
	mov x0, x1
	b vcpu_restore_volatile_and_run

sync_lower_64_switch:
	/* We'll have to switch, so save volatile state before doing so. */
	mrs x18, tpidr_el2
	ldr x18, [x18, #CPU_CURRENT]

	/* Store zeroes in volatile register storage, except x0. */
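	/*
	 * This mirrors the fast HVC return path: when this vcpu next runs,
	 * x1-x18 are restored as zero and only x0 carries the result.
	 */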
	add x18, x18, #VCPU_REGS
	stp x0, xzr, [x18, #8 * 0]
	stp xzr, xzr, [x18, #8 * 2]
	stp xzr, xzr, [x18, #8 * 4]
	stp xzr, xzr, [x18, #8 * 6]
	stp xzr, xzr, [x18, #8 * 8]
	stp xzr, xzr, [x18, #8 * 10]
	stp xzr, xzr, [x18, #8 * 12]
	stp xzr, xzr, [x18, #8 * 14]
	stp xzr, xzr, [x18, #8 * 16]
	stp x29, x30, [x18, #8 * 29]

	str xzr, [x18, #8 * 18]

	/* Save return address & mode. */
	mrs x2, elr_el2
	mrs x3, spsr_el2
	stp x2, x3, [x18, #8 * 31]

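	/* x1 still holds the new vcpu pointer returned by hvc_handler. */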
	/* Save lazy state, then switch to new vcpu. */
	mov x0, x1
	sub x1, x18, #VCPU_REGS
	b vcpu_switch