#include "offsets.h"

.text

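/*
 * EL2 exception vector table: 16 entries of 0x80 bytes each, in four groups
 * of four (current EL with SP0, current EL with SPx, lower EL using AArch64,
 * lower EL using AArch32), each group ordered sync, IRQ, FIQ, SError. The
 * comment at the top of each entry names its slot; entries that are not
 * handled simply spin (b .).
 */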
.balign 0x800
.global vector_table_el2
vector_table_el2:
	/* sync_cur_sp0 */
	b .

.balign 0x80
	/* irq_cur_sp0 */
	b irq_current

.balign 0x80
	/* fiq_cur_sp0 */
	b .

.balign 0x80
	/* serr_cur_sp0 */
	b .

.balign 0x80
	/* sync_cur_spx */
	mrs x0, esr_el2
	mrs x1, elr_el2
	b sync_current_exception

.balign 0x80
	/* irq_cur_spx */
	b irq_current

.balign 0x80
	/* fiq_cur_spx */
	b .

.balign 0x80
	/* serr_cur_spx */
	b .

.balign 0x80
	/* sync_lower_64 */

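	/*
	 * A guest typically lands here by executing an HVC instruction, for
	 * example (illustrative only; the hypercall ABI is defined by
	 * hvc_handler):
	 *
	 *     mov x0, #1    // hypothetical function id
	 *     hvc #0
	 */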
	/*
	 * Save x18 since we're about to clobber it. We subtract 16 instead of
	 * 8 from the stack pointer to keep it 16-byte aligned.
	 */
	str x18, [sp, #-16]!

	/*
	 * Extract the exception class (EC), bits [31:26] of the exception
	 * syndrome register.
	 */
	mrs x18, esr_el2
	lsr x18, x18, #26

	/*
	 * Take the slow path if the exception is not due to an HVC
	 * instruction (EC 0x16).
	 */
	subs x18, x18, #0x16
	b.ne slow_sync_lower_64

	/*
	 * Save x29 and x30, which are not saved by the callee, then jump to
	 * HVC handler.
	 */
	stp x29, x30, [sp, #-16]!
	bl hvc_handler
	ldp x29, x30, [sp], #16
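	/*
	 * hvc_handler returns the value for the guest in x0 and, in x1, the
	 * vcpu to switch to (0 if the current vcpu should keep running).
	 */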
	cbnz x1, sync_lower_64_switch

	/* Zero out all volatile registers (except x0) and return. */
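	/*
	 * A pair of zeroes is stored just below the stack pointer once, then
	 * that same (unmodified) slot is loaded repeatedly to clear two
	 * registers per instruction.
	 */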
	stp xzr, xzr, [sp, #-16]
	ldp x1, x2, [sp, #-16]
	ldp x3, x4, [sp, #-16]
	ldp x5, x6, [sp, #-16]
	ldp x7, x8, [sp, #-16]
	ldp x9, x10, [sp, #-16]
	ldp x11, x12, [sp, #-16]
	ldp x13, x14, [sp, #-16]
	ldp x15, x16, [sp, #-16]
	mov x17, xzr
	mov x18, xzr

	/*
	 * Pop the slot where x18 was saved on entry: like the other volatile
	 * registers, x18 is zeroed rather than restored, and the stack
	 * pointer must not drift down on every HVC.
	 */
	add sp, sp, #16
	eret

.balign 0x80
	/* irq_lower_64 */

	/* Save x0 since we're about to clobber it. */
	str x0, [sp, #-8]
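	/*
	 * The slot below sp is safe scratch: PSTATE.{D,A,I,F} are masked on
	 * exception entry, so no nested exception can clobber it before the
	 * reload below.
	 */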

	/* Get the current vcpu. */
	mrs x0, tpidr_el2
	ldr x0, [x0, #CPU_CURRENT]
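	/*
	 * tpidr_el2 holds a pointer to the per-cpu struct; CPU_CURRENT (from
	 * offsets.h) is the offset of its current-vcpu field.
	 */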

	/* Save volatile registers. */
	add x0, x0, #VCPU_REGS
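	/*
	 * In the register buffer, xn lives at byte offset 8 * n; the pair at
	 * indices 31 and 32 holds the saved elr_el2 and spsr_el2.
	 */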
	stp x2, x3, [x0, #8 * 2]
	stp x4, x5, [x0, #8 * 4]
	stp x6, x7, [x0, #8 * 6]
	stp x8, x9, [x0, #8 * 8]
	stp x10, x11, [x0, #8 * 10]
	stp x12, x13, [x0, #8 * 12]
	stp x14, x15, [x0, #8 * 14]
	stp x16, x17, [x0, #8 * 16]
	str x18, [x0, #8 * 18]
	stp x29, x30, [x0, #8 * 29]

	/* x0 was stashed on the stack above; save it and x1 into the buffer. */
	ldr x2, [sp, #-8]
	stp x2, x1, [x0, #8 * 0]

	/* Save return address & mode. */
	mrs x1, elr_el2
	mrs x2, spsr_el2
	stp x1, x2, [x0, #8 * 31]

	/* Call C handler. */
	bl irq_lower

	/* irq_lower returns the new vcpu in x0, or 0 to keep running the current one. */
	mrs x1, tpidr_el2
	ldr x1, [x1, #CPU_CURRENT]
	cbnz x0, vcpu_switch

	/* vcpu is not changing. */
	mov x0, x1
	b vcpu_restore_volatile_and_run

.balign 0x80
	/* fiq_lower_64 */
	b .

.balign 0x80
	/* serr_lower_64 */
	b .

.balign 0x80
	/* sync_lower_32 */
	b .

.balign 0x80
	/* irq_lower_32 */
	b .

.balign 0x80
	/* fiq_lower_32 */
	b .

.balign 0x80
	/* serr_lower_32 */
	b .

.balign 0x80

/**
 * Switch to a new vcpu.
 *
 * All volatile registers from the old vcpu have already been saved. We need
 * to save only non-volatile ones from the old vcpu, and restore all from the
 * new one.
 *
 * x0 is a pointer to the new vcpu.
 * x1 is a pointer to the old vcpu.
 */
vcpu_switch:
	/* Save non-volatile registers. */
	add x1, x1, #VCPU_REGS
	stp x19, x20, [x1, #8 * 19]
	stp x21, x22, [x1, #8 * 21]
	stp x23, x24, [x1, #8 * 23]
	stp x25, x26, [x1, #8 * 25]
	stp x27, x28, [x1, #8 * 27]

	/* Save lazy state. */
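	/*
	 * The "lazy" state is the set of EL1/EL0 system registers that only
	 * needs to be context-switched when changing vcpus, not on every
	 * exception.
	 */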
	add x1, x1, #(VCPU_LAZY - VCPU_REGS)

	mrs x24, vmpidr_el2
	mrs x25, csselr_el1
	stp x24, x25, [x1, #16 * 0]

	mrs x2, sctlr_el1
	mrs x3, actlr_el1
	stp x2, x3, [x1, #16 * 1]

	mrs x4, cpacr_el1
	mrs x5, ttbr0_el1
	stp x4, x5, [x1, #16 * 2]

	mrs x6, ttbr1_el1
	mrs x7, tcr_el1
	stp x6, x7, [x1, #16 * 3]

	mrs x8, esr_el1
	mrs x9, afsr0_el1
	stp x8, x9, [x1, #16 * 4]

	mrs x10, afsr1_el1
	mrs x11, far_el1
	stp x10, x11, [x1, #16 * 5]

	mrs x12, mair_el1
	mrs x13, vbar_el1
	stp x12, x13, [x1, #16 * 6]

	mrs x14, contextidr_el1
	mrs x15, tpidr_el0
	stp x14, x15, [x1, #16 * 7]

	mrs x16, tpidrro_el0
	mrs x17, tpidr_el1
	stp x16, x17, [x1, #16 * 8]

	mrs x18, amair_el1
	mrs x19, cntkctl_el1
	stp x18, x19, [x1, #16 * 9]

	mrs x20, sp_el0
	mrs x21, sp_el1
	stp x20, x21, [x1, #16 * 10]

	mrs x22, par_el1
	str x22, [x1, #16 * 11]

	/* Intentional fall through. */

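/**
 * Restore the given vcpu's lazy state, non-volatile and volatile registers,
 * and run it. Exported so it can also be reached from outside this file.
 *
 * x0 is a pointer to the new vcpu.
 */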
.globl vcpu_restore_all_and_run
vcpu_restore_all_and_run:
	/* Update cpu()->current. */
	mrs x2, tpidr_el2
	str x0, [x2, #CPU_CURRENT]

	/* Get a pointer to the lazy registers. */
	add x0, x0, #VCPU_LAZY

	ldp x24, x25, [x0, #16 * 0]
	msr vmpidr_el2, x24
	msr csselr_el1, x25

	ldp x2, x3, [x0, #16 * 1]
	msr sctlr_el1, x2
	msr actlr_el1, x3

	ldp x4, x5, [x0, #16 * 2]
	msr cpacr_el1, x4
	msr ttbr0_el1, x5

	ldp x6, x7, [x0, #16 * 3]
	msr ttbr1_el1, x6
	msr tcr_el1, x7

	ldp x8, x9, [x0, #16 * 4]
	msr esr_el1, x8
	msr afsr0_el1, x9

	ldp x10, x11, [x0, #16 * 5]
	msr afsr1_el1, x10
	msr far_el1, x11

	ldp x12, x13, [x0, #16 * 6]
	msr mair_el1, x12
	msr vbar_el1, x13

	ldp x14, x15, [x0, #16 * 7]
	msr contextidr_el1, x14
	msr tpidr_el0, x15

	ldp x16, x17, [x0, #16 * 8]
	msr tpidrro_el0, x16
	msr tpidr_el1, x17

	ldp x18, x19, [x0, #16 * 9]
	msr amair_el1, x18
	msr cntkctl_el1, x19

	ldp x20, x21, [x0, #16 * 10]
	msr sp_el0, x20
	msr sp_el1, x21

	ldr x22, [x0, #16 * 11]
	msr par_el1, x22

	/* Restore non-volatile registers. */
	add x0, x0, #(VCPU_REGS - VCPU_LAZY)

	ldp x19, x20, [x0, #8 * 19]
	ldp x21, x22, [x0, #8 * 21]
	ldp x23, x24, [x0, #8 * 23]
	ldp x25, x26, [x0, #8 * 25]
	ldp x27, x28, [x0, #8 * 27]

	/* Restore volatile registers and return. */
	sub x0, x0, #VCPU_REGS

/**
 * Restore volatile registers and run the given vcpu.
 *
 * x0 is a pointer to the vcpu.
 */
vcpu_restore_volatile_and_run:
	/* Restore volatile registers. */
	add x0, x0, #VCPU_REGS

	ldp x4, x5, [x0, #8 * 4]
	ldp x6, x7, [x0, #8 * 6]
	ldp x8, x9, [x0, #8 * 8]
	ldp x10, x11, [x0, #8 * 10]
	ldp x12, x13, [x0, #8 * 12]
	ldp x14, x15, [x0, #8 * 14]
	ldp x16, x17, [x0, #8 * 16]
	ldr x18, [x0, #8 * 18]
	ldp x29, x30, [x0, #8 * 29]

	/* Restore return address & mode. */
	ldp x1, x2, [x0, #8 * 31]
	msr elr_el2, x1
	msr spsr_el2, x2

	/* Restore x0..x3, which we have used as scratch before. */
	ldp x2, x3, [x0, #8 * 2]
	ldp x0, x1, [x0, #8 * 0]
	eret

slow_sync_lower_64:
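	/*
	 * x18 is free to use as scratch: its original value was saved on the
	 * stack at entry.
	 */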
	/* Get the current vcpu. */
	mrs x18, tpidr_el2
	ldr x18, [x18, #CPU_CURRENT]

	/* Save volatile registers. */
	add x18, x18, #VCPU_REGS
	stp x0, x1, [x18, #8 * 0]
	stp x2, x3, [x18, #8 * 2]
	stp x4, x5, [x18, #8 * 4]
	stp x6, x7, [x18, #8 * 6]
	stp x8, x9, [x18, #8 * 8]
	stp x10, x11, [x18, #8 * 10]
	stp x12, x13, [x18, #8 * 12]
	stp x14, x15, [x18, #8 * 14]
	stp x16, x17, [x18, #8 * 16]
	stp x29, x30, [x18, #8 * 29]

	/* x18 was saved on the stack, so move it into the vcpu regs buffer. */
	ldr x0, [sp], #16
	str x0, [x18, #8 * 18]

	/* Save return address & mode. */
	mrs x1, elr_el2
	mrs x2, spsr_el2
	stp x1, x2, [x18, #8 * 31]

	/* Read syndrome register and call C handler. */
	mrs x0, esr_el2
	bl sync_lower_exception

	/* Switch to the vcpu returned by sync_lower_exception. */
	mrs x1, tpidr_el2
	ldr x1, [x1, #CPU_CURRENT]
	cbnz x0, vcpu_switch

	/* vcpu is not changing. */
	mov x0, x1
	b vcpu_restore_volatile_and_run

sync_lower_64_switch:
	/*
	 * hvc_handler asked us to switch: x1 holds the new vcpu and x0 the
	 * return value for the old one. Pop the slot where x18 was saved on
	 * entry; like the other volatile registers, x18 is deliberately
	 * zeroed below rather than restored.
	 */
	add sp, sp, #16

	/* We'll have to switch, so save volatile state before doing so. */
	mrs x18, tpidr_el2
	ldr x18, [x18, #CPU_CURRENT]

	/* Store zeroes in volatile register storage, except x0. */
	add x18, x18, #VCPU_REGS
	stp x0, xzr, [x18, #8 * 0]
	stp xzr, xzr, [x18, #8 * 2]
	stp xzr, xzr, [x18, #8 * 4]
	stp xzr, xzr, [x18, #8 * 6]
	stp xzr, xzr, [x18, #8 * 8]
	stp xzr, xzr, [x18, #8 * 10]
	stp xzr, xzr, [x18, #8 * 12]
	stp xzr, xzr, [x18, #8 * 14]
	stp xzr, xzr, [x18, #8 * 16]
	stp x29, x30, [x18, #8 * 29]

	str xzr, [x18, #8 * 18]

	/* Save return address & mode. */
	mrs x2, elr_el2
	mrs x3, spsr_el2
	stp x2, x3, [x18, #8 * 31]

	/* Save lazy state, then switch to new vcpu. */
	mov x0, x1
	sub x1, x18, #VCPU_REGS
	b vcpu_switch