#include "offsets.h"
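
/*
 * CPU_CURRENT, VCPU_REGS and VCPU_LAZY used below are structure-offset
 * constants, expected to be provided by offsets.h.
 */

/*
 * EL2 exception vector table: sixteen entries, each limited to 0x80 bytes
 * (32 instructions), with the table itself aligned to 0x800 bytes as the
 * architecture requires. Entries are grouped by the state the exception was
 * taken from (current EL with SP0, current EL with SPx, lower EL using
 * AArch64, lower EL using AArch32) and by type (synchronous, IRQ, FIQ,
 * SError).
 */
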
.section .text.vector_table_el2, "ax"
.global vector_table_el2
.balign 0x800
vector_table_el2:
        /* sync_cur_sp0 */
        b .

.balign 0x80
        /* irq_cur_sp0 */
        b irq_current

.balign 0x80
        /* fiq_cur_sp0 */
        b .

.balign 0x80
        /* serr_cur_sp0 */
        b .

.balign 0x80
        /* sync_cur_spx */
        mrs x0, esr_el2
        mrs x1, elr_el2
        b sync_current_exception

.balign 0x80
        /* irq_cur_spx */
        b irq_current

.balign 0x80
        /* fiq_cur_spx */
        b .

.balign 0x80
        /* serr_cur_spx */
        b .

.balign 0x80
        /* sync_lower_64 */

        /*
         * Save x18 since we're about to clobber it. We subtract 16 instead of
         * 8 from the stack pointer to keep it 16-byte aligned.
         */
        str x18, [sp, #-16]!

        /* Extract the exception class (EC) from the exception syndrome register. */
        mrs x18, esr_el2
        lsr x18, x18, #26

        /*
         * Take the slow path if the exception is not due to an HVC
         * instruction (EC 0x16).
         */
        subs x18, x18, #0x16
        b.ne slow_sync_lower_64

        /*
         * Save x29 and x30, which are not saved by the callee, then call the
         * HVC handler.
         */
        stp x29, x30, [sp, #-16]!
        bl hvc_handler
        ldp x29, x30, [sp], #16
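        /*
         * On return from hvc_handler, x0 holds the value handed back to the
         * guest and x1 holds the vcpu to switch to (0 means keep running the
         * current vcpu).
         */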
        cbnz x1, sync_lower_64_switch

        /* Zero out all volatile registers (except x0) and return. */
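        /*
         * A pair of zeroes is written just below the stack pointer once and
         * then loaded back into successive register pairs, which takes fewer
         * instructions than a mov per register.
         */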
        stp xzr, xzr, [sp, #-16]
        ldp x1, x2, [sp, #-16]
        ldp x3, x4, [sp, #-16]
        ldp x5, x6, [sp, #-16]
        ldp x7, x8, [sp, #-16]
        ldp x9, x10, [sp, #-16]
        ldp x11, x12, [sp, #-16]
        ldp x13, x14, [sp, #-16]
        ldp x15, x16, [sp, #-16]
        mov x17, xzr

        /* Restore x18, which was saved on the stack, and pop its slot. */
        ldr x18, [sp], #16

        eret

.balign 0x80
        /* irq_lower_64 */

        /* Save x0 since we're about to clobber it. */
        str x0, [sp, #-8]
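        /*
         * Note that sp itself is not moved: the value sits just below the
         * stack pointer and is read back once x2 is free to use as scratch.
         */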

        /* Get the current vcpu. */
        mrs x0, tpidr_el2
        ldr x0, [x0, #CPU_CURRENT]

        /* Save volatile registers. */
        add x0, x0, #VCPU_REGS
        stp x2, x3, [x0, #8 * 2]
        stp x4, x5, [x0, #8 * 4]
        stp x6, x7, [x0, #8 * 6]
        stp x8, x9, [x0, #8 * 8]
        stp x10, x11, [x0, #8 * 10]
        stp x12, x13, [x0, #8 * 12]
        stp x14, x15, [x0, #8 * 14]
        stp x16, x17, [x0, #8 * 16]
        str x18, [x0, #8 * 18]
        stp x29, x30, [x0, #8 * 29]

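        /* Save x0 (reloaded from the stack slot above) and x1. */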
        ldr x2, [sp, #-8]
        stp x2, x1, [x0, #8 * 0]

        /* Save return address & mode. */
        mrs x1, elr_el2
        mrs x2, spsr_el2
        stp x1, x2, [x0, #8 * 31]

        /* Call C handler. */
        bl irq_lower

        /* Switch to the vcpu returned by irq_lower, if it is non-null. */
        mrs x1, tpidr_el2
        ldr x1, [x1, #CPU_CURRENT]
        cbnz x0, vcpu_switch

        /* vcpu is not changing. */
        mov x0, x1
        b vcpu_restore_volatile_and_run

.balign 0x80
        /* fiq_lower_64 */
        b .

.balign 0x80
        /* serr_lower_64 */
        b .

.balign 0x80
        /* sync_lower_32 */
        b .

.balign 0x80
        /* irq_lower_32 */
        b .

.balign 0x80
        /* fiq_lower_32 */
        b .

.balign 0x80
        /* serr_lower_32 */
        b .

.balign 0x80

/**
 * Switch to a new vcpu.
 *
 * All volatile registers from the old vcpu have already been saved. We need
 * to save only non-volatile ones from the old vcpu, and restore all from the
 * new one.
 *
 * x0 is a pointer to the new vcpu.
 * x1 is a pointer to the old vcpu.
 */
vcpu_switch:
        /* Save non-volatile registers. */
        add x1, x1, #VCPU_REGS
        stp x19, x20, [x1, #8 * 19]
        stp x21, x22, [x1, #8 * 21]
        stp x23, x24, [x1, #8 * 23]
        stp x25, x26, [x1, #8 * 25]
        stp x27, x28, [x1, #8 * 27]

        /* Save lazy state. */
        add x1, x1, #(VCPU_LAZY - VCPU_REGS)
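        /*
         * EL1 system registers are saved two at a time; the order of the
         * pairs below must match the restore sequence in
         * vcpu_restore_all_and_run.
         */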

        mrs x24, vmpidr_el2
        mrs x25, csselr_el1
        stp x24, x25, [x1, #16 * 0]

        mrs x2, sctlr_el1
        mrs x3, actlr_el1
        stp x2, x3, [x1, #16 * 1]

        mrs x4, cpacr_el1
        mrs x5, ttbr0_el1
        stp x4, x5, [x1, #16 * 2]

        mrs x6, ttbr1_el1
        mrs x7, tcr_el1
        stp x6, x7, [x1, #16 * 3]

        mrs x8, esr_el1
        mrs x9, afsr0_el1
        stp x8, x9, [x1, #16 * 4]

        mrs x10, afsr1_el1
        mrs x11, far_el1
        stp x10, x11, [x1, #16 * 5]

        mrs x12, mair_el1
        mrs x13, vbar_el1
        stp x12, x13, [x1, #16 * 6]

        mrs x14, contextidr_el1
        mrs x15, tpidr_el0
        stp x14, x15, [x1, #16 * 7]

        mrs x16, tpidrro_el0
        mrs x17, tpidr_el1
        stp x16, x17, [x1, #16 * 8]

        mrs x18, amair_el1
        mrs x19, cntkctl_el1
        stp x18, x19, [x1, #16 * 9]

        mrs x20, sp_el0
        mrs x21, sp_el1
        stp x20, x21, [x1, #16 * 10]

        mrs x22, par_el1
        str x22, [x1, #16 * 11]

        /* Intentional fall through. */

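/**
 * Restore the vcpu's full state (lazy EL1 state and general-purpose
 * registers) and resume it.
 *
 * x0 is a pointer to the vcpu to run.
 */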
.globl vcpu_restore_all_and_run
vcpu_restore_all_and_run:
        /* Update cpu()->current. */
        mrs x2, tpidr_el2
        str x0, [x2, #CPU_CURRENT]

        /* Get a pointer to the lazy registers. */
        add x0, x0, #VCPU_LAZY

        ldp x24, x25, [x0, #16 * 0]
        msr vmpidr_el2, x24
        msr csselr_el1, x25

        ldp x2, x3, [x0, #16 * 1]
        msr sctlr_el1, x2
        msr actlr_el1, x3

        ldp x4, x5, [x0, #16 * 2]
        msr cpacr_el1, x4
        msr ttbr0_el1, x5

        ldp x6, x7, [x0, #16 * 3]
        msr ttbr1_el1, x6
        msr tcr_el1, x7

        ldp x8, x9, [x0, #16 * 4]
        msr esr_el1, x8
        msr afsr0_el1, x9

        ldp x10, x11, [x0, #16 * 5]
        msr afsr1_el1, x10
        msr far_el1, x11

        ldp x12, x13, [x0, #16 * 6]
        msr mair_el1, x12
        msr vbar_el1, x13

        ldp x14, x15, [x0, #16 * 7]
        msr contextidr_el1, x14
        msr tpidr_el0, x15

        ldp x16, x17, [x0, #16 * 8]
        msr tpidrro_el0, x16
        msr tpidr_el1, x17

        ldp x18, x19, [x0, #16 * 9]
        msr amair_el1, x18
        msr cntkctl_el1, x19

        ldp x20, x21, [x0, #16 * 10]
        msr sp_el0, x20
        msr sp_el1, x21

        ldr x22, [x0, #16 * 11]
        msr par_el1, x22

        /* Restore non-volatile registers. */
        add x0, x0, #(VCPU_REGS - VCPU_LAZY)

        ldp x19, x20, [x0, #8 * 19]
        ldp x21, x22, [x0, #8 * 21]
        ldp x23, x24, [x0, #8 * 23]
        ldp x25, x26, [x0, #8 * 25]
        ldp x27, x28, [x0, #8 * 27]

        /* Restore volatile registers and return. */
        sub x0, x0, #VCPU_REGS

/**
 * Restore volatile registers and run the given vcpu.
 *
 * x0 is a pointer to the vcpu.
 */
vcpu_restore_volatile_and_run:
        /* Restore volatile registers. */
        add x0, x0, #VCPU_REGS

        ldp x4, x5, [x0, #8 * 4]
        ldp x6, x7, [x0, #8 * 6]
        ldp x8, x9, [x0, #8 * 8]
        ldp x10, x11, [x0, #8 * 10]
        ldp x12, x13, [x0, #8 * 12]
        ldp x14, x15, [x0, #8 * 14]
        ldp x16, x17, [x0, #8 * 16]
        ldr x18, [x0, #8 * 18]
        ldp x29, x30, [x0, #8 * 29]

        /* Restore return address & mode. */
        ldp x1, x2, [x0, #8 * 31]
        msr elr_el2, x1
        msr spsr_el2, x2

        /* Restore x0-x3, which were used as scratch registers above. */
        ldp x2, x3, [x0, #8 * 2]
        ldp x0, x1, [x0, #8 * 0]
        eret

slow_sync_lower_64:
        /* Get the current vcpu. */
        mrs x18, tpidr_el2
        ldr x18, [x18, #CPU_CURRENT]

        /* Save volatile registers. */
        add x18, x18, #VCPU_REGS
        stp x0, x1, [x18, #8 * 0]
        stp x2, x3, [x18, #8 * 2]
        stp x4, x5, [x18, #8 * 4]
        stp x6, x7, [x18, #8 * 6]
        stp x8, x9, [x18, #8 * 8]
        stp x10, x11, [x18, #8 * 10]
        stp x12, x13, [x18, #8 * 12]
        stp x14, x15, [x18, #8 * 14]
        stp x16, x17, [x18, #8 * 16]
        stp x29, x30, [x18, #8 * 29]

        /* x18 was saved on the stack, so move it into the vcpu's register buffer. */
        ldr x0, [sp], #16
        str x0, [x18, #8 * 18]

        /* Save return address & mode. */
        mrs x1, elr_el2
        mrs x2, spsr_el2
        stp x1, x2, [x18, #8 * 31]

        /* Read syndrome register and call C handler. */
        mrs x0, esr_el2
        bl sync_lower_exception

        /* Switch to the vcpu returned by sync_lower_exception. */
        mrs x1, tpidr_el2
        ldr x1, [x1, #CPU_CURRENT]
        cbnz x0, vcpu_switch

        /* vcpu is not changing. */
        mov x0, x1
        b vcpu_restore_volatile_and_run

sync_lower_64_switch:
        /* We'll have to switch, so save volatile state before doing so. */
        mrs x18, tpidr_el2
        ldr x18, [x18, #CPU_CURRENT]

        /* Store zeroes in volatile register storage, except x0. */
        add x18, x18, #VCPU_REGS
        stp x0, xzr, [x18, #8 * 0]
        stp xzr, xzr, [x18, #8 * 2]
        stp xzr, xzr, [x18, #8 * 4]
        stp xzr, xzr, [x18, #8 * 6]
        stp xzr, xzr, [x18, #8 * 8]
        stp xzr, xzr, [x18, #8 * 10]
        stp xzr, xzr, [x18, #8 * 12]
        stp xzr, xzr, [x18, #8 * 14]
        stp xzr, xzr, [x18, #8 * 16]
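        /* x29 and x30 are preserved rather than zeroed. */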
        stp x29, x30, [x18, #8 * 29]

        /* x18's slot is zeroed; drop the copy pushed on the stack at entry. */
        str xzr, [x18, #8 * 18]
        add sp, sp, #16

        /* Save return address & mode. */
        mrs x2, elr_el2
        mrs x3, spsr_el2
        stp x2, x3, [x18, #8 * 31]

        /*
         * Switch to the new vcpu: x0 = new vcpu, x1 = old vcpu. vcpu_switch
         * saves the remaining (non-volatile and lazy) state.
         */
        mov x0, x1
        sub x1, x18, #VCPU_REGS
        b vcpu_switch