#include "offsets.h"
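
/*
 * EL2 exception vector table. The architecture requires the table to be
 * 0x800-byte aligned; it holds 16 entries of 0x80 bytes each, in four groups
 * of four (synchronous, IRQ, FIQ, SError): current EL with SP_EL0, current EL
 * with SP_ELx, lower EL using AArch64, and lower EL using AArch32.
 */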

.section .text.vector_table_el2, "ax"
.global vector_table_el2
.balign 0x800
vector_table_el2:
        /* sync_cur_sp0 */
        b .

.balign 0x80
        /* irq_cur_sp0 */
        b irq_current

.balign 0x80
        /* fiq_cur_sp0 */
        b .

.balign 0x80
        /* serr_cur_sp0 */
        b .

.balign 0x80
        /* sync_cur_spx */
        mrs x0, esr_el2
        mrs x1, elr_el2
        b sync_current_exception

.balign 0x80
        /* irq_cur_spx */
        b irq_current

.balign 0x80
        /* fiq_cur_spx */
        b .

.balign 0x80
        /* serr_cur_spx */
        b .

.balign 0x80
        /* sync_lower_64 */

        /*
         * Save x18 since we're about to clobber it. We subtract 16 instead of
         * 8 from the stack pointer to keep it 16-byte aligned.
         */
        str x18, [sp, #-16]!

        /*
         * Extract the exception class (EC) from the exception syndrome
         * register (ESR_EL2, bits [31:26]).
         */
        mrs x18, esr_el2
        lsr x18, x18, #26

        /*
         * Take the slow path if the exception is not due to an HVC
         * instruction (EC 0x16).
         */
        subs x18, x18, #0x16
        b.ne slow_sync_lower_64

        /*
         * Save x29 and x30, which are not saved by the callee, then call the
         * HVC handler.
         */
        stp x29, x30, [sp, #-16]!
        bl hvc_handler
        ldp x29, x30, [sp], #16
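        /*
         * hvc_handler leaves the value to return to the caller in x0, and
         * sets x1 to the vcpu to switch to, or zero to stay on the current
         * one.
         */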
        cbnz x1, sync_lower_64_switch

        /* Zero out all volatile registers (except x0) and return. */
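        /*
         * A pair of zeroes is stored just below the stack pointer, then
         * loaded back repeatedly to clear two registers at a time; x17 is
         * cleared with a move because its natural pair, x18, is restored from
         * the stack instead.
         */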
        stp xzr, xzr, [sp, #-16]
        ldp x1, x2, [sp, #-16]
        ldp x3, x4, [sp, #-16]
        ldp x5, x6, [sp, #-16]
        ldp x7, x8, [sp, #-16]
        ldp x9, x10, [sp, #-16]
        ldp x11, x12, [sp, #-16]
        ldp x13, x14, [sp, #-16]
        ldp x15, x16, [sp, #-16]
        mov x17, xzr

        /* Restore x18, which was saved on the stack. */
        ldr x18, [sp], #16
        eret

.balign 0x80
        /* irq_lower_64 */

        /* Save x0 since we're about to clobber it. */
        str x0, [sp, #-8]
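        /*
         * The store above is below the stack pointer and sp is left
         * unchanged; this relies on nothing else using the EL2 stack before
         * the value is reloaded below.
         */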

        /* Get the current vcpu: tpidr_el2 points at the per-cpu struct. */
        mrs x0, tpidr_el2
        ldr x0, [x0, #CPU_CURRENT]

        /* Save volatile registers. */
        add x0, x0, #VCPU_REGS
        stp x2, x3, [x0, #8 * 2]
        stp x4, x5, [x0, #8 * 4]
        stp x6, x7, [x0, #8 * 6]
        stp x8, x9, [x0, #8 * 8]
        stp x10, x11, [x0, #8 * 10]
        stp x12, x13, [x0, #8 * 12]
        stp x14, x15, [x0, #8 * 14]
        stp x16, x17, [x0, #8 * 16]
        str x18, [x0, #8 * 18]
        stp x29, x30, [x0, #8 * 29]

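        /*
         * Recover the vcpu's x0 from the stack and store it, together with
         * x1, in register slots 0 and 1.
         */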
        ldr x2, [sp, #-8]
        stp x2, x1, [x0, #8 * 0]

        /* Save return address & mode. */
        mrs x1, elr_el2
        mrs x2, spsr_el2
        stp x1, x2, [x0, #8 * 31]

        /*
         * Call the C handler; it returns the vcpu to switch to, or NULL to
         * keep running the current one.
         */
        bl irq_lower

        mrs x1, tpidr_el2
        ldr x1, [x1, #CPU_CURRENT]
        cbnz x0, vcpu_switch

        /* vcpu is not changing. */
        mov x0, x1
        b vcpu_restore_volatile_and_run

.balign 0x80
        /* fiq_lower_64 */
        b .

.balign 0x80
        /* serr_lower_64 */
        b .

.balign 0x80
        /* sync_lower_32 */
        b .

.balign 0x80
        /* irq_lower_32 */
        b .

.balign 0x80
        /* fiq_lower_32 */
        b .

.balign 0x80
        /* serr_lower_32 */
        b .

.balign 0x80

/**
 * Switch to a new vcpu.
 *
 * All volatile registers from the old vcpu have already been saved. We need
 * to save only non-volatile ones from the old vcpu, and restore all from the
 * new one.
 *
 * x0 is a pointer to the new vcpu.
 * x1 is a pointer to the old vcpu.
 */
vcpu_switch:
        /* Save non-volatile registers. */
        add x1, x1, #VCPU_REGS
        stp x19, x20, [x1, #8 * 19]
        stp x21, x22, [x1, #8 * 21]
        stp x23, x24, [x1, #8 * 23]
        stp x25, x26, [x1, #8 * 25]
        stp x27, x28, [x1, #8 * 27]

        /* Save lazy state. */
        add x1, x1, #(VCPU_LAZY - VCPU_REGS)

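        /*
         * The EL1 execution context (plus vmpidr_el2) is saved in pairs;
         * each stp below fills one 16-byte slot of the vcpu's lazy-state
         * area.
         */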
        mrs x24, vmpidr_el2
        mrs x25, csselr_el1
        stp x24, x25, [x1, #16 * 0]

        mrs x2, sctlr_el1
        mrs x3, actlr_el1
        stp x2, x3, [x1, #16 * 1]

        mrs x4, cpacr_el1
        mrs x5, ttbr0_el1
        stp x4, x5, [x1, #16 * 2]

        mrs x6, ttbr1_el1
        mrs x7, tcr_el1
        stp x6, x7, [x1, #16 * 3]

        mrs x8, esr_el1
        mrs x9, afsr0_el1
        stp x8, x9, [x1, #16 * 4]

        mrs x10, afsr1_el1
        mrs x11, far_el1
        stp x10, x11, [x1, #16 * 5]

        mrs x12, mair_el1
        mrs x13, vbar_el1
        stp x12, x13, [x1, #16 * 6]

        mrs x14, contextidr_el1
        mrs x15, tpidr_el0
        stp x14, x15, [x1, #16 * 7]

        mrs x16, tpidrro_el0
        mrs x17, tpidr_el1
        stp x16, x17, [x1, #16 * 8]

        mrs x18, amair_el1
        mrs x19, cntkctl_el1
        stp x18, x19, [x1, #16 * 9]

        mrs x20, sp_el0
        mrs x21, sp_el1
        stp x20, x21, [x1, #16 * 10]

        mrs x22, par_el1
        str x22, [x1, #16 * 11]

        /* Intentional fall through. */

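/**
 * Restore the lazy state and all general-purpose registers of the given
 * vcpu, then resume execution in it.
 *
 * x0 is a pointer to the new vcpu.
 */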
.globl vcpu_restore_all_and_run
vcpu_restore_all_and_run:
        /* Update cpu()->current. */
        mrs x2, tpidr_el2
        str x0, [x2, #CPU_CURRENT]

        /* Get a pointer to the lazy registers. */
        add x0, x0, #VCPU_LAZY

        ldp x24, x25, [x0, #16 * 0]
        msr vmpidr_el2, x24
        msr csselr_el1, x25

        ldp x2, x3, [x0, #16 * 1]
        msr sctlr_el1, x2
        msr actlr_el1, x3

        ldp x4, x5, [x0, #16 * 2]
        msr cpacr_el1, x4
        msr ttbr0_el1, x5

        ldp x6, x7, [x0, #16 * 3]
        msr ttbr1_el1, x6
        msr tcr_el1, x7

        ldp x8, x9, [x0, #16 * 4]
        msr esr_el1, x8
        msr afsr0_el1, x9

        ldp x10, x11, [x0, #16 * 5]
        msr afsr1_el1, x10
        msr far_el1, x11

        ldp x12, x13, [x0, #16 * 6]
        msr mair_el1, x12
        msr vbar_el1, x13

        ldp x14, x15, [x0, #16 * 7]
        msr contextidr_el1, x14
        msr tpidr_el0, x15

        ldp x16, x17, [x0, #16 * 8]
        msr tpidrro_el0, x16
        msr tpidr_el1, x17

        ldp x18, x19, [x0, #16 * 9]
        msr amair_el1, x18
        msr cntkctl_el1, x19

        ldp x20, x21, [x0, #16 * 10]
        msr sp_el0, x20
        msr sp_el1, x21

        ldr x22, [x0, #16 * 11]
        msr par_el1, x22

        /* Restore non-volatile registers. */
        add x0, x0, #(VCPU_REGS - VCPU_LAZY)

        ldp x19, x20, [x0, #8 * 19]
        ldp x21, x22, [x0, #8 * 21]
        ldp x23, x24, [x0, #8 * 23]
        ldp x25, x26, [x0, #8 * 25]
        ldp x27, x28, [x0, #8 * 27]

        /* Restore volatile registers and return. */
        sub x0, x0, #VCPU_REGS

/**
 * Restore volatile registers and run the given vcpu.
 *
 * x0 is a pointer to the vcpu.
 */
vcpu_restore_volatile_and_run:
        /* Restore volatile registers. */
        add x0, x0, #VCPU_REGS

        ldp x4, x5, [x0, #8 * 4]
        ldp x6, x7, [x0, #8 * 6]
        ldp x8, x9, [x0, #8 * 8]
        ldp x10, x11, [x0, #8 * 10]
        ldp x12, x13, [x0, #8 * 12]
        ldp x14, x15, [x0, #8 * 14]
        ldp x16, x17, [x0, #8 * 16]
        ldr x18, [x0, #8 * 18]
        ldp x29, x30, [x0, #8 * 29]

        /* Restore return address & mode. */
        ldp x1, x2, [x0, #8 * 31]
        msr elr_el2, x1
        msr spsr_el2, x2

        /* Restore x0..x3, which were used as scratch registers above. */
        ldp x2, x3, [x0, #8 * 2]
        ldp x0, x1, [x0, #8 * 0]
        eret

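/*
 * Handle a synchronous exception from a lower EL that is not due to an HVC
 * instruction: save the full volatile state, let the C handler decide what
 * to do, then either switch vcpus or resume the current one.
 */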
slow_sync_lower_64:
        /* Get the current vcpu. */
        mrs x18, tpidr_el2
        ldr x18, [x18, #CPU_CURRENT]

        /* Save volatile registers. */
        add x18, x18, #VCPU_REGS
        stp x0, x1, [x18, #8 * 0]
        stp x2, x3, [x18, #8 * 2]
        stp x4, x5, [x18, #8 * 4]
        stp x6, x7, [x18, #8 * 6]
        stp x8, x9, [x18, #8 * 8]
        stp x10, x11, [x18, #8 * 10]
        stp x12, x13, [x18, #8 * 12]
        stp x14, x15, [x18, #8 * 14]
        stp x16, x17, [x18, #8 * 16]
        stp x29, x30, [x18, #8 * 29]

        /*
         * x18 was saved on the stack, so move it into the vcpu's register
         * buffer.
         */
        ldr x0, [sp], #16
        str x0, [x18, #8 * 18]

        /* Save return address & mode. */
        mrs x1, elr_el2
        mrs x2, spsr_el2
        stp x1, x2, [x18, #8 * 31]

        /* Read the syndrome register and call the C handler. */
        mrs x0, esr_el2
        bl sync_lower_exception

        /* Switch to the vcpu returned by sync_lower_exception, if any. */
        mrs x1, tpidr_el2
        ldr x1, [x1, #CPU_CURRENT]
        cbnz x0, vcpu_switch

        /* vcpu is not changing. */
        mov x0, x1
        b vcpu_restore_volatile_and_run

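/*
 * Entered from the HVC fast path when hvc_handler requests a vcpu switch:
 * x0 holds the value to return to the caller and x1 the vcpu to switch to.
 */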
sync_lower_64_switch:
        /* We'll have to switch, so save volatile state before doing so. */
        mrs x18, tpidr_el2
        ldr x18, [x18, #CPU_CURRENT]

        /* Store zeroes in volatile register storage, except x0. */
        add x18, x18, #VCPU_REGS
        stp x0, xzr, [x18, #8 * 0]
        stp xzr, xzr, [x18, #8 * 2]
        stp xzr, xzr, [x18, #8 * 4]
        stp xzr, xzr, [x18, #8 * 6]
        stp xzr, xzr, [x18, #8 * 8]
        stp xzr, xzr, [x18, #8 * 10]
        stp xzr, xzr, [x18, #8 * 12]
        stp xzr, xzr, [x18, #8 * 14]
        stp xzr, xzr, [x18, #8 * 16]
        stp x29, x30, [x18, #8 * 29]

        /*
         * x18 was saved on the stack, so move it into the vcpu's register
         * buffer.
         */
        ldr x2, [sp], #16
        str x2, [x18, #8 * 18]

        /* Save return address & mode. */
        mrs x2, elr_el2
        mrs x3, spsr_el2
        stp x2, x3, [x18, #8 * 31]

        /* Save the lazy state, then switch to the new vcpu. */
        mov x0, x1
        sub x1, x18, #VCPU_REGS
        b vcpu_switch