#include "offsets.h"

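/*
 * VCPU_REGS and VCPU_LAZY are byte offsets into the vcpu structure, provided
 * by offsets.h. The register buffer at VCPU_REGS holds x0-x30 in 8-byte
 * slots, followed by the saved pc (elr_el2) and spsr at slots 31 and 32,
 * which is why elr/spsr pairs are stored at [#8 * 31] below.
 */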
.section .text.vector_table_el2, "ax"
.global vector_table_el2
.balign 0x800
vector_table_el2:
	/* sync_cur_sp0 */
	b .

.balign 0x80
	/* irq_cur_sp0 */
	b irq_current

.balign 0x80
	/* fiq_cur_sp0 */
	b .

.balign 0x80
	/* serr_cur_sp0 */
	b .

.balign 0x80
	/* sync_cur_spx */
	mrs x0, esr_el2
	mrs x1, elr_el2
	b sync_current_exception

.balign 0x80
	/* irq_cur_spx */
	b irq_current

.balign 0x80
	/* fiq_cur_spx */
	b .

.balign 0x80
	/* serr_cur_spx */
	b .

.balign 0x80
	/* sync_lower_64 */

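	/*
	 * Exceptions caused by an HVC instruction take a fast path that avoids
	 * saving the full register state; everything else branches to
	 * slow_sync_lower_64, which saves all volatile registers before
	 * calling into C.
	 */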
	/*
	 * Save x18 since we're about to clobber it. We subtract 16 instead of
	 * 8 from the stack pointer to keep it 16-byte aligned.
	 */
	str x18, [sp, #-16]!

	/* Extract the exception class (EC), bits [31:26] of esr_el2. */
	mrs x18, esr_el2
	lsr x18, x18, #26

	/* Take the slow path unless the exception is an HVC (EC = 0x16). */
	sub x18, x18, #0x16
	cbnz x18, slow_sync_lower_64

	/*
	 * Save x29 and x30, which are not preserved by the callee, then call
	 * the HVC handler. The handler returns the value for the vcpu's x0 in
	 * x0, and the vcpu to switch to in x1 (or 0 if no switch is needed).
	 */
	stp x29, x30, [sp, #-16]!
	bl hvc_handler
	ldp x29, x30, [sp], #16
	cbnz x1, sync_lower_64_switch

	/* Zero volatile registers x1-x17 (x18 is restored below) and return. */
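	/*
	 * A single pair of zeros is pushed onto the stack; each ldp below
	 * re-reads that same pair without writeback, so x1-x16 are loaded
	 * with zeros, and the final ldp pops the pair off the stack.
	 */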
	stp xzr, xzr, [sp, #-16]!
	ldp x1, x2, [sp]
	ldp x3, x4, [sp]
	ldp x5, x6, [sp]
	ldp x7, x8, [sp]
	ldp x9, x10, [sp]
	ldp x11, x12, [sp]
	ldp x13, x14, [sp]
	ldp x15, x16, [sp], #16
	mov x17, xzr

	/* Restore x18, which was saved on the stack. */
	ldr x18, [sp], #16
	eret

.balign 0x80
	/* irq_lower_64 */

	/*
	 * Save x0 since we're about to clobber it. Subtract 16 rather than 8
	 * to keep the stack pointer 16-byte aligned.
	 */
	str x0, [sp, #-16]!

	/* Get the current vcpu. */
	mrs x0, tpidr_el2

	/* Save volatile registers. */
	add x0, x0, #VCPU_REGS
	stp x2, x3, [x0, #8 * 2]
	stp x4, x5, [x0, #8 * 4]
	stp x6, x7, [x0, #8 * 6]
	stp x8, x9, [x0, #8 * 8]
	stp x10, x11, [x0, #8 * 10]
	stp x12, x13, [x0, #8 * 12]
	stp x14, x15, [x0, #8 * 14]
	stp x16, x17, [x0, #8 * 16]
	str x18, [x0, #8 * 18]
	stp x29, x30, [x0, #8 * 29]

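	/*
	 * x19-x28 are callee-saved and will be preserved by the C handler, so
	 * they only need to be saved when actually switching to another vcpu
	 * (see vcpu_switch below).
	 */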
	/* Recover the vcpu's x0 from the stack and save it alongside x1. */
	ldr x2, [sp], #16
	stp x2, x1, [x0, #8 * 0]

	/* Save return address & mode. */
	mrs x1, elr_el2
	mrs x2, spsr_el2
	stp x1, x2, [x0, #8 * 31]

	/*
	 * Call the C handler; it returns the vcpu to switch to, or 0 to keep
	 * running the current one.
	 */
	bl irq_lower

	mrs x1, tpidr_el2
	cbnz x0, vcpu_switch

	/* The vcpu is not changing; restore volatile state and resume it. */
	add x0, x1, #VCPU_REGS
	b vcpu_restore_volatile_and_run

.balign 0x80
	/* fiq_lower_64 */
	b .

.balign 0x80
	/* serr_lower_64 */
	b .

.balign 0x80
	/* sync_lower_32 */
	b .

.balign 0x80
	/* irq_lower_32 */
	b .

.balign 0x80
	/* fiq_lower_32 */
	b .

.balign 0x80
	/* serr_lower_32 */
	b .

.balign 0x80

/**
 * Switch to a new vcpu.
 *
 * All volatile registers from the old vcpu have already been saved. We need
 * to save only non-volatile ones from the old vcpu, and restore all from the
 * new one.
 *
 * x0 is a pointer to the new vcpu.
 * x1 is a pointer to the old vcpu.
 */
vcpu_switch:
	/* Save non-volatile registers. */
	add x1, x1, #VCPU_REGS
	stp x19, x20, [x1, #8 * 19]
	stp x21, x22, [x1, #8 * 21]
	stp x23, x24, [x1, #8 * 23]
	stp x25, x26, [x1, #8 * 25]
	stp x27, x28, [x1, #8 * 27]

	/* Save lazy state. */
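	/*
	 * The "lazy" state consists of EL0/EL1 system registers that the
	 * hypervisor does not use itself, so they only need to be saved and
	 * restored when switching between vcpus, not on every trap to EL2.
	 */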
	add x1, x1, #(VCPU_LAZY - VCPU_REGS)

	mrs x24, vmpidr_el2
	mrs x25, csselr_el1
	stp x24, x25, [x1, #16 * 0]

	mrs x2, sctlr_el1
	mrs x3, actlr_el1
	stp x2, x3, [x1, #16 * 1]

	mrs x4, cpacr_el1
	mrs x5, ttbr0_el1
	stp x4, x5, [x1, #16 * 2]

	mrs x6, ttbr1_el1
	mrs x7, tcr_el1
	stp x6, x7, [x1, #16 * 3]

	mrs x8, esr_el1
	mrs x9, afsr0_el1
	stp x8, x9, [x1, #16 * 4]

	mrs x10, afsr1_el1
	mrs x11, far_el1
	stp x10, x11, [x1, #16 * 5]

	mrs x12, mair_el1
	mrs x13, vbar_el1
	stp x12, x13, [x1, #16 * 6]

	mrs x14, contextidr_el1
	mrs x15, tpidr_el0
	stp x14, x15, [x1, #16 * 7]

	mrs x16, tpidrro_el0
	mrs x17, tpidr_el1
	stp x16, x17, [x1, #16 * 8]

	mrs x18, amair_el1
	mrs x19, cntkctl_el1
	stp x18, x19, [x1, #16 * 9]

	mrs x20, sp_el0
	mrs x21, sp_el1
	stp x20, x21, [x1, #16 * 10]

	mrs x22, par_el1
	str x22, [x1, #16 * 11]

	/* Intentional fall through. */

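/**
 * Restore the lazy state and all general-purpose registers of the vcpu
 * pointed to by x0, then run it.
 */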
.globl vcpu_restore_all_and_run
vcpu_restore_all_and_run:
	/* Update current() by pointing tpidr_el2 at the new vcpu. */
	msr tpidr_el2, x0

	/* Get a pointer to the lazy registers. */
	add x0, x0, #VCPU_LAZY

	ldp x24, x25, [x0, #16 * 0]
	msr vmpidr_el2, x24
	msr csselr_el1, x25

	ldp x2, x3, [x0, #16 * 1]
	msr sctlr_el1, x2
	msr actlr_el1, x3

	ldp x4, x5, [x0, #16 * 2]
	msr cpacr_el1, x4
	msr ttbr0_el1, x5

	ldp x6, x7, [x0, #16 * 3]
	msr ttbr1_el1, x6
	msr tcr_el1, x7

	ldp x8, x9, [x0, #16 * 4]
	msr esr_el1, x8
	msr afsr0_el1, x9

	ldp x10, x11, [x0, #16 * 5]
	msr afsr1_el1, x10
	msr far_el1, x11

	ldp x12, x13, [x0, #16 * 6]
	msr mair_el1, x12
	msr vbar_el1, x13

	ldp x14, x15, [x0, #16 * 7]
	msr contextidr_el1, x14
	msr tpidr_el0, x15

	ldp x16, x17, [x0, #16 * 8]
	msr tpidrro_el0, x16
	msr tpidr_el1, x17

	ldp x18, x19, [x0, #16 * 9]
	msr amair_el1, x18
	msr cntkctl_el1, x19

	ldp x20, x21, [x0, #16 * 10]
	msr sp_el0, x20
	msr sp_el1, x21

	ldr x22, [x0, #16 * 11]
	msr par_el1, x22

	/* Restore non-volatile registers. */
	add x0, x0, #(VCPU_REGS - VCPU_LAZY)

	ldp x19, x20, [x0, #8 * 19]
	ldp x21, x22, [x0, #8 * 21]
	ldp x23, x24, [x0, #8 * 23]
	ldp x25, x26, [x0, #8 * 25]
	ldp x27, x28, [x0, #8 * 27]

	/* Intentional fall through. */

/**
 * Restore volatile registers and run the given vcpu.
 *
 * x0 is a pointer to the volatile registers of the target vcpu.
 */
vcpu_restore_volatile_and_run:
	ldp x4, x5, [x0, #8 * 4]
	ldp x6, x7, [x0, #8 * 6]
	ldp x8, x9, [x0, #8 * 8]
	ldp x10, x11, [x0, #8 * 10]
	ldp x12, x13, [x0, #8 * 12]
	ldp x14, x15, [x0, #8 * 14]
	ldp x16, x17, [x0, #8 * 16]
	ldr x18, [x0, #8 * 18]
	ldp x29, x30, [x0, #8 * 29]

	/* Restore return address & mode. */
	ldp x1, x2, [x0, #8 * 31]
	msr elr_el2, x1
	msr spsr_el2, x2

	/* Restore x0-x3, which were used as scratch registers above. */
	ldp x2, x3, [x0, #8 * 2]
	ldp x0, x1, [x0, #8 * 0]
	eret

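/**
 * Handle a synchronous exception from a lower EL that was not a 64-bit HVC:
 * save the vcpu's full volatile state, then defer to the C handler.
 */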
slow_sync_lower_64:
	/* Get the current vcpu. */
	mrs x18, tpidr_el2

	/* Save volatile registers. */
	add x18, x18, #VCPU_REGS
	stp x0, x1, [x18, #8 * 0]
	stp x2, x3, [x18, #8 * 2]
	stp x4, x5, [x18, #8 * 4]
	stp x6, x7, [x18, #8 * 6]
	stp x8, x9, [x18, #8 * 8]
	stp x10, x11, [x18, #8 * 10]
	stp x12, x13, [x18, #8 * 12]
	stp x14, x15, [x18, #8 * 14]
	stp x16, x17, [x18, #8 * 16]
	stp x29, x30, [x18, #8 * 29]

	/* x18 was saved on the stack, so move it into the vcpu register buffer. */
	ldr x0, [sp], #16
	str x0, [x18, #8 * 18]

	/* Save return address & mode. */
	mrs x1, elr_el2
	mrs x2, spsr_el2
	stp x1, x2, [x18, #8 * 31]

	/* Read the syndrome register and call the C handler. */
	mrs x0, esr_el2
	bl sync_lower_exception

	/* Switch to the vcpu returned by sync_lower_exception, if any. */
	mrs x1, tpidr_el2
	cbnz x0, vcpu_switch

	/* The vcpu is not changing; restore volatile state and resume it. */
	add x0, x1, #VCPU_REGS
	b vcpu_restore_volatile_and_run

sync_lower_64_switch:
	/* We'll have to switch, so save volatile state before doing so. */
	mrs x18, tpidr_el2

	/*
	 * Store zeros in the volatile register storage, except for x0, which
	 * holds the HVC return value this vcpu will see when it next runs.
	 */
	add x18, x18, #VCPU_REGS
	stp x0, xzr, [x18, #8 * 0]
	stp xzr, xzr, [x18, #8 * 2]
	stp xzr, xzr, [x18, #8 * 4]
	stp xzr, xzr, [x18, #8 * 6]
	stp xzr, xzr, [x18, #8 * 8]
	stp xzr, xzr, [x18, #8 * 10]
	stp xzr, xzr, [x18, #8 * 12]
	stp xzr, xzr, [x18, #8 * 14]
	stp xzr, xzr, [x18, #8 * 16]
	stp x29, x30, [x18, #8 * 29]

	/* x18 was saved on the stack, so move it into the vcpu register buffer. */
	ldr x2, [sp], #16
	str x2, [x18, #8 * 18]

	/* Save return address & mode. */
	mrs x2, elr_el2
	mrs x3, spsr_el2
	stp x2, x3, [x18, #8 * 31]

	/*
	 * Switch: x0 = new vcpu (returned by the HVC handler in x1), x1 = old
	 * vcpu. vcpu_switch saves the remaining non-volatile and lazy state.
	 */
	mov x0, x1
	sub x1, x18, #VCPU_REGS
	b vcpu_switch