blob: 8a69c38d5dfbd5da57f13ad85e6dc4f1ddb74554 [file] [log] [blame]
/*
 * Copyright (c) 2018-2021, Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
6
7#ifndef __ASM_MACROS_S__
8#define __ASM_MACROS_S__
9
10#include <arch.h>
11#include <asm_macros_common.S>
12
/*
 * Issue a TLB invalidate operation; '_type' is the TLBI operation
 * argument (e.g. alle1, vmalle1) pasted directly after the mnemonic.
 */
#define TLB_INVALIDATE(_type) \
	tlbi	_type
15
	/*
	 * Standard function prologue: push the frame pointer (x29) and
	 * link register (x30) as a pair (keeps sp 16-byte aligned, as
	 * AArch64 requires for memory accesses via sp) and establish x29
	 * as the new frame pointer.
	 */
	.macro func_prologue
	stp	x29, x30, [sp, #-0x10]!
	mov	x29,sp
	.endm
20
	/*
	 * Standard function epilogue: restore the x29/x30 pair saved by
	 * func_prologue and pop the 16-byte frame. Must mirror
	 * func_prologue exactly; the caller still has to issue 'ret'.
	 */
	.macro func_epilogue
	ldp	x29, x30, [sp], #0x10
	.endm
24
25
	/*
	 * Compute the smallest data cache line size, in bytes.
	 * Out:     \reg = D-cache line size in bytes
	 * Clobber: \tmp
	 *
	 * CTR_EL0.DminLine (bits [19:16]) is log2 of the number of
	 * 4-byte words in the smallest D-cache line, hence
	 * bytes = 4 << DminLine.
	 */
	.macro dcache_line_size reg, tmp
	mrs	\tmp, ctr_el0
	ubfx	\tmp, \tmp, #16, #4
	mov	\reg, #4
	lsl	\reg, \reg, \tmp
	.endm
32
33
	/*
	 * Compute the smallest instruction cache line size, in bytes.
	 * Out:     \reg = I-cache line size in bytes
	 * Clobber: \tmp
	 *
	 * CTR_EL0.IminLine (bits [3:0]) is log2 of the number of
	 * 4-byte words in the smallest I-cache line, hence
	 * bytes = 4 << IminLine.
	 */
	.macro icache_line_size reg, tmp
	mrs	\tmp, ctr_el0
	and	\tmp, \tmp, #0xf
	mov	\reg, #4
	lsl	\reg, \reg, \tmp
	.endm
40
	/*
	 * Declare the exception vector table, enforcing it is aligned on a
	 * 2KB boundary, as required by the ARMv8 architecture.
	 * Use zero bytes as the fill value to be stored in the padding bytes
	 * so that it inserts illegal AArch64 instructions. This increases
	 * security, robustness and potentially facilitates debugging.
	 *
	 * The table is emitted into the dedicated ".vectors" section so the
	 * linker script can place it; '\label' marks its base address (to be
	 * programmed into VBAR_ELx).
	 */
	.macro vector_base label
	.section .vectors, "ax"
	.align 11, 0
	\label:
	.endm
53
	/*
	 * Create an entry in the exception vector table, enforcing it is
	 * aligned on a 128-byte boundary, as required by the ARMv8
	 * architecture. Use zero bytes as the fill value to be stored in the
	 * padding bytes so that it inserts illegal AArch64 instructions.
	 * This increases security, robustness and potentially facilitates
	 * debugging.
	 *
	 * The entry is typed as a function and opens a CFI region
	 * (.debug_frame) so debuggers can unwind through exception entry;
	 * it must be closed with end_vector_entry.
	 */
	.macro vector_entry label
	.section .vectors, "ax"
	.cfi_sections .debug_frame
	.align 7, 0
	.type \label, %function
	.cfi_startproc
	\label:
	.endm
70
	/*
	 * Add the bytes until fill the full exception vector, whose size is always
	 * 32 instructions. If there are more than 32 instructions in the
	 * exception vector then an error is emitted.
	 */
	.macro end_vector_entry label
	.cfi_endproc
	/*
	 * Pad from the current location (.) up to \label + 128 bytes
	 * (32 * 4-byte instructions); a negative repeat count makes the
	 * assembler reject an oversized vector entry.
	 */
	.fill	\label + (32 * 4) - .
	.endm
80
	/*
	 * Create a vector entry that just spins making the exception unrecoverable.
	 * Expands to a complete vector_entry/end_vector_entry pair whose only
	 * instruction is a branch-to-self.
	 */
	.macro vector_entry_spin name
	vector_entry \name
	b	\name
	end_vector_entry \name
	.endm
89
	/*
	 * This macro calculates the base address of an MP stack using the
	 * platform_get_core_pos() index, the name of the stack storage and
	 * the size of each stack
	 * Out: X0 = physical address of stack base
	 * Clobber: X30, X1, X2
	 */
	.macro get_mp_stack _name, _size
	bl	platform_get_core_pos		/* x0 = this core's linear index */
	ldr	x2, =(\_name + \_size)		/* x2 = top of the first stack */
	mov	x1, #\_size
	madd	x0, x0, x1, x2			/* x0 = index * size + x2 */
	.endm
103
	/*
	 * This macro calculates the base address of a UP stack using the
	 * name of the stack storage and the size of the stack
	 * Out: X0 = physical address of stack base
	 *
	 * The "base" is the highest address of the storage, since AArch64
	 * stacks grow downwards.
	 */
	.macro get_up_stack _name, _size
	ldr	x0, =(\_name + \_size)
	.endm
112
113 /*
114 * Helper macro to generate the best mov/movk combinations according
115 * the value to be moved. The 16 bits from '_shift' are tested and
116 * if not zero, they are moved into '_reg' without affecting
117 * other bits.
118 */
119 .macro _mov_imm16 _reg, _val, _shift
120 .if (\_val >> \_shift) & 0xffff
121 .if (\_val & (1 << \_shift - 1))
122 movk \_reg, (\_val >> \_shift) & 0xffff, LSL \_shift
123 .else
124 mov \_reg, \_val & (0xffff << \_shift)
125 .endif
126 .endif
127 .endm
128
	/*
	 * Helper macro to load arbitrary values into 32 or 64-bit registers
	 * which generates the best mov/movk combinations. Many base addresses
	 * are 64KB aligned the macro will eliminate updating bits 15:0 in
	 * that case
	 *
	 * '_val' must be an assemble-time constant; each _mov_imm16
	 * expansion handles one 16-bit field, so at most 4 instructions
	 * (and often fewer) are emitted.
	 */
	.macro mov_imm _reg, _val
	.if (\_val) == 0
	mov	\_reg, #0
	.else
	_mov_imm16 \_reg, (\_val), 0
	_mov_imm16 \_reg, (\_val), 16
	_mov_imm16 \_reg, (\_val), 32
	_mov_imm16 \_reg, (\_val), 48
	.endif
	.endm
145
	/*
	 * Read the EL1 or EL2 variant of system register '\sysreg'
	 * (suffix _el1/_el2 is appended), selected by the current
	 * exception level.
	 * Out:     x0 = register value
	 * Clobber: x0, flags
	 * Branches to the external 'dead' handler if the current EL is
	 * neither EL1 nor EL2. Uses numeric local labels 1/2/3.
	 */
	.macro asm_read_sysreg_el1_or_el2 sysreg
	mrs	x0, CurrentEL
	cmp	x0, #(MODE_EL1 << MODE_EL_SHIFT)
	b.eq	1f
	cmp	x0, #(MODE_EL2 << MODE_EL_SHIFT)
	b.eq	2f
	b	dead			/* unsupported exception level */
1:
	mrs	x0, \sysreg\()_el1
	b	3f
2:
	mrs	x0, \sysreg\()_el2
3:
	.endm
160
	/*
	 * Write x0 to the EL1 or EL2 variant of system register '\sysreg'
	 * (suffix _el1/_el2 is appended), selected by the current
	 * exception level.
	 * In:      x0 = value to write
	 * Clobber: \scratch_reg, flags
	 * Branches to the external 'dead' handler if the current EL is
	 * neither EL1 nor EL2. Uses numeric local labels 1/2/3.
	 */
	.macro asm_write_sysreg_el1_or_el2 sysreg scratch_reg
	mrs	\scratch_reg, CurrentEL
	cmp	\scratch_reg, #(MODE_EL1 << MODE_EL_SHIFT)
	b.eq	1f
	cmp	\scratch_reg, #(MODE_EL2 << MODE_EL_SHIFT)
	b.eq	2f
	b	dead			/* unsupported exception level */
1:
	msr	\sysreg\()_el1, x0
	b	3f
2:
	msr	\sysreg\()_el2, x0
3:
	.endm
175
	/*
	 * Read SCTLR_EL1 or SCTLR_EL2 into x0, depending on the current
	 * exception level. Clobbers x0 and flags; jumps to 'dead' from
	 * any other EL (see asm_read_sysreg_el1_or_el2).
	 */
	.macro asm_read_sctlr_el1_or_el2
	asm_read_sysreg_el1_or_el2 sctlr
	.endm
179
	/*
	 * Write x0 to SCTLR_EL1 or SCTLR_EL2, depending on the current
	 * exception level. Clobbers \scratch_reg and flags; jumps to
	 * 'dead' from any other EL (see asm_write_sysreg_el1_or_el2).
	 */
	.macro asm_write_sctlr_el1_or_el2 scratch_reg
	asm_write_sysreg_el1_or_el2 sctlr \scratch_reg
	.endm
183
	/*
	 * Write x0 to VBAR_EL1 or VBAR_EL2 (vector table base), depending
	 * on the current exception level. Clobbers \scratch_reg and flags;
	 * jumps to 'dead' from any other EL (see asm_write_sysreg_el1_or_el2).
	 */
	.macro asm_write_vbar_el1_or_el2 scratch_reg
	asm_write_sysreg_el1_or_el2 vbar \scratch_reg
	.endm
187
/*
 * Depending on the current exception level, jump to 'label_el1' or 'label_el2'.
 * If the current exception level is neither EL1 nor EL2, jump to 'label_error'
 * instead.
 * The caller needs to provide the macro with a scratch 64-bit register to use.
 * Its contents prior to calling this function will be lost.
 * Also clobbers the condition flags.
 */
	.macro JUMP_EL1_OR_EL2 scratch_reg, label_el1, label_el2, label_error
	mrs	\scratch_reg, CurrentEL
	cmp	\scratch_reg, #(MODE_EL1 << MODE_EL_SHIFT)
	b.eq	\label_el1
	cmp	\scratch_reg, #(MODE_EL2 << MODE_EL_SHIFT)
	b.eq	\label_el2
	b	\label_error
	.endm
203
	/*
	 * Helper macro to read system register value into x0
	 *
	 * Expands to a tiny routine body: reads '\reg' into x0 and
	 * returns. When BTI is enabled a 'bti j' landing pad is emitted
	 * first — presumably these routines are reached via an indirect
	 * branch (BR); confirm against the dispatch code.
	 */
	.macro read reg:req
#if ENABLE_BTI
	bti	j
#endif
	mrs	x0, \reg
	ret
	.endm
214
	/*
	 * Helper macro to write value from x1 to system register
	 *
	 * Expands to a tiny routine body: writes x1 to '\reg' and
	 * returns. When BTI is enabled a 'bti j' landing pad is emitted
	 * first — presumably these routines are reached via an indirect
	 * branch (BR); confirm against the dispatch code.
	 */
	.macro write reg:req
#if ENABLE_BTI
	bti	j
#endif
	msr	\reg, x1
	ret
	.endm
225
Sandrine Bailleux3cd87d72018-10-09 11:12:55 +0200226#endif /* __ASM_MACROS_S__ */