/*
 * Copyright (c) 2018, Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#ifndef __ASM_MACROS_S__
#define __ASM_MACROS_S__

#include <arch.h>
#include <asm_macros_common.S>

#define TLB_INVALIDATE(_type) \
	tlbi	_type

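	/*
	 * Illustrative use (not part of this header):
	 *
	 *	TLB_INVALIDATE(vmalle1)
	 *
	 * expands to 'tlbi vmalle1', invalidating all stage 1 EL1&0 TLB
	 * entries for the current VMID on this PE.
	 */
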
	.macro	func_prologue
	stp	x29, x30, [sp, #-0x10]!
	mov	x29, sp
	.endm

	.macro	func_epilogue
	ldp	x29, x30, [sp], #0x10
	.endm
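
	/*
	 * Illustrative use of the prologue/epilogue pair in a hypothetical
	 * assembly function (a sketch, not part of this header):
	 *
	 * my_function:
	 *	func_prologue		// save x29/x30, set up frame pointer
	 *	bl	some_helper	// x30 may safely be clobbered here
	 *	func_epilogue		// restore x29/x30
	 *	ret
	 */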

	/*
	 * Write the smallest data cache line size, in bytes, into \reg.
	 * CTR_EL0.DminLine (bits [19:16]) holds the log2 of that size in
	 * words, hence the left shift of an initial value of 4.
	 */
	.macro	dcache_line_size  reg, tmp
	mrs	\tmp, ctr_el0
	ubfx	\tmp, \tmp, #16, #4
	mov	\reg, #4
	lsl	\reg, \reg, \tmp
	.endm

	/*
	 * Write the smallest instruction cache line size, in bytes, into
	 * \reg. CTR_EL0.IminLine (bits [3:0]) holds the log2 of that size
	 * in words.
	 */
	.macro	icache_line_size  reg, tmp
	mrs	\tmp, ctr_el0
	and	\tmp, \tmp, #0xf
	mov	\reg, #4
	lsl	\reg, \reg, \tmp
	.endm
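
	/*
	 * Sketch of a typical use of dcache_line_size: cleaning a buffer
	 * to the point of coherency by VA. The register assignments are
	 * illustrative only, not part of this header:
	 *
	 *	// x0 = buffer base, x1 = buffer end (exclusive)
	 *	dcache_line_size x2, x3		// x2 = line size in bytes
	 *	sub	x3, x2, #1
	 *	bic	x0, x0, x3		// align base down to a line
	 * 1:	dc	cvac, x0		// clean line by VA to PoC
	 *	add	x0, x0, x2
	 *	cmp	x0, x1
	 *	b.lo	1b
	 *	dsb	sy
	 */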

	/*
	 * Declare the exception vector table, ensuring it is aligned on a
	 * 2KB boundary, as required by the ARMv8 architecture.
	 * Use zero bytes as the fill value to be stored in the padding
	 * bytes so that the padding decodes as illegal AArch64
	 * instructions. This increases security and robustness, and
	 * potentially facilitates debugging.
	 */
	.macro vector_base label
	.section .vectors, "ax"
	.align 11, 0
\label:
	.endm
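
	/*
	 * Illustrative use (hypothetical label, not part of this header):
	 *
	 *	vector_base my_exception_vectors
	 *
	 * declares 'my_exception_vectors' at the start of a 2KB-aligned
	 * vector table in the .vectors section, suitable for programming
	 * into VBAR_ELx.
	 */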

	/*
	 * Create an entry in the exception vector table, ensuring it is
	 * aligned on a 128-byte boundary, as required by the ARMv8
	 * architecture. Use zero bytes as the fill value to be stored in
	 * the padding bytes so that the padding decodes as illegal
	 * AArch64 instructions. This increases security and robustness,
	 * and potentially facilitates debugging.
	 */
	.macro vector_entry label
	.section .vectors, "ax"
	.cfi_sections .debug_frame
	.align 7, 0
	.type \label, %function
	.cfi_startproc
\label:
	.endm

	/*
	 * Pad the exception vector entry with bytes until it reaches its
	 * full size, which is always 32 instructions. If the exception
	 * vector contains more than 32 instructions, an error is emitted
	 * at assembly time.
	 */
	.macro end_vector_entry label
	.cfi_endproc
	.fill	\label + (32 * 4) - .
	.endm
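
	/*
	 * Illustrative use of the vector_entry/end_vector_entry pair
	 * (hypothetical label and handler, not part of this header):
	 *
	 * vector_entry sync_exception_sp0
	 *	b	sync_exception_sp0	// placeholder handler
	 * end_vector_entry sync_exception_sp0
	 *
	 * The .fill directive pads the entry out to 32 instructions
	 * (128 bytes); its count goes negative, and assembly fails, if
	 * the handler is larger than that.
	 */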

	/*
	 * This macro calculates the base address of an MP stack using the
	 * platform_get_core_pos() index, the name of the stack storage
	 * and the size of each stack.
	 * Out: X0 = physical address of stack base
	 * Clobber: X30, X1, X2
	 */
	.macro get_mp_stack _name, _size
	bl	platform_get_core_pos
	ldr	x2, =(\_name + \_size)
	mov	x1, #\_size
	madd	x0, x0, x1, x2
	.endm
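
	/*
	 * Illustrative use (hypothetical symbols, not part of this
	 * header): with 'platform_stacks' the start of a per-CPU stack
	 * array and STACK_SIZE the size of each stack,
	 *
	 *	get_mp_stack platform_stacks, STACK_SIZE
	 *
	 * leaves in x0 the stack base (initial stack pointer) for the
	 * core index returned by platform_get_core_pos(). Note the 'bl'
	 * inside the macro clobbers x30.
	 */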

	/*
	 * This macro calculates the base address of a UP stack using the
	 * name of the stack storage and the size of the stack.
	 * Out: X0 = physical address of stack base
	 */
	.macro get_up_stack _name, _size
	ldr	x0, =(\_name + \_size)
	.endm
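
	/*
	 * Illustrative use (hypothetical symbols, not part of this
	 * header):
	 *
	 *	get_up_stack platform_stack, STACK_SIZE
	 *
	 * loads the address 'platform_stack + STACK_SIZE' into x0.
	 */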

	/*
	 * Helper macro to generate the best mov/movk combinations
	 * according to the value to be moved. The 16 bits from '_shift'
	 * are tested and, if not all zero, they are moved into '_reg'
	 * without affecting the other bits.
	 */
	.macro _mov_imm16 _reg, _val, _shift
		.if (\_val >> \_shift) & 0xffff
			/*
			 * In GNU as, '<<' binds tighter than '-', so the
			 * expression below is the mask of all bits below
			 * '_shift': if any of them are already set, merge
			 * the new 16 bits in with movk.
			 */
			.if (\_val & (1 << \_shift - 1))
				movk	\_reg, (\_val >> \_shift) & 0xffff, LSL \_shift
			.else
				mov	\_reg, \_val & (0xffff << \_shift)
			.endif
		.endif
	.endm

	/*
	 * Helper macro to load arbitrary values into 32 or 64-bit
	 * registers, generating the best mov/movk combinations. Many base
	 * addresses are 64KB aligned; in that case the macro avoids
	 * updating bits 15:0 altogether.
	 */
	.macro mov_imm _reg, _val
		.if (\_val) == 0
			mov	\_reg, #0
		.else
			_mov_imm16	\_reg, (\_val), 0
			_mov_imm16	\_reg, (\_val), 16
			_mov_imm16	\_reg, (\_val), 32
			_mov_imm16	\_reg, (\_val), 48
		.endif
	.endm
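
	/*
	 * Illustrative expansion (value chosen for illustration):
	 *
	 *	mov_imm	x0, 0x80001000
	 *
	 * emits
	 *
	 *	mov	x0, #0x1000
	 *	movk	x0, #0x8000, lsl #16
	 *
	 * whereas 'mov_imm x0, 0x80000000' emits a single mov, since
	 * bits 15:0 of the value are zero.
	 */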

	/*
	 * Read the EL1 or EL2 variant of 'sysreg', depending on the
	 * current exception level, into x0. Branches to 'dead' (defined
	 * elsewhere) if executing at an exception level other than EL1
	 * or EL2.
	 */
	.macro asm_read_sysreg_el1_or_el2 sysreg
	mrs	x0, CurrentEL
	cmp	x0, #(MODE_EL1 << MODE_EL_SHIFT)
	b.eq	1f
	cmp	x0, #(MODE_EL2 << MODE_EL_SHIFT)
	b.eq	2f
	b	dead
1:
	mrs	x0, \sysreg\()_el1
	b	3f
2:
	mrs	x0, \sysreg\()_el2
3:
	.endm

	/*
	 * Write x0 into the EL1 or EL2 variant of 'sysreg', depending on
	 * the current exception level, using 'scratch_reg' as a scratch
	 * register. Branches to 'dead' (defined elsewhere) if executing
	 * at an exception level other than EL1 or EL2.
	 */
	.macro asm_write_sysreg_el1_or_el2 sysreg scratch_reg
	mrs	\scratch_reg, CurrentEL
	cmp	\scratch_reg, #(MODE_EL1 << MODE_EL_SHIFT)
	b.eq	1f
	cmp	\scratch_reg, #(MODE_EL2 << MODE_EL_SHIFT)
	b.eq	2f
	b	dead
1:
	msr	\sysreg\()_el1, x0
	b	3f
2:
	msr	\sysreg\()_el2, x0
3:
	.endm
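
	/*
	 * Illustrative use (not part of this header): with the new value
	 * in x0 and x9 as the scratch register,
	 *
	 *	asm_write_sysreg_el1_or_el2 vbar x9
	 *
	 * writes x0 into VBAR_EL1 or VBAR_EL2, whichever matches the
	 * current exception level.
	 */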

	.macro asm_read_sctlr_el1_or_el2
	asm_read_sysreg_el1_or_el2 sctlr
	.endm

	.macro asm_write_sctlr_el1_or_el2 scratch_reg
	asm_write_sysreg_el1_or_el2 sctlr \scratch_reg
	.endm

	.macro asm_write_vbar_el1_or_el2 scratch_reg
	asm_write_sysreg_el1_or_el2 vbar \scratch_reg
	.endm

/*
 * Depending on the current exception level, jump to 'label_el1' or
 * 'label_el2'. If the current exception level is neither EL1 nor EL2,
 * jump to 'label_error' instead.
 * The caller needs to provide the macro with a 64-bit scratch register
 * to use; its previous contents will be lost.
 */
	.macro JUMP_EL1_OR_EL2 scratch_reg, label_el1, label_el2, label_error
	mrs	\scratch_reg, CurrentEL
	cmp	\scratch_reg, #(MODE_EL1 << MODE_EL_SHIFT)
	b.eq	\label_el1
	cmp	\scratch_reg, #(MODE_EL2 << MODE_EL_SHIFT)
	b.eq	\label_el2
	b	\label_error
	.endm
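
	/*
	 * Illustrative use (hypothetical labels, not part of this
	 * header):
	 *
	 *	JUMP_EL1_OR_EL2 x0, el1_entry, el2_entry, error_handler
	 *
	 * branches to el1_entry or el2_entry depending on CurrentEL,
	 * or to error_handler otherwise, clobbering x0.
	 */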

#endif /* __ASM_MACROS_S__ */