blob: b15daf32cfd14e8a729cb171afb72b986227509a [file] [log] [blame]
/*
 * Copyright (c) 2017-2021, Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */
6
#include <arch.h>
#include <assert_macros.S>
#include <asm_macros.S>
10
11 .globl amu_group0_cnt_read_internal
12 .globl amu_group1_cnt_read_internal
13
johpow01b7d752a2020-10-08 17:29:11 -050014 /* FEAT_AMUv1p1 virtualisation offset register functions */
15 .globl amu_group0_voffset_read_internal
16 .globl amu_group0_voffset_write_internal
17 .globl amu_group1_voffset_read_internal
18 .globl amu_group1_voffset_write_internal
19
/*
 * uint64_t amu_group0_cnt_read_internal(int idx);
 *
 * Read the group 0 AMU counter selected by `idx` and return its
 * value in x0.
 *
 * In:		x0 = counter index (valid range 0..3)
 * Out:		x0 = counter value
 * Clobbers:	x1, flags (when assertions are enabled)
 */
func amu_group0_cnt_read_internal
#if ENABLE_ASSERTIONS
	/*
	 * Calling this function with an out-of-bounds index would
	 * branch past the end of the jump table, so check that no
	 * bits above the bottom two are set.
	 */
	tst	x0, #~3
	ASM_ASSERT(eq)
#endif
	/*
	 * Dispatch through the jump table below: every entry is an
	 * 8-byte mrs/ret pair, growing to 12 bytes when a "bti j"
	 * landing pad is inserted in front of each entry.
	 */
	adr	x1, 1f
	add	x1, x1, x0, lsl #3	/* idx * 8 (mrs/ret pair) */
#if ENABLE_BTI
	add	x1, x1, x0, lsl #2	/* + idx * 4 ("bti j") */
#endif
	br	x1

1:	read	AMEVCNTR00_EL0		/* index 0 */
	read	AMEVCNTR01_EL0		/* index 1 */
	read	AMEVCNTR02_EL0		/* index 2 */
	read	AMEVCNTR03_EL0		/* index 3 */
endfunc amu_group0_cnt_read_internal
51
/*
 * uint64_t amu_group1_cnt_read_internal(int idx);
 *
 * Read the group 1 AMU counter selected by `idx` and return its
 * value in x0.
 *
 * In:		x0 = counter index (valid range 0..15)
 * Out:		x0 = counter value
 * Clobbers:	x1, flags (when assertions are enabled)
 */
func amu_group1_cnt_read_internal
#if ENABLE_ASSERTIONS
	/*
	 * Calling this function with an out-of-bounds index would
	 * branch past the end of the jump table, so check that no
	 * bits above the bottom four are set.
	 */
	tst	x0, #~0xF
	ASM_ASSERT(eq)
#endif
	/*
	 * Dispatch through the jump table below: every entry is an
	 * 8-byte mrs/ret pair, growing to 12 bytes when a "bti j"
	 * landing pad is inserted in front of each entry.
	 */
	adr	x1, 1f
	add	x1, x1, x0, lsl #3	/* idx * 8 (mrs/ret pair) */
#if ENABLE_BTI
	add	x1, x1, x0, lsl #2	/* + idx * 4 ("bti j") */
#endif
	br	x1

1:	read	AMEVCNTR10_EL0		/* index 0 */
	read	AMEVCNTR11_EL0		/* index 1 */
	read	AMEVCNTR12_EL0		/* index 2 */
	read	AMEVCNTR13_EL0		/* index 3 */
	read	AMEVCNTR14_EL0		/* index 4 */
	read	AMEVCNTR15_EL0		/* index 5 */
	read	AMEVCNTR16_EL0		/* index 6 */
	read	AMEVCNTR17_EL0		/* index 7 */
	read	AMEVCNTR18_EL0		/* index 8 */
	read	AMEVCNTR19_EL0		/* index 9 */
	read	AMEVCNTR1A_EL0		/* index 10 */
	read	AMEVCNTR1B_EL0		/* index 11 */
	read	AMEVCNTR1C_EL0		/* index 12 */
	read	AMEVCNTR1D_EL0		/* index 13 */
	read	AMEVCNTR1E_EL0		/* index 14 */
	read	AMEVCNTR1F_EL0		/* index 15 */
endfunc amu_group1_cnt_read_internal
johpow01b7d752a2020-10-08 17:29:11 -050095
/*
 * Accessor functions for virtual offset registers added with FEAT_AMUv1p1
 */

/*
 * uint64_t amu_group0_voffset_read_internal(int idx);
 *
 * Read the group 0 AMU virtual offset register selected by `idx`
 * and return its value in x0.
 *
 * In:		x0 = register index (0, 2 or 3 — index 1 has no register)
 * Out:		x0 = virtual offset value
 * Clobbers:	x1, flags (when assertions are enabled)
 */
func amu_group0_voffset_read_internal
#if ENABLE_ASSERTIONS
	/*
	 * Calling this function with an out-of-bounds index would
	 * branch past the end of the jump table; reject anything
	 * above 3, and reject 1 since AMEVCNTVOFF01_EL2 does not
	 * exist.
	 */
	tst	x0, #~3
	ASM_ASSERT(eq)
	cmp	x0, #1
	ASM_ASSERT(ne)
#endif
	/*
	 * Dispatch through the jump table below: every entry is an
	 * 8-byte mrs/ret pair, growing to 12 bytes when a "bti j"
	 * landing pad is inserted in front of each entry.
	 */
	adr	x1, 1f
	add	x1, x1, x0, lsl #3	/* idx * 8 (mrs/ret pair) */
#if ENABLE_BTI
	add	x1, x1, x0, lsl #2	/* + idx * 4 ("bti j") */
#endif
	br	x1

1:	read	AMEVCNTVOFF00_EL2	/* index 0 */
	.skip	8			/* AMEVCNTVOFF01_EL2 does not exist */
#if ENABLE_BTI
	.skip	4			/* Extra space for BTI instruction. */
#endif
	read	AMEVCNTVOFF02_EL2	/* index 2 */
	read	AMEVCNTVOFF03_EL2	/* index 3 */
endfunc amu_group0_voffset_read_internal
137
/*
 * void amu_group0_voffset_write_internal(int idx, uint64_t val);
 *
 * Write `val` to the group 0 AMU virtual offset register selected
 * by `idx`.
 *
 * In:		x0 = register index (0, 2 or 3 — index 1 has no register)
 *		x1 = value to write
 * Clobbers:	x2, flags (when assertions are enabled)
 */
func amu_group0_voffset_write_internal
#if ENABLE_ASSERTIONS
	/*
	 * Calling this function with an out-of-bounds index would
	 * branch past the end of the jump table; reject anything
	 * above 3, and reject 1 since AMEVCNTVOFF01_EL2 does not
	 * exist.
	 */
	tst	x0, #~3
	ASM_ASSERT(eq)
	cmp	x0, #1
	ASM_ASSERT(ne)
#endif
	/*
	 * Dispatch through the jump table below: every entry is an
	 * 8-byte msr/ret pair, growing to 12 bytes when a "bti j"
	 * landing pad is inserted in front of each entry.
	 */
	adr	x2, 1f
	add	x2, x2, x0, lsl #3	/* idx * 8 (msr/ret pair) */
#if ENABLE_BTI
	add	x2, x2, x0, lsl #2	/* + idx * 4 ("bti j") */
#endif
	br	x2

1:	write	AMEVCNTVOFF00_EL2	/* index 0 */
	.skip	8			/* AMEVCNTVOFF01_EL2 does not exist */
#if ENABLE_BTI
	.skip	4			/* Extra space for BTI instruction. */
#endif
	write	AMEVCNTVOFF02_EL2	/* index 2 */
	write	AMEVCNTVOFF03_EL2	/* index 3 */
endfunc amu_group0_voffset_write_internal
174
/*
 * uint64_t amu_group1_voffset_read_internal(int idx);
 *
 * Read the group 1 AMU virtual offset register selected by `idx`
 * and return its value in x0.
 *
 * In:		x0 = register index (valid range 0..15)
 * Out:		x0 = virtual offset value
 * Clobbers:	x1, flags (when assertions are enabled)
 */
func amu_group1_voffset_read_internal
#if ENABLE_ASSERTIONS
	/*
	 * Calling this function with an out-of-bounds index would
	 * branch past the end of the jump table, so check that no
	 * bits above the bottom four are set.
	 */
	tst	x0, #~0xF
	ASM_ASSERT(eq)
#endif
	/*
	 * Dispatch through the jump table below: every entry is an
	 * 8-byte mrs/ret pair, growing to 12 bytes when a "bti j"
	 * landing pad is inserted in front of each entry.
	 */
	adr	x1, 1f
	add	x1, x1, x0, lsl #3	/* idx * 8 (mrs/ret pair) */
#if ENABLE_BTI
	add	x1, x1, x0, lsl #2	/* + idx * 4 ("bti j") */
#endif
	br	x1

1:	read	AMEVCNTVOFF10_EL2	/* index 0 */
	read	AMEVCNTVOFF11_EL2	/* index 1 */
	read	AMEVCNTVOFF12_EL2	/* index 2 */
	read	AMEVCNTVOFF13_EL2	/* index 3 */
	read	AMEVCNTVOFF14_EL2	/* index 4 */
	read	AMEVCNTVOFF15_EL2	/* index 5 */
	read	AMEVCNTVOFF16_EL2	/* index 6 */
	read	AMEVCNTVOFF17_EL2	/* index 7 */
	read	AMEVCNTVOFF18_EL2	/* index 8 */
	read	AMEVCNTVOFF19_EL2	/* index 9 */
	read	AMEVCNTVOFF1A_EL2	/* index 10 */
	read	AMEVCNTVOFF1B_EL2	/* index 11 */
	read	AMEVCNTVOFF1C_EL2	/* index 12 */
	read	AMEVCNTVOFF1D_EL2	/* index 13 */
	read	AMEVCNTVOFF1E_EL2	/* index 14 */
	read	AMEVCNTVOFF1F_EL2	/* index 15 */
endfunc amu_group1_voffset_read_internal
218
/*
 * void amu_group1_voffset_write_internal(int idx, uint64_t val);
 *
 * Write `val` to the group 1 AMU virtual offset register selected
 * by `idx`.
 *
 * In:		x0 = register index (valid range 0..15)
 *		x1 = value to write
 * Clobbers:	x2, flags (when assertions are enabled)
 */
func amu_group1_voffset_write_internal
#if ENABLE_ASSERTIONS
	/*
	 * Calling this function with an out-of-bounds index would
	 * branch past the end of the jump table, so check that no
	 * bits above the bottom four are set.
	 */
	tst	x0, #~0xF
	ASM_ASSERT(eq)
#endif
	/*
	 * Dispatch through the jump table below: every entry is an
	 * 8-byte msr/ret pair, growing to 12 bytes when a "bti j"
	 * landing pad is inserted in front of each entry.
	 */
	adr	x2, 1f
	add	x2, x2, x0, lsl #3	/* idx * 8 (msr/ret pair) */
#if ENABLE_BTI
	add	x2, x2, x0, lsl #2	/* + idx * 4 ("bti j") */
#endif
	br	x2

1:	write	AMEVCNTVOFF10_EL2	/* index 0 */
	write	AMEVCNTVOFF11_EL2	/* index 1 */
	write	AMEVCNTVOFF12_EL2	/* index 2 */
	write	AMEVCNTVOFF13_EL2	/* index 3 */
	write	AMEVCNTVOFF14_EL2	/* index 4 */
	write	AMEVCNTVOFF15_EL2	/* index 5 */
	write	AMEVCNTVOFF16_EL2	/* index 6 */
	write	AMEVCNTVOFF17_EL2	/* index 7 */
	write	AMEVCNTVOFF18_EL2	/* index 8 */
	write	AMEVCNTVOFF19_EL2	/* index 9 */
	write	AMEVCNTVOFF1A_EL2	/* index 10 */
	write	AMEVCNTVOFF1B_EL2	/* index 11 */
	write	AMEVCNTVOFF1C_EL2	/* index 12 */
	write	AMEVCNTVOFF1D_EL2	/* index 13 */
	write	AMEVCNTVOFF1E_EL2	/* index 14 */
	write	AMEVCNTVOFF1F_EL2	/* index 15 */
endfunc amu_group1_voffset_write_internal