/*
 * Copyright (c) 2017-2021, Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <assert_macros.S>
#include <asm_macros.S>

	.globl	amu_group0_cnt_read_internal
	.globl	amu_group1_cnt_read_internal
	.globl	amu_group1_evtype_write_internal
	.globl	amu_group1_evtype_read_internal
	.globl	amu_group1_num_counters_internal
	.globl	amu_group1_is_cnt_impl_internal

	/* FEAT_AMUv1p1 virtualisation offset register functions */
	.globl	amu_group0_voffset_read_internal
	.globl	amu_group0_voffset_write_internal
	.globl	amu_group1_voffset_read_internal
	.globl	amu_group1_voffset_write_internal

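/*
 * Each accessor below dispatches on `idx` with a computed branch into a
 * table of read/ret (or write/ret) entries. Every mrs/msr + ret pair
 * occupies 8 bytes, so the branch target is the 1: label plus idx * 8;
 * with ENABLE_BTI each entry additionally carries a "bti j" landing pad,
 * making the stride 12 bytes (idx * 8 + idx * 4). For example, idx = 2
 * lands 16 bytes (24 with BTI) past the 1: label.
 */
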
/*
 * uint64_t amu_group0_cnt_read_internal(int idx);
 *
 * Given `idx`, read the corresponding AMU counter
 * and return it in `x0`.
 */
func amu_group0_cnt_read_internal
	adr	x1, 1f
#if ENABLE_ASSERTIONS
	/*
	 * It can be dangerous to call this function with an
	 * out of bounds index. Ensure `idx` is valid.
	 */
	tst	x0, #~3
	ASM_ASSERT(eq)
#endif
	/*
	 * Given `idx` calculate address of mrs/ret instruction pair
	 * in the table below.
	 */
	add	x1, x1, x0, lsl #3	/* each mrs/ret sequence is 8 bytes */
#if ENABLE_BTI
	add	x1, x1, x0, lsl #2	/* + "bti j" instruction */
#endif
	br	x1

1:	read	AMEVCNTR00_EL0		/* index 0 */
	read	AMEVCNTR01_EL0		/* index 1 */
	read	AMEVCNTR02_EL0		/* index 2 */
	read	AMEVCNTR03_EL0		/* index 3 */
endfunc amu_group0_cnt_read_internal
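
/*
 * Illustrative call sequence (comment only, nothing is assembled here):
 *
 *	mov	x0, #2				// idx = 2
 *	bl	amu_group0_cnt_read_internal	// AMEVCNTR02_EL0 returned in x0
 *
 * Callers are expected to pass a valid index; the bounds check above is
 * only compiled in when ENABLE_ASSERTIONS is set.
 */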

/*
 * uint64_t amu_group1_cnt_read_internal(int idx);
 *
 * Given `idx`, read the corresponding AMU counter
 * and return it in `x0`.
 */
func amu_group1_cnt_read_internal
	adr	x1, 1f
#if ENABLE_ASSERTIONS
	/*
	 * It can be dangerous to call this function with an
	 * out of bounds index. Ensure `idx` is valid.
	 */
	tst	x0, #~0xF
	ASM_ASSERT(eq)
#endif
	/*
	 * Given `idx` calculate address of mrs/ret instruction pair
	 * in the table below.
	 */
	add	x1, x1, x0, lsl #3	/* each mrs/ret sequence is 8 bytes */
#if ENABLE_BTI
	add	x1, x1, x0, lsl #2	/* + "bti j" instruction */
#endif
	br	x1

1:	read	AMEVCNTR10_EL0		/* index 0 */
	read	AMEVCNTR11_EL0		/* index 1 */
	read	AMEVCNTR12_EL0		/* index 2 */
	read	AMEVCNTR13_EL0		/* index 3 */
	read	AMEVCNTR14_EL0		/* index 4 */
	read	AMEVCNTR15_EL0		/* index 5 */
	read	AMEVCNTR16_EL0		/* index 6 */
	read	AMEVCNTR17_EL0		/* index 7 */
	read	AMEVCNTR18_EL0		/* index 8 */
	read	AMEVCNTR19_EL0		/* index 9 */
	read	AMEVCNTR1A_EL0		/* index 10 */
	read	AMEVCNTR1B_EL0		/* index 11 */
	read	AMEVCNTR1C_EL0		/* index 12 */
	read	AMEVCNTR1D_EL0		/* index 13 */
	read	AMEVCNTR1E_EL0		/* index 14 */
	read	AMEVCNTR1F_EL0		/* index 15 */
endfunc amu_group1_cnt_read_internal

/*
 * Accessor functions for virtual offset registers added with FEAT_AMUv1p1
 */

/*
 * uint64_t amu_group0_voffset_read_internal(int idx);
 *
 * Given `idx`, read the corresponding AMU virtual offset register
 * and return it in `x0`.
 */
func amu_group0_voffset_read_internal
	adr	x1, 1f
#if ENABLE_ASSERTIONS
	/*
	 * It can be dangerous to call this function with an
	 * out of bounds index. Ensure `idx` is valid.
	 */
	tst	x0, #~3
	ASM_ASSERT(eq)
	/* Make sure idx != 1 since AMEVCNTVOFF01_EL2 does not exist */
	cmp	x0, #1
	ASM_ASSERT(ne)
#endif
	/*
	 * Given `idx` calculate address of mrs/ret instruction pair
	 * in the table below.
	 */
	add	x1, x1, x0, lsl #3	/* each mrs/ret sequence is 8 bytes */
#if ENABLE_BTI
	add	x1, x1, x0, lsl #2	/* + "bti j" instruction */
#endif
	br	x1

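	/*
	 * The table below must keep the same fixed stride as the dispatch
	 * arithmetic above, so the unimplemented index 1 slot is padded
	 * with .skip to the size of a read/ret entry (8 bytes, plus 4
	 * more when ENABLE_BTI is set) rather than being omitted.
	 */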
1:	read	AMEVCNTVOFF00_EL2	/* index 0 */
	.skip	8			/* AMEVCNTVOFF01_EL2 does not exist */
#if ENABLE_BTI
	.skip	4			/* Extra space for BTI instruction. */
#endif
	read	AMEVCNTVOFF02_EL2	/* index 2 */
	read	AMEVCNTVOFF03_EL2	/* index 3 */
endfunc amu_group0_voffset_read_internal

/*
 * void amu_group0_voffset_write_internal(int idx, uint64_t val);
 *
 * Given `idx`, write `val` to the corresponding AMU virtual offset register.
 */
func amu_group0_voffset_write_internal
	adr	x2, 1f
#if ENABLE_ASSERTIONS
	/*
	 * It can be dangerous to call this function with an
	 * out of bounds index. Ensure `idx` is valid.
	 */
	tst	x0, #~3
	ASM_ASSERT(eq)
	/* Make sure idx != 1 since AMEVCNTVOFF01_EL2 does not exist */
	cmp	x0, #1
	ASM_ASSERT(ne)
#endif
	/*
	 * Given `idx` calculate address of msr/ret instruction pair
	 * in the table below.
	 */
	add	x2, x2, x0, lsl #3	/* each msr/ret sequence is 8 bytes */
#if ENABLE_BTI
	add	x2, x2, x0, lsl #2	/* + "bti j" instruction */
#endif
	br	x2

1:	write	AMEVCNTVOFF00_EL2	/* index 0 */
	.skip	8			/* AMEVCNTVOFF01_EL2 does not exist */
#if ENABLE_BTI
	.skip	4			/* Extra space for BTI instruction. */
#endif
	write	AMEVCNTVOFF02_EL2	/* index 2 */
	write	AMEVCNTVOFF03_EL2	/* index 3 */
endfunc amu_group0_voffset_write_internal

/*
 * uint64_t amu_group1_voffset_read_internal(int idx);
 *
 * Given `idx`, read the corresponding AMU virtual offset register
 * and return it in `x0`.
 */
func amu_group1_voffset_read_internal
	adr	x1, 1f
#if ENABLE_ASSERTIONS
	/*
	 * It can be dangerous to call this function with an
	 * out of bounds index. Ensure `idx` is valid.
	 */
	tst	x0, #~0xF
	ASM_ASSERT(eq)
#endif
	/*
	 * Given `idx` calculate address of mrs/ret instruction pair
	 * in the table below.
	 */
	add	x1, x1, x0, lsl #3	/* each mrs/ret sequence is 8 bytes */
#if ENABLE_BTI
	add	x1, x1, x0, lsl #2	/* + "bti j" instruction */
#endif
	br	x1

1:	read	AMEVCNTVOFF10_EL2	/* index 0 */
	read	AMEVCNTVOFF11_EL2	/* index 1 */
	read	AMEVCNTVOFF12_EL2	/* index 2 */
	read	AMEVCNTVOFF13_EL2	/* index 3 */
	read	AMEVCNTVOFF14_EL2	/* index 4 */
	read	AMEVCNTVOFF15_EL2	/* index 5 */
	read	AMEVCNTVOFF16_EL2	/* index 6 */
	read	AMEVCNTVOFF17_EL2	/* index 7 */
	read	AMEVCNTVOFF18_EL2	/* index 8 */
	read	AMEVCNTVOFF19_EL2	/* index 9 */
	read	AMEVCNTVOFF1A_EL2	/* index 10 */
	read	AMEVCNTVOFF1B_EL2	/* index 11 */
	read	AMEVCNTVOFF1C_EL2	/* index 12 */
	read	AMEVCNTVOFF1D_EL2	/* index 13 */
	read	AMEVCNTVOFF1E_EL2	/* index 14 */
	read	AMEVCNTVOFF1F_EL2	/* index 15 */
endfunc amu_group1_voffset_read_internal

/*
 * void amu_group1_voffset_write_internal(int idx, uint64_t val);
 *
 * Given `idx`, write `val` to the corresponding AMU virtual offset register.
 */
func amu_group1_voffset_write_internal
	adr	x2, 1f
#if ENABLE_ASSERTIONS
	/*
	 * It can be dangerous to call this function with an
	 * out of bounds index. Ensure `idx` is valid.
	 */
	tst	x0, #~0xF
	ASM_ASSERT(eq)
#endif
	/*
	 * Given `idx` calculate address of msr/ret instruction pair
	 * in the table below.
	 */
	add	x2, x2, x0, lsl #3	/* each msr/ret sequence is 8 bytes */
#if ENABLE_BTI
	add	x2, x2, x0, lsl #2	/* + "bti j" instruction */
#endif
	br	x2

1:	write	AMEVCNTVOFF10_EL2	/* index 0 */
	write	AMEVCNTVOFF11_EL2	/* index 1 */
	write	AMEVCNTVOFF12_EL2	/* index 2 */
	write	AMEVCNTVOFF13_EL2	/* index 3 */
	write	AMEVCNTVOFF14_EL2	/* index 4 */
	write	AMEVCNTVOFF15_EL2	/* index 5 */
	write	AMEVCNTVOFF16_EL2	/* index 6 */
	write	AMEVCNTVOFF17_EL2	/* index 7 */
	write	AMEVCNTVOFF18_EL2	/* index 8 */
	write	AMEVCNTVOFF19_EL2	/* index 9 */
	write	AMEVCNTVOFF1A_EL2	/* index 10 */
	write	AMEVCNTVOFF1B_EL2	/* index 11 */
	write	AMEVCNTVOFF1C_EL2	/* index 12 */
	write	AMEVCNTVOFF1D_EL2	/* index 13 */
	write	AMEVCNTVOFF1E_EL2	/* index 14 */
	write	AMEVCNTVOFF1F_EL2	/* index 15 */
endfunc amu_group1_voffset_write_internal

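/*
 * Group 1 event type registers: AMEVTYPER1<n>_EL0 selects the event
 * counted by the corresponding AMEVCNTR1<n>_EL0 counter, so the
 * accessors below are indexed the same way as the counter accessors.
 */
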
/*
 * uint64_t amu_group1_evtype_read_internal(int idx);
 *
 * Given `idx`, read the corresponding AMU event type register
 * and return it in `x0`.
 */
func amu_group1_evtype_read_internal
	adr	x1, 1f
#if ENABLE_ASSERTIONS
	/*
	 * It can be dangerous to call this function with an
	 * out of bounds index. Ensure `idx` is valid.
	 */
	tst	x0, #~0xF
	ASM_ASSERT(eq)
#endif
	/*
	 * Given `idx` calculate address of mrs/ret instruction pair
	 * in the table below.
	 */
	add	x1, x1, x0, lsl #3	/* each mrs/ret sequence is 8 bytes */
#if ENABLE_BTI
	add	x1, x1, x0, lsl #2	/* + "bti j" instruction */
#endif
	br	x1

1:	read	AMEVTYPER10_EL0		/* index 0 */
	read	AMEVTYPER11_EL0		/* index 1 */
	read	AMEVTYPER12_EL0		/* index 2 */
	read	AMEVTYPER13_EL0		/* index 3 */
	read	AMEVTYPER14_EL0		/* index 4 */
	read	AMEVTYPER15_EL0		/* index 5 */
	read	AMEVTYPER16_EL0		/* index 6 */
	read	AMEVTYPER17_EL0		/* index 7 */
	read	AMEVTYPER18_EL0		/* index 8 */
	read	AMEVTYPER19_EL0		/* index 9 */
	read	AMEVTYPER1A_EL0		/* index 10 */
	read	AMEVTYPER1B_EL0		/* index 11 */
	read	AMEVTYPER1C_EL0		/* index 12 */
	read	AMEVTYPER1D_EL0		/* index 13 */
	read	AMEVTYPER1E_EL0		/* index 14 */
	read	AMEVTYPER1F_EL0		/* index 15 */
endfunc amu_group1_evtype_read_internal

/*
 * void amu_group1_evtype_write_internal(int idx, unsigned int val);
 *
 * Program the AMU event type register indexed by `idx`
 * with the value `val`.
 */
func amu_group1_evtype_write_internal
	adr	x2, 1f
#if ENABLE_ASSERTIONS
	/*
	 * It can be dangerous to call this function with an
	 * out of bounds index. Ensure `idx` is valid.
	 */
	tst	x0, #~0xF
	ASM_ASSERT(eq)

	/* `val` must fit in 16 bits, i.e. lie in the range [0, 0xFFFF] */
	tst	x1, #~0xFFFF
	ASM_ASSERT(eq)
#endif
	/*
	 * Given `idx` calculate address of msr/ret instruction pair
	 * in the table below.
	 */
	add	x2, x2, x0, lsl #3	/* each msr/ret sequence is 8 bytes */
#if ENABLE_BTI
	add	x2, x2, x0, lsl #2	/* + "bti j" instruction */
#endif
	br	x2

1:	write	AMEVTYPER10_EL0		/* index 0 */
	write	AMEVTYPER11_EL0		/* index 1 */
	write	AMEVTYPER12_EL0		/* index 2 */
	write	AMEVTYPER13_EL0		/* index 3 */
	write	AMEVTYPER14_EL0		/* index 4 */
	write	AMEVTYPER15_EL0		/* index 5 */
	write	AMEVTYPER16_EL0		/* index 6 */
	write	AMEVTYPER17_EL0		/* index 7 */
	write	AMEVTYPER18_EL0		/* index 8 */
	write	AMEVTYPER19_EL0		/* index 9 */
	write	AMEVTYPER1A_EL0		/* index 10 */
	write	AMEVTYPER1B_EL0		/* index 11 */
	write	AMEVTYPER1C_EL0		/* index 12 */
	write	AMEVTYPER1D_EL0		/* index 13 */
	write	AMEVTYPER1E_EL0		/* index 14 */
	write	AMEVTYPER1F_EL0		/* index 15 */
endfunc amu_group1_evtype_write_internal

/*
 * uint64_t amu_group1_num_counters_internal(void);
 *
 * Return the number of group 1 counters implemented, as reported by
 * AMCGCR_EL0.CG1NC. No argument is used.
 */
func amu_group1_num_counters_internal
	mrs	x0, AMCGCR_EL0
	ubfx	x0, x0, AMCGCR_EL0_CG1NC_SHIFT, AMCGCR_EL0_CG1NC_LENGTH
	ret
endfunc amu_group1_num_counters_internal

/*
 * uint64_t amu_group1_is_cnt_impl_internal(int idx);
 *
 * Given `idx`, return a non-zero value if group 1 counter `idx` is
 * implemented, and zero otherwise.
 */
func amu_group1_is_cnt_impl_internal
#if ENABLE_ASSERTIONS
	/*
	 * It can be dangerous to call this function with an
	 * out of bounds index. Ensure `idx` is valid.
	 */
	tst	x0, #~0xF
	ASM_ASSERT(eq)
#endif
	mrs	x1, AMCG1IDR_EL0	/* bitmap of implemented group 1 counters */
	mov	x2, #1
	lsl	x0, x2, x0		/* x0 = 1 << idx */
	and	x0, x1, x0		/* non-zero if counter `idx` is implemented */
	ret
endfunc amu_group1_is_cnt_impl_internal