/*
 * Copyright (c) 2017-2020, Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <assert_macros.S>
#include <asm_macros.S>

	.globl	amu_group0_cnt_read_internal
	.globl	amu_group1_cnt_read_internal

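/*
 * Note: the tables below use a `read` helper macro that is not shown in
 * this excerpt. It is assumed to expand to an mrs/ret pair that returns
 * the named counter in x0 (plus a "bti j" landing pad when ENABLE_BTI=1),
 * roughly along these lines (a sketch, not the exact definition):
 *
 *	.macro	read reg
 *	mrs	x0, \reg
 *	ret
 *	.endm
 */
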
/*
 * uint64_t amu_group0_cnt_read_internal(int idx);
 *
 * Given `idx`, read the corresponding AMU counter
 * and return it in `x0`.
 */
func amu_group0_cnt_read_internal
	adr	x1, 1f
#if ENABLE_ASSERTIONS
	/*
	 * It can be dangerous to call this function with an
	 * out of bounds index. Ensure `idx` is valid.
	 */
	tst	x0, #~3
	ASM_ASSERT(eq)
#endif
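	/*
	 * Group 0 provides four architected counters (AMEVCNTR00_EL0 to
	 * AMEVCNTR03_EL0), so `tst x0, #~3` checks that no bits other than
	 * [1:0] are set, i.e. that 0 <= idx <= 3.
	 */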
	/*
	 * Given `idx` calculate address of mrs/ret instruction pair
	 * in the table below.
	 */
	add	x1, x1, x0, lsl #3	/* each mrs/ret sequence is 8 bytes */
#if ENABLE_BTI
	add	x1, x1, x0, lsl #2	/* + "bti j" instruction */
#endif
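	/*
	 * For example, with idx = 2: each table entry below is 8 bytes
	 * (mrs + ret), so x1 = 1f + 16. With ENABLE_BTI=1 each entry also
	 * starts with a 4-byte "bti j" landing pad (the target of the
	 * indirect br below must be a valid BTI landing point), making it
	 * 12 bytes, so x1 = 1f + 24 (idx * 8 + idx * 4).
	 */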
	br	x1

1:	read	AMEVCNTR00_EL0		/* index 0 */
	read	AMEVCNTR01_EL0		/* index 1 */
	read	AMEVCNTR02_EL0		/* index 2 */
	read	AMEVCNTR03_EL0		/* index 3 */
endfunc amu_group0_cnt_read_internal
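
/*
 * Usage note (hypothetical caller, not shown in this file): the C-side AMU
 * driver is expected to range-check `idx` before calling, for example
 * `uint64_t val = amu_group0_cnt_read_internal(2);` to read AMEVCNTR02_EL0,
 * since the ASM_ASSERT above compiles away when ENABLE_ASSERTIONS=0.
 */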

/*
 * uint64_t amu_group1_cnt_read_internal(int idx);
 *
 * Given `idx`, read the corresponding AMU counter
 * and return it in `x0`.
 */
func amu_group1_cnt_read_internal
	adr	x1, 1f
#if ENABLE_ASSERTIONS
	/*
	 * It can be dangerous to call this function with an
	 * out of bounds index. Ensure `idx` is valid.
	 */
	tst	x0, #~0xF
	ASM_ASSERT(eq)
#endif
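	/*
	 * Group 1 provides up to sixteen auxiliary counters (AMEVCNTR10_EL0
	 * to AMEVCNTR1F_EL0), so here `tst x0, #~0xF` checks that
	 * 0 <= idx <= 15.
	 */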
	/*
	 * Given `idx` calculate address of mrs/ret instruction pair
	 * in the table below.
	 */
	add	x1, x1, x0, lsl #3	/* each mrs/ret sequence is 8 bytes */
#if ENABLE_BTI
	add	x1, x1, x0, lsl #2	/* + "bti j" instruction */
#endif
	br	x1

1:	read	AMEVCNTR10_EL0		/* index 0 */
	read	AMEVCNTR11_EL0		/* index 1 */
	read	AMEVCNTR12_EL0		/* index 2 */
	read	AMEVCNTR13_EL0		/* index 3 */
	read	AMEVCNTR14_EL0		/* index 4 */
	read	AMEVCNTR15_EL0		/* index 5 */
	read	AMEVCNTR16_EL0		/* index 6 */
	read	AMEVCNTR17_EL0		/* index 7 */
	read	AMEVCNTR18_EL0		/* index 8 */
	read	AMEVCNTR19_EL0		/* index 9 */
	read	AMEVCNTR1A_EL0		/* index 10 */
	read	AMEVCNTR1B_EL0		/* index 11 */
	read	AMEVCNTR1C_EL0		/* index 12 */
	read	AMEVCNTR1D_EL0		/* index 13 */
	read	AMEVCNTR1E_EL0		/* index 14 */
	read	AMEVCNTR1F_EL0		/* index 15 */
endfunc amu_group1_cnt_read_internal