/*
 * Copyright (c) 2024, Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <arch_helpers.h>
#include <arm_arch_svc.h>
#include <events.h>
#include <plat_topology.h>
#include <platform.h>
#include <platform_def.h>
#include <power_management.h>
#include <psci.h>
#include <smccc.h>
#include <sync.h>
#include <test_helpers.h>
#include <tftf_lib.h>

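/* Used by non-lead CPUs to signal to the lead CPU that they entered the test. */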
static event_t cpu_has_entered_test[PLATFORM_CORE_COUNT];

/* Used when catching synchronous exceptions. */
static volatile bool exception_triggered[PLATFORM_CORE_COUNT];

/*
 * The whole test should only be skipped if the test was skipped on all CPUs.
 * The test on each CPU can't return TEST_RESULT_SKIPPED, because the whole test
 * is skipped if any of the CPUs return TEST_RESULT_SKIPPED. Instead, to skip a
 * test, the test returns TEST_RESULT_SUCCESS, then sets a flag in the
 * test_skipped array. This array is checked at the end by the
 * run_asymmetric_test function.
 */
static volatile bool test_skipped[PLATFORM_CORE_COUNT];

/*
 * Test function which is run on each CPU. It is global so that it is visible
 * to all CPUs.
 */
static test_result_t (*asymmetric_test_function)(void);

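/*
 * Custom synchronous exception handler, registered while a test accesses a
 * feature register. It records, per core, whether an exception with
 * EC == EC_UNKNOWN (an undef injection or a trap to EL2) was triggered, and
 * returns true to mark the exception as handled.
 */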
static bool exception_handler(void)
{
	unsigned int mpid = read_mpidr_el1() & MPID_MASK;
	unsigned int core_pos = platform_get_core_pos(mpid);

	uint64_t esr_el2 = read_esr_el2();

	if (EC_BITS(esr_el2) == EC_UNKNOWN) {
		/*
		 * This may be an undef injection, or a trap to EL2 due to a
		 * register not being present. Both cases have the same EC
		 * value.
		 */
		exception_triggered[core_pos] = true;
		return true;
	}

	return false;
}

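/*
 * Per-CPU TRBE test. Accesses TRBLIMITR_EL1 with the custom exception handler
 * registered and checks that an exception is triggered only on cores affected
 * by the TRBE errata, where EL3 is expected to have disabled the feature.
 */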
static test_result_t test_trbe(void)
{
	unsigned int mpid = read_mpidr_el1() & MPID_MASK;
	unsigned int core_pos = platform_get_core_pos(mpid);
	bool should_trigger_exception = is_trbe_errata_affected_core();

	if (!is_feat_trbe_present()) {
		test_skipped[core_pos] = true;
		return TEST_RESULT_SUCCESS;
	}

	register_custom_sync_exception_handler(exception_handler);
	exception_triggered[core_pos] = false;
	read_trblimitr_el1();
	unregister_custom_sync_exception_handler();

	/*
	 * NOTE: TRBE as an asymmetric feature is an exceptional one. Even if
	 * the hardware supports the feature, TF-A deliberately disables it at
	 * EL3 on cores affected by the errata. In this scenario, when the
	 * TRBLIMITR_EL1 register is accessed, the registered undef injection
	 * handler should kick in and the exception is handled synchronously
	 * at EL2.
	 */
	if (exception_triggered[core_pos] != should_trigger_exception) {
		tftf_testcase_printf("Unexpected exception behaviour on core %u "
				"when accessing TRBLIMITR_EL1\n", core_pos);
		return TEST_RESULT_FAIL;
	}

	return TEST_RESULT_SUCCESS;
}

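/*
 * Per-CPU SPE test. Checks that PMSCR_EL1 can be read on CPUs that implement
 * FEAT_SPE; CPUs without the feature mark the test as skipped.
 */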
static test_result_t test_spe(void)
{
	unsigned int mpid = read_mpidr_el1() & MPID_MASK;
	unsigned int core_pos = platform_get_core_pos(mpid);

	/*
	 * NOTE: SPE is an asymmetric feature. When the hardware supports it,
	 * we expect to be able to access the PMSCR_EL1 register; if the
	 * feature isn't supported, the test is skipped. So each individual
	 * CPU checks for the feature's presence and, based on that, either
	 * accesses the register or skips the test.
	 */
	if (!is_feat_spe_supported()) {
		test_skipped[core_pos] = true;
		return TEST_RESULT_SUCCESS;
	}

	read_pmscr_el1();

	return TEST_RESULT_SUCCESS;
}

/*
 * Runs on each non-lead CPU and executes asymmetric_test_function.
 */
static test_result_t non_lead_cpu_fn(void)
{
	unsigned int mpid = read_mpidr_el1() & MPID_MASK;
	unsigned int core_pos = platform_get_core_pos(mpid);
	test_result_t test_result;

	/* Signal to the lead CPU that the calling CPU has entered the test */
	tftf_send_event(&cpu_has_entered_test[core_pos]);

	test_result = asymmetric_test_function();

	/* Ensure that EL3 is still functional by making an SMC call. */
	smc_args args;
	smc_ret_values smc_ret;
	memset(&args, 0, sizeof(args));
	args.fid = SMCCC_VERSION;
	smc_ret = tftf_smc(&args);

	tftf_testcase_printf("SMCCC Version = %d.%d\n",
		(int)((smc_ret.ret0 >> SMCCC_VERSION_MAJOR_SHIFT) & SMCCC_VERSION_MAJOR_MASK),
		(int)((smc_ret.ret0 >> SMCCC_VERSION_MINOR_SHIFT) & SMCCC_VERSION_MINOR_MASK));

	return test_result;
}

/* Initialise the variables that are shared between all CPUs. */
void test_init(test_result_t (*test_function)(void))
{
	int i;

	for (i = 0; i < PLATFORM_CORE_COUNT; i++) {
		test_skipped[i] = false;
		tftf_init_event(&cpu_has_entered_test[i]);
	}

	asymmetric_test_function = test_function;

	/* Ensure the above writes are seen before any read */
	dmbsy();
}

/*
 * Run the given test function on all CPUs. If the test is skipped on all CPUs,
 * the whole test is skipped. This is checked using the test_skipped array.
 */
test_result_t run_asymmetric_test(test_result_t (*test_function)(void))
{
	unsigned int lead_mpid;
	unsigned int cpu_mpid, cpu_node;
	unsigned int core_pos;
	int psci_ret;
	bool all_cpus_skipped;
	int i;
	uint32_t aff_info;
	test_result_t test_result;

	lead_mpid = read_mpidr_el1() & MPID_MASK;

	SKIP_TEST_IF_LESS_THAN_N_CPUS(2);

	test_init(test_function);

	/* Run the test on the lead CPU */
	test_result = test_function();

	/* Power on all CPUs */
	for_each_cpu(cpu_node) {
		cpu_mpid = tftf_get_mpidr_from_node(cpu_node);
		/* Skip lead CPU as it is already powered on */
		if (cpu_mpid == lead_mpid)
			continue;

		psci_ret = tftf_cpu_on(cpu_mpid, (uintptr_t) non_lead_cpu_fn, 0);
		if (psci_ret != PSCI_E_SUCCESS) {
			tftf_testcase_printf(
				"Failed to power on CPU 0x%x (%d)\n",
				cpu_mpid, psci_ret);
			return TEST_RESULT_SKIPPED;
		}
	}

	/* Wait for non-lead CPUs to enter the test */
	for_each_cpu(cpu_node) {
		cpu_mpid = tftf_get_mpidr_from_node(cpu_node);
		/* Skip lead CPU */
		if (cpu_mpid == lead_mpid)
			continue;

		core_pos = platform_get_core_pos(cpu_mpid);
		tftf_wait_for_event(&cpu_has_entered_test[core_pos]);
	}

	/* Wait for all non-lead CPUs to power down */
	for_each_cpu(cpu_node) {
		cpu_mpid = tftf_get_mpidr_from_node(cpu_node);
		/* Skip lead CPU */
		if (cpu_mpid == lead_mpid)
			continue;

		do {
			aff_info = tftf_psci_affinity_info(cpu_mpid,
					MPIDR_AFFLVL0);
		} while (aff_info != PSCI_STATE_OFF);
	}

	/*
	 * If the test was skipped on all CPUs, the whole test should be
	 * skipped.
	 */
	all_cpus_skipped = true;
	for (i = 0; i < PLATFORM_CORE_COUNT; i++) {
		if (!test_skipped[i]) {
			all_cpus_skipped = false;
			break;
		}
	}

	if (all_cpus_skipped) {
		return TEST_RESULT_SKIPPED;
	} else {
		return test_result;
	}
}

/* Test Asymmetric Support for FEAT_TRBE */
test_result_t test_trbe_errata_asymmetric(void)
{
	return run_asymmetric_test(test_trbe);
}

/* Test Asymmetric Support for FEAT_SPE */
test_result_t test_spe_asymmetric(void)
{
	return run_asymmetric_test(test_spe);
}