aboutsummaryrefslogtreecommitdiff
path: root/tftf/framework
diff options
context:
space:
mode:
Diffstat (limited to 'tftf/framework')
-rw-r--r--tftf/framework/aarch64/arch.c21
-rw-r--r--tftf/framework/aarch64/entrypoint.S30
-rw-r--r--tftf/framework/aarch64/exceptions.S41
-rw-r--r--tftf/framework/framework.mk18
-rw-r--r--tftf/framework/include/tftf.h2
-rw-r--r--tftf/framework/main.c33
-rw-r--r--tftf/framework/tftf.ld.S38
7 files changed, 154 insertions, 29 deletions
diff --git a/tftf/framework/aarch64/arch.c b/tftf/framework/aarch64/arch.c
index 56369ae80..f1223a1ee 100644
--- a/tftf/framework/aarch64/arch.c
+++ b/tftf/framework/aarch64/arch.c
@@ -5,7 +5,10 @@
* SPDX-License-Identifier: BSD-3-Clause
*/
+#include <arch_features.h>
#include <arch_helpers.h>
+#include <arch_features.h>
+#include <tftf_lib.h>
void tftf_arch_setup(void)
{
@@ -23,11 +26,25 @@ void tftf_arch_setup(void)
write_hcr_el2(HCR_TGE_BIT);
/*
- * Disable trap of SVE instructions to EL2.
+ * Disable trap of SVE, SME instructions to EL2.
* The fields of the CPTR_EL2 register reset to an
* architecturally UNKNOWN value.
*/
- write_cptr_el2(CPTR_EL2_RES1);
+ write_cptr_el2(CPTR_EL2_RESET_VAL);
isb();
+
+ /*
+ * Enable access to ZT0 storage when FEAT_SME2 is implemented
+ * and enable FA64 when FEAT_SME_FA64 is implemented
+ */
+ if (is_feat_sme_supported()) {
+ write_smcr_el2(SMCR_EL2_RESET_VAL);
+ isb();
+ }
+
+ /* Clear SVE hint bit */
+ if (is_armv8_2_sve_present()) {
+ tftf_smc_set_sve_hint(false);
+ }
}
}
diff --git a/tftf/framework/aarch64/entrypoint.S b/tftf/framework/aarch64/entrypoint.S
index 6aa564562..7937b8804 100644
--- a/tftf/framework/aarch64/entrypoint.S
+++ b/tftf/framework/aarch64/entrypoint.S
@@ -18,11 +18,13 @@
*/
func tftf_entrypoint
/* --------------------------------------------------------------------
- * Save arguments x0, x1 from the previous Boot loader
+ * Save arguments x0-x3 from the previous bootloader.
* --------------------------------------------------------------------
*/
mov x20, x0
mov x21, x1
+ mov x22, x2
+ mov x23, x3
bl arch_init
@@ -79,13 +81,15 @@ func tftf_entrypoint
bl platform_set_stack
/* --------------------------------------------------------------------
- * Save fw_config and hw_config addresses passed in x0, x1 from the
- * previous boot loader
+ * Save the fw_config or transfer list and hw_config addresses passed
+ * in registers x0 to x3 from the previous bootloader.
* --------------------------------------------------------------------
*/
mov x0, x20
mov x1, x21
- bl save_fw_hw_configs
+ mov x2, x22
+ mov x3, x23
+ bl save_handoff_params
/* --------------------------------------------------------------------
* tftf_cold_boot_main() will perform the remaining architectural and
@@ -204,10 +208,22 @@ el1_setup:
ret
endfunc arch_init
-/* Set fw_config and hw_config addresses passed in x0, x1 */
-func save_fw_hw_configs
+
+/* ----------------------------------------------------------------------------
+ * Save fw_config or transfer list and hw_config addresses passed in registers
+ * x0 to x3 from the previous bootloader.
+ * ----------------------------------------------------------------------------
+ */
+func save_handoff_params
+#if TRANSFER_LIST
+ adrp x4, ns_tl
+ str x3, [x4, :lo12:ns_tl]
+ str x1, [x4, :lo12:tl_signature]
+ str x0, [x4, :lo12:hw_config_base]
+#else
adrp x2, fw_config_base
str x0, [x2, :lo12:fw_config_base]
str x1, [x2, :lo12:hw_config_base]
+#endif
ret
-endfunc save_fw_hw_configs
+endfunc save_handoff_params
diff --git a/tftf/framework/aarch64/exceptions.S b/tftf/framework/aarch64/exceptions.S
index 218cca3b4..471bef7fd 100644
--- a/tftf/framework/aarch64/exceptions.S
+++ b/tftf/framework/aarch64/exceptions.S
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2018, Arm Limited. All rights reserved.
+ * Copyright (c) 2018-2022, Arm Limited. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@@ -31,14 +31,18 @@ unhandled_exception serror_sp_el0
/*
* Current EL with SPx : 0x200 - 0x400.
*/
-unhandled_exception sync_exception_sp_elx
+vector_entry sync_spx
+ b sync_exception_vector_entry
+end_vector_entry sync_spx
vector_entry irq_sp_elx
b irq_vector_entry
end_vector_entry irq_sp_elx
unhandled_exception fiq_sp_elx
-unhandled_exception serror_sp_elx
+vector_entry serror_sp_elx
+ b serror_vector_entry
+end_vector_entry serror_sp_elx
/*
* Lower EL using AArch64 : 0x400 - 0x600.
@@ -95,6 +99,22 @@ unhandled_exception serror_aarch32
ldp x0, x1, [sp, #0x0]
.endm
+func sync_exception_vector_entry
+ sub sp, sp, #0x100
+ save_gp_regs
+ mov x19, sp
+ bl tftf_sync_exception_handler
+ cbnz x0, 0f
+ mov x0, x19
+ /* Save original stack pointer value on the stack */
+ add x1, x0, #0x100
+ str x1, [x0, #0xf8]
+ b print_exception
+0: restore_gp_regs
+ add sp, sp, #0x100
+ eret
+endfunc sync_exception_vector_entry
+
func irq_vector_entry
sub sp, sp, #0x100
save_gp_regs
@@ -104,6 +124,21 @@ func irq_vector_entry
eret
endfunc irq_vector_entry
+func serror_vector_entry
+ sub sp, sp, #0x100
+ save_gp_regs
+ bl tftf_serror_handler
+ cbnz x0, 1f
+ mov x0, x19
+ /* Save original stack pointer value on the stack */
+ add x1, x0, #0x100
+ str x1, [x0, #0xf8]
+ b print_exception
+1: restore_gp_regs
+ add sp, sp, #0x100
+ eret
+endfunc serror_vector_entry
+
func crash_dump
/* Save general-purpose registers on the stack. */
sub sp, sp, #0x100
diff --git a/tftf/framework/framework.mk b/tftf/framework/framework.mk
index 8ec18ea9a..b25d1d029 100644
--- a/tftf/framework/framework.mk
+++ b/tftf/framework/framework.mk
@@ -1,5 +1,5 @@
#
-# Copyright (c) 2018-2021, Arm Limited. All rights reserved.
+# Copyright (c) 2018-2023, Arm Limited. All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
#
@@ -25,9 +25,10 @@ TFTF_INCLUDES := \
-Iinclude/runtime_services \
-Iinclude/runtime_services/secure_el0_payloads \
-Iinclude/runtime_services/secure_el1_payloads \
+ -Iinclude/runtime_services/host_realm_managment \
-Ispm/cactus \
-Ispm/ivy \
- -Ispm/quark \
+ -Irealm \
-Ismc_fuzz/include
FRAMEWORK_SOURCES := ${AUTOGEN_DIR}/tests_list.c
@@ -53,7 +54,7 @@ FRAMEWORK_SOURCES += \
lib/events/events.c \
lib/extensions/amu/${ARCH}/amu.c \
lib/extensions/amu/${ARCH}/amu_helpers.S \
- lib/irq/irq.c \
+ lib/exceptions/irq.c \
lib/locks/${ARCH}/spinlock.S \
lib/power_management/hotplug/hotplug.c \
lib/power_management/suspend/${ARCH}/asm_tftf_suspend.S \
@@ -63,6 +64,8 @@ FRAMEWORK_SOURCES += \
lib/smc/${ARCH}/asm_smc.S \
lib/smc/${ARCH}/smc.c \
lib/trng/trng.c \
+ lib/errata_abi/errata_abi.c \
+ lib/transfer_list/transfer_list.c \
lib/trusted_os/trusted_os.c \
lib/utils/mp_printf.c \
lib/utils/uuid.c \
@@ -79,8 +82,15 @@ FRAMEWORK_SOURCES += ${COMPILER_RT_SRCS}
ifeq (${ARCH},aarch64)
# ARMv8.3 Pointer Authentication support files
FRAMEWORK_SOURCES += \
+ lib/exceptions/aarch64/sync.c \
+ lib/exceptions/aarch64/serror.c \
lib/extensions/pauth/aarch64/pauth.c \
- lib/extensions/pauth/aarch64/pauth_helpers.S
+ lib/extensions/pauth/aarch64/pauth_helpers.S \
+ lib/extensions/sme/aarch64/sme.c \
+ lib/extensions/sme/aarch64/sme_helpers.S \
+ lib/extensions/sme/aarch64/sme2_helpers.S \
+ lib/extensions/sve/aarch64/sve.c \
+ lib/extensions/sve/aarch64/sve_helpers.S
endif
TFTF_LINKERFILE := tftf/framework/tftf.ld.S
diff --git a/tftf/framework/include/tftf.h b/tftf/framework/include/tftf.h
index 43f1e7ea5..b9bf705b9 100644
--- a/tftf/framework/include/tftf.h
+++ b/tftf/framework/include/tftf.h
@@ -15,7 +15,7 @@
#define TFTF_WELCOME_STR "Booting trusted firmware test framework"
/* Maximum size of test output (in bytes) */
-#define TESTCASE_OUTPUT_MAX_SIZE 512
+#define TESTCASE_OUTPUT_MAX_SIZE 1024
/* Size of build message used to differentiate different TFTF binaries */
#define BUILD_MESSAGE_SIZE 0x20
diff --git a/tftf/framework/main.c b/tftf/framework/main.c
index 2350b9622..0701e281b 100644
--- a/tftf/framework/main.c
+++ b/tftf/framework/main.c
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2018-2020, Arm Limited. All rights reserved.
+ * Copyright (c) 2018-2022, Arm Limited. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@@ -24,6 +24,11 @@
#include <tftf.h>
#include <tftf_lib.h>
#include <timer.h>
+#if TRANSFER_LIST
+#include <transfer_list.h>
+#endif
+
+#define MIN_RETRY_TO_POWER_ON_LEAD_CPU 10
/* version information for TFTF */
extern const char version_string[];
@@ -42,7 +47,12 @@ static u_register_t cpu_on_ctx_id_arr[PLATFORM_CORE_COUNT];
static unsigned int test_is_rebooting;
/* Parameters arg0 and arg1 passed from BL31 */
+#if TRANSFER_LIST
+u_register_t ns_tl;
+u_register_t tl_signature;
+#else
u_register_t fw_config_base;
+#endif
u_register_t hw_config_base;
static inline const test_suite_t *current_testsuite(void)
@@ -309,6 +319,7 @@ static unsigned int close_test(void)
static void __dead2 hand_over_to_lead_cpu(void)
{
int ret;
+ unsigned int tftf_cpu_pwr_on_ctr = 0U;
unsigned int mpid = read_mpidr_el1() & MPID_MASK;
unsigned int core_pos = platform_get_core_pos(mpid);
@@ -321,8 +332,24 @@ static void __dead2 hand_over_to_lead_cpu(void)
* doesn't matter because it will be overwritten by prepare_next_test().
* Pass a NULL pointer to easily catch the problem in case something
* goes wrong.
+ *
+ * In CI with four world system (Normal, Secure, Root and Realm), on few
+ * instances, while the framework tries to turn on the CPU for next-test
+ * it fails to do so and receives error code (-4 : ALREADY_ON).
+ * This is due to the fact that the lead-cpu is still powering down as
+ * per EL-3 but invisible to EL-2. Hence retrying it in a loop with a
+ * small delay in between for certain iterations will resolve it.
*/
- ret = tftf_cpu_on(lead_cpu_mpid, 0, 0);
+ while (tftf_cpu_pwr_on_ctr < MIN_RETRY_TO_POWER_ON_LEAD_CPU) {
+ ret = tftf_cpu_on(lead_cpu_mpid, 0, 0);
+ if (ret == PSCI_E_SUCCESS) {
+ break;
+ } else {
+ tftf_cpu_pwr_on_ctr += 1;
+ waitms(1);
+ }
+ }
+
if (ret != PSCI_E_SUCCESS) {
ERROR("CPU%u: Failed to power on lead CPU%u (%d)\n",
core_pos, platform_get_core_pos(lead_cpu_mpid), ret);
@@ -531,7 +558,7 @@ void __dead2 tftf_cold_boot_main(void)
* authentication would fail then.
*/
#if ENABLE_PAUTH
- assert(is_armv8_3_pauth_apa_api_present());
+ assert(is_armv8_3_pauth_apa_api_apa3_present());
/*
* Program APIAKey_EL1 key and enable ARMv8.3-PAuth here as this
diff --git a/tftf/framework/tftf.ld.S b/tftf/framework/tftf.ld.S
index 9374206f1..3621d9717 100644
--- a/tftf/framework/tftf.ld.S
+++ b/tftf/framework/tftf.ld.S
@@ -1,18 +1,20 @@
/*
- * Copyright (c) 2020, Arm Limited. All rights reserved.
+ * Copyright (c) 2020-2022, Arm Limited. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
#include <platform_def.h>
#include <xlat_tables_defs.h>
+#include <host_realm_mem_layout.h>
+
OUTPUT_FORMAT(PLATFORM_LINKER_FORMAT)
OUTPUT_ARCH(PLATFORM_LINKER_ARCH)
ENTRY(tftf_entrypoint)
MEMORY {
- RAM (rwx): ORIGIN = DRAM_BASE, LENGTH = DRAM_SIZE
+ RAM (rwx): ORIGIN = TFTF_BASE, LENGTH = DRAM_SIZE
}
@@ -45,10 +47,30 @@ SECTIONS
.data : {
__DATA_START__ = .;
*(.data*)
+ . = NEXT(PAGE_SIZE); /* This ensures tftf.bin is aligned to page size. */
__DATA_END__ = .;
} >RAM
- stacks (NOLOAD) : {
+ /* End of LOAD Sections. NOLOAD sections begin here. */
+ /*
+ * Memory for the Realm Image has to follow next as it will be appended
+ * to the end of tftf.bin.
+ */
+ realm_payload (NOLOAD) : {
+ __REALM_PAYLOAD_START__ = .;
+ . = __REALM_PAYLOAD_START__ + REALM_MAX_LOAD_IMG_SIZE;
+ __REALM_PAYLOAD_END__ = .;
+ } >RAM
+
+ /* Memory pool for Realm payload tests. */
+ realm_pool (NOLOAD) : ALIGN(PAGE_SIZE) {
+ __REALM_POOL_START__ = .;
+ . = __REALM_POOL_START__ + (NS_REALM_SHARED_MEM_SIZE * MAX_REALM_COUNT) +
+ (PAGE_POOL_MAX_SIZE * MAX_REALM_COUNT);
+ __REALM_POOL_END__ = .;
+ } >RAM
+
+ stacks (NOLOAD) : ALIGN(16) {
__STACKS_START__ = .;
*(tftf_normal_stacks)
__STACKS_END__ = .;
@@ -56,9 +78,9 @@ SECTIONS
/*
* The .bss section gets initialised to 0 at runtime.
- * Its base address must be 16-byte aligned.
+ * Its base address is always PAGE_SIZE aligned.
*/
- .bss : ALIGN(16) {
+ .bss : {
__BSS_START__ = .;
*(SORT_BY_ALIGNMENT(.bss*))
*(COMMON)
@@ -67,10 +89,9 @@ SECTIONS
/*
* The xlat_table section is for full, aligned page tables (4K).
- * Removing them from .bss avoids forcing 4K alignment on
- * the .bss section and eliminates the unecessary zero init
+ * Removing them from .bss eliminates the unnecessary zero init
*/
- xlat_table (NOLOAD) : {
+ xlat_table (NOLOAD) : ALIGN(PAGE_SIZE) {
*(xlat_table)
} >RAM
@@ -105,7 +126,6 @@ SECTIONS
__COHERENT_RAM_UNALIGNED_SIZE__ =
__COHERENT_RAM_END_UNALIGNED__ - __COHERENT_RAM_START__;
-
__TFTF_END__ = .;
__BSS_SIZE__ = SIZEOF(.bss);