path: root/lib/cpus/aarch64/cortex_a76.S
author    Jeenu Viswambharan <jeenu.viswambharan@arm.com>  2018-11-15 11:38:03 +0000
committer Jeenu Viswambharan <jeenu.viswambharan@arm.com>  2018-12-10 14:28:58 +0000
commit    48e1d350a0021a9a2f7e34041f28273dee9eb885 (patch)
tree      090c159d164bcb90c44b2e0101fc844a3754808f /lib/cpus/aarch64/cortex_a76.S
parent    19b56cf4a2251e5ffcab41cdd6dd8449f8b1402a (diff)
AArch64: Use SSBS for CVE_2018_3639 mitigation
The Armv8.5 extensions introduce the PSTATE.SSBS (Speculation Store Bypass Safe) bit to mitigate against Variant 4 vulnerabilities. Although an Armv8.5 feature, it can also be implemented by CPUs implementing earlier versions of the architecture.

With this patch, when both PSTATE.SSBS is implemented and DYNAMIC_WORKAROUND_CVE_2018_3639 is active, querying for SMCCC_ARCH_WORKAROUND_2 via the SMCCC_ARCH_FEATURES call returns 1 to indicate that the mitigation on the PE is either permanently enabled or not required.

When SSBS is implemented, SCTLR_EL3.DSSBS is initialized to 0 at reset of every BL stage. This means that EL3 always executes with the mitigation applied.

For Cortex-A76, if the PE implements SSBS, the existing mitigation (using a different vector table and tweaking CPU ACTLR2) is not used.

Change-Id: Ib0386c5714184144d4747951751c2fc6ba4242b6
Signed-off-by: Jeenu Viswambharan <jeenu.viswambharan@arm.com>
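For illustration only, the feature-query behaviour described above can be sketched in C. This is a minimal sketch, not the actual TF-A handler; ssbs_supported() and dynamic_wa_cve_2018_3639_active() are hypothetical helpers standing in for the ID_AA64PFR1_EL1.SSBS check and the build-time option.

/*
 * Sketch of the SMCCC_ARCH_FEATURES result for SMCCC_ARCH_WORKAROUND_2
 * as described in the commit message. Not the real TF-A code; the two
 * extern helpers are hypothetical stand-ins.
 */
#include <stdbool.h>
#include <stdint.h>

#define SMCCC_ARCH_WORKAROUND_2		0x80007fffU

extern bool ssbs_supported(void);                  /* ID_AA64PFR1_EL1.SSBS != 0 */
extern bool dynamic_wa_cve_2018_3639_active(void); /* DYNAMIC_WORKAROUND_CVE_2018_3639 */

static int32_t arch_features_query_wa2(uint32_t fid)
{
	if (fid != SMCCC_ARCH_WORKAROUND_2)
		return -1;	/* NOT_SUPPORTED */

	/*
	 * With SSBS present, SCTLR_EL3.DSSBS is reset to 0 and the
	 * mitigation is permanently applied or not required: report 1.
	 */
	if (ssbs_supported())
		return 1;

	/* Otherwise the workaround is available and toggled dynamically. */
	if (dynamic_wa_cve_2018_3639_active())
		return 0;

	return -1;	/* NOT_SUPPORTED */
}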
Diffstat (limited to 'lib/cpus/aarch64/cortex_a76.S')
-rw-r--r--  lib/cpus/aarch64/cortex_a76.S  13
1 file changed, 11 insertions(+), 2 deletions(-)
diff --git a/lib/cpus/aarch64/cortex_a76.S b/lib/cpus/aarch64/cortex_a76.S
index 1697c55dc8..4def143737 100644
--- a/lib/cpus/aarch64/cortex_a76.S
+++ b/lib/cpus/aarch64/cortex_a76.S
@@ -208,14 +208,20 @@ endfunc cortex_a76_disable_wa_cve_2018_3639
 func cortex_a76_reset_func
 	mov	x19, x30
+
 #if WORKAROUND_CVE_2018_3639
+	/* If the PE implements SSBS, we don't need the dynamic workaround */
+	mrs	x0, id_aa64pfr1_el1
+	lsr	x0, x0, #ID_AA64PFR1_EL1_SSBS_SHIFT
+	and	x0, x0, #ID_AA64PFR1_EL1_SSBS_MASK
+	cbnz	x0, 1f
+
 	mrs	x0, CORTEX_A76_CPUACTLR2_EL1
 	orr	x0, x0, #CORTEX_A76_CPUACTLR2_EL1_DISABLE_LOAD_PASS_STORE
 	msr	CORTEX_A76_CPUACTLR2_EL1, x0
 	isb
-#endif

-#if IMAGE_BL31 && WORKAROUND_CVE_2018_3639
+#ifdef IMAGE_BL31
 	/*
 	 * The Cortex-A76 generic vectors are overwritten to use the vectors
 	 * defined above. This is required in order to apply mitigation
@@ -226,6 +232,9 @@ func cortex_a76_reset_func
 	isb
 #endif

+1:
+#endif
+
 #if ERRATA_DSU_936184
 	bl	errata_dsu_936184_wa
 #endif
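
As a side note, the new code in the first hunk reads ID_AA64PFR1_EL1, isolates the SSBS field and, if it is non-zero, branches past both the CPUACTLR2 tweak and the vector-table override. A rough C equivalent of that test is sketched below; the shift and mask values are assumptions restated from the architectural field position (bits [7:4]) rather than copied from TF-A's headers.

#include <stdbool.h>
#include <stdint.h>

/* SSBS field of ID_AA64PFR1_EL1: assumed at bits [7:4]. */
#define ID_AA64PFR1_EL1_SSBS_SHIFT	4
#define ID_AA64PFR1_EL1_SSBS_MASK	0xfULL

/* Mirrors the mrs/lsr/and/cbnz sequence in the diff above. */
static bool pe_implements_ssbs(void)
{
	uint64_t pfr1;

	/* GCC/Clang inline asm; must run at an EL that can read ID registers. */
	__asm__ volatile("mrs %0, id_aa64pfr1_el1" : "=r"(pfr1));

	return ((pfr1 >> ID_AA64PFR1_EL1_SSBS_SHIFT) &
		ID_AA64PFR1_EL1_SSBS_MASK) != 0ULL;
}

When this check succeeds, the reset function falls through to label 1: and leaves the pre-existing CVE-2018-3639 vector table and CPUACTLR2 setting untouched, exactly as the commit message describes.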