Makefile: Enable strict alignment and arch minor version
Add the -mstrict-align flag and the -march minor version. These are
needed to prevent compilers from generating unaligned accesses and to
enable architectural features, respectively.
Enable the SCTLR.A and SCTLR.SA alignment checks in all images.
TF test has several cases of code which enable the alignment checks.
Change-Id: I9a0413786caf94d0abf376aa1b4fb54fc7f2f355
Signed-off-by: Joel Hutton <Joel.Hutton@Arm.com>
diff --git a/Makefile b/Makefile
index ece61a2..e699def 100644
--- a/Makefile
+++ b/Makefile
@@ -174,11 +174,20 @@
COMMON_ASFLAGS += -g -Wa,--gdwarf-2
endif
-COMMON_ASFLAGS_aarch64 := -mgeneral-regs-only
-COMMON_CFLAGS_aarch64 := -mgeneral-regs-only
+# Set the compiler's target architecture profile based on ARM_ARCH_MINOR option
+ifeq (${ARM_ARCH_MINOR},0)
+march32-directive = -march=armv8-a
+march64-directive = -march=armv8-a
+else
+march32-directive = -march=armv8.${ARM_ARCH_MINOR}-a
+march64-directive = -march=armv8.${ARM_ARCH_MINOR}-a
+endif
-COMMON_ASFLAGS_aarch32 := -march=armv8-a
-COMMON_CFLAGS_aarch32 := -march=armv8-a
+COMMON_ASFLAGS_aarch64 := -mgeneral-regs-only ${march64-directive}
+COMMON_CFLAGS_aarch64 := -mgeneral-regs-only -mstrict-align ${march64-directive}
+
+COMMON_ASFLAGS_aarch32 := ${march32-directive}
+COMMON_CFLAGS_aarch32 := ${march32-directive} -mno-unaligned-access
COMMON_ASFLAGS += -nostdinc -ffreestanding -Wa,--fatal-warnings \
-Werror -Wmissing-include-dirs \
diff --git a/fwu/ns_bl1u/aarch32/ns_bl1u_entrypoint.S b/fwu/ns_bl1u/aarch32/ns_bl1u_entrypoint.S
index 22b2f34..b9c0d87 100644
--- a/fwu/ns_bl1u/aarch32/ns_bl1u_entrypoint.S
+++ b/fwu/ns_bl1u/aarch32/ns_bl1u_entrypoint.S
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2018, Arm Limited. All rights reserved.
+ * Copyright (c) 2018-2019, Arm Limited. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@@ -19,11 +19,11 @@
stcopr r0, HVBAR
/* --------------------------------------------------------------------
- * Enable the instruction cache.
+ * Enable the instruction cache and alignment checks.
* --------------------------------------------------------------------
*/
ldcopr r0, HSCTLR
- ldr r1, =HSCTLR_I_BIT
+ ldr r1, =(HSCTLR_I_BIT | HSCTLR_A_BIT)
orr r0, r0, r1
stcopr r0, HSCTLR
isb
diff --git a/fwu/ns_bl1u/aarch64/ns_bl1u_entrypoint.S b/fwu/ns_bl1u/aarch64/ns_bl1u_entrypoint.S
index 919ec27..a2e9027 100644
--- a/fwu/ns_bl1u/aarch64/ns_bl1u_entrypoint.S
+++ b/fwu/ns_bl1u/aarch64/ns_bl1u_entrypoint.S
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2018, Arm Limited. All rights reserved.
+ * Copyright (c) 2018-2019, Arm Limited. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@@ -19,10 +19,10 @@
asm_write_vbar_el1_or_el2 x1
/* --------------------------------------------------------------------
- * Enable the instruction cache and stack pointer alignment checks.
+ * Enable the instruction cache and alignment checks.
* --------------------------------------------------------------------
*/
- mov x1, #(SCTLR_I_BIT | SCTLR_SA_BIT)
+ mov x1, #(SCTLR_I_BIT | SCTLR_A_BIT | SCTLR_SA_BIT)
asm_read_sctlr_el1_or_el2
orr x0, x0, x1
asm_write_sctlr_el1_or_el2 x1
diff --git a/fwu/ns_bl2u/aarch32/ns_bl2u_entrypoint.S b/fwu/ns_bl2u/aarch32/ns_bl2u_entrypoint.S
index 8ba3549..28a4572 100644
--- a/fwu/ns_bl2u/aarch32/ns_bl2u_entrypoint.S
+++ b/fwu/ns_bl2u/aarch32/ns_bl2u_entrypoint.S
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2018, Arm Limited. All rights reserved.
+ * Copyright (c) 2018-2019, Arm Limited. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@@ -19,11 +19,11 @@
stcopr r0, HVBAR
/* ---------------------------------------------------------------------
- * Enable the instruction cache.
+ * Enable the instruction cache and alignment checks.
* ---------------------------------------------------------------------
*/
ldcopr r0, HSCTLR
- ldr r1, =HSCTLR_I_BIT
+ ldr r1, =(HSCTLR_I_BIT | HSCTLR_A_BIT)
orr r0, r0, r1
stcopr r0, HSCTLR
isb
diff --git a/fwu/ns_bl2u/aarch64/ns_bl2u_entrypoint.S b/fwu/ns_bl2u/aarch64/ns_bl2u_entrypoint.S
index 4e061b3..0828f5b 100644
--- a/fwu/ns_bl2u/aarch64/ns_bl2u_entrypoint.S
+++ b/fwu/ns_bl2u/aarch64/ns_bl2u_entrypoint.S
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2018, Arm Limited. All rights reserved.
+ * Copyright (c) 2018-2019, Arm Limited. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@@ -19,10 +19,10 @@
asm_write_vbar_el1_or_el2 x1
/* --------------------------------------------------------------------
- * Enable the instruction cache and stack pointer alignment checks.
+ * Enable the instruction cache and alignment checks.
* --------------------------------------------------------------------
*/
- mov x1, #(SCTLR_I_BIT | SCTLR_SA_BIT)
+ mov x1, #(SCTLR_I_BIT | SCTLR_A_BIT | SCTLR_SA_BIT)
asm_read_sctlr_el1_or_el2
orr x0, x0, x1
asm_write_sctlr_el1_or_el2 x1
diff --git a/tftf/framework/aarch64/entrypoint.S b/tftf/framework/aarch64/entrypoint.S
index 0455082..0a4afe0 100644
--- a/tftf/framework/aarch64/entrypoint.S
+++ b/tftf/framework/aarch64/entrypoint.S
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2018, Arm Limited. All rights reserved.
+ * Copyright (c) 2018-2019, Arm Limited. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@@ -156,8 +156,8 @@
adr x0, tftf_vector
msr vbar_el2, x0
- /* Enable the instruction cache and stack pointer alignment checks. */
- mov_imm x0, (SCTLR_EL2_RES1 | SCTLR_I_BIT | SCTLR_SA_BIT)
+ /* Enable the instruction cache and alignment checks. */
+ mov_imm x0, (SCTLR_EL2_RES1 | SCTLR_I_BIT | SCTLR_A_BIT | SCTLR_SA_BIT)
msr sctlr_el2, x0
isb
@@ -169,7 +169,7 @@
msr vbar_el1, x0
/* Enable the instruction cache and stack pointer alignment checks. */
- mov_imm x0, (SCTLR_EL1_RES1 | SCTLR_I_BIT | SCTLR_SA_BIT)
+ mov_imm x0, (SCTLR_EL1_RES1 | SCTLR_I_BIT | SCTLR_A_BIT | SCTLR_SA_BIT)
msr sctlr_el1, x0
isb