author     Sandrine Bailleux <sandrine.bailleux@arm.com>  2019-01-04 11:52:41 +0000
committer  TrustedFirmware Code Review <review@review.trustedfirmware.org>  2019-01-04 11:52:41 +0000
commit     bbdb2762062547ef279256d0163e4d743f21474a (patch)
tree       4d6cda2ba961c39aeecd9bc870581ff149060821
parent     3f556ebf53fb3c25eb1e85110643a593e5fe5d26 (diff)
parent     39caa2cfde26781c98b44f1696f684e58cd4a287 (diff)
Merge changes from topic "sb/init-code"

* changes:
  Share AArch32 arch. init code between cold/warm boot code
  Rework TFTF AArch64 entry point code
-rw-r--r--  tftf/framework/aarch32/entrypoint.S  |  51
-rw-r--r--  tftf/framework/aarch64/entrypoint.S  |  69
2 files changed, 52 insertions(+), 68 deletions(-)
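
Both entry paths previously duplicated the early architectural setup (exception
vectors, cache enable). This merge factors that setup into a single arch_init
routine called from the cold-boot and warm-boot entry points alike. A minimal C
sketch of the resulting call structure, reusing the function names from the
diffs below (an illustration only; the real routines are assembly):

/* Shared architectural setup: vectors + I-cache (and SP alignment on AArch64). */
static void arch_init(void)
{
        /* set exception vectors, enable caches/checks -- see the diffs */
}

void tftf_entrypoint(void)                          /* cold boot, primary CPU */
{
        arch_init();
        /* ... primary-only boot path ... */
}

void tftf_hotplug_entry(unsigned long context_id)   /* warm boot, secondaries */
{
        arch_init();
        /* ... hotplug path; context_id is preserved by the caller ... */
}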
diff --git a/tftf/framework/aarch32/entrypoint.S b/tftf/framework/aarch32/entrypoint.S
index 5832dd74..1f302fa8 100644
--- a/tftf/framework/aarch32/entrypoint.S
+++ b/tftf/framework/aarch32/entrypoint.S
@@ -16,22 +16,7 @@
* ----------------------------------------------------------------------------
*/
func tftf_entrypoint
- /* --------------------------------------------------------------------
- * Set the exception vectors
- * --------------------------------------------------------------------
- */
- ldr r0, =tftf_vector
- stcopr r0, HVBAR
-
- /* --------------------------------------------------------------------
- * Enable the instruction cache.
- * --------------------------------------------------------------------
- */
- ldcopr r0, HSCTLR
- ldr r1, =HSCTLR_I_BIT
- orr r0, r0, r1
- stcopr r0, HSCTLR
- isb
+ bl arch_init
/* --------------------------------------------------------------------
* This code is expected to be executed only by the primary CPU.
@@ -95,22 +80,7 @@ func tftf_hotplug_entry
*/
mov r4, r0
- /* --------------------------------------------------------------------
- * Set the exception vectors
- * --------------------------------------------------------------------
- */
- ldr r0, =tftf_vector
- stcopr r0, HVBAR
-
- /* --------------------------------------------------------------------
- * Enable the instruction cache.
- * --------------------------------------------------------------------
- */
- ldcopr r0, HSCTLR
- ldr r1, =HSCTLR_I_BIT
- orr r0, r0, r1
- stcopr r0, HSCTLR
- isb
+ bl arch_init
/* --------------------------------------------------------------------
* Give ourselves a small coherent stack to ease the pain of
@@ -154,6 +124,23 @@ func tftf_hotplug_entry
endfunc tftf_hotplug_entry
/* ----------------------------------------------------------------------------
+ * Initialize architectural state.
+ * ----------------------------------------------------------------------------
+ */
+func arch_init
+ /* Set the exception vectors. */
+ ldr r0, =tftf_vector
+ stcopr r0, HVBAR
+
+ /* Enable the instruction cache. */
+ ldr r0, =(HSCTLR_RES1 | HSCTLR_I_BIT)
+ stcopr r0, HSCTLR
+
+ isb
+ bx lr
+endfunc arch_init
+
+/* ----------------------------------------------------------------------------
* Saves the mpid of the primary core and if the primary core
* is already saved then it loops infinitely.
* ----------------------------------------------------------------------------
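
Note that the AArch32 rework also changes how HSCTLR is programmed: instead of
reading the register and OR-ing in the I bit, the new arch_init writes a
known-good value composed of the architecturally RES1 bits plus the I bit, so
the result no longer depends on whatever state the register held at entry. A C
sketch of the two approaches (the mask values below are assumptions based on
the ARMv7-A HSCTLR layout, not taken from the diff):

#include <stdint.h>

#define HSCTLR_RES1  0x30C50818u   /* assumed RES1 mask for ARMv7-A HSCTLR */
#define HSCTLR_I_BIT (1u << 12)    /* assumed: I-cache enable is bit 12 */

/* Before: read-modify-write -- inherits unknown bits from the old value. */
static uint32_t hsctlr_rmw(uint32_t current)
{
        return current | HSCTLR_I_BIT;
}

/* After: direct write -- RES1 bits plus I bit, independent of prior state. */
static uint32_t hsctlr_direct(void)
{
        return HSCTLR_RES1 | HSCTLR_I_BIT;
}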
diff --git a/tftf/framework/aarch64/entrypoint.S b/tftf/framework/aarch64/entrypoint.S
index 34ba90f5..04550822 100644
--- a/tftf/framework/aarch64/entrypoint.S
+++ b/tftf/framework/aarch64/entrypoint.S
@@ -17,22 +17,7 @@
* ----------------------------------------------------------------------------
*/
func tftf_entrypoint
- /* --------------------------------------------------------------------
- * Set the exception vectors
- * --------------------------------------------------------------------
- */
- adr x0, tftf_vector
- asm_write_vbar_el1_or_el2 x1
-
- /* --------------------------------------------------------------------
- * Enable the instruction cache and stack pointer alignment checks.
- * --------------------------------------------------------------------
- */
- mov x1, #(SCTLR_I_BIT | SCTLR_SA_BIT)
- asm_read_sctlr_el1_or_el2
- orr x0, x0, x1
- asm_write_sctlr_el1_or_el2 x1
- isb
+ bl arch_init
/* --------------------------------------------------------------------
* This code is expected to be executed only by the primary CPU.
@@ -81,9 +66,6 @@ func tftf_entrypoint
* --------------------------------------------------------------------
*/
b tftf_cold_boot_main
-
-dead:
- b dead
endfunc tftf_entrypoint
/* ----------------------------------------------------------------------------
@@ -92,29 +74,13 @@ endfunc tftf_entrypoint
* ----------------------------------------------------------------------------
*/
func tftf_hotplug_entry
-
/* --------------------------------------------------------------------
* Preserve the context_id in a callee-saved register
* --------------------------------------------------------------------
*/
mov x19, x0
- /* --------------------------------------------------------------------
- * Set the exception vectors
- * --------------------------------------------------------------------
- */
- adr x0, tftf_vector
- asm_write_vbar_el1_or_el2 x1
-
- /* --------------------------------------------------------------------
- * Enable the instruction cache and stack pointer alignment checks.
- * --------------------------------------------------------------------
- */
- mov x1, #(SCTLR_I_BIT | SCTLR_SA_BIT)
- asm_read_sctlr_el1_or_el2
- orr x0, x0, x1
- asm_write_sctlr_el1_or_el2 x1
- isb
+ bl arch_init
/* --------------------------------------------------------------------
* Give ourselves a small coherent stack to ease the pain of
@@ -178,3 +144,34 @@ panic:
b .
ret
endfunc save_primary_mpid
+
+/* Initialize architectural state. */
+func arch_init
+ mrs x0, CurrentEL
+ cmp x0, #(MODE_EL1 << MODE_EL_SHIFT)
+ b.eq el1_setup
+
+el2_setup:
+ /* Set the exception vectors. */
+ adr x0, tftf_vector
+ msr vbar_el2, x0
+
+ /* Enable the instruction cache and stack pointer alignment checks. */
+ mov_imm x0, (SCTLR_EL2_RES1 | SCTLR_I_BIT | SCTLR_SA_BIT)
+ msr sctlr_el2, x0
+
+ isb
+ ret
+
+el1_setup:
+ /* Set the exception vectors. */
+ adr x0, tftf_vector
+ msr vbar_el1, x0
+
+ /* Enable the instruction cache and stack pointer alignment checks. */
+ mov_imm x0, (SCTLR_EL1_RES1 | SCTLR_I_BIT | SCTLR_SA_BIT)
+ msr sctlr_el1, x0
+
+ isb
+ ret
+endfunc arch_init
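
The AArch64 arch_init dispatches on the current exception level because TFTF
may be entered at EL2 or EL1. CurrentEL encodes the level in bits [3:2], so
comparing the raw register value against MODE_EL1 << MODE_EL_SHIFT selects the
EL1 path, and everything else falls through to the EL2 path. A C sketch of that
dispatch (MODE_EL1 = 1 and MODE_EL_SHIFT = 2 are assumptions mirroring the
usual TF-A definitions, not values shown in the diff):

#include <stdint.h>

#define MODE_EL_SHIFT 2u   /* assumed: CurrentEL keeps the EL in bits [3:2] */
#define MODE_EL1      1u

static void arch_init_dispatch(uint64_t current_el)
{
        if (current_el == ((uint64_t)MODE_EL1 << MODE_EL_SHIFT)) {
                /* el1_setup: program vbar_el1 and sctlr_el1 */
        } else {
                /* el2_setup: any non-EL1 entry is treated as EL2 here;
                 * the assembly simply falls through to this path. */
        }
}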