/*
 * Copyright (c) 2018-2019, Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <tftf.h>

	.globl	tftf_entrypoint
	.globl	tftf_hotplug_entry
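
/* ----------------------------------------------------------------------------
 * Two entry points are exported from this file: tftf_entrypoint, which the
 * preceding boot stage hands control to on the primary CPU at cold boot, and
 * tftf_hotplug_entry, which the framework passes as the entry point when it
 * powers a CPU on (typically via PSCI CPU_ON).
 * ----------------------------------------------------------------------------
 */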


/* ----------------------------------------------------------------------------
 * Cold boot entry point for the primary CPU.
 * ----------------------------------------------------------------------------
 */
func tftf_entrypoint
	bl	arch_init

	/* --------------------------------------------------------------------
	 * Invalidate the RW memory used by the TFTF image.
	 * This is done to safeguard against possible corruption of this
	 * memory by dirty cache lines in a system cache as a result of use
	 * by an earlier boot loader stage.
	 * --------------------------------------------------------------------
	 */
	adr	x0, __DATA_START__
	adr	x1, __DATA_END__
	sub	x1, x1, x0
	bl	inv_dcache_range

	/* --------------------------------------------------------------------
	 * This code is expected to be executed only by the primary CPU.
	 * Save the MPID of the first core that executes this code; if a
	 * secondary CPU has lost its way, make it spin forever.
	 * --------------------------------------------------------------------
	 */
	bl	save_primary_mpid

	/* --------------------------------------------------------------------
	 * Zero out NOBITS sections. There are 2 of them:
	 *   - the .bss section;
	 *   - the coherent memory section.
	 * --------------------------------------------------------------------
	 */
	ldr	x0, =__BSS_START__
	ldr	x1, =__BSS_SIZE__
	bl	zeromem16

	ldr	x0, =__COHERENT_RAM_START__
	ldr	x1, =__COHERENT_RAM_UNALIGNED_SIZE__
	bl	zeromem16

	/* --------------------------------------------------------------------
	 * Give ourselves a small coherent stack to ease the pain of
	 * initializing the MMU: the coherent region can be used safely while
	 * the MMU and data cache are still being set up.
	 * --------------------------------------------------------------------
	 */
	mrs	x0, mpidr_el1
	bl	platform_set_coherent_stack

	bl	tftf_early_platform_setup
	bl	tftf_plat_arch_setup

	/* --------------------------------------------------------------------
	 * Give ourselves a stack allocated in Normal Inner Shareable,
	 * Write-Back, Write-Allocate (Normal-IS-WBWA) memory.
	 * --------------------------------------------------------------------
	 */
	mrs	x0, mpidr_el1
	bl	platform_set_stack

	/* --------------------------------------------------------------------
	 * tftf_cold_boot_main() will perform the remaining architectural and
	 * platform setup, initialise the test framework's state, then run the
	 * tests. It is not expected to return, hence the tail call.
	 * --------------------------------------------------------------------
	 */
	b	tftf_cold_boot_main
endfunc tftf_entrypoint
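
/* ----------------------------------------------------------------------------
 * Note: the cold boot path above relies on the linker script exporting the
 * __DATA_START__/__DATA_END__, __BSS_START__/__BSS_SIZE__ and
 * __COHERENT_RAM_START__/__COHERENT_RAM_UNALIGNED_SIZE__ symbols; their
 * definitions live in the TFTF linker script rather than in this file.
 * ----------------------------------------------------------------------------
 */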

/* ----------------------------------------------------------------------------
 * Entry point for a CPU that has just been powered up.
 * In : x0 - context_id
 * ----------------------------------------------------------------------------
 */
func tftf_hotplug_entry
	/* --------------------------------------------------------------------
	 * Preserve the context_id in a callee-saved register
	 * --------------------------------------------------------------------
	 */
	mov	x19, x0

	bl	arch_init

	/* --------------------------------------------------------------------
	 * Give ourselves a small coherent stack to ease the pain of
	 * initializing the MMU: the coherent region can be used safely while
	 * the MMU and data cache are still being set up.
	 * --------------------------------------------------------------------
	 */
	mrs	x0, mpidr_el1
	bl	platform_set_coherent_stack

	/* --------------------------------------------------------------------
	 * Enable the MMU
	 * --------------------------------------------------------------------
	 */
	bl	tftf_plat_enable_mmu

	/* --------------------------------------------------------------------
	 * Give ourselves a stack in normal memory.
	 * --------------------------------------------------------------------
	 */
	mrs	x0, mpidr_el1
	bl	platform_set_stack

	/* --------------------------------------------------------------------
	 * Save the context_id for later retrieval by tests
	 * --------------------------------------------------------------------
	 */
	mrs	x0, mpidr_el1
	mov_imm x1, MPID_MASK
	and	x0, x0, x1
	bl	platform_get_core_pos

	mov	x1, x19

	bl	tftf_set_cpu_on_ctx_id

	/* --------------------------------------------------------------------
	 * Jump to warm boot main function
	 * --------------------------------------------------------------------
	 */
	b	tftf_warm_boot_main
endfunc tftf_hotplug_entry
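
/* ----------------------------------------------------------------------------
 * Illustrative only, not part of the build: a minimal sketch of how code
 * running later on the warm-booted CPU could read back the context_id that
 * tftf_hotplug_entry saved above. tftf_get_cpu_on_ctx_id() is assumed here
 * to be the read counterpart of tftf_set_cpu_on_ctx_id(), taking the core
 * position in x0 and returning the context_id in x0; check the framework
 * headers for the exact interface.
 *
 *	mrs	x0, mpidr_el1
 *	mov_imm	x1, MPID_MASK
 *	and	x0, x0, x1
 *	bl	platform_get_core_pos
 *	bl	tftf_get_cpu_on_ctx_id
 * ----------------------------------------------------------------------------
 */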

/* ----------------------------------------------------------------------------
 * Save the MPID of the primary core. If a primary core MPID has already
 * been saved, loop forever (only the primary CPU is expected to get here).
 * ----------------------------------------------------------------------------
 */
func save_primary_mpid
	adrp	x1, tftf_primary_core
	ldr	w0, [x1, :lo12:tftf_primary_core]
	mov	w2, #INVALID_MPID
	cmp	w0, w2
	b.ne	panic
	mov_imm	x2, MPID_MASK
	mrs	x0, mpidr_el1
	and	x0, x0, x2
	str	w0, [x1, :lo12:tftf_primary_core]
	ret
panic:
	/* Primary core MPID already saved */
	b	.
	ret
endfunc save_primary_mpid
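
/* ----------------------------------------------------------------------------
 * Illustrative only, not part of the build: save_primary_mpid relies on
 * tftf_primary_core being initialised to INVALID_MPID before any CPU runs
 * the check above. The variable is accessed as 32 bits here and, in the
 * framework, is defined in C; an equivalent assembly-level definition would
 * look roughly like:
 *
 *	.section .data
 *	.align	2
 * tftf_primary_core:
 *	.word	INVALID_MPID
 * ----------------------------------------------------------------------------
 */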

/* Initialize architectural state. */
func arch_init
	mrs	x0, CurrentEL
	cmp	x0, #(MODE_EL1 << MODE_EL_SHIFT)
	b.eq	el1_setup

el2_setup:
	/* Set the exception vectors. */
	adr	x0, tftf_vector
	msr	vbar_el2, x0

	/*
	 * Enable the instruction cache, data access alignment checks and
	 * stack pointer alignment checks.
	 */
	mov_imm	x0, (SCTLR_EL2_RES1 | SCTLR_I_BIT | SCTLR_A_BIT | SCTLR_SA_BIT)
	msr	sctlr_el2, x0

	isb
	ret

el1_setup:
	/* Set the exception vectors. */
	adr	x0, tftf_vector
	msr	vbar_el1, x0

	/*
	 * Enable the instruction cache, data access alignment checks and
	 * stack pointer alignment checks.
	 */
	mov_imm	x0, (SCTLR_EL1_RES1 | SCTLR_I_BIT | SCTLR_A_BIT | SCTLR_SA_BIT)
	msr	sctlr_el1, x0

	isb
	ret
endfunc arch_init
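
/* ----------------------------------------------------------------------------
 * Illustrative only, not part of the build: a minimal sketch, assuming the
 * standard SMC64 PSCI CPU_ON function ID (0xC4000003), of how a CPU would be
 * released into tftf_hotplug_entry. In practice the framework issues CPU_ON
 * from C through its power management library rather than with a raw SMC.
 *
 *	mov_imm	x0, 0xC4000003		// PSCI CPU_ON (SMC64)
 *	mov	x1, x19			// target CPU MPIDR (prepared by caller)
 *	adr	x2, tftf_hotplug_entry	// entry point for the powered-on CPU
 *	mov	x3, x20			// context_id, arrives in x0 at entry
 *	smc	#0
 * ----------------------------------------------------------------------------
 */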