path: root/lib/cpus/aarch64/wa_cve_2017_5715_mmu.S
/*
 * Copyright (c) 2017-2018, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <arm_arch_svc.h>
#include <asm_macros.S>
#include <context.h>
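
/*
 * Vector table for the CVE-2017-5715 ("Spectre variant 2") workaround on
 * CPUs where toggling the EL3 MMU enable invalidates the branch predictor
 * (for example, Cortex-A57 and Cortex-A72).  Each entry from a lower EL
 * applies the workaround and then falls through to the regular runtime
 * vector.
 */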

	.globl	wa_cve_2017_5715_mmu_vbar

#define ESR_EL3_A64_SMC0	0x5e000000
#define ESR_EL3_A32_SMC0	0x4e000000
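
/*
 * These encode ESR_EL3 for an SMC #0: EC[31:26] is 0x17 (SMC from
 * AArch64) or 0x13 (SMC from AArch32), IL[25] is 1 and ISS[24:0] is 0
 * (the immediate is zero), giving (0x17 << 26) | (1 << 25) = 0x5e000000
 * and (0x13 << 26) | (1 << 25) = 0x4e000000 respectively.
 */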

vector_base wa_cve_2017_5715_mmu_vbar

	.macro	apply_cve_2017_5715_wa _is_sync_exception _esr_el3_val
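	/*
	 * On the affected CPUs, disabling and re-enabling the MMU at EL3
	 * invalidates the branch predictor, discarding any predictor state
	 * that a lower EL may have poisoned.
	 */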
	stp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	mrs	x1, sctlr_el3
	/* Disable MMU */
	bic	x1, x1, #SCTLR_M_BIT
	msr	sctlr_el3, x1
	isb
	/* Enable MMU */
	orr	x1, x1, #SCTLR_M_BIT
	msr	sctlr_el3, x1
	/*
	 * Defer the ISB to avoid synchronizing twice in case we hit the
	 * workaround SMC call, which will synchronize implicitly via its
	 * ERET instruction.
	 */

	/*
	 * Ensure the SMC came from AArch64/AArch32 state with immediate #0
	 * and with W0 == SMCCC_ARCH_WORKAROUND_1.
	 *
	 * This sequence evaluates as:
	 *    (W0 == SMCCC_ARCH_WORKAROUND_1) ? (ESR_EL3 == SMC#0) : (NE)
	 * allowing use of a single branch operation.
	 */
	.if \_is_sync_exception
		orr	w1, wzr, #SMCCC_ARCH_WORKAROUND_1
		cmp	w0, w1
		mrs	x0, esr_el3
		mov_imm	w1, \_esr_el3_val
		ccmp	w0, w1, #0, eq
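		/*
		 * If W0 matched, the flags now reflect the comparison of
		 * ESR_EL3 against the expected SMC #0 encoding; otherwise
		 * NZCV is forced to 0, i.e. NE.
		 */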
		/* Static predictor will predict a fall through */
		bne	1f
		eret
1:
	.endif

	/*
	 * Synchronize now so that the MMU re-enable takes effect.  This is
	 * required to ensure the load pair below reads the data stored
	 * earlier.
	 */
	isb
	ldp	x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
	.endm
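
/*
 * A minimal sketch (not part of this file) of how a lower EL would invoke
 * the fast path above, assuming SMCCC_ARCH_WORKAROUND_1 (0x80008000) has
 * been discovered via SMCCC_ARCH_FEATURES:
 *
 *	mov_imm	w0, SMCCC_ARCH_WORKAROUND_1
 *	smc	#0	// returns via the early ERET; X0-X3 are clobbered
 */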

	/* ---------------------------------------------------------------------
	 * Current EL with SP_EL0 : 0x0 - 0x200
	 * ---------------------------------------------------------------------
	 */
vector_entry mmu_sync_exception_sp_el0
	b	sync_exception_sp_el0
end_vector_entry mmu_sync_exception_sp_el0

vector_entry mmu_irq_sp_el0
	b	irq_sp_el0
end_vector_entry mmu_irq_sp_el0

vector_entry mmu_fiq_sp_el0
	b	fiq_sp_el0
end_vector_entry mmu_fiq_sp_el0

vector_entry mmu_serror_sp_el0
	b	serror_sp_el0
end_vector_entry mmu_serror_sp_el0

	/* ---------------------------------------------------------------------
	 * Current EL with SP_ELx: 0x200 - 0x400
	 * ---------------------------------------------------------------------
	 */
vector_entry mmu_sync_exception_sp_elx
	b	sync_exception_sp_elx
end_vector_entry mmu_sync_exception_sp_elx

vector_entry mmu_irq_sp_elx
	b	irq_sp_elx
end_vector_entry mmu_irq_sp_elx

vector_entry mmu_fiq_sp_elx
	b	fiq_sp_elx
end_vector_entry mmu_fiq_sp_elx

vector_entry mmu_serror_sp_elx
	b	serror_sp_elx
end_vector_entry mmu_serror_sp_elx
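
	/*
	 * The workaround below is applied only on entries from a lower EL;
	 * the current-EL vectors above branch straight to the runtime
	 * handlers, as the branch predictor needs sanitizing only when
	 * control arrives from less privileged software.
	 */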

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch64 : 0x400 - 0x600
	 * ---------------------------------------------------------------------
	 */
vector_entry mmu_sync_exception_aarch64
	apply_cve_2017_5715_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A64_SMC0
	b	sync_exception_aarch64
end_vector_entry mmu_sync_exception_aarch64

vector_entry mmu_irq_aarch64
	apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	irq_aarch64
end_vector_entry mmu_irq_aarch64

vector_entry mmu_fiq_aarch64
	apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	fiq_aarch64
end_vector_entry mmu_fiq_aarch64

vector_entry mmu_serror_aarch64
	apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A64_SMC0
	b	serror_aarch64
end_vector_entry mmu_serror_aarch64

	/* ---------------------------------------------------------------------
	 * Lower EL using AArch32 : 0x600 - 0x800
	 * ---------------------------------------------------------------------
	 */
vector_entry mmu_sync_exception_aarch32
	apply_cve_2017_5715_wa _is_sync_exception=1 _esr_el3_val=ESR_EL3_A32_SMC0
	b	sync_exception_aarch32
end_vector_entry mmu_sync_exception_aarch32

vector_entry mmu_irq_aarch32
	apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	irq_aarch32
end_vector_entry mmu_irq_aarch32

vector_entry mmu_fiq_aarch32
	apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	fiq_aarch32
end_vector_entry mmu_fiq_aarch32

vector_entry mmu_serror_aarch32
	apply_cve_2017_5715_wa _is_sync_exception=0 _esr_el3_val=ESR_EL3_A32_SMC0
	b	serror_aarch32
end_vector_entry mmu_serror_aarch32