/*
 * Copyright (c) 2016-2022, Arm Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#ifndef ARCH_HELPERS_H
#define ARCH_HELPERS_H

#include <arch.h>
#include <cdefs.h>
#include <stdint.h>
#include <string.h>

/**********************************************************************
 * Macros which create inline functions to read or write CPU system
 * registers
 *********************************************************************/

#define _DEFINE_COPROCR_WRITE_FUNC(_name, coproc, opc1, CRn, CRm, opc2) \
static inline void write_## _name(u_register_t v) \
{ \
	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}

#define _DEFINE_COPROCR_READ_FUNC(_name, coproc, opc1, CRn, CRm, opc2) \
static inline u_register_t read_ ## _name(void) \
{ \
	u_register_t v; \
	__asm__ volatile ("mrc "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : "=r" (v));\
	return v; \
}

/*
 * The undocumented %Q and %R extended asm operand modifiers are used to
 * implement the 64-bit `mrrc` and `mcrr` accessors below.
 */

#define _DEFINE_COPROCR_WRITE_FUNC_64(_name, coproc, opc1, CRm) \
static inline void write64_## _name(uint64_t v) \
{ \
	__asm__ volatile ("mcrr "#coproc","#opc1", %Q0, %R0,"#CRm : : "r" (v));\
}

#define _DEFINE_COPROCR_READ_FUNC_64(_name, coproc, opc1, CRm) \
static inline uint64_t read64_## _name(void) \
{	uint64_t v; \
	__asm__ volatile ("mrrc "#coproc","#opc1", %Q0, %R0,"#CRm : "=r" (v));\
	return v; \
}

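/*
 * Illustrative expansion (not part of the original file): assuming arch.h
 * defines CNTPCT_64 as an operand list along the lines of "p15, 0, c14",
 * the instantiation "DEFINE_COPROCR_READ_FUNC_64(cntpct, CNTPCT_64)" further
 * down produces roughly:
 *
 *   static inline uint64_t read64_cntpct(void)
 *   {	uint64_t v;
 *	__asm__ volatile ("mrrc p15,0, %Q0, %R0,c14" : "=r" (v));
 *	return v;
 *   }
 *
 * On a little-endian AArch32 build, %Q0 names the register holding the low
 * 32 bits of v and %R0 the register holding the high 32 bits.
 */
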
#define _DEFINE_SYSREG_READ_FUNC(_name, _reg_name) \
static inline u_register_t read_ ## _name(void) \
{ \
	u_register_t v; \
	__asm__ volatile ("mrs %0, " #_reg_name : "=r" (v)); \
	return v; \
}

#define _DEFINE_SYSREG_WRITE_FUNC(_name, _reg_name) \
static inline void write_ ## _name(u_register_t v) \
{ \
	__asm__ volatile ("msr " #_reg_name ", %0" : : "r" (v)); \
}

#define _DEFINE_SYSREG_WRITE_CONST_FUNC(_name, _reg_name) \
static inline void write_ ## _name(const u_register_t v) \
{ \
	__asm__ volatile ("msr " #_reg_name ", %0" : : "i" (v)); \
}

/* Define read function for coproc register */
#define DEFINE_COPROCR_READ_FUNC(_name, ...) \
	_DEFINE_COPROCR_READ_FUNC(_name, __VA_ARGS__)

/* Define write function for coproc register */
#define DEFINE_COPROCR_WRITE_FUNC(_name, ...) \
	_DEFINE_COPROCR_WRITE_FUNC(_name, __VA_ARGS__)

/* Define read & write function for coproc register */
#define DEFINE_COPROCR_RW_FUNCS(_name, ...) \
	_DEFINE_COPROCR_READ_FUNC(_name, __VA_ARGS__) \
	_DEFINE_COPROCR_WRITE_FUNC(_name, __VA_ARGS__)

/* Define 64 bit read function for coproc register */
#define DEFINE_COPROCR_READ_FUNC_64(_name, ...) \
	_DEFINE_COPROCR_READ_FUNC_64(_name, __VA_ARGS__)

/* Define 64 bit write function for coproc register */
#define DEFINE_COPROCR_WRITE_FUNC_64(_name, ...) \
	_DEFINE_COPROCR_WRITE_FUNC_64(_name, __VA_ARGS__)

/* Define 64 bit read & write function for coproc register */
#define DEFINE_COPROCR_RW_FUNCS_64(_name, ...) \
	_DEFINE_COPROCR_READ_FUNC_64(_name, __VA_ARGS__) \
	_DEFINE_COPROCR_WRITE_FUNC_64(_name, __VA_ARGS__)

/* Define read & write function for system register */
#define DEFINE_SYSREG_RW_FUNCS(_name) \
	_DEFINE_SYSREG_READ_FUNC(_name, _name) \
	_DEFINE_SYSREG_WRITE_FUNC(_name, _name)

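/*
 * Illustrative note (not part of the original file): the extra level of
 * indirection above lets a single register-name macro from arch.h expand
 * into a full coprocessor operand list. Assuming arch.h defines something
 * like "#define MIDR p15, 0, c0, c0, 0", then
 *
 *   DEFINE_COPROCR_READ_FUNC(midr, MIDR)
 *
 * first expands MIDR into its five operands and then generates a
 * "static inline u_register_t read_midr(void)" accessor that issues
 * "mrc p15,0,%0,c0,c0,0".
 */
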
/**********************************************************************
 * Macros to create inline functions for tlbi operations
 *********************************************************************/

#define _DEFINE_TLBIOP_FUNC(_op, coproc, opc1, CRn, CRm, opc2) \
static inline void tlbi##_op(void) \
{ \
	u_register_t v = 0; \
	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}

#define _DEFINE_BPIOP_FUNC(_op, coproc, opc1, CRn, CRm, opc2) \
static inline void bpi##_op(void) \
{ \
	u_register_t v = 0; \
	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}

#define _DEFINE_TLBIOP_PARAM_FUNC(_op, coproc, opc1, CRn, CRm, opc2) \
static inline void tlbi##_op(u_register_t v) \
{ \
	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}

/* Define function for simple TLBI operation */
#define DEFINE_TLBIOP_FUNC(_op, ...) \
	_DEFINE_TLBIOP_FUNC(_op, __VA_ARGS__)

/* Define function for TLBI operation with register parameter */
#define DEFINE_TLBIOP_PARAM_FUNC(_op, ...) \
	_DEFINE_TLBIOP_PARAM_FUNC(_op, __VA_ARGS__)

/* Define function for simple BPI operation */
#define DEFINE_BPIOP_FUNC(_op, ...) \
	_DEFINE_BPIOP_FUNC(_op, __VA_ARGS__)

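/*
 * Illustrative example (not part of the original file): with the TLBIALL and
 * TLBIMVA operand lists coming from arch.h, the instantiations further down,
 *
 *   DEFINE_TLBIOP_FUNC(all, TLBIALL)
 *   DEFINE_TLBIOP_PARAM_FUNC(mva, TLBIMVA)
 *
 * yield "tlbiall()" (invalidate all TLB entries) and "tlbimva(va)"
 * (invalidate the TLB entry for the given virtual address).
 */
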
/**********************************************************************
 * Macros to create inline functions for DC operations
 *********************************************************************/
#define _DEFINE_DCOP_PARAM_FUNC(_op, coproc, opc1, CRn, CRm, opc2) \
static inline void dc##_op(u_register_t v) \
{ \
	__asm__ volatile ("mcr "#coproc","#opc1",%0,"#CRn","#CRm","#opc2 : : "r" (v));\
}

/* Define function for DC operation with register parameter */
#define DEFINE_DCOP_PARAM_FUNC(_op, ...) \
	_DEFINE_DCOP_PARAM_FUNC(_op, __VA_ARGS__)

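/*
 * Illustrative example (not part of the original file): the instantiation
 * "DEFINE_DCOP_PARAM_FUNC(civac, DCCIMVAC)" further down yields
 * "dccivac(addr)", which cleans and invalidates the data cache line
 * containing "addr" by virtual address.
 */
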
/**********************************************************************
 * Macros to create inline functions for system instructions
 *********************************************************************/
/* Define function for simple system instruction */
#define DEFINE_SYSOP_FUNC(_op) \
static inline void _op(void) \
{ \
	__asm__ (#_op); \
}

/* Define function for system instruction with type specifier */
#define DEFINE_SYSOP_TYPE_FUNC(_op, _type) \
static inline void _op ## _type(void) \
{ \
	__asm__ (#_op " " #_type); \
}

/* Define function for system instruction with register parameter */
#define DEFINE_SYSOP_TYPE_PARAM_FUNC(_op, _type) \
static inline void _op ## _type(u_register_t v) \
{ \
	__asm__ (#_op " " #_type ", %0" : : "r" (v)); \
}

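/*
 * Illustrative example (not part of the original file): the instantiation
 * "DEFINE_SYSOP_TYPE_FUNC(dsb, sy)" below generates "dsbsy()", which emits a
 * full-system data synchronization barrier ("dsb sy").
 */
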
void flush_dcache_range(uintptr_t addr, size_t size);
void clean_dcache_range(uintptr_t addr, size_t size);
void inv_dcache_range(uintptr_t addr, size_t size);

void dcsw_op_louis(u_register_t op_type);
void dcsw_op_all(u_register_t op_type);

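/*
 * Minimal usage sketch (not part of the original API; the function name and
 * scenario are illustrative only): clean and invalidate the cache lines
 * covering a buffer the CPU has just written before handing it over to a
 * non-coherent observer such as a DMA engine.
 */
static inline void example_flush_shared_buffer(void *buf, size_t size)
{
	/* Cleans and invalidates [buf, buf + size) by virtual address. */
	flush_dcache_range((uintptr_t)buf, size);
}
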
DEFINE_SYSOP_FUNC(wfi)
DEFINE_SYSOP_FUNC(wfe)
DEFINE_SYSOP_FUNC(sev)
DEFINE_SYSOP_TYPE_FUNC(dsb, sy)
DEFINE_SYSOP_TYPE_FUNC(dmb, sy)
DEFINE_SYSOP_TYPE_FUNC(dmb, st)

/* dmb ld is not valid for armv7/thumb machines */
#if ARM_ARCH_MAJOR != 7
DEFINE_SYSOP_TYPE_FUNC(dmb, ld)
#endif

DEFINE_SYSOP_TYPE_FUNC(dsb, ish)
DEFINE_SYSOP_TYPE_FUNC(dsb, ishst)
DEFINE_SYSOP_TYPE_FUNC(dmb, ish)
DEFINE_SYSOP_TYPE_FUNC(dmb, ishst)
DEFINE_SYSOP_FUNC(isb)

DEFINE_SYSREG_RW_FUNCS(spsr)
DEFINE_SYSREG_RW_FUNCS(cpsr)
DEFINE_SYSREG_RW_FUNCS(elr_hyp)

/*******************************************************************************
 * System register accessor prototypes
 ******************************************************************************/
DEFINE_COPROCR_READ_FUNC(mpidr, MPIDR)
DEFINE_COPROCR_READ_FUNC(midr, MIDR)
DEFINE_COPROCR_READ_FUNC(id_mmfr4, ID_MMFR4)
DEFINE_COPROCR_READ_FUNC(id_dfr0, ID_DFR0)
DEFINE_COPROCR_READ_FUNC(id_pfr0, ID_PFR0)
DEFINE_COPROCR_READ_FUNC(id_pfr1, ID_PFR1)
DEFINE_COPROCR_READ_FUNC(isr, ISR)
DEFINE_COPROCR_READ_FUNC(clidr, CLIDR)
DEFINE_COPROCR_READ_FUNC_64(cntpct, CNTPCT_64)
DEFINE_COPROCR_READ_FUNC(hsr, HSR)

DEFINE_COPROCR_RW_FUNCS(scr, SCR)
DEFINE_COPROCR_RW_FUNCS(ctr, CTR)
DEFINE_COPROCR_RW_FUNCS(sctlr, SCTLR)
DEFINE_COPROCR_RW_FUNCS(actlr, ACTLR)
DEFINE_COPROCR_RW_FUNCS(hsctlr, HSCTLR)
DEFINE_COPROCR_RW_FUNCS(hcr, HCR)
DEFINE_COPROCR_RW_FUNCS(hcptr, HCPTR)
DEFINE_COPROCR_RW_FUNCS(cntfrq, CNTFRQ)
DEFINE_COPROCR_RW_FUNCS(cnthctl, CNTHCTL)
DEFINE_COPROCR_RW_FUNCS(mair0, MAIR0)
DEFINE_COPROCR_RW_FUNCS(mair1, MAIR1)
DEFINE_COPROCR_RW_FUNCS(hmair0, HMAIR0)
DEFINE_COPROCR_RW_FUNCS(ttbcr, TTBCR)
DEFINE_COPROCR_RW_FUNCS(htcr, HTCR)
DEFINE_COPROCR_RW_FUNCS(ttbr0, TTBR0)
DEFINE_COPROCR_RW_FUNCS_64(ttbr0, TTBR0_64)
DEFINE_COPROCR_RW_FUNCS(ttbr1, TTBR1)
DEFINE_COPROCR_RW_FUNCS_64(httbr, HTTBR_64)
DEFINE_COPROCR_RW_FUNCS(vpidr, VPIDR)
DEFINE_COPROCR_RW_FUNCS(vmpidr, VMPIDR)
DEFINE_COPROCR_RW_FUNCS_64(vttbr, VTTBR_64)
DEFINE_COPROCR_RW_FUNCS_64(ttbr1, TTBR1_64)
DEFINE_COPROCR_RW_FUNCS_64(cntvoff, CNTVOFF_64)
DEFINE_COPROCR_RW_FUNCS(csselr, CSSELR)
DEFINE_COPROCR_RW_FUNCS(hstr, HSTR)
DEFINE_COPROCR_RW_FUNCS(cnthp_ctl_el2, CNTHP_CTL)
DEFINE_COPROCR_RW_FUNCS(cnthp_tval_el2, CNTHP_TVAL)
DEFINE_COPROCR_RW_FUNCS_64(cnthp_cval_el2, CNTHP_CVAL_64)

#define get_cntp_ctl_enable(x)	(((x) >> CNTP_CTL_ENABLE_SHIFT) & \
					CNTP_CTL_ENABLE_MASK)
#define get_cntp_ctl_imask(x)	(((x) >> CNTP_CTL_IMASK_SHIFT) & \
					CNTP_CTL_IMASK_MASK)
#define get_cntp_ctl_istatus(x)	(((x) >> CNTP_CTL_ISTATUS_SHIFT) & \
					CNTP_CTL_ISTATUS_MASK)

#define set_cntp_ctl_enable(x)	((x) |= U(1) << CNTP_CTL_ENABLE_SHIFT)
#define set_cntp_ctl_imask(x)	((x) |= U(1) << CNTP_CTL_IMASK_SHIFT)

#define clr_cntp_ctl_enable(x)	((x) &= ~(U(1) << CNTP_CTL_ENABLE_SHIFT))
#define clr_cntp_ctl_imask(x)	((x) &= ~(U(1) << CNTP_CTL_IMASK_SHIFT))

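/*
 * Minimal sketch (not part of the original API; the function name is
 * illustrative only): build a CNTP_CTL-layout control value with the helpers
 * above, with the timer enabled and its interrupt unmasked.
 */
static inline u_register_t example_cntp_ctl_value(void)
{
	u_register_t ctl = 0U;

	set_cntp_ctl_enable(ctl);	/* ENABLE = 1 */
	clr_cntp_ctl_imask(ctl);	/* IMASK = 0, interrupt not masked */

	return ctl;
}
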
DEFINE_COPROCR_RW_FUNCS(icc_sre_el1, ICC_SRE)
DEFINE_COPROCR_RW_FUNCS(icc_sre_el2, ICC_HSRE)
DEFINE_COPROCR_RW_FUNCS(icc_sre_el3, ICC_MSRE)
DEFINE_COPROCR_RW_FUNCS(icc_pmr_el1, ICC_PMR)
DEFINE_COPROCR_RW_FUNCS(icc_rpr_el1, ICC_RPR)
DEFINE_COPROCR_RW_FUNCS(icc_igrpen1_el3, ICC_MGRPEN1)
DEFINE_COPROCR_RW_FUNCS(icc_igrpen1_el1, ICC_IGRPEN1)
DEFINE_COPROCR_RW_FUNCS(icc_igrpen0_el1, ICC_IGRPEN0)
DEFINE_COPROCR_RW_FUNCS(icc_hppir0_el1, ICC_HPPIR0)
DEFINE_COPROCR_RW_FUNCS(icc_hppir1_el1, ICC_HPPIR1)
DEFINE_COPROCR_RW_FUNCS(icc_iar0_el1, ICC_IAR0)
DEFINE_COPROCR_RW_FUNCS(icc_iar1_el1, ICC_IAR1)
DEFINE_COPROCR_RW_FUNCS(icc_eoir0_el1, ICC_EOIR0)
DEFINE_COPROCR_RW_FUNCS(icc_eoir1_el1, ICC_EOIR1)
DEFINE_COPROCR_RW_FUNCS_64(icc_sgi0r_el1, ICC_SGI0R_EL1_64)
DEFINE_COPROCR_WRITE_FUNC_64(icc_sgi1r, ICC_SGI1R_EL1_64)

DEFINE_COPROCR_RW_FUNCS(hdcr, HDCR)
DEFINE_COPROCR_RW_FUNCS(cnthp_ctl, CNTHP_CTL)
DEFINE_COPROCR_RW_FUNCS(pmcr, PMCR)
DEFINE_COPROCR_RW_FUNCS(pmcntenset, PMCNTENSET)
DEFINE_COPROCR_RW_FUNCS(pmccfiltr, PMCCFILTR)
DEFINE_COPROCR_READ_FUNC(pmccntr, PMCCNTR)
DEFINE_COPROCR_RW_FUNCS(pmevtyper0, PMEVTYPER0)
DEFINE_COPROCR_READ_FUNC(pmevcntr0, PMEVCNTR0)
DEFINE_COPROCR_READ_FUNC(dbgdidr, DBGDIDR)

/*
 * Address translation
 */
DEFINE_COPROCR_WRITE_FUNC(ats1cpr, ATS1CPR)
DEFINE_COPROCR_WRITE_FUNC(ats1hr, ATS1HR)
DEFINE_COPROCR_RW_FUNCS_64(par, PAR_64)

DEFINE_COPROCR_RW_FUNCS(nsacr, NSACR)

/* AArch32 coproc registers for trace filter */
DEFINE_COPROCR_RW_FUNCS(htrfcr, HTRFCR)
DEFINE_COPROCR_RW_FUNCS(trfcr, TRFCR)

/* AArch32 Trace System Registers */
DEFINE_COPROCR_RW_FUNCS(trcauxctlr, TRCAUXCTLR)
DEFINE_COPROCR_RW_FUNCS(trcrsr, TRCRSR)
DEFINE_COPROCR_RW_FUNCS(trcbbctlr, TRCBBCTLR)
DEFINE_COPROCR_RW_FUNCS(trcccctlr, TRCCCCTLR)
DEFINE_COPROCR_RW_FUNCS(trcextinselr0, TRCEXTINSELR0)
DEFINE_COPROCR_RW_FUNCS(trcextinselr1, TRCEXTINSELR1)
DEFINE_COPROCR_RW_FUNCS(trcextinselr2, TRCEXTINSELR2)
DEFINE_COPROCR_RW_FUNCS(trcextinselr3, TRCEXTINSELR3)
DEFINE_COPROCR_RW_FUNCS(trcclaimset, TRCCLAIMSET)
DEFINE_COPROCR_RW_FUNCS(trcclaimclr, TRCCLAIMCLR)
DEFINE_COPROCR_READ_FUNC(trcdevarch, TRCDEVARCH)

/* AArch32 coproc registers for 32bit MMU descriptor support */
DEFINE_COPROCR_RW_FUNCS(prrr, PRRR)
DEFINE_COPROCR_RW_FUNCS(nmrr, NMRR)
DEFINE_COPROCR_RW_FUNCS(dacr, DACR)

DEFINE_COPROCR_RW_FUNCS(amcntenset0, AMCNTENSET0)
DEFINE_COPROCR_RW_FUNCS(amcntenset1, AMCNTENSET1)
DEFINE_COPROCR_RW_FUNCS(amcntenclr0, AMCNTENCLR0)
DEFINE_COPROCR_RW_FUNCS(amcntenclr1, AMCNTENCLR1)

DEFINE_COPROCR_RW_FUNCS_64(amevcntr00, AMEVCNTR00)
DEFINE_COPROCR_RW_FUNCS_64(amevcntr01, AMEVCNTR01)
DEFINE_COPROCR_RW_FUNCS_64(amevcntr02, AMEVCNTR02)
DEFINE_COPROCR_RW_FUNCS_64(amevcntr03, AMEVCNTR03)

/*
 * TLBI operation prototypes
 */
DEFINE_TLBIOP_FUNC(all, TLBIALL)
DEFINE_TLBIOP_FUNC(allis, TLBIALLIS)
DEFINE_TLBIOP_PARAM_FUNC(mva, TLBIMVA)
DEFINE_TLBIOP_PARAM_FUNC(mvaa, TLBIMVAA)
DEFINE_TLBIOP_PARAM_FUNC(mvaais, TLBIMVAAIS)
DEFINE_TLBIOP_PARAM_FUNC(mvahis, TLBIMVAHIS)

/*
 * BPI operation prototypes.
 */
DEFINE_BPIOP_FUNC(allis, BPIALLIS)

/*
 * DC operation prototypes
 */
DEFINE_DCOP_PARAM_FUNC(civac, DCCIMVAC)
DEFINE_DCOP_PARAM_FUNC(ivac, DCIMVAC)
DEFINE_DCOP_PARAM_FUNC(cvac, DCCMVAC)

/* Previously defined accessor functions with incomplete register names */
#define dsb()			dsbsy()
#define dmb()			dmbsy()

/* dmb ld is not valid for armv7/thumb machines, so alias it to dmb */
#if ARM_ARCH_MAJOR == 7
#define dmbld()			dmb()
#endif

#define IS_IN_SECURE() \
	(GET_NS_BIT(read_scr()) == 0)

#define IS_IN_HYP()	(GET_M32(read_cpsr()) == MODE32_hyp)
#define IS_IN_SVC()	(GET_M32(read_cpsr()) == MODE32_svc)
#define IS_IN_MON()	(GET_M32(read_cpsr()) == MODE32_mon)
#define IS_IN_EL2()	IS_IN_HYP()
/* If EL3 is AArch32, then secure PL1 and monitor mode correspond to EL3 */
#define IS_IN_EL3() \
	((GET_M32(read_cpsr()) == MODE32_mon) || \
	 (IS_IN_SECURE() && (GET_M32(read_cpsr()) != MODE32_usr)))

static inline unsigned int get_current_el(void)
{
	if (IS_IN_EL3()) {
		return 3U;
	} else if (IS_IN_EL2()) {
		return 2U;
	} else {
		return 1U;
	}
}

/* Macros for compatibility with AArch64 system registers */
#define read_mpidr_el1()	read_mpidr()

#define read_scr_el3()		read_scr()
#define write_scr_el3(_v)	write_scr(_v)

#define read_hcr_el2()		read_hcr()
#define write_hcr_el2(_v)	write_hcr(_v)

#define read_cpacr_el1()	read_cpacr()
#define write_cpacr_el1(_v)	write_cpacr(_v)

#define read_cntfrq_el0()	read_cntfrq()
#define write_cntfrq_el0(_v)	write_cntfrq(_v)
#define read_isr_el1()		read_isr()

#define read_cntpct_el0()	read64_cntpct()

#define read_ctr_el0()		read_ctr()

#define read_pmcr_el0()		read_pmcr()
#define write_pmcr_el0(_v)	write_pmcr(_v)

#define read_pmcntenset_el0()	read_pmcntenset()
#define write_pmcntenset_el0(_v) write_pmcntenset(_v)

#define read_pmccfiltr_el0()	read_pmccfiltr()
#define write_pmccfiltr_el0(_v) write_pmccfiltr(_v)

#define read_pmevtyper0_el0()	read_pmevtyper0()
#define write_pmevtyper0_el0(_v) write_pmevtyper0(_v)

#define read_pmccntr_el0	read_pmccntr

#define read_pmevcntr0_el0	read_pmevcntr0

#define write_icc_sgi0r_el1(_v)	write64_icc_sgi0r_el1(_v)

#define read_daif()		read_cpsr()
#define write_daif(flags)	write_cpsr(flags)

#define read_dit()		read_cpsr()
#define write_dit(flags)	write_cpsr(flags)

#define read_cnthp_cval_el2()	read64_cnthp_cval_el2()
#define write_cnthp_cval_el2(v)	write64_cnthp_cval_el2(v)

#define read_amcntenset0_el0()	read_amcntenset0()
#define read_amcntenset1_el0()	read_amcntenset1()

/* Helper functions to manipulate CPSR */
static inline void enable_irq(void)
{
	/*
	 * The compiler memory barrier prevents the compiler from scheduling
	 * non-volatile memory accesses after the write to the register.
	 *
	 * This could happen if some initialization code issues non-volatile
	 * accesses to an area used by an interrupt handler, on the assumption
	 * that this is safe because interrupts are disabled at the time it
	 * does so (according to program order). However, non-volatile
	 * accesses are not necessarily ordered with respect to volatile
	 * inline assembly statements (and volatile accesses).
	 */
	COMPILER_BARRIER();
	__asm__ volatile ("cpsie i");
	isb();
}

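/*
 * Illustrative scenario (not part of the original file; the variable name is
 * made up): without the barrier in enable_irq(), given
 *
 *   shared_flag = 1;	(a plain, non-volatile store)
 *   enable_irq();
 *
 * the compiler would be free to sink the store past the "cpsie i", so an
 * interrupt handler reading shared_flag could observe a stale value.
 */
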
static inline void enable_serror(void)
{
	COMPILER_BARRIER();
	__asm__ volatile ("cpsie a");
	isb();
}

static inline void enable_fiq(void)
{
	COMPILER_BARRIER();
	__asm__ volatile ("cpsie f");
	isb();
}

static inline void disable_irq(void)
{
	COMPILER_BARRIER();
	__asm__ volatile ("cpsid i");
	isb();
}

static inline void disable_serror(void)
{
	COMPILER_BARRIER();
	__asm__ volatile ("cpsid a");
	isb();
}

static inline void disable_fiq(void)
{
	COMPILER_BARRIER();
	__asm__ volatile ("cpsid f");
	isb();
}

void disable_mmu_icache(void);

/* Read the count value of the system counter. */
static inline uint64_t syscounter_read(void)
{
	/*
	 * The instruction barrier is needed to guarantee that we read an
	 * accurate value. Otherwise, the CPU might speculatively read it and
	 * return a stale value.
	 */
	isb();
	return read64_cntpct();
}

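/*
 * Minimal usage sketch (not part of the original API; the function name is
 * illustrative only): measure elapsed time in system counter ticks. Ticks
 * can be converted to seconds using the frequency returned by read_cntfrq().
 */
static inline uint64_t example_elapsed_ticks(uint64_t start)
{
	/* Ticks elapsed since "start" was sampled with syscounter_read(). */
	return syscounter_read() - start;
}
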
#endif /* ARCH_HELPERS_H */