/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Based on arch/arm/include/asm/barrier.h
 *
 * Copyright (C) 2012 ARM Ltd.
 */
#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

#ifndef __ASSEMBLY__

#include <linux/kasan-checks.h>

#define __nops(n)	".rept " #n "\nnop\n.endr\n"
#define nops(n)		asm volatile(__nops(n))

#define sev()		asm volatile("sev" : : : "memory")
#define wfe()		asm volatile("wfe" : : : "memory")
#define wfi()		asm volatile("wfi" : : : "memory")

#define isb()		asm volatile("isb" : : : "memory")
#define dmb(opt)	asm volatile("dmb " #opt : : : "memory")
#define dsb(opt)	asm volatile("dsb " #opt : : : "memory")

#define psb_csync()	asm volatile("hint #17" : : : "memory")
#define csdb()		asm volatile("hint #20" : : : "memory")

#define spec_bar()	asm volatile(ALTERNATIVE("dsb nsh\nisb\n",		\
						 SB_BARRIER_INSN"nop\n",	\
						 ARM64_HAS_SB))

#ifdef CONFIG_ARM64_PSEUDO_NMI
#define pmr_sync()						\
	do {							\
		extern struct static_key_false gic_pmr_sync;	\
								\
		if (static_branch_unlikely(&gic_pmr_sync))	\
			dsb(sy);				\
	} while (0)
#else
#define pmr_sync()	do {} while (0)
#endif

#define mb()		dsb(sy)
#define rmb()		dsb(ld)
#define wmb()		dsb(st)

#define dma_mb()	dmb(osh)
#define dma_rmb()	dmb(oshld)
#define dma_wmb()	dmb(oshst)
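
/*
 * Illustrative usage sketch for the dma_*() barriers (the descriptor
 * fields, DESC_OWNED_BY_HW and the surrounding driver are hypothetical,
 * not part of this header): dma_wmb() orders CPU writes to coherent DMA
 * memory before the write that hands ownership to the device, and
 * dma_rmb() orders the ownership check before reads of data the device
 * wrote back.
 *
 *	desc->addr = buf_dma;			// fill in the descriptor
 *	desc->len  = buf_len;
 *	dma_wmb();				// descriptor before OWN bit
 *	desc->flags = DESC_OWNED_BY_HW;		// hand it to the device
 *
 *	if (!(desc->flags & DESC_OWNED_BY_HW)) {
 *		dma_rmb();			// flags before data reads
 *		len = desc->len;
 *	}
 */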

/*
 * Generate a mask for array_index_nospec() that is ~0UL when 0 <= idx < sz
 * and 0 otherwise.
 */
#define array_index_mask_nospec array_index_mask_nospec
static inline unsigned long array_index_mask_nospec(unsigned long idx,
						    unsigned long sz)
{
	unsigned long mask;

	asm volatile(
	"	cmp	%1, %2\n"
	"	sbc	%0, xzr, xzr\n"
	: "=r" (mask)
	: "r" (idx), "Ir" (sz)
	: "cc");

	csdb();
	return mask;
}
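
/*
 * Usage sketch (simplified; the real consumer is array_index_nospec() in
 * <linux/nospec.h>, 'table' is hypothetical): the returned mask clamps a
 * potentially mispredicted index to zero so it cannot be used to read out
 * of bounds under speculation.
 *
 *	if (idx < ARRAY_SIZE(table)) {
 *		idx &= array_index_mask_nospec(idx, ARRAY_SIZE(table));
 *		val = table[idx];	// idx is forced to 0 if mispredicted
 *	}
 */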

/*
 * Ensure that reads of the counter are treated the same as memory reads
 * for the purposes of ordering by subsequent memory barriers.
 *
 * This insanity brought to you by speculative system register reads,
 * out-of-order memory accesses, sequence locks and Thomas Gleixner.
 *
 * http://lists.infradead.org/pipermail/linux-arm-kernel/2019-February/631195.html
 */
#define arch_counter_enforce_ordering(val) do {				\
	u64 tmp, _val = (val);						\
									\
	asm volatile(							\
	"	eor	%0, %1, %1\n"					\
	"	add	%0, sp, %0\n"					\
	"	ldr	xzr, [%0]"					\
	: "=r" (tmp) : "r" (_val));					\
} while (0)
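
/*
 * Usage sketch, modelled on the arch timer code (see
 * __arch_counter_get_cntvct() in <asm/arch_timer.h>): feeding the counter
 * value back through the macro builds a fake address dependency on it, so
 * the system register read is ordered like a memory read by later barriers.
 *
 *	u64 cnt;
 *
 *	isb();					// don't speculate the read
 *	cnt = read_sysreg(cntvct_el0);
 *	arch_counter_enforce_ordering(cnt);	// order it like a load
 */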

#define __smp_mb()	dmb(ish)
#define __smp_rmb()	dmb(ishld)
#define __smp_wmb()	dmb(ishst)

#define __smp_store_release(p, v)					\
do {									\
	typeof(p) __p = (p);						\
	union { __unqual_scalar_typeof(*p) __val; char __c[1]; } __u =	\
		{ .__val = (__force __unqual_scalar_typeof(*p)) (v) };	\
	compiletime_assert_atomic_type(*p);				\
	kasan_check_write(__p, sizeof(*p));				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile ("stlrb %w1, %0"				\
				: "=Q" (*__p)				\
				: "r" (*(__u8 *)__u.__c)		\
				: "memory");				\
		break;							\
	case 2:								\
		asm volatile ("stlrh %w1, %0"				\
				: "=Q" (*__p)				\
				: "r" (*(__u16 *)__u.__c)		\
				: "memory");				\
		break;							\
	case 4:								\
		asm volatile ("stlr %w1, %0"				\
				: "=Q" (*__p)				\
				: "r" (*(__u32 *)__u.__c)		\
				: "memory");				\
		break;							\
	case 8:								\
		asm volatile ("stlr %1, %0"				\
				: "=Q" (*__p)				\
				: "r" (*(__u64 *)__u.__c)		\
				: "memory");				\
		break;							\
	}								\
} while (0)

#define __smp_load_acquire(p)						\
({									\
	union { __unqual_scalar_typeof(*p) __val; char __c[1]; } __u;	\
	typeof(p) __p = (p);						\
	compiletime_assert_atomic_type(*p);				\
	kasan_check_read(__p, sizeof(*p));				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile ("ldarb %w0, %1"				\
			: "=r" (*(__u8 *)__u.__c)			\
			: "Q" (*__p) : "memory");			\
		break;							\
	case 2:								\
		asm volatile ("ldarh %w0, %1"				\
			: "=r" (*(__u16 *)__u.__c)			\
			: "Q" (*__p) : "memory");			\
		break;							\
	case 4:								\
		asm volatile ("ldar %w0, %1"				\
			: "=r" (*(__u32 *)__u.__c)			\
			: "Q" (*__p) : "memory");			\
		break;							\
	case 8:								\
		asm volatile ("ldar %0, %1"				\
			: "=r" (*(__u64 *)__u.__c)			\
			: "Q" (*__p) : "memory");			\
		break;							\
	}								\
	(typeof(*p))__u.__val;						\
})
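
/*
 * Usage sketch for the release/acquire pair (the generic
 * smp_store_release()/smp_load_acquire() wrappers pulled in via
 * <asm-generic/barrier.h> expand to the macros above; 'data' and 'ready'
 * are hypothetical):
 *
 *	CPU 0				CPU 1
 *	WRITE_ONCE(data, 42);
 *	smp_store_release(&ready, 1);	if (smp_load_acquire(&ready))
 *						r = READ_ONCE(data);	// r == 42
 *
 * STLR/LDAR provide the ordering directly, so no separate dmb is needed.
 */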

#define smp_cond_load_relaxed(ptr, cond_expr)				\
({									\
	typeof(ptr) __PTR = (ptr);					\
	__unqual_scalar_typeof(*ptr) VAL;				\
	for (;;) {							\
		VAL = READ_ONCE(*__PTR);				\
		if (cond_expr)						\
			break;						\
		__cmpwait_relaxed(__PTR, VAL);				\
	}								\
	(typeof(*ptr))VAL;						\
})

#define smp_cond_load_acquire(ptr, cond_expr)				\
({									\
	typeof(ptr) __PTR = (ptr);					\
	__unqual_scalar_typeof(*ptr) VAL;				\
	for (;;) {							\
		VAL = smp_load_acquire(__PTR);				\
		if (cond_expr)						\
			break;						\
		__cmpwait_relaxed(__PTR, VAL);				\
	}								\
	(typeof(*ptr))VAL;						\
})
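
/*
 * Usage sketch ('flag' is hypothetical): spin until another CPU publishes
 * a non-zero value. VAL names the most recently loaded value inside
 * cond_expr; on arm64 the retry path goes through __cmpwait_relaxed()
 * (LDXR + WFE), so the CPU waits in WFE instead of busy-polling.
 *
 *	// returns the observed non-zero value with acquire semantics
 *	v = smp_cond_load_acquire(&flag, VAL != 0);
 */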

#include <asm-generic/barrier.h>

#endif /* __ASSEMBLY__ */

#endif /* __ASM_BARRIER_H */