/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_UM_BARRIER_H_
#define _ASM_UM_BARRIER_H_

#include <asm/alternative.h>

/*
 * Force strict CPU ordering.
 * And yes, this is required on UP too when we're talking
 * to devices.
 */
#ifdef CONFIG_X86_32

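/*
 * Older 32-bit CPUs lack the SSE/SSE2 fence instructions, so the
 * baseline instruction here is a LOCK-prefixed add to the stack, which
 * acts as a full barrier; alternative() patches in the real fence at
 * boot when the corresponding CPU feature bit is present.
 */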
#define mb() alternative("lock; addl $0,0(%%esp)", "mfence", X86_FEATURE_XMM2)
#define rmb() alternative("lock; addl $0,0(%%esp)", "lfence", X86_FEATURE_XMM2)
#define wmb() alternative("lock; addl $0,0(%%esp)", "sfence", X86_FEATURE_XMM)

#else /* CONFIG_X86_32 */

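/*
 * 64-bit x86 always has SSE2, so the fence instructions can be used
 * directly without runtime patching.
 */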
#define mb() asm volatile("mfence" : : : "memory")
#define rmb() asm volatile("lfence" : : : "memory")
#define wmb() asm volatile("sfence" : : : "memory")

#endif /* CONFIG_X86_32 */

#include <asm-generic/barrier.h>

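/*
 * Illustrative sketch only (not part of the original header), kept under
 * "#if 0" so it is never built: a hypothetical producer/consumer pair
 * showing the store/load ordering that wmb() and rmb() provide.  All
 * identifiers below (example_payload, example_ready, example_publish,
 * example_consume) are made up for illustration; for ordinary CPU-to-CPU
 * ordering, kernel code would normally reach for the smp_*() variants
 * that <asm-generic/barrier.h> derives from these primitives.
 */
#if 0
static int example_payload;
static int example_ready;

static void example_publish(int value)
{
	example_payload = value;	/* write the data first */
	wmb();				/* order the store above before the flag below */
	example_ready = 1;		/* then publish the flag */
}

static int example_consume(void)
{
	if (!example_ready)		/* flag not yet visible */
		return -1;
	rmb();				/* order the flag read before the data read */
	return example_payload;
}
#endif
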
#endif