/*
 * Based on arch/arm/include/asm/barrier.h
 *
 * Copyright (C) 2012 ARM Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */
#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

#ifndef __ASSEMBLY__

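/*
 * Instruction hints: SEV signals an event to other cores, WFE can
 * suspend the core until an event is signalled, and WFI can suspend it
 * until an interrupt is pending. The "memory" clobber stops the
 * compiler from moving memory accesses across the hint.
 */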
#define sev()		asm volatile("sev" : : : "memory")
#define wfe()		asm volatile("wfe" : : : "memory")
#define wfi()		asm volatile("wfi" : : : "memory")

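/*
 * Raw barrier instructions. ISB flushes the pipeline so that later
 * instructions see the effects of earlier context-changing operations.
 * DMB and DSB take a shareability/access-type option (sy, ish, ishld,
 * ishst, oshld, oshst, ...) which is pasted into the instruction via
 * the stringified "opt" argument.
 */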
#define isb()		asm volatile("isb" : : : "memory")
#define dmb(opt)	asm volatile("dmb " #opt : : : "memory")
#define dsb(opt)	asm volatile("dsb " #opt : : : "memory")

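/*
 * Mandatory barriers: full-system DSBs, strong enough to order
 * accesses against devices as well as other CPUs.
 */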
#define mb()		dsb(sy)
#define rmb()		dsb(ld)
#define wmb()		dsb(st)

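/*
 * Barriers for ordering accesses to coherent DMA memory against other
 * accesses to it; a DMB limited to the outer-shareable domain is
 * sufficient here, rather than a full DSB.
 */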
#define dma_rmb()	dmb(oshld)
#define dma_wmb()	dmb(oshst)

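/*
 * SMP barriers: order accesses as observed by other CPUs, using DMBs
 * restricted to the inner-shareable domain.
 */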
#define smp_mb()	dmb(ish)
#define smp_rmb()	dmb(ishld)
#define smp_wmb()	dmb(ishst)

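/*
 * smp_store_release(): store with release semantics via STLR{B,H,}.
 * Accesses before the store cannot be observed after it, pairing with
 * smp_load_acquire() on the reader side. The __u union type-puns the
 * value through a byte array so the macro also accepts non-integer
 * types of a suitable size, and compiletime_assert_atomic_type()
 * rejects types the hardware cannot store in a single access.
 */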
#define smp_store_release(p, v)					\
do {									\
	union { typeof(*p) __val; char __c[1]; } __u =			\
		{ .__val = (__force typeof(*p)) (v) };			\
	compiletime_assert_atomic_type(*p);				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile ("stlrb %w1, %0"				\
				: "=Q" (*p)				\
				: "r" (*(__u8 *)__u.__c)		\
				: "memory");				\
		break;							\
	case 2:								\
		asm volatile ("stlrh %w1, %0"				\
				: "=Q" (*p)				\
				: "r" (*(__u16 *)__u.__c)		\
				: "memory");				\
		break;							\
	case 4:								\
		asm volatile ("stlr %w1, %0"				\
				: "=Q" (*p)				\
				: "r" (*(__u32 *)__u.__c)		\
				: "memory");				\
		break;							\
	case 8:								\
		asm volatile ("stlr %1, %0"				\
				: "=Q" (*p)				\
				: "r" (*(__u64 *)__u.__c)		\
				: "memory");				\
		break;							\
	}								\
} while (0)

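/*
 * smp_load_acquire(): load with acquire semantics via LDAR{B,H,}.
 * Accesses after the load cannot be observed before it. Illustrative
 * pairing with smp_store_release() ("data" and "flag" are example
 * variables only):
 *
 *	writer:	data = 42;
 *		smp_store_release(&flag, 1);
 *
 *	reader:	if (smp_load_acquire(&flag))
 *			do_something(data);	// guaranteed to see 42
 */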
#define smp_load_acquire(p)						\
({									\
	union { typeof(*p) __val; char __c[1]; } __u;			\
	compiletime_assert_atomic_type(*p);				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile ("ldarb %w0, %1"				\
			: "=r" (*(__u8 *)__u.__c)			\
			: "Q" (*p) : "memory");				\
		break;							\
	case 2:								\
		asm volatile ("ldarh %w0, %1"				\
			: "=r" (*(__u16 *)__u.__c)			\
			: "Q" (*p) : "memory");				\
		break;							\
	case 4:								\
		asm volatile ("ldar %w0, %1"				\
			: "=r" (*(__u32 *)__u.__c)			\
			: "Q" (*p) : "memory");				\
		break;							\
	case 8:								\
		asm volatile ("ldar %0, %1"				\
			: "=r" (*(__u64 *)__u.__c)			\
			: "Q" (*p) : "memory");				\
		break;							\
	}								\
	__u.__val;							\
})

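/*
 * Data-dependency barriers are no-ops: unlike Alpha, arm64 preserves
 * the ordering between a load and a later load that depends on the
 * first load's value.
 */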
#define read_barrier_depends()		do { } while(0)
#define smp_read_barrier_depends()	do { } while(0)

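/*
 * smp_store_mb(): write the value, then issue a full barrier so the
 * store is ordered against all subsequent accesses.
 */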
#define smp_store_mb(var, value)	do { WRITE_ONCE(var, value); smp_mb(); } while (0)
#define nop()		asm volatile("nop");

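/*
 * Full barriers for use around atomic operations that do not
 * themselves imply ordering.
 */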
#define smp_mb__before_atomic()	smp_mb()
#define smp_mb__after_atomic()	smp_mb()

#endif	/* __ASSEMBLY__ */

#endif	/* __ASM_BARRIER_H */