[linux.git] arch/arm64/include/asm/barrier.h (Linux 6.14-rc3)
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Based on arch/arm/include/asm/barrier.h
 *
 * Copyright (C) 2012 ARM Ltd.
 */
#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

#ifndef __ASSEMBLY__

#include <linux/kasan-checks.h>

#include <asm/alternative-macros.h>

#define __nops(n)       ".rept  " #n "\nnop\n.endr\n"
#define nops(n)         asm volatile(__nops(n))

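/*
 * __nops(n) expands to an assembler ".rept n ... .endr" loop emitting n
 * NOP instructions; nops(n) wraps it in an asm statement so C code can
 * emit NOP padding directly, e.g. nops(3) emits three NOPs.
 */
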
#define sev()           asm volatile("sev" : : : "memory")
#define wfe()           asm volatile("wfe" : : : "memory")
#define wfet(val)       asm volatile("msr s0_3_c1_c0_0, %0"     \
                                     : : "r" (val) : "memory")
#define wfi()           asm volatile("wfi" : : : "memory")
#define wfit(val)       asm volatile("msr s0_3_c1_c0_1, %0"     \
                                     : : "r" (val) : "memory")

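/*
 * sev()/wfe()/wfi() issue the corresponding hint instructions directly.
 * wfet()/wfit() are the FEAT_WFxT timed variants (WFET/WFIT Xt), written
 * via their system-register encodings so the header still assembles with
 * toolchains that do not know the WFET/WFIT mnemonics.
 */
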
#define isb()           asm volatile("isb" : : : "memory")
#define dmb(opt)        asm volatile("dmb " #opt : : : "memory")
#define dsb(opt)        asm volatile("dsb " #opt : : : "memory")

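/*
 * dmb()/dsb() take the shareability domain and access type as an argument,
 * which is pasted into the instruction, e.g. dmb(ish) for an inner-shareable
 * full barrier or dsb(st) for a full-system barrier ordering stores only.
 */
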
#define psb_csync()     asm volatile("hint #17" : : : "memory")
#define __tsb_csync()   asm volatile("hint #18" : : : "memory")
#define csdb()          asm volatile("hint #20" : : : "memory")

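/*
 * psb_csync() (PSB CSYNC, hint #17), __tsb_csync() (TSB CSYNC, hint #18)
 * and csdb() (CSDB, hint #20) are encoded in the HINT space, so they
 * execute as NOPs on CPUs that do not implement statistical profiling,
 * self-hosted trace or the consumption-of-speculative-data barrier.
 */
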
/*
 * Data Gathering Hint:
 * This instruction prevents merging memory accesses with Normal-NC or
 * Device-GRE attributes before the hint instruction with any memory accesses
 * appearing after the hint instruction.
 */
#define dgh()           asm volatile("hint #6" : : : "memory")

#define spec_bar()      asm volatile(ALTERNATIVE("dsb nsh\nisb\n",              \
                                                 SB_BARRIER_INSN"nop\n",        \
                                                 ARM64_HAS_SB))

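/*
 * spec_bar() is a speculation barrier: on CPUs with ARM64_HAS_SB the
 * alternative patches in the SB instruction (SB_BARRIER_INSN, padded with
 * a NOP so both alternatives are the same size); otherwise it falls back
 * to the heavier "dsb nsh; isb" sequence.
 */
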
#ifdef CONFIG_ARM64_PSEUDO_NMI
#define pmr_sync()                                              \
        do {                                                    \
                asm volatile(                                   \
                ALTERNATIVE_CB("dsb sy",                        \
                               ARM64_HAS_GIC_PRIO_RELAXED_SYNC, \
                               alt_cb_patch_nops)               \
                );                                              \
        } while(0)
#else
#define pmr_sync()      do {} while (0)
#endif

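/*
 * pmr_sync() ensures that a prior write to the GIC priority mask
 * (ICC_PMR_EL1) has taken effect before subsequent code runs. It is a
 * "dsb sy" by default and is patched into NOPs (alt_cb_patch_nops) on
 * CPUs with ARM64_HAS_GIC_PRIO_RELAXED_SYNC, which do not need the
 * heavyweight synchronisation; without CONFIG_ARM64_PSEUDO_NMI it
 * compiles to nothing.
 */
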
#define __mb()          dsb(sy)
#define __rmb()         dsb(ld)
#define __wmb()         dsb(st)

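/*
 * The mandatory barriers (mb()/rmb()/wmb()) map to full-system DSBs so
 * that they also order accesses against devices and DMA masters, not just
 * against other CPUs.
 */
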
#define __dma_mb()      dmb(osh)
#define __dma_rmb()     dmb(oshld)
#define __dma_wmb()     dmb(oshst)

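/*
 * The dma_*() barriers only need to order against observers in the outer
 * shareable domain (coherent DMA masters), so the cheaper outer-shareable
 * DMBs are sufficient here.
 */
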
#define io_stop_wc()    dgh()

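/*
 * io_stop_wc() keeps earlier Normal-NC / Device-GRE (write-combining)
 * writes from being merged with later ones, by way of the Data Gathering
 * Hint defined above.
 */
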
#define tsb_csync()                                                             \
        do {                                                                    \
                /*                                                              \
                 * CPUs affected by Arm Erratum 2054223 or 2067961 need         \
                 * another TSB to ensure the trace is flushed. The barriers     \
                 * don't have to be strictly back to back, as long as the       \
                 * CPU is in trace prohibited state.                            \
                 */                                                             \
                if (cpus_have_final_cap(ARM64_WORKAROUND_TSB_FLUSH_FAILURE))    \
                        __tsb_csync();                                          \
                __tsb_csync();                                                  \
        } while (0)

/*
 * Generate a mask for array_index_nospec() that is ~0UL when 0 <= idx < sz
 * and 0 otherwise.
 */
#define array_index_mask_nospec array_index_mask_nospec
static inline unsigned long array_index_mask_nospec(unsigned long idx,
                                                    unsigned long sz)
{
        unsigned long mask;

        asm volatile(
        "       cmp     %1, %2\n"
        "       sbc     %0, xzr, xzr\n"
        : "=r" (mask)
        : "r" (idx), "Ir" (sz)
        : "cc");

        csdb();
        return mask;
}

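/*
 * The CMP/SBC pair computes the mask without a branch: the unsigned
 * compare leaves the carry flag clear only when idx < sz, and the
 * subtract-with-carry of xzr from xzr then yields all ones exactly in
 * that in-bounds case (and zero otherwise). The trailing csdb() keeps the
 * result from being bypassed under speculation.
 *
 * Illustrative caller (this is what array_index_nospec() in
 * <linux/nospec.h> boils down to):
 *
 *	idx = array_index_nospec(idx, ARRAY_SIZE(array));
 *	val = array[idx];	// cannot read out of bounds, even speculatively
 */
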
/*
 * Ensure that reads of the counter are treated the same as memory reads
 * for the purposes of ordering by subsequent memory barriers.
 *
 * This insanity brought to you by speculative system register reads,
 * out-of-order memory accesses, sequence locks and Thomas Gleixner.
 *
 * https://lore.kernel.org/r/[email protected]/
 */
#define arch_counter_enforce_ordering(val) do {                         \
        u64 tmp, _val = (val);                                          \
                                                                        \
        asm volatile(                                                   \
        "       eor     %0, %1, %1\n"                                   \
        "       add     %0, sp, %0\n"                                   \
        "       ldr     xzr, [%0]"                                      \
        : "=r" (tmp) : "r" (_val));                                     \
} while (0)

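/*
 * The EOR of the counter value with itself always produces zero, but it
 * creates a data dependency from the counter read into the address of the
 * dummy load (zero added to sp, loaded into xzr). That load is a real
 * memory access, so any later barrier or acquire/release that orders
 * memory reads also orders the counter read.
 */
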
#define __smp_mb()      dmb(ish)
#define __smp_rmb()     dmb(ishld)
#define __smp_wmb()     dmb(ishst)

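/*
 * The SMP barriers only have to order accesses among CPUs, which all live
 * in the same inner shareable domain, so inner-shareable DMBs suffice and
 * are cheaper than the full DSBs used for the mandatory barriers above.
 */
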
#define __smp_store_release(p, v)                                       \
do {                                                                    \
        typeof(p) __p = (p);                                            \
        union { __unqual_scalar_typeof(*p) __val; char __c[1]; } __u =  \
                { .__val = (__force __unqual_scalar_typeof(*p)) (v) };  \
        compiletime_assert_atomic_type(*p);                             \
        kasan_check_write(__p, sizeof(*p));                             \
        switch (sizeof(*p)) {                                           \
        case 1:                                                         \
                asm volatile ("stlrb %w1, %0"                           \
                                : "=Q" (*__p)                           \
                                : "rZ" (*(__u8 *)__u.__c)               \
                                : "memory");                            \
                break;                                                  \
        case 2:                                                         \
                asm volatile ("stlrh %w1, %0"                           \
                                : "=Q" (*__p)                           \
                                : "rZ" (*(__u16 *)__u.__c)              \
                                : "memory");                            \
                break;                                                  \
        case 4:                                                         \
                asm volatile ("stlr %w1, %0"                            \
                                : "=Q" (*__p)                           \
                                : "rZ" (*(__u32 *)__u.__c)              \
                                : "memory");                            \
                break;                                                  \
        case 8:                                                         \
                asm volatile ("stlr %x1, %0"                            \
                                : "=Q" (*__p)                           \
                                : "rZ" (*(__u64 *)__u.__c)              \
                                : "memory");                            \
                break;                                                  \
        }                                                               \
} while (0)

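/*
 * __smp_store_release() selects a release store (STLRB/STLRH/STLR) by
 * access size, so no separate DMB is needed. The value goes through a
 * union so that volatile/const qualifiers are dropped, and the "rZ"
 * constraint lets the compiler use wzr/xzr when storing constant zero.
 */
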
#define __smp_load_acquire(p)                                           \
({                                                                      \
        union { __unqual_scalar_typeof(*p) __val; char __c[1]; } __u;   \
        typeof(p) __p = (p);                                            \
        compiletime_assert_atomic_type(*p);                             \
        kasan_check_read(__p, sizeof(*p));                              \
        switch (sizeof(*p)) {                                           \
        case 1:                                                         \
                asm volatile ("ldarb %w0, %1"                           \
                        : "=r" (*(__u8 *)__u.__c)                       \
                        : "Q" (*__p) : "memory");                       \
                break;                                                  \
        case 2:                                                         \
                asm volatile ("ldarh %w0, %1"                           \
                        : "=r" (*(__u16 *)__u.__c)                      \
                        : "Q" (*__p) : "memory");                       \
                break;                                                  \
        case 4:                                                         \
                asm volatile ("ldar %w0, %1"                            \
                        : "=r" (*(__u32 *)__u.__c)                      \
                        : "Q" (*__p) : "memory");                       \
                break;                                                  \
        case 8:                                                         \
                asm volatile ("ldar %0, %1"                             \
                        : "=r" (*(__u64 *)__u.__c)                      \
                        : "Q" (*__p) : "memory");                       \
                break;                                                  \
        }                                                               \
        (typeof(*p))__u.__val;                                          \
})

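/*
 * __smp_load_acquire() is the LDARB/LDARH/LDAR counterpart. Illustrative
 * pairing (not taken from this file):
 *
 *	// producer
 *	WRITE_ONCE(data, 1);
 *	smp_store_release(&flag, 1);
 *
 *	// consumer
 *	while (!smp_load_acquire(&flag))
 *		cpu_relax();
 *	r = READ_ONCE(data);	// guaranteed to observe data == 1
 *
 * The release store orders the data write before the flag write, and the
 * acquire load orders the flag read before the data read.
 */
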
#define smp_cond_load_relaxed(ptr, cond_expr)                           \
({                                                                      \
        typeof(ptr) __PTR = (ptr);                                      \
        __unqual_scalar_typeof(*ptr) VAL;                               \
        for (;;) {                                                      \
                VAL = READ_ONCE(*__PTR);                                \
                if (cond_expr)                                          \
                        break;                                          \
                __cmpwait_relaxed(__PTR, VAL);                          \
        }                                                               \
        (typeof(*ptr))VAL;                                              \
})

#define smp_cond_load_acquire(ptr, cond_expr)                           \
({                                                                      \
        typeof(ptr) __PTR = (ptr);                                      \
        __unqual_scalar_typeof(*ptr) VAL;                               \
        for (;;) {                                                      \
                VAL = smp_load_acquire(__PTR);                          \
                if (cond_expr)                                          \
                        break;                                          \
                __cmpwait_relaxed(__PTR, VAL);                          \
        }                                                               \
        (typeof(*ptr))VAL;                                              \
})

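/*
 * Rather than spinning on READ_ONCE(), the cond_load loops above call
 * __cmpwait_relaxed(), which arms the exclusive monitor with a
 * load-exclusive of the current value and then executes WFE; the CPU
 * sleeps until another agent writes the variable (or an event/interrupt
 * arrives), at which point the condition is re-evaluated. Illustrative
 * use:
 *
 *	// wait, with acquire semantics, for node->locked to become non-zero
 *	smp_cond_load_acquire(&node->locked, VAL);
 */
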
#include <asm-generic/barrier.h>

#endif  /* __ASSEMBLY__ */

#endif  /* __ASM_BARRIER_H */