/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ALPHA_CMPXCHG_H
#define _ALPHA_CMPXCHG_H

/*
 * Atomic exchange.
 * Since it can be used to implement critical sections
 * it must clobber "memory" (also for interrupts in UP).
 */

static inline unsigned long
____xchg_u8(volatile char *m, unsigned long val)
{
        unsigned long ret, tmp, addr64;

        __asm__ __volatile__(
        "       andnot  %4,7,%3\n"
        "       insbl   %1,%4,%1\n"
        "1:     ldq_l   %2,0(%3)\n"
        "       extbl   %2,%4,%0\n"
        "       mskbl   %2,%4,%2\n"
        "       or      %1,%2,%2\n"
        "       stq_c   %2,0(%3)\n"
        "       beq     %2,2f\n"
        ".subsection 2\n"
        "2:     br      1b\n"
        ".previous"
        : "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
        : "r" ((long)m), "1" (val) : "memory");

        return ret;
}
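
/*
 * Illustrative sketch, not from the upstream header: a plain C model of
 * what the LL/SC sequence in ____xchg_u8() computes, with the atomicity
 * stripped away.  The read-modify-write is done on the containing
 * aligned quadword, with the new byte masked into its lane.  The helper
 * name ____xchg_u8_model is hypothetical.
 */
static inline unsigned long
____xchg_u8_model(volatile char *m, unsigned long val)
{
        unsigned long *q = (unsigned long *)((unsigned long)m & ~7UL);
        unsigned long shift = ((unsigned long)m & 7) * 8;  /* byte lane */
        unsigned long ret = (*q >> shift) & 0xff;          /* old byte */

        /* clear the lane, then merge in the new byte */
        *q = (*q & ~(0xffUL << shift)) | ((val & 0xff) << shift);
        return ret;
}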

static inline unsigned long
____xchg_u16(volatile short *m, unsigned long val)
{
        unsigned long ret, tmp, addr64;

        __asm__ __volatile__(
        "       andnot  %4,7,%3\n"
        "       inswl   %1,%4,%1\n"
        "1:     ldq_l   %2,0(%3)\n"
        "       extwl   %2,%4,%0\n"
        "       mskwl   %2,%4,%2\n"
        "       or      %1,%2,%2\n"
        "       stq_c   %2,0(%3)\n"
        "       beq     %2,2f\n"
        ".subsection 2\n"
        "2:     br      1b\n"
        ".previous"
        : "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
        : "r" ((long)m), "1" (val) : "memory");

        return ret;
}

static inline unsigned long
____xchg_u32(volatile int *m, unsigned long val)
{
        unsigned long dummy;

        __asm__ __volatile__(
        "1:     ldl_l %0,%4\n"
        "       bis $31,%3,%1\n"
        "       stl_c %1,%2\n"
        "       beq %1,2f\n"
        ".subsection 2\n"
        "2:     br 1b\n"
        ".previous"
        : "=&r" (val), "=&r" (dummy), "=m" (*m)
        : "rI" (val), "m" (*m) : "memory");

        return val;
}

static inline unsigned long
____xchg_u64(volatile long *m, unsigned long val)
{
        unsigned long dummy;

        __asm__ __volatile__(
        "1:     ldq_l %0,%4\n"
        "       bis $31,%3,%1\n"
        "       stq_c %1,%2\n"
        "       beq %1,2f\n"
        ".subsection 2\n"
        "2:     br 1b\n"
        ".previous"
        : "=&r" (val), "=&r" (dummy), "=m" (*m)
        : "rI" (val), "m" (*m) : "memory");

        return val;
}
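
/*
 * Illustrative sketch, not from the upstream header: the 32- and 64-bit
 * helpers above are plain relaxed exchanges; the LL/SC retry loop itself
 * provides no ordering (arch_xchg() below adds the barriers).  In terms
 * of GCC's __atomic builtins the 64-bit case behaves like this
 * hypothetical model:
 */
static inline unsigned long
____xchg_u64_model(volatile long *m, unsigned long val)
{
        return __atomic_exchange_n(m, (long)val, __ATOMIC_RELAXED);
}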

/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid xchg(). */
extern void __xchg_called_with_bad_pointer(void);

static __always_inline unsigned long
____xchg(volatile void *ptr, unsigned long x, int size)
{
        return
                size == 1 ? ____xchg_u8(ptr, x) :
                size == 2 ? ____xchg_u16(ptr, x) :
                size == 4 ? ____xchg_u32(ptr, x) :
                size == 8 ? ____xchg_u64(ptr, x) :
                        (__xchg_called_with_bad_pointer(), x);
}
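
/*
 * Illustrative sketch, not from the upstream header: size is always a
 * compile-time sizeof() at the call sites, so the ternary chain above
 * folds to a single direct call.  This hypothetical wrapper, for
 * instance, compiles down to just ____xchg_u16():
 */
static __always_inline unsigned short
__xchg_u16_demo(volatile short *p, unsigned short v)
{
        return ____xchg(p, v, sizeof(*p));      /* size == 2 */
}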

/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */

static inline unsigned long
____cmpxchg_u8(volatile char *m, unsigned char old, unsigned char new)
{
        unsigned long prev, tmp, cmp, addr64;

        __asm__ __volatile__(
        "       andnot  %5,7,%4\n"
        "       insbl   %1,%5,%1\n"
        "1:     ldq_l   %2,0(%4)\n"
        "       extbl   %2,%5,%0\n"
        "       cmpeq   %0,%6,%3\n"
        "       beq     %3,2f\n"
        "       mskbl   %2,%5,%2\n"
        "       or      %1,%2,%2\n"
        "       stq_c   %2,0(%4)\n"
        "       beq     %2,3f\n"
        "2:\n"
        ".subsection 2\n"
        "3:     br      1b\n"
        ".previous"
        : "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
        : "r" ((long)m), "Ir" (old), "1" (new) : "memory");

        return prev;
}
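
/*
 * Illustrative sketch, not from the upstream header: the OLD/NEW/RETURN
 * contract described in the comment above, written as plain C.  The real
 * helper makes the read-compare-write below appear as one indivisible
 * step.  The name ____cmpxchg_u8_model is hypothetical.
 */
static inline unsigned long
____cmpxchg_u8_model(volatile char *m, unsigned char old, unsigned char new)
{
        unsigned char prev = *(volatile unsigned char *)m;

        if (prev == old)
                *(volatile unsigned char *)m = new;
        return prev;    /* caller infers success from prev == old */
}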

static inline unsigned long
____cmpxchg_u16(volatile short *m, unsigned short old, unsigned short new)
{
        unsigned long prev, tmp, cmp, addr64;

        __asm__ __volatile__(
        "       andnot  %5,7,%4\n"
        "       inswl   %1,%5,%1\n"
        "1:     ldq_l   %2,0(%4)\n"
        "       extwl   %2,%5,%0\n"
        "       cmpeq   %0,%6,%3\n"
        "       beq     %3,2f\n"
        "       mskwl   %2,%5,%2\n"
        "       or      %1,%2,%2\n"
        "       stq_c   %2,0(%4)\n"
        "       beq     %2,3f\n"
        "2:\n"
        ".subsection 2\n"
        "3:     br      1b\n"
        ".previous"
        : "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
        : "r" ((long)m), "Ir" (old), "1" (new) : "memory");

        return prev;
}

static inline unsigned long
____cmpxchg_u32(volatile int *m, int old, int new)
{
        unsigned long prev, cmp;

        __asm__ __volatile__(
        "1:     ldl_l %0,%5\n"
        "       cmpeq %0,%3,%1\n"
        "       beq %1,2f\n"
        "       mov %4,%1\n"
        "       stl_c %1,%2\n"
        "       beq %1,3f\n"
        "2:\n"
        ".subsection 2\n"
        "3:     br 1b\n"
        ".previous"
        : "=&r"(prev), "=&r"(cmp), "=m"(*m)
        : "r"((long) old), "r"(new), "m"(*m) : "memory");

        return prev;
}

static inline unsigned long
____cmpxchg_u64(volatile long *m, unsigned long old, unsigned long new)
{
        unsigned long prev, cmp;

        __asm__ __volatile__(
        "1:     ldq_l %0,%5\n"
        "       cmpeq %0,%3,%1\n"
        "       beq %1,2f\n"
        "       mov %4,%1\n"
        "       stq_c %1,%2\n"
        "       beq %1,3f\n"
        "2:\n"
        ".subsection 2\n"
        "3:     br 1b\n"
        ".previous"
        : "=&r"(prev), "=&r"(cmp), "=m"(*m)
        : "r"((long) old), "r"(new), "m"(*m) : "memory");

        return prev;
}
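
/*
 * Illustrative sketch, not from the upstream header: like the xchg
 * helpers, the 32/64-bit compare-and-exchange above is relaxed; the
 * ordering comes from the arch_cmpxchg() wrapper below.  A hypothetical
 * model in terms of GCC's __atomic builtins:
 */
static inline unsigned long
____cmpxchg_u64_model(volatile long *m, unsigned long old, unsigned long new)
{
        long expected = old;

        __atomic_compare_exchange_n(m, &expected, (long)new, 0,
                                    __ATOMIC_RELAXED, __ATOMIC_RELAXED);
        return expected;        /* value found in *m; == old on success */
}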

/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid cmpxchg(). */
extern void __cmpxchg_called_with_bad_pointer(void);

static __always_inline unsigned long
____cmpxchg(volatile void *ptr, unsigned long old, unsigned long new,
            int size)
{
        return
                size == 1 ? ____cmpxchg_u8(ptr, old, new) :
                size == 2 ? ____cmpxchg_u16(ptr, old, new) :
                size == 4 ? ____cmpxchg_u32(ptr, old, new) :
                size == 8 ? ____cmpxchg_u64(ptr, old, new) :
                        (__cmpxchg_called_with_bad_pointer(), old);
}
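
/*
 * Illustrative note, not from the upstream header: an operand whose size
 * is not 1, 2, 4 or 8 falls through every test above, leaving a
 * reference to the undefined __cmpxchg_called_with_bad_pointer(), so a
 * bad cmpxchg() fails at link time instead of at run time.  Hypothetical
 * example of code that would not link:
 *
 *      struct { char b[3]; } odd;
 *      ____cmpxchg(&odd, 0, 0, sizeof(odd));
 */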

#define arch_xchg_local(ptr, x)                                         \
({                                                                      \
        __typeof__(*(ptr)) _x_ = (x);                                   \
        (__typeof__(*(ptr))) ____xchg((ptr), (unsigned long)_x_,        \
                                      sizeof(*(ptr)));                  \
})

#define arch_cmpxchg_local(ptr, o, n)                                   \
({                                                                      \
        __typeof__(*(ptr)) _o_ = (o);                                   \
        __typeof__(*(ptr)) _n_ = (n);                                   \
        (__typeof__(*(ptr))) ____cmpxchg((ptr), (unsigned long)_o_,     \
                                         (unsigned long)_n_,            \
                                         sizeof(*(ptr)));               \
})

#define arch_cmpxchg64_local(ptr, o, n)                                 \
({                                                                      \
        BUILD_BUG_ON(sizeof(*(ptr)) != 8);                              \
        cmpxchg_local((ptr), (o), (n));                                 \
})
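
/*
 * Illustrative sketch, not from the upstream header: the _local variants
 * skip the SMP barriers, so they fit data owned by a single CPU.  This
 * hypothetical helper bumps a CPU-private counter and reports the old
 * value; it is only safe because no other CPU touches *ctr:
 */
static inline unsigned long
demo_local_count_bump(unsigned long *ctr)
{
        return arch_xchg_local(ctr, *ctr + 1);  /* returns the old count */
}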

/*
 * The leading and the trailing memory barriers guarantee that these
 * operations are fully ordered.
 */

#define arch_xchg(ptr, x)                                               \
({                                                                      \
        __typeof__(*(ptr)) __ret;                                       \
        __typeof__(*(ptr)) _x_ = (x);                                   \
        smp_mb();                                                       \
        __ret = (__typeof__(*(ptr)))                                    \
                ____xchg((ptr), (unsigned long)_x_, sizeof(*(ptr)));    \
        smp_mb();                                                       \
        __ret;                                                          \
})
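
/*
 * Illustrative sketch, not from the upstream header: the full ordering
 * is what lets arch_xchg() carry a critical section, as in this minimal
 * test-and-set lock (hypothetical helpers, not Alpha's real spinlocks):
 */
static inline void demo_lock(volatile int *lock)
{
        while (arch_xchg(lock, 1))      /* old value 0 means we own it */
                ;
}

static inline void demo_unlock(volatile int *lock)
{
        (void)arch_xchg(lock, 0);       /* barriers release the section */
}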

#define arch_cmpxchg(ptr, o, n)                                         \
({                                                                      \
        __typeof__(*(ptr)) __ret;                                       \
        __typeof__(*(ptr)) _o_ = (o);                                   \
        __typeof__(*(ptr)) _n_ = (n);                                   \
        smp_mb();                                                       \
        __ret = (__typeof__(*(ptr))) ____cmpxchg((ptr),                 \
                (unsigned long)_o_, (unsigned long)_n_, sizeof(*(ptr)));\
        smp_mb();                                                       \
        __ret;                                                          \
})
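
/*
 * Illustrative sketch, not from the upstream header: the standard retry
 * loop built on arch_cmpxchg(), here as a hypothetical atomic add:
 */
static inline long demo_atomic_add(volatile long *v, long amount)
{
        long old, seen;

        do {
                old = *v;
                seen = arch_cmpxchg(v, old, old + amount);
        } while (seen != old);  /* seen == old signals success */

        return old + amount;
}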

#define arch_cmpxchg64(ptr, o, n)                                       \
({                                                                      \
        BUILD_BUG_ON(sizeof(*(ptr)) != 8);                              \
        arch_cmpxchg((ptr), (o), (n));                                  \
})

#endif /* _ALPHA_CMPXCHG_H */