/* SPDX-License-Identifier: GPL-2.0 */
/* 64-bit atomic xchg() and cmpxchg() definitions.
 *
 * Copyright (C) 1996, 1997, 2000 David S. Miller (davem@redhat.com)
 */

#ifndef __ARCH_SPARC64_CMPXCHG__
#define __ARCH_SPARC64_CMPXCHG__

static inline unsigned long
__cmpxchg_u32(volatile int *m, int old, int new)
{
	__asm__ __volatile__("cas [%2], %3, %0"
			     : "=&r" (new)
			     : "0" (new), "r" (m), "r" (old)
			     : "memory");

	return new;
}

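/*
 * Illustrative note, not part of the original header: SPARC V9 "cas"
 * compares the word at [%2] with %3 and, iff equal, swaps in the value
 * from %0; either way %0 receives what was in memory.  So a caller
 * detects success by comparing the return value with 'old':
 *
 *	int v = 3;
 *	unsigned long prev = __cmpxchg_u32(&v, 3, 5);
 *	// prev == 3 and v == 5: the exchange happened
 *	prev = __cmpxchg_u32(&v, 3, 7);
 *	// prev == 5 != 3 and v is unchanged: the exchange failed
 */
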
static inline unsigned long xchg32(__volatile__ unsigned int *m, unsigned int val)
{
	unsigned long tmp1, tmp2;

	__asm__ __volatile__(
"	mov		%0, %1\n"
"1:	lduw		[%4], %2\n"
"	cas		[%4], %2, %0\n"
"	cmp		%2, %0\n"
"	bne,a,pn	%%icc, 1b\n"
"	 mov		%1, %0\n"
	: "=&r" (val), "=&r" (tmp1), "=&r" (tmp2)
	: "0" (val), "r" (m)
	: "cc", "memory");

	return val;
}

static inline unsigned long xchg64(__volatile__ unsigned long *m, unsigned long val)
{
	unsigned long tmp1, tmp2;

	__asm__ __volatile__(
"	mov		%0, %1\n"
"1:	ldx		[%4], %2\n"
"	casx		[%4], %2, %0\n"
"	cmp		%2, %0\n"
"	bne,a,pn	%%xcc, 1b\n"
"	 mov		%1, %0\n"
	: "=&r" (val), "=&r" (tmp1), "=&r" (tmp2)
	: "0" (val), "r" (m)
	: "cc", "memory");

	return val;
}

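/*
 * For illustration only (not in the original source): the xchg32()/xchg64()
 * asm above amounts to this load + cas retry loop, written in C against
 * __cmpxchg_u32() from earlier in this file:
 *
 *	static inline unsigned int xchg32_c(volatile unsigned int *m,
 *					    unsigned int val)
 *	{
 *		unsigned int old;
 *
 *		do {
 *			old = *m;	// lduw [%4], %2
 *		} while (__cmpxchg_u32((volatile int *)m, old, val) != old);
 *		return old;		// cas lost a race? bne,a,pn loops back
 *	}
 */
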
#define xchg(ptr,x) ((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr))))

void __xchg_called_with_bad_pointer(void);

/*
 * Use 4 byte cas instruction to achieve 2 byte xchg. Main logic
 * here is to get the bit shift of the halfword we are interested in.
 * The XOR is handy for picking the correct shift under big-endian
 * byte order.
 */
static inline unsigned long
xchg16(__volatile__ unsigned short *m, unsigned short val)
{
	unsigned long maddr = (unsigned long)m;
	int bit_shift = (((unsigned long)m & 2) ^ 2) << 3;
	unsigned int mask = 0xffff << bit_shift;
	unsigned int *ptr = (unsigned int *) (maddr & ~2);
	unsigned int old32, new32, load32;

	/* Read the old value */
	load32 = *ptr;

	do {
		old32 = load32;
		new32 = (load32 & (~mask)) | val << bit_shift;
		load32 = __cmpxchg_u32(ptr, old32, new32);
	} while (load32 != old32);

	return (load32 & mask) >> bit_shift;
}

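/*
 * Worked example (illustrative, not in the original source): sparc64 is
 * big-endian, so within an aligned 32-bit word the halfword at offset 0
 * occupies bits 31:16 and the halfword at offset 2 occupies bits 15:0:
 *
 *	m & 2 == 0:  bit_shift = (0 ^ 2) << 3 = 16,  mask = 0xffff0000
 *	m & 2 == 2:  bit_shift = (2 ^ 2) << 3 =  0,  mask = 0x0000ffff
 *
 * i.e. the XOR flips the halfword index so the shift lands on the
 * correct half of the big-endian word.
 */
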
static inline unsigned long __xchg(unsigned long x, __volatile__ void * ptr,
				   int size)
{
	switch (size) {
	case 2:
		return xchg16(ptr, x);
	case 4:
		return xchg32(ptr, x);
	case 8:
		return xchg64(ptr, x);
	}
	__xchg_called_with_bad_pointer();
	return x;
}

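/*
 * Illustrative usage with hypothetical variables (not from this file):
 * the xchg() macro dispatches here on sizeof(*ptr), so 2-, 4- and 8-byte
 * objects are all accepted:
 *
 *	unsigned long flag = 1;
 *	unsigned long was = xchg(&flag, 0);	// atomically clear, fetch old
 *	if (was)
 *		;	// we are the caller that cleared it
 */
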
/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */

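/*
 * Sketch of the classic retry pattern these semantics enable, using the
 * cmpxchg() macro defined below ('counter' is a hypothetical long *):
 *
 *	long old, seen = *counter;
 *	do {
 *		old = seen;
 *		seen = cmpxchg(counter, old, old + 1);
 *	} while (seen != old);	// raced with another updater, try again
 */
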
#include <asm-generic/cmpxchg-local.h>

static inline unsigned long
__cmpxchg_u64(volatile long *m, unsigned long old, unsigned long new)
{
	__asm__ __volatile__("casx [%2], %3, %0"
			     : "=&r" (new)
			     : "0" (new), "r" (m), "r" (old)
			     : "memory");

	return new;
}

/*
 * Use 4 byte cas instruction to achieve 1 byte cmpxchg. Main logic
 * here is to get the bit shift of the byte we are interested in.
 * The XOR is handy for picking the correct shift under big-endian
 * byte order.
 */
static inline unsigned long
__cmpxchg_u8(volatile unsigned char *m, unsigned char old, unsigned char new)
{
	unsigned long maddr = (unsigned long)m;
	int bit_shift = (((unsigned long)m & 3) ^ 3) << 3;
	unsigned int mask = 0xff << bit_shift;
	unsigned int *ptr = (unsigned int *) (maddr & ~3);
	unsigned int old32, new32, load;
	unsigned int load32 = *ptr;

	do {
		new32 = (load32 & ~mask) | (new << bit_shift);
		old32 = (load32 & ~mask) | (old << bit_shift);
		load32 = __cmpxchg_u32(ptr, old32, new32);
		if (load32 == old32)
			return old;
		load = (load32 & mask) >> bit_shift;
	} while (load == old);

	return load;
}

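/*
 * Worked example (illustrative, not in the original source): (m & 3) ^ 3
 * gives the big-endian byte position within the aligned word:
 *
 *	m & 3 == 0:  bit_shift = 24,  mask = 0xff000000  (MSB)
 *	m & 3 == 3:  bit_shift =  0,  mask = 0x000000ff  (LSB)
 *
 * A failed 32-bit cas is retried only while the byte of interest still
 * equals 'old', i.e. only when one of the other three bytes changed
 * underneath us; if the byte itself changed, it is returned so the
 * caller sees the mismatch.
 */
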
/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid cmpxchg().  */
void __cmpxchg_called_with_bad_pointer(void);

static inline unsigned long
__cmpxchg(volatile void *ptr, unsigned long old, unsigned long new, int size)
{
	switch (size) {
	case 1:
		return __cmpxchg_u8(ptr, old, new);
	case 4:
		return __cmpxchg_u32(ptr, old, new);
	case 8:
		return __cmpxchg_u64(ptr, old, new);
	}
	__cmpxchg_called_with_bad_pointer();
	return old;
}

#define cmpxchg(ptr,o,n)						\
({									\
	__typeof__(*(ptr)) _o_ = (o);					\
	__typeof__(*(ptr)) _n_ = (n);					\
	(__typeof__(*(ptr))) __cmpxchg((ptr), (unsigned long)_o_,	\
				       (unsigned long)_n_,		\
				       sizeof(*(ptr)));			\
})

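/*
 * Illustrative usage ('head', alloc_foo() and free_foo() are hypothetical):
 * because the macro converts through __typeof__(*(ptr)), it also works on
 * pointer-sized objects:
 *
 *	struct foo *mine = alloc_foo();
 *	if (cmpxchg(&head, NULL, mine) != NULL)
 *		free_foo(mine);	// lost the race, someone else installed head
 */
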
/*
 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
 * them available.
 */
static inline unsigned long __cmpxchg_local(volatile void *ptr,
					    unsigned long old,
					    unsigned long new, int size)
{
	switch (size) {
	case 4:
	case 8:
		return __cmpxchg(ptr, old, new, size);
	default:
		return __cmpxchg_local_generic(ptr, old, new, size);
	}

	return old;
}

#define cmpxchg_local(ptr, o, n)					\
	((__typeof__(*(ptr)))__cmpxchg_local((ptr), (unsigned long)(o),	\
			(unsigned long)(n), sizeof(*(ptr))))
#define cmpxchg64_local(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_local((ptr), (o), (n));					\
})
#define cmpxchg64(ptr, o, n)	cmpxchg64_local((ptr), (o), (n))

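/*
 * Illustrative note (not from the original source): on sparc64 the 8-byte
 * case always goes through the casx-based __cmpxchg(), so cmpxchg64() can
 * simply alias cmpxchg64_local(); the BUILD_BUG_ON above rejects, at
 * compile time, any pointer whose target is not 8 bytes wide:
 *
 *	u64 seq = 0;
 *	cmpxchg64(&seq, 0, 1);			// OK
 *	// cmpxchg64((u32 *)&seq, 0, 1)		// would fail to build
 */
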
#endif /* __ARCH_SPARC64_CMPXCHG__ */