/*
 * Copyright IBM Corp. 1999, 2011
 *
 * Author(s): Martin Schwidefsky <[email protected]>,
 */

#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <linux/mmdebug.h>
#include <linux/types.h>
#include <linux/bug.h>

extern void __xchg_called_with_bad_pointer(void);

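/*
 * The CS instruction only operates on an aligned 4-byte word (CSG on an
 * aligned 8-byte doubleword). 1- and 2-byte exchanges are therefore
 * emulated below with a compare-and-swap loop on the aligned word that
 * contains the byte/halfword, leaving the neighbouring bytes untouched.
 */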
static inline unsigned long __xchg(unsigned long x, void *ptr, int size)
{
        unsigned long addr, old;
        int shift;

        switch (size) {
        case 1:
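                /*
                 * Big-endian: convert the byte offset within the
                 * containing 4-byte word into a bit shift and round the
                 * address down to the word boundary CS requires.
                 */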
                addr = (unsigned long) ptr;
                shift = (3 ^ (addr & 3)) << 3;
                addr ^= addr & 3;
                asm volatile(
                        "       l       %0,%4\n"
                        "0:     lr      0,%0\n"
                        "       nr      0,%3\n"
                        "       or      0,%2\n"
                        "       cs      %0,0,%4\n"
                        "       jl      0b\n"
                        : "=&d" (old), "=Q" (*(int *) addr)
                        : "d" ((x & 0xff) << shift), "d" (~(0xff << shift)),
                          "Q" (*(int *) addr) : "memory", "cc", "0");
                return old >> shift;
        case 2:
                addr = (unsigned long) ptr;
                shift = (2 ^ (addr & 2)) << 3;
                addr ^= addr & 2;
                asm volatile(
                        "       l       %0,%4\n"
                        "0:     lr      0,%0\n"
                        "       nr      0,%3\n"
                        "       or      0,%2\n"
                        "       cs      %0,0,%4\n"
                        "       jl      0b\n"
                        : "=&d" (old), "=Q" (*(int *) addr)
                        : "d" ((x & 0xffff) << shift), "d" (~(0xffff << shift)),
                          "Q" (*(int *) addr) : "memory", "cc", "0");
                return old >> shift;
        case 4:
                asm volatile(
                        "       l       %0,%3\n"
                        "0:     cs      %0,%2,%3\n"
                        "       jl      0b\n"
                        : "=&d" (old), "=Q" (*(int *) ptr)
                        : "d" (x), "Q" (*(int *) ptr)
                        : "memory", "cc");
                return old;
#ifdef CONFIG_64BIT
        case 8:
                asm volatile(
                        "       lg      %0,%3\n"
                        "0:     csg     %0,%2,%3\n"
                        "       jl      0b\n"
                        : "=&d" (old), "=m" (*(long *) ptr)
                        : "d" (x), "Q" (*(long *) ptr)
                        : "memory", "cc");
                return old;
#endif /* CONFIG_64BIT */
        }
        __xchg_called_with_bad_pointer();
        return x;
}

#define xchg(ptr, x)                                                      \
({                                                                        \
        __typeof__(*(ptr)) __ret;                                         \
        __ret = (__typeof__(*(ptr)))                                      \
                __xchg((unsigned long)(x), (void *)(ptr), sizeof(*(ptr)));\
        __ret;                                                            \
})

/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */
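/*
 * Minimal usage sketch (the lock word and enter_critical_section() below
 * are purely illustrative, not part of this header):
 *
 *	static unsigned int lock;
 *	...
 *	if (cmpxchg(&lock, 0, 1) == 0)
 *		enter_critical_section();	we observed 0 and stored 1
 */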

#define __HAVE_ARCH_CMPXCHG

extern void __cmpxchg_called_with_bad_pointer(void);

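/*
 * For 1- and 2-byte operands the compare-and-swap is again emulated on
 * the containing aligned 4-byte word: if CS fails only because bytes
 * outside the operand changed, the loop retries with the fresh word;
 * if the operand itself differs from the expected value, the current
 * contents are returned and the caller sees the failure.
 */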
static inline unsigned long __cmpxchg(void *ptr, unsigned long old,
                                      unsigned long new, int size)
{
        unsigned long addr, prev, tmp;
        int shift;

        switch (size) {
        case 1:
                addr = (unsigned long) ptr;
                shift = (3 ^ (addr & 3)) << 3;
                addr ^= addr & 3;
                asm volatile(
                        "       l       %0,%2\n"
                        "0:     nr      %0,%5\n"
                        "       lr      %1,%0\n"
                        "       or      %0,%3\n"
                        "       or      %1,%4\n"
                        "       cs      %0,%1,%2\n"
                        "       jnl     1f\n"
                        "       xr      %1,%0\n"
                        "       nr      %1,%5\n"
                        "       jnz     0b\n"
                        "1:"
                        : "=&d" (prev), "=&d" (tmp), "+Q" (*(int *) addr)
                        : "d" ((old & 0xff) << shift),
                          "d" ((new & 0xff) << shift),
                          "d" (~(0xff << shift))
                        : "memory", "cc");
                return prev >> shift;
        case 2:
                addr = (unsigned long) ptr;
                shift = (2 ^ (addr & 2)) << 3;
                addr ^= addr & 2;
                asm volatile(
                        "       l       %0,%2\n"
                        "0:     nr      %0,%5\n"
                        "       lr      %1,%0\n"
                        "       or      %0,%3\n"
                        "       or      %1,%4\n"
                        "       cs      %0,%1,%2\n"
                        "       jnl     1f\n"
                        "       xr      %1,%0\n"
                        "       nr      %1,%5\n"
                        "       jnz     0b\n"
                        "1:"
                        : "=&d" (prev), "=&d" (tmp), "+Q" (*(int *) addr)
                        : "d" ((old & 0xffff) << shift),
                          "d" ((new & 0xffff) << shift),
                          "d" (~(0xffff << shift))
                        : "memory", "cc");
                return prev >> shift;
        case 4:
                asm volatile(
                        "       cs      %0,%3,%1\n"
                        : "=&d" (prev), "=Q" (*(int *) ptr)
                        : "0" (old), "d" (new), "Q" (*(int *) ptr)
                        : "memory", "cc");
                return prev;
#ifdef CONFIG_64BIT
        case 8:
                asm volatile(
                        "       csg     %0,%3,%1\n"
                        : "=&d" (prev), "=Q" (*(long *) ptr)
                        : "0" (old), "d" (new), "Q" (*(long *) ptr)
                        : "memory", "cc");
                return prev;
#endif /* CONFIG_64BIT */
        }
        __cmpxchg_called_with_bad_pointer();
        return old;
}

#define cmpxchg(ptr, o, n)                                               \
({                                                                       \
        __typeof__(*(ptr)) __ret;                                        \
        __ret = (__typeof__(*(ptr)))                                     \
                __cmpxchg((ptr), (unsigned long)(o), (unsigned long)(n), \
                          sizeof(*(ptr)));                               \
        __ret;                                                           \
})

#ifdef CONFIG_64BIT
#define cmpxchg64(ptr, o, n)                                            \
({                                                                      \
        cmpxchg((ptr), (o), (n));                                       \
})
#else /* CONFIG_64BIT */
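/*
 * On 31 bit a 64 bit compare-and-swap is done with CDS, which works on
 * an even/odd general register pair; the register_pair type is used so
 * that gcc allocates such a pair for the 64 bit old and new values.
 */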
static inline unsigned long long __cmpxchg64(void *ptr,
                                             unsigned long long old,
                                             unsigned long long new)
{
        register_pair rp_old = {.pair = old};
        register_pair rp_new = {.pair = new};
        unsigned long long *ullptr = ptr;

        asm volatile(
                "       cds     %0,%2,%1"
                : "+d" (rp_old), "+Q" (*ullptr)
                : "d" (rp_new)
                : "memory", "cc");
        return rp_old.pair;
}

#define cmpxchg64(ptr, o, n)                            \
({                                                      \
        __typeof__(*(ptr)) __ret;                       \
        __ret = (__typeof__(*(ptr)))                    \
                __cmpxchg64((ptr),                      \
                            (unsigned long long)(o),    \
                            (unsigned long long)(n));   \
        __ret;                                          \
})
#endif /* CONFIG_64BIT */

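/*
 * CDS/CDSG compare-and-swap two adjacent words/doublewords in one go.
 * Both the old and the new value must each live in an even/odd register
 * pair, hence the explicit register variables in 2/3 and 4/5. IPM/SRL
 * copy the resulting condition code into a C variable; cc == 0 means
 * the swap was performed, so !cc is returned as the success indicator.
 */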
#define __cmpxchg_double_op(p1, p2, o1, o2, n1, n2, insn)               \
({                                                                      \
        register __typeof__(*(p1)) __old1 asm("2") = (o1);              \
        register __typeof__(*(p2)) __old2 asm("3") = (o2);              \
        register __typeof__(*(p1)) __new1 asm("4") = (n1);              \
        register __typeof__(*(p2)) __new2 asm("5") = (n2);              \
        int cc;                                                         \
        asm volatile(                                                   \
                        insn   " %[old],%[new],%[ptr]\n"                \
                "       ipm     %[cc]\n"                                \
                "       srl     %[cc],28"                               \
                : [cc] "=d" (cc), [old] "+d" (__old1), "+d" (__old2)    \
                : [new] "d" (__new1), "d" (__new2),                     \
                  [ptr] "Q" (*(p1)), "Q" (*(p2))                        \
                : "memory", "cc");                                      \
        !cc;                                                            \
})

#define __cmpxchg_double_4(p1, p2, o1, o2, n1, n2) \
        __cmpxchg_double_op(p1, p2, o1, o2, n1, n2, "cds")

#define __cmpxchg_double_8(p1, p2, o1, o2, n1, n2) \
        __cmpxchg_double_op(p1, p2, o1, o2, n1, n2, "cdsg")

extern void __cmpxchg_double_called_with_bad_pointer(void);

#define __cmpxchg_double(p1, p2, o1, o2, n1, n2)                        \
({                                                                      \
        int __ret;                                                      \
        switch (sizeof(*(p1))) {                                        \
        case 4:                                                         \
                __ret = __cmpxchg_double_4(p1, p2, o1, o2, n1, n2);     \
                break;                                                  \
        case 8:                                                         \
                __ret = __cmpxchg_double_8(p1, p2, o1, o2, n1, n2);     \
                break;                                                  \
        default:                                                        \
                __cmpxchg_double_called_with_bad_pointer();             \
        }                                                               \
        __ret;                                                          \
})

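/*
 * cmpxchg_double() operates on two long-sized values that must be
 * adjacent in memory; the BUILD_BUG_ONs enforce the operand size and
 * the VM_BUG_ON checks that p2 directly follows p1. The 31 bit (CDS)
 * and 64 bit (CDSG) variants are selected at compile time via
 * sizeof(long).
 */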
#define cmpxchg_double(p1, p2, o1, o2, n1, n2)                          \
({                                                                      \
        __typeof__(p1) __p1 = (p1);                                     \
        __typeof__(p2) __p2 = (p2);                                     \
        int __ret;                                                      \
        BUILD_BUG_ON(sizeof(*(p1)) != sizeof(long));                    \
        BUILD_BUG_ON(sizeof(*(p2)) != sizeof(long));                    \
        VM_BUG_ON((unsigned long)((__p1) + 1) != (unsigned long)(__p2));\
        if (sizeof(long) == 4)                                          \
                __ret = __cmpxchg_double_4(__p1, __p2, o1, o2, n1, n2); \
        else                                                            \
                __ret = __cmpxchg_double_8(__p1, __p2, o1, o2, n1, n2); \
        __ret;                                                          \
})

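/*
 * CDS/CDSG are available on all machines this kernel runs on, so
 * cmpxchg_double support is unconditionally advertised.
 */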
#define system_has_cmpxchg_double()     1

#include <asm-generic/cmpxchg-local.h>

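/*
 * cmpxchg_local() only has to be atomic with respect to the current
 * CPU: the natively supported sizes simply reuse the fully atomic
 * __cmpxchg(), anything else falls back to the generic helper.
 */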
static inline unsigned long __cmpxchg_local(void *ptr,
                                            unsigned long old,
                                            unsigned long new, int size)
{
        switch (size) {
        case 1:
        case 2:
        case 4:
#ifdef CONFIG_64BIT
        case 8:
#endif
                return __cmpxchg(ptr, old, new, size);
        default:
                return __cmpxchg_local_generic(ptr, old, new, size);
        }

        return old;
}

/*
 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
 * them available.
 */
#define cmpxchg_local(ptr, o, n)                                        \
({                                                                      \
        __typeof__(*(ptr)) __ret;                                       \
        __ret = (__typeof__(*(ptr)))                                    \
                __cmpxchg_local((ptr), (unsigned long)(o),              \
                                (unsigned long)(n), sizeof(*(ptr)));    \
        __ret;                                                          \
})

#define cmpxchg64_local(ptr, o, n)      cmpxchg64((ptr), (o), (n))

#endif /* __ASM_CMPXCHG_H */