[linux.git] / arch / m68k / include / asm / cmpxchg.h (Linux 6.14-rc3)
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ARCH_M68K_CMPXCHG__
#define __ARCH_M68K_CMPXCHG__

#include <linux/irqflags.h>
#include <linux/minmax.h>

#define __xg(type, x) ((volatile type *)(x))

extern unsigned long __invalid_xchg_size(unsigned long, volatile void *, int);

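/*
 * Two implementations of __arch_xchg() follow.  Without CONFIG_RMW_INSNS
 * the exchange is emulated by disabling local interrupts around a plain
 * swap; with it, the CPU's cas instruction is used in a retry loop.
 */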
#ifndef CONFIG_RMW_INSNS
static inline unsigned long __arch_xchg(unsigned long x, volatile void * ptr, int size)
{
        unsigned long flags;

        local_irq_save(flags);

        switch (size) {
        case 1:
                swap(*(u8 *)ptr, x);
                break;
        case 2:
                swap(*(u16 *)ptr, x);
                break;
        case 4:
                swap(*(u32 *)ptr, x);
                break;
        default:
                x = __invalid_xchg_size(x, ptr, size);
                break;
        }

        local_irq_restore(flags);
        return x;
}
#else
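/*
 * cas-based version: the current memory contents are first loaded into the
 * output register, then cas keeps retrying until it atomically replaces them
 * with the new value; the previous contents are returned in x.
 */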
static inline unsigned long __arch_xchg(unsigned long x, volatile void * ptr, int size)
{
        switch (size) {
        case 1:
                __asm__ __volatile__
                        ("moveb %2,%0\n\t"
                         "1:\n\t"
                         "casb %0,%1,%2\n\t"
                         "jne 1b"
                         : "=&d" (x) : "d" (x), "m" (*__xg(u8, ptr)) : "memory");
                break;
        case 2:
                __asm__ __volatile__
                        ("movew %2,%0\n\t"
                         "1:\n\t"
                         "casw %0,%1,%2\n\t"
                         "jne 1b"
                         : "=&d" (x) : "d" (x), "m" (*__xg(u16, ptr)) : "memory");
                break;
        case 4:
                __asm__ __volatile__
                        ("movel %2,%0\n\t"
                         "1:\n\t"
                         "casl %0,%1,%2\n\t"
                         "jne 1b"
                         : "=&d" (x) : "d" (x), "m" (*__xg(u32, ptr)) : "memory");
                break;
        default:
                x = __invalid_xchg_size(x, ptr, size);
                break;
        }
        return x;
}
#endif

#define arch_xchg(ptr,x) ({(__typeof__(*(ptr)))__arch_xchg((unsigned long)(x),(ptr),sizeof(*(ptr)));})

#include <asm-generic/cmpxchg-local.h>

#define arch_cmpxchg64_local(ptr, o, n) __generic_cmpxchg64_local((ptr), (o), (n))

extern unsigned long __invalid_cmpxchg_size(volatile void *,
                                            unsigned long, unsigned long, int);

/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */
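/*
 * Typical use (illustrative only, variable names made up):
 *
 *      prev = arch_cmpxchg(&v, old, new);
 *      if (prev == old)
 *              ... new was stored atomically ...
 *      else
 *              ... v was changed concurrently; prev holds its value ...
 */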
#ifdef CONFIG_RMW_INSNS

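/*
 * With cas available, __cmpxchg() maps onto a single casb/casw/casl.  The
 * "0" constraint ties the expected old value to the output register, so
 * whatever was found in memory is returned in old.
 */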
static inline unsigned long __cmpxchg(volatile void *p, unsigned long old,
                                      unsigned long new, int size)
{
        switch (size) {
        case 1:
                __asm__ __volatile__ ("casb %0,%2,%1"
                                      : "=d" (old), "=m" (*(char *)p)
                                      : "d" (new), "0" (old), "m" (*(char *)p));
                break;
        case 2:
                __asm__ __volatile__ ("casw %0,%2,%1"
                                      : "=d" (old), "=m" (*(short *)p)
                                      : "d" (new), "0" (old), "m" (*(short *)p));
                break;
        case 4:
                __asm__ __volatile__ ("casl %0,%2,%1"
                                      : "=d" (old), "=m" (*(int *)p)
                                      : "d" (new), "0" (old), "m" (*(int *)p));
                break;
        default:
                old = __invalid_cmpxchg_size(p, old, new, size);
                break;
        }
        return old;
}

#define arch_cmpxchg(ptr, o, n)                                             \
        ({(__typeof__(*(ptr)))__cmpxchg((ptr), (unsigned long)(o),          \
                        (unsigned long)(n), sizeof(*(ptr)));})
#define arch_cmpxchg_local(ptr, o, n)                                       \
        ({(__typeof__(*(ptr)))__cmpxchg((ptr), (unsigned long)(o),          \
                        (unsigned long)(n), sizeof(*(ptr)));})

#define arch_cmpxchg64(ptr, o, n)       arch_cmpxchg64_local((ptr), (o), (n))

#else
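/*
 * No cas instruction available: fall back to the generic cmpxchg helpers,
 * which, like the non-RMW xchg() above, are built on disabling local
 * interrupts.
 */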

#include <asm-generic/cmpxchg.h>

#endif

#endif /* __ARCH_M68K_CMPXCHG__ */