/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ARCH_S390_PERCPU__
#define __ARCH_S390_PERCPU__

#include <linux/preempt.h>
#include <asm/cmpxchg.h>
#include <asm/march.h>

/*
 * s390 uses its own implementation for per-cpu data: the offset of
 * the CPU-local data area is cached in the CPU's lowcore memory.
 */
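/*
 * Note: the lowcore is a prefixed, per-CPU memory area, so fetching
 * the cached offset below is a single load without any table lookup.
 */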
#define __my_cpu_offset get_lowcore()->percpu_offset

/*
 * For 64-bit module code the module may be more than 4G above the
 * per-cpu area; use weak definitions to force the compiler to
 * generate external references.
 */
#if defined(MODULE)
#define ARCH_NEEDS_WEAK_PER_CPU
#endif

/*
 * We use a compare-and-swap loop since that needs fewer CPU cycles than
 * disabling and enabling interrupts, which is what the generic variant
 * does.
 */
#define arch_this_cpu_to_op_simple(pcp, val, op)			\
({									\
	typedef typeof(pcp) pcp_op_T__;					\
	pcp_op_T__ old__, new__, prev__;				\
	pcp_op_T__ *ptr__;						\
	preempt_disable_notrace();					\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	prev__ = READ_ONCE(*ptr__);					\
	do {								\
		old__ = prev__;						\
		new__ = old__ op (val);					\
		prev__ = cmpxchg(ptr__, old__, new__);			\
	} while (prev__ != old__);					\
	preempt_enable_notrace();					\
	new__;								\
})
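
/*
 * For illustration only (hypothetical caller, not part of this header):
 *
 *	DEFINE_PER_CPU(u8, hits);
 *	this_cpu_add_1(hits, 1);
 *
 * expands to the loop above: it retries until the compare-and-swap
 * observes an unchanged old value, making the read-modify-write atomic.
 */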

#define this_cpu_add_1(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_2(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_return_1(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_return_2(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_and_1(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, &)
#define this_cpu_and_2(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, &)
#define this_cpu_or_1(pcp, val)		arch_this_cpu_to_op_simple(pcp, val, |)
#define this_cpu_or_2(pcp, val)		arch_this_cpu_to_op_simple(pcp, val, |)

#ifndef MARCH_HAS_Z196_FEATURES

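/*
 * Machines without the z196 interlocked-access facility have no
 * instructions such as LAA, so the 4 and 8 byte operations fall back
 * to the compare-and-swap loop as well.
 */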
#define this_cpu_add_4(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_8(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_return_4(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_add_return_8(pcp, val) arch_this_cpu_to_op_simple(pcp, val, +)
#define this_cpu_and_4(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, &)
#define this_cpu_and_8(pcp, val)	arch_this_cpu_to_op_simple(pcp, val, &)
#define this_cpu_or_4(pcp, val)		arch_this_cpu_to_op_simple(pcp, val, |)
#define this_cpu_or_8(pcp, val)		arch_this_cpu_to_op_simple(pcp, val, |)

#else /* MARCH_HAS_Z196_FEATURES */

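/*
 * ASI/AGSI add a signed 8-bit immediate directly to storage, so for
 * compile-time constants in the range -128..127 no register has to be
 * loaded; everything else goes through LAA/LAAG, which atomically add
 * a register to storage and return the old value (unused here).
 */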
#define arch_this_cpu_add(pcp, val, op1, op2, szcast)			\
{									\
	typedef typeof(pcp) pcp_op_T__;					\
	pcp_op_T__ val__ = (val);					\
	pcp_op_T__ old__, *ptr__;					\
	preempt_disable_notrace();					\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	if (__builtin_constant_p(val__) &&				\
	    ((szcast)val__ > -129) && ((szcast)val__ < 128)) {		\
		asm volatile(						\
			op2 "	%[ptr__],%[val__]\n"			\
			: [ptr__] "+Q" (*ptr__)				\
			: [val__] "i" ((szcast)val__)			\
			: "cc");					\
	} else {							\
		asm volatile(						\
			op1 "	%[old__],%[val__],%[ptr__]\n"		\
			: [old__] "=d" (old__), [ptr__] "+Q" (*ptr__)	\
			: [val__] "d" (val__)				\
			: "cc");					\
	}								\
	preempt_enable_notrace();					\
}

#define this_cpu_add_4(pcp, val) arch_this_cpu_add(pcp, val, "laa", "asi", int)
#define this_cpu_add_8(pcp, val) arch_this_cpu_add(pcp, val, "laag", "agsi", long)

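/*
 * LAA/LAAG return the old value of the storage operand in old__, so the
 * post-add value can be computed as old__ + val__ without touching
 * memory a second time.
 */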
#define arch_this_cpu_add_return(pcp, val, op)				\
({									\
	typedef typeof(pcp) pcp_op_T__;					\
	pcp_op_T__ val__ = (val);					\
	pcp_op_T__ old__, *ptr__;					\
	preempt_disable_notrace();					\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	asm volatile(							\
		op "	%[old__],%[val__],%[ptr__]\n"			\
		: [old__] "=d" (old__), [ptr__] "+Q" (*ptr__)		\
		: [val__] "d" (val__)					\
		: "cc");						\
	preempt_enable_notrace();					\
	old__ + val__;							\
})

#define this_cpu_add_return_4(pcp, val) arch_this_cpu_add_return(pcp, val, "laa")
#define this_cpu_add_return_8(pcp, val) arch_this_cpu_add_return(pcp, val, "laag")

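/*
 * LAN/LANG and LAO/LAOG atomically AND respectively OR a register into
 * storage; the old value is returned by the instruction but discarded
 * here since the callers only need the side effect.
 */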
#define arch_this_cpu_to_op(pcp, val, op)				\
{									\
	typedef typeof(pcp) pcp_op_T__;					\
	pcp_op_T__ val__ = (val);					\
	pcp_op_T__ old__, *ptr__;					\
	preempt_disable_notrace();					\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	asm volatile(							\
		op "	%[old__],%[val__],%[ptr__]\n"			\
		: [old__] "=d" (old__), [ptr__] "+Q" (*ptr__)		\
		: [val__] "d" (val__)					\
		: "cc");						\
	preempt_enable_notrace();					\
}

#define this_cpu_and_4(pcp, val)	arch_this_cpu_to_op(pcp, val, "lan")
#define this_cpu_and_8(pcp, val)	arch_this_cpu_to_op(pcp, val, "lang")
#define this_cpu_or_4(pcp, val)		arch_this_cpu_to_op(pcp, val, "lao")
#define this_cpu_or_8(pcp, val)		arch_this_cpu_to_op(pcp, val, "laog")

#endif /* MARCH_HAS_Z196_FEATURES */

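/*
 * cmpxchg() is atomic by itself; preemption is disabled here only so
 * that the per-cpu pointer keeps referring to the same CPU for the
 * duration of the operation.
 */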
#define arch_this_cpu_cmpxchg(pcp, oval, nval)				\
({									\
	typedef typeof(pcp) pcp_op_T__;					\
	pcp_op_T__ ret__;						\
	pcp_op_T__ *ptr__;						\
	preempt_disable_notrace();					\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	ret__ = cmpxchg(ptr__, oval, nval);				\
	preempt_enable_notrace();					\
	ret__;								\
})

#define this_cpu_cmpxchg_1(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)
#define this_cpu_cmpxchg_2(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)
#define this_cpu_cmpxchg_4(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)
#define this_cpu_cmpxchg_8(pcp, oval, nval) arch_this_cpu_cmpxchg(pcp, oval, nval)

#define this_cpu_cmpxchg64(pcp, o, n)	this_cpu_cmpxchg_8(pcp, o, n)

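/*
 * The 128-bit variant relies on cmpxchg128(), which on s390 maps to the
 * CDSG instruction and therefore requires the per-cpu variable to be
 * 16-byte aligned (assumption: callers guarantee this alignment).
 */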
#define this_cpu_cmpxchg128(pcp, oval, nval)				\
({									\
	typedef typeof(pcp) pcp_op_T__;					\
	u128 old__, new__, ret__;					\
	pcp_op_T__ *ptr__;						\
	old__ = oval;							\
	new__ = nval;							\
	preempt_disable_notrace();					\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	ret__ = cmpxchg128((void *)ptr__, old__, new__);		\
	preempt_enable_notrace();					\
	ret__;								\
})

#define arch_this_cpu_xchg(pcp, nval)					\
({									\
	typeof(pcp) *ptr__;						\
	typeof(pcp) ret__;						\
	preempt_disable_notrace();					\
	ptr__ = raw_cpu_ptr(&(pcp));					\
	ret__ = xchg(ptr__, nval);					\
	preempt_enable_notrace();					\
	ret__;								\
})

#define this_cpu_xchg_1(pcp, nval) arch_this_cpu_xchg(pcp, nval)
#define this_cpu_xchg_2(pcp, nval) arch_this_cpu_xchg(pcp, nval)
#define this_cpu_xchg_4(pcp, nval) arch_this_cpu_xchg(pcp, nval)
#define this_cpu_xchg_8(pcp, nval) arch_this_cpu_xchg(pcp, nval)

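/*
 * The generic header below picks up the arch overrides defined above
 * and provides the default implementations for everything else.
 */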
#include <asm-generic/percpu.h>

#endif /* __ARCH_S390_PERCPU__ */