]> Git Repo - linux.git/blob - arch/s390/include/asm/barrier.h
Linux 6.14-rc3
[linux.git] / arch / s390 / include / asm / barrier.h
1 /* SPDX-License-Identifier: GPL-2.0 */
2 /*
3  * Copyright IBM Corp. 1999, 2009
4  *
5  * Author(s): Martin Schwidefsky <[email protected]>
6  */
7
8 #ifndef __ASM_BARRIER_H
9 #define __ASM_BARRIER_H
10
11 #include <asm/march.h>
12
13 /*
14  * Force strict CPU ordering.
15  * And yes, this is required on UP too when we're talking
16  * to devices.
17  */
18
#ifdef MARCH_HAS_Z196_FEATURES
/*
 * Fast-BCR ("bcr 14,0"): serializes CPU execution without also forcing
 * the more expensive checkpoint synchronization.  Available on z196 and
 * later machines, hence the march feature test.
 */
#define __ASM_BCR_SERIALIZE "bcr 14,0\n"
#else
/* Older machines: full serialization via "bcr 15,0". */
#define __ASM_BCR_SERIALIZE "bcr 15,0\n"
#endif
25
/*
 * Emit the serialization instruction selected above.  The "memory"
 * clobber additionally makes this a compiler barrier, so memory
 * accesses cannot be reordered across it at compile time either.
 */
static __always_inline void bcr_serialize(void)
{
        asm volatile(__ASM_BCR_SERIALIZE : : : "memory");
}
30
/* Full barrier: actual serialization instruction. */
#define __mb()          bcr_serialize()
/*
 * NOTE(review): read/write barriers expand to compiler-only barrier(),
 * presumably because the s390 hardware memory model already orders
 * loads against loads and stores against stores -- confirm against the
 * architecture documentation.  DMA barriers use the full __mb().
 */
#define __rmb()         barrier()
#define __wmb()         barrier()
#define __dma_rmb()     __mb()
#define __dma_wmb()     __mb()
#define __smp_mb()      __mb()
#define __smp_rmb()     __rmb()
#define __smp_wmb()     __wmb()
39
/*
 * Store-release: compiletime_assert_atomic_type() rejects objects that
 * cannot be stored in a single access; barrier() keeps the compiler
 * from sinking earlier memory accesses past the WRITE_ONCE().  No CPU
 * barrier instruction is emitted (consistent with the barrier()-based
 * __smp_wmb() above).
 */
#define __smp_store_release(p, v)                                       \
do {                                                                    \
        compiletime_assert_atomic_type(*p);                             \
        barrier();                                                      \
        WRITE_ONCE(*p, v);                                              \
} while (0)
46
/*
 * Load-acquire: the READ_ONCE() performs the load; barrier() keeps the
 * compiler from hoisting later memory accesses above it.  The atomic
 * type assertion guarantees the load is a single access.  Evaluates to
 * the loaded value.
 */
#define __smp_load_acquire(p)                                           \
({                                                                      \
        typeof(*p) ___p1 = READ_ONCE(*p);                               \
        compiletime_assert_atomic_type(*p);                             \
        barrier();                                                      \
        ___p1;                                                          \
})
54
/*
 * NOTE(review): only compiler barriers around atomics -- presumably the
 * s390 atomic instructions themselves provide the required CPU-level
 * ordering; verify against the atomic op implementations.
 */
#define __smp_mb__before_atomic()       barrier()
#define __smp_mb__after_atomic()        barrier()
57
/**
 * array_index_mask_nospec - generate a mask for array_idx() that is
 * ~0UL when the bounds check succeeds and 0 otherwise
 * @index: array element index
 * @size: number of elements in array
 *
 * Branch-free mask generation used to blunt speculative out-of-bounds
 * array accesses: the caller ANDs the index with the returned mask, so
 * even under misspeculation an out-of-bounds index collapses to 0.
 */
#define array_index_mask_nospec array_index_mask_nospec
static inline unsigned long array_index_mask_nospec(unsigned long index,
                                                    unsigned long size)
{
        unsigned long mask;

        if (__builtin_constant_p(size) && size > 0) {
                /*
                 * Compile-time-constant size: compare index against
                 * size-1; "slbgr mask,mask" (subtract with borrow)
                 * then leaves mask either all-ones or zero depending
                 * on the borrow produced by the compare, giving ~0UL
                 * exactly when index is in bounds.
                 */
                asm("   clgr    %2,%1\n"
                    "   slbgr   %0,%0\n"
                    :"=d" (mask) : "d" (size-1), "d" (index) :"cc");
                return mask;
        }
        /*
         * Runtime size: compare operands are swapped relative to the
         * constant-size path, so the borrow-generated mask is inverted
         * before returning to keep the documented ~0UL-on-success
         * contract.
         */
        asm("   clgr    %1,%2\n"
            "   slbgr   %0,%0\n"
            :"=d" (mask) : "d" (size), "d" (index) :"cc");
        return ~mask;
}
81
82 #include <asm-generic/barrier.h>
83
84 #endif /* __ASM_BARRIER_H */
This page took 0.028971 seconds and 4 git commands to generate.