1 /* atomic.S: These things are too big to do inline.
6 #include <linux/linkage.h>
8 #include <asm/backoff.h>
12 /* Two versions of the atomic routines, one that
13 * does not return a value and does not perform
14 * memory barriers, and a second which returns
15 * a value and does the barriers.
/*
 * ATOMIC_OP(op): template for a 32-bit atomic read-modify-write that
 * returns no value and performs no memory barriers (see file header).
 * Retries via a cas loop with exponential backoff on contention.
 * NOTE(review): several interior lines of this macro (load, the op
 * itself, cmp, retl/delay slots) are elided in this view of the file;
 * comments annotate only the lines shown.
 */
18 #define ATOMIC_OP(op) \
19 ENTRY(atomic_##op) /* %o0 = increment, %o1 = atomic_ptr */ \
23 cas [%o1], %g1, %g7; /* store new value iff [%o1] still equals %g1 */ \
25 bne,pn %icc, BACKOFF_LABEL(2f, 1b); /* cas lost the race: retry, possibly via backoff path */ \
29 2: BACKOFF_SPIN(%o2, %o3, 1b); /* spin with backoff, then branch back to retry at 1 */ \
30 ENDPROC(atomic_##op); \
/*
 * ATOMIC_OP_RETURN(op): template for a 32-bit atomic read-modify-write
 * that returns the resulting value and performs memory barriers (see
 * file header). Same cas retry-with-backoff pattern as ATOMIC_OP.
 * NOTE(review): interior lines of this macro are elided in this view;
 * comments annotate only the lines shown.
 */
32 #define ATOMIC_OP_RETURN(op) \
33 ENTRY(atomic_##op##_return) /* %o0 = increment, %o1 = atomic_ptr */ \
37 cas [%o1], %g1, %g7; /* store new value iff [%o1] still equals %g1 */ \
39 bne,pn %icc, BACKOFF_LABEL(2f, 1b); /* cas lost the race: retry, possibly via backoff path */ \
43 2: BACKOFF_SPIN(%o2, %o3, 1b); /* spin with backoff, then branch back to retry at 1 */ \
44 ENDPROC(atomic_##op##_return);
/* Instantiate both 32-bit variants (void and value-returning) of an op
 * with a single macro, then retire the value-returning helper.
 * NOTE(review): the ATOMIC_OPS(...) invocations and any companion
 * #undefs are elided in this view of the file.
 */
46 #define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op)
52 #undef ATOMIC_OP_RETURN
/*
 * ATOMIC64_OP(op): 64-bit counterpart of ATOMIC_OP — no return value,
 * no memory barriers; uses casx and the 64-bit %xcc condition codes.
 * NOTE(review): interior lines of this macro are elided in this view;
 * comments annotate only the lines shown.
 */
55 #define ATOMIC64_OP(op) \
56 ENTRY(atomic64_##op) /* %o0 = increment, %o1 = atomic_ptr */ \
60 casx [%o1], %g1, %g7; /* 64-bit: store new value iff [%o1] still equals %g1 */ \
62 bne,pn %xcc, BACKOFF_LABEL(2f, 1b); /* casx lost the race: retry, possibly via backoff path */ \
66 2: BACKOFF_SPIN(%o2, %o3, 1b); /* spin with backoff, then branch back to retry at 1 */ \
67 ENDPROC(atomic64_##op); \
/*
 * ATOMIC64_OP_RETURN(op): 64-bit counterpart of ATOMIC_OP_RETURN —
 * returns the resulting value and performs memory barriers (see file
 * header); uses casx and the 64-bit %xcc condition codes.
 * NOTE(review): interior lines of this macro are elided in this view;
 * comments annotate only the lines shown.
 */
69 #define ATOMIC64_OP_RETURN(op) \
70 ENTRY(atomic64_##op##_return) /* %o0 = increment, %o1 = atomic_ptr */ \
74 casx [%o1], %g1, %g7; /* 64-bit: store new value iff [%o1] still equals %g1 */ \
76 bne,pn %xcc, BACKOFF_LABEL(2f, 1b); /* casx lost the race: retry, possibly via backoff path */ \
80 2: BACKOFF_SPIN(%o2, %o3, 1b); /* spin with backoff, then branch back to retry at 1 */ \
81 ENDPROC(atomic64_##op##_return);
/* Instantiate both 64-bit variants (void and value-returning) of an op
 * with a single macro, then retire the value-returning helper.
 * NOTE(review): the ATOMIC64_OPS(...) invocations and any companion
 * #undefs are elided in this view of the file.
 */
83 #define ATOMIC64_OPS(op) ATOMIC64_OP(op) ATOMIC64_OP_RETURN(op)
89 #undef ATOMIC64_OP_RETURN
/*
 * atomic64_dec_if_positive: hand-written (non-macro) 64-bit routine
 * following the same retry-with-backoff pattern as the macros above.
 * NOTE(review): nearly all of this routine's body is elided in this
 * view — only the retry branch and backoff spin are visible; the
 * compare that feeds the bne, and the decrement/positivity logic,
 * cannot be annotated from here.
 */
92 ENTRY(atomic64_dec_if_positive) /* %o0 = atomic_ptr */
99 bne,pn %xcc, BACKOFF_LABEL(2f, 1b) /* update did not land: retry, possibly via backoff path */
103 2: BACKOFF_SPIN(%o2, %o3, 1b) /* spin with backoff, then branch back to retry at 1 */
104 ENDPROC(atomic64_dec_if_positive)