1 /* atomic.S: These things are too big to do inline.
6 #include <linux/linkage.h>
8 #include <asm/backoff.h>
12 /* Two versions of the atomic routines, one that
13 * does not return a value and does not perform
14 * memory barriers, and a second which returns
15 * a value and does the barriers.
/* Emit atomic_<op>(): 32-bit read-modify-write via a cas loop.
 * No return value and no memory barriers (see file header comment).
 * Interior lines (load / op / compare) not visible in this view.
 */
18 #define ATOMIC_OP(op) \
19 ENTRY(atomic_##op) /* %o0 = increment, %o1 = atomic_ptr */ \
23 cas [%o1], %g1, %g7; /* install %g7 iff *%o1 still equals %g1 */ \
25 bne,pn %icc, BACKOFF_LABEL(2f, 1b); /* cas lost the race: retry, via backoff if enabled */ \
29 2: BACKOFF_SPIN(%o2, %o3, 1b); /* contention path: spin with backoff, then retry at 1b */ \
30 ENDPROC(atomic_##op); \
/* Emit atomic_<op>_return(): like ATOMIC_OP but returns the new value
 * and performs memory barriers (see file header comment).
 * Interior lines (load / op / compare / retl) not visible in this view.
 */
32 #define ATOMIC_OP_RETURN(op) \
33 ENTRY(atomic_##op##_return) /* %o0 = increment, %o1 = atomic_ptr */ \
37 cas [%o1], %g1, %g7; /* install %g7 iff *%o1 still equals %g1 */ \
39 bne,pn %icc, BACKOFF_LABEL(2f, 1b); /* cas lost the race: retry, via backoff if enabled */ \
43 2: BACKOFF_SPIN(%o2, %o3, 1b); /* contention path: spin with backoff, then retry at 1b */ \
44 ENDPROC(atomic_##op##_return);
/* Instantiate both the plain and _return variants for one operation. */
46 #define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op)
/* Generator macros are single-use; drop them once instantiation is done. */
55 #undef ATOMIC_OP_RETURN
/* Emit atomic64_<op>(): 64-bit read-modify-write via a casx loop.
 * 64-bit twin of ATOMIC_OP: casx instead of cas, %xcc instead of %icc.
 * No return value and no memory barriers (see file header comment).
 * Interior lines (load / op / compare) not visible in this view.
 */
58 #define ATOMIC64_OP(op) \
59 ENTRY(atomic64_##op) /* %o0 = increment, %o1 = atomic_ptr */ \
63 casx [%o1], %g1, %g7; /* install %g7 iff *%o1 still equals %g1 (64-bit) */ \
65 bne,pn %xcc, BACKOFF_LABEL(2f, 1b); /* casx lost the race: retry, via backoff if enabled */ \
69 2: BACKOFF_SPIN(%o2, %o3, 1b); /* contention path: spin with backoff, then retry at 1b */ \
70 ENDPROC(atomic64_##op); \
/* Emit atomic64_<op>_return(): like ATOMIC64_OP but returns the new value
 * and performs memory barriers (see file header comment).
 * Interior lines (load / op / compare / retl) not visible in this view.
 */
72 #define ATOMIC64_OP_RETURN(op) \
73 ENTRY(atomic64_##op##_return) /* %o0 = increment, %o1 = atomic_ptr */ \
77 casx [%o1], %g1, %g7; /* install %g7 iff *%o1 still equals %g1 (64-bit) */ \
79 bne,pn %xcc, BACKOFF_LABEL(2f, 1b); /* casx lost the race: retry, via backoff if enabled */ \
83 2: BACKOFF_SPIN(%o2, %o3, 1b); /* contention path: spin with backoff, then retry at 1b */ \
84 ENDPROC(atomic64_##op##_return);
/* Instantiate both the plain and _return 64-bit variants for one operation. */
86 #define ATOMIC64_OPS(op) ATOMIC64_OP(op) ATOMIC64_OP_RETURN(op)
/* Generator macros are single-use; drop them once instantiation is done. */
95 #undef ATOMIC64_OP_RETURN
/* atomic64_dec_if_positive - open-coded casx loop (not macro-generated).
 * NOTE(review): arithmetic/compare lines are outside this view; presumably
 * decrements *%o0 only if the result would not go negative and returns the
 * decremented value, per the generic atomic64 API contract -- confirm
 * against the full source.
 */
98 ENTRY(atomic64_dec_if_positive) /* %o0 = atomic_ptr */
105 bne,pn %xcc, BACKOFF_LABEL(2f, 1b) /* casx lost the race: retry, via backoff if enabled */
109 2: BACKOFF_SPIN(%o2, %o3, 1b) /* contention path: spin with backoff, then retry at 1b */
110 ENDPROC(atomic64_dec_if_positive)