6 #include <linux/futex.h>
8 #include <asm/uaccess.h>
/*
 * Barrier emitted around the LL/SC futex sequence: on SMP a MIPS
 * "sync" is needed to order the atomic op against other CPUs; the
 * empty variant is the UP expansion.
 * NOTE(review): the two definitions are clearly alternatives of an
 * #ifdef CONFIG_SMP / #else pair whose directives are not visible in
 * this excerpt — confirm against the full header.
 */
11 #define __FUTEX_SMP_SYNC " sync \n"
13 #define __FUTEX_SMP_SYNC
/*
 * __futex_atomic_op(insn, ret, oldval, uaddr, oparg)
 *
 * LL/SC read-modify-write on the user-space word at @uaddr:
 * the "ll" at label 1 loads the old value into %1 (oldval), @insn
 * computes the new value into $1 from %1 and @oparg (%z4), and the
 * store-conditional/retry sequence completes the update.
 * "ret" (%0, pre-seeded with 0 via the "0"(0) input) stays 0 on
 * success; a faulting user access is redirected through the
 * __ex_table entries (1b/2b -> 4b) into the .fixup stub, which
 * yields -EFAULT (the "i"(-EFAULT) input).
 *
 * NOTE(review): the macro body is incomplete in this excerpt — the
 * "2:" store-conditional, the branch-retry, the "4:" fixup stub and
 * the __FUTEX_SMP_SYNC uses are among the lines not shown; the code
 * below is kept byte-identical to what is visible.
 */
16 #define __futex_atomic_op(insn, ret, oldval, uaddr, oparg) \
18 __asm__ __volatile__( \
22 "1: ll %1, (%3) # __futex_atomic_op1 \n" \
32 " .section .fixup,\"ax\" \n" \
36 " .section __ex_table,\"a\" \n" \
37 " "__UA_ADDR "\t1b, 4b \n" \
38 " "__UA_ADDR "\t2b, 4b \n" \
40 : "=r" (ret), "=r" (oldval) \
41 : "0" (0), "r" (uaddr), "Jr" (oparg), "i" (-EFAULT)); \
/*
 * futex_atomic_op_inuser() - decode and execute a FUTEX_WAKE_OP
 * operation on the user futex word at @uaddr, then compare the
 * fetched old value against the encoded comparison argument.
 *
 * encoded_op layout (as unpacked below): op in bits 28-31 (low 3
 * bits used), cmp in bits 24-27, oparg in bits 12-23 and cmparg in
 * bits 0-11, both sign-extended via the shift-left/shift-right
 * pairs.  NOTE(review): left-shifting a negative signed int is
 * formally UB in standard C; this is long-standing kernel idiom
 * relying on gcc's defined behavior — do not "fix" casually.
 *
 * Returns the boolean result of the cmp on success, or a negative
 * errno: -EFAULT for an inaccessible user word, -ENOSYS for an
 * unrecognized op or cmp code.
 *
 * NOTE(review): this excerpt omits the return-type line, braces,
 * switch headers and most case labels; the visible code lines are
 * kept byte-identical.
 */
45 futex_atomic_op_inuser (int encoded_op, int __user *uaddr)
/* Unpack operation, comparison, and their sign-extended arguments. */
47 int op = (encoded_op >> 28) & 7;
48 int cmp = (encoded_op >> 24) & 15;
49 int oparg = (encoded_op << 8) >> 20;
50 int cmparg = (encoded_op << 20) >> 20;
/* OPARG_SHIFT flag: oparg is a shift count, i.e. operate with 1 << oparg. */
52 if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
/* The whole 32-bit word must be writable user memory. */
55 if (! access_ok (VERIFY_WRITE, uaddr, sizeof(int)))
/* SET: new = oparg ("move" ignores the old value in %1). */
62 __futex_atomic_op("move $1, %z4", ret, oldval, uaddr, oparg);
/* ADD: new = old + oparg. */
66 __futex_atomic_op("addu $1, %1, %z4",
67 ret, oldval, uaddr, oparg);
/* OR: new = old | oparg. */
70 __futex_atomic_op("or $1, %1, %z4",
71 ret, oldval, uaddr, oparg);
/* ANDN-style: new = old & ~oparg — the complement is done in C
 * because MIPS "and" has no immediate-complement form. */
74 __futex_atomic_op("and $1, %1, %z4",
75 ret, oldval, uaddr, ~oparg);
/* XOR: new = old ^ oparg. */
78 __futex_atomic_op("xor $1, %1, %z4",
79 ret, oldval, uaddr, oparg);
/* On success, replace ret with the requested old-value comparison. */
89 case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break;
90 case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break;
91 case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break;
92 case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break;
93 case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break;
94 case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break;
95 default: ret = -ENOSYS;
102 futex_atomic_cmpxchg_inatomic(int __user *uaddr, int oldval, int newval)