#ifndef _ASM_ARM_FUTEX_H
#define _ASM_ARM_FUTEX_H
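/*
 * Kernel-side atomic operations on 32-bit user-space futex words for
 * ARM.  Faulting user accesses are caught via the exception table and
 * reported as -EFAULT rather than oopsing.
 */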

#ifdef __KERNEL__

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <asm/errno.h>

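/*
 * Exception-table boilerplate shared by the inline assembly below.
 * Labels 1 and 2 in the including asm body mark the user accesses
 * that may fault; each __ex_table entry pairs a potentially faulting
 * address with its fixup.  The fixup at label 4 loads -EFAULT (passed
 * in err_reg) into the result register and branches back to label 3,
 * the normal exit path.
 */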
#define __futex_atomic_ex_table(err_reg)			\
	"3:\n"							\
	"	.pushsection __ex_table,\"a\"\n"		\
	"	.align	3\n"					\
	"	.long	1b, 4f, 2b, 4f\n"			\
	"	.popsection\n"					\
	"	.pushsection .text.fixup,\"ax\"\n"		\
	"	.align	2\n"					\
	"4:	mov	%0, " err_reg "\n"			\
	"	b	3b\n"					\
	"	.popsection"

#ifdef CONFIG_SMP

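/*
 * With SMP the read-modify-write must be genuinely atomic, so the
 * exclusive-access pair is used: ldrex loads the old value and arms
 * the exclusive monitor, and strex stores the result only if nothing
 * else has touched the location since, writing 0 to %2 on success.
 * The teq/bne loop retries until the store succeeds.  prefetchw()
 * pulls the cache line in for writing up front; prefetches cannot
 * fault.
 */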
#define __futex_atomic_op(insn, ret, oldval, tmp, uaddr, oparg)	\
	smp_mb();						\
	prefetchw(uaddr);					\
	__asm__ __volatile__(					\
	"1:	ldrex	%1, [%3]\n"				\
	"	" insn "\n"					\
	"2:	strex	%2, %0, [%3]\n"				\
	"	teq	%2, #0\n"				\
	"	bne	1b\n"					\
	"	mov	%0, #0\n"				\
	__futex_atomic_ex_table("%5")				\
	: "=&r" (ret), "=&r" (oldval), "=&r" (tmp)		\
	: "r" (uaddr), "r" (oparg), "Ir" (-EFAULT)		\
	: "cc", "memory")

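/*
 * Atomically compare the futex word at uaddr with oldval and, if they
 * match, replace it with newval.  The value actually read is returned
 * through *uval; a failed comparison does no store and is detected by
 * the caller since *uval then differs from oldval.  Returns 0 on
 * success and -EFAULT if the user address faults.  The explicit
 * "ite eq" keeps the "2:" label pointing at the strexeq itself rather
 * than at an assembler-inserted IT instruction, so the exception
 * table entry matches the faulting address on Thumb-2.
 */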
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	int ret;
	u32 val;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

	smp_mb();
	/* Prefetching cannot fault */
	prefetchw(uaddr);
	__asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
	"1:	ldrex	%1, [%4]\n"
	"	teq	%1, %2\n"
	"	ite	eq	@ explicit IT needed for the 2b label\n"
	"2:	strexeq	%0, %3, [%4]\n"
	"	movne	%0, #0\n"
	"	teq	%0, #0\n"
	"	bne	1b\n"
	__futex_atomic_ex_table("%5")
	: "=&r" (ret), "=&r" (val)
	: "r" (oldval), "r" (newval), "r" (uaddr), "Ir" (-EFAULT)
	: "cc", "memory");
	smp_mb();

	*uval = val;
	return ret;
}

#else /* !SMP, we can work around lack of atomic ops by disabling preemption */

#include <linux/preempt.h>
#include <asm/domain.h>

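/*
 * With only one CPU, plain loads and stores are sufficient: the
 * caller runs with pagefaults (and hence preemption) disabled, so
 * nothing can intervene between the load and the store.  TUSER()
 * expands to the unprivileged ldrt/strt forms when CPU domains are in
 * use, making the access subject to user-mode permission checks.
 */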
#define __futex_atomic_op(insn, ret, oldval, tmp, uaddr, oparg)	\
	__asm__ __volatile__(					\
	"1:	" TUSER(ldr) "	%1, [%3]\n"			\
	"	" insn "\n"					\
	"2:	" TUSER(str) "	%0, [%3]\n"			\
	"	mov	%0, #0\n"				\
	__futex_atomic_ex_table("%5")				\
	: "=&r" (ret), "=&r" (oldval), "=&r" (tmp)		\
	: "r" (uaddr), "r" (oparg), "Ir" (-EFAULT)		\
	: "cc", "memory")

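/*
 * UP variant of the compare-and-exchange above: same contract, but no
 * barriers or exclusive monitors are needed.  ret is preinitialised
 * to 0 and only overwritten by the -EFAULT fixup path, hence the
 * "+r" constraint.
 */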
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	int ret = 0;
	u32 val;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

	__asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
	"1:	" TUSER(ldr) "	%1, [%4]\n"
	"	teq	%1, %2\n"
	"	it	eq	@ explicit IT needed for the 2b label\n"
	"2:	" TUSER(streq) "	%3, [%4]\n"
	__futex_atomic_ex_table("%5")
	: "+r" (ret), "=&r" (val)
	: "r" (oldval), "r" (newval), "r" (uaddr), "Ir" (-EFAULT)
	: "cc", "memory");

	*uval = val;
	return ret;
}

#endif /* !SMP */

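/*
 * Execute an encoded futex operation: apply the arithmetic op to the
 * user word, then evaluate the comparison against the old value.  The
 * encoding, built by FUTEX_OP() in the futex UAPI header, packs the
 * op into bits 31-28, the comparison into bits 27-24, and two 12-bit
 * signed arguments into bits 23-12 and 11-0; the shift pairs below
 * sign-extend them.  For example, FUTEX_OP(FUTEX_OP_ADD, 1,
 * FUTEX_OP_CMP_EQ, 0) means "atomically add 1 and return whether the
 * old value was 0".
 */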
static inline int
futex_atomic_op_inuser (int encoded_op, u32 __user *uaddr)
{
	int op = (encoded_op >> 28) & 7;
	int cmp = (encoded_op >> 24) & 15;
	int oparg = (encoded_op << 8) >> 20;
	int cmparg = (encoded_op << 20) >> 20;
	int oldval = 0, ret, tmp;

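	/* Bit 31 set means oparg is a shift count rather than a value. */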
	if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
		oparg = 1 << oparg;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

	pagefault_disable();	/* implies preempt_disable() */

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("mov	%0, %4", ret, oldval, tmp, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("add	%0, %1, %4", ret, oldval, tmp, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("orr	%0, %1, %4", ret, oldval, tmp, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op("and	%0, %1, %4", ret, oldval, tmp, uaddr, ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("eor	%0, %1, %4", ret, oldval, tmp, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	pagefault_enable();	/* subsumes preempt_enable() */

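	/* On success, return the comparison of the pre-op value. */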
	if (!ret) {
		switch (cmp) {
		case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break;
		case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break;
		case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break;
		case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break;
		case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break;
		case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break;
		default: ret = -ENOSYS;
		}
	}
	return ret;
}

#endif /* __KERNEL__ */
#endif /* _ASM_ARM_FUTEX_H */