1/*
2 * This file is subject to the terms and conditions of the GNU General Public
3 * License.  See the file "COPYING" in the main directory of this archive
4 * for more details.
5 *
6 * Copyright (c) 2006  Ralf Baechle (ralf@linux-mips.org)
7 */
8#ifndef _ASM_FUTEX_H
9#define _ASM_FUTEX_H
10
11#ifdef __KERNEL__
12
13#include <linux/futex.h>
14#include <asm/barrier.h>
15#include <asm/errno.h>
16#include <asm/uaccess.h>
17#include <asm/war.h>
18
/*
 * Atomically perform "*uaddr = *uaddr <op> oparg" on a user-space word.
 * "insn" is an asm template computing the new value into register $1 from
 * the old value (%1) and the operand (%z5).  On success "oldval" holds the
 * value read and "ret" is 0; a faulting user access sets "ret" to -EFAULT
 * via the exception-table fixup.  CPUs without LL/SC get -ENOSYS.
 *
 * Two LL/SC variants: the branch-likely (beqzl) loop is the workaround
 * for the R10000 ll/sc errata; the plain beqz loop serves all other
 * LL/SC-capable CPUs.  __WEAK_ORDERING_MB orders the store on SMP.
 *
 * Bug fix: the fixup stub must jump forward to the exit label 3 after
 * loading -EFAULT ("j 3b"), not back to the sc at label 2 — jumping to
 * 2b would retry the store on the faulting address and loop forever.
 */
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)		\
{									\
	if (cpu_has_llsc && R10000_LLSC_WAR) {				\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	mips3				\n"	\
		"1:	ll	%1, %4	# __futex_atomic_op	\n"	\
		"	.set	mips0				\n"	\
		"	" insn	"				\n"	\
		"	.set	mips3				\n"	\
		"2:	sc	$1, %2				\n"	\
		"	beqzl	$1, 1b				\n"	\
		__WEAK_ORDERING_MB					\
		"3:						\n"	\
		"	.set	pop				\n"	\
		"	.set	mips0				\n"	\
		"	.section .fixup,\"ax\"			\n"	\
		"4:	li	%0, %6				\n"	\
		"	j	3b				\n"	\
		"	.previous				\n"	\
		"	.section __ex_table,\"a\"		\n"	\
		"	"__UA_ADDR "\t1b, 4b			\n"	\
		"	"__UA_ADDR "\t2b, 4b			\n"	\
		"	.previous				\n"	\
		: "=r" (ret), "=&r" (oldval), "=R" (*uaddr)		\
		: "0" (0), "R" (*uaddr), "Jr" (oparg), "i" (-EFAULT)	\
		: "memory");						\
	} else if (cpu_has_llsc) {					\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	mips3				\n"	\
		"1:	ll	%1, %4	# __futex_atomic_op	\n"	\
		"	.set	mips0				\n"	\
		"	" insn	"				\n"	\
		"	.set	mips3				\n"	\
		"2:	sc	$1, %2				\n"	\
		"	beqz	$1, 1b				\n"	\
		__WEAK_ORDERING_MB					\
		"3:						\n"	\
		"	.set	pop				\n"	\
		"	.set	mips0				\n"	\
		"	.section .fixup,\"ax\"			\n"	\
		"4:	li	%0, %6				\n"	\
		"	j	3b				\n"	\
		"	.previous				\n"	\
		"	.section __ex_table,\"a\"		\n"	\
		"	"__UA_ADDR "\t1b, 4b			\n"	\
		"	"__UA_ADDR "\t2b, 4b			\n"	\
		"	.previous				\n"	\
		: "=r" (ret), "=&r" (oldval), "=R" (*uaddr)		\
		: "0" (0), "R" (*uaddr), "Jr" (oparg), "i" (-EFAULT)	\
		: "memory");						\
	} else								\
		ret = -ENOSYS;						\
}
76
77static inline int
78futex_atomic_op_inuser (int encoded_op, int __user *uaddr)
79{
80	int op = (encoded_op >> 28) & 7;
81	int cmp = (encoded_op >> 24) & 15;
82	int oparg = (encoded_op << 8) >> 20;
83	int cmparg = (encoded_op << 20) >> 20;
84	int oldval = 0, ret;
85	if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
86		oparg = 1 << oparg;
87
88	if (! access_ok (VERIFY_WRITE, uaddr, sizeof(int)))
89		return -EFAULT;
90
91	pagefault_disable();
92
93	switch (op) {
94	case FUTEX_OP_SET:
95		__futex_atomic_op("move	$1, %z5", ret, oldval, uaddr, oparg);
96		break;
97
98	case FUTEX_OP_ADD:
99		__futex_atomic_op("addu	$1, %1, %z5",
100		                  ret, oldval, uaddr, oparg);
101		break;
102	case FUTEX_OP_OR:
103		__futex_atomic_op("or	$1, %1, %z5",
104		                  ret, oldval, uaddr, oparg);
105		break;
106	case FUTEX_OP_ANDN:
107		__futex_atomic_op("and	$1, %1, %z5",
108		                  ret, oldval, uaddr, ~oparg);
109		break;
110	case FUTEX_OP_XOR:
111		__futex_atomic_op("xor	$1, %1, %z5",
112		                  ret, oldval, uaddr, oparg);
113		break;
114	default:
115		ret = -ENOSYS;
116	}
117
118	pagefault_enable();
119
120	if (!ret) {
121		switch (cmp) {
122		case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break;
123		case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break;
124		case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break;
125		case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break;
126		case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break;
127		case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break;
128		default: ret = -ENOSYS;
129		}
130	}
131	return ret;
132}
133
/*
 * Atomic compare-and-exchange on a user-space word: if *uaddr equals
 * oldval, store newval.  Returns the value originally read from *uaddr
 * (whether or not the store happened), -EFAULT on a faulting user
 * access, or -ENOSYS on CPUs without LL/SC.
 *
 * On a compare mismatch the "bne ... 3f" exits before the barrier; the
 * "move $1, %z4" sits in the branch delay slot and is harmless when the
 * branch is taken.  Faults at the ll (1:) or sc (2:) are redirected by
 * the __ex_table entries to the fixup at 4:, which loads -EFAULT and
 * exits via label 3.
 */
static inline int
futex_atomic_cmpxchg_inatomic(int __user *uaddr, int oldval, int newval)
{
	int retval;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(int)))
		return -EFAULT;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		/* branch-likely (beqzl) retry loop: R10000 ll/sc workaround */
		__asm__ __volatile__(
		"# futex_atomic_cmpxchg_inatomic			\n"
		"	.set	push					\n"
		"	.set	noat					\n"
		"	.set	mips3					\n"
		"1:	ll	%0, %2					\n"
		"	bne	%0, %z3, 3f				\n"
		"	.set	mips0					\n"
		"	move	$1, %z4					\n"
		"	.set	mips3					\n"
		"2:	sc	$1, %1					\n"
		"	beqzl	$1, 1b					\n"
		__WEAK_ORDERING_MB
		"3:							\n"
		"	.set	pop					\n"
		"	.section .fixup,\"ax\"				\n"
		"4:	li	%0, %5					\n"
		"	j	3b					\n"
		"	.previous					\n"
		"	.section __ex_table,\"a\"			\n"
		"	"__UA_ADDR "\t1b, 4b				\n"
		"	"__UA_ADDR "\t2b, 4b				\n"
		"	.previous					\n"
		: "=&r" (retval), "=R" (*uaddr)
		: "R" (*uaddr), "Jr" (oldval), "Jr" (newval), "i" (-EFAULT)
		: "memory");
	} else if (cpu_has_llsc) {
		/* plain beqz retry loop for all other LL/SC CPUs */
		__asm__ __volatile__(
		"# futex_atomic_cmpxchg_inatomic			\n"
		"	.set	push					\n"
		"	.set	noat					\n"
		"	.set	mips3					\n"
		"1:	ll	%0, %2					\n"
		"	bne	%0, %z3, 3f				\n"
		"	.set	mips0					\n"
		"	move	$1, %z4					\n"
		"	.set	mips3					\n"
		"2:	sc	$1, %1					\n"
		"	beqz	$1, 1b					\n"
		__WEAK_ORDERING_MB
		"3:							\n"
		"	.set	pop					\n"
		"	.section .fixup,\"ax\"				\n"
		"4:	li	%0, %5					\n"
		"	j	3b					\n"
		"	.previous					\n"
		"	.section __ex_table,\"a\"			\n"
		"	"__UA_ADDR "\t1b, 4b				\n"
		"	"__UA_ADDR "\t2b, 4b				\n"
		"	.previous					\n"
		: "=&r" (retval), "=R" (*uaddr)
		: "R" (*uaddr), "Jr" (oldval), "Jr" (newval), "i" (-EFAULT)
		: "memory");
	} else
		return -ENOSYS;

	return retval;
}
201
202#endif
203#endif
204