#ifndef _ARCH_MIPS_LOCAL_H
#define _ARCH_MIPS_LOCAL_H

#include <linux/percpu.h>
#include <linux/bitops.h>
#include <asm/atomic.h>
#include <asm/war.h>

typedef struct
{
	atomic_long_t a;
} local_t;

#define LOCAL_INIT(i)	{ ATOMIC_LONG_INIT(i) }

#define local_read(l)	atomic_long_read(&(l)->a)
#define local_set(l,i)	atomic_long_set(&(l)->a, (i))

#define local_add(i,l)	atomic_long_add((i),(&(l)->a))
#define local_sub(i,l)	atomic_long_sub((i),(&(l)->a))
#define local_inc(l)	atomic_long_inc(&(l)->a)
#define local_dec(l)	atomic_long_dec(&(l)->a)

/*
 * Same as above, but return the result value
 */
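/*
 * Three implementations are selected at run time: an LL/SC loop using
 * the branch-likely beqzl for CPUs that need the R10000 LL/SC
 * workaround, a plain LL/SC loop using beqz, and a fallback that
 * disables interrupts on CPUs without LL/SC.
 */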
static __inline__ long local_add_return(long i, local_t * l)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:"	__LL	"%1, %2		# local_add_return	\n"
		"	addu	%0, %1, %3				\n"
			__SC	"%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:"	__LL	"%1, %2		# local_add_return	\n"
		"	addu	%0, %1, %3				\n"
			__SC	"%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = l->a.counter;
		result += i;
		l->a.counter = result;
		local_irq_restore(flags);
	}

	return result;
}

static __inline__ long local_sub_return(long i, local_t * l)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:"	__LL	"%1, %2		# local_sub_return	\n"
		"	subu	%0, %1, %3				\n"
			__SC	"%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:"	__LL	"%1, %2		# local_sub_return	\n"
		"	subu	%0, %1, %3				\n"
			__SC	"%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = l->a.counter;
		result -= i;
		l->a.counter = result;
		local_irq_restore(flags);
	}

	return result;
}

/*
 * local_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @l: pointer of type local_t
 *
 * Atomically test @l and subtract @i if @l is greater than or equal to @i.
 * The function returns the old value of @l minus @i.
 */
static __inline__ long local_sub_if_positive(long i, local_t * l)
{
	long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:"	__LL	"%1, %2		# local_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
			__SC	"%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqzl	%0, 1b					\n"
		"	 dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:"	__LL	"%1, %2		# local_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
			__SC	"%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqz	%0, 1b					\n"
		"	 dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = l->a.counter;
		result -= i;
		if (result >= 0)
			l->a.counter = result;
		local_irq_restore(flags);
	}

	return result;
}
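
/*
 * Sketch of intended use (names hypothetical): consume @n tokens from a
 * per-CPU bucket, failing when fewer than @n remain.  A negative return
 * means the counter was left unchanged.
 *
 *	if (local_sub_if_positive(n, &bucket) < 0)
 *		return -EBUSY;
 */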

#define local_cmpxchg(l, o, n) \
	((long)cmpxchg_local(&((l)->a.counter), (o), (n)))
#define local_xchg(l, n) (xchg_local(&((l)->a.counter),(n)))
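
/*
 * local_cmpxchg() returns the previous counter value; @n is stored only
 * when that previous value equals @o.  local_add_unless() below is the
 * usual compare-and-swap retry loop built on top of it.
 */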

/**
 * local_add_unless - add unless the number is a given value
 * @l: pointer of type local_t
 * @a: the amount to add to l...
 * @u: ...unless l is equal to u.
 *
 * Atomically adds @a to @l, so long as it was not @u.
 * Returns non-zero if @l was not @u, and zero otherwise.
 */
#define local_add_unless(l, a, u)				\
({								\
	long c, old;						\
	c = local_read(l);					\
	while (c != (u) && (old = local_cmpxchg((l), c, c + (a))) != c) \
		c = old;					\
	c != (u);						\
})
#define local_inc_not_zero(l) local_add_unless((l), 1, 0)
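
/*
 * Example (illustrative, field name hypothetical): take a reference only
 * while the object is still live, i.e. its count has not dropped to zero.
 *
 *	if (!local_inc_not_zero(&obj->count))
 *		return NULL;
 */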

#define local_dec_return(l) local_sub_return(1,(l))
#define local_inc_return(l) local_add_return(1,(l))

/*
 * local_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @l: pointer of type local_t
 *
 * Atomically subtracts @i from @l and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define local_sub_and_test(i,l) (local_sub_return((i), (l)) == 0)

/*
 * local_inc_and_test - increment and test
 * @l: pointer of type local_t
 *
 * Atomically increments @l by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define local_inc_and_test(l) (local_inc_return(l) == 0)

/*
 * local_dec_and_test - decrement by 1 and test
 * @l: pointer of type local_t
 *
 * Atomically decrements @l by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define local_dec_and_test(l) (local_sub_return(1, (l)) == 0)

/*
 * local_dec_if_positive - decrement by 1 if old value positive
 * @l: pointer of type local_t
 */
#define local_dec_if_positive(l)	local_sub_if_positive(1, l)

/*
 * local_add_negative - add and test if negative
 * @l: pointer of type local_t
 * @i: integer value to add
 *
 * Atomically adds @i to @l and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define local_add_negative(i,l) (local_add_return(i, (l)) < 0)
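
/*
 * Example (illustrative, names hypothetical): charge @nbytes against a
 * per-CPU quota and refill once it is exhausted.
 *
 *	if (local_sub_and_test(nbytes, &quota))
 *		refill_quota(&quota);
 */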

/* Use these for per-cpu local_t variables: on some archs they are
 * much more efficient than these naive implementations.  Note that the
 * cpu_local_* accessors below take a per-cpu variable, not an address.
 */

#define __local_inc(l)		((l)->a.counter++)
#define __local_dec(l)		((l)->a.counter--)
#define __local_add(i,l)	((l)->a.counter+=(i))
#define __local_sub(i,l)	((l)->a.counter-=(i))

/* Need to disable preemption for the cpu local counters, otherwise we
   could still access a variable of a previous CPU in a non-atomic way.
   cpu_local_wrap_v() evaluates to the value of the wrapped expression;
   cpu_local_wrap() evaluates a statement for its side effect only. */
#define cpu_local_wrap_v(l)		\
	({ long res__;			\
	   preempt_disable();		\
	   res__ = (l);			\
	   preempt_enable();		\
	   res__; })
#define cpu_local_wrap(l)		\
	({ preempt_disable();		\
	   l;				\
	   preempt_enable(); })

#define cpu_local_read(l)    cpu_local_wrap_v(local_read(&__get_cpu_var(l)))
#define cpu_local_set(l, i)  cpu_local_wrap(local_set(&__get_cpu_var(l), (i)))
#define cpu_local_inc(l)     cpu_local_wrap(local_inc(&__get_cpu_var(l)))
#define cpu_local_dec(l)     cpu_local_wrap(local_dec(&__get_cpu_var(l)))
#define cpu_local_add(i, l)  cpu_local_wrap(local_add((i), &__get_cpu_var(l)))
#define cpu_local_sub(i, l)  cpu_local_wrap(local_sub((i), &__get_cpu_var(l)))

#define __cpu_local_inc(l)	cpu_local_inc(l)
#define __cpu_local_dec(l)	cpu_local_dec(l)
#define __cpu_local_add(i, l)	cpu_local_add((i), (l))
#define __cpu_local_sub(i, l)	cpu_local_sub((i), (l))
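
/*
 * Putting it together (illustrative): a per-CPU event counter.
 *
 *	static DEFINE_PER_CPU(local_t, nr_events) = LOCAL_INIT(0);
 *
 *	cpu_local_inc(nr_events);
 *	printk("events on this cpu: %ld\n", cpu_local_read(nr_events));
 */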

#endif /* _ARCH_MIPS_LOCAL_H */