1#ifndef _ASM_M32R_ATOMIC_H
2#define _ASM_M32R_ATOMIC_H
3
4/*
5 *  linux/include/asm-m32r/atomic.h
6 *
7 *  M32R version:
8 *    Copyright (C) 2001, 2002  Hitoshi Yamamoto
9 *    Copyright (C) 2004  Hirokazu Takata <takata at linux-m32r.org>
10 */
11
12#include <asm/assembler.h>
13#include <asm/system.h>
14
/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 */

/*
 * Make sure gcc doesn't try to be clever and move things around
 * on us. We need to use _exactly_ the address the user gave us,
 * not some alias that contains the same information.
 */
typedef struct { volatile int counter; } atomic_t;

/* Static initializer, e.g.: static atomic_t refs = ATOMIC_INIT(0); */
#define ATOMIC_INIT(i)	{ (i) }
28
/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.  A plain load suffices here; the
 * volatile qualifier on the counter forces gcc to emit a real memory
 * access rather than reuse a cached value.
 */
#define atomic_read(v)	((v)->counter)
36
/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.  A plain store suffices; the
 * volatile qualifier on the counter keeps gcc from eliding or caching it.
 */
#define atomic_set(v,i)	(((v)->counter) = (i))
45
/**
 * atomic_add_return - add integer to atomic variable and return it
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and return (@i + @v).
 */
static __inline__ int atomic_add_return(int i, atomic_t *v)
{
	unsigned long flags;
	int result;

	/*
	 * Atomicity is provided by disabling local interrupts around a
	 * LOCK / modify / UNLOCK sequence on the counter word.
	 */
	local_irq_save(flags);
	__asm__ __volatile__ (
		"# atomic_add_return		\n\t"
		DCACHE_CLEAR("%0", "r4", "%1")	/* M32700 TS1 chip bug workaround (see asm/assembler.h) */
		M32R_LOCK" %0, @%1;		\n\t"	/* result = v->counter (locked load) */
		"add	%0, %2;			\n\t"	/* result += i */
		M32R_UNLOCK" %0, @%1;		\n\t"	/* v->counter = result (unlocking store) */
		: "=&r" (result)			/* early-clobber: written before inputs are dead */
		: "r" (&v->counter), "r" (i)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		, "r4"				/* scratch register used by DCACHE_CLEAR */
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);
	local_irq_restore(flags);

	return result;
}
76
/**
 * atomic_sub_return - subtract integer from atomic variable and return it
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and return (@v - @i).
 */
static __inline__ int atomic_sub_return(int i, atomic_t *v)
{
	unsigned long flags;
	int result;

	/* Same irq-disable + LOCK/UNLOCK scheme as atomic_add_return(). */
	local_irq_save(flags);
	__asm__ __volatile__ (
		"# atomic_sub_return		\n\t"
		DCACHE_CLEAR("%0", "r4", "%1")	/* M32700 TS1 chip bug workaround */
		M32R_LOCK" %0, @%1;		\n\t"	/* result = v->counter (locked load) */
		"sub	%0, %2;			\n\t"	/* result -= i */
		M32R_UNLOCK" %0, @%1;		\n\t"	/* v->counter = result (unlocking store) */
		: "=&r" (result)
		: "r" (&v->counter), "r" (i)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		, "r4"				/* scratch register used by DCACHE_CLEAR */
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);
	local_irq_restore(flags);

	return result;
}
107
/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 * Thin wrapper over atomic_add_return(); the result is discarded.
 */
#define atomic_add(i,v) ((void) atomic_add_return((i), (v)))

/**
 * atomic_sub - subtract the atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 * Thin wrapper over atomic_sub_return(); the result is discarded.
 */
#define atomic_sub(i,v) ((void) atomic_sub_return((i), (v)))

/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)
136
/**
 * atomic_inc_return - increment atomic variable and return it
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1 and returns the result.
 */
static __inline__ int atomic_inc_return(atomic_t *v)
{
	unsigned long flags;
	int result;

	/* Same irq-disable + LOCK/UNLOCK scheme as atomic_add_return(),
	 * but uses addi with an immediate instead of a register operand. */
	local_irq_save(flags);
	__asm__ __volatile__ (
		"# atomic_inc_return		\n\t"
		DCACHE_CLEAR("%0", "r4", "%1")	/* M32700 TS1 chip bug workaround */
		M32R_LOCK" %0, @%1;		\n\t"	/* result = v->counter (locked load) */
		"addi	%0, #1;			\n\t"	/* result += 1 */
		M32R_UNLOCK" %0, @%1;		\n\t"	/* v->counter = result (unlocking store) */
		: "=&r" (result)
		: "r" (&v->counter)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		, "r4"				/* scratch register used by DCACHE_CLEAR */
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);
	local_irq_restore(flags);

	return result;
}
166
/**
 * atomic_dec_return - decrement atomic variable and return it
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and returns the result.
 */
static __inline__ int atomic_dec_return(atomic_t *v)
{
	unsigned long flags;
	int result;

	/* Decrement via addi with a negative immediate (no subi insn needed). */
	local_irq_save(flags);
	__asm__ __volatile__ (
		"# atomic_dec_return		\n\t"
		DCACHE_CLEAR("%0", "r4", "%1")	/* M32700 TS1 chip bug workaround */
		M32R_LOCK" %0, @%1;		\n\t"	/* result = v->counter (locked load) */
		"addi	%0, #-1;		\n\t"	/* result -= 1 */
		M32R_UNLOCK" %0, @%1;		\n\t"	/* v->counter = result (unlocking store) */
		: "=&r" (result)
		: "r" (&v->counter)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		, "r4"				/* scratch register used by DCACHE_CLEAR */
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);
	local_irq_restore(flags);

	return result;
}
196
/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) ((void)atomic_inc_return(v))

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) ((void)atomic_dec_return(v))

/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all
 * other cases.
 */
#define atomic_dec_and_test(v) (atomic_dec_return(v) == 0)

/**
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i,v) (atomic_add_return((i), (v)) < 0)

/*
 * Compare-and-exchange / exchange on the counter word, delegated to the
 * generic cmpxchg()/xchg() primitives (defined elsewhere, e.g. asm/system.h).
 */
#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
246
247/**
248 * atomic_add_unless - add unless the number is a given value
249 * @v: pointer of type atomic_t
250 * @a: the amount to add to v...
251 * @u: ...unless v is equal to u.
252 *
253 * Atomically adds @a to @v, so long as it was not @u.
254 * Returns non-zero if @v was not @u, and zero otherwise.
255 */
256static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
257{
258	int c, old;
259	c = atomic_read(v);
260	for (;;) {
261		if (unlikely(c == (u)))
262			break;
263		old = atomic_cmpxchg((v), c, c + (a));
264		if (likely(old == c))
265			break;
266		c = old;
267	}
268	return c != (u);
269}
270
/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, so long as @v was not zero.
 * Returns non-zero if the increment happened, and zero otherwise.
 */
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
272
/**
 * atomic_clear_mask - atomically clear bits in a counter
 * @mask: bits to clear
 * @addr: pointer of type atomic_t
 *
 * Atomically performs *addr &= ~mask, using the same irq-disable +
 * LOCK/UNLOCK scheme as the arithmetic primitives above.
 */
static __inline__ void atomic_clear_mask(unsigned long  mask, atomic_t *addr)
{
	unsigned long flags;
	unsigned long tmp;

	local_irq_save(flags);
	__asm__ __volatile__ (
		"# atomic_clear_mask		\n\t"
		DCACHE_CLEAR("%0", "r5", "%1")	/* M32700 TS1 workaround; note scratch is r5 here */
		M32R_LOCK" %0, @%1;		\n\t"	/* tmp = *addr (locked load) */
		"and	%0, %2;			\n\t"	/* tmp &= ~mask */
		M32R_UNLOCK" %0, @%1;		\n\t"	/* *addr = tmp (unlocking store) */
		: "=&r" (tmp)
		: "r" (addr), "r" (~mask)	/* mask is inverted here, in C */
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		, "r5"				/* scratch register used by DCACHE_CLEAR */
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);
	local_irq_restore(flags);
}
294
/**
 * atomic_set_mask - atomically set bits in a counter
 * @mask: bits to set
 * @addr: pointer of type atomic_t
 *
 * Atomically performs *addr |= mask, using the same irq-disable +
 * LOCK/UNLOCK scheme as the arithmetic primitives above.
 */
static __inline__ void atomic_set_mask(unsigned long  mask, atomic_t *addr)
{
	unsigned long flags;
	unsigned long tmp;

	local_irq_save(flags);
	__asm__ __volatile__ (
		"# atomic_set_mask		\n\t"
		DCACHE_CLEAR("%0", "r5", "%1")	/* M32700 TS1 workaround; note scratch is r5 here */
		M32R_LOCK" %0, @%1;		\n\t"	/* tmp = *addr (locked load) */
		"or	%0, %2;			\n\t"	/* tmp |= mask */
		M32R_UNLOCK" %0, @%1;		\n\t"	/* *addr = tmp (unlocking store) */
		: "=&r" (tmp)
		: "r" (addr), "r" (mask)
		: "memory"
#ifdef CONFIG_CHIP_M32700_TS1
		, "r5"				/* scratch register used by DCACHE_CLEAR */
#endif	/* CONFIG_CHIP_M32700_TS1 */
	);
	local_irq_restore(flags);
}
316
/* Atomic operations are already serializing on m32r, so a compiler
 * barrier() is all these hooks need to provide. */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()
322
323#include <asm-generic/atomic.h>
324#endif	/* _ASM_M32R_ATOMIC_H */
325