/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 1997, 2000 by Ralf Baechle
 */
#ifndef __ASM_ATOMIC_H
#define __ASM_ATOMIC_H

#include <linux/config.h>

typedef struct { volatile int counter; } atomic_t;

#ifdef __KERNEL__
#define ATOMIC_INIT(i)    { (i) }
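
/*
 * Usage sketch (illustrative only; the counter name is made up):
 *
 *	static atomic_t foo_count = ATOMIC_INIT(0);
 *
 *	atomic_inc(&foo_count);		... e.g. in open()
 *	atomic_dec(&foo_count);		... e.g. in release()
 */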

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.  Note that the guaranteed
 * useful range of an atomic_t is only 24 bits.
 */
#define atomic_read(v)	((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.  Note that the guaranteed
 * useful range of an atomic_t is only 24 bits.
 */
#define atomic_set(v,i)	((v)->counter = (i))
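
/*
 * Note that atomic_read()/atomic_set() only make the single access
 * atomic; a read-modify-write composed from them is not.  Sketch
 * (illustrative only):
 *
 *	atomic_t cnt = ATOMIC_INIT(5);
 *	int old = atomic_read(&cnt);
 *	atomic_set(&cnt, old + 1);	... racy: use atomic_add(1, &cnt)
 */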

#ifndef CONFIG_CPU_HAS_LLSC

#include <asm/system.h>

/*
 * The MIPS I implementation is only atomic with respect to
 * interrupts.  R3000-based multiprocessor machines are rare anyway ...
 */

/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.  Note that the guaranteed useful range
 * of an atomic_t is only 24 bits.
 */
extern __inline__ void atomic_add(int i, atomic_t * v)
{
	unsigned long flags;

	local_irq_save(flags);
	v->counter += i;
	local_irq_restore(flags);
}

/*
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.  Note that the guaranteed
 * useful range of an atomic_t is only 24 bits.
 */
extern __inline__ void atomic_sub(int i, atomic_t * v)
{
	unsigned long flags;

	local_irq_save(flags);
	v->counter -= i;
	local_irq_restore(flags);
}

extern __inline__ int atomic_add_return(int i, atomic_t * v)
{
	unsigned long flags;
	int temp;

	local_irq_save(flags);
	temp = v->counter;
	temp += i;
	v->counter = temp;
	local_irq_restore(flags);

	return temp;
}

extern __inline__ int atomic_sub_return(int i, atomic_t * v)
{
	unsigned long flags;
	int temp;

	local_irq_save(flags);
	temp = v->counter;
	temp -= i;
	v->counter = temp;
	local_irq_restore(flags);

	return temp;
}

#else

/*
 * ... while for MIPS II and better we can use the ll/sc instructions.
 * This implementation is SMP safe ...
 */
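
/*
 * In rough C pseudocode each ll/sc loop below behaves like the sketch
 * here; store_conditional() is a made-up name for what sc does in
 * hardware (fail if the word was written since the matching ll):
 *
 *	do {
 *		temp = v->counter;	... ll
 *		temp += i;
 *	} while (!store_conditional(&v->counter, temp));	... sc + beqz
 */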

/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.  Note that the guaranteed useful range
 * of an atomic_t is only 24 bits.
 */
extern __inline__ void atomic_add(int i, atomic_t * v)
{
	unsigned long temp;

	__asm__ __volatile__(
		"1:   ll      %0, %1      # atomic_add\n"
		"     addu    %0, %2                  \n"
		"     sc      %0, %1                  \n"
		"     beqz    %0, 1b                  \n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
}

/*
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.  Note that the guaranteed
 * useful range of an atomic_t is only 24 bits.
 */
extern __inline__ void atomic_sub(int i, atomic_t * v)
{
	unsigned long temp;

	__asm__ __volatile__(
		"1:   ll      %0, %1      # atomic_sub\n"
		"     subu    %0, %2                  \n"
		"     sc      %0, %1                  \n"
		"     beqz    %0, 1b                  \n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
}

/*
 * Same as above, but return the result value.  The instruction in the
 * beqz delay slot recomputes the result after a successful sc (which
 * clobbers %0 with the success flag), and the trailing sync provides
 * ordering on SMP.
 */
extern __inline__ int atomic_add_return(int i, atomic_t * v)
{
	unsigned long temp, result;

	__asm__ __volatile__(
		".set push               # atomic_add_return\n"
		".set noreorder                             \n"
		"1:   ll      %1, %2                        \n"
		"     addu    %0, %1, %3                    \n"
		"     sc      %0, %2                        \n"
		"     beqz    %0, 1b                        \n"
		"     addu    %0, %1, %3                    \n"
		"     sync                                  \n"
		".set pop                                   \n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");

	return result;
}

extern __inline__ int atomic_sub_return(int i, atomic_t * v)
{
	unsigned long temp, result;

	__asm__ __volatile__(
		".set push               # atomic_sub_return\n"
		".set noreorder                             \n"
		"1:   ll      %1, %2                        \n"
		"     subu    %0, %1, %3                    \n"
		"     sc      %0, %2                        \n"
		"     beqz    %0, 1b                        \n"
		"     subu    %0, %1, %3                    \n"
		"     sync                                  \n"
		".set pop                                   \n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");

	return result;
}
#endif

#define atomic_dec_return(v) atomic_sub_return(1,(v))
#define atomic_inc_return(v) atomic_add_return(1,(v))
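
/*
 * Example (illustrative): atomic_inc_return() gives each caller a
 * unique, monotonically increasing value without locking:
 *
 *	static atomic_t next_id = ATOMIC_INIT(0);
 *
 *	int id = atomic_inc_return(&next_id);
 */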

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.  Note that the guaranteed
 * useful range of an atomic_t is only 24 bits.
 */
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.  Note that the guaranteed
 * useful range of an atomic_t is only 24 bits.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.  Note that the guaranteed
 * useful range of an atomic_t is only 24 bits.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
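
/*
 * Example (illustrative): the classic reference-count idiom; obj and
 * its refcnt field are hypothetical:
 *
 *	if (atomic_dec_and_test(&obj->refcnt))
 *		kfree(obj);
 *
 * Only the path that drops the count to zero frees the object.
 */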

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.  Note that the guaranteed
 * useful range of an atomic_t is only 24 bits.
 */
#define atomic_inc(v) atomic_add(1,(v))

/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.  Note that the guaranteed
 * useful range of an atomic_t is only 24 bits.
 */
#define atomic_dec(v) atomic_sub(1,(v))

/*
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * the result is greater than or equal to zero.  Note that the
 * guaranteed useful range of an atomic_t is only 24 bits.
 */
#define atomic_add_negative(i,v) (atomic_add_return(i, (v)) < 0)

/*
 * The plain atomic ops above do not imply a memory barrier on SMP,
 * so real barriers are needed around atomic_dec()/atomic_inc() where
 * ordering matters.
 */
#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()
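
/*
 * Example (illustrative): make a store visible to other CPUs before
 * dropping a count they may be testing; obj and its fields are
 * hypothetical:
 *
 *	obj->status = FOO_DONE;
 *	smp_mb__before_atomic_dec();
 *	atomic_dec(&obj->pending);
 */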

#endif /* defined(__KERNEL__) */

#endif /* __ASM_ATOMIC_H */