1/*
2 * Atomic operations that C can't guarantee us.  Useful for
3 * resource counting etc..
4 *
 * But use these as seldom as possible since they are much slower
6 * than regular operations.
7 *
8 * This file is subject to the terms and conditions of the GNU General Public
9 * License.  See the file "COPYING" in the main directory of this archive
10 * for more details.
11 *
12 * Copyright (C) 1996, 1997, 1999, 2000 by Ralf Baechle
13 */
14#ifndef _ASM_ATOMIC_H
15#define _ASM_ATOMIC_H
16
17#include <asm/sgidefs.h>
18
/*
 * Wrapper struct so the counter is only touched through the accessors
 * below; volatile forces the compiler to re-read it on every access.
 */
typedef struct { volatile int counter; } atomic_t;
20
21#ifdef __KERNEL__
/* Static initializer, e.g.:  atomic_t a = ATOMIC_INIT(0); */
#define ATOMIC_INIT(i)    { (i) }
23
/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.  Note that the guaranteed
 * useful range of an atomic_t is only 24 bits.
 *
 * This is a plain volatile load; it implies no memory barrier.
 */
#define atomic_read(v)	((v)->counter)
32
/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.  Note that the guaranteed
 * useful range of an atomic_t is only 24 bits.
 *
 * This is a plain volatile store; it implies no memory barrier.
 */
#define atomic_set(v,i)	((v)->counter = (i))
42
/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.  Note that the guaranteed
 * useful range of an atomic_t is only 24 bits.
 *
 * Unlike the *_return variants below, this has no sync and no
 * "memory" clobber, so it implies no memory barrier.
 */
extern __inline__ void atomic_add(int i, volatile atomic_t * v)
{
	unsigned long temp;

	/*
	 * ll/sc retry loop: load-linked the counter, add, then
	 * store-conditional.  sc leaves 1 in %0 on success and 0 on
	 * failure, so beqz retries until the update lands atomically.
	 */
	__asm__ __volatile__(
		"1:\tll\t%0,%1\t\t\t# atomic_add\n\t"
		"addu\t%0,%2\n\t"
		"sc\t%0,%1\n\t"
		"beqz\t%0,1b"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
}
55
/*
 * atomic_sub - subtract the atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.  Note that the guaranteed
 * useful range of an atomic_t is only 24 bits.
 *
 * Like atomic_add(): no sync, no "memory" clobber, hence no
 * memory barrier semantics.
 */
extern __inline__ void atomic_sub(int i, volatile atomic_t * v)
{
	unsigned long temp;

	/*
	 * ll/sc retry loop: sc writes 1 to %0 on success, 0 on failure;
	 * beqz loops back to the ll until the update lands atomically.
	 */
	__asm__ __volatile__(
		"1:\tll\t%0,%1\t\t\t# atomic_sub\n\t"
		"subu\t%0,%2\n\t"
		"sc\t%0,%1\n\t"
		"beqz\t%0,1b"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
}
76
/*
 * atomic_add_return - add integer to atomic variable and return new value
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns the new value.  The trailing
 * sync plus the "memory" clobber make this act as a memory barrier.
 * Note that the guaranteed useful range of an atomic_t is only 24 bits.
 */
extern __inline__ int atomic_add_return(int i, atomic_t * v)
{
	unsigned long temp, result;

	/*
	 * .set noreorder: instruction order is exact, including the
	 * branch delay slot.  sc overwrites %0 (result) with its
	 * success flag, so the addu in the delay slot of beqz
	 * recomputes the result from the loaded value %1.  On a retry
	 * that delay-slot addu is harmless: the loop re-executes ll
	 * into %1 before anything uses %0.
	 */
	__asm__ __volatile__(
		".set\tnoreorder\t\t\t# atomic_add_return\n"
		"1:\tll\t%1,%2\n\t"
		"addu\t%0,%1,%3\n\t"
		"sc\t%0,%2\n\t"
		"beqz\t%0,1b\n\t"
		"addu\t%0,%1,%3\n\t"
		"sync\n\t"
		".set\treorder"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");

	return result;
}
99
/*
 * atomic_sub_return - subtract integer from atomic variable, return new value
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns the new value.  The
 * trailing sync plus the "memory" clobber make this act as a memory
 * barrier.  Note that the guaranteed useful range of an atomic_t is
 * only 24 bits.
 */
extern __inline__ int atomic_sub_return(int i, atomic_t * v)
{
	unsigned long temp, result;

	/*
	 * Same structure as atomic_add_return(): sc clobbers %0 with
	 * its success flag, so the subu in the beqz delay slot
	 * recomputes the result from the loaded value %1 once the
	 * store-conditional has succeeded.
	 */
	__asm__ __volatile__(
		".set\tnoreorder\t\t\t# atomic_sub_return\n"
		"1:\tll\t%1,%2\n\t"
		"subu\t%0,%1,%3\n\t"
		"sc\t%0,%2\n\t"
		"beqz\t%0,1b\n\t"
		"subu\t%0,%1,%3\n\t"
		"sync\n\t"
		".set\treorder"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");

	return result;
}
119
/* Decrement/increment by one and return the new value (barrier included,
 * via the *_return primitives above). */
#define atomic_dec_return(v) atomic_sub_return(1,(v))
#define atomic_inc_return(v) atomic_add_return(1,(v))
122
/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.  Note that the guaranteed
 * useful range of an atomic_t is only 24 bits.
 */
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)
134
/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.  Note that the guaranteed
 * useful range of an atomic_t is only 24 bits.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
145
/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.  Note that the guaranteed
 * useful range of an atomic_t is only 24 bits.
 */
/* Built on atomic_dec_return() for symmetry with atomic_inc_and_test();
 * expands to the same atomic_sub_return(1, (v)) as before. */
#define atomic_dec_and_test(v) (atomic_dec_return(v) == 0)
156
/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.  Note that the guaranteed
 * useful range of an atomic_t is only 24 bits.
 */
#define atomic_inc(v) atomic_add(1,(v))
165
/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.  Note that the guaranteed
 * useful range of an atomic_t is only 24 bits.
 */
#define atomic_dec(v) atomic_sub(1,(v))
174
/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.  Note that the guaranteed
 * useful range of an atomic_t is only 24 bits.
 */
#define atomic_add_negative(i,v) (atomic_add_return(i, (v)) < 0)
186
/*
 * Barriers around atomic_dec()/atomic_inc().  NOTE(review): these map
 * to a real smp_mb(), which implies the plain inc/dec above are NOT
 * treated as serializing here (unlike architectures that define these
 * as no-ops) -- the original "already serializing" comment appeared to
 * contradict the definitions below.
 */
#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()
192
193#endif /* defined(__KERNEL__) */
194
195#endif /* _ASM_ATOMIC_H */
196