Deleted Added
full compact
atomic.h (66695) atomic.h (67351)
1/*-
2 * Copyright (c) 1998 Doug Rabson
3 * All rights reserved.
4 *
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions
7 * are met:
8 * 1. Redistributions of source code must retain the above copyright

--- 9 unchanged lines hidden (view full) ---

18 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
19 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
20 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
21 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
22 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
23 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
24 * SUCH DAMAGE.
25 *
1/*-
2 * Copyright (c) 1998 Doug Rabson
3 * All rights reserved.
4 *
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions
7 * are met:
8 * 1. Redistributions of source code must retain the above copyright

--- 9 unchanged lines hidden (view full) ---

18 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
19 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
20 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
21 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
22 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
23 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
24 * SUCH DAMAGE.
25 *
26 * $FreeBSD: head/sys/i386/include/atomic.h 66695 2000-10-05 22:19:50Z jhb $
26 * $FreeBSD: head/sys/i386/include/atomic.h 67351 2000-10-20 07:00:48Z jhb $
27 */
28#ifndef _MACHINE_ATOMIC_H_
29#define _MACHINE_ATOMIC_H_
30
31/*
32 * Various simple arithmetic on memory which is atomic in the presence
33 * of interrupts and multiple processors.
34 *

--- 25 unchanged lines hidden (view full) ---

60 * kernel. Lock prefixes are generated if an SMP kernel is being
61 * built.
62 *
63 * Kernel modules call real functions which are built into the kernel.
64 * This allows kernel modules to be portable between UP and SMP systems.
65 */
66#if defined(KLD_MODULE)
67#define ATOMIC_ASM(NAME, TYPE, OP, V) \
27 */
28#ifndef _MACHINE_ATOMIC_H_
29#define _MACHINE_ATOMIC_H_
30
31/*
32 * Various simple arithmetic on memory which is atomic in the presence
33 * of interrupts and multiple processors.
34 *

--- 25 unchanged lines hidden (view full) ---

60 * kernel. Lock prefixes are generated if an SMP kernel is being
61 * built.
62 *
63 * Kernel modules call real functions which are built into the kernel.
64 * This allows kernel modules to be portable between UP and SMP systems.
65 */
66#if defined(KLD_MODULE)
67#define ATOMIC_ASM(NAME, TYPE, OP, V) \
68 void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v);
68void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v);
69
70int atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src);
71
72#else /* !KLD_MODULE */
73#if defined(SMP)
74#if defined(LOCORE)
75#define MPLOCKED lock ;
76#else

--- 69 unchanged lines hidden (view full) ---

146 "r" (src), /* 2 */
147 "m" (*(dst)) /* 3 */
148 : "memory");
149
150 return (res);
151}
152#endif /* defined(I386_CPU) */
153
69
70int atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src);
71
72#else /* !KLD_MODULE */
73#if defined(SMP)
74#if defined(LOCORE)
75#define MPLOCKED lock ;
76#else

--- 69 unchanged lines hidden (view full) ---

146 "r" (src), /* 2 */
147 "m" (*(dst)) /* 3 */
148 : "memory");
149
150 return (res);
151}
152#endif /* defined(I386_CPU) */
153
154#define atomic_cmpset_acq_int atomic_cmpset_int
155#define atomic_cmpset_rel_int atomic_cmpset_int
156
154#else
155/* gcc <= 2.8 version */
156#define ATOMIC_ASM(NAME, TYPE, OP, V) \
157static __inline void \
158atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
159{ \
160 __asm __volatile(MPLOCKED OP \
161 : "=m" (*p) \
162 : "ir" (V)); \
157#else
158/* gcc <= 2.8 version */
159#define ATOMIC_ASM(NAME, TYPE, OP, V) \
160static __inline void \
161atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
162{ \
163 __asm __volatile(MPLOCKED OP \
164 : "=m" (*p) \
165 : "ir" (V)); \
163}
166} \
167 \
168
164#endif
165#endif /* KLD_MODULE */
166
167#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ > 9)
168
169/* egcs 1.1.2+ version */
170ATOMIC_ASM(set, char, "orb %b2,%0", v)
171ATOMIC_ASM(clear, char, "andb %b2,%0", ~v)

--- 35 unchanged lines hidden (view full) ---

207
208ATOMIC_ASM(set, long, "orl %1,%0", v)
209ATOMIC_ASM(clear, long, "andl %1,%0", ~v)
210ATOMIC_ASM(add, long, "addl %1,%0", v)
211ATOMIC_ASM(subtract, long, "subl %1,%0", v)
212
213#endif
214
169#endif
170#endif /* KLD_MODULE */
171
172#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ > 9)
173
174/* egcs 1.1.2+ version */
175ATOMIC_ASM(set, char, "orb %b2,%0", v)
176ATOMIC_ASM(clear, char, "andb %b2,%0", ~v)

--- 35 unchanged lines hidden (view full) ---

212
213ATOMIC_ASM(set, long, "orl %1,%0", v)
214ATOMIC_ASM(clear, long, "andl %1,%0", ~v)
215ATOMIC_ASM(add, long, "addl %1,%0", v)
216ATOMIC_ASM(subtract, long, "subl %1,%0", v)
217
218#endif
219
220#undef ATOMIC_ASM
221
215#ifndef WANT_FUNCTIONS
222#ifndef WANT_FUNCTIONS
/*
 * Generate acquire and release variants of a previously defined atomic
 * operation.  The acquire variant issues a locked no-op add to the top
 * of the stack, which acts as a full memory barrier on x86, before
 * performing the operation; the release variant relies on the locked
 * atomic operation itself for ordering.
 *
 * BUGFIX: in GCC extended asm a '%' introduces an operand reference, so
 * a literal register name must be written "%%esp"; the previous "%esp"
 * fails to assemble ("operand number missing after %-letter").
 */
#define ATOMIC_ACQ_REL(NAME, TYPE) \
static __inline void \
atomic_##NAME##_acq_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{ \
	__asm __volatile("lock; addl $0,0(%%esp)" : : : "memory");\
	atomic_##NAME##_##TYPE(p, v); \
} \
 \
static __inline void \
atomic_##NAME##_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{ \
	atomic_##NAME##_##TYPE(p, v); \
}
236
237ATOMIC_ACQ_REL(set, char)
238ATOMIC_ACQ_REL(clear, char)
239ATOMIC_ACQ_REL(add, char)
240ATOMIC_ACQ_REL(subtract, char)
241ATOMIC_ACQ_REL(set, short)
242ATOMIC_ACQ_REL(clear, short)
243ATOMIC_ACQ_REL(add, short)
244ATOMIC_ACQ_REL(subtract, short)
245ATOMIC_ACQ_REL(set, int)
246ATOMIC_ACQ_REL(clear, int)
247ATOMIC_ACQ_REL(add, int)
248ATOMIC_ACQ_REL(subtract, int)
249ATOMIC_ACQ_REL(set, long)
250ATOMIC_ACQ_REL(clear, long)
251ATOMIC_ACQ_REL(add, long)
252ATOMIC_ACQ_REL(subtract, long)
253
254#undef ATOMIC_ACQ_REL
255
/*
 * Atomic load with acquire semantics and atomic store with release
 * semantics.  We assume that a plain assignment (a = b) performs atomic
 * loads and stores of the naturally aligned integer types on i386.
 *
 * BUGFIX: in GCC extended asm a '%' introduces an operand reference, so
 * a literal register name must be written "%%esp"; the previous "%esp"
 * fails to assemble.
 */
#define ATOMIC_STORE_LOAD(TYPE) \
static __inline u_##TYPE \
atomic_load_acq_##TYPE(volatile u_##TYPE *p) \
{ \
	/* locked no-op add = full barrier before the load */ \
	__asm __volatile("lock; addl $0,0(%%esp)" : : : "memory");\
	return (*p); \
} \
 \
static __inline void \
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{ \
	*p = v; \
	/* compiler barrier: forbid reordering later accesses up */ \
	__asm __volatile("" : : : "memory"); \
}
273
274ATOMIC_STORE_LOAD(char)
275ATOMIC_STORE_LOAD(short)
276ATOMIC_STORE_LOAD(int)
277ATOMIC_STORE_LOAD(long)
278
279#undef ATOMIC_STORE_LOAD
280
216static __inline int
217atomic_cmpset_ptr(volatile void *dst, void *exp, void *src)
218{
219
220 return (
221 atomic_cmpset_int((volatile u_int *)dst, (u_int)exp, (u_int)src));
222}
223
281static __inline int
282atomic_cmpset_ptr(volatile void *dst, void *exp, void *src)
283{
284
285 return (
286 atomic_cmpset_int((volatile u_int *)dst, (u_int)exp, (u_int)src));
287}
288
289#define atomic_cmpset_acq_ptr atomic_cmpset_ptr
290#define atomic_cmpset_rel_ptr atomic_cmpset_ptr
291
292static __inline void *
293atomic_load_acq_ptr(volatile void *p)
294{
295 return (void *)atomic_load_acq_int((volatile u_int *)p);
296}
297
298static __inline void
299atomic_store_rel_ptr(volatile void *p, void *v)
300{
301 atomic_store_rel_int((volatile u_int *)p, (u_int)v);
302}
303
/*
 * Generate the pointer-typed flavor of an atomic operation (plain,
 * acquire, release) by forwarding to the corresponding u_int operation;
 * uintptr_t and u_int are interchangeable on i386.
 */
#define ATOMIC_PTR(NAME) \
static __inline void \
atomic_##NAME##_ptr(volatile void *p, uintptr_t v) \
{ \
	atomic_##NAME##_int((volatile u_int *)p, v); \
} \
 \
static __inline void \
atomic_##NAME##_acq_ptr(volatile void *p, uintptr_t v) \
{ \
	atomic_##NAME##_acq_int((volatile u_int *)p, v); \
} \
 \
static __inline void \
atomic_##NAME##_rel_ptr(volatile void *p, uintptr_t v) \
{ \
	atomic_##NAME##_rel_int((volatile u_int *)p, v); \
}
322
323ATOMIC_PTR(set)
324ATOMIC_PTR(clear)
325ATOMIC_PTR(add)
326ATOMIC_PTR(subtract)
327
328#undef ATOMIC_PTR
329
224static __inline u_int
225atomic_readandclear_int(volatile u_int *addr)
226{
227 u_int result;
228
229 __asm __volatile (
230 " xorl %0,%0 ; "
231 " xchgl %1,%0 ; "

--- 24 unchanged lines hidden ---
330static __inline u_int
331atomic_readandclear_int(volatile u_int *addr)
332{
333 u_int result;
334
335 __asm __volatile (
336 " xorl %0,%0 ; "
337 " xchgl %1,%0 ; "

--- 24 unchanged lines hidden ---