diff: sys/amd64/include/atomic.h, revision 105117 -> revision 114349
(lines prefixed "-" were deleted, "+" were added; unprefixed lines are unchanged context)
 /*-
  * Copyright (c) 1998 Doug Rabson
  * All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
  * are met:
  * 1. Redistributions of source code must retain the above copyright

--- 9 unchanged lines hidden ---

  * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
  * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
  * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
  * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
  * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
  * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
  * SUCH DAMAGE.
  *
- * $FreeBSD: head/sys/amd64/include/atomic.h 105117 2002-10-14 19:33:12Z pirzyk $
+ * $FreeBSD: head/sys/amd64/include/atomic.h 114349 2003-05-01 01:05:25Z peter $
  */
 #ifndef _MACHINE_ATOMIC_H_
 #define _MACHINE_ATOMIC_H_
 
 /*
  * Various simple arithmetic on memory which is atomic in the presence
  * of interrupts and multiple processors.
  *

--- 28 unchanged lines hidden ---

  * Kernel modules call real functions which are built into the kernel.
  * This allows kernel modules to be portable between UP and SMP systems.
  */
 #if defined(KLD_MODULE)
 #define ATOMIC_ASM(NAME, TYPE, OP, CONS, V)			\
 void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)
 
 int atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src);
+int atomic_cmpset_long(volatile u_long *dst, u_long exp, u_long src);
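(Editorial example, not part of the diff: under KLD_MODULE the macro above
collapses to a plain prototype, so a module calls a function compiled into
the kernel instead of inlining UP- or SMP-specific code. For instance, the
ATOMIC_ASM invocation seen later in this file expands as follows; the
'counter' variable is hypothetical.)

	/* ATOMIC_ASM(add, int, "addl %1,%0", "ir", v); expands to: */
	void atomic_add_int(volatile u_int *p, u_int v);

	/* ...and a module calls it like any other extern function: */
	atomic_add_int(&counter, 1);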
 
 #define ATOMIC_STORE_LOAD(TYPE, LOP, SOP)			\
 u_##TYPE atomic_load_acq_##TYPE(volatile u_##TYPE *p);	\
 void atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)
 
 #else /* !KLD_MODULE */
 
 #ifdef __GNUC__
 
 /*
  * For userland, assume the SMP case and use lock prefixes so that
  * the binaries will run on both types of systems.
  */
-#if defined(SMP) || !defined(_KERNEL)
+#if !defined(_KERNEL)
 #define MPLOCKED	lock ;
 #else
 #define MPLOCKED
 #endif
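(Editorial note, not part of the diff: __XSTRING() macro-expands its argument
and stringifies the result, so the "	" __XSTRING(MPLOCKED) "	" fragments
seen below concatenate into the asm template. A sketch of the two resulting
instruction sequences; the registers shown are illustrative, since the
compiler chooses the real operands:)

	cmpxchgl %esi,(%rdi)		/* MPLOCKED empty: no bus lock */
	lock ; cmpxchgl %esi,(%rdi)	/* MPLOCKED == "lock ;": the lock
					   prefix makes the read-modify-write
					   atomic across all CPUs */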
 
 /*
  * The assembly is volatilized to demark potential before-and-after side
  * effects if an interrupt or SMP collision were to occur.

--- 19 unchanged lines hidden ---

  *
  * if (*dst == exp) *dst = src (all 32 bit words)
  *
  * Returns 0 on failure, non-zero on success
  */
 
 #if defined(__GNUC__)
 
-#if defined(I386_CPU) || defined(CPU_DISABLE_CMPXCHG)
-
 static __inline int
 atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src)
 {
 	int res = exp;
 
-	__asm __volatile(
-	"	pushfl ;		"
-	"	cli ;			"
-	"	cmpl	%0,%2 ;		"
-	"	jne	1f ;		"
-	"	movl	%1,%2 ;		"
-	"1:				"
-	"	sete	%%al;		"
-	"	movzbl	%%al,%0 ;	"
-	"	popfl ;			"
+	__asm __volatile (
+	"	" __XSTRING(MPLOCKED) "	"
+	"	cmpxchgl %1,%2 ;	"
+	"	setz	%%al ;		"
+	"	movzbl	%%al,%0 ;	"
+	"1:				"
 	"# atomic_cmpset_int"
 	: "+a" (res)			/* 0 (result) */
 	: "r" (src),			/* 1 */
 	  "m" (*(dst))			/* 2 */
 	: "memory");
 
 	return (res);
 }
 
-#else /* defined(I386_CPU) */
-
 static __inline int
-atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src)
+atomic_cmpset_long(volatile u_long *dst, u_long exp, u_long src)
 {
-	int res = exp;
+	long res = exp;
 
 	__asm __volatile (
 	"	" __XSTRING(MPLOCKED) "	"
-	"	cmpxchgl %1,%2 ;	"
+	"	cmpxchgq %1,%2 ;	"
 	"	setz	%%al ;		"
-	"	movzbl	%%al,%0 ;	"
+	"	movzbq	%%al,%0 ;	"
 	"1:				"
-	"# atomic_cmpset_int"
-	: "+a" (res)			/* 0 (result) */
+	"# atomic_cmpset_long"
+	: "+a" (res)			/* 0 (result) %rax, XXX check */
 	: "r" (src),			/* 1 */
 	  "m" (*(dst))			/* 2 */
 	: "memory");
 
 	return (res);
 }
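(Editorial example, not part of the diff: the cmpset functions above are the
usual building block for simple spinlocks. A minimal sketch, assuming a
hypothetical lock word where 0 means free and 1 means held:)

	static __inline void
	example_spin_acquire(volatile u_int *lk)
	{
		/* retry until this CPU is the one that flips 0 -> 1 */
		while (atomic_cmpset_int(lk, 0, 1) == 0)
			;
	}

	static __inline void
	example_spin_release(volatile u_int *lk)
	{
		/* release-style store lets prior writes drain first */
		atomic_store_rel_int(lk, 0);
	}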
-
-#endif /* defined(I386_CPU) */
 
 #endif /* defined(__GNUC__) */
 
 #if defined(__GNUC__)
 
-#if defined(I386_CPU)
-
-/*
- * We assume that a = b will do atomic loads and stores.
- *
- * XXX: This is _NOT_ safe on a P6 or higher because it does not guarantee
- * memory ordering. These should only be used on a 386.
- */
 #define ATOMIC_STORE_LOAD(TYPE, LOP, SOP)		\
 static __inline u_##TYPE				\
 atomic_load_acq_##TYPE(volatile u_##TYPE *p)		\
 {							\
-	return (*p);					\
-}							\
-							\
-static __inline void					\
-atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
-{							\
-	*p = v;						\
-	__asm __volatile("" : : : "memory");		\
-}
-
-#else /* !defined(I386_CPU) */
-
-#define ATOMIC_STORE_LOAD(TYPE, LOP, SOP)		\
-static __inline u_##TYPE				\
-atomic_load_acq_##TYPE(volatile u_##TYPE *p)		\
-{							\
 	u_##TYPE res;					\
 							\
 	__asm __volatile(__XSTRING(MPLOCKED) LOP	\
 	: "=a" (res),			/* 0 (result) */\
 	  "+m" (*p)			/* 1 */		\
 	: : "memory");					\
 							\
 	return (res);					\

--- 6 unchanged lines hidden ---

 atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
 {							\
 	__asm __volatile(SOP				\
 	: "+m" (*p),			/* 0 */		\
 	  "+r" (v)			/* 1 */		\
 	: : "memory");					\
 }
 
-#endif /* defined(I386_CPU) */
-
 #else /* !defined(__GNUC__) */
 
 extern int atomic_cmpset_int(volatile u_int *, u_int, u_int);
+extern int atomic_cmpset_long(volatile u_long *, u_long, u_long);
 
 #define ATOMIC_STORE_LOAD(TYPE, LOP, SOP)				\
 extern u_##TYPE atomic_load_acq_##TYPE(volatile u_##TYPE *p);	\
 extern void atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)
 
 #endif /* defined(__GNUC__) */
 
 #endif /* KLD_MODULE */

--- 8 unchanged lines hidden ---

 ATOMIC_ASM(add,	     short, "addw %w1,%0", "ir",  v);
 ATOMIC_ASM(subtract, short, "subw %w1,%0", "ir",  v);
 
 ATOMIC_ASM(set,	     int,   "orl %1,%0",  "ir",  v);
 ATOMIC_ASM(clear,    int,   "andl %1,%0", "ir", ~v);
 ATOMIC_ASM(add,	     int,   "addl %1,%0", "ir",  v);
 ATOMIC_ASM(subtract, int,   "subl %1,%0", "ir",  v);
 
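(Editorial example, not part of the diff: each ATOMIC_ASM invocation above
generates an atomic version of the corresponding C compound assignment.
A hypothetical caller, with 'flags' and 'count' as u_int variables:)

	atomic_set_int(&flags, 0x1);		/* flags |= 0x1, atomically */
	atomic_clear_int(&flags, 0x1);		/* flags &= ~0x1 (note ~v)  */
	atomic_add_int(&count, 1);		/* count += 1               */
	atomic_subtract_int(&count, 1);		/* count -= 1               */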
-ATOMIC_ASM(set,	     long,  "orl %1,%0",  "ir",  v);
-ATOMIC_ASM(clear,    long,  "andl %1,%0", "ir", ~v);
-ATOMIC_ASM(add,	     long,  "addl %1,%0", "ir",  v);
-ATOMIC_ASM(subtract, long,  "subl %1,%0", "ir",  v);
+ATOMIC_ASM(set,	     long,  "orq %1,%0",  "ir",  v);
+ATOMIC_ASM(clear,    long,  "andq %1,%0", "ir", ~v);
+ATOMIC_ASM(add,	     long,  "addq %1,%0", "ir",  v);
+ATOMIC_ASM(subtract, long,  "subq %1,%0", "ir",  v);
 
 ATOMIC_STORE_LOAD(char,	"cmpxchgb %b0,%1", "xchgb %b1,%0");
 ATOMIC_STORE_LOAD(short,	"cmpxchgw %w0,%1", "xchgw %w1,%0");
 ATOMIC_STORE_LOAD(int,	"cmpxchgl %0,%1",  "xchgl %1,%0");
-ATOMIC_STORE_LOAD(long,	"cmpxchgl %0,%1",  "xchgl %1,%0");
+ATOMIC_STORE_LOAD(long,	"cmpxchgq %0,%1",  "xchgq %1,%0");
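(Editorial example, not part of the diff: the load_acq/store_rel pairs
generated above are intended for flag publication. A minimal sketch; the
'ready' and 'payload' names and compute() are hypothetical:)

	static int payload;
	static volatile u_int ready;	/* 0 until payload is initialized */

	static void
	example_publish(void)
	{
		payload = compute();			/* plain store */
		atomic_store_rel_int(&ready, 1);	/* release: payload is
							   visible before flag */
	}

	static int
	example_consume(void)
	{
		while (atomic_load_acq_int(&ready) == 0)
			;	/* acquire: payload load not hoisted above */
		return (payload);
	}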
 
 #undef ATOMIC_ASM
 #undef ATOMIC_STORE_LOAD
 
 #define atomic_set_acq_char		atomic_set_char
 #define atomic_set_rel_char		atomic_set_char
 #define atomic_clear_acq_char		atomic_clear_char
 #define atomic_clear_rel_char		atomic_clear_char

--- 25 unchanged lines hidden ---

 #define atomic_set_acq_long		atomic_set_long
 #define atomic_set_rel_long		atomic_set_long
 #define atomic_clear_acq_long		atomic_clear_long
 #define atomic_clear_rel_long		atomic_clear_long
 #define atomic_add_acq_long		atomic_add_long
 #define atomic_add_rel_long		atomic_add_long
 #define atomic_subtract_acq_long	atomic_subtract_long
 #define atomic_subtract_rel_long	atomic_subtract_long
-#define atomic_cmpset_long		atomic_cmpset_int
-#define atomic_cmpset_acq_long		atomic_cmpset_acq_int
-#define atomic_cmpset_rel_long		atomic_cmpset_rel_int
 
 #define atomic_cmpset_acq_ptr		atomic_cmpset_ptr
 #define atomic_cmpset_rel_ptr		atomic_cmpset_ptr
 
 #define atomic_set_8		atomic_set_char
 #define atomic_set_acq_8	atomic_set_acq_char
 #define atomic_set_rel_8	atomic_set_rel_char
 #define atomic_clear_8		atomic_clear_char

--- 42 unchanged lines hidden ---

 #define atomic_cmpset_rel_32	atomic_cmpset_rel_int
 #define atomic_readandclear_32	atomic_readandclear_int
 
 #if !defined(WANT_FUNCTIONS)
 static __inline int
 atomic_cmpset_ptr(volatile void *dst, void *exp, void *src)
 {
 
-	return (atomic_cmpset_int((volatile u_int *)dst, (u_int)exp,
-	    (u_int)src));
+	return (atomic_cmpset_long((volatile u_long *)dst,
+	    (u_long)exp, (u_long)src));
 }
 
 static __inline void *
 atomic_load_acq_ptr(volatile void *p)
 {
-	return (void *)atomic_load_acq_int((volatile u_int *)p);
+	return (void *)atomic_load_acq_long((volatile u_long *)p);
 }
 
 static __inline void
 atomic_store_rel_ptr(volatile void *p, void *v)
 {
-	atomic_store_rel_int((volatile u_int *)p, (u_int)v);
+	atomic_store_rel_long((volatile u_long *)p, (u_long)v);
 }
 
 #define ATOMIC_PTR(NAME)				\
 static __inline void					\
 atomic_##NAME##_ptr(volatile void *p, uintptr_t v)	\
 {							\
-	atomic_##NAME##_int((volatile u_int *)p, v);	\
+	atomic_##NAME##_long((volatile u_long *)p, v);	\
 }							\
 							\
 static __inline void					\
 atomic_##NAME##_acq_ptr(volatile void *p, uintptr_t v)	\
 {							\
-	atomic_##NAME##_acq_int((volatile u_int *)p, v);\
+	atomic_##NAME##_acq_long((volatile u_long *)p, v);\
 }							\
 							\
 static __inline void					\
 atomic_##NAME##_rel_ptr(volatile void *p, uintptr_t v)	\
 {							\
-	atomic_##NAME##_rel_int((volatile u_int *)p, v);\
+	atomic_##NAME##_rel_long((volatile u_long *)p, v);\
 }
 
 ATOMIC_PTR(set)
 ATOMIC_PTR(clear)
 ATOMIC_PTR(add)
 ATOMIC_PTR(subtract)
 
 #undef ATOMIC_PTR
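(Editorial example, not part of the diff: the pointer wrappers above now cast
through u_long because amd64 pointers are 64 bits wide, the same width as
u_long; casting through the 32-bit u_int, as the deleted lines did, would
truncate the address. A hypothetical lock-free list push built on
atomic_cmpset_ptr:)

	struct node { struct node *next; };	/* hypothetical */
	static struct node *head;

	static int
	example_push(struct node *old, struct node *new)
	{
		new->next = old;
		return (atomic_cmpset_ptr((volatile void *)&head,
		    (void *)old, (void *)new));
	}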

--- 16 unchanged lines hidden ---

 }
 
 static __inline u_long
 atomic_readandclear_long(volatile u_long *addr)
 {
 	u_long result;
 
 	__asm __volatile (
-	"	xorl	%0,%0 ;		"
-	"	xchgl	%1,%0 ;		"
+	"	xorq	%0,%0 ;		"
+	"	xchgq	%1,%0 ;		"
 	"# atomic_readandclear_int"
 	: "=&r" (result)		/* 0 (result) */
 	: "m" (*addr));			/* 1 (addr) */
 
 	return (result);
 }
 
 #else /* !defined(__GNUC__) */
 
 extern u_long	atomic_readandclear_long(volatile u_long *);
 extern u_int	atomic_readandclear_int(volatile u_int *);
 
 #endif /* defined(__GNUC__) */
 
 #endif /* !defined(WANT_FUNCTIONS) */
 #endif /* ! _MACHINE_ATOMIC_H_ */
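(Editorial example, not part of the diff: atomic_readandclear_long swaps a
zero into the target and returns the previous contents in one step, which
suits "drain all pending bits" patterns. A hypothetical consumer:)

	static volatile u_long pending;	/* hypothetical event bitmask */

	static void
	example_drain(void)
	{
		u_long bits;

		bits = atomic_readandclear_long(&pending);
		/* handle 'bits'; setters racing with us land in the
		   freshly zeroed word and are seen on the next drain */
	}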