/*-
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD: stable/11/sys/i386/include/atomic.h 326514 2017-12-04 09:41:57Z hselasky $
 */
#ifndef _MACHINE_ATOMIC_H_
#define	_MACHINE_ATOMIC_H_

#ifndef _SYS_CDEFS_H_
#error this file needs sys/cdefs.h as a prerequisite
#endif

#ifdef _KERNEL
#include <machine/md_var.h>
#include <machine/specialreg.h>
#endif

#ifndef __OFFSETOF_MONITORBUF
/*
 * __OFFSETOF_MONITORBUF == __pcpu_offset(pc_monitorbuf).
 *
 * The open-coded number is used instead of the symbolic expression to
 * avoid a dependency on sys/pcpu.h in machine/atomic.h consumers.
 * An assertion in i386/vm_machdep.c ensures that the value is correct.
 */
#define	__OFFSETOF_MONITORBUF	0x180

static __inline void
__mbk(void)
{

	__asm __volatile("lock; addl $0,%%fs:%0"
	    : "+m" (*(u_int *)__OFFSETOF_MONITORBUF) : : "memory", "cc");
}

static __inline void
__mbu(void)
{

	__asm __volatile("lock; addl $0,(%%esp)" : : : "memory", "cc");
}
#endif

/*
 * Various simple operations on memory, each of which is atomic in the
 * presence of interrupts and multiple processors.
 *
 * atomic_set_char(P, V)	(*(u_char *)(P) |= (V))
 * atomic_clear_char(P, V)	(*(u_char *)(P) &= ~(V))
 * atomic_add_char(P, V)	(*(u_char *)(P) += (V))
 * atomic_subtract_char(P, V)	(*(u_char *)(P) -= (V))
 *
 * atomic_set_short(P, V)	(*(u_short *)(P) |= (V))
 * atomic_clear_short(P, V)	(*(u_short *)(P) &= ~(V))
 * atomic_add_short(P, V)	(*(u_short *)(P) += (V))
 * atomic_subtract_short(P, V)	(*(u_short *)(P) -= (V))
 *
 * atomic_set_int(P, V)		(*(u_int *)(P) |= (V))
 * atomic_clear_int(P, V)	(*(u_int *)(P) &= ~(V))
 * atomic_add_int(P, V)		(*(u_int *)(P) += (V))
 * atomic_subtract_int(P, V)	(*(u_int *)(P) -= (V))
 * atomic_swap_int(P, V)	(return (*(u_int *)(P)); *(u_int *)(P) = (V);)
 * atomic_readandclear_int(P)	(return (*(u_int *)(P)); *(u_int *)(P) = 0;)
 *
 * atomic_set_long(P, V)	(*(u_long *)(P) |= (V))
 * atomic_clear_long(P, V)	(*(u_long *)(P) &= ~(V))
 * atomic_add_long(P, V)	(*(u_long *)(P) += (V))
 * atomic_subtract_long(P, V)	(*(u_long *)(P) -= (V))
 * atomic_swap_long(P, V)	(return (*(u_long *)(P)); *(u_long *)(P) = (V);)
 * atomic_readandclear_long(P)	(return (*(u_long *)(P)); *(u_long *)(P) = 0;)
 */
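
/*
 * Illustrative sketch, not part of this header: a simple reference
 * count built from the operations listed above.  The names "refs" and
 * "obj_destroy" are hypothetical and exist only for this example.
 *
 *	static volatile u_int refs = 1;
 *
 *	atomic_add_int(&refs, 1);			// acquire a reference
 *	if (atomic_fetchadd_int(&refs, -1) == 1)	// release a reference
 *		obj_destroy();				// dropped the last one
 */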

/*
 * The above functions are expanded inline in the statically-linked
 * kernel.  Lock prefixes are generated if an SMP kernel is being
 * built.
 *
 * Kernel modules call real functions which are built into the kernel.
 * This allows kernel modules to be portable between UP and SMP systems.
 */
#if defined(KLD_MODULE) || !defined(__GNUCLIKE_ASM)
#define	ATOMIC_ASM(NAME, TYPE, OP, CONS, V)			\
void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v);	\
void atomic_##NAME##_barr_##TYPE(volatile u_##TYPE *p, u_##TYPE v)

int	atomic_cmpset_int(volatile u_int *dst, u_int expect, u_int src);
int	atomic_fcmpset_int(volatile u_int *dst, u_int *expect, u_int src);
u_int	atomic_fetchadd_int(volatile u_int *p, u_int v);
int	atomic_testandset_int(volatile u_int *p, u_int v);
int	atomic_testandclear_int(volatile u_int *p, u_int v);
void	atomic_thread_fence_acq(void);
void	atomic_thread_fence_acq_rel(void);
void	atomic_thread_fence_rel(void);
void	atomic_thread_fence_seq_cst(void);

#define	ATOMIC_LOAD(TYPE)					\
u_##TYPE	atomic_load_acq_##TYPE(volatile u_##TYPE *p)
#define	ATOMIC_STORE(TYPE)					\
void		atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)

int		atomic_cmpset_64(volatile uint64_t *, uint64_t, uint64_t);
uint64_t	atomic_load_acq_64(volatile uint64_t *);
void		atomic_store_rel_64(volatile uint64_t *, uint64_t);
uint64_t	atomic_swap_64(volatile uint64_t *, uint64_t);
uint64_t	atomic_fetchadd_64(volatile uint64_t *, uint64_t);

#else /* !KLD_MODULE && __GNUCLIKE_ASM */

/*
 * For userland, always use lock prefixes so that the binaries will run
 * on both SMP and !SMP systems.
 */
#if defined(SMP) || !defined(_KERNEL)
#define	MPLOCKED	"lock ; "
#else
#define	MPLOCKED
#endif

/*
 * The assembly is volatilized so that the compiler cannot remove the
 * code.  GCC aggressively reorders operations, so a memory clobber is
 * needed on the barrier variants to keep memory accesses from being
 * reordered across them.
 */
#define	ATOMIC_ASM(NAME, TYPE, OP, CONS, V)		\
static __inline void					\
atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile(MPLOCKED OP			\
	: "+m" (*p)					\
	: CONS (V)					\
	: "cc");					\
}							\
							\
static __inline void					\
atomic_##NAME##_barr_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile(MPLOCKED OP			\
	: "+m" (*p)					\
	: CONS (V)					\
	: "memory", "cc");				\
}							\
struct __hack

/*
 * Atomic compare and set, used by the mutex functions
 *
 * if (*dst == expect) *dst = src (all 32 bit words)
 *
 * Returns 0 on failure, non-zero on success
 */

static __inline int
atomic_cmpset_int(volatile u_int *dst, u_int expect, u_int src)
{
	u_char res;

	__asm __volatile(
	"	" MPLOCKED "		"
	"	cmpxchgl %3,%1 ;	"
	"       sete	%0 ;		"
	"# atomic_cmpset_int"
	: "=q" (res),			/* 0 */
	  "+m" (*dst),			/* 1 */
	  "+a" (expect)			/* 2 */
	: "r" (src)			/* 3 */
	: "memory", "cc");
	return (res);
}
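
/*
 * Illustrative sketch, not part of this header: a minimal try-lock
 * built on the compare-and-set primitive.  The variable "lockword" is
 * hypothetical and exists only for this example.
 *
 *	static volatile u_int lockword;		// 0 == free, 1 == owned
 *
 *	if (atomic_cmpset_acq_int(&lockword, 0, 1)) {
 *		// critical section
 *		atomic_store_rel_int(&lockword, 0);
 *	}
 */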

static __inline int
atomic_fcmpset_int(volatile u_int *dst, u_int *expect, u_int src)
{
	u_char res;

	__asm __volatile(
	"	" MPLOCKED "		"
	"	cmpxchgl %3,%1 ;	"
	"       sete	%0 ;		"
	"# atomic_fcmpset_int"
	: "=q" (res),			/* 0 */
	  "+m" (*dst),			/* 1 */
	  "+a" (*expect)		/* 2 */
	: "r" (src)			/* 3 */
	: "memory", "cc");
	return (res);
}
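
/*
 * Illustrative sketch, not part of this header: the usual fcmpset
 * retry loop.  On failure, *expect is updated with the current value
 * of *dst, so the caller does not need to reload it before retrying.
 * The variable "counter" is hypothetical and exists only for this
 * example.
 *
 *	static volatile u_int counter;
 *	u_int old, new;
 *
 *	old = counter;
 *	do {
 *		new = old + 1;
 *	} while (!atomic_fcmpset_int(&counter, &old, new));
 */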

/*
 * Atomically add the value of v to the integer pointed to by p and return
 * the previous value of *p.
 */
static __inline u_int
atomic_fetchadd_int(volatile u_int *p, u_int v)
{

	__asm __volatile(
	"	" MPLOCKED "		"
	"	xaddl	%0,%1 ;		"
	"# atomic_fetchadd_int"
	: "+r" (v),			/* 0 */
	  "+m" (*p)			/* 1 */
	: : "cc");
	return (v);
}
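
/*
 * Illustrative sketch, not part of this header: xadd-based ticket
 * numbering, where every caller gets a unique, monotonically
 * increasing value.  "next_ticket" is hypothetical and exists only
 * for this example.
 *
 *	static volatile u_int next_ticket;
 *
 *	u_int my_ticket = atomic_fetchadd_int(&next_ticket, 1);
 */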

static __inline int
atomic_testandset_int(volatile u_int *p, u_int v)
{
	u_char res;

	__asm __volatile(
	"	" MPLOCKED "		"
	"	btsl	%2,%1 ;		"
	"	setc	%0 ;		"
	"# atomic_testandset_int"
	: "=q" (res),			/* 0 */
	  "+m" (*p)			/* 1 */
	: "Ir" (v & 0x1f)		/* 2 */
	: "cc");
	return (res);
}

static __inline int
atomic_testandclear_int(volatile u_int *p, u_int v)
{
	u_char res;

	__asm __volatile(
	"	" MPLOCKED "		"
	"	btrl	%2,%1 ;		"
	"	setc	%0 ;		"
	"# atomic_testandclear_int"
	: "=q" (res),			/* 0 */
	  "+m" (*p)			/* 1 */
	: "Ir" (v & 0x1f)		/* 2 */
	: "cc");
	return (res);
}
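
/*
 * Illustrative sketch, not part of this header: using the
 * test-and-set/test-and-clear pair as a one-bit "busy" flag.  Note
 * that the bit number is taken modulo 32 (the "v & 0x1f" masking
 * above).  "flags" is hypothetical and exists only for this example.
 *
 *	static volatile u_int flags;
 *
 *	if (atomic_testandset_int(&flags, 3) == 0) {
 *		// bit 3 was clear; we now own it
 *		atomic_testandclear_int(&flags, 3);
 *	}
 */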

/*
 * We assume that a = b will do atomic loads and stores.  Due to the
 * IA32 memory model, a simple store guarantees release semantics.
 *
 * However, a load may pass a store if they are performed on distinct
 * addresses, so we need a Store/Load barrier for sequentially
 * consistent fences in SMP kernels.  We use "lock addl $0,mem" for a
 * Store/Load barrier, as recommended by the AMD Software Optimization
 * Guide, and not mfence.  In the kernel, we use a private per-cpu
 * cache line for "mem", to avoid introducing false data
 * dependencies.  In user space, we use the word at the top of the
 * stack.
 *
 * For UP kernels, however, the memory of the single processor is
 * always consistent, so we only need to stop the compiler from
 * reordering accesses in a way that violates the semantics of acquire
 * and release.
 */
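
/*
 * Illustrative sketch, not part of this header, of the Store/Load
 * reordering described above.  Without the sequentially consistent
 * fence, both CPUs could observe the other's flag as 0 and enter the
 * critical region at the same time.  "flag0" and "flag1" are
 * hypothetical and exist only for this example.
 *
 *	// CPU 0				// CPU 1
 *	flag0 = 1;				flag1 = 1;
 *	atomic_thread_fence_seq_cst();		atomic_thread_fence_seq_cst();
 *	if (flag1 == 0)				if (flag0 == 0)
 *		// enter				// enter
 */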

#if defined(_KERNEL)
#if defined(SMP)
#define	__storeload_barrier()	__mbk()
#else /* _KERNEL && UP */
#define	__storeload_barrier()	__compiler_membar()
#endif /* SMP */
#else /* !_KERNEL */
#define	__storeload_barrier()	__mbu()
#endif /* _KERNEL */

#define	ATOMIC_LOAD(TYPE)					\
static __inline u_##TYPE					\
atomic_load_acq_##TYPE(volatile u_##TYPE *p)			\
{								\
	u_##TYPE res;						\
								\
	res = *p;						\
	__compiler_membar();					\
	return (res);						\
}								\
struct __hack

#define	ATOMIC_STORE(TYPE)					\
static __inline void						\
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)	\
{								\
								\
	__compiler_membar();					\
	*p = v;							\
}								\
struct __hack

static __inline void
atomic_thread_fence_acq(void)
{

	__compiler_membar();
}

static __inline void
atomic_thread_fence_rel(void)
{

	__compiler_membar();
}

static __inline void
atomic_thread_fence_acq_rel(void)
{

	__compiler_membar();
}

static __inline void
atomic_thread_fence_seq_cst(void)
{

	__storeload_barrier();
}
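
/*
 * Illustrative sketch, not part of this header: pairing a release
 * fence in a producer with an acquire fence in a consumer.  On UP
 * kernels these collapse to compiler barriers, as explained above.
 * "data" and "ready" (assumed volatile) are hypothetical and exist
 * only for this example.
 *
 *	// producer:
 *	data = 42;
 *	atomic_thread_fence_rel();
 *	ready = 1;
 *
 *	// consumer:
 *	while (ready == 0)
 *		continue;
 *	atomic_thread_fence_acq();
 *	// data is guaranteed to read as 42 here
 */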

#ifdef _KERNEL

#ifdef WANT_FUNCTIONS
int		atomic_cmpset_64_i386(volatile uint64_t *, uint64_t, uint64_t);
int		atomic_cmpset_64_i586(volatile uint64_t *, uint64_t, uint64_t);
uint64_t	atomic_load_acq_64_i386(volatile uint64_t *);
uint64_t	atomic_load_acq_64_i586(volatile uint64_t *);
void		atomic_store_rel_64_i386(volatile uint64_t *, uint64_t);
void		atomic_store_rel_64_i586(volatile uint64_t *, uint64_t);
uint64_t	atomic_swap_64_i386(volatile uint64_t *, uint64_t);
uint64_t	atomic_swap_64_i586(volatile uint64_t *, uint64_t);
#endif

/* I486 does not support SMP or CMPXCHG8B. */
static __inline int
atomic_cmpset_64_i386(volatile uint64_t *dst, uint64_t expect, uint64_t src)
{
	volatile uint32_t *p;
	u_char res;

	p = (volatile uint32_t *)dst;
	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	xorl	%1,%%eax ;	"
	"	xorl	%2,%%edx ;	"
	"	orl	%%edx,%%eax ;	"
	"	jne	1f ;		"
	"	movl	%4,%1 ;		"
	"	movl	%5,%2 ;		"
	"1:				"
	"	sete	%3 ;		"
	"	popfl"
	: "+A" (expect),		/* 0 */
	  "+m" (*p),			/* 1 */
	  "+m" (*(p + 1)),		/* 2 */
	  "=q" (res)			/* 3 */
	: "r" ((uint32_t)src),		/* 4 */
	  "r" ((uint32_t)(src >> 32))	/* 5 */
	: "memory", "cc");
	return (res);
}

static __inline uint64_t
atomic_load_acq_64_i386(volatile uint64_t *p)
{
	volatile uint32_t *q;
	uint64_t res;

	q = (volatile uint32_t *)p;
	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	movl	%1,%%eax ;	"
	"	movl	%2,%%edx ;	"
	"	popfl"
	: "=&A" (res)			/* 0 */
	: "m" (*q),			/* 1 */
	  "m" (*(q + 1))		/* 2 */
	: "memory");
	return (res);
}

static __inline void
atomic_store_rel_64_i386(volatile uint64_t *p, uint64_t v)
{
	volatile uint32_t *q;

	q = (volatile uint32_t *)p;
	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	movl	%%eax,%0 ;	"
	"	movl	%%edx,%1 ;	"
	"	popfl"
	: "=m" (*q),			/* 0 */
	  "=m" (*(q + 1))		/* 1 */
	: "A" (v)			/* 2 */
	: "memory");
}

static __inline uint64_t
atomic_swap_64_i386(volatile uint64_t *p, uint64_t v)
{
	volatile uint32_t *q;
	uint64_t res;

	q = (volatile uint32_t *)p;
	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	movl	%1,%%eax ;	"
	"	movl	%2,%%edx ;	"
	"	movl	%4,%2 ;		"
	"	movl	%3,%1 ;		"
	"	popfl"
	: "=&A" (res),			/* 0 */
	  "+m" (*q),			/* 1 */
	  "+m" (*(q + 1))		/* 2 */
	: "r" ((uint32_t)v),		/* 3 */
	  "r" ((uint32_t)(v >> 32)));	/* 4 */
	return (res);
}

static __inline int
atomic_cmpset_64_i586(volatile uint64_t *dst, uint64_t expect, uint64_t src)
{
	u_char res;

	__asm __volatile(
	"	" MPLOCKED "		"
	"	cmpxchg8b %1 ;		"
	"	sete	%0"
	: "=q" (res),			/* 0 */
	  "+m" (*dst),			/* 1 */
	  "+A" (expect)			/* 2 */
	: "b" ((uint32_t)src),		/* 3 */
	  "c" ((uint32_t)(src >> 32))	/* 4 */
	: "memory", "cc");
	return (res);
}

static __inline uint64_t
atomic_load_acq_64_i586(volatile uint64_t *p)
{
	uint64_t res;

	__asm __volatile(
	"	movl	%%ebx,%%eax ;	"
	"	movl	%%ecx,%%edx ;	"
	"	" MPLOCKED "		"
	"	cmpxchg8b %1"
	: "=&A" (res),			/* 0 */
	  "+m" (*p)			/* 1 */
	: : "memory", "cc");
	return (res);
}

static __inline void
atomic_store_rel_64_i586(volatile uint64_t *p, uint64_t v)
{

	__asm __volatile(
	"	movl	%%eax,%%ebx ;	"
	"	movl	%%edx,%%ecx ;	"
	"1:				"
	"	" MPLOCKED "		"
	"	cmpxchg8b %0 ;		"
	"	jne	1b"
	: "+m" (*p),			/* 0 */
	  "+A" (v)			/* 1 */
	: : "ebx", "ecx", "memory", "cc");
}

static __inline uint64_t
atomic_swap_64_i586(volatile uint64_t *p, uint64_t v)
{

	__asm __volatile(
	"	movl	%%eax,%%ebx ;	"
	"	movl	%%edx,%%ecx ;	"
	"1:				"
	"	" MPLOCKED "		"
	"	cmpxchg8b %0 ;		"
	"	jne	1b"
	: "+m" (*p),			/* 0 */
	  "+A" (v)			/* 1 */
	: : "ebx", "ecx", "memory", "cc");
	return (v);
}

static __inline int
atomic_cmpset_64(volatile uint64_t *dst, uint64_t expect, uint64_t src)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		return (atomic_cmpset_64_i386(dst, expect, src));
	else
		return (atomic_cmpset_64_i586(dst, expect, src));
}

static __inline uint64_t
atomic_load_acq_64(volatile uint64_t *p)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		return (atomic_load_acq_64_i386(p));
	else
		return (atomic_load_acq_64_i586(p));
}

static __inline void
atomic_store_rel_64(volatile uint64_t *p, uint64_t v)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		atomic_store_rel_64_i386(p, v);
	else
		atomic_store_rel_64_i586(p, v);
}

static __inline uint64_t
atomic_swap_64(volatile uint64_t *p, uint64_t v)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		return (atomic_swap_64_i386(p, v));
	else
		return (atomic_swap_64_i586(p, v));
}

static __inline uint64_t
atomic_fetchadd_64(volatile uint64_t *p, uint64_t v)
{

	for (;;) {
		uint64_t t = *p;
		if (atomic_cmpset_64(p, t, t + v))
			return (t);
	}
}
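
/*
 * Illustrative sketch, not part of this header: a 64-bit statistics
 * counter updated with the compare-and-swap loop above.  "bytes_in"
 * and "len" are hypothetical and exist only for this example.
 *
 *	static volatile uint64_t bytes_in;
 *
 *	(void)atomic_fetchadd_64(&bytes_in, (uint64_t)len);
 */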

#endif /* _KERNEL */

#endif /* KLD_MODULE || !__GNUCLIKE_ASM */

ATOMIC_ASM(set,	     char,  "orb %b1,%0",  "iq",  v);
ATOMIC_ASM(clear,    char,  "andb %b1,%0", "iq", ~v);
ATOMIC_ASM(add,	     char,  "addb %b1,%0", "iq",  v);
ATOMIC_ASM(subtract, char,  "subb %b1,%0", "iq",  v);

ATOMIC_ASM(set,	     short, "orw %w1,%0",  "ir",  v);
ATOMIC_ASM(clear,    short, "andw %w1,%0", "ir", ~v);
ATOMIC_ASM(add,	     short, "addw %w1,%0", "ir",  v);
ATOMIC_ASM(subtract, short, "subw %w1,%0", "ir",  v);

ATOMIC_ASM(set,	     int,   "orl %1,%0",   "ir",  v);
ATOMIC_ASM(clear,    int,   "andl %1,%0",  "ir", ~v);
ATOMIC_ASM(add,	     int,   "addl %1,%0",  "ir",  v);
ATOMIC_ASM(subtract, int,   "subl %1,%0",  "ir",  v);

ATOMIC_ASM(set,	     long,  "orl %1,%0",   "ir",  v);
ATOMIC_ASM(clear,    long,  "andl %1,%0",  "ir", ~v);
ATOMIC_ASM(add,	     long,  "addl %1,%0",  "ir",  v);
ATOMIC_ASM(subtract, long,  "subl %1,%0",  "ir",  v);

#define	ATOMIC_LOADSTORE(TYPE)				\
	ATOMIC_LOAD(TYPE);				\
	ATOMIC_STORE(TYPE)

ATOMIC_LOADSTORE(char);
ATOMIC_LOADSTORE(short);
ATOMIC_LOADSTORE(int);
ATOMIC_LOADSTORE(long);

#undef ATOMIC_ASM
#undef ATOMIC_LOAD
#undef ATOMIC_STORE
#undef ATOMIC_LOADSTORE

#ifndef WANT_FUNCTIONS

static __inline int
atomic_cmpset_long(volatile u_long *dst, u_long expect, u_long src)
{

	return (atomic_cmpset_int((volatile u_int *)dst, (u_int)expect,
	    (u_int)src));
}

static __inline u_long
atomic_fetchadd_long(volatile u_long *p, u_long v)
{

	return (atomic_fetchadd_int((volatile u_int *)p, (u_int)v));
}

static __inline int
atomic_testandset_long(volatile u_long *p, u_int v)
{

	return (atomic_testandset_int((volatile u_int *)p, v));
}

static __inline int
atomic_testandclear_long(volatile u_long *p, u_int v)
{

	return (atomic_testandclear_int((volatile u_int *)p, v));
}

/* Read the current value and store a new value in the destination. */
#ifdef __GNUCLIKE_ASM

static __inline u_int
atomic_swap_int(volatile u_int *p, u_int v)
{

	__asm __volatile(
	"	xchgl	%1,%0 ;		"
	"# atomic_swap_int"
	: "+r" (v),			/* 0 */
	  "+m" (*p));			/* 1 */
	return (v);
}

static __inline u_long
atomic_swap_long(volatile u_long *p, u_long v)
{

	return (atomic_swap_int((volatile u_int *)p, (u_int)v));
}

#else /* !__GNUCLIKE_ASM */

u_int	atomic_swap_int(volatile u_int *p, u_int v);
u_long	atomic_swap_long(volatile u_long *p, u_long v);

#endif /* __GNUCLIKE_ASM */

#define	atomic_set_acq_char		atomic_set_barr_char
#define	atomic_set_rel_char		atomic_set_barr_char
#define	atomic_clear_acq_char		atomic_clear_barr_char
#define	atomic_clear_rel_char		atomic_clear_barr_char
#define	atomic_add_acq_char		atomic_add_barr_char
#define	atomic_add_rel_char		atomic_add_barr_char
#define	atomic_subtract_acq_char	atomic_subtract_barr_char
#define	atomic_subtract_rel_char	atomic_subtract_barr_char

#define	atomic_set_acq_short		atomic_set_barr_short
#define	atomic_set_rel_short		atomic_set_barr_short
#define	atomic_clear_acq_short		atomic_clear_barr_short
#define	atomic_clear_rel_short		atomic_clear_barr_short
#define	atomic_add_acq_short		atomic_add_barr_short
#define	atomic_add_rel_short		atomic_add_barr_short
#define	atomic_subtract_acq_short	atomic_subtract_barr_short
#define	atomic_subtract_rel_short	atomic_subtract_barr_short

#define	atomic_set_acq_int		atomic_set_barr_int
#define	atomic_set_rel_int		atomic_set_barr_int
#define	atomic_clear_acq_int		atomic_clear_barr_int
#define	atomic_clear_rel_int		atomic_clear_barr_int
#define	atomic_add_acq_int		atomic_add_barr_int
#define	atomic_add_rel_int		atomic_add_barr_int
#define	atomic_subtract_acq_int		atomic_subtract_barr_int
#define	atomic_subtract_rel_int		atomic_subtract_barr_int
#define	atomic_cmpset_acq_int		atomic_cmpset_int
#define	atomic_cmpset_rel_int		atomic_cmpset_int
#define	atomic_fcmpset_acq_int		atomic_fcmpset_int
#define	atomic_fcmpset_rel_int		atomic_fcmpset_int

#define	atomic_set_acq_long		atomic_set_barr_long
#define	atomic_set_rel_long		atomic_set_barr_long
#define	atomic_clear_acq_long		atomic_clear_barr_long
#define	atomic_clear_rel_long		atomic_clear_barr_long
#define	atomic_add_acq_long		atomic_add_barr_long
#define	atomic_add_rel_long		atomic_add_barr_long
#define	atomic_subtract_acq_long	atomic_subtract_barr_long
#define	atomic_subtract_rel_long	atomic_subtract_barr_long
#define	atomic_cmpset_acq_long		atomic_cmpset_long
#define	atomic_cmpset_rel_long		atomic_cmpset_long
#define	atomic_fcmpset_acq_long		atomic_fcmpset_long
#define	atomic_fcmpset_rel_long		atomic_fcmpset_long

#define	atomic_readandclear_int(p)	atomic_swap_int(p, 0)
#define	atomic_readandclear_long(p)	atomic_swap_long(p, 0)

/* Operations on 8-bit bytes. */
#define	atomic_set_8		atomic_set_char
#define	atomic_set_acq_8	atomic_set_acq_char
#define	atomic_set_rel_8	atomic_set_rel_char
#define	atomic_clear_8		atomic_clear_char
#define	atomic_clear_acq_8	atomic_clear_acq_char
#define	atomic_clear_rel_8	atomic_clear_rel_char
#define	atomic_add_8		atomic_add_char
#define	atomic_add_acq_8	atomic_add_acq_char
#define	atomic_add_rel_8	atomic_add_rel_char
#define	atomic_subtract_8	atomic_subtract_char
#define	atomic_subtract_acq_8	atomic_subtract_acq_char
#define	atomic_subtract_rel_8	atomic_subtract_rel_char
#define	atomic_load_acq_8	atomic_load_acq_char
#define	atomic_store_rel_8	atomic_store_rel_char

/* Operations on 16-bit words. */
#define	atomic_set_16		atomic_set_short
#define	atomic_set_acq_16	atomic_set_acq_short
#define	atomic_set_rel_16	atomic_set_rel_short
#define	atomic_clear_16		atomic_clear_short
#define	atomic_clear_acq_16	atomic_clear_acq_short
#define	atomic_clear_rel_16	atomic_clear_rel_short
#define	atomic_add_16		atomic_add_short
#define	atomic_add_acq_16	atomic_add_acq_short
#define	atomic_add_rel_16	atomic_add_rel_short
#define	atomic_subtract_16	atomic_subtract_short
#define	atomic_subtract_acq_16	atomic_subtract_acq_short
#define	atomic_subtract_rel_16	atomic_subtract_rel_short
#define	atomic_load_acq_16	atomic_load_acq_short
#define	atomic_store_rel_16	atomic_store_rel_short

/* Operations on 32-bit double words. */
#define	atomic_set_32		atomic_set_int
#define	atomic_set_acq_32	atomic_set_acq_int
#define	atomic_set_rel_32	atomic_set_rel_int
#define	atomic_clear_32		atomic_clear_int
#define	atomic_clear_acq_32	atomic_clear_acq_int
#define	atomic_clear_rel_32	atomic_clear_rel_int
#define	atomic_add_32		atomic_add_int
#define	atomic_add_acq_32	atomic_add_acq_int
#define	atomic_add_rel_32	atomic_add_rel_int
#define	atomic_subtract_32	atomic_subtract_int
#define	atomic_subtract_acq_32	atomic_subtract_acq_int
#define	atomic_subtract_rel_32	atomic_subtract_rel_int
#define	atomic_load_acq_32	atomic_load_acq_int
#define	atomic_store_rel_32	atomic_store_rel_int
#define	atomic_cmpset_32	atomic_cmpset_int
#define	atomic_cmpset_acq_32	atomic_cmpset_acq_int
#define	atomic_cmpset_rel_32	atomic_cmpset_rel_int
#define	atomic_fcmpset_32	atomic_fcmpset_int
#define	atomic_fcmpset_acq_32	atomic_fcmpset_acq_int
#define	atomic_fcmpset_rel_32	atomic_fcmpset_rel_int
#define	atomic_swap_32		atomic_swap_int
#define	atomic_readandclear_32	atomic_readandclear_int
#define	atomic_fetchadd_32	atomic_fetchadd_int
#define	atomic_testandset_32	atomic_testandset_int
#define	atomic_testandclear_32	atomic_testandclear_int

/* Operations on pointers. */
#define	atomic_set_ptr(p, v) \
	atomic_set_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_set_acq_ptr(p, v) \
	atomic_set_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_set_rel_ptr(p, v) \
	atomic_set_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_ptr(p, v) \
	atomic_clear_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_acq_ptr(p, v) \
	atomic_clear_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_rel_ptr(p, v) \
	atomic_clear_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_ptr(p, v) \
	atomic_add_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_acq_ptr(p, v) \
	atomic_add_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_rel_ptr(p, v) \
	atomic_add_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_ptr(p, v) \
	atomic_subtract_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_acq_ptr(p, v) \
	atomic_subtract_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_rel_ptr(p, v) \
	atomic_subtract_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_load_acq_ptr(p) \
	atomic_load_acq_int((volatile u_int *)(p))
#define	atomic_store_rel_ptr(p, v) \
	atomic_store_rel_int((volatile u_int *)(p), (v))
#define	atomic_cmpset_ptr(dst, old, new) \
	atomic_cmpset_int((volatile u_int *)(dst), (u_int)(old), (u_int)(new))
#define	atomic_cmpset_acq_ptr(dst, old, new) \
	atomic_cmpset_acq_int((volatile u_int *)(dst), (u_int)(old), \
	    (u_int)(new))
#define	atomic_cmpset_rel_ptr(dst, old, new) \
	atomic_cmpset_rel_int((volatile u_int *)(dst), (u_int)(old), \
	    (u_int)(new))
#define	atomic_fcmpset_ptr(dst, old, new) \
	atomic_fcmpset_int((volatile u_int *)(dst), (u_int *)(old), (u_int)(new))
#define	atomic_fcmpset_acq_ptr(dst, old, new) \
	atomic_fcmpset_acq_int((volatile u_int *)(dst), (u_int *)(old), \
	    (u_int)(new))
#define	atomic_fcmpset_rel_ptr(dst, old, new) \
	atomic_fcmpset_rel_int((volatile u_int *)(dst), (u_int *)(old), \
	    (u_int)(new))
#define	atomic_swap_ptr(p, v) \
	atomic_swap_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_readandclear_ptr(p) \
	atomic_readandclear_int((volatile u_int *)(p))

#endif /* !WANT_FUNCTIONS */

#if defined(_KERNEL)
#define	mb()	__mbk()
#define	wmb()	__mbk()
#define	rmb()	__mbk()
#else
#define	mb()	__mbu()
#define	wmb()	__mbu()
#define	rmb()	__mbu()
#endif

#endif /* !_MACHINE_ATOMIC_H_ */