/*-
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD: stable/11/sys/i386/include/atomic.h 338588 2018-09-11 15:56:06Z hselasky $
 */
#ifndef _MACHINE_ATOMIC_H_
#define	_MACHINE_ATOMIC_H_

#ifndef _SYS_CDEFS_H_
#error this file needs sys/cdefs.h as a prerequisite
#endif

#include <sys/atomic_common.h>

#ifdef _KERNEL
#include <machine/md_var.h>
#include <machine/specialreg.h>
#endif

#ifndef __OFFSETOF_MONITORBUF
/*
 * __OFFSETOF_MONITORBUF == __pcpu_offset(pc_monitorbuf).
 *
 * The open-coded number is used instead of the symbolic expression to
 * avoid a dependency on sys/pcpu.h in machine/atomic.h consumers.
 * An assertion in i386/vm_machdep.c ensures that the value is correct.
 */
#define	__OFFSETOF_MONITORBUF	0x180

static __inline void
__mbk(void)
{

	__asm __volatile("lock; addl $0,%%fs:%0"
	    : "+m" (*(u_int *)__OFFSETOF_MONITORBUF) : : "memory", "cc");
}

static __inline void
__mbu(void)
{

	__asm __volatile("lock; addl $0,(%%esp)" : : : "memory", "cc");
}
#endif

/*
 * Various simple operations on memory, each of which is atomic in the
 * presence of interrupts and multiple processors.
 *
 * atomic_set_char(P, V)	(*(u_char *)(P) |= (V))
 * atomic_clear_char(P, V)	(*(u_char *)(P) &= ~(V))
 * atomic_add_char(P, V)	(*(u_char *)(P) += (V))
 * atomic_subtract_char(P, V)	(*(u_char *)(P) -= (V))
 *
 * atomic_set_short(P, V)	(*(u_short *)(P) |= (V))
 * atomic_clear_short(P, V)	(*(u_short *)(P) &= ~(V))
 * atomic_add_short(P, V)	(*(u_short *)(P) += (V))
 * atomic_subtract_short(P, V)	(*(u_short *)(P) -= (V))
 *
 * atomic_set_int(P, V)		(*(u_int *)(P) |= (V))
 * atomic_clear_int(P, V)	(*(u_int *)(P) &= ~(V))
 * atomic_add_int(P, V)		(*(u_int *)(P) += (V))
 * atomic_subtract_int(P, V)	(*(u_int *)(P) -= (V))
 * atomic_swap_int(P, V)	(return (*(u_int *)(P)); *(u_int *)(P) = (V);)
 * atomic_readandclear_int(P)	(return (*(u_int *)(P)); *(u_int *)(P) = 0;)
 *
 * atomic_set_long(P, V)	(*(u_long *)(P) |= (V))
 * atomic_clear_long(P, V)	(*(u_long *)(P) &= ~(V))
 * atomic_add_long(P, V)	(*(u_long *)(P) += (V))
 * atomic_subtract_long(P, V)	(*(u_long *)(P) -= (V))
 * atomic_swap_long(P, V)	(return (*(u_long *)(P)); *(u_long *)(P) = (V);)
 * atomic_readandclear_long(P)	(return (*(u_long *)(P)); *(u_long *)(P) = 0;)
 */

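/*
 * Illustrative sketch, not part of this header: a typical use of the
 * plain operations above is manipulating a flags word shared with
 * interrupt handlers or other CPUs.  The names below are made up for
 * the example.
 *
 *	#define	MYDEV_BUSY	0x01
 *
 *	static u_int mydev_flags;
 *
 *	atomic_set_int(&mydev_flags, MYDEV_BUSY);	// *p |= V, atomically
 *	atomic_clear_int(&mydev_flags, MYDEV_BUSY);	// *p &= ~V, atomically
 */
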
/*
 * The above functions are expanded inline in the statically-linked
 * kernel.  Lock prefixes are generated if an SMP kernel is being
 * built.
 *
 * Kernel modules call real functions which are built into the kernel.
 * This allows kernel modules to be portable between UP and SMP systems.
 */
#if defined(KLD_MODULE) || !defined(__GNUCLIKE_ASM)
#define	ATOMIC_ASM(NAME, TYPE, OP, CONS, V)			\
void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v);	\
void atomic_##NAME##_barr_##TYPE(volatile u_##TYPE *p, u_##TYPE v)

int	atomic_cmpset_char(volatile u_char *dst, u_char expect, u_char src);
int	atomic_cmpset_short(volatile u_short *dst, u_short expect, u_short src);
int	atomic_cmpset_int(volatile u_int *dst, u_int expect, u_int src);
int	atomic_fcmpset_char(volatile u_char *dst, u_char *expect, u_char src);
int	atomic_fcmpset_short(volatile u_short *dst, u_short *expect,
	    u_short src);
int	atomic_fcmpset_int(volatile u_int *dst, u_int *expect, u_int src);
u_int	atomic_fetchadd_int(volatile u_int *p, u_int v);
int	atomic_testandset_int(volatile u_int *p, u_int v);
int	atomic_testandclear_int(volatile u_int *p, u_int v);
void	atomic_thread_fence_acq(void);
void	atomic_thread_fence_acq_rel(void);
void	atomic_thread_fence_rel(void);
void	atomic_thread_fence_seq_cst(void);

#define	ATOMIC_LOAD(TYPE)					\
u_##TYPE	atomic_load_acq_##TYPE(volatile u_##TYPE *p)
#define	ATOMIC_STORE(TYPE)					\
void		atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)

int		atomic_cmpset_64(volatile uint64_t *, uint64_t, uint64_t);
int		atomic_fcmpset_64(volatile uint64_t *, uint64_t *, uint64_t);
uint64_t	atomic_load_acq_64(volatile uint64_t *);
void		atomic_store_rel_64(volatile uint64_t *, uint64_t);
uint64_t	atomic_swap_64(volatile uint64_t *, uint64_t);
uint64_t	atomic_fetchadd_64(volatile uint64_t *, uint64_t);
void		atomic_add_64(volatile uint64_t *, uint64_t);
void		atomic_subtract_64(volatile uint64_t *, uint64_t);

#else /* !KLD_MODULE && __GNUCLIKE_ASM */

/*
 * For userland, always use lock prefixes so that the binaries will run
 * on both SMP and !SMP systems.
 */
#if defined(SMP) || !defined(_KERNEL)
#define	MPLOCKED	"lock ; "
#else
#define	MPLOCKED
#endif

/*
 * The assembly is volatilized to prevent the compiler from removing
 * the code.  GCC aggressively reorders operations, so memory barriers
 * must also clobber memory to keep accesses from being moved across
 * them.
 */
#define	ATOMIC_ASM(NAME, TYPE, OP, CONS, V)		\
static __inline void					\
atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile(MPLOCKED OP			\
	: "+m" (*p)					\
	: CONS (V)					\
	: "cc");					\
}							\
							\
static __inline void					\
atomic_##NAME##_barr_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile(MPLOCKED OP			\
	: "+m" (*p)					\
	: CONS (V)					\
	: "memory", "cc");				\
}							\
struct __hack

/*
 * Atomic compare and set, used by the mutex functions.
 *
 * cmpset:
 *	if (*dst == expect)
 *		*dst = src
 *
 * fcmpset:
 *	if (*dst == *expect)
 *		*dst = src
 *	else
 *		*expect = *dst
 *
 * Returns 0 on failure, non-zero on success.
 */
#define	ATOMIC_CMPSET(TYPE, CONS)			\
static __inline int					\
atomic_cmpset_##TYPE(volatile u_##TYPE *dst, u_##TYPE expect, u_##TYPE src) \
{							\
	u_char res;					\
							\
	__asm __volatile(				\
	"	" MPLOCKED "		"		\
	"	cmpxchg	%3,%1 ;		"		\
	"	sete	%0 ;		"		\
	"# atomic_cmpset_" #TYPE "	"		\
	: "=q" (res),			/* 0 */		\
	  "+m" (*dst),			/* 1 */		\
	  "+a" (expect)			/* 2 */		\
	: CONS (src)			/* 3 */		\
	: "memory", "cc");				\
	return (res);					\
}							\
							\
static __inline int					\
atomic_fcmpset_##TYPE(volatile u_##TYPE *dst, u_##TYPE *expect, u_##TYPE src) \
{							\
	u_char res;					\
							\
	__asm __volatile(				\
	"	" MPLOCKED "		"		\
	"	cmpxchg	%3,%1 ;		"		\
	"	sete	%0 ;		"		\
	"# atomic_fcmpset_" #TYPE "	"		\
	: "=q" (res),			/* 0 */		\
	  "+m" (*dst),			/* 1 */		\
	  "+a" (*expect)		/* 2 */		\
	: CONS (src)			/* 3 */		\
	: "memory", "cc");				\
	return (res);					\
}

ATOMIC_CMPSET(char, "q");
ATOMIC_CMPSET(short, "r");
ATOMIC_CMPSET(int, "r");

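/*
 * Illustrative sketch, not part of this header: fcmpset is convenient
 * in compare-and-swap loops because it reloads the expected value on
 * failure.  For example, an atomic saturating increment might look
 * like this (the function name is made up for the example):
 *
 *	static __inline void
 *	sat_inc(volatile u_int *p)
 *	{
 *		u_int old;
 *
 *		old = *p;
 *		do {
 *			if (old == UINT_MAX)
 *				return;		// already saturated
 *		} while (!atomic_fcmpset_int(p, &old, old + 1));
 *	}
 */
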
/*
 * Atomically add the value of v to the integer pointed to by p and return
 * the previous value of *p.
 */
static __inline u_int
atomic_fetchadd_int(volatile u_int *p, u_int v)
{

	__asm __volatile(
	"	" MPLOCKED "		"
	"	xaddl	%0,%1 ;		"
	"# atomic_fetchadd_int"
	: "+r" (v),			/* 0 */
	  "+m" (*p)			/* 1 */
	: : "cc");
	return (v);
}

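/*
 * Illustrative sketch, not part of this header: because fetchadd
 * returns the previous value, it can hand out unique, monotonically
 * increasing tickets.  The names below are made up for the example.
 *
 *	static u_int next_id;
 *
 *	static __inline u_int
 *	alloc_id(void)
 *	{
 *
 *		return (atomic_fetchadd_int(&next_id, 1));
 *	}
 */
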
static __inline int
atomic_testandset_int(volatile u_int *p, u_int v)
{
	u_char res;

	__asm __volatile(
	"	" MPLOCKED "		"
	"	btsl	%2,%1 ;		"
	"	setc	%0 ;		"
	"# atomic_testandset_int"
	: "=q" (res),			/* 0 */
	  "+m" (*p)			/* 1 */
	: "Ir" (v & 0x1f)		/* 2 */
	: "cc");
	return (res);
}

static __inline int
atomic_testandclear_int(volatile u_int *p, u_int v)
{
	u_char res;

	__asm __volatile(
	"	" MPLOCKED "		"
	"	btrl	%2,%1 ;		"
	"	setc	%0 ;		"
	"# atomic_testandclear_int"
	: "=q" (res),			/* 0 */
	  "+m" (*p)			/* 1 */
	: "Ir" (v & 0x1f)		/* 2 */
	: "cc");
	return (res);
}

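/*
 * Illustrative sketch, not part of this header: testandset returns the
 * old value of the bit, so one bit of a word can serve as a crude
 * try-lock.  The names below are made up for the example.
 *
 *	static u_int lock_word;
 *
 *	if (atomic_testandset_int(&lock_word, 0) == 0) {
 *		// Bit was clear, so this caller now "owns" the lock.
 *		do_work();
 *		atomic_testandclear_int(&lock_word, 0);
 *	}
 */
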
/*
 * We assume that a = b will do atomic loads and stores.  Due to the
 * IA32 memory model, a simple store guarantees release semantics.
 *
 * However, a load may pass a store if they are performed on distinct
 * addresses, so we need a Store/Load barrier for sequentially
 * consistent fences in SMP kernels.  We use "lock addl $0,mem" for a
 * Store/Load barrier, as recommended by the AMD Software Optimization
 * Guide, and not mfence.  In the kernel, we use a private per-cpu
 * cache line for "mem", to avoid introducing false data
 * dependencies.  In user space, we use the word at the top of the
 * stack.
 *
 * For UP kernels, however, the memory of the single processor is
 * always consistent, so we only need to stop the compiler from
 * reordering accesses in a way that violates the semantics of acquire
 * and release.
 */

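/*
 * Illustrative sketch, not part of this header: the Store/Load case
 * matters for flag-based mutual exclusion (Dekker-style).  With the
 * made-up flags below, both threads may load 0 unless each issues a
 * Store/Load barrier between its store and its load:
 *
 *	// Thread 1			// Thread 2
 *	me1 = 1;			me2 = 1;
 *	atomic_thread_fence_seq_cst();	atomic_thread_fence_seq_cst();
 *	if (me2 == 0)			if (me1 == 0)
 *		// enter critical		// enter critical
 */
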
#if defined(_KERNEL)
#if defined(SMP)
#define	__storeload_barrier()	__mbk()
#else /* _KERNEL && UP */
#define	__storeload_barrier()	__compiler_membar()
#endif /* SMP */
#else /* !_KERNEL */
#define	__storeload_barrier()	__mbu()
#endif /* _KERNEL */

#define	ATOMIC_LOAD(TYPE)					\
static __inline u_##TYPE					\
atomic_load_acq_##TYPE(volatile u_##TYPE *p)			\
{								\
	u_##TYPE res;						\
								\
	res = *p;						\
	__compiler_membar();					\
	return (res);						\
}								\
struct __hack

#define	ATOMIC_STORE(TYPE)					\
static __inline void						\
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)	\
{								\
								\
	__compiler_membar();					\
	*p = v;							\
}								\
struct __hack

static __inline void
atomic_thread_fence_acq(void)
{

	__compiler_membar();
}

static __inline void
atomic_thread_fence_rel(void)
{

	__compiler_membar();
}

static __inline void
atomic_thread_fence_acq_rel(void)
{

	__compiler_membar();
}

static __inline void
atomic_thread_fence_seq_cst(void)
{

	__storeload_barrier();
}

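/*
 * Illustrative sketch, not part of this header: the usual
 * producer/consumer pairing of release stores with acquire loads.
 * The variables and helpers are made up for the example.
 *
 *	// Producer:
 *	data = compute();			// made-up helper
 *	atomic_store_rel_int(&ready, 1);	// publish data
 *
 *	// Consumer:
 *	while (atomic_load_acq_int(&ready) == 0)
 *		cpu_spinwait();
 *	use(data);				// made-up helper
 */
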
#ifdef _KERNEL

#ifdef WANT_FUNCTIONS
int		atomic_cmpset_64_i386(volatile uint64_t *, uint64_t, uint64_t);
int		atomic_cmpset_64_i586(volatile uint64_t *, uint64_t, uint64_t);
int		atomic_fcmpset_64_i386(volatile uint64_t *, uint64_t *, uint64_t);
int		atomic_fcmpset_64_i586(volatile uint64_t *, uint64_t *, uint64_t);
uint64_t	atomic_load_acq_64_i386(volatile uint64_t *);
uint64_t	atomic_load_acq_64_i586(volatile uint64_t *);
void		atomic_store_rel_64_i386(volatile uint64_t *, uint64_t);
void		atomic_store_rel_64_i586(volatile uint64_t *, uint64_t);
uint64_t	atomic_swap_64_i386(volatile uint64_t *, uint64_t);
uint64_t	atomic_swap_64_i586(volatile uint64_t *, uint64_t);
#endif

/* I486 does not support SMP or CMPXCHG8B. */
static __inline int
atomic_cmpset_64_i386(volatile uint64_t *dst, uint64_t expect, uint64_t src)
{
	volatile uint32_t *p;
	u_char res;

	p = (volatile uint32_t *)dst;
	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	xorl	%1,%%eax ;	"
	"	xorl	%2,%%edx ;	"
	"	orl	%%edx,%%eax ;	"
	"	jne	1f ;		"
	"	movl	%4,%1 ;		"
	"	movl	%5,%2 ;		"
	"1:				"
	"	sete	%3 ;		"
	"	popfl"
	: "+A" (expect),		/* 0 */
	  "+m" (*p),			/* 1 */
	  "+m" (*(p + 1)),		/* 2 */
	  "=q" (res)			/* 3 */
	: "r" ((uint32_t)src),		/* 4 */
	  "r" ((uint32_t)(src >> 32))	/* 5 */
	: "memory", "cc");
	return (res);
}

static __inline int
atomic_fcmpset_64_i386(volatile uint64_t *dst, uint64_t *expect, uint64_t src)
{

	if (atomic_cmpset_64_i386(dst, *expect, src)) {
		return (1);
	} else {
		*expect = *dst;
		return (0);
	}
}

static __inline uint64_t
atomic_load_acq_64_i386(volatile uint64_t *p)
{
	volatile uint32_t *q;
	uint64_t res;

	q = (volatile uint32_t *)p;
	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	movl	%1,%%eax ;	"
	"	movl	%2,%%edx ;	"
	"	popfl"
	: "=&A" (res)			/* 0 */
	: "m" (*q),			/* 1 */
	  "m" (*(q + 1))		/* 2 */
	: "memory");
	return (res);
}

static __inline void
atomic_store_rel_64_i386(volatile uint64_t *p, uint64_t v)
{
	volatile uint32_t *q;

	q = (volatile uint32_t *)p;
	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	movl	%%eax,%0 ;	"
	"	movl	%%edx,%1 ;	"
	"	popfl"
	: "=m" (*q),			/* 0 */
	  "=m" (*(q + 1))		/* 1 */
	: "A" (v)			/* 2 */
	: "memory");
}

static __inline uint64_t
atomic_swap_64_i386(volatile uint64_t *p, uint64_t v)
{
	volatile uint32_t *q;
	uint64_t res;

	q = (volatile uint32_t *)p;
	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	movl	%1,%%eax ;	"
	"	movl	%2,%%edx ;	"
	"	movl	%4,%2 ;		"
	"	movl	%3,%1 ;		"
	"	popfl"
	: "=&A" (res),			/* 0 */
	  "+m" (*q),			/* 1 */
	  "+m" (*(q + 1))		/* 2 */
	: "r" ((uint32_t)v),		/* 3 */
	  "r" ((uint32_t)(v >> 32)));	/* 4 */
	return (res);
}

static __inline int
atomic_cmpset_64_i586(volatile uint64_t *dst, uint64_t expect, uint64_t src)
{
	u_char res;

	__asm __volatile(
	"	" MPLOCKED "		"
	"	cmpxchg8b %1 ;		"
	"	sete	%0"
	: "=q" (res),			/* 0 */
	  "+m" (*dst),			/* 1 */
	  "+A" (expect)			/* 2 */
	: "b" ((uint32_t)src),		/* 3 */
	  "c" ((uint32_t)(src >> 32))	/* 4 */
	: "memory", "cc");
	return (res);
}

static __inline int
atomic_fcmpset_64_i586(volatile uint64_t *dst, uint64_t *expect, uint64_t src)
{
	u_char res;

	__asm __volatile(
	"	" MPLOCKED "		"
	"	cmpxchg8b %1 ;		"
	"	sete	%0"
	: "=q" (res),			/* 0 */
	  "+m" (*dst),			/* 1 */
	  "+A" (*expect)		/* 2 */
	: "b" ((uint32_t)src),		/* 3 */
	  "c" ((uint32_t)(src >> 32))	/* 4 */
	: "memory", "cc");
	return (res);
}

static __inline uint64_t
atomic_load_acq_64_i586(volatile uint64_t *p)
{
	uint64_t res;

	__asm __volatile(
	"	movl	%%ebx,%%eax ;	"
	"	movl	%%ecx,%%edx ;	"
	"	" MPLOCKED "		"
	"	cmpxchg8b %1"
	: "=&A" (res),			/* 0 */
	  "+m" (*p)			/* 1 */
	: : "memory", "cc");
	return (res);
}

static __inline void
atomic_store_rel_64_i586(volatile uint64_t *p, uint64_t v)
{

	__asm __volatile(
	"	movl	%%eax,%%ebx ;	"
	"	movl	%%edx,%%ecx ;	"
	"1:				"
	"	" MPLOCKED "		"
	"	cmpxchg8b %0 ;		"
	"	jne	1b"
	: "+m" (*p),			/* 0 */
	  "+A" (v)			/* 1 */
	: : "ebx", "ecx", "memory", "cc");
}

static __inline uint64_t
atomic_swap_64_i586(volatile uint64_t *p, uint64_t v)
{

	__asm __volatile(
	"	movl	%%eax,%%ebx ;	"
	"	movl	%%edx,%%ecx ;	"
	"1:				"
	"	" MPLOCKED "		"
	"	cmpxchg8b %0 ;		"
	"	jne	1b"
	: "+m" (*p),			/* 0 */
	  "+A" (v)			/* 1 */
	: : "ebx", "ecx", "memory", "cc");
	return (v);
}

static __inline int
atomic_cmpset_64(volatile uint64_t *dst, uint64_t expect, uint64_t src)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		return (atomic_cmpset_64_i386(dst, expect, src));
	else
		return (atomic_cmpset_64_i586(dst, expect, src));
}

static __inline int
atomic_fcmpset_64(volatile uint64_t *dst, uint64_t *expect, uint64_t src)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		return (atomic_fcmpset_64_i386(dst, expect, src));
	else
		return (atomic_fcmpset_64_i586(dst, expect, src));
}

static __inline uint64_t
atomic_load_acq_64(volatile uint64_t *p)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		return (atomic_load_acq_64_i386(p));
	else
		return (atomic_load_acq_64_i586(p));
}

static __inline void
atomic_store_rel_64(volatile uint64_t *p, uint64_t v)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		atomic_store_rel_64_i386(p, v);
	else
		atomic_store_rel_64_i586(p, v);
}

static __inline uint64_t
atomic_swap_64(volatile uint64_t *p, uint64_t v)
{

	if ((cpu_feature & CPUID_CX8) == 0)
		return (atomic_swap_64_i386(p, v));
	else
		return (atomic_swap_64_i586(p, v));
}

static __inline uint64_t
atomic_fetchadd_64(volatile uint64_t *p, uint64_t v)
{

	for (;;) {
		uint64_t t = *p;
		if (atomic_cmpset_64(p, t, t + v))
			return (t);
	}
}

static __inline void
atomic_add_64(volatile uint64_t *p, uint64_t v)
{
	uint64_t t;

	for (;;) {
		t = *p;
		if (atomic_cmpset_64(p, t, t + v))
			break;
	}
}

static __inline void
atomic_subtract_64(volatile uint64_t *p, uint64_t v)
{
	uint64_t t;

	for (;;) {
		t = *p;
		if (atomic_cmpset_64(p, t, t - v))
			break;
	}
}

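/*
 * Illustrative sketch, not part of this header: the 64-bit operations
 * let i386 code keep counters that do not wrap quickly, e.g. byte
 * counters in statistics.  The names are made up for the example.
 *
 *	static uint64_t bytes_rx;
 *
 *	atomic_add_64(&bytes_rx, (uint64_t)len);
 */
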
#endif /* _KERNEL */

#endif /* KLD_MODULE || !__GNUCLIKE_ASM */

ATOMIC_ASM(set,	     char,  "orb %b1,%0",  "iq",  v);
ATOMIC_ASM(clear,    char,  "andb %b1,%0", "iq", ~v);
ATOMIC_ASM(add,	     char,  "addb %b1,%0", "iq",  v);
ATOMIC_ASM(subtract, char,  "subb %b1,%0", "iq",  v);

ATOMIC_ASM(set,	     short, "orw %w1,%0",  "ir",  v);
ATOMIC_ASM(clear,    short, "andw %w1,%0", "ir", ~v);
ATOMIC_ASM(add,	     short, "addw %w1,%0", "ir",  v);
ATOMIC_ASM(subtract, short, "subw %w1,%0", "ir",  v);

ATOMIC_ASM(set,	     int,   "orl %1,%0",   "ir",  v);
ATOMIC_ASM(clear,    int,   "andl %1,%0",  "ir", ~v);
ATOMIC_ASM(add,	     int,   "addl %1,%0",  "ir",  v);
ATOMIC_ASM(subtract, int,   "subl %1,%0",  "ir",  v);

ATOMIC_ASM(set,	     long,  "orl %1,%0",   "ir",  v);
ATOMIC_ASM(clear,    long,  "andl %1,%0",  "ir", ~v);
ATOMIC_ASM(add,	     long,  "addl %1,%0",  "ir",  v);
ATOMIC_ASM(subtract, long,  "subl %1,%0",  "ir",  v);

#define	ATOMIC_LOADSTORE(TYPE)				\
	ATOMIC_LOAD(TYPE);				\
	ATOMIC_STORE(TYPE)

ATOMIC_LOADSTORE(char);
ATOMIC_LOADSTORE(short);
ATOMIC_LOADSTORE(int);
ATOMIC_LOADSTORE(long);

#undef ATOMIC_ASM
#undef ATOMIC_LOAD
#undef ATOMIC_STORE
#undef ATOMIC_LOADSTORE

#ifndef WANT_FUNCTIONS

static __inline int
atomic_cmpset_long(volatile u_long *dst, u_long expect, u_long src)
{

	return (atomic_cmpset_int((volatile u_int *)dst, (u_int)expect,
	    (u_int)src));
}

static __inline int
atomic_fcmpset_long(volatile u_long *dst, u_long *expect, u_long src)
{

	return (atomic_fcmpset_int((volatile u_int *)dst, (u_int *)expect,
	    (u_int)src));
}

static __inline u_long
atomic_fetchadd_long(volatile u_long *p, u_long v)
{

	return (atomic_fetchadd_int((volatile u_int *)p, (u_int)v));
}

static __inline int
atomic_testandset_long(volatile u_long *p, u_int v)
{

	return (atomic_testandset_int((volatile u_int *)p, v));
}

static __inline int
atomic_testandclear_long(volatile u_long *p, u_int v)
{

	return (atomic_testandclear_int((volatile u_int *)p, v));
}

/* Read the current value and store a new value in the destination. */
#ifdef __GNUCLIKE_ASM

static __inline u_int
atomic_swap_int(volatile u_int *p, u_int v)
{

	__asm __volatile(
	"	xchgl	%1,%0 ;		"
	"# atomic_swap_int"
	: "+r" (v),			/* 0 */
	  "+m" (*p));			/* 1 */
	return (v);
}

static __inline u_long
atomic_swap_long(volatile u_long *p, u_long v)
{

	return (atomic_swap_int((volatile u_int *)p, (u_int)v));
}

#else /* !__GNUCLIKE_ASM */

u_int	atomic_swap_int(volatile u_int *p, u_int v);
u_long	atomic_swap_long(volatile u_long *p, u_long v);

#endif /* __GNUCLIKE_ASM */

#define	atomic_set_acq_char		atomic_set_barr_char
#define	atomic_set_rel_char		atomic_set_barr_char
#define	atomic_clear_acq_char		atomic_clear_barr_char
#define	atomic_clear_rel_char		atomic_clear_barr_char
#define	atomic_add_acq_char		atomic_add_barr_char
#define	atomic_add_rel_char		atomic_add_barr_char
#define	atomic_subtract_acq_char	atomic_subtract_barr_char
#define	atomic_subtract_rel_char	atomic_subtract_barr_char
#define	atomic_cmpset_acq_char		atomic_cmpset_char
#define	atomic_cmpset_rel_char		atomic_cmpset_char
#define	atomic_fcmpset_acq_char		atomic_fcmpset_char
#define	atomic_fcmpset_rel_char		atomic_fcmpset_char

#define	atomic_set_acq_short		atomic_set_barr_short
#define	atomic_set_rel_short		atomic_set_barr_short
#define	atomic_clear_acq_short		atomic_clear_barr_short
#define	atomic_clear_rel_short		atomic_clear_barr_short
#define	atomic_add_acq_short		atomic_add_barr_short
#define	atomic_add_rel_short		atomic_add_barr_short
#define	atomic_subtract_acq_short	atomic_subtract_barr_short
#define	atomic_subtract_rel_short	atomic_subtract_barr_short
#define	atomic_cmpset_acq_short		atomic_cmpset_short
#define	atomic_cmpset_rel_short		atomic_cmpset_short
#define	atomic_fcmpset_acq_short	atomic_fcmpset_short
#define	atomic_fcmpset_rel_short	atomic_fcmpset_short

#define	atomic_set_acq_int		atomic_set_barr_int
#define	atomic_set_rel_int		atomic_set_barr_int
#define	atomic_clear_acq_int		atomic_clear_barr_int
#define	atomic_clear_rel_int		atomic_clear_barr_int
#define	atomic_add_acq_int		atomic_add_barr_int
#define	atomic_add_rel_int		atomic_add_barr_int
#define	atomic_subtract_acq_int		atomic_subtract_barr_int
#define	atomic_subtract_rel_int		atomic_subtract_barr_int
#define	atomic_cmpset_acq_int		atomic_cmpset_int
#define	atomic_cmpset_rel_int		atomic_cmpset_int
#define	atomic_fcmpset_acq_int		atomic_fcmpset_int
#define	atomic_fcmpset_rel_int		atomic_fcmpset_int

#define	atomic_set_acq_long		atomic_set_barr_long
#define	atomic_set_rel_long		atomic_set_barr_long
#define	atomic_clear_acq_long		atomic_clear_barr_long
#define	atomic_clear_rel_long		atomic_clear_barr_long
#define	atomic_add_acq_long		atomic_add_barr_long
#define	atomic_add_rel_long		atomic_add_barr_long
#define	atomic_subtract_acq_long	atomic_subtract_barr_long
#define	atomic_subtract_rel_long	atomic_subtract_barr_long
#define	atomic_cmpset_acq_long		atomic_cmpset_long
#define	atomic_cmpset_rel_long		atomic_cmpset_long
#define	atomic_fcmpset_acq_long		atomic_fcmpset_long
#define	atomic_fcmpset_rel_long		atomic_fcmpset_long

#define	atomic_readandclear_int(p)	atomic_swap_int(p, 0)
#define	atomic_readandclear_long(p)	atomic_swap_long(p, 0)

/* Operations on 8-bit bytes. */
#define	atomic_set_8		atomic_set_char
#define	atomic_set_acq_8	atomic_set_acq_char
#define	atomic_set_rel_8	atomic_set_rel_char
#define	atomic_clear_8		atomic_clear_char
#define	atomic_clear_acq_8	atomic_clear_acq_char
#define	atomic_clear_rel_8	atomic_clear_rel_char
#define	atomic_add_8		atomic_add_char
#define	atomic_add_acq_8	atomic_add_acq_char
#define	atomic_add_rel_8	atomic_add_rel_char
#define	atomic_subtract_8	atomic_subtract_char
#define	atomic_subtract_acq_8	atomic_subtract_acq_char
#define	atomic_subtract_rel_8	atomic_subtract_rel_char
#define	atomic_load_acq_8	atomic_load_acq_char
#define	atomic_store_rel_8	atomic_store_rel_char
#define	atomic_cmpset_8		atomic_cmpset_char
#define	atomic_cmpset_acq_8	atomic_cmpset_acq_char
#define	atomic_cmpset_rel_8	atomic_cmpset_rel_char
#define	atomic_fcmpset_8	atomic_fcmpset_char
#define	atomic_fcmpset_acq_8	atomic_fcmpset_acq_char
#define	atomic_fcmpset_rel_8	atomic_fcmpset_rel_char

/* Operations on 16-bit words. */
#define	atomic_set_16		atomic_set_short
#define	atomic_set_acq_16	atomic_set_acq_short
#define	atomic_set_rel_16	atomic_set_rel_short
#define	atomic_clear_16		atomic_clear_short
#define	atomic_clear_acq_16	atomic_clear_acq_short
#define	atomic_clear_rel_16	atomic_clear_rel_short
#define	atomic_add_16		atomic_add_short
#define	atomic_add_acq_16	atomic_add_acq_short
#define	atomic_add_rel_16	atomic_add_rel_short
#define	atomic_subtract_16	atomic_subtract_short
#define	atomic_subtract_acq_16	atomic_subtract_acq_short
#define	atomic_subtract_rel_16	atomic_subtract_rel_short
#define	atomic_load_acq_16	atomic_load_acq_short
#define	atomic_store_rel_16	atomic_store_rel_short
#define	atomic_cmpset_16	atomic_cmpset_short
#define	atomic_cmpset_acq_16	atomic_cmpset_acq_short
#define	atomic_cmpset_rel_16	atomic_cmpset_rel_short
#define	atomic_fcmpset_16	atomic_fcmpset_short
#define	atomic_fcmpset_acq_16	atomic_fcmpset_acq_short
#define	atomic_fcmpset_rel_16	atomic_fcmpset_rel_short

/* Operations on 32-bit double words. */
#define	atomic_set_32		atomic_set_int
#define	atomic_set_acq_32	atomic_set_acq_int
#define	atomic_set_rel_32	atomic_set_rel_int
#define	atomic_clear_32		atomic_clear_int
#define	atomic_clear_acq_32	atomic_clear_acq_int
#define	atomic_clear_rel_32	atomic_clear_rel_int
#define	atomic_add_32		atomic_add_int
#define	atomic_add_acq_32	atomic_add_acq_int
#define	atomic_add_rel_32	atomic_add_rel_int
#define	atomic_subtract_32	atomic_subtract_int
#define	atomic_subtract_acq_32	atomic_subtract_acq_int
#define	atomic_subtract_rel_32	atomic_subtract_rel_int
#define	atomic_load_acq_32	atomic_load_acq_int
#define	atomic_store_rel_32	atomic_store_rel_int
#define	atomic_cmpset_32	atomic_cmpset_int
#define	atomic_cmpset_acq_32	atomic_cmpset_acq_int
#define	atomic_cmpset_rel_32	atomic_cmpset_rel_int
#define	atomic_fcmpset_32	atomic_fcmpset_int
#define	atomic_fcmpset_acq_32	atomic_fcmpset_acq_int
#define	atomic_fcmpset_rel_32	atomic_fcmpset_rel_int
#define	atomic_swap_32		atomic_swap_int
#define	atomic_readandclear_32	atomic_readandclear_int
#define	atomic_fetchadd_32	atomic_fetchadd_int
#define	atomic_testandset_32	atomic_testandset_int
#define	atomic_testandclear_32	atomic_testandclear_int

/* Operations on 64-bit quad words. */
#define	atomic_cmpset_acq_64 atomic_cmpset_64
#define	atomic_cmpset_rel_64 atomic_cmpset_64
#define	atomic_fcmpset_acq_64 atomic_fcmpset_64
#define	atomic_fcmpset_rel_64 atomic_fcmpset_64
#define	atomic_fetchadd_acq_64	atomic_fetchadd_64
#define	atomic_fetchadd_rel_64	atomic_fetchadd_64
#define	atomic_add_acq_64 atomic_add_64
#define	atomic_add_rel_64 atomic_add_64
#define	atomic_subtract_acq_64 atomic_subtract_64
#define	atomic_subtract_rel_64 atomic_subtract_64

/* Operations on pointers. */
#define	atomic_set_ptr(p, v) \
	atomic_set_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_set_acq_ptr(p, v) \
	atomic_set_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_set_rel_ptr(p, v) \
	atomic_set_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_ptr(p, v) \
	atomic_clear_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_acq_ptr(p, v) \
	atomic_clear_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_rel_ptr(p, v) \
	atomic_clear_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_ptr(p, v) \
	atomic_add_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_acq_ptr(p, v) \
	atomic_add_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_rel_ptr(p, v) \
	atomic_add_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_ptr(p, v) \
	atomic_subtract_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_acq_ptr(p, v) \
	atomic_subtract_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_rel_ptr(p, v) \
	atomic_subtract_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_load_acq_ptr(p) \
	atomic_load_acq_int((volatile u_int *)(p))
#define	atomic_store_rel_ptr(p, v) \
	atomic_store_rel_int((volatile u_int *)(p), (v))
#define	atomic_cmpset_ptr(dst, old, new) \
	atomic_cmpset_int((volatile u_int *)(dst), (u_int)(old), (u_int)(new))
#define	atomic_cmpset_acq_ptr(dst, old, new) \
	atomic_cmpset_acq_int((volatile u_int *)(dst), (u_int)(old), \
	    (u_int)(new))
#define	atomic_cmpset_rel_ptr(dst, old, new) \
	atomic_cmpset_rel_int((volatile u_int *)(dst), (u_int)(old), \
	    (u_int)(new))
#define	atomic_fcmpset_ptr(dst, old, new) \
	atomic_fcmpset_int((volatile u_int *)(dst), (u_int *)(old), (u_int)(new))
#define	atomic_fcmpset_acq_ptr(dst, old, new) \
	atomic_fcmpset_acq_int((volatile u_int *)(dst), (u_int *)(old), \
	    (u_int)(new))
#define	atomic_fcmpset_rel_ptr(dst, old, new) \
	atomic_fcmpset_rel_int((volatile u_int *)(dst), (u_int *)(old), \
	    (u_int)(new))
#define	atomic_swap_ptr(p, v) \
	atomic_swap_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_readandclear_ptr(p) \
	atomic_readandclear_int((volatile u_int *)(p))

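/*
 * Illustrative sketch, not part of this header: atomic_cmpset_ptr is
 * the building block for simple lock-free structures such as a
 * singly-linked, push-only stack.  The types and names are made up
 * for the example; note that this header's macro performs the casts
 * to u_int internally.
 *
 *	struct node { struct node *next; };
 *	static struct node *head;
 *
 *	static __inline void
 *	push(struct node *n)
 *	{
 *		struct node *h;
 *
 *		do {
 *			h = head;
 *			n->next = h;
 *		} while (atomic_cmpset_ptr(&head, h, n) == 0);
 *	}
 */
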
#endif /* !WANT_FUNCTIONS */

#if defined(_KERNEL)
#define	mb()	__mbk()
#define	wmb()	__mbk()
#define	rmb()	__mbk()
#else
#define	mb()	__mbu()
#define	wmb()	__mbu()
#define	rmb()	__mbu()
#endif

#endif /* !_MACHINE_ATOMIC_H_ */
958