/*-
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD: stable/10/sys/amd64/include/atomic.h 337041 2018-08-01 12:49:51Z hselasky $
 */
2838517Sdfr#ifndef _MACHINE_ATOMIC_H_
29147855Sjhb#define	_MACHINE_ATOMIC_H_
3038517Sdfr
31143063Sjoerg#ifndef _SYS_CDEFS_H_
32143063Sjoerg#error this file needs sys/cdefs.h as a prerequisite
33143063Sjoerg#endif
34143063Sjoerg
35197824Sattilio#define	mb()	__asm __volatile("mfence;" : : : "memory")
36197824Sattilio#define	wmb()	__asm __volatile("sfence;" : : : "memory")
37197824Sattilio#define	rmb()	__asm __volatile("lfence;" : : : "memory")
38185162Skmacy
/*
 * Various simple operations on memory, each of which is atomic in the
 * presence of interrupts and multiple processors.
 *
 * atomic_set_char(P, V)	(*(u_char *)(P) |= (V))
 * atomic_clear_char(P, V)	(*(u_char *)(P) &= ~(V))
 * atomic_add_char(P, V)	(*(u_char *)(P) += (V))
 * atomic_subtract_char(P, V)	(*(u_char *)(P) -= (V))
 *
 * atomic_set_short(P, V)	(*(u_short *)(P) |= (V))
 * atomic_clear_short(P, V)	(*(u_short *)(P) &= ~(V))
 * atomic_add_short(P, V)	(*(u_short *)(P) += (V))
 * atomic_subtract_short(P, V)	(*(u_short *)(P) -= (V))
 *
 * atomic_set_int(P, V)		(*(u_int *)(P) |= (V))
 * atomic_clear_int(P, V)	(*(u_int *)(P) &= ~(V))
 * atomic_add_int(P, V)		(*(u_int *)(P) += (V))
 * atomic_subtract_int(P, V)	(*(u_int *)(P) -= (V))
 * atomic_swap_int(P, V)	(return (*(u_int *)(P)); *(u_int *)(P) = (V);)
 * atomic_readandclear_int(P)	(return (*(u_int *)(P)); *(u_int *)(P) = 0;)
 *
 * atomic_set_long(P, V)	(*(u_long *)(P) |= (V))
 * atomic_clear_long(P, V)	(*(u_long *)(P) &= ~(V))
 * atomic_add_long(P, V)	(*(u_long *)(P) += (V))
 * atomic_subtract_long(P, V)	(*(u_long *)(P) -= (V))
 * atomic_swap_long(P, V)	(return (*(u_long *)(P)); *(u_long *)(P) = (V);)
 * atomic_readandclear_long(P)	(return (*(u_long *)(P)); *(u_long *)(P) = 0;)
 */

/*
 * The above functions are expanded inline in the statically-linked
 * kernel.  Lock prefixes are generated if an SMP kernel is being
 * built.
 *
 * Kernel modules call real functions which are built into the kernel.
 * This allows kernel modules to be portable between UP and SMP systems.
 */
76165578Sbde#if defined(KLD_MODULE) || !defined(__GNUCLIKE_ASM)
77147855Sjhb#define	ATOMIC_ASM(NAME, TYPE, OP, CONS, V)			\
78197803Sattiliovoid atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v);	\
79197803Sattiliovoid atomic_##NAME##_barr_##TYPE(volatile u_##TYPE *p, u_##TYPE v)
8049999Salc
81337041Shselaskyint	atomic_cmpset_char(volatile u_char *dst, u_char expect, u_char src);
82337041Shselaskyint	atomic_cmpset_short(volatile u_short *dst, u_short expect, u_short src);
83208332Sphkint	atomic_cmpset_int(volatile u_int *dst, u_int expect, u_int src);
84208332Sphkint	atomic_cmpset_long(volatile u_long *dst, u_long expect, u_long src);
85337041Shselaskyint	atomic_fcmpset_char(volatile u_char *dst, u_char *expect, u_char src);
86337041Shselaskyint	atomic_fcmpset_short(volatile u_short *dst, u_short *expect,
87337041Shselasky	    u_short src);
88337041Shselaskyint	atomic_fcmpset_int(volatile u_int *dst, u_int *expect, u_int src);
89337041Shselaskyint	atomic_fcmpset_long(volatile u_long *dst, u_long *expect, u_long src);
90165633Sbdeu_int	atomic_fetchadd_int(volatile u_int *p, u_int v);
91177276Spjdu_long	atomic_fetchadd_long(volatile u_long *p, u_long v);
92254617Sjkimint	atomic_testandset_int(volatile u_int *p, u_int v);
93254617Sjkimint	atomic_testandset_long(volatile u_long *p, u_int v);
94302108Ssepheint	atomic_testandclear_int(volatile u_int *p, u_int v);
95302108Ssepheint	atomic_testandclear_long(volatile u_long *p, u_int v);
9665514Sphk
97236456Skib#define	ATOMIC_LOAD(TYPE, LOP)					\
98236456Skibu_##TYPE	atomic_load_acq_##TYPE(volatile u_##TYPE *p)
99236456Skib#define	ATOMIC_STORE(TYPE)					\
100100251Smarkmvoid		atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)
10171085Sjhb
102165578Sbde#else /* !KLD_MODULE && __GNUCLIKE_ASM */
10372358Smarkm
10484679Sjhb/*
105165635Sbde * For userland, always use lock prefixes so that the binaries will run
106165635Sbde * on both SMP and !SMP systems.
10784679Sjhb */
108122849Speter#if defined(SMP) || !defined(_KERNEL)
109165630Sbde#define	MPLOCKED	"lock ; "
11090515Sbde#else
111147855Sjhb#define	MPLOCKED
11290515Sbde#endif
11338517Sdfr
11448797Salc/*
115197803Sattilio * The assembly is volatilized to avoid code chunk removal by the compiler.
116197803Sattilio * GCC aggressively reorders operations and memory clobbering is necessary
117197803Sattilio * in order to avoid that for memory barriers.
11848797Salc */
119147855Sjhb#define	ATOMIC_ASM(NAME, TYPE, OP, CONS, V)		\
12048797Salcstatic __inline void					\
12149043Salcatomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
12248797Salc{							\
123165630Sbde	__asm __volatile(MPLOCKED OP			\
124254612Sjkim	: "+m" (*p)					\
125254612Sjkim	: CONS (V)					\
126216524Skib	: "cc");					\
127122940Speter}							\
128197803Sattilio							\
129197803Sattiliostatic __inline void					\
130197803Sattilioatomic_##NAME##_barr_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
131197803Sattilio{							\
132197803Sattilio	__asm __volatile(MPLOCKED OP			\
133254612Sjkim	: "+m" (*p)					\
134254612Sjkim	: CONS (V)					\
135216524Skib	: "memory", "cc");				\
136197803Sattilio}							\
137122940Speterstruct __hack
138100327Smarkm
13965514Sphk/*
140337041Shselasky * Atomic compare and set, used by the mutex functions.
14165514Sphk *
142337041Shselasky * cmpset:
143337041Shselasky *	if (*dst == expect)
144337041Shselasky *		*dst = src
14565514Sphk *
146337041Shselasky * fcmpset:
147337041Shselasky *	if (*dst == *expect)
148337041Shselasky *		*dst = src
149337041Shselasky *	else
150337041Shselasky *		*expect = *dst
151337041Shselasky *
152337041Shselasky * Returns 0 on failure, non-zero on success.
15365514Sphk */
154337041Shselasky#define	ATOMIC_CMPSET(TYPE)				\
155337041Shselaskystatic __inline int					\
156337041Shselaskyatomic_cmpset_##TYPE(volatile u_##TYPE *dst, u_##TYPE expect, u_##TYPE src) \
157337041Shselasky{							\
158337041Shselasky	u_char res;					\
159337041Shselasky							\
160337041Shselasky	__asm __volatile(				\
161337041Shselasky	"	" MPLOCKED "		"		\
162337041Shselasky	"	cmpxchg %3,%1 ;	"			\
163337041Shselasky	"	sete	%0 ;		"		\
164337041Shselasky	"# atomic_cmpset_" #TYPE "	"		\
165337041Shselasky	: "=q" (res),			/* 0 */		\
166337041Shselasky	  "+m" (*dst),			/* 1 */		\
167337041Shselasky	  "+a" (expect)			/* 2 */		\
168337041Shselasky	: "r" (src)			/* 3 */		\
169337041Shselasky	: "memory", "cc");				\
170337041Shselasky	return (res);					\
171337041Shselasky}							\
172337041Shselasky							\
173337041Shselaskystatic __inline int					\
174337041Shselaskyatomic_fcmpset_##TYPE(volatile u_##TYPE *dst, u_##TYPE *expect, u_##TYPE src) \
175337041Shselasky{							\
176337041Shselasky	u_char res;					\
177337041Shselasky							\
178337041Shselasky	__asm __volatile(				\
179337041Shselasky	"	" MPLOCKED "		"		\
180337041Shselasky	"	cmpxchg %3,%1 ;		"		\
181337041Shselasky	"	sete	%0 ;		"		\
182337041Shselasky	"# atomic_fcmpset_" #TYPE "	"		\
183337041Shselasky	: "=q" (res),			/* 0 */		\
184337041Shselasky	  "+m" (*dst),			/* 1 */		\
185337041Shselasky	  "+a" (*expect)		/* 2 */		\
186337041Shselasky	: "r" (src)			/* 3 */		\
187337041Shselasky	: "memory", "cc");				\
188337041Shselasky	return (res);					\
189197910Sattilio}
190197910Sattilio
191337041ShselaskyATOMIC_CMPSET(char);
192337041ShselaskyATOMIC_CMPSET(short);
193337041ShselaskyATOMIC_CMPSET(int);
194337041ShselaskyATOMIC_CMPSET(long);
195197910Sattilio
196150627Sjhb/*
197150627Sjhb * Atomically add the value of v to the integer pointed to by p and return
198150627Sjhb * the previous value of *p.
199150627Sjhb */
200150627Sjhbstatic __inline u_int
201150627Sjhbatomic_fetchadd_int(volatile u_int *p, u_int v)
202150627Sjhb{
203150627Sjhb
204165633Sbde	__asm __volatile(
205165630Sbde	"	" MPLOCKED "		"
206254610Sjkim	"	xaddl	%0,%1 ;		"
207150627Sjhb	"# atomic_fetchadd_int"
208254610Sjkim	: "+r" (v),			/* 0 */
209254612Sjkim	  "+m" (*p)			/* 1 */
210254612Sjkim	: : "cc");
211150627Sjhb	return (v);
212150627Sjhb}
213150627Sjhb
214177276Spjd/*
215177276Spjd * Atomically add the value of v to the long integer pointed to by p and return
216177276Spjd * the previous value of *p.
217177276Spjd */
218177276Spjdstatic __inline u_long
219177276Spjdatomic_fetchadd_long(volatile u_long *p, u_long v)
220177276Spjd{
221177276Spjd
222177276Spjd	__asm __volatile(
223177276Spjd	"	" MPLOCKED "		"
224254610Sjkim	"	xaddq	%0,%1 ;		"
225177276Spjd	"# atomic_fetchadd_long"
226254610Sjkim	: "+r" (v),			/* 0 */
227254612Sjkim	  "+m" (*p)			/* 1 */
228254612Sjkim	: : "cc");
229177276Spjd	return (v);
230177276Spjd}
231177276Spjd
232254617Sjkimstatic __inline int
233254617Sjkimatomic_testandset_int(volatile u_int *p, u_int v)
234254617Sjkim{
235254617Sjkim	u_char res;
236254617Sjkim
237254617Sjkim	__asm __volatile(
238254617Sjkim	"	" MPLOCKED "		"
239254617Sjkim	"	btsl	%2,%1 ;		"
240254617Sjkim	"	setc	%0 ;		"
241254617Sjkim	"# atomic_testandset_int"
242254617Sjkim	: "=q" (res),			/* 0 */
243254617Sjkim	  "+m" (*p)			/* 1 */
244254617Sjkim	: "Ir" (v & 0x1f)		/* 2 */
245254617Sjkim	: "cc");
246254617Sjkim	return (res);
247254617Sjkim}
248254617Sjkim
249254617Sjkimstatic __inline int
250254617Sjkimatomic_testandset_long(volatile u_long *p, u_int v)
251254617Sjkim{
252254617Sjkim	u_char res;
253254617Sjkim
254254617Sjkim	__asm __volatile(
255254617Sjkim	"	" MPLOCKED "		"
256254617Sjkim	"	btsq	%2,%1 ;		"
257254617Sjkim	"	setc	%0 ;		"
258254617Sjkim	"# atomic_testandset_long"
259254617Sjkim	: "=q" (res),			/* 0 */
260254617Sjkim	  "+m" (*p)			/* 1 */
261254617Sjkim	: "Jr" ((u_long)(v & 0x3f))	/* 2 */
262254617Sjkim	: "cc");
263254617Sjkim	return (res);
264254617Sjkim}
265254617Sjkim
266302108Ssephestatic __inline int
267302108Ssepheatomic_testandclear_int(volatile u_int *p, u_int v)
268302108Ssephe{
269302108Ssephe	u_char res;
270302108Ssephe
271302108Ssephe	__asm __volatile(
272302108Ssephe	"	" MPLOCKED "		"
273302108Ssephe	"	btrl	%2,%1 ;		"
274302108Ssephe	"	setc	%0 ;		"
275302108Ssephe	"# atomic_testandclear_int"
276302108Ssephe	: "=q" (res),			/* 0 */
277302108Ssephe	  "+m" (*p)			/* 1 */
278302108Ssephe	: "Ir" (v & 0x1f)		/* 2 */
279302108Ssephe	: "cc");
280302108Ssephe	return (res);
281302108Ssephe}
282302108Ssephe
283302108Ssephestatic __inline int
284302108Ssepheatomic_testandclear_long(volatile u_long *p, u_int v)
285302108Ssephe{
286302108Ssephe	u_char res;
287302108Ssephe
288302108Ssephe	__asm __volatile(
289302108Ssephe	"	" MPLOCKED "		"
290302108Ssephe	"	btrq	%2,%1 ;		"
291302108Ssephe	"	setc	%0 ;		"
292302108Ssephe	"# atomic_testandclear_long"
293302108Ssephe	: "=q" (res),			/* 0 */
294302108Ssephe	  "+m" (*p)			/* 1 */
295302108Ssephe	: "Jr" ((u_long)(v & 0x3f))	/* 2 */
296302108Ssephe	: "cc");
297302108Ssephe	return (res);
298302108Ssephe}
299302108Ssephe
/*
 * We assume that a = b will do atomic loads and stores.  Due to the
 * IA32 memory model, a simple store guarantees release semantics.
 *
 * However, loads may pass stores, so for atomic_load_acq we have to
 * ensure a Store/Load barrier to do the load in SMP kernels.  We use
 * "lock cmpxchg" as recommended by the AMD Software Optimization
 * Guide, and not mfence.  For UP kernels, however, the cache of the
 * single processor is always consistent, so we only need to take care
 * of the compiler.
 */
#define	ATOMIC_STORE(TYPE)				\
static __inline void					\
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__compiler_membar();				\
	*p = v;						\
}							\
struct __hack
319236456Skib
320148267Speter#if defined(_KERNEL) && !defined(SMP)
321148267Speter
322236456Skib#define	ATOMIC_LOAD(TYPE, LOP)				\
32367351Sjhbstatic __inline u_##TYPE				\
32467351Sjhbatomic_load_acq_##TYPE(volatile u_##TYPE *p)		\
32567351Sjhb{							\
326197803Sattilio	u_##TYPE tmp;					\
327197803Sattilio							\
328197803Sattilio	tmp = *p;					\
329241374Sattilio	__compiler_membar();				\
330197803Sattilio	return (tmp);					\
331148267Speter}							\
332148267Speterstruct __hack
333148267Speter
334165635Sbde#else /* !(_KERNEL && !SMP) */
335148267Speter
336236456Skib#define	ATOMIC_LOAD(TYPE, LOP)				\
337148267Speterstatic __inline u_##TYPE				\
338148267Speteratomic_load_acq_##TYPE(volatile u_##TYPE *p)		\
339148267Speter{							\
34071023Sjhb	u_##TYPE res;					\
34171023Sjhb							\
342165630Sbde	__asm __volatile(MPLOCKED LOP			\
343165635Sbde	: "=a" (res),			/* 0 */		\
344254612Sjkim	  "+m" (*p)			/* 1 */		\
345254612Sjkim	: : "memory", "cc");				\
34671023Sjhb	return (res);					\
34771023Sjhb}							\
348122940Speterstruct __hack
349100327Smarkm
350165635Sbde#endif /* _KERNEL && !SMP */
351148267Speter
352165578Sbde#endif /* KLD_MODULE || !__GNUCLIKE_ASM */
353100251Smarkm
354100251SmarkmATOMIC_ASM(set,	     char,  "orb %b1,%0",  "iq",  v);
355100251SmarkmATOMIC_ASM(clear,    char,  "andb %b1,%0", "iq", ~v);
356100251SmarkmATOMIC_ASM(add,	     char,  "addb %b1,%0", "iq",  v);
357100251SmarkmATOMIC_ASM(subtract, char,  "subb %b1,%0", "iq",  v);
35871085Sjhb
359100251SmarkmATOMIC_ASM(set,	     short, "orw %w1,%0",  "ir",  v);
360100251SmarkmATOMIC_ASM(clear,    short, "andw %w1,%0", "ir", ~v);
361100251SmarkmATOMIC_ASM(add,	     short, "addw %w1,%0", "ir",  v);
362100251SmarkmATOMIC_ASM(subtract, short, "subw %w1,%0", "ir",  v);
36371085Sjhb
364100251SmarkmATOMIC_ASM(set,	     int,   "orl %1,%0",   "ir",  v);
365100251SmarkmATOMIC_ASM(clear,    int,   "andl %1,%0",  "ir", ~v);
366100251SmarkmATOMIC_ASM(add,	     int,   "addl %1,%0",  "ir",  v);
367100251SmarkmATOMIC_ASM(subtract, int,   "subl %1,%0",  "ir",  v);
36871085Sjhb
369114349SpeterATOMIC_ASM(set,	     long,  "orq %1,%0",   "ir",  v);
370114349SpeterATOMIC_ASM(clear,    long,  "andq %1,%0",  "ir", ~v);
371114349SpeterATOMIC_ASM(add,	     long,  "addq %1,%0",  "ir",  v);
372114349SpeterATOMIC_ASM(subtract, long,  "subq %1,%0",  "ir",  v);
37371085Sjhb
374236456SkibATOMIC_LOAD(char,  "cmpxchgb %b0,%1");
375236456SkibATOMIC_LOAD(short, "cmpxchgw %w0,%1");
376236456SkibATOMIC_LOAD(int,   "cmpxchgl %0,%1");
377236456SkibATOMIC_LOAD(long,  "cmpxchgq %0,%1");
37871023Sjhb
379236456SkibATOMIC_STORE(char);
380236456SkibATOMIC_STORE(short);
381236456SkibATOMIC_STORE(int);
382236456SkibATOMIC_STORE(long);
383236456Skib
38471085Sjhb#undef ATOMIC_ASM
385236456Skib#undef ATOMIC_LOAD
386236456Skib#undef ATOMIC_STORE
38767351Sjhb
388165635Sbde#ifndef WANT_FUNCTIONS
389147855Sjhb
390254617Sjkim/* Read the current value and store a new value in the destination. */
391165578Sbde#ifdef __GNUCLIKE_ASM
392147855Sjhb
/* Atomically exchange *p with v; returns the previous value of *p. */
static __inline u_int
atomic_swap_int(volatile u_int *p, u_int v)
{

	/* xchg with a memory operand is implicitly locked; no prefix needed. */
	__asm __volatile(
	"	xchgl	%1,%0 ;		"
	"# atomic_swap_int"
	: "+r" (v),			/* 0 */
	  "+m" (*p));			/* 1 */
	return (v);
}
404147855Sjhb
/* Atomically exchange *p with v; returns the previous value of *p. */
static __inline u_long
atomic_swap_long(volatile u_long *p, u_long v)
{

	/* xchg with a memory operand is implicitly locked; no prefix needed. */
	__asm __volatile(
	"	xchgq	%1,%0 ;		"
	"# atomic_swap_long"
	: "+r" (v),			/* 0 */
	  "+m" (*p));			/* 1 */
	return (v);
}
416147855Sjhb
417165578Sbde#else /* !__GNUCLIKE_ASM */
418147855Sjhb
419254617Sjkimu_int	atomic_swap_int(volatile u_int *p, u_int v);
420254617Sjkimu_long	atomic_swap_long(volatile u_long *p, u_long v);
421147855Sjhb
422165578Sbde#endif /* __GNUCLIKE_ASM */
423147855Sjhb
/*
 * Acquire/release variants: the ops map to the _barr_ (compiler-barrier)
 * forms; cmpset/fcmpset already clobber "memory" so they are their own
 * acq/rel variants.
 */
#define	atomic_set_acq_char		atomic_set_barr_char
#define	atomic_set_rel_char		atomic_set_barr_char
#define	atomic_clear_acq_char		atomic_clear_barr_char
#define	atomic_clear_rel_char		atomic_clear_barr_char
#define	atomic_add_acq_char		atomic_add_barr_char
#define	atomic_add_rel_char		atomic_add_barr_char
#define	atomic_subtract_acq_char	atomic_subtract_barr_char
#define	atomic_subtract_rel_char	atomic_subtract_barr_char
#define	atomic_cmpset_acq_char		atomic_cmpset_char
#define	atomic_cmpset_rel_char		atomic_cmpset_char
#define	atomic_fcmpset_acq_char		atomic_fcmpset_char
#define	atomic_fcmpset_rel_char		atomic_fcmpset_char

#define	atomic_set_acq_short		atomic_set_barr_short
#define	atomic_set_rel_short		atomic_set_barr_short
#define	atomic_clear_acq_short		atomic_clear_barr_short
#define	atomic_clear_rel_short		atomic_clear_barr_short
#define	atomic_add_acq_short		atomic_add_barr_short
#define	atomic_add_rel_short		atomic_add_barr_short
#define	atomic_subtract_acq_short	atomic_subtract_barr_short
#define	atomic_subtract_rel_short	atomic_subtract_barr_short
#define	atomic_cmpset_acq_short		atomic_cmpset_short
#define	atomic_cmpset_rel_short		atomic_cmpset_short
#define	atomic_fcmpset_acq_short	atomic_fcmpset_short
#define	atomic_fcmpset_rel_short	atomic_fcmpset_short

#define	atomic_set_acq_int		atomic_set_barr_int
#define	atomic_set_rel_int		atomic_set_barr_int
#define	atomic_clear_acq_int		atomic_clear_barr_int
#define	atomic_clear_rel_int		atomic_clear_barr_int
#define	atomic_add_acq_int		atomic_add_barr_int
#define	atomic_add_rel_int		atomic_add_barr_int
#define	atomic_subtract_acq_int		atomic_subtract_barr_int
#define	atomic_subtract_rel_int		atomic_subtract_barr_int
#define	atomic_cmpset_acq_int		atomic_cmpset_int
#define	atomic_cmpset_rel_int		atomic_cmpset_int
#define	atomic_fcmpset_acq_int		atomic_fcmpset_int
#define	atomic_fcmpset_rel_int		atomic_fcmpset_int

#define	atomic_set_acq_long		atomic_set_barr_long
#define	atomic_set_rel_long		atomic_set_barr_long
#define	atomic_clear_acq_long		atomic_clear_barr_long
#define	atomic_clear_rel_long		atomic_clear_barr_long
#define	atomic_add_acq_long		atomic_add_barr_long
#define	atomic_add_rel_long		atomic_add_barr_long
#define	atomic_subtract_acq_long	atomic_subtract_barr_long
#define	atomic_subtract_rel_long	atomic_subtract_barr_long
#define	atomic_cmpset_acq_long		atomic_cmpset_long
#define	atomic_cmpset_rel_long		atomic_cmpset_long
#define	atomic_fcmpset_acq_long		atomic_fcmpset_long
#define	atomic_fcmpset_rel_long		atomic_fcmpset_long

/* Read-and-clear is a swap with zero. */
#define	atomic_readandclear_int(p)	atomic_swap_int(p, 0)
#define	atomic_readandclear_long(p)	atomic_swap_long(p, 0)
478254617Sjkim
/* Operations on 8-bit bytes. */
#define	atomic_set_8		atomic_set_char
#define	atomic_set_acq_8	atomic_set_acq_char
#define	atomic_set_rel_8	atomic_set_rel_char
#define	atomic_clear_8		atomic_clear_char
#define	atomic_clear_acq_8	atomic_clear_acq_char
#define	atomic_clear_rel_8	atomic_clear_rel_char
#define	atomic_add_8		atomic_add_char
#define	atomic_add_acq_8	atomic_add_acq_char
#define	atomic_add_rel_8	atomic_add_rel_char
#define	atomic_subtract_8	atomic_subtract_char
#define	atomic_subtract_acq_8	atomic_subtract_acq_char
#define	atomic_subtract_rel_8	atomic_subtract_rel_char
#define	atomic_load_acq_8	atomic_load_acq_char
#define	atomic_store_rel_8	atomic_store_rel_char
#define	atomic_cmpset_8		atomic_cmpset_char
#define	atomic_cmpset_acq_8	atomic_cmpset_acq_char
#define	atomic_cmpset_rel_8	atomic_cmpset_rel_char
#define	atomic_fcmpset_8	atomic_fcmpset_char
#define	atomic_fcmpset_acq_8	atomic_fcmpset_acq_char
#define	atomic_fcmpset_rel_8	atomic_fcmpset_rel_char
50071085Sjhb
/* Operations on 16-bit words. */
#define	atomic_set_16		atomic_set_short
#define	atomic_set_acq_16	atomic_set_acq_short
#define	atomic_set_rel_16	atomic_set_rel_short
#define	atomic_clear_16		atomic_clear_short
#define	atomic_clear_acq_16	atomic_clear_acq_short
#define	atomic_clear_rel_16	atomic_clear_rel_short
#define	atomic_add_16		atomic_add_short
#define	atomic_add_acq_16	atomic_add_acq_short
#define	atomic_add_rel_16	atomic_add_rel_short
#define	atomic_subtract_16	atomic_subtract_short
#define	atomic_subtract_acq_16	atomic_subtract_acq_short
#define	atomic_subtract_rel_16	atomic_subtract_rel_short
#define	atomic_load_acq_16	atomic_load_acq_short
#define	atomic_store_rel_16	atomic_store_rel_short
#define	atomic_cmpset_16	atomic_cmpset_short
#define	atomic_cmpset_acq_16	atomic_cmpset_acq_short
#define	atomic_cmpset_rel_16	atomic_cmpset_rel_short
#define	atomic_fcmpset_16	atomic_fcmpset_short
#define	atomic_fcmpset_acq_16	atomic_fcmpset_acq_short
#define	atomic_fcmpset_rel_16	atomic_fcmpset_rel_short
52271085Sjhb
/* Operations on 32-bit double words. */
#define	atomic_set_32		atomic_set_int
#define	atomic_set_acq_32	atomic_set_acq_int
#define	atomic_set_rel_32	atomic_set_rel_int
#define	atomic_clear_32		atomic_clear_int
#define	atomic_clear_acq_32	atomic_clear_acq_int
#define	atomic_clear_rel_32	atomic_clear_rel_int
#define	atomic_add_32		atomic_add_int
#define	atomic_add_acq_32	atomic_add_acq_int
#define	atomic_add_rel_32	atomic_add_rel_int
#define	atomic_subtract_32	atomic_subtract_int
#define	atomic_subtract_acq_32	atomic_subtract_acq_int
#define	atomic_subtract_rel_32	atomic_subtract_rel_int
#define	atomic_load_acq_32	atomic_load_acq_int
#define	atomic_store_rel_32	atomic_store_rel_int
#define	atomic_cmpset_32	atomic_cmpset_int
#define	atomic_cmpset_acq_32	atomic_cmpset_acq_int
#define	atomic_cmpset_rel_32	atomic_cmpset_rel_int
#define	atomic_fcmpset_32	atomic_fcmpset_int
#define	atomic_fcmpset_acq_32	atomic_fcmpset_acq_int
#define	atomic_fcmpset_rel_32	atomic_fcmpset_rel_int
#define	atomic_swap_32		atomic_swap_int
#define	atomic_readandclear_32	atomic_readandclear_int
#define	atomic_fetchadd_32	atomic_fetchadd_int
#define	atomic_testandset_32	atomic_testandset_int
#define	atomic_testandclear_32	atomic_testandclear_int
54971085Sjhb
/* Operations on 64-bit quad words. */
#define	atomic_set_64		atomic_set_long
#define	atomic_set_acq_64	atomic_set_acq_long
#define	atomic_set_rel_64	atomic_set_rel_long
#define	atomic_clear_64		atomic_clear_long
#define	atomic_clear_acq_64	atomic_clear_acq_long
#define	atomic_clear_rel_64	atomic_clear_rel_long
#define	atomic_add_64		atomic_add_long
#define	atomic_add_acq_64	atomic_add_acq_long
#define	atomic_add_rel_64	atomic_add_rel_long
#define	atomic_subtract_64	atomic_subtract_long
#define	atomic_subtract_acq_64	atomic_subtract_acq_long
#define	atomic_subtract_rel_64	atomic_subtract_rel_long
#define	atomic_load_acq_64	atomic_load_acq_long
#define	atomic_store_rel_64	atomic_store_rel_long
#define	atomic_cmpset_64	atomic_cmpset_long
#define	atomic_cmpset_acq_64	atomic_cmpset_acq_long
#define	atomic_cmpset_rel_64	atomic_cmpset_rel_long
#define	atomic_fcmpset_64	atomic_fcmpset_long
#define	atomic_fcmpset_acq_64	atomic_fcmpset_acq_long
#define	atomic_fcmpset_rel_64	atomic_fcmpset_rel_long
#define	atomic_swap_64		atomic_swap_long
#define	atomic_readandclear_64	atomic_readandclear_long
#define	atomic_fetchadd_64	atomic_fetchadd_long
#define	atomic_testandset_64	atomic_testandset_long
#define	atomic_testandclear_64	atomic_testandclear_long
576149233Sjhb
577147855Sjhb/* Operations on pointers. */
578148067Sjhb#define	atomic_set_ptr		atomic_set_long
579148067Sjhb#define	atomic_set_acq_ptr	atomic_set_acq_long
580148067Sjhb#define	atomic_set_rel_ptr	atomic_set_rel_long
581148067Sjhb#define	atomic_clear_ptr	atomic_clear_long
582148067Sjhb#define	atomic_clear_acq_ptr	atomic_clear_acq_long
583148067Sjhb#define	atomic_clear_rel_ptr	atomic_clear_rel_long
584148067Sjhb#define	atomic_add_ptr		atomic_add_long
585148067Sjhb#define	atomic_add_acq_ptr	atomic_add_acq_long
586148067Sjhb#define	atomic_add_rel_ptr	atomic_add_rel_long
587148067Sjhb#define	atomic_subtract_ptr	atomic_subtract_long
588148067Sjhb#define	atomic_subtract_acq_ptr	atomic_subtract_acq_long
589148067Sjhb#define	atomic_subtract_rel_ptr	atomic_subtract_rel_long
590148067Sjhb#define	atomic_load_acq_ptr	atomic_load_acq_long
591148067Sjhb#define	atomic_store_rel_ptr	atomic_store_rel_long
592148067Sjhb#define	atomic_cmpset_ptr	atomic_cmpset_long
593148067Sjhb#define	atomic_cmpset_acq_ptr	atomic_cmpset_acq_long
594148067Sjhb#define	atomic_cmpset_rel_ptr	atomic_cmpset_rel_long
595337041Shselasky#define	atomic_fcmpset_ptr	atomic_fcmpset_long
596337041Shselasky#define	atomic_fcmpset_acq_ptr	atomic_fcmpset_acq_long
597337041Shselasky#define	atomic_fcmpset_rel_ptr	atomic_fcmpset_rel_long
598254617Sjkim#define	atomic_swap_ptr		atomic_swap_long
599148067Sjhb#define	atomic_readandclear_ptr	atomic_readandclear_long
60065514Sphk
601165635Sbde#endif /* !WANT_FUNCTIONS */
602165633Sbde
603165633Sbde#endif /* !_MACHINE_ATOMIC_H_ */
604