atomic.h revision 157212
138517Sdfr/*-
238517Sdfr * Copyright (c) 1998 Doug Rabson
338517Sdfr * All rights reserved.
438517Sdfr *
538517Sdfr * Redistribution and use in source and binary forms, with or without
638517Sdfr * modification, are permitted provided that the following conditions
738517Sdfr * are met:
838517Sdfr * 1. Redistributions of source code must retain the above copyright
938517Sdfr *    notice, this list of conditions and the following disclaimer.
1038517Sdfr * 2. Redistributions in binary form must reproduce the above copyright
1138517Sdfr *    notice, this list of conditions and the following disclaimer in the
1238517Sdfr *    documentation and/or other materials provided with the distribution.
1338517Sdfr *
1438517Sdfr * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
1538517Sdfr * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
1638517Sdfr * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
1738517Sdfr * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
1838517Sdfr * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
1938517Sdfr * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
2038517Sdfr * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
2138517Sdfr * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
2238517Sdfr * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
2338517Sdfr * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
2438517Sdfr * SUCH DAMAGE.
2538517Sdfr *
2650477Speter * $FreeBSD: head/sys/i386/include/atomic.h 157212 2006-03-28 14:34:48Z des $
2738517Sdfr */
2838517Sdfr#ifndef _MACHINE_ATOMIC_H_
29147855Sjhb#define	_MACHINE_ATOMIC_H_
3038517Sdfr
31143063Sjoerg#ifndef _SYS_CDEFS_H_
32143063Sjoerg#error this file needs sys/cdefs.h as a prerequisite
33143063Sjoerg#endif
34143063Sjoerg
3538517Sdfr/*
3638517Sdfr * Various simple arithmetic on memory which is atomic in the presence
3748797Salc * of interrupts and multiple processors.
3838517Sdfr *
3948797Salc * atomic_set_char(P, V)	(*(u_char*)(P) |= (V))
4048797Salc * atomic_clear_char(P, V)	(*(u_char*)(P) &= ~(V))
4148797Salc * atomic_add_char(P, V)	(*(u_char*)(P) += (V))
4248797Salc * atomic_subtract_char(P, V)	(*(u_char*)(P) -= (V))
4348797Salc *
4448797Salc * atomic_set_short(P, V)	(*(u_short*)(P) |= (V))
4548797Salc * atomic_clear_short(P, V)	(*(u_short*)(P) &= ~(V))
4648797Salc * atomic_add_short(P, V)	(*(u_short*)(P) += (V))
4748797Salc * atomic_subtract_short(P, V)	(*(u_short*)(P) -= (V))
4848797Salc *
4948797Salc * atomic_set_int(P, V)		(*(u_int*)(P) |= (V))
5048797Salc * atomic_clear_int(P, V)	(*(u_int*)(P) &= ~(V))
5148797Salc * atomic_add_int(P, V)		(*(u_int*)(P) += (V))
5248797Salc * atomic_subtract_int(P, V)	(*(u_int*)(P) -= (V))
5366695Sjhb * atomic_readandclear_int(P)	(return  *(u_int*)P; *(u_int*)P = 0;)
5448797Salc *
5548797Salc * atomic_set_long(P, V)	(*(u_long*)(P) |= (V))
5648797Salc * atomic_clear_long(P, V)	(*(u_long*)(P) &= ~(V))
5748797Salc * atomic_add_long(P, V)	(*(u_long*)(P) += (V))
5848797Salc * atomic_subtract_long(P, V)	(*(u_long*)(P) -= (V))
5966695Sjhb * atomic_readandclear_long(P)	(return  *(u_long*)P; *(u_long*)P = 0;)
6038517Sdfr */
6138517Sdfr
6248797Salc/*
6349999Salc * The above functions are expanded inline in the statically-linked
6449999Salc * kernel.  Lock prefixes are generated if an SMP kernel is being
6549999Salc * built.
6649999Salc *
6749999Salc * Kernel modules call real functions which are built into the kernel.
6849999Salc * This allows kernel modules to be portable between UP and SMP systems.
6948797Salc */
70147855Sjhb#if defined(KLD_MODULE) || !defined(__GNUCLIKE_ASM)
71147855Sjhb#define	ATOMIC_ASM(NAME, TYPE, OP, CONS, V)			\
72100251Smarkmvoid atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)
7349999Salc
7466695Sjhbint atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src);
75150627Sjhbu_int atomic_fetchadd_int(volatile u_int *p, u_int v);
7665514Sphk
7771085Sjhb#define	ATOMIC_STORE_LOAD(TYPE, LOP, SOP)			\
7871085Sjhbu_##TYPE	atomic_load_acq_##TYPE(volatile u_##TYPE *p);	\
79100251Smarkmvoid		atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)
8071085Sjhb
81147855Sjhb#else /* !KLD_MODULE && __GNUCLIKE_ASM */
8272358Smarkm
8384679Sjhb/*
8484679Sjhb * For userland, assume the SMP case and use lock prefixes so that
8584679Sjhb * the binaries will run on both types of systems.
8684679Sjhb */
/*
 * MPLOCKED is a bare assembler token ("lock ;" or nothing); it is
 * stringified with __XSTRING() at each use site and pasted into the
 * inline-asm templates below.
 */
8784679Sjhb#if defined(SMP) || !defined(_KERNEL)
88147855Sjhb#define	MPLOCKED	lock ;
8990515Sbde#else
90147855Sjhb#define	MPLOCKED
9190515Sbde#endif
9238517Sdfr
9348797Salc/*
9448797Salc * The assembly is volatilized to demark potential before-and-after side
9548797Salc * effects if an interrupt or SMP collision were to occur.
9648797Salc */
/*
 * ATOMIC_ASM(NAME, TYPE, OP, CONS, V) expands to one locked
 * read-modify-write routine, atomic_NAME_TYPE(p, v).  OP is the x86
 * instruction template (operand %0 is *p, %1 is V), CONS is the
 * constraint used for V, and V is the expression passed as the asm
 * input (plain v or ~v).  The trailing "struct __hack" forces the
 * invocation site to end with a semicolon.
 */
97147855Sjhb#define	ATOMIC_ASM(NAME, TYPE, OP, CONS, V)		\
9848797Salcstatic __inline void					\
9949043Salcatomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
10048797Salc{							\
10191469Sbmilekic	__asm __volatile(__XSTRING(MPLOCKED) OP		\
102150182Sjhb			 : "=m" (*p)			\
103150182Sjhb			 : CONS (V), "m" (*p));		\
104122827Sbde}							\
105122827Sbdestruct __hack
106100327Smarkm
10765514Sphk/*
10865514Sphk * Atomic compare and set, used by the mutex functions
10965514Sphk *
11065514Sphk * if (*dst == exp) *dst = src (all 32 bit words)
11165514Sphk *
11265514Sphk * Returns 0 on failure, non-zero on success
11365514Sphk */
11465514Sphk
115137784Sjhb#if defined(CPU_DISABLE_CMPXCHG)
116100327Smarkm
/*
 * Variant for CPUs built without the cmpxchg instruction: the
 * compare-and-store is made atomic by saving the flags and disabling
 * interrupts (pushfl/cli ... popfl) around a plain cmpl/movl pair.
 * NOTE(review): this is interrupt-atomic only, not SMP-atomic —
 * presumably such CPUs are UP-only; confirm before relying on it
 * under SMP.
 */
11765514Sphkstatic __inline int
11865514Sphkatomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src)
11965514Sphk{
12065514Sphk	int res = exp;
12165514Sphk
	/* res enters in %eax holding exp and leaves holding the 0/1 result. */
12265514Sphk	__asm __volatile(
12365514Sphk	"	pushfl ;		"
12465514Sphk	"	cli ;			"
125150182Sjhb	"	cmpl	%0,%3 ;		"
12665514Sphk	"	jne	1f ;		"
127150182Sjhb	"	movl	%2,%1 ;		"
12865514Sphk	"1:				"
12965514Sphk	"       sete	%%al;		"
13065514Sphk	"	movzbl	%%al,%0 ;	"
13165514Sphk	"	popfl ;			"
13265514Sphk	"# atomic_cmpset_int"
133150182Sjhb	: "+a" (res),			/* 0 (result) */
134150182Sjhb	  "=m" (*dst)			/* 1 */
135150182Sjhb	: "r" (src),			/* 2 */
136150182Sjhb	  "m" (*dst)			/* 3 */
13765514Sphk	: "memory");
13865514Sphk
13965514Sphk	return (res);
14065514Sphk}
141100327Smarkm
142137784Sjhb#else /* defined(CPU_DISABLE_CMPXCHG) */
143100327Smarkm
/*
 * Normal variant: a (possibly LOCK-prefixed) cmpxchgl performs the
 * whole compare-and-store; setz/movzbl convert ZF into the 0/1 return
 * value in res.  cmpxchgl implicitly compares against %eax, which is
 * why res (= exp) is constrained to "+a".
 * NOTE(review): the "1:" label is unreferenced here — apparently a
 * leftover from the jump-based variant above.
 */
14465514Sphkstatic __inline int
14565514Sphkatomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src)
14665514Sphk{
14765514Sphk	int res = exp;
14865514Sphk
14965514Sphk	__asm __volatile (
15091469Sbmilekic	"	" __XSTRING(MPLOCKED) "	"
151150182Sjhb	"	cmpxchgl %2,%1 ;	"
15265514Sphk	"       setz	%%al ;		"
15365514Sphk	"	movzbl	%%al,%0 ;	"
15465514Sphk	"1:				"
15565514Sphk	"# atomic_cmpset_int"
156150182Sjhb	: "+a" (res),			/* 0 (result) */
157150182Sjhb	  "=m" (*dst)			/* 1 */
158150182Sjhb	: "r" (src),			/* 2 */
159150182Sjhb	  "m" (*dst)			/* 3 */
160150182Sjhb	: "memory");
16165514Sphk
16265514Sphk	return (res);
16365514Sphk}
164100327Smarkm
165137784Sjhb#endif /* defined(CPU_DISABLE_CMPXCHG) */
166100327Smarkm
167150627Sjhb/*
168150627Sjhb * Atomically add the value of v to the integer pointed to by p and return
169150627Sjhb * the previous value of *p.
170150627Sjhb */
171150627Sjhbstatic __inline u_int
172150627Sjhbatomic_fetchadd_int(volatile u_int *p, u_int v)
173150627Sjhb{
174150627Sjhb
	/*
	 * xaddl exchanges its register operand with *p while adding the
	 * old register value into *p; the "+r"(v) constraint therefore
	 * leaves the previous *p in v for the return.
	 */
175150627Sjhb	__asm __volatile (
176150627Sjhb	"	" __XSTRING(MPLOCKED) "	"
177150627Sjhb	"	xaddl	%0, %1 ;	"
178150627Sjhb	"# atomic_fetchadd_int"
179150627Sjhb	: "+r" (v),			/* 0 (result) */
180150627Sjhb	  "=m" (*p)			/* 1 */
181150627Sjhb	: "m" (*p));			/* 2 */
182150627Sjhb
183150627Sjhb	return (v);
184150627Sjhb}
185150627Sjhb
186137623Sjhb#if defined(_KERNEL) && !defined(SMP)
187100327Smarkm
18867351Sjhb/*
189137591Sjhb * We assume that a = b will do atomic loads and stores.  However, on a
190137591Sjhb * PentiumPro or higher, reads may pass writes, so for that case we have
191137591Sjhb * to use a serializing instruction (i.e. with LOCK) to do the load in
192137591Sjhb * SMP kernels.  For UP kernels, however, the cache of the single processor
193137591Sjhb * is always consistent, so we don't need any memory barriers.
19467351Sjhb */
/*
 * UP-kernel variant: load/store are plain C accesses through the
 * volatile pointer; the LOP/SOP asm templates are accepted but unused.
 * "struct __hack" again forces a semicolon at the invocation site.
 */
195147855Sjhb#define	ATOMIC_STORE_LOAD(TYPE, LOP, SOP)		\
19667351Sjhbstatic __inline u_##TYPE				\
19767351Sjhbatomic_load_acq_##TYPE(volatile u_##TYPE *p)		\
19867351Sjhb{							\
19967351Sjhb	return (*p);					\
20067351Sjhb}							\
20167351Sjhb							\
20267351Sjhbstatic __inline void					\
20367351Sjhbatomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
20467351Sjhb{							\
20567351Sjhb	*p = v;						\
206122827Sbde}							\
207122827Sbdestruct __hack
208100327Smarkm
209137591Sjhb#else /* defined(SMP) */
21067351Sjhb
/*
 * SMP (and userland) variant: the acquire load runs a LOCK'ed cmpxchg
 * (LOP) against *p with the uninitialized res in %eax — whichever way
 * the compare goes, cmpxchg leaves the current *p in %eax, and the
 * locked instruction serializes the read.  The release store uses an
 * xchg template (SOP); see the note inside on its implicit LOCK.
 */
211147855Sjhb#define	ATOMIC_STORE_LOAD(TYPE, LOP, SOP)		\
21271023Sjhbstatic __inline u_##TYPE				\
21371023Sjhbatomic_load_acq_##TYPE(volatile u_##TYPE *p)		\
21471023Sjhb{							\
21571023Sjhb	u_##TYPE res;					\
21671023Sjhb							\
21791469Sbmilekic	__asm __volatile(__XSTRING(MPLOCKED) LOP	\
21871141Sjhb	: "=a" (res),			/* 0 (result) */\
219150182Sjhb	  "=m" (*p)			/* 1 */		\
220150182Sjhb	: "m" (*p)			/* 2 */		\
221150182Sjhb	: "memory");					\
22271023Sjhb							\
22371023Sjhb	return (res);					\
22471023Sjhb}							\
22571023Sjhb							\
22671023Sjhb/*							\
22771023Sjhb * The XCHG instruction asserts LOCK automagically.	\
22871023Sjhb */							\
22971023Sjhbstatic __inline void					\
23071023Sjhbatomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
23171023Sjhb{							\
23271023Sjhb	__asm __volatile(SOP				\
233150182Sjhb	: "=m" (*p),			/* 0 */		\
23471023Sjhb	  "+r" (v)			/* 1 */		\
235150182Sjhb	: "m" (*p));			/* 2 */		\
236122827Sbde}							\
237122827Sbdestruct __hack
238100327Smarkm
239137591Sjhb#endif	/* !defined(SMP) */
240100327Smarkm
241147855Sjhb#endif /* KLD_MODULE || !__GNUCLIKE_ASM */
242100251Smarkm
/*
 * Instantiate the per-width operations (inline bodies, or bare
 * prototypes in the KLD_MODULE/non-GNU branch).  Byte ops use the "q"
 * constraint (byte-addressable registers); long reuses the 32-bit
 * "l"-suffixed instructions, since long is 32 bits here.
 */
243100251SmarkmATOMIC_ASM(set,	     char,  "orb %b1,%0",  "iq",  v);
244100251SmarkmATOMIC_ASM(clear,    char,  "andb %b1,%0", "iq", ~v);
245100251SmarkmATOMIC_ASM(add,	     char,  "addb %b1,%0", "iq",  v);
246100251SmarkmATOMIC_ASM(subtract, char,  "subb %b1,%0", "iq",  v);
24771085Sjhb
248100251SmarkmATOMIC_ASM(set,	     short, "orw %w1,%0",  "ir",  v);
249100251SmarkmATOMIC_ASM(clear,    short, "andw %w1,%0", "ir", ~v);
250100251SmarkmATOMIC_ASM(add,	     short, "addw %w1,%0", "ir",  v);
251100251SmarkmATOMIC_ASM(subtract, short, "subw %w1,%0", "ir",  v);
25271085Sjhb
253100251SmarkmATOMIC_ASM(set,	     int,   "orl %1,%0",   "ir",  v);
254100251SmarkmATOMIC_ASM(clear,    int,   "andl %1,%0",  "ir", ~v);
255100251SmarkmATOMIC_ASM(add,	     int,   "addl %1,%0",  "ir",  v);
256100251SmarkmATOMIC_ASM(subtract, int,   "subl %1,%0",  "ir",  v);
25771085Sjhb
258100251SmarkmATOMIC_ASM(set,	     long,  "orl %1,%0",   "ir",  v);
259100251SmarkmATOMIC_ASM(clear,    long,  "andl %1,%0",  "ir", ~v);
260100251SmarkmATOMIC_ASM(add,	     long,  "addl %1,%0",  "ir",  v);
261100251SmarkmATOMIC_ASM(subtract, long,  "subl %1,%0",  "ir",  v);
26271085Sjhb
263100251SmarkmATOMIC_STORE_LOAD(char,	"cmpxchgb %b0,%1", "xchgb %b1,%0");
264100251SmarkmATOMIC_STORE_LOAD(short,"cmpxchgw %w0,%1", "xchgw %w1,%0");
265100251SmarkmATOMIC_STORE_LOAD(int,	"cmpxchgl %0,%1",  "xchgl %1,%0");
266100251SmarkmATOMIC_STORE_LOAD(long,	"cmpxchgl %0,%1",  "xchgl %1,%0");
26771023Sjhb
/* The generator macros are not part of the public interface. */
26871085Sjhb#undef ATOMIC_ASM
26967351Sjhb#undef ATOMIC_STORE_LOAD
27067351Sjhb
271147855Sjhb#if !defined(WANT_FUNCTIONS)
272147855Sjhb
/*
 * u_long and u_int are both 32 bits on i386, so the long
 * compare-and-set simply delegates to the int implementation.
 */
273147855Sjhbstatic __inline int
274147855Sjhbatomic_cmpset_long(volatile u_long *dst, u_long exp, u_long src)
275147855Sjhb{
276147855Sjhb
277147855Sjhb	return (atomic_cmpset_int((volatile u_int *)dst, (u_int)exp,
278147855Sjhb	    (u_int)src));
279147855Sjhb}
280147855Sjhb
281147855Sjhb/* Read the current value and store a zero in the destination. */
282147855Sjhb#ifdef __GNUCLIKE_ASM
283147855Sjhb
284147855Sjhbstatic __inline u_int
285147855Sjhbatomic_readandclear_int(volatile u_int *addr)
286147855Sjhb{
287147855Sjhb	u_int result;
288147855Sjhb
	/*
	 * Swap the zero held in result with *addr; per the note in the
	 * store/load macros above, xchg with a memory operand asserts
	 * LOCK implicitly, so no MPLOCKED prefix is needed.
	 */
289150182Sjhb	result = 0;
290147855Sjhb	__asm __volatile (
291147855Sjhb	"	xchgl	%1,%0 ;		"
292147855Sjhb	"# atomic_readandclear_int"
293150182Sjhb	: "+r" (result),		/* 0 (result) */
294150182Sjhb	  "=m" (*addr)			/* 1 (addr) */
295150182Sjhb	: "m" (*addr));
296147855Sjhb
297147855Sjhb	return (result);
298147855Sjhb}
299147855Sjhb
/* Same xchg-a-zero technique, for the (32-bit) long type. */
300147855Sjhbstatic __inline u_long
301147855Sjhbatomic_readandclear_long(volatile u_long *addr)
302147855Sjhb{
303147855Sjhb	u_long result;
304147855Sjhb
305150182Sjhb	result = 0;
306147855Sjhb	__asm __volatile (
307147855Sjhb	"	xchgl	%1,%0 ;		"
308147855Sjhb	"# atomic_readandclear_long"
309150182Sjhb	: "+r" (result),		/* 0 (result) */
310150182Sjhb	  "=m" (*addr)			/* 1 (addr) */
311150182Sjhb	: "m" (*addr));
312147855Sjhb
313147855Sjhb	return (result);
314147855Sjhb}
315147855Sjhb
316147855Sjhb#else /* !__GNUCLIKE_ASM */
317147855Sjhb
/* Without GNU-style inline asm these must be real functions, provided elsewhere. */
318147855Sjhbu_int	atomic_readandclear_int(volatile u_int *);
319147855Sjhbu_long	atomic_readandclear_long(volatile u_long *);
320147855Sjhb
321147855Sjhb#endif /* __GNUCLIKE_ASM */
322147855Sjhb
323147855Sjhb/* Acquire and release variants are identical to the normal ones. */
/*
 * NOTE(review): this relies on the lock-prefixed implementations above
 * already providing the required ordering on x86; confirm this also
 * holds for UP kernel builds where MPLOCKED expands to nothing.
 */
32471085Sjhb#define	atomic_set_acq_char		atomic_set_char
32571085Sjhb#define	atomic_set_rel_char		atomic_set_char
32671085Sjhb#define	atomic_clear_acq_char		atomic_clear_char
32771085Sjhb#define	atomic_clear_rel_char		atomic_clear_char
32871085Sjhb#define	atomic_add_acq_char		atomic_add_char
32971085Sjhb#define	atomic_add_rel_char		atomic_add_char
33071085Sjhb#define	atomic_subtract_acq_char	atomic_subtract_char
33171085Sjhb#define	atomic_subtract_rel_char	atomic_subtract_char
33271085Sjhb
33371085Sjhb#define	atomic_set_acq_short		atomic_set_short
33471085Sjhb#define	atomic_set_rel_short		atomic_set_short
33571085Sjhb#define	atomic_clear_acq_short		atomic_clear_short
33671085Sjhb#define	atomic_clear_rel_short		atomic_clear_short
33771085Sjhb#define	atomic_add_acq_short		atomic_add_short
33871085Sjhb#define	atomic_add_rel_short		atomic_add_short
33971085Sjhb#define	atomic_subtract_acq_short	atomic_subtract_short
34071085Sjhb#define	atomic_subtract_rel_short	atomic_subtract_short
34171085Sjhb
34271085Sjhb#define	atomic_set_acq_int		atomic_set_int
34371085Sjhb#define	atomic_set_rel_int		atomic_set_int
34471085Sjhb#define	atomic_clear_acq_int		atomic_clear_int
34571085Sjhb#define	atomic_clear_rel_int		atomic_clear_int
34671085Sjhb#define	atomic_add_acq_int		atomic_add_int
34771085Sjhb#define	atomic_add_rel_int		atomic_add_int
34871085Sjhb#define	atomic_subtract_acq_int		atomic_subtract_int
34971085Sjhb#define	atomic_subtract_rel_int		atomic_subtract_int
350147855Sjhb#define	atomic_cmpset_acq_int		atomic_cmpset_int
351147855Sjhb#define	atomic_cmpset_rel_int		atomic_cmpset_int
35271085Sjhb
35371085Sjhb#define	atomic_set_acq_long		atomic_set_long
35471085Sjhb#define	atomic_set_rel_long		atomic_set_long
35571085Sjhb#define	atomic_clear_acq_long		atomic_clear_long
35671085Sjhb#define	atomic_clear_rel_long		atomic_clear_long
35771085Sjhb#define	atomic_add_acq_long		atomic_add_long
35871085Sjhb#define	atomic_add_rel_long		atomic_add_long
35971085Sjhb#define	atomic_subtract_acq_long	atomic_subtract_long
36071085Sjhb#define	atomic_subtract_rel_long	atomic_subtract_long
361147855Sjhb#define	atomic_cmpset_acq_long		atomic_cmpset_long
362147855Sjhb#define	atomic_cmpset_rel_long		atomic_cmpset_long
362147855Sjhb#define	atomic_cmpset_rel_long		atomic_cmpset_long
36371085Sjhb
364147855Sjhb/* Operations on 8-bit bytes. */
/* char/short/int are 8/16/32 bits here, so the fixed-width names map directly. */
36571085Sjhb#define	atomic_set_8		atomic_set_char
36671085Sjhb#define	atomic_set_acq_8	atomic_set_acq_char
36771085Sjhb#define	atomic_set_rel_8	atomic_set_rel_char
36871085Sjhb#define	atomic_clear_8		atomic_clear_char
36971085Sjhb#define	atomic_clear_acq_8	atomic_clear_acq_char
37071085Sjhb#define	atomic_clear_rel_8	atomic_clear_rel_char
37171085Sjhb#define	atomic_add_8		atomic_add_char
37271085Sjhb#define	atomic_add_acq_8	atomic_add_acq_char
37371085Sjhb#define	atomic_add_rel_8	atomic_add_rel_char
37471085Sjhb#define	atomic_subtract_8	atomic_subtract_char
37571085Sjhb#define	atomic_subtract_acq_8	atomic_subtract_acq_char
37671085Sjhb#define	atomic_subtract_rel_8	atomic_subtract_rel_char
37771085Sjhb#define	atomic_load_acq_8	atomic_load_acq_char
37871085Sjhb#define	atomic_store_rel_8	atomic_store_rel_char
37971085Sjhb
380147855Sjhb/* Operations on 16-bit words. */
38171085Sjhb#define	atomic_set_16		atomic_set_short
38271085Sjhb#define	atomic_set_acq_16	atomic_set_acq_short
38371085Sjhb#define	atomic_set_rel_16	atomic_set_rel_short
38471085Sjhb#define	atomic_clear_16		atomic_clear_short
38571085Sjhb#define	atomic_clear_acq_16	atomic_clear_acq_short
38671085Sjhb#define	atomic_clear_rel_16	atomic_clear_rel_short
38771085Sjhb#define	atomic_add_16		atomic_add_short
38871085Sjhb#define	atomic_add_acq_16	atomic_add_acq_short
38971085Sjhb#define	atomic_add_rel_16	atomic_add_rel_short
39071085Sjhb#define	atomic_subtract_16	atomic_subtract_short
39171085Sjhb#define	atomic_subtract_acq_16	atomic_subtract_acq_short
39271085Sjhb#define	atomic_subtract_rel_16	atomic_subtract_rel_short
39371085Sjhb#define	atomic_load_acq_16	atomic_load_acq_short
39471085Sjhb#define	atomic_store_rel_16	atomic_store_rel_short
39571085Sjhb
396147855Sjhb/* Operations on 32-bit double words. */
39771085Sjhb#define	atomic_set_32		atomic_set_int
39871085Sjhb#define	atomic_set_acq_32	atomic_set_acq_int
39971085Sjhb#define	atomic_set_rel_32	atomic_set_rel_int
40071085Sjhb#define	atomic_clear_32		atomic_clear_int
40171085Sjhb#define	atomic_clear_acq_32	atomic_clear_acq_int
40271085Sjhb#define	atomic_clear_rel_32	atomic_clear_rel_int
40371085Sjhb#define	atomic_add_32		atomic_add_int
40471085Sjhb#define	atomic_add_acq_32	atomic_add_acq_int
40571085Sjhb#define	atomic_add_rel_32	atomic_add_rel_int
40671085Sjhb#define	atomic_subtract_32	atomic_subtract_int
40771085Sjhb#define	atomic_subtract_acq_32	atomic_subtract_acq_int
40871085Sjhb#define	atomic_subtract_rel_32	atomic_subtract_rel_int
40971085Sjhb#define	atomic_load_acq_32	atomic_load_acq_int
41071085Sjhb#define	atomic_store_rel_32	atomic_store_rel_int
41171085Sjhb#define	atomic_cmpset_32	atomic_cmpset_int
41271085Sjhb#define	atomic_cmpset_acq_32	atomic_cmpset_acq_int
41371085Sjhb#define	atomic_cmpset_rel_32	atomic_cmpset_rel_int
41471085Sjhb#define	atomic_readandclear_32	atomic_readandclear_int
415150627Sjhb#define	atomic_fetchadd_32	atomic_fetchadd_int
41671085Sjhb
417147855Sjhb/* Operations on pointers. */
/* Pointers are 32 bits on i386, so these delegate to the _int forms via casts. */
418157212Sdes#define	atomic_set_ptr(p, v) \
419157212Sdes	atomic_set_int((volatile u_int *)(p), (u_int)(v))
420157212Sdes#define	atomic_set_acq_ptr(p, v) \
421157212Sdes	atomic_set_acq_int((volatile u_int *)(p), (u_int)(v))
422157212Sdes#define	atomic_set_rel_ptr(p, v) \
423157212Sdes	atomic_set_rel_int((volatile u_int *)(p), (u_int)(v))
424157212Sdes#define	atomic_clear_ptr(p, v) \
425157212Sdes	atomic_clear_int((volatile u_int *)(p), (u_int)(v))
426157212Sdes#define	atomic_clear_acq_ptr(p, v) \
427157212Sdes	atomic_clear_acq_int((volatile u_int *)(p), (u_int)(v))
428157212Sdes#define	atomic_clear_rel_ptr(p, v) \
429157212Sdes	atomic_clear_rel_int((volatile u_int *)(p), (u_int)(v))
430157212Sdes#define	atomic_add_ptr(p, v) \
431157212Sdes	atomic_add_int((volatile u_int *)(p), (u_int)(v))
432157212Sdes#define	atomic_add_acq_ptr(p, v) \
433157212Sdes	atomic_add_acq_int((volatile u_int *)(p), (u_int)(v))
434157212Sdes#define	atomic_add_rel_ptr(p, v) \
435157212Sdes	atomic_add_rel_int((volatile u_int *)(p), (u_int)(v))
436157212Sdes#define	atomic_subtract_ptr(p, v) \
437157212Sdes	atomic_subtract_int((volatile u_int *)(p), (u_int)(v))
438157212Sdes#define	atomic_subtract_acq_ptr(p, v) \
439157212Sdes	atomic_subtract_acq_int((volatile u_int *)(p), (u_int)(v))
440157212Sdes#define	atomic_subtract_rel_ptr(p, v) \
441157212Sdes	atomic_subtract_rel_int((volatile u_int *)(p), (u_int)(v))
442157212Sdes#define	atomic_load_acq_ptr(p) \
443157212Sdes	atomic_load_acq_int((volatile u_int *)(p))
444157212Sdes#define	atomic_store_rel_ptr(p, v) \
445157212Sdes	atomic_store_rel_int((volatile u_int *)(p), (v))
446157212Sdes#define	atomic_cmpset_ptr(dst, old, new) \
447157212Sdes	atomic_cmpset_int((volatile u_int *)(dst), (u_int)(old), (u_int)(new))
448157212Sdes#define	atomic_cmpset_acq_ptr(dst, old, new) \
449157212Sdes	atomic_cmpset_acq_int((volatile u_int *)(dst), (u_int)(old), (u_int)(new))
450157212Sdes#define	atomic_cmpset_rel_ptr(dst, old, new) \
451157212Sdes	atomic_cmpset_rel_int((volatile u_int *)(dst), (u_int)(old), (u_int)(new))
452157212Sdes#define	atomic_readandclear_ptr(p) \
453157212Sdes	atomic_readandclear_int((volatile u_int *)(p))
45465514Sphk
45571085Sjhb#endif	/* !defined(WANT_FUNCTIONS) */
45638517Sdfr#endif /* ! _MACHINE_ATOMIC_H_ */
457