/*-
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD: head/sys/i386/include/atomic.h 100327 2002-07-18 15:56:46Z markm $
 */
#ifndef _MACHINE_ATOMIC_H_
#define _MACHINE_ATOMIC_H_

/*
 * Various simple arithmetic on memory which is atomic in the presence
 * of interrupts and multiple processors.
 *
 * atomic_set_char(P, V)	(*(u_char*)(P) |= (V))
 * atomic_clear_char(P, V)	(*(u_char*)(P) &= ~(V))
 * atomic_add_char(P, V)	(*(u_char*)(P) += (V))
 * atomic_subtract_char(P, V)	(*(u_char*)(P) -= (V))
 *
 * atomic_set_short(P, V)	(*(u_short*)(P) |= (V))
 * atomic_clear_short(P, V)	(*(u_short*)(P) &= ~(V))
 * atomic_add_short(P, V)	(*(u_short*)(P) += (V))
 * atomic_subtract_short(P, V)	(*(u_short*)(P) -= (V))
 *
 * atomic_set_int(P, V)		(*(u_int*)(P) |= (V))
 * atomic_clear_int(P, V)	(*(u_int*)(P) &= ~(V))
 * atomic_add_int(P, V)		(*(u_int*)(P) += (V))
 * atomic_subtract_int(P, V)	(*(u_int*)(P) -= (V))
 * atomic_readandclear_int(P)	(return *(u_int*)P; *(u_int*)P = 0;)
 *
 * atomic_set_long(P, V)	(*(u_long*)(P) |= (V))
 * atomic_clear_long(P, V)	(*(u_long*)(P) &= ~(V))
 * atomic_add_long(P, V)	(*(u_long*)(P) += (V))
 * atomic_subtract_long(P, V)	(*(u_long*)(P) -= (V))
 * atomic_readandclear_long(P)	(return *(u_long*)P; *(u_long*)P = 0;)
 */
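
/*
 * Usage sketch (illustrative only; "softc", "sc_flags", "sc_count" and
 * SC_BUSY are hypothetical names, not part of this header):
 *
 *	struct softc {
 *		volatile u_int	sc_flags;
 *		volatile u_int	sc_count;
 *	};
 *	#define	SC_BUSY	0x0001
 *
 *	atomic_set_int(&sc->sc_flags, SC_BUSY);		atomically: flags |= SC_BUSY
 *	atomic_add_int(&sc->sc_count, 1);		atomically: count += 1
 *	atomic_clear_int(&sc->sc_flags, SC_BUSY);	atomically: flags &= ~SC_BUSY
 *	atomic_subtract_int(&sc->sc_count, 1);		atomically: count -= 1
 */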

/*
 * The above functions are expanded inline in the statically-linked
 * kernel.  Lock prefixes are generated if an SMP kernel is being
 * built.
 *
 * Kernel modules call real functions which are built into the kernel.
 * This allows kernel modules to be portable between UP and SMP systems.
 */
#if defined(KLD_MODULE)
#define ATOMIC_ASM(NAME, TYPE, OP, CONS, V)			\
void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)

int atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src);

#define	ATOMIC_STORE_LOAD(TYPE, LOP, SOP)			\
u_##TYPE	atomic_load_acq_##TYPE(volatile u_##TYPE *p);	\
void		atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)

#else /* !KLD_MODULE */

#ifdef __GNUC__

/*
 * For userland, assume the SMP case and use lock prefixes so that
 * the binaries will run on both types of systems.
 */
#if defined(SMP) || !defined(_KERNEL)
#define MPLOCKED	lock ;
#else
#define MPLOCKED
#endif

/*
 * The inline assembly is marked volatile to demarcate potential
 * before-and-after side effects should an interrupt or SMP collision
 * occur.
 */
#define ATOMIC_ASM(NAME, TYPE, OP, CONS, V)		\
static __inline void					\
atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile(__XSTRING(MPLOCKED) OP		\
			 : "+m" (*p)			\
			 : CONS (V));			\
}
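
/*
 * For reference, a sketch of what one instantiation below expands to
 * (assuming an SMP kernel, so __XSTRING(MPLOCKED) is "lock ;"):
 *
 *	ATOMIC_ASM(add, int, "addl %1,%0", "ir", v)
 *
 * becomes roughly
 *
 *	static __inline void
 *	atomic_add_int(volatile u_int *p, u_int v)
 *	{
 *		__asm __volatile("lock ;" "addl %1,%0"
 *				 : "+m" (*p)
 *				 : "ir" (v));
 *	}
 *
 * On a UP kernel MPLOCKED is empty and the lock prefix disappears.
 */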

#else /* !__GNUC__ */

#define ATOMIC_ASM(NAME, TYPE, OP, CONS, V)				\
extern void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)

#endif /* __GNUC__ */

/*
 * Atomic compare and set, used by the mutex functions
 *
 * if (*dst == exp) *dst = src (all 32 bit words)
 *
 * Returns 0 on failure, non-zero on success
 */

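/*
 * Usage sketch (hypothetical, not part of this header): a simple
 * test-and-set style spin loop built on atomic_cmpset_int; "slock",
 * "slock_acquire" and "slock_release" are illustrative names only.
 *
 *	static volatile u_int slock = 0;
 *
 *	static __inline void
 *	slock_acquire(void)
 *	{
 *		while (atomic_cmpset_int(&slock, 0, 1) == 0)
 *			;	(spin until the 0 -> 1 transition succeeds)
 *	}
 *
 *	static __inline void
 *	slock_release(void)
 *	{
 *		atomic_store_rel_int(&slock, 0);
 *	}
 */
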
#if defined(__GNUC__)

#if defined(I386_CPU)

static __inline int
atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src)
{
	int res = exp;

	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	cmpl	%0,%2 ;		"
	"	jne	1f ;		"
	"	movl	%1,%2 ;		"
	"1:				"
	"	sete	%%al ;		"
	"	movzbl	%%al,%0 ;	"
	"	popfl ;			"
	"# atomic_cmpset_int"
	: "+a" (res)			/* 0 (result) */
	: "r" (src),			/* 1 */
	  "m" (*(dst))			/* 2 */
	: "memory");

	return (res);
}

#else /* defined(I386_CPU) */

static __inline int
atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src)
{
	int res = exp;

	__asm __volatile (
	"	" __XSTRING(MPLOCKED) "	"
	"	cmpxchgl %1,%2 ;	"
	"	setz	%%al ;		"
	"	movzbl	%%al,%0 ;	"
	"1:				"
	"# atomic_cmpset_int"
	: "+a" (res)			/* 0 (result) */
	: "r" (src),			/* 1 */
	  "m" (*(dst))			/* 2 */
	: "memory");

	return (res);
}

#endif /* defined(I386_CPU) */

#endif /* defined(__GNUC__) */

#if defined(__GNUC__)

#if defined(I386_CPU)

/*
 * We assume that a = b will do atomic loads and stores.
 *
 * XXX: This is _NOT_ safe on a P6 or higher because it does not guarantee
 * memory ordering.  These should only be used on a 386.
 */
#define ATOMIC_STORE_LOAD(TYPE, LOP, SOP)		\
static __inline u_##TYPE				\
atomic_load_acq_##TYPE(volatile u_##TYPE *p)		\
{							\
	return (*p);					\
}							\
							\
static __inline void					\
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	*p = v;						\
	__asm __volatile("" : : : "memory");		\
}

#else /* !defined(I386_CPU) */

#define ATOMIC_STORE_LOAD(TYPE, LOP, SOP)		\
static __inline u_##TYPE				\
atomic_load_acq_##TYPE(volatile u_##TYPE *p)		\
{							\
	u_##TYPE res;					\
							\
	__asm __volatile(__XSTRING(MPLOCKED) LOP	\
	: "=a" (res),			/* 0 (result) */\
	  "+m" (*p)			/* 1 */		\
	: : "memory");					\
							\
	return (res);					\
}							\
							\
/*							\
 * The XCHG instruction asserts LOCK automagically.	\
 */							\
static __inline void					\
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile(SOP				\
	: "+m" (*p),			/* 0 */		\
	  "+r" (v)			/* 1 */		\
	: : "memory");					\
}

#endif	/* defined(I386_CPU) */

#else /* !defined(__GNUC__) */

extern int atomic_cmpset_int(volatile u_int *, u_int, u_int);

#define ATOMIC_STORE_LOAD(TYPE, LOP, SOP)				\
extern u_##TYPE atomic_load_acq_##TYPE(volatile u_##TYPE *p);		\
extern void atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)

#endif /* defined(__GNUC__) */

#endif /* KLD_MODULE */

ATOMIC_ASM(set,	     char,  "orb %b1,%0",  "iq",  v);
ATOMIC_ASM(clear,    char,  "andb %b1,%0", "iq", ~v);
ATOMIC_ASM(add,	     char,  "addb %b1,%0", "iq",  v);
ATOMIC_ASM(subtract, char,  "subb %b1,%0", "iq",  v);

ATOMIC_ASM(set,	     short, "orw %w1,%0",  "ir",  v);
ATOMIC_ASM(clear,    short, "andw %w1,%0", "ir", ~v);
ATOMIC_ASM(add,	     short, "addw %w1,%0", "ir",  v);
ATOMIC_ASM(subtract, short, "subw %w1,%0", "ir",  v);

ATOMIC_ASM(set,	     int,   "orl %1,%0",   "ir",  v);
ATOMIC_ASM(clear,    int,   "andl %1,%0",  "ir", ~v);
ATOMIC_ASM(add,	     int,   "addl %1,%0",  "ir",  v);
ATOMIC_ASM(subtract, int,   "subl %1,%0",  "ir",  v);

ATOMIC_ASM(set,	     long,  "orl %1,%0",   "ir",  v);
ATOMIC_ASM(clear,    long,  "andl %1,%0",  "ir", ~v);
ATOMIC_ASM(add,	     long,  "addl %1,%0",  "ir",  v);
ATOMIC_ASM(subtract, long,  "subl %1,%0",  "ir",  v);

ATOMIC_STORE_LOAD(char,	"cmpxchgb %b0,%1", "xchgb %b1,%0");
ATOMIC_STORE_LOAD(short,"cmpxchgw %w0,%1", "xchgw %w1,%0");
ATOMIC_STORE_LOAD(int,	"cmpxchgl %0,%1",  "xchgl %1,%0");
ATOMIC_STORE_LOAD(long,	"cmpxchgl %0,%1",  "xchgl %1,%0");
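
/*
 * Usage sketch for the _acq/_rel variants (hypothetical names): a
 * release store publishes data that an acquire load on another CPU
 * will then observe in full.
 *
 *	static struct msg msgbuf;		payload, written first
 *	static volatile u_int msg_ready;	flag, written last
 *
 *	producer:
 *		msgbuf = m;
 *		atomic_store_rel_int(&msg_ready, 1);
 *
 *	consumer:
 *		if (atomic_load_acq_int(&msg_ready) != 0)
 *			consume(&msgbuf);
 */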

#undef ATOMIC_ASM
#undef ATOMIC_STORE_LOAD

#define	atomic_set_acq_char		atomic_set_char
#define	atomic_set_rel_char		atomic_set_char
#define	atomic_clear_acq_char		atomic_clear_char
#define	atomic_clear_rel_char		atomic_clear_char
#define	atomic_add_acq_char		atomic_add_char
#define	atomic_add_rel_char		atomic_add_char
#define	atomic_subtract_acq_char	atomic_subtract_char
#define	atomic_subtract_rel_char	atomic_subtract_char

#define	atomic_set_acq_short		atomic_set_short
#define	atomic_set_rel_short		atomic_set_short
#define	atomic_clear_acq_short		atomic_clear_short
#define	atomic_clear_rel_short		atomic_clear_short
#define	atomic_add_acq_short		atomic_add_short
#define	atomic_add_rel_short		atomic_add_short
#define	atomic_subtract_acq_short	atomic_subtract_short
#define	atomic_subtract_rel_short	atomic_subtract_short

#define	atomic_set_acq_int		atomic_set_int
#define	atomic_set_rel_int		atomic_set_int
#define	atomic_clear_acq_int		atomic_clear_int
#define	atomic_clear_rel_int		atomic_clear_int
#define	atomic_add_acq_int		atomic_add_int
#define	atomic_add_rel_int		atomic_add_int
#define	atomic_subtract_acq_int		atomic_subtract_int
#define	atomic_subtract_rel_int		atomic_subtract_int
#define	atomic_cmpset_acq_int		atomic_cmpset_int
#define	atomic_cmpset_rel_int		atomic_cmpset_int

#define	atomic_set_acq_long		atomic_set_long
#define	atomic_set_rel_long		atomic_set_long
#define	atomic_clear_acq_long		atomic_clear_long
#define	atomic_clear_rel_long		atomic_clear_long
#define	atomic_add_acq_long		atomic_add_long
#define	atomic_add_rel_long		atomic_add_long
#define	atomic_subtract_acq_long	atomic_subtract_long
#define	atomic_subtract_rel_long	atomic_subtract_long
#define	atomic_cmpset_long		atomic_cmpset_int
#define	atomic_cmpset_acq_long		atomic_cmpset_acq_int
#define	atomic_cmpset_rel_long		atomic_cmpset_rel_int

#define	atomic_cmpset_acq_ptr		atomic_cmpset_ptr
#define	atomic_cmpset_rel_ptr		atomic_cmpset_ptr

#define	atomic_set_8		atomic_set_char
#define	atomic_set_acq_8	atomic_set_acq_char
#define	atomic_set_rel_8	atomic_set_rel_char
#define	atomic_clear_8		atomic_clear_char
#define	atomic_clear_acq_8	atomic_clear_acq_char
#define	atomic_clear_rel_8	atomic_clear_rel_char
#define	atomic_add_8		atomic_add_char
#define	atomic_add_acq_8	atomic_add_acq_char
#define	atomic_add_rel_8	atomic_add_rel_char
#define	atomic_subtract_8	atomic_subtract_char
#define	atomic_subtract_acq_8	atomic_subtract_acq_char
#define	atomic_subtract_rel_8	atomic_subtract_rel_char
#define	atomic_load_acq_8	atomic_load_acq_char
#define	atomic_store_rel_8	atomic_store_rel_char

#define	atomic_set_16		atomic_set_short
#define	atomic_set_acq_16	atomic_set_acq_short
#define	atomic_set_rel_16	atomic_set_rel_short
#define	atomic_clear_16		atomic_clear_short
#define	atomic_clear_acq_16	atomic_clear_acq_short
#define	atomic_clear_rel_16	atomic_clear_rel_short
#define	atomic_add_16		atomic_add_short
#define	atomic_add_acq_16	atomic_add_acq_short
#define	atomic_add_rel_16	atomic_add_rel_short
#define	atomic_subtract_16	atomic_subtract_short
#define	atomic_subtract_acq_16	atomic_subtract_acq_short
#define	atomic_subtract_rel_16	atomic_subtract_rel_short
#define	atomic_load_acq_16	atomic_load_acq_short
#define	atomic_store_rel_16	atomic_store_rel_short

#define	atomic_set_32		atomic_set_int
#define	atomic_set_acq_32	atomic_set_acq_int
#define	atomic_set_rel_32	atomic_set_rel_int
#define	atomic_clear_32		atomic_clear_int
#define	atomic_clear_acq_32	atomic_clear_acq_int
#define	atomic_clear_rel_32	atomic_clear_rel_int
#define	atomic_add_32		atomic_add_int
#define	atomic_add_acq_32	atomic_add_acq_int
#define	atomic_add_rel_32	atomic_add_rel_int
#define	atomic_subtract_32	atomic_subtract_int
#define	atomic_subtract_acq_32	atomic_subtract_acq_int
#define	atomic_subtract_rel_32	atomic_subtract_rel_int
#define	atomic_load_acq_32	atomic_load_acq_int
#define	atomic_store_rel_32	atomic_store_rel_int
#define	atomic_cmpset_32	atomic_cmpset_int
#define	atomic_cmpset_acq_32	atomic_cmpset_acq_int
#define	atomic_cmpset_rel_32	atomic_cmpset_rel_int
#define	atomic_readandclear_32	atomic_readandclear_int

#if !defined(WANT_FUNCTIONS)
static __inline int
atomic_cmpset_ptr(volatile void *dst, void *exp, void *src)
{

	return (atomic_cmpset_int((volatile u_int *)dst, (u_int)exp,
	    (u_int)src));
}

static __inline void *
atomic_load_acq_ptr(volatile void *p)
{
	return (void *)atomic_load_acq_int((volatile u_int *)p);
}

static __inline void
atomic_store_rel_ptr(volatile void *p, void *v)
{
	atomic_store_rel_int((volatile u_int *)p, (u_int)v);
}

#define ATOMIC_PTR(NAME)				\
static __inline void					\
atomic_##NAME##_ptr(volatile void *p, uintptr_t v)	\
{							\
	atomic_##NAME##_int((volatile u_int *)p, v);	\
}							\
							\
static __inline void					\
atomic_##NAME##_acq_ptr(volatile void *p, uintptr_t v)	\
{							\
	atomic_##NAME##_acq_int((volatile u_int *)p, v);\
}							\
							\
static __inline void					\
atomic_##NAME##_rel_ptr(volatile void *p, uintptr_t v)	\
{							\
	atomic_##NAME##_rel_int((volatile u_int *)p, v);\
}

ATOMIC_PTR(set)
ATOMIC_PTR(clear)
ATOMIC_PTR(add)
ATOMIC_PTR(subtract)

#undef ATOMIC_PTR
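
/*
 * Usage sketch for the pointer operations (hypothetical names): install
 * a pointer exactly once and read it back with acquire semantics.
 *
 *	static void * volatile global_cfg;
 *
 *	void
 *	cfg_install(void *newcfg)
 *	{
 *		if (atomic_cmpset_ptr(&global_cfg, NULL, newcfg) == 0)
 *			cfg_free(newcfg);	(lost the race; already set)
 *	}
 *
 *	void *
 *	cfg_get(void)
 *	{
 *		return (atomic_load_acq_ptr(&global_cfg));
 *	}
 */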

#if defined(__GNUC__)

static __inline u_int
atomic_readandclear_int(volatile u_int *addr)
{
	u_int result;

	__asm __volatile (
	"	xorl	%0,%0 ;		"
	"	xchgl	%1,%0 ;		"
	"# atomic_readandclear_int"
	: "=&r" (result)		/* 0 (result) */
	: "m" (*addr));			/* 1 (addr) */

	return (result);
}

static __inline u_long
atomic_readandclear_long(volatile u_long *addr)
{
	u_long result;

	__asm __volatile (
	"	xorl	%0,%0 ;		"
	"	xchgl	%1,%0 ;		"
	"# atomic_readandclear_long"
	: "=&r" (result)		/* 0 (result) */
	: "m" (*addr));			/* 1 (addr) */

	return (result);
}
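
/*
 * Usage sketch (hypothetical names): drain a word of pending event bits
 * that interrupt handlers or other CPUs set with atomic_set_int.
 *
 *	static volatile u_int pending_events;
 *
 *	static void
 *	drain_events(void)
 *	{
 *		u_int ev;
 *
 *		ev = atomic_readandclear_int(&pending_events);
 *		while (ev != 0) {
 *			handle_event(ffs(ev) - 1);	(lowest set bit)
 *			ev &= ev - 1;			(clear that bit)
 *		}
 *	}
 */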

#else /* !defined(__GNUC__) */

extern u_long	atomic_readandclear_long(volatile u_long *);
extern u_int	atomic_readandclear_int(volatile u_int *);

#endif /* defined(__GNUC__) */

#endif	/* !defined(WANT_FUNCTIONS) */
#endif /* ! _MACHINE_ATOMIC_H_ */