/*-
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD: head/sys/amd64/include/atomic.h 71023 2001-01-14 09:55:21Z jhb $
 */
#ifndef _MACHINE_ATOMIC_H_
#define	_MACHINE_ATOMIC_H_

/*
 * Various simple arithmetic on memory which is atomic in the presence
 * of interrupts and multiple processors.
3438517Sdfr * 3548797Salc * atomic_set_char(P, V) (*(u_char*)(P) |= (V)) 3648797Salc * atomic_clear_char(P, V) (*(u_char*)(P) &= ~(V)) 3748797Salc * atomic_add_char(P, V) (*(u_char*)(P) += (V)) 3848797Salc * atomic_subtract_char(P, V) (*(u_char*)(P) -= (V)) 3948797Salc * 4048797Salc * atomic_set_short(P, V) (*(u_short*)(P) |= (V)) 4148797Salc * atomic_clear_short(P, V) (*(u_short*)(P) &= ~(V)) 4248797Salc * atomic_add_short(P, V) (*(u_short*)(P) += (V)) 4348797Salc * atomic_subtract_short(P, V) (*(u_short*)(P) -= (V)) 4448797Salc * 4548797Salc * atomic_set_int(P, V) (*(u_int*)(P) |= (V)) 4648797Salc * atomic_clear_int(P, V) (*(u_int*)(P) &= ~(V)) 4748797Salc * atomic_add_int(P, V) (*(u_int*)(P) += (V)) 4848797Salc * atomic_subtract_int(P, V) (*(u_int*)(P) -= (V)) 4966695Sjhb * atomic_readandclear_int(P) (return *(u_int*)P; *(u_int*)P = 0;) 5048797Salc * 5148797Salc * atomic_set_long(P, V) (*(u_long*)(P) |= (V)) 5248797Salc * atomic_clear_long(P, V) (*(u_long*)(P) &= ~(V)) 5348797Salc * atomic_add_long(P, V) (*(u_long*)(P) += (V)) 5448797Salc * atomic_subtract_long(P, V) (*(u_long*)(P) -= (V)) 5566695Sjhb * atomic_readandclear_long(P) (return *(u_long*)P; *(u_long*)P = 0;) 5638517Sdfr */ 5738517Sdfr 5848797Salc/* 5949999Salc * The above functions are expanded inline in the statically-linked 6049999Salc * kernel. Lock prefixes are generated if an SMP kernel is being 6149999Salc * built. 6249999Salc * 6349999Salc * Kernel modules call real functions which are built into the kernel. 6449999Salc * This allows kernel modules to be portable between UP and SMP systems. 
6548797Salc */ 6649999Salc#if defined(KLD_MODULE) 6749999Salc#define ATOMIC_ASM(NAME, TYPE, OP, V) \ 6867351Sjhbvoid atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v); 6949999Salc 7066695Sjhbint atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src); 7165514Sphk 7249999Salc#else /* !KLD_MODULE */ 7349999Salc#if defined(SMP) 7465514Sphk#if defined(LOCORE) 7565514Sphk#define MPLOCKED lock ; 7665514Sphk#else 7748797Salc#define MPLOCKED "lock ; " 7865514Sphk#endif 7948796Salc#else 8048797Salc#define MPLOCKED 8148796Salc#endif 8238517Sdfr 8348797Salc/* 8448797Salc * The assembly is volatilized to demark potential before-and-after side 8548797Salc * effects if an interrupt or SMP collision were to occur. 8648797Salc */ 8751938Speter#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ > 9) 8851938Speter/* egcs 1.1.2+ version */ 8948797Salc#define ATOMIC_ASM(NAME, TYPE, OP, V) \ 9048797Salcstatic __inline void \ 9149043Salcatomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\ 9248797Salc{ \ 9348797Salc __asm __volatile(MPLOCKED OP \ 9449043Salc : "=m" (*p) \ 9549043Salc : "0" (*p), "ir" (V)); \ 9648797Salc} 9751938Speter 9865514Sphk/* 9965514Sphk * Atomic compare and set, used by the mutex functions 10065514Sphk * 10165514Sphk * if (*dst == exp) *dst = src (all 32 bit words) 10265514Sphk * 10365514Sphk * Returns 0 on failure, non-zero on success 10465514Sphk */ 10565514Sphk 10665514Sphk#if defined(I386_CPU) 10765514Sphkstatic __inline int 10865514Sphkatomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src) 10965514Sphk{ 11065514Sphk int res = exp; 11165514Sphk 11265514Sphk __asm __volatile( 11365514Sphk " pushfl ; " 11465514Sphk " cli ; " 11565514Sphk " cmpl %1,%3 ; " 11665514Sphk " jne 1f ; " 11765514Sphk " movl %2,%3 ; " 11865514Sphk "1: " 11965514Sphk " sete %%al; " 12065514Sphk " movzbl %%al,%0 ; " 12165514Sphk " popfl ; " 12265514Sphk "# atomic_cmpset_int" 12365514Sphk : "=a" (res) /* 0 (result) */ 12465514Sphk : "0" (exp), /* 1 */ 12565514Sphk 
"r" (src), /* 2 */ 12665514Sphk "m" (*(dst)) /* 3 */ 12765514Sphk : "memory"); 12865514Sphk 12965514Sphk return (res); 13065514Sphk} 13165514Sphk#else /* defined(I386_CPU) */ 13265514Sphkstatic __inline int 13365514Sphkatomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src) 13465514Sphk{ 13565514Sphk int res = exp; 13665514Sphk 13765514Sphk __asm __volatile ( 13865514Sphk " " MPLOCKED " " 13965514Sphk " cmpxchgl %2,%3 ; " 14065514Sphk " setz %%al ; " 14165514Sphk " movzbl %%al,%0 ; " 14265514Sphk "1: " 14365514Sphk "# atomic_cmpset_int" 14465514Sphk : "=a" (res) /* 0 (result) */ 14565514Sphk : "0" (exp), /* 1 */ 14665514Sphk "r" (src), /* 2 */ 14765514Sphk "m" (*(dst)) /* 3 */ 14865514Sphk : "memory"); 14965514Sphk 15065514Sphk return (res); 15165514Sphk} 15265514Sphk#endif /* defined(I386_CPU) */ 15365514Sphk 15467587Sjhb#define atomic_cmpset_long atomic_cmpset_int 15567351Sjhb#define atomic_cmpset_acq_int atomic_cmpset_int 15667351Sjhb#define atomic_cmpset_rel_int atomic_cmpset_int 15767587Sjhb#define atomic_cmpset_acq_long atomic_cmpset_acq_int 15867587Sjhb#define atomic_cmpset_rel_long atomic_cmpset_rel_int 15967351Sjhb 16051938Speter#else 16151938Speter/* gcc <= 2.8 version */ 16251938Speter#define ATOMIC_ASM(NAME, TYPE, OP, V) \ 16351938Speterstatic __inline void \ 16451938Speteratomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\ 16551938Speter{ \ 16651938Speter __asm __volatile(MPLOCKED OP \ 16751938Speter : "=m" (*p) \ 16851938Speter : "ir" (V)); \ 16967351Sjhb} \ 17067351Sjhb \ 17167351Sjhb 17251938Speter#endif 17349999Salc#endif /* KLD_MODULE */ 17438517Sdfr 17551937Speter#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ > 9) 17651938Speter 17751938Speter/* egcs 1.1.2+ version */ 17860300SobrienATOMIC_ASM(set, char, "orb %b2,%0", v) 17960300SobrienATOMIC_ASM(clear, char, "andb %b2,%0", ~v) 18060300SobrienATOMIC_ASM(add, char, "addb %b2,%0", v) 18160300SobrienATOMIC_ASM(subtract, char, "subb %b2,%0", v) 18238517Sdfr 
18360300SobrienATOMIC_ASM(set, short, "orw %w2,%0", v) 18460300SobrienATOMIC_ASM(clear, short, "andw %w2,%0", ~v) 18560300SobrienATOMIC_ASM(add, short, "addw %w2,%0", v) 18660300SobrienATOMIC_ASM(subtract, short, "subw %w2,%0", v) 18738517Sdfr 18848797SalcATOMIC_ASM(set, int, "orl %2,%0", v) 18948797SalcATOMIC_ASM(clear, int, "andl %2,%0", ~v) 19048797SalcATOMIC_ASM(add, int, "addl %2,%0", v) 19148797SalcATOMIC_ASM(subtract, int, "subl %2,%0", v) 19248796Salc 19348797SalcATOMIC_ASM(set, long, "orl %2,%0", v) 19448797SalcATOMIC_ASM(clear, long, "andl %2,%0", ~v) 19548797SalcATOMIC_ASM(add, long, "addl %2,%0", v) 19648797SalcATOMIC_ASM(subtract, long, "subl %2,%0", v) 19748796Salc 19851917Seivind#else 19951917Seivind 20051938Speter/* gcc <= 2.8 version */ 20151938SpeterATOMIC_ASM(set, char, "orb %1,%0", v) 20251938SpeterATOMIC_ASM(clear, char, "andb %1,%0", ~v) 20351938SpeterATOMIC_ASM(add, char, "addb %1,%0", v) 20451938SpeterATOMIC_ASM(subtract, char, "subb %1,%0", v) 20551917Seivind 20651938SpeterATOMIC_ASM(set, short, "orw %1,%0", v) 20751938SpeterATOMIC_ASM(clear, short, "andw %1,%0", ~v) 20851938SpeterATOMIC_ASM(add, short, "addw %1,%0", v) 20951938SpeterATOMIC_ASM(subtract, short, "subw %1,%0", v) 21051917Seivind 21151938SpeterATOMIC_ASM(set, int, "orl %1,%0", v) 21251938SpeterATOMIC_ASM(clear, int, "andl %1,%0", ~v) 21351938SpeterATOMIC_ASM(add, int, "addl %1,%0", v) 21451938SpeterATOMIC_ASM(subtract, int, "subl %1,%0", v) 21551938Speter 21651938SpeterATOMIC_ASM(set, long, "orl %1,%0", v) 21751938SpeterATOMIC_ASM(clear, long, "andl %1,%0", ~v) 21851938SpeterATOMIC_ASM(add, long, "addl %1,%0", v) 21951938SpeterATOMIC_ASM(subtract, long, "subl %1,%0", v) 22051938Speter 22151917Seivind#endif 22251917Seivind 22367351Sjhb#undef ATOMIC_ASM 22467351Sjhb 22565514Sphk#ifndef WANT_FUNCTIONS 22667351Sjhb#define ATOMIC_ACQ_REL(NAME, TYPE) \ 22767351Sjhbstatic __inline void \ 22867351Sjhbatomic_##NAME##_acq_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\ 22967351Sjhb{ \ 
23067351Sjhb atomic_##NAME##_##TYPE(p, v); \ 23167351Sjhb} \ 23267351Sjhb \ 23367351Sjhbstatic __inline void \ 23467351Sjhbatomic_##NAME##_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\ 23567351Sjhb{ \ 23667351Sjhb atomic_##NAME##_##TYPE(p, v); \ 23767351Sjhb} 23867351Sjhb 23967351SjhbATOMIC_ACQ_REL(set, char) 24067351SjhbATOMIC_ACQ_REL(clear, char) 24167351SjhbATOMIC_ACQ_REL(add, char) 24267351SjhbATOMIC_ACQ_REL(subtract, char) 24367351SjhbATOMIC_ACQ_REL(set, short) 24467351SjhbATOMIC_ACQ_REL(clear, short) 24567351SjhbATOMIC_ACQ_REL(add, short) 24667351SjhbATOMIC_ACQ_REL(subtract, short) 24767351SjhbATOMIC_ACQ_REL(set, int) 24867351SjhbATOMIC_ACQ_REL(clear, int) 24967351SjhbATOMIC_ACQ_REL(add, int) 25067351SjhbATOMIC_ACQ_REL(subtract, int) 25167351SjhbATOMIC_ACQ_REL(set, long) 25267351SjhbATOMIC_ACQ_REL(clear, long) 25367351SjhbATOMIC_ACQ_REL(add, long) 25467351SjhbATOMIC_ACQ_REL(subtract, long) 25567351Sjhb 25667351Sjhb#undef ATOMIC_ACQ_REL 25767351Sjhb 25871023Sjhb#if defined(KLD_MODULE) 25971023Sjhb#define ATOMIC_STORE_LOAD(TYPE, LOP, SOP) \ 26071023Sjhbu_##TYPE atomic_load_acq_##TYPE(volatile u_##TYPE *p); \ 26171023Sjhbvoid atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v); 26271023Sjhb#else 26371023Sjhb#if defined(I386_CPU) 26467351Sjhb/* 26567351Sjhb * We assume that a = b will do atomic loads and stores. 26671023Sjhb * 26771023Sjhb * XXX: This is _NOT_ safe on a P6 or higher because it does not guarantee 26871023Sjhb * memory ordering. These should only be used on a 386. 
26967351Sjhb */ 27071023Sjhb#define ATOMIC_STORE_LOAD(TYPE, LOP, SOP) \ 27167351Sjhbstatic __inline u_##TYPE \ 27267351Sjhbatomic_load_acq_##TYPE(volatile u_##TYPE *p) \ 27367351Sjhb{ \ 27467351Sjhb return (*p); \ 27567351Sjhb} \ 27667351Sjhb \ 27767351Sjhbstatic __inline void \ 27867351Sjhbatomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\ 27967351Sjhb{ \ 28067351Sjhb *p = v; \ 28167351Sjhb __asm __volatile("" : : : "memory"); \ 28267351Sjhb} 28371023Sjhb#else 28467351Sjhb 28571023Sjhb#define ATOMIC_STORE_LOAD(TYPE, LOP, SOP) \ 28671023Sjhbstatic __inline u_##TYPE \ 28771023Sjhbatomic_load_acq_##TYPE(volatile u_##TYPE *p) \ 28871023Sjhb{ \ 28971023Sjhb u_##TYPE res; \ 29071023Sjhb \ 29171023Sjhb __asm __volatile(MPLOCKED LOP \ 29271023Sjhb : "+a" (res), /* 0 (result) */\ 29371023Sjhb "+m" (*p) /* 1 */ \ 29471023Sjhb : : "memory"); \ 29571023Sjhb \ 29671023Sjhb return (res); \ 29771023Sjhb} \ 29871023Sjhb \ 29971023Sjhb/* \ 30071023Sjhb * The XCHG instruction asserts LOCK automagically. 
\ 30171023Sjhb */ \ 30271023Sjhbstatic __inline void \ 30371023Sjhbatomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\ 30471023Sjhb{ \ 30571023Sjhb __asm __volatile(SOP \ 30671023Sjhb : "+m" (*p), /* 0 */ \ 30771023Sjhb "+r" (v) /* 1 */ \ 30871023Sjhb : : "memory"); \ 30971023Sjhb} 31071023Sjhb#endif /* defined(I386_CPU) */ 31171023Sjhb#endif /* defined(KLD_MODULE) */ 31267351Sjhb 31371023SjhbATOMIC_STORE_LOAD(char, "cmpxchgb %b0,%1", "xchgb %b1,%0") 31471023SjhbATOMIC_STORE_LOAD(short,"cmpxchgw %w0,%1", "xchgw %w1,%0") 31571023SjhbATOMIC_STORE_LOAD(int, "cmpxchgl %0,%1", "xchgl %1,%0") 31671023SjhbATOMIC_STORE_LOAD(long, "cmpxchgl %0,%1", "xchgl %1,%0") 31771023Sjhb 31867351Sjhb#undef ATOMIC_STORE_LOAD 31967351Sjhb 32065514Sphkstatic __inline int 32165514Sphkatomic_cmpset_ptr(volatile void *dst, void *exp, void *src) 32265514Sphk{ 32365514Sphk 32465514Sphk return ( 32565514Sphk atomic_cmpset_int((volatile u_int *)dst, (u_int)exp, (u_int)src)); 32665514Sphk} 32766695Sjhb 32867351Sjhb#define atomic_cmpset_acq_ptr atomic_cmpset_ptr 32967351Sjhb#define atomic_cmpset_rel_ptr atomic_cmpset_ptr 33067351Sjhb 33167351Sjhbstatic __inline void * 33267351Sjhbatomic_load_acq_ptr(volatile void *p) 33367351Sjhb{ 33467351Sjhb return (void *)atomic_load_acq_int((volatile u_int *)p); 33567351Sjhb} 33667351Sjhb 33767351Sjhbstatic __inline void 33867351Sjhbatomic_store_rel_ptr(volatile void *p, void *v) 33967351Sjhb{ 34067351Sjhb atomic_store_rel_int((volatile u_int *)p, (u_int)v); 34167351Sjhb} 34267351Sjhb 34367351Sjhb#define ATOMIC_PTR(NAME) \ 34467351Sjhbstatic __inline void \ 34567351Sjhbatomic_##NAME##_ptr(volatile void *p, uintptr_t v) \ 34667351Sjhb{ \ 34767351Sjhb atomic_##NAME##_int((volatile u_int *)p, v); \ 34867351Sjhb} \ 34967351Sjhb \ 35067351Sjhbstatic __inline void \ 35167351Sjhbatomic_##NAME##_acq_ptr(volatile void *p, uintptr_t v) \ 35267351Sjhb{ \ 35367351Sjhb atomic_##NAME##_acq_int((volatile u_int *)p, v);\ 35467351Sjhb} \ 35567351Sjhb \ 
35667351Sjhbstatic __inline void \ 35767351Sjhbatomic_##NAME##_rel_ptr(volatile void *p, uintptr_t v) \ 35867351Sjhb{ \ 35967351Sjhb atomic_##NAME##_rel_int((volatile u_int *)p, v);\ 36067351Sjhb} 36167351Sjhb 36267351SjhbATOMIC_PTR(set) 36367351SjhbATOMIC_PTR(clear) 36467351SjhbATOMIC_PTR(add) 36567351SjhbATOMIC_PTR(subtract) 36667351Sjhb 36767351Sjhb#undef ATOMIC_PTR 36867351Sjhb 36966695Sjhbstatic __inline u_int 37066695Sjhbatomic_readandclear_int(volatile u_int *addr) 37166695Sjhb{ 37266695Sjhb u_int result; 37366695Sjhb 37466695Sjhb __asm __volatile ( 37566695Sjhb " xorl %0,%0 ; " 37666695Sjhb " xchgl %1,%0 ; " 37766695Sjhb "# atomic_readandclear_int" 37866695Sjhb : "=&r" (result) /* 0 (result) */ 37966695Sjhb : "m" (*addr)); /* 1 (addr) */ 38066695Sjhb 38166695Sjhb return (result); 38266695Sjhb} 38366695Sjhb 38466695Sjhbstatic __inline u_long 38566695Sjhbatomic_readandclear_long(volatile u_long *addr) 38666695Sjhb{ 38766695Sjhb u_long result; 38866695Sjhb 38966695Sjhb __asm __volatile ( 39066695Sjhb " xorl %0,%0 ; " 39166695Sjhb " xchgl %1,%0 ; " 39266695Sjhb "# atomic_readandclear_int" 39366695Sjhb : "=&r" (result) /* 0 (result) */ 39466695Sjhb : "m" (*addr)); /* 1 (addr) */ 39566695Sjhb 39666695Sjhb return (result); 39766695Sjhb} 39865514Sphk#endif 39965514Sphk 40067587Sjhb#define atomic_set_8 atomic_set_char 40167587Sjhb#define atomic_set_acq_8 atomic_set_acq_char 40267587Sjhb#define atomic_set_rel_8 atomic_set_rel_char 40367587Sjhb#define atomic_clear_8 atomic_clear_char 40467587Sjhb#define atomic_clear_acq_8 atomic_clear_acq_char 40567587Sjhb#define atomic_clear_rel_8 atomic_clear_rel_char 40667587Sjhb#define atomic_add_8 atomic_add_char 40767587Sjhb#define atomic_add_acq_8 atomic_add_acq_char 40867587Sjhb#define atomic_add_rel_8 atomic_add_rel_char 40967587Sjhb#define atomic_subtract_8 atomic_subtract_char 41067587Sjhb#define atomic_subtract_acq_8 atomic_subtract_acq_char 41167587Sjhb#define atomic_subtract_rel_8 atomic_subtract_rel_char 
41267587Sjhb#define atomic_load_acq_8 atomic_load_acq_char 41367587Sjhb#define atomic_store_rel_8 atomic_store_rel_char 41467587Sjhb 41567587Sjhb#define atomic_set_16 atomic_set_short 41667587Sjhb#define atomic_set_acq_16 atomic_set_acq_short 41767587Sjhb#define atomic_set_rel_16 atomic_set_rel_short 41867587Sjhb#define atomic_clear_16 atomic_clear_short 41967587Sjhb#define atomic_clear_acq_16 atomic_clear_acq_short 42067587Sjhb#define atomic_clear_rel_16 atomic_clear_rel_short 42167587Sjhb#define atomic_add_16 atomic_add_short 42267587Sjhb#define atomic_add_acq_16 atomic_add_acq_short 42367587Sjhb#define atomic_add_rel_16 atomic_add_rel_short 42467587Sjhb#define atomic_subtract_16 atomic_subtract_short 42567587Sjhb#define atomic_subtract_acq_16 atomic_subtract_acq_short 42667587Sjhb#define atomic_subtract_rel_16 atomic_subtract_rel_short 42767587Sjhb#define atomic_load_acq_16 atomic_load_acq_short 42867587Sjhb#define atomic_store_rel_16 atomic_store_rel_short 42967587Sjhb 43067587Sjhb#define atomic_set_32 atomic_set_int 43167587Sjhb#define atomic_set_acq_32 atomic_set_acq_int 43267587Sjhb#define atomic_set_rel_32 atomic_set_rel_int 43367587Sjhb#define atomic_clear_32 atomic_clear_int 43467587Sjhb#define atomic_clear_acq_32 atomic_clear_acq_int 43567587Sjhb#define atomic_clear_rel_32 atomic_clear_rel_int 43667587Sjhb#define atomic_add_32 atomic_add_int 43767587Sjhb#define atomic_add_acq_32 atomic_add_acq_int 43867587Sjhb#define atomic_add_rel_32 atomic_add_rel_int 43967587Sjhb#define atomic_subtract_32 atomic_subtract_int 44067587Sjhb#define atomic_subtract_acq_32 atomic_subtract_acq_int 44167587Sjhb#define atomic_subtract_rel_32 atomic_subtract_rel_int 44267587Sjhb#define atomic_load_acq_32 atomic_load_acq_int 44367587Sjhb#define atomic_store_rel_32 atomic_store_rel_int 44467587Sjhb#define atomic_cmpset_32 atomic_cmpset_int 44567587Sjhb#define atomic_cmpset_acq_32 atomic_cmpset_acq_int 44667587Sjhb#define atomic_cmpset_rel_32 atomic_cmpset_rel_int 
44767587Sjhb#define atomic_readandclear_32 atomic_readandclear_int 44867587Sjhb 44938517Sdfr#endif /* ! _MACHINE_ATOMIC_H_ */ 450