/* atomic.h — FreeBSD head/sys/i386/include/atomic.h, revision 254612 */
/*-
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD: head/sys/i386/include/atomic.h 254612 2013-08-21 21:14:16Z jkim $
 */
#ifndef _MACHINE_ATOMIC_H_
#define	_MACHINE_ATOMIC_H_

#ifndef _SYS_CDEFS_H_
#error this file needs sys/cdefs.h as a prerequisite
#endif

/*
 * Memory barriers.  All three expand to the same locked read-modify-write
 * of the word at the top of the stack; the "lock" prefix plus the "memory"
 * clobber serializes both the CPU and the compiler.  NOTE(review):
 * presumably a locked RMW was chosen over mfence/sfence/lfence — see the
 * atomic_load_acq comment further below, which cites the AMD Software
 * Optimization Guide for the same choice.
 */
#define	mb()	__asm __volatile("lock; addl $0,(%%esp)" : : : "memory", "cc")
#define	wmb()	__asm __volatile("lock; addl $0,(%%esp)" : : : "memory", "cc")
#define	rmb()	__asm __volatile("lock; addl $0,(%%esp)" : : : "memory", "cc")

/*
 * Various simple operations on memory, each of which is atomic in the
 * presence of interrupts and multiple processors.
 *
 * atomic_set_char(P, V)	(*(u_char *)(P) |= (V))
 * atomic_clear_char(P, V)	(*(u_char *)(P) &= ~(V))
 * atomic_add_char(P, V)	(*(u_char *)(P) += (V))
 * atomic_subtract_char(P, V)	(*(u_char *)(P) -= (V))
 *
 * atomic_set_short(P, V)	(*(u_short *)(P) |= (V))
 * atomic_clear_short(P, V)	(*(u_short *)(P) &= ~(V))
 * atomic_add_short(P, V)	(*(u_short *)(P) += (V))
 * atomic_subtract_short(P, V)	(*(u_short *)(P) -= (V))
 *
 * atomic_set_int(P, V)		(*(u_int *)(P) |= (V))
 * atomic_clear_int(P, V)	(*(u_int *)(P) &= ~(V))
 * atomic_add_int(P, V)		(*(u_int *)(P) += (V))
 * atomic_subtract_int(P, V)	(*(u_int *)(P) -= (V))
 * atomic_readandclear_int(P)	(return (*(u_int *)(P)); *(u_int *)(P) = 0;)
 *
 * atomic_set_long(P, V)	(*(u_long *)(P) |= (V))
 * atomic_clear_long(P, V)	(*(u_long *)(P) &= ~(V))
 * atomic_add_long(P, V)	(*(u_long *)(P) += (V))
 * atomic_subtract_long(P, V)	(*(u_long *)(P) -= (V))
 * atomic_readandclear_long(P)	(return (*(u_long *)(P)); *(u_long *)(P) = 0;)
 */

/*
 * The above functions are expanded inline in the statically-linked
 * kernel.  Lock prefixes are generated if an SMP kernel is being
 * built.
 *
 * Kernel modules call real functions which are built into the kernel.
 * This allows kernel modules to be portable between UP and SMP systems.
 */
#if defined(KLD_MODULE) || !defined(__GNUCLIKE_ASM)
/* Prototype-only expansions for modules / non-GCC-like compilers. */
#define	ATOMIC_ASM(NAME, TYPE, OP, CONS, V)			\
void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v);	\
void atomic_##NAME##_barr_##TYPE(volatile u_##TYPE *p, u_##TYPE v)

int	atomic_cmpset_int(volatile u_int *dst, u_int expect, u_int src);
u_int	atomic_fetchadd_int(volatile u_int *p, u_int v);

#define	ATOMIC_LOAD(TYPE, LOP)					\
u_##TYPE	atomic_load_acq_##TYPE(volatile u_##TYPE *p)
#define	ATOMIC_STORE(TYPE)					\
void		atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)

#else /* !KLD_MODULE && __GNUCLIKE_ASM */

/*
 * For userland, always use lock prefixes so that the binaries will run
 * on both SMP and !SMP systems.
 */
#if defined(SMP) || !defined(_KERNEL)
#define	MPLOCKED	"lock ; "
#else
#define	MPLOCKED
#endif

/*
 * The assembly is volatilized to avoid code chunk removal by the compiler.
 * GCC aggressively reorders operations and memory clobbering is necessary
 * in order to avoid that for memory barriers.
 *
 * The plain variant clobbers only "cc"; the _barr_ variant additionally
 * clobbers "memory" so it also acts as a compiler barrier.
 */
#define	ATOMIC_ASM(NAME, TYPE, OP, CONS, V)		\
static __inline void					\
atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile(MPLOCKED OP			\
	: "+m" (*p)					\
	: CONS (V)					\
	: "cc");					\
}							\
							\
static __inline void					\
atomic_##NAME##_barr_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile(MPLOCKED OP			\
	: "+m" (*p)					\
	: CONS (V)					\
	: "memory", "cc");				\
}							\
struct __hack

#if defined(_KERNEL) && !defined(WANT_FUNCTIONS)

/* I486 does not support SMP or CMPXCHG8B. */
static __inline uint64_t
atomic_load_acq_64_i386(volatile uint64_t *p)
{
	volatile uint32_t *high, *low;
	uint64_t res;

	/*
	 * Two 32-bit loads with interrupts disabled (pushfl/cli ...
	 * popfl restores the previous IF state).  Atomic versus
	 * interrupts only — sufficient because i486-class CPUs are
	 * uniprocessor (see comment above).
	 */
	low = (volatile uint32_t *)p;
	high = (volatile uint32_t *)p + 1;
	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	movl %1,%%eax ;		"
	"	movl %2,%%edx ;		"
	"	popfl"
	: "=&A" (res)			/* 0 */
	: "m" (*low),			/* 1 */
	  "m" (*high)			/* 2 */
	: "memory");

	return (res);
}

static __inline void
atomic_store_rel_64_i386(volatile uint64_t *p, uint64_t v)
{
	volatile uint32_t *high, *low;

	/* Mirror of the i386 load: two 32-bit stores under cli. */
	low = (volatile uint32_t *)p;
	high = (volatile uint32_t *)p + 1;
	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	movl %%eax,%0 ;		"
	"	movl %%edx,%1 ;		"
	"	popfl"
	: "=m" (*low),			/* 0 */
	  "=m" (*high)			/* 1 */
	: "A" (v)			/* 2 */
	: "memory");
}

static __inline uint64_t
atomic_load_acq_64_i586(volatile uint64_t *p)
{
	uint64_t res;

	/*
	 * CMPXCHG8B gives an atomic 64-bit read: %eax:%edx is preloaded
	 * from %ebx:%ecx, so a matching compare writes back the value
	 * already present and a miss leaves the old value in %edx:%eax;
	 * either way *p is unchanged and res holds its contents.
	 */
	__asm __volatile(
	"	movl %%ebx,%%eax ;	"
	"	movl %%ecx,%%edx ;	"
	"	" MPLOCKED "		"
	"	cmpxchg8b %1"
	: "=&A" (res),			/* 0 */
	  "+m" (*p)			/* 1 */
	: : "memory", "cc");

	return (res);
}

static __inline void
atomic_store_rel_64_i586(volatile uint64_t *p, uint64_t v)
{

	/*
	 * Loop on CMPXCHG8B until the store of v (in %ecx:%ebx)
	 * succeeds; %edx:%eax is refreshed with the current value on
	 * each failed iteration.
	 */
	__asm __volatile(
	"	movl %%eax,%%ebx ;	"
	"	movl %%edx,%%ecx ;	"
	"1:				"
	"	" MPLOCKED "		"
	"	cmpxchg8b %0 ;		"
	"	jne 1b"
	: "+m" (*p),			/* 0 */
	  "+A" (v)			/* 1 */
	: : "ebx", "ecx", "memory", "cc");
}

#endif /* _KERNEL && !WANT_FUNCTIONS */

/*
 * Atomic compare and set, used by the mutex functions
 *
 * if (*dst == expect) *dst = src (all 32 bit words)
 *
 * Returns 0 on failure, non-zero on success
 */

#ifdef CPU_DISABLE_CMPXCHG

/* CMPXCHG-less fallback: compare/store under cli, atomic vs interrupts. */
static __inline int
atomic_cmpset_int(volatile u_int *dst, u_int expect, u_int src)
{
	u_char res;

	__asm __volatile(
	"	pushfl ;		"
	"	cli ;			"
	"	cmpl %3,%1 ;		"
	"	jne 1f ;		"
	"	movl %2,%1 ;		"
	"1:				"
	"       sete %0 ;		"
	"	popfl ;			"
	"# atomic_cmpset_int"
	: "=q" (res),			/* 0 */
	  "+m" (*dst)			/* 1 */
	: "r" (src),			/* 2 */
	  "r" (expect)			/* 3 */
	: "memory");

	return (res);
}

#else /* !CPU_DISABLE_CMPXCHG */

static __inline int
atomic_cmpset_int(volatile u_int *dst, u_int expect, u_int src)
{
	u_char res;

	/* expect must be in %eax for CMPXCHGL; ZF reports success. */
	__asm __volatile(
	"	" MPLOCKED "		"
	"	cmpxchgl %2,%1 ;	"
	"       sete %0 ;		"
	"# atomic_cmpset_int"
	: "=a" (res),			/* 0 */
	  "+m" (*dst)			/* 1 */
	: "r" (src),			/* 2 */
	  "a" (expect)			/* 3 */
	: "memory", "cc");

	return (res);
}

#endif /* CPU_DISABLE_CMPXCHG */

/*
 * Atomically add the value of v to the integer pointed to by p and return
 * the previous value of *p.
 */
static __inline u_int
atomic_fetchadd_int(volatile u_int *p, u_int v)
{

	/* XADDL exchanges v with *p while adding, yielding the old *p. */
	__asm __volatile(
	"	" MPLOCKED "		"
	"	xaddl %0,%1 ;		"
	"# atomic_fetchadd_int"
	: "+r" (v),			/* 0 */
	  "+m" (*p)			/* 1 */
	: : "cc");
	return (v);
}

/*
 * We assume that a = b will do atomic loads and stores.  Due to the
 * IA32 memory model, a simple store guarantees release semantics.
 *
 * However, loads may pass stores, so for atomic_load_acq we have to
 * ensure a Store/Load barrier to do the load in SMP kernels.  We use
 * "lock cmpxchg" as recommended by the AMD Software Optimization
 * Guide, and not mfence.  For UP kernels, however, the cache of the
 * single processor is always consistent, so we only need to take care
 * of the compiler.
 */
#define	ATOMIC_STORE(TYPE)				\
static __inline void					\
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__compiler_membar();				\
	*p = v;						\
}							\
struct __hack

#if defined(_KERNEL) && !defined(SMP)

/* UP kernel: a plain load plus a compiler barrier suffices. */
#define	ATOMIC_LOAD(TYPE, LOP)				\
static __inline u_##TYPE				\
atomic_load_acq_##TYPE(volatile u_##TYPE *p)		\
{							\
	u_##TYPE tmp;					\
							\
	tmp = *p;					\
	__compiler_membar();				\
	return (tmp);					\
}							\
struct __hack

#else /* !(_KERNEL && !SMP) */

/* SMP (or userland): locked CMPXCHG is used as the acquiring load. */
#define	ATOMIC_LOAD(TYPE, LOP)				\
static __inline u_##TYPE				\
atomic_load_acq_##TYPE(volatile u_##TYPE *p)		\
{							\
	u_##TYPE res;					\
							\
	__asm __volatile(MPLOCKED LOP			\
	: "=a" (res),			/* 0 */		\
	  "+m" (*p)			/* 1 */		\
	: : "memory", "cc");				\
							\
	return (res);					\
}							\
struct __hack

#endif /* _KERNEL && !SMP */

#endif /* KLD_MODULE || !__GNUCLIKE_ASM */

ATOMIC_ASM(set,	     char,  "orb %b1,%0",  "iq",  v);
ATOMIC_ASM(clear,    char,  "andb %b1,%0", "iq", ~v);
ATOMIC_ASM(add,	     char,  "addb %b1,%0", "iq",  v);
ATOMIC_ASM(subtract, char,  "subb %b1,%0", "iq",  v);

ATOMIC_ASM(set,	     short, "orw %w1,%0",  "ir",  v);
ATOMIC_ASM(clear,    short, "andw %w1,%0", "ir", ~v);
ATOMIC_ASM(add,	     short, "addw %w1,%0", "ir",  v);
ATOMIC_ASM(subtract, short, "subw %w1,%0", "ir",  v);

ATOMIC_ASM(set,	     int,   "orl %1,%0",   "ir",  v);
ATOMIC_ASM(clear,    int,   "andl %1,%0",  "ir", ~v);
ATOMIC_ASM(add,	     int,   "addl %1,%0",  "ir",  v);
ATOMIC_ASM(subtract, int,   "subl %1,%0",  "ir",  v);

/* On i386, long is 32 bits, so the long ops share the 32-bit insns. */
ATOMIC_ASM(set,	     long,  "orl %1,%0",   "ir",  v);
ATOMIC_ASM(clear,    long,  "andl %1,%0",  "ir", ~v);
ATOMIC_ASM(add,	     long,  "addl %1,%0",  "ir",  v);
ATOMIC_ASM(subtract, long,  "subl %1,%0",  "ir",  v);

ATOMIC_LOAD(char,  "cmpxchgb %b0,%1");
ATOMIC_LOAD(short, "cmpxchgw %w0,%1");
ATOMIC_LOAD(int,   "cmpxchgl %0,%1");
ATOMIC_LOAD(long,  "cmpxchgl %0,%1");

ATOMIC_STORE(char);
ATOMIC_STORE(short);
ATOMIC_STORE(int);
ATOMIC_STORE(long);

#undef ATOMIC_ASM
#undef ATOMIC_LOAD
#undef ATOMIC_STORE

#ifndef WANT_FUNCTIONS

#ifdef _KERNEL
/*
 * 64-bit load/store are dispatched through function pointers so the
 * kernel can select the i386 (cli-based) or i586 (cmpxchg8b-based)
 * implementation at runtime.
 */
extern uint64_t	(*atomic_load_acq_64)(volatile uint64_t *);
extern void	(*atomic_store_rel_64)(volatile uint64_t *, uint64_t);
#endif

/* long is 32 bits on i386, so delegate to the int primitive. */
static __inline int
atomic_cmpset_long(volatile u_long *dst, u_long expect, u_long src)
{

	return (atomic_cmpset_int((volatile u_int *)dst, (u_int)expect,
	    (u_int)src));
}

static __inline u_long
atomic_fetchadd_long(volatile u_long *p, u_long v)
{

	return (atomic_fetchadd_int((volatile u_int *)p, (u_int)v));
}

/* Read the current value and store a zero in the destination. */
#ifdef __GNUCLIKE_ASM

static __inline u_int
atomic_readandclear_int(volatile u_int *p)
{
	u_int res;

	/* XCHGL with a memory operand is implicitly locked. */
	res = 0;
	__asm __volatile(
	"	xchgl %1,%0 ;		"
	"# atomic_readandclear_int"
	: "+r" (res),			/* 0 */
	  "+m" (*p));			/* 1 */

	return (res);
}

static __inline u_long
atomic_readandclear_long(volatile u_long *p)
{
	u_long res;

	res = 0;
	__asm __volatile(
	"	xchgl %1,%0 ;		"
	"# atomic_readandclear_long"
	: "+r" (res),			/* 0 */
	  "+m" (*p));			/* 1 */

	return (res);
}

#else /* !__GNUCLIKE_ASM */

u_int	atomic_readandclear_int(volatile u_int *p);
u_long	atomic_readandclear_long(volatile u_long *p);

#endif /* __GNUCLIKE_ASM */

/*
 * Acquire and release variants of the simple ops all map to the _barr_
 * form generated by ATOMIC_ASM: on x86 the locked instruction plus the
 * "memory" clobber provides both orderings.
 */
#define	atomic_set_acq_char		atomic_set_barr_char
#define	atomic_set_rel_char		atomic_set_barr_char
#define	atomic_clear_acq_char		atomic_clear_barr_char
#define	atomic_clear_rel_char		atomic_clear_barr_char
#define	atomic_add_acq_char		atomic_add_barr_char
#define	atomic_add_rel_char		atomic_add_barr_char
#define	atomic_subtract_acq_char	atomic_subtract_barr_char
#define	atomic_subtract_rel_char	atomic_subtract_barr_char
#define	atomic_set_acq_short		atomic_set_barr_short
#define	atomic_set_rel_short		atomic_set_barr_short
#define	atomic_clear_acq_short		atomic_clear_barr_short
#define	atomic_clear_rel_short		atomic_clear_barr_short
#define	atomic_add_acq_short		atomic_add_barr_short
#define	atomic_add_rel_short		atomic_add_barr_short
#define	atomic_subtract_acq_short	atomic_subtract_barr_short
#define	atomic_subtract_rel_short	atomic_subtract_barr_short

#define	atomic_set_acq_int		atomic_set_barr_int
#define	atomic_set_rel_int		atomic_set_barr_int
#define	atomic_clear_acq_int		atomic_clear_barr_int
#define	atomic_clear_rel_int		atomic_clear_barr_int
#define	atomic_add_acq_int		atomic_add_barr_int
#define	atomic_add_rel_int		atomic_add_barr_int
#define	atomic_subtract_acq_int		atomic_subtract_barr_int
#define	atomic_subtract_rel_int		atomic_subtract_barr_int
/* cmpset is a full barrier already; acq/rel variants are the same op. */
#define	atomic_cmpset_acq_int		atomic_cmpset_int
#define	atomic_cmpset_rel_int		atomic_cmpset_int

#define	atomic_set_acq_long		atomic_set_barr_long
#define	atomic_set_rel_long		atomic_set_barr_long
#define	atomic_clear_acq_long		atomic_clear_barr_long
#define	atomic_clear_rel_long		atomic_clear_barr_long
#define	atomic_add_acq_long		atomic_add_barr_long
#define	atomic_add_rel_long		atomic_add_barr_long
#define	atomic_subtract_acq_long	atomic_subtract_barr_long
#define	atomic_subtract_rel_long	atomic_subtract_barr_long
#define	atomic_cmpset_acq_long		atomic_cmpset_long
#define	atomic_cmpset_rel_long		atomic_cmpset_long

/* Operations on 8-bit bytes. */
#define	atomic_set_8		atomic_set_char
#define	atomic_set_acq_8	atomic_set_acq_char
#define	atomic_set_rel_8	atomic_set_rel_char
#define	atomic_clear_8		atomic_clear_char
#define	atomic_clear_acq_8	atomic_clear_acq_char
#define	atomic_clear_rel_8	atomic_clear_rel_char
#define	atomic_add_8		atomic_add_char
#define	atomic_add_acq_8	atomic_add_acq_char
#define	atomic_add_rel_8	atomic_add_rel_char
#define	atomic_subtract_8	atomic_subtract_char
#define	atomic_subtract_acq_8	atomic_subtract_acq_char
#define	atomic_subtract_rel_8	atomic_subtract_rel_char
#define	atomic_load_acq_8	atomic_load_acq_char
#define	atomic_store_rel_8	atomic_store_rel_char

/* Operations on 16-bit words. */
#define	atomic_set_16		atomic_set_short
#define	atomic_set_acq_16	atomic_set_acq_short
#define	atomic_set_rel_16	atomic_set_rel_short
#define	atomic_clear_16		atomic_clear_short
#define	atomic_clear_acq_16	atomic_clear_acq_short
#define	atomic_clear_rel_16	atomic_clear_rel_short
#define	atomic_add_16		atomic_add_short
#define	atomic_add_acq_16	atomic_add_acq_short
#define	atomic_add_rel_16	atomic_add_rel_short
#define	atomic_subtract_16	atomic_subtract_short
#define	atomic_subtract_acq_16	atomic_subtract_acq_short
#define	atomic_subtract_rel_16	atomic_subtract_rel_short
#define	atomic_load_acq_16	atomic_load_acq_short
#define	atomic_store_rel_16	atomic_store_rel_short

/* Operations on 32-bit double words. */
#define	atomic_set_32		atomic_set_int
#define	atomic_set_acq_32	atomic_set_acq_int
#define	atomic_set_rel_32	atomic_set_rel_int
#define	atomic_clear_32		atomic_clear_int
#define	atomic_clear_acq_32	atomic_clear_acq_int
#define	atomic_clear_rel_32	atomic_clear_rel_int
#define	atomic_add_32		atomic_add_int
#define	atomic_add_acq_32	atomic_add_acq_int
#define	atomic_add_rel_32	atomic_add_rel_int
#define	atomic_subtract_32	atomic_subtract_int
#define	atomic_subtract_acq_32	atomic_subtract_acq_int
#define	atomic_subtract_rel_32	atomic_subtract_rel_int
#define	atomic_load_acq_32	atomic_load_acq_int
#define	atomic_store_rel_32	atomic_store_rel_int
#define	atomic_cmpset_32	atomic_cmpset_int
#define	atomic_cmpset_acq_32	atomic_cmpset_acq_int
#define	atomic_cmpset_rel_32	atomic_cmpset_rel_int
#define	atomic_readandclear_32	atomic_readandclear_int
#define	atomic_fetchadd_32	atomic_fetchadd_int

/* Operations on pointers.  Pointers are 32 bits on i386, hence u_int. */
#define	atomic_set_ptr(p, v) \
	atomic_set_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_set_acq_ptr(p, v) \
	atomic_set_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_set_rel_ptr(p, v) \
	atomic_set_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_ptr(p, v) \
	atomic_clear_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_acq_ptr(p, v) \
	atomic_clear_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_rel_ptr(p, v) \
	atomic_clear_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_ptr(p, v) \
	atomic_add_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_acq_ptr(p, v) \
	atomic_add_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_rel_ptr(p, v) \
	atomic_add_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_ptr(p, v) \
	atomic_subtract_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_acq_ptr(p, v) \
	atomic_subtract_acq_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_rel_ptr(p, v) \
	atomic_subtract_rel_int((volatile u_int *)(p), (u_int)(v))
#define	atomic_load_acq_ptr(p) \
	atomic_load_acq_int((volatile u_int *)(p))
#define	atomic_store_rel_ptr(p, v) \
	atomic_store_rel_int((volatile u_int *)(p), (v))
#define	atomic_cmpset_ptr(dst, old, new) \
	atomic_cmpset_int((volatile u_int *)(dst), (u_int)(old), (u_int)(new))
#define	atomic_cmpset_acq_ptr(dst, old, new) \
	atomic_cmpset_acq_int((volatile u_int *)(dst), (u_int)(old), \
	    (u_int)(new))
#define	atomic_cmpset_rel_ptr(dst, old, new) \
	atomic_cmpset_rel_int((volatile u_int *)(dst), (u_int)(old), \
	    (u_int)(new))
#define	atomic_readandclear_ptr(p) \
	atomic_readandclear_int((volatile u_int *)(p))

#endif /* !WANT_FUNCTIONS */

#endif /* !_MACHINE_ATOMIC_H_ */