/*-
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
2538517Sdfr * 2650477Speter * $FreeBSD$ 2738517Sdfr */ 2838517Sdfr#ifndef _MACHINE_ATOMIC_H_ 29147855Sjhb#define _MACHINE_ATOMIC_H_ 3038517Sdfr 31143063Sjoerg#ifndef _SYS_CDEFS_H_ 32143063Sjoerg#error this file needs sys/cdefs.h as a prerequisite 33143063Sjoerg#endif 34143063Sjoerg 35197824Sattilio#define mb() __asm __volatile("mfence;" : : : "memory") 36197824Sattilio#define wmb() __asm __volatile("sfence;" : : : "memory") 37197824Sattilio#define rmb() __asm __volatile("lfence;" : : : "memory") 38185162Skmacy 3938517Sdfr/* 40165635Sbde * Various simple operations on memory, each of which is atomic in the 41165635Sbde * presence of interrupts and multiple processors. 4238517Sdfr * 43165633Sbde * atomic_set_char(P, V) (*(u_char *)(P) |= (V)) 44165633Sbde * atomic_clear_char(P, V) (*(u_char *)(P) &= ~(V)) 45165633Sbde * atomic_add_char(P, V) (*(u_char *)(P) += (V)) 46165633Sbde * atomic_subtract_char(P, V) (*(u_char *)(P) -= (V)) 4748797Salc * 48165633Sbde * atomic_set_short(P, V) (*(u_short *)(P) |= (V)) 49165633Sbde * atomic_clear_short(P, V) (*(u_short *)(P) &= ~(V)) 50165633Sbde * atomic_add_short(P, V) (*(u_short *)(P) += (V)) 51165633Sbde * atomic_subtract_short(P, V) (*(u_short *)(P) -= (V)) 5248797Salc * 53165633Sbde * atomic_set_int(P, V) (*(u_int *)(P) |= (V)) 54165633Sbde * atomic_clear_int(P, V) (*(u_int *)(P) &= ~(V)) 55165633Sbde * atomic_add_int(P, V) (*(u_int *)(P) += (V)) 56165633Sbde * atomic_subtract_int(P, V) (*(u_int *)(P) -= (V)) 57262807Sdumbbell * atomic_swap_int(P, V) (return (*(u_int *)(P)); *(u_int *)(P) = (V);) 58165635Sbde * atomic_readandclear_int(P) (return (*(u_int *)(P)); *(u_int *)(P) = 0;) 5948797Salc * 60165633Sbde * atomic_set_long(P, V) (*(u_long *)(P) |= (V)) 61165633Sbde * atomic_clear_long(P, V) (*(u_long *)(P) &= ~(V)) 62165633Sbde * atomic_add_long(P, V) (*(u_long *)(P) += (V)) 63165633Sbde * atomic_subtract_long(P, V) (*(u_long *)(P) -= (V)) 64262807Sdumbbell * atomic_swap_long(P, V) (return (*(u_long *)(P)); *(u_long *)(P) 
= (V);) 65165635Sbde * atomic_readandclear_long(P) (return (*(u_long *)(P)); *(u_long *)(P) = 0;) 6638517Sdfr */ 6738517Sdfr 6848797Salc/* 6949999Salc * The above functions are expanded inline in the statically-linked 7049999Salc * kernel. Lock prefixes are generated if an SMP kernel is being 7149999Salc * built. 7249999Salc * 7349999Salc * Kernel modules call real functions which are built into the kernel. 7449999Salc * This allows kernel modules to be portable between UP and SMP systems. 7548797Salc */ 76165578Sbde#if defined(KLD_MODULE) || !defined(__GNUCLIKE_ASM) 77147855Sjhb#define ATOMIC_ASM(NAME, TYPE, OP, CONS, V) \ 78197803Sattiliovoid atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v); \ 79197803Sattiliovoid atomic_##NAME##_barr_##TYPE(volatile u_##TYPE *p, u_##TYPE v) 8049999Salc 81208332Sphkint atomic_cmpset_int(volatile u_int *dst, u_int expect, u_int src); 82208332Sphkint atomic_cmpset_long(volatile u_long *dst, u_long expect, u_long src); 83165633Sbdeu_int atomic_fetchadd_int(volatile u_int *p, u_int v); 84177276Spjdu_long atomic_fetchadd_long(volatile u_long *p, u_long v); 85262807Sdumbbellint atomic_testandset_int(volatile u_int *p, u_int v); 86262807Sdumbbellint atomic_testandset_long(volatile u_long *p, u_int v); 8765514Sphk 88237161Skib#define ATOMIC_LOAD(TYPE, LOP) \ 89237161Skibu_##TYPE atomic_load_acq_##TYPE(volatile u_##TYPE *p) 90237161Skib#define ATOMIC_STORE(TYPE) \ 91100251Smarkmvoid atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v) 9271085Sjhb 93165578Sbde#else /* !KLD_MODULE && __GNUCLIKE_ASM */ 9472358Smarkm 9584679Sjhb/* 96165635Sbde * For userland, always use lock prefixes so that the binaries will run 97165635Sbde * on both SMP and !SMP systems. 
#if defined(SMP) || !defined(_KERNEL)
#define	MPLOCKED	"lock ; "
#else
#define	MPLOCKED
#endif

/*
 * The assembly is volatilized to avoid code chunk removal by the
 * compiler.  GCC aggressively reorders operations and memory clobbering
 * is necessary in order to avoid that for memory barriers.
 *
 * The "barr" variants additionally clobber memory so that the compiler
 * does not move accesses across them.
 */
#define	ATOMIC_ASM(NAME, TYPE, OP, CONS, V)		\
static __inline void					\
atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile(MPLOCKED OP			\
	: "=m" (*p)					\
	: CONS (V), "m" (*p)				\
	: "cc");					\
}							\
							\
static __inline void					\
atomic_##NAME##_barr_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile(MPLOCKED OP			\
	: "=m" (*p)					\
	: CONS (V), "m" (*p)				\
	: "memory", "cc");				\
}							\
struct __hack

/*
 * Atomic compare and set, used by the mutex functions
 *
 * if (*dst == expect) *dst = src (all 32 bit words)
 *
 * Returns 0 on failure, non-zero on success
 */

static __inline int
atomic_cmpset_int(volatile u_int *dst, u_int expect, u_int src)
{
	u_char res;

	__asm __volatile(
	"	" MPLOCKED "		"
	"	cmpxchgl %2,%1 ;	"
	"       sete	%0 ;		"
	"1:				"
	"# atomic_cmpset_int"
	: "=a" (res),			/* 0 */
	  "=m" (*dst)			/* 1 */
	: "r" (src),			/* 2 */
	  "a" (expect),			/* 3 */
	  "m" (*dst)			/* 4 */
	: "memory", "cc");

	return (res);
}

static __inline int
atomic_cmpset_long(volatile u_long *dst, u_long expect, u_long src)
{
	u_char res;

	__asm __volatile(
	"	" MPLOCKED "		"
	"	cmpxchgq %2,%1 ;	"
	"       sete	%0 ;		"
	"1:				"
	"# atomic_cmpset_long"
	: "=a" (res),			/* 0 */
	  "=m" (*dst)			/* 1 */
	: "r" (src),			/* 2 */
	  "a" (expect),			/* 3 */
	  "m" (*dst)			/* 4 */
	: "memory", "cc");

	return (res);
}

/*
 * Atomically add the value of v to the integer pointed to by p and return
 * the previous value of *p.
 */
static __inline u_int
atomic_fetchadd_int(volatile u_int *p, u_int v)
{

	__asm __volatile(
	"	" MPLOCKED "		"
	"	xaddl	%0, %1 ;	"
	"# atomic_fetchadd_int"
	: "+r" (v),			/* 0 (result) */
	  "=m" (*p)			/* 1 */
	: "m" (*p)			/* 2 */
	: "cc");
	return (v);
}

/*
 * Atomically add the value of v to the long integer pointed to by p and return
 * the previous value of *p.
 */
static __inline u_long
atomic_fetchadd_long(volatile u_long *p, u_long v)
{

	__asm __volatile(
	"	" MPLOCKED "		"
	"	xaddq	%0, %1 ;	"
	"# atomic_fetchadd_long"
	: "+r" (v),			/* 0 (result) */
	  "=m" (*p)			/* 1 */
	: "m" (*p)			/* 2 */
	: "cc");
	return (v);
}

/*
 * Atomically set bit (v mod 32) in *p and return the bit's previous value.
 */
static __inline int
atomic_testandset_int(volatile u_int *p, u_int v)
{
	u_char res;

	__asm __volatile(
	"	" MPLOCKED "		"
	"	btsl	%2,%1 ;		"
	"	setc	%0 ;		"
	"# atomic_testandset_int"
	: "=q" (res),			/* 0 */
	  "+m" (*p)			/* 1 */
	: "Ir" (v & 0x1f)		/* 2 */
	: "cc");
	return (res);
}

/*
 * Atomically set bit (v mod 64) in *p and return the bit's previous value.
 */
static __inline int
atomic_testandset_long(volatile u_long *p, u_int v)
{
	u_char res;

	__asm __volatile(
	"	" MPLOCKED "		"
	"	btsq	%2,%1 ;		"
	"	setc	%0 ;		"
	"# atomic_testandset_long"
	: "=q" (res),			/* 0 */
	  "+m" (*p)			/* 1 */
	: "Jr" ((u_long)(v & 0x3f))	/* 2 */
	: "cc");
	return (res);
}
255237161Skib * 256237161Skib * However, loads may pass stores, so for atomic_load_acq we have to 257237161Skib * ensure a Store/Load barrier to do the load in SMP kernels. We use 258237161Skib * "lock cmpxchg" as recommended by the AMD Software Optimization 259237161Skib * Guide, and not mfence. For UP kernels, however, the cache of the 260237161Skib * single processor is always consistent, so we only need to take care 261237161Skib * of the compiler. 262237161Skib */ 263237161Skib#define ATOMIC_STORE(TYPE) \ 264237161Skibstatic __inline void \ 265237161Skibatomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\ 266237161Skib{ \ 267254169Smarius __compiler_membar(); \ 268237161Skib *p = v; \ 269237161Skib} \ 270237161Skibstruct __hack 271237161Skib 272148267Speter#if defined(_KERNEL) && !defined(SMP) 273148267Speter 274237161Skib#define ATOMIC_LOAD(TYPE, LOP) \ 27567351Sjhbstatic __inline u_##TYPE \ 27667351Sjhbatomic_load_acq_##TYPE(volatile u_##TYPE *p) \ 27767351Sjhb{ \ 278197803Sattilio u_##TYPE tmp; \ 279197803Sattilio \ 280197803Sattilio tmp = *p; \ 281254169Smarius __compiler_membar(); \ 282197803Sattilio return (tmp); \ 283148267Speter} \ 284148267Speterstruct __hack 285148267Speter 286165635Sbde#else /* !(_KERNEL && !SMP) */ 287148267Speter 288237161Skib#define ATOMIC_LOAD(TYPE, LOP) \ 289148267Speterstatic __inline u_##TYPE \ 290148267Speteratomic_load_acq_##TYPE(volatile u_##TYPE *p) \ 291148267Speter{ \ 29271023Sjhb u_##TYPE res; \ 29371023Sjhb \ 294165630Sbde __asm __volatile(MPLOCKED LOP \ 295165635Sbde : "=a" (res), /* 0 */ \ 296150182Sjhb "=m" (*p) /* 1 */ \ 297150182Sjhb : "m" (*p) /* 2 */ \ 298216524Skib : "memory", "cc"); \ 29971023Sjhb \ 30071023Sjhb return (res); \ 30171023Sjhb} \ 302122940Speterstruct __hack 303100327Smarkm 304165635Sbde#endif /* _KERNEL && !SMP */ 305148267Speter 306165578Sbde#endif /* KLD_MODULE || !__GNUCLIKE_ASM */ 307100251Smarkm 308100251SmarkmATOMIC_ASM(set, char, "orb %b1,%0", "iq", v); 
309100251SmarkmATOMIC_ASM(clear, char, "andb %b1,%0", "iq", ~v); 310100251SmarkmATOMIC_ASM(add, char, "addb %b1,%0", "iq", v); 311100251SmarkmATOMIC_ASM(subtract, char, "subb %b1,%0", "iq", v); 31271085Sjhb 313100251SmarkmATOMIC_ASM(set, short, "orw %w1,%0", "ir", v); 314100251SmarkmATOMIC_ASM(clear, short, "andw %w1,%0", "ir", ~v); 315100251SmarkmATOMIC_ASM(add, short, "addw %w1,%0", "ir", v); 316100251SmarkmATOMIC_ASM(subtract, short, "subw %w1,%0", "ir", v); 31771085Sjhb 318100251SmarkmATOMIC_ASM(set, int, "orl %1,%0", "ir", v); 319100251SmarkmATOMIC_ASM(clear, int, "andl %1,%0", "ir", ~v); 320100251SmarkmATOMIC_ASM(add, int, "addl %1,%0", "ir", v); 321100251SmarkmATOMIC_ASM(subtract, int, "subl %1,%0", "ir", v); 32271085Sjhb 323114349SpeterATOMIC_ASM(set, long, "orq %1,%0", "ir", v); 324114349SpeterATOMIC_ASM(clear, long, "andq %1,%0", "ir", ~v); 325114349SpeterATOMIC_ASM(add, long, "addq %1,%0", "ir", v); 326114349SpeterATOMIC_ASM(subtract, long, "subq %1,%0", "ir", v); 32771085Sjhb 328237161SkibATOMIC_LOAD(char, "cmpxchgb %b0,%1"); 329237161SkibATOMIC_LOAD(short, "cmpxchgw %w0,%1"); 330237161SkibATOMIC_LOAD(int, "cmpxchgl %0,%1"); 331237161SkibATOMIC_LOAD(long, "cmpxchgq %0,%1"); 33271023Sjhb 333237161SkibATOMIC_STORE(char); 334237161SkibATOMIC_STORE(short); 335237161SkibATOMIC_STORE(int); 336237161SkibATOMIC_STORE(long); 337237161Skib 33871085Sjhb#undef ATOMIC_ASM 339237161Skib#undef ATOMIC_LOAD 340237161Skib#undef ATOMIC_STORE 34167351Sjhb 342165635Sbde#ifndef WANT_FUNCTIONS 343147855Sjhb 344147855Sjhb/* Read the current value and store a zero in the destination. 
#ifdef __GNUCLIKE_ASM

static __inline u_int
atomic_readandclear_int(volatile u_int *addr)
{
	u_int res;

	res = 0;
	__asm __volatile(
	"	xchgl	%1,%0 ;		"
	"# atomic_readandclear_int"
	: "+r" (res),			/* 0 */
	  "=m" (*addr)			/* 1 */
	: "m" (*addr));

	return (res);
}

static __inline u_long
atomic_readandclear_long(volatile u_long *addr)
{
	u_long res;

	res = 0;
	__asm __volatile(
	"	xchgq	%1,%0 ;		"
	"# atomic_readandclear_long"
	: "+r" (res),			/* 0 */
	  "=m" (*addr)			/* 1 */
	: "m" (*addr));

	return (res);
}

/* Exchange *p with v and return the old value (implicitly locked xchg). */
static __inline u_int
atomic_swap_int(volatile u_int *p, u_int v)
{

	__asm __volatile(
	"	xchgl	%1,%0 ;		"
	"# atomic_swap_int"
	: "+r" (v),			/* 0 */
	  "+m" (*p));			/* 1 */
	return (v);
}

static __inline u_long
atomic_swap_long(volatile u_long *p, u_long v)
{

	__asm __volatile(
	"	xchgq	%1,%0 ;		"
	"# atomic_swap_long"
	: "+r" (v),			/* 0 */
	  "+m" (*p));			/* 1 */
	return (v);
}

#else /* !__GNUCLIKE_ASM */

u_int	atomic_readandclear_int(volatile u_int *addr);
u_long	atomic_readandclear_long(volatile u_long *addr);
u_int	atomic_swap_int(volatile u_int *p, u_int v);
u_long	atomic_swap_long(volatile u_long *p, u_long v);

#endif /* __GNUCLIKE_ASM */
/* Acquire/release variants of the plain ops map to the "barr" forms. */
#define	atomic_set_acq_char		atomic_set_barr_char
#define	atomic_set_rel_char		atomic_set_barr_char
#define	atomic_clear_acq_char		atomic_clear_barr_char
#define	atomic_clear_rel_char		atomic_clear_barr_char
#define	atomic_add_acq_char		atomic_add_barr_char
#define	atomic_add_rel_char		atomic_add_barr_char
#define	atomic_subtract_acq_char	atomic_subtract_barr_char
#define	atomic_subtract_rel_char	atomic_subtract_barr_char

#define	atomic_set_acq_short		atomic_set_barr_short
#define	atomic_set_rel_short		atomic_set_barr_short
#define	atomic_clear_acq_short		atomic_clear_barr_short
#define	atomic_clear_rel_short		atomic_clear_barr_short
#define	atomic_add_acq_short		atomic_add_barr_short
#define	atomic_add_rel_short		atomic_add_barr_short
#define	atomic_subtract_acq_short	atomic_subtract_barr_short
#define	atomic_subtract_rel_short	atomic_subtract_barr_short

#define	atomic_set_acq_int		atomic_set_barr_int
#define	atomic_set_rel_int		atomic_set_barr_int
#define	atomic_clear_acq_int		atomic_clear_barr_int
#define	atomic_clear_rel_int		atomic_clear_barr_int
#define	atomic_add_acq_int		atomic_add_barr_int
#define	atomic_add_rel_int		atomic_add_barr_int
#define	atomic_subtract_acq_int		atomic_subtract_barr_int
#define	atomic_subtract_rel_int		atomic_subtract_barr_int
#define	atomic_cmpset_acq_int		atomic_cmpset_int
#define	atomic_cmpset_rel_int		atomic_cmpset_int
#define	atomic_set_acq_long		atomic_set_barr_long
#define	atomic_set_rel_long		atomic_set_barr_long
#define	atomic_clear_acq_long		atomic_clear_barr_long
#define	atomic_clear_rel_long		atomic_clear_barr_long
#define	atomic_add_acq_long		atomic_add_barr_long
#define	atomic_add_rel_long		atomic_add_barr_long
#define	atomic_subtract_acq_long	atomic_subtract_barr_long
#define	atomic_subtract_rel_long	atomic_subtract_barr_long
#define	atomic_cmpset_acq_long		atomic_cmpset_long
#define	atomic_cmpset_rel_long		atomic_cmpset_long

/* Operations on 8-bit bytes. */
#define	atomic_set_8		atomic_set_char
#define	atomic_set_acq_8	atomic_set_acq_char
#define	atomic_set_rel_8	atomic_set_rel_char
#define	atomic_clear_8		atomic_clear_char
#define	atomic_clear_acq_8	atomic_clear_acq_char
#define	atomic_clear_rel_8	atomic_clear_rel_char
#define	atomic_add_8		atomic_add_char
#define	atomic_add_acq_8	atomic_add_acq_char
#define	atomic_add_rel_8	atomic_add_rel_char
#define	atomic_subtract_8	atomic_subtract_char
#define	atomic_subtract_acq_8	atomic_subtract_acq_char
#define	atomic_subtract_rel_8	atomic_subtract_rel_char
#define	atomic_load_acq_8	atomic_load_acq_char
#define	atomic_store_rel_8	atomic_store_rel_char

/* Operations on 16-bit words. */
#define	atomic_set_16		atomic_set_short
#define	atomic_set_acq_16	atomic_set_acq_short
#define	atomic_set_rel_16	atomic_set_rel_short
#define	atomic_clear_16		atomic_clear_short
#define	atomic_clear_acq_16	atomic_clear_acq_short
#define	atomic_clear_rel_16	atomic_clear_rel_short
#define	atomic_add_16		atomic_add_short
#define	atomic_add_acq_16	atomic_add_acq_short
#define	atomic_add_rel_16	atomic_add_rel_short
#define	atomic_subtract_16	atomic_subtract_short
#define	atomic_subtract_acq_16	atomic_subtract_acq_short
#define	atomic_subtract_rel_16	atomic_subtract_rel_short
#define	atomic_load_acq_16	atomic_load_acq_short
#define	atomic_store_rel_16	atomic_store_rel_short

/* Operations on 32-bit double words. */
#define	atomic_set_32		atomic_set_int
#define	atomic_set_acq_32	atomic_set_acq_int
#define	atomic_set_rel_32	atomic_set_rel_int
#define	atomic_clear_32		atomic_clear_int
#define	atomic_clear_acq_32	atomic_clear_acq_int
#define	atomic_clear_rel_32	atomic_clear_rel_int
#define	atomic_add_32		atomic_add_int
#define	atomic_add_acq_32	atomic_add_acq_int
#define	atomic_add_rel_32	atomic_add_rel_int
#define	atomic_subtract_32	atomic_subtract_int
#define	atomic_subtract_acq_32	atomic_subtract_acq_int
#define	atomic_subtract_rel_32	atomic_subtract_rel_int
#define	atomic_load_acq_32	atomic_load_acq_int
#define	atomic_store_rel_32	atomic_store_rel_int
#define	atomic_cmpset_32	atomic_cmpset_int
#define	atomic_cmpset_acq_32	atomic_cmpset_acq_int
#define	atomic_cmpset_rel_32	atomic_cmpset_rel_int
#define	atomic_swap_32		atomic_swap_int
#define	atomic_readandclear_32	atomic_readandclear_int
#define	atomic_fetchadd_32	atomic_fetchadd_int
#define	atomic_testandset_32	atomic_testandset_int

/* Operations on 64-bit quad words. */
#define	atomic_set_64		atomic_set_long
#define	atomic_set_acq_64	atomic_set_acq_long
#define	atomic_set_rel_64	atomic_set_rel_long
#define	atomic_clear_64		atomic_clear_long
#define	atomic_clear_acq_64	atomic_clear_acq_long
#define	atomic_clear_rel_64	atomic_clear_rel_long
#define	atomic_add_64		atomic_add_long
#define	atomic_add_acq_64	atomic_add_acq_long
#define	atomic_add_rel_64	atomic_add_rel_long
#define	atomic_subtract_64	atomic_subtract_long
#define	atomic_subtract_acq_64	atomic_subtract_acq_long
#define	atomic_subtract_rel_64	atomic_subtract_rel_long
#define	atomic_load_acq_64	atomic_load_acq_long
#define	atomic_store_rel_64	atomic_store_rel_long
#define	atomic_cmpset_64	atomic_cmpset_long
#define	atomic_cmpset_acq_64	atomic_cmpset_acq_long
#define	atomic_cmpset_rel_64	atomic_cmpset_rel_long
#define	atomic_swap_64		atomic_swap_long
#define	atomic_readandclear_64	atomic_readandclear_long
#define	atomic_testandset_64	atomic_testandset_long

/* Operations on pointers. */
*/ 530148067Sjhb#define atomic_set_ptr atomic_set_long 531148067Sjhb#define atomic_set_acq_ptr atomic_set_acq_long 532148067Sjhb#define atomic_set_rel_ptr atomic_set_rel_long 533148067Sjhb#define atomic_clear_ptr atomic_clear_long 534148067Sjhb#define atomic_clear_acq_ptr atomic_clear_acq_long 535148067Sjhb#define atomic_clear_rel_ptr atomic_clear_rel_long 536148067Sjhb#define atomic_add_ptr atomic_add_long 537148067Sjhb#define atomic_add_acq_ptr atomic_add_acq_long 538148067Sjhb#define atomic_add_rel_ptr atomic_add_rel_long 539148067Sjhb#define atomic_subtract_ptr atomic_subtract_long 540148067Sjhb#define atomic_subtract_acq_ptr atomic_subtract_acq_long 541148067Sjhb#define atomic_subtract_rel_ptr atomic_subtract_rel_long 542148067Sjhb#define atomic_load_acq_ptr atomic_load_acq_long 543148067Sjhb#define atomic_store_rel_ptr atomic_store_rel_long 544148067Sjhb#define atomic_cmpset_ptr atomic_cmpset_long 545148067Sjhb#define atomic_cmpset_acq_ptr atomic_cmpset_acq_long 546148067Sjhb#define atomic_cmpset_rel_ptr atomic_cmpset_rel_long 547262807Sdumbbell#define atomic_swap_ptr atomic_swap_long 548148067Sjhb#define atomic_readandclear_ptr atomic_readandclear_long 54965514Sphk 550165635Sbde#endif /* !WANT_FUNCTIONS */ 551165633Sbde 552165633Sbde#endif /* !_MACHINE_ATOMIC_H_ */ 553