/* atomic.h — FreeBSD head r216524, sys/i386/include/atomic.h */
/*-
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
2538517Sdfr * 2650477Speter * $FreeBSD: head/sys/i386/include/atomic.h 216524 2010-12-18 16:41:11Z kib $ 2738517Sdfr */ 2838517Sdfr#ifndef _MACHINE_ATOMIC_H_ 29147855Sjhb#define _MACHINE_ATOMIC_H_ 3038517Sdfr 31143063Sjoerg#ifndef _SYS_CDEFS_H_ 32143063Sjoerg#error this file needs sys/cdefs.h as a prerequisite 33143063Sjoerg#endif 34143063Sjoerg 35185720Skib#define mb() __asm __volatile("lock; addl $0,(%%esp)" : : : "memory") 36185720Skib#define wmb() __asm __volatile("lock; addl $0,(%%esp)" : : : "memory") 37185720Skib#define rmb() __asm __volatile("lock; addl $0,(%%esp)" : : : "memory") 38185162Skmacy 3938517Sdfr/* 40165635Sbde * Various simple operations on memory, each of which is atomic in the 41165635Sbde * presence of interrupts and multiple processors. 4238517Sdfr * 43165633Sbde * atomic_set_char(P, V) (*(u_char *)(P) |= (V)) 44165633Sbde * atomic_clear_char(P, V) (*(u_char *)(P) &= ~(V)) 45165633Sbde * atomic_add_char(P, V) (*(u_char *)(P) += (V)) 46165633Sbde * atomic_subtract_char(P, V) (*(u_char *)(P) -= (V)) 4748797Salc * 48165633Sbde * atomic_set_short(P, V) (*(u_short *)(P) |= (V)) 49165633Sbde * atomic_clear_short(P, V) (*(u_short *)(P) &= ~(V)) 50165633Sbde * atomic_add_short(P, V) (*(u_short *)(P) += (V)) 51165633Sbde * atomic_subtract_short(P, V) (*(u_short *)(P) -= (V)) 5248797Salc * 53165633Sbde * atomic_set_int(P, V) (*(u_int *)(P) |= (V)) 54165633Sbde * atomic_clear_int(P, V) (*(u_int *)(P) &= ~(V)) 55165633Sbde * atomic_add_int(P, V) (*(u_int *)(P) += (V)) 56165633Sbde * atomic_subtract_int(P, V) (*(u_int *)(P) -= (V)) 57165635Sbde * atomic_readandclear_int(P) (return (*(u_int *)(P)); *(u_int *)(P) = 0;) 5848797Salc * 59165633Sbde * atomic_set_long(P, V) (*(u_long *)(P) |= (V)) 60165633Sbde * atomic_clear_long(P, V) (*(u_long *)(P) &= ~(V)) 61165633Sbde * atomic_add_long(P, V) (*(u_long *)(P) += (V)) 62165633Sbde * atomic_subtract_long(P, V) (*(u_long *)(P) -= (V)) 63165635Sbde * atomic_readandclear_long(P) (return (*(u_long *)(P)); *(u_long 
*)(P) = 0;) 6438517Sdfr */ 6538517Sdfr 6648797Salc/* 6749999Salc * The above functions are expanded inline in the statically-linked 6849999Salc * kernel. Lock prefixes are generated if an SMP kernel is being 6949999Salc * built. 7049999Salc * 7149999Salc * Kernel modules call real functions which are built into the kernel. 7249999Salc * This allows kernel modules to be portable between UP and SMP systems. 7348797Salc */ 74147855Sjhb#if defined(KLD_MODULE) || !defined(__GNUCLIKE_ASM) 75147855Sjhb#define ATOMIC_ASM(NAME, TYPE, OP, CONS, V) \ 76197803Sattiliovoid atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v); \ 77197803Sattiliovoid atomic_##NAME##_barr_##TYPE(volatile u_##TYPE *p, u_##TYPE v) 7849999Salc 79208332Sphkint atomic_cmpset_int(volatile u_int *dst, u_int expect, u_int src); 80165633Sbdeu_int atomic_fetchadd_int(volatile u_int *p, u_int v); 8165514Sphk 8271085Sjhb#define ATOMIC_STORE_LOAD(TYPE, LOP, SOP) \ 8371085Sjhbu_##TYPE atomic_load_acq_##TYPE(volatile u_##TYPE *p); \ 84100251Smarkmvoid atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v) 8571085Sjhb 86147855Sjhb#else /* !KLD_MODULE && __GNUCLIKE_ASM */ 8772358Smarkm 8884679Sjhb/* 89165635Sbde * For userland, always use lock prefixes so that the binaries will run 90165635Sbde * on both SMP and !SMP systems. 9184679Sjhb */ 9284679Sjhb#if defined(SMP) || !defined(_KERNEL) 93165630Sbde#define MPLOCKED "lock ; " 9490515Sbde#else 95147855Sjhb#define MPLOCKED 9690515Sbde#endif 9738517Sdfr 9848797Salc/* 99197803Sattilio * The assembly is volatilized to avoid code chunk removal by the compiler. 100197803Sattilio * GCC aggressively reorders operations and memory clobbering is necessary 101197803Sattilio * in order to avoid that for memory barriers. 
10248797Salc */ 103147855Sjhb#define ATOMIC_ASM(NAME, TYPE, OP, CONS, V) \ 10448797Salcstatic __inline void \ 10549043Salcatomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\ 10648797Salc{ \ 107165630Sbde __asm __volatile(MPLOCKED OP \ 108165633Sbde : "=m" (*p) \ 109216524Skib : CONS (V), "m" (*p) \ 110216524Skib : "cc"); \ 111122827Sbde} \ 112197803Sattilio \ 113197803Sattiliostatic __inline void \ 114197803Sattilioatomic_##NAME##_barr_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\ 115197803Sattilio{ \ 116197803Sattilio __asm __volatile(MPLOCKED OP \ 117197803Sattilio : "=m" (*p) \ 118197803Sattilio : CONS (V), "m" (*p) \ 119216524Skib : "memory", "cc"); \ 120197803Sattilio} \ 121122827Sbdestruct __hack 122100327Smarkm 12365514Sphk/* 12465514Sphk * Atomic compare and set, used by the mutex functions 12565514Sphk * 126208332Sphk * if (*dst == expect) *dst = src (all 32 bit words) 12765514Sphk * 12865514Sphk * Returns 0 on failure, non-zero on success 12965514Sphk */ 13065514Sphk 131165635Sbde#ifdef CPU_DISABLE_CMPXCHG 132100327Smarkm 133197910Sattiliostatic __inline int 134208332Sphkatomic_cmpset_int(volatile u_int *dst, u_int expect, u_int src) 135197910Sattilio{ 136197910Sattilio u_char res; 13765514Sphk 138197910Sattilio __asm __volatile( 139197910Sattilio " pushfl ; " 140197910Sattilio " cli ; " 141197910Sattilio " cmpl %3,%4 ; " 142197910Sattilio " jne 1f ; " 143197910Sattilio " movl %2,%1 ; " 144197910Sattilio "1: " 145197910Sattilio " sete %0 ; " 146197910Sattilio " popfl ; " 147197910Sattilio "# atomic_cmpset_int" 148197910Sattilio : "=q" (res), /* 0 */ 149197910Sattilio "=m" (*dst) /* 1 */ 150197910Sattilio : "r" (src), /* 2 */ 151208332Sphk "r" (expect), /* 3 */ 152197910Sattilio "m" (*dst) /* 4 */ 153197910Sattilio : "memory"); 154197910Sattilio 155197910Sattilio return (res); 156197910Sattilio} 157197910Sattilio 158165635Sbde#else /* !CPU_DISABLE_CMPXCHG */ 159100327Smarkm 160197910Sattiliostatic __inline int 161208332Sphkatomic_cmpset_int(volatile 
u_int *dst, u_int expect, u_int src) 162197910Sattilio{ 163197910Sattilio u_char res; 16465514Sphk 165197910Sattilio __asm __volatile( 166197910Sattilio " " MPLOCKED " " 167197910Sattilio " cmpxchgl %2,%1 ; " 168197910Sattilio " sete %0 ; " 169197910Sattilio "1: " 170197910Sattilio "# atomic_cmpset_int" 171197910Sattilio : "=a" (res), /* 0 */ 172197910Sattilio "=m" (*dst) /* 1 */ 173197910Sattilio : "r" (src), /* 2 */ 174208332Sphk "a" (expect), /* 3 */ 175197910Sattilio "m" (*dst) /* 4 */ 176216524Skib : "memory", "cc"); 177197910Sattilio 178197910Sattilio return (res); 179197910Sattilio} 180197910Sattilio 181165635Sbde#endif /* CPU_DISABLE_CMPXCHG */ 182100327Smarkm 183150627Sjhb/* 184150627Sjhb * Atomically add the value of v to the integer pointed to by p and return 185150627Sjhb * the previous value of *p. 186150627Sjhb */ 187150627Sjhbstatic __inline u_int 188150627Sjhbatomic_fetchadd_int(volatile u_int *p, u_int v) 189150627Sjhb{ 190150627Sjhb 191165633Sbde __asm __volatile( 192165630Sbde " " MPLOCKED " " 193150627Sjhb " xaddl %0, %1 ; " 194150627Sjhb "# atomic_fetchadd_int" 195150627Sjhb : "+r" (v), /* 0 (result) */ 196150627Sjhb "=m" (*p) /* 1 */ 197216524Skib : "m" (*p) /* 2 */ 198216524Skib : "cc"); 199150627Sjhb return (v); 200150627Sjhb} 201150627Sjhb 202137623Sjhb#if defined(_KERNEL) && !defined(SMP) 203100327Smarkm 20467351Sjhb/* 205137591Sjhb * We assume that a = b will do atomic loads and stores. However, on a 206137591Sjhb * PentiumPro or higher, reads may pass writes, so for that case we have 207137591Sjhb * to use a serializing instruction (i.e. with LOCK) to do the load in 208137591Sjhb * SMP kernels. For UP kernels, however, the cache of the single processor 209197803Sattilio * is always consistent, so we only need to take care of compiler. 
21067351Sjhb */ 211147855Sjhb#define ATOMIC_STORE_LOAD(TYPE, LOP, SOP) \ 21267351Sjhbstatic __inline u_##TYPE \ 21367351Sjhbatomic_load_acq_##TYPE(volatile u_##TYPE *p) \ 21467351Sjhb{ \ 215197803Sattilio u_##TYPE tmp; \ 216197803Sattilio \ 217197803Sattilio tmp = *p; \ 218197803Sattilio __asm __volatile("" : : : "memory"); \ 219197803Sattilio return (tmp); \ 22067351Sjhb} \ 22167351Sjhb \ 22267351Sjhbstatic __inline void \ 22367351Sjhbatomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\ 22467351Sjhb{ \ 225197803Sattilio __asm __volatile("" : : : "memory"); \ 22667351Sjhb *p = v; \ 227122827Sbde} \ 228122827Sbdestruct __hack 229100327Smarkm 230165635Sbde#else /* !(_KERNEL && !SMP) */ 23167351Sjhb 232147855Sjhb#define ATOMIC_STORE_LOAD(TYPE, LOP, SOP) \ 23371023Sjhbstatic __inline u_##TYPE \ 23471023Sjhbatomic_load_acq_##TYPE(volatile u_##TYPE *p) \ 23571023Sjhb{ \ 23671023Sjhb u_##TYPE res; \ 23771023Sjhb \ 238165630Sbde __asm __volatile(MPLOCKED LOP \ 239165635Sbde : "=a" (res), /* 0 */ \ 240150182Sjhb "=m" (*p) /* 1 */ \ 241150182Sjhb : "m" (*p) /* 2 */ \ 242216524Skib : "memory", "cc"); \ 24371023Sjhb \ 24471023Sjhb return (res); \ 24571023Sjhb} \ 24671023Sjhb \ 24771023Sjhb/* \ 24871023Sjhb * The XCHG instruction asserts LOCK automagically. 
\ 24971023Sjhb */ \ 25071023Sjhbstatic __inline void \ 25171023Sjhbatomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\ 25271023Sjhb{ \ 25371023Sjhb __asm __volatile(SOP \ 254150182Sjhb : "=m" (*p), /* 0 */ \ 25571023Sjhb "+r" (v) /* 1 */ \ 256197803Sattilio : "m" (*p) /* 2 */ \ 257197803Sattilio : "memory"); \ 258122827Sbde} \ 259122827Sbdestruct __hack 260100327Smarkm 261165635Sbde#endif /* _KERNEL && !SMP */ 262100327Smarkm 263147855Sjhb#endif /* KLD_MODULE || !__GNUCLIKE_ASM */ 264100251Smarkm 265100251SmarkmATOMIC_ASM(set, char, "orb %b1,%0", "iq", v); 266100251SmarkmATOMIC_ASM(clear, char, "andb %b1,%0", "iq", ~v); 267100251SmarkmATOMIC_ASM(add, char, "addb %b1,%0", "iq", v); 268100251SmarkmATOMIC_ASM(subtract, char, "subb %b1,%0", "iq", v); 26971085Sjhb 270100251SmarkmATOMIC_ASM(set, short, "orw %w1,%0", "ir", v); 271100251SmarkmATOMIC_ASM(clear, short, "andw %w1,%0", "ir", ~v); 272100251SmarkmATOMIC_ASM(add, short, "addw %w1,%0", "ir", v); 273100251SmarkmATOMIC_ASM(subtract, short, "subw %w1,%0", "ir", v); 27471085Sjhb 275100251SmarkmATOMIC_ASM(set, int, "orl %1,%0", "ir", v); 276100251SmarkmATOMIC_ASM(clear, int, "andl %1,%0", "ir", ~v); 277100251SmarkmATOMIC_ASM(add, int, "addl %1,%0", "ir", v); 278100251SmarkmATOMIC_ASM(subtract, int, "subl %1,%0", "ir", v); 27971085Sjhb 280100251SmarkmATOMIC_ASM(set, long, "orl %1,%0", "ir", v); 281100251SmarkmATOMIC_ASM(clear, long, "andl %1,%0", "ir", ~v); 282100251SmarkmATOMIC_ASM(add, long, "addl %1,%0", "ir", v); 283100251SmarkmATOMIC_ASM(subtract, long, "subl %1,%0", "ir", v); 28471085Sjhb 285100251SmarkmATOMIC_STORE_LOAD(char, "cmpxchgb %b0,%1", "xchgb %b1,%0"); 286100251SmarkmATOMIC_STORE_LOAD(short,"cmpxchgw %w0,%1", "xchgw %w1,%0"); 287100251SmarkmATOMIC_STORE_LOAD(int, "cmpxchgl %0,%1", "xchgl %1,%0"); 288100251SmarkmATOMIC_STORE_LOAD(long, "cmpxchgl %0,%1", "xchgl %1,%0"); 28971023Sjhb 29071085Sjhb#undef ATOMIC_ASM 29167351Sjhb#undef ATOMIC_STORE_LOAD 29267351Sjhb 293165635Sbde#ifndef WANT_FUNCTIONS 
294147855Sjhb 295147855Sjhbstatic __inline int 296208332Sphkatomic_cmpset_long(volatile u_long *dst, u_long expect, u_long src) 297147855Sjhb{ 298147855Sjhb 299208332Sphk return (atomic_cmpset_int((volatile u_int *)dst, (u_int)expect, 300147855Sjhb (u_int)src)); 301147855Sjhb} 302147855Sjhb 303177276Spjdstatic __inline u_long 304177276Spjdatomic_fetchadd_long(volatile u_long *p, u_long v) 305177276Spjd{ 306177276Spjd 307177276Spjd return (atomic_fetchadd_int((volatile u_int *)p, (u_int)v)); 308177276Spjd} 309177276Spjd 310147855Sjhb/* Read the current value and store a zero in the destination. */ 311147855Sjhb#ifdef __GNUCLIKE_ASM 312147855Sjhb 313147855Sjhbstatic __inline u_int 314147855Sjhbatomic_readandclear_int(volatile u_int *addr) 315147855Sjhb{ 316165635Sbde u_int res; 317147855Sjhb 318165635Sbde res = 0; 319165633Sbde __asm __volatile( 320147855Sjhb " xchgl %1,%0 ; " 321147855Sjhb "# atomic_readandclear_int" 322165635Sbde : "+r" (res), /* 0 */ 323165635Sbde "=m" (*addr) /* 1 */ 324150182Sjhb : "m" (*addr)); 325147855Sjhb 326165635Sbde return (res); 327147855Sjhb} 328147855Sjhb 329147855Sjhbstatic __inline u_long 330147855Sjhbatomic_readandclear_long(volatile u_long *addr) 331147855Sjhb{ 332165635Sbde u_long res; 333147855Sjhb 334165635Sbde res = 0; 335165633Sbde __asm __volatile( 336147855Sjhb " xchgl %1,%0 ; " 337147855Sjhb "# atomic_readandclear_long" 338165636Sbde : "+r" (res), /* 0 */ 339165635Sbde "=m" (*addr) /* 1 */ 340150182Sjhb : "m" (*addr)); 341147855Sjhb 342165635Sbde return (res); 343147855Sjhb} 344147855Sjhb 345147855Sjhb#else /* !__GNUCLIKE_ASM */ 346147855Sjhb 347165635Sbdeu_int atomic_readandclear_int(volatile u_int *addr); 348165635Sbdeu_long atomic_readandclear_long(volatile u_long *addr); 349147855Sjhb 350147855Sjhb#endif /* __GNUCLIKE_ASM */ 351147855Sjhb 352197803Sattilio#define atomic_set_acq_char atomic_set_barr_char 353197803Sattilio#define atomic_set_rel_char atomic_set_barr_char 354197803Sattilio#define atomic_clear_acq_char 
atomic_clear_barr_char 355197803Sattilio#define atomic_clear_rel_char atomic_clear_barr_char 356197803Sattilio#define atomic_add_acq_char atomic_add_barr_char 357197803Sattilio#define atomic_add_rel_char atomic_add_barr_char 358197803Sattilio#define atomic_subtract_acq_char atomic_subtract_barr_char 359197803Sattilio#define atomic_subtract_rel_char atomic_subtract_barr_char 36071085Sjhb 361197803Sattilio#define atomic_set_acq_short atomic_set_barr_short 362197803Sattilio#define atomic_set_rel_short atomic_set_barr_short 363197803Sattilio#define atomic_clear_acq_short atomic_clear_barr_short 364197803Sattilio#define atomic_clear_rel_short atomic_clear_barr_short 365197803Sattilio#define atomic_add_acq_short atomic_add_barr_short 366197803Sattilio#define atomic_add_rel_short atomic_add_barr_short 367197803Sattilio#define atomic_subtract_acq_short atomic_subtract_barr_short 368197803Sattilio#define atomic_subtract_rel_short atomic_subtract_barr_short 36971085Sjhb 370197803Sattilio#define atomic_set_acq_int atomic_set_barr_int 371197803Sattilio#define atomic_set_rel_int atomic_set_barr_int 372197803Sattilio#define atomic_clear_acq_int atomic_clear_barr_int 373197803Sattilio#define atomic_clear_rel_int atomic_clear_barr_int 374197803Sattilio#define atomic_add_acq_int atomic_add_barr_int 375197803Sattilio#define atomic_add_rel_int atomic_add_barr_int 376197803Sattilio#define atomic_subtract_acq_int atomic_subtract_barr_int 377197803Sattilio#define atomic_subtract_rel_int atomic_subtract_barr_int 378197910Sattilio#define atomic_cmpset_acq_int atomic_cmpset_int 379197910Sattilio#define atomic_cmpset_rel_int atomic_cmpset_int 38071085Sjhb 381197803Sattilio#define atomic_set_acq_long atomic_set_barr_long 382197803Sattilio#define atomic_set_rel_long atomic_set_barr_long 383197803Sattilio#define atomic_clear_acq_long atomic_clear_barr_long 384197803Sattilio#define atomic_clear_rel_long atomic_clear_barr_long 385197803Sattilio#define atomic_add_acq_long atomic_add_barr_long 
386197803Sattilio#define atomic_add_rel_long atomic_add_barr_long 387197803Sattilio#define atomic_subtract_acq_long atomic_subtract_barr_long 388197803Sattilio#define atomic_subtract_rel_long atomic_subtract_barr_long 389197910Sattilio#define atomic_cmpset_acq_long atomic_cmpset_long 390197910Sattilio#define atomic_cmpset_rel_long atomic_cmpset_long 39171085Sjhb 392147855Sjhb/* Operations on 8-bit bytes. */ 39371085Sjhb#define atomic_set_8 atomic_set_char 39471085Sjhb#define atomic_set_acq_8 atomic_set_acq_char 39571085Sjhb#define atomic_set_rel_8 atomic_set_rel_char 39671085Sjhb#define atomic_clear_8 atomic_clear_char 39771085Sjhb#define atomic_clear_acq_8 atomic_clear_acq_char 39871085Sjhb#define atomic_clear_rel_8 atomic_clear_rel_char 39971085Sjhb#define atomic_add_8 atomic_add_char 40071085Sjhb#define atomic_add_acq_8 atomic_add_acq_char 40171085Sjhb#define atomic_add_rel_8 atomic_add_rel_char 40271085Sjhb#define atomic_subtract_8 atomic_subtract_char 40371085Sjhb#define atomic_subtract_acq_8 atomic_subtract_acq_char 40471085Sjhb#define atomic_subtract_rel_8 atomic_subtract_rel_char 40571085Sjhb#define atomic_load_acq_8 atomic_load_acq_char 40671085Sjhb#define atomic_store_rel_8 atomic_store_rel_char 40771085Sjhb 408147855Sjhb/* Operations on 16-bit words. 
*/ 40971085Sjhb#define atomic_set_16 atomic_set_short 41071085Sjhb#define atomic_set_acq_16 atomic_set_acq_short 41171085Sjhb#define atomic_set_rel_16 atomic_set_rel_short 41271085Sjhb#define atomic_clear_16 atomic_clear_short 41371085Sjhb#define atomic_clear_acq_16 atomic_clear_acq_short 41471085Sjhb#define atomic_clear_rel_16 atomic_clear_rel_short 41571085Sjhb#define atomic_add_16 atomic_add_short 41671085Sjhb#define atomic_add_acq_16 atomic_add_acq_short 41771085Sjhb#define atomic_add_rel_16 atomic_add_rel_short 41871085Sjhb#define atomic_subtract_16 atomic_subtract_short 41971085Sjhb#define atomic_subtract_acq_16 atomic_subtract_acq_short 42071085Sjhb#define atomic_subtract_rel_16 atomic_subtract_rel_short 42171085Sjhb#define atomic_load_acq_16 atomic_load_acq_short 42271085Sjhb#define atomic_store_rel_16 atomic_store_rel_short 42371085Sjhb 424147855Sjhb/* Operations on 32-bit double words. */ 42571085Sjhb#define atomic_set_32 atomic_set_int 42671085Sjhb#define atomic_set_acq_32 atomic_set_acq_int 42771085Sjhb#define atomic_set_rel_32 atomic_set_rel_int 42871085Sjhb#define atomic_clear_32 atomic_clear_int 42971085Sjhb#define atomic_clear_acq_32 atomic_clear_acq_int 43071085Sjhb#define atomic_clear_rel_32 atomic_clear_rel_int 43171085Sjhb#define atomic_add_32 atomic_add_int 43271085Sjhb#define atomic_add_acq_32 atomic_add_acq_int 43371085Sjhb#define atomic_add_rel_32 atomic_add_rel_int 43471085Sjhb#define atomic_subtract_32 atomic_subtract_int 43571085Sjhb#define atomic_subtract_acq_32 atomic_subtract_acq_int 43671085Sjhb#define atomic_subtract_rel_32 atomic_subtract_rel_int 43771085Sjhb#define atomic_load_acq_32 atomic_load_acq_int 43871085Sjhb#define atomic_store_rel_32 atomic_store_rel_int 43971085Sjhb#define atomic_cmpset_32 atomic_cmpset_int 44071085Sjhb#define atomic_cmpset_acq_32 atomic_cmpset_acq_int 44171085Sjhb#define atomic_cmpset_rel_32 atomic_cmpset_rel_int 44271085Sjhb#define atomic_readandclear_32 atomic_readandclear_int 443150627Sjhb#define 
atomic_fetchadd_32 atomic_fetchadd_int 44471085Sjhb 445147855Sjhb/* Operations on pointers. */ 446157212Sdes#define atomic_set_ptr(p, v) \ 447157212Sdes atomic_set_int((volatile u_int *)(p), (u_int)(v)) 448157212Sdes#define atomic_set_acq_ptr(p, v) \ 449157212Sdes atomic_set_acq_int((volatile u_int *)(p), (u_int)(v)) 450157212Sdes#define atomic_set_rel_ptr(p, v) \ 451157212Sdes atomic_set_rel_int((volatile u_int *)(p), (u_int)(v)) 452157212Sdes#define atomic_clear_ptr(p, v) \ 453157212Sdes atomic_clear_int((volatile u_int *)(p), (u_int)(v)) 454157212Sdes#define atomic_clear_acq_ptr(p, v) \ 455157212Sdes atomic_clear_acq_int((volatile u_int *)(p), (u_int)(v)) 456157212Sdes#define atomic_clear_rel_ptr(p, v) \ 457157212Sdes atomic_clear_rel_int((volatile u_int *)(p), (u_int)(v)) 458157212Sdes#define atomic_add_ptr(p, v) \ 459157212Sdes atomic_add_int((volatile u_int *)(p), (u_int)(v)) 460157212Sdes#define atomic_add_acq_ptr(p, v) \ 461157212Sdes atomic_add_acq_int((volatile u_int *)(p), (u_int)(v)) 462157212Sdes#define atomic_add_rel_ptr(p, v) \ 463157212Sdes atomic_add_rel_int((volatile u_int *)(p), (u_int)(v)) 464157212Sdes#define atomic_subtract_ptr(p, v) \ 465157212Sdes atomic_subtract_int((volatile u_int *)(p), (u_int)(v)) 466157212Sdes#define atomic_subtract_acq_ptr(p, v) \ 467157212Sdes atomic_subtract_acq_int((volatile u_int *)(p), (u_int)(v)) 468157212Sdes#define atomic_subtract_rel_ptr(p, v) \ 469157212Sdes atomic_subtract_rel_int((volatile u_int *)(p), (u_int)(v)) 470157212Sdes#define atomic_load_acq_ptr(p) \ 471157212Sdes atomic_load_acq_int((volatile u_int *)(p)) 472157212Sdes#define atomic_store_rel_ptr(p, v) \ 473157212Sdes atomic_store_rel_int((volatile u_int *)(p), (v)) 474157212Sdes#define atomic_cmpset_ptr(dst, old, new) \ 475157212Sdes atomic_cmpset_int((volatile u_int *)(dst), (u_int)(old), (u_int)(new)) 476157212Sdes#define atomic_cmpset_acq_ptr(dst, old, new) \ 477165633Sbde atomic_cmpset_acq_int((volatile u_int *)(dst), (u_int)(old), \ 
478165633Sbde (u_int)(new)) 479157212Sdes#define atomic_cmpset_rel_ptr(dst, old, new) \ 480165633Sbde atomic_cmpset_rel_int((volatile u_int *)(dst), (u_int)(old), \ 481165633Sbde (u_int)(new)) 482157212Sdes#define atomic_readandclear_ptr(p) \ 483157212Sdes atomic_readandclear_int((volatile u_int *)(p)) 48465514Sphk 485165635Sbde#endif /* !WANT_FUNCTIONS */ 486165633Sbde 487165633Sbde#endif /* !_MACHINE_ATOMIC_H_ */ 488