/* atomic.h — FreeBSD head/sys/i386/include/atomic.h, revision 208332 */
/*-
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
2538517Sdfr * 2650477Speter * $FreeBSD: head/sys/i386/include/atomic.h 208332 2010-05-20 06:18:03Z phk $ 2738517Sdfr */ 2838517Sdfr#ifndef _MACHINE_ATOMIC_H_ 29147855Sjhb#define _MACHINE_ATOMIC_H_ 3038517Sdfr 31143063Sjoerg#ifndef _SYS_CDEFS_H_ 32143063Sjoerg#error this file needs sys/cdefs.h as a prerequisite 33143063Sjoerg#endif 34143063Sjoerg 35185720Skib#define mb() __asm __volatile("lock; addl $0,(%%esp)" : : : "memory") 36185720Skib#define wmb() __asm __volatile("lock; addl $0,(%%esp)" : : : "memory") 37185720Skib#define rmb() __asm __volatile("lock; addl $0,(%%esp)" : : : "memory") 38185162Skmacy 3938517Sdfr/* 40165635Sbde * Various simple operations on memory, each of which is atomic in the 41165635Sbde * presence of interrupts and multiple processors. 4238517Sdfr * 43165633Sbde * atomic_set_char(P, V) (*(u_char *)(P) |= (V)) 44165633Sbde * atomic_clear_char(P, V) (*(u_char *)(P) &= ~(V)) 45165633Sbde * atomic_add_char(P, V) (*(u_char *)(P) += (V)) 46165633Sbde * atomic_subtract_char(P, V) (*(u_char *)(P) -= (V)) 4748797Salc * 48165633Sbde * atomic_set_short(P, V) (*(u_short *)(P) |= (V)) 49165633Sbde * atomic_clear_short(P, V) (*(u_short *)(P) &= ~(V)) 50165633Sbde * atomic_add_short(P, V) (*(u_short *)(P) += (V)) 51165633Sbde * atomic_subtract_short(P, V) (*(u_short *)(P) -= (V)) 5248797Salc * 53165633Sbde * atomic_set_int(P, V) (*(u_int *)(P) |= (V)) 54165633Sbde * atomic_clear_int(P, V) (*(u_int *)(P) &= ~(V)) 55165633Sbde * atomic_add_int(P, V) (*(u_int *)(P) += (V)) 56165633Sbde * atomic_subtract_int(P, V) (*(u_int *)(P) -= (V)) 57165635Sbde * atomic_readandclear_int(P) (return (*(u_int *)(P)); *(u_int *)(P) = 0;) 5848797Salc * 59165633Sbde * atomic_set_long(P, V) (*(u_long *)(P) |= (V)) 60165633Sbde * atomic_clear_long(P, V) (*(u_long *)(P) &= ~(V)) 61165633Sbde * atomic_add_long(P, V) (*(u_long *)(P) += (V)) 62165633Sbde * atomic_subtract_long(P, V) (*(u_long *)(P) -= (V)) 63165635Sbde * atomic_readandclear_long(P) (return (*(u_long *)(P)); *(u_long 
*)(P) = 0;) 6438517Sdfr */ 6538517Sdfr 6648797Salc/* 6749999Salc * The above functions are expanded inline in the statically-linked 6849999Salc * kernel. Lock prefixes are generated if an SMP kernel is being 6949999Salc * built. 7049999Salc * 7149999Salc * Kernel modules call real functions which are built into the kernel. 7249999Salc * This allows kernel modules to be portable between UP and SMP systems. 7348797Salc */ 74147855Sjhb#if defined(KLD_MODULE) || !defined(__GNUCLIKE_ASM) 75147855Sjhb#define ATOMIC_ASM(NAME, TYPE, OP, CONS, V) \ 76197803Sattiliovoid atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v); \ 77197803Sattiliovoid atomic_##NAME##_barr_##TYPE(volatile u_##TYPE *p, u_##TYPE v) 7849999Salc 79208332Sphkint atomic_cmpset_int(volatile u_int *dst, u_int expect, u_int src); 80165633Sbdeu_int atomic_fetchadd_int(volatile u_int *p, u_int v); 8165514Sphk 8271085Sjhb#define ATOMIC_STORE_LOAD(TYPE, LOP, SOP) \ 8371085Sjhbu_##TYPE atomic_load_acq_##TYPE(volatile u_##TYPE *p); \ 84100251Smarkmvoid atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v) 8571085Sjhb 86147855Sjhb#else /* !KLD_MODULE && __GNUCLIKE_ASM */ 8772358Smarkm 8884679Sjhb/* 89165635Sbde * For userland, always use lock prefixes so that the binaries will run 90165635Sbde * on both SMP and !SMP systems. 9184679Sjhb */ 9284679Sjhb#if defined(SMP) || !defined(_KERNEL) 93165630Sbde#define MPLOCKED "lock ; " 9490515Sbde#else 95147855Sjhb#define MPLOCKED 9690515Sbde#endif 9738517Sdfr 9848797Salc/* 99197803Sattilio * The assembly is volatilized to avoid code chunk removal by the compiler. 100197803Sattilio * GCC aggressively reorders operations and memory clobbering is necessary 101197803Sattilio * in order to avoid that for memory barriers. 
10248797Salc */ 103147855Sjhb#define ATOMIC_ASM(NAME, TYPE, OP, CONS, V) \ 10448797Salcstatic __inline void \ 10549043Salcatomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\ 10648797Salc{ \ 107165630Sbde __asm __volatile(MPLOCKED OP \ 108165633Sbde : "=m" (*p) \ 109165633Sbde : CONS (V), "m" (*p)); \ 110122827Sbde} \ 111197803Sattilio \ 112197803Sattiliostatic __inline void \ 113197803Sattilioatomic_##NAME##_barr_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\ 114197803Sattilio{ \ 115197803Sattilio __asm __volatile(MPLOCKED OP \ 116197803Sattilio : "=m" (*p) \ 117197803Sattilio : CONS (V), "m" (*p) \ 118197803Sattilio : "memory"); \ 119197803Sattilio} \ 120122827Sbdestruct __hack 121100327Smarkm 12265514Sphk/* 12365514Sphk * Atomic compare and set, used by the mutex functions 12465514Sphk * 125208332Sphk * if (*dst == expect) *dst = src (all 32 bit words) 12665514Sphk * 12765514Sphk * Returns 0 on failure, non-zero on success 12865514Sphk */ 12965514Sphk 130165635Sbde#ifdef CPU_DISABLE_CMPXCHG 131100327Smarkm 132197910Sattiliostatic __inline int 133208332Sphkatomic_cmpset_int(volatile u_int *dst, u_int expect, u_int src) 134197910Sattilio{ 135197910Sattilio u_char res; 13665514Sphk 137197910Sattilio __asm __volatile( 138197910Sattilio " pushfl ; " 139197910Sattilio " cli ; " 140197910Sattilio " cmpl %3,%4 ; " 141197910Sattilio " jne 1f ; " 142197910Sattilio " movl %2,%1 ; " 143197910Sattilio "1: " 144197910Sattilio " sete %0 ; " 145197910Sattilio " popfl ; " 146197910Sattilio "# atomic_cmpset_int" 147197910Sattilio : "=q" (res), /* 0 */ 148197910Sattilio "=m" (*dst) /* 1 */ 149197910Sattilio : "r" (src), /* 2 */ 150208332Sphk "r" (expect), /* 3 */ 151197910Sattilio "m" (*dst) /* 4 */ 152197910Sattilio : "memory"); 153197910Sattilio 154197910Sattilio return (res); 155197910Sattilio} 156197910Sattilio 157165635Sbde#else /* !CPU_DISABLE_CMPXCHG */ 158100327Smarkm 159197910Sattiliostatic __inline int 160208332Sphkatomic_cmpset_int(volatile u_int *dst, u_int expect, 
u_int src) 161197910Sattilio{ 162197910Sattilio u_char res; 16365514Sphk 164197910Sattilio __asm __volatile( 165197910Sattilio " " MPLOCKED " " 166197910Sattilio " cmpxchgl %2,%1 ; " 167197910Sattilio " sete %0 ; " 168197910Sattilio "1: " 169197910Sattilio "# atomic_cmpset_int" 170197910Sattilio : "=a" (res), /* 0 */ 171197910Sattilio "=m" (*dst) /* 1 */ 172197910Sattilio : "r" (src), /* 2 */ 173208332Sphk "a" (expect), /* 3 */ 174197910Sattilio "m" (*dst) /* 4 */ 175197910Sattilio : "memory"); 176197910Sattilio 177197910Sattilio return (res); 178197910Sattilio} 179197910Sattilio 180165635Sbde#endif /* CPU_DISABLE_CMPXCHG */ 181100327Smarkm 182150627Sjhb/* 183150627Sjhb * Atomically add the value of v to the integer pointed to by p and return 184150627Sjhb * the previous value of *p. 185150627Sjhb */ 186150627Sjhbstatic __inline u_int 187150627Sjhbatomic_fetchadd_int(volatile u_int *p, u_int v) 188150627Sjhb{ 189150627Sjhb 190165633Sbde __asm __volatile( 191165630Sbde " " MPLOCKED " " 192150627Sjhb " xaddl %0, %1 ; " 193150627Sjhb "# atomic_fetchadd_int" 194150627Sjhb : "+r" (v), /* 0 (result) */ 195150627Sjhb "=m" (*p) /* 1 */ 196150627Sjhb : "m" (*p)); /* 2 */ 197150627Sjhb 198150627Sjhb return (v); 199150627Sjhb} 200150627Sjhb 201137623Sjhb#if defined(_KERNEL) && !defined(SMP) 202100327Smarkm 20367351Sjhb/* 204137591Sjhb * We assume that a = b will do atomic loads and stores. However, on a 205137591Sjhb * PentiumPro or higher, reads may pass writes, so for that case we have 206137591Sjhb * to use a serializing instruction (i.e. with LOCK) to do the load in 207137591Sjhb * SMP kernels. For UP kernels, however, the cache of the single processor 208197803Sattilio * is always consistent, so we only need to take care of compiler. 
20967351Sjhb */ 210147855Sjhb#define ATOMIC_STORE_LOAD(TYPE, LOP, SOP) \ 21167351Sjhbstatic __inline u_##TYPE \ 21267351Sjhbatomic_load_acq_##TYPE(volatile u_##TYPE *p) \ 21367351Sjhb{ \ 214197803Sattilio u_##TYPE tmp; \ 215197803Sattilio \ 216197803Sattilio tmp = *p; \ 217197803Sattilio __asm __volatile("" : : : "memory"); \ 218197803Sattilio return (tmp); \ 21967351Sjhb} \ 22067351Sjhb \ 22167351Sjhbstatic __inline void \ 22267351Sjhbatomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\ 22367351Sjhb{ \ 224197803Sattilio __asm __volatile("" : : : "memory"); \ 22567351Sjhb *p = v; \ 226122827Sbde} \ 227122827Sbdestruct __hack 228100327Smarkm 229165635Sbde#else /* !(_KERNEL && !SMP) */ 23067351Sjhb 231147855Sjhb#define ATOMIC_STORE_LOAD(TYPE, LOP, SOP) \ 23271023Sjhbstatic __inline u_##TYPE \ 23371023Sjhbatomic_load_acq_##TYPE(volatile u_##TYPE *p) \ 23471023Sjhb{ \ 23571023Sjhb u_##TYPE res; \ 23671023Sjhb \ 237165630Sbde __asm __volatile(MPLOCKED LOP \ 238165635Sbde : "=a" (res), /* 0 */ \ 239150182Sjhb "=m" (*p) /* 1 */ \ 240150182Sjhb : "m" (*p) /* 2 */ \ 241150182Sjhb : "memory"); \ 24271023Sjhb \ 24371023Sjhb return (res); \ 24471023Sjhb} \ 24571023Sjhb \ 24671023Sjhb/* \ 24771023Sjhb * The XCHG instruction asserts LOCK automagically. 
\ 24871023Sjhb */ \ 24971023Sjhbstatic __inline void \ 25071023Sjhbatomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\ 25171023Sjhb{ \ 25271023Sjhb __asm __volatile(SOP \ 253150182Sjhb : "=m" (*p), /* 0 */ \ 25471023Sjhb "+r" (v) /* 1 */ \ 255197803Sattilio : "m" (*p) /* 2 */ \ 256197803Sattilio : "memory"); \ 257122827Sbde} \ 258122827Sbdestruct __hack 259100327Smarkm 260165635Sbde#endif /* _KERNEL && !SMP */ 261100327Smarkm 262147855Sjhb#endif /* KLD_MODULE || !__GNUCLIKE_ASM */ 263100251Smarkm 264100251SmarkmATOMIC_ASM(set, char, "orb %b1,%0", "iq", v); 265100251SmarkmATOMIC_ASM(clear, char, "andb %b1,%0", "iq", ~v); 266100251SmarkmATOMIC_ASM(add, char, "addb %b1,%0", "iq", v); 267100251SmarkmATOMIC_ASM(subtract, char, "subb %b1,%0", "iq", v); 26871085Sjhb 269100251SmarkmATOMIC_ASM(set, short, "orw %w1,%0", "ir", v); 270100251SmarkmATOMIC_ASM(clear, short, "andw %w1,%0", "ir", ~v); 271100251SmarkmATOMIC_ASM(add, short, "addw %w1,%0", "ir", v); 272100251SmarkmATOMIC_ASM(subtract, short, "subw %w1,%0", "ir", v); 27371085Sjhb 274100251SmarkmATOMIC_ASM(set, int, "orl %1,%0", "ir", v); 275100251SmarkmATOMIC_ASM(clear, int, "andl %1,%0", "ir", ~v); 276100251SmarkmATOMIC_ASM(add, int, "addl %1,%0", "ir", v); 277100251SmarkmATOMIC_ASM(subtract, int, "subl %1,%0", "ir", v); 27871085Sjhb 279100251SmarkmATOMIC_ASM(set, long, "orl %1,%0", "ir", v); 280100251SmarkmATOMIC_ASM(clear, long, "andl %1,%0", "ir", ~v); 281100251SmarkmATOMIC_ASM(add, long, "addl %1,%0", "ir", v); 282100251SmarkmATOMIC_ASM(subtract, long, "subl %1,%0", "ir", v); 28371085Sjhb 284100251SmarkmATOMIC_STORE_LOAD(char, "cmpxchgb %b0,%1", "xchgb %b1,%0"); 285100251SmarkmATOMIC_STORE_LOAD(short,"cmpxchgw %w0,%1", "xchgw %w1,%0"); 286100251SmarkmATOMIC_STORE_LOAD(int, "cmpxchgl %0,%1", "xchgl %1,%0"); 287100251SmarkmATOMIC_STORE_LOAD(long, "cmpxchgl %0,%1", "xchgl %1,%0"); 28871023Sjhb 28971085Sjhb#undef ATOMIC_ASM 29067351Sjhb#undef ATOMIC_STORE_LOAD 29167351Sjhb 292165635Sbde#ifndef WANT_FUNCTIONS 
293147855Sjhb 294147855Sjhbstatic __inline int 295208332Sphkatomic_cmpset_long(volatile u_long *dst, u_long expect, u_long src) 296147855Sjhb{ 297147855Sjhb 298208332Sphk return (atomic_cmpset_int((volatile u_int *)dst, (u_int)expect, 299147855Sjhb (u_int)src)); 300147855Sjhb} 301147855Sjhb 302177276Spjdstatic __inline u_long 303177276Spjdatomic_fetchadd_long(volatile u_long *p, u_long v) 304177276Spjd{ 305177276Spjd 306177276Spjd return (atomic_fetchadd_int((volatile u_int *)p, (u_int)v)); 307177276Spjd} 308177276Spjd 309147855Sjhb/* Read the current value and store a zero in the destination. */ 310147855Sjhb#ifdef __GNUCLIKE_ASM 311147855Sjhb 312147855Sjhbstatic __inline u_int 313147855Sjhbatomic_readandclear_int(volatile u_int *addr) 314147855Sjhb{ 315165635Sbde u_int res; 316147855Sjhb 317165635Sbde res = 0; 318165633Sbde __asm __volatile( 319147855Sjhb " xchgl %1,%0 ; " 320147855Sjhb "# atomic_readandclear_int" 321165635Sbde : "+r" (res), /* 0 */ 322165635Sbde "=m" (*addr) /* 1 */ 323150182Sjhb : "m" (*addr)); 324147855Sjhb 325165635Sbde return (res); 326147855Sjhb} 327147855Sjhb 328147855Sjhbstatic __inline u_long 329147855Sjhbatomic_readandclear_long(volatile u_long *addr) 330147855Sjhb{ 331165635Sbde u_long res; 332147855Sjhb 333165635Sbde res = 0; 334165633Sbde __asm __volatile( 335147855Sjhb " xchgl %1,%0 ; " 336147855Sjhb "# atomic_readandclear_long" 337165636Sbde : "+r" (res), /* 0 */ 338165635Sbde "=m" (*addr) /* 1 */ 339150182Sjhb : "m" (*addr)); 340147855Sjhb 341165635Sbde return (res); 342147855Sjhb} 343147855Sjhb 344147855Sjhb#else /* !__GNUCLIKE_ASM */ 345147855Sjhb 346165635Sbdeu_int atomic_readandclear_int(volatile u_int *addr); 347165635Sbdeu_long atomic_readandclear_long(volatile u_long *addr); 348147855Sjhb 349147855Sjhb#endif /* __GNUCLIKE_ASM */ 350147855Sjhb 351197803Sattilio#define atomic_set_acq_char atomic_set_barr_char 352197803Sattilio#define atomic_set_rel_char atomic_set_barr_char 353197803Sattilio#define atomic_clear_acq_char 
atomic_clear_barr_char 354197803Sattilio#define atomic_clear_rel_char atomic_clear_barr_char 355197803Sattilio#define atomic_add_acq_char atomic_add_barr_char 356197803Sattilio#define atomic_add_rel_char atomic_add_barr_char 357197803Sattilio#define atomic_subtract_acq_char atomic_subtract_barr_char 358197803Sattilio#define atomic_subtract_rel_char atomic_subtract_barr_char 35971085Sjhb 360197803Sattilio#define atomic_set_acq_short atomic_set_barr_short 361197803Sattilio#define atomic_set_rel_short atomic_set_barr_short 362197803Sattilio#define atomic_clear_acq_short atomic_clear_barr_short 363197803Sattilio#define atomic_clear_rel_short atomic_clear_barr_short 364197803Sattilio#define atomic_add_acq_short atomic_add_barr_short 365197803Sattilio#define atomic_add_rel_short atomic_add_barr_short 366197803Sattilio#define atomic_subtract_acq_short atomic_subtract_barr_short 367197803Sattilio#define atomic_subtract_rel_short atomic_subtract_barr_short 36871085Sjhb 369197803Sattilio#define atomic_set_acq_int atomic_set_barr_int 370197803Sattilio#define atomic_set_rel_int atomic_set_barr_int 371197803Sattilio#define atomic_clear_acq_int atomic_clear_barr_int 372197803Sattilio#define atomic_clear_rel_int atomic_clear_barr_int 373197803Sattilio#define atomic_add_acq_int atomic_add_barr_int 374197803Sattilio#define atomic_add_rel_int atomic_add_barr_int 375197803Sattilio#define atomic_subtract_acq_int atomic_subtract_barr_int 376197803Sattilio#define atomic_subtract_rel_int atomic_subtract_barr_int 377197910Sattilio#define atomic_cmpset_acq_int atomic_cmpset_int 378197910Sattilio#define atomic_cmpset_rel_int atomic_cmpset_int 37971085Sjhb 380197803Sattilio#define atomic_set_acq_long atomic_set_barr_long 381197803Sattilio#define atomic_set_rel_long atomic_set_barr_long 382197803Sattilio#define atomic_clear_acq_long atomic_clear_barr_long 383197803Sattilio#define atomic_clear_rel_long atomic_clear_barr_long 384197803Sattilio#define atomic_add_acq_long atomic_add_barr_long 
385197803Sattilio#define atomic_add_rel_long atomic_add_barr_long 386197803Sattilio#define atomic_subtract_acq_long atomic_subtract_barr_long 387197803Sattilio#define atomic_subtract_rel_long atomic_subtract_barr_long 388197910Sattilio#define atomic_cmpset_acq_long atomic_cmpset_long 389197910Sattilio#define atomic_cmpset_rel_long atomic_cmpset_long 39071085Sjhb 391147855Sjhb/* Operations on 8-bit bytes. */ 39271085Sjhb#define atomic_set_8 atomic_set_char 39371085Sjhb#define atomic_set_acq_8 atomic_set_acq_char 39471085Sjhb#define atomic_set_rel_8 atomic_set_rel_char 39571085Sjhb#define atomic_clear_8 atomic_clear_char 39671085Sjhb#define atomic_clear_acq_8 atomic_clear_acq_char 39771085Sjhb#define atomic_clear_rel_8 atomic_clear_rel_char 39871085Sjhb#define atomic_add_8 atomic_add_char 39971085Sjhb#define atomic_add_acq_8 atomic_add_acq_char 40071085Sjhb#define atomic_add_rel_8 atomic_add_rel_char 40171085Sjhb#define atomic_subtract_8 atomic_subtract_char 40271085Sjhb#define atomic_subtract_acq_8 atomic_subtract_acq_char 40371085Sjhb#define atomic_subtract_rel_8 atomic_subtract_rel_char 40471085Sjhb#define atomic_load_acq_8 atomic_load_acq_char 40571085Sjhb#define atomic_store_rel_8 atomic_store_rel_char 40671085Sjhb 407147855Sjhb/* Operations on 16-bit words. 
*/ 40871085Sjhb#define atomic_set_16 atomic_set_short 40971085Sjhb#define atomic_set_acq_16 atomic_set_acq_short 41071085Sjhb#define atomic_set_rel_16 atomic_set_rel_short 41171085Sjhb#define atomic_clear_16 atomic_clear_short 41271085Sjhb#define atomic_clear_acq_16 atomic_clear_acq_short 41371085Sjhb#define atomic_clear_rel_16 atomic_clear_rel_short 41471085Sjhb#define atomic_add_16 atomic_add_short 41571085Sjhb#define atomic_add_acq_16 atomic_add_acq_short 41671085Sjhb#define atomic_add_rel_16 atomic_add_rel_short 41771085Sjhb#define atomic_subtract_16 atomic_subtract_short 41871085Sjhb#define atomic_subtract_acq_16 atomic_subtract_acq_short 41971085Sjhb#define atomic_subtract_rel_16 atomic_subtract_rel_short 42071085Sjhb#define atomic_load_acq_16 atomic_load_acq_short 42171085Sjhb#define atomic_store_rel_16 atomic_store_rel_short 42271085Sjhb 423147855Sjhb/* Operations on 32-bit double words. */ 42471085Sjhb#define atomic_set_32 atomic_set_int 42571085Sjhb#define atomic_set_acq_32 atomic_set_acq_int 42671085Sjhb#define atomic_set_rel_32 atomic_set_rel_int 42771085Sjhb#define atomic_clear_32 atomic_clear_int 42871085Sjhb#define atomic_clear_acq_32 atomic_clear_acq_int 42971085Sjhb#define atomic_clear_rel_32 atomic_clear_rel_int 43071085Sjhb#define atomic_add_32 atomic_add_int 43171085Sjhb#define atomic_add_acq_32 atomic_add_acq_int 43271085Sjhb#define atomic_add_rel_32 atomic_add_rel_int 43371085Sjhb#define atomic_subtract_32 atomic_subtract_int 43471085Sjhb#define atomic_subtract_acq_32 atomic_subtract_acq_int 43571085Sjhb#define atomic_subtract_rel_32 atomic_subtract_rel_int 43671085Sjhb#define atomic_load_acq_32 atomic_load_acq_int 43771085Sjhb#define atomic_store_rel_32 atomic_store_rel_int 43871085Sjhb#define atomic_cmpset_32 atomic_cmpset_int 43971085Sjhb#define atomic_cmpset_acq_32 atomic_cmpset_acq_int 44071085Sjhb#define atomic_cmpset_rel_32 atomic_cmpset_rel_int 44171085Sjhb#define atomic_readandclear_32 atomic_readandclear_int 442150627Sjhb#define 
atomic_fetchadd_32 atomic_fetchadd_int 44371085Sjhb 444147855Sjhb/* Operations on pointers. */ 445157212Sdes#define atomic_set_ptr(p, v) \ 446157212Sdes atomic_set_int((volatile u_int *)(p), (u_int)(v)) 447157212Sdes#define atomic_set_acq_ptr(p, v) \ 448157212Sdes atomic_set_acq_int((volatile u_int *)(p), (u_int)(v)) 449157212Sdes#define atomic_set_rel_ptr(p, v) \ 450157212Sdes atomic_set_rel_int((volatile u_int *)(p), (u_int)(v)) 451157212Sdes#define atomic_clear_ptr(p, v) \ 452157212Sdes atomic_clear_int((volatile u_int *)(p), (u_int)(v)) 453157212Sdes#define atomic_clear_acq_ptr(p, v) \ 454157212Sdes atomic_clear_acq_int((volatile u_int *)(p), (u_int)(v)) 455157212Sdes#define atomic_clear_rel_ptr(p, v) \ 456157212Sdes atomic_clear_rel_int((volatile u_int *)(p), (u_int)(v)) 457157212Sdes#define atomic_add_ptr(p, v) \ 458157212Sdes atomic_add_int((volatile u_int *)(p), (u_int)(v)) 459157212Sdes#define atomic_add_acq_ptr(p, v) \ 460157212Sdes atomic_add_acq_int((volatile u_int *)(p), (u_int)(v)) 461157212Sdes#define atomic_add_rel_ptr(p, v) \ 462157212Sdes atomic_add_rel_int((volatile u_int *)(p), (u_int)(v)) 463157212Sdes#define atomic_subtract_ptr(p, v) \ 464157212Sdes atomic_subtract_int((volatile u_int *)(p), (u_int)(v)) 465157212Sdes#define atomic_subtract_acq_ptr(p, v) \ 466157212Sdes atomic_subtract_acq_int((volatile u_int *)(p), (u_int)(v)) 467157212Sdes#define atomic_subtract_rel_ptr(p, v) \ 468157212Sdes atomic_subtract_rel_int((volatile u_int *)(p), (u_int)(v)) 469157212Sdes#define atomic_load_acq_ptr(p) \ 470157212Sdes atomic_load_acq_int((volatile u_int *)(p)) 471157212Sdes#define atomic_store_rel_ptr(p, v) \ 472157212Sdes atomic_store_rel_int((volatile u_int *)(p), (v)) 473157212Sdes#define atomic_cmpset_ptr(dst, old, new) \ 474157212Sdes atomic_cmpset_int((volatile u_int *)(dst), (u_int)(old), (u_int)(new)) 475157212Sdes#define atomic_cmpset_acq_ptr(dst, old, new) \ 476165633Sbde atomic_cmpset_acq_int((volatile u_int *)(dst), (u_int)(old), \ 
477165633Sbde (u_int)(new)) 478157212Sdes#define atomic_cmpset_rel_ptr(dst, old, new) \ 479165633Sbde atomic_cmpset_rel_int((volatile u_int *)(dst), (u_int)(old), \ 480165633Sbde (u_int)(new)) 481157212Sdes#define atomic_readandclear_ptr(p) \ 482157212Sdes atomic_readandclear_int((volatile u_int *)(p)) 48365514Sphk 484165635Sbde#endif /* !WANT_FUNCTIONS */ 485165633Sbde 486165633Sbde#endif /* !_MACHINE_ATOMIC_H_ */ 487