/*-
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
2538517Sdfr * 2650477Speter * $FreeBSD: head/sys/i386/include/atomic.h 185162 2008-11-22 05:55:56Z kmacy $ 2738517Sdfr */ 2838517Sdfr#ifndef _MACHINE_ATOMIC_H_ 29147855Sjhb#define _MACHINE_ATOMIC_H_ 3038517Sdfr 31143063Sjoerg#ifndef _SYS_CDEFS_H_ 32143063Sjoerg#error this file needs sys/cdefs.h as a prerequisite 33143063Sjoerg#endif 34143063Sjoerg 35185162Skmacy 36185162Skmacy#if defined(I686_CPU) 37185162Skmacy#define mb() __asm__ __volatile__ ("mfence;": : :"memory") 38185162Skmacy#define wmb() __asm__ __volatile__ ("sfence;": : :"memory") 39185162Skmacy#define rmb() __asm__ __volatile__ ("lfence;": : :"memory") 40185162Skmacy#else 4138517Sdfr/* 42185162Skmacy * do we need a serializing instruction? 43185162Skmacy */ 44185162Skmacy#define mb() 45185162Skmacy#define wmb() 46185162Skmacy#define rmb() 47185162Skmacy#endif 48185162Skmacy 49185162Skmacy 50185162Skmacy/* 51165635Sbde * Various simple operations on memory, each of which is atomic in the 52165635Sbde * presence of interrupts and multiple processors. 
5338517Sdfr * 54165633Sbde * atomic_set_char(P, V) (*(u_char *)(P) |= (V)) 55165633Sbde * atomic_clear_char(P, V) (*(u_char *)(P) &= ~(V)) 56165633Sbde * atomic_add_char(P, V) (*(u_char *)(P) += (V)) 57165633Sbde * atomic_subtract_char(P, V) (*(u_char *)(P) -= (V)) 5848797Salc * 59165633Sbde * atomic_set_short(P, V) (*(u_short *)(P) |= (V)) 60165633Sbde * atomic_clear_short(P, V) (*(u_short *)(P) &= ~(V)) 61165633Sbde * atomic_add_short(P, V) (*(u_short *)(P) += (V)) 62165633Sbde * atomic_subtract_short(P, V) (*(u_short *)(P) -= (V)) 6348797Salc * 64165633Sbde * atomic_set_int(P, V) (*(u_int *)(P) |= (V)) 65165633Sbde * atomic_clear_int(P, V) (*(u_int *)(P) &= ~(V)) 66165633Sbde * atomic_add_int(P, V) (*(u_int *)(P) += (V)) 67165633Sbde * atomic_subtract_int(P, V) (*(u_int *)(P) -= (V)) 68165635Sbde * atomic_readandclear_int(P) (return (*(u_int *)(P)); *(u_int *)(P) = 0;) 6948797Salc * 70165633Sbde * atomic_set_long(P, V) (*(u_long *)(P) |= (V)) 71165633Sbde * atomic_clear_long(P, V) (*(u_long *)(P) &= ~(V)) 72165633Sbde * atomic_add_long(P, V) (*(u_long *)(P) += (V)) 73165633Sbde * atomic_subtract_long(P, V) (*(u_long *)(P) -= (V)) 74165635Sbde * atomic_readandclear_long(P) (return (*(u_long *)(P)); *(u_long *)(P) = 0;) 7538517Sdfr */ 7638517Sdfr 7748797Salc/* 7849999Salc * The above functions are expanded inline in the statically-linked 7949999Salc * kernel. Lock prefixes are generated if an SMP kernel is being 8049999Salc * built. 8149999Salc * 8249999Salc * Kernel modules call real functions which are built into the kernel. 8349999Salc * This allows kernel modules to be portable between UP and SMP systems. 
8448797Salc */ 85147855Sjhb#if defined(KLD_MODULE) || !defined(__GNUCLIKE_ASM) 86147855Sjhb#define ATOMIC_ASM(NAME, TYPE, OP, CONS, V) \ 87100251Smarkmvoid atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v) 8849999Salc 89165633Sbdeint atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src); 90165633Sbdeu_int atomic_fetchadd_int(volatile u_int *p, u_int v); 9165514Sphk 9271085Sjhb#define ATOMIC_STORE_LOAD(TYPE, LOP, SOP) \ 9371085Sjhbu_##TYPE atomic_load_acq_##TYPE(volatile u_##TYPE *p); \ 94100251Smarkmvoid atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v) 9571085Sjhb 96147855Sjhb#else /* !KLD_MODULE && __GNUCLIKE_ASM */ 9772358Smarkm 9884679Sjhb/* 99165635Sbde * For userland, always use lock prefixes so that the binaries will run 100165635Sbde * on both SMP and !SMP systems. 10184679Sjhb */ 10284679Sjhb#if defined(SMP) || !defined(_KERNEL) 103165630Sbde#define MPLOCKED "lock ; " 10490515Sbde#else 105147855Sjhb#define MPLOCKED 10690515Sbde#endif 10738517Sdfr 10848797Salc/* 10948797Salc * The assembly is volatilized to demark potential before-and-after side 11048797Salc * effects if an interrupt or SMP collision were to occur. 
11148797Salc */ 112147855Sjhb#define ATOMIC_ASM(NAME, TYPE, OP, CONS, V) \ 11348797Salcstatic __inline void \ 11449043Salcatomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\ 11548797Salc{ \ 116165630Sbde __asm __volatile(MPLOCKED OP \ 117165633Sbde : "=m" (*p) \ 118165633Sbde : CONS (V), "m" (*p)); \ 119122827Sbde} \ 120122827Sbdestruct __hack 121100327Smarkm 12265514Sphk/* 12365514Sphk * Atomic compare and set, used by the mutex functions 12465514Sphk * 12565514Sphk * if (*dst == exp) *dst = src (all 32 bit words) 12665514Sphk * 12765514Sphk * Returns 0 on failure, non-zero on success 12865514Sphk */ 12965514Sphk 130165635Sbde#ifdef CPU_DISABLE_CMPXCHG 131100327Smarkm 13265514Sphkstatic __inline int 13365514Sphkatomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src) 13465514Sphk{ 135165572Sbde u_char res; 13665514Sphk 13765514Sphk __asm __volatile( 13865514Sphk " pushfl ; " 13965514Sphk " cli ; " 140165572Sbde " cmpl %3,%4 ; " 14165514Sphk " jne 1f ; " 142150182Sjhb " movl %2,%1 ; " 14365514Sphk "1: " 144165572Sbde " sete %0 ; " 14565514Sphk " popfl ; " 14665514Sphk "# atomic_cmpset_int" 147165572Sbde : "=q" (res), /* 0 */ 148150182Sjhb "=m" (*dst) /* 1 */ 149150182Sjhb : "r" (src), /* 2 */ 150165572Sbde "r" (exp), /* 3 */ 151165572Sbde "m" (*dst) /* 4 */ 15265514Sphk : "memory"); 15365514Sphk 15465514Sphk return (res); 15565514Sphk} 156100327Smarkm 157165635Sbde#else /* !CPU_DISABLE_CMPXCHG */ 158100327Smarkm 15965514Sphkstatic __inline int 16065514Sphkatomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src) 16165514Sphk{ 162165572Sbde u_char res; 16365514Sphk 164165633Sbde __asm __volatile( 165165630Sbde " " MPLOCKED " " 166150182Sjhb " cmpxchgl %2,%1 ; " 167165572Sbde " sete %0 ; " 16865514Sphk "1: " 16965514Sphk "# atomic_cmpset_int" 170165572Sbde : "=a" (res), /* 0 */ 171150182Sjhb "=m" (*dst) /* 1 */ 172150182Sjhb : "r" (src), /* 2 */ 173165572Sbde "a" (exp), /* 3 */ 174165572Sbde "m" (*dst) /* 4 */ 175150182Sjhb : "memory"); 17665514Sphk 
17765514Sphk return (res); 17865514Sphk} 179100327Smarkm 180165635Sbde#endif /* CPU_DISABLE_CMPXCHG */ 181100327Smarkm 182150627Sjhb/* 183150627Sjhb * Atomically add the value of v to the integer pointed to by p and return 184150627Sjhb * the previous value of *p. 185150627Sjhb */ 186150627Sjhbstatic __inline u_int 187150627Sjhbatomic_fetchadd_int(volatile u_int *p, u_int v) 188150627Sjhb{ 189150627Sjhb 190165633Sbde __asm __volatile( 191165630Sbde " " MPLOCKED " " 192150627Sjhb " xaddl %0, %1 ; " 193150627Sjhb "# atomic_fetchadd_int" 194150627Sjhb : "+r" (v), /* 0 (result) */ 195150627Sjhb "=m" (*p) /* 1 */ 196150627Sjhb : "m" (*p)); /* 2 */ 197150627Sjhb 198150627Sjhb return (v); 199150627Sjhb} 200150627Sjhb 201137623Sjhb#if defined(_KERNEL) && !defined(SMP) 202100327Smarkm 20367351Sjhb/* 204137591Sjhb * We assume that a = b will do atomic loads and stores. However, on a 205137591Sjhb * PentiumPro or higher, reads may pass writes, so for that case we have 206137591Sjhb * to use a serializing instruction (i.e. with LOCK) to do the load in 207137591Sjhb * SMP kernels. For UP kernels, however, the cache of the single processor 208137591Sjhb * is always consistent, so we don't need any memory barriers. 
20967351Sjhb */ 210147855Sjhb#define ATOMIC_STORE_LOAD(TYPE, LOP, SOP) \ 21167351Sjhbstatic __inline u_##TYPE \ 21267351Sjhbatomic_load_acq_##TYPE(volatile u_##TYPE *p) \ 21367351Sjhb{ \ 21467351Sjhb return (*p); \ 21567351Sjhb} \ 21667351Sjhb \ 21767351Sjhbstatic __inline void \ 21867351Sjhbatomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\ 21967351Sjhb{ \ 22067351Sjhb *p = v; \ 221122827Sbde} \ 222122827Sbdestruct __hack 223100327Smarkm 224165635Sbde#else /* !(_KERNEL && !SMP) */ 22567351Sjhb 226147855Sjhb#define ATOMIC_STORE_LOAD(TYPE, LOP, SOP) \ 22771023Sjhbstatic __inline u_##TYPE \ 22871023Sjhbatomic_load_acq_##TYPE(volatile u_##TYPE *p) \ 22971023Sjhb{ \ 23071023Sjhb u_##TYPE res; \ 23171023Sjhb \ 232165630Sbde __asm __volatile(MPLOCKED LOP \ 233165635Sbde : "=a" (res), /* 0 */ \ 234150182Sjhb "=m" (*p) /* 1 */ \ 235150182Sjhb : "m" (*p) /* 2 */ \ 236150182Sjhb : "memory"); \ 23771023Sjhb \ 23871023Sjhb return (res); \ 23971023Sjhb} \ 24071023Sjhb \ 24171023Sjhb/* \ 24271023Sjhb * The XCHG instruction asserts LOCK automagically. 
\ 24371023Sjhb */ \ 24471023Sjhbstatic __inline void \ 24571023Sjhbatomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\ 24671023Sjhb{ \ 24771023Sjhb __asm __volatile(SOP \ 248150182Sjhb : "=m" (*p), /* 0 */ \ 24971023Sjhb "+r" (v) /* 1 */ \ 250150182Sjhb : "m" (*p)); /* 2 */ \ 251122827Sbde} \ 252122827Sbdestruct __hack 253100327Smarkm 254165635Sbde#endif /* _KERNEL && !SMP */ 255100327Smarkm 256147855Sjhb#endif /* KLD_MODULE || !__GNUCLIKE_ASM */ 257100251Smarkm 258100251SmarkmATOMIC_ASM(set, char, "orb %b1,%0", "iq", v); 259100251SmarkmATOMIC_ASM(clear, char, "andb %b1,%0", "iq", ~v); 260100251SmarkmATOMIC_ASM(add, char, "addb %b1,%0", "iq", v); 261100251SmarkmATOMIC_ASM(subtract, char, "subb %b1,%0", "iq", v); 26271085Sjhb 263100251SmarkmATOMIC_ASM(set, short, "orw %w1,%0", "ir", v); 264100251SmarkmATOMIC_ASM(clear, short, "andw %w1,%0", "ir", ~v); 265100251SmarkmATOMIC_ASM(add, short, "addw %w1,%0", "ir", v); 266100251SmarkmATOMIC_ASM(subtract, short, "subw %w1,%0", "ir", v); 26771085Sjhb 268100251SmarkmATOMIC_ASM(set, int, "orl %1,%0", "ir", v); 269100251SmarkmATOMIC_ASM(clear, int, "andl %1,%0", "ir", ~v); 270100251SmarkmATOMIC_ASM(add, int, "addl %1,%0", "ir", v); 271100251SmarkmATOMIC_ASM(subtract, int, "subl %1,%0", "ir", v); 27271085Sjhb 273100251SmarkmATOMIC_ASM(set, long, "orl %1,%0", "ir", v); 274100251SmarkmATOMIC_ASM(clear, long, "andl %1,%0", "ir", ~v); 275100251SmarkmATOMIC_ASM(add, long, "addl %1,%0", "ir", v); 276100251SmarkmATOMIC_ASM(subtract, long, "subl %1,%0", "ir", v); 27771085Sjhb 278100251SmarkmATOMIC_STORE_LOAD(char, "cmpxchgb %b0,%1", "xchgb %b1,%0"); 279100251SmarkmATOMIC_STORE_LOAD(short,"cmpxchgw %w0,%1", "xchgw %w1,%0"); 280100251SmarkmATOMIC_STORE_LOAD(int, "cmpxchgl %0,%1", "xchgl %1,%0"); 281100251SmarkmATOMIC_STORE_LOAD(long, "cmpxchgl %0,%1", "xchgl %1,%0"); 28271023Sjhb 28371085Sjhb#undef ATOMIC_ASM 28467351Sjhb#undef ATOMIC_STORE_LOAD 28567351Sjhb 286165635Sbde#ifndef WANT_FUNCTIONS 287147855Sjhb 288147855Sjhbstatic 
__inline int 289147855Sjhbatomic_cmpset_long(volatile u_long *dst, u_long exp, u_long src) 290147855Sjhb{ 291147855Sjhb 292147855Sjhb return (atomic_cmpset_int((volatile u_int *)dst, (u_int)exp, 293147855Sjhb (u_int)src)); 294147855Sjhb} 295147855Sjhb 296177276Spjdstatic __inline u_long 297177276Spjdatomic_fetchadd_long(volatile u_long *p, u_long v) 298177276Spjd{ 299177276Spjd 300177276Spjd return (atomic_fetchadd_int((volatile u_int *)p, (u_int)v)); 301177276Spjd} 302177276Spjd 303147855Sjhb/* Read the current value and store a zero in the destination. */ 304147855Sjhb#ifdef __GNUCLIKE_ASM 305147855Sjhb 306147855Sjhbstatic __inline u_int 307147855Sjhbatomic_readandclear_int(volatile u_int *addr) 308147855Sjhb{ 309165635Sbde u_int res; 310147855Sjhb 311165635Sbde res = 0; 312165633Sbde __asm __volatile( 313147855Sjhb " xchgl %1,%0 ; " 314147855Sjhb "# atomic_readandclear_int" 315165635Sbde : "+r" (res), /* 0 */ 316165635Sbde "=m" (*addr) /* 1 */ 317150182Sjhb : "m" (*addr)); 318147855Sjhb 319165635Sbde return (res); 320147855Sjhb} 321147855Sjhb 322147855Sjhbstatic __inline u_long 323147855Sjhbatomic_readandclear_long(volatile u_long *addr) 324147855Sjhb{ 325165635Sbde u_long res; 326147855Sjhb 327165635Sbde res = 0; 328165633Sbde __asm __volatile( 329147855Sjhb " xchgl %1,%0 ; " 330147855Sjhb "# atomic_readandclear_long" 331165636Sbde : "+r" (res), /* 0 */ 332165635Sbde "=m" (*addr) /* 1 */ 333150182Sjhb : "m" (*addr)); 334147855Sjhb 335165635Sbde return (res); 336147855Sjhb} 337147855Sjhb 338147855Sjhb#else /* !__GNUCLIKE_ASM */ 339147855Sjhb 340165635Sbdeu_int atomic_readandclear_int(volatile u_int *addr); 341165635Sbdeu_long atomic_readandclear_long(volatile u_long *addr); 342147855Sjhb 343147855Sjhb#endif /* __GNUCLIKE_ASM */ 344147855Sjhb 345147855Sjhb/* Acquire and release variants are identical to the normal ones. 
*/ 34671085Sjhb#define atomic_set_acq_char atomic_set_char 34771085Sjhb#define atomic_set_rel_char atomic_set_char 34871085Sjhb#define atomic_clear_acq_char atomic_clear_char 34971085Sjhb#define atomic_clear_rel_char atomic_clear_char 35071085Sjhb#define atomic_add_acq_char atomic_add_char 35171085Sjhb#define atomic_add_rel_char atomic_add_char 35271085Sjhb#define atomic_subtract_acq_char atomic_subtract_char 35371085Sjhb#define atomic_subtract_rel_char atomic_subtract_char 35471085Sjhb 35571085Sjhb#define atomic_set_acq_short atomic_set_short 35671085Sjhb#define atomic_set_rel_short atomic_set_short 35771085Sjhb#define atomic_clear_acq_short atomic_clear_short 35871085Sjhb#define atomic_clear_rel_short atomic_clear_short 35971085Sjhb#define atomic_add_acq_short atomic_add_short 36071085Sjhb#define atomic_add_rel_short atomic_add_short 36171085Sjhb#define atomic_subtract_acq_short atomic_subtract_short 36271085Sjhb#define atomic_subtract_rel_short atomic_subtract_short 36371085Sjhb 36471085Sjhb#define atomic_set_acq_int atomic_set_int 36571085Sjhb#define atomic_set_rel_int atomic_set_int 36671085Sjhb#define atomic_clear_acq_int atomic_clear_int 36771085Sjhb#define atomic_clear_rel_int atomic_clear_int 36871085Sjhb#define atomic_add_acq_int atomic_add_int 36971085Sjhb#define atomic_add_rel_int atomic_add_int 37071085Sjhb#define atomic_subtract_acq_int atomic_subtract_int 37171085Sjhb#define atomic_subtract_rel_int atomic_subtract_int 372147855Sjhb#define atomic_cmpset_acq_int atomic_cmpset_int 373147855Sjhb#define atomic_cmpset_rel_int atomic_cmpset_int 37471085Sjhb 37571085Sjhb#define atomic_set_acq_long atomic_set_long 37671085Sjhb#define atomic_set_rel_long atomic_set_long 37771085Sjhb#define atomic_clear_acq_long atomic_clear_long 37871085Sjhb#define atomic_clear_rel_long atomic_clear_long 37971085Sjhb#define atomic_add_acq_long atomic_add_long 38071085Sjhb#define atomic_add_rel_long atomic_add_long 38171085Sjhb#define atomic_subtract_acq_long 
atomic_subtract_long 38271085Sjhb#define atomic_subtract_rel_long atomic_subtract_long 383147855Sjhb#define atomic_cmpset_acq_long atomic_cmpset_long 384147855Sjhb#define atomic_cmpset_rel_long atomic_cmpset_long 38571085Sjhb 386147855Sjhb/* Operations on 8-bit bytes. */ 38771085Sjhb#define atomic_set_8 atomic_set_char 38871085Sjhb#define atomic_set_acq_8 atomic_set_acq_char 38971085Sjhb#define atomic_set_rel_8 atomic_set_rel_char 39071085Sjhb#define atomic_clear_8 atomic_clear_char 39171085Sjhb#define atomic_clear_acq_8 atomic_clear_acq_char 39271085Sjhb#define atomic_clear_rel_8 atomic_clear_rel_char 39371085Sjhb#define atomic_add_8 atomic_add_char 39471085Sjhb#define atomic_add_acq_8 atomic_add_acq_char 39571085Sjhb#define atomic_add_rel_8 atomic_add_rel_char 39671085Sjhb#define atomic_subtract_8 atomic_subtract_char 39771085Sjhb#define atomic_subtract_acq_8 atomic_subtract_acq_char 39871085Sjhb#define atomic_subtract_rel_8 atomic_subtract_rel_char 39971085Sjhb#define atomic_load_acq_8 atomic_load_acq_char 40071085Sjhb#define atomic_store_rel_8 atomic_store_rel_char 40171085Sjhb 402147855Sjhb/* Operations on 16-bit words. 
*/ 40371085Sjhb#define atomic_set_16 atomic_set_short 40471085Sjhb#define atomic_set_acq_16 atomic_set_acq_short 40571085Sjhb#define atomic_set_rel_16 atomic_set_rel_short 40671085Sjhb#define atomic_clear_16 atomic_clear_short 40771085Sjhb#define atomic_clear_acq_16 atomic_clear_acq_short 40871085Sjhb#define atomic_clear_rel_16 atomic_clear_rel_short 40971085Sjhb#define atomic_add_16 atomic_add_short 41071085Sjhb#define atomic_add_acq_16 atomic_add_acq_short 41171085Sjhb#define atomic_add_rel_16 atomic_add_rel_short 41271085Sjhb#define atomic_subtract_16 atomic_subtract_short 41371085Sjhb#define atomic_subtract_acq_16 atomic_subtract_acq_short 41471085Sjhb#define atomic_subtract_rel_16 atomic_subtract_rel_short 41571085Sjhb#define atomic_load_acq_16 atomic_load_acq_short 41671085Sjhb#define atomic_store_rel_16 atomic_store_rel_short 41771085Sjhb 418147855Sjhb/* Operations on 32-bit double words. */ 41971085Sjhb#define atomic_set_32 atomic_set_int 42071085Sjhb#define atomic_set_acq_32 atomic_set_acq_int 42171085Sjhb#define atomic_set_rel_32 atomic_set_rel_int 42271085Sjhb#define atomic_clear_32 atomic_clear_int 42371085Sjhb#define atomic_clear_acq_32 atomic_clear_acq_int 42471085Sjhb#define atomic_clear_rel_32 atomic_clear_rel_int 42571085Sjhb#define atomic_add_32 atomic_add_int 42671085Sjhb#define atomic_add_acq_32 atomic_add_acq_int 42771085Sjhb#define atomic_add_rel_32 atomic_add_rel_int 42871085Sjhb#define atomic_subtract_32 atomic_subtract_int 42971085Sjhb#define atomic_subtract_acq_32 atomic_subtract_acq_int 43071085Sjhb#define atomic_subtract_rel_32 atomic_subtract_rel_int 43171085Sjhb#define atomic_load_acq_32 atomic_load_acq_int 43271085Sjhb#define atomic_store_rel_32 atomic_store_rel_int 43371085Sjhb#define atomic_cmpset_32 atomic_cmpset_int 43471085Sjhb#define atomic_cmpset_acq_32 atomic_cmpset_acq_int 43571085Sjhb#define atomic_cmpset_rel_32 atomic_cmpset_rel_int 43671085Sjhb#define atomic_readandclear_32 atomic_readandclear_int 437150627Sjhb#define 
atomic_fetchadd_32 atomic_fetchadd_int 43871085Sjhb 439147855Sjhb/* Operations on pointers. */ 440157212Sdes#define atomic_set_ptr(p, v) \ 441157212Sdes atomic_set_int((volatile u_int *)(p), (u_int)(v)) 442157212Sdes#define atomic_set_acq_ptr(p, v) \ 443157212Sdes atomic_set_acq_int((volatile u_int *)(p), (u_int)(v)) 444157212Sdes#define atomic_set_rel_ptr(p, v) \ 445157212Sdes atomic_set_rel_int((volatile u_int *)(p), (u_int)(v)) 446157212Sdes#define atomic_clear_ptr(p, v) \ 447157212Sdes atomic_clear_int((volatile u_int *)(p), (u_int)(v)) 448157212Sdes#define atomic_clear_acq_ptr(p, v) \ 449157212Sdes atomic_clear_acq_int((volatile u_int *)(p), (u_int)(v)) 450157212Sdes#define atomic_clear_rel_ptr(p, v) \ 451157212Sdes atomic_clear_rel_int((volatile u_int *)(p), (u_int)(v)) 452157212Sdes#define atomic_add_ptr(p, v) \ 453157212Sdes atomic_add_int((volatile u_int *)(p), (u_int)(v)) 454157212Sdes#define atomic_add_acq_ptr(p, v) \ 455157212Sdes atomic_add_acq_int((volatile u_int *)(p), (u_int)(v)) 456157212Sdes#define atomic_add_rel_ptr(p, v) \ 457157212Sdes atomic_add_rel_int((volatile u_int *)(p), (u_int)(v)) 458157212Sdes#define atomic_subtract_ptr(p, v) \ 459157212Sdes atomic_subtract_int((volatile u_int *)(p), (u_int)(v)) 460157212Sdes#define atomic_subtract_acq_ptr(p, v) \ 461157212Sdes atomic_subtract_acq_int((volatile u_int *)(p), (u_int)(v)) 462157212Sdes#define atomic_subtract_rel_ptr(p, v) \ 463157212Sdes atomic_subtract_rel_int((volatile u_int *)(p), (u_int)(v)) 464157212Sdes#define atomic_load_acq_ptr(p) \ 465157212Sdes atomic_load_acq_int((volatile u_int *)(p)) 466157212Sdes#define atomic_store_rel_ptr(p, v) \ 467157212Sdes atomic_store_rel_int((volatile u_int *)(p), (v)) 468157212Sdes#define atomic_cmpset_ptr(dst, old, new) \ 469157212Sdes atomic_cmpset_int((volatile u_int *)(dst), (u_int)(old), (u_int)(new)) 470157212Sdes#define atomic_cmpset_acq_ptr(dst, old, new) \ 471165633Sbde atomic_cmpset_acq_int((volatile u_int *)(dst), (u_int)(old), \ 
472165633Sbde (u_int)(new)) 473157212Sdes#define atomic_cmpset_rel_ptr(dst, old, new) \ 474165633Sbde atomic_cmpset_rel_int((volatile u_int *)(dst), (u_int)(old), \ 475165633Sbde (u_int)(new)) 476157212Sdes#define atomic_readandclear_ptr(p) \ 477157212Sdes atomic_readandclear_int((volatile u_int *)(p)) 47865514Sphk 479165635Sbde#endif /* !WANT_FUNCTIONS */ 480165633Sbde 481165633Sbde#endif /* !_MACHINE_ATOMIC_H_ */ 482