/*-
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD: head/sys/i386/include/atomic.h 67351 2000-10-20 07:00:48Z jhb $
 */
#ifndef _MACHINE_ATOMIC_H_
#define _MACHINE_ATOMIC_H_

/*
 * Various simple arithmetic on memory which is atomic in the presence
 * of interrupts and multiple processors.
 *
 * atomic_set_char(P, V)	(*(u_char*)(P) |= (V))
 * atomic_clear_char(P, V)	(*(u_char*)(P) &= ~(V))
 * atomic_add_char(P, V)	(*(u_char*)(P) += (V))
 * atomic_subtract_char(P, V)	(*(u_char*)(P) -= (V))
 *
 * atomic_set_short(P, V)	(*(u_short*)(P) |= (V))
 * atomic_clear_short(P, V)	(*(u_short*)(P) &= ~(V))
 * atomic_add_short(P, V)	(*(u_short*)(P) += (V))
 * atomic_subtract_short(P, V)	(*(u_short*)(P) -= (V))
 *
 * atomic_set_int(P, V)		(*(u_int*)(P) |= (V))
 * atomic_clear_int(P, V)	(*(u_int*)(P) &= ~(V))
 * atomic_add_int(P, V)		(*(u_int*)(P) += (V))
 * atomic_subtract_int(P, V)	(*(u_int*)(P) -= (V))
 * atomic_readandclear_int(P)	(return *(u_int*)P; *(u_int*)P = 0;)
 *
 * atomic_set_long(P, V)	(*(u_long*)(P) |= (V))
 * atomic_clear_long(P, V)	(*(u_long*)(P) &= ~(V))
 * atomic_add_long(P, V)	(*(u_long*)(P) += (V))
 * atomic_subtract_long(P, V)	(*(u_long*)(P) -= (V))
 * atomic_readandclear_long(P)	(return *(u_long*)P; *(u_long*)P = 0;)
 */

/*
 * The above functions are expanded inline in the statically-linked
 * kernel.  Lock prefixes are generated if an SMP kernel is being
 * built.
 *
 * Kernel modules call real functions which are built into the kernel.
 * This allows kernel modules to be portable between UP and SMP systems.
6548797Salc */ 6649999Salc#if defined(KLD_MODULE) 6749999Salc#define ATOMIC_ASM(NAME, TYPE, OP, V) \ 6867351Sjhbvoid atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v); 6949999Salc 7066695Sjhbint atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src); 7165514Sphk 7249999Salc#else /* !KLD_MODULE */ 7349999Salc#if defined(SMP) 7465514Sphk#if defined(LOCORE) 7565514Sphk#define MPLOCKED lock ; 7665514Sphk#else 7748797Salc#define MPLOCKED "lock ; " 7865514Sphk#endif 7948796Salc#else 8048797Salc#define MPLOCKED 8148796Salc#endif 8238517Sdfr 8348797Salc/* 8448797Salc * The assembly is volatilized to demark potential before-and-after side 8548797Salc * effects if an interrupt or SMP collision were to occur. 8648797Salc */ 8751938Speter#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ > 9) 8851938Speter/* egcs 1.1.2+ version */ 8948797Salc#define ATOMIC_ASM(NAME, TYPE, OP, V) \ 9048797Salcstatic __inline void \ 9149043Salcatomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\ 9248797Salc{ \ 9348797Salc __asm __volatile(MPLOCKED OP \ 9449043Salc : "=m" (*p) \ 9549043Salc : "0" (*p), "ir" (V)); \ 9648797Salc} 9751938Speter 9865514Sphk/* 9965514Sphk * Atomic compare and set, used by the mutex functions 10065514Sphk * 10165514Sphk * if (*dst == exp) *dst = src (all 32 bit words) 10265514Sphk * 10365514Sphk * Returns 0 on failure, non-zero on success 10465514Sphk */ 10565514Sphk 10665514Sphk#if defined(I386_CPU) 10765514Sphkstatic __inline int 10865514Sphkatomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src) 10965514Sphk{ 11065514Sphk int res = exp; 11165514Sphk 11265514Sphk __asm __volatile( 11365514Sphk " pushfl ; " 11465514Sphk " cli ; " 11565514Sphk " cmpl %1,%3 ; " 11665514Sphk " jne 1f ; " 11765514Sphk " movl %2,%3 ; " 11865514Sphk "1: " 11965514Sphk " sete %%al; " 12065514Sphk " movzbl %%al,%0 ; " 12165514Sphk " popfl ; " 12265514Sphk "# atomic_cmpset_int" 12365514Sphk : "=a" (res) /* 0 (result) */ 12465514Sphk : "0" (exp), /* 1 */ 12565514Sphk 
"r" (src), /* 2 */ 12665514Sphk "m" (*(dst)) /* 3 */ 12765514Sphk : "memory"); 12865514Sphk 12965514Sphk return (res); 13065514Sphk} 13165514Sphk#else /* defined(I386_CPU) */ 13265514Sphkstatic __inline int 13365514Sphkatomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src) 13465514Sphk{ 13565514Sphk int res = exp; 13665514Sphk 13765514Sphk __asm __volatile ( 13865514Sphk " " MPLOCKED " " 13965514Sphk " cmpxchgl %2,%3 ; " 14065514Sphk " setz %%al ; " 14165514Sphk " movzbl %%al,%0 ; " 14265514Sphk "1: " 14365514Sphk "# atomic_cmpset_int" 14465514Sphk : "=a" (res) /* 0 (result) */ 14565514Sphk : "0" (exp), /* 1 */ 14665514Sphk "r" (src), /* 2 */ 14765514Sphk "m" (*(dst)) /* 3 */ 14865514Sphk : "memory"); 14965514Sphk 15065514Sphk return (res); 15165514Sphk} 15265514Sphk#endif /* defined(I386_CPU) */ 15365514Sphk 15467351Sjhb#define atomic_cmpset_acq_int atomic_cmpset_int 15567351Sjhb#define atomic_cmpset_rel_int atomic_cmpset_int 15667351Sjhb 15751938Speter#else 15851938Speter/* gcc <= 2.8 version */ 15951938Speter#define ATOMIC_ASM(NAME, TYPE, OP, V) \ 16051938Speterstatic __inline void \ 16151938Speteratomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\ 16251938Speter{ \ 16351938Speter __asm __volatile(MPLOCKED OP \ 16451938Speter : "=m" (*p) \ 16551938Speter : "ir" (V)); \ 16667351Sjhb} \ 16767351Sjhb \ 16867351Sjhb 16951938Speter#endif 17049999Salc#endif /* KLD_MODULE */ 17138517Sdfr 17251937Speter#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ > 9) 17351938Speter 17451938Speter/* egcs 1.1.2+ version */ 17560300SobrienATOMIC_ASM(set, char, "orb %b2,%0", v) 17660300SobrienATOMIC_ASM(clear, char, "andb %b2,%0", ~v) 17760300SobrienATOMIC_ASM(add, char, "addb %b2,%0", v) 17860300SobrienATOMIC_ASM(subtract, char, "subb %b2,%0", v) 17938517Sdfr 18060300SobrienATOMIC_ASM(set, short, "orw %w2,%0", v) 18160300SobrienATOMIC_ASM(clear, short, "andw %w2,%0", ~v) 18260300SobrienATOMIC_ASM(add, short, "addw %w2,%0", v) 18360300SobrienATOMIC_ASM(subtract, short, 
"subw %w2,%0", v) 18438517Sdfr 18548797SalcATOMIC_ASM(set, int, "orl %2,%0", v) 18648797SalcATOMIC_ASM(clear, int, "andl %2,%0", ~v) 18748797SalcATOMIC_ASM(add, int, "addl %2,%0", v) 18848797SalcATOMIC_ASM(subtract, int, "subl %2,%0", v) 18948796Salc 19048797SalcATOMIC_ASM(set, long, "orl %2,%0", v) 19148797SalcATOMIC_ASM(clear, long, "andl %2,%0", ~v) 19248797SalcATOMIC_ASM(add, long, "addl %2,%0", v) 19348797SalcATOMIC_ASM(subtract, long, "subl %2,%0", v) 19448796Salc 19551917Seivind#else 19651917Seivind 19751938Speter/* gcc <= 2.8 version */ 19851938SpeterATOMIC_ASM(set, char, "orb %1,%0", v) 19951938SpeterATOMIC_ASM(clear, char, "andb %1,%0", ~v) 20051938SpeterATOMIC_ASM(add, char, "addb %1,%0", v) 20151938SpeterATOMIC_ASM(subtract, char, "subb %1,%0", v) 20251917Seivind 20351938SpeterATOMIC_ASM(set, short, "orw %1,%0", v) 20451938SpeterATOMIC_ASM(clear, short, "andw %1,%0", ~v) 20551938SpeterATOMIC_ASM(add, short, "addw %1,%0", v) 20651938SpeterATOMIC_ASM(subtract, short, "subw %1,%0", v) 20751917Seivind 20851938SpeterATOMIC_ASM(set, int, "orl %1,%0", v) 20951938SpeterATOMIC_ASM(clear, int, "andl %1,%0", ~v) 21051938SpeterATOMIC_ASM(add, int, "addl %1,%0", v) 21151938SpeterATOMIC_ASM(subtract, int, "subl %1,%0", v) 21251938Speter 21351938SpeterATOMIC_ASM(set, long, "orl %1,%0", v) 21451938SpeterATOMIC_ASM(clear, long, "andl %1,%0", ~v) 21551938SpeterATOMIC_ASM(add, long, "addl %1,%0", v) 21651938SpeterATOMIC_ASM(subtract, long, "subl %1,%0", v) 21751938Speter 21851917Seivind#endif 21951917Seivind 22067351Sjhb#undef ATOMIC_ASM 22167351Sjhb 22265514Sphk#ifndef WANT_FUNCTIONS 22367351Sjhb#define ATOMIC_ACQ_REL(NAME, TYPE) \ 22467351Sjhbstatic __inline void \ 22567351Sjhbatomic_##NAME##_acq_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\ 22667351Sjhb{ \ 22767351Sjhb __asm __volatile("lock; addl $0,0(%esp)" : : : "memory");\ 22867351Sjhb atomic_##NAME##_##TYPE(p, v); \ 22967351Sjhb} \ 23067351Sjhb \ 23167351Sjhbstatic __inline void \ 
23267351Sjhbatomic_##NAME##_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\ 23367351Sjhb{ \ 23467351Sjhb atomic_##NAME##_##TYPE(p, v); \ 23567351Sjhb} 23667351Sjhb 23767351SjhbATOMIC_ACQ_REL(set, char) 23867351SjhbATOMIC_ACQ_REL(clear, char) 23967351SjhbATOMIC_ACQ_REL(add, char) 24067351SjhbATOMIC_ACQ_REL(subtract, char) 24167351SjhbATOMIC_ACQ_REL(set, short) 24267351SjhbATOMIC_ACQ_REL(clear, short) 24367351SjhbATOMIC_ACQ_REL(add, short) 24467351SjhbATOMIC_ACQ_REL(subtract, short) 24567351SjhbATOMIC_ACQ_REL(set, int) 24667351SjhbATOMIC_ACQ_REL(clear, int) 24767351SjhbATOMIC_ACQ_REL(add, int) 24867351SjhbATOMIC_ACQ_REL(subtract, int) 24967351SjhbATOMIC_ACQ_REL(set, long) 25067351SjhbATOMIC_ACQ_REL(clear, long) 25167351SjhbATOMIC_ACQ_REL(add, long) 25267351SjhbATOMIC_ACQ_REL(subtract, long) 25367351Sjhb 25467351Sjhb#undef ATOMIC_ACQ_REL 25567351Sjhb 25667351Sjhb/* 25767351Sjhb * We assume that a = b will do atomic loads and stores. 25867351Sjhb */ 25967351Sjhb#define ATOMIC_STORE_LOAD(TYPE) \ 26067351Sjhbstatic __inline u_##TYPE \ 26167351Sjhbatomic_load_acq_##TYPE(volatile u_##TYPE *p) \ 26267351Sjhb{ \ 26367351Sjhb __asm __volatile("lock; addl $0,0(%esp)" : : : "memory");\ 26467351Sjhb return (*p); \ 26567351Sjhb} \ 26667351Sjhb \ 26767351Sjhbstatic __inline void \ 26867351Sjhbatomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\ 26967351Sjhb{ \ 27067351Sjhb *p = v; \ 27167351Sjhb __asm __volatile("" : : : "memory"); \ 27267351Sjhb} 27367351Sjhb 27467351SjhbATOMIC_STORE_LOAD(char) 27567351SjhbATOMIC_STORE_LOAD(short) 27667351SjhbATOMIC_STORE_LOAD(int) 27767351SjhbATOMIC_STORE_LOAD(long) 27867351Sjhb 27967351Sjhb#undef ATOMIC_STORE_LOAD 28067351Sjhb 28165514Sphkstatic __inline int 28265514Sphkatomic_cmpset_ptr(volatile void *dst, void *exp, void *src) 28365514Sphk{ 28465514Sphk 28565514Sphk return ( 28665514Sphk atomic_cmpset_int((volatile u_int *)dst, (u_int)exp, (u_int)src)); 28765514Sphk} 28866695Sjhb 28967351Sjhb#define atomic_cmpset_acq_ptr atomic_cmpset_ptr 
29067351Sjhb#define atomic_cmpset_rel_ptr atomic_cmpset_ptr 29167351Sjhb 29267351Sjhbstatic __inline void * 29367351Sjhbatomic_load_acq_ptr(volatile void *p) 29467351Sjhb{ 29567351Sjhb return (void *)atomic_load_acq_int((volatile u_int *)p); 29667351Sjhb} 29767351Sjhb 29867351Sjhbstatic __inline void 29967351Sjhbatomic_store_rel_ptr(volatile void *p, void *v) 30067351Sjhb{ 30167351Sjhb atomic_store_rel_int((volatile u_int *)p, (u_int)v); 30267351Sjhb} 30367351Sjhb 30467351Sjhb#define ATOMIC_PTR(NAME) \ 30567351Sjhbstatic __inline void \ 30667351Sjhbatomic_##NAME##_ptr(volatile void *p, uintptr_t v) \ 30767351Sjhb{ \ 30867351Sjhb atomic_##NAME##_int((volatile u_int *)p, v); \ 30967351Sjhb} \ 31067351Sjhb \ 31167351Sjhbstatic __inline void \ 31267351Sjhbatomic_##NAME##_acq_ptr(volatile void *p, uintptr_t v) \ 31367351Sjhb{ \ 31467351Sjhb atomic_##NAME##_acq_int((volatile u_int *)p, v);\ 31567351Sjhb} \ 31667351Sjhb \ 31767351Sjhbstatic __inline void \ 31867351Sjhbatomic_##NAME##_rel_ptr(volatile void *p, uintptr_t v) \ 31967351Sjhb{ \ 32067351Sjhb atomic_##NAME##_rel_int((volatile u_int *)p, v);\ 32167351Sjhb} 32267351Sjhb 32367351SjhbATOMIC_PTR(set) 32467351SjhbATOMIC_PTR(clear) 32567351SjhbATOMIC_PTR(add) 32667351SjhbATOMIC_PTR(subtract) 32767351Sjhb 32867351Sjhb#undef ATOMIC_PTR 32967351Sjhb 33066695Sjhbstatic __inline u_int 33166695Sjhbatomic_readandclear_int(volatile u_int *addr) 33266695Sjhb{ 33366695Sjhb u_int result; 33466695Sjhb 33566695Sjhb __asm __volatile ( 33666695Sjhb " xorl %0,%0 ; " 33766695Sjhb " xchgl %1,%0 ; " 33866695Sjhb "# atomic_readandclear_int" 33966695Sjhb : "=&r" (result) /* 0 (result) */ 34066695Sjhb : "m" (*addr)); /* 1 (addr) */ 34166695Sjhb 34266695Sjhb return (result); 34366695Sjhb} 34466695Sjhb 34566695Sjhbstatic __inline u_long 34666695Sjhbatomic_readandclear_long(volatile u_long *addr) 34766695Sjhb{ 34866695Sjhb u_long result; 34966695Sjhb 35066695Sjhb __asm __volatile ( 35166695Sjhb " xorl %0,%0 ; " 35266695Sjhb " xchgl %1,%0 ; " 
35366695Sjhb "# atomic_readandclear_int" 35466695Sjhb : "=&r" (result) /* 0 (result) */ 35566695Sjhb : "m" (*addr)); /* 1 (addr) */ 35666695Sjhb 35766695Sjhb return (result); 35866695Sjhb} 35965514Sphk#endif 36065514Sphk 36138517Sdfr#endif /* ! _MACHINE_ATOMIC_H_ */ 362