/* atomic-long.h, revision 361181 */
/*-
 * Copyright (c) 2010 Isilon Systems, Inc.
 * Copyright (c) 2010 iX Systems, Inc.
 * Copyright (c) 2010 Panasas, Inc.
 * Copyright (c) 2013-2017 Mellanox Technologies, Ltd.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice unmodified, this list of conditions, and the following
 *    disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28 * 29 * $FreeBSD: stable/11/sys/compat/linuxkpi/common/include/asm/atomic-long.h 361181 2020-05-18 09:12:13Z hselasky $ 30 */ 31#ifndef _ATOMIC_LONG_H_ 32#define _ATOMIC_LONG_H_ 33 34#include <linux/compiler.h> 35#include <sys/types.h> 36#include <machine/atomic.h> 37 38#define ATOMIC_LONG_INIT(x) { .counter = (x) } 39 40typedef struct { 41 volatile long counter; 42} atomic_long_t; 43 44#define atomic_long_add(i, v) atomic_long_add_return((i), (v)) 45#define atomic_long_inc_return(v) atomic_long_add_return(1, (v)) 46#define atomic_long_inc_not_zero(v) atomic_long_add_unless((v), 1, 0) 47 48static inline long 49atomic_long_add_return(long i, atomic_long_t *v) 50{ 51 return i + atomic_fetchadd_long(&v->counter, i); 52} 53 54static inline void 55atomic_long_set(atomic_long_t *v, long i) 56{ 57 WRITE_ONCE(v->counter, i); 58} 59 60static inline long 61atomic_long_read(atomic_long_t *v) 62{ 63 return READ_ONCE(v->counter); 64} 65 66static inline long 67atomic_long_inc(atomic_long_t *v) 68{ 69 return atomic_fetchadd_long(&v->counter, 1) + 1; 70} 71 72static inline long 73atomic_long_dec(atomic_long_t *v) 74{ 75 return atomic_fetchadd_long(&v->counter, -1) - 1; 76} 77 78static inline long 79atomic_long_xchg(atomic_long_t *v, long val) 80{ 81#if defined(__i386__) || defined(__amd64__) || defined(__aarch64__) 82 return atomic_swap_long(&v->counter, val); 83#else 84 long ret = atomic_long_read(v); 85 86 while (!atomic_fcmpset_long(&v->counter, &ret, val)) 87 ; 88 return (ret); 89#endif 90} 91 92static inline long 93atomic_long_cmpxchg(atomic_long_t *v, long old, long new) 94{ 95 long ret = old; 96 97 for (;;) { 98 if (atomic_fcmpset_long(&v->counter, &ret, new)) 99 break; 100 if (ret != old) 101 break; 102 } 103 return (ret); 104} 105 106static inline int 107atomic_long_add_unless(atomic_long_t *v, long a, long u) 108{ 109 long c = atomic_long_read(v); 110 111 for (;;) { 112 if (unlikely(c == u)) 113 break; 114 if (likely(atomic_fcmpset_long(&v->counter, &c, c + a))) 115 
break; 116 } 117 return (c != u); 118} 119 120static inline long 121atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u) 122{ 123 long c = atomic_long_read(v); 124 125 for (;;) { 126 if (unlikely(c == u)) 127 break; 128 if (likely(atomic_fcmpset_long(&v->counter, &c, c + a))) 129 break; 130 } 131 return (c); 132} 133 134static inline long 135atomic_long_dec_and_test(atomic_long_t *v) 136{ 137 long i = atomic_long_add(-1, v); 138 return i == 0 ; 139} 140 141#endif /* _ATOMIC_LONG_H_ */ 142