1/*
2 -
3 * Copyright (c) 2010 Isilon Systems, Inc.
4 * Copyright (c) 2010 iX Systems, Inc.
5 * Copyright (c) 2010 Panasas, Inc.
6 * Copyright (c) 2013, 2014 Mellanox Technologies, Ltd.
7 * All rights reserved.
8 *
9 * Redistribution and use in source and binary forms, with or without
10 * modification, are permitted provided that the following conditions
11 * are met:
12 * 1. Redistributions of source code must retain the above copyright
13 *    notice unmodified, this list of conditions, and the following
14 *    disclaimer.
15 * 2. Redistributions in binary form must reproduce the above copyright
16 *    notice, this list of conditions and the following disclaimer in the
17 *    documentation and/or other materials provided with the distribution.
18 *
19 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
20 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
21 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
22 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
23 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
24 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
25 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
26 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
27 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
28 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29
30 */
31#ifndef	_ASM_ATOMIC_H_
32#define	_ASM_ATOMIC_H_
33/*
34 #include <sys/cdefs.h>
35 #include <sys/types.h>
36 #include <machine/atomic.h>
37 */
38
/*
 * Linux-style memory barriers implemented with the x86 fence
 * instructions: mb() is a full load/store barrier (MFENCE), wmb()
 * orders stores (SFENCE) and rmb() orders loads (LFENCE).  The
 * "memory" clobber additionally prevents the compiler from moving
 * memory accesses across the barrier.
 */
#define	mb()	__asm __volatile("mfence;" : : : "memory")
#define	wmb()	__asm __volatile("sfence;" : : : "memory")
#define	rmb()	__asm __volatile("lfence;" : : : "memory")
42
/*
 * Linux-compatible atomic_t: a plain counter manipulated only through
 * the accessors below.  "unsigned int" is spelled out (it is the same
 * type as BSD's u_int) so this header does not depend on <sys/types.h>.
 */
typedef struct {
	volatile unsigned int counter;
} atomic_t;

/*
 * Core primitives, built on the GCC/Clang atomic builtins so the
 * header is self-contained (it no longer needs <machine/atomic.h>).
 * All arithmetic is done on the unsigned counter and converted back,
 * which gives the usual two's-complement wraparound semantics.
 */

/* Atomically add i to v and return the new value. */
static inline int
atomic_add_return(int i, atomic_t *v)
{
	return ((int)__sync_add_and_fetch(&v->counter, (unsigned int)i));
}

/* Atomically subtract i from v and return the new value. */
static inline int
atomic_sub_return(int i, atomic_t *v)
{
	return ((int)__sync_sub_and_fetch(&v->counter, (unsigned int)i));
}

/* Store i into v with release semantics. */
static inline void
atomic_set(atomic_t *v, int i)
{
	__atomic_store_n(&v->counter, (unsigned int)i, __ATOMIC_RELEASE);
}

/* Load the current value of v with acquire semantics. */
static inline int
atomic_read(atomic_t *v)
{
	return ((int)__atomic_load_n(&v->counter, __ATOMIC_ACQUIRE));
}

/* Atomically increment v and return the new value. */
static inline int
atomic_inc(atomic_t *v)
{
	return ((int)__sync_add_and_fetch(&v->counter, 1U));
}

/* Atomically decrement v and return the new value. */
static inline int
atomic_dec(atomic_t *v)
{
	return ((int)__sync_sub_and_fetch(&v->counter, 1U));
}

/*
 * Atomically add a to v unless v == u.  Returns non-zero if the add
 * was performed, zero if v already held u.  Implemented as a CAS loop;
 * __sync_val_compare_and_swap() returns the value observed before the
 * exchange, so a mismatch both detects the race and refreshes c.
 */
static inline int
atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;

	c = atomic_read(v);
	for (;;) {
		if (c == u)
			break;
		old = (int)__sync_val_compare_and_swap(&v->counter,
		    (unsigned int)c, (unsigned int)(c + a));
		if (old == c)
			break;
		c = old;
	}
	return (c != u);
}

/* Convenience wrappers expressed in terms of the primitives above. */
#define	atomic_add(i, v)		atomic_add_return((i), (v))
#define	atomic_sub(i, v)		atomic_sub_return((i), (v))
#define	atomic_inc_return(v)		atomic_add_return(1, (v))
#define	atomic_dec_return(v)		atomic_sub_return(1, (v))
#define	atomic_add_negative(i, v)	(atomic_add_return((i), (v)) < 0)
#define	atomic_sub_and_test(i, v)	(atomic_sub_return((i), (v)) == 0)
#define	atomic_dec_and_test(v)		(atomic_sub_return(1, (v)) == 0)
#define	atomic_inc_and_test(v)		(atomic_add_return(1, (v)) == 0)
#define	atomic_inc_not_zero(v)		atomic_add_unless((v), 1, 0)
112#endif	 /*_ASM_ATOMIC_H_*/
113
114