1/*-
2 * Copyright (c) 2007 Pawel Jakub Dawidek <pjd@FreeBSD.org>
3 * All rights reserved.
4 *
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions
7 * are met:
8 * 1. Redistributions of source code must retain the above copyright
9 *    notice, this list of conditions and the following disclaimer.
10 * 2. Redistributions in binary form must reproduce the above copyright
11 *    notice, this list of conditions and the following disclaimer in the
12 *    documentation and/or other materials provided with the distribution.
13 *
14 * THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS'' AND
15 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
16 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
17 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE
18 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
19 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
20 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
21 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
22 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
23 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
24 * SUCH DAMAGE.
25 */
26
27#ifndef _OPENSOLARIS_SYS_ATOMIC_H_
28#define	_OPENSOLARIS_SYS_ATOMIC_H_
29
30#include <sys/types.h>
31#include <machine/atomic.h>
32
33#if defined(__i386__) && (defined(_KERNEL) || defined(KLD_MODULE))
34#define	I386_HAVE_ATOMIC64
35#endif
36
37#if defined(__i386__) || defined(__amd64__) || defined(__arm__)
38/* No spurious failures from fcmpset. */
39#define	STRONG_FCMPSET
40#endif
41
#if !defined(__LP64__) && !defined(__mips_n32) && \
    !defined(ARM_HAVE_ATOMIC64) && !defined(I386_HAVE_ATOMIC64) && \
    !defined(HAS_EMULATED_ATOMIC64)
/*
 * Platforms with no native (or emulated) 64-bit atomic instructions get
 * out-of-line implementations of the 64-bit operations; the inline
 * versions further down in this file are compiled out for them.
 */
extern void atomic_add_64(volatile uint64_t *target, int64_t delta);
extern void atomic_dec_64(volatile uint64_t *target);
extern uint64_t atomic_swap_64(volatile uint64_t *a, uint64_t value);
extern uint64_t atomic_load_64(volatile uint64_t *a);
extern uint64_t atomic_add_64_nv(volatile uint64_t *target, int64_t delta);
extern uint64_t atomic_cas_64(volatile uint64_t *target, uint64_t cmp,
    uint64_t newval);
#endif

/*
 * Solaris membar_producer() orders earlier stores before later stores;
 * FreeBSD's release fence provides at least that guarantee.
 */
#define	membar_producer	atomic_thread_fence_rel
55
/*
 * Atomically add "delta" to *target and return the resulting (new) value.
 */
static __inline uint32_t
atomic_add_32_nv(volatile uint32_t *target, int32_t delta)
{
	uint32_t prev;

	/* fetchadd returns the previous value; add delta for the new one. */
	prev = atomic_fetchadd_32(target, delta);
	return (prev + delta);
}
61
62static __inline u_int
63atomic_add_int_nv(volatile u_int *target, int delta)
64{
65	return (atomic_add_32_nv(target, delta));
66}
67
/*
 * Atomically increment *target by one.
 */
static __inline void
atomic_inc_32(volatile uint32_t *target)
{
	atomic_add_32(target, 1);
}
73
/*
 * Atomically increment *target by one and return the new value.
 */
static __inline uint32_t
atomic_inc_32_nv(volatile uint32_t *target)
{
	/* fetchadd returns the old value; add one to get the new value. */
	return (atomic_fetchadd_32(target, 1) + 1);
}
79
/*
 * Atomically decrement *target by one.
 */
static __inline void
atomic_dec_32(volatile uint32_t *target)
{
	atomic_subtract_32(target, 1);
}
85
/*
 * Atomically decrement *target by one and return the new value.
 */
static __inline uint32_t
atomic_dec_32_nv(volatile uint32_t *target)
{
	/* Unsigned wrap makes old - 1 equivalent to old + (uint32_t)-1. */
	return (atomic_fetchadd_32(target, -1) - 1);
}
91
/*
 * Atomically compare *target with "cmp" and, if they are equal, store
 * "newval".  Returns the value *target held at the time of the compare
 * (equal to "cmp" exactly when the store happened).
 */
static inline uint32_t
atomic_cas_32(volatile uint32_t *target, uint32_t cmp, uint32_t newval)
{
#ifdef STRONG_FCMPSET
	(void)atomic_fcmpset_32(target, &cmp, newval);
#else
	const uint32_t want = cmp;

	/*
	 * fcmpset may fail spuriously on some platforms: retry while it
	 * fails yet still reports the expected value in "cmp"; stop on
	 * success or on a genuine mismatch.
	 */
	while (!atomic_fcmpset_32(target, &cmp, newval) && cmp == want)
		;
#endif
	return (cmp);
}
107
108#if defined(__LP64__) || defined(__mips_n32) || \
109    defined(ARM_HAVE_ATOMIC64) || defined(I386_HAVE_ATOMIC64) || \
110    defined(HAS_EMULATED_ATOMIC64)
/*
 * Atomically decrement *target by one (native 64-bit atomics only; other
 * platforms use the out-of-line version declared earlier in this file).
 */
static __inline void
atomic_dec_64(volatile uint64_t *target)
{
	atomic_subtract_64(target, 1);
}
116
/*
 * Atomically add "delta" to *target and return the resulting (new) value.
 */
static inline uint64_t
atomic_add_64_nv(volatile uint64_t *target, int64_t delta)
{
	uint64_t prev;

	/* fetchadd returns the previous value; add delta for the new one. */
	prev = atomic_fetchadd_64(target, delta);
	return (prev + delta);
}
122
/*
 * Atomically compare *target with "cmp" and, if they are equal, store
 * "newval".  Returns the value *target held at the time of the compare
 * (equal to "cmp" exactly when the store happened).
 */
static inline uint64_t
atomic_cas_64(volatile uint64_t *target, uint64_t cmp, uint64_t newval)
{
#ifdef STRONG_FCMPSET
	(void)atomic_fcmpset_64(target, &cmp, newval);
#else
	const uint64_t want = cmp;

	/*
	 * fcmpset may fail spuriously on some platforms: retry while it
	 * fails yet still reports the expected value in "cmp"; stop on
	 * success or on a genuine mismatch.
	 */
	while (!atomic_fcmpset_64(target, &cmp, newval) && cmp == want)
		;
#endif
	return (cmp);
}
138#endif
139
/*
 * Atomically increment *target by one.  atomic_add_64 resolves to either
 * the machine/atomic.h inline or the out-of-line fallback declared above.
 */
static __inline void
atomic_inc_64(volatile uint64_t *target)
{
	atomic_add_64(target, 1);
}
145
/*
 * Atomically increment *target by one and return the new value.
 */
static __inline uint64_t
atomic_inc_64_nv(volatile uint64_t *target)
{
	return (atomic_add_64_nv(target, 1));
}
151
/*
 * Atomically decrement *target by one and return the new value.
 */
static __inline uint64_t
atomic_dec_64_nv(volatile uint64_t *target)
{
	return (atomic_add_64_nv(target, -1));
}
157
158#ifdef __LP64__
/*
 * Pointer-sized compare-and-swap.  On LP64 platforms a pointer is 64 bits
 * wide, so defer to the 64-bit CAS and convert the result back.
 */
static __inline void *
atomic_cas_ptr(volatile void *target, void *cmp,  void *newval)
{
	volatile uint64_t *tp = (volatile uint64_t *)target;
	uint64_t old;

	old = atomic_cas_64(tp, (uint64_t)cmp, (uint64_t)newval);
	return ((void *)old);
}
165#else
/*
 * Pointer-sized compare-and-swap.  Without __LP64__ a pointer is 32 bits
 * wide, so defer to the 32-bit CAS and convert the result back.
 */
static __inline void *
atomic_cas_ptr(volatile void *target, void *cmp,  void *newval)
{
	volatile uint32_t *tp = (volatile uint32_t *)target;
	uint32_t old;

	old = atomic_cas_32(tp, (uint32_t)cmp, (uint32_t)newval);
	return ((void *)old);
}
172#endif	/* __LP64__ */
173
174#endif	/* !_OPENSOLARIS_SYS_ATOMIC_H_ */
175