/*-
 * Copyright (c) 2016-2017 Mellanox Technologies, Ltd.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice unmodified, this list of conditions, and the following
 *    disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * $FreeBSD: stable/11/sys/compat/linuxkpi/common/include/asm/atomic64.h 361181 2020-05-18 09:12:13Z hselasky $
 */
#ifndef	_ASM_ATOMIC64_H_
#define	_ASM_ATOMIC64_H_

#include <linux/compiler.h>
#include <sys/types.h>
#include <machine/atomic.h>

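/*
 * Linux' atomic64_t is modelled as a volatile 64-bit counter; the
 * operations below are implemented on top of the FreeBSD primitives
 * from <machine/atomic.h>.
 */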
typedef struct {
	volatile int64_t counter;
} atomic64_t;

#define	ATOMIC64_INIT(x)	{ .counter = (x) }

/*------------------------------------------------------------------------*
 *	64-bit atomic operations
 *------------------------------------------------------------------------*/

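/*
 * Convenience wrappers built on the add/sub return primitives below.
 * Note that, unlike their Linux counterparts, atomic64_add() and
 * atomic64_sub() therefore also yield the new counter value.
 */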
#define	atomic64_add(i, v)		atomic64_add_return((i), (v))
#define	atomic64_sub(i, v)		atomic64_sub_return((i), (v))
#define	atomic64_inc_return(v)		atomic64_add_return(1, (v))
#define	atomic64_add_negative(i, v)	(atomic64_add_return((i), (v)) < 0)
#define	atomic64_add_and_test(i, v)	(atomic64_add_return((i), (v)) == 0)
#define	atomic64_sub_and_test(i, v)	(atomic64_sub_return((i), (v)) == 0)
#define	atomic64_dec_and_test(v)	(atomic64_sub_return(1, (v)) == 0)
#define	atomic64_inc_and_test(v)	(atomic64_add_return(1, (v)) == 0)
#define	atomic64_dec_return(v)		atomic64_sub_return(1, (v))
#define	atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1, 0)

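/* Atomically add "i" to the counter and return the new value. */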
static inline int64_t
atomic64_add_return(int64_t i, atomic64_t *v)
{
	return i + atomic_fetchadd_64(&v->counter, i);
}

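/* Atomically subtract "i" from the counter and return the new value. */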
static inline int64_t
atomic64_sub_return(int64_t i, atomic64_t *v)
{
	return atomic_fetchadd_64(&v->counter, -i) - i;
}

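/* Store "i" into the counter with release semantics. */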
static inline void
atomic64_set(atomic64_t *v, int64_t i)
{
	atomic_store_rel_64(&v->counter, i);
}

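/* Read the current counter value; READ_ONCE() imposes no acquire ordering. */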
static inline int64_t
atomic64_read(atomic64_t *v)
{
	return READ_ONCE(v->counter);
}

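/* Atomically increment the counter and return the new value. */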
static inline int64_t
atomic64_inc(atomic64_t *v)
{
	return atomic_fetchadd_64(&v->counter, 1) + 1;
}

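/* Atomically decrement the counter and return the new value. */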
static inline int64_t
atomic64_dec(atomic64_t *v)
{
	return atomic_fetchadd_64(&v->counter, -1) - 1;
}

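/*
 * Atomically add "a" to the counter unless its current value is "u".
 * Returns non-zero if the addition was performed.
 */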
static inline int64_t
atomic64_add_unless(atomic64_t *v, int64_t a, int64_t u)
{
	int64_t c = atomic64_read(v);

	for (;;) {
		if (unlikely(c == u))
			break;
		if (likely(atomic_fcmpset_64(&v->counter, &c, c + a)))
			break;
	}
	return (c != u);
}

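/*
 * Same as atomic64_add_unless(), except that the value the counter had
 * before the (possibly skipped) addition is returned.
 */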
static inline int64_t
atomic64_fetch_add_unless(atomic64_t *v, int64_t a, int64_t u)
{
	int64_t c = atomic64_read(v);

	for (;;) {
		if (unlikely(c == u))
			break;
		if (likely(atomic_fcmpset_64(&v->counter, &c, c + a)))
			break;
	}
	return (c);
}

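/*
 * Atomically exchange the counter with "i" and return the previous value.
 * Architectures with a native 64-bit swap use it directly; the others
 * fall back to a compare-and-set loop.
 */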
static inline int64_t
atomic64_xchg(atomic64_t *v, int64_t i)
{
#if defined(__i386__) || defined(__amd64__) || \
    defined(__arm__) || defined(__aarch64__) || \
    defined(__powerpc64__)
	return (atomic_swap_64(&v->counter, i));
#else
	int64_t ret = atomic64_read(v);

	while (!atomic_fcmpset_64(&v->counter, &ret, i))
		;
	return (ret);
#endif
}

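/*
 * Atomically set the counter to "new" if it currently equals "old".
 * Returns the value observed, which equals "old" exactly when the
 * update succeeded.  The loop retries only on spurious fcmpset
 * failures, i.e. when the observed value still equals "old".
 */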
static inline int64_t
atomic64_cmpxchg(atomic64_t *v, int64_t old, int64_t new)
{
	int64_t ret = old;

	for (;;) {
		if (atomic_fcmpset_64(&v->counter, &ret, new))
			break;
		if (ret != old)
			break;
	}
	return (ret);
}

#endif					/* _ASM_ATOMIC64_H_ */