/*-
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD: head/sys/ia64/include/atomic.h 96956 2002-05-19 20:19:07Z marcel $
 */

#ifndef _MACHINE_ATOMIC_H_
#define _MACHINE_ATOMIC_H_

/*
 * Various simple operations on memory, each of which is atomic in the
 * presence of interrupts and of other processors (SMP safe).
 */

/*
 * Everything is built out of cmpxchg.
 */
#define IA64_CMPXCHG(sz, sem, p, cmpval, newval, ret)		\
	__asm __volatile (					\
		"mov ar.ccv=%2;;\n\t"				\
		"cmpxchg" #sz "." #sem " %0=%4,%3,ar.ccv\n\t"	\
		: "=r" (ret), "=m" (*p)				\
		: "r" (cmpval), "r" (newval), "m" (*p)		\
		: "memory")
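
/*
 * Illustrative sketch only (not part of this interface): a use such as
 *
 *	u_int32_t ret;
 *	IA64_CMPXCHG(4, acq, p, cmpval, newval, ret);
 *
 * emits roughly
 *
 *	mov	ar.ccv=cmpval;;
 *	cmpxchg4.acq ret=[p],newval,ar.ccv
 *
 * that is, *p is replaced with newval only if it equalled cmpval, and
 * the value *p held before the instruction is returned in ret.
 */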

/*
 * Some common forms of cmpxchg.
 */
static __inline u_int32_t
ia64_cmpxchg_acq_32(volatile u_int32_t* p, u_int32_t cmpval, u_int32_t newval)
{
	u_int32_t ret;
	IA64_CMPXCHG(4, acq, p, cmpval, newval, ret);
	return (ret);
}

static __inline u_int32_t
ia64_cmpxchg_rel_32(volatile u_int32_t* p, u_int32_t cmpval, u_int32_t newval)
{
	u_int32_t ret;
	IA64_CMPXCHG(4, rel, p, cmpval, newval, ret);
	return (ret);
}

static __inline u_int64_t
ia64_cmpxchg_acq_64(volatile u_int64_t* p, u_int64_t cmpval, u_int64_t newval)
{
	u_int64_t ret;
	IA64_CMPXCHG(8, acq, p, cmpval, newval, ret);
	return (ret);
}

static __inline u_int64_t
ia64_cmpxchg_rel_64(volatile u_int64_t* p, u_int64_t cmpval, u_int64_t newval)
{
	u_int64_t ret;
	IA64_CMPXCHG(8, rel, p, cmpval, newval, ret);
	return (ret);
}
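
/*
 * Example (illustrative only): these helpers return the old value, so
 * a caller can both attempt an update and learn the current contents
 * in one step:
 *
 *	if (ia64_cmpxchg_acq_32(&x, 0, 1) == 0) {
 *		... x was 0 and is now 1 ...
 *	}
 */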

#define ATOMIC_STORE_LOAD(type, width, size)			\
static __inline u_int##width##_t				\
ia64_ld_acq_##width(volatile u_int##width##_t* p)		\
{								\
	u_int##width##_t v;					\
								\
	__asm __volatile ("ld" size ".acq %0=%1"		\
			  : "=r" (v)				\
			  : "m" (*p)				\
			  : "memory");				\
	return (v);						\
}								\
								\
static __inline u_int##width##_t				\
atomic_load_acq_##width(volatile u_int##width##_t* p)		\
{								\
	u_int##width##_t v;					\
								\
	__asm __volatile ("ld" size ".acq %0=%1"		\
			  : "=r" (v)				\
			  : "m" (*p)				\
			  : "memory");				\
	return (v);						\
}								\
								\
static __inline u_int##width##_t				\
atomic_load_acq_##type(volatile u_int##width##_t* p)		\
{								\
	u_int##width##_t v;					\
								\
	__asm __volatile ("ld" size ".acq %0=%1"		\
			  : "=r" (v)				\
			  : "m" (*p)				\
			  : "memory");				\
	return (v);						\
}								\
								\
static __inline void						\
ia64_st_rel_##width(volatile u_int##width##_t* p, u_int##width##_t v)\
{								\
	__asm __volatile ("st" size ".rel %0=%1"		\
			  : "=m" (*p)				\
			  : "r" (v)				\
			  : "memory");				\
}								\
								\
static __inline void						\
atomic_store_rel_##width(volatile u_int##width##_t* p, u_int##width##_t v)\
{								\
	__asm __volatile ("st" size ".rel %0=%1"		\
			  : "=m" (*p)				\
			  : "r" (v)				\
			  : "memory");				\
}								\
								\
static __inline void						\
atomic_store_rel_##type(volatile u_int##width##_t* p, u_int##width##_t v)\
{								\
	__asm __volatile ("st" size ".rel %0=%1"		\
			  : "=m" (*p)				\
			  : "r" (v)				\
			  : "memory");				\
}

ATOMIC_STORE_LOAD(char,		8,	"1")
ATOMIC_STORE_LOAD(short,	16,	"2")
ATOMIC_STORE_LOAD(int,		32,	"4")
ATOMIC_STORE_LOAD(long,		64,	"8")

#undef ATOMIC_STORE_LOAD
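
/*
 * Example (illustrative only): acquire loads pair with release stores
 * to order data around a flag, e.g. a simple producer/consumer handoff
 * (data, ready, compute and consume are hypothetical names):
 *
 *	data = compute();			(producer)
 *	atomic_store_rel_32(&ready, 1);
 *
 *	while (atomic_load_acq_32(&ready) == 0)	(consumer)
 *		;
 *	consume(data);
 */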

#define IA64_ATOMIC(sz, type, name, width, op)				\
									\
static __inline void							\
atomic_##name##_acq_##width(volatile type *p, type v)			\
{									\
	type old, ret;							\
	do {								\
		old = *p;						\
		IA64_CMPXCHG(sz, acq, p, old, old op v, ret);		\
	} while (ret != old);						\
}									\
									\
static __inline void							\
atomic_##name##_rel_##width(volatile type *p, type v)			\
{									\
	type old, ret;							\
	do {								\
		old = *p;						\
		IA64_CMPXCHG(sz, rel, p, old, old op v, ret);		\
	} while (ret != old);						\
}

IA64_ATOMIC(1, u_int8_t,  set,	8,	|)
IA64_ATOMIC(2, u_int16_t, set,	16,	|)
IA64_ATOMIC(4, u_int32_t, set,	32,	|)
IA64_ATOMIC(8, u_int64_t, set,	64,	|)

IA64_ATOMIC(1, u_int8_t,  clear,	8,	&~)
IA64_ATOMIC(2, u_int16_t, clear,	16,	&~)
IA64_ATOMIC(4, u_int32_t, clear,	32,	&~)
IA64_ATOMIC(8, u_int64_t, clear,	64,	&~)

IA64_ATOMIC(1, u_int8_t,  add,	8,	+)
IA64_ATOMIC(2, u_int16_t, add,	16,	+)
IA64_ATOMIC(4, u_int32_t, add,	32,	+)
IA64_ATOMIC(8, u_int64_t, add,	64,	+)

IA64_ATOMIC(1, u_int8_t,  subtract,	8,	-)
IA64_ATOMIC(2, u_int16_t, subtract,	16,	-)
IA64_ATOMIC(4, u_int32_t, subtract,	32,	-)
IA64_ATOMIC(8, u_int64_t, subtract,	64,	-)

#undef IA64_ATOMIC
#undef IA64_CMPXCHG
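
/*
 * Sketch of what the expansions above amount to (illustrative only):
 * each read-modify-write retries its cmpxchg until no other CPU has
 * modified the word in between, e.g. atomic_add_acq_32(p, v) behaves
 * like
 *
 *	u_int32_t old;
 *
 *	do {
 *		old = *p;
 *	} while (ia64_cmpxchg_acq_32(p, old, old + v) != old);
 */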

#define atomic_set_8		atomic_set_acq_8
#define atomic_clear_8		atomic_clear_acq_8
#define atomic_add_8		atomic_add_acq_8
#define atomic_subtract_8	atomic_subtract_acq_8

#define atomic_set_16		atomic_set_acq_16
#define atomic_clear_16		atomic_clear_acq_16
#define atomic_add_16		atomic_add_acq_16
#define atomic_subtract_16	atomic_subtract_acq_16

#define atomic_set_32		atomic_set_acq_32
#define atomic_clear_32		atomic_clear_acq_32
#define atomic_add_32		atomic_add_acq_32
#define atomic_subtract_32	atomic_subtract_acq_32

#define atomic_set_64		atomic_set_acq_64
#define atomic_clear_64		atomic_clear_acq_64
#define atomic_add_64		atomic_add_acq_64
#define atomic_subtract_64	atomic_subtract_acq_64

#define atomic_set_char			atomic_set_8
#define atomic_clear_char		atomic_clear_8
#define atomic_add_char			atomic_add_8
#define atomic_subtract_char		atomic_subtract_8
#define atomic_set_acq_char		atomic_set_acq_8
#define atomic_clear_acq_char		atomic_clear_acq_8
#define atomic_add_acq_char		atomic_add_acq_8
#define atomic_subtract_acq_char	atomic_subtract_acq_8
#define atomic_set_rel_char		atomic_set_rel_8
#define atomic_clear_rel_char		atomic_clear_rel_8
#define atomic_add_rel_char		atomic_add_rel_8
#define atomic_subtract_rel_char	atomic_subtract_rel_8

#define atomic_set_short		atomic_set_16
#define atomic_clear_short		atomic_clear_16
#define atomic_add_short		atomic_add_16
#define atomic_subtract_short		atomic_subtract_16
#define atomic_set_acq_short		atomic_set_acq_16
#define atomic_clear_acq_short		atomic_clear_acq_16
#define atomic_add_acq_short		atomic_add_acq_16
#define atomic_subtract_acq_short	atomic_subtract_acq_16
#define atomic_set_rel_short		atomic_set_rel_16
#define atomic_clear_rel_short		atomic_clear_rel_16
#define atomic_add_rel_short		atomic_add_rel_16
#define atomic_subtract_rel_short	atomic_subtract_rel_16

#define atomic_set_int			atomic_set_32
#define atomic_clear_int		atomic_clear_32
#define atomic_add_int			atomic_add_32
#define atomic_subtract_int		atomic_subtract_32
#define atomic_set_acq_int		atomic_set_acq_32
#define atomic_clear_acq_int		atomic_clear_acq_32
#define atomic_add_acq_int		atomic_add_acq_32
#define atomic_subtract_acq_int		atomic_subtract_acq_32
#define atomic_set_rel_int		atomic_set_rel_32
#define atomic_clear_rel_int		atomic_clear_rel_32
#define atomic_add_rel_int		atomic_add_rel_32
#define atomic_subtract_rel_int		atomic_subtract_rel_32

#define atomic_set_long			atomic_set_64
#define atomic_clear_long		atomic_clear_64
#define atomic_add_long			atomic_add_64
#define atomic_subtract_long		atomic_subtract_64
#define atomic_set_acq_long		atomic_set_acq_64
#define atomic_clear_acq_long		atomic_clear_acq_64
#define atomic_add_acq_long		atomic_add_acq_64
#define atomic_subtract_acq_long	atomic_subtract_acq_64
#define atomic_set_rel_long		atomic_set_rel_64
#define atomic_clear_rel_long		atomic_clear_rel_64
#define atomic_add_rel_long		atomic_add_rel_64
#define atomic_subtract_rel_long	atomic_subtract_rel_64

/*
 * Atomically compare the value stored at *p with cmpval and if the
 * two values are equal, update the value of *p with newval. Returns
 * zero if the compare failed, nonzero otherwise.
 */
static __inline int
atomic_cmpset_acq_32(volatile u_int32_t* p, u_int32_t cmpval, u_int32_t newval)
{
	return (ia64_cmpxchg_acq_32(p, cmpval, newval) == cmpval);
}

static __inline int
atomic_cmpset_rel_32(volatile u_int32_t* p, u_int32_t cmpval, u_int32_t newval)
{
	return (ia64_cmpxchg_rel_32(p, cmpval, newval) == cmpval);
}

static __inline int
atomic_cmpset_acq_64(volatile u_int64_t* p, u_int64_t cmpval, u_int64_t newval)
{
	return (ia64_cmpxchg_acq_64(p, cmpval, newval) == cmpval);
}

static __inline int
atomic_cmpset_rel_64(volatile u_int64_t* p, u_int64_t cmpval, u_int64_t newval)
{
	return (ia64_cmpxchg_rel_64(p, cmpval, newval) == cmpval);
}

#define atomic_cmpset_32	atomic_cmpset_acq_32
#define atomic_cmpset_64	atomic_cmpset_acq_64
#define atomic_cmpset_int	atomic_cmpset_32
#define atomic_cmpset_long	atomic_cmpset_64
#define atomic_cmpset_acq_int	atomic_cmpset_acq_32
#define atomic_cmpset_rel_int	atomic_cmpset_rel_32
#define atomic_cmpset_acq_long	atomic_cmpset_acq_64
#define atomic_cmpset_rel_long	atomic_cmpset_rel_64
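
/*
 * Example (illustrative only): cmpset is the usual building block for
 * synchronization primitives; a minimal spin lock sketch, assuming a
 * hypothetical word "lock" that is 0 when free:
 *
 *	while (!atomic_cmpset_acq_32(&lock, 0, 1))
 *		;				(acquire: 0 -> 1)
 *	... critical section ...
 *	atomic_store_rel_32(&lock, 0);		(release)
 */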

static __inline int
atomic_cmpset_acq_ptr(volatile void *dst, void *exp, void *src)
{
	return (atomic_cmpset_acq_long((volatile u_long *)dst,
	    (u_long)exp, (u_long)src));
}

static __inline int
atomic_cmpset_rel_ptr(volatile void *dst, void *exp, void *src)
{
	return (atomic_cmpset_rel_long((volatile u_long *)dst,
	    (u_long)exp, (u_long)src));
}

#define atomic_cmpset_ptr	atomic_cmpset_acq_ptr

static __inline void *
atomic_load_acq_ptr(volatile void *p)
{
	return ((void *)atomic_load_acq_long((volatile u_long *)p));
}

static __inline void
atomic_store_rel_ptr(volatile void *p, void *v)
{
	atomic_store_rel_long((volatile u_long *)p, (u_long)v);
}

#define ATOMIC_PTR(NAME)				\
static __inline void					\
atomic_##NAME##_ptr(volatile void *p, uintptr_t v)	\
{							\
	atomic_##NAME##_long((volatile u_long *)p, v);	\
}							\
							\
static __inline void					\
atomic_##NAME##_acq_ptr(volatile void *p, uintptr_t v)	\
{							\
	atomic_##NAME##_acq_long((volatile u_long *)p, v);\
}							\
							\
static __inline void					\
atomic_##NAME##_rel_ptr(volatile void *p, uintptr_t v)	\
{							\
	atomic_##NAME##_rel_long((volatile u_long *)p, v);\
}

ATOMIC_PTR(set)
ATOMIC_PTR(clear)
ATOMIC_PTR(add)
ATOMIC_PTR(subtract)

#undef ATOMIC_PTR
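
/*
 * Example (illustrative only): the pointer forms operate on the
 * pointer's bits as a u_long, so e.g. pushing onto a lock-free list
 * head might look like (head and elem are hypothetical):
 *
 *	void *oldhead;
 *	do {
 *		oldhead = atomic_load_acq_ptr(&head);
 *		elem->next = oldhead;
 *	} while (!atomic_cmpset_rel_ptr(&head, oldhead, elem));
 */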

static __inline u_int32_t
atomic_readandclear_32(volatile u_int32_t* p)
{
	u_int32_t val;
	do {
		val = *p;
	} while (!atomic_cmpset_32(p, val, 0));
	return (val);
}

static __inline u_int64_t
atomic_readandclear_64(volatile u_int64_t* p)
{
	u_int64_t val;
	do {
		val = *p;
	} while (!atomic_cmpset_64(p, val, 0));
	return (val);
}

#define atomic_readandclear_int		atomic_readandclear_32
#define atomic_readandclear_long	atomic_readandclear_64
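
/*
 * Example (illustrative only): readandclear atomically takes ownership
 * of whatever was stored, e.g. draining a hypothetical mask of pending
 * events that other CPUs post with atomic_set_32():
 *
 *	pending = atomic_readandclear_32(&events);
 *	while (pending != 0)
 *		... handle and clear the lowest set bit ...
 */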

#endif /* ! _MACHINE_ATOMIC_H_ */