/*-
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD$
 */

#ifndef _MACHINE_ATOMIC_H_
#define	_MACHINE_ATOMIC_H_

/*
 * Memory barriers.  "mf" is the ia64 memory fence instruction; there is
 * only the one full fence, so the write and read barriers are defined
 * in terms of it.
 */
#define	mb()	__asm __volatile("mf")
#define	wmb()	mb()
#define	rmb()	mb()
35185162Skmacy
/*
 * Various simple arithmetic on memory which is atomic in the presence
 * of interrupts and SMP safe.
 */

/*
 * Everything is built out of cmpxchg.
 */
/*
 * IA64_CMPXCHG(sz, sem, p, cmpval, newval, ret)
 *
 * The compare-and-exchange primitive everything else here is built on.
 * Loads cmpval into the ar.ccv application register and issues
 * cmpxchg<sz>.<sem>: when *p equals ar.ccv, *p is replaced by newval.
 * Either way, the previous contents of *p end up in ret.  "sz" is the
 * operand size in bytes (1/2/4/8) and "sem" the ordering semantic
 * (acq or rel).  cmpval is widened to uint64_t because ar.ccv is a
 * 64-bit register.  The "memory" clobber keeps the compiler from
 * caching memory values across the operation.
 */
#define	IA64_CMPXCHG(sz, sem, p, cmpval, newval, ret)			\
	__asm __volatile (						\
		"mov ar.ccv=%2;;\n\t"					\
		"cmpxchg" #sz "." #sem " %0=%4,%3,ar.ccv\n\t"		\
		: "=r" (ret), "=m" (*p)					\
		: "r" ((uint64_t)cmpval), "r" (newval), "m" (*p)	\
		: "memory")
5166458Sdfr
/*
 * Some common forms of cmpxchg.
 */
55135581Smarcelstatic __inline uint32_t
56135581Smarcelia64_cmpxchg_acq_32(volatile uint32_t* p, uint32_t cmpval, uint32_t newval)
5766458Sdfr{
58135581Smarcel	uint32_t ret;
5996956Smarcel	IA64_CMPXCHG(4, acq, p, cmpval, newval, ret);
6096956Smarcel	return (ret);
6166458Sdfr}
6266458Sdfr
63135581Smarcelstatic __inline uint32_t
64135581Smarcelia64_cmpxchg_rel_32(volatile uint32_t* p, uint32_t cmpval, uint32_t newval)
6566458Sdfr{
66135581Smarcel	uint32_t ret;
6796956Smarcel	IA64_CMPXCHG(4, rel, p, cmpval, newval, ret);
6896956Smarcel	return (ret);
6966458Sdfr}
7066458Sdfr
71135581Smarcelstatic __inline uint64_t
72135581Smarcelia64_cmpxchg_acq_64(volatile uint64_t* p, uint64_t cmpval, uint64_t newval)
7366458Sdfr{
74135581Smarcel	uint64_t ret;
7596956Smarcel	IA64_CMPXCHG(8, acq, p, cmpval, newval, ret);
7696956Smarcel	return (ret);
7766458Sdfr}
7866458Sdfr
79135581Smarcelstatic __inline uint64_t
80135581Smarcelia64_cmpxchg_rel_64(volatile uint64_t* p, uint64_t cmpval, uint64_t newval)
8166458Sdfr{
82135581Smarcel	uint64_t ret;
8396956Smarcel	IA64_CMPXCHG(8, rel, p, cmpval, newval, ret);
8496956Smarcel	return (ret);
8566458Sdfr}
8666458Sdfr
/*
 * ATOMIC_STORE_LOAD(type, width, size)
 *
 * Expands to six functions wrapping the ia64 ordered memory
 * instructions for one operand width:
 *
 *   ia64_ld_acq_<width>(), atomic_load_acq_<width>(),
 *   atomic_load_acq_<type>()   -- load-acquire ("ld<size>.acq") of *p;
 *   ia64_st_rel_<width>(), atomic_store_rel_<width>(),
 *   atomic_store_rel_<type>()  -- store-release ("st<size>.rel") of v
 *                                 into *p.
 *
 * "width" is the operand width in bits and "size" the instruction's
 * size suffix in bytes.  The <type>-named variants are identical to
 * the <width>-named ones; they exist so callers can use the C type
 * name (char/short/int/long).
 */
#define	ATOMIC_STORE_LOAD(type, width, size)				\
	static __inline uint##width##_t					\
	ia64_ld_acq_##width(volatile uint##width##_t* p)		\
	{								\
		uint##width##_t v;					\
		__asm __volatile ("ld" size ".acq %0=%1" : "=r" (v)	\
		    : "m" (*p) : "memory");				\
		return (v);						\
	}								\
									\
	static __inline uint##width##_t					\
	atomic_load_acq_##width(volatile uint##width##_t* p)		\
	{								\
		uint##width##_t v;					\
		__asm __volatile ("ld" size ".acq %0=%1" : "=r" (v)	\
		    : "m" (*p) : "memory");				\
		return (v);						\
	}								\
									\
	static __inline uint##width##_t					\
	atomic_load_acq_##type(volatile uint##width##_t* p)		\
	{								\
		uint##width##_t v;					\
		__asm __volatile ("ld" size ".acq %0=%1" : "=r" (v)	\
		    : "m" (*p) : "memory");				\
		return (v);						\
	}								\
								       	\
	static __inline void						\
	ia64_st_rel_##width(volatile uint##width##_t* p, uint##width##_t v) \
	{								\
		__asm __volatile ("st" size ".rel %0=%1" : "=m" (*p)	\
		    : "r" (v) : "memory");				\
	}								\
									\
	static __inline void						\
	atomic_store_rel_##width(volatile uint##width##_t* p,		\
	    uint##width##_t v)						\
	{								\
		__asm __volatile ("st" size ".rel %0=%1" : "=m" (*p)	\
		    : "r" (v) : "memory");				\
	}								\
									\
	static __inline void						\
	atomic_store_rel_##type(volatile uint##width##_t* p,		\
	    uint##width##_t v)						\
	{								\
		__asm __volatile ("st" size ".rel %0=%1" : "=m" (*p)	\
		    : "r" (v) : "memory");				\
	}
13766458Sdfr
/* Instantiate the ordered load/store operations for each width. */
ATOMIC_STORE_LOAD(char,	 8,  "1")
ATOMIC_STORE_LOAD(short, 16, "2")
ATOMIC_STORE_LOAD(int,	 32, "4")
ATOMIC_STORE_LOAD(long,	 64, "8")

#undef ATOMIC_STORE_LOAD
14467351Sjhb
/*
 * Pointer-sized load-acquire/store-release, expressed in terms of the
 * 64-bit operations.  The macro arguments are parenthesized so that
 * expression arguments (e.g. "base + off") are cast and converted as a
 * whole rather than having the cast bind to only their first operand.
 */
#define	atomic_load_acq_ptr(p)		\
    ((void *)atomic_load_acq_64((volatile uint64_t *)(p)))

#define	atomic_store_rel_ptr(p, v)	\
    atomic_store_rel_64((volatile uint64_t *)(p), (uint64_t)(v))
150171662Smarcel
/*
 * IA64_ATOMIC(sz, type, name, width, op)
 *
 * Generates a pair of atomic read-modify-write operations built on
 * IA64_CMPXCHG: atomic_<name>_acq_<width>() and
 * atomic_<name>_rel_<width>().  Both atomically perform
 * "*p = *p op v" and return the value *p held beforehand.  The
 * compare-and-exchange is retried until it succeeds, i.e. until no
 * other CPU modified *p between the read of "old" and the exchange.
 */
#define	IA64_ATOMIC(sz, type, name, width, op)				\
	static __inline type						\
	atomic_##name##_acq_##width(volatile type *p, type v)		\
	{								\
		type old, ret;						\
		do {							\
			old = *p;					\
			IA64_CMPXCHG(sz, acq, p, old, old op v, ret);	\
		} while (ret != old);					\
		return (old);						\
	}								\
									\
	static __inline type						\
	atomic_##name##_rel_##width(volatile type *p, type v)		\
	{								\
		type old, ret;						\
		do {							\
			old = *p;					\
			IA64_CMPXCHG(sz, rel, p, old, old op v, ret);	\
		} while (ret != old);					\
		return (old);						\
	}
17366458Sdfr
/*
 * Instantiate atomic set (bitwise-or), clear (and-not), add and
 * subtract for all four widths, each in acquire and release flavors.
 */
IA64_ATOMIC(1, uint8_t,	 set, 8,  |)
IA64_ATOMIC(2, uint16_t, set, 16, |)
IA64_ATOMIC(4, uint32_t, set, 32, |)
IA64_ATOMIC(8, uint64_t, set, 64, |)

IA64_ATOMIC(1, uint8_t,  clear,	8,  &~)
IA64_ATOMIC(2, uint16_t, clear,	16, &~)
IA64_ATOMIC(4, uint32_t, clear,	32, &~)
IA64_ATOMIC(8, uint64_t, clear,	64, &~)

IA64_ATOMIC(1, uint8_t,  add, 8,  +)
IA64_ATOMIC(2, uint16_t, add, 16, +)
IA64_ATOMIC(4, uint32_t, add, 32, +)
IA64_ATOMIC(8, uint64_t, add, 64, +)

IA64_ATOMIC(1, uint8_t,  subtract, 8,  -)
IA64_ATOMIC(2, uint16_t, subtract, 16, -)
IA64_ATOMIC(4, uint32_t, subtract, 32, -)
IA64_ATOMIC(8, uint64_t, subtract, 64, -)

#undef IA64_ATOMIC
19566458Sdfr
/* The names without an ordering suffix default to acquire semantics. */
#define	atomic_set_8			atomic_set_acq_8
#define	atomic_clear_8			atomic_clear_acq_8
#define	atomic_add_8			atomic_add_acq_8
#define	atomic_subtract_8		atomic_subtract_acq_8

#define	atomic_set_16			atomic_set_acq_16
#define	atomic_clear_16			atomic_clear_acq_16
#define	atomic_add_16			atomic_add_acq_16
#define	atomic_subtract_16		atomic_subtract_acq_16

#define	atomic_set_32			atomic_set_acq_32
#define	atomic_clear_32			atomic_clear_acq_32
#define	atomic_add_32			atomic_add_acq_32
#define	atomic_subtract_32		atomic_subtract_acq_32

#define	atomic_set_64			atomic_set_acq_64
#define	atomic_clear_64			atomic_clear_acq_64
#define	atomic_add_64			atomic_add_acq_64
#define	atomic_subtract_64		atomic_subtract_acq_64
21566458Sdfr
/*
 * Map the C type names (char/short/int/long) onto the corresponding
 * fixed-width operations (8/16/32/64 bits respectively).
 */
#define	atomic_set_char			atomic_set_8
#define	atomic_clear_char		atomic_clear_8
#define	atomic_add_char			atomic_add_8
#define	atomic_subtract_char		atomic_subtract_8
#define	atomic_set_acq_char		atomic_set_acq_8
#define	atomic_clear_acq_char		atomic_clear_acq_8
#define	atomic_add_acq_char		atomic_add_acq_8
#define	atomic_subtract_acq_char	atomic_subtract_acq_8
#define	atomic_set_rel_char		atomic_set_rel_8
#define	atomic_clear_rel_char		atomic_clear_rel_8
#define	atomic_add_rel_char		atomic_add_rel_8
#define	atomic_subtract_rel_char	atomic_subtract_rel_8

#define	atomic_set_short		atomic_set_16
#define	atomic_clear_short		atomic_clear_16
#define	atomic_add_short		atomic_add_16
#define	atomic_subtract_short		atomic_subtract_16
#define	atomic_set_acq_short		atomic_set_acq_16
#define	atomic_clear_acq_short		atomic_clear_acq_16
#define	atomic_add_acq_short		atomic_add_acq_16
#define	atomic_subtract_acq_short	atomic_subtract_acq_16
#define	atomic_set_rel_short		atomic_set_rel_16
#define	atomic_clear_rel_short		atomic_clear_rel_16
#define	atomic_add_rel_short		atomic_add_rel_16
#define	atomic_subtract_rel_short	atomic_subtract_rel_16

#define	atomic_set_int			atomic_set_32
#define	atomic_clear_int		atomic_clear_32
#define	atomic_add_int			atomic_add_32
#define	atomic_subtract_int		atomic_subtract_32
#define	atomic_set_acq_int		atomic_set_acq_32
#define	atomic_clear_acq_int		atomic_clear_acq_32
#define	atomic_add_acq_int		atomic_add_acq_32
#define	atomic_subtract_acq_int		atomic_subtract_acq_32
#define	atomic_set_rel_int		atomic_set_rel_32
#define	atomic_clear_rel_int		atomic_clear_rel_32
#define	atomic_add_rel_int		atomic_add_rel_32
#define	atomic_subtract_rel_int		atomic_subtract_rel_32

#define	atomic_set_long			atomic_set_64
#define	atomic_clear_long		atomic_clear_64
#define	atomic_add_long			atomic_add_64
#define	atomic_subtract_long		atomic_subtract_64
#define	atomic_set_acq_long		atomic_set_acq_64
#define	atomic_clear_acq_long		atomic_clear_acq_64
#define	atomic_add_acq_long		atomic_add_acq_64
#define	atomic_subtract_acq_long	atomic_subtract_acq_64
#define	atomic_set_rel_long		atomic_set_rel_64
#define	atomic_clear_rel_long		atomic_clear_rel_64
#define	atomic_add_rel_long		atomic_add_rel_64
#define	atomic_subtract_rel_long	atomic_subtract_rel_64
26767351Sjhb
/*
 * Pointer variants of set/clear/add/subtract; pointers are treated as
 * 64-bit quantities here, so these map straight to the 64-bit
 * operations.
 *
 * XXX Needs casting.
 */
#define	atomic_set_ptr			atomic_set_64
#define	atomic_clear_ptr		atomic_clear_64
#define	atomic_add_ptr			atomic_add_64
#define	atomic_subtract_ptr		atomic_subtract_64
#define	atomic_set_acq_ptr		atomic_set_acq_64
#define	atomic_clear_acq_ptr		atomic_clear_acq_64
#define	atomic_add_acq_ptr		atomic_add_acq_64
#define	atomic_subtract_acq_ptr		atomic_subtract_acq_64
#define	atomic_set_rel_ptr		atomic_set_rel_64
#define	atomic_clear_rel_ptr		atomic_clear_rel_64
#define	atomic_add_rel_ptr		atomic_add_rel_64
#define	atomic_subtract_rel_ptr		atomic_subtract_rel_64

#undef IA64_CMPXCHG
/*
 * Atomically compare the value stored at *p with cmpval and, if the
 * two are equal, replace *p with newval (acquire semantics).  Returns
 * zero if the compare failed, nonzero otherwise.
 */
static __inline int
atomic_cmpset_acq_32(volatile uint32_t *p, uint32_t cmpval, uint32_t newval)
{
	uint32_t old;

	old = ia64_cmpxchg_acq_32(p, cmpval, newval);
	return (old == cmpval);
}
29466458Sdfr
/* Same as atomic_cmpset_acq_32(), but with release semantics. */
static __inline int
atomic_cmpset_rel_32(volatile uint32_t *p, uint32_t cmpval, uint32_t newval)
{
	uint32_t old;

	old = ia64_cmpxchg_rel_32(p, cmpval, newval);
	return (old == cmpval);
}
30067351Sjhb
/*
 * Atomically compare the value stored at *p with cmpval and, if the
 * two are equal, replace *p with newval (acquire semantics).  Returns
 * zero if the compare failed, nonzero otherwise.
 */
static __inline int
atomic_cmpset_acq_64(volatile uint64_t *p, uint64_t cmpval, uint64_t newval)
{
	uint64_t old;

	old = ia64_cmpxchg_acq_64(p, cmpval, newval);
	return (old == cmpval);
}
31166458Sdfr
/* Same as atomic_cmpset_acq_64(), but with release semantics. */
static __inline int
atomic_cmpset_rel_64(volatile uint64_t *p, uint64_t cmpval, uint64_t newval)
{
	uint64_t old;

	old = ia64_cmpxchg_rel_64(p, cmpval, newval);
	return (old == cmpval);
}
31767351Sjhb
/*
 * Unsuffixed cmpset names default to acquire semantics; the int/long
 * names map to the 32/64-bit operations.
 */
#define	atomic_cmpset_32		atomic_cmpset_acq_32
#define	atomic_cmpset_64		atomic_cmpset_acq_64
#define	atomic_cmpset_int		atomic_cmpset_32
#define	atomic_cmpset_long		atomic_cmpset_64
#define	atomic_cmpset_acq_int		atomic_cmpset_acq_32
#define	atomic_cmpset_rel_int		atomic_cmpset_rel_32
#define	atomic_cmpset_acq_long		atomic_cmpset_acq_64
#define	atomic_cmpset_rel_long		atomic_cmpset_rel_64
32666458Sdfr
/*
 * Pointer-sized cmpset in terms of the 64-bit operations.  The macro
 * arguments are parenthesized so that expression arguments (e.g.
 * "old | flag") are cast as a whole rather than having the cast bind
 * to only their first operand.
 */
#define	atomic_cmpset_acq_ptr(p, o, n)	\
    (atomic_cmpset_acq_64((volatile uint64_t *)(p), (uint64_t)(o),	\
	(uint64_t)(n)))

#define	atomic_cmpset_ptr		atomic_cmpset_acq_ptr

#define	atomic_cmpset_rel_ptr(p, o, n)	\
    (atomic_cmpset_rel_64((volatile uint64_t *)(p), (uint64_t)(o),	\
	(uint64_t)(n)))
334171662Smarcel
/*
 * Atomically read *p and replace it with zero; returns the value that
 * was read.
 */
static __inline uint32_t
atomic_readandclear_32(volatile uint32_t *p)
{
	uint32_t old;

	do {
		old = *p;
	} while (!atomic_cmpset_32(p, old, 0));
	return (old);
}
34466937Sdfr
/*
 * Atomically read *p and replace it with zero; returns the value that
 * was read.
 */
static __inline uint64_t
atomic_readandclear_64(volatile uint64_t *p)
{
	uint64_t old;

	do {
		old = *p;
	} while (!atomic_cmpset_64(p, old, 0));
	return (old);
}
35466937Sdfr
/* int/long/ptr readandclear map to the 32/64-bit operations. */
#define	atomic_readandclear_int		atomic_readandclear_32
#define	atomic_readandclear_long	atomic_readandclear_64
#define	atomic_readandclear_ptr		atomic_readandclear_64
35866937Sdfr
/*
 * Atomically add v to the integer pointed to by p and return the value
 * *p held before the addition.
 *
 * XXX: Should we use the fetchadd instruction here?
 */
static __inline uint32_t
atomic_fetchadd_32(volatile uint32_t *p, uint32_t v)
{
	uint32_t old;

	do {
		old = *p;
	} while (!atomic_cmpset_32(p, old, old + v));
	return (old);
}

#define	atomic_fetchadd_int		atomic_fetchadd_32
377150627Sjhb
378177276Spjdstatic __inline u_long
379177276Spjdatomic_fetchadd_long(volatile u_long *p, u_long v)
380177276Spjd{
381177276Spjd	u_long value;
382177276Spjd
383177276Spjd	do {
384177276Spjd		value = *p;
385177276Spjd	} while (!atomic_cmpset_64(p, value, value + v));
386177276Spjd	return (value);
387177276Spjd}
388177276Spjd
/*
 * <type> atomic_swap_<type>(volatile <type> *p, <type> v);
 *
 * Atomically exchange *p with v via the ia64 xchg instruction and
 * return the previous contents of *p.
 */

static __inline uint32_t
atomic_swap_32(volatile uint32_t *p, uint32_t v)
{
	uint32_t r;

	__asm __volatile ("xchg4 %0 = %3, %2;;" : "=r"(r), "=m"(*p) :
	    "r"(v), "m"(*p) : "memory");
	return (r);
}
402262004Smarcel
/* 64-bit variant of atomic_swap_32(). */
static __inline uint64_t
atomic_swap_64(volatile uint64_t *p, uint64_t v)
{
	uint64_t r;

	__asm __volatile ("xchg8 %0 = %3, %2;;" : "=r"(r), "=m"(*p) :
	    "r"(v), "m"(*p) : "memory");
	return (r);
}

/* int/long/ptr swap map to the 32/64-bit operations. */
#define	atomic_swap_int		atomic_swap_32
#define	atomic_swap_long	atomic_swap_64
#define	atomic_swap_ptr		atomic_swap_64
416262004Smarcel
41766458Sdfr#endif /* ! _MACHINE_ATOMIC_H_ */
418