/* $NetBSD: atomic.h,v 1.1 2002/10/19 12:22:34 bsh Exp $ */

/*-
 * Copyright (C) 2003-2004 Olivier Houchard
 * Copyright (C) 1994-1997 Mark Brinicombe
 * Copyright (C) 1994 Brini
 * All rights reserved.
 *
 * This code is derived from software written for Brini by Mark Brinicombe
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *	This product includes software developed by Brini.
 * 4. The name of Brini may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY BRINI ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL BRINI BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * $FreeBSD: head/sys/arm/include/atomic.h 245475 2013-01-15 22:08:03Z cognet $
 */

#ifndef	_MACHINE_ATOMIC_H_
#define	_MACHINE_ATOMIC_H_

#include <sys/types.h>

#ifndef _KERNEL
#include <machine/sysarch.h>
#else
#include <machine/cpuconf.h>
#endif

#if defined (__ARM_ARCH_7__) || defined (__ARM_ARCH_7A__)
#define isb()  __asm __volatile("isb" : : : "memory")
#define dsb()  __asm __volatile("dsb" : : : "memory")
#define dmb()  __asm __volatile("dmb" : : : "memory")
#elif defined (__ARM_ARCH_6__) || defined (__ARM_ARCH_6J__) || \
  defined (__ARM_ARCH_6K__) || defined (__ARM_ARCH_6Z__) || \
  defined (__ARM_ARCH_6ZK__)
#define isb()  __asm __volatile("mcr p15, 0, %0, c7, c5, 4" : : "r" (0) : "memory")
#define dsb()  __asm __volatile("mcr p15, 0, %0, c7, c10, 4" : : "r" (0) : "memory")
#define dmb()  __asm __volatile("mcr p15, 0, %0, c7, c10, 5" : : "r" (0) : "memory")
#else
#define isb()
#define dsb()
#define dmb()
#endif

#define mb()   dmb()
#define wmb()  dmb()
#define rmb()  dmb()
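
/*
 * Illustrative sketch (added; names are hypothetical, not part of this
 * header): the classic producer/consumer pairing for these barriers.
 * A producer publishes data before setting a flag; a consumer orders
 * the flag read before the data read:
 *
 *	data = 42;			// producer
 *	wmb();				// data store before flag store
 *	flag = 1;
 *
 *	while (flag == 0)		// consumer
 *		continue;
 *	rmb();				// flag load before data load
 *	use(data);
 */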

#ifndef I32_bit
#define I32_bit (1 << 7)        /* IRQ disable */
#endif
#ifndef F32_bit
#define F32_bit (1 << 6)        /* FIQ disable */
#endif

/*
 * It would be nice to use _HAVE_ARMv6_INSTRUCTIONS from machine/asm.h
 * here, but that header can't be included here because this is C
 * code.  I would like to move the _HAVE_ARMv6_INSTRUCTIONS definition
 * out of asm.h so it can be used in both asm and C code. - kientzle@
 */
#if defined (__ARM_ARCH_7__) || \
	defined (__ARM_ARCH_7A__) || \
	defined (__ARM_ARCH_6__) || \
	defined (__ARM_ARCH_6J__) || \
	defined (__ARM_ARCH_6K__) || \
	defined (__ARM_ARCH_6Z__) || \
	defined (__ARM_ARCH_6ZK__)
static __inline void
__do_dmb(void)
{

#if defined (__ARM_ARCH_7__) || defined (__ARM_ARCH_7A__)
	__asm __volatile("dmb" : : : "memory");
#else
	__asm __volatile("mcr p15, 0, r0, c7, c10, 5" : : : "memory");
#endif
}

#define ATOMIC_ACQ_REL_LONG(NAME)					\
static __inline void							\
atomic_##NAME##_acq_long(__volatile u_long *p, u_long v)		\
{									\
	atomic_##NAME##_long(p, v);					\
	__do_dmb();							\
}									\
									\
static __inline void							\
atomic_##NAME##_rel_long(__volatile u_long *p, u_long v)		\
{									\
	__do_dmb();							\
	atomic_##NAME##_long(p, v);					\
}

#define	ATOMIC_ACQ_REL(NAME, WIDTH)					\
static __inline void							\
atomic_##NAME##_acq_##WIDTH(__volatile uint##WIDTH##_t *p, uint##WIDTH##_t v)\
{									\
	atomic_##NAME##_##WIDTH(p, v);					\
	__do_dmb();							\
}									\
									\
static __inline void							\
atomic_##NAME##_rel_##WIDTH(__volatile uint##WIDTH##_t *p, uint##WIDTH##_t v)\
{									\
	__do_dmb();							\
	atomic_##NAME##_##WIDTH(p, v);					\
}
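
/*
 * For reference (added): ATOMIC_ACQ_REL(set, 32) expands to roughly the
 * following pair, i.e. acquire variants issue the barrier after the
 * operation and release variants issue it before:
 *
 *	static __inline void
 *	atomic_set_acq_32(__volatile uint32_t *p, uint32_t v)
 *	{
 *		atomic_set_32(p, v);
 *		__do_dmb();
 *	}
 *
 *	static __inline void
 *	atomic_set_rel_32(__volatile uint32_t *p, uint32_t v)
 *	{
 *		__do_dmb();
 *		atomic_set_32(p, v);
 *	}
 */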

static __inline void
atomic_set_32(volatile uint32_t *address, uint32_t setmask)
{
	uint32_t tmp = 0, tmp2 = 0;

	__asm __volatile("1: ldrex %0, [%2]\n"
			    "orr %0, %0, %3\n"
			    "strex %1, %0, [%2]\n"
			    "cmp %1, #0\n"
			    "bne	1b\n"
			   : "=&r" (tmp), "+r" (tmp2)
			   , "+r" (address), "+r" (setmask) : : "cc", "memory");
}
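
/*
 * Illustrative usage (added; names are hypothetical): the ldrex/strex
 * loop above retries until the store-exclusive succeeds, so concurrent
 * updaters never lose each other's bits.  E.g. to set a "dirty" flag:
 *
 *	#define	PG_DIRTY	0x01
 *	volatile uint32_t pg_flags;
 *	...
 *	atomic_set_32(&pg_flags, PG_DIRTY);
 */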

static __inline void
atomic_set_long(volatile u_long *address, u_long setmask)
{
	u_long tmp = 0, tmp2 = 0;

	__asm __volatile("1: ldrex %0, [%2]\n"
			    "orr %0, %0, %3\n"
			    "strex %1, %0, [%2]\n"
			    "cmp %1, #0\n"
			    "bne	1b\n"
			   : "=&r" (tmp), "+r" (tmp2)
			   , "+r" (address), "+r" (setmask) : : "cc", "memory");
}

static __inline void
atomic_clear_32(volatile uint32_t *address, uint32_t clearmask)
{
	uint32_t tmp = 0, tmp2 = 0;

	__asm __volatile("1: ldrex %0, [%2]\n"
			    "bic %0, %0, %3\n"
			    "strex %1, %0, [%2]\n"
			    "cmp %1, #0\n"
			    "bne	1b\n"
			   : "=&r" (tmp), "+r" (tmp2)
			   , "+r" (address), "+r" (clearmask) : : "cc", "memory");
}

static __inline void
atomic_clear_long(volatile u_long *address, u_long clearmask)
{
	u_long tmp = 0, tmp2 = 0;

	__asm __volatile("1: ldrex %0, [%2]\n"
			    "bic %0, %0, %3\n"
			    "strex %1, %0, [%2]\n"
			    "cmp %1, #0\n"
			    "bne	1b\n"
			   : "=&r" (tmp), "+r" (tmp2)
			   , "+r" (address), "+r" (clearmask) : : "cc", "memory");
}

static __inline uint32_t
atomic_cmpset_32(volatile uint32_t *p, volatile uint32_t cmpval, volatile uint32_t newval)
{
	uint32_t ret;

	__asm __volatile("1: ldrex %0, [%1]\n"
			 "cmp %0, %2\n"
			 "movne %0, #0\n"
			 "bne 2f\n"
			 "strex %0, %3, [%1]\n"
			 "cmp %0, #0\n"
			 "bne	1b\n"
			 "moveq %0, #1\n"
			 "2:"
			 : "=&r" (ret)
			 , "+r" (p), "+r" (cmpval), "+r" (newval) : : "cc",
			 "memory");
	return (ret);
}

static __inline u_long
atomic_cmpset_long(volatile u_long *p, volatile u_long cmpval, volatile u_long newval)
{
	u_long ret;

	__asm __volatile("1: ldrex %0, [%1]\n"
			 "cmp %0, %2\n"
			 "movne %0, #0\n"
			 "bne 2f\n"
			 "strex %0, %3, [%1]\n"
			 "cmp %0, #0\n"
			 "bne	1b\n"
			 "moveq %0, #1\n"
			 "2:"
			 : "=&r" (ret)
			 , "+r" (p), "+r" (cmpval), "+r" (newval) : : "cc",
			 "memory");
	return (ret);
}
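
/*
 * Illustrative sketch (added; names are hypothetical): cmpset returns
 * nonzero iff *p matched cmpval and was replaced by newval, which is
 * the building block for locks.  A minimal spinlock, assuming a lock
 * word initialized to 0:
 *
 *	volatile uint32_t lock = 0;
 *
 *	while (atomic_cmpset_acq_32(&lock, 0, 1) == 0)
 *		continue;		// spin until 0 -> 1 succeeds
 *	// ... critical section ...
 *	atomic_store_rel_32(&lock, 0);	// publish and release
 */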

static __inline uint32_t
atomic_cmpset_acq_32(volatile uint32_t *p, volatile uint32_t cmpval, volatile uint32_t newval)
{
	uint32_t ret = atomic_cmpset_32(p, cmpval, newval);

	__do_dmb();
	return (ret);
}

static __inline u_long
atomic_cmpset_acq_long(volatile u_long *p, volatile u_long cmpval, volatile u_long newval)
{
	u_long ret = atomic_cmpset_long(p, cmpval, newval);

	__do_dmb();
	return (ret);
}

static __inline uint32_t
atomic_cmpset_rel_32(volatile uint32_t *p, volatile uint32_t cmpval, volatile uint32_t newval)
{

	__do_dmb();
	return (atomic_cmpset_32(p, cmpval, newval));
}

static __inline u_long
atomic_cmpset_rel_long(volatile u_long *p, volatile u_long cmpval, volatile u_long newval)
{

	__do_dmb();
	return (atomic_cmpset_long(p, cmpval, newval));
}

static __inline void
atomic_add_32(volatile uint32_t *p, uint32_t val)
{
	uint32_t tmp = 0, tmp2 = 0;

	__asm __volatile("1: ldrex %0, [%2]\n"
			    "add %0, %0, %3\n"
			    "strex %1, %0, [%2]\n"
			    "cmp %1, #0\n"
			    "bne	1b\n"
			    : "=&r" (tmp), "+r" (tmp2)
			    , "+r" (p), "+r" (val) : : "cc", "memory");
}

static __inline void
atomic_add_long(volatile u_long *p, u_long val)
{
	u_long tmp = 0, tmp2 = 0;

	__asm __volatile("1: ldrex %0, [%2]\n"
			    "add %0, %0, %3\n"
			    "strex %1, %0, [%2]\n"
			    "cmp %1, #0\n"
			    "bne	1b\n"
			    : "=&r" (tmp), "+r" (tmp2)
			    , "+r" (p), "+r" (val) : : "cc", "memory");
}

static __inline void
atomic_subtract_32(volatile uint32_t *p, uint32_t val)
{
	uint32_t tmp = 0, tmp2 = 0;

	__asm __volatile("1: ldrex %0, [%2]\n"
			    "sub %0, %0, %3\n"
			    "strex %1, %0, [%2]\n"
			    "cmp %1, #0\n"
			    "bne	1b\n"
			    : "=&r" (tmp), "+r" (tmp2)
			    , "+r" (p), "+r" (val) : : "cc", "memory");
}

static __inline void
atomic_subtract_long(volatile u_long *p, u_long val)
{
	u_long tmp = 0, tmp2 = 0;

	__asm __volatile("1: ldrex %0, [%2]\n"
			    "sub %0, %0, %3\n"
			    "strex %1, %0, [%2]\n"
			    "cmp %1, #0\n"
			    "bne	1b\n"
			    : "=&r" (tmp), "+r" (tmp2)
			    , "+r" (p), "+r" (val) : : "cc", "memory");
}

ATOMIC_ACQ_REL(clear, 32)
ATOMIC_ACQ_REL(add, 32)
ATOMIC_ACQ_REL(subtract, 32)
ATOMIC_ACQ_REL(set, 32)
ATOMIC_ACQ_REL_LONG(clear)
ATOMIC_ACQ_REL_LONG(add)
ATOMIC_ACQ_REL_LONG(subtract)
ATOMIC_ACQ_REL_LONG(set)

#undef ATOMIC_ACQ_REL
#undef ATOMIC_ACQ_REL_LONG

static __inline uint32_t
atomic_fetchadd_32(volatile uint32_t *p, uint32_t val)
{
	uint32_t tmp = 0, tmp2 = 0, ret = 0;

	__asm __volatile("1: ldrex %0, [%3]\n"
			    "add %1, %0, %4\n"
			    "strex %2, %1, [%3]\n"
			    "cmp %2, #0\n"
			    "bne	1b\n"
			   : "+r" (ret), "=&r" (tmp), "+r" (tmp2)
			   , "+r" (p), "+r" (val) : : "cc", "memory");
	return (ret);
}
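
/*
 * Illustrative sketch (added; names are hypothetical): fetchadd returns
 * the value the word held *before* the addition, so every caller gets a
 * unique pre-increment value.  E.g. a ticket dispenser:
 *
 *	volatile uint32_t next_ticket = 0;
 *
 *	uint32_t my_ticket = atomic_fetchadd_32(&next_ticket, 1);
 */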

static __inline uint32_t
atomic_readandclear_32(volatile uint32_t *p)
{
	uint32_t ret, tmp = 0, tmp2 = 0;

	__asm __volatile("1: ldrex %0, [%3]\n"
			 "mov %1, #0\n"
			 "strex %2, %1, [%3]\n"
			 "cmp %2, #0\n"
			 "bne 1b\n"
			 : "=&r" (ret), "=&r" (tmp), "+r" (tmp2)
			 , "+r" (p) : : "cc", "memory");
	return (ret);
}
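
/*
 * Illustrative usage (added; names are hypothetical): read-and-clear
 * atomically drains a word, e.g. a mask of pending events that other
 * contexts OR bits into with atomic_set_32:
 *
 *	volatile uint32_t pending;
 *	...
 *	uint32_t events = atomic_readandclear_32(&pending);
 *	// handle each bit set in 'events'; new arrivals are not lost
 */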

static __inline uint32_t
atomic_load_acq_32(volatile uint32_t *p)
{
	uint32_t v;

	v = *p;
	__do_dmb();
	return (v);
}

static __inline void
atomic_store_rel_32(volatile uint32_t *p, uint32_t v)
{

	__do_dmb();
	*p = v;
}
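
/*
 * Illustrative sketch (added; names are hypothetical): load-acquire and
 * store-release pair up across threads.  The writer's barrier orders the
 * data store before the flag store; the reader's barrier orders the flag
 * load before the data load:
 *
 *	msg = compute();			// writer
 *	atomic_store_rel_32(&ready, 1);
 *
 *	while (atomic_load_acq_32(&ready) == 0)	// reader
 *		continue;
 *	consume(msg);
 */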

static __inline u_long
atomic_fetchadd_long(volatile u_long *p, u_long val)
{
	u_long tmp = 0, tmp2 = 0, ret = 0;

	__asm __volatile("1: ldrex %0, [%3]\n"
			    "add %1, %0, %4\n"
			    "strex %2, %1, [%3]\n"
			    "cmp %2, #0\n"
			    "bne	1b\n"
			   : "+r" (ret), "=&r" (tmp), "+r" (tmp2)
			   , "+r" (p), "+r" (val) : : "cc", "memory");
	return (ret);
}

static __inline u_long
atomic_readandclear_long(volatile u_long *p)
{
	u_long ret, tmp = 0, tmp2 = 0;

	__asm __volatile("1: ldrex %0, [%3]\n"
			 "mov %1, #0\n"
			 "strex %2, %1, [%3]\n"
			 "cmp %2, #0\n"
			 "bne 1b\n"
			 : "=&r" (ret), "=&r" (tmp), "+r" (tmp2)
			 , "+r" (p) : : "cc", "memory");
	return (ret);
}

static __inline u_long
atomic_load_acq_long(volatile u_long *p)
{
	u_long v;

	v = *p;
	__do_dmb();
	return (v);
}

static __inline void
atomic_store_rel_long(volatile u_long *p, u_long v)
{

	__do_dmb();
	*p = v;
}
#else /* < armv6 */

#define __with_interrupts_disabled(expr) \
	do {						\
		u_int cpsr_save, tmp;			\
							\
		__asm __volatile(			\
			"mrs  %0, cpsr;"		\
			"orr  %1, %0, %2;"		\
			"msr  cpsr_all, %1;"		\
			: "=r" (cpsr_save), "=r" (tmp)	\
			: "I" (I32_bit | F32_bit)	\
			: "cc" );			\
		(expr);					\
		__asm __volatile(			\
			"msr  cpsr_all, %0"		\
			: /* no output */		\
			: "r" (cpsr_save)		\
			: "cc" );			\
	} while (0)

static __inline uint32_t
__swp(uint32_t val, volatile uint32_t *ptr)
{
	__asm __volatile("swp	%0, %2, [%3]"
	    : "=&r" (val), "=m" (*ptr)
	    : "r" (val), "r" (ptr), "m" (*ptr)
	    : "memory");
	return (val);
}

#ifdef _KERNEL
static __inline void
atomic_set_32(volatile uint32_t *address, uint32_t setmask)
{
	__with_interrupts_disabled(*address |= setmask);
}

static __inline void
atomic_clear_32(volatile uint32_t *address, uint32_t clearmask)
{
	__with_interrupts_disabled(*address &= ~clearmask);
}

static __inline uint32_t
atomic_cmpset_32(volatile uint32_t *p, volatile uint32_t cmpval, volatile uint32_t newval)
{
	int ret;

	__with_interrupts_disabled(
	{
		if (*p == cmpval) {
			*p = newval;
			ret = 1;
		} else {
			ret = 0;
		}
	});
	return (ret);
}

static __inline void
atomic_add_32(volatile uint32_t *p, uint32_t val)
{
	__with_interrupts_disabled(*p += val);
}

static __inline void
atomic_subtract_32(volatile uint32_t *p, uint32_t val)
{
	__with_interrupts_disabled(*p -= val);
}

static __inline uint32_t
atomic_fetchadd_32(volatile uint32_t *p, uint32_t v)
{
	uint32_t value;

	__with_interrupts_disabled(
	{
		value = *p;
		*p += v;
	});
	return (value);
}

#else /* !_KERNEL */

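/*
 * Explanatory note (added): userland on pre-v6 ARM has no usable atomic
 * read-modify-write instructions besides swp, so these routines use a
 * Restartable Atomic Sequence (RAS).  Each operation stores the
 * addresses of its start ("1:") and end ("2:") labels at ARM_RAS_START
 * and ARM_RAS_START + 4 before the load/modify/store, then deregisters
 * by storing 0 and 0xffffffff afterwards.  My understanding is that if
 * the kernel preempts a thread whose PC lies inside the registered
 * range, it rewinds the PC to the sequence start, so the
 * load/modify/store executes as if atomic on a uniprocessor.
 */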

static __inline uint32_t
atomic_cmpset_32(volatile uint32_t *p, volatile uint32_t cmpval, volatile uint32_t newval)
{
	register int done, ras_start = ARM_RAS_START;

	__asm __volatile("1:\n"
	    "adr	%1, 1b\n"
	    "str	%1, [%0]\n"
	    "adr	%1, 2f\n"
	    "str	%1, [%0, #4]\n"
	    "ldr	%1, [%2]\n"
	    "cmp	%1, %3\n"
	    "streq	%4, [%2]\n"
	    "2:\n"
	    "mov	%1, #0\n"
	    "str	%1, [%0]\n"
	    "mov	%1, #0xffffffff\n"
	    "str	%1, [%0, #4]\n"
	    "moveq	%1, #1\n"
	    "movne	%1, #0\n"
	    : "+r" (ras_start), "=r" (done)
	    , "+r" (p), "+r" (cmpval), "+r" (newval) : : "cc", "memory");
	return (done);
}

static __inline void
atomic_add_32(volatile uint32_t *p, uint32_t val)
{
	int start, ras_start = ARM_RAS_START;

	__asm __volatile("1:\n"
	    "adr	%1, 1b\n"
	    "str	%1, [%0]\n"
	    "adr	%1, 2f\n"
	    "str	%1, [%0, #4]\n"
	    "ldr	%1, [%2]\n"
	    "add	%1, %1, %3\n"
	    "str	%1, [%2]\n"
	    "2:\n"
	    "mov	%1, #0\n"
	    "str	%1, [%0]\n"
	    "mov	%1, #0xffffffff\n"
	    "str	%1, [%0, #4]\n"
	    : "+r" (ras_start), "=r" (start), "+r" (p), "+r" (val)
	    : : "memory");
}

static __inline void
atomic_subtract_32(volatile uint32_t *p, uint32_t val)
{
	int start, ras_start = ARM_RAS_START;

	__asm __volatile("1:\n"
	    "adr	%1, 1b\n"
	    "str	%1, [%0]\n"
	    "adr	%1, 2f\n"
	    "str	%1, [%0, #4]\n"
	    "ldr	%1, [%2]\n"
	    "sub	%1, %1, %3\n"
	    "str	%1, [%2]\n"
	    "2:\n"
	    "mov	%1, #0\n"
	    "str	%1, [%0]\n"
	    "mov	%1, #0xffffffff\n"
	    "str	%1, [%0, #4]\n"
	    : "+r" (ras_start), "=r" (start), "+r" (p), "+r" (val)
	    : : "memory");
}

static __inline void
atomic_set_32(volatile uint32_t *address, uint32_t setmask)
{
	int start, ras_start = ARM_RAS_START;

	__asm __volatile("1:\n"
	    "adr	%1, 1b\n"
	    "str	%1, [%0]\n"
	    "adr	%1, 2f\n"
	    "str	%1, [%0, #4]\n"
	    "ldr	%1, [%2]\n"
	    "orr	%1, %1, %3\n"
	    "str	%1, [%2]\n"
	    "2:\n"
	    "mov	%1, #0\n"
	    "str	%1, [%0]\n"
	    "mov	%1, #0xffffffff\n"
	    "str	%1, [%0, #4]\n"
	    : "+r" (ras_start), "=r" (start), "+r" (address), "+r" (setmask)
	    : : "memory");
}

static __inline void
atomic_clear_32(volatile uint32_t *address, uint32_t clearmask)
{
	int start, ras_start = ARM_RAS_START;

	__asm __volatile("1:\n"
	    "adr	%1, 1b\n"
	    "str	%1, [%0]\n"
	    "adr	%1, 2f\n"
	    "str	%1, [%0, #4]\n"
	    "ldr	%1, [%2]\n"
	    "bic	%1, %1, %3\n"
	    "str	%1, [%2]\n"
	    "2:\n"
	    "mov	%1, #0\n"
	    "str	%1, [%0]\n"
	    "mov	%1, #0xffffffff\n"
	    "str	%1, [%0, #4]\n"
	    : "+r" (ras_start), "=r" (start), "+r" (address), "+r" (clearmask)
	    : : "memory");
}

static __inline uint32_t
atomic_fetchadd_32(volatile uint32_t *p, uint32_t v)
{
	uint32_t start, tmp, ras_start = ARM_RAS_START;

	__asm __volatile("1:\n"
	    "adr	%1, 1b\n"
	    "str	%1, [%0]\n"
	    "adr	%1, 2f\n"
	    "str	%1, [%0, #4]\n"
	    "ldr	%1, [%3]\n"
	    "mov	%2, %1\n"
	    "add	%2, %2, %4\n"
	    "str	%2, [%3]\n"
	    "2:\n"
	    "mov	%2, #0\n"
	    "str	%2, [%0]\n"
	    "mov	%2, #0xffffffff\n"
	    "str	%2, [%0, #4]\n"
	    : "+r" (ras_start), "=r" (start), "=r" (tmp), "+r" (p), "+r" (v)
	    : : "memory");
	return (start);
}

#endif /* _KERNEL */

static __inline uint32_t
atomic_readandclear_32(volatile uint32_t *p)
{

	return (__swp(0, p));
}
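
/*
 * Note (added): swp exchanges a register with memory in a single atomic
 * bus transaction, so swapping in 0 reads the old value and clears the
 * word in one step, with no RAS or interrupt masking needed.
 */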

#define atomic_cmpset_rel_32	atomic_cmpset_32
#define atomic_cmpset_acq_32	atomic_cmpset_32
#define atomic_set_rel_32	atomic_set_32
#define atomic_set_acq_32	atomic_set_32
#define atomic_clear_rel_32	atomic_clear_32
#define atomic_clear_acq_32	atomic_clear_32
#define atomic_add_rel_32	atomic_add_32
#define atomic_add_acq_32	atomic_add_32
#define atomic_subtract_rel_32	atomic_subtract_32
#define atomic_subtract_acq_32	atomic_subtract_32
#define atomic_store_rel_32	atomic_store_32
#define atomic_store_rel_long	atomic_store_long
#define atomic_load_acq_32	atomic_load_32
#define atomic_load_acq_long	atomic_load_long
#define atomic_add_acq_long		atomic_add_long
#define atomic_add_rel_long		atomic_add_long
#define atomic_subtract_acq_long	atomic_subtract_long
#define atomic_subtract_rel_long	atomic_subtract_long
#define atomic_clear_acq_long		atomic_clear_long
#define atomic_clear_rel_long		atomic_clear_long
#define atomic_set_acq_long		atomic_set_long
#define atomic_set_rel_long		atomic_set_long
#define atomic_cmpset_acq_long		atomic_cmpset_long
#define atomic_cmpset_rel_long		atomic_cmpset_long
#undef __with_interrupts_disabled

static __inline void
atomic_add_long(volatile u_long *p, u_long v)
{

	atomic_add_32((volatile uint32_t *)p, v);
}

static __inline void
atomic_clear_long(volatile u_long *p, u_long v)
{

	atomic_clear_32((volatile uint32_t *)p, v);
}

static __inline int
atomic_cmpset_long(volatile u_long *dst, u_long old, u_long newe)
{

	return (atomic_cmpset_32((volatile uint32_t *)dst, old, newe));
}

static __inline u_long
atomic_fetchadd_long(volatile u_long *p, u_long v)
{

	return (atomic_fetchadd_32((volatile uint32_t *)p, v));
}

static __inline u_long
atomic_readandclear_long(volatile u_long *p)
{

	return (atomic_readandclear_32((volatile uint32_t *)p));
}

static __inline void
atomic_set_long(volatile u_long *p, u_long v)
{

	atomic_set_32((volatile uint32_t *)p, v);
}

static __inline void
atomic_subtract_long(volatile u_long *p, u_long v)
{

	atomic_subtract_32((volatile uint32_t *)p, v);
}

#endif /* Arch >= v6 */

static __inline uint32_t
atomic_load_32(volatile uint32_t *v)
{

	return (*v);
}

static __inline void
atomic_store_32(volatile uint32_t *dst, uint32_t src)
{
	*dst = src;
}

static __inline u_long
atomic_load_long(volatile u_long *v)
{

	return (*v);
}

static __inline void
atomic_store_long(volatile u_long *dst, u_long src)
{
	*dst = src;
}

#define atomic_clear_ptr		atomic_clear_32
#define atomic_set_ptr			atomic_set_32
#define atomic_cmpset_ptr		atomic_cmpset_32
#define atomic_cmpset_rel_ptr		atomic_cmpset_rel_32
#define atomic_cmpset_acq_ptr		atomic_cmpset_acq_32
#define atomic_store_ptr		atomic_store_32
#define atomic_store_rel_ptr		atomic_store_rel_32

#define atomic_add_int			atomic_add_32
#define atomic_add_acq_int		atomic_add_acq_32
#define atomic_add_rel_int		atomic_add_rel_32
#define atomic_subtract_int		atomic_subtract_32
#define atomic_subtract_acq_int		atomic_subtract_acq_32
#define atomic_subtract_rel_int		atomic_subtract_rel_32
#define atomic_clear_int		atomic_clear_32
#define atomic_clear_acq_int		atomic_clear_acq_32
#define atomic_clear_rel_int		atomic_clear_rel_32
#define atomic_set_int			atomic_set_32
#define atomic_set_acq_int		atomic_set_acq_32
#define atomic_set_rel_int		atomic_set_rel_32
#define atomic_cmpset_int		atomic_cmpset_32
#define atomic_cmpset_acq_int		atomic_cmpset_acq_32
#define atomic_cmpset_rel_int		atomic_cmpset_rel_32
#define atomic_fetchadd_int		atomic_fetchadd_32
#define atomic_readandclear_int		atomic_readandclear_32
#define atomic_load_acq_int		atomic_load_acq_32
#define atomic_store_rel_int		atomic_store_rel_32

#endif /* _MACHINE_ATOMIC_H_ */