atomic-v4.h revision 241080
/* $NetBSD: atomic.h,v 1.1 2002/10/19 12:22:34 bsh Exp $ */

/*-
 * Copyright (C) 2003-2004 Olivier Houchard
 * Copyright (C) 1994-1997 Mark Brinicombe
 * Copyright (C) 1994 Brini
 * All rights reserved.
 *
 * This code is derived from software written for Brini by Mark Brinicombe
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *	This product includes software developed by Brini.
 * 4. The name of Brini may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY BRINI ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL BRINI BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * $FreeBSD: head/sys/arm/include/atomic.h 241080 2012-10-01 05:12:17Z andrew $
 */

#ifndef	_MACHINE_ATOMIC_H_
#define	_MACHINE_ATOMIC_H_

#include <sys/types.h>

#ifndef _KERNEL
#include <machine/sysarch.h>
#else
#include <machine/cpuconf.h>
#endif

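/*
 * The generic barrier macros are no-ops in this revision; ordering for
 * the _acq/_rel operations below is supplied explicitly (by __do_dmb()
 * on ARMv6/v7) rather than through mb()/wmb()/rmb().
 */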
#define mb()
#define wmb()
#define rmb()

#ifndef I32_bit
#define I32_bit (1 << 7)        /* IRQ disable */
#endif
#ifndef F32_bit
#define F32_bit (1 << 6)        /* FIQ disable */
#endif

/*
 * It would be nice to use _HAVE_ARMv6_INSTRUCTIONS from machine/asm.h
 * here, but that header can't be included here because this is C
 * code.  I would like to move the _HAVE_ARMv6_INSTRUCTIONS definition
 * out of asm.h so it can be used in both asm and C code. - kientzle@
 */
#if defined (__ARM_ARCH_7__) || \
	defined (__ARM_ARCH_7A__) || \
	defined (__ARM_ARCH_6__) || \
	defined (__ARM_ARCH_6J__) || \
	defined (__ARM_ARCH_6K__) || \
	defined (__ARM_ARCH_6Z__) || \
	defined (__ARM_ARCH_6ZK__)
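/*
 * Full data memory barrier.  ARMv7 has the DMB instruction; on ARMv6
 * the equivalent is the CP15 "Data Memory Barrier" operation
 * (mcr p15, 0, <reg>, c7, c10, 5).
 */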
static __inline void
__do_dmb(void)
{

#if defined (__ARM_ARCH_7__) || defined (__ARM_ARCH_7A__)
	__asm __volatile("dmb" : : : "memory");
#else
	__asm __volatile("mcr p15, 0, r0, c7, c10, 5" : : : "memory");
#endif
}

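/*
 * Generators for the _acq/_rel flavours of an operation: the acquire
 * form performs the operation and then issues a barrier, so later
 * accesses cannot be reordered before it; the release form issues the
 * barrier first, so earlier accesses complete before the operation.
 */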
#define ATOMIC_ACQ_REL_LONG(NAME)					\
static __inline void							\
atomic_##NAME##_acq_long(__volatile u_long *p, u_long v)		\
{									\
	atomic_##NAME##_long(p, v);					\
	__do_dmb();							\
}									\
									\
static __inline void							\
atomic_##NAME##_rel_long(__volatile u_long *p, u_long v)		\
{									\
	__do_dmb();							\
	atomic_##NAME##_long(p, v);					\
}

#define	ATOMIC_ACQ_REL(NAME, WIDTH)					\
static __inline void							\
atomic_##NAME##_acq_##WIDTH(__volatile uint##WIDTH##_t *p, uint##WIDTH##_t v)\
{									\
	atomic_##NAME##_##WIDTH(p, v);					\
	__do_dmb();							\
}									\
									\
static __inline void							\
atomic_##NAME##_rel_##WIDTH(__volatile uint##WIDTH##_t *p, uint##WIDTH##_t v)\
{									\
	__do_dmb();							\
	atomic_##NAME##_##WIDTH(p, v);					\
}

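/*
 * The read-modify-write ops below all follow the same LDREX/STREX
 * retry loop: load-exclusive the old value, compute the new value,
 * store-exclusive, and retry from the load whenever the exclusive
 * monitor reports that the store failed (strex wrote non-zero status).
 * In C-like pseudocode:
 *
 *	do {
 *		old = load_exclusive(p);
 *	} while (store_exclusive(p, op(old, v)) != 0);
 */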
static __inline void
atomic_set_32(volatile uint32_t *address, uint32_t setmask)
{
	uint32_t tmp = 0, tmp2 = 0;

	__asm __volatile("1: ldrex %0, [%2]\n"
			    "orr %0, %0, %3\n"
			    "strex %1, %0, [%2]\n"
			    "cmp %1, #0\n"
			    "bne	1b\n"
			   : "=&r" (tmp), "+r" (tmp2)
			   , "+r" (address), "+r" (setmask) : : "cc", "memory");
}

static __inline void
atomic_set_long(volatile u_long *address, u_long setmask)
{
	u_long tmp = 0, tmp2 = 0;

	__asm __volatile("1: ldrex %0, [%2]\n"
			    "orr %0, %0, %3\n"
			    "strex %1, %0, [%2]\n"
			    "cmp %1, #0\n"
			    "bne	1b\n"
			   : "=&r" (tmp), "+r" (tmp2)
			   , "+r" (address), "+r" (setmask) : : "cc", "memory");
}

static __inline void
atomic_clear_32(volatile uint32_t *address, uint32_t clearmask)
{
	uint32_t tmp = 0, tmp2 = 0;

	__asm __volatile("1: ldrex %0, [%2]\n"
			    "bic %0, %0, %3\n"
			    "strex %1, %0, [%2]\n"
			    "cmp %1, #0\n"
			    "bne	1b\n"
			   : "=&r" (tmp), "+r" (tmp2)
			   , "+r" (address), "+r" (clearmask) : : "cc", "memory");
}

static __inline void
atomic_clear_long(volatile u_long *address, u_long clearmask)
{
	u_long tmp = 0, tmp2 = 0;

	__asm __volatile("1: ldrex %0, [%2]\n"
			    "bic %0, %0, %3\n"
			    "strex %1, %0, [%2]\n"
			    "cmp %1, #0\n"
			    "bne	1b\n"
			   : "=&r" (tmp), "+r" (tmp2)
			   , "+r" (address), "+r" (clearmask) : : "cc", "memory");
}

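/*
 * Compare-and-set: atomically replace *p with newval if and only if
 * *p == cmpval; returns non-zero on success and 0 on failure.  One
 * register carries both the strex status and the result, hence the
 * final "moveq %0, #1".
 */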
static __inline u_int32_t
atomic_cmpset_32(volatile u_int32_t *p, volatile u_int32_t cmpval, volatile u_int32_t newval)
{
	uint32_t ret;

	__asm __volatile("1: ldrex %0, [%1]\n"
			 "cmp %0, %2\n"
			 "movne %0, #0\n"
			 "bne 2f\n"
			 "strex %0, %3, [%1]\n"
			 "cmp %0, #0\n"
			 "bne	1b\n"
			 "moveq %0, #1\n"
			 "2:"
			 : "=&r" (ret)
			 , "+r" (p), "+r" (cmpval), "+r" (newval) : : "cc",
			 "memory");
	return (ret);
}

static __inline u_long
atomic_cmpset_long(volatile u_long *p, volatile u_long cmpval, volatile u_long newval)
{
	u_long ret;

	__asm __volatile("1: ldrex %0, [%1]\n"
			 "cmp %0, %2\n"
			 "movne %0, #0\n"
			 "bne 2f\n"
			 "strex %0, %3, [%1]\n"
			 "cmp %0, #0\n"
			 "bne	1b\n"
			 "moveq %0, #1\n"
			 "2:"
			 : "=&r" (ret)
			 , "+r" (p), "+r" (cmpval), "+r" (newval) : : "cc",
			 "memory");
	return (ret);
}

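/*
 * The acquire/release compare-and-set variants below bracket the plain
 * cmpset with a barrier on the appropriate side.  Illustrative use
 * (not part of this header): a minimal spinlock built on these
 * primitives.
 *
 *	static volatile uint32_t lock;
 *
 *	while (atomic_cmpset_acq_32(&lock, 0, 1) == 0)
 *		;				(spin until acquired)
 *	... critical section ...
 *	atomic_store_rel_32(&lock, 0);		(publish and release)
 */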
static __inline u_int32_t
atomic_cmpset_acq_32(volatile u_int32_t *p, volatile u_int32_t cmpval, volatile u_int32_t newval)
{
	u_int32_t ret = atomic_cmpset_32(p, cmpval, newval);

	__do_dmb();
	return (ret);
}

static __inline u_long
atomic_cmpset_acq_long(volatile u_long *p, volatile u_long cmpval, volatile u_long newval)
{
	u_long ret = atomic_cmpset_long(p, cmpval, newval);

	__do_dmb();
	return (ret);
}

static __inline u_int32_t
atomic_cmpset_rel_32(volatile u_int32_t *p, volatile u_int32_t cmpval, volatile u_int32_t newval)
{

	__do_dmb();
	return (atomic_cmpset_32(p, cmpval, newval));
}

static __inline u_long
atomic_cmpset_rel_long(volatile u_long *p, volatile u_long cmpval, volatile u_long newval)
{

	__do_dmb();
	return (atomic_cmpset_long(p, cmpval, newval));
}

static __inline void
atomic_add_32(volatile u_int32_t *p, u_int32_t val)
{
	uint32_t tmp = 0, tmp2 = 0;

	__asm __volatile("1: ldrex %0, [%2]\n"
			    "add %0, %0, %3\n"
			    "strex %1, %0, [%2]\n"
			    "cmp %1, #0\n"
			    "bne	1b\n"
			    : "=&r" (tmp), "+r" (tmp2)
			    , "+r" (p), "+r" (val) : : "cc", "memory");
}

static __inline void
atomic_add_long(volatile u_long *p, u_long val)
{
	u_long tmp = 0, tmp2 = 0;

	__asm __volatile("1: ldrex %0, [%2]\n"
			    "add %0, %0, %3\n"
			    "strex %1, %0, [%2]\n"
			    "cmp %1, #0\n"
			    "bne	1b\n"
			    : "=&r" (tmp), "+r" (tmp2)
			    , "+r" (p), "+r" (val) : : "cc", "memory");
}

static __inline void
atomic_subtract_32(volatile u_int32_t *p, u_int32_t val)
{
	uint32_t tmp = 0, tmp2 = 0;

	__asm __volatile("1: ldrex %0, [%2]\n"
			    "sub %0, %0, %3\n"
			    "strex %1, %0, [%2]\n"
			    "cmp %1, #0\n"
			    "bne	1b\n"
			    : "=&r" (tmp), "+r" (tmp2)
			    , "+r" (p), "+r" (val) : : "cc", "memory");
}

static __inline void
atomic_subtract_long(volatile u_long *p, u_long val)
{
	u_long tmp = 0, tmp2 = 0;

	__asm __volatile("1: ldrex %0, [%2]\n"
			    "sub %0, %0, %3\n"
			    "strex %1, %0, [%2]\n"
			    "cmp %1, #0\n"
			    "bne	1b\n"
			    : "=&r" (tmp), "+r" (tmp2)
			    , "+r" (p), "+r" (val) : : "cc", "memory");
}

ATOMIC_ACQ_REL(clear, 32)
ATOMIC_ACQ_REL(add, 32)
ATOMIC_ACQ_REL(subtract, 32)
ATOMIC_ACQ_REL(set, 32)
ATOMIC_ACQ_REL_LONG(clear)
ATOMIC_ACQ_REL_LONG(add)
ATOMIC_ACQ_REL_LONG(subtract)
ATOMIC_ACQ_REL_LONG(set)

#undef ATOMIC_ACQ_REL
#undef ATOMIC_ACQ_REL_LONG

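/*
 * fetchadd returns the value *p held before the addition; readandclear
 * atomically swaps in zero and returns the previous contents.
 * Illustrative use (not from this file): dropping a reference count,
 * where the caller that saw the count go 1 -> 0 frees the object.
 *
 *	if (atomic_fetchadd_32(&refcnt, -1) == 1)
 *		free_the_object();	(hypothetical helper)
 */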
static __inline uint32_t
atomic_fetchadd_32(volatile uint32_t *p, uint32_t val)
{
	uint32_t tmp = 0, tmp2 = 0, ret = 0;

	__asm __volatile("1: ldrex %0, [%3]\n"
			    "add %1, %0, %4\n"
			    "strex %2, %1, [%3]\n"
			    "cmp %2, #0\n"
			    "bne	1b\n"
			   : "+r" (ret), "=&r" (tmp), "+r" (tmp2)
			   , "+r" (p), "+r" (val) : : "cc", "memory");
	return (ret);
}

static __inline uint32_t
atomic_readandclear_32(volatile u_int32_t *p)
{
	uint32_t ret, tmp = 0, tmp2 = 0;

	__asm __volatile("1: ldrex %0, [%3]\n"
			 "mov %1, #0\n"
			 "strex %2, %1, [%3]\n"
			 "cmp %2, #0\n"
			 "bne 1b\n"
			 : "=r" (ret), "=&r" (tmp), "+r" (tmp2)
			 , "+r" (p) : : "cc", "memory");
	return (ret);
}

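/*
 * Acquire loads and release stores need no exclusive access: a plain
 * access paired with a barrier on the correct side gives the required
 * ordering (load then dmb for acquire, dmb then store for release).
 */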
static __inline uint32_t
atomic_load_acq_32(volatile uint32_t *p)
{
	uint32_t v;

	v = *p;
	__do_dmb();
	return (v);
}

static __inline void
atomic_store_rel_32(volatile uint32_t *p, uint32_t v)
{

	__do_dmb();
	*p = v;
}

static __inline u_long
atomic_fetchadd_long(volatile u_long *p, u_long val)
{
	u_long tmp = 0, tmp2 = 0, ret = 0;

	__asm __volatile("1: ldrex %0, [%3]\n"
			    "add %1, %0, %4\n"
			    "strex %2, %1, [%3]\n"
			    "cmp %2, #0\n"
			    "bne	1b\n"
			   : "+r" (ret), "=&r" (tmp), "+r" (tmp2)
			   , "+r" (p), "+r" (val) : : "cc", "memory");
	return (ret);
}

static __inline u_long
atomic_readandclear_long(volatile u_long *p)
{
	u_long ret, tmp = 0, tmp2 = 0;

	__asm __volatile("1: ldrex %0, [%3]\n"
			 "mov %1, #0\n"
			 "strex %2, %1, [%3]\n"
			 "cmp %2, #0\n"
			 "bne 1b\n"
			 : "=r" (ret), "=&r" (tmp), "+r" (tmp2)
			 , "+r" (p) : : "cc", "memory");
	return (ret);
}

static __inline u_long
atomic_load_acq_long(volatile u_long *p)
{
	u_long v;

	v = *p;
	__do_dmb();
	return (v);
}

static __inline void
atomic_store_rel_long(volatile u_long *p, u_long v)
{

	__do_dmb();
	*p = v;
}
#else /* < armv6 */

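/*
 * Pre-v6 ARM lacks ldrex/strex, so atomicity has to be approximated:
 * the kernel briefly disables IRQs and FIQs around a plain
 * read-modify-write, while userland (further below) uses restartable
 * atomic sequences.  Both approaches assume a uniprocessor, which is
 * also why the acquire/release variants can alias the plain ops.
 */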
#define __with_interrupts_disabled(expr)			\
	do {							\
		u_int cpsr_save, tmp;				\
								\
		__asm __volatile(				\
			"mrs  %0, cpsr;"			\
			"orr  %1, %0, %2;"			\
			"msr  cpsr_all, %1;"			\
			: "=r" (cpsr_save), "=r" (tmp)		\
			: "I" (I32_bit | F32_bit)		\
			: "cc" );				\
		(expr);						\
		__asm __volatile(				\
			"msr  cpsr_all, %0"			\
			: /* no output */			\
			: "r" (cpsr_save)			\
			: "cc" );				\
	} while(0)

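/*
 * SWP atomically exchanges a register with a word of memory; it is the
 * only true atomic primitive on pre-v6 ARM (the instruction was later
 * deprecated in ARMv6 in favour of ldrex/strex).
 */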
static __inline uint32_t
__swp(uint32_t val, volatile uint32_t *ptr)
{
	__asm __volatile("swp	%0, %2, [%3]"
	    : "=&r" (val), "=m" (*ptr)
	    : "r" (val), "r" (ptr), "m" (*ptr)
	    : "memory");
	return (val);
}

#ifdef _KERNEL
static __inline void
atomic_set_32(volatile uint32_t *address, uint32_t setmask)
{
	__with_interrupts_disabled(*address |= setmask);
}

static __inline void
atomic_clear_32(volatile uint32_t *address, uint32_t clearmask)
{
	__with_interrupts_disabled(*address &= ~clearmask);
}

static __inline u_int32_t
atomic_cmpset_32(volatile u_int32_t *p, volatile u_int32_t cmpval, volatile u_int32_t newval)
{
	int ret;

	__with_interrupts_disabled(
	{
		if (*p == cmpval) {
			*p = newval;
			ret = 1;
		} else {
			ret = 0;
		}
	});
	return (ret);
}

static __inline void
atomic_add_32(volatile u_int32_t *p, u_int32_t val)
{
	__with_interrupts_disabled(*p += val);
}

static __inline void
atomic_subtract_32(volatile u_int32_t *p, u_int32_t val)
{
	__with_interrupts_disabled(*p -= val);
}

static __inline uint32_t
atomic_fetchadd_32(volatile uint32_t *p, uint32_t v)
{
	uint32_t value;

	__with_interrupts_disabled(
	{
		value = *p;
		*p += v;
	});
	return (value);
}

#else /* !_KERNEL */

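/*
 * Userland cannot disable interrupts, so the pre-v6 user ops are built
 * as Restartable Atomic Sequences.  Each sequence stores its start and
 * end addresses at ARM_RAS_START (from <machine/sysarch.h>); if the
 * kernel interrupts a thread inside that window, it restarts the
 * thread at the sequence's first instruction instead of resuming
 * mid-sequence.  The trailing stores of 0 and 0xffffffff close the
 * window again, so the load/modify/store of the data word executes as
 * if atomic on a uniprocessor.
 */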
static __inline u_int32_t
atomic_cmpset_32(volatile u_int32_t *p, volatile u_int32_t cmpval, volatile u_int32_t newval)
{
	register int done, ras_start = ARM_RAS_START;

	__asm __volatile("1:\n"
	    "adr	%1, 1b\n"
	    "str	%1, [%0]\n"
	    "adr	%1, 2f\n"
	    "str	%1, [%0, #4]\n"
	    "ldr	%1, [%2]\n"
	    "cmp	%1, %3\n"
	    "streq	%4, [%2]\n"
	    "2:\n"
	    "mov	%1, #0\n"
	    "str	%1, [%0]\n"
	    "mov	%1, #0xffffffff\n"
	    "str	%1, [%0, #4]\n"
	    "moveq	%1, #1\n"
	    "movne	%1, #0\n"
	    : "+r" (ras_start), "=r" (done)
	    , "+r" (p), "+r" (cmpval), "+r" (newval) : : "cc", "memory");
	return (done);
}

524
525static __inline void
526atomic_add_32(volatile u_int32_t *p, u_int32_t val)
527{
528	int start, ras_start = ARM_RAS_START;
529
530	__asm __volatile("1:\n"
531	    "adr	%1, 1b\n"
532	    "str	%1, [%0]\n"
533	    "adr	%1, 2f\n"
534	    "str	%1, [%0, #4]\n"
535	    "ldr	%1, [%2]\n"
536	    "add	%1, %1, %3\n"
537	    "str	%1, [%2]\n"
538	    "2:\n"
539	    "mov	%1, #0\n"
540	    "str	%1, [%0]\n"
541	    "mov	%1, #0xffffffff\n"
542	    "str	%1, [%0, #4]\n"
543	    : "+r" (ras_start), "=r" (start), "+r" (p), "+r" (val)
544	    : : "memory");
545}
546
547static __inline void
548atomic_subtract_32(volatile u_int32_t *p, u_int32_t val)
549{
550	int start, ras_start = ARM_RAS_START;
551
552	__asm __volatile("1:\n"
553	    "adr	%1, 1b\n"
554	    "str	%1, [%0]\n"
555	    "adr	%1, 2f\n"
556	    "str	%1, [%0, #4]\n"
557	    "ldr	%1, [%2]\n"
558	    "sub	%1, %1, %3\n"
559	    "str	%1, [%2]\n"
560	    "2:\n"
561	    "mov	%1, #0\n"
562	    "str	%1, [%0]\n"
563	    "mov	%1, #0xffffffff\n"
564	    "str	%1, [%0, #4]\n"
565
566	    : "+r" (ras_start), "=r" (start), "+r" (p), "+r" (val)
567	    : : "memory");
568}
569
570static __inline void
571atomic_set_32(volatile uint32_t *address, uint32_t setmask)
572{
573	int start, ras_start = ARM_RAS_START;
574
575	__asm __volatile("1:\n"
576	    "adr	%1, 1b\n"
577	    "str	%1, [%0]\n"
578	    "adr	%1, 2f\n"
579	    "str	%1, [%0, #4]\n"
580	    "ldr	%1, [%2]\n"
581	    "orr	%1, %1, %3\n"
582	    "str	%1, [%2]\n"
583	    "2:\n"
584	    "mov	%1, #0\n"
585	    "str	%1, [%0]\n"
586	    "mov	%1, #0xffffffff\n"
587	    "str	%1, [%0, #4]\n"
588
589	    : "+r" (ras_start), "=r" (start), "+r" (address), "+r" (setmask)
590	    : : "memory");
591}
592
593static __inline void
594atomic_clear_32(volatile uint32_t *address, uint32_t clearmask)
595{
596	int start, ras_start = ARM_RAS_START;
597
598	__asm __volatile("1:\n"
599	    "adr	%1, 1b\n"
600	    "str	%1, [%0]\n"
601	    "adr	%1, 2f\n"
602	    "str	%1, [%0, #4]\n"
603	    "ldr	%1, [%2]\n"
604	    "bic	%1, %1, %3\n"
605	    "str	%1, [%2]\n"
606	    "2:\n"
607	    "mov	%1, #0\n"
608	    "str	%1, [%0]\n"
609	    "mov	%1, #0xffffffff\n"
610	    "str	%1, [%0, #4]\n"
611	    : "+r" (ras_start), "=r" (start), "+r" (address), "+r" (clearmask)
612	    : : "memory");
613
614}
615
616static __inline uint32_t
617atomic_fetchadd_32(volatile uint32_t *p, uint32_t v)
618{
619	uint32_t start, tmp, ras_start = ARM_RAS_START;
620
621	__asm __volatile("1:\n"
622	    "adr	%1, 1b\n"
623	    "str	%1, [%0]\n"
624	    "adr	%1, 2f\n"
625	    "str	%1, [%0, #4]\n"
626	    "ldr	%1, [%3]\n"
627	    "mov	%2, %1\n"
628	    "add	%2, %2, %4\n"
629	    "str	%2, [%3]\n"
630	    "2:\n"
631	    "mov	%2, #0\n"
632	    "str	%2, [%0]\n"
633	    "mov	%2, #0xffffffff\n"
634	    "str	%2, [%0, #4]\n"
635	    : "+r" (ras_start), "=r" (start), "=r" (tmp), "+r" (p), "+r" (v)
636	    : : "memory");
637	return (start);
638}
639
640#endif /* _KERNEL */
641
642
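/*
 * Both kernel and userland can use SWP for read-and-clear, and on a
 * uniprocessor the acquire/release variants collapse to the plain
 * operations, hence the aliases below.
 */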
static __inline uint32_t
atomic_readandclear_32(volatile u_int32_t *p)
{

	return (__swp(0, p));
}

#define atomic_cmpset_rel_32	atomic_cmpset_32
#define atomic_cmpset_acq_32	atomic_cmpset_32
#define atomic_set_rel_32	atomic_set_32
#define atomic_set_acq_32	atomic_set_32
#define atomic_clear_rel_32	atomic_clear_32
#define atomic_clear_acq_32	atomic_clear_32
#define atomic_add_rel_32	atomic_add_32
#define atomic_add_acq_32	atomic_add_32
#define atomic_subtract_rel_32	atomic_subtract_32
#define atomic_subtract_acq_32	atomic_subtract_32
#define atomic_store_rel_32	atomic_store_32
#define atomic_store_rel_long	atomic_store_long
#define atomic_load_acq_32	atomic_load_32
#define atomic_load_acq_long	atomic_load_long
#undef __with_interrupts_disabled

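/*
 * u_long is 32 bits wide on ARM, so the long variants simply delegate
 * to their 32-bit counterparts through a pointer cast.
 */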
static __inline void
atomic_add_long(volatile u_long *p, u_long v)
{

	atomic_add_32((volatile uint32_t *)p, v);
}

static __inline void
atomic_clear_long(volatile u_long *p, u_long v)
{

	atomic_clear_32((volatile uint32_t *)p, v);
}

static __inline int
atomic_cmpset_long(volatile u_long *dst, u_long old, u_long newe)
{

	return (atomic_cmpset_32((volatile uint32_t *)dst, old, newe));
}

static __inline u_long
atomic_fetchadd_long(volatile u_long *p, u_long v)
{

	return (atomic_fetchadd_32((volatile uint32_t *)p, v));
}

static __inline u_long
atomic_readandclear_long(volatile u_long *p)
{

	return (atomic_readandclear_32((volatile uint32_t *)p));
}

static __inline void
atomic_set_long(volatile u_long *p, u_long v)
{

	atomic_set_32((volatile uint32_t *)p, v);
}

static __inline void
atomic_subtract_long(volatile u_long *p, u_long v)
{

	atomic_subtract_32((volatile uint32_t *)p, v);
}

/* On pre-v6 (uniprocessor) the acquire/release variants are the plain ops. */
#define atomic_add_acq_long		atomic_add_long
#define atomic_add_rel_long		atomic_add_long
#define atomic_subtract_acq_long	atomic_subtract_long
#define atomic_subtract_rel_long	atomic_subtract_long
#define atomic_clear_acq_long		atomic_clear_long
#define atomic_clear_rel_long		atomic_clear_long
#define atomic_set_acq_long		atomic_set_long
#define atomic_set_rel_long		atomic_set_long
#define atomic_cmpset_acq_long		atomic_cmpset_long
#define atomic_cmpset_rel_long		atomic_cmpset_long

#endif /* Arch >= v6 */

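/*
 * Aligned 32-bit loads and stores are single-copy atomic on ARM, so
 * the unordered load/store operations are plain accesses on both the
 * pre-v6 and v6+ paths.
 */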
static __inline uint32_t
atomic_load_32(volatile uint32_t *v)
{

	return (*v);
}

static __inline void
atomic_store_32(volatile uint32_t *dst, uint32_t src)
{
	*dst = src;
}

static __inline u_long
atomic_load_long(volatile u_long *v)
{

	return (*v);
}

static __inline void
atomic_store_long(volatile u_long *dst, u_long src)
{
	*dst = src;
}

#define atomic_clear_ptr		atomic_clear_32
#define atomic_set_ptr			atomic_set_32
#define atomic_cmpset_ptr		atomic_cmpset_32
#define atomic_cmpset_rel_ptr		atomic_cmpset_rel_32
#define atomic_cmpset_acq_ptr		atomic_cmpset_acq_32
#define atomic_store_ptr		atomic_store_32
#define atomic_store_rel_ptr		atomic_store_rel_32

#define atomic_add_int			atomic_add_32
#define atomic_add_acq_int		atomic_add_acq_32
#define atomic_add_rel_int		atomic_add_rel_32
#define atomic_subtract_int		atomic_subtract_32
#define atomic_subtract_acq_int		atomic_subtract_acq_32
#define atomic_subtract_rel_int		atomic_subtract_rel_32
#define atomic_clear_int		atomic_clear_32
#define atomic_clear_acq_int		atomic_clear_acq_32
#define atomic_clear_rel_int		atomic_clear_rel_32
#define atomic_set_int			atomic_set_32
#define atomic_set_acq_int		atomic_set_acq_32
#define atomic_set_rel_int		atomic_set_rel_32
#define atomic_cmpset_int		atomic_cmpset_32
#define atomic_cmpset_acq_int		atomic_cmpset_acq_32
#define atomic_cmpset_rel_int		atomic_cmpset_rel_32
#define atomic_fetchadd_int		atomic_fetchadd_32
#define atomic_readandclear_int		atomic_readandclear_32
#define atomic_load_acq_int		atomic_load_acq_32
#define atomic_store_rel_int		atomic_store_rel_32

#endif /* _MACHINE_ATOMIC_H_ */