/* $NetBSD: atomic.h,v 1.1 2002/10/19 12:22:34 bsh Exp $ */

/*-
 * Copyright (C) 2003-2004 Olivier Houchard
 * Copyright (C) 1994-1997 Mark Brinicombe
 * Copyright (C) 1994 Brini
 * All rights reserved.
 *
 * This code is derived from software written for Brini by Mark Brinicombe
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *	This product includes software developed by Brini.
 * 4. The name of Brini may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY BRINI ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL BRINI BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * $FreeBSD: head/sys/arm/include/atomic.h 239268 2012-08-15 03:03:03Z gonzo $
 */

#ifndef	_MACHINE_ATOMIC_H_
#define	_MACHINE_ATOMIC_H_

#include <sys/types.h>

#ifndef _KERNEL
#include <machine/sysarch.h>
#else
#include <machine/cpuconf.h>
#endif

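/*
 * Note: in this revision the generic memory barrier macros expand to
 * nothing.  Code that needs ordering guarantees on ARMv6/v7 must use
 * the _acq/_rel operations below, which issue an explicit dmb.
 */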
#define mb()
#define wmb()
#define rmb()

#ifndef I32_bit
#define I32_bit (1 << 7)        /* IRQ disable */
#endif
#ifndef F32_bit
#define F32_bit (1 << 6)        /* FIQ disable */
#endif

/*
 * It would be nice to use _HAVE_ARMv6_INSTRUCTIONS from machine/asm.h
 * here, but that header can't be included here because this is C
 * code.  I would like to move the _HAVE_ARMv6_INSTRUCTIONS definition
 * out of asm.h so it can be used in both asm and C code. - kientzle@
 */
#if defined (__ARM_ARCH_7__) || \
	defined (__ARM_ARCH_7A__) || \
	defined (__ARM_ARCH_6__) || \
	defined (__ARM_ARCH_6J__) || \
	defined (__ARM_ARCH_6K__) || \
	defined (__ARM_ARCH_6Z__) || \
	defined (__ARM_ARCH_6ZK__)
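/*
 * Full data memory barrier.  ARMv7 has a dedicated "dmb" instruction;
 * ARMv6 gets the same effect from the CP15 c7, c10, 5 operation.
 */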
static __inline void
__do_dmb(void)
{

#if defined (__ARM_ARCH_7__) || defined (__ARM_ARCH_7A__)
	__asm __volatile("dmb" : : : "memory");
#else
	__asm __volatile("mcr p15, 0, r0, c7, c10, 5" : : : "memory");
#endif
}

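/*
 * The macros below build the acquire/release variants of an operation
 * from its plain version: the acquire form performs the operation and
 * then issues a dmb, while the release form issues the dmb first and
 * then performs the operation.
 */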
#define ATOMIC_ACQ_REL_LONG(NAME)					\
static __inline void							\
atomic_##NAME##_acq_long(__volatile u_long *p, u_long v)		\
{									\
	atomic_##NAME##_long(p, v);					\
	__do_dmb();							\
}									\
									\
static __inline void							\
atomic_##NAME##_rel_long(__volatile u_long *p, u_long v)		\
{									\
	__do_dmb();							\
	atomic_##NAME##_long(p, v);					\
}

#define	ATOMIC_ACQ_REL(NAME, WIDTH)					\
static __inline void							\
atomic_##NAME##_acq_##WIDTH(__volatile uint##WIDTH##_t *p, uint##WIDTH##_t v)\
{									\
	atomic_##NAME##_##WIDTH(p, v);					\
	__do_dmb();							\
}									\
									\
static __inline void							\
atomic_##NAME##_rel_##WIDTH(__volatile uint##WIDTH##_t *p, uint##WIDTH##_t v)\
{									\
	__do_dmb();							\
	atomic_##NAME##_##WIDTH(p, v);					\
}

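/*
 * All of the ARMv6+ read-modify-write primitives below share one
 * pattern: ldrex loads the old value and marks the address for
 * exclusive access, the new value is computed, and strex attempts the
 * store, writing 0 to its status operand on success.  A non-zero
 * status means another agent touched the location in between, so the
 * sequence branches back and retries.  In C-like pseudo-code:
 *
 *	do {
 *		old = load_exclusive(p);
 *		new = op(old, v);
 *	} while (store_exclusive(p, new) != 0);
 */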
static __inline void
atomic_set_32(volatile uint32_t *address, uint32_t setmask)
{
	uint32_t tmp = 0, tmp2 = 0;

	__asm __volatile("1: ldrex %0, [%2]\n"
			    "orr %0, %0, %3\n"
			    "strex %1, %0, [%2]\n"
			    "cmp %1, #0\n"
			    "bne	1b\n"
			   : "=&r" (tmp), "+r" (tmp2)
			   , "+r" (address), "+r" (setmask) : : "memory");
}

static __inline void
atomic_set_long(volatile u_long *address, u_long setmask)
{
	u_long tmp = 0, tmp2 = 0;

	__asm __volatile("1: ldrex %0, [%2]\n"
			    "orr %0, %0, %3\n"
			    "strex %1, %0, [%2]\n"
			    "cmp %1, #0\n"
			    "bne	1b\n"
			   : "=&r" (tmp), "+r" (tmp2)
			   , "+r" (address), "+r" (setmask) : : "memory");
}

static __inline void
atomic_clear_32(volatile uint32_t *address, uint32_t clearmask)
{
	uint32_t tmp = 0, tmp2 = 0;

	__asm __volatile("1: ldrex %0, [%2]\n"
			    "bic %0, %0, %3\n"
			    "strex %1, %0, [%2]\n"
			    "cmp %1, #0\n"
			    "bne	1b\n"
			   : "=&r" (tmp), "+r" (tmp2)
			   , "+r" (address), "+r" (clearmask) : : "memory");
}

static __inline void
atomic_clear_long(volatile u_long *address, u_long clearmask)
{
	u_long tmp = 0, tmp2 = 0;

	__asm __volatile("1: ldrex %0, [%2]\n"
			    "bic %0, %0, %3\n"
			    "strex %1, %0, [%2]\n"
			    "cmp %1, #0\n"
			    "bne	1b\n"
			   : "=&r" (tmp), "+r" (tmp2)
			   , "+r" (address), "+r" (clearmask) : : "memory");
}

static __inline u_int32_t
atomic_cmpset_32(volatile u_int32_t *p, volatile u_int32_t cmpval, volatile u_int32_t newval)
{
	uint32_t ret;

	__asm __volatile("1: ldrex %0, [%1]\n"
			 "cmp %0, %2\n"
			 "movne %0, #0\n"
			 "bne 2f\n"
			 "strex %0, %3, [%1]\n"
			 "cmp %0, #0\n"
			 "bne	1b\n"
			 "moveq %0, #1\n"
			 "2:"
			 : "=&r" (ret)
			 , "+r" (p), "+r" (cmpval), "+r" (newval) : : "memory");
	return (ret);
}
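
/*
 * atomic_cmpset returns non-zero when the store succeeded.  A typical
 * retry loop built on top of it (illustrative sketch only, with a
 * hypothetical "flag" mask):
 *
 *	uint32_t old;
 *
 *	do {
 *		old = *p;
 *	} while (atomic_cmpset_32(p, old, old | flag) == 0);
 */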

static __inline u_long
atomic_cmpset_long(volatile u_long *p, volatile u_long cmpval, volatile u_long newval)
{
	u_long ret;

	__asm __volatile("1: ldrex %0, [%1]\n"
			 "cmp %0, %2\n"
			 "movne %0, #0\n"
			 "bne 2f\n"
			 "strex %0, %3, [%1]\n"
			 "cmp %0, #0\n"
			 "bne	1b\n"
			 "moveq %0, #1\n"
			 "2:"
			 : "=&r" (ret)
			 , "+r" (p), "+r" (cmpval), "+r" (newval) : : "memory");
	return (ret);
}

static __inline u_int32_t
atomic_cmpset_acq_32(volatile u_int32_t *p, volatile u_int32_t cmpval, volatile u_int32_t newval)
{
	u_int32_t ret = atomic_cmpset_32(p, cmpval, newval);

	__do_dmb();
	return (ret);
}

static __inline u_long
atomic_cmpset_acq_long(volatile u_long *p, volatile u_long cmpval, volatile u_long newval)
{
	u_long ret = atomic_cmpset_long(p, cmpval, newval);

	__do_dmb();
	return (ret);
}

static __inline u_int32_t
atomic_cmpset_rel_32(volatile u_int32_t *p, volatile u_int32_t cmpval, volatile u_int32_t newval)
{

	__do_dmb();
	return (atomic_cmpset_32(p, cmpval, newval));
}

static __inline u_long
atomic_cmpset_rel_long(volatile u_long *p, volatile u_long cmpval, volatile u_long newval)
{

	__do_dmb();
	return (atomic_cmpset_long(p, cmpval, newval));
}

static __inline void
atomic_add_32(volatile u_int32_t *p, u_int32_t val)
{
	uint32_t tmp = 0, tmp2 = 0;

	__asm __volatile("1: ldrex %0, [%2]\n"
			    "add %0, %0, %3\n"
			    "strex %1, %0, [%2]\n"
			    "cmp %1, #0\n"
			    "bne	1b\n"
			    : "=&r" (tmp), "+r" (tmp2)
			    , "+r" (p), "+r" (val) : : "memory");
}

static __inline void
atomic_add_long(volatile u_long *p, u_long val)
{
	u_long tmp = 0, tmp2 = 0;

	__asm __volatile("1: ldrex %0, [%2]\n"
			    "add %0, %0, %3\n"
			    "strex %1, %0, [%2]\n"
			    "cmp %1, #0\n"
			    "bne	1b\n"
			    : "=&r" (tmp), "+r" (tmp2)
			    , "+r" (p), "+r" (val) : : "memory");
}

static __inline void
atomic_subtract_32(volatile u_int32_t *p, u_int32_t val)
{
	uint32_t tmp = 0, tmp2 = 0;

	__asm __volatile("1: ldrex %0, [%2]\n"
			    "sub %0, %0, %3\n"
			    "strex %1, %0, [%2]\n"
			    "cmp %1, #0\n"
			    "bne	1b\n"
			    : "=&r" (tmp), "+r" (tmp2)
			    , "+r" (p), "+r" (val) : : "memory");
}

static __inline void
atomic_subtract_long(volatile u_long *p, u_long val)
{
	u_long tmp = 0, tmp2 = 0;

	__asm __volatile("1: ldrex %0, [%2]\n"
			    "sub %0, %0, %3\n"
			    "strex %1, %0, [%2]\n"
			    "cmp %1, #0\n"
			    "bne	1b\n"
			    : "=&r" (tmp), "+r" (tmp2)
			    , "+r" (p), "+r" (val) : : "memory");
}

ATOMIC_ACQ_REL(clear, 32)
ATOMIC_ACQ_REL(add, 32)
ATOMIC_ACQ_REL(subtract, 32)
ATOMIC_ACQ_REL(set, 32)
ATOMIC_ACQ_REL_LONG(clear)
ATOMIC_ACQ_REL_LONG(add)
ATOMIC_ACQ_REL_LONG(subtract)
ATOMIC_ACQ_REL_LONG(set)

#undef ATOMIC_ACQ_REL
#undef ATOMIC_ACQ_REL_LONG

static __inline uint32_t
atomic_fetchadd_32(volatile uint32_t *p, uint32_t val)
{
	uint32_t tmp = 0, tmp2 = 0, ret = 0;

	__asm __volatile("1: ldrex %0, [%3]\n"
			    "add %1, %0, %4\n"
			    "strex %2, %1, [%3]\n"
			    "cmp %2, #0\n"
			    "bne	1b\n"
			   : "+r" (ret), "=&r" (tmp), "+r" (tmp2)
			   , "+r" (p), "+r" (val) : : "memory");
	return (ret);
}

static __inline uint32_t
atomic_readandclear_32(volatile u_int32_t *p)
{
	uint32_t ret, tmp = 0, tmp2 = 0;

	__asm __volatile("1: ldrex %0, [%3]\n"
			 "mov %1, #0\n"
			 "strex %2, %1, [%3]\n"
			 "cmp %2, #0\n"
			 "bne 1b\n"
			 : "=&r" (ret), "=&r" (tmp), "+r" (tmp2)
			 , "+r" (p) : : "memory");
	return (ret);
}

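/*
 * On v6+ a load-acquire is a plain load followed by a dmb (later
 * accesses cannot be observed before the load), and a store-release
 * is a dmb followed by a plain store (earlier accesses are globally
 * observed before the store).
 */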
static __inline uint32_t
atomic_load_acq_32(volatile uint32_t *p)
{
	uint32_t v;

	v = *p;
	__do_dmb();
	return (v);
}

static __inline void
atomic_store_rel_32(volatile uint32_t *p, uint32_t v)
{

	__do_dmb();
	*p = v;
}

static __inline u_long
atomic_fetchadd_long(volatile u_long *p, u_long val)
{
	u_long tmp = 0, tmp2 = 0, ret = 0;

	__asm __volatile("1: ldrex %0, [%3]\n"
			    "add %1, %0, %4\n"
			    "strex %2, %1, [%3]\n"
			    "cmp %2, #0\n"
			    "bne	1b\n"
			   : "+r" (ret), "=&r" (tmp), "+r" (tmp2)
			   , "+r" (p), "+r" (val) : : "memory");
	return (ret);
}

static __inline u_long
atomic_readandclear_long(volatile u_long *p)
{
	u_long ret, tmp = 0, tmp2 = 0;

	__asm __volatile("1: ldrex %0, [%3]\n"
			 "mov %1, #0\n"
			 "strex %2, %1, [%3]\n"
			 "cmp %2, #0\n"
			 "bne 1b\n"
			 : "=&r" (ret), "=&r" (tmp), "+r" (tmp2)
			 , "+r" (p) : : "memory");
	return (ret);
}

static __inline u_long
atomic_load_acq_long(volatile u_long *p)
{
	u_long v;

	v = *p;
	__do_dmb();
	return (v);
}

static __inline void
atomic_store_rel_long(volatile u_long *p, u_long v)
{

	__do_dmb();
	*p = v;
}
#else /* < armv6 */

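/*
 * Pre-ARMv6 CPUs have no ldrex/strex.  In the kernel, atomicity is
 * obtained by disabling both IRQs and FIQs around the read-modify-
 * write; in userland, restartable atomic sequences (RAS) are used
 * instead.  Neither technique provides any inter-CPU ordering, which
 * is acceptable because these CPUs run uniprocessor-only.
 */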
#define __with_interrupts_disabled(expr)		\
	do {						\
		u_int cpsr_save, tmp;			\
							\
		__asm __volatile(			\
			"mrs  %0, cpsr;"		\
			"orr  %1, %0, %2;"		\
			"msr  cpsr_all, %1;"		\
			: "=r" (cpsr_save), "=r" (tmp)	\
			: "I" (I32_bit | F32_bit)	\
			: "cc" );			\
		(expr);					\
		__asm __volatile(			\
			"msr  cpsr_all, %0"		\
			: /* no output */		\
			: "r" (cpsr_save)		\
			: "cc" );			\
	} while(0)
429
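/*
 * The swp instruction atomically exchanges a register with a memory
 * word.  It is deprecated on ARMv6 and later, but it is the natural
 * exchange primitive on earlier CPUs.
 */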
static __inline uint32_t
__swp(uint32_t val, volatile uint32_t *ptr)
{
	__asm __volatile("swp	%0, %2, [%3]"
	    : "=&r" (val), "=m" (*ptr)
	    : "r" (val), "r" (ptr), "m" (*ptr)
	    : "memory");
	return (val);
}

#ifdef _KERNEL
static __inline void
atomic_set_32(volatile uint32_t *address, uint32_t setmask)
{
	__with_interrupts_disabled(*address |= setmask);
}

static __inline void
atomic_clear_32(volatile uint32_t *address, uint32_t clearmask)
{
	__with_interrupts_disabled(*address &= ~clearmask);
}

static __inline u_int32_t
atomic_cmpset_32(volatile u_int32_t *p, volatile u_int32_t cmpval, volatile u_int32_t newval)
{
	int ret;

	__with_interrupts_disabled(
	{
		if (*p == cmpval) {
			*p = newval;
			ret = 1;
		} else {
			ret = 0;
		}
	});
	return (ret);
}

static __inline void
atomic_add_32(volatile u_int32_t *p, u_int32_t val)
{
	__with_interrupts_disabled(*p += val);
}

static __inline void
atomic_subtract_32(volatile u_int32_t *p, u_int32_t val)
{
	__with_interrupts_disabled(*p -= val);
}

static __inline uint32_t
atomic_fetchadd_32(volatile uint32_t *p, uint32_t v)
{
	uint32_t value;

	__with_interrupts_disabled(
	{
		value = *p;
		*p += v;
	});
	return (value);
}

#else /* !_KERNEL */

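/*
 * Userland atomicity via restartable atomic sequences (RAS): each
 * sequence stores its start address at ARM_RAS_START and its end
 * address at ARM_RAS_START + 4 before touching the data.  If the
 * kernel preempts the thread while its PC is inside that window, it
 * rewinds the PC to the start, so the load-modify-store completes as
 * if it were atomic.  The window is torn down afterwards by storing
 * start = 0 and end = 0xffffffff.
 */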
static __inline u_int32_t
atomic_cmpset_32(volatile u_int32_t *p, volatile u_int32_t cmpval, volatile u_int32_t newval)
{
	register int done, ras_start = ARM_RAS_START;

	__asm __volatile("1:\n"
	    "adr	%1, 1b\n"
	    "str	%1, [%0]\n"
	    "adr	%1, 2f\n"
	    "str	%1, [%0, #4]\n"
	    "ldr	%1, [%2]\n"
	    "cmp	%1, %3\n"
	    "streq	%4, [%2]\n"
	    "2:\n"
	    "mov	%1, #0\n"
	    "str	%1, [%0]\n"
	    "mov	%1, #0xffffffff\n"
	    "str	%1, [%0, #4]\n"
	    "moveq	%1, #1\n"
	    "movne	%1, #0\n"
	    : "+r" (ras_start), "=&r" (done)
	    , "+r" (p), "+r" (cmpval), "+r" (newval) : : "memory");
	return (done);
}

static __inline void
atomic_add_32(volatile u_int32_t *p, u_int32_t val)
{
	int start, ras_start = ARM_RAS_START;

	__asm __volatile("1:\n"
	    "adr	%1, 1b\n"
	    "str	%1, [%0]\n"
	    "adr	%1, 2f\n"
	    "str	%1, [%0, #4]\n"
	    "ldr	%1, [%2]\n"
	    "add	%1, %1, %3\n"
	    "str	%1, [%2]\n"
	    "2:\n"
	    "mov	%1, #0\n"
	    "str	%1, [%0]\n"
	    "mov	%1, #0xffffffff\n"
	    "str	%1, [%0, #4]\n"
	    : "+r" (ras_start), "=&r" (start), "+r" (p), "+r" (val)
	    : : "memory");
}

static __inline void
atomic_subtract_32(volatile u_int32_t *p, u_int32_t val)
{
	int start, ras_start = ARM_RAS_START;

	__asm __volatile("1:\n"
	    "adr	%1, 1b\n"
	    "str	%1, [%0]\n"
	    "adr	%1, 2f\n"
	    "str	%1, [%0, #4]\n"
	    "ldr	%1, [%2]\n"
	    "sub	%1, %1, %3\n"
	    "str	%1, [%2]\n"
	    "2:\n"
	    "mov	%1, #0\n"
	    "str	%1, [%0]\n"
	    "mov	%1, #0xffffffff\n"
	    "str	%1, [%0, #4]\n"
	    : "+r" (ras_start), "=&r" (start), "+r" (p), "+r" (val)
	    : : "memory");
}

static __inline void
atomic_set_32(volatile uint32_t *address, uint32_t setmask)
{
	int start, ras_start = ARM_RAS_START;

	__asm __volatile("1:\n"
	    "adr	%1, 1b\n"
	    "str	%1, [%0]\n"
	    "adr	%1, 2f\n"
	    "str	%1, [%0, #4]\n"
	    "ldr	%1, [%2]\n"
	    "orr	%1, %1, %3\n"
	    "str	%1, [%2]\n"
	    "2:\n"
	    "mov	%1, #0\n"
	    "str	%1, [%0]\n"
	    "mov	%1, #0xffffffff\n"
	    "str	%1, [%0, #4]\n"
	    : "+r" (ras_start), "=&r" (start), "+r" (address), "+r" (setmask)
	    : : "memory");
}

static __inline void
atomic_clear_32(volatile uint32_t *address, uint32_t clearmask)
{
	int start, ras_start = ARM_RAS_START;

	__asm __volatile("1:\n"
	    "adr	%1, 1b\n"
	    "str	%1, [%0]\n"
	    "adr	%1, 2f\n"
	    "str	%1, [%0, #4]\n"
	    "ldr	%1, [%2]\n"
	    "bic	%1, %1, %3\n"
	    "str	%1, [%2]\n"
	    "2:\n"
	    "mov	%1, #0\n"
	    "str	%1, [%0]\n"
	    "mov	%1, #0xffffffff\n"
	    "str	%1, [%0, #4]\n"
	    : "+r" (ras_start), "=&r" (start), "+r" (address), "+r" (clearmask)
	    : : "memory");
}

static __inline uint32_t
atomic_fetchadd_32(volatile uint32_t *p, uint32_t v)
{
	uint32_t start, tmp, ras_start = ARM_RAS_START;

	__asm __volatile("1:\n"
	    "adr	%1, 1b\n"
	    "str	%1, [%0]\n"
	    "adr	%1, 2f\n"
	    "str	%1, [%0, #4]\n"
	    "ldr	%1, [%3]\n"
	    "mov	%2, %1\n"
	    "add	%2, %2, %4\n"
	    "str	%2, [%3]\n"
	    "2:\n"
	    "mov	%2, #0\n"
	    "str	%2, [%0]\n"
	    "mov	%2, #0xffffffff\n"
	    "str	%2, [%0, #4]\n"
	    : "+r" (ras_start), "=&r" (start), "=&r" (tmp), "+r" (p), "+r" (v)
	    : : "memory");
	return (start);
}

#endif /* _KERNEL */

static __inline uint32_t
atomic_readandclear_32(volatile u_int32_t *p)
{

	return (__swp(0, p));
}

#define atomic_cmpset_rel_32	atomic_cmpset_32
#define atomic_cmpset_acq_32	atomic_cmpset_32
#define atomic_set_rel_32	atomic_set_32
#define atomic_set_acq_32	atomic_set_32
#define atomic_clear_rel_32	atomic_clear_32
#define atomic_clear_acq_32	atomic_clear_32
#define atomic_add_rel_32	atomic_add_32
#define atomic_add_acq_32	atomic_add_32
#define atomic_subtract_rel_32	atomic_subtract_32
#define atomic_subtract_acq_32	atomic_subtract_32
#define atomic_store_rel_32	atomic_store_32
#define atomic_store_rel_long	atomic_store_long
#define atomic_load_acq_32	atomic_load_32
#define atomic_load_acq_long	atomic_load_long
#undef __with_interrupts_disabled
663
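/*
 * On 32-bit ARM, u_long is the same width as uint32_t, so the long
 * operations simply delegate to their 32-bit counterparts.
 */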
static __inline void
atomic_add_long(volatile u_long *p, u_long v)
{

	atomic_add_32((volatile uint32_t *)p, v);
}

static __inline void
atomic_clear_long(volatile u_long *p, u_long v)
{

	atomic_clear_32((volatile uint32_t *)p, v);
}

static __inline int
atomic_cmpset_long(volatile u_long *dst, u_long old, u_long newe)
{

	return (atomic_cmpset_32((volatile uint32_t *)dst, old, newe));
}

static __inline u_long
atomic_fetchadd_long(volatile u_long *p, u_long v)
{

	return (atomic_fetchadd_32((volatile uint32_t *)p, v));
}

static __inline u_long
atomic_readandclear_long(volatile u_long *p)
{

	return (atomic_readandclear_32((volatile uint32_t *)p));
}

static __inline void
atomic_set_long(volatile u_long *p, u_long v)
{

	atomic_set_32((volatile uint32_t *)p, v);
}

static __inline void
atomic_subtract_long(volatile u_long *p, u_long v)
{

	atomic_subtract_32((volatile uint32_t *)p, v);
}

/*
 * The pre-v6 implementations provide no barrier semantics, so the
 * acquire/release variants of the long operations map directly to
 * the plain versions.  (The v6+ path defines real acquire/release
 * inline functions above, which these aliases must not shadow.)
 */
#define atomic_add_acq_long		atomic_add_long
#define atomic_add_rel_long		atomic_add_long
#define atomic_subtract_acq_long	atomic_subtract_long
#define atomic_subtract_rel_long	atomic_subtract_long
#define atomic_clear_acq_long		atomic_clear_long
#define atomic_clear_rel_long		atomic_clear_long
#define atomic_set_acq_long		atomic_set_long
#define atomic_set_rel_long		atomic_set_long
#define atomic_cmpset_acq_long		atomic_cmpset_long
#define atomic_cmpset_rel_long		atomic_cmpset_long

#endif /* Arch >= v6 */
716
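/*
 * Aligned 32-bit loads and stores are single instructions on ARM and
 * therefore atomic by themselves; these helpers carry no ordering.
 */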
static __inline uint32_t
atomic_load_32(volatile uint32_t *v)
{

	return (*v);
}

static __inline void
atomic_store_32(volatile uint32_t *dst, uint32_t src)
{
	*dst = src;
}

static __inline u_long
atomic_load_long(volatile u_long *v)
{

	return (*v);
}

static __inline void
atomic_store_long(volatile u_long *dst, u_long src)
{
	*dst = src;
}
754
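/*
 * Pointers and ints are 32 bits wide on ARM, so the _ptr and _int
 * operations are simple aliases for the 32-bit primitives.
 */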
#define atomic_clear_ptr		atomic_clear_32
#define atomic_set_ptr			atomic_set_32
#define atomic_cmpset_ptr		atomic_cmpset_32
#define atomic_cmpset_rel_ptr		atomic_cmpset_rel_32
#define atomic_cmpset_acq_ptr		atomic_cmpset_acq_32
#define atomic_store_ptr		atomic_store_32
#define atomic_store_rel_ptr		atomic_store_rel_32

#define atomic_add_int			atomic_add_32
#define atomic_add_acq_int		atomic_add_acq_32
#define atomic_add_rel_int		atomic_add_rel_32
#define atomic_subtract_int		atomic_subtract_32
#define atomic_subtract_acq_int		atomic_subtract_acq_32
#define atomic_subtract_rel_int		atomic_subtract_rel_32
#define atomic_clear_int		atomic_clear_32
#define atomic_clear_acq_int		atomic_clear_acq_32
#define atomic_clear_rel_int		atomic_clear_rel_32
#define atomic_set_int			atomic_set_32
#define atomic_set_acq_int		atomic_set_acq_32
#define atomic_set_rel_int		atomic_set_rel_32
#define atomic_cmpset_int		atomic_cmpset_32
#define atomic_cmpset_acq_int		atomic_cmpset_acq_32
#define atomic_cmpset_rel_int		atomic_cmpset_rel_32
#define atomic_fetchadd_int		atomic_fetchadd_32
#define atomic_readandclear_int		atomic_readandclear_32
#define atomic_load_acq_int		atomic_load_acq_32
#define atomic_store_rel_int		atomic_store_rel_32

#endif /* _MACHINE_ATOMIC_H_ */