/*
 * PowerPC atomic operations
 */

#ifndef _ASM_PPC_ATOMIC_H_
#define _ASM_PPC_ATOMIC_H_

/*
 * On SMP the counter is volatile so the compiler re-reads it from
 * memory on every access instead of caching it in a register.
 */
#ifdef CONFIG_SMP
typedef struct { volatile int counter; } atomic_t;
#else
typedef struct { int counter; } atomic_t;
#endif

#define ATOMIC_INIT(i)	{ (i) }

#define atomic_read(v)		((v)->counter)
#define atomic_set(v,i)		(((v)->counter) = (i))

extern void atomic_clear_mask(unsigned long mask, unsigned long *addr);
extern void atomic_set_mask(unsigned long mask, unsigned long *addr);
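
/*
 * A minimal usage sketch of the basic interface; `refs' and its
 * initial value are hypothetical, not part of this header:
 *
 *	static atomic_t refs = ATOMIC_INIT(1);
 *	...
 *	atomic_set(&refs, 2);
 *	if (atomic_read(&refs) > 1)
 *		...
 */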

/*
 * Atomically add `a' to *v and return the new value.
 *
 * lwarx loads the word and places a reservation on it; stwcx. stores
 * only if the reservation still holds, setting CR0 accordingly.  If
 * another CPU touched the word in between, the store fails and we
 * branch back and retry.  The same load-reserve/store-conditional
 * loop is used by all of the operations below.
 */
static __inline__ int atomic_add_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__("\n\
1:	lwarx	%0,0,%3\n\
	add	%0,%2,%0\n\
	stwcx.	%0,0,%3\n\
	bne-	1b"
	: "=&r" (t), "=m" (*v)
	: "r" (a), "r" (v), "m" (*v)
	: "cc");

	return t;
}
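
/*
 * A usage sketch: the *_return forms hand back the new value, so a
 * counter can be bumped and tested in one step.  `hits' is a
 * hypothetical counter, not part of this header:
 *
 *	static atomic_t hits = ATOMIC_INIT(0);
 *	...
 *	if (atomic_add_return(1, &hits) == 1000)
 *		...
 */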

/* Atomically subtract `a' from *v and return the new value. */
static __inline__ int atomic_sub_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__("\n\
1:	lwarx	%0,0,%3\n\
	subf	%0,%2,%0\n\
	stwcx.	%0,0,%3\n\
	bne-	1b"
	: "=&r" (t), "=m" (*v)
	: "r" (a), "r" (v), "m" (*v)
	: "cc");

	return t;
}

/* Atomically increment *v and return the new value. */
static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__("\n\
1:	lwarx	%0,0,%2\n\
	addic	%0,%0,1\n\
	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "=m" (*v)
	: "r" (v), "m" (*v)
	: "cc");

	return t;
}

/* Atomically decrement *v and return the new value. */
static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__("\n\
1:	lwarx	%0,0,%2\n\
	addic	%0,%0,-1\n\
	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "=m" (*v)
	: "r" (v), "m" (*v)
	: "cc");

	return t;
}

#define atomic_add(a, v)		((void) atomic_add_return((a), (v)))
#define atomic_sub(a, v)		((void) atomic_sub_return((a), (v)))
#define atomic_sub_and_test(a, v)	(atomic_sub_return((a), (v)) == 0)
#define atomic_inc(v)			((void) atomic_inc_return((v)))
#define atomic_dec(v)			((void) atomic_dec_return((v)))
#define atomic_dec_and_test(v)		(atomic_dec_return((v)) == 0)
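
/*
 * A minimal sketch of the usual reference-counting idiom built on
 * these macros; `obj', its `refs' field and free_obj() are
 * hypothetical, not part of this header:
 *
 *	atomic_inc(&obj->refs);
 *	...
 *	if (atomic_dec_and_test(&obj->refs))
 *		free_obj(obj);
 */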

#endif /* _ASM_PPC_ATOMIC_H_ */