#ifndef __ARCH_H8300_ATOMIC__
#define __ARCH_H8300_ATOMIC__

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 */
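
/*
 * Usage sketch (illustrative only, not part of this header): a typical
 * resource counter built on these primitives.  get_resource(),
 * put_resource() and free_resource() are hypothetical names.
 *
 *	static atomic_t users = ATOMIC_INIT(0);
 *
 *	void get_resource(void) { atomic_inc(&users); }
 *
 *	void put_resource(void)
 *	{
 *		if (atomic_dec_and_test(&users))
 *			free_resource();
 *	}
 */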

typedef struct { int counter; } atomic_t;
#define ATOMIC_INIT(i)	{ (i) }

#define atomic_read(v)		((v)->counter)
#define atomic_set(v, i)	(((v)->counter) = (i))

#include <asm/system.h>
#include <linux/kernel.h>

/*
 * The H8/300 port is uniprocessor only, so disabling interrupts around
 * the read-modify-write sequence is enough to make these operations
 * atomic.
 */
static __inline__ int atomic_add_return(int i, atomic_t *v)
{
	int ret;
	unsigned long flags;

	local_irq_save(flags);
	ret = v->counter += i;
	local_irq_restore(flags);
	return ret;
}

#define atomic_add(i, v) atomic_add_return(i, v)
#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

static __inline__ int atomic_sub_return(int i, atomic_t *v)
{
	int ret;
	unsigned long flags;

	local_irq_save(flags);
	ret = v->counter -= i;
	local_irq_restore(flags);
	return ret;
}

#define atomic_sub(i, v) atomic_sub_return(i, v)
#define atomic_sub_and_test(i, v) (atomic_sub_return(i, v) == 0)

static __inline__ int atomic_inc_return(atomic_t *v)
{
	int ret;
	unsigned long flags;

	local_irq_save(flags);
	v->counter++;
	ret = v->counter;
	local_irq_restore(flags);
	return ret;
}

#define atomic_inc(v) atomic_inc_return(v)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
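
/*
 * Illustrative sketch (not part of this header): atomic_inc_and_test()
 * pairs naturally with a counter biased to -1, so the test fires only
 * for the first incrementer.  start_work() is a hypothetical helper.
 *
 *	static atomic_t first = ATOMIC_INIT(-1);
 *
 *	if (atomic_inc_and_test(&first))
 *		start_work();
 */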

static __inline__ int atomic_dec_return(atomic_t *v)
{
	int ret;
	unsigned long flags;

	local_irq_save(flags);
	--v->counter;
	ret = v->counter;
	local_irq_restore(flags);
	return ret;
}

#define atomic_dec(v) atomic_dec_return(v)

static __inline__ int atomic_dec_and_test(atomic_t *v)
{
	int ret;
	unsigned long flags;

	local_irq_save(flags);
	--v->counter;
	ret = v->counter;
	local_irq_restore(flags);
	return ret == 0;
}

static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	unsigned long flags;

	local_irq_save(flags);
	ret = v->counter;
	if (likely(ret == old))
		v->counter = new;
	local_irq_restore(flags);
	return ret;
}

#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
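
/*
 * Illustrative sketch (not part of this header): atomic_cmpxchg() is
 * normally used in a retry loop, for example to bump a counter only up
 * to some limit.  MAX_COUNT is a hypothetical constant.
 *
 *	int old, new;
 *
 *	do {
 *		old = atomic_read(v);
 *		if (old >= MAX_COUNT)
 *			break;
 *		new = old + 1;
 *	} while (atomic_cmpxchg(v, old, new) != old);
 */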

/*
 * atomic_add_unless - add @a to @v unless it currently holds @u.
 * Returns non-zero if the add was performed.
 */
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	int ret;
	unsigned long flags;

	local_irq_save(flags);
	ret = v->counter;
	if (ret != u)
		v->counter += a;
	local_irq_restore(flags);
	return ret != u;
}
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
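
/*
 * Illustrative sketch (not part of this header): atomic_inc_not_zero()
 * is typically used to take a reference only while the object is still
 * live, i.e. refuse once the count has already dropped to zero.  "obj"
 * and its "refcnt" field are hypothetical.
 *
 *	if (!atomic_inc_not_zero(&obj->refcnt))
 *		return NULL;
 */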

static __inline__ void atomic_clear_mask(unsigned long mask, unsigned long *v)
{
	/*
	 * Save CCR, set the I bit to block interrupts, do the
	 * load/and/store on *v, then restore the original CCR.
	 */
	__asm__ __volatile__("stc ccr,r1l\n\t"
	                     "orc #0x80,ccr\n\t"
	                     "mov.l %0,er0\n\t"
	                     "and.l %1,er0\n\t"
	                     "mov.l er0,%0\n\t"
	                     "ldc r1l,ccr"
	                     : "=m" (*v) : "g" (~(mask)) : "er0", "er1");
}

static __inline__ void atomic_set_mask(unsigned long mask, unsigned long *v)
{
	/*
	 * Same pattern as atomic_clear_mask(): mask interrupts via CCR,
	 * OR the mask into *v, then restore CCR.
	 */
	__asm__ __volatile__("stc ccr,r1l\n\t"
	                     "orc #0x80,ccr\n\t"
	                     "mov.l %0,er0\n\t"
	                     "or.l %1,er0\n\t"
	                     "mov.l er0,%0\n\t"
	                     "ldc r1l,ccr"
	                     : "=m" (*v) : "g" (mask) : "er0", "er1");
}

/* Atomic operations are already serializing */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

#include <asm-generic/atomic.h>
#endif /* __ARCH_H8300_ATOMIC__ */