1#ifndef __ASM_SH_ATOMIC_H
2#define __ASM_SH_ATOMIC_H
3
4/*
5 * Atomic operations that C can't guarantee us.  Useful for
6 * resource counting etc..
7 *
8 */
9
/*
 * Atomic type: a plain int wrapped in a struct so that an atomic_t
 * cannot be accidentally mixed with ordinary integers.  'volatile'
 * forces the compiler to re-read the value on every access.
 */
typedef struct { volatile int counter; } atomic_t;

/* Static/compound-literal initializer, e.g. atomic_t n = ATOMIC_INIT(0); */
#define ATOMIC_INIT(i)	( (atomic_t) { (i) } )

/* Plain (non-read-modify-write) read and write of the counter. */
#define atomic_read(v)		((v)->counter)
#define atomic_set(v,i)		((v)->counter = (i))
16
17#include <asm/system.h>
18
/*
 * NOTE(review): the original comment here described a branch-forward/
 * branch-back retry sequence that belongs to an assembly (LL/SC-style)
 * implementation; this file actually achieves atomicity on UP by
 * disabling interrupts around a plain read-modify-write.
 */
24
25static __inline__ void atomic_add(int i, atomic_t * v)
26{
27	unsigned long flags;
28
29	save_and_cli(flags);
30	*(long *)v += i;
31	restore_flags(flags);
32}
33
34static __inline__ void atomic_sub(int i, atomic_t *v)
35{
36	unsigned long flags;
37
38	save_and_cli(flags);
39	*(long *)v -= i;
40	restore_flags(flags);
41}
42
43static __inline__ int atomic_add_return(int i, atomic_t * v)
44{
45	unsigned long temp, flags;
46
47	save_and_cli(flags);
48	temp = *(long *)v;
49	temp += i;
50	*(long *)v = temp;
51	restore_flags(flags);
52
53	return temp;
54}
55
56static __inline__ int atomic_sub_return(int i, atomic_t * v)
57{
58	unsigned long temp, flags;
59
60	save_and_cli(flags);
61	temp = *(long *)v;
62	temp -= i;
63	*(long *)v = temp;
64	restore_flags(flags);
65
66	return temp;
67}
68
/* Increment/decrement by one, returning the new value. */
#define atomic_dec_return(v) atomic_sub_return(1,(v))
#define atomic_inc_return(v) atomic_add_return(1,(v))

/* Subtract and test: non-zero (true) iff the result is zero. */
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)

/* Increment/decrement by one, discarding the result. */
#define atomic_inc(v) atomic_add(1,(v))
#define atomic_dec(v) atomic_sub(1,(v))
77
78static __inline__ void atomic_clear_mask(unsigned int mask, atomic_t *v)
79{
80	unsigned long flags;
81
82	save_and_cli(flags);
83	*(long *)v &= ~mask;
84	restore_flags(flags);
85}
86
87static __inline__ void atomic_set_mask(unsigned int mask, atomic_t *v)
88{
89	unsigned long flags;
90
91	save_and_cli(flags);
92	*(long *)v |= mask;
93	restore_flags(flags);
94}
95
/* Atomic operations are already serializing on SH */
/*
 * Hence these hooks only need to stop the compiler from reordering
 * memory accesses across the atomic op; barrier() is a pure compiler
 * barrier, no hardware memory-barrier instruction is emitted.
 */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()
101
102#endif /* __ASM_SH_ATOMIC_H */
103