1
2#ifndef __ASM_ARM_ATOMIC_H
3#define __ASM_ARM_ATOMIC_H
4
5#ifdef CONFIG_SMP
6#error SMP is NOT supported
7#endif
8
/* Wrap the counter in a struct so atomic_t values are never mixed up
 * with plain ints by accident; volatile forces the compiler to re-read
 * the counter from memory on every access. */
typedef struct { volatile int counter; } atomic_t;

/* Static initializer:  atomic_t a = ATOMIC_INIT(0); */
#define ATOMIC_INIT(i)	{ (i) }
12
13#ifdef __KERNEL__
14#include <asm/system.h>
15
/* Plain int loads and stores are naturally atomic on this architecture,
 * so read/set need no locking or IRQ masking. */
#define atomic_read(v) ((v)->counter)
#define atomic_set(v,i)	(((v)->counter) = (i))
18
19static inline int atomic_add_return(int i, atomic_t *v)
20{
21        unsigned long flags;
22        int val;
23
24        local_irq_save(flags);
25        val = v->counter;
26        v->counter = val += i;
27        local_irq_restore(flags);
28
29        return val;
30}
31
32static inline int atomic_sub_return(int i, atomic_t *v)
33{
34        unsigned long flags;
35        int val;
36
37        local_irq_save(flags);
38        val = v->counter;
39        v->counter = val -= i;
40        local_irq_restore(flags);
41
42        return val;
43}
44
45static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
46{
47	int ret;
48	unsigned long flags;
49
50	local_irq_save(flags);
51	ret = v->counter;
52	if (likely(ret == old))
53		v->counter = new;
54	local_irq_restore(flags);
55
56	return ret;
57}
58
/* Atomically exchange v->counter with @new, returning the previous
 * value; delegates to the generic xchg() from <asm/system.h>. */
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
60
61static inline int atomic_add_unless(atomic_t *v, int a, int u)
62{
63	int ret;
64	unsigned long flags;
65
66	local_irq_save(flags);
67	ret = v->counter;
68	if (ret != u)
69		v->counter += a;
70	local_irq_restore(flags);
71
72	return ret != u;
73}
74#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
75
/*
 * Atomically clear the bits in @mask from the word at @addr.
 * Note this operates on a raw unsigned long, not an atomic_t.
 */
static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
{
	unsigned long flags;
	unsigned long cleared;

	local_irq_save(flags);
	cleared = *addr & ~mask;
	*addr = cleared;
	local_irq_restore(flags);
}
84
/* Void-returning arithmetic ops, built on the *_return primitives. */
#define atomic_add(i, v)        (void) atomic_add_return(i, v)
#define atomic_inc(v)           (void) atomic_add_return(1, v)
#define atomic_sub(i, v)        (void) atomic_sub_return(i, v)
#define atomic_dec(v)           (void) atomic_sub_return(1, v)

/* Test variants: true when the resulting counter value is zero. */
#define atomic_inc_and_test(v)  (atomic_add_return(1, v) == 0)
#define atomic_dec_and_test(v)  (atomic_sub_return(1, v) == 0)
#define atomic_inc_return(v)    (atomic_add_return(1, v))
#define atomic_dec_return(v)    (atomic_sub_return(1, v))

/* True when the counter is negative after adding @i. */
#define atomic_add_negative(i,v) (atomic_add_return(i, v) < 0)
96
/* Atomic operations are already serializing on ARM26 (UP-only, IRQs
 * disabled around each op), so these need only be compiler barriers. */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()
102
103#include <asm-generic/atomic.h>
104#endif
105#endif
106