#ifndef __ASM_SH_ATOMIC_H
#define __ASM_SH_ATOMIC_H

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 *
 */

/*
 * Wrapping the counter in a struct keeps callers from touching it with
 * plain arithmetic; all access goes through the atomic_* API below.
 */
typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)	( (atomic_t) { (i) } )

/* Plain read/write of the counter; no ordering or atomicity beyond a
 * single aligned int access. */
#define atomic_read(v)		((v)->counter)
#define atomic_set(v,i)		((v)->counter = (i))

#include <linux/compiler.h>
#include <asm/system.h>

/*
 * Core add/sub primitives come from one of two backends:
 * SH-4A uses the LL/SC based implementation (atomic-llsc.h); everything
 * else falls back to disabling interrupts (atomic-irq.h).
 */
#ifdef CONFIG_CPU_SH4A
#include <asm/atomic-llsc.h>
#else
#include <asm/atomic-irq.h>
#endif

/* True if v went negative after adding a. */
#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

/* inc/dec expressed in terms of the backend's add/sub_return. */
#define atomic_dec_return(v)		atomic_sub_return(1,(v))
#define atomic_inc_return(v)		atomic_add_return(1,(v))

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/* True if the subtraction / decrement left the counter at zero. */
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)

#define atomic_inc(v) atomic_add(1,(v))
#define atomic_dec(v) atomic_sub(1,(v))

/*
 * atomic_cmpxchg - compare and exchange
 * @v: pointer of type atomic_t
 * @old: expected value
 * @new: value to store if *v == @old
 *
 * Atomically (with respect to local interrupts) replaces v->counter
 * with @new iff it currently equals @old.  Always returns the value
 * the counter held before the call, so callers detect success with
 * (ret == old).
 *
 * NOTE(review): the irq-disable window serializes only against code on
 * the local CPU — presumably these configurations are UP; confirm
 * before relying on this under SMP.
 */
static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	unsigned long flags;

	local_irq_save(flags);
	ret = v->counter;
	if (likely(ret == old))
		v->counter = new;
	local_irq_restore(flags);

	return ret;
}

/* Swap in @new unconditionally, returning the previous value. */
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

/*
 * atomic_add_unless - add unless the counter holds a given value
 * @v: pointer of type atomic_t
 * @a: amount to add
 * @u: value that blocks the add
 *
 * Atomically (with respect to local interrupts, see the NOTE on
 * atomic_cmpxchg above) adds @a to v->counter unless the counter
 * equals @u.  Returns non-zero if the add was performed.
 */
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	int ret;
	unsigned long flags;

	local_irq_save(flags);
	ret = v->counter;
	if (ret != u)
		v->counter += a;
	local_irq_restore(flags);

	return ret != u;
}
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

/* Atomic operations are already serializing on SH */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
/* As above: atomic ops serialize on SH, so these are compiler barriers only. */
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

/* Pull in the generic helpers (e.g. atomic_long_t wrappers) last, so they
 * see the definitions above. */
#include <asm-generic/atomic.h>
#endif /* __ASM_SH_ATOMIC_H */