/*
 * Copyright 2004-2009 Analog Devices Inc.
 *
 * Licensed under the GPL-2 or later.
 */

#ifndef __ARCH_BLACKFIN_ATOMIC__
#define __ARCH_BLACKFIN_ATOMIC__

#ifndef CONFIG_SMP
# include <asm-generic/atomic.h>
#else

#include <linux/types.h>
#include <asm/system.h>	/* local_irq_XXX() */

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 */

#define ATOMIC_INIT(i)		{ (i) }
#define atomic_set(v, i)	(((v)->counter) = i)

#define atomic_read(v)	__raw_uncached_fetch_asm(&(v)->counter)

asmlinkage int __raw_uncached_fetch_asm(const volatile int *ptr);

asmlinkage int __raw_atomic_update_asm(volatile int *ptr, int value);

asmlinkage int __raw_atomic_clear_asm(volatile int *ptr, int value);

asmlinkage int __raw_atomic_set_asm(volatile int *ptr, int value);

asmlinkage int __raw_atomic_xor_asm(volatile int *ptr, int value);

asmlinkage int __raw_atomic_test_asm(const volatile int *ptr, int value);

static inline void atomic_add(int i, atomic_t *v)
{
	__raw_atomic_update_asm(&v->counter, i);
}

static inline void atomic_sub(int i, atomic_t *v)
{
	__raw_atomic_update_asm(&v->counter, -i);
}

static inline int atomic_add_return(int i, atomic_t *v)
{
	return __raw_atomic_update_asm(&v->counter, i);
}

static inline int atomic_sub_return(int i, atomic_t *v)
{
	return __raw_atomic_update_asm(&v->counter, -i);
}

static inline void atomic_inc(volatile atomic_t *v)
{
	__raw_atomic_update_asm(&v->counter, 1);
}

static inline void atomic_dec(volatile atomic_t *v)
{
	__raw_atomic_update_asm(&v->counter, -1);
}

static inline void atomic_clear_mask(int mask, atomic_t *v)
{
	__raw_atomic_clear_asm(&v->counter, mask);
}

static inline void atomic_set_mask(int mask, atomic_t *v)
{
	__raw_atomic_set_asm(&v->counter, mask);
}

static inline int atomic_test_mask(int mask, atomic_t *v)
{
	return __raw_atomic_test_asm(&v->counter, mask);
}

/* Atomic operations are already serializing */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)
#define atomic_dec_return(v)		atomic_sub_return(1, (v))
#define atomic_inc_return(v)		atomic_add_return(1, (v))

#define atomic_cmpxchg(v, o, n)	((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new)	(xchg(&((v)->counter), new))

#define atomic_add_unless(v, a, u)				\
({								\
	int c, old;						\
	c = atomic_read(v);					\
	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
		c = old;					\
	c != (u);						\
})
#define atomic_inc_not_zero(v)	atomic_add_unless((v), 1, 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v)	(atomic_inc_return(v) == 0)

#define atomic_sub_and_test(i, v)	(atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_sub_return(1, (v)) == 0)

#include <asm-generic/atomic-long.h>

#endif

#endif