/* Source path: /asuswrt-rt-n18u-9.0.0.4.380.2695/release/src-rt-6.x.4708/linux/linux-2.6.36/arch/blackfin/include/asm/ */
/*
 * Copyright 2004-2009 Analog Devices Inc.
 *
 * Licensed under the GPL-2 or later.
 */

#ifndef _BLACKFIN_BITOPS_H
#define _BLACKFIN_BITOPS_H

#include <linux/compiler.h>

#include <asm-generic/bitops/__ffs.h>
#include <asm-generic/bitops/ffz.h>
#include <asm-generic/bitops/fls.h>
#include <asm-generic/bitops/__fls.h>
#include <asm-generic/bitops/fls64.h>
#include <asm-generic/bitops/find.h>

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif
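/*
 * Editor's note (illustrative, not part of the original header): the #error
 * above enforces that callers go through the generic wrapper, because
 * <linux/bitops.h> defines _LINUX_BITOPS_H before pulling in <asm/bitops.h>.
 * A hypothetical consumer would therefore write:
 *
 *	#include <linux/bitops.h>	// correct: wrapper includes <asm/bitops.h>
 *	// #include <asm/bitops.h>	// wrong: trips the #error above
 */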

#include <asm-generic/bitops/sched.h>
#include <asm-generic/bitops/ffs.h>
#include <asm-generic/bitops/const_hweight.h>
#include <asm-generic/bitops/lock.h>

#include <asm-generic/bitops/ext2-non-atomic.h>
#include <asm-generic/bitops/ext2-atomic.h>
#include <asm-generic/bitops/minix.h>

#ifndef CONFIG_SMP
#include <linux/irqflags.h>

/*
 * clear_bit may not imply a memory barrier
 */
#ifndef smp_mb__before_clear_bit
#define smp_mb__before_clear_bit()	smp_mb()
#define smp_mb__after_clear_bit()	smp_mb()
#endif
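/*
 * Editor's sketch (hypothetical usage, not from the original file): the
 * comment above means clear_bit() by itself may not order surrounding
 * memory accesses, so callers that use it as a release-style operation
 * bracket it with the helpers defined here, e.g.:
 *
 *	smp_mb__before_clear_bit();		// order earlier stores before the clear
 *	clear_bit(SOME_FLAG_BIT, &some_flags);	// hypothetical flag word and bit number
 *	smp_mb__after_clear_bit();		// order the clear before later accesses
 */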
#include <asm-generic/bitops/atomic.h>
#include <asm-generic/bitops/non-atomic.h>
#else

#include <asm/byteorder.h>	/* swab32 */
#include <linux/linkage.h>

asmlinkage int __raw_bit_set_asm(volatile unsigned long *addr, int nr);

asmlinkage int __raw_bit_clear_asm(volatile unsigned long *addr, int nr);

asmlinkage int __raw_bit_toggle_asm(volatile unsigned long *addr, int nr);

asmlinkage int __raw_bit_test_set_asm(volatile unsigned long *addr, int nr);

asmlinkage int __raw_bit_test_clear_asm(volatile unsigned long *addr, int nr);

asmlinkage int __raw_bit_test_toggle_asm(volatile unsigned long *addr, int nr);

asmlinkage int __raw_bit_test_asm(const volatile unsigned long *addr, int nr);

static inline void set_bit(int nr, volatile unsigned long *addr)
{
	volatile unsigned long *a = addr + (nr >> 5);
	__raw_bit_set_asm(a, nr & 0x1f);
}

static inline void clear_bit(int nr, volatile unsigned long *addr)
{
	volatile unsigned long *a = addr + (nr >> 5);
	__raw_bit_clear_asm(a, nr & 0x1f);
}

static inline void change_bit(int nr, volatile unsigned long *addr)
{
	volatile unsigned long *a = addr + (nr >> 5);
	__raw_bit_toggle_asm(a, nr & 0x1f);
}

static inline int test_bit(int nr, const volatile unsigned long *addr)
{
	volatile const unsigned long *a = addr + (nr >> 5);
	return __raw_bit_test_asm(a, nr & 0x1f) != 0;
}

static inline int test_and_set_bit(int nr, volatile unsigned long *addr)
{
	volatile unsigned long *a = addr + (nr >> 5);
	return __raw_bit_test_set_asm(a, nr & 0x1f);
}

static inline int test_and_clear_bit(int nr, volatile unsigned long *addr)
{
	volatile unsigned long *a = addr + (nr >> 5);
	return __raw_bit_test_clear_asm(a, nr & 0x1f);
}

static inline int test_and_change_bit(int nr, volatile unsigned long *addr)
{
	volatile unsigned long *a = addr + (nr >> 5);
	return __raw_bit_test_toggle_asm(a, nr & 0x1f);
}
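/*
 * Editor's sketch of the indexing convention used by the helpers above
 * (assumed example values, not part of the original file): a bit number nr
 * is split into a 32-bit word index (nr >> 5) and a bit offset within that
 * word (nr & 0x1f), so bit 37 of a two-word bitmap lands in word 1 at
 * offset 5:
 *
 *	unsigned long bitmap[2] = { 0, 0 };
 *	set_bit(37, bitmap);			// bitmap[1] |= 1UL << 5
 *	test_bit(37, bitmap);			// reads bit 5 of bitmap[1], returns 1
 *	test_and_clear_bit(37, bitmap);		// returns nonzero (bit was set) and clears it
 */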

/*
 * clear_bit() doesn't provide any barrier for the compiler.
 */
#define smp_mb__before_clear_bit()	barrier()
#define smp_mb__after_clear_bit()	barrier()

#include <asm-generic/bitops/non-atomic.h>

#endif /* CONFIG_SMP */

/*
 * hweightN: returns the Hamming weight (i.e. the number
 * of bits set) of an N-bit word
 */

static inline unsigned int __arch_hweight32(unsigned int w)
{
	unsigned int res;

	__asm__ ("%0.l = ONES %1;"
		"%0 = %0.l (Z);"
		: "=d" (res) : "d" (w));
	return res;
}
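/*
 * Editor's sketch (portable illustration, not part of the original file):
 * the ONES instruction above is a 32-bit population count. A plain C
 * equivalent, shown only to document what the asm computes, is the classic
 * "clear the lowest set bit" loop:
 */
static inline unsigned int __hweight32_c_sketch(unsigned int w)
{
	unsigned int res = 0;

	while (w) {
		w &= w - 1;	/* drop the lowest set bit */
		res++;		/* one more bit was set */
	}
	return res;
}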

static inline unsigned int __arch_hweight64(__u64 w)
{
	return __arch_hweight32((unsigned int)(w >> 32)) +
	       __arch_hweight32((unsigned int)w);
}

static inline unsigned int __arch_hweight16(unsigned int w)
{
	return __arch_hweight32(w & 0xffff);
}

static inline unsigned int __arch_hweight8(unsigned int w)
{
	return __arch_hweight32(w & 0xff);
}
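/*
 * Editor's note (worked values, assumed example, not part of the original
 * file): the narrower helpers simply mask before counting, e.g.
 * __arch_hweight16(0xf0f0) == 8 and __arch_hweight8(0xe7) == 6, while
 * __arch_hweight64() sums the counts of the high and low 32-bit halves.
 */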

#endif				/* _BLACKFIN_BITOPS_H */