#ifndef __ASM_SH64_BITOPS_H
#define __ASM_SH64_BITOPS_H

/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * include/asm-sh64/bitops.h
 *
 * Copyright (C) 2000, 2001  Paolo Alberelli
 * Copyright (C) 2003  Paul Mundt
 */

#ifdef __KERNEL__
#include <linux/compiler.h>
#include <asm/system.h>
/* For __swab32 */
#include <asm/byteorder.h>

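/*
 * The bit operations below gain their atomicity by masking local
 * interrupts around the read-modify-write sequence: each helper locates
 * the 32-bit word containing bit `nr' and updates it with IRQs disabled.
 */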
static __inline__ void set_bit(int nr, volatile void * addr)
{
	int	mask;
	volatile unsigned int *a = addr;
	unsigned long flags;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	local_irq_save(flags);
	*a |= mask;
	local_irq_restore(flags);
}

/*
 * clear_bit() doesn't provide any barrier for the compiler.
 */
#define smp_mb__before_clear_bit()	barrier()
#define smp_mb__after_clear_bit()	barrier()
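/*
 * Callers that need memory ordering around the clear must supply it
 * themselves.  Illustrative pattern only (the bit and word names are
 * placeholders):
 *
 *	smp_mb__before_clear_bit();
 *	clear_bit(MY_PENDING_BIT, &my_flags);
 *	smp_mb__after_clear_bit();
 */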
static inline void clear_bit(int nr, volatile unsigned long *a)
{
	int	mask;
	unsigned long flags;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	local_irq_save(flags);
	*a &= ~mask;
	local_irq_restore(flags);
}

static __inline__ void change_bit(int nr, volatile void * addr)
{
	int	mask;
	volatile unsigned int *a = addr;
	unsigned long flags;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	local_irq_save(flags);
	*a ^= mask;
	local_irq_restore(flags);
}

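/*
 * The test_and_*() variants additionally return the previous value of the
 * bit (non-zero if it was set), with the test and the update performed as
 * one unit under the interrupt mask.
 */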
static __inline__ int test_and_set_bit(int nr, volatile void * addr)
{
	int	mask, retval;
	volatile unsigned int *a = addr;
	unsigned long flags;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	local_irq_save(flags);
	retval = (mask & *a) != 0;
	*a |= mask;
	local_irq_restore(flags);

	return retval;
}

static __inline__ int test_and_clear_bit(int nr, volatile void * addr)
{
	int	mask, retval;
	volatile unsigned int *a = addr;
	unsigned long flags;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	local_irq_save(flags);
	retval = (mask & *a) != 0;
	*a &= ~mask;
	local_irq_restore(flags);

	return retval;
}

static __inline__ int test_and_change_bit(int nr, volatile void * addr)
{
	int	mask, retval;
	volatile unsigned int *a = addr;
	unsigned long flags;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	local_irq_save(flags);
	retval = (mask & *a) != 0;
	*a ^= mask;
	local_irq_restore(flags);

	return retval;
}

#include <asm-generic/bitops/non-atomic.h>

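/*
 * ffz(word) returns the index (counting from bit 0) of the first zero bit
 * in `word'.  The result is undefined if no zero bit exists (word == ~0UL).
 * A portable C sketch of what the SHmedia assembly below computes:
 *
 *	unsigned long k = 0;
 *	while (word & 1) {
 *		word >>= 1;
 *		k++;
 *	}
 *	return k;
 */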
static __inline__ unsigned long ffz(unsigned long word)
{
	unsigned long result, __d2, __d3;

        __asm__("gettr  tr0, %2\n\t"
                "pta    $+32, tr0\n\t"
                "andi   %1, 1, %3\n\t"
                "beq    %3, r63, tr0\n\t"
                "pta    $+4, tr0\n"
                "0:\n\t"
                "shlri.l        %1, 1, %1\n\t"
                "addi   %0, 1, %0\n\t"
                "andi   %1, 1, %3\n\t"
                "beqi   %3, 1, tr0\n"
                "1:\n\t"
                "ptabs  %2, tr0\n\t"
                : "=r" (result), "=r" (word), "=r" (__d2), "=r" (__d3)
                : "0" (0L), "1" (word));

	return result;
}

#include <asm-generic/bitops/__ffs.h>
#include <asm-generic/bitops/find.h>
#include <asm-generic/bitops/hweight.h>
#include <asm-generic/bitops/sched.h>
#include <asm-generic/bitops/ffs.h>
#include <asm-generic/bitops/ext2-non-atomic.h>
#include <asm-generic/bitops/ext2-atomic.h>
#include <asm-generic/bitops/minix.h>
#include <asm-generic/bitops/fls.h>
#include <asm-generic/bitops/fls64.h>

#endif /* __KERNEL__ */

#endif /* __ASM_SH64_BITOPS_H */