/* bitops.h -- Linux-compatible bit operations (FreeBSD LinuxKPI, SVN r219820) */
/*-
 * Copyright (c) 2010 Isilon Systems, Inc.
 * Copyright (c) 2010 iX Systems, Inc.
 * Copyright (c) 2010 Panasas, Inc.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice unmodified, this list of conditions, and the following
 *    disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#ifndef	_LINUX_BITOPS_H_
#define	_LINUX_BITOPS_H_

/* Width of "unsigned long" in bits for the target ABI. */
#ifdef __LP64__
#define	BITS_PER_LONG		64
#else
#define	BITS_PER_LONG		32
#endif
/*
 * Mask with the "n" least significant bits set.  "n" must be in
 * [1, BITS_PER_LONG]: n == 0 would shift by the full word width,
 * which is undefined behavior in C.  All users in this file guard
 * against n == 0 before expanding the macro.
 *
 * NOTE(review): this differs from Linux's BIT_MASK(), which yields a
 * single bit (1UL << (n % BITS_PER_LONG)); here the low-bits-mask
 * form is what every local caller wants.
 */
#define	BIT_MASK(n)		(~0UL >> (BITS_PER_LONG - (n)))
/* Number of longs needed to store an n-bit bitmap (howmany() rounds up). */
#define	BITS_TO_LONGS(n)	howmany((n), BITS_PER_LONG)
38219820Sjeff
/*
 * Index of the least significant set bit in "mask", counting from 0.
 * Returns -1 when no bit is set (ffs() returns 0 in that case).
 */
static inline int
__ffs(int mask)
{
	int first;

	first = ffs(mask);
	return (first - 1);
}
44219820Sjeff
/*
 * Index of the most significant set bit in "mask", counting from 0.
 * Returns -1 when no bit is set (fls() returns 0 in that case).
 */
static inline int
__fls(int mask)
{
	int last;

	last = fls(mask);
	return (last - 1);
}
50219820Sjeff
/*
 * Long variant of __ffs(): index of the least significant set bit in
 * "mask", counting from 0, or -1 when "mask" is zero.
 */
static inline int
__ffsl(long mask)
{
	int first;

	first = ffsl(mask);
	return (first - 1);
}
56219820Sjeff
/*
 * Long variant of __fls(): index of the most significant set bit in
 * "mask", counting from 0, or -1 when "mask" is zero.
 */
static inline int
__flsl(long mask)
{
	int last;

	last = flsl(mask);
	return (last - 1);
}
62219820Sjeff

/* Index of the first zero bit in "mask"; -1 when "mask" is all ones. */
#define	ffz(mask)	__ffs(~(mask))

66219820Sjeffstatic inline unsigned long
67219820Sjefffind_first_bit(unsigned long *addr, unsigned long size)
68219820Sjeff{
69219820Sjeff	long mask;
70219820Sjeff	int bit;
71219820Sjeff
72219820Sjeff	for (bit = 0; size >= BITS_PER_LONG;
73219820Sjeff	    size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
74219820Sjeff		if (*addr == 0)
75219820Sjeff			continue;
76219820Sjeff		return (bit + __ffsl(*addr));
77219820Sjeff	}
78219820Sjeff	if (size) {
79219820Sjeff		mask = (*addr) & BIT_MASK(size);
80219820Sjeff		if (mask)
81219820Sjeff			bit += __ffsl(mask);
82219820Sjeff		else
83219820Sjeff			bit += size;
84219820Sjeff	}
85219820Sjeff	return (bit);
86219820Sjeff}
87219820Sjeff
88219820Sjeffstatic inline unsigned long
89219820Sjefffind_first_zero_bit(unsigned long *addr, unsigned long size)
90219820Sjeff{
91219820Sjeff	long mask;
92219820Sjeff	int bit;
93219820Sjeff
94219820Sjeff	for (bit = 0; size >= BITS_PER_LONG;
95219820Sjeff	    size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
96219820Sjeff		if (~(*addr) == 0)
97219820Sjeff			continue;
98219820Sjeff		return (bit + __ffsl(~(*addr)));
99219820Sjeff	}
100219820Sjeff	if (size) {
101219820Sjeff		mask = ~(*addr) & BIT_MASK(size);
102219820Sjeff		if (mask)
103219820Sjeff			bit += __ffsl(mask);
104219820Sjeff		else
105219820Sjeff			bit += size;
106219820Sjeff	}
107219820Sjeff	return (bit);
108219820Sjeff}
109219820Sjeff
110219820Sjeffstatic inline unsigned long
111219820Sjefffind_last_bit(unsigned long *addr, unsigned long size)
112219820Sjeff{
113219820Sjeff	long mask;
114219820Sjeff	int offs;
115219820Sjeff	int bit;
116219820Sjeff	int pos;
117219820Sjeff
118219820Sjeff	pos = size / BITS_PER_LONG;
119219820Sjeff	offs = size % BITS_PER_LONG;
120219820Sjeff	bit = BITS_PER_LONG * pos;
121219820Sjeff	addr += pos;
122219820Sjeff	if (offs) {
123219820Sjeff		mask = (*addr) & BIT_MASK(offs);
124219820Sjeff		if (mask)
125219820Sjeff			return (bit + __flsl(mask));
126219820Sjeff	}
127219820Sjeff	while (--pos) {
128219820Sjeff		addr--;
129219820Sjeff		bit -= BITS_PER_LONG;
130219820Sjeff		if (*addr)
131219820Sjeff			return (bit + __flsl(mask));
132219820Sjeff	}
133219820Sjeff	return (size);
134219820Sjeff}
135219820Sjeff
136219820Sjeffstatic inline unsigned long
137219820Sjefffind_next_bit(unsigned long *addr, unsigned long size, unsigned long offset)
138219820Sjeff{
139219820Sjeff	long mask;
140219820Sjeff	int offs;
141219820Sjeff	int bit;
142219820Sjeff	int pos;
143219820Sjeff
144219820Sjeff	if (offset >= size)
145219820Sjeff		return (size);
146219820Sjeff	pos = offset / BITS_PER_LONG;
147219820Sjeff	offs = offset % BITS_PER_LONG;
148219820Sjeff	bit = BITS_PER_LONG * pos;
149219820Sjeff	addr += pos;
150219820Sjeff	if (offs) {
151219820Sjeff		mask = (*addr) & ~BIT_MASK(offs);
152219820Sjeff		if (mask)
153219820Sjeff			return (bit + __ffsl(mask));
154219820Sjeff		bit += BITS_PER_LONG;
155219820Sjeff		addr++;
156219820Sjeff	}
157219820Sjeff	for (size -= bit; size >= BITS_PER_LONG;
158219820Sjeff	    size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
159219820Sjeff		if (*addr == 0)
160219820Sjeff			continue;
161219820Sjeff		return (bit + __ffsl(*addr));
162219820Sjeff	}
163219820Sjeff	if (size) {
164219820Sjeff		mask = (*addr) & BIT_MASK(size);
165219820Sjeff		if (mask)
166219820Sjeff			bit += __ffsl(mask);
167219820Sjeff		else
168219820Sjeff			bit += size;
169219820Sjeff	}
170219820Sjeff	return (bit);
171219820Sjeff}
172219820Sjeff
173219820Sjeffstatic inline unsigned long
174219820Sjefffind_next_zero_bit(unsigned long *addr, unsigned long size,
175219820Sjeff    unsigned long offset)
176219820Sjeff{
177219820Sjeff	long mask;
178219820Sjeff	int offs;
179219820Sjeff	int bit;
180219820Sjeff	int pos;
181219820Sjeff
182219820Sjeff	if (offset >= size)
183219820Sjeff		return (size);
184219820Sjeff	pos = offset / BITS_PER_LONG;
185219820Sjeff	offs = offset % BITS_PER_LONG;
186219820Sjeff	bit = BITS_PER_LONG * pos;
187219820Sjeff	addr += pos;
188219820Sjeff	if (offs) {
189219820Sjeff		mask = ~(*addr) & ~BIT_MASK(offs);
190219820Sjeff		if (mask)
191219820Sjeff			return (bit + __ffsl(mask));
192219820Sjeff		bit += BITS_PER_LONG;
193219820Sjeff		addr++;
194219820Sjeff	}
195219820Sjeff	for (size -= bit; size >= BITS_PER_LONG;
196219820Sjeff	    size -= BITS_PER_LONG, bit += BITS_PER_LONG, addr++) {
197219820Sjeff		if (~(*addr) == 0)
198219820Sjeff			continue;
199219820Sjeff		return (bit + __ffsl(~(*addr)));
200219820Sjeff	}
201219820Sjeff	if (size) {
202219820Sjeff		mask = ~(*addr) & BIT_MASK(size);
203219820Sjeff		if (mask)
204219820Sjeff			bit += __ffsl(mask);
205219820Sjeff		else
206219820Sjeff			bit += size;
207219820Sjeff	}
208219820Sjeff	return (bit);
209219820Sjeff}
210219820Sjeff
/* Clear all "size" bits of the bitmap at "addr" (whole longs at a time). */
static inline void
bitmap_zero(unsigned long *addr, int size)
{
	memset(addr, 0, BITS_TO_LONGS(size) * sizeof(long));
}
219219820Sjeff
220219820Sjeffstatic inline void
221219820Sjeffbitmap_fill(unsigned long *addr, int size)
222219820Sjeff{
223219820Sjeff	int tail;
224219820Sjeff	int len;
225219820Sjeff
226219820Sjeff	len = (size / BITS_PER_LONG) * sizeof(long);
227219820Sjeff	memset(addr, 0xff, len);
228219820Sjeff	tail = size & (BITS_PER_LONG - 1);
229219820Sjeff	if (tail)
230219820Sjeff		addr[size / BITS_PER_LONG] = BIT_MASK(tail);
231219820Sjeff}
232219820Sjeff
233219820Sjeffstatic inline int
234219820Sjeffbitmap_full(unsigned long *addr, int size)
235219820Sjeff{
236219820Sjeff	long mask;
237219820Sjeff	int tail;
238219820Sjeff	int len;
239219820Sjeff	int i;
240219820Sjeff
241219820Sjeff	len = size / BITS_PER_LONG;
242219820Sjeff	for (i = 0; i < len; i++)
243219820Sjeff		if (addr[i] != ~0UL)
244219820Sjeff			return (0);
245219820Sjeff	tail = size & (BITS_PER_LONG - 1);
246219820Sjeff	if (tail) {
247219820Sjeff		mask = BIT_MASK(tail);
248219820Sjeff		if ((addr[i] & mask) != mask)
249219820Sjeff			return (0);
250219820Sjeff	}
251219820Sjeff	return (1);
252219820Sjeff}
253219820Sjeff
254219820Sjeffstatic inline int
255219820Sjeffbitmap_empty(unsigned long *addr, int size)
256219820Sjeff{
257219820Sjeff	long mask;
258219820Sjeff	int tail;
259219820Sjeff	int len;
260219820Sjeff	int i;
261219820Sjeff
262219820Sjeff	len = size / BITS_PER_LONG;
263219820Sjeff	for (i = 0; i < len; i++)
264219820Sjeff		if (addr[i] != 0)
265219820Sjeff			return (0);
266219820Sjeff	tail = size & (BITS_PER_LONG - 1);
267219820Sjeff	if (tail) {
268219820Sjeff		mask = BIT_MASK(tail);
269219820Sjeff		if ((addr[i] & mask) != 0)
270219820Sjeff			return (0);
271219820Sjeff	}
272219820Sjeff	return (1);
273219820Sjeff}
274219820Sjeff
/* Number of bits in an int; the atomic bit macros below work on int words. */
#define	NBINT	(NBBY * sizeof(int))

/*
 * Atomically set bit "i" in bitmap "a".
 * NOTE(review): these macros index the bitmap as 32-bit ints while the
 * helpers above index it as longs; on LP64 the two layouts agree only
 * on little-endian machines -- confirm callers never mix them on
 * big-endian targets.
 */
#define	set_bit(i, a)							\
    atomic_set_int(&((volatile int *)(a))[(i)/NBINT], 1 << (i) % NBINT)

/* Atomically clear bit "i" in bitmap "a". */
#define	clear_bit(i, a)							\
    atomic_clear_int(&((volatile int *)(a))[(i)/NBINT], 1 << (i) % NBINT)

/* Atomically (acquire-load) test bit "i" in bitmap "a"; yields 0 or 1. */
#define	test_bit(i, a)							\
    !!(atomic_load_acq_int(&((volatile int *)(a))[(i)/NBINT]) & 1 << ((i) % NBINT))
285219820Sjeff
/*
 * Atomically clear bit "bit" in *var and return its previous value
 * (0 or 1), via a compare-and-swap retry loop.
 *
 * Fix: the original computed "bit = 1 << bit", shifting an int; that
 * is undefined behavior for bit >= 31 even though "long" holds 64
 * bits on LP64.  Build the mask with 1UL so the shift is done in
 * unsigned long arithmetic.
 */
static inline long
test_and_clear_bit(long bit, long *var)
{
	unsigned long mask;
	long val;

	mask = 1UL << bit;
	do {
		val = *(volatile long *)var;
	} while (atomic_cmpset_long(var, val, val & ~mask) == 0);

	return !!(val & mask);
}
298219820Sjeff
/*
 * Atomically set bit "bit" in *var and return its previous value
 * (0 or 1), via a compare-and-swap retry loop.
 *
 * Fix: the original computed "bit = 1 << bit", shifting an int; that
 * is undefined behavior for bit >= 31 even though "long" holds 64
 * bits on LP64.  Build the mask with 1UL so the shift is done in
 * unsigned long arithmetic.
 */
static inline long
test_and_set_bit(long bit, long *var)
{
	unsigned long mask;
	long val;

	mask = 1UL << bit;
	do {
		val = *(volatile long *)var;
	} while (atomic_cmpset_long(var, val, val | mask) == 0);

	return !!(val & mask);
}

#endif	/* _LINUX_BITOPS_H_ */