blob: be3a228f389b326503b26c5cd086f7f9dd235a6a [file] [log] [blame]
#ifndef _LINUX_BITOPS_H
#define _LINUX_BITOPS_H
#include <asm/types.h>

#ifdef __KERNEL__
/* A single bit at position @nr; only meaningful for nr < BITS_PER_LONG. */
#define BIT(nr)			(1UL << (nr))
/* Mask selecting bit @nr within the unsigned long word that contains it. */
#define BIT_MASK(nr)		(1UL << ((nr) % BITS_PER_LONG))
/* Index of the unsigned long word that holds bit @nr of a bitmap. */
#define BIT_WORD(nr)		((nr) / BITS_PER_LONG)
#define BITS_PER_BYTE		8
/* Number of unsigned longs needed to store @nr bits (DIV_ROUND_UP comes from kernel.h). */
#define BITS_TO_LONGS(nr)	DIV_ROUND_UP(nr, BITS_PER_BYTE * sizeof(long))
#endif
12
/*
 * Create a contiguous bitmask starting at bit position @l and ending at
 * position @h. For example
 * GENMASK_ULL(39, 21) gives us the 64bit vector 0x000000ffffe00000.
 *
 * Implemented with two width-safe shifts rather than
 * "(1 << (h - l + 1)) - 1": the latter shifts by the full type width
 * when h == 31 (or 63), which is undefined behaviour in C, so
 * GENMASK(31, 0) and GENMASK_ULL(63, 0) could not produce an all-ones
 * mask.  Here each shift count stays strictly below the type width.
 */
#define GENMASK(h, l) \
	(((~0UL) << (l)) & (~0UL >> (BITS_PER_LONG - 1 - (h))))
#define GENMASK_ULL(h, l) \
	(((~0ULL) << (l)) & (~0ULL >> (64 - 1 - (h))))
20
/*
 * Generic software population-count (Hamming weight) helpers.
 * Only declared here; the definitions live outside this header.
 */
extern unsigned int __sw_hweight8(unsigned int w);
extern unsigned int __sw_hweight16(unsigned int w);
extern unsigned int __sw_hweight32(unsigned int w);
extern unsigned long __sw_hweight64(__u64 w);
/*
 * Include this here because some architectures need generic_ffs/fls in
 * scope
 */
#include <asm/bitops.h>

/*
 * Iterate over each set bit of the bitmap @addr, which is @size bits
 * long; @bit holds the current bit number on every pass.
 */
#define for_each_set_bit(bit, addr, size) \
	for ((bit) = find_first_bit((addr), (size));		\
	     (bit) < (size);					\
	     (bit) = find_next_bit((addr), (size), (bit) + 1))

/* same as for_each_set_bit() but use bit as value to start with */
#define for_each_set_bit_from(bit, addr, size) \
	for ((bit) = find_next_bit((addr), (size), (bit));	\
	     (bit) < (size);					\
	     (bit) = find_next_bit((addr), (size), (bit) + 1))

/* Like for_each_set_bit(), but visits the clear (zero) bits instead. */
#define for_each_clear_bit(bit, addr, size) \
	for ((bit) = find_first_zero_bit((addr), (size));	\
	     (bit) < (size);					\
	     (bit) = find_next_zero_bit((addr), (size), (bit) + 1))

/* same as for_each_clear_bit() but use bit as value to start with */
#define for_each_clear_bit_from(bit, addr, size) \
	for ((bit) = find_next_zero_bit((addr), (size), (bit));	\
	     (bit) < (size);					\
	     (bit) = find_next_zero_bit((addr), (size), (bit) + 1))
static __inline__ int get_bitmask_order(unsigned int count)
{
	/*
	 * fls() yields one past the index of the highest set bit, which
	 * is exactly the number of bits a mask covering @count needs.
	 */
	return fls(count);	/* We could be slightly more clever with -1 here... */
}
61
static __inline__ int get_count_order(unsigned int count)
{
	/*
	 * An exact power of two needs order fls(count) - 1; anything
	 * with extra bits set must round up to the next order.
	 */
	if (count & (count - 1))
		return fls(count);
	return fls(count) - 1;
}
71
static inline unsigned long hweight_long(unsigned long w)
{
	/* Dispatch on the compile-time width of unsigned long. */
	if (sizeof(w) == 4)
		return hweight32(w);
	return hweight64(w);
}
76
Robert P. J. Day45f8bde2007-01-26 00:57:09 -080077/**
Alexey Dobriyanf2ea0f52012-01-14 21:44:49 +030078 * rol64 - rotate a 64-bit value left
79 * @word: value to rotate
80 * @shift: bits to roll
81 */
82static inline __u64 rol64(__u64 word, unsigned int shift)
83{
84 return (word << shift) | (word >> (64 - shift));
85}
86
87/**
88 * ror64 - rotate a 64-bit value right
89 * @word: value to rotate
90 * @shift: bits to roll
91 */
92static inline __u64 ror64(__u64 word, unsigned int shift)
93{
94 return (word >> shift) | (word << (64 - shift));
95}
96
97/**
Linus Torvalds1da177e2005-04-16 15:20:36 -070098 * rol32 - rotate a 32-bit value left
Linus Torvalds1da177e2005-04-16 15:20:36 -070099 * @word: value to rotate
100 * @shift: bits to roll
101 */
102static inline __u32 rol32(__u32 word, unsigned int shift)
103{
104 return (word << shift) | (word >> (32 - shift));
105}
106
Robert P. J. Day45f8bde2007-01-26 00:57:09 -0800107/**
Linus Torvalds1da177e2005-04-16 15:20:36 -0700108 * ror32 - rotate a 32-bit value right
Linus Torvalds1da177e2005-04-16 15:20:36 -0700109 * @word: value to rotate
110 * @shift: bits to roll
111 */
112static inline __u32 ror32(__u32 word, unsigned int shift)
113{
114 return (word >> shift) | (word << (32 - shift));
115}
116
Harvey Harrison3afe3922008-03-28 14:16:01 -0700117/**
118 * rol16 - rotate a 16-bit value left
119 * @word: value to rotate
120 * @shift: bits to roll
121 */
122static inline __u16 rol16(__u16 word, unsigned int shift)
123{
124 return (word << shift) | (word >> (16 - shift));
125}
126
127/**
128 * ror16 - rotate a 16-bit value right
129 * @word: value to rotate
130 * @shift: bits to roll
131 */
132static inline __u16 ror16(__u16 word, unsigned int shift)
133{
134 return (word >> shift) | (word << (16 - shift));
135}
136
137/**
138 * rol8 - rotate an 8-bit value left
139 * @word: value to rotate
140 * @shift: bits to roll
141 */
142static inline __u8 rol8(__u8 word, unsigned int shift)
143{
144 return (word << shift) | (word >> (8 - shift));
145}
146
147/**
148 * ror8 - rotate an 8-bit value right
149 * @word: value to rotate
150 * @shift: bits to roll
151 */
152static inline __u8 ror8(__u8 word, unsigned int shift)
153{
154 return (word >> shift) | (word << (8 - shift));
155}
156
Andreas Herrmann7919a572010-08-30 19:04:01 +0000157/**
158 * sign_extend32 - sign extend a 32-bit value using specified bit as sign-bit
159 * @value: value to sign extend
160 * @index: 0 based bit index (0<=index<32) to sign bit
161 */
162static inline __s32 sign_extend32(__u32 value, int index)
163{
164 __u8 shift = 31 - index;
165 return (__s32)(value << shift) >> shift;
166}
167
static inline unsigned fls_long(unsigned long l)
{
	/* Pick the fls variant matching the width of unsigned long. */
	return sizeof(l) == 4 ? fls(l) : fls64(l);
}
174
/**
 * __ffs64 - find first set bit in a 64 bit word
 * @word: The 64 bit word
 *
 * On 64 bit arches this is a synonym for __ffs
 * The result is not defined if no bits are set, so check that @word
 * is non-zero before calling this.
 */
static inline unsigned long __ffs64(u64 word)
{
#if BITS_PER_LONG == 32
	/* Low 32 bits empty: the first set bit must be in the high half. */
	if (((u32)word) == 0UL)
		return __ffs((u32)(word >> 32)) + 32;
#elif BITS_PER_LONG != 64
#error BITS_PER_LONG not 32 or 64
#endif
	return __ffs((unsigned long)word);
}
193
#ifdef __KERNEL__

#ifndef set_mask_bits
/*
 * set_mask_bits - atomically clear the @_mask bits and set the @_bits
 * bits in *@ptr, retrying via cmpxchg() until no concurrent update
 * intervenes.  Evaluates to the new value of *@ptr.
 */
#define set_mask_bits(ptr, _mask, _bits)	\
({								\
	const typeof(*ptr) mask = (_mask), bits = (_bits);	\
	typeof(*ptr) old, new;					\
								\
	do {							\
		old = ACCESS_ONCE(*ptr);			\
		new = (old & ~mask) | bits;			\
	} while (cmpxchg(ptr, old, new) != old);		\
								\
	new;							\
})
#endif
210
#ifndef find_last_bit
/**
 * find_last_bit - find the last set bit in a memory region
 * @addr: The address to start the search at
 * @size: The maximum size to search
 *
 * Returns the bit number of the last set bit, or size.
 */
extern unsigned long find_last_bit(const unsigned long *addr,
				   unsigned long size);
#endif
Rusty Russellab53d472009-01-01 10:12:19 +1030222
Alexander van Heukelum64970b62008-03-11 16:17:19 +0100223#endif /* __KERNEL__ */
Linus Torvalds1da177e2005-04-16 15:20:36 -0700224#endif