| Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 1 | #ifndef _LINUX_BITOPS_H | 
 | 2 | #define _LINUX_BITOPS_H | 
 | 3 | #include <asm/types.h> | 
 | 4 |  | 
#ifdef	__KERNEL__
/* BIT(nr): an unsigned long with only bit @nr set; nr must be < BITS_PER_LONG. */
#define BIT(nr)			(1UL << (nr))
/* Mask selecting bit @nr within its containing word. */
#define BIT_MASK(nr)		(1UL << ((nr) % BITS_PER_LONG))
/* Index of the unsigned long word that holds bit @nr of a bitmap. */
#define BIT_WORD(nr)		((nr) / BITS_PER_LONG)
#define BITS_PER_BYTE		8
/* Number of unsigned longs needed to store a bitmap of @nr bits (rounds up). */
#define BITS_TO_LONGS(nr)	DIV_ROUND_UP(nr, BITS_PER_BYTE * sizeof(long))
#endif
 | 12 |  | 
/*
 * Software fallback implementations of population count (Hamming weight),
 * for architectures without a hardware popcount instruction.
 */
extern unsigned int __sw_hweight8(unsigned int w);
extern unsigned int __sw_hweight16(unsigned int w);
extern unsigned int __sw_hweight32(unsigned int w);
extern unsigned long __sw_hweight64(__u64 w);
 | 17 |  | 
| Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 18 | /* | 
| Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 19 |  * Include this here because some architectures need generic_ffs/fls in | 
 | 20 |  * scope | 
 | 21 |  */ | 
 | 22 | #include <asm/bitops.h> | 
 | 23 |  | 
/*
 * for_each_set_bit - iterate over every set bit in a memory region
 * @bit: the bit iterator (an unsigned long bit index)
 * @addr: the bitmap to scan
 * @size: the bitmap size in bits
 */
#define for_each_set_bit(bit, addr, size) \
	for ((bit) = find_first_bit((addr), (size)); \
	     (bit) < (size); \
	     (bit) = find_next_bit((addr), (size), (bit) + 1))
 | 28 |  | 
/*
 * get_bitmask_order - one plus the index of the highest set bit
 * @count: the value to examine
 *
 * Returns fls(count): 0 when @count is 0, otherwise the position
 * (1-based) of the most significant set bit.
 */
static __inline__ int get_bitmask_order(unsigned int count)
{
	/* We could be slightly more clever with -1 here... */
	return fls(count);
}
 | 36 |  | 
/*
 * get_count_order - log2 of @count, rounded up
 * @count: the value to examine
 *
 * For an exact power of two this is fls(count) - 1; anything else
 * is rounded up to the next order.
 */
static __inline__ int get_count_order(unsigned int count)
{
	int order = fls(count) - 1;

	/* Not a power of two: round up to the next order. */
	if (count & (count - 1))
		order++;
	return order;
}
 | 46 |  | 
/*
 * hweight_long - count the set bits in an unsigned long
 * @w: the value to weigh
 *
 * Dispatches at compile time on the width of long.
 */
static inline unsigned long hweight_long(unsigned long w)
{
	if (sizeof(w) == 4)
		return hweight32(w);
	return hweight64(w);
}
 | 51 |  | 
/**
 * rol32 - rotate a 32-bit value left
 * @word: value to rotate
 * @shift: bits to roll
 *
 * Masking both shift counts avoids the undefined behaviour of
 * shifting a 32-bit value by 32 when @shift is 0 (C forbids shift
 * counts >= the width of the type); a rotate by 0 now returns @word.
 */
static inline __u32 rol32(__u32 word, unsigned int shift)
{
	return (word << (shift & 31)) | (word >> ((-shift) & 31));
}
 | 61 |  | 
/**
 * ror32 - rotate a 32-bit value right
 * @word: value to rotate
 * @shift: bits to roll
 *
 * Masking both shift counts avoids the undefined behaviour of
 * shifting a 32-bit value by 32 when @shift is 0 (C forbids shift
 * counts >= the width of the type); a rotate by 0 now returns @word.
 */
static inline __u32 ror32(__u32 word, unsigned int shift)
{
	return (word >> (shift & 31)) | (word << ((-shift) & 31));
}
 | 71 |  | 
/**
 * rol16 - rotate a 16-bit value left
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u16 rol16(__u16 word, unsigned int shift)
{
	__u16 hi = word << shift;
	__u16 lo = word >> (16 - shift);

	return hi | lo;
}
 | 81 |  | 
 | 82 | /** | 
 | 83 |  * ror16 - rotate a 16-bit value right | 
 | 84 |  * @word: value to rotate | 
 | 85 |  * @shift: bits to roll | 
 | 86 |  */ | 
 | 87 | static inline __u16 ror16(__u16 word, unsigned int shift) | 
 | 88 | { | 
 | 89 | 	return (word >> shift) | (word << (16 - shift)); | 
 | 90 | } | 
 | 91 |  | 
 | 92 | /** | 
 | 93 |  * rol8 - rotate an 8-bit value left | 
 | 94 |  * @word: value to rotate | 
 | 95 |  * @shift: bits to roll | 
 | 96 |  */ | 
 | 97 | static inline __u8 rol8(__u8 word, unsigned int shift) | 
 | 98 | { | 
 | 99 | 	return (word << shift) | (word >> (8 - shift)); | 
 | 100 | } | 
 | 101 |  | 
 | 102 | /** | 
 | 103 |  * ror8 - rotate an 8-bit value right | 
 | 104 |  * @word: value to rotate | 
 | 105 |  * @shift: bits to roll | 
 | 106 |  */ | 
 | 107 | static inline __u8 ror8(__u8 word, unsigned int shift) | 
 | 108 | { | 
 | 109 | 	return (word >> shift) | (word << (8 - shift)); | 
 | 110 | } | 
 | 111 |  | 
/**
 * sign_extend32 - sign extend a 32-bit value using specified bit as sign-bit
 * @value: value to sign extend
 * @index: 0 based bit index (0<=index<32) to sign bit
 */
static inline __s32 sign_extend32(__u32 value, int index)
{
	__u8 bits = 31 - index;

	/* Move the sign bit up to bit 31, then arithmetic-shift back down. */
	return (__s32)(value << bits) >> bits;
}
 | 122 |  | 
/*
 * fls_long - find last set bit in an unsigned long
 * @l: the value to search
 *
 * Selects the 32- or 64-bit fls variant based on the width of long.
 */
static inline unsigned fls_long(unsigned long l)
{
	return sizeof(l) == 4 ? fls(l) : fls64(l);
}
 | 129 |  | 
/**
 * __ffs64 - find first set bit in a 64 bit word
 * @word: The 64 bit word
 *
 * On 64 bit arches this is a synonym for __ffs
 * The result is not defined if no bits are set, so check that @word
 * is non-zero before calling this.
 */
static inline unsigned long __ffs64(u64 word)
{
#if BITS_PER_LONG == 32
	/* Low half is all zero: the first set bit is in the high 32 bits. */
	if (((u32)word) == 0UL)
		return __ffs((u32)(word >> 32)) + 32;
#elif BITS_PER_LONG != 64
#error BITS_PER_LONG not 32 or 64
#endif
	/* 64-bit, or a 32-bit arch with a set bit in the low half. */
	return __ffs((unsigned long)word);
}
 | 148 |  | 
#ifdef __KERNEL__

#ifndef find_last_bit
/**
 * find_last_bit - find the last set bit in a memory region
 * @addr: The address to start the search at
 * @size: The maximum size to search
 *
 * Returns the bit number of the last set bit, or @size if no bits
 * are set in the region.
 */
extern unsigned long find_last_bit(const unsigned long *addr,
				   unsigned long size);
#endif

#endif /* __KERNEL__ */
| Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 164 | #endif |