| Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 1 | /* | 
 | 2 |  * This file is subject to the terms and conditions of the GNU General Public | 
 | 3 |  * License.  See the file "COPYING" in the main directory of this archive | 
 | 4 |  * for more details. | 
 | 5 |  * | 
| Ralf Baechle | 102fa15 | 2007-02-16 17:18:50 +0000 | [diff] [blame] | 6 |  * Copyright (c) 1994 - 1997, 99, 2000, 06, 07  Ralf Baechle (ralf@linux-mips.org) | 
| Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 7 |  * Copyright (c) 1999, 2000  Silicon Graphics, Inc. | 
 | 8 |  */ | 
 | 9 | #ifndef _ASM_BITOPS_H | 
 | 10 | #define _ASM_BITOPS_H | 
 | 11 |  | 
| Jiri Slaby | 0624517 | 2007-10-18 23:40:26 -0700 | [diff] [blame] | 12 | #ifndef _LINUX_BITOPS_H | 
 | 13 | #error only <linux/bitops.h> can be included directly | 
 | 14 | #endif | 
 | 15 |  | 
| Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 16 | #include <linux/compiler.h> | 
 | 17 | #include <linux/types.h> | 
| Ralf Baechle | 0004a9d | 2006-10-31 03:45:07 +0000 | [diff] [blame] | 18 | #include <asm/barrier.h> | 
| Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 19 | #include <asm/byteorder.h>		/* sigh ... */ | 
 | 20 | #include <asm/cpu-features.h> | 
| Ralf Baechle | 4ffd8b3 | 2006-11-30 01:14:50 +0000 | [diff] [blame] | 21 | #include <asm/sgidefs.h> | 
 | 22 | #include <asm/war.h> | 
| Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 23 |  | 
/*
 * Word-size abstraction: select the shift/mask for indexing into an
 * array of unsigned long, and the ll/sc and ins/ext mnemonics (32-bit
 * vs. 64-bit forms) used by the inline assembly below.
 */
#if _MIPS_SZLONG == 32
#define SZLONG_LOG 5
#define SZLONG_MASK 31UL
#define __LL		"ll	"
#define __SC		"sc	"
#define __INS		"ins	"
#define __EXT		"ext	"
#elif _MIPS_SZLONG == 64
#define SZLONG_LOG 6
#define SZLONG_MASK 63UL
#define __LL		"lld	"
#define __SC		"scd	"
#define __INS		"dins	 "
#define __EXT		"dext	 "
#endif
 | 39 |  | 
/*
 * clear_bit() doesn't provide any barrier for the compiler.
 * These macros supply the ordering callers need around clear_bit()
 * when it is used for locking purposes.
 */
#define smp_mb__before_clear_bit()	smp_mb__before_llsc()
#define smp_mb__after_clear_bit()	smp_llsc_mb()
| Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 45 |  | 
/*
 * These are the "slower" versions of the functions and are in bitops.c.
 * These functions call raw_local_irq_{save,restore}().
 * They are the fallback used when the CPU has no ll/sc instructions
 * (see the !kernel_uses_llsc paths below).
 */
void __mips_set_bit(unsigned long nr, volatile unsigned long *addr);
void __mips_clear_bit(unsigned long nr, volatile unsigned long *addr);
void __mips_change_bit(unsigned long nr, volatile unsigned long *addr);
int __mips_test_and_set_bit(unsigned long nr,
			    volatile unsigned long *addr);
int __mips_test_and_set_bit_lock(unsigned long nr,
				 volatile unsigned long *addr);
int __mips_test_and_clear_bit(unsigned long nr,
			      volatile unsigned long *addr);
int __mips_test_and_change_bit(unsigned long nr,
			       volatile unsigned long *addr);
 | 63 |  | 
/*
 * set_bit - Atomically set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * This function is atomic and may not be reordered.  See __set_bit()
 * if you do not require the atomic guarantees.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
{
	/* Word containing the bit, and the bit's position within it. */
	unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
	int bit = nr & SZLONG_MASK;
	unsigned long temp;

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		/*
		 * R10000 ll/sc erratum workaround: retry via the
		 * branch-likely (beqzl) form until sc succeeds.
		 */
		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	" __LL "%0, %1			# set_bit	\n"
		"	or	%0, %2					\n"
		"	" __SC	"%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (*m)
		: "ir" (1UL << bit), "m" (*m));
#ifdef CONFIG_CPU_MIPSR2
	} else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
		/*
		 * MIPS R2: set the bit with a single ins, inserting a
		 * 1-bit field taken from %3 (~0).  Needs a compile-time
		 * constant bit position for the immediate operand.
		 */
		do {
			__asm__ __volatile__(
			"	" __LL "%0, %1		# set_bit	\n"
			"	" __INS "%0, %3, %2, 1			\n"
			"	" __SC "%0, %1				\n"
			: "=&r" (temp), "+m" (*m)
			: "ir" (bit), "r" (~0));
		} while (unlikely(!temp));	/* retry if sc failed */
#endif /* CONFIG_CPU_MIPSR2 */
	} else if (kernel_uses_llsc) {
		/* Generic ll/sc loop; retried in C rather than with beqzl. */
		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	" __LL "%0, %1		# set_bit	\n"
			"	or	%0, %2				\n"
			"	" __SC	"%0, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "+m" (*m)
			: "ir" (1UL << bit));
		} while (unlikely(!temp));
	} else
		/* No ll/sc on this CPU: irq-disabling fallback in bitops.c */
		__mips_set_bit(nr, addr);
}
 | 115 |  | 
/*
 * clear_bit - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit() is atomic and may not be reordered.  However, it does
 * not contain a memory barrier, so if it is used for locking purposes,
 * you should call smp_mb__before_clear_bit() and/or smp_mb__after_clear_bit()
 * in order to ensure changes are visible on other processors.
 */
static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
{
	/* Word containing the bit, and the bit's position within it. */
	unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
	int bit = nr & SZLONG_MASK;
	unsigned long temp;

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		/* R10000 erratum workaround: branch-likely retry loop. */
		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	" __LL "%0, %1			# clear_bit	\n"
		"	and	%0, %2					\n"
		"	" __SC "%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "+m" (*m)
		: "ir" (~(1UL << bit)));
#ifdef CONFIG_CPU_MIPSR2
	} else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
		/*
		 * MIPS R2: clear the bit by inserting a 1-bit field of
		 * zeroes ($0).  Needs a constant bit position.
		 */
		do {
			__asm__ __volatile__(
			"	" __LL "%0, %1		# clear_bit	\n"
			"	" __INS "%0, $0, %2, 1			\n"
			"	" __SC "%0, %1				\n"
			: "=&r" (temp), "+m" (*m)
			: "ir" (bit));
		} while (unlikely(!temp));	/* retry if sc failed */
#endif /* CONFIG_CPU_MIPSR2 */
	} else if (kernel_uses_llsc) {
		/* Generic ll/sc loop: and with the inverted mask. */
		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	" __LL "%0, %1		# clear_bit	\n"
			"	and	%0, %2				\n"
			"	" __SC "%0, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "+m" (*m)
			: "ir" (~(1UL << bit)));
		} while (unlikely(!temp));
	} else
		/* No ll/sc on this CPU: irq-disabling fallback in bitops.c */
		__mips_clear_bit(nr, addr);
}
 | 167 |  | 
/*
 * clear_bit_unlock - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit() is atomic and implies release semantics before the memory
 * operation. It can be used for an unlock.
 */
static inline void clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
{
	/* The barrier must come first: it orders prior accesses before
	 * the releasing clear. */
	smp_mb__before_clear_bit();
	clear_bit(nr, addr);
}
 | 181 |  | 
/*
 * change_bit - Toggle a bit in memory
 * @nr: Bit to change
 * @addr: Address to start counting from
 *
 * change_bit() is atomic and may not be reordered.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
{
	int bit = nr & SZLONG_MASK;	/* bit position within its word */

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		/* R10000 erratum workaround: branch-likely retry loop. */
		__asm__ __volatile__(
		"	.set	mips3				\n"
		"1:	" __LL "%0, %1		# change_bit	\n"
		"	xor	%0, %2				\n"
		"	" __SC	"%0, %1				\n"
		"	beqzl	%0, 1b				\n"
		"	.set	mips0				\n"
		: "=&r" (temp), "+m" (*m)
		: "ir" (1UL << bit));
	} else if (kernel_uses_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		/* Generic ll/sc loop: xor toggles the bit. */
		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	" __LL "%0, %1		# change_bit	\n"
			"	xor	%0, %2				\n"
			"	" __SC	"%0, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "+m" (*m)
			: "ir" (1UL << bit));
		} while (unlikely(!temp));
	} else
		/* No ll/sc on this CPU: irq-disabling fallback in bitops.c */
		__mips_change_bit(nr, addr);
}
 | 225 |  | 
/*
 * test_and_set_bit - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_set_bit(unsigned long nr,
	volatile unsigned long *addr)
{
	int bit = nr & SZLONG_MASK;	/* bit position within its word */
	unsigned long res;

	/* Full barrier before the ll/sc sequence (see also smp_llsc_mb()
	 * at the end): this is what makes the op fully ordered, unlike
	 * test_and_set_bit_lock() which only has acquire semantics. */
	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		/* R10000 erratum workaround: branch-likely retry loop.
		 * The final and extracts the old value of the bit. */
		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	" __LL "%0, %1		# test_and_set_bit	\n"
		"	or	%2, %0, %3				\n"
		"	" __SC	"%2, %1					\n"
		"	beqzl	%2, 1b					\n"
		"	and	%2, %0, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "+m" (*m), "=&r" (res)
		: "r" (1UL << bit)
		: "memory");
	} else if (kernel_uses_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	" __LL "%0, %1	# test_and_set_bit	\n"
			"	or	%2, %0, %3			\n"
			"	" __SC	"%2, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "+m" (*m), "=&r" (res)
			: "r" (1UL << bit)
			: "memory");
		} while (unlikely(!res));	/* res == 0 means sc failed */

		res = temp & (1UL << bit);	/* old value of the bit */
	} else
		res = __mips_test_and_set_bit(nr, addr);

	smp_llsc_mb();

	return res != 0;
}
 | 281 |  | 
/*
 * test_and_set_bit_lock - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and implies acquire ordering semantics
 * after the memory operation.
 */
static inline int test_and_set_bit_lock(unsigned long nr,
	volatile unsigned long *addr)
{
	int bit = nr & SZLONG_MASK;	/* bit position within its word */
	unsigned long res;

	/* Note: no smp_mb__before_llsc() here -- acquire semantics only,
	 * provided by the trailing smp_llsc_mb(). */
	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		/* R10000 erratum workaround: branch-likely retry loop. */
		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	" __LL "%0, %1		# test_and_set_bit	\n"
		"	or	%2, %0, %3				\n"
		"	" __SC	"%2, %1					\n"
		"	beqzl	%2, 1b					\n"
		"	and	%2, %0, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "+m" (*m), "=&r" (res)
		: "r" (1UL << bit)
		: "memory");
	} else if (kernel_uses_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	" __LL "%0, %1	# test_and_set_bit	\n"
			"	or	%2, %0, %3			\n"
			"	" __SC	"%2, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "+m" (*m), "=&r" (res)
			: "r" (1UL << bit)
			: "memory");
		} while (unlikely(!res));	/* res == 0 means sc failed */

		res = temp & (1UL << bit);	/* old value of the bit */
	} else
		res = __mips_test_and_set_bit_lock(nr, addr);

	smp_llsc_mb();

	return res != 0;
}
 | 335 | /* | 
| Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 336 |  * test_and_clear_bit - Clear a bit and return its old value | 
 | 337 |  * @nr: Bit to clear | 
 | 338 |  * @addr: Address to count from | 
 | 339 |  * | 
 | 340 |  * This operation is atomic and cannot be reordered. | 
 | 341 |  * It also implies a memory barrier. | 
 | 342 |  */ | 
 | 343 | static inline int test_and_clear_bit(unsigned long nr, | 
 | 344 | 	volatile unsigned long *addr) | 
 | 345 | { | 
| Jim Quinlan | 9de79c5 | 2012-09-06 11:36:54 -0400 | [diff] [blame] | 346 | 	int bit = nr & SZLONG_MASK; | 
| Ralf Baechle | ff72b7a | 2007-06-07 13:17:30 +0100 | [diff] [blame] | 347 | 	unsigned long res; | 
| Ralf Baechle | b961153 | 2007-03-05 00:56:15 +0000 | [diff] [blame] | 348 |  | 
| David Daney | f252ffd | 2010-01-08 17:17:43 -0800 | [diff] [blame] | 349 | 	smp_mb__before_llsc(); | 
| Nick Piggin | c8f30ae | 2007-10-18 03:06:52 -0700 | [diff] [blame] | 350 |  | 
| David Daney | b791d11 | 2009-07-13 11:15:19 -0700 | [diff] [blame] | 351 | 	if (kernel_uses_llsc && R10000_LLSC_WAR) { | 
| Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 352 | 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG); | 
| Atsushi Nemoto | 8e09ffb | 2007-06-14 00:56:31 +0900 | [diff] [blame] | 353 | 		unsigned long temp; | 
| Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 354 |  | 
 | 355 | 		__asm__ __volatile__( | 
| Maciej W. Rozycki | c4559f6 | 2005-06-23 15:57:15 +0000 | [diff] [blame] | 356 | 		"	.set	mips3					\n" | 
| Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 357 | 		"1:	" __LL	"%0, %1		# test_and_clear_bit	\n" | 
 | 358 | 		"	or	%2, %0, %3				\n" | 
 | 359 | 		"	xor	%2, %3					\n" | 
| Ralf Baechle | 7034228 | 2013-01-22 12:59:30 +0100 | [diff] [blame] | 360 | 		"	" __SC	"%2, %1					\n" | 
| Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 361 | 		"	beqzl	%2, 1b					\n" | 
 | 362 | 		"	and	%2, %0, %3				\n" | 
| Maciej W. Rozycki | aac8aa7 | 2005-06-14 17:35:03 +0000 | [diff] [blame] | 363 | 		"	.set	mips0					\n" | 
| Ralf Baechle | 7837314 | 2010-10-29 19:08:24 +0100 | [diff] [blame] | 364 | 		: "=&r" (temp), "+m" (*m), "=&r" (res) | 
 | 365 | 		: "r" (1UL << bit) | 
| Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 366 | 		: "memory"); | 
| Ralf Baechle | 102fa15 | 2007-02-16 17:18:50 +0000 | [diff] [blame] | 367 | #ifdef CONFIG_CPU_MIPSR2 | 
| David Daney | b791d11 | 2009-07-13 11:15:19 -0700 | [diff] [blame] | 368 | 	} else if (kernel_uses_llsc && __builtin_constant_p(nr)) { | 
| Ralf Baechle | 102fa15 | 2007-02-16 17:18:50 +0000 | [diff] [blame] | 369 | 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG); | 
| Ralf Baechle | ff72b7a | 2007-06-07 13:17:30 +0100 | [diff] [blame] | 370 | 		unsigned long temp; | 
| Ralf Baechle | 102fa15 | 2007-02-16 17:18:50 +0000 | [diff] [blame] | 371 |  | 
| Ralf Baechle | 7837314 | 2010-10-29 19:08:24 +0100 | [diff] [blame] | 372 | 		do { | 
 | 373 | 			__asm__ __volatile__( | 
| Ralf Baechle | 7034228 | 2013-01-22 12:59:30 +0100 | [diff] [blame] | 374 | 			"	" __LL	"%0, %1 # test_and_clear_bit	\n" | 
| Ralf Baechle | 7837314 | 2010-10-29 19:08:24 +0100 | [diff] [blame] | 375 | 			"	" __EXT "%2, %0, %3, 1			\n" | 
| Ralf Baechle | 7034228 | 2013-01-22 12:59:30 +0100 | [diff] [blame] | 376 | 			"	" __INS "%0, $0, %3, 1			\n" | 
 | 377 | 			"	" __SC	"%0, %1				\n" | 
| Ralf Baechle | 7837314 | 2010-10-29 19:08:24 +0100 | [diff] [blame] | 378 | 			: "=&r" (temp), "+m" (*m), "=&r" (res) | 
 | 379 | 			: "ir" (bit) | 
 | 380 | 			: "memory"); | 
 | 381 | 		} while (unlikely(!temp)); | 
| Ralf Baechle | 102fa15 | 2007-02-16 17:18:50 +0000 | [diff] [blame] | 382 | #endif | 
| David Daney | b791d11 | 2009-07-13 11:15:19 -0700 | [diff] [blame] | 383 | 	} else if (kernel_uses_llsc) { | 
| Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 384 | 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG); | 
| Ralf Baechle | ff72b7a | 2007-06-07 13:17:30 +0100 | [diff] [blame] | 385 | 		unsigned long temp; | 
| Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 386 |  | 
| Ralf Baechle | 7837314 | 2010-10-29 19:08:24 +0100 | [diff] [blame] | 387 | 		do { | 
 | 388 | 			__asm__ __volatile__( | 
 | 389 | 			"	.set	mips3				\n" | 
| Ralf Baechle | 7034228 | 2013-01-22 12:59:30 +0100 | [diff] [blame] | 390 | 			"	" __LL	"%0, %1 # test_and_clear_bit	\n" | 
| Ralf Baechle | 7837314 | 2010-10-29 19:08:24 +0100 | [diff] [blame] | 391 | 			"	or	%2, %0, %3			\n" | 
 | 392 | 			"	xor	%2, %3				\n" | 
| Ralf Baechle | 7034228 | 2013-01-22 12:59:30 +0100 | [diff] [blame] | 393 | 			"	" __SC	"%2, %1				\n" | 
| Ralf Baechle | 7837314 | 2010-10-29 19:08:24 +0100 | [diff] [blame] | 394 | 			"	.set	mips0				\n" | 
 | 395 | 			: "=&r" (temp), "+m" (*m), "=&r" (res) | 
 | 396 | 			: "r" (1UL << bit) | 
 | 397 | 			: "memory"); | 
 | 398 | 		} while (unlikely(!res)); | 
 | 399 |  | 
 | 400 | 		res = temp & (1UL << bit); | 
| Jim Quinlan | 92d1159 | 2012-09-06 11:36:55 -0400 | [diff] [blame] | 401 | 	} else | 
 | 402 | 		res = __mips_test_and_clear_bit(nr, addr); | 
| Ralf Baechle | 0004a9d | 2006-10-31 03:45:07 +0000 | [diff] [blame] | 403 |  | 
| Ralf Baechle | 17099b1 | 2007-07-14 13:24:05 +0100 | [diff] [blame] | 404 | 	smp_llsc_mb(); | 
| Ralf Baechle | ff72b7a | 2007-06-07 13:17:30 +0100 | [diff] [blame] | 405 |  | 
 | 406 | 	return res != 0; | 
| Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 407 | } | 
 | 408 |  | 
/*
 * test_and_change_bit - Change a bit and return its old value
 * @nr: Bit to change
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_change_bit(unsigned long nr,
	volatile unsigned long *addr)
{
	int bit = nr & SZLONG_MASK;	/* bit position within its word */
	unsigned long res;

	/* Full barrier before and after (smp_llsc_mb() below) makes this
	 * a fully ordered operation. */
	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		/* R10000 erratum workaround: branch-likely retry loop.
		 * xor toggles the bit; the final and extracts the old value. */
		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	" __LL	"%0, %1		# test_and_change_bit	\n"
		"	xor	%2, %0, %3				\n"
		"	" __SC	"%2, %1					\n"
		"	beqzl	%2, 1b					\n"
		"	and	%2, %0, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "+m" (*m), "=&r" (res)
		: "r" (1UL << bit)
		: "memory");
	} else if (kernel_uses_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	" __LL	"%0, %1 # test_and_change_bit	\n"
			"	xor	%2, %0, %3			\n"
			"	" __SC	"\t%2, %1			\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "+m" (*m), "=&r" (res)
			: "r" (1UL << bit)
			: "memory");
		} while (unlikely(!res));	/* res == 0 means sc failed */

		res = temp & (1UL << bit);	/* old value of the bit */
	} else
		res = __mips_test_and_change_bit(nr, addr);

	smp_llsc_mb();

	return res != 0;
}
 | 464 |  | 
| Akinobu Mita | 3c9ee7e | 2006-03-26 01:39:30 -0800 | [diff] [blame] | 465 | #include <asm-generic/bitops/non-atomic.h> | 
| Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 466 |  | 
/*
 * __clear_bit_unlock - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * __clear_bit() is non-atomic and implies release semantics before the memory
 * operation. It can be used for an unlock if no other CPUs can concurrently
 * modify other bits in the word.
 */
static inline void __clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
{
	/* Full barrier first: orders prior accesses before the releasing
	 * (non-atomic) clear. */
	smp_mb();
	__clear_bit(nr, addr);
}
 | 481 |  | 
 | 482 | /* | 
| Ralf Baechle | 6590326 | 2005-07-12 12:50:30 +0000 | [diff] [blame] | 483 |  * Return the bit position (0..63) of the most significant 1 bit in a word | 
 | 484 |  * Returns -1 if no 1 bit exists | 
 | 485 |  */ | 
| Ralf Baechle | 4816227 | 2008-10-28 09:40:35 +0000 | [diff] [blame] | 486 | static inline unsigned long __fls(unsigned long word) | 
| Ralf Baechle | 6590326 | 2005-07-12 12:50:30 +0000 | [diff] [blame] | 487 | { | 
| Ralf Baechle | 4816227 | 2008-10-28 09:40:35 +0000 | [diff] [blame] | 488 | 	int num; | 
| Ralf Baechle | 6590326 | 2005-07-12 12:50:30 +0000 | [diff] [blame] | 489 |  | 
| Ralf Baechle | 4816227 | 2008-10-28 09:40:35 +0000 | [diff] [blame] | 490 | 	if (BITS_PER_LONG == 32 && | 
| Ralf Baechle | 47740eb | 2009-04-19 03:21:22 +0200 | [diff] [blame] | 491 | 	    __builtin_constant_p(cpu_has_clo_clz) && cpu_has_clo_clz) { | 
| Ralf Baechle | 49a89ef | 2007-10-11 23:46:15 +0100 | [diff] [blame] | 492 | 		__asm__( | 
| Ralf Baechle | ec917c2c | 2005-10-07 16:58:15 +0100 | [diff] [blame] | 493 | 		"	.set	push					\n" | 
 | 494 | 		"	.set	mips32					\n" | 
 | 495 | 		"	clz	%0, %1					\n" | 
 | 496 | 		"	.set	pop					\n" | 
| Ralf Baechle | 4816227 | 2008-10-28 09:40:35 +0000 | [diff] [blame] | 497 | 		: "=r" (num) | 
 | 498 | 		: "r" (word)); | 
| Ralf Baechle | ec917c2c | 2005-10-07 16:58:15 +0100 | [diff] [blame] | 499 |  | 
| Ralf Baechle | 4816227 | 2008-10-28 09:40:35 +0000 | [diff] [blame] | 500 | 		return 31 - num; | 
| Ralf Baechle | ec917c2c | 2005-10-07 16:58:15 +0100 | [diff] [blame] | 501 | 	} | 
 | 502 |  | 
| Ralf Baechle | 4816227 | 2008-10-28 09:40:35 +0000 | [diff] [blame] | 503 | 	if (BITS_PER_LONG == 64 && | 
 | 504 | 	    __builtin_constant_p(cpu_has_mips64) && cpu_has_mips64) { | 
 | 505 | 		__asm__( | 
 | 506 | 		"	.set	push					\n" | 
 | 507 | 		"	.set	mips64					\n" | 
 | 508 | 		"	dclz	%0, %1					\n" | 
 | 509 | 		"	.set	pop					\n" | 
 | 510 | 		: "=r" (num) | 
 | 511 | 		: "r" (word)); | 
| Ralf Baechle | ec917c2c | 2005-10-07 16:58:15 +0100 | [diff] [blame] | 512 |  | 
| Ralf Baechle | 4816227 | 2008-10-28 09:40:35 +0000 | [diff] [blame] | 513 | 		return 63 - num; | 
 | 514 | 	} | 
| Ralf Baechle | 6590326 | 2005-07-12 12:50:30 +0000 | [diff] [blame] | 515 |  | 
| Ralf Baechle | 4816227 | 2008-10-28 09:40:35 +0000 | [diff] [blame] | 516 | 	num = BITS_PER_LONG - 1; | 
 | 517 |  | 
 | 518 | #if BITS_PER_LONG == 64 | 
 | 519 | 	if (!(word & (~0ul << 32))) { | 
 | 520 | 		num -= 32; | 
 | 521 | 		word <<= 32; | 
 | 522 | 	} | 
 | 523 | #endif | 
 | 524 | 	if (!(word & (~0ul << (BITS_PER_LONG-16)))) { | 
 | 525 | 		num -= 16; | 
 | 526 | 		word <<= 16; | 
 | 527 | 	} | 
 | 528 | 	if (!(word & (~0ul << (BITS_PER_LONG-8)))) { | 
 | 529 | 		num -= 8; | 
 | 530 | 		word <<= 8; | 
 | 531 | 	} | 
 | 532 | 	if (!(word & (~0ul << (BITS_PER_LONG-4)))) { | 
 | 533 | 		num -= 4; | 
 | 534 | 		word <<= 4; | 
 | 535 | 	} | 
 | 536 | 	if (!(word & (~0ul << (BITS_PER_LONG-2)))) { | 
 | 537 | 		num -= 2; | 
 | 538 | 		word <<= 2; | 
 | 539 | 	} | 
 | 540 | 	if (!(word & (~0ul << (BITS_PER_LONG-1)))) | 
 | 541 | 		num -= 1; | 
 | 542 | 	return num; | 
| Ralf Baechle | 6590326 | 2005-07-12 12:50:30 +0000 | [diff] [blame] | 543 | } | 
| Ralf Baechle | 6590326 | 2005-07-12 12:50:30 +0000 | [diff] [blame] | 544 |  | 
 | 545 | /* | 
 | 546 |  * __ffs - find first bit in word. | 
| Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 547 |  * @word: The word to search | 
 | 548 |  * | 
| Ralf Baechle | 6590326 | 2005-07-12 12:50:30 +0000 | [diff] [blame] | 549 |  * Returns 0..SZLONG-1 | 
 | 550 |  * Undefined if no bit exists, so code should check against 0 first. | 
| Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 551 |  */ | 
| Ralf Baechle | 6590326 | 2005-07-12 12:50:30 +0000 | [diff] [blame] | 552 | static inline unsigned long __ffs(unsigned long word) | 
| Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 553 | { | 
| Ralf Baechle | ddc0d00 | 2008-05-04 14:53:53 +0100 | [diff] [blame] | 554 | 	return __fls(word & -word); | 
| Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 555 | } | 
 | 556 |  | 
 | 557 | /* | 
| Atsushi Nemoto | bc81824 | 2006-04-17 21:19:12 +0900 | [diff] [blame] | 558 |  * fls - find last bit set. | 
 | 559 |  * @word: The word to search | 
 | 560 |  * | 
 | 561 |  * This is defined the same way as ffs. | 
 | 562 |  * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32. | 
 | 563 |  */ | 
| Ralf Baechle | 4816227 | 2008-10-28 09:40:35 +0000 | [diff] [blame] | 564 | static inline int fls(int x) | 
| Atsushi Nemoto | bc81824 | 2006-04-17 21:19:12 +0900 | [diff] [blame] | 565 | { | 
| Ralf Baechle | 4816227 | 2008-10-28 09:40:35 +0000 | [diff] [blame] | 566 | 	int r; | 
| Atsushi Nemoto | bc81824 | 2006-04-17 21:19:12 +0900 | [diff] [blame] | 567 |  | 
| Ralf Baechle | 47740eb | 2009-04-19 03:21:22 +0200 | [diff] [blame] | 568 | 	if (__builtin_constant_p(cpu_has_clo_clz) && cpu_has_clo_clz) { | 
| Ralf Baechle | 4816227 | 2008-10-28 09:40:35 +0000 | [diff] [blame] | 569 | 		__asm__("clz %0, %1" : "=r" (x) : "r" (x)); | 
 | 570 |  | 
 | 571 | 		return 32 - x; | 
 | 572 | 	} | 
 | 573 |  | 
 | 574 | 	r = 32; | 
 | 575 | 	if (!x) | 
 | 576 | 		return 0; | 
 | 577 | 	if (!(x & 0xffff0000u)) { | 
 | 578 | 		x <<= 16; | 
 | 579 | 		r -= 16; | 
 | 580 | 	} | 
 | 581 | 	if (!(x & 0xff000000u)) { | 
 | 582 | 		x <<= 8; | 
 | 583 | 		r -= 8; | 
 | 584 | 	} | 
 | 585 | 	if (!(x & 0xf0000000u)) { | 
 | 586 | 		x <<= 4; | 
 | 587 | 		r -= 4; | 
 | 588 | 	} | 
 | 589 | 	if (!(x & 0xc0000000u)) { | 
 | 590 | 		x <<= 2; | 
 | 591 | 		r -= 2; | 
 | 592 | 	} | 
 | 593 | 	if (!(x & 0x80000000u)) { | 
 | 594 | 		x <<= 1; | 
 | 595 | 		r -= 1; | 
 | 596 | 	} | 
 | 597 | 	return r; | 
| Atsushi Nemoto | bc81824 | 2006-04-17 21:19:12 +0900 | [diff] [blame] | 598 | } | 
 | 599 |  | 
| Atsushi Nemoto | bc81824 | 2006-04-17 21:19:12 +0900 | [diff] [blame] | 600 | #include <asm-generic/bitops/fls64.h> | 
| Atsushi Nemoto | bc81824 | 2006-04-17 21:19:12 +0900 | [diff] [blame] | 601 |  | 
 | 602 | /* | 
| Ralf Baechle | 6590326 | 2005-07-12 12:50:30 +0000 | [diff] [blame] | 603 |  * ffs - find first bit set. | 
| Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 604 |  * @word: The word to search | 
 | 605 |  * | 
| Atsushi Nemoto | bc81824 | 2006-04-17 21:19:12 +0900 | [diff] [blame] | 606 |  * This is defined the same way as | 
 | 607 |  * the libc and compiler builtin ffs routines, therefore | 
 | 608 |  * differs in spirit from the above ffz (man ffs). | 
| Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 609 |  */ | 
| Atsushi Nemoto | bc81824 | 2006-04-17 21:19:12 +0900 | [diff] [blame] | 610 | static inline int ffs(int word) | 
| Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 611 | { | 
| Ralf Baechle | 6590326 | 2005-07-12 12:50:30 +0000 | [diff] [blame] | 612 | 	if (!word) | 
 | 613 | 		return 0; | 
 | 614 |  | 
| Atsushi Nemoto | bc81824 | 2006-04-17 21:19:12 +0900 | [diff] [blame] | 615 | 	return fls(word & -word); | 
| Ralf Baechle | 6590326 | 2005-07-12 12:50:30 +0000 | [diff] [blame] | 616 | } | 
| Ralf Baechle | 2caf190 | 2006-01-30 17:14:41 +0000 | [diff] [blame] | 617 |  | 
| Atsushi Nemoto | bc81824 | 2006-04-17 21:19:12 +0900 | [diff] [blame] | 618 | #include <asm-generic/bitops/ffz.h> | 
| Akinobu Mita | 3c9ee7e | 2006-03-26 01:39:30 -0800 | [diff] [blame] | 619 | #include <asm-generic/bitops/find.h> | 
| Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 620 |  | 
 | 621 | #ifdef __KERNEL__ | 
 | 622 |  | 
| Akinobu Mita | 3c9ee7e | 2006-03-26 01:39:30 -0800 | [diff] [blame] | 623 | #include <asm-generic/bitops/sched.h> | 
| David Daney | 1a403d1 | 2010-06-25 16:46:07 -0700 | [diff] [blame] | 624 |  | 
 | 625 | #include <asm/arch_hweight.h> | 
 | 626 | #include <asm-generic/bitops/const_hweight.h> | 
 | 627 |  | 
| Akinobu Mita | 861b5ae | 2011-03-23 16:42:02 -0700 | [diff] [blame] | 628 | #include <asm-generic/bitops/le.h> | 
| Akinobu Mita | 3c9ee7e | 2006-03-26 01:39:30 -0800 | [diff] [blame] | 629 | #include <asm-generic/bitops/ext2-atomic.h> | 
| Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 630 |  | 
 | 631 | #endif /* __KERNEL__ */ | 
 | 632 |  | 
 | 633 | #endif /* _ASM_BITOPS_H */ |