/*
 * Copyright 1995, Russell King.
 * Various bits and pieces copyrights include:
 *  Linus Torvalds (test_bit).
 * Big endian support: Copyright 2001, Nicolas Pitre
 *  reworked by rmk.
 *
 * bit 0 is the LSB of an "unsigned long" quantity.
 *
 * Please note that the code in this file should never be included
 * from user space.  Many of these are not implemented in assembler
 * since they would be too costly.  Also, they require privileged
 * instructions (which are not available from user mode) to ensure
 * that they are atomic.
 */

#ifndef __ASM_ARM_BITOPS_H
#define __ASM_ARM_BITOPS_H

#ifdef __KERNEL__

#include <linux/compiler.h>
#include <asm/system.h>

#define smp_mb__before_clear_bit()	mb()
#define smp_mb__after_clear_bit()	mb()

/*
 * These functions are the basis of our bit ops.
 *
 * First, the atomic bitops.  These use native endian.
 */
static inline void ____atomic_set_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long flags;
	unsigned long mask = 1UL << (bit & 31);

	p += bit >> 5;

	local_irq_save(flags);
	*p |= mask;
	local_irq_restore(flags);
}

static inline void ____atomic_clear_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long flags;
	unsigned long mask = 1UL << (bit & 31);

	p += bit >> 5;

	local_irq_save(flags);
	*p &= ~mask;
	local_irq_restore(flags);
}

static inline void ____atomic_change_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long flags;
	unsigned long mask = 1UL << (bit & 31);

	p += bit >> 5;

	local_irq_save(flags);
	*p ^= mask;
	local_irq_restore(flags);
}

static inline int
____atomic_test_and_set_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long flags;
	unsigned int res;
	unsigned long mask = 1UL << (bit & 31);

	p += bit >> 5;

	local_irq_save(flags);
	res = *p;
	*p = res | mask;
	local_irq_restore(flags);

	return res & mask;
}

static inline int
____atomic_test_and_clear_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long flags;
	unsigned int res;
	unsigned long mask = 1UL << (bit & 31);

	p += bit >> 5;

	local_irq_save(flags);
	res = *p;
	*p = res & ~mask;
	local_irq_restore(flags);

	return res & mask;
}

static inline int
____atomic_test_and_change_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long flags;
	unsigned int res;
	unsigned long mask = 1UL << (bit & 31);

	p += bit >> 5;

	local_irq_save(flags);
	res = *p;
	*p = res ^ mask;
	local_irq_restore(flags);

	return res & mask;
}

#include <asm-generic/bitops/non-atomic.h>

/*
 * A note about Endian-ness.
 * -------------------------
 *
 * When the ARM is put into big endian mode via the CP15 control
 * register, the processor merely swaps the order of bytes within
 * words, thus:
 *
 *          ------------ physical data bus bits -----------
 *          D31 ... D24  D23 ... D16  D15 ... D8  D7 ... D0
 * little     byte 3       byte 2       byte 1      byte 0
 * big        byte 0       byte 1       byte 2      byte 3
 *
 * This means that reading a 32-bit word at address 0 returns the same
 * value irrespective of the endian mode bit.
 *
 * Peripheral devices should be connected with the data bus reversed in
 * "Big Endian" mode.  ARM Application Note 61 is applicable, and is
 * available from http://www.arm.com/.
 *
 * The following assumes that the data bus connectivity for big endian
 * mode has been followed.
 *
 * Note that bit 0 is defined to be 32-bit word bit 0, not byte 0 bit 0.
 */
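
/*
 * For example, with the numbering above: bit 0 of a 32-bit word is
 * always the least significant bit of that word.  On a little-endian
 * kernel that bit is stored in the byte at offset 0 of the word, so
 * word bit 9 means byte 1, bit 1.  On a big-endian kernel the same
 * word bit 0 is stored in the byte at offset 3, and word bit 9 means
 * byte 2, bit 1.  The _le and _be assembly bitops declared below use
 * these two numberings respectively.
 */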

/*
 * Little endian assembly bitops.  nr = 0 -> byte 0 bit 0.
 */
extern void _set_bit_le(int nr, volatile unsigned long * p);
extern void _clear_bit_le(int nr, volatile unsigned long * p);
extern void _change_bit_le(int nr, volatile unsigned long * p);
extern int _test_and_set_bit_le(int nr, volatile unsigned long * p);
extern int _test_and_clear_bit_le(int nr, volatile unsigned long * p);
extern int _test_and_change_bit_le(int nr, volatile unsigned long * p);
extern int _find_first_zero_bit_le(const void * p, unsigned size);
extern int _find_next_zero_bit_le(const void * p, int size, int offset);
extern int _find_first_bit_le(const unsigned long *p, unsigned size);
extern int _find_next_bit_le(const unsigned long *p, int size, int offset);

/*
 * Big endian assembly bitops.  nr = 0 -> byte 3 bit 0.
 */
extern void _set_bit_be(int nr, volatile unsigned long * p);
extern void _clear_bit_be(int nr, volatile unsigned long * p);
extern void _change_bit_be(int nr, volatile unsigned long * p);
extern int _test_and_set_bit_be(int nr, volatile unsigned long * p);
extern int _test_and_clear_bit_be(int nr, volatile unsigned long * p);
extern int _test_and_change_bit_be(int nr, volatile unsigned long * p);
extern int _find_first_zero_bit_be(const void * p, unsigned size);
extern int _find_next_zero_bit_be(const void * p, int size, int offset);
extern int _find_first_bit_be(const unsigned long *p, unsigned size);
extern int _find_next_bit_be(const unsigned long *p, int size, int offset);

#ifndef CONFIG_SMP
/*
 * The __* forms of the bitops are non-atomic and may be reordered.
 */
#define ATOMIC_BITOP_LE(name,nr,p)		\
	(__builtin_constant_p(nr) ?		\
	 ____atomic_##name(nr, p) :		\
	 _##name##_le(nr,p))

#define ATOMIC_BITOP_BE(name,nr,p)		\
	(__builtin_constant_p(nr) ?		\
	 ____atomic_##name(nr, p) :		\
	 _##name##_be(nr,p))
#else
#define ATOMIC_BITOP_LE(name,nr,p)	_##name##_le(nr,p)
#define ATOMIC_BITOP_BE(name,nr,p)	_##name##_be(nr,p)
#endif

#define NONATOMIC_BITOP(name,nr,p)	\
	(____nonatomic_##name(nr, p))

#ifndef __ARMEB__
/*
 * These are the little endian, atomic definitions.
 */
#define set_bit(nr,p)			ATOMIC_BITOP_LE(set_bit,nr,p)
#define clear_bit(nr,p)			ATOMIC_BITOP_LE(clear_bit,nr,p)
#define change_bit(nr,p)		ATOMIC_BITOP_LE(change_bit,nr,p)
#define test_and_set_bit(nr,p)		ATOMIC_BITOP_LE(test_and_set_bit,nr,p)
#define test_and_clear_bit(nr,p)	ATOMIC_BITOP_LE(test_and_clear_bit,nr,p)
#define test_and_change_bit(nr,p)	ATOMIC_BITOP_LE(test_and_change_bit,nr,p)
#define find_first_zero_bit(p,sz)	_find_first_zero_bit_le(p,sz)
#define find_next_zero_bit(p,sz,off)	_find_next_zero_bit_le(p,sz,off)
#define find_first_bit(p,sz)		_find_first_bit_le(p,sz)
#define find_next_bit(p,sz,off)		_find_next_bit_le(p,sz,off)

#define WORD_BITOFF_TO_LE(x)		((x))

#else

/*
 * These are the big endian, atomic definitions.
 */
#define set_bit(nr,p)			ATOMIC_BITOP_BE(set_bit,nr,p)
#define clear_bit(nr,p)			ATOMIC_BITOP_BE(clear_bit,nr,p)
#define change_bit(nr,p)		ATOMIC_BITOP_BE(change_bit,nr,p)
#define test_and_set_bit(nr,p)		ATOMIC_BITOP_BE(test_and_set_bit,nr,p)
#define test_and_clear_bit(nr,p)	ATOMIC_BITOP_BE(test_and_clear_bit,nr,p)
#define test_and_change_bit(nr,p)	ATOMIC_BITOP_BE(test_and_change_bit,nr,p)
#define find_first_zero_bit(p,sz)	_find_first_zero_bit_be(p,sz)
#define find_next_zero_bit(p,sz,off)	_find_next_zero_bit_be(p,sz,off)
#define find_first_bit(p,sz)		_find_first_bit_be(p,sz)
#define find_next_bit(p,sz,off)		_find_next_bit_be(p,sz,off)

#define WORD_BITOFF_TO_LE(x)		((x) ^ 0x18)

#endif
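
/*
 * WORD_BITOFF_TO_LE() above maps an ext2/minix style little-endian bit
 * offset (bit 0 of byte 0, bit 8 of byte 1, ...) onto the native word
 * bit offset expected by the bitops.  On little-endian kernels the two
 * numberings coincide, hence the identity.  On big-endian kernels,
 * XORing with 0x18 reverses the byte index within the 32-bit word while
 * preserving the bit position within the byte: for example, offset 0
 * (byte 0, bit 0) becomes word bit 24, and offset 9 (byte 1, bit 1)
 * becomes word bit 17.
 */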

#if __LINUX_ARM_ARCH__ < 5

#include <asm-generic/bitops/ffz.h>
#include <asm-generic/bitops/__ffs.h>
#include <asm-generic/bitops/fls.h>
#include <asm-generic/bitops/ffs.h>

#else

static inline int constant_fls(int x)
{
	int r = 32;

	if (!x)
		return 0;
	if (!(x & 0xffff0000u)) {
		x <<= 16;
		r -= 16;
	}
	if (!(x & 0xff000000u)) {
		x <<= 8;
		r -= 8;
	}
	if (!(x & 0xf0000000u)) {
		x <<= 4;
		r -= 4;
	}
	if (!(x & 0xc0000000u)) {
		x <<= 2;
		r -= 2;
	}
	if (!(x & 0x80000000u)) {
		x <<= 1;
		r -= 1;
	}
	return r;
}

/*
 * On ARMv5 and above these functions can be implemented around
 * the clz instruction for much better code efficiency.
 */

#define fls(x) \
	( __builtin_constant_p(x) ? constant_fls(x) : \
	  ({ int __r; asm("clz\t%0, %1" : "=r"(__r) : "r"(x) : "cc"); 32-__r; }) )
#define ffs(x) ({ unsigned long __t = (x); fls(__t & -__t); })
#define __ffs(x) (ffs(x) - 1)
#define ffz(x) __ffs( ~(x) )

#endif

#include <asm-generic/bitops/fls64.h>

#include <asm-generic/bitops/sched.h>
#include <asm-generic/bitops/hweight.h>

/*
 * Ext2 is defined to use little-endian byte ordering.
 * These do not need to be atomic.
 */
#define ext2_set_bit(nr,p)			\
		__test_and_set_bit(WORD_BITOFF_TO_LE(nr), (unsigned long *)(p))
#define ext2_set_bit_atomic(lock,nr,p)		\
		test_and_set_bit(WORD_BITOFF_TO_LE(nr), (unsigned long *)(p))
#define ext2_clear_bit(nr,p)			\
		__test_and_clear_bit(WORD_BITOFF_TO_LE(nr), (unsigned long *)(p))
#define ext2_clear_bit_atomic(lock,nr,p)	\
		test_and_clear_bit(WORD_BITOFF_TO_LE(nr), (unsigned long *)(p))
#define ext2_test_bit(nr,p)			\
		test_bit(WORD_BITOFF_TO_LE(nr), (unsigned long *)(p))
#define ext2_find_first_zero_bit(p,sz)		\
		_find_first_zero_bit_le(p,sz)
#define ext2_find_next_zero_bit(p,sz,off)	\
		_find_next_zero_bit_le(p,sz,off)

/*
 * Minix is defined to use little-endian byte ordering.
 * These do not need to be atomic.
 */
#define minix_set_bit(nr,p)			\
		__set_bit(WORD_BITOFF_TO_LE(nr), (unsigned long *)(p))
#define minix_test_bit(nr,p)			\
		test_bit(WORD_BITOFF_TO_LE(nr), (unsigned long *)(p))
#define minix_test_and_set_bit(nr,p)		\
		__test_and_set_bit(WORD_BITOFF_TO_LE(nr), (unsigned long *)(p))
#define minix_test_and_clear_bit(nr,p)		\
		__test_and_clear_bit(WORD_BITOFF_TO_LE(nr), (unsigned long *)(p))
#define minix_find_first_zero_bit(p,sz)		\
		_find_first_zero_bit_le(p,sz)

#endif /* __KERNEL__ */

#endif /* __ASM_ARM_BITOPS_H */