arch/m68k/include/asm/bitops.h


DEFINITIONS

This source file includes the following definitions:
  1. bset_reg_set_bit
  2. bset_mem_set_bit
  3. bfset_mem_set_bit
  4. bclr_reg_clear_bit
  5. bclr_mem_clear_bit
  6. bfclr_mem_clear_bit
  7. bchg_reg_change_bit
  8. bchg_mem_change_bit
  9. bfchg_mem_change_bit
  10. test_bit
  11. bset_reg_test_and_set_bit
  12. bset_mem_test_and_set_bit
  13. bfset_mem_test_and_set_bit
  14. bclr_reg_test_and_clear_bit
  15. bclr_mem_test_and_clear_bit
  16. bfclr_mem_test_and_clear_bit
  17. bchg_reg_test_and_change_bit
  18. bchg_mem_test_and_change_bit
  19. bfchg_mem_test_and_change_bit
  20. find_first_zero_bit
  21. find_next_zero_bit
  22. find_first_bit
  23. find_next_bit
  24. ffz
  25. __ffs
  26. ffs
  27. ffs
  28. __ffs
  29. fls
  30. __fls

#ifndef _M68K_BITOPS_H
#define _M68K_BITOPS_H
/*
 * Copyright 1992, Linus Torvalds.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file COPYING in the main directory of this archive
 * for more details.
 */

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <linux/compiler.h>
#include <asm/barrier.h>

/*
 *      Bit access functions vary across the ColdFire and 68k families.
 *      So we break them out here, and then macro in the ones we want.
 *
 *      ColdFire - supports standard bset/bclr/bchg with register operand only
 *      68000    - supports standard bset/bclr/bchg with memory operand
 *      >= 68020 - also supports the bfset/bfclr/bfchg instructions
 *
 *      Although it is possible to use only the bset/bclr/bchg instructions
 *      with register operands on all platforms, doing so produces larger
 *      generated code.  So we use the best form available on a given platform.
 */

static inline void bset_reg_set_bit(int nr, volatile unsigned long *vaddr)
{
        char *p = (char *)vaddr + (nr ^ 31) / 8;

        __asm__ __volatile__ ("bset %1,(%0)"
                :
                : "a" (p), "di" (nr & 7)
                : "memory");
}

static inline void bset_mem_set_bit(int nr, volatile unsigned long *vaddr)
{
        char *p = (char *)vaddr + (nr ^ 31) / 8;

        __asm__ __volatile__ ("bset %1,%0"
                : "+m" (*p)
                : "di" (nr & 7));
}

static inline void bfset_mem_set_bit(int nr, volatile unsigned long *vaddr)
{
        __asm__ __volatile__ ("bfset %1{%0:#1}"
                :
                : "d" (nr ^ 31), "o" (*vaddr)
                : "memory");
}

#if defined(CONFIG_COLDFIRE)
#define set_bit(nr, vaddr)      bset_reg_set_bit(nr, vaddr)
#elif defined(CONFIG_CPU_HAS_NO_BITFIELDS)
#define set_bit(nr, vaddr)      bset_mem_set_bit(nr, vaddr)
#else
#define set_bit(nr, vaddr)      (__builtin_constant_p(nr) ? \
                                bset_mem_set_bit(nr, vaddr) : \
                                bfset_mem_set_bit(nr, vaddr))
#endif

#define __set_bit(nr, vaddr)    set_bit(nr, vaddr)

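/*
 * Editorial usage sketch, not part of the original header: set_bit()
 * numbers bits 0..31 within each longword, LSB first, continuing into
 * the next longword of the array.  The function and bitmap names below
 * are illustrative only.
 */
static inline void example_set_bits(volatile unsigned long *bitmap)
{
        set_bit(3, bitmap);             /* bit 3 of bitmap[0] */
        set_bit(35, bitmap);            /* bit 3 of bitmap[1] */
        __set_bit(4, bitmap);           /* same operation; on m68k the
                                           non-atomic form maps to set_bit */
}
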

static inline void bclr_reg_clear_bit(int nr, volatile unsigned long *vaddr)
{
        char *p = (char *)vaddr + (nr ^ 31) / 8;

        __asm__ __volatile__ ("bclr %1,(%0)"
                :
                : "a" (p), "di" (nr & 7)
                : "memory");
}

static inline void bclr_mem_clear_bit(int nr, volatile unsigned long *vaddr)
{
        char *p = (char *)vaddr + (nr ^ 31) / 8;

        __asm__ __volatile__ ("bclr %1,%0"
                : "+m" (*p)
                : "di" (nr & 7));
}

static inline void bfclr_mem_clear_bit(int nr, volatile unsigned long *vaddr)
{
        __asm__ __volatile__ ("bfclr %1{%0:#1}"
                :
                : "d" (nr ^ 31), "o" (*vaddr)
                : "memory");
}

#if defined(CONFIG_COLDFIRE)
#define clear_bit(nr, vaddr)    bclr_reg_clear_bit(nr, vaddr)
#elif defined(CONFIG_CPU_HAS_NO_BITFIELDS)
#define clear_bit(nr, vaddr)    bclr_mem_clear_bit(nr, vaddr)
#else
#define clear_bit(nr, vaddr)    (__builtin_constant_p(nr) ? \
                                bclr_mem_clear_bit(nr, vaddr) : \
                                bfclr_mem_clear_bit(nr, vaddr))
#endif

#define __clear_bit(nr, vaddr)  clear_bit(nr, vaddr)


static inline void bchg_reg_change_bit(int nr, volatile unsigned long *vaddr)
{
        char *p = (char *)vaddr + (nr ^ 31) / 8;

        __asm__ __volatile__ ("bchg %1,(%0)"
                :
                : "a" (p), "di" (nr & 7)
                : "memory");
}

static inline void bchg_mem_change_bit(int nr, volatile unsigned long *vaddr)
{
        char *p = (char *)vaddr + (nr ^ 31) / 8;

        __asm__ __volatile__ ("bchg %1,%0"
                : "+m" (*p)
                : "di" (nr & 7));
}

static inline void bfchg_mem_change_bit(int nr, volatile unsigned long *vaddr)
{
        __asm__ __volatile__ ("bfchg %1{%0:#1}"
                :
                : "d" (nr ^ 31), "o" (*vaddr)
                : "memory");
}

#if defined(CONFIG_COLDFIRE)
#define change_bit(nr, vaddr)   bchg_reg_change_bit(nr, vaddr)
#elif defined(CONFIG_CPU_HAS_NO_BITFIELDS)
#define change_bit(nr, vaddr)   bchg_mem_change_bit(nr, vaddr)
#else
#define change_bit(nr, vaddr)   (__builtin_constant_p(nr) ? \
                                bchg_mem_change_bit(nr, vaddr) : \
                                bfchg_mem_change_bit(nr, vaddr))
#endif

#define __change_bit(nr, vaddr) change_bit(nr, vaddr)


static inline int test_bit(int nr, const volatile unsigned long *vaddr)
{
        return (vaddr[nr >> 5] & (1UL << (nr & 31))) != 0;
}

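/*
 * Editorial sketch, not part of the original header: on big-endian
 * m68k, the "(nr ^ 31) / 8" byte offset plus "nr & 7" bit number used
 * by the bset/bclr/bchg helpers above selects exactly the bit that
 * test_bit() reads out of the longword array.  For example nr = 35 is
 * bit 3 of vaddr[1], which lives in byte 7 of the array.  The function
 * name is illustrative only.
 */
static inline int example_same_bit(int nr, const volatile unsigned long *vaddr)
{
        const volatile unsigned char *p =
                (const volatile unsigned char *)vaddr + (nr ^ 31) / 8;

        /* always 1 on m68k: both expressions name the same bit */
        return ((*p >> (nr & 7)) & 1) == test_bit(nr, vaddr);
}
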

static inline int bset_reg_test_and_set_bit(int nr,
                                            volatile unsigned long *vaddr)
{
        char *p = (char *)vaddr + (nr ^ 31) / 8;
        char retval;

        __asm__ __volatile__ ("bset %2,(%1); sne %0"
                : "=d" (retval)
                : "a" (p), "di" (nr & 7)
                : "memory");
        return retval;
}

static inline int bset_mem_test_and_set_bit(int nr,
                                            volatile unsigned long *vaddr)
{
        char *p = (char *)vaddr + (nr ^ 31) / 8;
        char retval;

        __asm__ __volatile__ ("bset %2,%1; sne %0"
                : "=d" (retval), "+m" (*p)
                : "di" (nr & 7));
        return retval;
}

static inline int bfset_mem_test_and_set_bit(int nr,
                                             volatile unsigned long *vaddr)
{
        char retval;

        __asm__ __volatile__ ("bfset %2{%1:#1}; sne %0"
                : "=d" (retval)
                : "d" (nr ^ 31), "o" (*vaddr)
                : "memory");
        return retval;
}

#if defined(CONFIG_COLDFIRE)
#define test_and_set_bit(nr, vaddr)     bset_reg_test_and_set_bit(nr, vaddr)
#elif defined(CONFIG_CPU_HAS_NO_BITFIELDS)
#define test_and_set_bit(nr, vaddr)     bset_mem_test_and_set_bit(nr, vaddr)
#else
#define test_and_set_bit(nr, vaddr)     (__builtin_constant_p(nr) ? \
                                        bset_mem_test_and_set_bit(nr, vaddr) : \
                                        bfset_mem_test_and_set_bit(nr, vaddr))
#endif

#define __test_and_set_bit(nr, vaddr)   test_and_set_bit(nr, vaddr)


static inline int bclr_reg_test_and_clear_bit(int nr,
                                              volatile unsigned long *vaddr)
{
        char *p = (char *)vaddr + (nr ^ 31) / 8;
        char retval;

        __asm__ __volatile__ ("bclr %2,(%1); sne %0"
                : "=d" (retval)
                : "a" (p), "di" (nr & 7)
                : "memory");
        return retval;
}

static inline int bclr_mem_test_and_clear_bit(int nr,
                                              volatile unsigned long *vaddr)
{
        char *p = (char *)vaddr + (nr ^ 31) / 8;
        char retval;

        __asm__ __volatile__ ("bclr %2,%1; sne %0"
                : "=d" (retval), "+m" (*p)
                : "di" (nr & 7));
        return retval;
}

static inline int bfclr_mem_test_and_clear_bit(int nr,
                                               volatile unsigned long *vaddr)
{
        char retval;

        __asm__ __volatile__ ("bfclr %2{%1:#1}; sne %0"
                : "=d" (retval)
                : "d" (nr ^ 31), "o" (*vaddr)
                : "memory");
        return retval;
}

#if defined(CONFIG_COLDFIRE)
#define test_and_clear_bit(nr, vaddr)   bclr_reg_test_and_clear_bit(nr, vaddr)
#elif defined(CONFIG_CPU_HAS_NO_BITFIELDS)
#define test_and_clear_bit(nr, vaddr)   bclr_mem_test_and_clear_bit(nr, vaddr)
#else
#define test_and_clear_bit(nr, vaddr)   (__builtin_constant_p(nr) ? \
                                        bclr_mem_test_and_clear_bit(nr, vaddr) : \
                                        bfclr_mem_test_and_clear_bit(nr, vaddr))
#endif

#define __test_and_clear_bit(nr, vaddr) test_and_clear_bit(nr, vaddr)


static inline int bchg_reg_test_and_change_bit(int nr,
                                               volatile unsigned long *vaddr)
{
        char *p = (char *)vaddr + (nr ^ 31) / 8;
        char retval;

        __asm__ __volatile__ ("bchg %2,(%1); sne %0"
                : "=d" (retval)
                : "a" (p), "di" (nr & 7)
                : "memory");
        return retval;
}

static inline int bchg_mem_test_and_change_bit(int nr,
                                               volatile unsigned long *vaddr)
{
        char *p = (char *)vaddr + (nr ^ 31) / 8;
        char retval;

        __asm__ __volatile__ ("bchg %2,%1; sne %0"
                : "=d" (retval), "+m" (*p)
                : "di" (nr & 7));
        return retval;
}

static inline int bfchg_mem_test_and_change_bit(int nr,
                                                volatile unsigned long *vaddr)
{
        char retval;

        __asm__ __volatile__ ("bfchg %2{%1:#1}; sne %0"
                : "=d" (retval)
                : "d" (nr ^ 31), "o" (*vaddr)
                : "memory");
        return retval;
}

#if defined(CONFIG_COLDFIRE)
#define test_and_change_bit(nr, vaddr)  bchg_reg_test_and_change_bit(nr, vaddr)
#elif defined(CONFIG_CPU_HAS_NO_BITFIELDS)
#define test_and_change_bit(nr, vaddr)  bchg_mem_test_and_change_bit(nr, vaddr)
#else
#define test_and_change_bit(nr, vaddr)  (__builtin_constant_p(nr) ? \
                                        bchg_mem_test_and_change_bit(nr, vaddr) : \
                                        bfchg_mem_test_and_change_bit(nr, vaddr))
#endif

#define __test_and_change_bit(nr, vaddr) test_and_change_bit(nr, vaddr)

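/*
 * Editorial usage sketch, not part of the original header: the
 * test_and_* forms read the old value and update the bit in a single
 * instruction, which is the usual way to claim a shared flag.
 * EXAMPLE_BUSY and the function name are illustrative only.
 */
#define EXAMPLE_BUSY    0

static inline int example_try_claim(volatile unsigned long *flags)
{
        /* returns 0 if we claimed the flag, nonzero if already set */
        return test_and_set_bit(EXAMPLE_BUSY, flags);
}
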
/*
 *      The true 68020 and more advanced processors support the "bfffo"
 *      instruction for finding bits. ColdFire and simple 68000 parts
 *      (including CPU32) do not support this. They simply use the generic
 *      functions.
 */
#if defined(CONFIG_CPU_HAS_NO_BITFIELDS)
#include <asm-generic/bitops/ffz.h>
#else

static inline int find_first_zero_bit(const unsigned long *vaddr,
                                      unsigned size)
{
        const unsigned long *p = vaddr;
        int res = 32;
        unsigned int words;
        unsigned long num;

        if (!size)
                return 0;

        words = (size + 31) >> 5;
        while (!(num = ~*p++)) {
                if (!--words)
                        goto out;
        }

        __asm__ __volatile__ ("bfffo %1{#0,#0},%0"
                              : "=d" (res) : "d" (num & -num));
        res ^= 31;
out:
        res += ((long)p - (long)vaddr - 4) * 8;
        return res < size ? res : size;
}
#define find_first_zero_bit find_first_zero_bit

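/*
 * Editorial sketch, not part of the original header: a portable C
 * model of the bfffo sequence above, assuming num != 0.  "num & -num"
 * isolates the lowest set bit; bfffo then reports its offset counted
 * from the MSB, and "res ^ 31" converts that to the LSB-relative index
 * the kernel uses.  E.g. for word 0xfffffff7, ~word = 0x8, bfffo
 * returns 28 and 28 ^ 31 = 3.  The function name is illustrative only.
 */
static inline int example_bfffo_lowest(unsigned long num)
{
        int res = 0;

        num &= -num;                    /* isolate the lowest set bit */
        while (!(num & (1UL << (31 - res))))
                res++;                  /* res = offset from the MSB */
        return res ^ 31;                /* = LSB-relative bit index */
}
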
static inline int find_next_zero_bit(const unsigned long *vaddr, int size,
                                     int offset)
{
        const unsigned long *p = vaddr + (offset >> 5);
        int bit = offset & 31UL, res;

        if (offset >= size)
                return size;

        if (bit) {
                unsigned long num = ~*p++ & (~0UL << bit);
                offset -= bit;

                /* Look for zero in first longword */
                __asm__ __volatile__ ("bfffo %1{#0,#0},%0"
                                      : "=d" (res) : "d" (num & -num));
                if (res < 32) {
                        offset += res ^ 31;
                        return offset < size ? offset : size;
                }
                offset += 32;

                if (offset >= size)
                        return size;
        }
        /* No zero yet, search remaining full bytes for a zero */
        return offset + find_first_zero_bit(p, size - offset);
}
#define find_next_zero_bit find_next_zero_bit

static inline int find_first_bit(const unsigned long *vaddr, unsigned size)
{
        const unsigned long *p = vaddr;
        int res = 32;
        unsigned int words;
        unsigned long num;

        if (!size)
                return 0;

        words = (size + 31) >> 5;
        while (!(num = *p++)) {
                if (!--words)
                        goto out;
        }

        __asm__ __volatile__ ("bfffo %1{#0,#0},%0"
                              : "=d" (res) : "d" (num & -num));
        res ^= 31;
out:
        res += ((long)p - (long)vaddr - 4) * 8;
        return res < size ? res : size;
}
#define find_first_bit find_first_bit

static inline int find_next_bit(const unsigned long *vaddr, int size,
                                int offset)
{
        const unsigned long *p = vaddr + (offset >> 5);
        int bit = offset & 31UL, res;

        if (offset >= size)
                return size;

        if (bit) {
                unsigned long num = *p++ & (~0UL << bit);
                offset -= bit;

                /* Look for one in first longword */
                __asm__ __volatile__ ("bfffo %1{#0,#0},%0"
                                      : "=d" (res) : "d" (num & -num));
                if (res < 32) {
                        offset += res ^ 31;
                        return offset < size ? offset : size;
                }
                offset += 32;

                if (offset >= size)
                        return size;
        }
        /* No one yet, search remaining full bytes for a one */
        return offset + find_first_bit(p, size - offset);
}
#define find_next_bit find_next_bit

/*
 * ffz = Find First Zero in word. Undefined if no zero exists,
 * so callers should check against ~0UL first.
 */
static inline unsigned long ffz(unsigned long word)
{
        int res;

        __asm__ __volatile__ ("bfffo %1{#0,#0},%0"
                              : "=d" (res) : "d" (~word & -~word));
        return res ^ 31;
}

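/*
 * Editorial usage sketch, not part of the original header: since
 * ffz() is undefined when no zero bit exists, callers guard against
 * ~0UL first.  The function name is illustrative only.
 */
static inline long example_first_free(unsigned long mask)
{
        if (mask == ~0UL)
                return -1;              /* every slot taken */
        return ffz(mask);               /* index of the lowest clear bit */
}
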
#endif

#include <asm-generic/bitops/find.h>

#ifdef __KERNEL__

#if defined(CONFIG_CPU_HAS_NO_BITFIELDS)

/*
 *      The newer ColdFire family members support a "bitrev" instruction
 *      and we can use that to implement a fast ffs. Older ColdFire parts
 *      and plain 68000 parts don't have anything special, so we use the
 *      generic functions for those.
 */
#if (defined(__mcfisaaplus__) || defined(__mcfisac__)) && \
        !defined(CONFIG_M68000) && !defined(CONFIG_MCPU32)
static inline unsigned long __ffs(unsigned long x)
{
        __asm__ __volatile__ ("bitrev %0; ff1 %0"
                : "=d" (x)
                : "0" (x));
        return x;
}

static inline int ffs(int x)
{
        if (!x)
                return 0;
        return __ffs(x) + 1;
}

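/*
 * Editorial sketch, not part of the original header: a portable C
 * model of the "bitrev; ff1" pair above.  bitrev mirrors all 32 bits
 * so the lowest set bit becomes the highest; ff1 then returns its
 * offset from bit 31, which equals the original LSB-relative index
 * (and 32 when x is 0, which ffs() above filters out first).  The
 * function name is illustrative only.
 */
static inline unsigned long example_bitrev_ff1(unsigned long x)
{
        unsigned long rev = 0;
        int i, off;

        for (i = 0; i < 32; i++)                /* bitrev */
                if (x & (1UL << i))
                        rev |= 1UL << (31 - i);
        for (off = 0; off < 32; off++)          /* ff1 */
                if (rev & (1UL << (31 - off)))
                        break;
        return off;                             /* e.g. 0x12 -> 1 */
}
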
#else
#include <asm-generic/bitops/ffs.h>
#include <asm-generic/bitops/__ffs.h>
#endif

#include <asm-generic/bitops/fls.h>
#include <asm-generic/bitops/__fls.h>

#else

/*
 *      ffs: find first bit set. This is defined the same way as
 *      the libc and compiler builtin ffs routines, therefore
 *      differs in spirit from the above ffz (man ffs).
 */
static inline int ffs(int x)
{
        int cnt;

        __asm__ ("bfffo %1{#0:#0},%0"
                : "=d" (cnt)
                : "dm" (x & -x));
        return 32 - cnt;
}

static inline unsigned long __ffs(unsigned long x)
{
        return ffs(x) - 1;
}

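/*
 * Editorial worked example, not part of the original header: for
 * x = 0x18, x & -x = 0x8 (bit 3), bfffo reports offset 31 - 3 = 28
 * from the MSB, and ffs() returns 32 - 28 = 4, the 1-based index.
 * For x = 0 the bitfield contains no set bit, bfffo returns the
 * field width 32, and ffs() correctly returns 0.
 */
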
/*
 *      fls: find last bit set.
 */
static inline int fls(unsigned int x)
{
        int cnt;

        __asm__ ("bfffo %1{#0,#0},%0"
                : "=d" (cnt)
                : "dm" (x));
        return 32 - cnt;
}

static inline int __fls(int x)
{
        return fls(x) - 1;
}

#endif

/* Simple test-and-set bit locks */
#define test_and_set_bit_lock   test_and_set_bit
#define clear_bit_unlock        clear_bit
#define __clear_bit_unlock      clear_bit_unlock

#include <asm-generic/bitops/ext2-atomic.h>
#include <asm-generic/bitops/le.h>
#include <asm-generic/bitops/fls64.h>
#include <asm-generic/bitops/sched.h>
#include <asm-generic/bitops/hweight.h>
#endif /* __KERNEL__ */

#endif /* _M68K_BITOPS_H */
