root/arch/powerpc/include/asm/word-at-a-time.h


DEFINITIONS

This source file includes the following definitions (several helpers are
defined once per endianness/word-size branch, hence the repeated names).
  1. prep_zero_mask
  2. find_zero
  3. has_zero
  4. zero_bytemask
  5. has_zero
  6. prep_zero_mask
  7. create_zero_mask
  8. find_zero
  9. zero_bytemask
  10. count_masked_bytes
  11. create_zero_mask
  12. find_zero
  13. has_zero
  14. prep_zero_mask
  15. load_unaligned_zeropad

#ifndef _ASM_WORD_AT_A_TIME_H
#define _ASM_WORD_AT_A_TIME_H

/*
 * Word-at-a-time interfaces for PowerPC.
 */

#include <linux/kernel.h>
#include <asm/asm-compat.h>
#include <asm/ppc_asm.h>

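/*
 * Illustrative usage sketch (added commentary, not part of the original
 * header): generic callers such as lib/strncpy_from_user.c consume these
 * helpers roughly as follows.
 *
 *      const struct word_at_a_time constants = WORD_AT_A_TIME_CONSTANTS;
 *      unsigned long data, mask;
 *
 *      if (has_zero(word, &data, &constants)) {
 *              data = prep_zero_mask(word, data, &constants);
 *              mask = create_zero_mask(data);
 *              return res + find_zero(mask);   // byte index of first NUL
 *      }
 */
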
#ifdef __BIG_ENDIAN__

struct word_at_a_time {
        const unsigned long high_bits, low_bits;
};

#define WORD_AT_A_TIME_CONSTANTS { REPEAT_BYTE(0xfe) + 1, REPEAT_BYTE(0x7f) }
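
/*
 * Added commentary (not in the original source): on 64-bit this expands
 * to high_bits = 0xfefefefefefefeff and low_bits = 0x7f7f7f7f7f7f7f7f.
 * In has_zero() below, (val + high_bits) ends up with the 0x80 bit of a
 * byte set only where that byte is zero (or had its own top bit set),
 * and ~(val | low_bits) then discards the bytes whose top bit was set
 * in val, leaving a non-zero result iff some byte of val is zero.
 */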

/* Bit set in the bytes that have a zero */
static inline long prep_zero_mask(unsigned long val, unsigned long rhs, const struct word_at_a_time *c)
{
        unsigned long mask = (val & c->low_bits) + c->low_bits;
        return ~(mask | rhs);
}

#define create_zero_mask(mask) (mask)

static inline long find_zero(unsigned long mask)
{
        long leading_zero_bits;

        asm (PPC_CNTLZL "%0,%1" : "=r" (leading_zero_bits) : "r" (mask));
        return leading_zero_bits >> 3;
}

static inline bool has_zero(unsigned long val, unsigned long *data, const struct word_at_a_time *c)
{
        unsigned long rhs = val | c->low_bits;
        *data = rhs;
        return (val + c->high_bits) & ~rhs;
}

static inline unsigned long zero_bytemask(unsigned long mask)
{
        return ~1ul << __fls(mask);
}

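/*
 * Worked example (added for illustration, not in the original source),
 * big-endian 64-bit, val = 0x6162006364656667 ("ab\0cdefg"):
 *
 *   has_zero():       rhs = val | 0x7f7f7f7f7f7f7f7f = 0x7f7f7f7f7f7f7f7f
 *                     val + 0xfefefefefefefeff       = 0x6060ff6263646566
 *                     ... & ~rhs                     = 0x0000800000000000 (non-zero)
 *   prep_zero_mask(): ~(((val & low) + low) | rhs)   = 0x0000800000000000
 *   find_zero():      cntlzd = 16, 16 >> 3           = 2  (the '\0' is byte 2)
 *   zero_bytemask():  ~1ul << __fls(mask)            = 0xffff000000000000
 */
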
#else

#ifdef CONFIG_64BIT

/* unused */
struct word_at_a_time {
};

#define WORD_AT_A_TIME_CONSTANTS { }

/* This will give us 0xff for a NULL char and 0x00 elsewhere */
static inline unsigned long has_zero(unsigned long a, unsigned long *bits, const struct word_at_a_time *c)
{
        unsigned long ret;
        unsigned long zero = 0;

        asm("cmpb %0,%1,%2" : "=r" (ret) : "r" (a), "r" (zero));
        *bits = ret;

        return ret;
}
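
/*
 * Example (added for illustration): for a = 0x0000000000636261, i.e. the
 * string "abc" followed by NUL bytes loaded little-endian, cmpb against
 * zero yields ret = 0xffffffffff000000: 0xff in every byte of 'a' that
 * equals zero, 0x00 elsewhere.
 */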

static inline unsigned long prep_zero_mask(unsigned long a, unsigned long bits, const struct word_at_a_time *c)
{
        return bits;
}

/* Alan Modra's little-endian strlen tail for 64-bit */
static inline unsigned long create_zero_mask(unsigned long bits)
{
        unsigned long leading_zero_bits;
        long trailing_zero_bit_mask;

        asm("addi       %1,%2,-1\n\t"
            "andc       %1,%1,%2\n\t"
            "popcntd    %0,%1"
                : "=r" (leading_zero_bits), "=&r" (trailing_zero_bit_mask)
                : "b" (bits));

        return leading_zero_bits;
}
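
/*
 * Added commentary (not in the original source): despite the variable
 * name, on little-endian this computes the number of *trailing* zero
 * bits of 'bits'.  "addi %1,%2,-1; andc %1,%1,%2" turns the bits below
 * the lowest set bit into an all-ones mask, and popcntd counts them, so
 * the result is 8 * (index of the first 0xff byte).
 */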

static inline unsigned long find_zero(unsigned long mask)
{
        return mask >> 3;
}

/* This assumes that we never ask for an all 1s bitmask */
static inline unsigned long zero_bytemask(unsigned long mask)
{
        return (1UL << mask) - 1;
}

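/*
 * Worked example (added for illustration): continuing the "abc" case,
 * bits = 0xffffffffff000000 from has_zero()/prep_zero_mask():
 *
 *   create_zero_mask(): (bits - 1) & ~bits = 0x0000000000ffffff,
 *                       popcntd            = 24
 *   find_zero(24):      24 >> 3            = 3  (first NUL is byte 3)
 *   zero_bytemask(24):  (1UL << 24) - 1    = 0x0000000000ffffff
 *                       (selects the bytes before the NUL)
 */
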
#else   /* 32-bit case */

struct word_at_a_time {
        const unsigned long one_bits, high_bits;
};

#define WORD_AT_A_TIME_CONSTANTS { REPEAT_BYTE(0x01), REPEAT_BYTE(0x80) }

/*
 * This is largely generic for little-endian machines, but the
 * optimal byte mask counting is probably going to be something
 * that is architecture-specific. If you have a reliably fast
 * bit count instruction, that might be better than the multiply
 * and shift, for example.
 */

/* Carl Chatfield / Jan Achrenius G+ version for 32-bit */
static inline long count_masked_bytes(long mask)
{
        /* (000000 0000ff 00ffff ffffff) -> ( 1 1 2 3 ) */
        long a = (0x0ff0001+mask) >> 23;
        /* Fix the 1 for 00 case */
        return a & mask;
}
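
/*
 * Worked example (added for illustration) of the magic constant: after
 * create_zero_mask() below, 'mask' is 0, 0xff, 0xffff or 0xffffff for a
 * first zero at byte 0, 1, 2 or 3 respectively:
 *
 *   mask = 0x000000: (0x0ff0001 + 0) >> 23        = 1, 1 & 0        = 0
 *   mask = 0x0000ff: (0x0ff0001 + 0xff) >> 23     = 1, 1 & 0xff     = 1
 *   mask = 0x00ffff: (0x0ff0001 + 0xffff) >> 23   = 2, 2 & 0xffff   = 2
 *   mask = 0xffffff: (0x0ff0001 + 0xffffff) >> 23 = 3, 3 & 0xffffff = 3
 */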

static inline unsigned long create_zero_mask(unsigned long bits)
{
        bits = (bits - 1) & ~bits;
        return bits >> 7;
}

static inline unsigned long find_zero(unsigned long mask)
{
        return count_masked_bytes(mask);
}

/* Return nonzero if it has a zero */
static inline unsigned long has_zero(unsigned long a, unsigned long *bits, const struct word_at_a_time *c)
{
        unsigned long mask = ((a - c->one_bits) & ~a) & c->high_bits;
        *bits = mask;
        return mask;
}

static inline unsigned long prep_zero_mask(unsigned long a, unsigned long bits, const struct word_at_a_time *c)
{
        return bits;
}

/* The mask we created is directly usable as a bytemask */
#define zero_bytemask(mask) (mask)

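/*
 * Worked example (added for illustration), little-endian 32-bit word
 * a = 0x44004241 (memory bytes 'A', 'B', '\0', 'D'):
 *
 *   has_zero():         ((a - 0x01010101) & ~a) & 0x80808080 = 0x00800000
 *   create_zero_mask(): ((bits - 1) & ~bits) >> 7            = 0x0000ffff
 *   find_zero():        count_masked_bytes(0xffff)           = 2
 *   zero_bytemask():    0x0000ffff  (the two bytes before the NUL)
 */
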
#endif /* CONFIG_64BIT */

#endif /* __BIG_ENDIAN__ */

/*
 * We use load_unaligned_zeropad() in a selftest, which builds a userspace
 * program. Some linker scripts seem to discard the .fixup section, so allow
 * the test code to use a different section name.
 */
#ifndef FIXUP_SECTION
#define FIXUP_SECTION ".fixup"
#endif

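/*
 * Added commentary (not in the original source): load_unaligned_zeropad()
 * does a normal (possibly unaligned) word load at 'addr'.  If that load
 * faults because it runs off the end of a mapped page, the fixup code at
 * label 3 recovers: it rounds 'addr' down to an aligned word (clrrdi/
 * clrrwi), reloads from there (which cannot fault), and shifts the result
 * by 8 * (addr % sizeof(long)) bits so that the bytes below 'addr' are
 * discarded and the missing bytes read as zero.  E.g. if the last two
 * mapped bytes are "AB", a little-endian 64-bit load at their address
 * returns 0x0000000000004241.
 */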
static inline unsigned long load_unaligned_zeropad(const void *addr)
{
        unsigned long ret, offset, tmp;

        asm(
        "1:     " PPC_LL "%[ret], 0(%[addr])\n"
        "2:\n"
        ".section " FIXUP_SECTION ",\"ax\"\n"
        "3:     "
#ifdef __powerpc64__
        "clrrdi         %[tmp], %[addr], 3\n\t"
        "clrlsldi       %[offset], %[addr], 61, 3\n\t"
        "ld             %[ret], 0(%[tmp])\n\t"
#ifdef __BIG_ENDIAN__
        "sld            %[ret], %[ret], %[offset]\n\t"
#else
        "srd            %[ret], %[ret], %[offset]\n\t"
#endif
#else
        "clrrwi         %[tmp], %[addr], 2\n\t"
        "clrlslwi       %[offset], %[addr], 30, 3\n\t"
        "lwz            %[ret], 0(%[tmp])\n\t"
#ifdef __BIG_ENDIAN__
        "slw            %[ret], %[ret], %[offset]\n\t"
#else
        "srw            %[ret], %[ret], %[offset]\n\t"
#endif
#endif
        "b      2b\n"
        ".previous\n"
        EX_TABLE(1b, 3b)
        : [tmp] "=&b" (tmp), [offset] "=&r" (offset), [ret] "=&r" (ret)
        : [addr] "b" (addr), "m" (*(unsigned long *)addr));

        return ret;
}

#undef FIXUP_SECTION

#endif /* _ASM_WORD_AT_A_TIME_H */
