root/arch/xtensa/include/asm/cmpxchg.h


DEFINITIONS

This source file includes the following definitions:
  1. __cmpxchg_u32
  2. __cmpxchg
  3. __cmpxchg_local
  4. xchg_u32
  5. xchg_small
  6. __xchg

/*
 * Atomic xchg and cmpxchg operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2001 - 2005 Tensilica Inc.
 */

#ifndef _XTENSA_CMPXCHG_H
#define _XTENSA_CMPXCHG_H

#ifndef __ASSEMBLY__

#include <linux/bits.h>
#include <linux/compiler.h>
#include <linux/stringify.h>

/*
 * cmpxchg
 */

static inline unsigned long
__cmpxchg_u32(volatile int *p, int old, int new)
{
#if XCHAL_HAVE_EXCLUSIVE
        unsigned long tmp, result;

        __asm__ __volatile__(
                        "1:     l32ex   %0, %3\n"
                        "       bne     %0, %4, 2f\n"
                        "       mov     %1, %2\n"
                        "       s32ex   %1, %3\n"
                        "       getex   %1\n"
                        "       beqz    %1, 1b\n"
                        "2:\n"
                        : "=&a" (result), "=&a" (tmp)
                        : "a" (new), "a" (p), "a" (old)
                        : "memory"
                        );

        return result;
#elif XCHAL_HAVE_S32C1I
        __asm__ __volatile__(
                        "       wsr     %2, scompare1\n"
                        "       s32c1i  %0, %1, 0\n"
                        : "+a" (new)
                        : "a" (p), "a" (old)
                        : "memory"
                        );

        return new;
#else
        __asm__ __volatile__(
                        "       rsil    a15, "__stringify(TOPLEVEL)"\n"
                        "       l32i    %0, %1, 0\n"
                        "       bne     %0, %2, 1f\n"
                        "       s32i    %3, %1, 0\n"
                        "1:\n"
                        "       wsr     a15, ps\n"
                        "       rsync\n"
                        : "=&a" (old)
                        : "a" (p), "a" (old), "r" (new)
                        : "a15", "memory");
        return old;
#endif
}
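
/*
 * Illustrative sketch (not the kernel API itself): each of the three
 * variants above implements the following compare-and-swap semantics,
 * atomically with respect to all other observers of *p:
 *
 *      cur = *p;
 *      if (cur == old)
 *              *p = new;
 *      return cur;
 *
 * The caller detects success by checking that the returned value equals
 * the 'old' it passed in.
 */
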
/* This function doesn't exist, so you'll get a linker error
 * if something tries to do an invalid cmpxchg(). */

extern void __cmpxchg_called_with_bad_pointer(void);

static inline unsigned long
__cmpxchg(volatile void *ptr, unsigned long old, unsigned long new, int size)
{
        switch (size) {
        case 4:  return __cmpxchg_u32(ptr, old, new);
        default: __cmpxchg_called_with_bad_pointer();
                 return old;
        }
}

#define cmpxchg(ptr, o, n)                                                    \
        ({ __typeof__(*(ptr)) _o_ = (o);                                      \
           __typeof__(*(ptr)) _n_ = (n);                                      \
           (__typeof__(*(ptr))) __cmpxchg((ptr), (unsigned long)_o_,          \
                                        (unsigned long)_n_, sizeof(*(ptr)));  \
        })
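
/*
 * Usage sketch (hypothetical caller, for illustration only): claim a
 * lock word by atomically replacing 0 with 1, retrying on contention:
 *
 *      while (cmpxchg(&lock_word, 0, 1) != 0)
 *              cpu_relax();
 *
 * cmpxchg() returns the value found in *ptr, so a return of 0 means
 * this caller performed the 0 -> 1 transition.
 */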

#include <asm-generic/cmpxchg-local.h>

static inline unsigned long __cmpxchg_local(volatile void *ptr,
                                      unsigned long old,
                                      unsigned long new, int size)
{
        switch (size) {
        case 4:
                return __cmpxchg_u32(ptr, old, new);
        default:
                return __cmpxchg_local_generic(ptr, old, new, size);
        }
}

/*
 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
 * them available.
 */
#define cmpxchg_local(ptr, o, n)                                               \
        ((__typeof__(*(ptr)))__cmpxchg_local((ptr), (unsigned long)(o),        \
                        (unsigned long)(n), sizeof(*(ptr))))
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
#define cmpxchg64(ptr, o, n)    cmpxchg64_local((ptr), (o), (n))

/*
 * xchg_u32
 *
 * Note that a15 is used here because register allocation by the
 * compiler is not guaranteed, and a window overflow must not occur
 * between the rsil and wsr instructions. By using a15 in the rsil,
 * the machine is guaranteed to be in a state where no register
 * reference will cause an overflow.
 */

static inline unsigned long xchg_u32(volatile int *m, unsigned long val)
{
#if XCHAL_HAVE_EXCLUSIVE
        unsigned long tmp, result;

        __asm__ __volatile__(
                        "1:     l32ex   %0, %3\n"
                        "       mov     %1, %2\n"
                        "       s32ex   %1, %3\n"
                        "       getex   %1\n"
                        "       beqz    %1, 1b\n"
                        : "=&a" (result), "=&a" (tmp)
                        : "a" (val), "a" (m)
                        : "memory"
                        );

        return result;
#elif XCHAL_HAVE_S32C1I
        unsigned long tmp, result;
        __asm__ __volatile__(
                        "1:     l32i    %1, %2, 0\n"
                        "       mov     %0, %3\n"
                        "       wsr     %1, scompare1\n"
                        "       s32c1i  %0, %2, 0\n"
                        "       bne     %0, %1, 1b\n"
                        : "=&a" (result), "=&a" (tmp)
                        : "a" (m), "a" (val)
                        : "memory"
                        );
        return result;
#else
        unsigned long tmp;
        __asm__ __volatile__(
                        "       rsil    a15, "__stringify(TOPLEVEL)"\n"
                        "       l32i    %0, %1, 0\n"
                        "       s32i    %2, %1, 0\n"
                        "       wsr     a15, ps\n"
                        "       rsync\n"
                        : "=&a" (tmp)
                        : "a" (m), "a" (val)
                        : "a15", "memory");
        return tmp;
#endif
}
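
/*
 * Illustrative sketch (not the kernel API itself): whichever hardware
 * path is selected, xchg_u32() atomically performs:
 *
 *      tmp = *m;
 *      *m = val;
 *      return tmp;
 *
 * On the S32C1I path this becomes a retry loop: the observed value is
 * placed in scompare1, and s32c1i stores val only if *m still holds
 * that value, looping whenever another CPU updated *m in between.
 */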

#define xchg(ptr, x) \
        ((__typeof__(*(ptr)))__xchg((unsigned long)(x), (ptr), sizeof(*(ptr))))

static inline u32 xchg_small(volatile void *ptr, u32 x, int size)
{
        int off = (unsigned long)ptr % sizeof(u32);
        volatile u32 *p = ptr - off;
#ifdef __BIG_ENDIAN
        int bitoff = (sizeof(u32) - size - off) * BITS_PER_BYTE;
#else
        int bitoff = off * BITS_PER_BYTE;
#endif
        u32 bitmask = ((0x1 << size * BITS_PER_BYTE) - 1) << bitoff;
        u32 oldv, newv;
        u32 ret;

        do {
                oldv = READ_ONCE(*p);
                ret = (oldv & bitmask) >> bitoff;
                newv = (oldv & ~bitmask) | (x << bitoff);
        } while (__cmpxchg_u32(p, oldv, newv) != oldv);

        return ret;
}
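
/*
 * Worked example (little-endian, hypothetical values): a 1-byte xchg on
 * the byte at word offset 1 gives off = 1, bitoff = 8, and
 * bitmask = ((1 << 8) - 1) << 8 = 0x0000ff00. With *p == 0x11223344
 * and x == 0xaa:
 *
 *      ret  = (0x11223344 & 0x0000ff00) >> 8;            // 0x33
 *      newv = (0x11223344 & ~0x0000ff00) | (0xaa << 8);  // 0x1122aa44
 *
 * The __cmpxchg_u32() loop publishes newv only if the containing word
 * is still 0x11223344, so concurrent updates to the other three bytes
 * are never lost.
 */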

/*
 * This only works if the compiler isn't horribly bad at optimizing.
 * gcc-2.5.8 reportedly can't handle this, but I define that one to
 * be dead anyway.
 */

extern void __xchg_called_with_bad_pointer(void);

static inline unsigned long
__xchg(unsigned long x, volatile void *ptr, int size)
{
        switch (size) {
        case 1:
                return xchg_small(ptr, x, 1);
        case 2:
                return xchg_small(ptr, x, 2);
        case 4:
                return xchg_u32(ptr, x);
        default:
                __xchg_called_with_bad_pointer();
                return x;
        }
}
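
/*
 * Usage sketch (hypothetical variables, for illustration only): xchg()
 * dispatches on sizeof(*ptr), so each of these is atomic:
 *
 *      u8  old_flag = xchg(&flag, 1);    // size 1: xchg_small()
 *      u16 old_half = xchg(&half, 2);    // size 2: xchg_small()
 *      u32 old_word = xchg(&word, 3);    // size 4: xchg_u32()
 *
 * Any other operand size fails at link time via
 * __xchg_called_with_bad_pointer().
 */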

#endif /* __ASSEMBLY__ */

#endif /* _XTENSA_CMPXCHG_H */
