root/arch/openrisc/include/asm/atomic.h

DEFINITIONS

This source file includes the following definitions:
  1. atomic_fetch_add_unless

/*
 * Copyright (C) 2014 Stefan Kristiansson <stefan.kristiansson@saunalahti.fi>
 *
 * This file is licensed under the terms of the GNU General Public License
 * version 2.  This program is licensed "as is" without any warranty of any
 * kind, whether express or implied.
 */

#ifndef __ASM_OPENRISC_ATOMIC_H
#define __ASM_OPENRISC_ATOMIC_H

#include <linux/types.h>

/* Atomically perform op with v->counter and i */
#define ATOMIC_OP(op)                                                   \
static inline void atomic_##op(int i, atomic_t *v)                      \
{                                                                       \
        int tmp;                                                        \
                                                                        \
        __asm__ __volatile__(                                           \
                "1:     l.lwa   %0,0(%1)        \n"                     \
                "       l." #op " %0,%0,%2      \n"                     \
                "       l.swa   0(%1),%0        \n"                     \
                "       l.bnf   1b              \n"                     \
                "        l.nop                  \n"                     \
                : "=&r"(tmp)                                            \
                : "r"(&v->counter), "r"(i)                              \
                : "cc", "memory");                                      \
}
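
/*
 * Illustrative sketch (editor's note, not part of the original file): the
 * asm above is a load-linked/store-conditional retry loop.  In plain C,
 * ATOMIC_OP(and) expands to roughly:
 *
 *     static inline void atomic_and(int i, atomic_t *v)
 *     {
 *             int tmp;
 *             do {
 *                     tmp = v->counter;   // l.lwa: load word, set reservation
 *                     tmp &= i;           // l.and
 *             } while (!store_conditional(&v->counter, tmp));
 *     }
 *
 * store_conditional() is a hypothetical stand-in for l.swa + l.bnf: the
 * store succeeds only if nothing else wrote v->counter since the l.lwa
 * set the reservation; otherwise the loop retries from 1:.
 */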

/* Atomically perform op with v->counter and i, return the result */
#define ATOMIC_OP_RETURN(op)                                            \
static inline int atomic_##op##_return(int i, atomic_t *v)              \
{                                                                       \
        int tmp;                                                        \
                                                                        \
        __asm__ __volatile__(                                           \
                "1:     l.lwa   %0,0(%1)        \n"                     \
                "       l." #op " %0,%0,%2      \n"                     \
                "       l.swa   0(%1),%0        \n"                     \
                "       l.bnf   1b              \n"                     \
                "        l.nop                  \n"                     \
                : "=&r"(tmp)                                            \
                : "r"(&v->counter), "r"(i)                              \
                : "cc", "memory");                                      \
                                                                        \
        return tmp;                                                     \
}
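
/*
 * Usage sketch (editor's note): the _return variants return the new value,
 * i.e. the value after the operation has been applied:
 *
 *     atomic_t v = ATOMIC_INIT(1);
 *     int n = atomic_add_return(2, &v);   // v.counter == 3, n == 3
 *     int m = atomic_sub_return(1, &v);   // v.counter == 2, m == 2
 */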

/* Atomically perform op with v->counter and i, return orig v->counter */
#define ATOMIC_FETCH_OP(op)                                             \
static inline int atomic_fetch_##op(int i, atomic_t *v)                 \
{                                                                       \
        int tmp, old;                                                   \
                                                                        \
        __asm__ __volatile__(                                           \
                "1:     l.lwa   %0,0(%2)        \n"                     \
                "       l." #op " %1,%0,%3      \n"                     \
                "       l.swa   0(%2),%1        \n"                     \
                "       l.bnf   1b              \n"                     \
                "        l.nop                  \n"                     \
                : "=&r"(old), "=&r"(tmp)                                \
                : "r"(&v->counter), "r"(i)                              \
                : "cc", "memory");                                      \
                                                                        \
        return old;                                                     \
}
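
/*
 * Usage sketch (editor's note): unlike the _return variants, the fetch_
 * variants return the old value, from before the operation was applied:
 *
 *     atomic_t v = ATOMIC_INIT(4);
 *     int o = atomic_fetch_sub(1, &v);    // v.counter == 3, o == 4
 */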

ATOMIC_OP_RETURN(add)
ATOMIC_OP_RETURN(sub)

ATOMIC_FETCH_OP(add)
ATOMIC_FETCH_OP(sub)
ATOMIC_FETCH_OP(and)
ATOMIC_FETCH_OP(or)
ATOMIC_FETCH_OP(xor)

ATOMIC_OP(and)
ATOMIC_OP(or)
ATOMIC_OP(xor)

#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

#define atomic_add_return       atomic_add_return
#define atomic_sub_return       atomic_sub_return
#define atomic_fetch_add        atomic_fetch_add
#define atomic_fetch_sub        atomic_fetch_sub
#define atomic_fetch_and        atomic_fetch_and
#define atomic_fetch_or         atomic_fetch_or
#define atomic_fetch_xor        atomic_fetch_xor
#define atomic_and      atomic_and
#define atomic_or       atomic_or
#define atomic_xor      atomic_xor
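
/*
 * Note (editor's addition, assuming the usual asm-generic convention):
 * defining each operation as a macro of the same name marks it as
 * implemented by this architecture, so the #ifndef guards in
 * asm-generic/atomic.h (included below) only fill in the operations
 * that are not already defined here.
 */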

/*
 * Atomically add a to v->counter, unless v->counter already equals u.
 * Returns the original value of v->counter.
 *
 * This is most often used through atomic_inc_not_zero().
 */
static inline int atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
        int old, tmp;

        __asm__ __volatile__(
                "1:     l.lwa %0, 0(%2)         \n"
                "       l.sfeq %0, %4           \n"
                "       l.bf 2f                 \n"
                "        l.add %1, %0, %3       \n"
                "       l.swa 0(%2), %1         \n"
                "       l.bnf 1b                \n"
                "        l.nop                  \n"
                "2:                             \n"
                : "=&r"(old), "=&r"(tmp)
                : "r"(&v->counter), "r"(a), "r"(u)
                : "cc", "memory");

        return old;
}
#define atomic_fetch_add_unless atomic_fetch_add_unless
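
/*
 * Usage sketch (editor's note): atomic_inc_not_zero(), provided by
 * linux/atomic.h in terms of this helper, is the common consumer: take
 * a reference only if the count has not already dropped to zero.
 *
 *     if (atomic_inc_not_zero(&obj->refcnt))   // obj is hypothetical
 *             ...;   // got a reference, obj cannot be freed under us
 *
 * This behaves like atomic_fetch_add_unless(&obj->refcnt, 1, 0) != 0.
 * Also note the delay-slot trick above: l.add sits in the l.bf delay
 * slot, so tmp is computed even on the early exit to 2:, where it is
 * simply never stored.
 */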

#include <asm-generic/atomic.h>

#endif /* __ASM_OPENRISC_ATOMIC_H */
