root/arch/x86/include/asm/atomic64_64.h


DEFINITIONS

This source file includes the following definitions.
  1. arch_atomic64_read
  2. arch_atomic64_set
  3. arch_atomic64_add
  4. arch_atomic64_sub
  5. arch_atomic64_sub_and_test
  6. arch_atomic64_inc
  7. arch_atomic64_dec
  8. arch_atomic64_dec_and_test
  9. arch_atomic64_inc_and_test
  10. arch_atomic64_add_negative
  11. arch_atomic64_add_return
  12. arch_atomic64_sub_return
  13. arch_atomic64_fetch_add
  14. arch_atomic64_fetch_sub
  15. arch_atomic64_cmpxchg
  16. arch_atomic64_try_cmpxchg
  17. arch_atomic64_xchg
  18. arch_atomic64_and
  19. arch_atomic64_fetch_and
  20. arch_atomic64_or
  21. arch_atomic64_fetch_or
  22. arch_atomic64_xor
  23. arch_atomic64_fetch_xor

/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_ATOMIC64_64_H
#define _ASM_X86_ATOMIC64_64_H

#include <linux/types.h>
#include <asm/alternative.h>
#include <asm/cmpxchg.h>

/* The 64-bit atomic type */

#define ATOMIC64_INIT(i)        { (i) }

/**
 * arch_atomic64_read - read atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically reads the value of @v.
 * Doesn't imply a read memory barrier.
 */
static inline s64 arch_atomic64_read(const atomic64_t *v)
{
        return READ_ONCE((v)->counter);
}

/**
 * arch_atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
static inline void arch_atomic64_set(atomic64_t *v, s64 i)
{
        WRITE_ONCE(v->counter, i);
}
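
/*
 * Illustrative sketch, not part of the upstream header: kernel code
 * normally reaches these operations through the atomic64_* wrappers in
 * <linux/atomic.h> rather than calling arch_atomic64_* directly. The
 * variable names below are hypothetical.
 *
 *      static atomic64_t last_seen_ns = ATOMIC64_INIT(0);
 *
 *      arch_atomic64_set(&last_seen_ns, now_ns);       - plain atomic store
 *      s64 stamp = arch_atomic64_read(&last_seen_ns);  - plain atomic load
 *
 * Neither call implies a memory barrier; they only guarantee that the
 * 64-bit access itself is a single, untorn load or store.
 */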

/**
 * arch_atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __always_inline void arch_atomic64_add(s64 i, atomic64_t *v)
{
        asm volatile(LOCK_PREFIX "addq %1,%0"
                     : "=m" (v->counter)
                     : "er" (i), "m" (v->counter) : "memory");
}

/**
 * arch_atomic64_sub - subtract integer from atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static inline void arch_atomic64_sub(s64 i, atomic64_t *v)
{
        asm volatile(LOCK_PREFIX "subq %1,%0"
                     : "=m" (v->counter)
                     : "er" (i), "m" (v->counter) : "memory");
}
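
/*
 * Illustrative sketch, not part of the upstream header: lock-free
 * accumulation with add/sub. The counter and length names are
 * hypothetical.
 *
 *      static atomic64_t rx_bytes = ATOMIC64_INIT(0);
 *
 *      arch_atomic64_add(frame_len, &rx_bytes);
 *      arch_atomic64_sub(pad_len, &rx_bytes);
 *
 * Because no value is returned, each call is a single lock-prefixed
 * ADDQ/SUBQ; use the *_return or fetch_* variants further down when the
 * caller needs the resulting or previous value.
 */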

/**
 * arch_atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline bool arch_atomic64_sub_and_test(s64 i, atomic64_t *v)
{
        return GEN_BINARY_RMWcc(LOCK_PREFIX "subq", v->counter, e, "er", i);
}
#define arch_atomic64_sub_and_test arch_atomic64_sub_and_test

/**
 * arch_atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 */
static __always_inline void arch_atomic64_inc(atomic64_t *v)
{
        asm volatile(LOCK_PREFIX "incq %0"
                     : "=m" (v->counter)
                     : "m" (v->counter) : "memory");
}
#define arch_atomic64_inc arch_atomic64_inc

/**
 * arch_atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
static __always_inline void arch_atomic64_dec(atomic64_t *v)
{
        asm volatile(LOCK_PREFIX "decq %0"
                     : "=m" (v->counter)
                     : "m" (v->counter) : "memory");
}
#define arch_atomic64_dec arch_atomic64_dec

/**
 * arch_atomic64_dec_and_test - decrement and test
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline bool arch_atomic64_dec_and_test(atomic64_t *v)
{
        return GEN_UNARY_RMWcc(LOCK_PREFIX "decq", v->counter, e);
}
#define arch_atomic64_dec_and_test arch_atomic64_dec_and_test
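
/*
 * Illustrative sketch, not part of the upstream header: the classic
 * reference-count drop built on dec_and_test. The structure and function
 * names are hypothetical.
 *
 *      struct obj {
 *              atomic64_t refs;
 *      };
 *
 *      static void obj_put(struct obj *o)
 *      {
 *              if (arch_atomic64_dec_and_test(&o->refs))
 *                      kfree(o);
 *      }
 *
 * Exactly one caller observes the 1 -> 0 transition, so the final kfree()
 * runs exactly once even under concurrent obj_put() calls.
 */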

/**
 * arch_atomic64_inc_and_test - increment and test
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline bool arch_atomic64_inc_and_test(atomic64_t *v)
{
        return GEN_UNARY_RMWcc(LOCK_PREFIX "incq", v->counter, e);
}
#define arch_atomic64_inc_and_test arch_atomic64_inc_and_test

/**
 * arch_atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline bool arch_atomic64_add_negative(s64 i, atomic64_t *v)
{
        return GEN_BINARY_RMWcc(LOCK_PREFIX "addq", v->counter, s, "er", i);
}
#define arch_atomic64_add_negative arch_atomic64_add_negative
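
/*
 * Illustrative sketch, not part of the upstream header: add_negative
 * suits signed "budget" style counters where only the sign of the result
 * matters. The names budget, cost and throttle() are hypothetical.
 *
 *      static atomic64_t budget = ATOMIC64_INIT(1024);
 *
 *      if (arch_atomic64_add_negative(-cost, &budget))
 *              throttle();
 *
 * The branch is taken only when the counter dropped below zero as a
 * result of this particular subtraction.
 */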

/**
 * arch_atomic64_add_return - add and return
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static __always_inline s64 arch_atomic64_add_return(s64 i, atomic64_t *v)
{
        return i + xadd(&v->counter, i);
}

static inline s64 arch_atomic64_sub_return(s64 i, atomic64_t *v)
{
        return arch_atomic64_add_return(-i, v);
}

static inline s64 arch_atomic64_fetch_add(s64 i, atomic64_t *v)
{
        return xadd(&v->counter, i);
}

static inline s64 arch_atomic64_fetch_sub(s64 i, atomic64_t *v)
{
        return xadd(&v->counter, -i);
}
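
/*
 * Illustrative sketch, not part of the upstream header: the xadd-based
 * helpers above differ only in which value they hand back. fetch_add
 * returns the value *before* the addition, add_return the value *after*
 * it, which makes fetch_add a natural unique-ID allocator. The variable
 * names are hypothetical.
 *
 *      static atomic64_t next_id = ATOMIC64_INIT(0);
 *      static atomic64_t total   = ATOMIC64_INIT(0);
 *
 *      s64 id  = arch_atomic64_fetch_add(1, &next_id);  - old value: 0, 1, 2, ...
 *      s64 sum = arch_atomic64_add_return(n, &total);   - new running total
 */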

static inline s64 arch_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
        return arch_cmpxchg(&v->counter, old, new);
}

#define arch_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg
static __always_inline bool arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
        return try_cmpxchg(&v->counter, old, new);
}
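
/*
 * Illustrative sketch, not part of the upstream header: on failure,
 * try_cmpxchg() updates *old with the value actually found, so a
 * compare-and-swap retry loop needs no separate re-read. This is the
 * same pattern the fetch_and/or/xor helpers below rely on. An "atomic
 * max" with a hypothetical variable name:
 *
 *      static atomic64_t high_water = ATOMIC64_INIT(0);
 *
 *      static void record_high_water(s64 n)
 *      {
 *              s64 cur = arch_atomic64_read(&high_water);
 *
 *              do {
 *                      if (cur >= n)
 *                              return;
 *              } while (!arch_atomic64_try_cmpxchg(&high_water, &cur, n));
 *      }
 */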

static inline s64 arch_atomic64_xchg(atomic64_t *v, s64 new)
{
        return arch_xchg(&v->counter, new);
}

static inline void arch_atomic64_and(s64 i, atomic64_t *v)
{
        asm volatile(LOCK_PREFIX "andq %1,%0"
                        : "+m" (v->counter)
                        : "er" (i)
                        : "memory");
}

static inline s64 arch_atomic64_fetch_and(s64 i, atomic64_t *v)
{
        s64 val = arch_atomic64_read(v);

        do {
        } while (!arch_atomic64_try_cmpxchg(v, &val, val & i));
        return val;
}

static inline void arch_atomic64_or(s64 i, atomic64_t *v)
{
        asm volatile(LOCK_PREFIX "orq %1,%0"
                        : "+m" (v->counter)
                        : "er" (i)
                        : "memory");
}

static inline s64 arch_atomic64_fetch_or(s64 i, atomic64_t *v)
{
        s64 val = arch_atomic64_read(v);

        do {
        } while (!arch_atomic64_try_cmpxchg(v, &val, val | i));
        return val;
}

static inline void arch_atomic64_xor(s64 i, atomic64_t *v)
{
        asm volatile(LOCK_PREFIX "xorq %1,%0"
                        : "+m" (v->counter)
                        : "er" (i)
                        : "memory");
}

static inline s64 arch_atomic64_fetch_xor(s64 i, atomic64_t *v)
{
        s64 val = arch_atomic64_read(v);

        do {
        } while (!arch_atomic64_try_cmpxchg(v, &val, val ^ i));
        return val;
}
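
/*
 * Illustrative sketch, not part of the upstream header: fetch_or returns
 * the value that was in place before the OR, so it can atomically set a
 * flag and report whether it had already been set. The flag and state
 * names are hypothetical.
 *
 *      #define FLAG_SHUTDOWN   (1LL << 0)
 *      static atomic64_t state = ATOMIC64_INIT(0);
 *
 *      if (arch_atomic64_fetch_or(FLAG_SHUTDOWN, &state) & FLAG_SHUTDOWN)
 *              return;
 *
 * Only the first caller to set the bit proceeds past the test; every
 * later caller sees the bit already set in the returned old value.
 */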

#endif /* _ASM_X86_ATOMIC64_64_H */
