root/arch/x86/include/asm/atomic64_32.h

DEFINITIONS

This source file includes the following definitions.
  1. arch_atomic64_cmpxchg
  2. arch_atomic64_xchg
  3. arch_atomic64_set
  4. arch_atomic64_read
  5. arch_atomic64_add_return
  6. arch_atomic64_sub_return
  7. arch_atomic64_inc_return
  8. arch_atomic64_dec_return
  9. arch_atomic64_add
  10. arch_atomic64_sub
  11. arch_atomic64_inc
  12. arch_atomic64_dec
  13. arch_atomic64_add_unless
  14. arch_atomic64_inc_not_zero
  15. arch_atomic64_dec_if_positive
  16. arch_atomic64_and
  17. arch_atomic64_fetch_and
  18. arch_atomic64_or
  19. arch_atomic64_fetch_or
  20. arch_atomic64_xor
  21. arch_atomic64_fetch_xor
  22. arch_atomic64_fetch_add

/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_ATOMIC64_32_H
#define _ASM_X86_ATOMIC64_32_H

#include <linux/compiler.h>
#include <linux/types.h>
//#include <asm/cmpxchg.h>

/* A 64-bit atomic type */

typedef struct {
	s64 __aligned(8) counter;
} atomic64_t;

#define ATOMIC64_INIT(val)	{ (val) }

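/*
 * Usage sketch (illustrative, not part of this header): a 64-bit
 * event counter on 32-bit x86.  The variable name "bytes_sent" is
 * hypothetical.
 *
 *	static atomic64_t bytes_sent = ATOMIC64_INIT(0);
 */
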
#define __ATOMIC64_DECL(sym) void atomic64_##sym(atomic64_t *, ...)
#ifndef ATOMIC64_EXPORT
#define ATOMIC64_DECL_ONE __ATOMIC64_DECL
#else
#define ATOMIC64_DECL_ONE(sym) __ATOMIC64_DECL(sym); \
	ATOMIC64_EXPORT(atomic64_##sym)
#endif

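/*
 * Dispatch note: when every supported CPU has CMPXCHG8B
 * (CONFIG_X86_CMPXCHG64), the out-of-line _cx8 helpers are called
 * directly.  Otherwise alternative_call() patches the call site at
 * boot, selecting the _cx8 helpers when the X86_FEATURE_CX8 CPUID
 * bit is present and the generic _386 fallbacks when it is not.
 */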
#ifdef CONFIG_X86_CMPXCHG64
#define __alternative_atomic64(f, g, out, in...) \
	asm volatile("call %P[func]" \
		     : out : [func] "i" (atomic64_##g##_cx8), ## in)

#define ATOMIC64_DECL(sym) ATOMIC64_DECL_ONE(sym##_cx8)
#else
#define __alternative_atomic64(f, g, out, in...) \
	alternative_call(atomic64_##f##_386, atomic64_##g##_cx8, \
			 X86_FEATURE_CX8, ASM_OUTPUT2(out), ## in)

#define ATOMIC64_DECL(sym) ATOMIC64_DECL_ONE(sym##_cx8); \
	ATOMIC64_DECL_ONE(sym##_386)

ATOMIC64_DECL_ONE(add_386);
ATOMIC64_DECL_ONE(sub_386);
ATOMIC64_DECL_ONE(inc_386);
ATOMIC64_DECL_ONE(dec_386);
#endif

#define alternative_atomic64(f, out, in...) \
	__alternative_atomic64(f, f, ASM_OUTPUT2(out), ## in)

ATOMIC64_DECL(read);
ATOMIC64_DECL(set);
ATOMIC64_DECL(xchg);
ATOMIC64_DECL(add_return);
ATOMIC64_DECL(sub_return);
ATOMIC64_DECL(inc_return);
ATOMIC64_DECL(dec_return);
ATOMIC64_DECL(dec_if_positive);
ATOMIC64_DECL(inc_not_zero);
ATOMIC64_DECL(add_unless);

#undef ATOMIC64_DECL
#undef ATOMIC64_DECL_ONE
#undef __ATOMIC64_DECL
#undef ATOMIC64_EXPORT

/**
 * arch_atomic64_cmpxchg - cmpxchg atomic64 variable
 * @v: pointer to type atomic64_t
 * @o: expected value
 * @n: new value
 *
 * Atomically sets @v to @n if it was equal to @o and returns
 * the old value.
 */
static inline s64 arch_atomic64_cmpxchg(atomic64_t *v, s64 o, s64 n)
{
	return arch_cmpxchg64(&v->counter, o, n);
}
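
/*
 * Example (a sketch, not kernel API): the canonical cmpxchg retry
 * loop, here atomically doubling a counter.  It relies on
 * arch_atomic64_read(), defined further down in this file.
 *
 *	s64 old = arch_atomic64_read(v);
 *	for (;;) {
 *		s64 seen = arch_atomic64_cmpxchg(v, old, old * 2);
 *		if (seen == old)
 *			break;		our update was installed
 *		old = seen;		lost the race; retry with fresh value
 *	}
 */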

/**
 * arch_atomic64_xchg - xchg atomic64 variable
 * @v: pointer to type atomic64_t
 * @n: value to assign
 *
 * Atomically xchgs the value of @v to @n and returns
 * the old value.
 */
static inline s64 arch_atomic64_xchg(atomic64_t *v, s64 n)
{
	s64 o;
	unsigned high = (unsigned)(n >> 32);
	unsigned low = (unsigned)n;
	alternative_atomic64(xchg, "=&A" (o),
			     "S" (v), "b" (low), "c" (high)
			     : "memory");
	return o;
}
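
/*
 * Example (sketch): publish a new 64-bit value and obtain the one it
 * replaced in a single atomic step; "ts" and "new_ts" are
 * hypothetical names.
 *
 *	s64 prev = arch_atomic64_xchg(&ts, new_ts);
 */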

/**
 * arch_atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: value to assign
 *
 * Atomically sets the value of @v to @i.
 */
static inline void arch_atomic64_set(atomic64_t *v, s64 i)
{
	unsigned high = (unsigned)(i >> 32);
	unsigned low = (unsigned)i;
	alternative_atomic64(set, /* no output */,
			     "S" (v), "b" (low), "c" (high)
			     : "eax", "edx", "memory");
}

/**
 * arch_atomic64_read - read atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically reads the value of @v and returns it.
 */
static inline s64 arch_atomic64_read(const atomic64_t *v)
{
	s64 r;
	alternative_atomic64(read, "=&A" (r), "c" (v) : "memory");
	return r;
}
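
/*
 * Example (sketch): on 32-bit x86 a plain 64-bit load or store can
 * tear across its two halves, so even simple reads and writes must go
 * through these helpers.  "seqno" is a hypothetical variable.
 *
 *	arch_atomic64_set(&seqno, 0);
 *	s64 cur = arch_atomic64_read(&seqno);
 */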

/**
 * arch_atomic64_add_return - add and return
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns @i + *@v
 */
static inline s64 arch_atomic64_add_return(s64 i, atomic64_t *v)
{
	alternative_atomic64(add_return,
			     ASM_OUTPUT2("+A" (i), "+c" (v)),
			     ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}
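
/*
 * Example (sketch): reserve a contiguous range of IDs.  The return
 * value is the counter after the addition, so the start of the
 * reserved range is the return value minus the amount added; the
 * names "count" and "next_id" are hypothetical.
 *
 *	s64 end = arch_atomic64_add_return(count, &next_id);
 *	s64 start = end - count;
 */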

/*
 * Other variants with different arithmetic operators:
 */
static inline s64 arch_atomic64_sub_return(s64 i, atomic64_t *v)
{
	alternative_atomic64(sub_return,
			     ASM_OUTPUT2("+A" (i), "+c" (v)),
			     ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}

static inline s64 arch_atomic64_inc_return(atomic64_t *v)
{
	s64 a;
	alternative_atomic64(inc_return, "=&A" (a),
			     "S" (v) : "memory", "ecx");
	return a;
}
#define arch_atomic64_inc_return arch_atomic64_inc_return

static inline s64 arch_atomic64_dec_return(atomic64_t *v)
{
	s64 a;
	alternative_atomic64(dec_return, "=&A" (a),
			     "S" (v) : "memory", "ecx");
	return a;
}
#define arch_atomic64_dec_return arch_atomic64_dec_return
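
/*
 * Example (sketch): the put side of a reference count; whoever
 * decrements to zero owns teardown.  "release_obj()" is a
 * hypothetical destructor.
 *
 *	if (arch_atomic64_dec_return(&obj->refs) == 0)
 *		release_obj(obj);
 */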

/**
 * arch_atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static inline s64 arch_atomic64_add(s64 i, atomic64_t *v)
{
	__alternative_atomic64(add, add_return,
			       ASM_OUTPUT2("+A" (i), "+c" (v)),
			       ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}

/**
 * arch_atomic64_sub - subtract integer from atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static inline s64 arch_atomic64_sub(s64 i, atomic64_t *v)
{
	__alternative_atomic64(sub, sub_return,
			       ASM_OUTPUT2("+A" (i), "+c" (v)),
			       ASM_NO_INPUT_CLOBBER("memory"));
	return i;
}

/**
 * arch_atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 */
static inline void arch_atomic64_inc(atomic64_t *v)
{
	__alternative_atomic64(inc, inc_return, /* no output */,
			       "S" (v) : "memory", "eax", "ecx", "edx");
}
#define arch_atomic64_inc arch_atomic64_inc

/**
 * arch_atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
static inline void arch_atomic64_dec(atomic64_t *v)
{
	__alternative_atomic64(dec, dec_return, /* no output */,
			       "S" (v) : "memory", "eax", "ecx", "edx");
}
#define arch_atomic64_dec arch_atomic64_dec

/**
 * arch_atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if the add was done, zero otherwise.
 */
static inline int arch_atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
{
	unsigned low = (unsigned)u;
	unsigned high = (unsigned)(u >> 32);
	alternative_atomic64(add_unless,
			     ASM_OUTPUT2("+A" (a), "+c" (low), "+D" (high)),
			     "S" (v) : "memory");
	return (int)a;
}
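
/*
 * Example (sketch): take a reference only while the object is still
 * live, skipping the increment once the count has reached zero; this
 * is semantically the (@a == 1, @u == 0) case that
 * arch_atomic64_inc_not_zero() below provides directly.
 *
 *	if (!arch_atomic64_add_unless(&obj->refs, 1, 0))
 *		return NULL;	object already on its way out
 */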

static inline int arch_atomic64_inc_not_zero(atomic64_t *v)
{
	int r;
	alternative_atomic64(inc_not_zero, "=&a" (r),
			     "S" (v) : "ecx", "edx", "memory");
	return r;
}
#define arch_atomic64_inc_not_zero arch_atomic64_inc_not_zero

static inline s64 arch_atomic64_dec_if_positive(atomic64_t *v)
{
	s64 r;
	alternative_atomic64(dec_if_positive, "=&A" (r),
			     "S" (v) : "ecx", "memory");
	return r;
}
#define arch_atomic64_dec_if_positive arch_atomic64_dec_if_positive
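
/*
 * Example (sketch): consume one token from a budget, failing cleanly
 * when none remain.  A negative return means the counter was already
 * at or below zero and was left untouched; "tokens" is hypothetical.
 *
 *	if (arch_atomic64_dec_if_positive(&tokens) < 0)
 *		return -EBUSY;
 */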

#undef alternative_atomic64
#undef __alternative_atomic64

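/*
 * The remaining operations have no dedicated out-of-line helper and
 * are built from a cmpxchg loop instead: start from a guess of 0, try
 * to install the combined value, and on failure retry with the value
 * the cmpxchg actually observed.  The loop terminates once cmpxchg
 * returns the predicted value, i.e. once the update has been applied
 * atomically.
 */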
static inline void arch_atomic64_and(s64 i, atomic64_t *v)
{
	s64 old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c & i)) != c)
		c = old;
}

static inline s64 arch_atomic64_fetch_and(s64 i, atomic64_t *v)
{
	s64 old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c & i)) != c)
		c = old;

	return old;
}

static inline void arch_atomic64_or(s64 i, atomic64_t *v)
{
	s64 old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c | i)) != c)
		c = old;
}

static inline s64 arch_atomic64_fetch_or(s64 i, atomic64_t *v)
{
	s64 old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c | i)) != c)
		c = old;

	return old;
}

static inline void arch_atomic64_xor(s64 i, atomic64_t *v)
{
	s64 old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c ^ i)) != c)
		c = old;
}

static inline s64 arch_atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	s64 old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c ^ i)) != c)
		c = old;

	return old;
}

static inline s64 arch_atomic64_fetch_add(s64 i, atomic64_t *v)
{
	s64 old, c = 0;

	while ((old = arch_atomic64_cmpxchg(v, c, c + i)) != c)
		c = old;

	return old;
}

#define arch_atomic64_fetch_sub(i, v)	arch_atomic64_fetch_add(-(i), (v))
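
/*
 * Example (sketch): arch_atomic64_fetch_add() returns the value seen
 * before the addition, which is handy for claiming a slot index;
 * "ring->tail" is a hypothetical field.
 *
 *	s64 slot = arch_atomic64_fetch_add(1, &ring->tail);
 */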

#endif /* _ASM_X86_ATOMIC64_32_H */
