root/arch/s390/include/asm/atomic_ops.h

DEFINITIONS

This source file includes the following definitions:
  1. __atomic_cmpxchg
  2. __atomic_cmpxchg_bool
  3. __atomic64_cmpxchg
  4. __atomic64_cmpxchg_bool

/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Low level functions for atomic operations
 *
 * Copyright IBM Corp. 1999, 2016
 */

#ifndef __ARCH_S390_ATOMIC_OPS__
#define __ARCH_S390_ATOMIC_OPS__

#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES

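/*
 * With z196 features the interlocked-access facility is available:
 * instructions such as "laa" (load and add) update memory atomically
 * and return the old value in a single instruction. The op_barrier
 * argument lets the _barrier variants append "bcr 14,0", which acts
 * as a full memory barrier (fast-BCR-serialization).
 */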
#define __ATOMIC_OP(op_name, op_type, op_string, op_barrier)            \
static inline op_type op_name(op_type val, op_type *ptr)                \
{                                                                       \
        op_type old;                                                    \
                                                                        \
        asm volatile(                                                   \
                op_string "     %[old],%[val],%[ptr]\n"                 \
                op_barrier                                              \
                : [old] "=d" (old), [ptr] "+Q" (*ptr)                   \
                : [val] "d" (val) : "cc", "memory");                    \
        return old;                                                     \
}                                                                       \

#define __ATOMIC_OPS(op_name, op_type, op_string)                       \
        __ATOMIC_OP(op_name, op_type, op_string, "\n")                  \
        __ATOMIC_OP(op_name##_barrier, op_type, op_string, "bcr 14,0\n")

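/*
 * Each __ATOMIC_OPS() invocation below generates two helpers: e.g.
 * __ATOMIC_OPS(__atomic_add, int, "laa") expands to __atomic_add()
 * and __atomic_add_barrier(), the latter with a trailing "bcr 14,0".
 */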
__ATOMIC_OPS(__atomic_add, int, "laa")
__ATOMIC_OPS(__atomic_and, int, "lan")
__ATOMIC_OPS(__atomic_or,  int, "lao")
__ATOMIC_OPS(__atomic_xor, int, "lax")

__ATOMIC_OPS(__atomic64_add, long, "laag")
__ATOMIC_OPS(__atomic64_and, long, "lang")
__ATOMIC_OPS(__atomic64_or,  long, "laog")
__ATOMIC_OPS(__atomic64_xor, long, "laxg")

#undef __ATOMIC_OPS
#undef __ATOMIC_OP

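/*
 * The _const variants take a compile-time constant: "asi"/"agsi"
 * (add immediate to storage) add a signed immediate directly to
 * memory without returning the old value, hence the "i" operand
 * constraint and the void return type.
 */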
#define __ATOMIC_CONST_OP(op_name, op_type, op_string, op_barrier)      \
static __always_inline void op_name(op_type val, op_type *ptr)          \
{                                                                       \
        asm volatile(                                                   \
                op_string "     %[ptr],%[val]\n"                        \
                op_barrier                                              \
                : [ptr] "+Q" (*ptr) : [val] "i" (val) : "cc", "memory");\
}

#define __ATOMIC_CONST_OPS(op_name, op_type, op_string)                 \
        __ATOMIC_CONST_OP(op_name, op_type, op_string, "\n")            \
        __ATOMIC_CONST_OP(op_name##_barrier, op_type, op_string, "bcr 14,0\n")

__ATOMIC_CONST_OPS(__atomic_add_const, int, "asi")
__ATOMIC_CONST_OPS(__atomic64_add_const, long, "agsi")

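/*
 * Hypothetical usage sketch (illustration only, not part of this
 * header). The constant must fit the signed 8-bit immediate field
 * of "asi"/"agsi":
 *
 *      static int counter;
 *      __atomic_add_const(1, &counter);
 */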
#undef __ATOMIC_CONST_OPS
#undef __ATOMIC_CONST_OP

#else /* CONFIG_HAVE_MARCH_Z196_FEATURES */

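/*
 * Pre-z196 fallback: each atomic operation is built from a
 * compare-and-swap ("cs") retry loop. Compare-and-swap itself
 * serializes, so the _barrier variants generated below are
 * identical to the plain ones.
 */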
#define __ATOMIC_OP(op_name, op_string)                                 \
static inline int op_name(int val, int *ptr)                            \
{                                                                       \
        int old, new;                                                   \
                                                                        \
        asm volatile(                                                   \
                "0:     lr      %[new],%[old]\n"                        \
                op_string "     %[new],%[val]\n"                        \
                "       cs      %[old],%[new],%[ptr]\n"                 \
                "       jl      0b"                                     \
                : [old] "=d" (old), [new] "=&d" (new), [ptr] "+Q" (*ptr)\
                : [val] "d" (val), "0" (*ptr) : "cc", "memory");        \
        return old;                                                     \
}

#define __ATOMIC_OPS(op_name, op_string)                                \
        __ATOMIC_OP(op_name, op_string)                                 \
        __ATOMIC_OP(op_name##_barrier, op_string)

__ATOMIC_OPS(__atomic_add, "ar")
__ATOMIC_OPS(__atomic_and, "nr")
__ATOMIC_OPS(__atomic_or,  "or")
__ATOMIC_OPS(__atomic_xor, "xr")

#undef __ATOMIC_OPS

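/*
 * The 64-bit variants mirror the loop above, using lgr/csg and the
 * 64-bit arithmetic mnemonics (agr, ngr, ogr, xgr).
 */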
#define __ATOMIC64_OP(op_name, op_string)                               \
static inline long op_name(long val, long *ptr)                         \
{                                                                       \
        long old, new;                                                  \
                                                                        \
        asm volatile(                                                   \
                "0:     lgr     %[new],%[old]\n"                        \
                op_string "     %[new],%[val]\n"                        \
                "       csg     %[old],%[new],%[ptr]\n"                 \
                "       jl      0b"                                     \
                : [old] "=d" (old), [new] "=&d" (new), [ptr] "+Q" (*ptr)\
                : [val] "d" (val), "0" (*ptr) : "cc", "memory");        \
        return old;                                                     \
}

#define __ATOMIC64_OPS(op_name, op_string)                              \
        __ATOMIC64_OP(op_name, op_string)                               \
        __ATOMIC64_OP(op_name##_barrier, op_string)

__ATOMIC64_OPS(__atomic64_add, "agr")
__ATOMIC64_OPS(__atomic64_and, "ngr")
__ATOMIC64_OPS(__atomic64_or,  "ogr")
__ATOMIC64_OPS(__atomic64_xor, "xgr")

#undef __ATOMIC64_OPS

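/*
 * Without z196 the add-immediate instructions are not available, so
 * the _const helpers fall back to the compare-and-swap based add and
 * discard the returned old value.
 */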
#define __atomic_add_const(val, ptr)            __atomic_add(val, ptr)
#define __atomic_add_const_barrier(val, ptr)    __atomic_add(val, ptr)
#define __atomic64_add_const(val, ptr)          __atomic64_add(val, ptr)
#define __atomic64_add_const_barrier(val, ptr)  __atomic64_add(val, ptr)

#endif /* CONFIG_HAVE_MARCH_Z196_FEATURES */

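/*
 * The compare-and-exchange helpers wrap the compiler's __sync
 * builtins, which gcc compiles to "cs" (32 bit) or "csg" (64 bit)
 * on s390. The _bool variants return whether the swap succeeded.
 */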
static inline int __atomic_cmpxchg(int *ptr, int old, int new)
{
        return __sync_val_compare_and_swap(ptr, old, new);
}

static inline int __atomic_cmpxchg_bool(int *ptr, int old, int new)
{
        return __sync_bool_compare_and_swap(ptr, old, new);
}

static inline long __atomic64_cmpxchg(long *ptr, long old, long new)
{
        return __sync_val_compare_and_swap(ptr, old, new);
}

static inline long __atomic64_cmpxchg_bool(long *ptr, long old, long new)
{
        return __sync_bool_compare_and_swap(ptr, old, new);
}

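/*
 * Hypothetical usage sketch (illustration only, not part of this
 * header): a lock-free increment built on __atomic_cmpxchg_bool.
 *
 *      static inline void example_inc(int *ctr)
 *      {
 *              int old;
 *
 *              do {
 *                      old = READ_ONCE(*ctr);
 *              } while (!__atomic_cmpxchg_bool(ctr, old, old + 1));
 *      }
 */
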
#endif /* __ARCH_S390_ATOMIC_OPS__ */
