Lines matching references to the identifier v (remaining elided context is marked with "..."):

static __inline__ void atomic_set(atomic_t *v, int i)
{
        unsigned long flags;

        _atomic_spin_lock_irqsave(v, flags);
        v->counter = i;
        _atomic_spin_unlock_irqrestore(v, flags);
}
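Since there is no wide-enough native compare-and-swap here, _atomic_spin_lock_irqsave() serializes every atomic update through a small hash of spinlocks indexed by the atomic's address. A sketch of how these helpers are typically defined in the parisc-style implementation this listing appears to come from (ATOMIC_HASH_SIZE and the exact hash are assumptions, not verbatim):

#define ATOMIC_HASH_SIZE 4
#define ATOMIC_HASH(a) \
        (&(__atomic_hash[(((unsigned long)(a)) / L1_CACHE_BYTES) & (ATOMIC_HASH_SIZE - 1)]))

extern arch_spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] __lock_aligned;

/* Pick the spinlock covering this atomic's cacheline; lock it with IRQs off. */
#define _atomic_spin_lock_irqsave(l, f) do {            \
        arch_spinlock_t *s = ATOMIC_HASH(l);            \
        local_irq_save(f);                              \
        arch_spin_lock(s);                              \
} while (0)

#define _atomic_spin_unlock_irqrestore(l, f) do {       \
        arch_spinlock_t *s = ATOMIC_HASH(l);            \
        arch_spin_unlock(s);                            \
        local_irq_restore(f);                           \
} while (0)
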
static __inline__ int atomic_read(const atomic_t *v)
{
        return ACCESS_ONCE((v)->counter);
}
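atomic_read() takes no lock: an aligned word load is already atomic, and ACCESS_ONCE() merely forces the compiler to perform a single, untorn load. Its classic definition (from linux/compiler.h; the predecessor of today's READ_ONCE()):

#define ACCESS_ONCE(x) (*(volatile typeof(x) *)&(x))
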
#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new)     (xchg(&((v)->counter), new))

static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
        c = atomic_read(v);
        ...
        old = atomic_cmpxchg((v), c, c + (a));
        ...
}
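The listing elides the body of the retry loop. A sketch of the standard compare-and-swap pattern this function follows (reconstructed, not verbatim): add a to v unless v equals u, and return the old value. atomic_add_unless() then reports success by comparing that return value against u.

static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
        int c, old;

        c = atomic_read(v);
        for (;;) {
                if (unlikely(c == (u)))
                        break;          /* hit the excluded value: do nothing */
                old = atomic_cmpxchg((v), c, c + (a));
                if (likely(old == c))
                        break;          /* cmpxchg succeeded: add performed */
                c = old;                /* lost a race: retry with fresh value */
        }
        return c;
}
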
#define ATOMIC_OP(op, c_op)                                             \
static __inline__ void atomic_##op(int i, atomic_t *v)                  \
{                                                                       \
        unsigned long flags;                                            \
        _atomic_spin_lock_irqsave(v, flags);                            \
        v->counter c_op i;                                              \
        _atomic_spin_unlock_irqrestore(v, flags);                       \
}
#define ATOMIC_OP_RETURN(op, c_op)                                      \
static __inline__ int atomic_##op##_return(int i, atomic_t *v)          \
{                                                                       \
        unsigned long flags;                                            \
        int ret;                                                        \
        _atomic_spin_lock_irqsave(v, flags);                            \
        ret = (v->counter c_op i);                                      \
        _atomic_spin_unlock_irqrestore(v, flags);                       \
        return ret;                                                     \
}
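These two templates are then stamped out for the concrete operations, typically along these lines (the exact instantiation list is an assumption):

#define ATOMIC_OPS(op, c_op) ATOMIC_OP(op, c_op) ATOMIC_OP_RETURN(op, c_op)

ATOMIC_OPS(add, +=)     /* atomic_add(), atomic_add_return() */
ATOMIC_OPS(sub, -=)     /* atomic_sub(), atomic_sub_return() */
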
#define atomic_inc(v)                   (atomic_add(   1,(v)))
#define atomic_dec(v)                   (atomic_add(  -1,(v)))

#define atomic_inc_return(v)            (atomic_add_return(   1,(v)))
#define atomic_dec_return(v)            (atomic_add_return(  -1,(v)))

#define atomic_add_negative(a, v)       (atomic_add_return((a), (v)) < 0)

#define atomic_inc_and_test(v)          (atomic_inc_return(v) == 0)
#define atomic_dec_and_test(v)          (atomic_dec_return(v) == 0)
#define atomic_sub_and_test(i,v)        (atomic_sub_return((i),(v)) == 0)
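The _and_test helpers return true only for the caller whose operation produced zero, which is exactly the guarantee reference counting needs. A minimal hypothetical usage sketch (struct foo, foo_get() and foo_put() are illustrative names, not from this file):

#include <linux/atomic.h>
#include <linux/slab.h>

struct foo {
        atomic_t refcnt;
        /* ... payload ... */
};

static void foo_get(struct foo *f)
{
        atomic_inc(&f->refcnt);
}

static void foo_put(struct foo *f)
{
        /* Exactly one caller observes the 1 -> 0 transition and frees. */
        if (atomic_dec_and_test(&f->refcnt))
                kfree(f);
}
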
#define ATOMIC64_OP(op, c_op)                                           \
static __inline__ void atomic64_##op(s64 i, atomic64_t *v)              \
{                                                                       \
        unsigned long flags;                                            \
        _atomic_spin_lock_irqsave(v, flags);                            \
        v->counter c_op i;                                              \
        _atomic_spin_unlock_irqrestore(v, flags);                       \
}
#define ATOMIC64_OP_RETURN(op, c_op)                                    \
static __inline__ s64 atomic64_##op##_return(s64 i, atomic64_t *v)      \
{                                                                       \
        unsigned long flags;                                            \
        s64 ret;                                                        \
        _atomic_spin_lock_irqsave(v, flags);                            \
        ret = (v->counter c_op i);                                      \
        _atomic_spin_unlock_irqrestore(v, flags);                       \
        return ret;                                                     \
}
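The 64-bit templates are presumably instantiated the same way as the 32-bit ones:

#define ATOMIC64_OPS(op, c_op) ATOMIC64_OP(op, c_op) ATOMIC64_OP_RETURN(op, c_op)

ATOMIC64_OPS(add, +=)   /* atomic64_add(), atomic64_add_return() */
ATOMIC64_OPS(sub, -=)   /* atomic64_sub(), atomic64_sub_return() */
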
static __inline__ void
atomic64_set(atomic64_t *v, s64 i)
{
        unsigned long flags;

        _atomic_spin_lock_irqsave(v, flags);
        v->counter = i;
        _atomic_spin_unlock_irqrestore(v, flags);
}
static __inline__ s64
atomic64_read(const atomic64_t *v)
{
        return ACCESS_ONCE((v)->counter);
}

#define atomic64_inc(v)                 (atomic64_add(   1,(v)))
#define atomic64_dec(v)                 (atomic64_add(  -1,(v)))

#define atomic64_inc_return(v)          (atomic64_add_return(   1,(v)))
#define atomic64_dec_return(v)          (atomic64_add_return(  -1,(v)))

#define atomic64_add_negative(a, v)     (atomic64_add_return((a), (v)) < 0)

#define atomic64_inc_and_test(v)        (atomic64_inc_return(v) == 0)
#define atomic64_dec_and_test(v)        (atomic64_dec_return(v) == 0)
#define atomic64_sub_and_test(i,v)      (atomic64_sub_return((i),(v)) == 0)
#define atomic64_cmpxchg(v, o, n)                                       \
        ((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new)           (xchg(&((v)->counter), new))

static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
        c = atomic64_read(v);
        ...
        old = atomic64_cmpxchg((v), c, c + (a));
        ...
}
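Again the retry loop is elided by the listing; a reconstruction of the standard pattern (assumed, not verbatim). Unlike __atomic_add_unless() above, this one returns whether the add actually happened rather than the old value:

static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
        long c, old;

        c = atomic64_read(v);
        for (;;) {
                if (unlikely(c == (u)))
                        break;
                old = atomic64_cmpxchg((v), c, c + (a));
                if (likely(old == c))
                        break;
                c = old;
        }
        return c != (u);        /* true iff the add was performed */
}
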
#define atomic64_inc_not_zero(v)        atomic64_add_unless((v), 1, 0)

static inline long atomic64_dec_if_positive(atomic64_t *v)
{
        c = atomic64_read(v);
        ...
        old = atomic64_cmpxchg((v), c, dec);
        ...
}
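The elided lines compute the candidate value dec = c - 1, bail out before touching v if that would go negative, and loop on cmpxchg failure; a sketch of the usual generic implementation (assumed, not verbatim):

static inline long atomic64_dec_if_positive(atomic64_t *v)
{
        long c, old, dec;

        c = atomic64_read(v);
        for (;;) {
                dec = c - 1;
                if (unlikely(dec < 0))
                        break;          /* would go negative: leave v untouched */
                old = atomic64_cmpxchg((v), c, dec);
                if (likely(old == c))
                        break;
                c = old;
        }
        return dec;             /* new value, or negative if nothing was done */
}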