Lines Matching refs:lock
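
(All matches below come from the kernel's queued-spinlock implementation, kernel/locking/qspinlock.c; the leading numbers are that file's own line numbers.)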
158 static __always_inline void clear_pending_set_locked(struct qspinlock *lock) in clear_pending_set_locked() argument
160 struct __qspinlock *l = (void *)lock; in clear_pending_set_locked()
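
Lines 158-160 are the variant built when the pending bit occupies a full byte (_Q_PENDING_BITS == 8, i.e. NR_CPUS < 16K): the locked and pending bytes then form one 16-bit field, so the pending -> locked transition needs no atomic read-modify-write. A sketch of the body the search elides, assuming the struct __qspinlock overlay and its locked_pending halfword:

        static __always_inline void clear_pending_set_locked(struct qspinlock *lock)
        {
                struct __qspinlock *l = (void *)lock;

                /* One plain store: pending byte -> 0, locked byte -> 1. */
                WRITE_ONCE(l->locked_pending, _Q_LOCKED_VAL);
        }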
175 static __always_inline u32 xchg_tail(struct qspinlock *lock, u32 tail) in xchg_tail() argument
177 struct __qspinlock *l = (void *)lock; in xchg_tail()
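
Under the same layout the tail lives in its own 16-bit halfword, so xchg_tail reduces to a single hardware exchange. The elided body, again assuming the __qspinlock overlay:

        static __always_inline u32 xchg_tail(struct qspinlock *lock, u32 tail)
        {
                struct __qspinlock *l = (void *)lock;

                /* 16-bit xchg on the tail halfword; shift between word and halfword position. */
                return (u32)xchg(&l->tail, tail >> _Q_TAIL_OFFSET) << _Q_TAIL_OFFSET;
        }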
190 static __always_inline void clear_pending_set_locked(struct qspinlock *lock) in clear_pending_set_locked() argument
192 atomic_add(-_Q_PENDING_VAL + _Q_LOCKED_VAL, &lock->val); in clear_pending_set_locked()
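
Because the layout fixes _Q_LOCKED_OFFSET at 0 with 8 locked bits, _Q_PENDING_VAL is 1 << 8 and _Q_LOCKED_VAL is 1, so this atomic_add of -256 + 1 = -255 clears the pending bit and sets the locked byte in one atomic operation. That is only correct because the caller owns the pending bit: the bit is known to be set and the locked byte known to be zero, so the subtraction cannot borrow into the tail field.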
205 static __always_inline u32 xchg_tail(struct qspinlock *lock, u32 tail) in xchg_tail() argument
207 u32 old, new, val = atomic_read(&lock->val); in xchg_tail()
211 old = atomic_cmpxchg(&lock->val, val, new); in xchg_tail()
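
Lines 205-211 are the generic xchg_tail for configurations where the tail cannot be touched as a separate halfword (NR_CPUS >= 16K); the two matched lines sit inside a cmpxchg retry loop. Reconstructed from the surrounding source:

        static __always_inline u32 xchg_tail(struct qspinlock *lock, u32 tail)
        {
                u32 old, new, val = atomic_read(&lock->val);

                for (;;) {
                        /* Keep locked+pending, substitute our tail. */
                        new = (val & _Q_LOCKED_PENDING_MASK) | tail;
                        old = atomic_cmpxchg(&lock->val, val, new);
                        if (old == val)
                                break;

                        val = old;
                }
                return old;     /* previous value; the caller inspects the old tail */
        }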
227 static __always_inline void set_locked(struct qspinlock *lock) in set_locked() argument
229 struct __qspinlock *l = (void *)lock; in set_locked()
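
set_locked() is only ever called by the queue head at a point where both the pending bit and the locked byte are clear, so a plain byte store is enough; the elided body:

        static __always_inline void set_locked(struct qspinlock *lock)
        {
                struct __qspinlock *l = (void *)lock;

                WRITE_ONCE(l->locked, _Q_LOCKED_VAL);
        }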
242 static __always_inline void __pv_kick_node(struct qspinlock *lock, in __pv_kick_node() argument
244 static __always_inline void __pv_wait_head(struct qspinlock *lock, in __pv_wait_head() argument
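
Lines 242-244 are the bare-metal stubs: with CONFIG_PARAVIRT_SPINLOCKS disabled, the pv_* hooks invoked from the slowpath are, in this file, #defined to empty __always_inline functions that the compiler removes entirely, e.g. for the kick hook:

        static __always_inline void __pv_kick_node(struct qspinlock *lock,
                                                   struct mcs_spinlock *node) { }
        #define pv_kick_node    __pv_kick_node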
281 void queued_spin_lock_slowpath(struct qspinlock *lock, u32 val) in queued_spin_lock_slowpath() argument
292 if (virt_spin_lock(lock)) in queued_spin_lock_slowpath()
301 while ((val = atomic_read(&lock->val)) == _Q_PENDING_VAL) in queued_spin_lock_slowpath()
322 old = atomic_cmpxchg(&lock->val, val, new); in queued_spin_lock_slowpath()
345 while ((val = smp_load_acquire(&lock->val.counter)) & _Q_LOCKED_MASK) in queued_spin_lock_slowpath()
353 clear_pending_set_locked(lock); in queued_spin_lock_slowpath()
375 if (queued_spin_trylock(lock)) in queued_spin_lock_slowpath()
384 old = xchg_tail(lock, tail); in queued_spin_lock_slowpath()
410 pv_wait_head(lock, node); in queued_spin_lock_slowpath()
411 while ((val = smp_load_acquire(&lock->val.counter)) & _Q_LOCKED_PENDING_MASK) in queued_spin_lock_slowpath()
426 set_locked(lock); in queued_spin_lock_slowpath()
429 old = atomic_cmpxchg(&lock->val, val, _Q_LOCKED_VAL); in queued_spin_lock_slowpath()
443 pv_kick_node(lock, next); in queued_spin_lock_slowpath()
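
The remaining matches (lines 281-443) all fall inside the slowpath itself. Condensed from the kernel source of this vintage, with the long design comments dropped, the control flow around the matched lines is roughly:

        void queued_spin_lock_slowpath(struct qspinlock *lock, u32 val)
        {
                struct mcs_spinlock *prev, *next, *node;
                u32 new, old, tail;
                int idx;

                if (pv_enabled())
                        goto queue;

                if (virt_spin_lock(lock))       /* line 292: hypervisor test-and-set fallback */
                        return;

                /* Wait out an in-progress pending -> locked hand-over (line 301). */
                if (val == _Q_PENDING_VAL) {
                        while ((val = atomic_read(&lock->val)) == _Q_PENDING_VAL)
                                cpu_relax();
                }

                /* trylock || set pending: cmpxchg loop around line 322. */
                for (;;) {
                        if (val & ~_Q_LOCKED_MASK)      /* any contention: queue */
                                goto queue;

                        new = _Q_LOCKED_VAL;
                        if (val == new)
                                new |= _Q_PENDING_VAL;

                        old = atomic_cmpxchg(&lock->val, val, new);
                        if (old == val)
                                break;

                        val = old;
                }

                if (new == _Q_LOCKED_VAL)       /* we won the trylock */
                        return;

                /* We hold the pending bit: spin until the owner leaves (line 345),
                 * then take over with clear_pending_set_locked() (line 353). */
                while ((val = smp_load_acquire(&lock->val.counter)) & _Q_LOCKED_MASK)
                        cpu_relax();

                clear_pending_set_locked(lock);
                return;

        queue:
                node = this_cpu_ptr(&mcs_nodes[0]);
                idx = node->count++;
                tail = encode_tail(smp_processor_id(), idx);

                node += idx;
                node->locked = 0;
                node->next = NULL;
                pv_init_node(node);

                if (queued_spin_trylock(lock))  /* line 375: lock may have drained meanwhile */
                        goto release;

                old = xchg_tail(lock, tail);    /* line 384: publish ourselves as the tail */

                if (old & _Q_TAIL_MASK) {       /* a previous tail exists: link in and wait */
                        prev = decode_tail(old);
                        WRITE_ONCE(prev->next, node);

                        pv_wait_node(node);
                        arch_mcs_spin_lock_contended(&node->locked);
                }

                /* Queue head: wait for locked and pending to drain (lines 410-411). */
                pv_wait_head(lock, node);
                while ((val = smp_load_acquire(&lock->val.counter)) & _Q_LOCKED_PENDING_MASK)
                        cpu_relax();

                /* Claim the lock (lines 426-429): if we are the sole waiter, replace the
                 * tail with a bare locked value; otherwise set the locked byte and hand
                 * the MCS lock to the next node. */
                for (;;) {
                        if (val != tail) {
                                set_locked(lock);
                                break;
                        }
                        old = atomic_cmpxchg(&lock->val, val, _Q_LOCKED_VAL);
                        if (old == val)
                                goto release;

                        val = old;
                }

                while (!(next = READ_ONCE(node->next)))
                        cpu_relax();

                arch_mcs_spin_unlock_contended(&next->locked);
                pv_kick_node(lock, next);       /* line 443 */

        release:
                this_cpu_dec(mcs_nodes[0].count);
        }

Treat this as a map of the matched lines rather than the verbatim file. The structure mirrors the lock word itself: a pending fast path for the first contender, an MCS queue for everyone after that, with only the queue head spinning on the lock word.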