Lines Matching refs:lock (each entry is a source line that references the identifier lock, followed by the function it occurs in)
28 static __always_inline int queued_spin_is_locked(struct qspinlock *lock) in queued_spin_is_locked() argument
53 return atomic_read(&lock->val) & _Q_LOCKED_MASK; in queued_spin_is_locked()
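The two matches above are the signature and the return statement of queued_spin_is_locked(), apparently from an asm-generic style qspinlock header. A minimal sketch of how they likely fit together; the braces and the comment are assumptions, and struct qspinlock plus _Q_LOCKED_MASK are taken from the surrounding header:

static __always_inline int queued_spin_is_locked(struct qspinlock *lock)
{
	/* Any bit set in the locked byte means an owner currently holds the lock. */
	return atomic_read(&lock->val) & _Q_LOCKED_MASK;
}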
66 static __always_inline int queued_spin_value_unlocked(struct qspinlock lock) in queued_spin_value_unlocked() argument
68 return !atomic_read(&lock.val); in queued_spin_value_unlocked()
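queued_spin_value_unlocked() takes the lock by value, so it tests a caller-supplied snapshot rather than the live lock word. A sketch under the same assumptions, with only the matched lines verbatim:

static __always_inline int queued_spin_value_unlocked(struct qspinlock lock)
{
	/* Unlocked only when the whole word (locked byte, pending bit, tail) is zero. */
	return !atomic_read(&lock.val);
}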
76 static __always_inline int queued_spin_is_contended(struct qspinlock *lock) in queued_spin_is_contended() argument
78 return atomic_read(&lock->val) & ~_Q_LOCKED_MASK; in queued_spin_is_contended()
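queued_spin_is_contended() masks the locked byte out, so any remaining state (the pending bit or a queued tail) counts as contention. A sketch with the body taken from the match and the rest assumed:

static __always_inline int queued_spin_is_contended(struct qspinlock *lock)
{
	/* Pending bit or a non-empty MCS tail means waiters are queued. */
	return atomic_read(&lock->val) & ~_Q_LOCKED_MASK;
}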
85 static __always_inline int queued_spin_trylock(struct qspinlock *lock) in queued_spin_trylock() argument
87 if (!atomic_read(&lock->val) && in queued_spin_trylock()
88 (atomic_cmpxchg(&lock->val, 0, _Q_LOCKED_VAL) == 0)) in queued_spin_trylock()
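queued_spin_trylock() does a cheap plain read first and only attempts the cmpxchg when the lock word looked free, which avoids bouncing the cache line on a busy lock. In the sketch below the read and the cmpxchg are the matched lines; the return paths are assumptions:

static __always_inline int queued_spin_trylock(struct qspinlock *lock)
{
	if (!atomic_read(&lock->val) &&
	    (atomic_cmpxchg(&lock->val, 0, _Q_LOCKED_VAL) == 0))
		return 1;
	return 0;
}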
93 extern void queued_spin_lock_slowpath(struct qspinlock *lock, u32 val);
99 static __always_inline void queued_spin_lock(struct qspinlock *lock) in queued_spin_lock() argument
103 val = atomic_cmpxchg(&lock->val, 0, _Q_LOCKED_VAL); in queued_spin_lock()
106 queued_spin_lock_slowpath(lock, val); in queued_spin_lock()
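queued_spin_lock() tries the uncontended 0 -> _Q_LOCKED_VAL transition inline and only calls the out-of-line queued_spin_lock_slowpath() declared above when the cmpxchg observed a non-zero value; that observed value is passed along so the slow path does not have to re-read the lock. In this sketch the early-return fast-path test between the two matched lines is an assumption:

static __always_inline void queued_spin_lock(struct qspinlock *lock)
{
	u32 val;

	val = atomic_cmpxchg(&lock->val, 0, _Q_LOCKED_VAL);
	if (likely(val == 0))
		return;
	queued_spin_lock_slowpath(lock, val);
}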
114 static __always_inline void queued_spin_unlock(struct qspinlock *lock) in queued_spin_unlock() argument
120 atomic_sub(_Q_LOCKED_VAL, &lock->val); in queued_spin_unlock()
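queued_spin_unlock() subtracts _Q_LOCKED_VAL instead of storing zero, so the pending and tail bits owned by waiters are preserved. Only the atomic_sub() is a matched line; the barrier providing release ordering ahead of the unordered atomic is an assumption about this variant:

static __always_inline void queued_spin_unlock(struct qspinlock *lock)
{
	smp_mb__before_atomic();
	/* Clear only the locked byte; waiters' pending/tail state survives. */
	atomic_sub(_Q_LOCKED_VAL, &lock->val);
}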
131 static inline void queued_spin_unlock_wait(struct qspinlock *lock) in queued_spin_unlock_wait() argument
135 while (atomic_read(&lock->val) & _Q_LOCKED_MASK) in queued_spin_unlock_wait()
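queued_spin_unlock_wait() spins until the locked byte clears, i.e. until the current owner, if any, releases the lock; it does not acquire the lock itself. The loop condition is the matched line; cpu_relax() in the loop body is an assumption:

static inline void queued_spin_unlock_wait(struct qspinlock *lock)
{
	while (atomic_read(&lock->val) & _Q_LOCKED_MASK)
		cpu_relax();
}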
140 static __always_inline bool virt_spin_lock(struct qspinlock *lock) in virt_spin_lock() argument
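Only the signature of virt_spin_lock() is matched here. In the generic header this is typically a stub that returns false so bare-metal builds keep the queued path, while hypervisor-aware architectures can override it with a simpler test-and-set scheme; the body below is therefore an assumption:

static __always_inline bool virt_spin_lock(struct qspinlock *lock)
{
	/* Assumed stub: not handled here, fall through to the queued path. */
	return false;
}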