Lines matching refs:lock

The hits below are the queued rwlock fast-path API from include/asm-generic/qrwlock.h, as it looked in a Linux v4.x-era tree (the read slowpath still takes the pre-read counter value, and the write unlock is a plain byte store).

extern void queued_read_lock_slowpath(struct qrwlock *lock, u32 cnts);
extern void queued_write_lock_slowpath(struct qrwlock *lock);
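
Context for the fragments below: every one of these inlines manipulates a single atomic word, lock->cnts, whose low byte holds writer state and whose upper bits count active readers. A sketch of the type and constants they depend on, as they appear in v4.x-era qrwlock headers (values shown for illustration; later kernels widened the writer field, so check your own tree):

typedef struct qrwlock {
        atomic_t        cnts;           /* writer byte + reader count */
        arch_spinlock_t wait_lock;      /* queues contending CPUs */
} arch_rwlock_t;

#define _QW_WAITING     1               /* A writer is waiting */
#define _QW_LOCKED      0xff            /* A writer holds the lock */
#define _QW_WMASK       0xff            /* Writer mask */
#define _QR_SHIFT       8               /* Reader count shift */
#define _QR_BIAS        (1U << _QR_SHIFT)
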
/* queued_read_can_lock: would a read_trylock() succeed right now? */
static inline int queued_read_can_lock(struct qrwlock *lock)
{
        return !(atomic_read(&lock->cnts) & _QW_WMASK);
}

/* queued_write_can_lock: would a write_trylock() succeed right now? */
static inline int queued_write_can_lock(struct qrwlock *lock)
{
        return !atomic_read(&lock->cnts);
}
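
Both predicates are unsynchronized snapshots: lock->cnts can change the moment atomic_read() returns, so they answer "could the trylock have succeeded just now", not "will it succeed". They are fine for heuristics and diagnostics; ownership always comes from a trylock or lock call. An illustrative pattern (the calls are the real API, the pattern itself is hypothetical):

/* Illustrative only: the predicate is advisory, the trylock decides. */
if (queued_write_can_lock(lock) && queued_write_trylock(lock)) {
        /* ... write-side critical section ... */
        queued_write_unlock(lock);
}
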
/* queued_read_trylock: try to take the read lock; 1 on success, 0 on failure. */
static inline int queued_read_trylock(struct qrwlock *lock)
{
        u32 cnts;

        cnts = atomic_read(&lock->cnts);
        if (likely(!(cnts & _QW_WMASK))) {
                cnts = (u32)atomic_add_return_acquire(_QR_BIAS, &lock->cnts);
                if (likely(!(cnts & _QW_WMASK)))
                        return 1;
                /* A writer showed up between the read and the add: back out. */
                atomic_sub(_QR_BIAS, &lock->cnts);
        }
        return 0;
}
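
The trylock's shape is add-then-undo: optimistically add one reader bias, then back it out if a writer appeared in the window between the cheap read and the add. A minimal userspace sketch of the same pattern using C11 atomics (the toy_* names and type are hypothetical stand-ins, not the kernel's; fast path only, no queuing):

#include <stdatomic.h>
#include <stdint.h>

#define QW_WMASK 0xffu          /* writer byte, mirroring _QW_WMASK */
#define QR_BIAS  (1u << 8)      /* one reader, mirroring _QR_BIAS */

/* Hypothetical userspace stand-in for struct qrwlock. */
typedef struct { _Atomic uint32_t cnts; } toy_rwlock;

static int toy_read_trylock(toy_rwlock *lock)
{
        /* Cheap peek first so a failing trylock does not write the cacheline. */
        uint32_t cnts = atomic_load_explicit(&lock->cnts, memory_order_relaxed);

        if (cnts & QW_WMASK)
                return 0;
        /* Optimistically add a reader, then re-check for a writer. */
        cnts = atomic_fetch_add_explicit(&lock->cnts, QR_BIAS,
                                         memory_order_acquire) + QR_BIAS;
        if (!(cnts & QW_WMASK))
                return 1;
        /* A writer slipped in: undo our reader bias and fail. */
        atomic_fetch_sub_explicit(&lock->cnts, QR_BIAS, memory_order_relaxed);
        return 0;
}
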
/* queued_write_trylock: try to take the write lock; 1 on success, 0 on failure. */
static inline int queued_write_trylock(struct qrwlock *lock)
{
        u32 cnts;

        cnts = atomic_read(&lock->cnts);
        if (unlikely(cnts))
                return 0;

        return likely(atomic_cmpxchg_acquire(&lock->cnts,
                                             cnts, _QW_LOCKED) == cnts);
}
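
The write side is simpler: the lock is free only when the whole word is zero (no writer byte, no reader count), so one compare-and-swap from 0 to the locked value claims it, with acquire ordering on success. Continuing the toy sketch:

#define QW_LOCKED 0xffu         /* writer-held value, mirroring _QW_LOCKED */

static int toy_write_trylock(toy_rwlock *lock)
{
        uint32_t expected = 0;  /* free: no readers, no writer */

        return atomic_compare_exchange_strong_explicit(&lock->cnts,
                        &expected, QW_LOCKED,
                        memory_order_acquire, memory_order_relaxed);
}
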
/* queued_read_lock: acquire the read lock. */
static inline void queued_read_lock(struct qrwlock *lock)
{
        u32 cnts;

        cnts = atomic_add_return_acquire(_QR_BIAS, &lock->cnts);
        if (likely(!(cnts & _QW_WMASK)))
                return;

        /* The slowpath will decrement the reader count, if necessary. */
        queued_read_lock_slowpath(lock, cnts);
}
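
So the read-lock fast path costs one unconditional atomic add: if no writer is present the caller already owns a reader slot and returns, and otherwise the bias it added travels into the slowpath, which drops it only if it has to requeue. Continuing the toy sketch with a deliberately dumb slow path:

static void toy_read_lock(toy_rwlock *lock)
{
        uint32_t cnts = atomic_fetch_add_explicit(&lock->cnts, QR_BIAS,
                                                  memory_order_acquire) + QR_BIAS;

        if (!(cnts & QW_WMASK))
                return;         /* no writer: the add already got us a slot */
        /*
         * Toy slow path: drop the bias and spin on the trylock. The real
         * slowpath keeps waiters FIFO by queuing on lock->wait_lock.
         */
        atomic_fetch_sub_explicit(&lock->cnts, QR_BIAS, memory_order_relaxed);
        while (!toy_read_trylock(lock))
                ;
}
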
/* queued_write_lock: acquire the write lock. */
static inline void queued_write_lock(struct qrwlock *lock)
{
        /* Fast path: grab the lock outright if nobody holds or waits. */
        if (atomic_cmpxchg_acquire(&lock->cnts, 0, _QW_LOCKED) == 0)
                return;

        queued_write_lock_slowpath(lock);
}
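
The write fast path is just the trylock cmpxchg inlined; all contention handling lives in the slowpath, which queues writers behind lock->wait_lock. In the toy sketch the slow path degenerates to a spin:

static void toy_write_lock(toy_rwlock *lock)
{
        while (!toy_write_trylock(lock))
                ;               /* the real slowpath parks FIFO instead */
}
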
/* queued_read_unlock: release the read lock. */
static inline void queued_read_unlock(struct qrwlock *lock)
{
        /* Atomically decrement the reader count. */
        (void)atomic_sub_return_release(_QR_BIAS, &lock->cnts);
}

/* queued_write_unlock: release the write lock. */
static inline void queued_write_unlock(struct qrwlock *lock)
{
        smp_store_release((u8 *)&lock->cnts, 0);
}
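
The byte-sized release store works because, in this layout, the writer state sits in the lowest-addressed byte of cnts, which assumes little-endian; later kernels added an endian-aware helper (__qrwlock_write_byte()) so big-endian machines clear the right byte. The toy sketch closes with the two unlocks and a single-threaded smoke test; the and-mask form used here is an endian-neutral way to clear the writer byte while preserving any reader bias:

#include <assert.h>

static void toy_read_unlock(toy_rwlock *lock)
{
        /* Release pairs with the acquire in the lock/trylock paths. */
        atomic_fetch_sub_explicit(&lock->cnts, QR_BIAS, memory_order_release);
}

static void toy_write_unlock(toy_rwlock *lock)
{
        /* Clear the writer byte, leaving any reader bias intact. */
        atomic_fetch_and_explicit(&lock->cnts, ~(uint32_t)QW_WMASK,
                                  memory_order_release);
}

int main(void)
{
        toy_rwlock lock = { 0 };

        toy_read_lock(&lock);
        assert(!toy_write_trylock(&lock));      /* readers exclude writers */
        toy_read_unlock(&lock);

        toy_write_lock(&lock);
        assert(!toy_read_trylock(&lock));       /* a writer excludes readers */
        toy_write_unlock(&lock);
        return 0;
}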