Lines matching refs:lock in kernel/locking/qrwlock.c (queued read/write lock slowpaths)
42 arch_spinlock_t lock; member
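
The member at line 42 is the wait-queue spinlock slot inside the internal struct __qrwlock view, the same structure the writer slowpath casts to at line 127. A sketch of that structure, reconstructed from the v4.x-era kernel/locking/qrwlock.c (the endian guards and field comments follow that version and may differ in other trees):

/*
 * Internal byte-level view of a qrwlock: the union lets the writer
 * slowpath operate on just the low "writer mode" byte of cnts instead
 * of cmpxchg-ing the whole word.
 */
struct __qrwlock {
	union {
		atomic_t cnts;
		struct {
#ifdef __LITTLE_ENDIAN
			u8 wmode;	/* Writer mode   */
			u8 rcnts[3];	/* Reader counts */
#else
			u8 rcnts[3];	/* Reader counts */
			u8 wmode;	/* Writer mode   */
#endif
		};
	};
	arch_spinlock_t	lock;	/* line 42: mirrors wait_lock in struct qrwlock */
};
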
54 rspin_until_writer_unlock(struct qrwlock *lock, u32 cnts) in rspin_until_writer_unlock() argument
58 cnts = atomic_read_acquire(&lock->cnts); in rspin_until_writer_unlock()
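
Lines 54 and 58 are the signature and the re-read inside the reader spin helper. A sketch of the full loop, assuming the v4.x body: the _QW_WMASK/_QW_LOCKED masks are filled in from that version, and cpu_relax() stands in for the version-specific relax primitive (older trees spelled it cpu_relax_lowlatency()):

/*
 * rspin_until_writer_unlock - spin until the writer releases the lock
 * @lock: queue rwlock structure
 * @cnts: current value of lock->cnts
 *
 * Spins with ACQUIRE ordering on each re-read, so the reader's critical
 * section cannot leak above the point where the writer bits are seen clear.
 */
static __always_inline void
rspin_until_writer_unlock(struct qrwlock *lock, u32 cnts)
{
	while ((cnts & _QW_WMASK) == _QW_LOCKED) {
		cpu_relax();
		cnts = atomic_read_acquire(&lock->cnts);
	}
}
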
67 void queued_read_lock_slowpath(struct qrwlock *lock, u32 cnts) in queued_read_lock_slowpath() argument
81 rspin_until_writer_unlock(lock, cnts); in queued_read_lock_slowpath()
84 atomic_sub(_QR_BIAS, &lock->cnts); in queued_read_lock_slowpath()
89 arch_spin_lock(&lock->wait_lock); in queued_read_lock_slowpath()
96 cnts = atomic_add_return_acquire(_QR_BIAS, &lock->cnts) - _QR_BIAS; in queued_read_lock_slowpath()
97 rspin_until_writer_unlock(lock, cnts); in queued_read_lock_slowpath()
102 arch_spin_unlock(&lock->wait_lock); in queued_read_lock_slowpath()
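
Lines 67-102 trace the reader slowpath: a reader in interrupt context just spins for the writer to finish (line 81), while a process-context reader backs out its fast-path bias (line 84), queues on wait_lock (line 89), then re-adds the bias with ACQUIRE ordering and spins (lines 96-97). Reassembled as a sketch; the in_interrupt() test and the comments are filled in from the v4.x source:

void queued_read_lock_slowpath(struct qrwlock *lock, u32 cnts)
{
	/*
	 * Readers come here when they cannot get the lock without waiting.
	 */
	if (unlikely(in_interrupt())) {
		/*
		 * A reader in interrupt context gets the lock immediately
		 * if the writer is merely waiting (not yet holding it), so
		 * spin until the writer bits clear instead of queueing.
		 */
		rspin_until_writer_unlock(lock, cnts);
		return;
	}
	atomic_sub(_QR_BIAS, &lock->cnts);	/* back out the fast-path bias */

	/* Put the reader into the wait queue. */
	arch_spin_lock(&lock->wait_lock);

	/*
	 * The ACQUIRE semantics of the add-and-spin below keep the
	 * critical section from leaking above the point where a current
	 * writer is observed to have released the lock.
	 */
	cnts = atomic_add_return_acquire(_QR_BIAS, &lock->cnts) - _QR_BIAS;
	rspin_until_writer_unlock(lock, cnts);

	/* Signal the next waiter in the queue to become queue head. */
	arch_spin_unlock(&lock->wait_lock);
}
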
110 void queued_write_lock_slowpath(struct qrwlock *lock) in queued_write_lock_slowpath() argument
115 arch_spin_lock(&lock->wait_lock); in queued_write_lock_slowpath()
118 if (!atomic_read(&lock->cnts) && in queued_write_lock_slowpath()
119 (atomic_cmpxchg_acquire(&lock->cnts, 0, _QW_LOCKED) == 0)) in queued_write_lock_slowpath()
127 struct __qrwlock *l = (struct __qrwlock *)lock; in queued_write_lock_slowpath()
138 cnts = atomic_read(&lock->cnts); in queued_write_lock_slowpath()
140 (atomic_cmpxchg_acquire(&lock->cnts, _QW_WAITING, in queued_write_lock_slowpath()
147 arch_spin_unlock(&lock->wait_lock); in queued_write_lock_slowpath()
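
Lines 110-147 are the writer slowpath: queue on wait_lock (line 115), try a direct 0 -> _QW_LOCKED transition (lines 118-119), otherwise publish _QW_WAITING through the byte-level __qrwlock view (line 127) and wait for the reader count to drain (lines 138-140). A sketch under the same v4.x assumptions; the READ_ONCE/cmpxchg_relaxed dance on the wmode byte and the goto label follow that version:

void queued_write_lock_slowpath(struct qrwlock *lock)
{
	u32 cnts;

	/* Put the writer into the wait queue. */
	arch_spin_lock(&lock->wait_lock);

	/* Try to acquire the lock directly if no reader is present. */
	if (!atomic_read(&lock->cnts) &&
	    (atomic_cmpxchg_acquire(&lock->cnts, 0, _QW_LOCKED) == 0))
		goto unlock;

	/*
	 * Set the waiting flag to notify readers that a writer is
	 * pending, or wait for a previous writer to go away first.
	 */
	for (;;) {
		struct __qrwlock *l = (struct __qrwlock *)lock;

		if (!READ_ONCE(l->wmode) &&
		    (cmpxchg_relaxed(&l->wmode, 0, _QW_WAITING) == 0))
			break;

		cpu_relax();
	}

	/* When no more readers remain, flip _QW_WAITING to _QW_LOCKED. */
	for (;;) {
		cnts = atomic_read(&lock->cnts);
		if ((cnts == _QW_WAITING) &&
		    (atomic_cmpxchg_acquire(&lock->cnts, _QW_WAITING,
					    _QW_LOCKED) == _QW_WAITING))
			break;

		cpu_relax();
	}
unlock:
	arch_spin_unlock(&lock->wait_lock);
}

Note the design point these references share: every waiter, reader or writer, serializes on the embedded wait_lock, so only the queue head ever spins on cnts. That is what gives the queued rwlock its FIFO fairness and keeps cache-line contention bounded.
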