Lines matching references to lock

47  static inline int arch_spin_is_locked(arch_spinlock_t *lock)
50      u32 val = READ_ONCE(lock->lock);   in arch_spin_is_locked()
55  static inline void arch_spin_unlock(arch_spinlock_t *lock)
58      __insn_fetchadd4(&lock->lock, 1U << __ARCH_SPIN_CURRENT_SHIFT);   in arch_spin_unlock()
61  void arch_spin_unlock_wait(arch_spinlock_t *lock);
63  void arch_spin_lock_slow(arch_spinlock_t *lock, u32 val);
69  static inline void arch_spin_lock(arch_spinlock_t *lock)
71      u32 val = __insn_fetchadd4(&lock->lock, 1);   in arch_spin_lock()
74      arch_spin_lock_slow(lock, ticket);   in arch_spin_lock()
78  int arch_spin_trylock(arch_spinlock_t *lock);
81  #define arch_spin_lock_flags(lock, flags)  arch_spin_lock(lock)
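
The lines above are the fast paths of a ticket spinlock: arch_spin_lock() takes a ticket by atomically adding 1 to the low bits of lock->lock with the TILE-Gx __insn_fetchadd4 intrinsic, arch_spin_unlock() serves the next ticket by adding 1U << __ARCH_SPIN_CURRENT_SHIFT, arch_spin_is_locked() reports whether the two ticket fields differ, and the contended case goes to the out-of-line arch_spin_lock_slow(). Below is a minimal, portable sketch of the same scheme using GCC __atomic builtins. It is not the kernel's code: it keeps the two tickets in separate counters instead of packing them into one 32-bit word, so it needs none of the field-overflow handling the packed layout requires, and every name in it is illustrative.

/*
 * Portable sketch of the ticket-lock idea, using GCC __atomic builtins.
 * In the listing, lock adds 1 to the "next" field in the low bits and
 * unlock adds 1 << __ARCH_SPIN_CURRENT_SHIFT to the "current" field of a
 * single packed word; this sketch keeps the two fields separate.
 */
#include <stdbool.h>
#include <stdint.h>

struct ticket_lock {
	uint16_t next;      /* next ticket to hand out */
	uint16_t current;   /* ticket currently being served */
};

/* Locked iff a newcomer would have to wait, i.e. the two fields differ. */
static inline bool ticket_is_locked(struct ticket_lock *l)
{
	return __atomic_load_n(&l->next, __ATOMIC_RELAXED) !=
	       __atomic_load_n(&l->current, __ATOMIC_RELAXED);
}

/* Grab a ticket with fetch-add; spin only if it is not yet being served
 * (roughly the role the out-of-line arch_spin_lock_slow() plays above). */
static inline void ticket_lock(struct ticket_lock *l)
{
	uint16_t me = __atomic_fetch_add(&l->next, 1, __ATOMIC_RELAXED);
	while (__atomic_load_n(&l->current, __ATOMIC_ACQUIRE) != me)
		/* spin; a real implementation would relax the CPU here */ ;
}

/* Serve the next ticket, as arch_spin_unlock() does by adding
 * 1U << __ARCH_SPIN_CURRENT_SHIFT to the packed word. */
static inline void ticket_unlock(struct ticket_lock *l)
{
	__atomic_fetch_add(&l->current, 1, __ATOMIC_RELEASE);
}

The appeal of the ticket scheme is FIFO fairness: waiters acquire the lock in the order in which they fetched their tickets, which a plain test-and-set spinlock does not guarantee.
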
104     return !arch_write_val_locked(rw->lock);   in arch_read_can_lock()
113     return rw->lock == 0;   in arch_write_can_lock()
120     u32 val = __insn_fetchaddgez4(&rw->lock, 1);   in arch_read_lock()
129     u32 val = __insn_fetchor4(&rw->lock, __WRITE_LOCK_BIT);   in arch_write_lock()
137     __insn_fetchadd4(&rw->lock, -1);   in arch_read_unlock()
143     __insn_exch4(&rw->lock, 0); /* Avoid waiting in the write buffer. */   in arch_write_unlock()
148     return !arch_write_val_locked(__insn_fetchaddgez4(&rw->lock, 1));   in arch_read_trylock()
153     u32 val = __insn_fetchor4(&rw->lock, __WRITE_LOCK_BIT);   in arch_write_trylock()
157     __insn_fetchand4(&rw->lock, ~__WRITE_LOCK_BIT);   in arch_write_trylock()
161 #define arch_read_lock_flags(lock, flags)   arch_read_lock(lock)
162 #define arch_write_lock_flags(lock, flags)  arch_write_lock(lock)
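
The read/write lock lines above operate on a single 32-bit rw->lock word: writers claim it by OR-ing in __WRITE_LOCK_BIT with __insn_fetchor4 (and clear the bit again with __insn_fetchand4 when a trylock finds the lock already held by readers), readers bump a count with __insn_fetchaddgez4, whose name ("add if greater than or equal to zero") suggests the add only takes effect while the word has not been driven negative by a writer, and arch_write_unlock() clears the word with __insn_exch4 to avoid waiting in the write buffer, per the source comment. The sketch below mirrors that scheme with GCC __atomic builtins; since there is no portable "add only if the result stays non-negative" primitive, the reader path emulates __insn_fetchaddgez4 with a compare-and-swap loop. The value of __WRITE_LOCK_BIT is not shown in the listing, so the bit position chosen here, like every other name, is an assumption.

/*
 * Portable sketch of the single-word reader/writer lock: a writer bit set
 * with fetch-or, and a reader count in the remaining bits.
 */
#include <stdbool.h>
#include <stdint.h>

#define WRITER_BIT (1u << 31)   /* assumed position of __WRITE_LOCK_BIT */

struct rwlock { uint32_t lock; };

static inline bool write_val_locked(uint32_t val)
{
	return val & WRITER_BIT;
}

/* Reader: bump the count, but only while no writer bit is set
 * (a CAS loop standing in for __insn_fetchaddgez4). */
static inline bool read_trylock(struct rwlock *rw)
{
	uint32_t val = __atomic_load_n(&rw->lock, __ATOMIC_RELAXED);
	while (!write_val_locked(val)) {
		if (__atomic_compare_exchange_n(&rw->lock, &val, val + 1, true,
						__ATOMIC_ACQUIRE, __ATOMIC_RELAXED))
			return true;   /* count bumped, lock held for reading */
	}
	return false;                  /* a writer holds the lock */
}

static inline void read_unlock(struct rwlock *rw)
{
	__atomic_fetch_sub(&rw->lock, 1, __ATOMIC_RELEASE);  /* like fetchadd4(..., -1) */
}

/* Writer: claim the writer bit with fetch-or; back it out if readers hold the lock. */
static inline bool write_trylock(struct rwlock *rw)
{
	uint32_t val = __atomic_fetch_or(&rw->lock, WRITER_BIT, __ATOMIC_ACQUIRE);
	if (val == 0)
		return true;           /* no readers, no writer: we own it */
	if (!write_val_locked(val))    /* readers present: undo our bit */
		__atomic_fetch_and(&rw->lock, ~WRITER_BIT, __ATOMIC_RELAXED);
	return false;
}

static inline void write_unlock(struct rwlock *rw)
{
	/* The listing uses an atomic exchange here; a release store of 0 suffices
	 * for the sketch, since no reader can raise the count while we hold it. */
	__atomic_store_n(&rw->lock, 0, __ATOMIC_RELEASE);
}

Usage mirrors the listing's fast paths: read_trylock()/read_unlock() around reader sections and write_trylock()/write_unlock() around writer sections; the contended cases that the kernel hands to its out-of-line helpers would here simply retry in a loop.
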