Lines matching refs: lock

51 static inline void __ticket_enter_slowpath(arch_spinlock_t *lock)  in __ticket_enter_slowpath()  argument
53 set_bit(0, (volatile unsigned long *)&lock->tickets.head); in __ticket_enter_slowpath()
57 static __always_inline void __ticket_lock_spinning(arch_spinlock_t *lock, in __ticket_lock_spinning() argument
61 static inline void __ticket_unlock_kick(arch_spinlock_t *lock, in __ticket_unlock_kick() argument
72 static inline void __ticket_check_and_clear_slowpath(arch_spinlock_t *lock, in __ticket_check_and_clear_slowpath() argument
84 cmpxchg(&lock->head_tail, old.head_tail, new.head_tail); in __ticket_check_and_clear_slowpath()
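The fragments above are the paravirt (CONFIG_PARAVIRT_SPINLOCKS) slow-path hooks: a waiter that has spun too long sets a flag bit in the ticket and blocks, and the unlocker kicks it awake. For context, a minimal sketch of how __ticket_check_and_clear_slowpath plausibly reads in full, assuming the era's arch_spinlock_t union (a head_tail word overlaying the tickets pair) and the TICKET_SLOWPATH_FLAG/TICKET_LOCK_INC constants:

static inline void __ticket_check_and_clear_slowpath(arch_spinlock_t *lock,
						      __ticket_t head)
{
	if (head & TICKET_SLOWPATH_FLAG) {
		arch_spinlock_t old, new;

		/* Build an "only one holder, nobody queued" picture ... */
		old.tickets.head = head;
		new.tickets.head = head & ~TICKET_SLOWPATH_FLAG;
		old.tickets.tail = new.tickets.head + TICKET_LOCK_INC;
		new.tickets.tail = old.tickets.tail;

		/* ... and clear the flag only if that picture still holds. */
		cmpxchg(&lock->head_tail, old.head_tail, new.head_tail);
	}
}
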
88 static __always_inline int arch_spin_value_unlocked(arch_spinlock_t lock) in arch_spin_value_unlocked() argument
90 return __tickets_equal(lock.tickets.head, lock.tickets.tail); in arch_spin_value_unlocked()
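arch_spin_value_unlocked() is a by-value check that head and tail match. The comparison has to ignore the paravirt slowpath bit, which is what the __tickets_equal() helper it calls is for; a sketch, assuming the flag lives in the low bit of the ticket:

static inline int __tickets_equal(__ticket_t one, __ticket_t two)
{
	/* Two tickets are "equal" if they differ only in the slowpath bit. */
	return !((one ^ two) & ~TICKET_SLOWPATH_FLAG);
}
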
106 static __always_inline void arch_spin_lock(arch_spinlock_t *lock) in arch_spin_lock() argument
110 inc = xadd(&lock->tickets, inc); in arch_spin_lock()
118 inc.head = READ_ONCE(lock->tickets.head); in arch_spin_lock()
123 __ticket_lock_spinning(lock, inc.tail); in arch_spin_lock()
126 __ticket_check_and_clear_slowpath(lock, inc.head); in arch_spin_lock()
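The arch_spin_lock() fragments show the three phases of the ticket acquire: an xadd() that hands out the next tail ticket, a bounded spin re-reading head, and a fall-back into the paravirt slow path. A hedged reconstruction of how those lines fit together, assuming the usual SPIN_THRESHOLD/READ_ONCE/cpu_relax helpers:

static __always_inline void arch_spin_lock(arch_spinlock_t *lock)
{
	register struct __raw_tickets inc = { .tail = TICKET_LOCK_INC };

	/* Atomically take a ticket and get a snapshot of head/tail. */
	inc = xadd(&lock->tickets, inc);
	if (likely(inc.head == inc.tail))
		goto out;				/* uncontended fast path */

	for (;;) {
		unsigned count = SPIN_THRESHOLD;

		do {
			inc.head = READ_ONCE(lock->tickets.head);
			if (__tickets_equal(inc.head, inc.tail))
				goto clear_slowpath;	/* our turn came up */
			cpu_relax();
		} while (--count);

		/* Spun too long: let the paravirt backend block this vCPU. */
		__ticket_lock_spinning(lock, inc.tail);
	}
clear_slowpath:
	__ticket_check_and_clear_slowpath(lock, inc.head);
out:
	barrier();	/* keep the critical section after the acquire */
}
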
131 static __always_inline int arch_spin_trylock(arch_spinlock_t *lock) in arch_spin_trylock() argument
135 old.tickets = READ_ONCE(lock->tickets); in arch_spin_trylock()
143 return cmpxchg(&lock->head_tail, old.head_tail, new.head_tail) == old.head_tail; in arch_spin_trylock()
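arch_spin_trylock() only attempts the acquire on an unlocked snapshot: it reads the whole head/tail pair, bumps the tail by one increment, and commits with a single cmpxchg. A sketch, assuming TICKET_SHIFT is the bit width of one ticket half:

static __always_inline int arch_spin_trylock(arch_spinlock_t *lock)
{
	arch_spinlock_t old, new;

	old.tickets = READ_ONCE(lock->tickets);
	if (!__tickets_equal(old.tickets.head, old.tickets.tail))
		return 0;	/* already held or queued on */

	/* Claim the next ticket for ourselves, dropping the slowpath bit. */
	new.head_tail = old.head_tail + (TICKET_LOCK_INC << TICKET_SHIFT);
	new.head_tail &= ~TICKET_SLOWPATH_FLAG;

	/* cmpxchg is a full barrier, so nothing moves ahead of the acquire. */
	return cmpxchg(&lock->head_tail, old.head_tail, new.head_tail) == old.head_tail;
}
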
146 static __always_inline void arch_spin_unlock(arch_spinlock_t *lock) in arch_spin_unlock() argument
154 head = xadd(&lock->tickets.head, TICKET_LOCK_INC); in arch_spin_unlock()
158 __ticket_unlock_kick(lock, (head + TICKET_LOCK_INC)); in arch_spin_unlock()
161 __add(&lock->tickets.head, TICKET_LOCK_INC, UNLOCK_LOCK_PREFIX); in arch_spin_unlock()
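arch_spin_unlock() releases by bumping head. When paravirt ticketlocks are enabled it uses xadd() so it can observe (and strip) the slowpath flag and kick the next waiter's vCPU; otherwise a plain locked add (the __add with UNLOCK_LOCK_PREFIX on line 161) is enough. A sketch, assuming the paravirt_ticketlocks_enabled static key of that era:

static __always_inline void arch_spin_unlock(arch_spinlock_t *lock)
{
	if (TICKET_SLOWPATH_FLAG &&
	    static_key_false(&paravirt_ticketlocks_enabled)) {
		__ticket_t head;

		/* xadd returns the old head, slowpath bit included. */
		head = xadd(&lock->tickets.head, TICKET_LOCK_INC);

		if (unlikely(head & TICKET_SLOWPATH_FLAG)) {
			head &= ~TICKET_SLOWPATH_FLAG;
			/* Wake whoever holds the next ticket. */
			__ticket_unlock_kick(lock, (head + TICKET_LOCK_INC));
		}
	} else
		__add(&lock->tickets.head, TICKET_LOCK_INC, UNLOCK_LOCK_PREFIX);
}
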
164 static inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked() argument
166 struct __raw_tickets tmp = READ_ONCE(lock->tickets); in arch_spin_is_locked()
171 static inline int arch_spin_is_contended(arch_spinlock_t *lock) in arch_spin_is_contended() argument
173 struct __raw_tickets tmp = READ_ONCE(lock->tickets); in arch_spin_is_contended()
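Both predicates work from a single READ_ONCE() snapshot of the ticket pair: locked means head and tail differ, contended means the queue is more than one increment deep. A sketch of the likely bodies:

static inline int arch_spin_is_locked(arch_spinlock_t *lock)
{
	struct __raw_tickets tmp = READ_ONCE(lock->tickets);

	return !__tickets_equal(tmp.tail, tmp.head);
}

static inline int arch_spin_is_contended(arch_spinlock_t *lock)
{
	struct __raw_tickets tmp = READ_ONCE(lock->tickets);

	tmp.head &= ~TICKET_SLOWPATH_FLAG;
	return (__ticket_t)(tmp.tail - tmp.head) > TICKET_LOCK_INC;
}
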
180 static __always_inline void arch_spin_lock_flags(arch_spinlock_t *lock, in arch_spin_lock_flags() argument
183 arch_spin_lock(lock); in arch_spin_lock_flags()
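arch_spin_lock_flags() exists for architectures that can re-enable interrupts while waiting for a lock; on x86 the saved flags are simply ignored and the call falls through to arch_spin_lock(), as the fragment shows:

static __always_inline void arch_spin_lock_flags(arch_spinlock_t *lock,
						 unsigned long flags)
{
	arch_spin_lock(lock);	/* flags deliberately unused on x86 */
}
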
186 static inline void arch_spin_unlock_wait(arch_spinlock_t *lock) in arch_spin_unlock_wait() argument
188 __ticket_t head = READ_ONCE(lock->tickets.head); in arch_spin_unlock_wait()
191 struct __raw_tickets tmp = READ_ONCE(lock->tickets); in arch_spin_unlock_wait()
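arch_spin_unlock_wait() waits for the current holder, if any, to release the lock without ever taking a ticket itself. It samples head once, then loops until the lock looks free or head has moved past the sampled value (checking both conditions also copes with ticket wrap-around). A sketch:

static inline void arch_spin_unlock_wait(arch_spinlock_t *lock)
{
	__ticket_t head = READ_ONCE(lock->tickets.head);

	for (;;) {
		struct __raw_tickets tmp = READ_ONCE(lock->tickets);

		/* Done if the lock is free, or the holder we saw is gone. */
		if (__tickets_equal(tmp.head, tmp.tail) ||
		    !__tickets_equal(tmp.head, head))
			break;

		cpu_relax();
	}
}
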
221 #define arch_read_lock_flags(lock, flags) arch_read_lock(lock) argument
222 #define arch_write_lock_flags(lock, flags) arch_write_lock(lock) argument
224 #define arch_spin_relax(lock) cpu_relax() argument
225 #define arch_read_relax(lock) cpu_relax() argument
226 #define arch_write_relax(lock) cpu_relax() argument
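
The *_relax() macros are what the generic lock-spinning loops call between polls; on x86 they all boil down to cpu_relax() (the PAUSE instruction). To make the ticket discipline itself easy to experiment with outside the kernel, here is a self-contained userspace model using C11 atomics; it is an illustration only, and the names (ticket_lock, ticket_lock_acquire, my_lock) are invented for the example:

/* Userspace model of a ticket lock; not kernel code. */
#include <stdatomic.h>
#include <stdint.h>
#include <stdio.h>

struct ticket_lock {
	atomic_uint_fast16_t head;	/* ticket currently being served */
	atomic_uint_fast16_t tail;	/* next ticket to hand out */
};

static void ticket_lock_acquire(struct ticket_lock *l)
{
	/* fetch_add plays the role of xadd(): take the next ticket. */
	uint_fast16_t me = atomic_fetch_add(&l->tail, 1);

	/* Spin until our ticket is served; the kernel would cpu_relax() here. */
	while (atomic_load_explicit(&l->head, memory_order_acquire) != me)
		;
}

static void ticket_lock_release(struct ticket_lock *l)
{
	/* Bump head, like the UNLOCK_LOCK_PREFIX add on the unlock fast path. */
	atomic_fetch_add_explicit(&l->head, 1, memory_order_release);
}

int main(void)
{
	struct ticket_lock my_lock = { 0, 0 };

	ticket_lock_acquire(&my_lock);
	puts("in the critical section");
	ticket_lock_release(&my_lock);
	return 0;
}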