
Searched refs:arch_spin_value_unlocked in linux-4.4.14 (Results 1 – 10 of 10) sorted by relevance

/linux-4.4.14/arch/arm64/include/asm/
spinlock.h:125 static inline int arch_spin_value_unlocked(arch_spinlock_t lock) in arch_spin_value_unlocked() function
132 return !arch_spin_value_unlocked(READ_ONCE(*lock)); in arch_spin_is_locked()
/linux-4.4.14/arch/powerpc/include/asm/
spinlock.h:55 static __always_inline int arch_spin_value_unlocked(arch_spinlock_t lock) in arch_spin_value_unlocked() function
63 return !arch_spin_value_unlocked(*lock); in arch_spin_is_locked()
/linux-4.4.14/arch/arm/include/asm/
spinlock.h:116 static inline int arch_spin_value_unlocked(arch_spinlock_t lock) in arch_spin_value_unlocked() function
123 return !arch_spin_value_unlocked(READ_ONCE(*lock)); in arch_spin_is_locked()
/linux-4.4.14/arch/s390/include/asm/
spinlock.h:49 static inline int arch_spin_value_unlocked(arch_spinlock_t lock) in arch_spin_value_unlocked() function
62 return likely(arch_spin_value_unlocked(*lp) && in arch_spin_trylock_once()
/linux-4.4.14/include/asm-generic/
qspinlock.h:157 #define arch_spin_value_unlocked(l) queued_spin_value_unlocked(l) macro
/linux-4.4.14/arch/alpha/include/asm/
spinlock.h:19 static inline int arch_spin_value_unlocked(arch_spinlock_t lock) in arch_spin_value_unlocked() function
/linux-4.4.14/lib/
lockref.c:14 while (likely(arch_spin_value_unlocked(old.lock.rlock.raw_lock))) { \
/linux-4.4.14/arch/mips/include/asm/
spinlock.h:45 static inline int arch_spin_value_unlocked(arch_spinlock_t lock) in arch_spin_value_unlocked() function
/linux-4.4.14/arch/x86/include/asm/
spinlock.h:88 static __always_inline int arch_spin_value_unlocked(arch_spinlock_t lock) in arch_spin_value_unlocked() function
/linux-4.4.14/arch/ia64/include/asm/
spinlock.h:105 static __always_inline int arch_spin_value_unlocked(arch_spinlock_t lock) in arch_spin_value_unlocked() function
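
All ten hits share one pattern: each architecture defines arch_spin_value_unlocked() to take an arch_spinlock_t by value and test a snapshot of it; most of the headers above then derive arch_spin_is_locked() by negating it over a read of the live lock, and lib/lockref.c relies on the by-value form to test the lock embedded in a copied lockref word. Below is a minimal sketch of that pattern, assuming the ticket-lock layout arm64 uses in this tree (owner/next fields) and substituting C11 atomics for the kernel's READ_ONCE() and cmpxchg primitives; lockref_get_not_locked() is a hypothetical stand-in for the users of lockref.c's CMPXCHG_LOOP() macro, not a kernel function.

    #include <stdatomic.h>
    #include <stdint.h>
    #include <string.h>

    /* Illustrative ticket-lock layout; the real arch_spinlock_t
     * fields differ per architecture. */
    typedef struct {
        uint16_t owner;   /* ticket currently being served */
        uint16_t next;    /* next ticket to hand out */
    } arch_spinlock_t;

    /* Takes the lock by value: it inspects a snapshot, never the
     * live lock word. */
    static inline int arch_spin_value_unlocked(arch_spinlock_t lock)
    {
        return lock.owner == lock.next;
    }

    /* How the per-arch headers above derive arch_spin_is_locked():
     * read the live lock once (volatile cast standing in for
     * READ_ONCE()) and negate the value check. */
    static inline int arch_spin_is_locked(const arch_spinlock_t *lock)
    {
        arch_spinlock_t v = *(const volatile arch_spinlock_t *)lock;
        return !arch_spin_value_unlocked(v);
    }

    /* lockref packs a spinlock and a refcount into one 64-bit word
     * so both can be updated with a single compare-and-swap. */
    struct lockref_val {
        arch_spinlock_t lock;
        uint32_t count;
    };

    /* The lockref.c fast path in miniature: retry a whole-word
     * cmpxchg only while a snapshot shows the embedded lock
     * unlocked, so the count is bumped without taking the lock. */
    static int lockref_get_not_locked(_Atomic uint64_t *lock_count)
    {
        uint64_t old = atomic_load_explicit(lock_count,
                                            memory_order_relaxed);
        for (;;) {
            struct lockref_val v;
            uint64_t new;

            memcpy(&v, &old, sizeof v);
            if (!arch_spin_value_unlocked(v.lock))
                return 0;   /* lock held: fall back to the locked path */
            v.count++;
            memcpy(&new, &v, sizeof new);
            /* on failure, old is reloaded with the current word */
            if (atomic_compare_exchange_weak_explicit(lock_count,
                    &old, new,
                    memory_order_relaxed, memory_order_relaxed))
                return 1;
        }
    }

Because the check takes the lock by value, lockref.c can validate a plain 64-bit load of the whole lock-plus-count word and update the count with one cmpxchg, never touching the live lock itself. On kernels built with the generic queued spinlocks, the asm-generic/qspinlock.h hit simply maps the same name to queued_spin_value_unlocked().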