Home
last modified time | relevance | path

Searched refs:arch_spin_value_unlocked (Results 1 – 7 of 7) sorted by relevance

/linux-4.1.27/arch/arm64/include/asm/
spinlock.h 95 static inline int arch_spin_value_unlocked(arch_spinlock_t lock) in arch_spin_value_unlocked() function
102 return !arch_spin_value_unlocked(READ_ONCE(*lock)); in arch_spin_is_locked()
/linux-4.1.27/arch/arm/include/asm/
spinlock.h 116 static inline int arch_spin_value_unlocked(arch_spinlock_t lock) in arch_spin_value_unlocked() function
123 return !arch_spin_value_unlocked(READ_ONCE(*lock)); in arch_spin_is_locked()
/linux-4.1.27/arch/powerpc/include/asm/
spinlock.h 57 static __always_inline int arch_spin_value_unlocked(arch_spinlock_t lock) in arch_spin_value_unlocked() function
65 return !arch_spin_value_unlocked(*lock); in arch_spin_is_locked()
/linux-4.1.27/arch/s390/include/asm/
spinlock.h 49 static inline int arch_spin_value_unlocked(arch_spinlock_t lock) in arch_spin_value_unlocked() function
62 return likely(arch_spin_value_unlocked(*lp) && in arch_spin_trylock_once()
/linux-4.1.27/lib/
lockref.c 22 while (likely(arch_spin_value_unlocked(old.lock.rlock.raw_lock))) { \
/linux-4.1.27/arch/x86/include/asm/
spinlock.h 84 static __always_inline int arch_spin_value_unlocked(arch_spinlock_t lock) in arch_spin_value_unlocked() function
/linux-4.1.27/arch/ia64/include/asm/
spinlock.h 105 static __always_inline int arch_spin_value_unlocked(arch_spinlock_t lock) in arch_spin_value_unlocked() function