
Searched defs:arch_spin_is_locked (Results 1 – 25 of 25) sorted by relevance

/linux-4.4.14/include/linux/
spinlock_up.h:26 #define arch_spin_is_locked(x) ((x)->slock == 0) macro
spinlock_up.h:69 #define arch_spin_is_locked(lock) ((void)(lock), 0) macro
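The two spinlock_up.h hits above are the uniprocessor fallbacks: with CONFIG_DEBUG_SPINLOCK the lock keeps a real slock word (0 meaning held, per the test shown), and without it the lock carries no state, so the check is a constant 0. A minimal user-space sketch of that difference, with assumed struct names (this is not the kernel header itself):

/* Sketch of the two UP fallbacks listed above; struct names are illustrative. */
#include <stdio.h>

/* Debug variant: the lock keeps a real state word, 0 means "held". */
struct dbg_arch_spinlock { volatile unsigned int slock; };
#define dbg_arch_spin_is_locked(x)      ((x)->slock == 0)

/* Non-debug variant: no usable state, the lock never reports "held".
 * (The kernel's struct is empty here; a placeholder member keeps this
 * sketch valid in portable C.) */
struct nodbg_arch_spinlock { char unused; };
#define nodbg_arch_spin_is_locked(lock) ((void)(lock), 0)

int main(void)
{
	struct dbg_arch_spinlock a = { .slock = 1 };   /* 1 == unlocked in this scheme */
	struct nodbg_arch_spinlock b = { 0 };

	printf("debug UP, free:      %d\n", dbg_arch_spin_is_locked(&a));
	a.slock = 0;                                   /* pretend arch_spin_lock() ran */
	printf("debug UP, held:      %d\n", dbg_arch_spin_is_locked(&a));
	printf("no-debug UP, always: %d\n", nodbg_arch_spin_is_locked(&b));
	return 0;
}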
/linux-4.4.14/tools/lib/lockdep/uinclude/linux/
spinlock.h:20 static inline bool arch_spin_is_locked(arch_spinlock_t *mutex) in arch_spin_is_locked() function
/linux-4.4.14/arch/tile/include/asm/
spinlock_32.h:36 static inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked() function
spinlock_64.h:47 static inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked() function
/linux-4.4.14/include/asm-generic/
qspinlock.h:155 #define arch_spin_is_locked(l) queued_spin_is_locked(l) macro
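The asm-generic entry just forwards to queued_spin_is_locked(); in this kernel version that amounts to testing whether the packed qspinlock word is non-zero. A rough, stand-alone approximation of that check, with assumed names (the real lock word packs a locked byte, a pending bit and a waiter tail):

/* Rough approximation of the queued-spinlock "is locked" test; this only
 * models "lock word != 0" and is not the kernel's qspinlock code. */
#include <stdatomic.h>
#include <stdbool.h>
#include <stdio.h>

struct qspinlock_like { atomic_uint val; };

static inline bool my_queued_spin_is_locked(struct qspinlock_like *lock)
{
	/* Any non-zero value (locked byte, pending bit or queued tail)
	 * means the lock is held or contended. */
	return atomic_load_explicit(&lock->val, memory_order_relaxed) != 0;
}

int main(void)
{
	struct qspinlock_like l = { .val = 0 };

	printf("free: %d\n", my_queued_spin_is_locked(&l));
	atomic_store(&l.val, 1);          /* pretend the locked byte was set */
	printf("held: %d\n", my_queued_spin_is_locked(&l));
	return 0;
}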
/linux-4.4.14/arch/hexagon/include/asm/
spinlock.h:181 #define arch_spin_is_locked(x) ((x)->lock != 0) macro
/linux-4.4.14/arch/metag/include/asm/
spinlock_lock1.h:7 static inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked() function
spinlock_lnkget.h:10 static inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked() function
/linux-4.4.14/arch/blackfin/include/asm/
spinlock.h:27 static inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked() function
/linux-4.4.14/arch/xtensa/include/asm/
spinlock.h:31 #define arch_spin_is_locked(x) ((x)->slock != 0) macro
/linux-4.4.14/arch/parisc/include/asm/
spinlock.h:9 static inline int arch_spin_is_locked(arch_spinlock_t *x) in arch_spin_is_locked() function
/linux-4.4.14/arch/alpha/include/asm/
spinlock.h:15 #define arch_spin_is_locked(x) ((x)->lock != 0) macro
/linux-4.4.14/arch/sparc/include/asm/
spinlock_32.h:14 #define arch_spin_is_locked(lock) (*((volatile unsigned char *)(lock)) != 0) macro
spinlock_64.h:24 #define arch_spin_is_locked(lp) ((lp)->lock != 0) macro
/linux-4.4.14/arch/mn10300/include/asm/
spinlock.h:25 #define arch_spin_is_locked(x) (*(volatile signed char *)(&(x)->slock) != 0) macro
/linux-4.4.14/arch/sh/include/asm/
spinlock.h:26 #define arch_spin_is_locked(x) ((x)->lock <= 0) macro
/linux-4.4.14/arch/m32r/include/asm/
spinlock.h:28 #define arch_spin_is_locked(x) (*(volatile int *)(&(x)->slock) <= 0) macro
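Note the two polarities among the plain lock-word tests above: the hexagon, xtensa, alpha, sparc, mn10300 and arc macros report "held" when the word differs from its unlocked value, while sh and m32r report "held" when the word is zero or below, consistent with a word that sits at 1 when free and is brought to zero (or negative) by an acquire. A tiny contrast of the two conventions (initial values and transitions are assumptions for illustration, not the kernel's):

/* Two polarity conventions for the "is locked" test seen above.
 * Convention A (assuming the unlocked value is zero): held <=> word != 0.
 * Convention B (counting word that starts at 1):      held <=> word <= 0. */
#include <stdio.h>

static int is_locked_nonzero(volatile int *word)  { return *word != 0; }
static int is_locked_counting(volatile int *word) { return *word <= 0; }

int main(void)
{
	volatile int a = 0;   /* convention A, free */
	volatile int b = 1;   /* convention B, free */

	printf("A free: %d   B free: %d\n", is_locked_nonzero(&a), is_locked_counting(&b));
	a = 1;                /* A: acquiring sets the word */
	b = 0;                /* B: acquiring decrements toward zero */
	printf("A held: %d   B held: %d\n", is_locked_nonzero(&a), is_locked_counting(&b));
	return 0;
}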
/linux-4.4.14/arch/arm64/include/asm/
spinlock.h:130 static inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked() function
/linux-4.4.14/arch/x86/include/asm/
spinlock.h:164 static inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked() function
/linux-4.4.14/arch/mips/include/asm/
spinlock.h:38 static inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked() function
/linux-4.4.14/arch/arm/include/asm/
spinlock.h:121 static inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked() function
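The arm64, mips and arm entries above, and the non-queued x86 path, use ticket locks in this kernel version: arch_spin_is_locked() reports the lock as held whenever the currently-served ticket differs from the next ticket to hand out. A simplified, stand-alone sketch of that pattern (field and function names are illustrative, not the kernel's):

/* Simplified ticket-lock sketch: the lock is considered held while the
 * currently-served ticket (owner) lags behind the next ticket to hand out.
 * This mirrors the head/tail comparison in the entries above, but it is
 * not the kernel implementation. */
#include <stdatomic.h>
#include <stdbool.h>
#include <stdio.h>

struct ticket_lock { atomic_ushort owner; atomic_ushort next; };

static inline bool ticket_spin_is_locked(struct ticket_lock *lock)
{
	unsigned short owner = atomic_load(&lock->owner);
	unsigned short next  = atomic_load(&lock->next);
	return owner != next;       /* a ticket is outstanding => lock held */
}

static inline void ticket_spin_lock(struct ticket_lock *lock)
{
	unsigned short me = atomic_fetch_add(&lock->next, 1); /* take a ticket */
	while (atomic_load(&lock->owner) != me)
		;                                              /* spin until served */
}

static inline void ticket_spin_unlock(struct ticket_lock *lock)
{
	atomic_fetch_add(&lock->owner, 1);                     /* serve next ticket */
}

int main(void)
{
	struct ticket_lock l = { .owner = 0, .next = 0 };

	printf("free: %d\n", ticket_spin_is_locked(&l));
	ticket_spin_lock(&l);
	printf("held: %d\n", ticket_spin_is_locked(&l));
	ticket_spin_unlock(&l);
	printf("free: %d\n", ticket_spin_is_locked(&l));
	return 0;
}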
/linux-4.4.14/arch/powerpc/include/asm/
spinlock.h:60 static inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked() function
/linux-4.4.14/arch/s390/include/asm/
spinlock.h:54 static inline int arch_spin_is_locked(arch_spinlock_t *lp) in arch_spin_is_locked() function
/linux-4.4.14/arch/ia64/include/asm/
spinlock.h:110 static inline int arch_spin_is_locked(arch_spinlock_t *lock) in arch_spin_is_locked() function
/linux-4.4.14/arch/arc/include/asm/
spinlock.h:16 #define arch_spin_is_locked(x) ((x)->slock != __ARCH_SPIN_LOCK_UNLOCKED__) macro