
Searched refs: arch_spin_is_locked (Results 1 – 29 of 29) sorted by relevance

/linux-4.1.27/include/linux/
spinlock_up.h:26  #define arch_spin_is_locked(x) ((x)->slock == 0)  [macro]
spinlock_up.h:69  #define arch_spin_is_locked(lock) ((void)(lock), 0)  [macro]
spinlock_up.h:83  do { cpu_relax(); } while (arch_spin_is_locked(lock))
spinlock.h:107  #define raw_spin_is_locked(lock) arch_spin_is_locked(&(lock)->raw_lock)
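The generic results above show the two layers involved: raw_spin_is_locked() simply forwards to the architecture hook, while the uniprocessor fallback in spinlock_up.h collapses the check to a constant. The standalone sketch below models that dispatch; the struct layouts and the main() driver are simplified stand-ins for illustration, not the kernel's own definitions.

```c
/*
 * Standalone model of the dispatch visible in the results above:
 * raw_spin_is_locked() forwards to the arch hook, and on !SMP the
 * hook evaluates its argument and reports "never locked". Types are
 * simplified stand-ins, not the kernel's real lock structures.
 */
#include <stdio.h>

typedef struct { volatile unsigned int slock; } arch_spinlock_t;
typedef struct { arch_spinlock_t raw_lock; } raw_spinlock_t;

#ifdef CONFIG_SMP
/* SMP-style check: a nonzero lock word means some CPU holds the lock. */
#define arch_spin_is_locked(x)    ((x)->slock != 0)
#else
/* UP fallback, as in spinlock_up.h:69: evaluate the argument, report unlocked. */
#define arch_spin_is_locked(lock) ((void)(lock), 0)
#endif

/* Generic wrapper, as in include/linux/spinlock.h:107. */
#define raw_spin_is_locked(lock)  arch_spin_is_locked(&(lock)->raw_lock)

int main(void)
{
	raw_spinlock_t lock = { { 0 } };

	printf("before: %d\n", raw_spin_is_locked(&lock)); /* 0 */
	lock.raw_lock.slock = 1;                           /* pretend it was taken */
	printf("after:  %d\n", raw_spin_is_locked(&lock)); /* 1 on SMP, 0 on UP */
	return 0;
}
```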
/linux-4.1.27/arch/parisc/include/asm/
spinlock.h:9  static inline int arch_spin_is_locked(arch_spinlock_t *x)  [function]
spinlock.h:17  do { cpu_relax(); } while (arch_spin_is_locked(x))
spinlock.h:115  while (arch_spin_is_locked(&rw->lock) && rw->counter >= 0)  [in arch_read_trylock()]
/linux-4.1.27/arch/hexagon/include/asm/
spinlock.h:180  do {while (arch_spin_is_locked(lock)) cpu_relax(); } while (0)
spinlock.h:181  #define arch_spin_is_locked(x) ((x)->lock != 0)  [macro]
/linux-4.1.27/arch/xtensa/include/asm/
spinlock.h:31  #define arch_spin_is_locked(x) ((x)->slock != 0)  [macro]
spinlock.h:33  do { while (arch_spin_is_locked(lock)) cpu_relax(); } while (0)
/linux-4.1.27/arch/blackfin/include/asm/
spinlock.h:27  static inline int arch_spin_is_locked(arch_spinlock_t *lock)  [function]
spinlock.h:51  while (arch_spin_is_locked(lock))  [in arch_spin_unlock_wait()]
/linux-4.1.27/arch/sh/include/asm/
spinlock.h:26  #define arch_spin_is_locked(x) ((x)->lock <= 0)  [macro]
spinlock.h:29  do { while (arch_spin_is_locked(x)) cpu_relax(); } while (0)
/linux-4.1.27/arch/arm64/include/asm/
spinlock.h:30  do { while (arch_spin_is_locked(lock)) cpu_relax(); } while (0)
spinlock.h:100  static inline int arch_spin_is_locked(arch_spinlock_t *lock)  [function]
/linux-4.1.27/arch/mn10300/include/asm/
spinlock.h:25  #define arch_spin_is_locked(x) (*(volatile signed char *)(&(x)->slock) != 0)  [macro]
spinlock.h:26  #define arch_spin_unlock_wait(x) do { barrier(); } while (arch_spin_is_locked(x))
/linux-4.1.27/arch/arc/include/asm/
spinlock.h:16  #define arch_spin_is_locked(x) ((x)->slock != __ARCH_SPIN_LOCK_UNLOCKED__)  [macro]
spinlock.h:19  do { while (arch_spin_is_locked(x)) cpu_relax(); } while (0)
/linux-4.1.27/arch/m32r/include/asm/
spinlock.h:28  #define arch_spin_is_locked(x) (*(volatile int *)(&(x)->slock) <= 0)  [macro]
spinlock.h:31  do { cpu_relax(); } while (arch_spin_is_locked(x))
/linux-4.1.27/arch/sparc/include/asm/
spinlock_32.h:14  #define arch_spin_is_locked(lock) (*((volatile unsigned char *)(lock)) != 0)  [macro]
spinlock_32.h:17  do { while (arch_spin_is_locked(lock)) cpu_relax(); } while (0)
spinlock_64.h:24  #define arch_spin_is_locked(lp) ((lp)->lock != 0)  [macro]
/linux-4.1.27/tools/lib/lockdep/uinclude/linux/
spinlock.h:20  static inline bool arch_spin_is_locked(arch_spinlock_t *mutex)  [function]
/linux-4.1.27/arch/arm/include/asm/
spinlock.h:54  do { while (arch_spin_is_locked(lock)) cpu_relax(); } while (0)
spinlock.h:121  static inline int arch_spin_is_locked(arch_spinlock_t *lock)  [function]
/linux-4.1.27/arch/powerpc/include/asm/
spinlock.h:62  static inline int arch_spin_is_locked(arch_spinlock_t *lock)  [function]
spinlock.h:171  do { while (arch_spin_is_locked(lock)) cpu_relax(); } while (0)
/linux-4.1.27/arch/metag/include/asm/
spinlock.h:11  do { while (arch_spin_is_locked(lock)) cpu_relax(); } while (0)
spinlock_lnkget.h:10  static inline int arch_spin_is_locked(arch_spinlock_t *lock)  [function]
spinlock_lock1.h:7  static inline int arch_spin_is_locked(arch_spinlock_t *lock)  [function]
/linux-4.1.27/arch/s390/include/asm/
spinlock.h:54  static inline int arch_spin_is_locked(arch_spinlock_t *lp)  [function]
spinlock.h:99  while (arch_spin_is_locked(lock))  [in arch_spin_unlock_wait()]
/linux-4.1.27/arch/mips/include/asm/
spinlock.h:38  static inline int arch_spin_is_locked(arch_spinlock_t *lock)  [function]
spinlock.h:47  while (arch_spin_is_locked(x)) { cpu_relax(); }
/linux-4.1.27/arch/tile/include/asm/
spinlock_32.h:36  static inline int arch_spin_is_locked(arch_spinlock_t *lock)  [function]
spinlock_64.h:45  static inline int arch_spin_is_locked(arch_spinlock_t *lock)  [function]
/linux-4.1.27/arch/tile/lib/
spinlock_64.c:68  while (arch_spin_is_locked(lock))  [in arch_spin_unlock_wait()]
spinlock_32.c:68  while (arch_spin_is_locked(lock))  [in arch_spin_unlock_wait()]
/linux-4.1.27/arch/alpha/include/asm/
spinlock.h:15  #define arch_spin_is_locked(x) ((x)->lock != 0)  [macro]
/linux-4.1.27/arch/x86/include/asm/
spinlock.h:160  static inline int arch_spin_is_locked(arch_spinlock_t *lock)  [function]
/linux-4.1.27/arch/ia64/include/asm/
spinlock.h:110  static inline int arch_spin_is_locked(arch_spinlock_t *lock)  [function]
/linux-4.1.27/kernel/locking/
lockdep.c:100  if (debug_locks && !arch_spin_is_locked(&lockdep_lock)) {  [in graph_unlock()]
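Across the architecture-specific results, arch_spin_is_locked() is consumed in two recurring ways: arch_spin_unlock_wait() spins on it with cpu_relax() until the lock is observed free, and lockdep's graph_unlock() uses it defensively to catch a release of a lock that is not actually held. The user-space sketch below mirrors both patterns with simplified atomics and stand-in names; it is not the kernel implementation of any of these functions.

```c
/*
 * Sketch of the two usage patterns seen above: spinning until a lock
 * is released (arch_spin_unlock_wait style) and checking that a lock
 * is held before touching state it protects (graph_unlock style in
 * lockdep.c). All types and helpers here are simplified stand-ins.
 */
#include <stdatomic.h>
#include <stdio.h>

typedef struct { atomic_int lock; } arch_spinlock_t;

static inline void cpu_relax(void)
{
	/* The kernel emits an arch-specific CPU hint here (e.g. PAUSE on x86);
	 * a no-op is enough for this sketch. */
}

static inline int arch_spin_is_locked(arch_spinlock_t *x)
{
	/* Nonzero lock word == held, matching the common "(x)->lock != 0" test. */
	return atomic_load_explicit(&x->lock, memory_order_acquire) != 0;
}

/* Pattern 1: wait for the current holder to drop the lock without taking it. */
static void arch_spin_unlock_wait(arch_spinlock_t *x)
{
	while (arch_spin_is_locked(x))
		cpu_relax();
}

/* Pattern 2: debug check before releasing state the lock is supposed to guard. */
static int graph_unlock(arch_spinlock_t *graph_lock)
{
	if (!arch_spin_is_locked(graph_lock)) {
		fprintf(stderr, "graph_unlock called without the lock held\n");
		return 0;
	}
	atomic_store_explicit(&graph_lock->lock, 0, memory_order_release);
	return 1;
}

int main(void)
{
	arch_spinlock_t l = { 0 };

	atomic_store(&l.lock, 1);   /* pretend we acquired the lock */
	graph_unlock(&l);           /* check passes, lock is released */
	arch_spin_unlock_wait(&l);  /* returns immediately, lock is free */
	graph_unlock(&l);           /* warns: lock not held */
	return 0;
}
```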