arch_spinlock_t 19 arch/alpha/include/asm/spinlock.h static inline int arch_spin_value_unlocked(arch_spinlock_t lock)
arch_spinlock_t 24 arch/alpha/include/asm/spinlock.h static inline void arch_spin_unlock(arch_spinlock_t * lock)
arch_spinlock_t 30 arch/alpha/include/asm/spinlock.h static inline void arch_spin_lock(arch_spinlock_t * lock)
arch_spinlock_t 50 arch/alpha/include/asm/spinlock.h static inline int arch_spin_trylock(arch_spinlock_t *lock)
arch_spinlock_t 107 arch/arc/include/asm/smp.h extern arch_spinlock_t smp_atomic_ops_lock;
arch_spinlock_t 108 arch/arc/include/asm/smp.h extern arch_spinlock_t smp_bitops_lock;
arch_spinlock_t 17 arch/arc/include/asm/spinlock.h static inline void arch_spin_lock(arch_spinlock_t *lock)
arch_spinlock_t 44 arch/arc/include/asm/spinlock.h static inline int arch_spin_trylock(arch_spinlock_t *lock)
arch_spinlock_t 67 arch/arc/include/asm/spinlock.h static inline void arch_spin_unlock(arch_spinlock_t *lock)
arch_spinlock_t 222 arch/arc/include/asm/spinlock.h static inline void arch_spin_lock(arch_spinlock_t *lock)
arch_spinlock_t 250 arch/arc/include/asm/spinlock.h static inline int arch_spin_trylock(arch_spinlock_t *lock)
arch_spinlock_t 267 arch/arc/include/asm/spinlock.h static inline void arch_spin_unlock(arch_spinlock_t *lock)
arch_spinlock_t 27 arch/arc/include/asm/spinlock_types.h arch_spinlock_t lock_mutex;
arch_spinlock_t 31 arch/arc/kernel/smp.c arch_spinlock_t smp_atomic_ops_lock = __ARCH_SPIN_LOCK_UNLOCKED;
arch_spinlock_t 32 arch/arc/kernel/smp.c arch_spinlock_t smp_bitops_lock = __ARCH_SPIN_LOCK_UNLOCKED;
arch_spinlock_t 183 arch/arm/common/mcpm_entry.c static arch_spinlock_t mcpm_lock = __ARCH_SPIN_LOCK_UNLOCKED;
arch_spinlock_t 56 arch/arm/include/asm/spinlock.h static inline void arch_spin_lock(arch_spinlock_t *lock)
arch_spinlock_t 60 arch/arm/include/asm/spinlock.h arch_spinlock_t lockval;
arch_spinlock_t 81 arch/arm/include/asm/spinlock.h static inline int arch_spin_trylock(arch_spinlock_t *lock)
arch_spinlock_t 107 arch/arm/include/asm/spinlock.h static inline void arch_spin_unlock(arch_spinlock_t *lock)
arch_spinlock_t 114 arch/arm/include/asm/spinlock.h static inline int arch_spin_value_unlocked(arch_spinlock_t lock)
arch_spinlock_t 119 arch/arm/include/asm/spinlock.h static inline int arch_spin_is_locked(arch_spinlock_t *lock)
arch_spinlock_t 124 arch/arm/include/asm/spinlock.h static inline int arch_spin_is_contended(arch_spinlock_t *lock)
arch_spinlock_t 294 arch/arm/kernel/traps.c static arch_spinlock_t die_lock = __ARCH_SPIN_LOCK_UNLOCKED;
arch_spinlock_t 14 arch/csky/include/asm/spinlock.h static inline void arch_spin_lock(arch_spinlock_t *lock)
arch_spinlock_t 16 arch/csky/include/asm/spinlock.h arch_spinlock_t lockval;
arch_spinlock_t 37 arch/csky/include/asm/spinlock.h static inline int arch_spin_trylock(arch_spinlock_t *lock)
arch_spinlock_t 65 arch/csky/include/asm/spinlock.h static inline void arch_spin_unlock(arch_spinlock_t *lock)
arch_spinlock_t 71 arch/csky/include/asm/spinlock.h static inline int arch_spin_value_unlocked(arch_spinlock_t lock)
arch_spinlock_t 76 arch/csky/include/asm/spinlock.h static inline int arch_spin_is_locked(arch_spinlock_t *lock)
arch_spinlock_t 81 arch/csky/include/asm/spinlock.h static inline int arch_spin_is_contended(arch_spinlock_t *lock)
arch_spinlock_t 99 arch/csky/include/asm/spinlock.h static inline void arch_spin_lock(arch_spinlock_t *lock)
arch_spinlock_t 116 arch/csky/include/asm/spinlock.h static inline void arch_spin_unlock(arch_spinlock_t *lock)
arch_spinlock_t 122 arch/csky/include/asm/spinlock.h static inline int arch_spin_trylock(arch_spinlock_t *lock)
arch_spinlock_t 115 arch/hexagon/include/asm/spinlock.h static inline void arch_spin_lock(arch_spinlock_t *lock)
arch_spinlock_t 130 arch/hexagon/include/asm/spinlock.h static inline void arch_spin_unlock(arch_spinlock_t *lock)
arch_spinlock_t 136 arch/hexagon/include/asm/spinlock.h static inline unsigned int arch_spin_trylock(arch_spinlock_t *lock)
arch_spinlock_t 43 arch/ia64/include/asm/spinlock.h static __always_inline void __ticket_spin_lock(arch_spinlock_t *lock)
arch_spinlock_t 63 arch/ia64/include/asm/spinlock.h static __always_inline int __ticket_spin_trylock(arch_spinlock_t *lock)
arch_spinlock_t 72 arch/ia64/include/asm/spinlock.h static __always_inline void __ticket_spin_unlock(arch_spinlock_t *lock)
arch_spinlock_t 82 arch/ia64/include/asm/spinlock.h static inline int __ticket_spin_is_locked(arch_spinlock_t *lock)
arch_spinlock_t 89 arch/ia64/include/asm/spinlock.h static inline int __ticket_spin_is_contended(arch_spinlock_t *lock)
arch_spinlock_t 96 arch/ia64/include/asm/spinlock.h static __always_inline int arch_spin_value_unlocked(arch_spinlock_t lock)
arch_spinlock_t 101 arch/ia64/include/asm/spinlock.h static inline int arch_spin_is_locked(arch_spinlock_t *lock)
arch_spinlock_t 106 arch/ia64/include/asm/spinlock.h static inline int arch_spin_is_contended(arch_spinlock_t *lock)
arch_spinlock_t 112 arch/ia64/include/asm/spinlock.h static __always_inline void arch_spin_lock(arch_spinlock_t *lock)
arch_spinlock_t 117 arch/ia64/include/asm/spinlock.h static __always_inline int arch_spin_trylock(arch_spinlock_t *lock)
arch_spinlock_t 122 arch/ia64/include/asm/spinlock.h static __always_inline void arch_spin_unlock(arch_spinlock_t *lock)
arch_spinlock_t 127 arch/ia64/include/asm/spinlock.h static __always_inline void arch_spin_lock_flags(arch_spinlock_t *lock,
arch_spinlock_t 24 arch/mips/sgi-ip27/ip27-nmi.c static arch_spinlock_t nmi_lock = __ARCH_SPIN_LOCK_UNLOCKED;
arch_spinlock_t 32 arch/parisc/include/asm/atomic.h extern arch_spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] __lock_aligned;
arch_spinlock_t 37 arch/parisc/include/asm/atomic.h arch_spinlock_t *s = ATOMIC_HASH(l); \
arch_spinlock_t 43 arch/parisc/include/asm/atomic.h arch_spinlock_t *s = ATOMIC_HASH(l); \
arch_spinlock_t 20 arch/parisc/include/asm/futex.h arch_spinlock_t *s = (arch_spinlock_t *)&lws_lock_start[index];
arch_spinlock_t 30 arch/parisc/include/asm/futex.h arch_spinlock_t *s = (arch_spinlock_t *)&lws_lock_start[index];
arch_spinlock_t 10 arch/parisc/include/asm/spinlock.h static inline int arch_spin_is_locked(arch_spinlock_t *x)
arch_spinlock_t 18 arch/parisc/include/asm/spinlock.h static inline void arch_spin_lock_flags(arch_spinlock_t *x,
arch_spinlock_t 35 arch/parisc/include/asm/spinlock.h static inline void arch_spin_unlock(arch_spinlock_t *x)
arch_spinlock_t 48 arch/parisc/include/asm/spinlock.h static inline int arch_spin_trylock(arch_spinlock_t *x)
arch_spinlock_t 16 arch/parisc/include/asm/spinlock_types.h arch_spinlock_t lock;
arch_spinlock_t 15 arch/parisc/lib/bitops.c arch_spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] __lock_aligned = {
arch_spinlock_t 59 arch/powerpc/include/asm/rtas.h arch_spinlock_t lock;
arch_spinlock_t 51 arch/powerpc/include/asm/spinlock.h static __always_inline int arch_spin_value_unlocked(arch_spinlock_t lock)
arch_spinlock_t 56 arch/powerpc/include/asm/spinlock.h static inline int arch_spin_is_locked(arch_spinlock_t *lock)
arch_spinlock_t 66 arch/powerpc/include/asm/spinlock.h static inline unsigned long __arch_spin_trylock(arch_spinlock_t *lock)
arch_spinlock_t 86 arch/powerpc/include/asm/spinlock.h static inline int arch_spin_trylock(arch_spinlock_t *lock)
arch_spinlock_t 107 arch/powerpc/include/asm/spinlock.h void splpar_spin_yield(arch_spinlock_t *lock);
arch_spinlock_t 110 arch/powerpc/include/asm/spinlock.h static inline void splpar_spin_yield(arch_spinlock_t *lock) {};
arch_spinlock_t 128 arch/powerpc/include/asm/spinlock.h static inline void spin_yield(arch_spinlock_t *lock)
arch_spinlock_t 144 arch/powerpc/include/asm/spinlock.h static inline void arch_spin_lock(arch_spinlock_t *lock)
arch_spinlock_t 159 arch/powerpc/include/asm/spinlock.h void arch_spin_lock_flags(arch_spinlock_t *lock, unsigned long flags)
arch_spinlock_t 179 arch/powerpc/include/asm/spinlock.h static inline void arch_spin_unlock(arch_spinlock_t *lock)
arch_spinlock_t 1237 arch/powerpc/kernel/rtas.c static arch_spinlock_t timebase_lock;
arch_spinlock_t 154 arch/powerpc/kernel/traps.c static arch_spinlock_t die_lock = __ARCH_SPIN_LOCK_UNLOCKED;
arch_spinlock_t 98 arch/powerpc/kvm/book3s_xics.h arch_spinlock_t lock;
arch_spinlock_t 89 arch/powerpc/kvm/book3s_xive.h arch_spinlock_t lock;
arch_spinlock_t 21 arch/powerpc/lib/locks.c void splpar_spin_yield(arch_spinlock_t *lock)
arch_spinlock_t 99 arch/powerpc/platforms/pasemi/setup.c static arch_spinlock_t timebase_lock;
arch_spinlock_t 22 arch/riscv/include/asm/spinlock.h static inline void arch_spin_unlock(arch_spinlock_t *lock)
arch_spinlock_t 27 arch/riscv/include/asm/spinlock.h static inline int arch_spin_trylock(arch_spinlock_t *lock)
arch_spinlock_t 41 arch/riscv/include/asm/spinlock.h static inline void arch_spin_lock(arch_spinlock_t *lock)
arch_spinlock_t 36 arch/s390/include/asm/spinlock.h void arch_spin_relax(arch_spinlock_t *lock);
arch_spinlock_t 39 arch/s390/include/asm/spinlock.h void arch_spin_lock_wait(arch_spinlock_t *);
arch_spinlock_t 40 arch/s390/include/asm/spinlock.h int arch_spin_trylock_retry(arch_spinlock_t *);
arch_spinlock_t 48 arch/s390/include/asm/spinlock.h static inline int arch_spin_value_unlocked(arch_spinlock_t lock)
arch_spinlock_t 53 arch/s390/include/asm/spinlock.h static inline int arch_spin_is_locked(arch_spinlock_t *lp)
arch_spinlock_t 58 arch/s390/include/asm/spinlock.h static inline int arch_spin_trylock_once(arch_spinlock_t *lp)
arch_spinlock_t 64 arch/s390/include/asm/spinlock.h static inline void arch_spin_lock(arch_spinlock_t *lp)
arch_spinlock_t 70 arch/s390/include/asm/spinlock.h static inline void arch_spin_lock_flags(arch_spinlock_t *lp,
arch_spinlock_t 78 arch/s390/include/asm/spinlock.h static inline int arch_spin_trylock(arch_spinlock_t *lp)
arch_spinlock_t 85 arch/s390/include/asm/spinlock.h static inline void arch_spin_unlock(arch_spinlock_t *lp)
arch_spinlock_t 17 arch/s390/include/asm/spinlock_types.h arch_spinlock_t wait;
arch_spinlock_t 117 arch/s390/lib/spinlock.c static inline void arch_spin_lock_queued(arch_spinlock_t *lp)
arch_spinlock_t 211 arch/s390/lib/spinlock.c static inline void arch_spin_lock_classic(arch_spinlock_t *lp)
arch_spinlock_t 243 arch/s390/lib/spinlock.c void arch_spin_lock_wait(arch_spinlock_t *lp)
arch_spinlock_t 253 arch/s390/lib/spinlock.c int arch_spin_trylock_retry(arch_spinlock_t *lp)
arch_spinlock_t 314 arch/s390/lib/spinlock.c void arch_spin_relax(arch_spinlock_t *lp)
arch_spinlock_t 28 arch/sh/include/asm/spinlock-cas.h static inline void arch_spin_lock(arch_spinlock_t *lock)
arch_spinlock_t 33 arch/sh/include/asm/spinlock-cas.h static inline void arch_spin_unlock(arch_spinlock_t *lock)
arch_spinlock_t 38 arch/sh/include/asm/spinlock-cas.h static inline int arch_spin_trylock(arch_spinlock_t *lock)
arch_spinlock_t 26 arch/sh/include/asm/spinlock-llsc.h static inline void arch_spin_lock(arch_spinlock_t *lock)
arch_spinlock_t 46 arch/sh/include/asm/spinlock-llsc.h static inline void arch_spin_unlock(arch_spinlock_t *lock)
arch_spinlock_t 61 arch/sh/include/asm/spinlock-llsc.h static inline int arch_spin_trylock(arch_spinlock_t *lock)
arch_spinlock_t 18 arch/sparc/include/asm/spinlock_32.h static inline void arch_spin_lock(arch_spinlock_t *lock)
arch_spinlock_t 38 arch/sparc/include/asm/spinlock_32.h static inline int arch_spin_trylock(arch_spinlock_t *lock)
arch_spinlock_t 48 arch/sparc/include/asm/spinlock_32.h static inline void arch_spin_unlock(arch_spinlock_t *lock)
arch_spinlock_t 301 arch/x86/kernel/dumpstack.c static arch_spinlock_t die_lock = __ARCH_SPIN_LOCK_UNLOCKED;
arch_spinlock_t 673 arch/x86/kernel/hpet.c arch_spinlock_t lock;
arch_spinlock_t 215 arch/x86/kernel/tsc_sync.c static arch_spinlock_t sync_lock = __ARCH_SPIN_LOCK_UNLOCKED;
arch_spinlock_t 26 include/asm-generic/qrwlock_types.h arch_spinlock_t wait_lock;
arch_spinlock_t 21 include/linux/spinlock_types.h arch_spinlock_t raw_lock;
arch_spinlock_t 29 include/linux/spinlock_up.h static inline void arch_spin_lock(arch_spinlock_t *lock)
arch_spinlock_t 35 include/linux/spinlock_up.h static inline int arch_spin_trylock(arch_spinlock_t *lock)
arch_spinlock_t 45 include/linux/spinlock_up.h static inline void arch_spin_unlock(arch_spinlock_t *lock)
arch_spinlock_t 223 kernel/bpf/helpers.c arch_spinlock_t *l = (void *)lock;
arch_spinlock_t 226 kernel/bpf/helpers.c arch_spinlock_t lock;
arch_spinlock_t 237 kernel/bpf/helpers.c arch_spinlock_t *l = (void *)lock;
arch_spinlock_t 87 kernel/locking/lockdep.c static arch_spinlock_t lockdep_lock = (arch_spinlock_t)__ARCH_SPIN_LOCK_UNLOCKED;
arch_spinlock_t 26 kernel/locking/spinlock_debug.c lock->raw_lock = (arch_spinlock_t)__ARCH_SPIN_LOCK_UNLOCKED;
arch_spinlock_t 448 kernel/trace/ring_buffer.c arch_spinlock_t lock;
arch_spinlock_t 1302 kernel/trace/ring_buffer.c cpu_buffer->lock = (arch_spinlock_t)__ARCH_SPIN_LOCK_UNLOCKED;
arch_spinlock_t 1940 kernel/trace/trace.c static arch_spinlock_t trace_cmdline_lock = __ARCH_SPIN_LOCK_UNLOCKED;
arch_spinlock_t 8415 kernel/trace/trace.c tr->max_lock = (arch_spinlock_t)__ARCH_SPIN_LOCK_UNLOCKED;
arch_spinlock_t 9196 kernel/trace/trace.c global_trace.max_lock = (arch_spinlock_t)__ARCH_SPIN_LOCK_UNLOCKED;
arch_spinlock_t 282 kernel/trace/trace.h arch_spinlock_t max_lock;
arch_spinlock_t 88 kernel/trace/trace_clock.c arch_spinlock_t lock;
arch_spinlock_t 91 kernel/trace/trace_clock.c .lock = (arch_spinlock_t)__ARCH_SPIN_LOCK_UNLOCKED,
arch_spinlock_t 33 kernel/trace/trace_sched_wakeup.c static arch_spinlock_t wakeup_lock =
arch_spinlock_t 34 kernel/trace/trace_sched_wakeup.c (arch_spinlock_t)__ARCH_SPIN_LOCK_UNLOCKED;
arch_spinlock_t 29 kernel/trace/trace_stack.c static arch_spinlock_t stack_trace_max_lock =
arch_spinlock_t 30 kernel/trace/trace_stack.c (arch_spinlock_t)__ARCH_SPIN_LOCK_UNLOCKED;
arch_spinlock_t 25 tools/include/linux/spinlock.h static inline void arch_spin_lock(arch_spinlock_t *mutex)
arch_spinlock_t 30 tools/include/linux/spinlock.h static inline void arch_spin_unlock(arch_spinlock_t *mutex)
arch_spinlock_t 35 tools/include/linux/spinlock.h static inline bool arch_spin_is_locked(arch_spinlock_t *mutex)
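
The hits above fall into two groups: per-architecture implementations of the raw primitives (arch_spin_lock(), arch_spin_trylock(), arch_spin_unlock(), arch_spin_is_locked(), arch_spin_value_unlocked()) under arch/*/include/asm/spinlock*.h, and core-kernel users such as die_lock, lockdep_lock and trace_cmdline_lock that define an arch_spinlock_t statically with __ARCH_SPIN_LOCK_UNLOCKED and take it directly, bypassing the debugging and lockdep layers that wrap spinlock_t. A minimal sketch of that consumer pattern follows; example_lock and example_critical_section() are illustrative names, not taken from any file listed, and the caller is assumed to run in kernel context with interrupts already disabled.

#include <linux/spinlock.h>	/* arch_spinlock_t, __ARCH_SPIN_LOCK_UNLOCKED, arch_spin_*() */

/* Illustrative raw lock, following the pattern of die_lock and
 * trace_cmdline_lock in the listing: the arch-level lock skips the
 * debug and lockdep machinery that a spinlock_t would go through. */
static arch_spinlock_t example_lock = __ARCH_SPIN_LOCK_UNLOCKED;

static void example_critical_section(void)
{
	/* arch_spin_lock() does not disable interrupts or preemption
	 * on its own; callers are expected to have done that first. */
	arch_spin_lock(&example_lock);
	/* ... work that must not rely on the normal spinlock layers ... */
	arch_spin_unlock(&example_lock);
}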