raw_spin_trylock   360  arch/x86/kernel/nmi.c            while (!raw_spin_trylock(&nmi_reason_lock)) {
raw_spin_trylock   483  arch/x86/platform/uv/uv_nmi.c    if (raw_spin_trylock(&hub_nmi->nmi_lock)) {
raw_spin_trylock   298  include/linux/spinlock.h         raw_spin_trylock(lock) ? \
raw_spin_trylock   305  include/linux/spinlock.h         raw_spin_trylock(lock) ? \
raw_spin_trylock   348  include/linux/spinlock.h         return raw_spin_trylock(&lock->rlock);
raw_spin_trylock   564  kernel/debug/debug_core.c        if (raw_spin_trylock(&dbg_master_lock))
raw_spin_trylock   579  kernel/debug/debug_core.c        if (raw_spin_trylock(&dbg_master_lock)) {
raw_spin_trylock   586  kernel/locking/rtmutex.c         if (!raw_spin_trylock(&lock->wait_lock)) {
raw_spin_trylock   367  kernel/printk/printk_safe.c      raw_spin_trylock(&logbuf_lock)) {
raw_spin_trylock   409  kernel/rcu/rcu.h                 bool ___locked = raw_spin_trylock(&ACCESS_PRIVATE(p, lock)); \
raw_spin_trylock  2316  kernel/rcu/tree.c                !raw_spin_trylock(&rnp->fqslock);
raw_spin_trylock  1516  kernel/rcu/tree_plugin.h         if (raw_spin_trylock(&rdp->nocb_bypass_lock))
raw_spin_trylock  1550  kernel/rcu/tree_plugin.h         return raw_spin_trylock(&rdp->nocb_bypass_lock);
raw_spin_trylock  2033  kernel/sched/sched.h             if (unlikely(!raw_spin_trylock(&busiest->lock))) {
raw_spin_trylock  4053  kernel/trace/ring_buffer.c       if (raw_spin_trylock(&cpu_buffer->reader_lock))
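
The call sites above fall into two recurring patterns: spinning in a loop until the lock is finally acquired (as in arch/x86/kernel/nmi.c) and taking the lock only opportunistically, skipping the protected work when it is contended (as in kernel/trace/ring_buffer.c and kernel/rcu/tree_plugin.h). The following is a minimal sketch of both patterns; it is not taken from the kernel tree, and the names example_lock, example_event_count, example_spin_until_locked, and example_try_once are hypothetical.

	/* Hypothetical example, kernel style; not from the listing above. */
	#include <linux/spinlock.h>
	#include <asm/processor.h>	/* cpu_relax() */

	static DEFINE_RAW_SPINLOCK(example_lock);
	static unsigned long example_event_count;

	/*
	 * Pattern 1: retry until the lock is acquired, as the NMI handler in
	 * arch/x86/kernel/nmi.c does. Suitable for contexts that cannot sleep.
	 */
	static void example_spin_until_locked(void)
	{
		while (!raw_spin_trylock(&example_lock))
			cpu_relax();	/* back off briefly between attempts */

		example_event_count++;
		raw_spin_unlock(&example_lock);
	}

	/*
	 * Pattern 2: take the lock only if it is currently free, as in
	 * kernel/trace/ring_buffer.c; report failure instead of waiting.
	 */
	static bool example_try_once(void)
	{
		if (!raw_spin_trylock(&example_lock))
			return false;	/* contended; caller decides what to do */

		example_event_count++;
		raw_spin_unlock(&example_lock);
		return true;
	}

The difference from raw_spin_lock() is that raw_spin_trylock() never waits: it returns nonzero if the lock was acquired and zero otherwise, which is why every call in the listing appears inside a conditional or a loop.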