_Q_LOCKED_VAL  100  arch/x86/include/asm/qspinlock.h         } while (atomic_cmpxchg(&lock->val, 0, _Q_LOCKED_VAL) != 0);
_Q_LOCKED_VAL   65  include/asm-generic/qspinlock.h          return likely(atomic_try_cmpxchg_acquire(&lock->val, &val, _Q_LOCKED_VAL));
_Q_LOCKED_VAL   78  include/asm-generic/qspinlock.h          if (likely(atomic_try_cmpxchg_acquire(&lock->val, &val, _Q_LOCKED_VAL)))
_Q_LOCKED_VAL  162  kernel/locking/qspinlock.c               WRITE_ONCE(lock->locked_pending, _Q_LOCKED_VAL);
_Q_LOCKED_VAL  206  kernel/locking/qspinlock.c               atomic_add(-_Q_PENDING_VAL + _Q_LOCKED_VAL, &lock->val);
_Q_LOCKED_VAL  262  kernel/locking/qspinlock.c               WRITE_ONCE(lock->locked, _Q_LOCKED_VAL);
_Q_LOCKED_VAL  535  kernel/locking/qspinlock.c               if (atomic_try_cmpxchg_relaxed(&lock->val, &val, _Q_LOCKED_VAL))
_Q_LOCKED_VAL   91  kernel/locking/qspinlock_paravirt.h      (cmpxchg_acquire(&lock->locked, 0, _Q_LOCKED_VAL) == 0)) {
_Q_LOCKED_VAL  123  kernel/locking/qspinlock_paravirt.h      _Q_LOCKED_VAL) == _Q_PENDING_VAL);
_Q_LOCKED_VAL  145  kernel/locking/qspinlock_paravirt.h      new = (val & ~_Q_PENDING_MASK) | _Q_LOCKED_VAL;
_Q_LOCKED_VAL  462  kernel/locking/qspinlock_paravirt.h      WRITE_ONCE(lock->locked, _Q_LOCKED_VAL);
_Q_LOCKED_VAL  485  kernel/locking/qspinlock_paravirt.h      return (u32)(atomic_read(&lock->val) | _Q_LOCKED_VAL);
_Q_LOCKED_VAL  556  kernel/locking/qspinlock_paravirt.h      locked = cmpxchg_release(&lock->locked, _Q_LOCKED_VAL, 0);
_Q_LOCKED_VAL  557  kernel/locking/qspinlock_paravirt.h      if (likely(locked == _Q_LOCKED_VAL))
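The hits above cover the whole life cycle of the locked byte: the uncontended acquire via a 0 -> _Q_LOCKED_VAL cmpxchg (include/asm-generic/qspinlock.h), the pending-to-locked and queue-head handoffs inside the slow path (kernel/locking/qspinlock.c), and the paravirt variants that hand the lock over or release it with cmpxchg_release (kernel/locking/qspinlock_paravirt.h). As a rough illustration of the fast path those lines implement, here is a minimal sketch using C11 <stdatomic.h> rather than the kernel's atomic_t helpers. The struct layout, the function names (qspinlock_trylock, qspinlock_lock, qspinlock_unlock) and the literal value 1 for _Q_LOCKED_VAL are simplifying assumptions for illustration; the slow path (MCS queueing, pending bit) is omitted entirely.

/*
 * Sketch only: mirrors the shape of the qspinlock fast path listed above,
 * not the kernel implementation.  Assumes _Q_LOCKED_VAL is the lowest byte
 * of a 32-bit lock word.
 */
#include <stdatomic.h>
#include <stdbool.h>
#include <stdint.h>

#define _Q_LOCKED_VAL 1U    /* assumed value: "locked" byte of lock->val */

struct qspinlock {
	_Atomic uint32_t val;   /* locked byte | pending bit | waiter tail */
};

/* Acquire only if the whole word is 0 (no owner, no pending, no waiters),
 * as in atomic_try_cmpxchg_acquire(&lock->val, &val, _Q_LOCKED_VAL). */
static bool qspinlock_trylock(struct qspinlock *lock)
{
	uint32_t expected = 0;

	return atomic_compare_exchange_strong_explicit(&lock->val, &expected,
						       _Q_LOCKED_VAL,
						       memory_order_acquire,
						       memory_order_relaxed);
}

/* Fast path of the lock operation; the real code falls back to
 * queued_spin_lock_slowpath() instead of spinning here. */
static void qspinlock_lock(struct qspinlock *lock)
{
	while (!qspinlock_trylock(lock))
		;   /* placeholder for the queued slow path */
}

/* Release: drop the locked byte with release ordering (simplified stand-in
 * for the kernel's clearing of lock->locked). */
static void qspinlock_unlock(struct qspinlock *lock)
{
	atomic_fetch_sub_explicit(&lock->val, _Q_LOCKED_VAL,
				  memory_order_release);
}

The key design point the listing reflects is that acquisition always installs _Q_LOCKED_VAL with acquire semantics (cmpxchg from 0 on the fast path, WRITE_ONCE/atomic_add once a waiter has won the handoff in the slow path), while release only ever clears that byte, so the pending bit and waiter tail stored in the upper bits survive unlock.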