Searched refs:queued_spin_unlock (Results 1 - 10 of 10) sorted by relevance

/linux-4.4.14/arch/x86/include/asm/
qspinlock.h
    8    #define queued_spin_unlock queued_spin_unlock    [macro]
    10   * queued_spin_unlock - release a queued spinlock
    31   static inline void queued_spin_unlock(struct qspinlock *lock)    [function definition]
    36   static inline void queued_spin_unlock(struct qspinlock *lock)    [function definition]
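The x86 header supplies its own queued_spin_unlock (hence the #define guard at line 8, which suppresses the asm-generic fallback further down): on x86 releasing the lock is just a release-ordered store of 0 to the locked byte. A minimal user-space sketch of that idea follows, using C11 atomics instead of the kernel's smp_store_release(); struct qspinlock_sketch and queued_spin_unlock_sketch are made-up names, not kernel code.

    #include <stdatomic.h>
    #include <stdint.h>

    /* Stand-in for struct qspinlock: on little-endian x86 the locked flag
     * lives in the least-significant byte of the 32-bit lock word. */
    struct qspinlock_sketch {
            union {
                    _Atomic uint32_t val;      /* full lock word: locked/pending/tail */
                    _Atomic uint8_t  locked;   /* low byte: 1 = held, 0 = free */
            };
    };

    /* Fast-path unlock: a single release-ordered store of 0 to the locked
     * byte, roughly what the native x86 unlock boils down to (and what the
     * "movb $0, ..." paravirt patches below emit directly). */
    static inline void queued_spin_unlock_sketch(struct qspinlock_sketch *lock)
    {
            atomic_store_explicit(&lock->locked, 0, memory_order_release);
    }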
paravirt.h
    701  PVOP_VCALLEE1(pv_lock_ops.queued_spin_unlock, lock);    [in pv_queued_spin_unlock()]
paravirt_types.h
    344  struct paravirt_callee_save queued_spin_unlock;    [member of struct pv_lock_ops]
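Under CONFIG_PARAVIRT_SPINLOCKS the unlock is routed through the queued_spin_unlock member of pv_lock_ops, so a hypervisor guest can substitute a slow-path-aware implementation. A rough sketch of that indirection, leaving out the callee-save calling convention handled by PV_CALLEE_SAVE/PVOP_VCALLEE1; all *_sketch names are illustrative:

    struct qspinlock_sketch;                               /* as in the sketch above */

    /* Bare function-pointer table standing in for struct pv_lock_ops. */
    struct pv_lock_ops_sketch {
            void (*queued_spin_unlock)(struct qspinlock_sketch *lock);
    };

    static struct pv_lock_ops_sketch pv_lock_ops_sketch;  /* filled in at boot */

    /* pv_queued_spin_unlock() then reduces to one indirect call. */
    static inline void pv_queued_spin_unlock_sketch(struct qspinlock_sketch *lock)
    {
            pv_lock_ops_sketch.queued_spin_unlock(lock);
    }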
/linux-4.4.14/arch/x86/kernel/
paravirt-spinlocks.c
    21   return pv_lock_ops.queued_spin_unlock.func ==    [in pv_is_native_spin_unlock()]
    30   .queued_spin_unlock = PV_CALLEE_SAVE(__native_queued_spin_unlock),
paravirt_patch_32.c
    15   DEF_NATIVE(pv_lock_ops, queued_spin_unlock, "movb $0, (%eax)");
    55   case PARAVIRT_PATCH(pv_lock_ops.queued_spin_unlock):
paravirt_patch_64.c
    25   DEF_NATIVE(pv_lock_ops, queued_spin_unlock, "movb $0, (%rdi)");
    68   case PARAVIRT_PATCH(pv_lock_ops.queued_spin_unlock):
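The DEF_NATIVE/PARAVIRT_PATCH entries above mean the indirect call does not survive on bare metal: at boot the call site is patched into the native unlock instruction itself, a single byte store through the register holding the lock pointer (movb $0, (%eax) on 32-bit, movb $0, (%rdi) on 64-bit). A hedged illustration of what that one instruction does, written as GCC inline asm rather than the kernel's patching machinery; native_unlock_byte_sketch is a made-up name:

    /* Sketch of the patched-in native unlock on x86-64: one byte store.
     * x86's strong (TSO) ordering is what lets a plain store act as the
     * release here; no explicit fence instruction is needed. */
    static inline void native_unlock_byte_sketch(unsigned char *locked)
    {
            asm volatile("movb $0, %0" : "=m" (*locked) : : "memory");
    }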
kvm.c
    869  pv_lock_ops.queued_spin_unlock = PV_CALLEE_SAVE(__pv_queued_spin_unlock);    [in kvm_spinlock_init()]
/linux-4.4.14/include/asm-generic/
qspinlock.h
    109  #ifndef queued_spin_unlock
    111  * queued_spin_unlock - release a queued spinlock
    114  static __always_inline void queued_spin_unlock(struct qspinlock *lock)    [function definition]
    160  #define arch_spin_unlock(l) queued_spin_unlock(l)
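An architecture that does not define queued_spin_unlock itself falls back to this asm-generic version, and arch_spin_unlock() is simply mapped onto it (line 160). Here the release is done by atomically subtracting the locked value from the lock word rather than storing a byte, which leaves any pending/tail bits set by waiters intact. A C11-flavoured sketch of that approach; _Q_LOCKED_VAL_SKETCH mirrors the kernel's _Q_LOCKED_VAL (the locked byte's value, 1), and the other names are made up:

    #include <stdatomic.h>
    #include <stdint.h>

    #define _Q_LOCKED_VAL_SKETCH 1u    /* stands in for _Q_LOCKED_VAL */

    struct qspinlock_generic_sketch {
            _Atomic uint32_t val;      /* locked byte + pending + tail */
    };

    /* Generic-flavoured unlock: subtract the locked value with release
     * ordering, preserving whatever the queued waiters stored in the
     * upper bits of the lock word. */
    static inline void
    queued_spin_unlock_generic_sketch(struct qspinlock_generic_sketch *lock)
    {
            atomic_fetch_sub_explicit(&lock->val, _Q_LOCKED_VAL_SKETCH,
                                      memory_order_release);
    }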
/linux-4.4.14/arch/x86/xen/
spinlock.c
    334  pv_lock_ops.queued_spin_unlock = PV_CALLEE_SAVE(__pv_queued_spin_unlock);    [in xen_init_spinlocks()]
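The KVM hit above (kvm.c, kvm_spinlock_init) and this Xen one do the same thing: a paravirtualized guest repoints the pv_lock_ops hook at __pv_queued_spin_unlock, whose slow path can wake a vCPU that was put to sleep while waiting for the lock (the kernel/locking/qspinlock_paravirt.h hit below is in that slow-path code). Reusing the pv_lock_ops_sketch names from the earlier sketch, the override is just a boot-time assignment; pv_unlock_sketch and guest_spinlock_init_sketch are hypothetical:

    /* A guest-aware unlock with a waiter-kicking slow path would go here. */
    void pv_unlock_sketch(struct qspinlock_sketch *lock);

    /* Mirrors the shape of kvm_spinlock_init()/xen_init_spinlocks(): swap
     * the native byte-store unlock for the slow-path-aware one. */
    static void guest_spinlock_init_sketch(void)
    {
            pv_lock_ops_sketch.queued_spin_unlock = pv_unlock_sketch;
    }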
/linux-4.4.14/kernel/locking/
qspinlock_paravirt.h
    312  * queued_spin_unlock().

Completed in 218 milliseconds