Home
last modified time | relevance | path

Searched refs:queued_spin_unlock (Results 1 – 9 of 9) sorted by relevance

/linux-4.4.14/arch/x86/include/asm/
qspinlock.h:8 — #define queued_spin_unlock queued_spin_unlock (macro)
qspinlock.h:31 — static inline void queued_spin_unlock(struct qspinlock *lock) (function)
qspinlock.h:36 — static inline void queued_spin_unlock(struct qspinlock *lock) (function)
paravirt_types.h:344 — struct paravirt_callee_save queued_spin_unlock; (member)
paravirt.h:701 — PVOP_VCALLEE1(pv_lock_ops.queued_spin_unlock, lock); (in pv_queued_spin_unlock())
/linux-4.4.14/include/asm-generic/
qspinlock.h:109 — #ifndef queued_spin_unlock
qspinlock.h:114 — static __always_inline void queued_spin_unlock(struct qspinlock *lock) (function)
qspinlock.h:160 — #define arch_spin_unlock(l) queued_spin_unlock(l)
/linux-4.4.14/arch/x86/kernel/
paravirt-spinlocks.c:21 — return pv_lock_ops.queued_spin_unlock.func == (in pv_is_native_spin_unlock())
paravirt-spinlocks.c:30 — .queued_spin_unlock = PV_CALLEE_SAVE(__native_queued_spin_unlock),
paravirt_patch_32.c:15 — DEF_NATIVE(pv_lock_ops, queued_spin_unlock, "movb $0, (%eax)");
paravirt_patch_32.c:55 — case PARAVIRT_PATCH(pv_lock_ops.queued_spin_unlock): (in native_patch())
paravirt_patch_64.c:25 — DEF_NATIVE(pv_lock_ops, queued_spin_unlock, "movb $0, (%rdi)");
paravirt_patch_64.c:68 — case PARAVIRT_PATCH(pv_lock_ops.queued_spin_unlock): (in native_patch())
kvm.c:869 — pv_lock_ops.queued_spin_unlock = PV_CALLEE_SAVE(__pv_queued_spin_unlock); (in kvm_spinlock_init())
/linux-4.4.14/arch/x86/xen/
spinlock.c:334 — pv_lock_ops.queued_spin_unlock = PV_CALLEE_SAVE(__pv_queued_spin_unlock); (in xen_init_spinlocks())