queued_spin_unlock   17 arch/mips/include/asm/spinlock.h #define	queued_spin_unlock queued_spin_unlock
queued_spin_unlock   77 arch/x86/hyperv/hv_spinlock.c 	pv_ops.lock.queued_spin_unlock = PV_CALLEE_SAVE(__pv_queued_spin_unlock);
queued_spin_unlock  647 arch/x86/include/asm/paravirt.h 	PVOP_VCALLEE1(lock.queued_spin_unlock, lock);
queued_spin_unlock  315 arch/x86/include/asm/paravirt_types.h 	struct paravirt_callee_save queued_spin_unlock;
queued_spin_unlock   36 arch/x86/include/asm/qspinlock.h #define	queued_spin_unlock queued_spin_unlock
queued_spin_unlock  847 arch/x86/kernel/kvm.c 	pv_ops.lock.queued_spin_unlock =
queued_spin_unlock   20 arch/x86/kernel/paravirt-spinlocks.c 	return pv_ops.lock.queued_spin_unlock.func ==
queued_spin_unlock  441 arch/x86/kernel/paravirt.c 	.lock.queued_spin_unlock	=
queued_spin_unlock   71 arch/x86/kernel/paravirt_patch.c 	unsigned char queued_spin_unlock[3];
queued_spin_unlock   79 arch/x86/kernel/paravirt_patch.c 	.queued_spin_unlock	= { 0xc6, 0x07, 0x00 },	// movb $0, (%rdi)
queued_spin_unlock   81 arch/x86/kernel/paravirt_patch.c 	.queued_spin_unlock	= { 0xc6, 0x00, 0x00 },	// movb $0, (%eax)
queued_spin_unlock  111 arch/x86/kernel/paravirt_patch.c 	case PARAVIRT_PATCH(lock.queued_spin_unlock):
queued_spin_unlock  113 arch/x86/kernel/paravirt_patch.c 			return PATCH(lock, queued_spin_unlock, insn_buff, len);
queued_spin_unlock  131 arch/x86/xen/spinlock.c 	pv_ops.lock.queued_spin_unlock =
queued_spin_unlock   84 include/asm-generic/qspinlock.h #ifndef queued_spin_unlock
queued_spin_unlock  114 include/asm-generic/qspinlock.h #define arch_spin_unlock(l)		queued_spin_unlock(l)
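
Taken together, the hits above trace a single mechanism. include/asm-generic/qspinlock.h supplies the generic queued_spin_unlock() only when no arch header has defined the macro first (the #ifndef at line 84), which is how arch/mips and arch/x86 substitute their own versions by defining the macro to its own name. On x86 the substitute dispatches through the pv_ops.lock.queued_spin_unlock slot declared in paravirt_types.h and called via PVOP_VCALLEE1 in paravirt.h; KVM, Xen and Hyper-V reassign that slot to __pv_queued_spin_unlock at boot, and when running natively paravirt_patch.c rewrites the call site into a bare 3-byte "movb $0, (%rdi)" (or "movb $0, (%eax)" on 32-bit), i.e. the unlock store itself.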
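The standalone C11 miniature below sketches that shape outside the kernel, under simplified illustrative types; pv_unlock_entry, pv_lock_ops and the struct layout are stand-in names for this sketch, not the kernel's own definitions.

	#include <stdatomic.h>
	#include <stdio.h>

	struct qspinlock { atomic_uchar locked; };

	static void native_queued_spin_unlock(struct qspinlock *lock)
	{
		/* The native unlock is a single release store of zero to the
		 * locked byte; the 3-byte "movb $0, (%rdi)" patch listed above
		 * is exactly this store, inlined at the call site. */
		atomic_store_explicit(&lock->locked, 0, memory_order_release);
	}

	static void pv_queued_spin_unlock(struct qspinlock *lock)
	{
		atomic_store_explicit(&lock->locked, 0, memory_order_release);
		puts("pv unlock: a real slow path would kick a halted vCPU here");
	}

	/* pv_ops analogue (paravirt_types.h:315): one slot for the unlock,
	 * defaulting to the native implementation (paravirt.c:441). */
	static struct {
		void (*queued_spin_unlock)(struct qspinlock *lock);
	} pv_lock_ops = { .queued_spin_unlock = native_queued_spin_unlock };

	/* Dispatch through the table, as PVOP_VCALLEE1 does (paravirt.h:647). */
	static inline void pv_unlock_entry(struct qspinlock *lock)
	{
		pv_lock_ops.queued_spin_unlock(lock);
	}

	/* Override hook (asm-generic/qspinlock.h:84): because the macro is
	 * already defined here, the generic fallback below drops out, just
	 * as it does when the MIPS or x86 headers define it first. */
	#define queued_spin_unlock(l)	pv_unlock_entry(l)

	#ifndef queued_spin_unlock
	static inline void queued_spin_unlock(struct qspinlock *lock)
	{
		atomic_store_explicit(&lock->locked, 0, memory_order_release);
	}
	#endif

	#define arch_spin_unlock(l)	queued_spin_unlock(l)

	int main(void)
	{
		struct qspinlock lock;

		atomic_init(&lock.locked, 1);	/* start held */

		/* Boot-time reassignment, as in kvm.c:847, xen/spinlock.c:131
		 * and hv_spinlock.c:77. */
		pv_lock_ops.queued_spin_unlock = pv_queued_spin_unlock;

		arch_spin_unlock(&lock);
		return (int)atomic_load(&lock.locked);	/* 0 == unlocked */
	}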
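
The reason a one-byte store suffices as a complete native unlock is that the qspinlock holder owns the locked byte outright, so releasing it needs only release semantics; this is also why the check in paravirt-spinlocks.c:20 can detect "native unlock still installed" by comparing the slot against the callee-saved native function, and why patching the call down to the bare store is safe only in that case.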