VCPU_SREG_LDTR   1572 arch/x86/kvm/emulate.c 				      VCPU_SREG_LDTR))
VCPU_SREG_LDTR   1768 arch/x86/kvm/emulate.c 	case VCPU_SREG_LDTR:
VCPU_SREG_LDTR   2523 arch/x86/kvm/emulate.c 	ctxt->ops->set_segment(ctxt, selector, &desc, 0, VCPU_SREG_LDTR);
VCPU_SREG_LDTR   2591 arch/x86/kvm/emulate.c 	ctxt->ops->set_segment(ctxt, selector, &desc, base3, VCPU_SREG_LDTR);
VCPU_SREG_LDTR   3068 arch/x86/kvm/emulate.c 	tss->ldt = get_segment_selector(ctxt, VCPU_SREG_LDTR);
VCPU_SREG_LDTR   3092 arch/x86/kvm/emulate.c 	set_segment_selector(ctxt, tss->ldt, VCPU_SREG_LDTR);
VCPU_SREG_LDTR   3104 arch/x86/kvm/emulate.c 	ret = __load_segment_descriptor(ctxt, tss->ldt, VCPU_SREG_LDTR, cpl,
VCPU_SREG_LDTR   3212 arch/x86/kvm/emulate.c 	set_segment_selector(ctxt, tss->ldt_selector, VCPU_SREG_LDTR);
VCPU_SREG_LDTR   3237 arch/x86/kvm/emulate.c 	ret = __load_segment_descriptor(ctxt, tss->ldt_selector, VCPU_SREG_LDTR,
VCPU_SREG_LDTR   3782 arch/x86/kvm/emulate.c 	return em_store_sreg(ctxt, VCPU_SREG_LDTR);
VCPU_SREG_LDTR   3791 arch/x86/kvm/emulate.c 	return load_segment_descriptor(ctxt, sel, VCPU_SREG_LDTR);
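
The emulate.c hits above fall into three groups: loading a new LDTR descriptor during far control transfers (2523, 2591), saving and restoring the LDT selector across a hardware task switch (3068-3237), and the SLDT/LLDT instructions themselves (3782, 3791, with the privilege check at 1572 and the descriptor-table selection at 1768). VCPU_SREG_LDTR itself is the last entry of the segment-register enum in arch/x86/include/asm/kvm_host.h. As a concrete example, the LLDT call at 3791 sits in a handler shaped like the sketch below; this follows the usual emulate.c pattern, but is paraphrased rather than quoted from the tree:

	static int em_lldt(struct x86_emulate_ctxt *ctxt)
	{
		u16 sel = ctxt->src.val;

		/* LLDT writes no destination operand; disable writeback. */
		ctxt->dst.type = OP_NONE;
		return load_segment_descriptor(ctxt, sel, VCPU_SREG_LDTR);
	}
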
VCPU_SREG_LDTR   2445 arch/x86/kvm/svm.c 	case VCPU_SREG_LDTR: return &save->ldtr;
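
On SVM, the hit at 2445 is one arm of the helper that maps a VCPU_SREG_* index onto the matching segment field of the VMCB save area, where the hardware keeps LDTR state directly. A sketch of that mapping, assuming the conventional svm_seg() shape; the neighbouring arms are reconstructed from memory, not quoted:

	static struct vmcb_seg *svm_seg(struct kvm_vcpu *vcpu, int seg)
	{
		struct vmcb_save_area *save = &to_svm(vcpu)->vmcb->save;

		switch (seg) {
		case VCPU_SREG_CS:   return &save->cs;
		case VCPU_SREG_DS:   return &save->ds;
		case VCPU_SREG_ES:   return &save->es;
		case VCPU_SREG_FS:   return &save->fs;
		case VCPU_SREG_GS:   return &save->gs;
		case VCPU_SREG_SS:   return &save->ss;
		case VCPU_SREG_TR:   return &save->tr;
		case VCPU_SREG_LDTR: return &save->ldtr;	/* hit at 2445 */
		}
		BUG();
		return NULL;
	}
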
VCPU_SREG_LDTR   3084 arch/x86/kvm/vmx/vmx.c 	if (vmx->rmode.vm86_active && seg != VCPU_SREG_LDTR) {
VCPU_SREG_LDTR   3165 arch/x86/kvm/vmx/vmx.c 	if (vmx->rmode.vm86_active && seg != VCPU_SREG_LDTR) {
VCPU_SREG_LDTR   3189 arch/x86/kvm/vmx/vmx.c 	if (enable_unrestricted_guest && (seg != VCPU_SREG_LDTR))
VCPU_SREG_LDTR   3348 arch/x86/kvm/vmx/vmx.c 	vmx_get_segment(vcpu, &ldtr, VCPU_SREG_LDTR);
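
The vmx.c hits all treat LDTR as the one register that never needs VMX-specific fixups: in vm86-based real-mode emulation (rmode.vm86_active, 3084 and 3165) every segment except LDTR is shadowed by a software cache, because what is actually loaded into the VMCS are the vm86 workarounds rather than the guest's own state; with unrestricted guest enabled (3189) the accessed bit is forced on for every segment except LDTR on the set path; and 3348 reads LDTR back as part of the guest-state validity checks. A simplified sketch of the get path, assuming the vmx_read_guest_seg_* accessors used elsewhere in vmx.c and omitting the TR corner case the real code also handles:

	static void vmx_get_segment(struct kvm_vcpu *vcpu,
				    struct kvm_segment *var, int seg)
	{
		struct vcpu_vmx *vmx = to_vmx(vcpu);
		u32 ar;

		/* vm86 real mode shadows everything but LDTR in software. */
		if (vmx->rmode.vm86_active && seg != VCPU_SREG_LDTR) {
			*var = vmx->rmode.segs[seg];
			return;
		}

		/*
		 * LDTR (and every segment outside vm86 mode) comes straight
		 * from the VMCS guest-state area; the access-rights word is
		 * decoded into the generic kvm_segment bit fields.
		 */
		var->base = vmx_read_guest_seg_base(vmx, seg);
		var->limit = vmx_read_guest_seg_limit(vmx, seg);
		var->selector = vmx_read_guest_seg_selector(vmx, seg);
		ar = vmx_read_guest_seg_ar(vmx, seg);
		var->unusable = (ar >> 16) & 1;
		var->type = ar & 15;
		var->s = (ar >> 4) & 1;
		var->dpl = (ar >> 5) & 3;
		var->present = !var->unusable;
		var->avl = (ar >> 12) & 1;
		var->l = (ar >> 13) & 1;
		var->db = (ar >> 14) & 1;
		var->g = (ar >> 15) & 1;
	}
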
VCPU_SREG_LDTR   7777 arch/x86/kvm/x86.c 	kvm_get_segment(vcpu, &seg, VCPU_SREG_LDTR);
VCPU_SREG_LDTR   7841 arch/x86/kvm/x86.c 	kvm_get_segment(vcpu, &seg, VCPU_SREG_LDTR);
VCPU_SREG_LDTR   8734 arch/x86/kvm/x86.c 	kvm_get_segment(vcpu, &sregs->ldt, VCPU_SREG_LDTR);
VCPU_SREG_LDTR   8935 arch/x86/kvm/x86.c 	kvm_set_segment(vcpu, &sregs->ldt, VCPU_SREG_LDTR);
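
The x86.c hits are the generic plumbing: 7777 and 7841 save LDTR into the SMM state-save image (the 32-bit and 64-bit layouts respectively, inferred from context), while 8734 and 8935 are the KVM_GET_SREGS/KVM_SET_SREGS paths that copy LDTR to and from the ldt field of struct kvm_sregs. From userspace the same state is reachable through the vCPU ioctl; a minimal sketch, assuming vcpu_fd was already obtained via KVM_CREATE_VM and KVM_CREATE_VCPU:

	#include <linux/kvm.h>
	#include <stdio.h>
	#include <sys/ioctl.h>

	/*
	 * Print the guest LDTR as filled in on the kernel side by
	 * kvm_get_segment(vcpu, &sregs->ldt, VCPU_SREG_LDTR) (hit at 8734).
	 */
	static void dump_ldtr(int vcpu_fd)
	{
		struct kvm_sregs sregs;

		if (ioctl(vcpu_fd, KVM_GET_SREGS, &sregs) < 0) {
			perror("KVM_GET_SREGS");
			return;
		}
		printf("LDTR: sel=%#x base=%#llx limit=%#x type=%u present=%u\n",
		       sregs.ldt.selector,
		       (unsigned long long)sregs.ldt.base,
		       sregs.ldt.limit, sregs.ldt.type, sregs.ldt.present);
	}
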