VCPU_SREG_TR 1674 arch/x86/kvm/emulate.c if (seg == VCPU_SREG_TR && (selector & (1 << 2))) VCPU_SREG_TR 1679 arch/x86/kvm/emulate.c if (seg == VCPU_SREG_CS || seg == VCPU_SREG_TR) VCPU_SREG_TR 1758 arch/x86/kvm/emulate.c case VCPU_SREG_TR: VCPU_SREG_TR 2517 arch/x86/kvm/emulate.c ctxt->ops->set_segment(ctxt, selector, &desc, 0, VCPU_SREG_TR); VCPU_SREG_TR 2580 arch/x86/kvm/emulate.c ctxt->ops->set_segment(ctxt, selector, &desc, base3, VCPU_SREG_TR); VCPU_SREG_TR 2989 arch/x86/kvm/emulate.c ops->get_segment(ctxt, &tr, &tr_seg, &base3, VCPU_SREG_TR); VCPU_SREG_TR 3313 arch/x86/kvm/emulate.c u16 old_tss_sel = get_segment_selector(ctxt, VCPU_SREG_TR); VCPU_SREG_TR 3315 arch/x86/kvm/emulate.c ops->get_cached_segment_base(ctxt, VCPU_SREG_TR); VCPU_SREG_TR 3393 arch/x86/kvm/emulate.c ops->set_segment(ctxt, tss_selector, &next_tss_desc, 0, VCPU_SREG_TR); VCPU_SREG_TR 3796 arch/x86/kvm/emulate.c return em_store_sreg(ctxt, VCPU_SREG_TR); VCPU_SREG_TR 3805 arch/x86/kvm/emulate.c return load_segment_descriptor(ctxt, sel, VCPU_SREG_TR); VCPU_SREG_TR 2444 arch/x86/kvm/svm.c case VCPU_SREG_TR: return &save->tr; VCPU_SREG_TR 2491 arch/x86/kvm/svm.c case VCPU_SREG_TR: VCPU_SREG_TR 3924 arch/x86/kvm/vmx/nested.c vmx_set_segment(vcpu, &seg, VCPU_SREG_TR); VCPU_SREG_TR 2663 arch/x86/kvm/vmx/vmx.c vmx_set_segment(vcpu, &vmx->rmode.segs[VCPU_SREG_TR], VCPU_SREG_TR); VCPU_SREG_TR 2722 arch/x86/kvm/vmx/vmx.c vmx_get_segment(vcpu, &vmx->rmode.segs[VCPU_SREG_TR], VCPU_SREG_TR); VCPU_SREG_TR 3086 arch/x86/kvm/vmx/vmx.c if (seg == VCPU_SREG_TR VCPU_SREG_TR 3167 arch/x86/kvm/vmx/vmx.c if (seg == VCPU_SREG_TR) VCPU_SREG_TR 3330 arch/x86/kvm/vmx/vmx.c vmx_get_segment(vcpu, &tr, VCPU_SREG_TR); VCPU_SREG_TR 7771 arch/x86/kvm/x86.c kvm_get_segment(vcpu, &seg, VCPU_SREG_TR); VCPU_SREG_TR 7831 arch/x86/kvm/x86.c kvm_get_segment(vcpu, &seg, VCPU_SREG_TR); VCPU_SREG_TR 8733 arch/x86/kvm/x86.c kvm_get_segment(vcpu, &sregs->tr, VCPU_SREG_TR); VCPU_SREG_TR 8934 arch/x86/kvm/x86.c kvm_set_segment(vcpu, &sregs->tr, VCPU_SREG_TR);