Lines matching refs:rmode (references to the rmode real-mode/vm86 emulation state in KVM's VMX code)
566 } rmode; member
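
The reference at line 566 is the closing brace of an anonymous struct member of struct vcpu_vmx. Pieced together from the fields used in the matches below, a hedged reconstruction looks roughly like this (the real definition may carry additional fields):

    /* Hedged reconstruction of the rmode member of struct vcpu_vmx; field
     * names are the ones visible in the references below, the exact layout
     * in the kernel may differ. */
    struct {
        int vm86_active;              /* non-zero while real mode is emulated as a vm86 task */
        unsigned long save_rflags;    /* guest-visible IOPL/VM bits shadowed out of RFLAGS */
        struct kvm_segment segs[8];   /* guest segment registers cached during vm86 emulation */
    } rmode;
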
1649 if (to_vmx(vcpu)->rmode.vm86_active) in update_exception_bitmap()
2193 if (to_vmx(vcpu)->rmode.vm86_active) { in vmx_get_rflags()
2195 save_rflags = to_vmx(vcpu)->rmode.save_rflags; in vmx_get_rflags()
2207 if (to_vmx(vcpu)->rmode.vm86_active) { in vmx_set_rflags()
2208 to_vmx(vcpu)->rmode.save_rflags = rflags; in vmx_set_rflags()
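
The matches at 2193-2208 show the RFLAGS shadowing: while vm86_active is set, the VMCS has to hold RFLAGS with VM=1 and IOPL=3 so the vm86 task can execute, so the guest-visible IOPL/VM bits live in rmode.save_rflags instead. A minimal self-contained sketch of that split, reusing the RMODE_GUEST_OWNED_EFLAGS_BITS name seen at line 3457 (its definition here, and the rmode_get/set_rflags helpers, are my own stand-ins, not the kernel functions):

    #define X86_EFLAGS_VM   (1UL << 17)
    #define X86_EFLAGS_IOPL (3UL << 12)
    /* Bits the guest still owns while vm86 emulation is active; IOPL and VM
     * are forced by the host and shadowed in save_rflags instead. */
    #define RMODE_GUEST_OWNED_EFLAGS_BITS (~(X86_EFLAGS_IOPL | X86_EFLAGS_VM))

    struct rmode_state {
        int vm86_active;
        unsigned long save_rflags;
    };

    /* Read path (cf. lines 2193-2195): merge hardware RFLAGS with the shadow. */
    static unsigned long rmode_get_rflags(const struct rmode_state *rm,
                                          unsigned long hw_rflags)
    {
        if (rm->vm86_active) {
            hw_rflags &= RMODE_GUEST_OWNED_EFLAGS_BITS;
            hw_rflags |= rm->save_rflags & ~RMODE_GUEST_OWNED_EFLAGS_BITS;
        }
        return hw_rflags;
    }

    /* Write path (cf. lines 2207-2208): remember the guest's value, then force
     * VM=1 and IOPL=3 in what actually reaches the VMCS. */
    static unsigned long rmode_set_rflags(struct rmode_state *rm,
                                          unsigned long rflags)
    {
        if (rm->vm86_active) {
            rm->save_rflags = rflags;
            rflags |= X86_EFLAGS_IOPL | X86_EFLAGS_VM;
        }
        return rflags;
    }
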
2288 if (vmx->rmode.vm86_active) { in vmx_queue_exception()
3442 vmx_get_segment(vcpu, &vmx->rmode.segs[VCPU_SREG_ES], VCPU_SREG_ES); in enter_pmode()
3443 vmx_get_segment(vcpu, &vmx->rmode.segs[VCPU_SREG_DS], VCPU_SREG_DS); in enter_pmode()
3444 vmx_get_segment(vcpu, &vmx->rmode.segs[VCPU_SREG_FS], VCPU_SREG_FS); in enter_pmode()
3445 vmx_get_segment(vcpu, &vmx->rmode.segs[VCPU_SREG_GS], VCPU_SREG_GS); in enter_pmode()
3446 vmx_get_segment(vcpu, &vmx->rmode.segs[VCPU_SREG_SS], VCPU_SREG_SS); in enter_pmode()
3447 vmx_get_segment(vcpu, &vmx->rmode.segs[VCPU_SREG_CS], VCPU_SREG_CS); in enter_pmode()
3449 vmx->rmode.vm86_active = 0; in enter_pmode()
3453 vmx_set_segment(vcpu, &vmx->rmode.segs[VCPU_SREG_TR], VCPU_SREG_TR); in enter_pmode()
3457 flags |= vmx->rmode.save_rflags & ~RMODE_GUEST_OWNED_EFLAGS_BITS; in enter_pmode()
3465 fix_pmode_seg(vcpu, VCPU_SREG_CS, &vmx->rmode.segs[VCPU_SREG_CS]); in enter_pmode()
3466 fix_pmode_seg(vcpu, VCPU_SREG_SS, &vmx->rmode.segs[VCPU_SREG_SS]); in enter_pmode()
3467 fix_pmode_seg(vcpu, VCPU_SREG_ES, &vmx->rmode.segs[VCPU_SREG_ES]); in enter_pmode()
3468 fix_pmode_seg(vcpu, VCPU_SREG_DS, &vmx->rmode.segs[VCPU_SREG_DS]); in enter_pmode()
3469 fix_pmode_seg(vcpu, VCPU_SREG_FS, &vmx->rmode.segs[VCPU_SREG_FS]); in enter_pmode()
3470 fix_pmode_seg(vcpu, VCPU_SREG_GS, &vmx->rmode.segs[VCPU_SREG_GS]); in enter_pmode()
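
Lines 3442-3470 are one half of the mode switch: enter_pmode() latches the current segment registers into rmode.segs, clears vm86_active, restores the shadowed IOPL/VM bits into RFLAGS (line 3457), and then reinstalls each cached segment through fix_pmode_seg(). The essential fixup, as far as it can be reconstructed from this listing, is making the cached descriptor legal protected-mode state again; a hedged sketch with a reduced stand-in struct:

    #define SEGMENT_RPL_MASK 0x3        /* low two selector bits = requested privilege level */

    /* Stand-in for struct kvm_segment, reduced to what this fixup touches. */
    struct pmode_seg_sketch {
        unsigned short selector;
        unsigned char dpl;
        unsigned char s;
    };

    /* Hedged sketch of a fix_pmode_seg()-style cleanup: after vm86 emulation the
     * cached descriptor still carries real-mode attributes, so force it back to
     * an ordinary code/data segment whose DPL follows the selector RPL. */
    static void sketch_fix_pmode_seg(struct pmode_seg_sketch *seg)
    {
        seg->dpl = seg->selector & SEGMENT_RPL_MASK;
        seg->s = 1;                     /* code/data segment, not a system segment */
    }
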
3511 vmx_get_segment(vcpu, &vmx->rmode.segs[VCPU_SREG_TR], VCPU_SREG_TR); in enter_rmode()
3512 vmx_get_segment(vcpu, &vmx->rmode.segs[VCPU_SREG_ES], VCPU_SREG_ES); in enter_rmode()
3513 vmx_get_segment(vcpu, &vmx->rmode.segs[VCPU_SREG_DS], VCPU_SREG_DS); in enter_rmode()
3514 vmx_get_segment(vcpu, &vmx->rmode.segs[VCPU_SREG_FS], VCPU_SREG_FS); in enter_rmode()
3515 vmx_get_segment(vcpu, &vmx->rmode.segs[VCPU_SREG_GS], VCPU_SREG_GS); in enter_rmode()
3516 vmx_get_segment(vcpu, &vmx->rmode.segs[VCPU_SREG_SS], VCPU_SREG_SS); in enter_rmode()
3517 vmx_get_segment(vcpu, &vmx->rmode.segs[VCPU_SREG_CS], VCPU_SREG_CS); in enter_rmode()
3519 vmx->rmode.vm86_active = 1; in enter_rmode()
3536 vmx->rmode.save_rflags = flags; in enter_rmode()
3544 fix_rmode_seg(VCPU_SREG_SS, &vmx->rmode.segs[VCPU_SREG_SS]); in enter_rmode()
3545 fix_rmode_seg(VCPU_SREG_CS, &vmx->rmode.segs[VCPU_SREG_CS]); in enter_rmode()
3546 fix_rmode_seg(VCPU_SREG_ES, &vmx->rmode.segs[VCPU_SREG_ES]); in enter_rmode()
3547 fix_rmode_seg(VCPU_SREG_DS, &vmx->rmode.segs[VCPU_SREG_DS]); in enter_rmode()
3548 fix_rmode_seg(VCPU_SREG_GS, &vmx->rmode.segs[VCPU_SREG_GS]); in enter_rmode()
3549 fix_rmode_seg(VCPU_SREG_FS, &vmx->rmode.segs[VCPU_SREG_FS]); in enter_rmode()
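
Lines 3511-3549 are the mirror image: enter_rmode() stashes the current protected-mode segments into rmode.segs, sets vm86_active, shadows RFLAGS as above (line 3536), and then rewrites each segment into something a vm86 task can legally run with. A self-contained sketch of what a fix_rmode_seg()-style rewrite amounts to (the struct is a stand-in for struct kvm_segment, and the exact attribute choices are reconstructed from the virtual-8086 guest-state rules, not copied from the kernel):

    /* Stand-in for struct kvm_segment, limited to the fields the fixup touches. */
    struct rmode_seg_sketch {
        unsigned long base;
        unsigned int limit;
        unsigned short selector;
        unsigned char type, dpl, present, s, g, db, unusable;
    };

    /* Rewrite a cached segment into vm86-compatible form: 64KiB, byte-granular,
     * ring-3, present, type 3 (the access rights virtual-8086 guest state
     * requires), with selector and base tied together by base = selector << 4. */
    static void sketch_fix_rmode_seg(struct rmode_seg_sketch *var)
    {
        var->selector = var->base >> 4;
        var->base     = var->base & 0xffff0;
        var->limit    = 0xffff;
        var->g        = 0;
        var->db       = 0;
        var->unusable = 0;
        var->type     = 0x3;
        var->present  = 1;
        var->dpl      = 3;
        var->s        = 1;
    }
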
3718 if (vmx->rmode.vm86_active && (cr0 & X86_CR0_PE)) in vmx_set_cr0()
3721 if (!vmx->rmode.vm86_active && !(cr0 & X86_CR0_PE)) in vmx_set_cr0()
3792 (to_vmx(vcpu)->rmode.vm86_active ? in vmx_set_cr4()
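
Lines 3718-3721 are the trigger for the two transitions above: CR0 writes are intercepted, and flipping CR0.PE while the vm86 emulation state disagrees with it calls enter_pmode() or enter_rmode() (this path only matters when the CPU's "unrestricted guest" support is unavailable). A hedged sketch of that decision, with the enter_*() stand-ins reduced to flag updates:

    #define X86_CR0_PE (1UL << 0)

    struct cr0_rmode_sketch {
        int vm86_active;
    };

    /* Hypothetical stand-ins for enter_pmode()/enter_rmode(). */
    static void cr0_leave_rmode(struct cr0_rmode_sketch *rm) { rm->vm86_active = 0; }
    static void cr0_enter_rmode(struct cr0_rmode_sketch *rm) { rm->vm86_active = 1; }

    /* Mirror of the checks at lines 3718/3721: leave vm86 emulation when the
     * guest sets CR0.PE, enter it when the guest clears CR0.PE. */
    static void sketch_set_cr0(struct cr0_rmode_sketch *rm, unsigned long cr0)
    {
        if (rm->vm86_active && (cr0 & X86_CR0_PE))
            cr0_leave_rmode(rm);
        if (!rm->vm86_active && !(cr0 & X86_CR0_PE))
            cr0_enter_rmode(rm);
    }
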
3840 if (vmx->rmode.vm86_active && seg != VCPU_SREG_LDTR) { in vmx_get_segment()
3841 *var = vmx->rmode.segs[seg]; in vmx_get_segment()
3875 if (to_vmx(vcpu)->rmode.vm86_active) { in vmx_get_segment_base()
3886 if (unlikely(vmx->rmode.vm86_active)) in vmx_get_cpl()
3922 if (vmx->rmode.vm86_active && seg != VCPU_SREG_LDTR) { in vmx_set_segment()
3923 vmx->rmode.segs[seg] = *var; in vmx_set_segment()
3927 fix_rmode_seg(seg, &vmx->rmode.segs[seg]); in vmx_set_segment()
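
Lines 3840-3927 show why the rmode.segs cache exists: while vm86_active is set, the VMCS holds the fixed-up vm86 segments, so reads and writes of every segment except LDTR are served from (and recorded into) rmode.segs, with the real-mode fixup applied before anything reaches hardware. A simplified sketch of that indirection; vmcs_read_seg()/vmcs_write_seg()/fixup_rmode_seg() are hypothetical stand-ins (the kernel uses per-field VMCS accessors and special-cases TR), not the real accessors:

    enum { SREG_ES, SREG_CS, SREG_SS, SREG_DS, SREG_FS, SREG_GS, SREG_TR, SREG_LDTR, NR_SREGS };

    struct seg_sketch { unsigned long base; unsigned int limit; unsigned short selector; };

    struct rmode_seg_cache {
        int vm86_active;
        struct seg_sketch segs[NR_SREGS];
    };

    /* Stub VMCS accessors and fixup, standing in for the kernel's versions. */
    static struct seg_sketch vmcs_read_seg(int seg) { struct seg_sketch s = {0}; (void)seg; return s; }
    static void vmcs_write_seg(int seg, const struct seg_sketch *var) { (void)seg; (void)var; }
    static void fixup_rmode_seg(int seg, struct seg_sketch *var) { (void)seg; (void)var; }

    /* Read path (cf. lines 3840-3841): everything but LDTR comes from the cache. */
    static struct seg_sketch sketch_get_segment(struct rmode_seg_cache *rm, int seg)
    {
        if (rm->vm86_active && seg != SREG_LDTR)
            return rm->segs[seg];
        return vmcs_read_seg(seg);
    }

    /* Write path (cf. lines 3922-3927): remember the guest's value, install a
     * vm86-compatible fixup in the VMCS. */
    static void sketch_set_segment(struct rmode_seg_cache *rm, int seg,
                                   const struct seg_sketch *var)
    {
        if (rm->vm86_active && seg != SREG_LDTR) {
            struct seg_sketch fixed = *var;

            rm->segs[seg] = *var;
            fixup_rmode_seg(seg, &fixed);
            vmcs_write_seg(seg, &fixed);
            return;
        }
        vmcs_write_seg(seg, var);
    }
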
4878 vmx->rmode.vm86_active = 0; in vmx_vcpu_reset()
5026 if (vmx->rmode.vm86_active) { in vmx_inject_irq()
5066 if (vmx->rmode.vm86_active) { in vmx_inject_nmi()
5288 if (vmx->rmode.vm86_active && rmode_exception(vcpu, ex_no)) in handle_exception()
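
The remaining matches (5026-5288) cover event delivery: while vm86_active is set, exceptions, external interrupts and NMIs cannot simply be programmed into the VM-entry interruption-info field, because the guest believes it is in real mode, so KVM instead emulates the real-mode IVT dispatch in software (kvm_inject_realmode_interrupt() in the actual code), and handle_exception() re-routes faults that a real-mode guest would expect to handle itself. A schematic of the branch; inject_via_ivt()/inject_via_vmcs() are hypothetical stand-ins:

    /* Hypothetical stand-ins for the two delivery mechanisms. */
    static void inject_via_ivt(int vector)  { (void)vector; /* emulate the real-mode IVT jump */ }
    static void inject_via_vmcs(int vector) { (void)vector; /* program VM_ENTRY_INTR_INFO_FIELD */ }

    /* Schematic of the branch seen in vmx_queue_exception()/vmx_inject_irq()/
     * vmx_inject_nmi(): vm86 emulation forces the software delivery path. */
    static void sketch_inject_event(int vm86_active, int vector)
    {
        if (vm86_active)
            inject_via_ivt(vector);
        else
            inject_via_vmcs(vector);
    }
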