root_hpa 393 arch/x86/include/asm/kvm_host.h void (*invlpg)(struct kvm_vcpu *vcpu, gva_t gva, hpa_t root_hpa);
root_hpa 396 arch/x86/include/asm/kvm_host.h hpa_t root_hpa;
root_hpa 2658 arch/x86/kvm/mmu.c BUG_ON(root != vcpu->arch.mmu->root_hpa);
root_hpa 2672 arch/x86/kvm/mmu.c shadow_walk_init_using_root(iterator, vcpu, vcpu->arch.mmu->root_hpa,
root_hpa 3335 arch/x86/kvm/mmu.c if (!VALID_PAGE(vcpu->arch.mmu->root_hpa))
root_hpa 3545 arch/x86/kvm/mmu.c if (!VALID_PAGE(vcpu->arch.mmu->root_hpa))
root_hpa 3700 arch/x86/kvm/mmu.c static void mmu_free_root_page(struct kvm *kvm, hpa_t *root_hpa,
root_hpa 3705 arch/x86/kvm/mmu.c if (!VALID_PAGE(*root_hpa))
root_hpa 3708 arch/x86/kvm/mmu.c sp = page_header(*root_hpa & PT64_BASE_ADDR_MASK);
root_hpa 3713 arch/x86/kvm/mmu.c *root_hpa = INVALID_PAGE;
root_hpa 3727 arch/x86/kvm/mmu.c if (!(free_active_root && VALID_PAGE(mmu->root_hpa))) {
root_hpa 3747 arch/x86/kvm/mmu.c mmu_free_root_page(vcpu->kvm, &mmu->root_hpa,
root_hpa 3755 arch/x86/kvm/mmu.c mmu->root_hpa = INVALID_PAGE;
root_hpa 3792 arch/x86/kvm/mmu.c vcpu->arch.mmu->root_hpa = __pa(sp->spt);
root_hpa 3810 arch/x86/kvm/mmu.c vcpu->arch.mmu->root_hpa = __pa(vcpu->arch.mmu->pae_root);
root_hpa 3836 arch/x86/kvm/mmu.c hpa_t root = vcpu->arch.mmu->root_hpa;
root_hpa 3850 arch/x86/kvm/mmu.c vcpu->arch.mmu->root_hpa = root;
root_hpa 3890 arch/x86/kvm/mmu.c vcpu->arch.mmu->root_hpa = __pa(vcpu->arch.mmu->pae_root);
root_hpa 3914 arch/x86/kvm/mmu.c vcpu->arch.mmu->root_hpa = __pa(vcpu->arch.mmu->lm_root);
root_hpa 3939 arch/x86/kvm/mmu.c if (!VALID_PAGE(vcpu->arch.mmu->root_hpa))
root_hpa 3945 arch/x86/kvm/mmu.c hpa_t root = vcpu->arch.mmu->root_hpa;
root_hpa 4050 arch/x86/kvm/mmu.c if (!VALID_PAGE(vcpu->arch.mmu->root_hpa))
root_hpa 4146 arch/x86/kvm/mmu.c if (!VALID_PAGE(vcpu->arch.mmu->root_hpa))
root_hpa 4174 arch/x86/kvm/mmu.c MMU_WARN_ON(!VALID_PAGE(vcpu->arch.mmu->root_hpa));
root_hpa 4292 arch/x86/kvm/mmu.c MMU_WARN_ON(!VALID_PAGE(vcpu->arch.mmu->root_hpa));
root_hpa 4370 arch/x86/kvm/mmu.c root.hpa = mmu->root_hpa;
root_hpa 4381 arch/x86/kvm/mmu.c mmu->root_hpa = root.hpa;
root_hpa 4427 arch/x86/kvm/mmu.c page_header(mmu->root_hpa));
root_hpa 5241 arch/x86/kvm/mmu.c vcpu->arch.mmu->root_hpa = INVALID_PAGE;
root_hpa 5297 arch/x86/kvm/mmu.c WARN_ON(VALID_PAGE(vcpu->arch.root_mmu.root_hpa));
root_hpa 5299 arch/x86/kvm/mmu.c WARN_ON(VALID_PAGE(vcpu->arch.guest_mmu.root_hpa));
root_hpa 5619 arch/x86/kvm/mmu.c mmu->invlpg(vcpu, gva, mmu->root_hpa);
root_hpa 5648 arch/x86/kvm/mmu.c mmu->invlpg(vcpu, gva, mmu->root_hpa);
root_hpa 5800 arch/x86/kvm/mmu.c vcpu->arch.root_mmu.root_hpa = INVALID_PAGE;
root_hpa 5806 arch/x86/kvm/mmu.c vcpu->arch.guest_mmu.root_hpa = INVALID_PAGE;
root_hpa 78 arch/x86/kvm/mmu.h if (likely(vcpu->arch.mmu->root_hpa != INVALID_PAGE))
root_hpa 100 arch/x86/kvm/mmu.h if (VALID_PAGE(vcpu->arch.mmu->root_hpa))
root_hpa 101 arch/x86/kvm/mmu.h vcpu->arch.mmu->set_cr3(vcpu, vcpu->arch.mmu->root_hpa |
root_hpa 59 arch/x86/kvm/mmu_audit.c if (!VALID_PAGE(vcpu->arch.mmu->root_hpa))
root_hpa 63 arch/x86/kvm/mmu_audit.c hpa_t root = vcpu->arch.mmu->root_hpa;
root_hpa 640 arch/x86/kvm/paging_tmpl.h if (!VALID_PAGE(vcpu->arch.mmu->root_hpa))
root_hpa 892 arch/x86/kvm/paging_tmpl.h static void FNAME(invlpg)(struct kvm_vcpu *vcpu, gva_t gva, hpa_t root_hpa)
root_hpa 907 arch/x86/kvm/paging_tmpl.h if (!VALID_PAGE(root_hpa)) {
root_hpa 913 arch/x86/kvm/paging_tmpl.h for_each_shadow_entry_using_root(vcpu, root_hpa, gva, iterator) {
root_hpa 2956 arch/x86/kvm/vmx/vmx.c u64 construct_eptp(struct kvm_vcpu *vcpu, unsigned long root_hpa)
root_hpa 2965 arch/x86/kvm/vmx/vmx.c eptp |= (root_hpa & PAGE_MASK);
root_hpa 329 arch/x86/kvm/vmx/vmx.h u64 construct_eptp(struct kvm_vcpu *vcpu, unsigned long root_hpa);
root_hpa 489 arch/x86/kvm/vmx/vmx.h u64 construct_eptp(struct kvm_vcpu *vcpu, unsigned long root_hpa);
root_hpa 495 arch/x86/kvm/vmx/vmx.h if (!VALID_PAGE(vcpu->arch.mmu->root_hpa))
root_hpa 498 arch/x86/kvm/vmx/vmx.h vcpu->arch.mmu->root_hpa));
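The call sites above share one lifecycle for root_hpa: it is initialized to INVALID_PAGE, checked with VALID_PAGE() before the MMU touches it, set to the physical address of a freshly allocated root page table, and reset to INVALID_PAGE when the root is freed (mmu_free_root_page). The following is a minimal, self-contained sketch of that pattern, not KVM code; struct demo_mmu and demo_alloc_root() are hypothetical stand-ins for the kernel's kvm_mmu state and shadow-root allocation.

	/*
	 * Simplified illustration of the root_hpa lifecycle seen in the
	 * listing: INVALID_PAGE sentinel, VALID_PAGE() check before use,
	 * assignment of a physical address on allocation, reset on free.
	 */
	#include <stdint.h>
	#include <stdio.h>

	typedef uint64_t hpa_t;

	#define INVALID_PAGE	((hpa_t)-1)
	#define VALID_PAGE(x)	((x) != INVALID_PAGE)

	struct demo_mmu {
		hpa_t root_hpa;		/* host physical address of the active root */
	};

	/* Stand-in for shadow-root allocation; returns a fake physical address. */
	static hpa_t demo_alloc_root(void)
	{
		return 0x1000;
	}

	static void demo_load_root(struct demo_mmu *mmu)
	{
		/* Mirrors the "if (!VALID_PAGE(...)) allocate" checks in mmu.c. */
		if (!VALID_PAGE(mmu->root_hpa))
			mmu->root_hpa = demo_alloc_root();
	}

	static void demo_free_root(struct demo_mmu *mmu)
	{
		/* Mirrors mmu_free_root_page(): bail if invalid, then reset sentinel. */
		if (!VALID_PAGE(mmu->root_hpa))
			return;
		mmu->root_hpa = INVALID_PAGE;
	}

	int main(void)
	{
		struct demo_mmu mmu = { .root_hpa = INVALID_PAGE };

		demo_load_root(&mmu);
		printf("root_hpa = %#llx\n", (unsigned long long)mmu.root_hpa);
		demo_free_root(&mmu);
		printf("valid after free: %d\n", VALID_PAGE(mmu.root_hpa));
		return 0;
	}

In the real code the allocation path stores __pa() of the new shadow page (mmu.c lines 3792, 3810, 3890, 3914 above), and the valid root is then handed to hardware via set_cr3 or construct_eptp().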