Lines matching refs: context

Cross-reference hits for the identifier `context` in KVM's x86 MMU code (arch/x86/kvm/mmu.c), grouped by function below. The leading number on each hit is its source line number; statements that span several source lines appear truncated, because only the line containing the match is listed.

In nonpaging_init_context() (context is an argument):

  3518                            struct kvm_mmu *context)
  3520    context->page_fault = nonpaging_page_fault;
  3521    context->gva_to_gpa = nonpaging_gva_to_gpa;
  3522    context->sync_page = nonpaging_sync_page;
  3523    context->invlpg = nonpaging_invlpg;
  3524    context->update_pte = nonpaging_update_pte;
  3525    context->root_level = 0;
  3526    context->shadow_root_level = PT32E_ROOT_LEVEL;
  3527    context->root_hpa = INVALID_PAGE;
  3528    context->direct_map = true;
  3529    context->nx = false;
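Note: this context covers a guest running with paging disabled, so a guest virtual address already is a guest physical address, which is why the context sets direct_map = true. A minimal userspace sketch of that identity translation (the gva_t/gpa_t typedefs are stand-ins, not the kernel's headers):

    #include <stdint.h>

    typedef uint64_t gva_t;  /* stand-in for the kernel's gva_t */
    typedef uint64_t gpa_t;  /* stand-in for the kernel's gpa_t */

    /* With CR0.PG clear there is no guest page-table walk to perform:
     * translation is the identity. */
    static gpa_t nonpaging_gva_to_gpa(gva_t vaddr)
    {
            return vaddr;
    }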
In reset_rsvds_bits_mask() (context is an argument):

  3588                            struct kvm_mmu *context)
  3595    context->bad_mt_xwr = 0;
  3597    if (!context->nx)
  3609    switch (context->root_level) {
  3612    context->rsvd_bits_mask[0][1] = 0;
  3613    context->rsvd_bits_mask[0][0] = 0;
  3614    context->rsvd_bits_mask[1][0] = context->rsvd_bits_mask[0][0];
  3617    context->rsvd_bits_mask[1][1] = 0;
  3623    context->rsvd_bits_mask[1][1] = rsvd_bits(17, 21);
  3626    context->rsvd_bits_mask[1][1] = rsvd_bits(13, 21);
  3629    context->rsvd_bits_mask[0][2] =
  3632    context->rsvd_bits_mask[0][1] = exb_bit_rsvd |
  3634    context->rsvd_bits_mask[0][0] = exb_bit_rsvd |
  3636    context->rsvd_bits_mask[1][1] = exb_bit_rsvd |
  3639    context->rsvd_bits_mask[1][0] = context->rsvd_bits_mask[0][0];
  3642    context->rsvd_bits_mask[0][3] = exb_bit_rsvd |
  3644    context->rsvd_bits_mask[0][2] = exb_bit_rsvd |
  3646    context->rsvd_bits_mask[0][1] = exb_bit_rsvd |
  3648    context->rsvd_bits_mask[0][0] = exb_bit_rsvd |
  3650    context->rsvd_bits_mask[1][3] = context->rsvd_bits_mask[0][3];
  3651    context->rsvd_bits_mask[1][2] = exb_bit_rsvd |
  3654    context->rsvd_bits_mask[1][1] = exb_bit_rsvd |
  3657    context->rsvd_bits_mask[1][0] = context->rsvd_bits_mask[0][0];
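Note: rsvd_bits_mask is indexed as [large-page?][level - 1]: row 0 holds the reserved-bit masks checked against non-leaf and 4K entries at each level of the guest walk, row 1 the masks for large-page leaves, which is why several row-1 entries are simply copied from row 0. The rsvd_bits() helper used throughout builds a mask of bits s..e inclusive; a self-contained sketch, assumed to match the kernel's definition:

    #include <stdint.h>

    /* Mask with bits s..e, inclusive, set.  E.g. rsvd_bits(13, 21) is
     * the range a 4MB PSE page must keep clear, and
     * rsvd_bits(maxphyaddr, 51) collapses to 0 on a CPU that
     * implements all 52 physical-address bits. */
    static inline uint64_t rsvd_bits(int s, int e)
    {
            return ((1ULL << (e - s + 1)) - 1) << s;
    }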
In reset_rsvds_bits_mask_ept() (context is an argument):

  3663                            struct kvm_mmu *context, bool execonly)
  3668    context->rsvd_bits_mask[0][3] =
  3670    context->rsvd_bits_mask[0][2] =
  3672    context->rsvd_bits_mask[0][1] =
  3674    context->rsvd_bits_mask[0][0] = rsvd_bits(maxphyaddr, 51);
  3677    context->rsvd_bits_mask[1][3] = context->rsvd_bits_mask[0][3];
  3678    context->rsvd_bits_mask[1][2] =
  3680    context->rsvd_bits_mask[1][1] =
  3682    context->rsvd_bits_mask[1][0] = context->rsvd_bits_mask[0][0];
  3690    context->bad_mt_xwr |= (1ull << pte);
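Note: bad_mt_xwr packs one bit per possible value of an EPT PTE's low six bits, with the X/W/R permissions in bits 2:0 and the memory type in bits 5:3. Source line 3690 is the body of a loop over all 64 combinations; a hedged reconstruction of that loop, based on the EPT format, marking the combinations the walker must reject:

    #include <stdint.h>
    #include <stdbool.h>

    /* Bit N of the result is set when an EPT PTE whose low six bits
     * equal N describes an illegal combination. */
    static uint64_t build_bad_mt_xwr(bool execonly)
    {
            uint64_t bad_mt_xwr = 0;

            for (int pte = 0; pte < 64; pte++) {
                    int rwx = pte & 7;      /* bits 2:0: R, W, X */
                    int mt  = pte >> 3;     /* bits 5:3: memory type */

                    if (mt == 0x2 || mt == 0x3 || mt == 0x7 ||  /* reserved memory types */
                        rwx == 0x2 || rwx == 0x6 ||             /* writable but not readable */
                        (rwx == 0x4 && !execonly))              /* execute-only unsupported */
                            bad_mt_xwr |= 1ULL << pte;
            }
            return bad_mt_xwr;
    }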
In paging64_init_context_common() (context is an argument):

  3778                            struct kvm_mmu *context,
  3781    context->nx = is_nx(vcpu);
  3782    context->root_level = level;
  3784    reset_rsvds_bits_mask(vcpu, context);
  3785    update_permission_bitmask(vcpu, context, false);
  3786    update_last_pte_bitmap(vcpu, context);
  3789    context->page_fault = paging64_page_fault;
  3790    context->gva_to_gpa = paging64_gva_to_gpa;
  3791    context->sync_page = paging64_sync_page;
  3792    context->invlpg = paging64_invlpg;
  3793    context->update_pte = paging64_update_pte;
  3794    context->shadow_root_level = level;
  3795    context->root_hpa = INVALID_PAGE;
  3796    context->direct_map = false;
In paging64_init_context() (context is an argument):

  3800                            struct kvm_mmu *context)
  3802    paging64_init_context_common(vcpu, context, PT64_ROOT_LEVEL);
In paging32_init_context() (context is an argument):

  3806                            struct kvm_mmu *context)
  3808    context->nx = false;
  3809    context->root_level = PT32_ROOT_LEVEL;
  3811    reset_rsvds_bits_mask(vcpu, context);
  3812    update_permission_bitmask(vcpu, context, false);
  3813    update_last_pte_bitmap(vcpu, context);
  3815    context->page_fault = paging32_page_fault;
  3816    context->gva_to_gpa = paging32_gva_to_gpa;
  3817    context->sync_page = paging32_sync_page;
  3818    context->invlpg = paging32_invlpg;
  3819    context->update_pte = paging32_update_pte;
  3820    context->shadow_root_level = PT32E_ROOT_LEVEL;
  3821    context->root_hpa = INVALID_PAGE;
  3822    context->direct_map = false;
In paging32E_init_context() (context is an argument):

  3826                            struct kvm_mmu *context)
  3828    paging64_init_context_common(vcpu, context, PT32E_ROOT_LEVEL);
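Note: paging32E_init_context() can reuse the 64-bit common initializer because PAE entries share the 8-byte PTE format of long mode; only the depth of the walk differs. The root-level constants, as conventionally defined in this code (values assumed):

    #define PT32_ROOT_LEVEL   2   /* 32-bit:    PD -> PT */
    #define PT32E_ROOT_LEVEL  3   /* PAE:       PDPT -> PD -> PT */
    #define PT64_ROOT_LEVEL   4   /* long mode: PML4 -> PDPT -> PD -> PT */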
In init_kvm_tdp_mmu() (context is a local variable):

  3833    struct kvm_mmu *context = &vcpu->arch.mmu;
  3835    context->base_role.word = 0;
  3836    context->page_fault = tdp_page_fault;
  3837    context->sync_page = nonpaging_sync_page;
  3838    context->invlpg = nonpaging_invlpg;
  3839    context->update_pte = nonpaging_update_pte;
  3840    context->shadow_root_level = kvm_x86_ops->get_tdp_level();
  3841    context->root_hpa = INVALID_PAGE;
  3842    context->direct_map = true;
  3843    context->set_cr3 = kvm_x86_ops->set_tdp_cr3;
  3844    context->get_cr3 = get_cr3;
  3845    context->get_pdptr = kvm_pdptr_read;
  3846    context->inject_page_fault = kvm_inject_page_fault;
  3849    context->nx = false;
  3850    context->gva_to_gpa = nonpaging_gva_to_gpa;
  3851    context->root_level = 0;
  3853    context->nx = is_nx(vcpu);
  3854    context->root_level = PT64_ROOT_LEVEL;
  3855    reset_rsvds_bits_mask(vcpu, context);
  3856    context->gva_to_gpa = paging64_gva_to_gpa;
  3858    context->nx = is_nx(vcpu);
  3859    context->root_level = PT32E_ROOT_LEVEL;
  3860    reset_rsvds_bits_mask(vcpu, context);
  3861    context->gva_to_gpa = paging64_gva_to_gpa;
  3863    context->nx = false;
  3864    context->root_level = PT32_ROOT_LEVEL;
  3865    reset_rsvds_bits_mask(vcpu, context);
  3866    context->gva_to_gpa = paging32_gva_to_gpa;
  3869    update_permission_bitmask(vcpu, context, false);
  3870    update_last_pte_bitmap(vcpu, context);
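Note: the alternating nx/root_level/gva_to_gpa assignments at source lines 3849 to 3866 are the arms of a guest paging-mode dispatch that the cross-reference elides. A hedged sketch of that control flow, consistent with the hits shown:

    if (!is_paging(vcpu)) {
            context->nx = false;
            context->gva_to_gpa = nonpaging_gva_to_gpa;
            context->root_level = 0;
    } else if (is_long_mode(vcpu)) {
            context->nx = is_nx(vcpu);
            context->root_level = PT64_ROOT_LEVEL;
            reset_rsvds_bits_mask(vcpu, context);
            context->gva_to_gpa = paging64_gva_to_gpa;
    } else if (is_pae(vcpu)) {
            context->nx = is_nx(vcpu);
            context->root_level = PT32E_ROOT_LEVEL;
            reset_rsvds_bits_mask(vcpu, context);
            context->gva_to_gpa = paging64_gva_to_gpa;  /* PAE uses 64-bit PTEs */
    } else {
            context->nx = false;
            context->root_level = PT32_ROOT_LEVEL;
            reset_rsvds_bits_mask(vcpu, context);
            context->gva_to_gpa = paging32_gva_to_gpa;
    }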
In kvm_init_shadow_mmu() (context is a local variable):

  3877    struct kvm_mmu *context = &vcpu->arch.mmu;
  3879    MMU_WARN_ON(VALID_PAGE(context->root_hpa));
  3882    nonpaging_init_context(vcpu, context);
  3884    paging64_init_context(vcpu, context);
  3886    paging32E_init_context(vcpu, context);
  3888    paging32_init_context(vcpu, context);
  3890    context->base_role.nxe = is_nx(vcpu);
  3891    context->base_role.cr4_pae = !!is_pae(vcpu);
  3892    context->base_role.cr0_wp = is_write_protection(vcpu);
  3893    context->base_role.smep_andnot_wp
  3895    context->base_role.smap_andnot_wp
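Note: the four init calls at source lines 3882 to 3888 are again the arms of a paging-mode ladder whose conditions fall on non-matching lines. A hedged sketch of the elided control flow:

    if (!is_paging(vcpu))
            nonpaging_init_context(vcpu, context);
    else if (is_long_mode(vcpu))
            paging64_init_context(vcpu, context);
    else if (is_pae(vcpu))
            paging32E_init_context(vcpu, context);
    else
            paging32_init_context(vcpu, context);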
In kvm_init_shadow_ept_mmu() (context is a local variable):

  3902    struct kvm_mmu *context = &vcpu->arch.mmu;
  3904    MMU_WARN_ON(VALID_PAGE(context->root_hpa));
  3906    context->shadow_root_level = kvm_x86_ops->get_tdp_level();
  3908    context->nx = true;
  3909    context->page_fault = ept_page_fault;
  3910    context->gva_to_gpa = ept_gva_to_gpa;
  3911    context->sync_page = ept_sync_page;
  3912    context->invlpg = ept_invlpg;
  3913    context->update_pte = ept_update_pte;
  3914    context->root_level = context->shadow_root_level;
  3915    context->root_hpa = INVALID_PAGE;
  3916    context->direct_map = false;
  3918    update_permission_bitmask(vcpu, context, true);
  3919    reset_rsvds_bits_mask_ept(vcpu, context, execonly);
In init_kvm_softmmu() (context is a local variable):

  3925    struct kvm_mmu *context = &vcpu->arch.mmu;
  3928    context->set_cr3 = kvm_x86_ops->set_cr3;
  3929    context->get_cr3 = get_cr3;
  3930    context->get_pdptr = kvm_pdptr_read;
  3931    context->inject_page_fault = kvm_inject_page_fault;
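Note: these initializers (TDP, shadow, shadow EPT, and the softmmu wrapper, which runs kvm_init_shadow_mmu() and then installs the host CR3/PDPTR/fault callbacks shown above) are selected by a small top-level dispatcher depending on whether hardware two-dimensional paging is enabled. A hedged sketch of that selector, with function and variable names assumed from this era of the code:

    static void init_kvm_mmu(struct kvm_vcpu *vcpu)
    {
            if (mmu_is_nested(vcpu))        /* nested guest: walk L1's tables */
                    init_kvm_nested_mmu(vcpu);
            else if (tdp_enabled)           /* EPT/NPT available */
                    init_kvm_tdp_mmu(vcpu);
            else                            /* shadow paging */
                    init_kvm_softmmu(vcpu);
    }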