Searched refs:gfn_end (Results 1 – 6 of 6) sorted by relevance
/linux-4.4.14/arch/x86/kvm/
mmu.h
    176   void kvm_zap_gfn_range(struct kvm *kvm, gfn_t gfn_start, gfn_t gfn_end);
mmu.c
    1515  gfn_t gfn_start, gfn_end;  in kvm_handle_hva_range() local
    1527  gfn_end = hva_to_gfn_memslot(hva_end + PAGE_SIZE - 1, memslot);  in kvm_handle_hva_range()
    1531  gfn_start, gfn_end - 1,  in kvm_handle_hva_range()
    4568  void kvm_zap_gfn_range(struct kvm *kvm, gfn_t gfn_start, gfn_t gfn_end)  in kvm_zap_gfn_range() argument
    4581  end = min(gfn_end, memslot->base_gfn + memslot->npages);  in kvm_zap_gfn_range()
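The x86 hits above show the common pattern: kvm_handle_hva_range() converts a host-virtual-address range into a guest-frame range with hva_to_gfn_memslot() (base gfn plus the page offset within the slot), and kvm_zap_gfn_range() clamps the caller's range to each memslot. Below is a minimal, self-contained sketch of that arithmetic; the struct, helper, and values are simplified stand-ins for illustration, not the kernel's definitions.

/*
 * Sketch of the hva -> gfn range conversion and memslot clamp seen in
 * the mmu.c hits above.  "struct slot" is a stand-in, not the kernel's
 * struct kvm_memory_slot.
 */
#include <stdint.h>
#include <stdio.h>

#define PAGE_SHIFT 12
#define PAGE_SIZE  (1UL << PAGE_SHIFT)

typedef uint64_t gfn_t;

struct slot {
    gfn_t base_gfn;                /* first guest frame mapped by the slot */
    unsigned long userspace_addr;  /* host virtual address of that frame   */
    unsigned long npages;          /* size of the slot in pages            */
};

/* Same arithmetic as hva_to_gfn_memslot(): base gfn + page offset. */
static gfn_t hva_to_gfn(unsigned long hva, const struct slot *s)
{
    return s->base_gfn + ((hva - s->userspace_addr) >> PAGE_SHIFT);
}

int main(void)
{
    struct slot s = { .base_gfn = 0x100,
                      .userspace_addr = 0x7f0000000000UL,
                      .npages = 512 };
    unsigned long hva_start = s.userspace_addr + 3 * PAGE_SIZE;
    unsigned long hva_end   = s.userspace_addr + 7 * PAGE_SIZE + 123;

    /* Inclusive start, exclusive end: round hva_end up to the next page. */
    gfn_t gfn_start = hva_to_gfn(hva_start, &s);
    gfn_t gfn_end   = hva_to_gfn(hva_end + PAGE_SIZE - 1, &s);

    /* The kvm_zap_gfn_range() hit additionally clamps to the slot. */
    gfn_t end = gfn_end < s.base_gfn + s.npages ? gfn_end
                                                : s.base_gfn + s.npages;

    printf("gfn range [%#llx, %#llx), clamped end %#llx\n",
           (unsigned long long)gfn_start, (unsigned long long)gfn_end,
           (unsigned long long)end);
    return 0;
}

The exclusive gfn_end is why the 1531 hit passes gfn_end - 1 as an inclusive last frame to the range handler.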
/linux-4.4.14/arch/powerpc/kvm/
e500_mmu_host.c
    408   unsigned long gfn_start, gfn_end;  in kvmppc_e500_shadow_map() local
    412   gfn_end = gfn_start + tsize_pages;  in kvmppc_e500_shadow_map()
    416   if (gfn_end + pfn - gfn > end)  in kvmppc_e500_shadow_map()
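The e500_mmu_host.c hits follow a different pattern: here gfn_end bounds a candidate TLB-entry-sized block rather than an hva range, and the check at hit 416 tests whether that block, translated into host pfn space by the offset (pfn - gfn), still fits below the end of the host region. A rough sketch of that bounds test follows; the alignment of gfn_start and the start/end parameters are assumptions, as they are not among the hits.

/*
 * Rough sketch of the e500 bounds check: can a tsize_pages-sized,
 * naturally aligned block around gfn be backed by host pfns that all
 * fall inside [start, end)?  The setup here is hypothetical.
 */
#include <stdbool.h>

bool block_fits(unsigned long gfn, unsigned long pfn,
                unsigned long tsize_pages,
                unsigned long start, unsigned long end)
{
    /* Align the guest frame down to the candidate block size
     * (assumed; the alignment line itself is not among the hits). */
    unsigned long gfn_start = gfn & ~(tsize_pages - 1);
    unsigned long gfn_end = gfn_start + tsize_pages;

    /* Shift the block into host pfn space by the gfn->pfn offset and
     * require it to fit inside the host range, as in hit 416. */
    if (gfn_start + pfn - gfn < start)
        return false;
    if (gfn_end + pfn - gfn > end)
        return false;
    return true;
}

When the test fails, the caller would have to fall back to a smaller entry size rather than map past the host region.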
book3s_pr.c
    262   gfn_t gfn, gfn_end;  in do_kvm_unmap_hva() local
    274   gfn_end = hva_to_gfn_memslot(hva_end + PAGE_SIZE - 1, memslot);  in do_kvm_unmap_hva()
    277   gfn_end << PAGE_SHIFT);  in do_kvm_unmap_hva()
book3s_64_mmu_hv.c
    681   gfn_t gfn, gfn_end;  in kvm_handle_hva_range() local
    693   gfn_end = hva_to_gfn_memslot(hva_end + PAGE_SIZE - 1, memslot);  in kvm_handle_hva_range()
    695   for (; gfn < gfn_end; ++gfn) {  in kvm_handle_hva_range()
/linux-4.4.14/arch/arm/kvm/
mmu.c
    1482  gfn_t gfn, gfn_end;  in handle_hva_to_gpa() local
    1495  gfn_end = hva_to_gfn_memslot(hva_end + PAGE_SIZE - 1, memslot);  in handle_hva_to_gpa()
    1497  for (; gfn < gfn_end; ++gfn) {  in handle_hva_to_gpa()
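The book3s_64_mmu_hv.c and arm/kvm/mmu.c hits end with the same iteration shape: after computing gfn_end as an exclusive bound (rounding hva_end up to the next page), a handler runs once per guest frame, in contrast to the x86 path that hands the whole range to the handler at once. A schematic of that loop, with the handler type and function name invented for illustration:

/*
 * Schematic of the per-gfn walk seen in kvm_handle_hva_range() (book3s)
 * and handle_hva_to_gpa() (arm).  The handler signature and walk_gfn_range
 * name are illustrative, not the kernel's.
 */
typedef unsigned long gfn_t;

typedef int (*gfn_handler_t)(void *kvm, gfn_t gfn, void *data);

int walk_gfn_range(void *kvm, gfn_t gfn, gfn_t gfn_end,
                   gfn_handler_t handler, void *data)
{
    int ret = 0;

    /* gfn_end is exclusive, so the loop covers every frame whose page
     * intersects the original [hva_start, hva_end) range. */
    for (; gfn < gfn_end; ++gfn)
        ret |= handler(kvm, gfn, data);

    return ret;
}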