Lines matching refs: memslot
115 static void mark_page_dirty_in_slot(struct kvm_memory_slot *memslot, gfn_t gfn);
506 static void kvm_destroy_dirty_bitmap(struct kvm_memory_slot *memslot) in kvm_destroy_dirty_bitmap() argument
508 if (!memslot->dirty_bitmap) in kvm_destroy_dirty_bitmap()
511 kvfree(memslot->dirty_bitmap); in kvm_destroy_dirty_bitmap()
512 memslot->dirty_bitmap = NULL; in kvm_destroy_dirty_bitmap()
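The references at 506-512 cover essentially the whole teardown helper, so it can be sketched in full. A minimal sketch, assuming the usual kvm_main.c context (linux/kvm_host.h already included by the file):

    static void kvm_destroy_dirty_bitmap(struct kvm_memory_slot *memslot)
    {
            /* Nothing to do if dirty logging was never enabled on this slot. */
            if (!memslot->dirty_bitmap)
                    return;

            kvfree(memslot->dirty_bitmap);
            memslot->dirty_bitmap = NULL;   /* avoid dangling pointer / double free */
    }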
531 struct kvm_memory_slot *memslot; in kvm_free_memslots() local
536 kvm_for_each_memslot(memslot, slots) in kvm_free_memslots()
537 kvm_free_memslot(kvm, memslot, NULL); in kvm_free_memslots()
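kvm_free_memslots() walks every slot of a struct kvm_memslots and releases the per-slot state. A sketch reconstructed around the listed lines 531-537; the NULL-check and the final kvfree() of the slots array itself are assumptions about how the function is typically written and may differ by kernel version:

    static void kvm_free_memslots(struct kvm *kvm, struct kvm_memslots *slots)
    {
            struct kvm_memory_slot *memslot;

            if (!slots)
                    return;

            /* Release arch state, dirty bitmap, etc. for each slot. */
            kvm_for_each_memslot(memslot, slots)
                    kvm_free_memslot(kvm, memslot, NULL);

            kvfree(slots);
    }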
703 static int kvm_create_dirty_bitmap(struct kvm_memory_slot *memslot) in kvm_create_dirty_bitmap() argument
705 unsigned long dirty_bytes = 2 * kvm_dirty_bitmap_bytes(memslot); in kvm_create_dirty_bitmap()
707 memslot->dirty_bitmap = kvm_kvzalloc(dirty_bytes); in kvm_create_dirty_bitmap()
708 if (!memslot->dirty_bitmap) in kvm_create_dirty_bitmap()
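Note the factor of two at line 705: the allocation is twice kvm_dirty_bitmap_bytes() because the second half is used as a scratch buffer when the log is harvested (see kvm_get_dirty_log_protect() below). A sketch of the allocator, assuming kvm_kvzalloc() as in kernels of this vintage (later kernels switched to kvzalloc()):

    static int kvm_create_dirty_bitmap(struct kvm_memory_slot *memslot)
    {
            /* Twice the bitmap size: the second half serves as a harvest buffer. */
            unsigned long dirty_bytes = 2 * kvm_dirty_bitmap_bytes(memslot);

            memslot->dirty_bitmap = kvm_kvzalloc(dirty_bytes);
            if (!memslot->dirty_bitmap)
                    return -ENOMEM;

            return 0;
    }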
1025 struct kvm_memory_slot *memslot; in kvm_get_dirty_log() local
1037 memslot = id_to_memslot(slots, id); in kvm_get_dirty_log()
1039 if (!memslot->dirty_bitmap) in kvm_get_dirty_log()
1042 n = kvm_dirty_bitmap_bytes(memslot); in kvm_get_dirty_log()
1045 any = memslot->dirty_bitmap[i]; in kvm_get_dirty_log()
1048 if (copy_to_user(log->dirty_bitmap, memslot->dirty_bitmap, n)) in kvm_get_dirty_log()
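kvm_get_dirty_log() resolves log->slot to a memslot, checks whether any bit is set, and copies the raw bitmap to userspace without clearing it (clearing is left to the arch code in this variant). A sketch under the assumption that the slot decoding and error handling follow the usual kvm_main.c pattern; exact error codes may differ by version:

    int kvm_get_dirty_log(struct kvm *kvm, struct kvm_dirty_log *log, int *is_dirty)
    {
            struct kvm_memslots *slots;
            struct kvm_memory_slot *memslot;
            int i, as_id, id;
            unsigned long n, any = 0;

            as_id = log->slot >> 16;
            id = (u16)log->slot;
            if (as_id >= KVM_ADDRESS_SPACE_NUM || id >= KVM_USER_MEM_SLOTS)
                    return -EINVAL;

            slots = __kvm_memslots(kvm, as_id);
            memslot = id_to_memslot(slots, id);
            if (!memslot->dirty_bitmap)
                    return -ENOENT;

            n = kvm_dirty_bitmap_bytes(memslot);

            /* Cheap scan: stop as soon as one dirty word is found. */
            for (i = 0; !any && i < n / sizeof(long); ++i)
                    any = memslot->dirty_bitmap[i];

            if (copy_to_user(log->dirty_bitmap, memslot->dirty_bitmap, n))
                    return -EFAULT;

            if (any)
                    *is_dirty = 1;
            return 0;
    }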
1087 struct kvm_memory_slot *memslot; in kvm_get_dirty_log_protect() local
1100 memslot = id_to_memslot(slots, id); in kvm_get_dirty_log_protect()
1102 dirty_bitmap = memslot->dirty_bitmap; in kvm_get_dirty_log_protect()
1107 n = kvm_dirty_bitmap_bytes(memslot); in kvm_get_dirty_log_protect()
1128 kvm_arch_mmu_enable_log_dirty_pt_masked(kvm, memslot, in kvm_get_dirty_log_protect()
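kvm_get_dirty_log_protect() is the write-protect variant: it atomically harvests each word of the dirty bitmap into the scratch half of the allocation, re-arms dirty logging for the harvested pages via kvm_arch_mmu_enable_log_dirty_pt_masked(), and only then copies the snapshot out. A condensed sketch of the core loop (declarations of i, n, dirty_bitmap and dirty_bitmap_buffer omitted); the locking and setup are reconstructed from memory of kvm_main.c and are approximate:

    /* Inside kvm_get_dirty_log_protect(), after resolving memslot as above. */
    dirty_bitmap = memslot->dirty_bitmap;
    if (!dirty_bitmap)
            return -ENOENT;

    n = kvm_dirty_bitmap_bytes(memslot);
    dirty_bitmap_buffer = dirty_bitmap + n / sizeof(long);  /* scratch half */
    memset(dirty_bitmap_buffer, 0, n);

    spin_lock(&kvm->mmu_lock);
    *is_dirty = false;
    for (i = 0; i < n / sizeof(long); i++) {
            unsigned long mask;
            gfn_t offset;

            if (!dirty_bitmap[i])
                    continue;

            *is_dirty = true;

            /* Grab and clear one word of dirty bits atomically. */
            mask = xchg(&dirty_bitmap[i], 0);
            dirty_bitmap_buffer[i] = mask;

            if (mask) {
                    offset = i * BITS_PER_LONG;
                    kvm_arch_mmu_enable_log_dirty_pt_masked(kvm, memslot,
                                                            offset, mask);
            }
    }
    spin_unlock(&kvm->mmu_lock);

    if (copy_to_user(log->dirty_bitmap, dirty_bitmap_buffer, n))
            return -EFAULT;
    return 0;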
1170 struct kvm_memory_slot *memslot = gfn_to_memslot(kvm, gfn); in kvm_is_visible_gfn() local
1172 if (!memslot || memslot->id >= KVM_USER_MEM_SLOTS || in kvm_is_visible_gfn()
1173 memslot->flags & KVM_MEMSLOT_INVALID) in kvm_is_visible_gfn()
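kvm_is_visible_gfn() is essentially the three listed lines: a gfn is visible only if it resolves to a user-created, currently valid slot. Sketch (the bool return type is an assumption; older kernels used int):

    bool kvm_is_visible_gfn(struct kvm *kvm, gfn_t gfn)
    {
            struct kvm_memory_slot *memslot = gfn_to_memslot(kvm, gfn);

            /*
             * Reject internal slots (id >= KVM_USER_MEM_SLOTS) and slots
             * that are mid-update (KVM_MEMSLOT_INVALID).
             */
            if (!memslot || memslot->id >= KVM_USER_MEM_SLOTS ||
                memslot->flags & KVM_MEMSLOT_INVALID)
                    return false;

            return true;
    }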
1735 static int __kvm_write_guest_page(struct kvm_memory_slot *memslot, gfn_t gfn, in __kvm_write_guest_page() argument
1741 addr = gfn_to_hva_memslot(memslot, gfn); in __kvm_write_guest_page()
1747 mark_page_dirty_in_slot(memslot, gfn); in __kvm_write_guest_page()
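__kvm_write_guest_page() shows the canonical write path: translate the gfn to an hva within the slot, copy into user memory, then mark the page dirty in that same slot so the dirty bitmap stays consistent with the slot the write actually hit. Sketch, assuming the usual __copy_to_user() body between the listed lines:

    static int __kvm_write_guest_page(struct kvm_memory_slot *memslot, gfn_t gfn,
                                      const void *data, int offset, int len)
    {
            int r;
            unsigned long addr;

            addr = gfn_to_hva_memslot(memslot, gfn);
            if (kvm_is_error_hva(addr))
                    return -EFAULT;
            r = __copy_to_user((void __user *)addr + offset, data, len);
            if (r)
                    return -EFAULT;
            mark_page_dirty_in_slot(memslot, gfn);
            return 0;
    }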
1824 ghc->memslot = gfn_to_memslot(kvm, start_gfn); in kvm_gfn_to_hva_cache_init()
1825 ghc->hva = gfn_to_hva_many(ghc->memslot, start_gfn, NULL); in kvm_gfn_to_hva_cache_init()
1834 ghc->memslot = gfn_to_memslot(kvm, start_gfn); in kvm_gfn_to_hva_cache_init()
1835 ghc->hva = gfn_to_hva_many(ghc->memslot, start_gfn, in kvm_gfn_to_hva_cache_init()
1842 ghc->memslot = NULL; in kvm_gfn_to_hva_cache_init()
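kvm_gfn_to_hva_cache_init() explains the three memslot assignments above: the fast path (1824-1825) caches slot and hva for a region contained in one page; the loop (1834-1835) only validates a region that spans pages; and the final NULL (1842) deliberately disables the cache so later accesses fall back to the slow kvm_read/write_guest() path. A sketch reconstructed around those lines; details beyond them may differ by kernel version:

    int kvm_gfn_to_hva_cache_init(struct kvm *kvm, struct gfn_to_hva_cache *ghc,
                                  gpa_t gpa, unsigned long len)
    {
            struct kvm_memslots *slots = kvm_memslots(kvm);
            int offset = offset_in_page(gpa);
            gfn_t start_gfn = gpa >> PAGE_SHIFT;
            gfn_t end_gfn = (gpa + len - 1) >> PAGE_SHIFT;
            gfn_t nr_pages_needed = end_gfn - start_gfn + 1;
            gfn_t nr_pages_avail;

            ghc->gpa = gpa;
            ghc->generation = slots->generation;
            ghc->len = len;
            ghc->memslot = gfn_to_memslot(kvm, start_gfn);
            ghc->hva = gfn_to_hva_many(ghc->memslot, start_gfn, NULL);
            if (!kvm_is_error_hva(ghc->hva) && nr_pages_needed <= 1) {
                    ghc->hva += offset;     /* fast path: single page, keep the cache */
            } else {
                    /* Validate every page of a multi-page region ... */
                    while (start_gfn <= end_gfn) {
                            ghc->memslot = gfn_to_memslot(kvm, start_gfn);
                            ghc->hva = gfn_to_hva_many(ghc->memslot, start_gfn,
                                                       &nr_pages_avail);
                            if (kvm_is_error_hva(ghc->hva))
                                    return -EFAULT;
                            start_gfn += nr_pages_avail;
                    }
                    /* ... but force the slow path for the actual accesses. */
                    ghc->memslot = NULL;
            }
            return 0;
    }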
1859 if (unlikely(!ghc->memslot)) in kvm_write_guest_cached()
1868 mark_page_dirty_in_slot(ghc->memslot, ghc->gpa >> PAGE_SHIFT); in kvm_write_guest_cached()
1885 if (unlikely(!ghc->memslot)) in kvm_read_guest_cached()
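The two unlikely(!ghc->memslot) checks (1859, 1885) are the slow-path fallback described above, and line 1868 is where the cached write path dirties the page. A sketch of the write side; the read side mirrors it with __copy_from_user()/kvm_read_guest() and, naturally, no dirty marking. The generation check is an assumption about where cache revalidation happens:

    int kvm_write_guest_cached(struct kvm *kvm, struct gfn_to_hva_cache *ghc,
                               void *data, unsigned long len)
    {
            struct kvm_memslots *slots = kvm_memslots(kvm);
            int r;

            /* Re-resolve the cache if the memslot layout has changed. */
            if (slots->generation != ghc->generation)
                    kvm_gfn_to_hva_cache_init(kvm, ghc, ghc->gpa, ghc->len);

            /* Cache disabled (e.g. region crosses a page): slow path. */
            if (unlikely(!ghc->memslot))
                    return kvm_write_guest(kvm, ghc->gpa, data, len);

            if (kvm_is_error_hva(ghc->hva))
                    return -EFAULT;

            r = __copy_to_user((void __user *)ghc->hva, data, len);
            if (r)
                    return -EFAULT;
            mark_page_dirty_in_slot(ghc->memslot, ghc->gpa >> PAGE_SHIFT);

            return 0;
    }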
1926 static void mark_page_dirty_in_slot(struct kvm_memory_slot *memslot, in mark_page_dirty_in_slot() argument
1929 if (memslot && memslot->dirty_bitmap) { in mark_page_dirty_in_slot()
1930 unsigned long rel_gfn = gfn - memslot->base_gfn; in mark_page_dirty_in_slot()
1932 set_bit_le(rel_gfn, memslot->dirty_bitmap); in mark_page_dirty_in_slot()
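mark_page_dirty_in_slot() itself is fully visible in the listed lines: convert the absolute gfn to a slot-relative index and set the corresponding bit, tolerating both a missing slot and a slot without dirty logging enabled:

    static void mark_page_dirty_in_slot(struct kvm_memory_slot *memslot,
                                        gfn_t gfn)
    {
            if (memslot && memslot->dirty_bitmap) {
                    unsigned long rel_gfn = gfn - memslot->base_gfn;

                    set_bit_le(rel_gfn, memslot->dirty_bitmap);
            }
    }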
1938 struct kvm_memory_slot *memslot; in mark_page_dirty() local
1940 memslot = gfn_to_memslot(kvm, gfn); in mark_page_dirty()
1941 mark_page_dirty_in_slot(memslot, gfn); in mark_page_dirty()
1947 struct kvm_memory_slot *memslot; in kvm_vcpu_mark_page_dirty() local
1949 memslot = kvm_vcpu_gfn_to_memslot(vcpu, gfn); in kvm_vcpu_mark_page_dirty()
1950 mark_page_dirty_in_slot(memslot, gfn); in kvm_vcpu_mark_page_dirty()
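The two public wrappers differ only in how the memslot is looked up: mark_page_dirty() goes through the VM-wide gfn_to_memslot(), while kvm_vcpu_mark_page_dirty() uses kvm_vcpu_gfn_to_memslot() and therefore honours the vCPU's current address space. Sketch (the EXPORT_SYMBOL_GPL lines are an assumption):

    void mark_page_dirty(struct kvm *kvm, gfn_t gfn)
    {
            struct kvm_memory_slot *memslot;

            memslot = gfn_to_memslot(kvm, gfn);
            mark_page_dirty_in_slot(memslot, gfn);
    }
    EXPORT_SYMBOL_GPL(mark_page_dirty);

    void kvm_vcpu_mark_page_dirty(struct kvm_vcpu *vcpu, gfn_t gfn)
    {
            struct kvm_memory_slot *memslot;

            memslot = kvm_vcpu_gfn_to_memslot(vcpu, gfn);
            mark_page_dirty_in_slot(memslot, gfn);
    }
    EXPORT_SYMBOL_GPL(kvm_vcpu_mark_page_dirty);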