Lines Matching refs:iterator
(identifier cross-reference: every hit below is in KVM's x86 MMU code, arch/x86/kvm/mmu.c of a v4.x-era kernel; only source lines containing the literal token "iterator" are listed, so continuation lines and macro bodies that use other parameter names are filtered out)
1444 rmap_walk_init_level(struct slot_rmap_walk_iterator *iterator, int level) in rmap_walk_init_level() argument
1446 iterator->level = level; in rmap_walk_init_level()
1447 iterator->gfn = iterator->start_gfn; in rmap_walk_init_level()
1448 iterator->rmap = __gfn_to_rmap(iterator->gfn, level, iterator->slot); in rmap_walk_init_level()
1449 iterator->end_rmap = __gfn_to_rmap(iterator->end_gfn, level, in rmap_walk_init_level()
1450 iterator->slot); in rmap_walk_init_level()
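Note: __gfn_to_rmap() is what lets the walker treat the slot's rmap storage as a flat range it can step through by pointer arithmetic. Its body contains no "iterator", so it is absent from the hits above; in kernels of this vintage it should look roughly like the following reconstruction (not a verbatim quote):

	static unsigned long *__gfn_to_rmap(gfn_t gfn, int level,
					    struct kvm_memory_slot *slot)
	{
		unsigned long idx;

		/* index of gfn within this slot at the given page-table level */
		idx = gfn_to_index(gfn, slot->base_gfn, level);
		return &slot->arch.rmap[level - PT_PAGE_TABLE_LEVEL][idx];
	}

Because rmap and end_rmap point into the same per-level array, slot_rmap_walk_next() can advance with a plain ++iterator->rmap.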
1454 slot_rmap_walk_init(struct slot_rmap_walk_iterator *iterator, in slot_rmap_walk_init() argument
1458 iterator->slot = slot; in slot_rmap_walk_init()
1459 iterator->start_level = start_level; in slot_rmap_walk_init()
1460 iterator->end_level = end_level; in slot_rmap_walk_init()
1461 iterator->start_gfn = start_gfn; in slot_rmap_walk_init()
1462 iterator->end_gfn = end_gfn; in slot_rmap_walk_init()
1464 rmap_walk_init_level(iterator, iterator->start_level); in slot_rmap_walk_init()
1467 static bool slot_rmap_walk_okay(struct slot_rmap_walk_iterator *iterator) in slot_rmap_walk_okay() argument
1469 return !!iterator->rmap; in slot_rmap_walk_okay()
1472 static void slot_rmap_walk_next(struct slot_rmap_walk_iterator *iterator) in slot_rmap_walk_next() argument
1474 if (++iterator->rmap <= iterator->end_rmap) { in slot_rmap_walk_next()
1475 iterator->gfn += (1UL << KVM_HPAGE_GFN_SHIFT(iterator->level)); in slot_rmap_walk_next()
1479 if (++iterator->level > iterator->end_level) { in slot_rmap_walk_next()
1480 iterator->rmap = NULL; in slot_rmap_walk_next()
1484 rmap_walk_init_level(iterator, iterator->level); in slot_rmap_walk_next()
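Note: slot_rmap_walk_init()/_okay()/_next() are the init/condition/step legs of the for_each_slot_rmap_range() macro. The macro itself produces no hits above because its parameter is named iter_, not iterator; its definition in this kernel should be close to:

	#define for_each_slot_rmap_range(slot_, start_level_, end_level_,	\
		   start_gfn, end_gfn, iter_)					\
		for (slot_rmap_walk_init(iter_, slot_, start_level_,		\
					 end_level_, start_gfn, end_gfn);	\
		     slot_rmap_walk_okay(iter_);				\
		     slot_rmap_walk_next(iter_))

The walk visits every rmap head in [start_gfn, end_gfn] at start_level, then repeats at each coarser level up to end_level; setting iterator->rmap to NULL (line 1480 above) is what makes slot_rmap_walk_okay() terminate the loop.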
1507 struct slot_rmap_walk_iterator iterator; in kvm_handle_hva_range() local
1532 &iterator) in kvm_handle_hva_range()
1533 ret |= handler(kvm, iterator.rmap, memslot, in kvm_handle_hva_range()
1534 iterator.gfn, iterator.level, data); in kvm_handle_hva_range()
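Note: the two hits above are the body of the range walk inside kvm_handle_hva_range(), the bridge between MMU-notifier hva ranges and per-gfn rmap handlers. A hedged reconstruction of the surrounding loop (gfn_start/gfn_end are assumed local names for the hva range clamped to the slot and converted to gfns):

	for_each_slot_rmap_range(memslot, PT_PAGE_TABLE_LEVEL,
				 PT_MAX_HUGEPAGE_LEVEL,
				 gfn_start, gfn_end - 1, &iterator)
		ret |= handler(kvm, iterator.rmap, memslot,
			       iterator.gfn, iterator.level, data);

where handler should have the signature

	int (*handler)(struct kvm *kvm, unsigned long *rmapp,
		       struct kvm_memory_slot *slot, gfn_t gfn,
		       int level, unsigned long data);

with callers passing e.g. kvm_unmap_rmapp or kvm_age_rmapp.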
2152 static void shadow_walk_init(struct kvm_shadow_walk_iterator *iterator, in shadow_walk_init() argument
2155 iterator->addr = addr; in shadow_walk_init()
2156 iterator->shadow_addr = vcpu->arch.mmu.root_hpa; in shadow_walk_init()
2157 iterator->level = vcpu->arch.mmu.shadow_root_level; in shadow_walk_init()
2159 if (iterator->level == PT64_ROOT_LEVEL && in shadow_walk_init()
2162 --iterator->level; in shadow_walk_init()
2164 if (iterator->level == PT32E_ROOT_LEVEL) { in shadow_walk_init()
2165 iterator->shadow_addr in shadow_walk_init()
2167 iterator->shadow_addr &= PT64_BASE_ADDR_MASK; in shadow_walk_init()
2168 --iterator->level; in shadow_walk_init()
2169 if (!iterator->shadow_addr) in shadow_walk_init()
2170 iterator->level = 0; in shadow_walk_init()
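Note: the assignment at line 2165 is truncated above because its continuation line contains no "iterator". In this kernel the PAE special case should read approximately:

	if (iterator->level == PT32E_ROOT_LEVEL) {
		/* root is a 4-entry PDPT kept in pae_root, not a full page */
		iterator->shadow_addr
			= vcpu->arch.mmu.pae_root[(addr >> 30) & 3];
		iterator->shadow_addr &= PT64_BASE_ADDR_MASK;
		--iterator->level;
		if (!iterator->shadow_addr)
			iterator->level = 0;	/* absent PDPTE: walk ends */
	}

i.e. with a 3-level PAE root the walk starts one level down, at the page directory selected by bits 31:30 of the address.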
2174 static bool shadow_walk_okay(struct kvm_shadow_walk_iterator *iterator) in shadow_walk_okay() argument
2176 if (iterator->level < PT_PAGE_TABLE_LEVEL) in shadow_walk_okay()
2179 iterator->index = SHADOW_PT_INDEX(iterator->addr, iterator->level); in shadow_walk_okay()
2180 iterator->sptep = ((u64 *)__va(iterator->shadow_addr)) + iterator->index; in shadow_walk_okay()
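Note: the return statements of shadow_walk_okay() carry no "iterator" and are filtered out; the whole function, reconstructed, should be roughly:

	static bool shadow_walk_okay(struct kvm_shadow_walk_iterator *iterator)
	{
		/* walked past the leaf level: stop */
		if (iterator->level < PT_PAGE_TABLE_LEVEL)
			return false;

		/* publish the index and spte pointer for the current level */
		iterator->index = SHADOW_PT_INDEX(iterator->addr, iterator->level);
		iterator->sptep	= ((u64 *)__va(iterator->shadow_addr)) + iterator->index;
		return true;
	}

so it is both the loop condition and the place where iterator->sptep is recomputed at each level.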
2184 static void __shadow_walk_next(struct kvm_shadow_walk_iterator *iterator, in __shadow_walk_next() argument
2187 if (is_last_spte(spte, iterator->level)) { in __shadow_walk_next()
2188 iterator->level = 0; in __shadow_walk_next()
2192 iterator->shadow_addr = spte & PT64_BASE_ADDR_MASK; in __shadow_walk_next()
2193 --iterator->level; in __shadow_walk_next()
2196 static void shadow_walk_next(struct kvm_shadow_walk_iterator *iterator) in shadow_walk_next() argument
2198 return __shadow_walk_next(iterator, *iterator->sptep); in shadow_walk_next()
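Note: these helpers back two iteration macros whose parameter is named _walker (hence no hits above). Hedged reconstructions:

	#define for_each_shadow_entry(_vcpu, _addr, _walker)		\
		for (shadow_walk_init(&(_walker), _vcpu, _addr);	\
		     shadow_walk_okay(&(_walker));			\
		     shadow_walk_next(&(_walker)))

	#define for_each_shadow_entry_lockless(_vcpu, _addr, _walker, spte)	\
		for (shadow_walk_init(&(_walker), _vcpu, _addr);		\
		     shadow_walk_okay(&(_walker)) &&				\
			({ spte = mmu_spte_get_lockless(_walker.sptep); 1; });	\
		     __shadow_walk_next(&(_walker), spte))

The lockless variant snapshots each spte with mmu_spte_get_lockless() and hands that snapshot to __shadow_walk_next(), so the walk never reads through iterator->sptep twice; the plain variant simply rereads *iterator->sptep in shadow_walk_next() (line 2198 above).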
2715 struct kvm_shadow_walk_iterator iterator; in __direct_map() local
2723 for_each_shadow_entry(vcpu, (u64)gfn << PAGE_SHIFT, iterator) { in __direct_map()
2724 if (iterator.level == level) { in __direct_map()
2725 mmu_set_spte(vcpu, iterator.sptep, ACC_ALL, in __direct_map()
2728 direct_pte_prefetch(vcpu, iterator.sptep); in __direct_map()
2733 drop_large_spte(vcpu, iterator.sptep); in __direct_map()
2734 if (!is_shadow_present_pte(*iterator.sptep)) { in __direct_map()
2735 u64 base_addr = iterator.addr; in __direct_map()
2737 base_addr &= PT64_LVL_ADDR_MASK(iterator.level); in __direct_map()
2739 sp = kvm_mmu_get_page(vcpu, pseudo_gfn, iterator.addr, in __direct_map()
2740 iterator.level - 1, in __direct_map()
2741 1, ACC_ALL, iterator.sptep); in __direct_map()
2743 link_shadow_page(iterator.sptep, sp, true); in __direct_map()
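Note: __direct_map() walks the shadow page table down to the target level: at the final level it installs the leaf spte (mmu_set_spte) and prefetches neighbours, while at intermediate levels it allocates and links a shadow page if none is present. The pseudo_gfn assignment between lines 2737 and 2739 is filtered out above; it should be simply

	pseudo_gfn = base_addr >> PAGE_SHIFT;

i.e. the gfn of the level-aligned guest address, used as the shadow page's identity in kvm_mmu_get_page().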
2902 struct kvm_shadow_walk_iterator iterator; in fast_page_fault() local
2914 for_each_shadow_entry_lockless(vcpu, gva, iterator, spte) in fast_page_fault()
2915 if (!is_shadow_present_pte(spte) || iterator.level < level) in fast_page_fault()
2927 sp = page_header(__pa(iterator.sptep)); in fast_page_fault()
2967 ret = fast_pf_fix_direct_spte(vcpu, sp, iterator.sptep, spte); in fast_page_fault()
2969 trace_fast_page_fault(vcpu, gva, error_code, iterator.sptep, in fast_page_fault()
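Note: fast_page_fault() uses the lockless walk to locate the last present spte on the path to gva, then tries to fix a write-protection fault with a single cmpxchg instead of taking mmu_lock. A hedged reconstruction of the walk shown at line 2914:

	walk_shadow_page_lockless_begin(vcpu);
	for_each_shadow_entry_lockless(vcpu, gva, iterator, spte)
		if (!is_shadow_present_pte(spte) || iterator.level < level)
			break;

After the loop, iterator.sptep points at the spte to patch and spte holds its snapshotted value, which is what fast_pf_fix_direct_spte() compares against (line 2967).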
3320 struct kvm_shadow_walk_iterator iterator; in walk_shadow_page_get_mmio_spte() local
3330 for (shadow_walk_init(&iterator, vcpu, addr), in walk_shadow_page_get_mmio_spte()
3331 leaf = root = iterator.level; in walk_shadow_page_get_mmio_spte()
3332 shadow_walk_okay(&iterator); in walk_shadow_page_get_mmio_spte()
3333 __shadow_walk_next(&iterator, spte)) { in walk_shadow_page_get_mmio_spte()
3334 spte = mmu_spte_get_lockless(iterator.sptep); in walk_shadow_page_get_mmio_spte()
3343 iterator.level); in walk_shadow_page_get_mmio_spte()
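Note: walk_shadow_page_get_mmio_spte() open-codes the for loop instead of using for_each_shadow_entry_lockless() because it also needs to capture the starting level (leaf = root = iterator.level) in the loop initializer. Each visited spte is recorded per level and checked for reserved bits; the fragment at line 3343 is the tail of a call that, in kernels of roughly this version, should look like

	reserved |= is_shadow_zero_bits_set(&vcpu->arch.mmu, spte,
					    iterator.level);

so a reserved-bit hit at any level of the walk can be reported as a bogus MMIO spte.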
4508 struct slot_rmap_walk_iterator iterator; in slot_handle_level_range() local
4512 end_gfn, &iterator) { in slot_handle_level_range()
4513 if (iterator.rmap) in slot_handle_level_range()
4514 flush |= fn(kvm, iterator.rmap); in slot_handle_level_range()
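Note: slot_handle_level_range() is the generic memslot rmap walker built on for_each_slot_rmap_range(); fn is a slot_level_handler, roughly

	typedef bool (*slot_level_handler)(struct kvm *kvm, unsigned long *rmap);

and the rest of the loop body (filtered out above, no "iterator" on those lines) flushes TLBs and cond_resched()s under mmu_lock pressure. The NULL check on iterator.rmap appears redundant, since slot_rmap_walk_okay() already guarantees a non-NULL rmap inside the loop.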