arch/x86/kvm/mmu.c:

#define for_each_shadow_entry_using_root(_vcpu, _root, _addr, _walker)     \
	for (shadow_walk_init_using_root(&(_walker), (_vcpu),              \
					 (_root), (_addr));                 \
	     shadow_walk_okay(&(_walker));                                  \
	     shadow_walk_next(&(_walker)))

#define for_each_shadow_entry(_vcpu, _addr, _walker)            \
	for (shadow_walk_init(&(_walker), _vcpu, _addr);        \
	     shadow_walk_okay(&(_walker));                      \
	     shadow_walk_next(&(_walker)))

#define for_each_shadow_entry_lockless(_vcpu, _addr, _walker, spte)     \
	for (shadow_walk_init(&(_walker), _vcpu, _addr);                 \
	     shadow_walk_okay(&(_walker)) &&                             \
		({ spte = mmu_spte_get_lockless(_walker.sptep); 1; });   \
	     __shadow_walk_next(&(_walker), spte))
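For context, here is a minimal usage sketch of the for_each_shadow_entry walker. The surrounding function find_leaf_sptep is hypothetical and not part of mmu.c; struct kvm_shadow_walk_iterator and is_shadow_present_pte are the existing KVM definitions the macro is normally paired with, and the walk is assumed to run with the MMU lock held.

/*
 * Hypothetical helper: walk the shadow page table for a guest address
 * and return a pointer to the last present SPTE encountered.
 * Assumes the caller holds the MMU lock (non-lockless walk).
 */
static u64 *find_leaf_sptep(struct kvm_vcpu *vcpu, gpa_t addr)
{
	struct kvm_shadow_walk_iterator iterator;
	u64 *sptep = NULL;

	/* Descend one level per iteration, from the root toward the leaf. */
	for_each_shadow_entry(vcpu, addr, iterator) {
		sptep = iterator.sptep;
		/* Stop as soon as the entry is not present. */
		if (!is_shadow_present_pte(*sptep))
			break;
	}
	return sptep;
}

The _lockless variant differs in that it samples each SPTE via mmu_spte_get_lockless() inside the loop condition and feeds that snapshot to __shadow_walk_next(), so the walk can run between walk_shadow_page_lockless_begin() and walk_shadow_page_lockless_end() without taking the MMU lock.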