arch/x86/kvm/mmu.c, occurrences of _vcpu in the shadow-walk iterator macros:

229: #define for_each_shadow_entry_using_root(_vcpu, _root, _addr, _walker)     \
230: 	for (shadow_walk_init_using_root(&(_walker), (_vcpu),              \

235: #define for_each_shadow_entry(_vcpu, _addr, _walker)            \
236: 	for (shadow_walk_init(&(_walker), _vcpu, _addr);	\

240: #define for_each_shadow_entry_lockless(_vcpu, _addr, _walker, spte)	\
241: 	for (shadow_walk_init(&(_walker), _vcpu, _addr);		\
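These macros are how KVM's MMU code walks a guest address down the shadow page
table, one level per loop iteration, with the _walker argument being a
struct kvm_shadow_walk_iterator. The sketch below shows a typical use of the
lockless variant; it is a minimal illustration, not code copied from mmu.c.
The wrapper function and its name are hypothetical, while the iterator type
and the walk_shadow_page_lockless_begin()/walk_shadow_page_lockless_end() and
is_shadow_present_pte() helpers are assumed to be the ones defined alongside
these macros in arch/x86/kvm/mmu.c.

/*
 * Illustrative sketch only: walk the shadow page table for a guest-physical
 * address and return the last SPTE slot visited.  Assumes the helpers named
 * below from arch/x86/kvm/mmu.c; the function itself is hypothetical.
 */
static u64 *example_last_sptep(struct kvm_vcpu *vcpu, gpa_t addr)
{
	struct kvm_shadow_walk_iterator iterator;
	u64 *sptep = NULL;
	u64 spte;

	/* The lockless walk must run inside a lockless begin/end section. */
	walk_shadow_page_lockless_begin(vcpu);

	/* Descend one paging level per iteration; 'spte' is refreshed each step. */
	for_each_shadow_entry_lockless(vcpu, addr, iterator, spte) {
		sptep = iterator.sptep;
		if (!is_shadow_present_pte(spte))
			break;
	}

	walk_shadow_page_lockless_end(vcpu);
	return sptep;
}

The non-lockless for_each_shadow_entry() and for_each_shadow_entry_using_root()
variants are used the same way, but under mmu_lock and, for the latter, with an
explicitly supplied root rather than the vCPU's current one.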