rmapp             100 arch/powerpc/include/asm/kvm_book3s_64.h #define for_each_nest_rmap_safe(pos, node, rmapp)			       \
rmapp             103 arch/powerpc/include/asm/kvm_book3s_64.h 	     (*(rmapp) = ((RMAP_NESTED_IS_SINGLE_ENTRY & ((u64) (node))) ?     \
rmapp             626 arch/powerpc/include/asm/kvm_book3s_64.h 			     unsigned long *rmapp, struct rmap_nested **n_rmap);
rmapp             627 arch/powerpc/include/asm/kvm_book3s_64.h extern void kvmhv_insert_nest_rmap(struct kvm *kvm, unsigned long *rmapp,
rmapp             629 arch/powerpc/include/asm/kvm_book3s_64.h extern void kvmhv_update_nest_rmap_rc_list(struct kvm *kvm, unsigned long *rmapp,
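
The header lines above declare the nested-guest reverse-map helpers and the for_each_nest_rmap_safe() walker, whose condition tests RMAP_NESTED_IS_SINGLE_ENTRY on the value it is about to treat as a node pointer. Below is a minimal userspace sketch (not kernel code) of the encoding that walker appears to rely on: an rmap word either carries one encoded entry tagged in its low bit, or the head of a list of rmap_nested-style nodes whose final "next" may again be a tagged encoded value. The tag bit and field layout here are assumptions for illustration only.

/* Self-contained sketch of the tagged single-entry vs. list encoding. */
#include <stdint.h>
#include <stdio.h>

#define DEMO_SINGLE_ENTRY 0x1ULL              /* assumed low tag bit */

struct demo_nested {                           /* stand-in for struct rmap_nested */
	struct demo_nested *next;
	uint64_t rmap;                         /* encoded (lpid, gpa) for one nested guest */
};

/* Walk one rmap word, whichever of the two forms it is currently in. */
static void demo_walk(uint64_t head)
{
	while (head) {
		if (head & DEMO_SINGLE_ENTRY) {
			/* Tagged: an encoded entry, and the end of the chain. */
			printf("entry %#llx\n",
			       (unsigned long long)(head & ~DEMO_SINGLE_ENTRY));
			return;
		}
		struct demo_nested *node = (struct demo_nested *)(uintptr_t)head;
		printf("entry %#llx\n", (unsigned long long)node->rmap);
		head = (uint64_t)(uintptr_t)node->next;
	}
}

int main(void)
{
	/* One nested mapping: the value lives in the word itself, tagged. */
	demo_walk(0x4000 | DEMO_SINGLE_ENTRY);

	/* Two mappings: a real node whose next still holds a tagged value. */
	struct demo_nested head = {
		.next = (struct demo_nested *)(uintptr_t)(0x8000 | DEMO_SINGLE_ENTRY),
		.rmap = 0x4000,
	};
	demo_walk((uint64_t)(uintptr_t)&head);
	return 0;
}
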
rmapp             814 arch/powerpc/kvm/book3s_64_mmu_hv.c 			      unsigned long *rmapp, unsigned long gfn)
rmapp             824 arch/powerpc/kvm/book3s_64_mmu_hv.c 		*rmapp &= ~(KVMPPC_RMAP_PRESENT | KVMPPC_RMAP_INDEX);
rmapp             831 arch/powerpc/kvm/book3s_64_mmu_hv.c 		*rmapp = (*rmapp & ~KVMPPC_RMAP_INDEX) | j;
rmapp             844 arch/powerpc/kvm/book3s_64_mmu_hv.c 		*rmapp |= rcbits << KVMPPC_RMAP_RC_SHIFT;
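
The kvmppc_unmap_hpte() lines above fold the referenced/changed (R/C) bits of a torn-down HPTE back into the rmap word ("*rmapp |= rcbits << KVMPPC_RMAP_RC_SHIFT"), so the age and dirty-tracking walks later in this listing can still see them. A minimal sketch of that idea; the bit values and shift below are assumptions, not the kernel's definitions.

#include <stdint.h>
#include <stdio.h>

#define DEMO_HPTE_R   0x100ULL               /* assumed "referenced" bit in the HPTE   */
#define DEMO_HPTE_C   0x080ULL               /* assumed "changed" bit in the HPTE      */
#define DEMO_RC_SHIFT 32                     /* assumed position of the cached RC bits */

/* Fold the R/C bits of a just-invalidated HPTE into the page's rmap word. */
static void demo_save_rc(uint64_t *rmapp, uint64_t hpte_r)
{
	uint64_t rcbits = hpte_r & (DEMO_HPTE_R | DEMO_HPTE_C);

	*rmapp |= rcbits << DEMO_RC_SHIFT;
}

int main(void)
{
	uint64_t rmap = 0;

	demo_save_rc(&rmap, DEMO_HPTE_R | DEMO_HPTE_C);   /* page was read and written */
	printf("rmap after teardown: %#llx\n", (unsigned long long)rmap);
	printf("page dirty: %s\n",
	       (rmap >> DEMO_RC_SHIFT) & DEMO_HPTE_C ? "yes" : "no");
	return 0;
}
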
rmapp             859 arch/powerpc/kvm/book3s_64_mmu_hv.c 	unsigned long *rmapp;
rmapp             861 arch/powerpc/kvm/book3s_64_mmu_hv.c 	rmapp = &memslot->arch.rmap[gfn - memslot->base_gfn];
rmapp             863 arch/powerpc/kvm/book3s_64_mmu_hv.c 		lock_rmap(rmapp);
rmapp             864 arch/powerpc/kvm/book3s_64_mmu_hv.c 		if (!(*rmapp & KVMPPC_RMAP_PRESENT)) {
rmapp             865 arch/powerpc/kvm/book3s_64_mmu_hv.c 			unlock_rmap(rmapp);
rmapp             874 arch/powerpc/kvm/book3s_64_mmu_hv.c 		i = *rmapp & KVMPPC_RMAP_INDEX;
rmapp             878 arch/powerpc/kvm/book3s_64_mmu_hv.c 			unlock_rmap(rmapp);
rmapp             884 arch/powerpc/kvm/book3s_64_mmu_hv.c 		kvmppc_unmap_hpte(kvm, i, memslot, rmapp, gfn);
rmapp             885 arch/powerpc/kvm/book3s_64_mmu_hv.c 		unlock_rmap(rmapp);
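
The kvm_unmap_rmapp() lines above follow a fixed discipline on the host-HPT rmap word: take the per-word lock, test KVMPPC_RMAP_PRESENT, read the chain head out of KVMPPC_RMAP_INDEX, then unlock. Below is a stand-alone sketch of that discipline using a compare-and-swap spin as a stand-in for lock_rmap()/unlock_rmap(); the bit positions are assumptions for illustration, since the real layout lives in a header not shown in this listing.

#include <stdatomic.h>
#include <stdint.h>
#include <stdio.h>

#define DEMO_RMAP_LOCK     (1ULL << 63)      /* assumed per-word lock bit            */
#define DEMO_RMAP_PRESENT  (1ULL << 32)      /* assumed "chain head valid" bit       */
#define DEMO_RMAP_INDEX    0xffffffffULL     /* assumed HPTE index of the chain head */

static void demo_lock_rmap(_Atomic uint64_t *rmapp)
{
	uint64_t old;

	do {
		old = atomic_load(rmapp) & ~DEMO_RMAP_LOCK;
	} while (!atomic_compare_exchange_weak(rmapp, &old, old | DEMO_RMAP_LOCK));
}

static void demo_unlock_rmap(_Atomic uint64_t *rmapp)
{
	atomic_fetch_and(rmapp, ~DEMO_RMAP_LOCK);
}

/* Return the index of the first HPTE mapping this page, or -1 if none. */
static long demo_first_hpte(_Atomic uint64_t *rmapp)
{
	long idx = -1;

	demo_lock_rmap(rmapp);
	if (atomic_load(rmapp) & DEMO_RMAP_PRESENT)
		idx = atomic_load(rmapp) & DEMO_RMAP_INDEX;
	demo_unlock_rmap(rmapp);
	return idx;
}

int main(void)
{
	_Atomic uint64_t rmap = DEMO_RMAP_PRESENT | 42;  /* chain head = HPTE 42 */

	printf("first HPTE: %ld\n", demo_first_hpte(&rmap));
	rmap = 0;                                        /* nothing mapped */
	printf("first HPTE: %ld\n", demo_first_hpte(&rmap));
	return 0;
}
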
rmapp             905 arch/powerpc/kvm/book3s_64_mmu_hv.c 	unsigned long *rmapp;
rmapp             908 arch/powerpc/kvm/book3s_64_mmu_hv.c 	rmapp = memslot->arch.rmap;
rmapp             921 arch/powerpc/kvm/book3s_64_mmu_hv.c 		if (*rmapp & KVMPPC_RMAP_PRESENT)
rmapp             923 arch/powerpc/kvm/book3s_64_mmu_hv.c 		++rmapp;
rmapp             934 arch/powerpc/kvm/book3s_64_mmu_hv.c 	unsigned long *rmapp;
rmapp             936 arch/powerpc/kvm/book3s_64_mmu_hv.c 	rmapp = &memslot->arch.rmap[gfn - memslot->base_gfn];
rmapp             938 arch/powerpc/kvm/book3s_64_mmu_hv.c 	lock_rmap(rmapp);
rmapp             939 arch/powerpc/kvm/book3s_64_mmu_hv.c 	if (*rmapp & KVMPPC_RMAP_REFERENCED) {
rmapp             940 arch/powerpc/kvm/book3s_64_mmu_hv.c 		*rmapp &= ~KVMPPC_RMAP_REFERENCED;
rmapp             943 arch/powerpc/kvm/book3s_64_mmu_hv.c 	if (!(*rmapp & KVMPPC_RMAP_PRESENT)) {
rmapp             944 arch/powerpc/kvm/book3s_64_mmu_hv.c 		unlock_rmap(rmapp);
rmapp             948 arch/powerpc/kvm/book3s_64_mmu_hv.c 	i = head = *rmapp & KVMPPC_RMAP_INDEX;
rmapp             959 arch/powerpc/kvm/book3s_64_mmu_hv.c 			unlock_rmap(rmapp);
rmapp             978 arch/powerpc/kvm/book3s_64_mmu_hv.c 	unlock_rmap(rmapp);
rmapp             997 arch/powerpc/kvm/book3s_64_mmu_hv.c 	unsigned long *rmapp;
rmapp             999 arch/powerpc/kvm/book3s_64_mmu_hv.c 	rmapp = &memslot->arch.rmap[gfn - memslot->base_gfn];
rmapp            1000 arch/powerpc/kvm/book3s_64_mmu_hv.c 	if (*rmapp & KVMPPC_RMAP_REFERENCED)
rmapp            1003 arch/powerpc/kvm/book3s_64_mmu_hv.c 	lock_rmap(rmapp);
rmapp            1004 arch/powerpc/kvm/book3s_64_mmu_hv.c 	if (*rmapp & KVMPPC_RMAP_REFERENCED)
rmapp            1007 arch/powerpc/kvm/book3s_64_mmu_hv.c 	if (*rmapp & KVMPPC_RMAP_PRESENT) {
rmapp            1008 arch/powerpc/kvm/book3s_64_mmu_hv.c 		i = head = *rmapp & KVMPPC_RMAP_INDEX;
rmapp            1019 arch/powerpc/kvm/book3s_64_mmu_hv.c 	unlock_rmap(rmapp);
rmapp            1048 arch/powerpc/kvm/book3s_64_mmu_hv.c static int kvm_test_clear_dirty_npages(struct kvm *kvm, unsigned long *rmapp)
rmapp            1058 arch/powerpc/kvm/book3s_64_mmu_hv.c 	lock_rmap(rmapp);
rmapp            1059 arch/powerpc/kvm/book3s_64_mmu_hv.c 	if (!(*rmapp & KVMPPC_RMAP_PRESENT)) {
rmapp            1060 arch/powerpc/kvm/book3s_64_mmu_hv.c 		unlock_rmap(rmapp);
rmapp            1064 arch/powerpc/kvm/book3s_64_mmu_hv.c 	i = head = *rmapp & KVMPPC_RMAP_INDEX;
rmapp            1091 arch/powerpc/kvm/book3s_64_mmu_hv.c 			unlock_rmap(rmapp);
rmapp            1125 arch/powerpc/kvm/book3s_64_mmu_hv.c 	unlock_rmap(rmapp);
rmapp            1151 arch/powerpc/kvm/book3s_64_mmu_hv.c 	unsigned long *rmapp;
rmapp            1154 arch/powerpc/kvm/book3s_64_mmu_hv.c 	rmapp = memslot->arch.rmap;
rmapp            1156 arch/powerpc/kvm/book3s_64_mmu_hv.c 		int npages = kvm_test_clear_dirty_npages(kvm, rmapp);
rmapp            1164 arch/powerpc/kvm/book3s_64_mmu_hv.c 		++rmapp;
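
The dirty-log lines above walk one rmap word per guest page ("rmapp = memslot->arch.rmap", then "++rmapp"), calling kvm_test_clear_dirty_npages() on each and recording the result. A small sketch of that harvest loop, with the test-and-clear step stubbed out; the names and the bitmap layout here are illustrative, not the kernel's.

#include <stdint.h>
#include <stdio.h>

#define DEMO_NPAGES 16

/* Stand-in for kvm_test_clear_dirty_npages(): here, the low bit means "dirty". */
static int demo_test_clear_dirty(uint64_t *rmapp)
{
	int dirty = *rmapp & 1;

	*rmapp &= ~1ULL;
	return dirty;
}

static void demo_harvest(uint64_t *rmap, unsigned long *bitmap, int npages)
{
	uint64_t *rmapp = rmap;

	for (int i = 0; i < npages; ++i) {
		if (demo_test_clear_dirty(rmapp))
			bitmap[i / 64] |= 1UL << (i % 64);   /* mark page i dirty */
		++rmapp;
	}
}

int main(void)
{
	uint64_t rmap[DEMO_NPAGES] = { [3] = 1, [9] = 1 };   /* pages 3 and 9 dirty */
	unsigned long bitmap[1] = { 0 };

	demo_harvest(rmap, bitmap, DEMO_NPAGES);
	printf("dirty bitmap: %#lx\n", bitmap[0]);           /* expect 0x208 */
	return 0;
}
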
rmapp            1296 arch/powerpc/kvm/book3s_64_mmu_hv.c 			unsigned long *rmapp;
rmapp            1297 arch/powerpc/kvm/book3s_64_mmu_hv.c 			rmapp = &memslot->arch.rmap[gfn - memslot->base_gfn];
rmapp            1299 arch/powerpc/kvm/book3s_64_mmu_hv.c 			lock_rmap(rmapp);
rmapp            1300 arch/powerpc/kvm/book3s_64_mmu_hv.c 			kvmppc_unmap_hpte(kvm, idx, memslot, rmapp, gfn);
rmapp            1301 arch/powerpc/kvm/book3s_64_mmu_hv.c 			unlock_rmap(rmapp);
rmapp             566 arch/powerpc/kvm/book3s_64_mmu_radix.c 		      unsigned long *rmapp, struct rmap_nested **n_rmap)
rmapp             645 arch/powerpc/kvm/book3s_64_mmu_radix.c 		if (rmapp && n_rmap)
rmapp             646 arch/powerpc/kvm/book3s_64_mmu_radix.c 			kvmhv_insert_nest_rmap(kvm, rmapp, n_rmap);
rmapp             697 arch/powerpc/kvm/book3s_64_mmu_radix.c 		if (rmapp && n_rmap)
rmapp             698 arch/powerpc/kvm/book3s_64_mmu_radix.c 			kvmhv_insert_nest_rmap(kvm, rmapp, n_rmap);
rmapp             723 arch/powerpc/kvm/book3s_64_mmu_radix.c 	if (rmapp && n_rmap)
rmapp             724 arch/powerpc/kvm/book3s_64_mmu_radix.c 		kvmhv_insert_nest_rmap(kvm, rmapp, n_rmap);
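
The radix PTE-insertion call sites above record the nested mapping only when both a host rmap word and a preallocated tracking node were supplied ("if (rmapp && n_rmap)"). The sketch below shows the general "preallocate up front, consume under the lock only if needed" ownership pattern those lines suggest: the insert step links the node in and clears the caller's pointer, otherwise the caller frees it. All names here are illustrative, and whether the kernel clears the pointer in exactly this way is not visible in this listing.

#include <stdio.h>
#include <stdlib.h>

struct demo_node {
	struct demo_node *next;
	unsigned long payload;
};

struct demo_list {
	struct demo_node *head;
};

/* Consume *nodep by linking it into the list; clear it so the caller knows. */
static void demo_insert(struct demo_list *list, struct demo_node **nodep)
{
	(*nodep)->next = list->head;
	list->head = *nodep;
	*nodep = NULL;
}

static int demo_fault_path(struct demo_list *list, int need_tracking)
{
	struct demo_node *node = calloc(1, sizeof(*node));

	if (!node)
		return -1;
	node->payload = 0x4000;

	/* ... install the PTE here; only then record the reverse mapping ... */
	if (need_tracking && node)
		demo_insert(list, &node);

	free(node);             /* no-op if demo_insert() consumed it */
	return 0;
}

int main(void)
{
	struct demo_list list = { NULL };

	demo_fault_path(&list, 1);
	demo_fault_path(&list, 0);
	printf("tracked entries: %d\n", list.head ? 1 : 0);
	free(list.head);
	return 0;
}
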
rmapp             990 arch/powerpc/kvm/book3s_64_mmu_radix.c 	unsigned long old, *rmapp;
rmapp             998 arch/powerpc/kvm/book3s_64_mmu_radix.c 		rmapp = &memslot->arch.rmap[gfn - memslot->base_gfn];
rmapp             999 arch/powerpc/kvm/book3s_64_mmu_radix.c 		kvmhv_update_nest_rmap_rc_list(kvm, rmapp, _PAGE_ACCESSED, 0,
rmapp            1031 arch/powerpc/kvm/book3s_64_mmu_radix.c 	unsigned long old, *rmapp;
rmapp            1043 arch/powerpc/kvm/book3s_64_mmu_radix.c 		rmapp = &memslot->arch.rmap[gfn - memslot->base_gfn];
rmapp            1044 arch/powerpc/kvm/book3s_64_mmu_radix.c 		kvmhv_update_nest_rmap_rc_list(kvm, rmapp, _PAGE_DIRTY, 0,
rmapp             759 arch/powerpc/kvm/book3s_hv_nested.c void kvmhv_insert_nest_rmap(struct kvm *kvm, unsigned long *rmapp,
rmapp             762 arch/powerpc/kvm/book3s_hv_nested.c 	struct llist_node *entry = ((struct llist_head *) rmapp)->first;
rmapp             767 arch/powerpc/kvm/book3s_hv_nested.c 	if (!(*rmapp)) {
rmapp             769 arch/powerpc/kvm/book3s_hv_nested.c 		*rmapp = new_rmap | RMAP_NESTED_IS_SINGLE_ENTRY;
rmapp             780 arch/powerpc/kvm/book3s_hv_nested.c 	rmap = *rmapp;
rmapp             782 arch/powerpc/kvm/book3s_hv_nested.c 		*rmapp = 0UL;
rmapp             783 arch/powerpc/kvm/book3s_hv_nested.c 	llist_add(&((*n_rmap)->list), (struct llist_head *) rmapp);
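
The kvmhv_insert_nest_rmap() lines above show the single-entry optimization: an empty word takes the encoded value itself tagged with RMAP_NESTED_IS_SINGLE_ENTRY; otherwise the word is cleared and reused as an llist head before the node is added. Below is a sketch of that insert path using the same illustrative encoding as the sketch near the top of this listing; on promotion, the old tagged value is tucked into the new node's next pointer, which is exactly what the walker's tag check tolerates. Names and bit choices are assumptions, not the kernel's.

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#define DEMO_SINGLE_ENTRY 0x1ULL

struct demo_nested {
	struct demo_nested *next;
	uint64_t rmap;                        /* encoded (lpid, gpa) value */
};

static void demo_insert(uint64_t *rmapp, struct demo_nested **nodep)
{
	uint64_t old = *rmapp;

	if (!old) {
		/* First mapping: store the value itself, tagged; no node consumed. */
		*rmapp = (*nodep)->rmap | DEMO_SINGLE_ENTRY;
		return;
	}
	/*
	 * Second and later mappings: push the node at the head. "old" is either
	 * a real node pointer or the tagged single value; either way it becomes
	 * the new node's next field, and the walker sorts it out via the tag bit.
	 */
	(*nodep)->next = (struct demo_nested *)(uintptr_t)old;
	*rmapp = (uint64_t)(uintptr_t)*nodep;
	*nodep = NULL;                        /* consumed; caller must not free it */
}

int main(void)
{
	uint64_t rmap = 0;
	struct demo_nested *a = calloc(1, sizeof(*a));
	struct demo_nested *b = calloc(1, sizeof(*b));

	a->rmap = 0x4000; b->rmap = 0x8000;
	demo_insert(&rmap, &a);               /* single entry; a stays with the caller */
	printf("after 1st insert: %#llx (tagged value)\n", (unsigned long long)rmap);
	demo_insert(&rmap, &b);               /* promoted to a list; b is consumed */
	printf("after 2nd insert: list head holds %#llx\n",
	       (unsigned long long)((struct demo_nested *)(uintptr_t)rmap)->rmap);
	free(a);                              /* never linked, so still ours to free */
	return 0;                             /* the listed node is torn down as in the next sketch */
}
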
rmapp             824 arch/powerpc/kvm/book3s_hv_nested.c void kvmhv_update_nest_rmap_rc_list(struct kvm *kvm, unsigned long *rmapp,
rmapp             828 arch/powerpc/kvm/book3s_hv_nested.c 	struct llist_node *entry = ((struct llist_head *) rmapp)->first;
rmapp             863 arch/powerpc/kvm/book3s_hv_nested.c static void kvmhv_remove_nest_rmap_list(struct kvm *kvm, unsigned long *rmapp,
rmapp             866 arch/powerpc/kvm/book3s_hv_nested.c 	struct llist_node *entry = llist_del_all((struct llist_head *) rmapp);
rmapp             904 arch/powerpc/kvm/book3s_hv_nested.c 		unsigned long rmap, *rmapp = &free->arch.rmap[page];
rmapp             908 arch/powerpc/kvm/book3s_hv_nested.c 		entry = llist_del_all((struct llist_head *) rmapp);
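
Both teardown paths above begin with llist_del_all() on the rmap word, detaching the whole chain in one step before walking and freeing it, so concurrent inserters never observe a half-freed list. A sketch of that pattern with the same illustrative encoding, using an atomic exchange as a stand-in for llist_del_all(); a trailing tagged value needs no freeing because no allocation backs a single tagged entry.

#include <stdatomic.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#define DEMO_SINGLE_ENTRY 0x1ULL

struct demo_nested {
	struct demo_nested *next;
	uint64_t rmap;
};

static void demo_free_rmap(_Atomic uint64_t *rmapp)
{
	/* Detach everything at once; the word reads as empty from now on. */
	uint64_t head = atomic_exchange(rmapp, 0);

	while (head && !(head & DEMO_SINGLE_ENTRY)) {
		struct demo_nested *node = (struct demo_nested *)(uintptr_t)head;

		head = (uint64_t)(uintptr_t)node->next;
		printf("freeing entry %#llx\n", (unsigned long long)node->rmap);
		free(node);
	}
}

int main(void)
{
	struct demo_nested *node = calloc(1, sizeof(*node));
	_Atomic uint64_t rmap;

	node->rmap = 0x4000;
	node->next = (struct demo_nested *)(uintptr_t)(0x8000 | DEMO_SINGLE_ENTRY);
	rmap = (uint64_t)(uintptr_t)node;

	demo_free_rmap(&rmap);
	printf("rmap word after teardown: %#llx\n",
	       (unsigned long long)atomic_load(&rmap));
	return 0;
}
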
rmapp            1272 arch/powerpc/kvm/book3s_hv_nested.c 	unsigned long *rmapp;
rmapp            1416 arch/powerpc/kvm/book3s_hv_nested.c 	rmapp = &memslot->arch.rmap[gfn - memslot->base_gfn];
rmapp            1418 arch/powerpc/kvm/book3s_hv_nested.c 				mmu_seq, gp->shadow_lpid, rmapp, &n_rmap);
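
A convention that recurs throughout this listing, including the nested page-fault lines just above, is that each memslot owns one rmap word per guest page, located by "&memslot->arch.rmap[gfn - memslot->base_gfn]". The closing sketch below shows just that indexing step with stand-in structure and field names; it is an illustration of the convention, not the kernel's kvm_memory_slot definition.

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

struct demo_memslot {
	uint64_t base_gfn;      /* first guest frame number covered by the slot */
	uint64_t npages;        /* number of guest pages in the slot            */
	uint64_t *rmap;         /* one reverse-map word per page                */
};

/* Return the rmap word for a gfn, or NULL if the slot does not cover it. */
static uint64_t *demo_gfn_to_rmap(struct demo_memslot *slot, uint64_t gfn)
{
	if (gfn < slot->base_gfn || gfn >= slot->base_gfn + slot->npages)
		return NULL;
	return &slot->rmap[gfn - slot->base_gfn];
}

int main(void)
{
	struct demo_memslot slot = {
		.base_gfn = 0x1000,
		.npages   = 4,
		.rmap     = calloc(4, sizeof(uint64_t)),
	};
	uint64_t *rmapp;

	if (!slot.rmap)
		return 1;
	rmapp = demo_gfn_to_rmap(&slot, 0x1002);
	if (rmapp)
		printf("gfn 0x1002 -> rmap slot %td\n", rmapp - slot.rmap);
	free(slot.rmap);
	return 0;
}
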