orige   414  arch/powerpc/include/asm/kvm_host.h    u64 orige;
orige   581  arch/powerpc/kernel/asm-offsets.c      OFFSET(VCPU_SLB_E, kvmppc_slb, orige);
orige   420  arch/powerpc/kvm/book3s_64_mmu.c       slbe->orige = rb & (ESID_MASK | SLB_ESID_V);
orige   449  arch/powerpc/kvm/book3s_64_mmu.c       return slbe->orige;
orige   479  arch/powerpc/kvm/book3s_64_mmu.c       slbe->orige = 0;
orige   494  arch/powerpc/kvm/book3s_64_mmu.c       vcpu->arch.slb[i].orige = 0;
orige   317  arch/powerpc/kvm/book3s_64_mmu_hv.c    if (!(vcpu->arch.slb[i].orige & SLB_ESID_V))
orige   325  arch/powerpc/kvm/book3s_64_mmu_hv.c    if (((vcpu->arch.slb[i].orige ^ eaddr) & mask) == 0)
orige   441  arch/powerpc/kvm/book3s_hv.c           vcpu->arch.slb[r].orige, vcpu->arch.slb[r].origv);
orige  1545  arch/powerpc/kvm/book3s_hv.c           sregs->u.s.ppc64.slb[i].slbe = vcpu->arch.slb[i].orige;
orige  1564  arch/powerpc/kvm/book3s_hv.c           vcpu->arch.slb[j].orige = sregs->u.s.ppc64.slb[i].slbe;
orige  1450  arch/powerpc/kvm/book3s_pr.c           sregs->u.s.ppc64.slb[i].slbe = vcpu->arch.slb[i].orige | i;
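
Taken together, these hits show that kvmppc_slb.orige holds the raw, guest-visible ESID half of a software SLB entry (with origv holding the VSID half): it is written by the slbmte emulation in book3s_64_mmu.c, cleared on SLB invalidation, copied in and out through the sregs ioctls, and compared against effective addresses when the HV MMU looks up a translation in book3s_64_mmu_hv.c. Below is a minimal, self-contained sketch of that lookup step, modelled on the orige/origv checks at lines 317 and 325 above; the struct layout, constant values, and the helper name find_slbe are illustrative assumptions restated locally, not the kernel's exact definitions.

#include <stdint.h>
#include <stddef.h>
#include <stdio.h>

/* Illustrative Book3S SLB constants (assumed values, defined locally). */
#define SLB_ESID_V      0x0000000008000000UL   /* entry valid bit in the ESID word */
#define SLB_VSID_B_1T   0x4000000000000000UL   /* segment size: 1TB instead of 256MB */
#define ESID_MASK       0xfffffffff0000000UL   /* ESID bits of a 256MB segment */
#define ESID_MASK_1T    0xffffff0000000000UL   /* ESID bits of a 1TB segment */

/* Cut-down stand-in for struct kvmppc_slb: only the raw guest-written words. */
struct slb_entry {
	uint64_t orige;   /* ESID word as the guest wrote it (slbmte rb) */
	uint64_t origv;   /* VSID word as the guest wrote it (slbmte rs) */
};

/*
 * Find the SLB entry covering an effective address, mirroring the orige
 * checks listed above: skip invalid entries, choose the segment-size mask
 * from origv, then compare the ESID bits with XOR and mask.
 */
static struct slb_entry *find_slbe(struct slb_entry *slb, size_t nr,
				   uint64_t eaddr)
{
	for (size_t i = 0; i < nr; i++) {
		uint64_t mask;

		if (!(slb[i].orige & SLB_ESID_V))
			continue;

		mask = (slb[i].origv & SLB_VSID_B_1T) ? ESID_MASK_1T : ESID_MASK;

		if (((slb[i].orige ^ eaddr) & mask) == 0)
			return &slb[i];
	}
	return NULL;
}

int main(void)
{
	/* One valid 256MB segment covering EA 0x10000000-0x1fffffff. */
	struct slb_entry slb[2] = {
		{ .orige = 0x10000000UL | SLB_ESID_V, .origv = 0x1234UL << 12 },
		{ 0 },  /* invalid: SLB_ESID_V clear, never matched */
	};

	struct slb_entry *e = find_slbe(slb, 2, 0x10042000UL);
	printf("hit: %s\n", e ? "yes" : "no");
	return 0;
}

The XOR-and-mask comparison works because orige stores the ESID in the same bit positions the effective address uses, so any entry whose masked bits differ from the address produces a nonzero result; the valid and index bits written by slbmte sit below the 256MB boundary and are cleared by the mask.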