Lines Matching refs:eaddr
70 asm volatile ("tlbie %0" : : "r" (pte->pte.eaddr) : "memory"); in kvmppc_mmu_invalidate_pte()
117 static u32 *kvmppc_mmu_get_pteg(struct kvm_vcpu *vcpu, u32 vsid, u32 eaddr, in kvmppc_mmu_get_pteg() argument
123 page = (eaddr & ~ESID_MASK) >> 12; in kvmppc_mmu_get_pteg()
149 u32 eaddr = orig_pte->eaddr; in kvmppc_mmu_map_page() local
169 vcpu->arch.mmu.esid_to_vsid(vcpu, orig_pte->eaddr >> SID_SHIFT, &vsid); in kvmppc_mmu_map_page()
172 kvmppc_mmu_map_segment(vcpu, eaddr); in kvmppc_mmu_map_page()
179 ((eaddr & ~ESID_MASK) >> VPN_SHIFT); in kvmppc_mmu_map_page()
187 pteg = kvmppc_mmu_get_pteg(vcpu, vsid, eaddr, primary); in kvmppc_mmu_map_page()
205 pteg0 = ((eaddr & 0x0fffffff) >> 22) | (vsid << 7) | PTE_V | in kvmppc_mmu_map_page()
254 orig_pte->eaddr, (ulong)pteg, vpn, in kvmppc_mmu_map_page()
313 int kvmppc_mmu_map_segment(struct kvm_vcpu *vcpu, ulong eaddr) in kvmppc_mmu_map_segment() argument
315 u32 esid = eaddr >> SID_SHIFT; in kvmppc_mmu_map_segment()
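
Taken together, these matches show the guest effective address (eaddr) being sliced into its segment, page, and hash-table fields: kvmppc_mmu_map_segment() takes the ESID, kvmppc_mmu_get_pteg() the page index within the segment, and kvmppc_mmu_map_page() the VPN and the abbreviated page index placed next to the VSID in the first PTE word. The standalone sketch below is not the kernel code itself; the SID_SHIFT, ESID_MASK and VPN_SHIFT values are assumptions based on the 32-bit Book3S definitions (see asm/kvm_book3s.h for the authoritative ones), and the eaddr/vsid inputs are arbitrary example values.

    /* Sketch only: reproduces the eaddr arithmetic from the matched lines. */
    #include <stdint.h>
    #include <stdio.h>

    #define SID_SHIFT  28           /* assumed: top 4 bits select the 256MB segment */
    #define ESID_MASK  0xf0000000u  /* assumed: mask covering the ESID bits         */
    #define VPN_SHIFT  12           /* assumed: 4K host page size                   */

    int main(void)
    {
            uint32_t eaddr = 0x12345678;  /* example guest effective address        */
            uint64_t vsid  = 0x00abcdef;  /* pretend esid_to_vsid() returned this   */

            /* kvmppc_mmu_map_segment(): which segment register the address uses */
            uint32_t esid = eaddr >> SID_SHIFT;

            /* kvmppc_mmu_get_pteg(): page index within the segment */
            uint32_t page = (eaddr & ~ESID_MASK) >> 12;

            /* kvmppc_mmu_map_page(): virtual page number fed into the hash */
            uint64_t vpn = (vsid << (SID_SHIFT - VPN_SHIFT)) |
                           ((eaddr & ~ESID_MASK) >> VPN_SHIFT);

            /* top 6 bits of the page index, i.e. the (eaddr & 0x0fffffff) >> 22
             * term that sits in the low bits of PTE word 0 beside vsid << 7 */
            uint32_t api = (eaddr & 0x0fffffff) >> 22;

            printf("esid=%#x page=%#x vpn=%#llx api=%#x\n",
                   esid, page, (unsigned long long)vpn, api);
            return 0;
    }

For eaddr = 0x12345678 this prints esid=0x1, page=0x2345, vpn=0xabcdef2345 and api=0x8, matching the field boundaries implied by the expressions in the listing above.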