gtlbe 215 arch/powerpc/kvm/e500.c struct kvm_book3e_206_tlb_entry *gtlbe)
gtlbe 217 arch/powerpc/kvm/e500.c return kvmppc_e500_get_sid(to_e500(vcpu), get_tlb_ts(gtlbe),
gtlbe 218 arch/powerpc/kvm/e500.c get_tlb_tid(gtlbe), get_cur_pr(vcpu), 0);
gtlbe 233 arch/powerpc/kvm/e500.c struct kvm_book3e_206_tlb_entry *gtlbe)
gtlbe 241 arch/powerpc/kvm/e500.c ts = get_tlb_ts(gtlbe);
gtlbe 242 arch/powerpc/kvm/e500.c tid = get_tlb_tid(gtlbe);
gtlbe 270 arch/powerpc/kvm/e500.c eaddr = get_tlb_eaddr(gtlbe);
gtlbe 283 arch/powerpc/kvm/e500.h struct kvm_book3e_206_tlb_entry *gtlbe);
gtlbe 287 arch/powerpc/kvm/e500.h #define kvmppc_e500_get_tlb_stid(vcpu, gtlbe) get_tlb_tid(gtlbe)
gtlbe 289 arch/powerpc/kvm/e500.h #define get_tlb_sts(gtlbe) (gtlbe->mas1 & MAS1_TS)
gtlbe 311 arch/powerpc/kvm/e500.h struct kvm_book3e_206_tlb_entry *gtlbe);
gtlbe 322 arch/powerpc/kvm/e500.h #define get_tlb_sts(gtlbe) (MAS1_TS)
gtlbe 180 arch/powerpc/kvm/e500_mmu.c struct kvm_book3e_206_tlb_entry *gtlbe)
gtlbe 184 arch/powerpc/kvm/e500_mmu.c size = get_tlb_bytes(gtlbe);
gtlbe 185 arch/powerpc/kvm/e500_mmu.c start = get_tlb_eaddr(gtlbe) & ~(size - 1);
gtlbe 194 arch/powerpc/kvm/e500_mmu.c struct kvm_book3e_206_tlb_entry *gtlbe)
gtlbe 199 arch/powerpc/kvm/e500_mmu.c if (!get_tlb_v(gtlbe))
gtlbe 202 arch/powerpc/kvm/e500_mmu.c size = get_tlb_bytes(gtlbe);
gtlbe 203 arch/powerpc/kvm/e500_mmu.c start = get_tlb_eaddr(gtlbe) & ~(size - 1);
gtlbe 214 arch/powerpc/kvm/e500_mmu.c struct kvm_book3e_206_tlb_entry *gtlbe =
gtlbe 217 arch/powerpc/kvm/e500_mmu.c if (unlikely(get_tlb_iprot(gtlbe)))
gtlbe 220 arch/powerpc/kvm/e500_mmu.c if (tlbsel == 1 && kvmppc_need_recalc_tlb1map_range(vcpu_e500, gtlbe))
gtlbe 223 arch/powerpc/kvm/e500_mmu.c gtlbe->mas1 = 0;
gtlbe 326 arch/powerpc/kvm/e500_mmu.c struct kvm_book3e_206_tlb_entry *gtlbe;
gtlbe 331 arch/powerpc/kvm/e500_mmu.c gtlbe = get_entry(vcpu_e500, tlbsel, esel);
gtlbe 334 arch/powerpc/kvm/e500_mmu.c vcpu->arch.shared->mas1 = gtlbe->mas1;
gtlbe 335 arch/powerpc/kvm/e500_mmu.c vcpu->arch.shared->mas2 = gtlbe->mas2;
gtlbe 336 arch/powerpc/kvm/e500_mmu.c vcpu->arch.shared->mas7_3 = gtlbe->mas7_3;
gtlbe 347 arch/powerpc/kvm/e500_mmu.c struct kvm_book3e_206_tlb_entry *gtlbe = NULL;
gtlbe 352 arch/powerpc/kvm/e500_mmu.c gtlbe = get_entry(vcpu_e500, tlbsel, esel);
gtlbe 357 arch/powerpc/kvm/e500_mmu.c if (gtlbe) {
gtlbe 362 arch/powerpc/kvm/e500_mmu.c vcpu->arch.shared->mas1 = gtlbe->mas1;
gtlbe 363 arch/powerpc/kvm/e500_mmu.c vcpu->arch.shared->mas2 = gtlbe->mas2;
gtlbe 364 arch/powerpc/kvm/e500_mmu.c vcpu->arch.shared->mas7_3 = gtlbe->mas7_3;
gtlbe 393 arch/powerpc/kvm/e500_mmu.c struct kvm_book3e_206_tlb_entry *gtlbe;
gtlbe 401 arch/powerpc/kvm/e500_mmu.c gtlbe = get_entry(vcpu_e500, tlbsel, esel);
gtlbe 403 arch/powerpc/kvm/e500_mmu.c if (get_tlb_v(gtlbe)) {
gtlbe 406 arch/powerpc/kvm/e500_mmu.c kvmppc_need_recalc_tlb1map_range(vcpu_e500, gtlbe))
gtlbe 410 arch/powerpc/kvm/e500_mmu.c gtlbe->mas1 = vcpu->arch.shared->mas1;
gtlbe 411 arch/powerpc/kvm/e500_mmu.c gtlbe->mas2 = vcpu->arch.shared->mas2;
gtlbe 413 arch/powerpc/kvm/e500_mmu.c gtlbe->mas2 &= 0xffffffffUL;
gtlbe 414 arch/powerpc/kvm/e500_mmu.c gtlbe->mas7_3 = vcpu->arch.shared->mas7_3;
gtlbe 416 arch/powerpc/kvm/e500_mmu.c trace_kvm_booke206_gtlb_write(vcpu->arch.shared->mas0, gtlbe->mas1,
gtlbe 417 arch/powerpc/kvm/e500_mmu.c gtlbe->mas2, gtlbe->mas7_3);
gtlbe 428 arch/powerpc/kvm/e500_mmu.c kvmppc_set_tlb1map_range(vcpu, gtlbe);
gtlbe 434 arch/powerpc/kvm/e500_mmu.c if (tlbe_is_host_safe(vcpu, gtlbe)) {
gtlbe 435 arch/powerpc/kvm/e500_mmu.c u64 eaddr = get_tlb_eaddr(gtlbe);
gtlbe 436 arch/powerpc/kvm/e500_mmu.c u64 raddr = get_tlb_raddr(gtlbe);
gtlbe 439 arch/powerpc/kvm/e500_mmu.c gtlbe->mas1 &= ~MAS1_TSIZE(~0);
gtlbe 440 arch/powerpc/kvm/e500_mmu.c gtlbe->mas1 |= MAS1_TSIZE(BOOK3E_PAGESZ_4K);
gtlbe 527 arch/powerpc/kvm/e500_mmu.c struct kvm_book3e_206_tlb_entry *gtlbe;
gtlbe 530 arch/powerpc/kvm/e500_mmu.c gtlbe = get_entry(vcpu_e500, tlbsel_of(index), esel_of(index));
gtlbe 531 arch/powerpc/kvm/e500_mmu.c pgmask = get_tlb_bytes(gtlbe) - 1;
gtlbe 533 arch/powerpc/kvm/e500_mmu.c return get_tlb_raddr(gtlbe) | (eaddr & pgmask);
gtlbe 142 arch/powerpc/kvm/e500_mmu_host.c struct kvm_book3e_206_tlb_entry *gtlbe,
gtlbe 149 arch/powerpc/kvm/e500_mmu_host.c stid = kvmppc_e500_get_tlb_stid(&vcpu_e500->vcpu, gtlbe);
gtlbe 187 arch/powerpc/kvm/e500_mmu_host.c struct kvm_book3e_206_tlb_entry *gtlbe =
gtlbe 234 arch/powerpc/kvm/e500_mmu_host.c kvmppc_e500_tlbil_one(vcpu_e500, gtlbe);
gtlbe 246 arch/powerpc/kvm/e500_mmu_host.c struct kvm_book3e_206_tlb_entry *gtlbe,
gtlbe 253 arch/powerpc/kvm/e500_mmu_host.c ref->flags |= (gtlbe->mas2 & MAS2_ATTRIB_MASK) | wimg;
gtlbe 258 arch/powerpc/kvm/e500_mmu_host.c if (tlbe_is_writable(gtlbe))
gtlbe 306 arch/powerpc/kvm/e500_mmu_host.c struct kvm_book3e_206_tlb_entry *gtlbe,
gtlbe 316 arch/powerpc/kvm/e500_mmu_host.c stlbe->mas1 = MAS1_TSIZE(tsize) | get_tlb_sts(gtlbe) | MAS1_VALID;
gtlbe 319 arch/powerpc/kvm/e500_mmu_host.c e500_shadow_mas3_attrib(gtlbe->mas7_3, pr);
gtlbe 323 arch/powerpc/kvm/e500_mmu_host.c u64 gvaddr, gfn_t gfn, struct kvm_book3e_206_tlb_entry *gtlbe,
gtlbe 389 arch/powerpc/kvm/e500_mmu_host.c tsize = (gtlbe->mas1 & MAS1_TSIZE_MASK) >>
gtlbe 428 arch/powerpc/kvm/e500_mmu_host.c tsize = (gtlbe->mas1 & MAS1_TSIZE_MASK) >>
gtlbe 493 arch/powerpc/kvm/e500_mmu_host.c kvmppc_e500_ref_setup(ref, gtlbe, pfn, wimg);
gtlbe 495 arch/powerpc/kvm/e500_mmu_host.c kvmppc_e500_setup_stlbe(&vcpu_e500->vcpu, gtlbe, tsize,
gtlbe 514 arch/powerpc/kvm/e500_mmu_host.c struct kvm_book3e_206_tlb_entry *gtlbe;
gtlbe 520 arch/powerpc/kvm/e500_mmu_host.c gtlbe = get_entry(vcpu_e500, 0, esel);
gtlbe 523 arch/powerpc/kvm/e500_mmu_host.c r = kvmppc_e500_shadow_map(vcpu_e500, get_tlb_eaddr(gtlbe),
gtlbe 524 arch/powerpc/kvm/e500_mmu_host.c get_tlb_raddr(gtlbe) >> PAGE_SHIFT,
gtlbe 525 arch/powerpc/kvm/e500_mmu_host.c gtlbe, 0, stlbe, ref);
gtlbe 529 arch/powerpc/kvm/e500_mmu_host.c write_stlbe(vcpu_e500, gtlbe, stlbe, stlbsel, sesel);
gtlbe 560 arch/powerpc/kvm/e500_mmu_host.c u64 gvaddr, gfn_t gfn, struct kvm_book3e_206_tlb_entry *gtlbe,
gtlbe 567 arch/powerpc/kvm/e500_mmu_host.c r = kvmppc_e500_shadow_map(vcpu_e500, gvaddr, gfn, gtlbe, 1, stlbe,
gtlbe 575 arch/powerpc/kvm/e500_mmu_host.c write_stlbe(vcpu_e500, gtlbe, stlbe, 0, 0);
gtlbe 581 arch/powerpc/kvm/e500_mmu_host.c write_stlbe(vcpu_e500, gtlbe, stlbe, 1, sesel);
gtlbe 591 arch/powerpc/kvm/e500_mmu_host.c struct kvm_book3e_206_tlb_entry *gtlbe, stlbe;
gtlbe 595 arch/powerpc/kvm/e500_mmu_host.c gtlbe = get_entry(vcpu_e500, tlbsel, esel);
gtlbe 605 arch/powerpc/kvm/e500_mmu_host.c kvmppc_e500_setup_stlbe(vcpu, gtlbe, BOOK3E_PAGESZ_4K,
gtlbe 607 arch/powerpc/kvm/e500_mmu_host.c write_stlbe(vcpu_e500, gtlbe, &stlbe, 0, 0);
gtlbe 613 arch/powerpc/kvm/e500_mmu_host.c kvmppc_e500_tlb1_map(vcpu_e500, eaddr, gfn, gtlbe, &stlbe,
gtlbe 56 arch/powerpc/kvm/e500mc.c struct kvm_book3e_206_tlb_entry *gtlbe)
gtlbe 63 arch/powerpc/kvm/e500mc.c ts = get_tlb_ts(gtlbe);
gtlbe 64 arch/powerpc/kvm/e500mc.c tid = get_tlb_tid(gtlbe);
gtlbe 68 arch/powerpc/kvm/e500mc.c eaddr = get_tlb_eaddr(gtlbe);
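The references above all operate on a guest TLB entry (`gtlbe`) of type `struct kvm_book3e_206_tlb_entry`, reading its MAS registers either directly (`gtlbe->mas1`, `gtlbe->mas2`, `gtlbe->mas7_3`) or through accessor macros such as `get_tlb_sts()` (e500.h:289). Below is a minimal, self-contained sketch of that shape, assuming only what the listing shows: the `mas8` field and the concrete `MAS1_*` bit values are illustrative placeholders, not copied from the kernel headers, and `get_tlb_sts()` is rewritten as an inline function purely for the standalone example.

```c
#include <stdint.h>
#include <stdio.h>

/* Sketch of the guest TLB entry the helpers above operate on.
 * Field names follow the listing; mas8 is an assumption. */
struct kvm_book3e_206_tlb_entry {
	uint32_t mas8;    /* assumed extra field, not visible in the listing */
	uint32_t mas1;    /* valid/TS/TID/TSIZE bits */
	uint64_t mas2;    /* effective address and attribute bits */
	uint64_t mas7_3;  /* real address and permission bits */
};

/* Illustrative bit positions only; the real MAS1_* macros live in the
 * PowerPC Book3E MMU headers and may differ. */
#define MAS1_VALID 0x80000000u
#define MAS1_TS    0x00001000u

/* Mirrors the accessor style seen at e500.h:289 in the listing. */
static inline uint32_t get_tlb_sts(const struct kvm_book3e_206_tlb_entry *gtlbe)
{
	return gtlbe->mas1 & MAS1_TS;
}

int main(void)
{
	struct kvm_book3e_206_tlb_entry gtlbe = { .mas1 = MAS1_VALID | MAS1_TS };

	printf("translation space bit: %s\n",
	       get_tlb_sts(&gtlbe) ? "set (AS1)" : "clear (AS0)");
	return 0;
}
```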