UASM_i_LW  172 arch/mips/kvm/entry.c  UASM_i_LW(p, tmp, offsetof(struct pt_regs, cp0_epc), frame);
UASM_i_LW  176 arch/mips/kvm/entry.c  UASM_i_LW(p, tmp, offsetof(struct pt_regs, cp0_cause), frame);
UASM_i_LW  263 arch/mips/kvm/entry.c  UASM_i_LW(&p, K0, offsetof(struct kvm_vcpu_arch, guest_ebase), K1);
UASM_i_LW  305 arch/mips/kvm/entry.c  UASM_i_LW(&p, T0, offsetof(struct kvm_vcpu_arch, pc), K1);
UASM_i_LW  321 arch/mips/kvm/entry.c  UASM_i_LW(&p, S0, (int)offsetof(struct kvm_vcpu, kvm) -
UASM_i_LW  323 arch/mips/kvm/entry.c  UASM_i_LW(&p, A0, offsetof(struct kvm, arch.gpa_mm.pgd), S0);
UASM_i_LW  369 arch/mips/kvm/entry.c  UASM_i_LW(&p, T0, offsetof(struct kvm_vcpu_arch, cop0), K1);
UASM_i_LW  370 arch/mips/kvm/entry.c  UASM_i_LW(&p, T0, offsetof(struct mips_coproc, reg[MIPS_CP0_STATUS][0]),
UASM_i_LW  389 arch/mips/kvm/entry.c  UASM_i_LW(&p, K0, 0, T3);
UASM_i_LW  400 arch/mips/kvm/entry.c  UASM_i_LW(&p, T2, uasm_rel_lo((long)&cpu_data[0].asid_mask), AT);
UASM_i_LW  413 arch/mips/kvm/entry.c  UASM_i_LW(&p, A0, (int)offsetof(struct mm_struct, pgd) -
UASM_i_LW  434 arch/mips/kvm/entry.c  UASM_i_LW(&p, i, offsetof(struct kvm_vcpu_arch, gprs[i]), K1);
UASM_i_LW  439 arch/mips/kvm/entry.c  UASM_i_LW(&p, K0, offsetof(struct kvm_vcpu_arch, hi), K1);
UASM_i_LW  442 arch/mips/kvm/entry.c  UASM_i_LW(&p, K0, offsetof(struct kvm_vcpu_arch, lo), K1);
UASM_i_LW  447 arch/mips/kvm/entry.c  UASM_i_LW(&p, K0, offsetof(struct kvm_vcpu_arch, gprs[K0]), K1);
UASM_i_LW  448 arch/mips/kvm/entry.c  UASM_i_LW(&p, K1, offsetof(struct kvm_vcpu_arch, gprs[K1]), K1);
UASM_i_LW  522 arch/mips/kvm/entry.c  UASM_i_LW(&p, K0, offsetof(struct kvm_vcpu, arch.gprs[K0]), K1);
UASM_i_LW  634 arch/mips/kvm/entry.c  UASM_i_LW(&p, S0, offsetof(struct kvm_vcpu, run), S1);
UASM_i_LW  675 arch/mips/kvm/entry.c  UASM_i_LW(&p, K0, uasm_rel_lo((long)&ebase), K0);
UASM_i_LW  713 arch/mips/kvm/entry.c  UASM_i_LW(&p, K0, offsetof(struct kvm_vcpu_arch, host_entryhi),
UASM_i_LW  724 arch/mips/kvm/entry.c  UASM_i_LW(&p, A0,
UASM_i_LW  768 arch/mips/kvm/entry.c  UASM_i_LW(&p, GP, offsetof(struct kvm_vcpu_arch, host_gp), K1);
UASM_i_LW  771 arch/mips/kvm/entry.c  UASM_i_LW(&p, SP, offsetof(struct kvm_vcpu_arch, host_stack), K1);
UASM_i_LW  876 arch/mips/kvm/entry.c  UASM_i_LW(&p, T0, offsetof(struct kvm_vcpu_arch, guest_ebase), K1);
UASM_i_LW  914 arch/mips/kvm/entry.c  UASM_i_LW(&p, K1, offsetof(struct kvm_vcpu_arch, host_stack), K1);
UASM_i_LW  928 arch/mips/kvm/entry.c  UASM_i_LW(&p, i, offsetof(struct pt_regs, regs[i]), K1);
UASM_i_LW  937 arch/mips/kvm/entry.c  UASM_i_LW(&p, RA, offsetof(struct pt_regs, regs[RA]), K1);
UASM_i_LW  399 arch/mips/mm/tlbex.c  UASM_i_LW(p, 1, offsetof(struct tlb_reg_save, a), K0);
UASM_i_LW  400 arch/mips/mm/tlbex.c  UASM_i_LW(p, 2, offsetof(struct tlb_reg_save, b), K0);
UASM_i_LW  680 arch/mips/mm/tlbex.c  UASM_i_LW(p, 1, scratchpad_offset(0), 0);
UASM_i_LW  722 arch/mips/mm/tlbex.c  UASM_i_LW(p, tmp, 0, pmd);
UASM_i_LW  771 arch/mips/mm/tlbex.c  UASM_i_LW(p, pte, 0, ptr); /* Needed because SC killed our PTE */
UASM_i_LW  954 arch/mips/mm/tlbex.c  UASM_i_LW(p, 1, scratchpad_offset(0), 0);
UASM_i_LW 1033 arch/mips/mm/tlbex.c  UASM_i_LW(p, ptr, 0, ptr);
UASM_i_LW 1039 arch/mips/mm/tlbex.c  UASM_i_LW(p, ptr, 0, ptr);
UASM_i_LW 1082 arch/mips/mm/tlbex.c  UASM_i_LW(p, tmp, pte_off_even, ptep); /* get even pte */
UASM_i_LW 1083 arch/mips/mm/tlbex.c  UASM_i_LW(p, ptep, pte_off_odd, ptep); /* get odd pte */
UASM_i_LW 1211 arch/mips/mm/tlbex.c  UASM_i_LW(p, ptr, 0, ptr);
UASM_i_LW 1227 arch/mips/mm/tlbex.c  UASM_i_LW(p, scratch, 0, ptr);
UASM_i_LW 1256 arch/mips/mm/tlbex.c  UASM_i_LW(p, even, 0, ptr); /* get even pte */
UASM_i_LW 1257 arch/mips/mm/tlbex.c  UASM_i_LW(p, odd, sizeof(pte_t), ptr); /* get odd pte */
UASM_i_LW 1279 arch/mips/mm/tlbex.c  UASM_i_LW(p, scratch, scratchpad_offset(0), 0);
UASM_i_LW 1281 arch/mips/mm/tlbex.c  UASM_i_LW(p, scratch, scratchpad_offset(0), 0);
UASM_i_LW 1360 arch/mips/mm/tlbex.c  UASM_i_LW(&p, htlb_info.huge_pte, 0, K1);
UASM_i_LW 1684 arch/mips/mm/tlbex.c  UASM_i_LW(p, pte, 0, ptr);
UASM_i_LW 2068 arch/mips/mm/tlbex.c  UASM_i_LW(p, wr.r2, 0, wr.r2);
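
Every hit above follows the same call shape, UASM_i_LW(buf, reg, offset, base): the MIPS micro-assembler appends a natural-word-size load (lw, or ld on 64-bit kernels) of reg from offset(base) to the instruction buffer that buf points into. As a minimal sketch of that pattern, the fragment below mirrors the arch/mips/kvm/entry.c:434 hit, emitting one load per guest GPR from the vcpu arch struct based at K1; the loop bounds and surrounding context are assumptions for illustration, not a verbatim kernel excerpt.

    /* Sketch only: assumes uasm headers and the kvm/entry.c register
     * aliases (K0, K1) are in scope, with p the uasm output cursor. */
    for (i = 1; i < 32; i++) {
            /* K0/K1 still hold the vcpu pointer at this point, so they are
             * restored separately (compare the entry.c:447/448 hits above). */
            if (i == K0 || i == K1)
                    continue;
            UASM_i_LW(&p, i, offsetof(struct kvm_vcpu_arch, gprs[i]), K1);
    }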