Cross-reference of MSR_PR uses in linux-4.4.14, grouped by directory and file; each entry gives the source line number, the code at that line, and the enclosing function where the source browser reported one.

/linux-4.4.14/arch/powerpc/kvm/

    book3s_32_mmu.c
        158   if (kvmppc_get_msr(vcpu) & MSR_PR) {  in kvmppc_mmu_book3s_32_xlate_bat()
        240   if ((sr_kp(sre) && (kvmppc_get_msr(vcpu) & MSR_PR)) ||  in kvmppc_mmu_book3s_32_xlate_pte()
        241   (sr_ks(sre) && !(kvmppc_get_msr(vcpu) & MSR_PR))  in kvmppc_mmu_book3s_32_xlate_pte()
        318   !(kvmppc_get_msr(vcpu) & MSR_PR)) {  in kvmppc_mmu_book3s_32_xlate()
        400   if (msr & MSR_PR)  in kvmppc_mmu_book3s_32_esid_to_vsid()

    book3s_64_mmu.c
        229   !(kvmppc_get_msr(vcpu) & MSR_PR)) {  in kvmppc_mmu_book3s_64_xlate()
        272   if ((kvmppc_get_msr(vcpu) & MSR_PR) && slbe->Kp)  in kvmppc_mmu_book3s_64_xlate()
        274   else if (!(kvmppc_get_msr(vcpu) & MSR_PR) && slbe->Ks)  in kvmppc_mmu_book3s_64_xlate()
        317   !(kvmppc_get_msr(vcpu) & MSR_PR))  in kvmppc_mmu_book3s_64_xlate()
        572   return mp_ea && !(kvmppc_get_msr(vcpu) & MSR_PR) &&  in segment_contains_magic_page()
        633   if (kvmppc_get_msr(vcpu) & MSR_PR)  in kvmppc_mmu_book3s_64_esid_to_vsid()
        643   !(kvmppc_get_msr(vcpu) & MSR_PR)) {  in kvmppc_mmu_book3s_64_esid_to_vsid()

    book3s_pr.c
        327   smsr |= MSR_ME | MSR_RI | MSR_IR | MSR_DR | MSR_PR | MSR_EE;  in kvmppc_recalc_shadow_msr()
        366   if ((kvmppc_get_msr(vcpu) & (MSR_PR|MSR_IR|MSR_DR)) !=  in kvmppc_set_msr_pr()
        367   (old_msr & (MSR_PR|MSR_IR|MSR_DR))) {  in kvmppc_set_msr_pr()
        372   if (!(msr & MSR_PR) && vcpu->arch.magic_page_pa) {  in kvmppc_set_msr_pr()
        391   !(old_msr & MSR_PR) && !(old_msr & MSR_SF) && (msr & MSR_SF)) {  in kvmppc_set_msr_pr()
        818   if (!(kvmppc_get_msr(vcpu) & MSR_PR))  in kvmppc_emulate_fac()
        1038  if (kvmppc_get_msr(vcpu) & MSR_PR) {  in kvmppc_handle_exit_pr()
        1096  !(kvmppc_get_msr(vcpu) & MSR_PR)) {  in kvmppc_handle_exit_pr()
        1128  } else if (!(kvmppc_get_msr(vcpu) & MSR_PR) &&  in kvmppc_handle_exit_pr()

    book3s_32_mmu_host.c
        96    if (kvmppc_get_msr(vcpu) & MSR_PR)  in find_sid_vsid()
        282   if (kvmppc_get_msr(vcpu) & MSR_PR)  in create_sid_map()

    book3s_64_mmu_host.c
        62    if (kvmppc_get_msr(vcpu) & MSR_PR)  in find_sid_vsid()
        235   if (kvmppc_get_msr(vcpu) & MSR_PR)  in create_sid_map()

    book3s_emulate.c
        84    if ((kvmppc_get_msr(vcpu) & MSR_PR) && level > PRIV_PROBLEM)  in spr_allowed()
        200   if ((kvmppc_get_msr(vcpu) & MSR_PR) ||  in kvmppc_core_emulate_op_pr()

    e500.h
        223   return !!(vcpu->arch.shared->msr & MSR_PR);  in get_cur_pr()

    booke.c
        401   crit = crit && !(vcpu->arch.shared->msr & MSR_PR);  in kvmppc_booke_irqprio_deliver()
        1094  if (vcpu->arch.shared->msr & (MSR_PR | MSR_GS)) {  in kvmppc_handle_exit()
        1198  if (!(vcpu->arch.shared->msr & MSR_PR)) {  in kvmppc_handle_exit()
        1212  if (!(vcpu->arch.shared->msr & MSR_PR) &&  in kvmppc_handle_exit()
        1233  if (!(vcpu->arch.shared->msr & MSR_PR) &&  in kvmppc_handle_exit()
        1941  if (!(vcpu->arch.shared->msr & MSR_PR) &&  in kvmppc_xlate()

    e500_mmu_host.c
        313   u32 pr = vcpu->arch.shared->msr & MSR_PR;  in kvmppc_e500_setup_stlbe()
        674   pr = vcpu->arch.shared->msr & MSR_PR;  in kvmppc_load_last_inst()

    powerpc.c
        334   !(kvmppc_get_msr(vcpu) & MSR_PR)) {  in kvmppc_st()
        373   !(kvmppc_get_msr(vcpu) & MSR_PR)) {  in kvmppc_ld()

    booke_interrupts.S
        90    andi. r4, r4, MSR_PR

    book3s.c
        119   crit = crit && !(kvmppc_get_msr(vcpu) & MSR_PR);  in kvmppc_critical_section()

    book3s_hv_rm_mmu.c
        958   key = (vcpu->arch.shregs.msr & MSR_PR) ? SLB_VSID_KP : SLB_VSID_KS;  in kvmppc_hpte_hv_fault()

    book3s_64_mmu_hv.c
        351   key = (vcpu->arch.shregs.msr & MSR_PR) ? SLB_VSID_KP : SLB_VSID_KS;  in kvmppc_mmu_book3s_64_hv_xlate()

    book3s_hv.c
        2720  !(vcpu->arch.shregs.msr & MSR_PR)) {  in kvmppc_vcpu_run_hv()

    book3s_hv_rmhandlers.S
        1884  andi. r0,r11,MSR_PR

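Almost every hit above in arch/powerpc/kvm/ is the same decision: read the guest MSR (via kvmppc_get_msr(), vcpu->arch.shared->msr, or vcpu->arch.shregs.msr, depending on the KVM flavour) and test MSR_PR to learn whether the guest was executing in problem (user) state, then refuse or redirect privileged behaviour accordingly; the HV MMU code also uses the bit to choose between the Kp and Ks storage-key checks. Below is a minimal sketch of the core test, not kernel code: the demo_* names are invented, and the only kernel-derived detail is MSR_PR's value (1 << 14, per reg.h's __MASK(MSR_PR_LG)).

/*
 * Hedged sketch of the guest privilege check that recurs in the KVM
 * files above.  "demo_" names are invented for illustration.
 */
#include <stdbool.h>
#include <stdint.h>

#define MSR_PR  (1ULL << 14)    /* problem state / privilege level */

struct demo_vcpu {
    uint64_t msr;               /* stand-in for the guest MSR kvmppc_get_msr() returns */
};

/* Guest in problem state => privileged SPRs/instructions must be refused. */
static bool demo_guest_priv_allowed(const struct demo_vcpu *vcpu)
{
    return !(vcpu->msr & MSR_PR);
}

In book3s_emulate.c's spr_allowed(), for example, a set MSR_PR combined with a privilege level above PRIV_PROBLEM causes the access to be rejected and, typically, reflected back to the guest as a program interrupt.
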
/linux-4.4.14/arch/powerpc/include/asm/

    reg_booke.h
        46    #define MSR_USER32 (MSR_ | MSR_PR | MSR_EE)
        50    #define MSR_USER (MSR_KERNEL|MSR_PR|MSR_EE)
        53    #define MSR_USER (MSR_KERNEL|MSR_PR|MSR_EE)

    ptrace.h
        107   #define user_mode(regs) (((regs)->msr & MSR_PR) != 0)

    reg.h
        86    #define MSR_PR __MASK(MSR_PR_LG) /* Problem State / Privilege Level */  (macro definition)
        128   #define MSR_USER32 (MSR_ | MSR_PR | MSR_EE)
        133   #define MSR_USER (MSR_KERNEL|MSR_PR|MSR_EE)

    exception-64s.h
        290   andi. r10,r12,MSR_PR; /* See if coming from user */ \

    ppc_asm.h
        62    andi. r10,r12,MSR_PR; /* Restore cr0 (coming from user) */ \

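reg.h:86 is where the bit itself is defined, and ptrace.h wraps it as user_mode(), which is the form most C-level checks elsewhere in this listing reduce to. Below is a small, self-contained sketch of that wrapper in use; the demo_pt_regs layout and the handler are invented stand-ins, and only the user_mode() macro body is copied from the ptrace.h entry above.

/*
 * Sketch of the user_mode() idiom from ptrace.h.  The register layout
 * and handler are reduced stand-ins, not the kernel's definitions.
 */
#include <stdint.h>
#include <stdio.h>

#define MSR_PR  (1ULL << 14)
#define user_mode(regs) (((regs)->msr & MSR_PR) != 0)   /* as in ptrace.h */

struct demo_pt_regs {
    uint64_t nip;   /* next instruction pointer */
    uint64_t msr;   /* saved machine state register */
};

static void demo_trap_handler(struct demo_pt_regs *regs)
{
    if (!user_mode(regs)) {
        /* Kernel-mode trap: the "not user-mode" branch seen in traps.c. */
        printf("kernel trap at 0x%llx\n", (unsigned long long)regs->nip);
        return;
    }
    /* User-mode trap: the kernel would normally queue a signal here. */
    printf("user trap at 0x%llx\n", (unsigned long long)regs->nip);
}
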
/linux-4.4.14/arch/powerpc/kernel/

    exceptions-64e.S
        76    andi. r3,r3,MSR_PR
        153   andi. r3,r3,MSR_PR
        295   andi. r10,r11,MSR_PR; /* save stack pointer */ \
        626   andi. r0,r12,MSR_PR;
        645   andi. r0,r12,MSR_PR;
        764   1: andi. r14,r11,MSR_PR; /* check for userspace again */
        828   1: andi. r14,r11,MSR_PR; /* check for userspace again */
        1059  andi. r6,r10,MSR_PR

    head_booke.h
        42    andi. r11, r11, MSR_PR; /* check whether user or kernel */\
        130   andi. r11,r11,MSR_PR; \

    entry_64.S
        62    andi. r10,r12,MSR_PR
        221   andi. r6,r8,MSR_PR
        660   andi. r3,r3,MSR_PR
        883   andi. r0,r3,MSR_PR

    head_40x.S
        113   andi. r11,r11,MSR_PR; \
        152   andi. r11,r11,MSR_PR; \
        696   andi. r10,r9,MSR_IR|MSR_PR /* check supervisor + MMU off */

    entry_32.S
        143   andi. r2,r9,MSR_PR
        754   andi. r0,r3,MSR_PR
        999   andi. r3,r3,MSR_PR; \

    head_8xx.S
        142   andi. r11,r11,MSR_PR; \

    head_32.S
        258   andi. r11,r11,MSR_PR; \

    process.c
        969   {MSR_PR, "PR"},

    traps.c
        1161  if (!(regs->msr & MSR_PR) && /* not user-mode */  in program_check_exception()

    exceptions-64s.S
        1405  andi. r11,r12,MSR_PR /* See if coming from user. */

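The assembly references in arch/powerpc/kernel/ are nearly all the same exception-entry step: andi. the saved MSR image against MSR_PR and branch on the result to decide whether the exception came from user space, which in turn decides stack selection and how much register state must be saved or restored. The following is a purely illustrative C paraphrase of that branch, with invented names; the real code is the assembly listed above.

/*
 * Illustrative C paraphrase of "andi. rX,rY,MSR_PR" at exception entry.
 * The enum and function are invented for the sketch.
 */
#include <stdint.h>

#define MSR_PR  (1UL << 14)

enum demo_entry_action {
    DEMO_STAY_ON_KERNEL_STACK,      /* trap from kernel: keep the current stack pointer */
    DEMO_SWITCH_TO_KERNEL_STACK,    /* trap from user: load the task's kernel stack */
};

static enum demo_entry_action demo_classify_entry(uint64_t saved_msr)
{
    /* andi. rX,rY,MSR_PR ; beq <from_kernel> */
    if (saved_msr & MSR_PR)
        return DEMO_SWITCH_TO_KERNEL_STACK;
    return DEMO_STAY_ON_KERNEL_STACK;
}
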
/linux-4.4.14/arch/powerpc/xmon/

    xmon.c
        454   if ((regs->msr & (MSR_IR|MSR_PR|MSR_64BIT)) == (MSR_IR|MSR_64BIT))  in xmon_core()
        586   if ((regs->msr & (MSR_IR|MSR_PR|MSR_64BIT)) == (MSR_IR|MSR_64BIT)) {  in xmon_core()
        636   if ((regs->msr & (MSR_IR|MSR_PR|MSR_64BIT)) != (MSR_IR|MSR_64BIT))  in xmon_bpt()
        667   if ((regs->msr & (MSR_IR|MSR_PR|MSR_64BIT)) != (MSR_IR|MSR_64BIT))  in xmon_break_match()
        677   if ((regs->msr & (MSR_IR|MSR_PR|MSR_64BIT)) != (MSR_IR|MSR_64BIT))  in xmon_iabr_match()
        702   if ((regs->msr & (MSR_IR|MSR_PR|MSR_64BIT)) == (MSR_IR|MSR_64BIT)) {  in xmon_fault_handler()
        1003  if ((regs->msr & (MSR_64BIT|MSR_PR|MSR_IR)) == (MSR_64BIT|MSR_IR)) {  in do_step()
        1468  if (regs->msr & MSR_PR)  in print_bug_trap()

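xmon applies a stricter predicate than a bare user/kernel test: most of its handlers bail out unless the interrupted context is 64-bit kernel code with instruction relocation enabled, i.e. MSR_IR and MSR_64BIT set while MSR_PR is clear. A sketch of that predicate follows; the helper name is invented, and the bit positions other than MSR_PR (MSR_SF/64BIT at bit 63, MSR_IR at bit 5) are stated as assumptions from the conventional MSR layout rather than copied from this listing.

/*
 * Sketch of xmon's recoverability test.  Helper name is invented; bit
 * positions for MSR_64BIT and MSR_IR are assumptions, not copied defs.
 */
#include <stdbool.h>
#include <stdint.h>

#define MSR_64BIT   (1ULL << 63)    /* MSR_SF on 64-bit builds */
#define MSR_PR      (1ULL << 14)
#define MSR_IR      (1ULL << 5)

/* True only for 64-bit kernel context with the instruction MMU enabled. */
static bool demo_xmon_context_ok(uint64_t msr)
{
    return (msr & (MSR_IR | MSR_PR | MSR_64BIT)) == (MSR_IR | MSR_64BIT);
}
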
/linux-4.4.14/arch/powerpc/lib/

    sstep.c
        709   if (regs->msr & MSR_PR)  in analyse_instr()
        944   if (regs->msr & MSR_PR)  in analyse_instr()
        950   if (regs->msr & MSR_PR)  in analyse_instr()
        958   if (regs->msr & MSR_PR)  in analyse_instr()
        1915  !(regs->msr & MSR_PR) &&  in emulate_step()

/linux-4.4.14/drivers/misc/cxl/

    cxl.h
        172   #define CXL_PSL_SR_An_PR MSR_PR /* Problem state, GA1: 1 */

/linux-4.4.14/arch/powerpc/perf/

    core-book3s.c
        224   if (regs->msr & MSR_PR)  in perf_flags_from_msr()

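perf uses the same bit to attribute a sample: with MSR_PR set in the sampled MSR, the event is counted as user-level, otherwise as kernel (the real perf_flags_from_msr() also separates out hypervisor samples, which falls outside this MSR_PR listing). Below is a reduced sketch with invented names and return values, not perf's actual constants.

/*
 * Reduced sketch of perf's user/kernel attribution on MSR_PR.  The enum
 * stands in conceptually for perf's PERF_RECORD_MISC_* values.
 */
#include <stdint.h>

#define MSR_PR  (1UL << 14)

enum demo_sample_level { DEMO_SAMPLE_KERNEL, DEMO_SAMPLE_USER };

static enum demo_sample_level demo_sample_level_from_msr(uint64_t msr)
{
    return (msr & MSR_PR) ? DEMO_SAMPLE_USER : DEMO_SAMPLE_KERNEL;
}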