Searched refs:MSR_VEC (Results 1 – 14 of 14) sorted by relevance
/linux-4.4.14/arch/powerpc/kernel/ |
D | vector.S |
    19   oris   r5,r6,MSR_VEC@h
    44   oris   r3,r3,MSR_VEC@h
    83   oris   r5,r5,MSR_VEC@h
    114  lis    r10,MSR_VEC@h
    134  oris   r9,r9,MSR_VEC@h
    138  oris   r12,r12,MSR_VEC@h
    159  andis. r4,r3,MSR_VEC@h
    161  oris   r3,r3,MSR_VEC@h
    175  oris   r5,r5,MSR_VEC@h
    196  lis    r3,(MSR_VEC|MSR_VSX)@h
    [all …]
|
D | signal_64.c |
    123  msr |= MSR_VEC;                                                      in setup_sigcontext()
    227  if (msr & MSR_VEC)                                                   in setup_tm_sigcontexts()
    239  msr |= MSR_VEC;                                                      in setup_tm_sigcontexts()
    247  if (msr & MSR_VEC)                                                   in setup_tm_sigcontexts()
    366  regs->msr &= ~(MSR_FP | MSR_FE0 | MSR_FE1 | MSR_VEC | MSR_VSX);      in restore_sigcontext()
    375  if (v_regs != NULL && (msr & MSR_VEC) != 0)                          in restore_sigcontext()
    485  regs->msr &= ~(MSR_FP | MSR_FE0 | MSR_FE1 | MSR_VEC | MSR_VSX);      in restore_tm_sigcontexts()
    498  if (v_regs != NULL && tm_v_regs != NULL && (msr & MSR_VEC) != 0) {   in restore_tm_sigcontexts()
    555  if (msr & MSR_VEC) {                                                 in restore_tm_sigcontexts()
    557  regs->msr |= MSR_VEC;                                                in restore_tm_sigcontexts()
|
D | process.c |
    176  if (current->thread.regs && (current->thread.regs->msr & MSR_VEC))   in enable_kernel_altivec()
    194  if (tsk->thread.regs->msr & MSR_VEC) {                               in flush_altivec_to_thread()
    547  if (msr_diff & MSR_VEC)                                              in tm_reclaim_thread()
    551  msr_diff &= MSR_FP | MSR_VEC | MSR_VSX | MSR_FE0 | MSR_FE1;          in tm_reclaim_thread()
    700  if (msr & MSR_VEC) {                                                 in tm_recheckpoint_new_task()
    702  new->thread.regs->msr |= MSR_VEC;                                    in tm_recheckpoint_new_task()
    745  msr_diff &= MSR_FP | MSR_VEC | MSR_VSX;                              in restore_tm_state()
    751  if (msr_diff & MSR_VEC) {                                            in restore_tm_state()
    810  if (prev->thread.regs && (prev->thread.regs->msr & MSR_VEC))         in __switch_to()
    836  new->thread.regs->msr |= MSR_VEC;                                    in __switch_to()
    [all …]
|
D | signal_32.c |
    430  msr |= MSR_VEC;          in save_user_regs()
    552  if (msr & MSR_VEC) {     in save_tm_user_regs()
    567  msr |= MSR_VEC;          in save_tm_user_regs()
    580  if (msr & MSR_VEC) {     in save_tm_user_regs()
    704  regs->msr &= ~MSR_VEC;   in restore_user_regs()
    705  if (msr & MSR_VEC) {     in restore_user_regs()
    811  regs->msr &= ~MSR_VEC;   in restore_tm_user_regs()
    812  if (msr & MSR_VEC) {     in restore_tm_user_regs()
    903  if (msr & MSR_VEC) {     in restore_tm_user_regs()
    905  regs->msr |= MSR_VEC;    in restore_tm_user_regs()
|
D | tm.S |
    126  oris   r15, r15, MSR_VEC@h
    150  andis. r0, r4, MSR_VEC@h
    338  lis    r5, MSR_VEC@h
    357  andis. r0, r4, MSR_VEC@h
|
D | traps.c |
    1489  if (regs->msr & MSR_VEC) {                                      in fp_unavailable_tm()
    1506  regs->msr |= MSR_VEC;                                           in altivec_unavailable_tm()
    1507  tm_recheckpoint(&current->thread, MSR_VEC);                     in altivec_unavailable_tm()
    1534  if ((orig_msr & (MSR_FP | MSR_VEC)) == (MSR_FP | MSR_VEC)) {    in vsx_unavailable_tm()
    1542  regs->msr |= MSR_VEC | MSR_FP | current->thread.fpexc_mode |    in vsx_unavailable_tm()
    1552  if (orig_msr & MSR_VEC)                                         in vsx_unavailable_tm()
|
D | entry_64.S |
    465  oris r0,r0,MSR_VEC@h    /* Disable altivec */
|
D | entry_32.S |
    613  oris r0,r0,MSR_VEC@h    /* Disable altivec */
|
/linux-4.4.14/arch/powerpc/kvm/ |
D | book3s_pr.c |
    133   kvmppc_giveup_ext(vcpu, MSR_FP | MSR_VEC | MSR_VSX);   in kvmppc_core_vcpu_put_pr()
    657   msr |= MSR_FP | MSR_VEC;                                in kvmppc_giveup_ext()
    679   if (msr & MSR_VEC) {                                    in kvmppc_giveup_ext()
    680   if (current->thread.regs->msr & MSR_VEC)                in kvmppc_giveup_ext()
    738   msr = MSR_FP | MSR_VEC | MSR_VSX;                       in kvmppc_handle_ext()
    758   if (msr & MSR_VEC) {                                    in kvmppc_handle_ext()
    794   if (lost_ext & MSR_VEC) {                               in kvmppc_handle_lost_ext()
    1168  ext_msr = MSR_VEC;                                      in kvmppc_handle_exit_pr()
    1495  if (current->thread.regs->msr & MSR_VEC)                in kvmppc_vcpu_run_pr()
    1517  kvmppc_giveup_ext(vcpu, MSR_FP | MSR_VEC | MSR_VSX);    in kvmppc_vcpu_run_pr()
|
D | booke.c |
    182  if (!(current->thread.regs->msr & MSR_VEC)) {   in kvmppc_load_guest_altivec()
    186  current->thread.regs->msr |= MSR_VEC;           in kvmppc_load_guest_altivec()
    200  if (current->thread.regs->msr & MSR_VEC)        in kvmppc_save_guest_altivec()
|
D | book3s_hv_rmhandlers.S |
    667   oris r5, r5, (MSR_VEC | MSR_VSX)@h
    2574  oris r8,r8,MSR_VEC@h
    2609  oris r8,r8,MSR_VEC@h
|
/linux-4.4.14/arch/powerpc/lib/ |
D | ldstfp.S |
    231  oris r7,r6,MSR_VEC@h
    259  oris r7,r6,MSR_VEC@h
|
D | sstep.c |
    1378  if (!(regs->msr & MSR_VEC))   in analyse_instr()
    1385  if (!(regs->msr & MSR_VEC))   in analyse_instr()
|
/linux-4.4.14/arch/powerpc/include/asm/ |
D | reg.h |
    78   #define MSR_VEC __MASK(MSR_VEC_LG)   /* Enable AltiVec */   macro
|
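
The hits above all reduce to testing, setting, or clearing the AltiVec-enable bit in an MSR image. Below is a minimal user-space sketch of those bit operations; the MSR_VEC_LG value and the standalone main() are assumptions for illustration only, since in the kernel the macros come from arch/powerpc/include/asm/reg.h and the msr field lives in struct pt_regs.

    /*
     * Minimal sketch (not taken from the tree): the common MSR_VEC patterns
     * seen in the results, i.e. test / set / clear of the AltiVec-enable bit
     * in an MSR image.  MSR_VEC_LG below is an assumed bit position used
     * purely for illustration.
     */
    #include <stdio.h>

    #define MSR_VEC_LG  25                      /* assumed bit position, illustration only */
    #define __MASK(x)   (1UL << (x))
    #define MSR_VEC     __MASK(MSR_VEC_LG)      /* Enable AltiVec */

    int main(void)
    {
        unsigned long msr = 0;

        msr |= MSR_VEC;                         /* set, as in setup_sigcontext() */
        if (msr & MSR_VEC)                      /* test, as in flush_altivec_to_thread() */
            printf("AltiVec enabled: msr = 0x%08lx\n", msr);

        msr &= ~MSR_VEC;                        /* clear, as in restore_user_regs() */
        return 0;
    }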