/linux-4.4.14/arch/powerpc/platforms/52xx/ |
D | mpc52xx_sleep.S | 20 lwz r8, 0x14(r6) /* intr->main_mask */ 21 ori r8, r8, 0x1 22 xori r8, r8, 0x1 23 stw r8, 0x14(r6) 27 li r8, 0x1 28 stw r8, 0x40(r6) /* intr->main_emulate */ 68 lwz r8, 0x4(r4) /* sdram->ctrl */ 70 oris r8, r8, 0x8000 /* mode_en */ 71 stw r8, 0x4(r4) 74 ori r8, r8, 0x0002 /* soft_pre */ [all …]
|
D | lite5200_sleep.S | 45 mr r8, r4 /* save MBAR va */ 108 lwz r4, SDRAM_CTRL(r8) 112 stw r4, SDRAM_CTRL(r8) 116 stw r4, SDRAM_CTRL(r8) 121 stw r4, SDRAM_CTRL(r8) 134 stw r4, SDRAM_CTRL(r8) 142 lwz r4, CDM_CE(r8) 145 stw r4, CDM_CE(r8) 155 stb r4, GPIOW_GPIOE(r8) /* enable gpio_wkup1 */ 158 stb r4, GPIOW_DVO(r8) /* "output" high */ [all …]
|
/linux-4.4.14/arch/avr32/lib/ |
D | findbit.S | 19 1: ld.w r8, r12[0] 20 com r8 33 lsr r8, r10, 5 37 lsl r8, 2 38 add r12, r8 43 ld.w r8, r12[0] 44 com r8 46 lsr r8, r8, r10 55 1: ld.w r8, r12[0] 56 com r8 [all …]
|
D | memcpy.S | 31 3: ld.w r8, r11++ 33 st.w r12++, r8 42 ld.ub r8, r11++ 43 st.b r12++, r8 44 ld.ub r8, r11++ 45 st.b r12++, r8 46 ld.ub r8, r11++ 47 st.b r12++, r8 57 ld.ub r8, r11++ 58 st.b r12++, r8 [all …]
|
D | copy_user.S | 29 branch_if_kernel r8, __copy_user 30 ret_if_privileged r8, r11, r10, r10 37 branch_if_kernel r8, __copy_user 38 ret_if_privileged r8, r12, r10, r10 53 10: ld.w r8, r11++ 54 11: st.w r12++, r8 66 20: ld.ub r8, r11++ 67 21: st.b r12++, r8 70 22: ld.ub r8, r11++ 71 23: st.b r12++, r8 [all …]
|
D | io-readsb.S | 11 1: ld.ub r8, r12[0] 13 st.b r11++, r8 33 1: ldins.b r8:t, r12[0] 34 ldins.b r8:u, r12[0] 35 ldins.b r8:l, r12[0] 36 ldins.b r8:b, r12[0] 37 st.w r11++, r8 44 3: ld.ub r8, r12[0] 46 st.b r11++, r8
|
D | io-writesb.S | 11 1: ld.ub r8, r11++ 13 st.b r12[0], r8 33 1: ld.w r8, r11++ 34 bfextu r9, r8, 24, 8 36 bfextu r9, r8, 16, 8 38 bfextu r9, r8, 8, 8 40 st.b r12[0], r8 47 3: ld.ub r8, r11++ 49 st.b r12[0], r8
|
D | clear_user.S | 17 branch_if_kernel r8, __clear_user 18 ret_if_privileged r8, r12, r11, r11 24 mov r8, 0 31 10: st.w r12++, r8 41 11: st.h r12++, r8 44 12: st.b r12++, r8 53 13: st.b r12++, r8 55 14: st.b r12++, r8 57 15: st.b r12++, r8
|
D | memset.S | 28 mov r8, r12 38 4: st.w r8++, r11 54 st.b r8++, r11 55 st.b r8++, r11 58 6: st.b r8++, r11 67 st.b r8++, r11 68 st.b r8++, r11 69 st.b r8++, r11
|
D | io-readsw.S | 14 ld.uh r8, r12[0] 16 st.h r11++, r8 32 1: ldins.h r8:t, r12[0] 33 ldins.h r8:b, r12[0] 34 st.w r11++, r8 41 ld.uh r8, r12[0] 42 st.h r11++, r8
|
D | io-writesw.S | 10 ld.uh r8, r11++ 12 st.h r12[0], r8 26 1: ld.w r8, r11++ 27 bfextu r9, r8, 16, 16 29 st.h r12[0], r8 36 ld.uh r8, r11++ 37 st.h r12[0], r8
|
D | strncpy_from_user.S | 32 branch_if_kernel r8, __strncpy_from_user 33 ret_if_privileged r8, r11, r10, r9 43 1: ld.ub r8, r11++ 44 st.b r12++, r8 45 cp.w r8, 0
|
D | strnlen_user.S | 20 branch_if_kernel r8, __strnlen_user 21 sub r8, r11, 1 22 add r8, r12 31 10: ld.ub r8, r12++ 32 cp.w r8, 0
|
D | csum_partial.S | 33 mov r8, 0 40 6: ld.ub r8, r12++ 41 lsl r8, 8 42 7: or r9, r8
|
D | csum_partial_copy_generic.S | 78 cp.w r8, 0 80 st.w r8[0], r9 92 lddsp r8, sp[20] 93 cp.w r8, 0 95 st.w r8[0], r9
|
D | io-writesl.S | 15 1: ld.w r8, r11++ 17 st.w r12[0], r8
|
D | io-readsl.S | 19 1: ld.w r8, r12[0] 21 st.w r11++, r8
|
D | __avr32_asr64.S | 22 lsl r8, r11, r9 25 or r10, r8
|
D | __avr32_lsr64.S | 22 lsl r8, r11, r9 25 or r10, r8
|
D | __avr32_lsl64.S | 22 lsr r8, r10, r9 25 or r11, r8
|
/linux-4.4.14/arch/powerpc/mm/ |
D | hash_low_32.S | 54 addis r8,r7,mmu_hash_lock@h 55 ori r8,r8,mmu_hash_lock@l 58 11: lwz r6,0(r8) 61 10: lwarx r6,0,r8 64 stwcx. r0,0,r8 71 mfspr r8,SPRN_SPRG_THREAD /* current task's THREAD (phys) */ 73 lwz r5,PGDIR(r8) /* virt page-table root */ 81 lwz r8,0(r5) /* get pmd entry */ 82 rlwinm. r8,r8,0,0,19 /* extract address of pte page */ 84 rlwinm r8,r4,13,19,29 /* Compute pgdir/pmd offset */ [all …]
|
D | tlb_nohash_low.S | 159 andi. r0,r8,1 /* Check if way 0 is bolted */ 174 srwi r8,r8,1 /* Next boltmap bit */ 180 lwz r8,0(r10) /* Load boltmap entry */ 438 mflr r8 446 tlbsx 0,r8 487 mtlr r8
|
D | hash_low_64.S | 61 std r8,STK_PARAM(R8)(r1) 202 li r8,MMU_PAGE_4K /* page size */ 227 li r8,MMU_PAGE_4K /* page size */ 301 ld r8,STK_PARAM(R9)(r1) /* segment size */ 355 std r8,STK_PARAM(R8)(r1) 531 li r8,MMU_PAGE_4K /* page size */ 560 li r8,MMU_PAGE_4K /* page size */ 598 ld r8,STK_PARAM(R8)(r1) /* flags */ 669 ld r8,STK_PARAM(R9)(r1) /* segment size */ 718 std r8,STK_PARAM(R8)(r1) [all …]
|
/linux-4.4.14/arch/score/kernel/ |
D | entry.S | 38 mfcr r8, cr0 39 srli r8, r8, 1 40 slli r8, r8, 1 41 mtcr r8, cr0 53 mfcr r8, cr0 54 ori r8, 1 55 mtcr r8, cr0 118 la r8, nmi_exception_handler 119 brl r8 124 mfcr r8, cr6 [all …]
|
D | head.S | 38 xor r8, r8, r8 43 sw r8, [r30] /* clean memory. */
|
/linux-4.4.14/arch/x86/entry/ |
D | entry_64_compat.S | 79 xorq %r8,%r8 80 pushq %r8 /* pt_regs->ip = 0 (placeholder) */ 87 pushq %r8 /* pt_regs->r8 = 0 */ 88 pushq %r8 /* pt_regs->r9 = 0 */ 89 pushq %r8 /* pt_regs->r10 = 0 */ 90 pushq %r8 /* pt_regs->r11 = 0 */ 93 pushq %r8 /* pt_regs->r12 = 0 */ 94 pushq %r8 /* pt_regs->r13 = 0 */ 95 pushq %r8 /* pt_regs->r14 = 0 */ 96 pushq %r8 /* pt_regs->r15 = 0 */ [all …]
|
D | thunk_64.S | 23 pushq %r8 59 popq %r8
|
/linux-4.4.14/arch/arm/lib/ |
D | memset.S | 39 stmfd sp!, {r8, lr} 42 UNWIND( .save {r8, lr} ) 43 mov r8, r1 47 stmgeia ip!, {r1, r3, r8, lr} @ 64 bytes at a time. 48 stmgeia ip!, {r1, r3, r8, lr} 49 stmgeia ip!, {r1, r3, r8, lr} 50 stmgeia ip!, {r1, r3, r8, lr} 52 ldmeqfd sp!, {r8, pc} @ Now <64 bytes to go. 57 stmneia ip!, {r1, r3, r8, lr} 58 stmneia ip!, {r1, r3, r8, lr} [all …]
|
D | copy_template.S | 92 stmfd sp!, {r5 - r8} 97 UNWIND( .save {r5 - r8} ) @ in second stmfd block 116 4: ldr8w r1, r3, r4, r5, r6, r7, r8, ip, lr, abort=20f 118 str8w r0, r3, r4, r5, r6, r7, r8, ip, lr, abort=20f 139 ldr1w r1, r8, abort=20f 157 str1w r0, r8, abort=20f 162 7: ldmfd sp!, {r5 - r8} 228 ldr4w r1, r8, r9, ip, lr, abort=19f 237 orr r7, r7, r8, lspush #\push 238 mov r8, r8, lspull #\pull [all …]
|
D | csumpartialcopygeneric.S | 46 .Ldst_16bit: load2b r8, ip 48 adcs sum, sum, r8, put_byte_0 49 strb r8, [dst], #1 73 1: load2b r8, ip 75 adcs sum, sum, r8, put_byte_0 76 strb r8, [dst], #1 85 load1b r8 86 adcs sum, sum, r8, put_byte_0 @ update checksum 87 strb r8, [dst], #1 178 1: load4l r5, r6, r7, r8 [all …]
|
D | memmove.S | 53 stmfd sp!, {r5 - r8} 58 UNWIND( .save {r5 - r8} ) @ in second stmfd block 77 4: ldmdb r1!, {r3, r4, r5, r6, r7, r8, ip, lr} 79 stmdb r0!, {r3, r4, r5, r6, r7, r8, ip, lr} 94 W(ldr) r8, [r1, #-4]! 105 W(str) r8, [r0, #-4]! 110 7: ldmfd sp!, {r5 - r8} 172 13: ldmdb r1!, {r7, r8, r9, ip} 180 orr r9, r9, r8, lspull #\pull 181 mov r8, r8, lspush #\push [all …]
|
D | csumpartialcopyuser.S | 23 stmfd sp!, {r1, r2, r4 - r8, ip, lr} 28 ldmfd sp!, {r1, r2, r4 - r8, ip, lr} 34 stmfd sp!, {r1, r2, r4 - r8, lr} 38 ldmfd sp!, {r1, r2, r4 - r8, pc}
|
D | csumpartialcopy.S | 21 stmfd sp!, {r1, r4 - r8, lr} 25 ldmfd sp!, {r1, r4 - r8, pc}
|
/linux-4.4.14/arch/sh/kernel/cpu/sh2a/ |
D | entry.S | 74 mov r2,r8 ! r8 = previous stack top 77 mov.l @r8+,r2 78 mov.l @r8+,r0 79 mov.l @r8+,r1 87 mov r2,r8 ! r8 = previous stack top 90 mov.l @r8+,r2 ! old R2 91 mov.l @r8+,r0 ! old R0 92 mov.l @r8+,r1 ! old R1 93 mov.l @r8+,r10 ! old PC 94 mov.l @r8+,r11 ! old SR [all …]
|
/linux-4.4.14/arch/microblaze/lib/ |
D | fastcopy.S | 107 andi r8, r6, 0xfffffffc /* as = s & ~3 */ 109 lwi r11, r8, 0 /* h = *(as + 0) */ 119 lwi r12, r8, 4 /* v = *(as + 4) */ 124 lwi r12, r8, 8 /* v = *(as + 8) */ 129 lwi r12, r8, 12 /* v = *(as + 12) */ 134 lwi r12, r8, 16 /* v = *(as + 16) */ 139 lwi r12, r8, 20 /* v = *(as + 20) */ 144 lwi r12, r8, 24 /* v = *(as + 24) */ 149 lwi r12, r8, 28 /* v = *(as + 28) */ 154 lwi r12, r8, 32 /* v = *(as + 32) */ [all …]
|
/linux-4.4.14/arch/arm/mm/ |
D | abort-lv4t.S | 28 ldr r8, [r4] @ read arm instruction 30 tst r8, #1 << 20 @ L = 1 -> write? 32 and r7, r8, #15 << 24 54 mov r1, r8 58 tst r8, #1 << 21 @ check writeback bit 62 and r6, r8, r7 63 and r9, r8, r7, lsl #1 65 and r9, r8, r7, lsl #2 67 and r9, r8, r7, lsl #3 72 and r9, r8, #15 << 16 @ Extract 'n' from instruction [all …]
|
D | pv-fixup-asm.S | 23 stmfd sp!, {r4-r8, lr} 25 mrc p15, 0, r8, c1, c0, 0 @ read control reg 26 bic ip, r8, #CR_M @ disable caches and MMU 83 mcr p15, 0, r8, c1, c0, 0 @ re-enable MMU 87 ldmfd sp!, {r4-r8, pc}
|
D | l2c-l2x0-resume.S | 19 ldmia r0, {r1, r2, r3, r4, r5, r6, r7, r8} 27 @ r8 = pwr_ctrl 40 strcs r8, [r1, #L310_POWER_CTRL]
|
D | proc-v6.S | 148 mrc p15, 0, r8, c1, c0, 2 @ co-processor access control 172 mcr p15, 0, r8, c1, c0, 2 @ co-processor access control 216 ALT_SMP(orr r8, r8, #TTB_FLAGS_SMP) 217 ALT_UP(orr r8, r8, #TTB_FLAGS_UP) 218 mcr p15, 0, r8, c2, c0, 1 @ load TTB1
|
/linux-4.4.14/arch/ia64/lib/ |
D | ip_fast_csum.S | 31 #define ret0 r8 116 add r8=r16,r17 119 add r8=r8,r18 121 add r8=r8,r19 124 add r8=r8,r15 126 shr.u r10=r8,32 // now fold sum into short 127 zxt4 r11=r8 129 add r8=r10,r11 131 shr.u r10=r8,16 // yeah, keep it rolling 132 zxt2 r11=r8 [all …]
|
D | strncpy_from_user.S | 23 mov r8=0 32 EX(.Lexit, ld1 r8=[in1],1) 34 EX(.Lexit, st1 [in0]=r8,1) 35 cmp.ne p6,p7=r8,r0 40 (p6) mov r8=in2 // buffer filled up---return buffer length 41 (p7) sub r8=in1,r9,1 // return string length (excluding NUL character)
|
D | xor.S | 29 mov r8 = in1 46 (p[6+1])st8.nta [r8] = d[1], 8 66 mov r8 = in1 86 (p[6+1])st8.nta [r8] = d[1], 8 106 mov r8 = in1 129 (p[6+1])st8.nta [r8] = d[1], 8 149 mov r8 = in1 174 (p[6+1])st8.nta [r8] = d[1], 8
|
D | strnlen_user.S | 32 EXCLR(.Lexit, ld1 r8=[in0],1) 35 cmp.eq p6,p0=r8,r0 42 mov r8=r9
|
D | flush.S | 38 sub r8=r22,r23 // number of strides - 1 46 mov ar.lc=r8 91 sub r8=r22,r23 // number of strides - 1 100 mov ar.lc=r8
|
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/subdev/pmu/fuc/ |
D | kernel.fuc | 102 push $r8 103 nv_iord($r8, NV_PPWR_TIMER_LOW) 106 sub b32 $r9 $r8 109 pop $r8 123 push $r8 124 nv_iord($r8, NV_PPWR_TIMER_LOW) 131 sub b32 $r9 $r8 135 pop $r8 141 // $r8 - NV_PPWR_INTR 182 push $r8 [all …]
|
D | memx.fuc | 85 movw $r8 0x1610 86 nv_rd32($r7, $r8) 91 nv_wr32($r8, $r7) 95 nv_rd32($r8, $r6) 96 and $r8 $r7 97 nv_wr32($r6, $r8) 100 nv_rd32($r8, $r6) 101 and $r8 $r7 102 nv_wr32($r6, $r8) 105 nv_rd32($r8, $r6) [all …]
|
/linux-4.4.14/arch/powerpc/kernel/vdso32/ |
D | cacheflush.S | 39 subf r8,r6,r4 /* compute length */ 40 add r8,r8,r5 /* ensure we get enough */ 42 srw. r8,r8,r9 /* compute line count */ 45 mtctr r8 56 subf r8,r6,r4 /* compute length */ 57 add r8,r8,r5 59 srw. r8,r8,r9 /* compute line count */ 62 mtctr r8
|
D | gettimeofday.S | 111 cmpl cr0,r8,r0 /* check if updated */ 224 1: lwz r8,(CFG_TB_UPDATE_COUNT+LOPART)(r9) 225 andi. r0,r8,1 /* pending update ? loop */ 227 xor r0,r8,r8 /* create dependency */ 292 cmplw cr0,r8,r0 /* check if updated */
|
/linux-4.4.14/arch/powerpc/kernel/vdso64/ |
D | cacheflush.S | 39 subf r8,r6,r4 /* compute length */ 40 add r8,r8,r5 /* ensure we get enough */ 42 srw. r8,r8,r9 /* compute line count */ 45 mtctr r8 56 subf r8,r6,r4 /* compute length */ 57 add r8,r8,r5 59 srw. r8,r8,r9 /* compute line count */ 62 mtctr r8
|
D | gettimeofday.S | 98 cmpld cr0,r0,r8 /* check if updated */ 207 1: ld r8,CFG_TB_UPDATE_COUNT(r3) 208 andi. r0,r8,1 /* pending update ? loop */ 210 xor r0,r8,r8 /* create dependency */ 233 cmpld r0,r8 /* check if updated */
|
/linux-4.4.14/arch/score/lib/ |
D | checksum.S | 35 lw r8, [src, offset + 0x00]; \ 39 ADDC(sum, r8); \ 43 lw r8, [src, offset + 0x10]; \ 47 ADDC(sum, r8); \ 77 andri.c r8, r5, 0x4 /*Len >= 4?*/ 81 andri.c r8, src, 0x3 /*src is 4bytes aligned, so use LW!!*/ 97 andri.c r8, r5, 0x2 104 andri.c r8, r5, 0x1 147 lbu r8, [src] 149 slli r8, r8, 8 [all …]
|
D | string.S | 36 ldi r8, 0 39 cmp.c r6, r8 43 addi r8, 1 44 cmp.c r8, r9 52 mv r4, r8 55 ldi r8, 0 56 mv r4, r8 142 0: lbu r8, [r6] 144 1: sb r8, [r7]
|
/linux-4.4.14/arch/sh/kernel/ |
D | relocate_kernel.S | 34 mov.l r8, @-r15 47 stc sr, r8 48 or r9, r8 49 ldc r8, sr 62 stc sr, r8 63 and r9, r8 64 ldc r8, sr 94 stc sr, r8 95 and r9, r8 96 ldc r8, sr [all …]
|
D | entry-common.S | 88 get_current_thread_info r8, r0 236 get_current_thread_info r8, r0 251 mov r8, r0 253 mov.l 1f, r8 254 add r0, r8 255 mov.l @r8, r8 256 jsr @r8 293 mov.l 1f, r8 294 jsr @r8 302 mov.l 1f, r8 [all …]
|
D | head_32.S | 172 mov.l @r7, r8 173 and r11, r8 174 cmp/eq r0, r8 /* Check for valid __MEMORY_START mappings */ 197 mov #(PMB_UB >> 8), r8 198 shll8 r8 200 or r0, r8 201 or r9, r8 202 mov.l r8, @r1 203 mov r2, r8 204 add r7, r8 [all …]
|
/linux-4.4.14/arch/sh/lib64/ |
D | udivdi3.S | 34 mulu.l r5,r3,r8 37 shlld r8,r0,r8 39 sub r2,r8,r2 44 shlld r5,r0,r8 48 add r8,r21,r8 61 add r8,r7,r8 64 add r8,r5,r2 71 shlri r25,32,r8 76 mulu.l r5,r8,r5 81 shlri r5,14-1,r8 [all …]
|
D | strcpy.S | 47 movi -1, r8 48 SHLO r8, r7, r8 49 mcmv r4, r8, r9
|
D | copy_page.S | 57 addi r7, 64, r8 87 bgt/l r8, r2, tr1
|
/linux-4.4.14/arch/powerpc/kernel/ |
D | head_8xx.S | 606 add r10, r10, r8 ;b 151f 748 lis r8, MI_RSV4I@h 749 ori r8, r8, 0x1c00 751 mtspr SPRN_MI_CTR, r8 /* Set instruction MMU control */ 756 mr r8, r10 769 lis r8, KERNELBASE@h /* Create vaddr for TLB */ 770 ori r8, r8, MI_EVALID /* Mark it valid */ 771 mtspr SPRN_MI_EPN, r8 772 mtspr SPRN_MD_EPN, r8 773 li r8, MI_PS8MEG | (2 << 5) /* Set 8M byte page, APG 2 */ [all …]
|
D | head_booke.h | 100 mfspr r8,SPRN_PIR; \ 101 slwi r8,r8,2; \ 102 addis r8,r8,level##_STACK_BASE@ha; \ 103 lwz r8,level##_STACK_BASE@l(r8); \ 104 addi r8,r8,EXC_LVL_FRAME_OVERHEAD; 107 lis r8,level##_STACK_BASE@ha; \ 108 lwz r8,level##_STACK_BASE@l(r8); \ 109 addi r8,r8,EXC_LVL_FRAME_OVERHEAD; 121 mtspr SPRN_SPRG_WSCRATCH_##exc_level,r8; \ 123 stw r9,GPR9(r8); /* save various registers */\ [all …]
|
D | misc_64.S | 85 subf r8,r6,r4 /* compute length */ 86 add r8,r8,r5 /* ensure we get enough */ 88 srw. r8,r8,r9 /* compute line count */ 90 mtctr r8 101 subf r8,r6,r4 /* compute length */ 102 add r8,r8,r5 104 srw. r8,r8,r9 /* compute line count */ 106 mtctr r8 131 subf r8,r6,r4 /* compute length */ 132 add r8,r8,r5 /* ensure we get enough */ [all …]
|
D | misc_32.S | 99 mulhwu r8,r10,r6 101 adde r4,r4,r8 129 lis r8,__got2_end@ha 130 addi r8,r8,__got2_end@l 131 subf r8,r7,r8 132 srwi. r8,r8,2 134 mtctr r8 546 lwz r8,12(r4); \ 550 stw r8,12(r3); \ 617 rlwinm r8,r7,0,32 # t3 = (count < 32) ? 32 : 0 [all …]
|
D | fsl_booke_entry_mapping.S | 83 li r8,-1 85 slw r6,r8,r6 /* convert to mask */ 90 mfspr r8,SPRN_MAS3 94 and r8,r6,r8 98 or r25,r8,r9 99 ori r8,r25,(MAS3_SX|MAS3_SW|MAS3_SR) 114 mtspr SPRN_MAS3,r8 176 mtspr SPRN_MAS3,r8 233 mfspr r8,SPRN_MAS1 234 rlwinm r8,r8,0,2,0 /* clear IPROT */ [all …]
|
D | head_32.S | 119 0: mflr r8 /* r8 = runtime addr here */ 120 addis r8,r8,(_stext - 0b)@ha 121 addi r8,r8,(_stext - 0b)@l /* current runtime base addr */ 1140 li r8,0x7f /* valid, block length = 8MB */ 1142 mtspr SPRN_IBAT0L,r8 /* lower BAT register */ 1144 addis r8,r8,0x800000@h 1146 mtspr SPRN_IBAT1L,r8 1148 addis r8,r8,0x800000@h 1150 mtspr SPRN_IBAT2L,r8 1154 4: tophys(r8,r11) [all …]
|
D | reloc_32.S | 55 li r8, 0 66 lwz r8, 4(r11) /* r8 = Total Rela relocs size */ 80 cmpwi r8, 0 97 subf r8, r6, r8 /* relaz -= relaent */ 195 cmpwi r8, 0 /* relasz = 0 ? */ 198 subf r8, r6, r8 /* relasz -= relaent */
|
D | idle_power4.S | 63 ld r8,TI_LOCAL_FLAGS(r9) /* set napping bit */ 64 ori r8,r8,_TLF_NAPPING /* so when we take an exception */ 65 std r8,TI_LOCAL_FLAGS(r9) /* it will return to our caller */
|
D | entry_32.S | 94 mfspr r8,SPRN_SPRG_THREAD 95 lwz r0,KSP_LIMIT(r8) 98 stw r0,KSP_LIMIT(r8) 118 mfspr r8,SPRN_SPRG_THREAD 119 lwz r0,KSP_LIMIT(r8) 122 stw r0,KSP_LIMIT(r8) 245 lwz r8,GPR8(r1) 329 lwz r8,GPR8(r1) 358 li r8,-MAX_ERRNO 361 cmplw 0,r3,r8 [all …]
|
D | reloc_64.S | 37 li r8,0 48 ld r8,8(r11) /* get RELACOUNT value in r8 */ 52 cmpdi cr1,r8,0 71 mtctr r8
|
D | entry_64.S | 81 std r8,GPR8(r1) 172 clrldi r8,r8,32 183 ld r8,_MSR(r1) 186 andi. r10,r8,MSR_RI 221 andi. r6,r8,MSR_PR 233 mtspr SPRN_SRR1,r8 263 ld r8,GPR8(r1) 521 ld r8,KSP(r4) /* new stack pointer */ 524 clrrdi r6,r8,28 /* get its ESID */ 527 clrrdi r6,r8,40 /* get its 1T ESID */ [all …]
|
D | tm.S | 233 std r8, GPR13(r7) 360 addi r8, r3, THREAD_VRSTATE 362 lvx v0, r8, r5 364 REST_32VRS(0, r5, r8) /* r5 scratch, r8 ptr */ 373 addi r8, r3, THREAD_FPSTATE 374 lfd fr0, FPSTATE_FPSCR(r8) 386 ld r8, _XER(r7) 390 mtxer r8
|
D | idle_6xx.S | 139 lwz r8,TI_LOCAL_FLAGS(r9) /* set napping bit */ 140 ori r8,r8,_TLF_NAPPING /* so when we take an exception */ 141 stw r8,TI_LOCAL_FLAGS(r9) /* it will return to our caller */
|
D | head_64.S | 390 mr r28,r8 447 mr r8,r26 527 addis r8,r3,(4f - _stext)@ha /* Jump to the copy of this code */ 528 addi r12,r8,(4f - _stext)@l /* that we just made */ 786 LOAD_REG_ADDR(r8,__bss_start) 787 sub r11,r11,r8 /* bss size */ 791 addi r8,r8,-8 794 3: stdu r0,8(r8)
|
D | head_fsl_booke.S | 85 0: mflr r8 86 addis r3,r8,(is_second_reloc - 0b)@ha 104 addis r4,r8,(kernstart_addr - 0b)@ha 108 addis r6,r8,(memstart_addr - 0b)@ha 926 mfmsr r8 929 rlwimi r9,r8,28,0x00000001 /* turn MSR[DS] into MAS6[SAS] */ 934 mfspr r8,SPRN_MAS1 936 rlwinm r9,r8,25,0x1f /* r9 = log2(page size) */ 1098 mfspr r8,SPRN_HID0 1099 ori r9,r8,HID0_DCFA@l [all …]
|
/linux-4.4.14/arch/arm/crypto/ |
D | aes-armv4.S | 259 and r8,lr,r0,lsr#8 265 ldr r5,[r10,r8,lsl#2] @ Te2[s0>>8] 266 and r8,lr,r1 273 ldr r8,[r10,r8,lsl#2] @ Te3[s1>>0] 278 eor r5,r5,r8,ror#8 279 and r8,lr,r2,lsr#16 @ i1 284 ldr r8,[r10,r8,lsl#2] @ Te1[s2>>16] 291 eor r1,r1,r8,ror#8 292 and r8,lr,r3,lsr#8 @ i1 297 ldr r8,[r10,r8,lsl#2] @ Te2[s3>>8] [all …]
|
D | sha256-core.S_shipped | 104 ldmia r0,{r4,r5,r6,r7,r8,r9,r10,r11} 120 eor r0,r8,r8,ror#5 122 eor r0,r0,r8,ror#19 @ Sigma1(e) 137 eor r0,r8,r8,ror#5 139 eor r0,r0,r8,ror#19 @ Sigma1(e) 146 and r2,r2,r8 202 eor r2,r8,r9 260 eor r2,r7,r8 264 eor r2,r2,r8 @ Ch(e,f,g) 316 add r8,r8,r2 @ h+=X[i] [all …]
|
D | sha1-armv4-large.S | 64 ldr r8,.LK_00_19 75 add r7,r8,r7,ror#2 @ E+=K_00_19 84 add r7,r8,r7,ror#2 @ E+=K_00_19 100 add r6,r8,r6,ror#2 @ E+=K_00_19 109 add r6,r8,r6,ror#2 @ E+=K_00_19 125 add r5,r8,r5,ror#2 @ E+=K_00_19 134 add r5,r8,r5,ror#2 @ E+=K_00_19 150 add r4,r8,r4,ror#2 @ E+=K_00_19 159 add r4,r8,r4,ror#2 @ E+=K_00_19 175 add r3,r8,r3,ror#2 @ E+=K_00_19 [all …]
|
D | aesbs-core.S_shipped | 1076 ldr r8, [ip] @ IV is 1st arg on the stack 1117 vld1.8 {q15}, [r8] @ load IV 1318 vst1.8 {q15}, [r8] @ return IV 1333 ldr r8, [ip] @ ctr is 1st arg on the stack 1351 vld1.8 {q0}, [r8] @ load counter 1352 add r8, r6, #.LREVM0SR-.LM0 @ borrow r8 1370 vld1.8 {q0}, [r8] @ load counter 1371 adrl r8, .LREVM0SR @ borrow r8 1406 vldmia r8, {q8} @ .LREVM0SR 1409 sub r6, r8, #.LREVM0SR-.LSR @ pass constants [all …]
|
/linux-4.4.14/arch/powerpc/lib/ |
D | copypage_64.S | 31 srd r8,r5,r11 33 mtctr r8 41 srdi r8,r5,7 /* page is copied in 128 byte strides */ 42 addi r8,r8,-1 /* one stride copied outside loop */ 44 mtctr r8 49 ldu r8,24(r4) 55 std r8,32(r3) 65 ld r8,64(r4) 71 std r8,96(r3) 81 ldu r8,128(r4) [all …]
|
D | copy_32.S | 18 lwz r8,8(r4); \ 22 stw r8,8(r6); \ 30 lwz r8,8(r4); \ 38 stw r8,8(r6); \ 99 add r8,r7,r5 100 srwi r9,r8,LG_CACHELINE_BYTES 114 clrlwi r5,r8,32-LG_CACHELINE_BYTES 150 add r8,r4,r5 152 cmplw 1,r3,r8 164 andi. r8,r0,3 /* get it word-aligned first */ [all …]
|
D | memcpy_64.S | 67 mr r8,r9 70 std r8,8(r3) 71 2: ldu r8,16(r4) 74 3: std r8,8(r3) 113 sld r8,r0,r10 117 # s1<< in r8, d0=(s0<<|s1>>) in r7, s3 in r0, s2 in r9, nix in r6 & r12 122 sld r8,r0,r10 129 or r12,r8,r12 131 sld r8,r0,r10 135 # d0=(s0<<|s1>>) in r12, s1<< in r6, s2>> in r7, s2<< in r8, s3 in r9 [all …]
|
D | copypage_power7.S | 42 lis r8,0x8000 /* GO=1 */ 43 clrldi r8,r8,32 54 dcbt r0,r8,0b01010 /* all streams GO */ 79 li r8,48 89 lvx v4,r4,r8 98 stvx v4,r3,r8 128 ld r8,32(r4) 145 std r8,32(r3)
|
D | div64.S | 24 li r8,0 47 add r8,r8,r11 # and add the estimate to the accumulated 54 add r8,r8,r0 57 stw r8,4(r3)
|
D | copyuser_64.S | 73 mr r8,r6 79 270: std r8,8(r3) 81 222: ld r8,8(r4) 87 272: std r8,8(r3) 132 sLd r8,r0,r10 140 sLd r8,r0,r10 147 or r12,r8,r12 149 sLd r8,r0,r10 159 or r12,r8,r12 162 sLd r8,r0,r10 [all …]
|
D | string_64.S | 66 mr r3,r8 85 mr r8,r3 133 6: mr r8,r3 181 mr r8,r3 192 mr r8,r3
|
D | ldstfp.S | 233 li r8,STKFRM-16 237 stvx v0,r1,r8 243 lvx v0,r1,r8 261 li r8,STKFRM-16 265 stvx v0,r1,r8 271 lvx v0,r1,r8 329 li r8,STKFRM-16 357 li r8,STKFRM-16
|
/linux-4.4.14/arch/powerpc/crypto/ |
D | aes-spe-keys.S | 37 xor r8,r8,r8; \ 84 LOAD_KEY(r8,r4,12) 88 stw r8,12(r3) 93 mr r14,r8 /* apply LS_BOX to 4th temp */ 100 xor r8,r8,r7 104 stw r8,12(r3) 126 LOAD_KEY(r8,r4,12) 132 stw r8,12(r3) 146 xor r8,r8,r7 147 xor r9,r9,r8 [all …]
|
/linux-4.4.14/arch/powerpc/boot/ |
D | crt0.S | 75 9: lwz r8,0(r12) /* get tag */ 76 cmpwi r8,0 78 cmpwi r8,RELA 82 11: addis r8,r8,(-RELACOUNT)@ha 83 cmpwi r8,RELACOUNT@l 114 lwz r8,p_etext-p_base(r10) 118 cmplw cr0,r9,r8 125 lwz r8,p_end-p_base(r10) 129 cmplw cr0,r9,r8 133 lwz r8,p_pstack-p_base(r10) [all …]
|
D | div64.S | 24 li r8,0 47 add r8,r8,r11 # and add the estimate to the accumulated 54 add r8,r8,r0 57 stw r8,4(r3) 81 rlwinm r8,r7,0,32 # t3 = (count < 32) ? 32 : 0 84 slw r7,r7,r8 # t2 = (count < 32) ? 0 : t2
|
D | virtex405-head.S | 25 li r8,256 26 mtctr r8
|
D | util.S | 86 addze r8,r5 92 cmpw 0,r5,r8
|
/linux-4.4.14/arch/ia64/include/asm/ |
D | futex.h | 10 register unsigned long r8 __asm ("r8") = 0; \ 16 : "+r" (r8), "=r" (oldval) \ 19 ret = r8; \ 24 register unsigned long r8 __asm ("r8") = 0; \ 37 : "+r" (r8), "=r" (val), "=&r" (oldval), \ 41 if (unlikely (r8)) \ 44 ret = r8; \ 109 register unsigned long r8 __asm ("r8") = 0; in futex_atomic_cmpxchg_inatomic() 117 : "+r" (r8), "=&r" (prev) in futex_atomic_cmpxchg_inatomic() 122 return r8; in futex_atomic_cmpxchg_inatomic()
|
D | syscall.h | 38 return regs->r10 == -1 ? regs->r8:0; in syscall_get_error() 44 return regs->r8; in syscall_get_return_value() 53 regs->r8 = -error; in syscall_set_return_value() 56 regs->r8 = val; in syscall_set_return_value()
|
D | ptrace.h | 68 return regs->r8; in regs_return_value() 70 return -regs->r8; in regs_return_value() 111 # define force_successful_syscall_return() (task_pt_regs(current)->r8 = 0)
|
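The two ia64 headers above (syscall.h, ptrace.h) encode the ia64 syscall return convention: r8 carries the return value (or a positive errno) and r10 is set to -1 to flag a failure. Below is a minimal userspace sketch of that convention, not the kernel's code; the struct and function names are made up, and only the r8/r10 logic comes from the excerpts.

    #include <stdio.h>

    struct ia64_regs_sketch {
            long r8;        /* return value, or positive errno on failure */
            long r10;       /* 0 on success, -1 on failure */
    };

    /* mirrors the r10 == -1 test seen in syscall_get_error()/regs_return_value() */
    static long sketch_return_value(const struct ia64_regs_sketch *regs)
    {
            return regs->r10 == -1 ? -regs->r8 : regs->r8;
    }

    /* mirrors syscall_set_return_value(); 'error' is a negative errno */
    static void sketch_set_return_value(struct ia64_regs_sketch *regs,
                                        int error, long val)
    {
            if (error) {
                    regs->r8 = -error;      /* keep a positive errno in r8 */
                    regs->r10 = -1;         /* flag the failure in r10 */
            } else {
                    regs->r8 = val;
                    regs->r10 = 0;
            }
    }

    int main(void)
    {
            struct ia64_regs_sketch regs;

            sketch_set_return_value(&regs, 0, 42);
            printf("%ld\n", sketch_return_value(&regs));    /* 42 */
            sketch_set_return_value(&regs, -14 /* -EFAULT */, 0);
            printf("%ld\n", sketch_return_value(&regs));    /* -14 */
            return 0;
    }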
/linux-4.4.14/arch/x86/kernel/ |
D | relocate_kernel_64.S | 78 movq PTR(PA_CONTROL_PAGE)(%rsi), %r8 95 lea PAGE_SIZE(%r8), %rsp 98 addq $(identity_mapped - relocate_kernel), %r8 99 pushq %r8 181 popq %r8 182 subq $(1b - relocate_kernel), %r8 183 movq CP_PA_SWAP_PAGE(%r8), %r10 184 movq CP_PA_BACKUP_PAGES_MAP(%r8), %rdi 185 movq CP_PA_TABLE_PAGE(%r8), %rax 187 lea PAGE_SIZE(%r8), %rsp [all …]
|
D | perf_regs.c | 47 PT_REGS_OFFSET(PERF_REG_X86_R8, r8), 143 regs_user_copy->r8 = user_regs->r8; in perf_get_regs_user()
|
/linux-4.4.14/arch/arm/mach-omap1/ |
D | ams-delta-fiq-handler.S | 110 mov r8, #2 @ reset FIQ agreement 111 str r8, [r12, #IRQ_CONTROL_REG_OFFSET] 116 mov r8, #1 117 orr r8, r11, r8, lsl r10 @ mask spurious interrupt 118 str r8, [r12, #IRQ_MIR_REG_OFFSET] 149 ldr r8, [r12, #OMAP1510_GPIO_DATA_INPUT] @ fetch GPIO input 155 ands r8, r8, #KEYBRD_DATA_MASK @ check start bit - detected? 158 @ r8 contains KEYBRD_DATA_MASK, use it 159 str r8, [r9, #BUF_STATE] @ enter data processing state 174 @ r8 still contains GPIO input bits [all …]
|
D | sleep.S | 88 ldr r8, [r6, #EMIFS_CONFIG_ASM_OFFSET & 0xff] 89 orr r9, r8, #IDLE_EMIFS_REQUEST & 0xff 135 str r8, [r6, #EMIFS_CONFIG_ASM_OFFSET & 0xff] 243 ldr r8, [r6, #EMIFS_CONFIG_ASM_OFFSET & 0xff] 244 orr r9, r8, #IDLE_EMIFS_REQUEST & 0xff 363 str r8, [r6, #EMIFS_CONFIG_ASM_OFFSET & 0xff]
|
/linux-4.4.14/arch/avr32/kernel/ |
D | entry-avr32b.S | 213 cp.w r8, NR_syscalls 217 ld.w lr, lr[r8 << 2] 218 mov r8, r5 /* 5th argument (6th is pushed by stub) */ 236 popm r8-r9 237 mtsr SYSREG_RAR_SUP, r8 265 pushm r8-r12 267 popm r8-r12 307 mfsr r8, SYSREG_RSR_EX 310 mov r12, r8 311 andh r8, (MODE_MASK >> 16), COH [all …]
|
D | switch_to.S | 24 ld.w r8, r10++ 30 mtsr SYSREG_SR, r8
|
/linux-4.4.14/arch/powerpc/kvm/ |
D | book3s_hv_rmhandlers.S | 95 lwz r8, HSTATE_PMC5(r13) 101 mtspr SPRN_PMC5, r8 113 ld r8, HSTATE_MMCR2(r13) 115 mtspr SPRN_MMCR2, r8 146 ld r8, 112+PPC_LR_STKOFF(r1) 163 mtsrr0 r8 169 11: mtspr SPRN_HSRR0, r8 175 14: mtspr SPRN_HSRR0, r8 179 15: mtspr SPRN_HSRR0, r8 198 ld r8,VCORE_LPCR(r5) [all …]
|
D | book3s_hv_interrupts.S | 76 mfspr r8, SPRN_MMCR2 103 std r8, HSTATE_MMCR2(r13) 110 mfspr r8, SPRN_PMC5 116 stw r8, HSTATE_PMC5(r13) 124 mfspr r8,SPRN_DEC 126 mtspr SPRN_HDEC,r8 127 extsw r8,r8 128 add r8,r8,r7 129 std r8,HSTATE_DECEXP(r13)
|
D | book3s_64_slb.S | 51 li r8, 0 52 stb r8, 3(r11) 128 li r8, SLB_NUM_BOLTED 129 stb r8, 3(r11) 135 li r8, SLBSHADOW_SAVEAREA 139 LDX_BE r10, r11, r8 145 addi r8, r8, SHADOW_SLB_ENTRY_LEN
|
D | book3s_segment.S | 95 mfspr r8, SPRN_FSCR 96 std r8, HSTATE_HOST_FSCR(r13) 123 PPC_LL r8, SVCPU_CTR(r3) 128 mtctr r8 144 PPC_LL r8, SVCPU_R8(r3) 196 PPC_STL r8, SVCPU_R8(r13) 225 PPC_LL r8, HSTATE_SCRATCH0(r13) 229 PPC_STL r8, SVCPU_R12(r13) 237 mfctr r8 243 PPC_STL r8, SVCPU_CTR(r13) [all …]
|
D | bookehv_interrupts.S | 79 lwz r8, VCPU_HOST_PID(r4) 85 mtspr SPRN_PID, r8 90 mfspr r8, SPRN_TBRL 93 stw r8, VCPU_TIMING_EXIT_TBL(r4) 98 oris r8, r6, MSR_CE@h 100 ori r8, r8, MSR_ME | MSR_RI 110 cmpw r6, r8 153 mfspr r8, SPRN_ESR 154 PPC_STL r8, VCPU_FAULT_ESR(r4) 189 PPC_STL r8, VCPU_GPR(R8)(r4) [all …]
|
D | booke_interrupts.S | 153 stw r8, VCPU_GPR(R8)(r4) 163 mfspr r8, SPRN_TBRL 167 stw r8, VCPU_TIMING_EXIT_TBL(r4) 175 mfmsr r8 176 ori r7, r8, MSR_DS 180 mtmsr r8 441 lis r8, kvmppc_booke_handlers@ha 442 lwz r8, kvmppc_booke_handlers@l(r8) 443 mtspr SPRN_IVPR, r8 471 mfspr r8, SPRN_TBRU [all …]
|
/linux-4.4.14/arch/ia64/hp/sim/boot/ |
D | boot_head.S | 60 mov r8=-1 74 mov r8=0 /* status = 0 */ 81 mov r8=0 /* status = 0 */ 88 mov r8=0 /* status = 0 */ 96 movl r8=524288 /* flush 512k million cache lines (16MB) */ 98 mov ar.lc=r8 99 movl r8=0xe000000000000000 101 .loop: fc r8 102 add r8=32,r8 109 mov r8=r0 [all …]
|
/linux-4.4.14/arch/tile/lib/ |
D | memcpy_32.S | 105 { blzt r4, .Lcopy_8_check; slti_u r8, r2, 8 } 118 EX: { lw r3, r1; addi r1, r1, 4; slti_u r8, r2, 16 } 123 { bzt r8, .Lcopy_8_loop; slti_u r4, r2, 4 } 187 { bz r4, .Lcopy_8_check; slti_u r8, r2, 8 } 194 { bz r4, .Lcopy_8_check; slti_u r8, r2, 8 } 201 { bz r4, .Lcopy_8_check; slti_u r8, r2, 8 } 272 EX: { lw r8, r1; addi r1, r1, 4; slt_u r13, r20, r15 }/* r8 = WORD_13 */ 323 EX: sw r0, r8 /* store(WORD_13) */ 346 { slti_u r8, r2, 20; sub r4, zero, r0 } 347 { bnzt r8, .Lcopy_unaligned_few; andi r4, r4, 3 } [all …]
|
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/ |
D | com.fuc | 35 ld b32 $r8 D[$r13 + 0x0] // GET 37 xor $r8 8 38 cmpu b32 $r8 $r9 46 and $r8 $r9 7 47 shl b32 $r8 3 48 add b32 $r8 $r13 49 add b32 $r8 8 50 st b32 D[$r8 + 0x0] $r14 51 st b32 D[$r8 + 0x4] $r15 69 ld b32 $r8 D[$r13 + 0x0] // GET [all …]
|
/linux-4.4.14/arch/arm/mach-omap2/ |
D | sram242x.S | 48 mov r8, r3 @ capture force parameter 87 cmp r8, #0x1 @ if forced unlock exit 207 ldr r8, [r10] @ get value 209 and r8, r8, r7 @ apply mask to clear bits 210 orr r8, r8, r9 @ build value for L0/L1-volt operation. 211 str r8, [r10] @ set up for change. 213 orr r8, r8, r7 @ build value for force 214 str r8, [r10] @ Force transition to L1 217 ldr r8, [r10] @ get value 218 add r8, r8, #0x2 @ give it at most 62uS (min 31+) [all …]
|
D | sram243x.S | 48 mov r8, r3 @ capture force parameter 87 cmp r8, #0x1 @ if forced unlock exit 207 ldr r8, [r10] @ get value 209 and r8, r8, r7 @ apply mask to clear bits 210 orr r8, r8, r9 @ build value for L0/L1-volt operation. 211 str r8, [r10] @ set up for change. 213 orr r8, r8, r7 @ build value for force 214 str r8, [r10] @ Force transition to L1 217 ldr r8, [r10] @ get value 218 add r8, r8, #0x2 @ give it at most 62uS (min 31+) [all …]
|
D | sleep44xx.S | 109 mov r8, r0 110 ldr r9, [r8, #OMAP_TYPE_OFFSET] 115 ldreq r0, [r8, #SCU_OFFSET0] 116 ldrne r0, [r8, #SCU_OFFSET1] 126 ldreq r1, [r8, #SCU_OFFSET0] 127 ldrne r1, [r8, #SCU_OFFSET1] 150 mov r8, r0 153 ldreq r0, [r8, #L2X0_SAVE_OFFSET0] @ Retrieve L2 state from SAR 154 ldrne r0, [r8, #L2X0_SAVE_OFFSET1] @ memory. 212 mov r8, r0 [all …]
|
/linux-4.4.14/arch/parisc/lib/ |
D | fixup.S | 69 get_fault_ip %r1,%r8 71 ldi -EFAULT, %r8 77 get_fault_ip %r1,%r8 79 ldi -EFAULT, %r8 86 get_fault_ip %r1,%r8 89 ldi -EFAULT, %r8 93 get_fault_ip %r1,%r8 96 ldi -EFAULT, %r8
|
/linux-4.4.14/arch/sh/kernel/cpu/sh2/ |
D | entry.S | 84 mov.l r8,@-r15 90 mov r2,r8 ! copy user -> kernel stack 91 mov.l @(0,r8),r3 93 mov.l @(4,r8),r2 95 mov.l @(12,r8),r1 97 mov.l @(8,r8),r0 133 mov.l r8,@-r2 144 mov #64,r8 145 cmp/hs r8,r9 147 mov #32,r8 [all …]
|
/linux-4.4.14/arch/sh/lib/ |
D | copy_page.S | 27 mov.l r8,@-r15 32 mov r5,r8 36 add r0,r8 59 cmp/eq r11,r8 65 mov.l @r15+,r8 98 mov.l r8,@-r15 194 EX( mov.l @r5+,r8 ) 209 EX( mov.l r8,@(16,r4) ) 245 EX( mov.l @r5+,r8 ) 251 xtrct r8,r1 [all …]
|
/linux-4.4.14/arch/ia64/kernel/ |
D | fsys.S | 55 mov r8=ENOSYS 75 add r8=IA64_PID_LEVEL_OFFSET,r17 77 ld4 r8=[r8] // r8 = pid->level 80 shl r8=r8,IA64_UPID_SHIFT 82 add r17=r17,r8 // r17 = &pid->numbers[pid->level] 84 ld4 r8=[r17] // r8 = pid->numbers[pid->level].nr 105 add r8=IA64_PID_LEVEL_OFFSET,r17 108 ld4 r8=[r8] // r8 = pid->level 111 shl r8=r8,IA64_UPID_SHIFT 113 add r17=r17,r8 // r17 = &pid->numbers[pid->level] [all …]
|
D | pal.S | 45 mov r8=-1 62 mov r8 = ip 66 adds r8 = 1f-1b,r8 80 mov rp = r8 151 mov r8 = ip // save ip to compute branch 162 adds r8 = 1f-1b,r8 // calculate return address for call 166 tpa r8=r8 // convert rp to physical 177 mov rp = r8 // install return address (physical)
|
D | mca_drv_asm.S | 37 mov out0=r8 // poisoned address 53 mov r8=r0
|
D | entry.S | 72 cmp4.ge p6,p7=r8,r0 74 sxt4 r8=r8 // return 64-bit result 194 MOV_TO_KR(CURRENT, in0, r8, r9) // update "current" application register 195 mov r8=r13 // return pointer to previously running task 213 MOV_TO_ITIR(p0, r25, r8) 214 MOV_TO_IFA(in0, r8) // VA of next task... 217 MOV_TO_KR(CURRENT_STACK, r26, r8, r9) // remember last page we mapped... 220 SSM_PSR_IC_AND_SRLZ_D(r8, r9) // reenable the psr.ic bit 504 cmp.lt p6,p0=r8,r0 // check tracehook 539 cmp.lt p6,p0=r8,r0 // syscall failed? [all …]
|
/linux-4.4.14/arch/powerpc/platforms/pseries/ |
D | hvCall.S | 42 std r8,STK_PARAM(R8)(r1); \ 54 ld r8,STACK_FRAME_OVERHEAD+STK_PARAM(R8)(r1); \ 143 mr r7,r8 144 mr r8,r9 170 mr r7,r8 171 mr r8,r9 207 mr r7,r8 208 mr r8,r9 237 mr r7,r8 238 mr r8,r9 [all …]
|
/linux-4.4.14/arch/unicore32/lib/ |
D | copy_template.S | 71 stm.w (r5 - r8), [sp-] 75 4: ldr8w r1, r3, r4, r5, r6, r7, r8, r10, r11, abort=20f 77 str8w r0, r3, r4, r5, r6, r7, r8, r10, r11, abort=20f 91 ldr1w r1, r8, abort=20f 102 str1w r0, r8, abort=20f 105 7: ldm.w (r5 - r8), [sp]+ 148 ldr4w r1, r8, r9, r10, r11, abort=19f 157 or r7, r7, r8 push #\b 158 mov r8, r8 pull #\a 159 or r8, r8, r9 push #\b [all …]
|
/linux-4.4.14/arch/x86/lib/ |
D | memcpy_64.S | 73 movq 0*8(%rsi), %r8 79 movq %r8, 0*8(%rdi) 102 movq -1*8(%rsi), %r8 107 movq %r8, -1*8(%rdi) 127 movq 0*8(%rsi), %r8 131 movq %r8, 0*8(%rdi) 143 movq 0*8(%rsi), %r8 145 movq %r8, 0*8(%rdi) 170 movzbq 1(%rsi), %r8
|
D | memmove_64.S | 38 mov %rsi, %r8 39 add %rdx, %r8 40 cmp %rdi, %r8 68 movq 3*8(%rsi), %r8 74 movq %r8, 3*8(%rdi) 134 movq -4*8(%rsi), %r8 140 movq %r8, -4*8(%rdi) 158 movq -1*8(%rsi, %rdx), %r8 162 movq %r8, -1*8(%rdi, %rdx)
|
D | copy_user_64.S | 84 1: movq (%rsi),%r8 88 5: movq %r8,(%rdi) 92 9: movq 4*8(%rsi),%r8 96 13: movq %r8,4*8(%rdi) 108 18: movq (%rsi),%r8 109 19: movq %r8,(%rdi) 260 1: movq (%rsi),%r8 264 5: movnti %r8,(%rdi) 268 9: movq 4*8(%rsi),%r8 272 13: movnti %r8,4*8(%rdi) [all …]
|
D | copy_page_64.S | 33 movq 0x8*3(%rsi), %r8 44 movq %r8, 0x8*3(%rdi) 63 movq 0x8*3(%rsi), %r8 72 movq %r8, 0x8*3(%rdi)
|
D | memset_64.S | 132 movq $8,%r8 133 subq %r9,%r8 134 addq %r8,%rdi 135 subq %r8,%rdx
|
D | csum-copy_64.S | 60 movq %r8, (%rsp) 83 movq 8(%rdi), %r8 102 adcq %r8, %rax 115 movq %r8, 8(%rsi)
|
D | rwsem.S | 69 pushq %r8; \ 78 popq %r8; \
|
/linux-4.4.14/arch/avr32/mach-at32ap/ |
D | pm-at32ap700x.S | 41 get_thread_info r8 42 ld.w r9, r8[TI_flags] 46 st.w r8[TI_flags], r9 59 ld.w r9, r8[TI_flags] 61 st.w r8[TI_flags], r9 92 mtsr SYSREG_RAR_INT0, r8 126 sub r8, pc, . - 1f /* return address for irq handler */ 149 sub r8, pc, . - 1f /* return address for irq handler */
|
/linux-4.4.14/arch/x86/crypto/ |
D | salsa20-x86_64-asm_64.S | 10 mov %rdi,%r8 40 movq 0(%r8),%rcx 42 movq 8(%r8),%r9 44 movq 16(%r8),%rax 46 movq 24(%r8),%r10 48 movq 32(%r8),%r11 50 movq 40(%r8),%r12 52 movq 48(%r8),%r13 54 movq 56(%r8),%r14 72 movq %r8,120(%rsp) [all …]
|
D | aes-x86_64-asm_64.S | 47 #define R8 %r8 52 #define prologue(FUNC,KEY,B128,B192,r1,r2,r3,r4,r5,r6,r7,r8,r9,r10,r11) \ argument 56 leaq KEY+48(r8),r9; \ 62 movl 480(r8),r10 ## E; \ 73 #define epilogue(FUNC,r1,r2,r3,r4,r5,r6,r7,r8,r9) \ argument 79 movl r8 ## E,12(r9); \ 83 #define round(TAB,OFFSET,r1,r2,r3,r4,r5,r6,r7,r8,ra,rb,rc,rd) \ argument 93 xorl OFFSET(r8),ra ## E; \ 94 xorl OFFSET+4(r8),rb ## E; \ 124 xorl OFFSET+8(r8),rc ## E; \ [all …]
|
D | poly1305-avx2-x86_64.S | 34 #define u0 0x00(%r8) 35 #define u1 0x04(%r8) 36 #define u2 0x08(%r8) 37 #define u3 0x0c(%r8) 38 #define u4 0x10(%r8) 39 #define w0 0x14(%r8) 40 #define w1 0x18(%r8) 41 #define w2 0x1c(%r8) 42 #define w3 0x20(%r8) 43 #define w4 0x24(%r8) [all …]
|
/linux-4.4.14/arch/arm/common/ |
D | mcpm_head.S | 76 ldmia r5, {r0, r6, r7, r8, r11} 80 add r8, r5, r8 @ r8 = mcpm_sync 90 mla r8, r0, r10, r8 @ r8 = sync cluster base 95 mla r5, r9, r5, r8 @ r5 = sync cpu address 110 ldrb r0, [r8, #MCPM_SYNC_CLUSTER_CLUSTER] 124 strb r0, [r8, #MCPM_SYNC_CLUSTER_INBOUND] 133 ldrb r0, [r8, #MCPM_SYNC_CLUSTER_CLUSTER] 155 strb r0, [r8, #MCPM_SYNC_CLUSTER_CLUSTER] 162 strb r0, [r8, #MCPM_SYNC_CLUSTER_INBOUND] 173 ldrb r0, [r8, #MCPM_SYNC_CLUSTER_CLUSTER]
|
/linux-4.4.14/arch/arm/mach-imx/ |
D | suspend-imx6.S | 106 ldr r8, [r7], #0x4 108 str r9, [r11, r8] 167 add r8, r1, r4 168 add r9, r8, r7 213 ldr r8, =PM_INFO_MMDC_IO_VAL_OFFSET 214 add r8, r8, r0 219 ldr r9, [r8], #0x8 227 ldr r9, [r8], #0x8 229 ldr r9, [r8], #0x8 232 ldr r9, [r8] [all …]
|
D | ssi-fiq.S | 62 and r10, r10, r8 /* r10: current buffer offset */ 79 lsr r11, r8, #16 /* r11: buffer size */ 81 lslgt r8, r11, #16 82 addle r8, #8
|
/linux-4.4.14/arch/powerpc/platforms/83xx/ |
D | suspend-asm.S | 84 mfspr r8, SPRN_DABR2 91 stw r8, SS_DABR+4(r3) 98 mfsdr1 r8 104 stw r8, SS_SDR1(r3) 233 lis r8, TMP_VIRT_IMMR@h 234 ori r4, r8, 0x001e /* 1 MByte accessible from Kernel Space only */ 289 stw r4, 0x0024(r8) 290 stw r4, 0x002c(r8) 291 stw r4, 0x0034(r8) 292 stw r4, 0x003c(r8) [all …]
|
/linux-4.4.14/arch/hexagon/lib/ |
D | memset.S | 47 r8 = r0 /* leave r0 intact for return val */ define 64 memb(r8++#1) = r4 76 memb(r8++#1) = r4 89 memh(r8++#2) = r4 102 memw(r8++#4) = r4 125 memd(r8++#8) = r5:4 142 memw(r8++#4) = r4 153 memh(r8++#2) = r4 159 memb(r8++#1) = r4 252 r8 = r3 define [all …]
|
/linux-4.4.14/tools/testing/selftests/powerpc/copyloops/ |
D | memcpy_64.S | 67 mr r8,r9 70 std r8,8(r3) 71 2: ldu r8,16(r4) 74 3: std r8,8(r3) 113 sld r8,r0,r10 117 # s1<< in r8, d0=(s0<<|s1>>) in r7, s3 in r0, s2 in r9, nix in r6 & r12 122 sld r8,r0,r10 129 or r12,r8,r12 131 sld r8,r0,r10 135 # d0=(s0<<|s1>>) in r12, s1<< in r6, s2>> in r7, s2<< in r8, s3 in r9 [all …]
|
D | copyuser_64.S | 73 mr r8,r6 79 270: std r8,8(r3) 81 222: ld r8,8(r4) 87 272: std r8,8(r3) 132 sLd r8,r0,r10 140 sLd r8,r0,r10 147 or r12,r8,r12 149 sLd r8,r0,r10 159 or r12,r8,r12 162 sLd r8,r0,r10 [all …]
|
/linux-4.4.14/drivers/power/ |
D | intel_mid_battery.c | 259 u8 r8; in pmic_battery_read_status() local 280 if (intel_scu_ipc_ioread8(PMIC_BATT_CHR_SCHRGINT_ADDR, &r8)) { in pmic_battery_read_status() 292 if (r8 & PMIC_BATT_CHR_SBATDET_MASK) { in pmic_battery_read_status() 303 if (r8 & PMIC_BATT_CHR_SBATOVP_MASK) { in pmic_battery_read_status() 308 } else if (r8 & PMIC_BATT_CHR_STEMP_MASK) { in pmic_battery_read_status() 315 if (r8 & PMIC_BATT_CHR_SDCLMT_MASK) { in pmic_battery_read_status() 323 if (r8 & PMIC_BATT_CHR_SUSBDET_MASK) { in pmic_battery_read_status() 332 if (r8 & PMIC_BATT_CHR_SUSBOVP_MASK) { in pmic_battery_read_status() 354 if (r8 & PMIC_BATT_CHR_SCOMP_MASK) { in pmic_battery_read_status() 561 u8 r8; in pmic_battery_handle_intrpt() local [all …]
|
/linux-4.4.14/arch/arc/lib/ |
D | strcpy-700.S | 27 mov r8,0x01010101 29 ror r12,r8 30 sub r2,r3,r8 41 sub r2,r3,r8 46 sub r2,r4,r8
|
D | memcpy-archs.S | 77 LOADX (r8, r1) 81 STOREX (r8, r3) 121 ld.ab r8, [r1,4] 128 SHIFT_1 (r9, r8, 24) 130 SHIFT_2 (r5, r8, 8) 166 ld.ab r8, [r1,4] 173 SHIFT_1 (r9, r8, 16) 175 SHIFT_2 (r5, r8, 16) 208 ld.ab r8, [r1,4] 215 SHIFT_1 (r9, r8, 8) [all …]
|
/linux-4.4.14/arch/powerpc/include/asm/ |
D | epapr_hcalls.h | 291 register uintptr_t r8 __asm__("r8"); in ev_byte_channel_send() 300 r8 = be32_to_cpu(p[3]); in ev_byte_channel_send() 304 "+r" (r4), "+r" (r5), "+r" (r6), "+r" (r7), "+r" (r8) in ev_byte_channel_send() 334 register uintptr_t r8 __asm__("r8"); in ev_byte_channel_receive() 343 "=r" (r5), "=r" (r6), "=r" (r7), "=r" (r8) in ev_byte_channel_receive() 351 p[3] = cpu_to_be32(r8); in ev_byte_channel_receive() 475 unsigned long register r8 asm("r8") = in[5]; in epapr_hypercall() 483 "=r"(r7), "=r"(r8), "=r"(r9), "=r"(r10), "=r"(r11), in epapr_hypercall() 485 : "r"(r3), "r"(r4), "r"(r5), "r"(r6), "r"(r7), "r"(r8), in epapr_hypercall() 493 out[4] = r8; in epapr_hypercall()
|
D | fsl_hcalls.h | 133 register uintptr_t r8 __asm__("r8"); in fh_partition_get_dtprop() 143 r8 = propvalue_addr >> 32; in fh_partition_get_dtprop() 147 r8 = 0; in fh_partition_get_dtprop() 157 "+r" (r8), "+r" (r9), "+r" (r10) in fh_partition_get_dtprop() 185 register uintptr_t r8 __asm__("r8"); in fh_partition_set_dtprop() 197 r8 = propvalue_addr >> 32; in fh_partition_set_dtprop() 201 r8 = 0; in fh_partition_set_dtprop() 211 "+r" (r8), "+r" (r9), "+r" (r10) in fh_partition_set_dtprop()
|
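In the fh_partition_get_dtprop()/fh_partition_set_dtprop() hits from fsl_hcalls.h above, a 64-bit guest address is marshalled into 32-bit hypercall registers: the high word goes into r8 (or 0 when 64-bit physical addressing is not configured) and the low half travels in a neighbouring register. A quick standalone check, using a made-up address, that such a split recombines losslessly:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
            uint64_t propvalue_addr = 0x00000001deadbeefULL; /* illustrative only */
            uint32_t hi = propvalue_addr >> 32;              /* what lands in r8 */
            uint32_t lo = (uint32_t)propvalue_addr;          /* low half */
            uint64_t back = ((uint64_t)hi << 32) | lo;

            printf("%s\n", back == propvalue_addr ? "lossless" : "mismatch");
            return 0;
    }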
D | exception-64e.h | 168 std r8,EX_TLB_R8(r12); \ 174 ld r8,EX_TLB_R8(r12); \ 183 ld r8,PACA_EXTLB+EX_TLB_ESR(r13); \ 184 cmpdi cr2,r8,-1; \
|
D | ftrace.h | 22 stw r8, 32(r1); \ 39 lwz r8, 32(r1); \
|
/linux-4.4.14/drivers/net/wireless/b43/ |
D | phy_a.c | 102 u16 freq, r8, tmp; in aphy_channel_switch() local 106 r8 = b43_radio_read16(dev, 0x0008); in aphy_channel_switch() 108 b43_radio_write16(dev, 0x0008, r8); in aphy_channel_switch() 121 r8 = 3 * freq / 116; /* is equal to r8 = freq * 0.025862 */ in aphy_channel_switch() 123 b43_radio_write16(dev, 0x0007, (r8 << 4) | r8); in aphy_channel_switch() 124 b43_radio_write16(dev, 0x0020, (r8 << 4) | r8); in aphy_channel_switch() 125 b43_radio_write16(dev, 0x0021, (r8 << 4) | r8); in aphy_channel_switch() 126 b43_radio_maskset(dev, 0x0022, 0x000F, (r8 << 4)); in aphy_channel_switch() 127 b43_radio_write16(dev, 0x002A, (r8 << 4)); in aphy_channel_switch() 128 b43_radio_write16(dev, 0x002B, (r8 << 4)); in aphy_channel_switch() [all …]
|
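The phy_a.c hits above scale the channel frequency with pure integer math; the in-source comment notes that 3 * freq / 116 stands in for freq * 0.025862 (and indeed 3/116 ≈ 0.0258621). A tiny standalone check; the value 5240 is just an illustrative 5 GHz-range figure, not taken from the driver:

    #include <stdio.h>

    int main(void)
    {
            unsigned int freq = 5240;               /* illustrative value (MHz) */
            unsigned int r8 = 3 * freq / 116;       /* integer form used by the driver */
            double approx = freq * 0.025862;        /* the comment's equivalent */

            printf("%u vs %.3f\n", r8, approx);     /* prints: 135 vs 135.517 */
            return 0;
    }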
/linux-4.4.14/arch/arm/mach-s3c24xx/ |
D | sleep-s3c2410.S | 50 ldr r8, [r5] @ get MISCCR (and ensure in TLB) 54 orr r8, r8, #S3C2410_MISCCR_SDSLEEP @ SDRAM power-down signals 66 streq r8, [r5] @ SDRAM power-down config
|
/linux-4.4.14/arch/sh/kernel/cpu/sh3/ |
D | entry.S | 137 mov r5, r8 150 mov r8, r5 193 mov.l 1f, r8 212 ! - restore r8, r9, r10, r11, r12, r13, r14, r15 from the stack 217 ! r8 passes SR bitmask, overwritten with restored data on return 219 ! BL=0 on entry, on exit BL=1 (depending on r8). 232 or r8, r9 235 mov.l @r15+, r8 253 mov.l 7f, r8 494 mov r15, r8 ! trap handlers take saved regs in r8
|
D | swsusp.S | 63 mov.l 3f, r8 99 mov r8, r5 ! save r8 in r5 124 mov r5, r8 ! restore old r8 134 mov r5, r8 ! restore old r8
|
/linux-4.4.14/arch/sh/kernel/cpu/shmobile/ |
D | sleep.S | 63 mov.l r8, @-r15 222 stc sr, r8 223 and r9, r8 224 or r10, r8 225 ldc r8, sr 360 mov.l @r15+, r8 384 stc sr, r8 385 and r9, r8 386 or r10, r8 387 ldc r8, sr
|
/linux-4.4.14/drivers/parisc/ |
D | superio.c | 292 u8 r8; in superio_mask_irq() local 302 r8 = inb(IC_PIC1+1); in superio_mask_irq() 303 r8 |= (1 << irq); in superio_mask_irq() 304 outb (r8,IC_PIC1+1); in superio_mask_irq() 310 u8 r8; in superio_unmask_irq() local 319 r8 = inb(IC_PIC1+1); in superio_unmask_irq() 320 r8 &= ~(1 << irq); in superio_unmask_irq() 321 outb (r8,IC_PIC1+1); in superio_unmask_irq()
|
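The superio.c hits above mask and unmask an interrupt line with a classic read-modify-write of the i8259-style mask register at IC_PIC1+1. A minimal sketch of that pattern follows; the inb()/outb() port access is replaced by a plain variable so the example runs anywhere:

    #include <stdint.h>
    #include <stdio.h>

    static uint8_t pic_mask = 0xff;         /* stand-in for the register at IC_PIC1+1 */

    static void sketch_mask_irq(unsigned int irq)
    {
            uint8_t r8 = pic_mask;          /* read current mask bits */
            r8 |= (uint8_t)(1u << irq);     /* set bit => line masked */
            pic_mask = r8;                  /* write it back */
    }

    static void sketch_unmask_irq(unsigned int irq)
    {
            uint8_t r8 = pic_mask;
            r8 &= (uint8_t)~(1u << irq);    /* clear bit => line enabled */
            pic_mask = r8;
    }

    int main(void)
    {
            sketch_unmask_irq(5);
            printf("after unmask(5): 0x%02x\n", (unsigned)pic_mask);  /* 0xdf */
            sketch_mask_irq(5);
            printf("after mask(5):   0x%02x\n", (unsigned)pic_mask);  /* 0xff */
            return 0;
    }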
/linux-4.4.14/arch/arm/kvm/ |
D | interrupts_head.S | 97 mrs r8, LR_fiq 121 msr LR_fiq, r8 175 msr LR_fiq, r8 256 mrrc p15, 1, r8, r9, c2 @ TTBR 1 271 strd r8, r9, [r2] 283 mrc p15, 0, r8, c5, c1, 0 @ ADFSR 298 str r8, [vcpu, #CP15_OFFSET(c5_ADFSR)] 353 ldr r8, [vcpu, #CP15_OFFSET(c5_ADFSR)] 366 mcr p15, 0, r8, c5, c1, 0 @ ADFSR 382 ldrd r8, r9, [r12] [all …]
|
/linux-4.4.14/arch/m32r/kernel/ |
D | entry.S | 132 GET_THREAD_INFO(r8) 133 ld r0, R0(r8) 134 ld r1, R1(r8) 141 GET_THREAD_INFO(r8) 215 ld r9, @(TI_FLAGS, r8)
|
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/engine/ce/fuc/ |
D | com.fuc | 273 add b32 $r8 $r6 0x180 274 shl b32 $r8 8 275 iowr I[$r8] $r7 437 clear b32 $r8 464 st b8 D[$sp + $r8] $r12 465 add b32 $r8 1 476 ld b32 $r8 D[$r0 + #ctx_xcnt] 477 mulu $r6 $r8 484 mulu $r7 $r8 542 extr $r8 $r7 4:7 [all …]
|
/linux-4.4.14/arch/powerpc/platforms/ps3/ |
D | hvcall.S | 126 std r8, -48(r1); \ 142 std r8, 0(r11); \ 256 stdu r8, -40(r1); \ 271 std r8, 0(r11); \ 287 std r8, -40(r1); \ 303 std r8, 0(r11); \ 321 std r8, -40(r1); \ 338 std r8, 0(r11); \ 423 stdu r8, -32(r1); \ 451 std r8, -32(r1); \ [all …]
|
/linux-4.4.14/arch/unicore32/boot/compressed/ |
D | head.S | 29 ldm (r1, r2, r3, r5, r6, r7, r8), [r0]+ 48 add r8, r8, r0 69 csub.a r7, r8 168 .word _got_end @ r8
|
/linux-4.4.14/arch/arm/kernel/ |
D | entry-common.S | 150 ARM( add r8, sp, #S_PC ) 151 ARM( stmdb r8, {sp, lr}^ ) @ Calling sp, lr 152 THUMB( mov r8, sp ) 153 THUMB( store_user_sp_lr r8, r10, S_SP ) @ calling sp, lr 154 mrs r8, spsr @ called from non-FIQ mode, so ok. 156 str r8, [sp, #S_PSR] @ Save CPSR 176 tst r8, #PSR_T_BIT 191 tst r8, #PSR_T_BIT @ this is SPSR from save_user_regs 315 @ r8 = syscall table
|
D | entry-header.S | 98 @ r8-r12 is OK. 100 ldmia r9!, {r8, r10-r12} 113 str r8, [sp, #S_IP] 114 add r8, sp, #S_SP 116 stmia r8!, {r9-r12} 118 str r0, [r8] 381 tbl .req r8 @ syscall table pointer 382 why .req r8 @ Linux syscall (!= 0)
|
D | head.S | 110 ldmia r3, {r4, r8} 112 add r8, r8, r4 @ PHYS_OFFSET 114 ldr r8, =PLAT_PHYS_OFFSET @ always constant in this case 154 mov r8, r4, lsr #12 @ TTBR1 is swapper_pg_dir pfn 156 mov r8, r4 @ set TTBR1 to swapper_pg_dir 181 pgtbl r4, r8 @ page table address 249 orr r3, r8, r7 281 subne r3, r0, r8 405 ldr r8, [r3, #8] @ get secondary_data.swapper_pg_dir
|
D | fiqasm.S | 30 ldmia r0!, {r8 - r12} 43 stmia r0!, {r8 - r12}
|
/linux-4.4.14/arch/avr32/include/asm/ |
D | asm.h | 59 pushm r8-r9,r10,r11,r12,lr 68 popm r8-r9,r10,r11,r12,lr 73 stmts --sp, r0,r1,r2,r3,r4,r5,r6,r7,r8,r9,r10,r11,r12,sp,lr 84 ldmts sp++, r0,r1,r2,r3,r4,r5,r6,r7,r8,r9,r10,r11,r12,sp,lr
|
/linux-4.4.14/drivers/media/dvb-frontends/ |
D | stv6110.c | 183 u8 r8, ret = 0x04; in stv6110_set_bandwidth() local 187 r8 = 31; in stv6110_set_bandwidth() 189 r8 = 0; in stv6110_set_bandwidth() 191 r8 = (bandwidth / 2) / 1000000 - 5; in stv6110_set_bandwidth() 196 priv->regs[RSTV6110_CTRL3] |= (r8 & 0x1f); in stv6110_set_bandwidth() 374 u8 r8 = 0; in stv6110_get_bandwidth() local 379 r8 = priv->regs[RSTV6110_CTRL3] & 0x1f; in stv6110_get_bandwidth() 380 *bandwidth = (r8 + 5) * 2000000;/* x2 for ZIF tuner BW/2 = F+5 Mhz */ in stv6110_get_bandwidth()
|
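The stv6110.c hits above encode the requested low-pass bandwidth into the low five bits of CTRL3 (the boundary branches pin r8 to 31 or 0) and decode it back in stv6110_get_bandwidth(). For the in-range case the two formulas are inverses, as a quick round-trip with an illustrative 36 MHz request shows:

    #include <stdio.h>

    int main(void)
    {
            unsigned int bandwidth = 36000000;                      /* requested, Hz */
            unsigned char r8 = (bandwidth / 2) / 1000000 - 5;       /* encode: 18 - 5 = 13 */
            unsigned int decoded = (r8 + 5) * 2000000;              /* decode: 36000000 Hz */

            printf("r8=%u decoded=%u Hz\n", (unsigned)r8, decoded);
            return 0;
    }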
/linux-4.4.14/arch/s390/kernel/ |
D | relocate_kernel.S | 80 lgr %r8,%r5 # r8 = r5 81 nill %r8,0xf000 # masking 82 0: mvcle %r6,%r8,0x0 # copy PAGE_SIZE bytes from r8 to r6 - pad with 0
|
D | head_kdump.S | 41 lghi %r8,DATAMOVER_ADDR # Target of data mover 42 mvc 0(256,%r8),0(%r10) # Copy data mover code 44 agr %r8,%r2 # Copy data mover to 45 mvc 0(256,%r8),0(%r10) # reserved mem
|
D | entry.S | 88 tmhh %r8,0x0001 # interrupting from user ? 96 tmhh %r8,0x0001 # retest problem state after cleanup 130 stg %r8,__LC_RETURN_PSW 262 stmg %r8,%r15,__LC_SAVE_AREA_SYNC 279 llgh %r8,__PT_INT_CODE+2(%r11) 280 slag %r8,%r8,2 # shift and test for svc 0 287 slag %r8,%r1,2 292 lgf %r9,0(%r8,%r10) # get system call add. 380 lghi %r8,0 # svc 0 returns -ENOSYS 384 slag %r8,%r1,2 [all …]
|
/linux-4.4.14/arch/avr32/mm/ |
D | copy_page.S | 24 ld.d r8, r11++ 25 st.d r12++, r8
|
/linux-4.4.14/arch/sh/kernel/cpu/sh5/ |
D | switchto.S | 57 addi.l r0, (63*8), r8 ! base of pt_regs.trregs 114 st.q r8, (5*8), r45 115 st.q r8, (6*8), r46 116 st.q r8, (7*8), r47 143 ld.q r8, (5*8), r45 144 ld.q r8, (6*8), r46 145 ld.q r8, (7*8), r47
|
/linux-4.4.14/drivers/misc/sgi-xp/ |
D | xp_nofault.S | 26 mov r8=r0 // Stage a success return value 34 mov r8=1 // Return value of 1
|
/linux-4.4.14/arch/x86/purgatory/ |
D | entry64.S | 48 movq r8(%rip), %r8 71 r8: .quad 0x0 label
|
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/engine/sec/fuc/ |
D | g98.fuc0s | 186 add b32 $r8 $r6 0x180 187 shl b32 $r8 8 188 iowr I[$r8] $r7 456 shr b32 $r8 $r5 8 458 or $r4 $r8 464 shr b32 $r8 $r7 8 466 or $r6 $r8 470 ld b32 $r8 D[$r0 + #ctx_mode] 471 shl b32 $r8 2 474 ld b16 $r9 D[$r8 + #sec_dtable] [all …]
|
/linux-4.4.14/tools/testing/selftests/powerpc/pmu/ebb/ |
D | busy_loop.S | 41 li r8, 0x0808 42 std r8, -136(%r1) 101 cmpwi r8, 0x0808 152 li r8, 0xad 188 ld r8, -136(%r1) 189 cmpwi r8, 0x0808
|
/linux-4.4.14/sound/oss/ |
D | vidc_fill.S | 137 stmfd sp!, {r4 - r8, lr} 138 ldr r8, =dma_start 139 ldmia r8, {r0, r1, r2, r3, r4, r5} 156 stmia r8, {r0, r1} 168 ldmdb r8, {r3, r4, r5} 187 ldmfd sp!, {r4 - r8, lr}
|
/linux-4.4.14/arch/arm/include/asm/ |
D | entry-macro-multi.S | 36 mov r8, lr 38 ret r8
|
/linux-4.4.14/arch/ia64/sn/kernel/sn2/ |
D | ptc_deadlock.S | 39 mov r8=r0 88 (p8) add r8=1,r8
|
/linux-4.4.14/arch/cris/arch-v10/lib/ |
D | checksum.S | 25 ;; only r0 - r8 have to be saved, the other ones are clobber-able 29 movem $r8,[$sp] 55 add.d $r8,$r12 71 movem [$sp+],$r8 ; restore regs
|
D | checksumcopy.S | 29 ;; only r0 - r8 have to be saved, the other ones are clobber-able 33 movem $r8, [$sp] 61 add.d $r8,$r13 77 movem [$sp+],$r8 ; restore regs
|
/linux-4.4.14/arch/arm/boot/bootp/ |
D | init.S | 37 @ r8 = initrd end 60 stmia r9, {r5, r6, r7, r8, r10} 86 .word initrd_size @ r8
|
/linux-4.4.14/arch/cris/arch-v32/lib/ |
D | checksum.S | 23 movem $r8,[$sp] 40 addc $r8,$r12 52 movem [$sp+],$r8 ; restore regs
|
D | checksumcopy.S | 27 movem $r8,[$sp] 44 addc $r8,$r13 55 movem [$sp+],$r8 ; restore regs
|
/linux-4.4.14/arch/arm/mach-tegra/ |
D | reset-handler.S | 46 check_cpu_part_num 0xc09, r8, r9 72 cmp r8, r9 88 cmp r8, r9 180 ldr r8, [r12, #RESET_DATA(MASK_LP1)] 181 tst r8, r11 @ if in_lp1
|
/linux-4.4.14/arch/cris/include/uapi/asm/ |
D | ptrace_v10.h | 67 unsigned long r8; member 97 unsigned long r8; member
|
D | ptrace_v32.h | 67 unsigned long r8; member 104 unsigned long r8; member
|
D | elf_v10.h | 35 (_r)->r9 = 0; (_r)->r8 = 0; (_r)->r7 = 0; (_r)->r6 = 0; \ 55 pr_reg[8] = regs->r8; \
|
D | elf_v32.h | 24 (_r)->r9 = 0; (_r)->r8 = 0; (_r)->r7 = 0; (_r)->r6 = 0; \ 51 pr_reg[8] = regs->r8; \
|
/linux-4.4.14/arch/arm/boot/compressed/ |
D | ll_char_wr.S | 38 @ Smashable regs: {r0 - r3}, [r4 - r7], (r8 - fp), [ip], (sp), [lr], (pc) 65 @ Smashable regs: {r0 - r3}, [r4], {r5 - r7}, (r8 - fp), [ip], (sp), {lr}, (pc) 83 @ Smashable regs: {r0 - r3}, [r4], {r5 - r7}, (r8 - fp), [ip], (sp), {lr}, (pc) 111 @ Smashable regs: {r0 - r3}, [r4], {r5, r6}, [r7], (r8 - fp), [ip], (sp), [lr], (pc)
|
/linux-4.4.14/arch/arc/kernel/ |
D | signal.c | 85 uregs.scratch.r8 = regs->r8; in stash_usr_regs() 130 regs->r8 = uregs.scratch.r8; in restore_usr_regs() 382 regs->r8 = __NR_restart_syscall; in do_signal()
|
D | ptrace.c | 63 REG_O_ONE(scratch.r8, &ptregs->r8); in genregs_get() 153 REG_IN_ONE(scratch.r8, &ptregs->r8); in genregs_set() 242 return regs->r8; in syscall_trace_entry()
|
/linux-4.4.14/arch/unicore32/kernel/ |
D | sleep.S | 92 movl r8, #0x800001ff @ epip4d 93 stw r8, [r1+], #0xc 110 mov r8, #0x1 141 stw r8, [r1]
|
/linux-4.4.14/arch/powerpc/platforms/powernv/ |
D | subcore-asm.S | 49 mfspr r8, SPRN_PMCR 82 mtspr SPRN_PMCR, r8
|
/linux-4.4.14/arch/parisc/kernel/ |
D | entry.S | 158 ldi \code, %r8 191 va = r8 /* virtual address for which the trap occurred */ 1050 copy %r8,%r26 1054 copy %r8,%r26 1083 extrd,u,*<> %r8,PSW_W_BIT,1,%r0 1143 va = r8 /* virtual address for which the trap occurred */ 1358 extrw,u %r9,15,5,%r8 /* Get index register # */ 1362 extrw,u %r9,10,5,%r8 /* Get base register # */ 1368 mfctl %ipsw,%r8 1370 or %r8,%r9,%r8 /* Set PSW_N */ [all …]
|
/linux-4.4.14/arch/microblaze/include/asm/ |
D | syscall.h | 50 case 3: return regs->r8; in microblaze_get_syscall_arg() 70 regs->r8 = val; in microblaze_set_syscall_arg()
|
/linux-4.4.14/tools/testing/selftests/x86/ |
D | test_syscall_vdso.c | 69 uint64_t r8, r9, r10, r11; member 122 …printf(" 8:%016llx 9:%016llx 10:%016llx 11:%016llx\n", regs64.r8 , regs64.r9 , regs64.r10, reg… in print_regs64() 130 uint64_t *r64 = ®s64.r8; in check_regs64()
|
/linux-4.4.14/arch/arm/mach-sa1100/ |
D | sleep.S | 100 orr r8, r7, #MDREFR_SLFRSH 107 bic r11, r8, #MDREFR_SLFRSH 128 str r8, [r6]
|
/linux-4.4.14/arch/arm/mach-pxa/ |
D | sleep.S | 70 ldr r8, [r6] @ keep original value for resume 113 ldr r8, [r6] @ keep original value for resume 116 bic r7, r8, #CCCR_M_MASK | CCCR_N_MASK 148 str r8, [r6]
|
/linux-4.4.14/arch/microblaze/kernel/ |
D | head.S | 69 addi r8, r0, 0xFFFFFFFF 70 mts rshr, r8 79 msrclr r8, 0 /* clear nothing - just read msr for test */ 80 cmpu r8, r8, r1 /* r1 must contain msr reg content */
|
/linux-4.4.14/arch/nios2/include/asm/ |
D | syscall.h | 86 *args++ = regs->r8; in syscall_get_arguments() 125 regs->r8 = *args++; in syscall_set_arguments()
|
/linux-4.4.14/arch/x86/xen/ |
D | xen-asm.S | 127 push %r8 135 pop %r8
|
/linux-4.4.14/arch/arm/mach-shmobile/ |
D | headsmp.S | 58 ldr r8, [r5, r1, lsl #2] 59 cmp r8, r0
|
/linux-4.4.14/arch/parisc/include/asm/ |
D | assembly.h | 188 STREG %r8, PT_GR8 (\regs) 222 LDREG PT_GR8 (\regs), %r8 352 std %r8, -104(%r30) 378 ldd -104(%r30), %r8 396 stw %r8, -108(%r30) 422 ldw -108(%r30), %r8 467 mfctl %cr22, %r8 468 STREG %r8, PT_PSW(\regs)
|
/linux-4.4.14/arch/arc/include/asm/ |
D | ptrace.h | 33 unsigned long r12, r11, r10, r9, r8, r7, r6, r5, r4, r3, r2, r1, r0; member 90 unsigned long r0, r1, r2, r3, r4, r5, r6, r7, r8, r9, r10, r11; member
|