/linux-4.1.27/arch/powerpc/platforms/52xx/ |
D | mpc52xx_sleep.S | 20 lwz r8, 0x14(r6) /* intr->main_mask */ 21 ori r8, r8, 0x1 22 xori r8, r8, 0x1 23 stw r8, 0x14(r6) 27 li r8, 0x1 28 stw r8, 0x40(r6) /* intr->main_emulate */ 68 lwz r8, 0x4(r4) /* sdram->ctrl */ 70 oris r8, r8, 0x8000 /* mode_en */ 71 stw r8, 0x4(r4) 74 ori r8, r8, 0x0002 /* soft_pre */ [all …]
|
D | lite5200_sleep.S | 45 mr r8, r4 /* save MBAR va */ 108 lwz r4, SDRAM_CTRL(r8) 112 stw r4, SDRAM_CTRL(r8) 116 stw r4, SDRAM_CTRL(r8) 121 stw r4, SDRAM_CTRL(r8) 134 stw r4, SDRAM_CTRL(r8) 142 lwz r4, CDM_CE(r8) 145 stw r4, CDM_CE(r8) 155 stb r4, GPIOW_GPIOE(r8) /* enable gpio_wkup1 */ 158 stb r4, GPIOW_DVO(r8) /* "output" high */ [all …]
|
/linux-4.1.27/arch/avr32/lib/ |
D | findbit.S | 19 1: ld.w r8, r12[0] 20 com r8 33 lsr r8, r10, 5 37 lsl r8, 2 38 add r12, r8 43 ld.w r8, r12[0] 44 com r8 46 lsr r8, r8, r10 55 1: ld.w r8, r12[0] 56 com r8 [all …]
|
D | memcpy.S | 31 3: ld.w r8, r11++ 33 st.w r12++, r8 42 ld.ub r8, r11++ 43 st.b r12++, r8 44 ld.ub r8, r11++ 45 st.b r12++, r8 46 ld.ub r8, r11++ 47 st.b r12++, r8 57 ld.ub r8, r11++ 58 st.b r12++, r8 [all …]
|
D | copy_user.S | 29 branch_if_kernel r8, __copy_user 30 ret_if_privileged r8, r11, r10, r10 37 branch_if_kernel r8, __copy_user 38 ret_if_privileged r8, r12, r10, r10 53 10: ld.w r8, r11++ 54 11: st.w r12++, r8 66 20: ld.ub r8, r11++ 67 21: st.b r12++, r8 70 22: ld.ub r8, r11++ 71 23: st.b r12++, r8 [all …]
|
D | io-readsb.S | 11 1: ld.ub r8, r12[0] 13 st.b r11++, r8 33 1: ldins.b r8:t, r12[0] 34 ldins.b r8:u, r12[0] 35 ldins.b r8:l, r12[0] 36 ldins.b r8:b, r12[0] 37 st.w r11++, r8 44 3: ld.ub r8, r12[0] 46 st.b r11++, r8
|
D | io-writesb.S | 11 1: ld.ub r8, r11++ 13 st.b r12[0], r8 33 1: ld.w r8, r11++ 34 bfextu r9, r8, 24, 8 36 bfextu r9, r8, 16, 8 38 bfextu r9, r8, 8, 8 40 st.b r12[0], r8 47 3: ld.ub r8, r11++ 49 st.b r12[0], r8
|
D | clear_user.S | 17 branch_if_kernel r8, __clear_user 18 ret_if_privileged r8, r12, r11, r11 24 mov r8, 0 31 10: st.w r12++, r8 41 11: st.h r12++, r8 44 12: st.b r12++, r8 53 13: st.b r12++, r8 55 14: st.b r12++, r8 57 15: st.b r12++, r8
|
D | memset.S | 28 mov r8, r12 38 4: st.w r8++, r11 54 st.b r8++, r11 55 st.b r8++, r11 58 6: st.b r8++, r11 67 st.b r8++, r11 68 st.b r8++, r11 69 st.b r8++, r11
|
D | io-readsw.S | 14 ld.uh r8, r12[0] 16 st.h r11++, r8 32 1: ldins.h r8:t, r12[0] 33 ldins.h r8:b, r12[0] 34 st.w r11++, r8 41 ld.uh r8, r12[0] 42 st.h r11++, r8
|
D | io-writesw.S | 10 ld.uh r8, r11++ 12 st.h r12[0], r8 26 1: ld.w r8, r11++ 27 bfextu r9, r8, 16, 16 29 st.h r12[0], r8 36 ld.uh r8, r11++ 37 st.h r12[0], r8
|
D | strncpy_from_user.S | 32 branch_if_kernel r8, __strncpy_from_user 33 ret_if_privileged r8, r11, r10, r9 43 1: ld.ub r8, r11++ 44 st.b r12++, r8 45 cp.w r8, 0
|
D | strnlen_user.S | 20 branch_if_kernel r8, __strnlen_user 21 sub r8, r11, 1 22 add r8, r12 31 10: ld.ub r8, r12++ 32 cp.w r8, 0
|
D | csum_partial.S | 33 mov r8, 0 40 6: ld.ub r8, r12++ 41 lsl r8, 8 42 7: or r9, r8
|
D | csum_partial_copy_generic.S | 78 cp.w r8, 0 80 st.w r8[0], r9 92 lddsp r8, sp[20] 93 cp.w r8, 0 95 st.w r8[0], r9
|
D | io-writesl.S | 15 1: ld.w r8, r11++ 17 st.w r12[0], r8
|
D | io-readsl.S | 19 1: ld.w r8, r12[0] 21 st.w r11++, r8
|
D | __avr32_lsl64.S | 22 lsr r8, r10, r9 25 or r11, r8
|
D | __avr32_lsr64.S | 22 lsl r8, r11, r9 25 or r10, r8
|
D | __avr32_asr64.S | 22 lsl r8, r11, r9 25 or r10, r8
|
/linux-4.1.27/arch/powerpc/mm/ |
D | hash_low_32.S | 54 addis r8,r7,mmu_hash_lock@h 55 ori r8,r8,mmu_hash_lock@l 58 11: lwz r6,0(r8) 61 10: lwarx r6,0,r8 64 stwcx. r0,0,r8 71 mfspr r8,SPRN_SPRG_THREAD /* current task's THREAD (phys) */ 73 lwz r5,PGDIR(r8) /* virt page-table root */ 81 lwz r8,0(r5) /* get pmd entry */ 82 rlwinm. r8,r8,0,0,19 /* extract address of pte page */ 84 rlwinm r8,r4,13,19,29 /* Compute pgdir/pmd offset */ [all …]
|
D | hash_low_64.S | 61 std r8,STK_PARAM(R8)(r1) 202 li r8,MMU_PAGE_4K /* page size */ 227 li r8,MMU_PAGE_4K /* page size */ 301 ld r8,STK_PARAM(R9)(r1) /* segment size */ 355 std r8,STK_PARAM(R8)(r1) 531 li r8,MMU_PAGE_4K /* page size */ 560 li r8,MMU_PAGE_4K /* page size */ 598 ld r8,STK_PARAM(R8)(r1) /* flags */ 669 ld r8,STK_PARAM(R9)(r1) /* segment size */ 718 std r8,STK_PARAM(R8)(r1) [all …]
|
/linux-4.1.27/arch/score/kernel/ |
D | entry.S | 38 mfcr r8, cr0 39 srli r8, r8, 1 40 slli r8, r8, 1 41 mtcr r8, cr0 53 mfcr r8, cr0 54 ori r8, 1 55 mtcr r8, cr0 118 la r8, nmi_exception_handler 119 brl r8 124 mfcr r8, cr6 [all …]
|
D | head.S | 38 xor r8, r8, r8 43 sw r8, [r30] /* clean memory. */
|
/linux-4.1.27/arch/arm/lib/ |
D | memset.S | 38 stmfd sp!, {r8, lr} 41 UNWIND( .save {r8, lr} ) 42 mov r8, r1 46 stmgeia ip!, {r1, r3, r8, lr} @ 64 bytes at a time. 47 stmgeia ip!, {r1, r3, r8, lr} 48 stmgeia ip!, {r1, r3, r8, lr} 49 stmgeia ip!, {r1, r3, r8, lr} 51 ldmeqfd sp!, {r8, pc} @ Now <64 bytes to go. 56 stmneia ip!, {r1, r3, r8, lr} 57 stmneia ip!, {r1, r3, r8, lr} [all …]
|
D | copy_template.S | 92 stmfd sp!, {r5 - r8} 97 UNWIND( .save {r5 - r8} ) @ in second stmfd block 116 4: ldr8w r1, r3, r4, r5, r6, r7, r8, ip, lr, abort=20f 118 str8w r0, r3, r4, r5, r6, r7, r8, ip, lr, abort=20f 139 ldr1w r1, r8, abort=20f 157 str1w r0, r8, abort=20f 162 7: ldmfd sp!, {r5 - r8} 228 ldr4w r1, r8, r9, ip, lr, abort=19f 237 orr r7, r7, r8, lspush #\push 238 mov r8, r8, lspull #\pull [all …]
|
D | csumpartialcopygeneric.S | 46 .Ldst_16bit: load2b r8, ip 48 adcs sum, sum, r8, put_byte_0 49 strb r8, [dst], #1 73 1: load2b r8, ip 75 adcs sum, sum, r8, put_byte_0 76 strb r8, [dst], #1 85 load1b r8 86 adcs sum, sum, r8, put_byte_0 @ update checksum 87 strb r8, [dst], #1 178 1: load4l r5, r6, r7, r8 [all …]
|
D | memmove.S | 53 stmfd sp!, {r5 - r8} 58 UNWIND( .save {r5 - r8} ) @ in second stmfd block 77 4: ldmdb r1!, {r3, r4, r5, r6, r7, r8, ip, lr} 79 stmdb r0!, {r3, r4, r5, r6, r7, r8, ip, lr} 94 W(ldr) r8, [r1, #-4]! 105 W(str) r8, [r0, #-4]! 110 7: ldmfd sp!, {r5 - r8} 172 13: ldmdb r1!, {r7, r8, r9, ip} 180 orr r9, r9, r8, lspull #\pull 181 mov r8, r8, lspush #\push [all …]
|
D | csumpartialcopy.S | 21 stmfd sp!, {r1, r4 - r8, lr} 25 ldmfd sp!, {r1, r4 - r8, pc}
|
D | csumpartialcopyuser.S | 21 stmfd sp!, {r1, r2, r4 - r8, lr} 25 ldmfd sp!, {r1, r2, r4 - r8, pc}
|
/linux-4.1.27/arch/arm/mm/ |
D | abort-lv4t.S | 28 ldr r8, [r4] @ read arm instruction 29 tst r8, #1 << 20 @ L = 1 -> write? 31 and r7, r8, #15 << 24 53 mov r1, r8 57 tst r8, #1 << 21 @ check writeback bit 61 and r6, r8, r7 62 and r9, r8, r7, lsl #1 64 and r9, r8, r7, lsl #2 66 and r9, r8, r7, lsl #3 71 and r9, r8, #15 << 16 @ Extract 'n' from instruction [all …]
|
D | l2c-l2x0-resume.S | 19 ldmia r0, {r1, r2, r3, r4, r5, r6, r7, r8} 27 @ r8 = pwr_ctrl 40 strcs r8, [r1, #L310_POWER_CTRL]
|
D | proc-v6.S | 148 mrc p15, 0, r8, c1, c0, 2 @ co-processor access control 172 mcr p15, 0, r8, c1, c0, 2 @ co-processor access control 216 ALT_SMP(orr r8, r8, #TTB_FLAGS_SMP) 217 ALT_UP(orr r8, r8, #TTB_FLAGS_UP) 218 mcr p15, 0, r8, c2, c0, 1 @ load TTB1
|
/linux-4.1.27/arch/sh/kernel/cpu/sh2a/ |
D | entry.S | 74 mov r2,r8 ! r8 = previous stack top 77 mov.l @r8+,r2 78 mov.l @r8+,r0 79 mov.l @r8+,r1 87 mov r2,r8 ! r8 = previous stack top 90 mov.l @r8+,r2 ! old R2 91 mov.l @r8+,r0 ! old R0 92 mov.l @r8+,r1 ! old R1 93 mov.l @r8+,r10 ! old PC 94 mov.l @r8+,r11 ! old SR [all …]
|
/linux-4.1.27/arch/microblaze/lib/ |
D | fastcopy.S | 107 andi r8, r6, 0xfffffffc /* as = s & ~3 */ 109 lwi r11, r8, 0 /* h = *(as + 0) */ 119 lwi r12, r8, 4 /* v = *(as + 4) */ 124 lwi r12, r8, 8 /* v = *(as + 8) */ 129 lwi r12, r8, 12 /* v = *(as + 12) */ 134 lwi r12, r8, 16 /* v = *(as + 16) */ 139 lwi r12, r8, 20 /* v = *(as + 20) */ 144 lwi r12, r8, 24 /* v = *(as + 24) */ 149 lwi r12, r8, 28 /* v = *(as + 28) */ 154 lwi r12, r8, 32 /* v = *(as + 32) */ [all …]
|
/linux-4.1.27/arch/ia64/lib/ |
D | ip_fast_csum.S | 31 #define ret0 r8 116 add r8=r16,r17 119 add r8=r8,r18 121 add r8=r8,r19 124 add r8=r8,r15 126 shr.u r10=r8,32 // now fold sum into short 127 zxt4 r11=r8 129 add r8=r10,r11 131 shr.u r10=r8,16 // yeah, keep it rolling 132 zxt2 r11=r8 [all …]
|
D | strncpy_from_user.S | 23 mov r8=0 32 EX(.Lexit, ld1 r8=[in1],1) 34 EX(.Lexit, st1 [in0]=r8,1) 35 cmp.ne p6,p7=r8,r0 40 (p6) mov r8=in2 // buffer filled up---return buffer length 41 (p7) sub r8=in1,r9,1 // return string length (excluding NUL character)
|
D | xor.S | 29 mov r8 = in1 46 (p[6+1])st8.nta [r8] = d[1], 8 66 mov r8 = in1 86 (p[6+1])st8.nta [r8] = d[1], 8 106 mov r8 = in1 129 (p[6+1])st8.nta [r8] = d[1], 8 149 mov r8 = in1 174 (p[6+1])st8.nta [r8] = d[1], 8
|
D | strnlen_user.S | 32 EXCLR(.Lexit, ld1 r8=[in0],1) 35 cmp.eq p6,p0=r8,r0 42 mov r8=r9
|
D | flush.S | 38 sub r8=r22,r23 // number of strides - 1 46 mov ar.lc=r8 91 sub r8=r22,r23 // number of strides - 1 100 mov ar.lc=r8
|
/linux-4.1.27/drivers/gpu/drm/nouveau/nvkm/subdev/pmu/fuc/ |
D | kernel.fuc | 102 push $r8 103 nv_iord($r8, NV_PPWR_TIMER_LOW) 106 sub b32 $r9 $r8 109 pop $r8 123 push $r8 124 nv_iord($r8, NV_PPWR_TIMER_LOW) 131 sub b32 $r9 $r8 135 pop $r8 141 // $r8 - NV_PPWR_INTR 182 push $r8 [all …]
|
D | memx.fuc | 85 movw $r8 0x1610 86 nv_rd32($r7, $r8) 91 nv_wr32($r8, $r7) 95 nv_rd32($r8, $r6) 96 and $r8 $r7 97 nv_wr32($r6, $r8) 100 nv_rd32($r8, $r6) 101 and $r8 $r7 102 nv_wr32($r6, $r8) 105 nv_rd32($r8, $r6) [all …]
|
/linux-4.1.27/arch/powerpc/kernel/vdso64/ |
D | cacheflush.S | 39 subf r8,r6,r4 /* compute length */ 40 add r8,r8,r5 /* ensure we get enough */ 42 srw. r8,r8,r9 /* compute line count */ 45 mtctr r8 56 subf r8,r6,r4 /* compute length */ 57 add r8,r8,r5 59 srw. r8,r8,r9 /* compute line count */ 62 mtctr r8
|
D | gettimeofday.S | 98 cmpld cr0,r0,r8 /* check if updated */ 207 1: ld r8,CFG_TB_UPDATE_COUNT(r3) 208 andi. r0,r8,1 /* pending update ? loop */ 210 xor r0,r8,r8 /* create dependency */ 233 cmpld r0,r8 /* check if updated */
|
/linux-4.1.27/arch/powerpc/kernel/vdso32/ |
D | cacheflush.S | 39 subf r8,r6,r4 /* compute length */ 40 add r8,r8,r5 /* ensure we get enough */ 42 srw. r8,r8,r9 /* compute line count */ 45 mtctr r8 56 subf r8,r6,r4 /* compute length */ 57 add r8,r8,r5 59 srw. r8,r8,r9 /* compute line count */ 62 mtctr r8
|
D | gettimeofday.S | 111 cmpl cr0,r8,r0 /* check if updated */ 224 1: lwz r8,(CFG_TB_UPDATE_COUNT+LOPART)(r9) 225 andi. r0,r8,1 /* pending update ? loop */ 227 xor r0,r8,r8 /* create dependency */ 292 cmplw cr0,r8,r0 /* check if updated */
|
/linux-4.1.27/arch/sh/kernel/ |
D | relocate_kernel.S | 34 mov.l r8, @-r15 47 stc sr, r8 48 or r9, r8 49 ldc r8, sr 62 stc sr, r8 63 and r9, r8 64 ldc r8, sr 94 stc sr, r8 95 and r9, r8 96 ldc r8, sr [all …]
|
D | entry-common.S | 88 get_current_thread_info r8, r0 236 get_current_thread_info r8, r0 251 mov r8, r0 253 mov.l 1f, r8 254 add r0, r8 255 mov.l @r8, r8 256 jsr @r8 293 mov.l 1f, r8 294 jsr @r8 302 mov.l 1f, r8 [all …]
|
D | head_32.S | 172 mov.l @r7, r8 173 and r11, r8 174 cmp/eq r0, r8 /* Check for valid __MEMORY_START mappings */ 197 mov #(PMB_UB >> 8), r8 198 shll8 r8 200 or r0, r8 201 or r9, r8 202 mov.l r8, @r1 203 mov r2, r8 204 add r7, r8 [all …]
|
/linux-4.1.27/arch/score/lib/ |
D | checksum.S | 35 lw r8, [src, offset + 0x00]; \ 39 ADDC(sum, r8); \ 43 lw r8, [src, offset + 0x10]; \ 47 ADDC(sum, r8); \ 77 andri.c r8, r5, 0x4 /*Len >= 4?*/ 81 andri.c r8, src, 0x3 /*src is 4bytes aligned, so use LW!!*/ 97 andri.c r8, r5, 0x2 104 andri.c r8, r5, 0x1 147 lbu r8, [src] 149 slli r8, r8, 8 [all …]
|
D | string.S | 36 ldi r8, 0 39 cmp.c r6, r8 43 addi r8, 1 44 cmp.c r8, r9 52 mv r4, r8 55 ldi r8, 0 56 mv r4, r8 142 0: lbu r8, [r6] 144 1: sb r8, [r7]
|
/linux-4.1.27/arch/sh/lib64/ |
D | udivdi3.S | 34 mulu.l r5,r3,r8 37 shlld r8,r0,r8 39 sub r2,r8,r2 44 shlld r5,r0,r8 48 add r8,r21,r8 61 add r8,r7,r8 64 add r8,r5,r2 71 shlri r25,32,r8 76 mulu.l r5,r8,r5 81 shlri r5,14-1,r8 [all …]
|
D | strcpy.S | 47 movi -1, r8 48 SHLO r8, r7, r8 49 mcmv r4, r8, r9
|
D | copy_page.S | 57 addi r7, 64, r8 87 bgt/l r8, r2, tr1
|
/linux-4.1.27/arch/arm/crypto/ |
D | aes-armv4.S | 259 and r8,lr,r0,lsr#8 265 ldr r5,[r10,r8,lsl#2] @ Te2[s0>>8] 266 and r8,lr,r1 273 ldr r8,[r10,r8,lsl#2] @ Te3[s1>>0] 278 eor r5,r5,r8,ror#8 279 and r8,lr,r2,lsr#16 @ i1 284 ldr r8,[r10,r8,lsl#2] @ Te1[s2>>16] 291 eor r1,r1,r8,ror#8 292 and r8,lr,r3,lsr#8 @ i1 297 ldr r8,[r10,r8,lsl#2] @ Te2[s3>>8] [all …]
|
D | sha256-core.S_shipped | 104 ldmia r0,{r4,r5,r6,r7,r8,r9,r10,r11} 120 eor r0,r8,r8,ror#5 122 eor r0,r0,r8,ror#19 @ Sigma1(e) 137 eor r0,r8,r8,ror#5 139 eor r0,r0,r8,ror#19 @ Sigma1(e) 146 and r2,r2,r8 202 eor r2,r8,r9 260 eor r2,r7,r8 264 eor r2,r2,r8 @ Ch(e,f,g) 316 add r8,r8,r2 @ h+=X[i] [all …]
|
D | sha1-armv4-large.S | 64 ldr r8,.LK_00_19 75 add r7,r8,r7,ror#2 @ E+=K_00_19 84 add r7,r8,r7,ror#2 @ E+=K_00_19 100 add r6,r8,r6,ror#2 @ E+=K_00_19 109 add r6,r8,r6,ror#2 @ E+=K_00_19 125 add r5,r8,r5,ror#2 @ E+=K_00_19 134 add r5,r8,r5,ror#2 @ E+=K_00_19 150 add r4,r8,r4,ror#2 @ E+=K_00_19 159 add r4,r8,r4,ror#2 @ E+=K_00_19 175 add r3,r8,r3,ror#2 @ E+=K_00_19 [all …]
|
D | aesbs-core.S_shipped | 1076 ldr r8, [ip] @ IV is 1st arg on the stack 1117 vld1.8 {q15}, [r8] @ load IV 1318 vst1.8 {q15}, [r8] @ return IV 1333 ldr r8, [ip] @ ctr is 1st arg on the stack 1351 vld1.8 {q0}, [r8] @ load counter 1352 add r8, r6, #.LREVM0SR-.LM0 @ borrow r8 1370 vld1.8 {q0}, [r8] @ load counter 1371 adrl r8, .LREVM0SR @ borrow r8 1406 vldmia r8, {q8} @ .LREVM0SR 1409 sub r6, r8, #.LREVM0SR-.LSR @ pass constants [all …]
|
/linux-4.1.27/arch/powerpc/kernel/ |
D | misc_64.S | 84 subf r8,r6,r4 /* compute length */ 85 add r8,r8,r5 /* ensure we get enough */ 87 srw. r8,r8,r9 /* compute line count */ 89 mtctr r8 100 subf r8,r6,r4 /* compute length */ 101 add r8,r8,r5 103 srw. r8,r8,r9 /* compute line count */ 105 mtctr r8 130 subf r8,r6,r4 /* compute length */ 131 add r8,r8,r5 /* ensure we get enough */ [all …]
|
D | head_8xx.S | 579 add r10, r10, r8 ;b 151f 721 lis r8, MI_RSV4I@h 722 ori r8, r8, 0x1c00 724 mtspr SPRN_MI_CTR, r8 /* Set instruction MMU control */ 729 mr r8, r10 742 lis r8, KERNELBASE@h /* Create vaddr for TLB */ 743 ori r8, r8, MI_EVALID /* Mark it valid */ 744 mtspr SPRN_MI_EPN, r8 745 mtspr SPRN_MD_EPN, r8 746 li r8, MI_PS8MEG /* Set 8M byte page */ [all …]
|
D | head_booke.h | 100 mfspr r8,SPRN_PIR; \ 101 slwi r8,r8,2; \ 102 addis r8,r8,level##_STACK_BASE@ha; \ 103 lwz r8,level##_STACK_BASE@l(r8); \ 104 addi r8,r8,EXC_LVL_FRAME_OVERHEAD; 107 lis r8,level##_STACK_BASE@ha; \ 108 lwz r8,level##_STACK_BASE@l(r8); \ 109 addi r8,r8,EXC_LVL_FRAME_OVERHEAD; 121 mtspr SPRN_SPRG_WSCRATCH_##exc_level,r8; \ 123 stw r9,GPR9(r8); /* save various registers */\ [all …]
|
D | misc_32.S | 99 mulhwu r8,r10,r6 101 adde r4,r4,r8 129 lis r8,__got2_end@ha 130 addi r8,r8,__got2_end@l 131 subf r8,r7,r8 132 srwi. r8,r8,2 134 mtctr r8 546 lwz r8,12(r4); \ 550 stw r8,12(r3); \ 636 rlwinm r8,r7,0,32 # t3 = (count < 32) ? 32 : 0 [all …]
|
D | fsl_booke_entry_mapping.S | 83 li r8,-1 85 slw r6,r8,r6 /* convert to mask */ 90 mfspr r8,SPRN_MAS3 94 and r8,r6,r8 98 or r25,r8,r9 99 ori r8,r25,(MAS3_SX|MAS3_SW|MAS3_SR) 114 mtspr SPRN_MAS3,r8 173 mtspr SPRN_MAS3,r8 230 mfspr r8,SPRN_MAS1 231 rlwinm r8,r8,0,2,0 /* clear IPROT */ [all …]
|
D | head_32.S | 119 0: mflr r8 /* r8 = runtime addr here */ 120 addis r8,r8,(_stext - 0b)@ha 121 addi r8,r8,(_stext - 0b)@l /* current runtime base addr */ 1140 li r8,0x7f /* valid, block length = 8MB */ 1142 mtspr SPRN_IBAT0L,r8 /* lower BAT register */ 1144 addis r8,r8,0x800000@h 1146 mtspr SPRN_IBAT1L,r8 1148 addis r8,r8,0x800000@h 1150 mtspr SPRN_IBAT2L,r8 1154 4: tophys(r8,r11) [all …]
|
D | reloc_32.S | 55 li r8, 0 66 lwz r8, 4(r11) /* r8 = Total Rela relocs size */ 80 cmpwi r8, 0 97 subf r8, r6, r8 /* relaz -= relaent */ 195 cmpwi r8, 0 /* relasz = 0 ? */ 198 subf r8, r6, r8 /* relasz -= relaent */
|
D | idle_power4.S | 63 ld r8,TI_LOCAL_FLAGS(r9) /* set napping bit */ 64 ori r8,r8,_TLF_NAPPING /* so when we take an exception */ 65 std r8,TI_LOCAL_FLAGS(r9) /* it will return to our caller */
|
D | entry_32.S | 93 mfspr r8,SPRN_SPRG_THREAD 94 lwz r0,KSP_LIMIT(r8) 97 stw r0,KSP_LIMIT(r8) 117 mfspr r8,SPRN_SPRG_THREAD 118 lwz r0,KSP_LIMIT(r8) 121 stw r0,KSP_LIMIT(r8) 244 lwz r8,GPR8(r1) 328 lwz r8,GPR8(r1) 357 li r8,-_LAST_ERRNO 360 cmplw 0,r3,r8 [all …]
|
D | reloc_64.S | 37 li r8,0 48 ld r8,8(r11) /* get RELACOUNT value in r8 */ 52 cmpdi cr1,r8,0 71 mtctr r8
|
D | entry_64.S | 73 std r8,GPR8(r1) 165 clrldi r8,r8,32 176 ld r8,_MSR(r1) 179 andi. r10,r8,MSR_RI 214 andi. r6,r8,MSR_PR 226 mtspr SPRN_SRR1,r8 252 ld r8,GPR8(r1) 474 ld r8,KSP(r4) /* new stack pointer */ 477 clrrdi r6,r8,28 /* get its ESID */ 480 clrrdi r6,r8,40 /* get its 1T ESID */ [all …]
|
D | tm.S | 233 std r8, GPR13(r7) 360 addi r8, r3, THREAD_VRSTATE 362 lvx v0, r8, r5 364 REST_32VRS(0, r5, r8) /* r5 scratch, r8 ptr */ 373 addi r8, r3, THREAD_FPSTATE 374 lfd fr0, FPSTATE_FPSCR(r8) 386 ld r8, _XER(r7) 390 mtxer r8
|
D | head_64.S | 374 mr r28,r8 431 mr r8,r26 492 addis r8,r3,(4f - _stext)@ha /* Jump to the copy of this code */ 493 addi r12,r8,(4f - _stext)@l /* that we just made */ 751 LOAD_REG_ADDR(r8,__bss_start) 752 sub r11,r11,r8 /* bss size */ 756 addi r8,r8,-8 759 3: stdu r0,8(r8)
|
D | idle_6xx.S | 139 lwz r8,TI_LOCAL_FLAGS(r9) /* set napping bit */ 140 ori r8,r8,_TLF_NAPPING /* so when we take an exception */ 141 stw r8,TI_LOCAL_FLAGS(r9) /* it will return to our caller */
|
D | head_fsl_booke.S | 85 0: mflr r8 86 addis r3,r8,(is_second_reloc - 0b)@ha 104 addis r4,r8,(kernstart_addr - 0b)@ha 108 addis r6,r8,(memstart_addr - 0b)@ha 926 mfmsr r8 929 rlwimi r9,r8,28,0x00000001 /* turn MSR[DS] into MAS6[SAS] */ 934 mfspr r8,SPRN_MAS1 936 rlwinm r9,r8,25,0x1f /* r9 = log2(page size) */ 1098 mfspr r8,SPRN_HID0 1099 ori r9,r8,HID0_DCFA@l [all …]
|
D | cpu_setup_6xx.S | 91 ori r8,r11,HID0_ICFI 93 ori r8,r8,HID0_DCI /* unless it wasn't enabled */ 95 mtspr SPRN_HID0,r8 /* enable and invalidate caches */ 108 ori r8,r11,HID0_BTCD 110 mtspr SPRN_HID0,r8 /* flush branch target address cache */
|
/linux-4.1.27/arch/powerpc/lib/ |
D | copypage_64.S | 31 srd r8,r5,r11 33 mtctr r8 41 srdi r8,r5,7 /* page is copied in 128 byte strides */ 42 addi r8,r8,-1 /* one stride copied outside loop */ 44 mtctr r8 49 ldu r8,24(r4) 55 std r8,32(r3) 65 ld r8,64(r4) 71 std r8,96(r3) 81 ldu r8,128(r4) [all …]
|
D | memcpy_64.S | 67 mr r8,r9 70 std r8,8(r3) 71 2: ldu r8,16(r4) 74 3: std r8,8(r3) 113 sld r8,r0,r10 117 # s1<< in r8, d0=(s0<<|s1>>) in r7, s3 in r0, s2 in r9, nix in r6 & r12 122 sld r8,r0,r10 129 or r12,r8,r12 131 sld r8,r0,r10 135 # d0=(s0<<|s1>>) in r12, s1<< in r6, s2>> in r7, s2<< in r8, s3 in r9 [all …]
|
D | copy_32.S | 18 lwz r8,8(r4); \ 22 stw r8,8(r6); \ 30 lwz r8,8(r4); \ 38 stw r8,8(r6); \ 111 lwzu r8,8(r4) 113 stwu r8,8(r6) 152 lwzu r8,-8(r4) 154 stwu r8,-8(r6) 188 andi. r8,r0,3 /* get it word-aligned first */ 189 mtctr r8 [all …]
|
D | copypage_power7.S | 42 lis r8,0x8000 /* GO=1 */ 43 clrldi r8,r8,32 54 dcbt r0,r8,0b01010 /* all streams GO */ 79 li r8,48 89 lvx v4,r4,r8 98 stvx v4,r3,r8 128 ld r8,32(r4) 145 std r8,32(r3)
|
D | div64.S | 24 li r8,0 47 add r8,r8,r11 # and add the estimate to the accumulated 54 add r8,r8,r0 57 stw r8,4(r3)
|
D | copyuser_64.S | 73 mr r8,r6 79 270: std r8,8(r3) 81 222: ld r8,8(r4) 87 272: std r8,8(r3) 132 sLd r8,r0,r10 140 sLd r8,r0,r10 147 or r12,r8,r12 149 sLd r8,r0,r10 159 or r12,r8,r12 162 sLd r8,r0,r10 [all …]
|
D | string_64.S | 66 mr r3,r8 85 mr r8,r3 133 6: mr r8,r3 181 mr r8,r3 192 mr r8,r3
|
D | ldstfp.S | 233 li r8,STKFRM-16 237 stvx v0,r1,r8 243 lvx v0,r1,r8 261 li r8,STKFRM-16 265 stvx v0,r1,r8 271 lvx v0,r1,r8 329 li r8,STKFRM-16 357 li r8,STKFRM-16
|
/linux-4.1.27/arch/powerpc/kvm/ |
D | book3s_hv_rmhandlers.S | 95 lwz r8, HSTATE_PMC5(r13) 101 mtspr SPRN_PMC5, r8 113 ld r8, HSTATE_MMCR2(r13) 115 mtspr SPRN_MMCR2, r8 142 ld r8, 112+PPC_LR_STKOFF(r1) 157 mtsrr0 r8 163 11: mtspr SPRN_HSRR0, r8 169 14: mtspr SPRN_HSRR0, r8 188 ld r8,VCORE_LPCR(r5) 189 mtspr SPRN_LPCR,r8 [all …]
|
D | book3s_hv_interrupts.S | 76 mfspr r8, SPRN_MMCR2 103 std r8, HSTATE_MMCR2(r13) 110 mfspr r8, SPRN_PMC5 116 stw r8, HSTATE_PMC5(r13) 124 mfspr r8,SPRN_DEC 126 mtspr SPRN_HDEC,r8 127 extsw r8,r8 128 add r8,r8,r7 129 std r8,HSTATE_DECEXP(r13)
|
D | book3s_64_slb.S | 51 li r8, 0 52 stb r8, 3(r11) 128 li r8, SLB_NUM_BOLTED 129 stb r8, 3(r11) 135 li r8, SLBSHADOW_SAVEAREA 139 LDX_BE r10, r11, r8 145 addi r8, r8, SHADOW_SLB_ENTRY_LEN
|
D | book3s_segment.S | 95 mfspr r8, SPRN_FSCR 96 std r8, HSTATE_HOST_FSCR(r13) 123 PPC_LL r8, SVCPU_CTR(r3) 128 mtctr r8 144 PPC_LL r8, SVCPU_R8(r3) 196 PPC_STL r8, SVCPU_R8(r13) 225 PPC_LL r8, HSTATE_SCRATCH0(r13) 229 PPC_STL r8, SVCPU_R12(r13) 237 mfctr r8 243 PPC_STL r8, SVCPU_CTR(r13) [all …]
|
D | bookehv_interrupts.S | 79 lwz r8, VCPU_HOST_PID(r4) 85 mtspr SPRN_PID, r8 90 mfspr r8, SPRN_TBRL 93 stw r8, VCPU_TIMING_EXIT_TBL(r4) 98 oris r8, r6, MSR_CE@h 100 ori r8, r8, MSR_ME | MSR_RI 110 cmpw r6, r8 153 mfspr r8, SPRN_ESR 154 PPC_STL r8, VCPU_FAULT_ESR(r4) 189 PPC_STL r8, VCPU_GPR(R8)(r4) [all …]
|
D | booke_interrupts.S | 153 stw r8, VCPU_GPR(R8)(r4) 163 mfspr r8, SPRN_TBRL 167 stw r8, VCPU_TIMING_EXIT_TBL(r4) 175 mfmsr r8 176 ori r7, r8, MSR_DS 180 mtmsr r8 441 lis r8, kvmppc_booke_handlers@ha 442 lwz r8, kvmppc_booke_handlers@l(r8) 443 mtspr SPRN_IVPR, r8 471 mfspr r8, SPRN_TBRU [all …]
|
/linux-4.1.27/arch/powerpc/crypto/ |
D | aes-spe-keys.S | 37 xor r8,r8,r8; \ 84 LOAD_KEY(r8,r4,12) 88 stw r8,12(r3) 93 mr r14,r8 /* apply LS_BOX to 4th temp */ 100 xor r8,r8,r7 104 stw r8,12(r3) 126 LOAD_KEY(r8,r4,12) 132 stw r8,12(r3) 146 xor r8,r8,r7 147 xor r9,r9,r8 [all …]
|
/linux-4.1.27/arch/powerpc/boot/ |
D | crt0.S | 75 9: lwz r8,0(r12) /* get tag */ 76 cmpwi r8,0 78 cmpwi r8,RELA 82 11: addis r8,r8,(-RELACOUNT)@ha 83 cmpwi r8,RELACOUNT@l 114 lwz r8,p_etext-p_base(r10) 118 cmplw cr0,r9,r8 125 lwz r8,p_end-p_base(r10) 129 cmplw cr0,r9,r8 133 lwz r8,p_pstack-p_base(r10) [all …]
|
D | div64.S | 24 li r8,0 47 add r8,r8,r11 # and add the estimate to the accumulated 54 add r8,r8,r0 57 stw r8,4(r3) 81 rlwinm r8,r7,0,32 # t3 = (count < 32) ? 32 : 0 84 slw r7,r7,r8 # t2 = (count < 32) ? 0 : t2
|
D | virtex405-head.S | 25 li r8,256 26 mtctr r8
|
D | util.S | 86 addze r8,r5 92 cmpw 0,r5,r8
|
D | string.S | 141 lwzu r8,8(r4) 143 stwu r8,8(r6) 190 lwzu r8,-8(r4) 192 stwu r8,-8(r6)
|
/linux-4.1.27/arch/ia64/include/asm/ |
D | futex.h | 10 register unsigned long r8 __asm ("r8") = 0; \ 16 : "+r" (r8), "=r" (oldval) \ 19 ret = r8; \ 24 register unsigned long r8 __asm ("r8") = 0; \ 37 : "+r" (r8), "=r" (val), "=&r" (oldval), \ 41 if (unlikely (r8)) \ 44 ret = r8; \ 109 register unsigned long r8 __asm ("r8") = 0; in futex_atomic_cmpxchg_inatomic() 117 : "+r" (r8), "=&r" (prev) in futex_atomic_cmpxchg_inatomic() 122 return r8; in futex_atomic_cmpxchg_inatomic()
|
D | syscall.h | 38 return regs->r10 == -1 ? regs->r8:0; in syscall_get_error() 44 return regs->r8; in syscall_get_return_value() 53 regs->r8 = -error; in syscall_set_return_value() 56 regs->r8 = val; in syscall_set_return_value()
|
D | ptrace.h | 68 return regs->r8; in regs_return_value() 70 return -regs->r8; in regs_return_value() 111 # define force_successful_syscall_return() (task_pt_regs(current)->r8 = 0)
|
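The three ia64 headers above (futex.h, syscall.h, ptrace.h) share one convention: r8 carries the syscall or futex status, reached either through pt_regs or, inside inline asm, through a GCC explicit register variable bound to "r8". A minimal sketch of that binding pattern, illustrative only and not taken from the kernel (the function name is invented), looks like this:

    static inline unsigned long r8_status_sketch(void)
    {
            /* pin a local to r8, as futex.h does for its status/error value */
            register unsigned long r8 __asm__("r8") = 0;

            /*
             * Real code would run an instruction here that may rewrite r8;
             * the empty asm with a "+r" operand just forces the binding.
             */
            __asm__ volatile("" : "+r" (r8));

            return r8;      /* 0 on success, an error code otherwise */
    }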
/linux-4.1.27/arch/x86/kernel/ |
D | relocate_kernel_64.S | 78 movq PTR(PA_CONTROL_PAGE)(%rsi), %r8 95 lea PAGE_SIZE(%r8), %rsp 98 addq $(identity_mapped - relocate_kernel), %r8 99 pushq %r8 181 popq %r8 182 subq $(1b - relocate_kernel), %r8 183 movq CP_PA_SWAP_PAGE(%r8), %r10 184 movq CP_PA_BACKUP_PAGES_MAP(%r8), %rdi 185 movq CP_PA_TABLE_PAGE(%r8), %rax 187 lea PAGE_SIZE(%r8), %rsp [all …]
|
D | perf_regs.c | 47 PT_REGS_OFFSET(PERF_REG_X86_R8, r8), 143 regs_user_copy->r8 = user_regs->r8; in perf_get_regs_user()
|
/linux-4.1.27/arch/arm/mach-omap1/ |
D | ams-delta-fiq-handler.S | 111 mov r8, #2 @ reset FIQ agreement 112 str r8, [r12, #IRQ_CONTROL_REG_OFFSET] 117 mov r8, #1 118 orr r8, r11, r8, lsl r10 @ mask spurious interrupt 119 str r8, [r12, #IRQ_MIR_REG_OFFSET] 150 ldr r8, [r12, #OMAP1510_GPIO_DATA_INPUT] @ fetch GPIO input 156 ands r8, r8, #KEYBRD_DATA_MASK @ check start bit - detected? 159 @ r8 contains KEYBRD_DATA_MASK, use it 160 str r8, [r9, #BUF_STATE] @ enter data processing state 175 @ r8 still contains GPIO input bits [all …]
|
D | sleep.S | 88 ldr r8, [r6, #EMIFS_CONFIG_ASM_OFFSET & 0xff] 89 orr r9, r8, #IDLE_EMIFS_REQUEST & 0xff 135 str r8, [r6, #EMIFS_CONFIG_ASM_OFFSET & 0xff] 243 ldr r8, [r6, #EMIFS_CONFIG_ASM_OFFSET & 0xff] 244 orr r9, r8, #IDLE_EMIFS_REQUEST & 0xff 363 str r8, [r6, #EMIFS_CONFIG_ASM_OFFSET & 0xff]
|
/linux-4.1.27/arch/avr32/kernel/ |
D | entry-avr32b.S | 213 cp.w r8, NR_syscalls 217 ld.w lr, lr[r8 << 2] 218 mov r8, r5 /* 5th argument (6th is pushed by stub) */ 236 popm r8-r9 237 mtsr SYSREG_RAR_SUP, r8 265 pushm r8-r12 267 popm r8-r12 307 mfsr r8, SYSREG_RSR_EX 310 mov r12, r8 311 andh r8, (MODE_MASK >> 16), COH [all …]
|
D | switch_to.S | 24 ld.w r8, r10++ 30 mtsr SYSREG_SR, r8
|
/linux-4.1.27/arch/ia64/hp/sim/boot/ |
D | boot_head.S | 60 mov r8=-1 74 mov r8=0 /* status = 0 */ 81 mov r8=0 /* status = 0 */ 88 mov r8=0 /* status = 0 */ 96 movl r8=524288 /* flush 512k million cache lines (16MB) */ 98 mov ar.lc=r8 99 movl r8=0xe000000000000000 101 .loop: fc r8 102 add r8=32,r8 109 mov r8=r0 [all …]
|
/linux-4.1.27/arch/tile/lib/ |
D | memcpy_32.S | 105 { blzt r4, .Lcopy_8_check; slti_u r8, r2, 8 } 118 EX: { lw r3, r1; addi r1, r1, 4; slti_u r8, r2, 16 } 123 { bzt r8, .Lcopy_8_loop; slti_u r4, r2, 4 } 187 { bz r4, .Lcopy_8_check; slti_u r8, r2, 8 } 194 { bz r4, .Lcopy_8_check; slti_u r8, r2, 8 } 201 { bz r4, .Lcopy_8_check; slti_u r8, r2, 8 } 272 EX: { lw r8, r1; addi r1, r1, 4; slt_u r13, r20, r15 }/* r8 = WORD_13 */ 323 EX: sw r0, r8 /* store(WORD_13) */ 346 { slti_u r8, r2, 20; sub r4, zero, r0 } 347 { bnzt r8, .Lcopy_unaligned_few; andi r4, r4, 3 } [all …]
|
/linux-4.1.27/drivers/gpu/drm/nouveau/nvkm/engine/gr/fuc/ |
D | com.fuc | 35 ld b32 $r8 D[$r13 + 0x0] // GET 37 xor $r8 8 38 cmpu b32 $r8 $r9 46 and $r8 $r9 7 47 shl b32 $r8 3 48 add b32 $r8 $r13 49 add b32 $r8 8 50 st b32 D[$r8 + 0x0] $r14 51 st b32 D[$r8 + 0x4] $r15 69 ld b32 $r8 D[$r13 + 0x0] // GET [all …]
|
/linux-4.1.27/arch/arm/mach-omap2/ |
D | sram243x.S | 48 mov r8, r3 @ capture force parameter 87 cmp r8, #0x1 @ if forced unlock exit 207 ldr r8, [r10] @ get value 209 and r8, r8, r7 @ apply mask to clear bits 210 orr r8, r8, r9 @ build value for L0/L1-volt operation. 211 str r8, [r10] @ set up for change. 213 orr r8, r8, r7 @ build value for force 214 str r8, [r10] @ Force transition to L1 217 ldr r8, [r10] @ get value 218 add r8, r8, #0x2 @ give it at most 62uS (min 31+) [all …]
|
D | sram242x.S | 48 mov r8, r3 @ capture force parameter 87 cmp r8, #0x1 @ if forced unlock exit 207 ldr r8, [r10] @ get value 209 and r8, r8, r7 @ apply mask to clear bits 210 orr r8, r8, r9 @ build value for L0/L1-volt operation. 211 str r8, [r10] @ set up for change. 213 orr r8, r8, r7 @ build value for force 214 str r8, [r10] @ Force transition to L1 217 ldr r8, [r10] @ get value 218 add r8, r8, #0x2 @ give it at most 62uS (min 31+) [all …]
|
D | sleep44xx.S | 109 mov r8, r0 110 ldr r9, [r8, #OMAP_TYPE_OFFSET] 115 ldreq r0, [r8, #SCU_OFFSET0] 116 ldrne r0, [r8, #SCU_OFFSET1] 126 ldreq r1, [r8, #SCU_OFFSET0] 127 ldrne r1, [r8, #SCU_OFFSET1] 150 mov r8, r0 153 ldreq r0, [r8, #L2X0_SAVE_OFFSET0] @ Retrieve L2 state from SAR 154 ldrne r0, [r8, #L2X0_SAVE_OFFSET1] @ memory. 212 mov r8, r0 [all …]
|
/linux-4.1.27/arch/parisc/lib/ |
D | fixup.S | 69 get_fault_ip %r1,%r8 71 ldi -EFAULT, %r8 77 get_fault_ip %r1,%r8 79 ldi -EFAULT, %r8 86 get_fault_ip %r1,%r8 89 ldi -EFAULT, %r8 93 get_fault_ip %r1,%r8 96 ldi -EFAULT, %r8
|
D | memcpy.c | 324 register double r1, r2, r3, r4, r5, r6, r7, r8; in pa_memcpy_internal() 342 flddma(s_space, pds, r8, pmc_load_exc); in pa_memcpy_internal() 346 fstdma(d_space, r8, pdd, pmc_store_exc); in pa_memcpy_internal() 356 register unsigned int r1,r2,r3,r4,r5,r6,r7,r8; in pa_memcpy_internal() local 370 ldwma(s_space, pws, r8, pmc_load_exc); in pa_memcpy_internal() 374 stwma(d_space, r8, pwd, pmc_store_exc); in pa_memcpy_internal()
|
/linux-4.1.27/arch/sh/kernel/cpu/sh2/ |
D | entry.S | 84 mov.l r8,@-r15 90 mov r2,r8 ! copy user -> kernel stack 91 mov.l @(0,r8),r3 93 mov.l @(4,r8),r2 95 mov.l @(12,r8),r1 97 mov.l @(8,r8),r0 133 mov.l r8,@-r2 144 mov #64,r8 145 cmp/hs r8,r9 147 mov #32,r8 [all …]
|
/linux-4.1.27/arch/sh/lib/ |
D | copy_page.S | 27 mov.l r8,@-r15 32 mov r5,r8 36 add r0,r8 59 cmp/eq r11,r8 65 mov.l @r15+,r8 98 mov.l r8,@-r15 194 EX( mov.l @r5+,r8 ) 209 EX( mov.l r8,@(16,r4) ) 245 EX( mov.l @r5+,r8 ) 251 xtrct r8,r1 [all …]
|
/linux-4.1.27/arch/ia64/kernel/ |
D | fsys.S | 55 mov r8=ENOSYS 75 add r8=IA64_PID_LEVEL_OFFSET,r17 77 ld4 r8=[r8] // r8 = pid->level 80 shl r8=r8,IA64_UPID_SHIFT 82 add r17=r17,r8 // r17 = &pid->numbers[pid->level] 84 ld4 r8=[r17] // r8 = pid->numbers[pid->level].nr 105 add r8=IA64_PID_LEVEL_OFFSET,r17 108 ld4 r8=[r8] // r8 = pid->level 111 shl r8=r8,IA64_UPID_SHIFT 113 add r17=r17,r8 // r17 = &pid->numbers[pid->level] [all …]
|
D | pal.S | 45 mov r8=-1 62 mov r8 = ip 66 adds r8 = 1f-1b,r8 80 mov rp = r8 151 mov r8 = ip // save ip to compute branch 162 adds r8 = 1f-1b,r8 // calculate return address for call 166 tpa r8=r8 // convert rp to physical 177 mov rp = r8 // install return address (physical)
|
D | mca_drv_asm.S | 37 mov out0=r8 // poisoned address 53 mov r8=r0
|
D | entry.S | 73 cmp4.ge p6,p7=r8,r0 75 sxt4 r8=r8 // return 64-bit result 196 MOV_TO_KR(CURRENT, in0, r8, r9) // update "current" application register 197 mov r8=r13 // return pointer to previously running task 215 MOV_TO_ITIR(p0, r25, r8) 216 MOV_TO_IFA(in0, r8) // VA of next task... 219 MOV_TO_KR(CURRENT_STACK, r26, r8, r9) // remember last page we mapped... 222 SSM_PSR_IC_AND_SRLZ_D(r8, r9) // reenable the psr.ic bit 507 cmp.lt p6,p0=r8,r0 // check tracehook 542 cmp.lt p6,p0=r8,r0 // syscall failed? [all …]
|
/linux-4.1.27/arch/powerpc/platforms/pseries/ |
D | hvCall.S | 42 std r8,STK_PARAM(R8)(r1); \ 54 ld r8,STACK_FRAME_OVERHEAD+STK_PARAM(R8)(r1); \ 143 mr r7,r8 144 mr r8,r9 170 mr r7,r8 171 mr r8,r9 207 mr r7,r8 208 mr r8,r9 237 mr r7,r8 238 mr r8,r9 [all …]
|
/linux-4.1.27/arch/unicore32/lib/ |
D | copy_template.S | 71 stm.w (r5 - r8), [sp-] 75 4: ldr8w r1, r3, r4, r5, r6, r7, r8, r10, r11, abort=20f 77 str8w r0, r3, r4, r5, r6, r7, r8, r10, r11, abort=20f 91 ldr1w r1, r8, abort=20f 102 str1w r0, r8, abort=20f 105 7: ldm.w (r5 - r8), [sp]+ 148 ldr4w r1, r8, r9, r10, r11, abort=19f 157 or r7, r7, r8 push #\b 158 mov r8, r8 pull #\a 159 or r8, r8, r9 push #\b [all …]
|
/linux-4.1.27/arch/x86/lib/ |
D | memcpy_64.S | 75 movq 0*8(%rsi), %r8 81 movq %r8, 0*8(%rdi) 104 movq -1*8(%rsi), %r8 109 movq %r8, -1*8(%rdi) 129 movq 0*8(%rsi), %r8 133 movq %r8, 0*8(%rdi) 145 movq 0*8(%rsi), %r8 147 movq %r8, 0*8(%rdi) 172 movzbq 1(%rsi), %r8
|
D | memmove_64.S | 40 mov %rsi, %r8 41 add %rdx, %r8 42 cmp %rdi, %r8 70 movq 3*8(%rsi), %r8 76 movq %r8, 3*8(%rdi) 136 movq -4*8(%rsi), %r8 142 movq %r8, -4*8(%rdi) 160 movq -1*8(%rsi, %rdx), %r8 164 movq %r8, -1*8(%rdi, %rdx)
|
D | copy_user_nocache_64.S | 60 1: movq (%rsi),%r8 64 5: movnti %r8,(%rdi) 68 9: movq 4*8(%rsi),%r8 72 13: movnti %r8,4*8(%rdi) 84 18: movq (%rsi),%r8 85 19: movnti %r8,(%rdi)
|
D | copy_page_64.S | 40 movq 0x8*3(%rsi), %r8 51 movq %r8, 0x8*3(%rdi) 70 movq 0x8*3(%rsi), %r8 79 movq %r8, 0x8*3(%rdi)
|
D | memset_64.S | 136 movq $8,%r8 137 subq %r9,%r8 138 addq %r8,%rdi 139 subq %r8,%rdx
|
D | copy_user_64.S | 116 1: movq (%rsi),%r8 120 5: movq %r8,(%rdi) 124 9: movq 4*8(%rsi),%r8 128 13: movq %r8,4*8(%rdi) 140 18: movq (%rsi),%r8 141 19: movq %r8,(%rdi)
|
D | csum-copy_64.S | 68 movq %r8, (%rsp) 91 movq 8(%rdi), %r8 110 adcq %r8, %rax 123 movq %r8, 8(%rsi)
|
D | thunk_64.S | 25 pushq_cfi_reg r8 66 popq_cfi_reg r8
|
D | rwsem.S | 70 pushq_cfi_reg r8; \ 79 popq_cfi_reg r8; \
|
/linux-4.1.27/arch/avr32/mach-at32ap/ |
D | pm-at32ap700x.S | 41 get_thread_info r8 42 ld.w r9, r8[TI_flags] 46 st.w r8[TI_flags], r9 59 ld.w r9, r8[TI_flags] 61 st.w r8[TI_flags], r9 92 mtsr SYSREG_RAR_INT0, r8 126 sub r8, pc, . - 1f /* return address for irq handler */ 149 sub r8, pc, . - 1f /* return address for irq handler */
|
/linux-4.1.27/arch/x86/crypto/ |
D | salsa20-x86_64-asm_64.S | 10 mov %rdi,%r8 40 movq 0(%r8),%rcx 42 movq 8(%r8),%r9 44 movq 16(%r8),%rax 46 movq 24(%r8),%r10 48 movq 32(%r8),%r11 50 movq 40(%r8),%r12 52 movq 48(%r8),%r13 54 movq 56(%r8),%r14 72 movq %r8,120(%rsp) [all …]
|
D | aes-x86_64-asm_64.S | 47 #define R8 %r8 52 #define prologue(FUNC,KEY,B128,B192,r1,r2,r3,r4,r5,r6,r7,r8,r9,r10,r11) \ argument 56 leaq KEY+48(r8),r9; \ 62 movl 480(r8),r10 ## E; \ 73 #define epilogue(FUNC,r1,r2,r3,r4,r5,r6,r7,r8,r9) \ argument 79 movl r8 ## E,12(r9); \ 83 #define round(TAB,OFFSET,r1,r2,r3,r4,r5,r6,r7,r8,ra,rb,rc,rd) \ argument 93 xorl OFFSET(r8),ra ## E; \ 94 xorl OFFSET+4(r8),rb ## E; \ 124 xorl OFFSET+8(r8),rc ## E; \ [all …]
|
/linux-4.1.27/arch/arm/common/ |
D | mcpm_head.S | 76 ldmia r5, {r0, r6, r7, r8, r11} 80 add r8, r5, r8 @ r8 = mcpm_sync 90 mla r8, r0, r10, r8 @ r8 = sync cluster base 95 mla r5, r9, r5, r8 @ r5 = sync cpu address 110 ldrb r0, [r8, #MCPM_SYNC_CLUSTER_CLUSTER] 124 strb r0, [r8, #MCPM_SYNC_CLUSTER_INBOUND] 133 ldrb r0, [r8, #MCPM_SYNC_CLUSTER_CLUSTER] 155 strb r0, [r8, #MCPM_SYNC_CLUSTER_CLUSTER] 162 strb r0, [r8, #MCPM_SYNC_CLUSTER_INBOUND] 173 ldrb r0, [r8, #MCPM_SYNC_CLUSTER_CLUSTER]
|
/linux-4.1.27/arch/arm/mach-imx/ |
D | suspend-imx6.S | 103 ldr r8, [r7], #0x4 105 str r9, [r11, r8] 164 add r8, r1, r4 165 add r9, r8, r7 210 ldr r8, =PM_INFO_MMDC_IO_VAL_OFFSET 211 add r8, r8, r0 216 ldr r9, [r8], #0x8 224 ldr r9, [r8], #0x8 226 ldr r9, [r8], #0x8 229 ldr r9, [r8] [all …]
|
D | ssi-fiq.S | 62 and r10, r10, r8 /* r10: current buffer offset */ 79 lsr r11, r8, #16 /* r11: buffer size */ 81 lslgt r8, r11, #16 82 addle r8, #8
|
/linux-4.1.27/arch/powerpc/platforms/83xx/ |
D | suspend-asm.S | 84 mfspr r8, SPRN_DABR2 91 stw r8, SS_DABR+4(r3) 98 mfsdr1 r8 104 stw r8, SS_SDR1(r3) 233 lis r8, TMP_VIRT_IMMR@h 234 ori r4, r8, 0x001e /* 1 MByte accessible from Kernel Space only */ 289 stw r4, 0x0024(r8) 290 stw r4, 0x002c(r8) 291 stw r4, 0x0034(r8) 292 stw r4, 0x003c(r8) [all …]
|
/linux-4.1.27/arch/hexagon/lib/ |
D | memset.S | 47 r8 = r0 /* leave r0 intact for return val */ define 64 memb(r8++#1) = r4 76 memb(r8++#1) = r4 89 memh(r8++#2) = r4 102 memw(r8++#4) = r4 125 memd(r8++#8) = r5:4 142 memw(r8++#4) = r4 153 memh(r8++#2) = r4 159 memb(r8++#1) = r4 252 r8 = r3 define [all …]
|
/linux-4.1.27/tools/testing/selftests/powerpc/copyloops/ |
D | memcpy_64.S | 67 mr r8,r9 70 std r8,8(r3) 71 2: ldu r8,16(r4) 74 3: std r8,8(r3) 113 sld r8,r0,r10 117 # s1<< in r8, d0=(s0<<|s1>>) in r7, s3 in r0, s2 in r9, nix in r6 & r12 122 sld r8,r0,r10 129 or r12,r8,r12 131 sld r8,r0,r10 135 # d0=(s0<<|s1>>) in r12, s1<< in r6, s2>> in r7, s2<< in r8, s3 in r9 [all …]
|
D | copyuser_64.S | 73 mr r8,r6 79 270: std r8,8(r3) 81 222: ld r8,8(r4) 87 272: std r8,8(r3) 132 sLd r8,r0,r10 140 sLd r8,r0,r10 147 or r12,r8,r12 149 sLd r8,r0,r10 159 or r12,r8,r12 162 sLd r8,r0,r10 [all …]
|
/linux-4.1.27/drivers/power/ |
D | intel_mid_battery.c | 259 u8 r8; in pmic_battery_read_status() local 280 if (intel_scu_ipc_ioread8(PMIC_BATT_CHR_SCHRGINT_ADDR, &r8)) { in pmic_battery_read_status() 292 if (r8 & PMIC_BATT_CHR_SBATDET_MASK) { in pmic_battery_read_status() 303 if (r8 & PMIC_BATT_CHR_SBATOVP_MASK) { in pmic_battery_read_status() 308 } else if (r8 & PMIC_BATT_CHR_STEMP_MASK) { in pmic_battery_read_status() 315 if (r8 & PMIC_BATT_CHR_SDCLMT_MASK) { in pmic_battery_read_status() 323 if (r8 & PMIC_BATT_CHR_SUSBDET_MASK) { in pmic_battery_read_status() 332 if (r8 & PMIC_BATT_CHR_SUSBOVP_MASK) { in pmic_battery_read_status() 354 if (r8 & PMIC_BATT_CHR_SCOMP_MASK) { in pmic_battery_read_status() 561 u8 r8; in pmic_battery_handle_intrpt() local [all …]
|
/linux-4.1.27/arch/arc/lib/ |
D | strcpy-700.S | 27 mov r8,0x01010101 29 ror r12,r8 30 sub r2,r3,r8 41 sub r2,r3,r8 46 sub r2,r4,r8
|
/linux-4.1.27/arch/powerpc/include/asm/ |
D | epapr_hcalls.h | 291 register uintptr_t r8 __asm__("r8"); in ev_byte_channel_send() 300 r8 = be32_to_cpu(p[3]); in ev_byte_channel_send() 304 "+r" (r4), "+r" (r5), "+r" (r6), "+r" (r7), "+r" (r8) in ev_byte_channel_send() 334 register uintptr_t r8 __asm__("r8"); in ev_byte_channel_receive() 343 "=r" (r5), "=r" (r6), "=r" (r7), "=r" (r8) in ev_byte_channel_receive() 351 p[3] = cpu_to_be32(r8); in ev_byte_channel_receive() 475 unsigned long register r8 asm("r8") = in[5]; in epapr_hypercall() 483 "=r"(r7), "=r"(r8), "=r"(r9), "=r"(r10), "=r"(r11), in epapr_hypercall() 485 : "r"(r3), "r"(r4), "r"(r5), "r"(r6), "r"(r7), "r"(r8), in epapr_hypercall() 493 out[4] = r8; in epapr_hypercall()
|
D | fsl_hcalls.h | 133 register uintptr_t r8 __asm__("r8"); in fh_partition_get_dtprop() 143 r8 = propvalue_addr >> 32; in fh_partition_get_dtprop() 147 r8 = 0; in fh_partition_get_dtprop() 157 "+r" (r8), "+r" (r9), "+r" (r10) in fh_partition_get_dtprop() 185 register uintptr_t r8 __asm__("r8"); in fh_partition_set_dtprop() 197 r8 = propvalue_addr >> 32; in fh_partition_set_dtprop() 201 r8 = 0; in fh_partition_set_dtprop() 211 "+r" (r8), "+r" (r9), "+r" (r10) in fh_partition_set_dtprop()
|
D | exception-64e.h | 167 std r8,EX_TLB_R8(r12); \ 173 ld r8,EX_TLB_R8(r12); \ 182 ld r8,PACA_EXTLB+EX_TLB_ESR(r13); \ 183 cmpdi cr2,r8,-1; \
|
D | ftrace.h | 22 stw r8, 32(r1); \ 39 lwz r8, 32(r1); \
|
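epapr_hcalls.h and fsl_hcalls.h above apply the same explicit-register-variable idea to a whole argument list: r3 upwards (r8 being the sixth slot) are each declared as named register variables and listed as operands of a single asm statement, so the compiler marshals hypercall arguments straight into the right GPRs. A stripped-down sketch of that shape, illustrative only (no real hypercall instruction is emitted and the names are invented), could read:

    static inline unsigned long hcall_shape_sketch(unsigned long a, unsigned long b,
                                                   unsigned long f)
    {
            register unsigned long r3 __asm__("r3") = a;  /* 1st argument, also the return */
            register unsigned long r4 __asm__("r4") = b;  /* 2nd argument */
            register unsigned long r8 __asm__("r8") = f;  /* 6th argument slot */

            /* a real implementation would emit the hypercall/sc instruction here */
            __asm__ volatile("" : "+r" (r3), "+r" (r4), "+r" (r8));

            return r3;      /* result comes back in r3 */
    }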
/linux-4.1.27/arch/s390/kernel/ |
D | sclp.S | 37 la %r8,LC_EXT_NEW_PSW # register int handler 41 la %r8,LC_EXT_NEW_PSW_64 # register int handler 64 bit 44 mvc .LoldpswS1-.LbaseS1(16,%r13),0(%r8) 45 mvc 0(16,%r8),0(%r9) 49 stm %r6,%r7,0(%r8) 82 mvc 0(16,%r8),.LoldpswS1-.LbaseS1(%r13) 209 l %r8,.LsccbS0-.LbaseS4(%r13) # prepare write data sccb 210 mvc 0(.LmtoS4-.LwritesccbS4,%r8),.LwritesccbS4-.LbaseS4(%r13) 211 la %r7,.LmtoS4-.LwritesccbS4(%r8) # current mto addr 232 lh %r9,.LmdbS4-.LwritesccbS4(%r8) # update mdb length [all …]
|
D | entry.S | 86 tmhh %r8,0x0001 # interrupting from user ? 117 tmhh %r8,0x0001 # interrupting from user ? 125 tmhh %r8,0x0001 # retest problem state after cleanup 157 stg %r8,__LC_RETURN_PSW 204 stmg %r8,%r15,__LC_SAVE_AREA_SYNC 221 llgh %r8,__PT_INT_CODE+2(%r11) 222 slag %r8,%r8,2 # shift and test for svc 0 229 slag %r8,%r1,2 234 lgf %r9,0(%r8,%r10) # get system call add. 315 lghi %r8,0 # svc 0 returns -ENOSYS [all …]
|
D | relocate_kernel.S | 80 lgr %r8,%r5 # r8 = r5 81 nill %r8,0xf000 # masking 82 0: mvcle %r6,%r8,0x0 # copy PAGE_SIZE bytes from r8 to r6 - pad with 0
|
D | head_kdump.S | 41 lghi %r8,DATAMOVER_ADDR # Target of data mover 42 mvc 0(256,%r8),0(%r10) # Copy data mover code 44 agr %r8,%r2 # Copy data mover to 45 mvc 0(256,%r8),0(%r10) # reserved mem
|
/linux-4.1.27/drivers/net/wireless/b43/ |
D | phy_a.c | 102 u16 freq, r8, tmp; in aphy_channel_switch() local 106 r8 = b43_radio_read16(dev, 0x0008); in aphy_channel_switch() 108 b43_radio_write16(dev, 0x0008, r8); in aphy_channel_switch() 121 r8 = 3 * freq / 116; /* is equal to r8 = freq * 0.025862 */ in aphy_channel_switch() 123 b43_radio_write16(dev, 0x0007, (r8 << 4) | r8); in aphy_channel_switch() 124 b43_radio_write16(dev, 0x0020, (r8 << 4) | r8); in aphy_channel_switch() 125 b43_radio_write16(dev, 0x0021, (r8 << 4) | r8); in aphy_channel_switch() 126 b43_radio_maskset(dev, 0x0022, 0x000F, (r8 << 4)); in aphy_channel_switch() 127 b43_radio_write16(dev, 0x002A, (r8 << 4)); in aphy_channel_switch() 128 b43_radio_write16(dev, 0x002B, (r8 << 4)); in aphy_channel_switch() [all …]
|
/linux-4.1.27/arch/arm/mach-s3c24xx/ |
D | sleep-s3c2410.S | 50 ldr r8, [r5] @ get MISCCR (and ensure in TLB) 54 orr r8, r8, #S3C2410_MISCCR_SDSLEEP @ SDRAM power-down signals 66 streq r8, [r5] @ SDRAM power-down config
|
/linux-4.1.27/arch/sh/kernel/cpu/sh3/ |
D | entry.S | 137 mov r5, r8 150 mov r8, r5 193 mov.l 1f, r8 212 ! - restore r8, r9, r10, r11, r12, r13, r14, r15 from the stack 217 ! r8 passes SR bitmask, overwritten with restored data on return 219 ! BL=0 on entry, on exit BL=1 (depending on r8). 232 or r8, r9 235 mov.l @r15+, r8 253 mov.l 7f, r8 494 mov r15, r8 ! trap handlers take saved regs in r8
|
D | swsusp.S | 63 mov.l 3f, r8 99 mov r8, r5 ! save r8 in r5 124 mov r5, r8 ! restore old r8 134 mov r5, r8 ! restore old r8
|
/linux-4.1.27/arch/sh/kernel/cpu/shmobile/ |
D | sleep.S | 63 mov.l r8, @-r15 222 stc sr, r8 223 and r9, r8 224 or r10, r8 225 ldc r8, sr 360 mov.l @r15+, r8 384 stc sr, r8 385 and r9, r8 386 or r10, r8 387 ldc r8, sr
|
/linux-4.1.27/drivers/parisc/ |
D | superio.c | 292 u8 r8; in superio_mask_irq() local 302 r8 = inb(IC_PIC1+1); in superio_mask_irq() 303 r8 |= (1 << irq); in superio_mask_irq() 304 outb (r8,IC_PIC1+1); in superio_mask_irq() 310 u8 r8; in superio_unmask_irq() local 319 r8 = inb(IC_PIC1+1); in superio_unmask_irq() 320 r8 &= ~(1 << irq); in superio_unmask_irq() 321 outb (r8,IC_PIC1+1); in superio_unmask_irq()
|
/linux-4.1.27/arch/arm/kvm/ |
D | interrupts_head.S | 97 mrs r8, LR_fiq 121 msr LR_fiq, r8 175 msr LR_fiq, r8 256 mrrc p15, 1, r8, r9, c2 @ TTBR 1 271 strd r8, r9, [r2] 283 mrc p15, 0, r8, c5, c1, 0 @ ADFSR 298 str r8, [vcpu, #CP15_OFFSET(c5_ADFSR)] 353 ldr r8, [vcpu, #CP15_OFFSET(c5_ADFSR)] 366 mcr p15, 0, r8, c5, c1, 0 @ ADFSR 382 ldrd r8, r9, [r12] [all …]
|
/linux-4.1.27/arch/m32r/kernel/ |
D | entry.S | 132 GET_THREAD_INFO(r8) 133 ld r0, R0(r8) 134 ld r1, R1(r8) 141 GET_THREAD_INFO(r8) 215 ld r9, @(TI_FLAGS, r8)
|
/linux-4.1.27/drivers/gpu/drm/nouveau/nvkm/engine/ce/fuc/ |
D | com.fuc | 273 add b32 $r8 $r6 0x180 274 shl b32 $r8 8 275 iowr I[$r8] $r7 437 clear b32 $r8 464 st b8 D[$sp + $r8] $r12 465 add b32 $r8 1 476 ld b32 $r8 D[$r0 + #ctx_xcnt] 477 mulu $r6 $r8 484 mulu $r7 $r8 542 extr $r8 $r7 4:7 [all …]
|
/linux-4.1.27/arch/powerpc/platforms/ps3/ |
D | hvcall.S | 126 std r8, -48(r1); \ 142 std r8, 0(r11); \ 256 stdu r8, -40(r1); \ 271 std r8, 0(r11); \ 287 std r8, -40(r1); \ 303 std r8, 0(r11); \ 321 std r8, -40(r1); \ 338 std r8, 0(r11); \ 423 stdu r8, -32(r1); \ 451 std r8, -32(r1); \ [all …]
|
/linux-4.1.27/arch/unicore32/boot/compressed/ |
D | head.S | 29 ldm (r1, r2, r3, r5, r6, r7, r8), [r0]+ 48 add r8, r8, r0 69 csub.a r7, r8 168 .word _got_end @ r8
|
/linux-4.1.27/arch/arm/kernel/ |
D | entry-common.S | 126 ARM( add r8, sp, #S_PC ) 127 ARM( stmdb r8, {sp, lr}^ ) @ Calling sp, lr 128 THUMB( mov r8, sp ) 129 THUMB( store_user_sp_lr r8, r10, S_SP ) @ calling sp, lr 130 mrs r8, spsr @ called from non-FIQ mode, so ok. 132 str r8, [sp, #S_PSR] @ Save CPSR 152 tst r8, #PSR_T_BIT 167 tst r8, #PSR_T_BIT @ this is SPSR from save_user_regs 283 @ r8 = syscall table
|
D | entry-header.S | 98 @ r8-r12 is OK. 100 ldmia r9!, {r8, r10-r12} 113 str r8, [sp, #S_IP] 114 add r8, sp, #S_SP 116 stmia r8!, {r9-r12} 118 str r0, [r8] 321 add r8, r0, #S_PC 323 rfeia r8 399 tbl .req r8 @ syscall table pointer 400 why .req r8 @ Linux syscall (!= 0)
|
D | head.S | 110 ldmia r3, {r4, r8} 112 add r8, r8, r4 @ PHYS_OFFSET 114 ldr r8, =PLAT_PHYS_OFFSET @ always constant in this case 140 mov r8, r4 @ set TTBR1 to swapper_pg_dir 164 pgtbl r4, r8 @ page table address 232 orr r3, r8, r7 264 subne r3, r0, r8 386 ldr r8, [r7, lr] @ get secondary_data.swapper_pg_dir
|
D | fiqasm.S | 30 ldmia r0!, {r8 - r12} 43 stmia r0!, {r8 - r12}
|
/linux-4.1.27/arch/avr32/include/asm/ |
D | asm.h | 59 pushm r8-r9,r10,r11,r12,lr 68 popm r8-r9,r10,r11,r12,lr 73 stmts --sp, r0,r1,r2,r3,r4,r5,r6,r7,r8,r9,r10,r11,r12,sp,lr 84 ldmts sp++, r0,r1,r2,r3,r4,r5,r6,r7,r8,r9,r10,r11,r12,sp,lr
|
/linux-4.1.27/drivers/media/dvb-frontends/ |
D | stv6110.c | 183 u8 r8, ret = 0x04; in stv6110_set_bandwidth() local 187 r8 = 31; in stv6110_set_bandwidth() 189 r8 = 0; in stv6110_set_bandwidth() 191 r8 = (bandwidth / 2) / 1000000 - 5; in stv6110_set_bandwidth() 196 priv->regs[RSTV6110_CTRL3] |= (r8 & 0x1f); in stv6110_set_bandwidth() 374 u8 r8 = 0; in stv6110_get_bandwidth() local 379 r8 = priv->regs[RSTV6110_CTRL3] & 0x1f; in stv6110_get_bandwidth() 380 *bandwidth = (r8 + 5) * 2000000;/* x2 for ZIF tuner BW/2 = F+5 Mhz */ in stv6110_get_bandwidth()
|
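For reference, the two stv6110.c formulas quoted above round-trip exactly for even-MHz bandwidths: a request of 36 MHz stores r8 = (36000000 / 2) / 1000000 - 5 = 13 in CTRL3, and stv6110_get_bandwidth() then reports (13 + 5) * 2000000 = 36 MHz; applying the same read-back formula to the clamp values 31 and 0 gives 72 MHz and 10 MHz respectively.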
/linux-4.1.27/arch/x86/ia32/ |
D | ia32entry.S | 65 CFI_UNDEFINED r8 204 xorq %r8,%r8 358 CFI_REGISTER rsp,r8 367 pushq_cfi %r8 /* pt_regs->sp */ 388 1: movl (%r8),%r9d 420 xorq %r8,%r8 588 mov %r8, %rcx
|
/linux-4.1.27/arch/avr32/mm/ |
D | copy_page.S | 24 ld.d r8, r11++ 25 st.d r12++, r8
|
/linux-4.1.27/drivers/misc/sgi-xp/ |
D | xp_nofault.S | 26 mov r8=r0 // Stage a success return value 34 mov r8=1 // Return value of 1
|
/linux-4.1.27/arch/sh/kernel/cpu/sh5/ |
D | switchto.S | 57 addi.l r0, (63*8), r8 ! base of pt_regs.trregs 114 st.q r8, (5*8), r45 115 st.q r8, (6*8), r46 116 st.q r8, (7*8), r47 143 ld.q r8, (5*8), r45 144 ld.q r8, (6*8), r46 145 ld.q r8, (7*8), r47
|
/linux-4.1.27/arch/x86/purgatory/ |
D | entry64.S | 48 movq r8(%rip), %r8 71 r8: .quad 0x0 label
|
/linux-4.1.27/drivers/gpu/drm/nouveau/nvkm/engine/sec/fuc/ |
D | g98.fuc0s | 186 add b32 $r8 $r6 0x180 187 shl b32 $r8 8 188 iowr I[$r8] $r7 456 shr b32 $r8 $r5 8 458 or $r4 $r8 464 shr b32 $r8 $r7 8 466 or $r6 $r8 470 ld b32 $r8 D[$r0 + #ctx_mode] 471 shl b32 $r8 2 474 ld b16 $r9 D[$r8 + #sec_dtable] [all …]
|
/linux-4.1.27/sound/oss/ |
D | vidc_fill.S | 137 stmfd sp!, {r4 - r8, lr} 138 ldr r8, =dma_start 139 ldmia r8, {r0, r1, r2, r3, r4, r5} 156 stmia r8, {r0, r1} 168 ldmdb r8, {r3, r4, r5} 187 ldmfd sp!, {r4 - r8, lr}
|
/linux-4.1.27/tools/testing/selftests/powerpc/pmu/ebb/ |
D | busy_loop.S | 41 li r8, 0x0808 42 std r8, -136(%r1) 101 cmpwi r8, 0x0808 152 li r8, 0xad 188 ld r8, -136(%r1) 189 cmpwi r8, 0x0808
|
/linux-4.1.27/arch/ia64/sn/kernel/sn2/ |
D | ptc_deadlock.S | 39 mov r8=r0 88 (p8) add r8=1,r8
|
/linux-4.1.27/arch/cris/arch-v10/lib/ |
D | checksum.S | 25 ;; only r0 - r8 have to be saved, the other ones are clobber-able 29 movem $r8,[$sp] 55 add.d $r8,$r12 71 movem [$sp+],$r8 ; restore regs
|
D | checksumcopy.S | 29 ;; only r0 - r8 have to be saved, the other ones are clobber-able 33 movem $r8, [$sp] 61 add.d $r8,$r13 77 movem [$sp+],$r8 ; restore regs
|
/linux-4.1.27/arch/arm/include/asm/ |
D | entry-macro-multi.S | 36 mov r8, lr 38 ret r8
|
/linux-4.1.27/arch/arm/boot/bootp/ |
D | init.S | 37 @ r8 = initrd end 60 stmia r9, {r5, r6, r7, r8, r10} 86 .word initrd_size @ r8
|
/linux-4.1.27/arch/cris/arch-v32/lib/ |
D | checksum.S | 23 movem $r8,[$sp] 40 addc $r8,$r12 52 movem [$sp+],$r8 ; restore regs
|
D | checksumcopy.S | 27 movem $r8,[$sp] 44 addc $r8,$r13 55 movem [$sp+],$r8 ; restore regs
|
/linux-4.1.27/arch/arm/mach-tegra/ |
D | reset-handler.S | 46 check_cpu_part_num 0xc09, r8, r9 72 cmp r8, r9 88 cmp r8, r9 180 ldr r8, [r12, #RESET_DATA(MASK_LP1)] 181 tst r8, r11 @ if in_lp1
|
/linux-4.1.27/arch/cris/include/arch-v10/arch/ |
D | ptrace.h | 67 unsigned long r8; member 97 unsigned long r8; member
|
D | elf.h | 34 (_r)->r9 = 0; (_r)->r8 = 0; (_r)->r7 = 0; (_r)->r6 = 0; \ 54 pr_reg[8] = regs->r8; \
|
/linux-4.1.27/arch/arm/boot/compressed/ |
D | ll_char_wr.S | 38 @ Smashable regs: {r0 - r3}, [r4 - r7], (r8 - fp), [ip], (sp), [lr], (pc) 65 @ Smashable regs: {r0 - r3}, [r4], {r5 - r7}, (r8 - fp), [ip], (sp), {lr}, (pc) 83 @ Smashable regs: {r0 - r3}, [r4], {r5 - r7}, (r8 - fp), [ip], (sp), {lr}, (pc) 111 @ Smashable regs: {r0 - r3}, [r4], {r5, r6}, [r7], (r8 - fp), [ip], (sp), [lr], (pc)
|
D | head-shmobile.S | 51 mov r8, r4 68 mov r8, #0 @ pass null pointer as atag
|
/linux-4.1.27/arch/cris/include/arch-v32/arch/ |
D | ptrace.h | 67 unsigned long r8; member 104 unsigned long r8; member
|
D | elf.h | 23 (_r)->r9 = 0; (_r)->r8 = 0; (_r)->r7 = 0; (_r)->r6 = 0; \ 50 pr_reg[8] = regs->r8; \
|
/linux-4.1.27/arch/unicore32/kernel/ |
D | sleep.S | 92 movl r8, #0x800001ff @ epip4d 93 stw r8, [r1+], #0xc 110 mov r8, #0x1 141 stw r8, [r1]
|
/linux-4.1.27/arch/powerpc/platforms/powernv/ |
D | subcore-asm.S | 49 mfspr r8, SPRN_PMCR 82 mtspr SPRN_PMCR, r8
|
/linux-4.1.27/arch/parisc/kernel/ |
D | entry.S | 158 ldi \code, %r8 191 va = r8 /* virtual address for which the trap occurred */ 1038 copy %r8,%r26 1042 copy %r8,%r26 1071 extrd,u,*<> %r8,PSW_W_BIT,1,%r0 1131 va = r8 /* virtual address for which the trap occurred */ 1346 extrw,u %r9,15,5,%r8 /* Get index register # */ 1350 extrw,u %r9,10,5,%r8 /* Get base register # */ 1356 mfctl %ipsw,%r8 1358 or %r8,%r9,%r8 /* Set PSW_N */ [all …]
|
/linux-4.1.27/arch/microblaze/include/asm/ |
D | syscall.h | 50 case 3: return regs->r8; in microblaze_get_syscall_arg() 70 regs->r8 = val; in microblaze_set_syscall_arg()
|
/linux-4.1.27/arch/arm/mach-sa1100/ |
D | sleep.S | 100 orr r8, r7, #MDREFR_SLFRSH 107 bic r11, r8, #MDREFR_SLFRSH 128 str r8, [r6]
|
/linux-4.1.27/arch/arm/mach-pxa/ |
D | sleep.S | 70 ldr r8, [r6] @ keep original value for resume 113 ldr r8, [r6] @ keep original value for resume 116 bic r7, r8, #CCCR_M_MASK | CCCR_N_MASK 148 str r8, [r6]
|
/linux-4.1.27/arch/microblaze/kernel/ |
D | head.S | 69 addi r8, r0, 0xFFFFFFFF 70 mts rshr, r8 79 msrclr r8, 0 /* clear nothing - just read msr for test */ 80 cmpu r8, r8, r1 /* r1 must contain msr reg content */
|
/linux-4.1.27/arch/nios2/include/asm/ |
D | syscall.h | 86 *args++ = regs->r8; in syscall_get_arguments() 125 regs->r8 = *args++; in syscall_set_arguments()
|
/linux-4.1.27/arch/arm/mach-shmobile/ |
D | headsmp.S | 58 ldr r8, [r5, r1, lsl #2] 59 cmp r8, r0
|
/linux-4.1.27/arch/x86/xen/ |
D | xen-asm.S | 127 push %r8 135 pop %r8
|
/linux-4.1.27/arch/parisc/include/asm/ |
D | assembly.h | 188 STREG %r8, PT_GR8 (\regs) 222 LDREG PT_GR8 (\regs), %r8 352 std %r8, -104(%r30) 378 ldd -104(%r30), %r8 396 stw %r8, -108(%r30) 422 ldw -108(%r30), %r8 467 mfctl %cr22, %r8 468 STREG %r8, PT_PSW(\regs)
|
/linux-4.1.27/arch/nios2/kernel/ |
D | process.c | 80 regs->r5, regs->r6, regs->r7, regs->r8); in show_regs() 139 childstack->r23 = regs->r8; in copy_thread() 179 fp->r8, fp->r9, fp->r10, fp->r11); in dump()
|
/linux-4.1.27/arch/arm/mach-ep93xx/ |
D | crunch-bits.S | 71 ldr r8, =(EP93XX_APB_VIRT_BASE + 0x00130000) @ syscon addr 73 ldr r1, [r8, #0x80] 77 str r3, [r8, #0xc0] 79 str r1, [r8, #0x80] 89 ldr r2, [r8, #0x80]
|