/linux-4.4.14/arch/x86/kernel/ |
D | mcount_64.S | 99 movq %rcx, RCX(%rsp) 138 movq RCX(%rsp), %rcx 163 movq $0, %rcx 208 movq MCOUNT_REG_SIZE(%rsp), %rcx 209 movq %rcx, EFLAGS(%rsp) 211 movq $__KERNEL_DS, %rcx 212 movq %rcx, SS(%rsp) 213 movq $__KERNEL_CS, %rcx 214 movq %rcx, CS(%rsp) 216 leaq MCOUNT_REG_SIZE+8*2(%rsp), %rcx [all …]
|
D | relocate_kernel_64.S | 135 movq %rcx, %r11 214 movq %rdi, %rcx /* Put the page_list in %rcx */ 221 movq (%rbx), %rcx 226 movq %rcx, %rdi 232 movq %rcx, %rbx 242 movq %rcx, %rsi /* For ever source page do a copy */
|
D | head_64.S | 130 leaq (_end - 1)(%rip), %rcx 131 shrq $PMD_SHIFT, %rcx 132 subq %rdi, %rcx 188 movq %rcx, %cr4 364 pushq %rcx # 56(%rsp) 395 movq 88(%rsp),%rcx # %rip 419 popq %rcx
|
/linux-4.4.14/arch/x86/power/ |
D | hibernate_asm_64.S | 32 movq %rcx, pt_regs_cx(%rax) 66 movq %cr3, %rcx; # flush TLB 67 movq %rcx, %cr3; 76 movq relocated_restore_code(%rip), %rcx 77 jmpq *%rcx 88 movq $(PAGE_SIZE >> 3), %rcx 118 movq %cr3, %rcx; # flush TLB 119 movq %rcx, %cr3 129 movq pt_regs_cx(%rax), %rcx
|
/linux-4.4.14/arch/x86/lib/ |
D | memset_64.S | 32 movq %rdx,%rcx 34 shrq $3,%rcx 61 movq %rdx,%rcx 73 imulq %rcx,%rax 81 movq %rdx,%rcx 82 shrq $6,%rcx 87 decq %rcx
|
D | copy_user_64.S | 21 movq %rdi,%rcx 22 addq %rdx,%rcx 24 cmpq TI_addr_limit(%rax),%rcx 36 movq %rsi,%rcx 37 addq %rdx,%rcx 39 cmpq TI_addr_limit(%rax),%rcx 131 40: leal (%rdx,%rcx,8),%edx 195 11: leal (%rdx,%rcx,8),%ecx 347 lea (%rdx,%rcx,8),%rdx 350 lea (%rdx,%rcx,4),%rdx
|
D | memmove_64.S | 84 movq %rdx, %rcx 87 shrq $3, %rcx 98 movq %rdx, %rcx 103 shrq $3, %rcx
|
D | memcpy_64.S | 33 movq %rdx, %rcx 34 shrq $3, %rcx 49 movq %rdx, %rcx
|
D | rwsem.S | 68 pushq %rcx; \ 79 popq %rcx; \
|
D | cmpxchg16b_emu.S | 42 movq %rcx, PER_CPU_VAR(8(%rsi))
|
D | copy_page_64.S | 29 dec %rcx
|
D | csum-copy_64.S | 67 movq %rcx, %r12
|
/linux-4.4.14/arch/x86/xen/ |
D | xen-asm_64.S | 26 mov 8+0(%rsp), %rcx 64 pushq %rcx 83 pushq %rcx 109 mov 0*8(%rsp), %rcx
|
D | xen-asm.S | 123 push %rcx 139 pop %rcx
|
/linux-4.4.14/arch/x86/crypto/ |
D | camellia-aesni-avx-asm_64.S | 193 %rcx, (%r9)); 730 leaq 8 * 16(%rax), %rcx; 734 %xmm15, %rax, %rcx); 738 %xmm15, %rax, %rcx, 0); 741 %rcx, %xmm8, %xmm9, %xmm10, %xmm11, %xmm12, %xmm13, %xmm14, 750 %xmm15, %rax, %rcx, 8); 753 %rcx, %xmm8, %xmm9, %xmm10, %xmm11, %xmm12, %xmm13, %xmm14, 762 %xmm15, %rax, %rcx, 16); 770 vmovdqu 0 * 16(%rcx), %xmm8; 771 vmovdqu 1 * 16(%rcx), %xmm9; [all …]
|
D | salsa20-x86_64-asm_64.S | 16 mov %rcx,%rdx 40 movq 0(%r8),%rcx 56 movq %rcx,56(%rsp) 85 mov %rdx,%rcx 110 mov %rsi,%rcx 226 xor %r15,%rcx 228 lea (%rbx,%rcx),%r15 234 lea (%rcx,%rax),%r15 288 xor %rbp,%rcx 296 lea (%rdi,%rcx),%rbp [all …]
|
D | camellia-aesni-avx2-asm_64.S | 232 %rcx, (%r9)); 770 leaq 8 * 32(%rax), %rcx; 774 %ymm15, %rax, %rcx); 778 %ymm15, %rax, %rcx, 0); 781 %rcx, %ymm8, %ymm9, %ymm10, %ymm11, %ymm12, %ymm13, %ymm14, 790 %ymm15, %rax, %rcx, 8); 793 %rcx, %ymm8, %ymm9, %ymm10, %ymm11, %ymm12, %ymm13, %ymm14, 802 %ymm15, %rax, %rcx, 16); 810 vmovdqu 0 * 32(%rcx), %ymm8; 811 vmovdqu 1 * 32(%rcx), %ymm9; [all …]
|
D | twofish-avx-x86_64-asm_64.S | 84 #define RGI2 %rcx 261 pushq %rcx; 280 popq %rcx; 402 load_ctr_8way(%rcx, .Lbswap128_mask, RA1, RB1, RC1, RD1, RA2, RB2, RC2, 425 load_xts_8way(%rcx, %rdx, %rsi, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2, 447 load_xts_8way(%rcx, %rdx, %rsi, RC1, RD1, RA1, RB1, RC2, RD2, RA2, RB2,
|
D | cast6-avx-x86_64-asm_64.S | 80 #define RGI2 %rcx 418 load_ctr_8way(%rcx, .Lbswap128_mask, RA1, RB1, RC1, RD1, RA2, RB2, RC2, 441 load_xts_8way(%rcx, %rdx, %rsi, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2, 463 load_xts_8way(%rcx, %rdx, %rsi, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2,
|
D | crc32c-pcl-intel-asm_64.S | 81 #define bufptmp %rcx 82 #define block_0 %rcx
|
D | blowfish-x86_64-asm_64.S | 41 #define RX2 %rcx 308 pushq %rcx;
|
D | camellia-x86_64-asm_64.S | 58 #define RCD0 %rcx 202 movq %rcx, RXOR; 437 movq %rcx, RXOR;
|
D | ghash-clmulni-intel_asm.S | 116 movups (%rcx), SHASH
|
D | cast5-avx-x86_64-asm_64.S | 80 #define RGI2 %rcx 497 vmovq (%rcx), RX; 521 vmovq RX, (%rcx);
|
D | twofish-x86_64-asm_64-3way.S | 44 #define RAB2 %rcx 233 pushq %rcx; /* bool xor */
|
D | poly1305-sse2-x86_64.S | 55 # %rcx: Block count 270 dec %rcx 306 # %rcx: Doubleblock count 575 dec %rcx
|
D | crct10dif-pcl-asm_64.S | 78 mov %rsp, %rcx
|
D | poly1305-avx2-x86_64.S | 88 # %rcx: Quadblock count 378 dec %rcx
|
D | aes-x86_64-asm_64.S | 31 #define R3 %rcx
|
D | twofish-x86_64-asm_64.S | 52 #define R2 %rcx
|
D | aes_ctrby8_avx-x86_64.S | 91 #define p_out %rcx
|
D | serpent-avx2-asm_64.S | 741 load_ctr_16way(%rcx, .Lbswap128_mask, RA1, RB1, RC1, RD1, RA2, RB2, RC2, 764 load_xts_16way(%rcx, %rdx, %rsi, RA1, RB1, RC1, RD1, RA2, RB2, RC2, 788 load_xts_16way(%rcx, %rdx, %rsi, RA1, RB1, RC1, RD1, RA2, RB2, RC2,
|
D | serpent-avx-x86_64-asm_64.S | 734 load_ctr_8way(%rcx, .Lbswap128_mask, RA1, RB1, RC1, RD1, RA2, RB2, RC2, 753 load_xts_8way(%rcx, %rdx, %rsi, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2, 773 load_xts_8way(%rcx, %rdx, %rsi, RA1, RB1, RC1, RD1, RA2, RB2, RC2, RD2,
|
D | sha1_avx2_x86_64_asm.S | 85 #define REG_RA %rcx
|
D | sha512-ssse3-asm.S | 61 T1 = %rcx
|
D | sha512-avx-asm.S | 62 T1 = %rcx
|
D | des3_ede-asm_64.S | 52 #define RW2 %rcx
|
D | aesni-intel_asm.S | 96 #define arg4 rcx 132 #define LEN %rcx 2692 addq %rcx, KEYP
|
D | sha512-avx2-asm.S | 79 c = %rcx
|
D | aesni-intel_avx-x86_64.S | 186 #define arg4 %rcx
|
/linux-4.4.14/arch/x86/boot/compressed/ |
D | head_64.S | 245 movq %rcx, efi64_config(%rip) /* Handle */ 349 movq $_bss /* - $startup_32 */, %rcx 350 shrq $3, %rcx 384 leaq _ebss(%rip), %rcx 385 subq %rdi, %rcx 386 shrq $3, %rcx 393 leaq _egot(%rip), %rcx 395 cmpq %rcx, %rdx
|
D | efi_thunk_64.S | 93 or %rcx, %rax
|
/linux-4.4.14/arch/x86/entry/ |
D | entry_64_compat.S | 85 pushq %rcx /* pt_regs->cx */ 177 pushq %rcx /* pt_regs->ip */ 214 movq RIP(%rsp), %rcx /* pt_regs->ip (in rcx) */ 287 pushq %rcx /* pt_regs->cx */ 327 xchg %r8, %rcx
|
D | entry_64.S | 161 pushq %rcx /* pt_regs->ip */ 166 pushq %rcx /* pt_regs->cx */ 184 movq %r10, %rcx 211 movq RIP(%rsp), %rcx 268 movq %r10, %rcx /* fixup for C */ 289 movq RCX(%rsp), %rcx 291 cmpq %rcx, %r11 /* RCX == RIP */ 307 shl $(64 - (__VIRTUAL_MASK_SHIFT+1)), %rcx 308 sar $(64 - (__VIRTUAL_MASK_SHIFT+1)), %rcx 311 cmpq %rcx, %r11 [all …]
|
D | calling.h | 95 .macro SAVE_C_REGS_HELPER offset=0 rax=1 rcx=1 r8910=1 r11=1 107 .if \rcx 108 movq %rcx, 11*8+\offset(%rsp) 170 movq 11*8(%rsp), %rcx
|
D | thunk_64.S | 21 pushq %rcx 61 popq %rcx
|
/linux-4.4.14/arch/x86/purgatory/ |
D | entry64.S | 42 movq rcx(%rip), %rcx 64 rcx: .quad 0x0 label
|
/linux-4.4.14/arch/x86/um/ |
D | stub_64.S | 41 pop %rcx 42 cmp %rcx, %rax
|
/linux-4.4.14/arch/x86/platform/efi/ |
D | efi_stub_64.S | 84 mov %rcx, %r8 85 mov %rsi, %rcx
|
D | efi_thunk_64.S | 113 or %rcx, %rax
|
/linux-4.4.14/arch/x86/crypto/sha-mb/ |
D | sha1_mb_mgr_submit_avx2.S | 64 size_offset = %rcx 65 tmp2 = %rcx 101 # arg 1 : rcx : state
|
D | sha1_x8_avx2.S | 61 ## Function clobbers: rax, rcx, rdx, rbx, rsi, rdi, r9-r15# ymm0-15 63 ## Linux clobbers: rax rbx rcx rdx rsi r9 r10 r11 r12 r13 r14 r15 220 inp7 = %rcx
|
D | sha1_mb_mgr_flush_avx2.S | 114 # arg 1 : rcx : state
|
/linux-4.4.14/arch/x86/kernel/acpi/ |
D | wakeup_64.S | 52 movq %rcx, pt_regs_cx(%rax) 99 movq pt_regs_cx(%rax), %rcx
|
/linux-4.4.14/tools/testing/selftests/x86/ |
D | test_syscall_vdso.c | 67 uint64_t rax, rbx, rcx, rdx; member 120 …printf("ax:%016llx bx:%016llx cx:%016llx dx:%016llx\n", regs64.rax, regs64.rbx, regs64.rcx, reg… in print_regs64()
|
/linux-4.4.14/arch/x86/include/uapi/asm/ |
D | ptrace.h | 60 unsigned long rcx; member
|
D | sigcontext.h | 339 __u64 rcx; member
|
D | kvm.h | 114 __u64 rax, rbx, rcx, rdx; member
|
/linux-4.4.14/tools/perf/arch/x86/util/ |
D | dwarf-regs.c | 81 REG_OFFSET_NAME_64("%cx", rcx),
|
/linux-4.4.14/arch/x86/kvm/ |
D | trace.h | 154 unsigned long rcx, unsigned long rdx), 155 TP_ARGS(function, rax, rbx, rcx, rdx), 161 __field( unsigned long, rcx ) 169 __entry->rcx = rcx; 175 __entry->rbx, __entry->rcx, __entry->rdx)
|
D | emulate.c | 347 ON64(FOP1E(op, rcx)) \ 356 ON64(FOP1EEX(op, rcx)) \ 2736 u64 msr_data, rcx, rdx; in em_sysexit() local 2752 rcx = reg_read(ctxt, VCPU_REGS_RCX); in em_sysexit() 2764 rcx = (u32)rcx; in em_sysexit() 2774 if (is_noncanonical_address(rcx) || in em_sysexit() 2786 *reg_write(ctxt, VCPU_REGS_RSP) = rcx; in em_sysexit() 4010 u64 rcx = reg_read(ctxt, VCPU_REGS_RCX); in check_rdpmc() local 4013 ctxt->ops->check_pmc(ctxt, rcx)) in check_rdpmc()
|
D | x86.c | 6852 regs->rcx = kvm_register_read(vcpu, VCPU_REGS_RCX); in kvm_arch_vcpu_ioctl_get_regs() 6882 kvm_register_write(vcpu, VCPU_REGS_RCX, regs->rcx); in kvm_arch_vcpu_ioctl_set_regs()
|
D | svm.c | 3855 [rcx]"i"(offsetof(struct vcpu_svm, vcpu.arch.regs[VCPU_REGS_RCX])), in svm_vcpu_run()
|
D | vmx.c | 8653 [rcx]"i"(offsetof(struct vcpu_vmx, vcpu.arch.regs[VCPU_REGS_RCX])), in vmx_vcpu_run()
|
/linux-4.4.14/arch/x86/include/asm/xen/ |
D | interface_64.h | 83 uint64_t rax, r11, rcx, flags, rip, cs, rflags, rsp, ss; member
|
/linux-4.4.14/tools/perf/arch/x86/tests/ |
D | regs_load.S | 33 movq %rcx, CX(%rdi)
|
/linux-4.4.14/arch/x86/include/asm/ |
D | kexec.h | 192 uint64_t rcx; member
|
D | inst.h | 77 .ifc \r64,%rcx
|
D | paravirt.h | 876 COND_PUSH(set, CLBR_RCX, rcx); \ 892 COND_POP(set, CLBR_RCX, rcx); \
|
/linux-4.4.14/Documentation/virtual/kvm/ |
D | hypercalls.txt | 8 Up to four arguments may be passed in rbx, rcx, rdx, and rsi respectively.
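The hypercalls.txt hit above describes the guest-side calling convention: hypercall number in rax, up to four arguments in rbx, rcx, rdx and rsi, result returned in rax. A minimal sketch of a four-argument wrapper, assuming Intel's vmcall instruction (AMD guests use vmmcall) and an illustrative name rather than the kernel's own kvm_hypercall4() wrappers in arch/x86/include/asm/kvm_para.h:

    /* Sketch only: number in %rax, arguments in %rbx, %rcx, %rdx, %rsi,
     * return value back in %rax, per the convention quoted above. */
    static inline long my_hypercall4(unsigned int nr, unsigned long a0,
                                     unsigned long a1, unsigned long a2,
                                     unsigned long a3)
    {
            long ret;

            asm volatile("vmcall"               /* vmmcall on AMD */
                         : "=a"(ret)
                         : "a"(nr), "b"(a0), "c"(a1), "d"(a2), "S"(a3)
                         : "memory");
            return ret;
    }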
|
D | api.txt | 304 __u64 rax, rbx, rcx, rdx;
|
/linux-4.4.14/fs/jfs/ |
D | jfs_imap.c | 2861 int rc, rcx = 0; in diExtendFS() local 2897 rcx = rc; in diExtendFS() 2933 rcx = rc; in diExtendFS() 2959 rcx = rc; in diExtendFS() 2983 return rcx; in diExtendFS()
|
/linux-4.4.14/Documentation/networking/ |
D | filter.txt | 720 R4 - rcx 728 ... since x86_64 ABI mandates rdi, rsi, rdx, rcx, r8, r9 for argument passing
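The filter.txt hit above covers the eBPF-to-x86_64 register mapping used by the JIT: BPF R1-R5 carry helper-function arguments and so land in the System V argument registers, which is why R4 maps onto rcx. A small illustrative table (the enum and array names are made up, not the JIT's actual encoding table in arch/x86/net/bpf_jit_comp.c):

    /* Illustrative mapping only; mirrors the list in filter.txt,
     * where BPF register R4 maps onto %rcx. */
    enum { R0, R1, R2, R3, R4, R5 };

    static const char * const bpf_to_x86_reg[] = {
            [R0] = "rax",   /* return value of helper calls */
            [R1] = "rdi",   /* 1st argument */
            [R2] = "rsi",   /* 2nd argument */
            [R3] = "rdx",   /* 3rd argument */
            [R4] = "rcx",   /* 4th argument */
            [R5] = "r8",    /* 5th argument */
    };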
|
/linux-4.4.14/arch/x86/ |
D | Kconfig | 284 …default "-fcall-saved-rdi -fcall-saved-rsi -fcall-saved-rdx -fcall-saved-rcx -fcall-saved-r8 -fcal…
|