/linux-4.1.27/arch/x86/power/ |
D | hibernate_asm_64.S | 26 movq $saved_context, %rax 27 movq %rsp, pt_regs_sp(%rax) 28 movq %rbp, pt_regs_bp(%rax) 29 movq %rsi, pt_regs_si(%rax) 30 movq %rdi, pt_regs_di(%rax) 31 movq %rbx, pt_regs_bx(%rax) 32 movq %rcx, pt_regs_cx(%rax) 33 movq %rdx, pt_regs_dx(%rax) 34 movq %r8, pt_regs_r8(%rax) 35 movq %r9, pt_regs_r9(%rax) [all …]
|
/linux-4.1.27/arch/x86/kernel/acpi/ |
D | wakeup_64.S | 16 movq saved_magic, %rax 18 cmpq %rdx, %rax 34 movq saved_rip, %rax 35 jmp *%rax 46 movq $saved_context, %rax 47 movq %rsp, pt_regs_sp(%rax) 48 movq %rbp, pt_regs_bp(%rax) 49 movq %rsi, pt_regs_si(%rax) 50 movq %rdi, pt_regs_di(%rax) 51 movq %rbx, pt_regs_bx(%rax) [all …]
|
/linux-4.1.27/arch/x86/lib/ |
D | memset_64.S | 38 movabs $0x0101010101010101,%rax 39 imulq %rsi,%rax 43 movq %r9,%rax 64 movq %r9,%rax 74 movabs $0x0101010101010101,%rax 75 imulq %rcx,%rax 91 movq %rax,(%rdi) 92 movq %rax,8(%rdi) 93 movq %rax,16(%rdi) 94 movq %rax,24(%rdi) [all …]
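Note: the memset_64.S hits above capture the byte-broadcast idiom — the fill byte is multiplied by 0x0101010101010101 so that %rax holds eight copies of it, and the unrolled movq stores then write the pattern eight bytes per instruction. A minimal C sketch of the same idea (the fill64 name is made up for illustration; this is not the kernel routine):

#include <stdint.h>
#include <stddef.h>

/*
 * Byte-broadcast: (uint64_t)c * 0x0101010101010101 replicates the
 * 8-bit fill value into every byte of a 64-bit word, mirroring
 * "movabs $0x0101010101010101,%rax; imulq %rsi,%rax" above.
 */
static void fill64(uint64_t *dst, unsigned char c, size_t qwords)
{
	uint64_t pattern = (uint64_t)c * 0x0101010101010101ULL;
	size_t i;

	for (i = 0; i < qwords; i++)
		dst[i] = pattern;	/* corresponds to the movq %rax,N(%rdi) stores */
}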
|
D | csum-copy_64.S | 109 adcq %rbx, %rax 110 adcq %r8, %rax 111 adcq %r11, %rax 112 adcq %rdx, %rax 113 adcq %r10, %rax 114 adcq %rbp, %rax 115 adcq %r14, %rax 116 adcq %r13, %rax 145 adcq %r9, %rax 159 adcq %rbx, %rax [all …]
|
D | rwsem.S | 91 movq %rax,%rdi 102 movq %rax,%rdi 115 movq %rax,%rdi 126 movq %rax,%rdi
|
D | copy_page_64.S | 37 movq 0x8*0(%rsi), %rax 48 movq %rax, 0x8*0(%rdi) 67 movq 0x8*0(%rsi), %rax 76 movq %rax, 0x8*0(%rdi)
|
D | clear_page_64.S | 38 #define PUT(x) movq %rax,x*8(%rdi) 39 movq %rax,(%rdi)
|
D | thunk_64.S | 24 pushq_cfi_reg rax 67 popq_cfi_reg rax
|
D | copy_user_64.S | 46 GET_THREAD_INFO(%rax) 50 cmpq TI_addr_limit(%rax),%rcx 63 GET_THREAD_INFO(%rax) 67 cmpq TI_addr_limit(%rax),%rcx
|
D | memcpy_64.S | 33 movq %rdi, %rax 49 movq %rdi, %rax 57 movq %rdi, %rax
|
D | cmpxchg16b_emu.S | 38 cmpq PER_CPU_VAR((%rsi)), %rax
|
D | memmove_64.S | 33 mov %rdi, %rax
|
/linux-4.1.27/arch/x86/platform/efi/ |
D | efi_thunk_64.S | 40 movq $__START_KERNEL_map, %rax 41 subq phys_base(%rip), %rax 51 subq %rax, %rbx 55 subq %rax, %rbx 71 push %rax 73 push %rax 75 push %rax 91 leaq efi_enter32(%rip), %rax 92 pushq %rax 107 test %rax, %rax [all …]
|
D | efi_stub_64.S | 16 mov %rsp, %rax; \ 19 mov %rax, (%rsp); \ 20 mov %cr0, %rax; \ 22 mov %rax, 0x8(%rsp); \ 78 mov (%rsp), %rax 79 mov 8(%rax), %rax 82 mov %rax, 40(%rsp)
|
/linux-4.1.27/arch/x86/kernel/ |
D | relocate_kernel_64.S | 63 movq %cr0, %rax 64 movq %rax, CR0(%r11) 65 movq %cr3, %rax 66 movq %rax, CR3(%r11) 67 movq %cr4, %rax 68 movq %rax, CR4(%r11) 117 movq %cr0, %rax 118 andq $~(X86_CR0_AM | X86_CR0_WP | X86_CR0_TS | X86_CR0_EM), %rax 120 movq %rax, %cr0 127 movq %rax, %cr4 [all …]
|
D | head_64.S | 27 #define GET_CR2_INTO(reg) GET_CR2_INTO_RAX ; movq %rax, reg 79 movq %rbp, %rax 87 leaq _text(%rip), %rax 88 shrq $MAX_PHYSMEM_BITS, %rax 110 movq %rdi, %rax 111 shrq $PGDIR_SHIFT, %rax 114 movq %rdx, 0(%rbx,%rax,8) 115 movq %rdx, 8(%rbx,%rax,8) 118 movq %rdi, %rax 119 shrq $PUD_SHIFT, %rax [all …]
|
D | entry_64.S | 157 CFI_REL_OFFSET rax, RAX+\offset 236 pushq_cfi_reg rax /* pt_regs->orig_ax */ 253 cmpq $__NR_syscall_max,%rax 260 call *sys_call_table(,%rax,8) 261 movq %rax,RAX(%rsp) 317 test %rax, %rax 320 movq ORIG_RAX(%rsp), %rax 327 movq %rax,%rdx 338 cmpq $__NR_syscall_max,%rax 345 call *sys_call_table(,%rax,8) [all …]
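Note: the entry_64.S hits show the 64-bit syscall dispatch pattern — the syscall number arrives in %rax, is bounds-checked against __NR_syscall_max, used to index sys_call_table (8 bytes per entry), and the handler's return value is stored back into the saved RAX slot. A rough, self-contained C sketch of that dispatch (stand-in table and names, not the kernel's own code):

typedef long (*sys_call_ptr_t)(long, long, long, long, long, long);

static long sys_ni_syscall(long a, long b, long c, long d, long e, long f)
{
	return -38;			/* -ENOSYS */
}

/* stand-in table; the real sys_call_table is generated by the kernel build */
static const sys_call_ptr_t sys_call_table[] = { sys_ni_syscall };
#define NR_SYSCALL_MAX 0

static long dispatch_syscall(unsigned long nr, long a1, long a2, long a3,
			     long a4, long a5, long a6)
{
	if (nr > NR_SYSCALL_MAX)	/* cmpq $__NR_syscall_max,%rax */
		return -38;
	/* call *sys_call_table(,%rax,8): index by syscall number, 8 bytes/entry */
	return sys_call_table[nr](a1, a2, a3, a4, a5, a6);
}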
|
D | mcount_64.S | 98 movq %rax, RAX(%rsp) 139 movq RAX(%rsp), %rax 226 movq EFLAGS(%rsp), %rax 227 movq %rax, MCOUNT_REG_SIZE(%rsp) 230 movq RIP(%rsp), %rax 231 movq %rax, MCOUNT_REG_SIZE+8(%rsp) 315 movq %rax, (%rsp) 321 movq %rax, %rdi 323 movq (%rsp), %rax
|
D | vsyscall_emu_64.S | 21 mov $__NR_gettimeofday, %rax 26 mov $__NR_time, %rax 31 mov $__NR_getcpu, %rax
|
/linux-4.1.27/arch/x86/boot/compressed/ |
D | efi_thunk_64.S | 30 leaq efi_exit32(%rip), %rax 32 leaq efi_gdt64(%rip), %rax 34 movl %eax, 2(%rax) /* Fixup the gdt base address */ 37 push %rax 39 push %rax 41 push %rax 65 leaq efi32_boot_gdt(%rip), %rax 66 lgdt (%rax) 69 leaq efi_enter32(%rip), %rax 70 pushq %rax [all …]
|
D | head_64.S | 248 leaq efi64_config(%rip), %rax 249 movq %rax, efi_config(%rip) 260 movq %rax, %rdi 262 cmpq $0,%rax 264 mov %rax, %rsi 265 leaq startup_32(%rip), %rax 277 movq efi_config(%rip), %rax 278 addq %rbp, 88(%rax) 282 movq %rax,%rsi 283 cmpq $0,%rax [all …]
|
/linux-4.1.27/arch/x86/um/ |
D | stub_64.S | 14 movq %rax, (%rbx) 21 mov $(STUB_DATA & 0xffffffff), %rax 22 or %rax, %rbx 28 mov 0x0(%rsp), %rax 32 mov %rax, 8(%rbx) 33 cmp $0, %rax 40 add %rax, %rsp 43 pop %rax 58 cmp %rcx, %rax 63 mov %rax, (%rbx)
|
/linux-4.1.27/arch/x86/ia32/ |
D | ia32entry.S | 34 .macro CLEAR_RREGS _r9=rax 36 movq %rax,R11(%rsp) 37 movq %rax,R10(%rsp) 39 movq %rax,R8(%rsp) 140 pushq_cfi_reg rax /* pt_regs->orig_ax */ 145 pushq_cfi_reg rax /* pt_regs->ax */ 172 cmpq $(IA32_NR_syscalls-1),%rax 182 call *ia32_sys_call_table(,%rax,8) 183 movq %rax,RAX(%rsp) 251 cmpq $(IA32_NR_syscalls-1),%rax [all …]
|
/linux-4.1.27/arch/x86/crypto/ |
D | crc32c-pcl-intel-asm_64.S | 221 shlq $3, %rax # rax *= 8 222 pmovzxdq (bufp,%rax), %xmm0 # 2 consts: K1:K2 223 leal (%eax,%eax,2), %eax # rax *= 3 (total *24) 224 subq %rax, tmp # tmp -= rax*24 233 movq %xmm1, %rax 234 xor -i*8(block_2), %rax 236 crc32 %rax, crc_init 308 movq crc_init, %rax
|
D | camellia-aesni-avx2-asm_64.S | 240 %rax, (%r9)); 770 leaq 8 * 32(%rax), %rcx; 774 %ymm15, %rax, %rcx); 778 %ymm15, %rax, %rcx, 0); 780 fls32(%rax, %ymm0, %ymm1, %ymm2, %ymm3, %ymm4, %ymm5, %ymm6, %ymm7, 790 %ymm15, %rax, %rcx, 8); 792 fls32(%rax, %ymm0, %ymm1, %ymm2, %ymm3, %ymm4, %ymm5, %ymm6, %ymm7, 802 %ymm15, %rax, %rcx, 16); 821 %ymm15, (key_table)(CTX, %r8, 8), (%rax), 1 * 32(%rax)); 829 fls32(%rax, %ymm0, %ymm1, %ymm2, %ymm3, %ymm4, %ymm5, %ymm6, %ymm7, [all …]
|
D | camellia-aesni-avx-asm_64.S | 201 %rax, (%r9)); 730 leaq 8 * 16(%rax), %rcx; 734 %xmm15, %rax, %rcx); 738 %xmm15, %rax, %rcx, 0); 740 fls16(%rax, %xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7, 750 %xmm15, %rax, %rcx, 8); 752 fls16(%rax, %xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7, 762 %xmm15, %rax, %rcx, 16); 781 %xmm15, (key_table)(CTX, %r8, 8), (%rax), 1 * 16(%rax)); 789 fls16(%rax, %xmm0, %xmm1, %xmm2, %xmm3, %xmm4, %xmm5, %xmm6, %xmm7, [all …]
|
D | salsa20-x86_64-asm_64.S | 44 movq 16(%r8),%rax 60 movq %rax,72(%rsp) 124 mov %r8,%rax 216 lea (%rax,%rbp),%r15 232 xor %r15,%rax 234 lea (%rcx,%rax),%r15 282 xor %rbp,%rax 290 lea (%r15,%rax),%rbp 302 lea (%rax,%r8),%rbp 432 lea (%rax,%rbp),%r15 [all …]
|
D | aesni-intel_asm.S | 127 #define AREG %rax 260 mov %arg5, %rax # %rax = *Y0 261 movdqu (%rax), \XMM0 # XMM0 = Y0 481 mov %arg5, %rax # %rax = *Y0 482 movdqu (%rax), \XMM0 # XMM0 = Y0 1398 MOVQ_R64_XMM %xmm0, %rax 1401 mov %rax, (%arg2 , %r11, 1) 1404 MOVQ_R64_XMM %xmm0, %rax 1409 shr $8, %rax 1426 mov %arg5, %rax # %rax = *Y0 [all …]
|
D | aesni-intel_avx-x86_64.S | 354 ## r10, r11, r12, rax are clobbered 398 mov arg5, %rax # rax = *Y0 399 vmovdqu (%rax), \CTR # CTR = Y0 1366 vmovq %xmm9, %rax 1370 mov %rax, (arg2 , %r11) 1373 vmovq %xmm9, %rax 1379 shr $8, %rax 1398 mov arg5, %rax # rax = *Y0 1399 vmovdqu (%rax), %xmm9 # xmm9 = Y0 1418 vmovq %xmm9, %rax [all …]
|
D | serpent-sse2-x86_64-asm_64.S | 647 leaq (4*4*4)(%rdx), %rax; 649 read_blocks(%rax, RA2, RB2, RC2, RD2, RK0, RK1, RK2); 685 leaq (4*4*4)(%rsi), %rax; 691 write_blocks(%rax, RA2, RB2, RC2, RD2, RK0, RK1, RK2); 697 xor_blocks(%rax, RA2, RB2, RC2, RD2, RK0, RK1, RK2); 711 leaq (4*4*4)(%rdx), %rax; 713 read_blocks(%rax, RA2, RB2, RC2, RD2, RK0, RK1, RK2); 749 leaq (4*4*4)(%rsi), %rax; 751 write_blocks(%rax, RC2, RD2, RB2, RE2, RK0, RK1, RK2);
|
D | sha512-avx-asm.S | 72 tmp0 = %rax 285 mov %rsp, %rax 288 mov %rax, frame_RSPSAVE(%rsp)
|
D | sha512-ssse3-asm.S | 71 tmp0 = %rax 284 mov %rsp, %rax 287 mov %rax, frame_RSPSAVE(%rsp)
|
D | blowfish-x86_64-asm_64.S | 39 #define RX0 %rax
|
D | sha1_ssse3_asm.S | 99 xor %rax, %rax
|
D | aes-x86_64-asm_64.S | 21 #define R1 %rax
|
D | twofish-x86_64-asm_64-3way.S | 42 #define RAB0 %rax
|
D | twofish-x86_64-asm_64.S | 42 #define R0 %rax
|
D | sha512-avx2-asm.S | 86 a = %rax 573 mov %rsp, %rax 576 mov %rax, frame_RSPSAVE(%rsp)
|
D | cast6-avx-x86_64-asm_64.S | 84 #define RGI3 %rax
|
D | twofish-avx-x86_64-asm_64.S | 88 #define RGI3 %rax
|
D | camellia-x86_64-asm_64.S | 57 #define RAB0 %rax
|
D | sha1_avx2_x86_64_asm.S | 88 #define REG_RD %rax
|
D | cast5-avx-x86_64-asm_64.S | 84 #define RGI3 %rax
|
D | sha256-avx2-asm.S | 540 mov %rsp, %rax 543 mov %rax, _RSP(%rsp)
|
D | des3_ede-asm_64.S | 50 #define RW0 %rax
|
/linux-4.1.27/arch/x86/purgatory/ |
D | entry64.S | 34 leaq new_cs_exit(%rip), %rax 35 pushq %rax 40 movq rax(%rip), %rax 63 rax: .quad 0x0 label
|
/linux-4.1.27/tools/perf/arch/x86/tests/ |
D | regs_load.S | 31 movq %rax, AX(%rdi) 39 leaq 8(%rsp), %rax /* exclude this call. */ 40 movq %rax, SP(%rdi) 42 movq 0(%rsp), %rax 43 movq %rax, IP(%rdi)
|
/linux-4.1.27/arch/x86/net/ |
D | bpf_jit.S | 116 test %rax,%rax; \ 127 mov (%rax), %eax 137 mov (%rax),%ax 148 movzbl (%rax), %eax
|
/linux-4.1.27/Documentation/ |
D | static-keys.txt | 192 ffffffff81044299: 65 48 8b 04 25 c0 b6 mov %gs:0xb6c0,%rax 194 ffffffff810442a2: 48 8b 80 80 02 00 00 mov 0x280(%rax),%rax 195 ffffffff810442a9: 48 8b 80 b0 02 00 00 mov 0x2b0(%rax),%rax 196 ffffffff810442b0: 48 8b b8 e8 02 00 00 mov 0x2e8(%rax),%rdi 214 ffffffff810441fe: 65 48 8b 04 25 c0 b6 mov %gs:0xb6c0,%rax 216 ffffffff81044207: 48 8b 80 80 02 00 00 mov 0x280(%rax),%rax 217 ffffffff8104420e: 48 8b 80 b0 02 00 00 mov 0x2b0(%rax),%rax 218 ffffffff81044215: 48 8b b8 e8 02 00 00 mov 0x2e8(%rax),%rdi 227 ffffffff81044235: 66 66 2e 0f 1f 84 00 data32 nopw %cs:0x0(%rax,%rax,1)
|
D | kmemcheck.txt | 501 ffffffff8104edd5: mov %rax,%rdx
|
/linux-4.1.27/arch/x86/include/asm/ |
D | calling.h | 98 .macro SAVE_C_REGS_HELPER offset=0 rax=1 rcx=1 r8910=1 r11=1 107 .if \rax 108 movq_cfi rax, 10*8+\offset 173 movq_cfi_restore 10*8, rax
|
D | kexec.h | 191 uint64_t rax; member
|
D | svm.h | 164 u64 rax; member
|
D | inst.h | 74 .ifc \r64,%rax
|
D | paravirt.h | 872 COND_PUSH(set, CLBR_RAX, rax); \ 890 COND_POP(set, CLBR_RAX, rax)
|
/linux-4.1.27/arch/x86/xen/ |
D | xen-asm.S | 122 push %rax 140 pop %rax
|
D | xen-asm_64.S | 153 mov $-ENOSYS, %rax
|
/linux-4.1.27/arch/x86/crypto/sha-mb/ |
D | sha1_mb_mgr_flush_avx2.S | 78 #define job_rax %rax 79 #define tmp1 %rax 80 #define size_offset %rax 81 #define tmp %rax 82 #define start_offset %rax
|
D | sha1_mb_mgr_submit_avx2.S | 85 job_rax = %rax 86 len = %rax
|
D | sha1_x8_avx2.S | 61 ## Function clobbers: rax, rcx, rdx, rbx, rsi, rdi, r9-r15# ymm0-15 63 ## Linux clobbers: rax rbx rcx rdx rsi r9 r10 r11 r12 r13 r14 r15 212 IDX = %rax
|
/linux-4.1.27/arch/x86/include/uapi/asm/ |
D | ptrace.h | 59 unsigned long rax; member
|
D | sigcontext.h | 174 __u64 rax; member
|
D | kvm.h | 112 __u64 rax, rbx, rcx, rdx; member
|
/linux-4.1.27/arch/x86/kvm/ |
D | trace.h | 135 TP_PROTO(unsigned int function, unsigned long rax, unsigned long rbx, 137 TP_ARGS(function, rax, rbx, rcx, rdx), 141 __field( unsigned long, rax ) 149 __entry->rax = rax; 156 __entry->function, __entry->rax,
|
D | svm.c | 2348 nested_vmcb->save.rax = vmcb->save.rax; in nested_svm_vmexit() 2412 kvm_register_write(&svm->vcpu, VCPU_REGS_RAX, hsave->save.rax); in nested_svm_vmexit() 2485 vmcb_gpa = svm->vmcb->save.rax; in nested_svm_vmrun() 2487 nested_vmcb = nested_svm_map(svm, svm->vmcb->save.rax, &page); in nested_svm_vmrun() 2533 hsave->save.rax = vmcb->save.rax; in nested_svm_vmrun() 2573 kvm_register_write(&svm->vcpu, VCPU_REGS_RAX, nested_vmcb->save.rax); in nested_svm_vmrun() 2578 svm->vmcb->save.rax = nested_vmcb->save.rax; in nested_svm_vmrun() 2661 nested_vmcb = nested_svm_map(svm, svm->vmcb->save.rax, &page); in vmload_interception() 2682 nested_vmcb = nested_svm_map(svm, svm->vmcb->save.rax, &page); in vmsave_interception() 3481 "rsp:", save->rsp, "rax:", save->rax); in dump_vmcb() [all …]
|
D | emulate.c | 337 ON64(FOP1E(op##q, rax)) \ 366 ON64(FOP2E(op##q, rax, rdx)) \ 375 ON64(FOP2E(op##q, rax, rdx)) \ 384 ON64(FOP2E(op##q, rax, cl)) \ 393 ON64(FOP2E(op##q, rdx, rax)) \ 405 ON64(FOP3E(op##q, rax, rdx, cl)) \ 3687 u64 rax = reg_read(ctxt, VCPU_REGS_RAX); in check_svme_pa() local 3690 if (rax & 0xffff000000000000ULL) in check_svme_pa()
|
D | x86.c | 6661 regs->rax = kvm_register_read(vcpu, VCPU_REGS_RAX); in kvm_arch_vcpu_ioctl_get_regs() 6691 kvm_register_write(vcpu, VCPU_REGS_RAX, regs->rax); in kvm_arch_vcpu_ioctl_set_regs()
|
D | vmx.c | 8290 [rax]"i"(offsetof(struct vcpu_vmx, vcpu.arch.regs[VCPU_REGS_RAX])), in vmx_vcpu_run()
|
/linux-4.1.27/Documentation/virtual/kvm/ |
D | hypercalls.txt | 9 The hypercall number should be placed in rax and the return value will be 10 placed in rax. No other registers will be clobbered unless explicitly stated
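Note: a minimal guest-side sketch of the convention those lines describe — hypercall number in %rax, return value back in %rax. It assumes an Intel guest (vmcall); AMD guests use vmmcall, and the kernel's real kvm_hypercall*() helpers also pass arguments and patch the right instruction at runtime, so treat this as illustration only:

/* Hypothetical helper name; the "a" constraints bind %rax on x86-64. */
static inline long kvm_hypercall0_example(unsigned long nr)
{
	long ret;

	asm volatile("vmcall"
		     : "=a"(ret)	/* return value comes back in rax */
		     : "a"(nr)		/* hypercall number goes in rax */
		     : "memory");
	return ret;
}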
|
D | api.txt | 299 __u64 rax, rbx, rcx, rdx;
|
/linux-4.1.27/arch/x86/include/asm/xen/ |
D | interface_64.h | 83 uint64_t rax, r11, rcx, flags, rip, cs, rflags, rsp, ss; member
|
/linux-4.1.27/Documentation/networking/ |
D | filter.txt | 688 movq _f2(%rip), %rax 689 jmp *%rax 691 movq %rdi, %rax 692 subq %rsi, %rax 716 R0 - rax 762 mov %rax,%r13 769 add %r13,%rax 784 registers and place their return value into '%rax' which is R0 in eBPF.
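Note: the filter.txt hits document the x86-64 eBPF JIT register mapping — R0 maps onto %rax, so a JITed helper call's native return value in %rax is already the eBPF return register and needs no extra move. An illustrative table of that mapping (restated from the surrounding text of filter.txt, not generated from kernel headers):

struct reg_map { const char *ebpf; const char *x86_64; };

static const struct reg_map ebpf_x86_64_map[] = {
	{ "R0",  "rax" },	/* return value in both ABIs */
	{ "R1",  "rdi" },	/* first argument */
	{ "R2",  "rsi" },
	{ "R3",  "rdx" },
	{ "R4",  "rcx" },
	{ "R5",  "r8"  },
	{ "R6",  "rbx" },	/* callee-saved from here on */
	{ "R7",  "r13" },
	{ "R8",  "r14" },
	{ "R9",  "r15" },
	{ "R10", "rbp" },	/* read-only frame pointer */
};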
|
/linux-4.1.27/arch/x86/ |
D | Makefile | 150 asinstr := $(call as-instr,fxsaveq (%rax),-DCONFIG_AS_FXSAVEQ=1)
|
/linux-4.1.27/tools/perf/Documentation/ |
D | examples.txt | 205 83.42 : 31a2e95609: 48 3d 00 f0 ff ff cmp $0xfffffffffffff000,%rax
|