Lines matching refs: rsp
(Each match is prefixed with its line number in the source; the code is the x86-64 kernel entry assembly, entry_64.S.)
58 bt $9, EFLAGS(%rsp) /* interrupts off? */
91 bt $9, EFLAGS(%rsp) /* interrupts off? */
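Both of these matches test bit 9 of the RFLAGS value saved in pt_regs: bit 9 is IF, so the result of the bt (in CF) says whether interrupts were enabled in the context being returned to. A minimal sketch, assuming the usual pt_regs layout (the kernel generates the real EFLAGS offset via asm-offsets):

	.equ EFLAGS, 18*8		/* assumed pt_regs offset of the saved RFLAGS */

	.text
irqs_were_on_sketch:
	bt	$9, EFLAGS(%rsp)	/* CF := saved IF (RFLAGS bit 9) */
	jnc	1f			/* bit clear: interrupts were off */
	/* interrupts were enabled in the interrupted context */
1:	ret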
145 movq %rsp, PER_CPU_VAR(rsp_scratch)
146 movq PER_CPU_VAR(cpu_current_top_of_stack), %rsp
172 sub $(6*8), %rsp /* pt_regs->bp, bx, r12-15 not saved */
174 testl $_TIF_WORK_SYSCALL_ENTRY, ASM_THREAD_INFO(TI_flags, %rsp, SIZEOF_PTREGS)
186 movq %rax, RAX(%rsp)
207 testl $_TIF_ALLWORK_MASK, ASM_THREAD_INFO(TI_flags, %rsp, SIZEOF_PTREGS)
211 movq RIP(%rsp), %rcx
212 movq EFLAGS(%rsp), %r11
213 movq RSP(%rsp), %rsp
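Lines 145-213 are the 64-bit SYSCALL entry and its fast return path: the user stack pointer is parked in a per-CPU scratch slot, %rsp is pointed at the task's kernel stack, a pt_regs frame is built (the six callee-saved slots are only allocated, not filled), the syscall's return value is stored into pt_regs->ax, and if no work flags are pending the saved RIP/RFLAGS/RSP are pulled back into %rcx/%r11/%rsp for SYSRET. A minimal sketch of that return sequence, assuming the usual pt_regs offsets (the kernel generates the real ones in asm-offsets.c):

	.equ RIP,    16*8		/* assumed pt_regs offsets */
	.equ EFLAGS, 18*8
	.equ RSP,    19*8

	.text
sysret_fast_path_sketch:
	movq	RIP(%rsp), %rcx		/* SYSRET reloads RIP from %rcx */
	movq	EFLAGS(%rsp), %r11	/* ... and RFLAGS from %r11 */
	movq	RSP(%rsp), %rsp		/* last stack access: back onto the user stack */
	swapgs				/* restore the user GS base */
	sysretq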
238 movq %rsp, %rdi
244 movq ORIG_RAX(%rsp), %rax
249 movq %rsp, %rdi
270 movq %rax, RAX(%rsp)
280 movq %rsp, %rdi
289 movq RCX(%rsp), %rcx
290 movq RIP(%rsp), %r11
314 cmpq $__USER_CS, CS(%rsp) /* CS must match SYSRET */
317 movq R11(%rsp), %r11
318 cmpq %r11, EFLAGS(%rsp) /* R11 == RFLAGS */
340 cmpq $__USER_DS, SS(%rsp) /* SS must match SYSRET */
350 movq RSP(%rsp), %rsp
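Lines 289-350 are the "opportunistic SYSRET" checks on the slower return path: SYSRET may replace IRET only if it would not change user-visible state, so the saved RCX must equal the saved RIP (after SYSRET the user's RCX equals the return RIP), R11 must equal the saved RFLAGS (SYSRET reloads RFLAGS from R11), and CS/SS must still be the flat user selectors; any mismatch falls back to IRET. RSP(%rsp) is deliberately the very last load (line 350), because once %rsp points at user memory the kernel stack must not be touched again. A sketch of the checks, with assumed pt_regs offsets and assumed selector values (it omits the canonical-RIP and RF/TF tests the real code also makes):

	.equ R11,    6*8		/* assumed pt_regs offsets */
	.equ RCX,    11*8
	.equ RIP,    16*8
	.equ CS,     17*8
	.equ EFLAGS, 18*8
	.equ SS,     20*8
	.equ __USER_CS, 0x33		/* assumed selector values */
	.equ __USER_DS, 0x2b

	.text
opportunistic_sysret_sketch:
	movq	RCX(%rsp), %rcx
	movq	RIP(%rsp), %r11
	cmpq	%rcx, %r11		/* after SYSRET, user RCX == return RIP */
	jne	1f
	cmpq	$__USER_CS, CS(%rsp)	/* CS must match what SYSRET will load */
	jne	1f
	movq	R11(%rsp), %r11
	cmpq	%r11, EFLAGS(%rsp)	/* after SYSRET, user R11 == RFLAGS */
	jne	1f
	cmpq	$__USER_DS, SS(%rsp)	/* SS must match what SYSRET will load */
	jne	1f
	movq	RSP(%rsp), %rsp		/* safe: nothing is read from the old stack after this */
	/* %rcx and %r11 already hold the return RIP and RFLAGS */
	swapgs
	sysretq
1:	ret				/* any mismatch: take the full IRET path instead */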
379 addq $8, %rsp
381 movq %rax, RAX(%rsp)
422 addq $8, %rsp
424 movq %rax, RAX(%rsp)
452 testb $3, CS(%rsp) /* from kernel_thread? */
469 movl $0, RAX(%rsp)
504 testb $3, CS(%rsp)
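Lines 452-504 test the low two bits of the saved CS, i.e. the privilege level of the interrupted context; the same idiom recurs at lines 563, 745, 1052 and 1196. In ret_from_fork it distinguishes a new user task from a kernel thread: the kernel thread's function is called first, and if it returns, the pt_regs->ax slot is zeroed (line 469) before leaving through the normal syscall-exit path. A minimal sketch of the CPL test, with an assumed CS offset:

	.equ CS, 17*8			/* assumed pt_regs offset of the saved CS */

	.text
came_from_user_sketch:
	testb	$3, CS(%rsp)		/* low two bits of CS = CPL of the saved context */
	jz	1f			/* 0: kernel code (or a kernel thread) */
	/* 3: the frame belongs to user mode */
1:	ret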
535 movq %rsp, %rdi
537 cmovzq PER_CPU_VAR(irq_stack_ptr), %rsp
552 addq $-0x80, (%rsp) /* Adjust vector to [-256, -1] range */
561 popq %rsp
563 testb $3, CS(%rsp)
568 mov %rsp,%rdi
579 bt $9, EFLAGS(%rsp) /* were interrupts off? */
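Lines 535-579 are the common interrupt path: %rdi is loaded with the pt_regs pointer, a per-CPU nesting counter is bumped and, only when this is the outermost interrupt, %rsp is moved onto the per-CPU IRQ stack with cmovzq; the old stack pointer is pushed so that popq %rsp (line 561) can switch straight back, and addq $-0x80, (%rsp) (line 552) rewrites the pushed vector into the negative range the C code uses to tell interrupts apart from system calls. A sketch of the conditional stack switch; the per-CPU variables and the handler are stand-ins here (the kernel reaches them %gs-relative through PER_CPU_VAR):

	.data
irq_count:	.long	-1		/* -1 = not inside an interrupt */
irq_stack_ptr:	.quad	0		/* top of this CPU's IRQ stack (set up at boot) */

	.text
irq_handler_sketch:			/* stand-in for the real C handler */
	ret

irq_enter_sketch:
	movq	%rsp, %rdi			/* pt_regs pointer = first C argument */
	incl	irq_count(%rip)			/* ZF is set only for the outermost interrupt */
	cmovzq	irq_stack_ptr(%rip), %rsp	/* outermost: run the handler on the IRQ stack */
	pushq	%rdi				/* remember where we came from */
	call	irq_handler_sketch
	decl	irq_count(%rip)
	popq	%rsp				/* back onto the interrupted stack */
	ret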
609 testb $4, (SS-RIP)(%rsp)
630 movq (2*8)(%rsp), %rax /* RIP */
632 movq (3*8)(%rsp), %rax /* CS */
634 movq (4*8)(%rsp), %rax /* RFLAGS */
636 movq (6*8)(%rsp), %rax /* SS */
638 movq (5*8)(%rsp), %rax /* RSP */
644 movq %rax, %rsp
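Lines 609-644 are the IRET fixup for LDT stack segments (the espfix path): bit 2 of a selector is its TI bit, so the testb $4 at line 609 asks whether the saved SS refers to the LDT; if so, the five-word IRET frame is copied onto a specially mapped per-CPU espfix stack (the 2*8..6*8 offsets at lines 630-638 account for the two scratch registers pushed on top of the frame) and %rsp is pointed at that copy (line 644) before the real IRET, so the 16-bit ESP truncation IRET performs for such segments cannot expose kernel stack bits. A minimal sketch of the selector test, assuming %rsp points directly at the IRET frame (RIP, CS, RFLAGS, RSP, SS):

	.text
iret_ss_check_sketch:
	testb	$4, 4*8(%rsp)		/* saved SS: TI bit set means an LDT selector */
	jnz	1f			/* needs the espfix copy-and-switch */
	iretq				/* GDT SS: a plain IRET is fine */
1:	/* copy RIP/CS/RFLAGS/RSP/SS to the espfix stack, retarget %rsp, then IRET */
	iretq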
745 testb $3, CS(%rsp) /* If coming from userspace, switch stacks */
762 movq %rsp, %rdi /* pt_regs pointer */
765 movq ORIG_RAX(%rsp), %rsi /* get error code */
766 movq $-1, ORIG_RAX(%rsp) /* no syscall to restart */
798 movq %rsp, %rdi /* pt_regs pointer */
800 movq %rax, %rsp /* switch stack */
802 movq %rsp, %rdi /* pt_regs pointer */
805 movq ORIG_RAX(%rsp), %rsi /* get error code */
806 movq $-1, ORIG_RAX(%rsp) /* no syscall to restart */
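Lines 745-806 come from the idtentry macro: exception handlers are C functions taking (struct pt_regs *, unsigned long error_code), so %rdi gets the frame pointer and, for vectors that push a hardware error code, %rsi gets the value that was stashed in the orig_ax slot; the slot is then overwritten with -1 so the signal/ptrace code never mistakes the frame for an interruptible system call. Lines 798-802 are the paranoid variant, where a helper call first returns a relocated pt_regs in %rax and that becomes the new stack (movq %rax, %rsp) before the handler runs. A sketch of the argument setup, with an assumed ORIG_RAX offset and a hypothetical handler name:

	.equ ORIG_RAX, 15*8		/* assumed pt_regs offset of orig_ax */

	.text
do_exception_sketch:			/* hypothetical stand-in for the C handler */
	ret

idtentry_call_sketch:			/* %rsp points at pt_regs */
	movq	%rsp, %rdi		/* arg 1: struct pt_regs * */
	movq	ORIG_RAX(%rsp), %rsi	/* arg 2: hardware error code */
	movq	$-1, ORIG_RAX(%rsp)	/* mark "not a syscall": nothing to restart */
	call	do_exception_sketch
	ret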
873 mov %rsp, %rbp
875 cmove PER_CPU_VAR(irq_stack_ptr), %rsp
905 movq %rdi, %rsp /* we don't return, adjust the stack frame */
907 movq %rsp, %rbp
908 cmovzq PER_CPU_VAR(irq_stack_ptr), %rsp
911 popq %rsp
934 cmpw %cx, 0x10(%rsp)
937 cmpw %cx, 0x18(%rsp)
940 cmpw %cx, 0x20(%rsp)
943 cmpw %cx, 0x28(%rsp)
946 movq (%rsp), %rcx
947 movq 8(%rsp), %r11
948 addq $0x30, %rsp
954 movq (%rsp), %rcx
955 movq 8(%rsp), %r11
956 addq $0x30, %rsp
1052 testb $3, CS+8(%rsp)
1087 cmpq %rcx, RIP+8(%rsp)
1090 cmpq %rax, RIP+8(%rsp)
1092 cmpq $gs_change, RIP+8(%rsp)
1104 movq %rcx, RIP+8(%rsp)
1119 mov %rsp, %rdi
1121 mov %rax, %rsp
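Lines 1052-1121 are error_entry and its bad-IRET recovery. error_entry is reached with call, so every pt_regs offset carries an extra +8 for the return address sitting on top of the frame; the RIP comparisons at lines 1087-1092 recognize faults taken by the IRET and SWAPGS (gs_change) code itself, and fixup_bad_iret hands back a repaired frame in %rax, which becomes the new stack pointer at line 1121. A minimal sketch of the +8 idiom, with an assumed CS offset:

	.equ CS, 17*8			/* assumed pt_regs offset of the saved CS */

	.text
error_entry_sketch:			/* reached with CALL: a return address sits on top of pt_regs */
	testb	$3, CS+8(%rsp)		/* +8 skips our own return address */
	jz	1f
	/* the fault came from user mode: swapgs, move off the entry stack, ... */
1:	ret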
1196 testb $3, CS-RIP+8(%rsp)
1212 movq %rsp, %rdx
1213 movq PER_CPU_VAR(cpu_current_top_of_stack), %rsp
1242 movq %rsp, %rdi
1308 cmpq 8(%rsp), %rdx
1311 cmpq 8(%rsp), %rdx
1320 cmpl $1, -8(%rsp)
1335 lea 6*8(%rsp), %rdx
1337 cmpq %rdx, 4*8(%rsp)
1342 cmpq %rdx, 4*8(%rsp)
1348 testb $(X86_EFLAGS_DF >> 8), (3*8 + 1)(%rsp)
1358 subq $8, %rsp
1359 leaq -10*8(%rsp), %rdx
1367 addq $(6*8), %rsp
1377 movq (%rsp), %rdx
1383 subq $(5*8), %rsp
1387 pushq 11*8(%rsp)
1398 pushq %rsp /* RSP (minus 8 because of the previous push) */
1399 addq $8, (%rsp) /* Fix up RSP */
1423 movq $1, 10*8(%rsp) /* Set "NMI executing". */
1430 addq $(10*8), %rsp
1432 pushq -6*8(%rsp)
1434 subq $(5*8), %rsp
1455 movq %rsp, %rdi
1479 movq $0, 5*8(%rsp) /* clear "NMI executing" */
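Everything from line 1196 on belongs to the NMI handler, which must cope with NMIs nesting over themselves (a breakpoint or fault inside an NMI handler re-enables NMIs when it IRETs). An NMI from user mode (line 1196) simply switches to the normal kernel stack (lines 1212-1213) and calls the C handler with the pt_regs pointer (line 1242). An NMI from kernel mode keeps a one-word "NMI executing" flag plus a spare copy of its IRET frame on the NMI stack: lines 1308-1348 decide whether we interrupted the NMI handler itself (by the interrupted RIP, by the "NMI executing" word at line 1320, and by whether the interrupted RSP lies within the NMI stack), where the DF test at line 1348 filters out the case of user space merely pointing RSP at that range (SYSCALL masks DF, while the outer NMI's exit path runs its final stretch with DF set); lines 1377-1434 build and refresh the duplicated frame so a nested NMI can make the outer one run again; line 1423 sets the flag and line 1479 clears it just before the final IRET. A sketch of the stack-range test only, with an assumed stack size and the same one-scratch-register-on-top frame layout as the real code:

	.equ NMI_STACK_SIZE, 4096	/* assumed; the real code subtracts the exception-stack size */

	.text
nested_nmi_check_sketch:		/* one scratch register pushed on top of the IRET frame */
	lea	6*8(%rsp), %rdx		/* just above the IRET frame: the NMI stack top */
	cmpq	%rdx, 4*8(%rsp)		/* interrupted RSP above the stack top? not nested */
	ja	1f
	subq	$NMI_STACK_SIZE, %rdx
	cmpq	%rdx, 4*8(%rsp)		/* below the stack bottom? not nested either */
	jb	1f
	testb	$(0x400 >> 8), (3*8 + 1)(%rsp)	/* saved DF clear: RSP was user controlled */
	jz	1f
	/* genuinely nested: redirect the outer NMI to its repeat path, then return */
1:	ret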