orig_sp 818 arch/sparc/kernel/irq_64.c void *orig_sp;
orig_sp 836 arch/sparc/kernel/irq_64.c orig_sp = set_hardirq_stack();
orig_sp 851 arch/sparc/kernel/irq_64.c restore_hardirq_stack(orig_sp);
orig_sp 859 arch/sparc/kernel/irq_64.c void *orig_sp, *sp = softirq_stack[smp_processor_id()];
orig_sp 865 arch/sparc/kernel/irq_64.c : "=&r" (orig_sp)
orig_sp 869 arch/sparc/kernel/irq_64.c : : "r" (orig_sp));
orig_sp 67 arch/sparc/kernel/kstack.h void *orig_sp, *sp = hardirq_stack[smp_processor_id()];
orig_sp 69 arch/sparc/kernel/kstack.h __asm__ __volatile__("mov %%sp, %0" : "=r" (orig_sp));
orig_sp 70 arch/sparc/kernel/kstack.h if (orig_sp < sp ||
orig_sp 71 arch/sparc/kernel/kstack.h orig_sp > (sp + THREAD_SIZE)) {
orig_sp 76 arch/sparc/kernel/kstack.h return orig_sp;
orig_sp 79 arch/sparc/kernel/kstack.h static inline __attribute__((always_inline)) void restore_hardirq_stack(void *orig_sp)
orig_sp 81 arch/sparc/kernel/kstack.h __asm__ __volatile__("mov %0, %%sp" : : "r" (orig_sp));
orig_sp 85 arch/sparc/kernel/nmi.c void *orig_sp;
orig_sp 93 arch/sparc/kernel/nmi.c orig_sp = set_hardirq_stack();
orig_sp 120 arch/sparc/kernel/nmi.c restore_hardirq_stack(orig_sp);
orig_sp 543 arch/sparc/kernel/process_64.c unsigned long sp, orig_sp;
orig_sp 545 arch/sparc/kernel/process_64.c orig_sp = sp = t->rwbuf_stkptrs[window];
orig_sp 561 arch/sparc/kernel/process_64.c sp, orig_sp,
orig_sp 25 arch/x86/include/asm/unwind.h unsigned long *bp, *orig_sp, ip;
orig_sp 48 arch/x86/kernel/unwind_frame.c for (sp = PTR_ALIGN(state->orig_sp, sizeof(long)); sp;
orig_sp 230 arch/x86/kernel/unwind_frame.c if (state->orig_sp && state->stack_info.type == prev_type &&
orig_sp 254 arch/x86/kernel/unwind_frame.c if (!state->orig_sp)
orig_sp 255 arch/x86/kernel/unwind_frame.c state->orig_sp = frame;
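
For context, the arch/sparc/kernel/kstack.h hits above (lines 67-81) belong to the sparc64 helper pair that switches onto the per-cpu hardirq stack and later restores the caller's stack pointer. Below is a hedged reconstruction of that pair from the listed fragments, not the verbatim in-tree source: hardirq_stack[], smp_processor_id() and THREAD_SIZE come from the surrounding kernel headers, and the body of the if-block (elided from the listing because those lines do not mention orig_sp) is deliberately simplified.

static inline __attribute__((always_inline)) void *set_hardirq_stack(void)
{
	void *orig_sp, *sp = hardirq_stack[smp_processor_id()];

	/* Remember the stack pointer we entered with. */
	__asm__ __volatile__("mov %%sp, %0" : "=r" (orig_sp));

	/* Switch only if we are not already inside this CPU's hardirq stack. */
	if (orig_sp < sp ||
	    orig_sp > (sp + THREAD_SIZE)) {
		/*
		 * Simplification in this sketch: jump to the top of the
		 * hardirq stack region.  The in-tree helper additionally
		 * reserves the sparc64 register save area and applies
		 * STACK_BIAS before switching %sp.
		 */
		sp += THREAD_SIZE;
		__asm__ __volatile__("mov %0, %%sp" : : "r" (sp));
	}

	/* Handed back to the caller for restore_hardirq_stack(). */
	return orig_sp;
}

static inline __attribute__((always_inline)) void restore_hardirq_stack(void *orig_sp)
{
	/* Unconditionally put %sp back where set_hardirq_stack() found it. */
	__asm__ __volatile__("mov %0, %%sp" : : "r" (orig_sp));
}

The callers in irq_64.c (lines 836/851) and nmi.c (lines 93/120) use the returned value as a bracket around the handler: orig_sp = set_hardirq_stack(); the IRQ/NMI work runs on the hardirq stack; restore_hardirq_stack(orig_sp) then puts %sp back.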