root/arch/x86/kernel/ftrace_64.S

/* [<][>][^][v][top][bottom][index][help] */
   1 /* SPDX-License-Identifier: GPL-2.0 */
   2 /*
   3  *  Copyright (C) 2014  Steven Rostedt, Red Hat Inc
   4  */
   5 
   6 #include <linux/linkage.h>
   7 #include <asm/ptrace.h>
   8 #include <asm/ftrace.h>
   9 #include <asm/export.h>
  10 #include <asm/nospec-branch.h>
  11 #include <asm/unwind_hints.h>
  12 #include <asm/frame.h>
  13 
   14         .code64
   15         .section .entry.text, "ax"
   16 
   17 # define function_hook  __fentry__
   18 EXPORT_SYMBOL(__fentry__)
   19 
   20 #ifdef CONFIG_FRAME_POINTER
   21 /* Save parent and function stack frames (rip and rbp) */
      /* 8 bytes for the original rbp push plus two 16-byte (rip, rbp) frame records */
   22 #  define MCOUNT_FRAME_SIZE     (8+16*2)
   23 #else
   24 /* No need to save a stack frame */
   25 # define MCOUNT_FRAME_SIZE      0
   26 #endif /* CONFIG_FRAME_POINTER */
   27 
   28 /* Size of stack used to save mcount regs in save_mcount_regs */
      /* SS is the offset of the last pt_regs field, so SS+8 is sizeof(pt_regs);
         the optional frame-pointer area sits above the pt_regs area */
   29 #define MCOUNT_REG_SIZE         (SS+8 + MCOUNT_FRAME_SIZE)
  31 /*
  32  * gcc -pg option adds a call to 'mcount' in most functions.
  33  * When -mfentry is used, the call is to 'fentry' and not 'mcount'
  34  * and is done before the function's stack frame is set up.
  35  * They both require a set of regs to be saved before calling
  36  * any C code and restored before returning back to the function.
  37  *
  38  * On boot up, all these calls are converted into nops. When tracing
  39  * is enabled, the call can jump to either ftrace_caller or
  40  * ftrace_regs_caller. Callbacks (tracing functions) that require
  41  * ftrace_regs_caller (like kprobes) need to have pt_regs passed to
  42  * it. For this reason, the size of the pt_regs structure will be
  43  * allocated on the stack and the required mcount registers will
  44  * be saved in the locations that pt_regs has them in.
  45  */
  46 
  47 /*
  48  * @added: the amount of stack added before calling this
  49  *
  50  * After this is called, the following registers contain:
  51  *
  52  *  %rdi - holds the address that called the trampoline
  53  *  %rsi - holds the parent function (traced function's return address)
  54  *  %rdx - holds the original %rbp
  55  */
   56 .macro save_mcount_regs added=0
 
      /*
       * Lay out a pt_regs save area (and, with frame pointers, two stack
       * frames) on the stack, then load the first two C parameters:
       * %rdi = address of the traced call site, %rsi = parent return address.
       * \added is the number of bytes the caller pushed after its return
       * address and before invoking this macro (e.g. 8 for a pushfq).
       */
   57 
   58 #ifdef CONFIG_FRAME_POINTER
   59         /* Save the original rbp */
   60         pushq %rbp
   61 
   62         /*
   63          * Stack traces will stop at the ftrace trampoline if the frame pointer
   64          * is not set up properly. If fentry is used, we need to save a frame
   65          * pointer for the parent as well as the function traced, because the
   66          * fentry is called before the stack frame is set up, where as mcount
   67          * is called afterward.
   68          */
   69 
   70         /* Save the parent pointer (skip orig rbp and our return address) */
   71         pushq \added+8*2(%rsp)
   72         pushq %rbp
   73         movq %rsp, %rbp
   74         /* Save the return address (now skip orig rbp, rbp and parent) */
   75         pushq \added+8*3(%rsp)
   76         pushq %rbp
   77         movq %rsp, %rbp
   78 #endif /* CONFIG_FRAME_POINTER */
   79 
   80         /*
   81          * We add enough stack to save all regs.
               * (MCOUNT_FRAME_SIZE bytes were already pushed above, so only the
               * pt_regs-sized part still needs to be allocated here.)
   82          */
   83         subq $(MCOUNT_REG_SIZE - MCOUNT_FRAME_SIZE), %rsp
   84         movq %rax, RAX(%rsp)
   85         movq %rcx, RCX(%rsp)
   86         movq %rdx, RDX(%rsp)
   87         movq %rsi, RSI(%rsp)
   88         movq %rdi, RDI(%rsp)
   89         movq %r8, R8(%rsp)
   90         movq %r9, R9(%rsp)
   91         /*
   92          * Save the original RBP. Even though the mcount ABI does not
   93          * require this, it helps out callers.
   94          */
   95 #ifdef CONFIG_FRAME_POINTER
   96         movq MCOUNT_REG_SIZE-8(%rsp), %rdx        /* orig rbp was pushed first above */
   97 #else
   98         movq %rbp, %rdx
   99 #endif
  100         movq %rdx, RBP(%rsp)
  101 
  102         /* Copy the parent address into %rsi (second parameter) */
  103         movq MCOUNT_REG_SIZE+8+\added(%rsp), %rsi
  104 
  105          /* Move RIP to its proper location */
  106         movq MCOUNT_REG_SIZE+\added(%rsp), %rdi
  107         movq %rdi, RIP(%rsp)
  108 
  109         /*
  110          * Now %rdi (the first parameter) has the return address of
  111          * where ftrace_call returns. But the callbacks expect the
  112          * address of the call itself.
  113          */
  114         subq $MCOUNT_INSN_SIZE, %rdi
  115         .endm
 116 
  117 .macro restore_mcount_regs
      /*
       * Undo save_mcount_regs: reload the argument registers from the
       * pt_regs area and pop the whole save area in one step — the addq
       * covers the frame-pointer records too, since MCOUNT_REG_SIZE
       * includes MCOUNT_FRAME_SIZE.
       */
  118         movq R9(%rsp), %r9
  119         movq R8(%rsp), %r8
  120         movq RDI(%rsp), %rdi
  121         movq RSI(%rsp), %rsi
  122         movq RDX(%rsp), %rdx
  123         movq RCX(%rsp), %rcx
  124         movq RAX(%rsp), %rax
  125 
  126         /* ftrace_regs_caller can modify %rbp */
  127         movq RBP(%rsp), %rbp
  128 
  129         addq $MCOUNT_REG_SIZE, %rsp
  130 
  131         .endm
 132 
 133 #ifdef CONFIG_DYNAMIC_FTRACE
 134 
  135 ENTRY(function_hook)
      /*
       * With DYNAMIC_FTRACE, the compiler-inserted __fentry__ call sites are
       * converted to nops at boot and redirected to ftrace_caller /
       * ftrace_regs_caller when tracing is enabled (see the header comment
       * above), so the __fentry__ symbol itself is just a return.
       */
  136         retq
  137 ENDPROC(function_hook)
 138 
  139 ENTRY(ftrace_caller)
      /*
       * Non-regs tracing trampoline: builds the mcount register save area,
       * calls the installed callback with (ip, parent_ip, ops, regs=NULL),
       * and returns to the traced function.
       */
  140         /* save_mcount_regs fills in first two parameters */
  141         save_mcount_regs
  142 
  143 GLOBAL(ftrace_caller_op_ptr)
  144         /* Load the ftrace_ops into the 3rd parameter */
  145         movq function_trace_op(%rip), %rdx
  146 
  147         /* regs go into 4th parameter (but make it NULL) */
  148         movq $0, %rcx
  149 
  150 GLOBAL(ftrace_call)
      /* NOTE(review): this call is presumably the site the ftrace core patches
       * at runtime to install the real callback — confirm against
       * arch/x86/kernel/ftrace.c */
  151         call ftrace_stub
  152 
  153         restore_mcount_regs
  154 
  155         /*
  156          * The code up to this label is copied into trampolines so
  157          * think twice before adding any new code or changing the
  158          * layout here.
  159          */
  160 GLOBAL(ftrace_epilogue)
  161 
  162 #ifdef CONFIG_FUNCTION_GRAPH_TRACER
  163 GLOBAL(ftrace_graph_call)
      /* NOTE(review): presumably patched to jump to ftrace_graph_caller when
       * graph tracing is enabled — confirm against the ftrace core */
  164         jmp ftrace_stub
  165 #endif
  166 
  167 /*
  168  * This is weak to keep gas from relaxing the jumps.
  169  * It is also used to copy the retq for trampolines.
  170  */
  171 WEAK(ftrace_stub)
  172         retq
  173 ENDPROC(ftrace_caller)
 174 
  175 ENTRY(ftrace_regs_caller)
      /*
       * Full pt_regs trampoline (used by callbacks such as kprobes that set
       * FTRACE_OPS_FL_SAVE_REGS): saves a complete pt_regs, passes it as the
       * 4th parameter, and honors any changes the handler makes to regs->ip,
       * regs->flags and the general-purpose registers.
       */
  176         /* Save the current flags before any operations that can change them */
  177         pushfq
  178 
  179         /* added 8 bytes to save flags */
  180         save_mcount_regs 8
  181         /* save_mcount_regs fills in first two parameters */
  182 
  183 GLOBAL(ftrace_regs_caller_op_ptr)
  184         /* Load the ftrace_ops into the 3rd parameter */
  185         movq function_trace_op(%rip), %rdx
  186 
  187         /* Save the rest of pt_regs */
  188         movq %r15, R15(%rsp)
  189         movq %r14, R14(%rsp)
  190         movq %r13, R13(%rsp)
  191         movq %r12, R12(%rsp)
  192         movq %r11, R11(%rsp)
  193         movq %r10, R10(%rsp)
  194         movq %rbx, RBX(%rsp)
  195         /* Copy saved flags */
  196         movq MCOUNT_REG_SIZE(%rsp), %rcx        /* the pushfq slot sits just above the save area */
  197         movq %rcx, EFLAGS(%rsp)
  198         /* Kernel segments */
  199         movq $__KERNEL_DS, %rcx
  200         movq %rcx, SS(%rsp)
  201         movq $__KERNEL_CS, %rcx
  202         movq %rcx, CS(%rsp)
  203         /* Stack - skipping return address and flags */
  204         leaq MCOUNT_REG_SIZE+8*2(%rsp), %rcx
  205         movq %rcx, RSP(%rsp)
  206 
  207         ENCODE_FRAME_POINTER
  208 
  209         /* regs go into 4th parameter */
  210         leaq (%rsp), %rcx
  211 
  212 GLOBAL(ftrace_regs_call)
      /* NOTE(review): presumably the patch site the ftrace core rewrites to
       * the real callback — confirm against arch/x86/kernel/ftrace.c */
  213         call ftrace_stub
  214 
  215         /* Copy flags back to SS, to restore them */
      /* (write back into the original pushfq slot so the popfq below picks up
       * any changes the handler made to regs->flags) */
  216         movq EFLAGS(%rsp), %rax
  217         movq %rax, MCOUNT_REG_SIZE(%rsp)
  218 
  219         /* Handlers can change the RIP */
  220         movq RIP(%rsp), %rax
  221         movq %rax, MCOUNT_REG_SIZE+8(%rsp)        /* overwrite the return-address slot */
  222 
  223         /* restore the rest of pt_regs */
      /* %r11 is deliberately not restored: it is call-clobbered, so the traced
       * function cannot rely on it anyway */
  224         movq R15(%rsp), %r15
  225         movq R14(%rsp), %r14
  226         movq R13(%rsp), %r13
  227         movq R12(%rsp), %r12
  228         movq R10(%rsp), %r10
  229         movq RBX(%rsp), %rbx
  230 
  231         restore_mcount_regs
  232 
  233         /* Restore flags */
  234         popfq
  235 
  236         /*
  237          * As this jmp to ftrace_epilogue can be a short jump
  238          * it must not be copied into the trampoline.
  239          * The trampoline will add the code to jump
  240          * to the return.
  241          */
  242 GLOBAL(ftrace_regs_caller_end)
  243 
  244         jmp ftrace_epilogue
  245 
  246 ENDPROC(ftrace_regs_caller)
 247 
 248 
 249 #else /* ! CONFIG_DYNAMIC_FTRACE */
 250 
  251 ENTRY(function_hook)
      /*
       * Without DYNAMIC_FTRACE every __fentry__ call lands here; dispatch by
       * comparing the tracer function pointers against their stubs.
       */
  252         cmpq $ftrace_stub, ftrace_trace_function
  253         jnz trace
  254 
  255 fgraph_trace:
  256 #ifdef CONFIG_FUNCTION_GRAPH_TRACER
  257         cmpq $ftrace_stub, ftrace_graph_return
  258         jnz ftrace_graph_caller
  259 
  260         cmpq $ftrace_graph_entry_stub, ftrace_graph_entry
  261         jnz ftrace_graph_caller
  262 #endif
  263 
  264 GLOBAL(ftrace_stub)
  265         retq
  266 
  267 trace:
  268         /* save_mcount_regs fills in first two parameters */
  269         save_mcount_regs
  270 
  271         /*
  272          * When DYNAMIC_FTRACE is not defined, ARCH_SUPPORTS_FTRACE_OPS is not
  273          * set (see include/asm/ftrace.h and include/linux/ftrace.h).  Only the
  274          * ip and parent ip are used and the list function is called when
  275          * function tracing is enabled.
  276          */
  277         movq ftrace_trace_function, %r8
  278         CALL_NOSPEC %r8        /* retpoline-safe indirect call (asm/nospec-branch.h) */
  279         restore_mcount_regs
  280 
  281         jmp fgraph_trace
  282 ENDPROC(function_hook)
 283 #endif /* CONFIG_DYNAMIC_FTRACE */
 284 
 285 #ifdef CONFIG_FUNCTION_GRAPH_TRACER
  286 ENTRY(ftrace_graph_caller)
      /*
       * Function-graph entry hook: hands prepare_ftrace_return the address of
       * the parent return-address slot so it can be hijacked to
       * return_to_handler.
       */
  287         /* Saves rbp into %rdx and fills first parameter  */
  288         save_mcount_regs
  289 
      /* 2nd parameter: address of the parent return-address slot on the stack */
  290         leaq MCOUNT_REG_SIZE+8(%rsp), %rsi
  291         movq $0, %rdx   /* No framepointers needed */
  292         call    prepare_ftrace_return
  293 
  294         restore_mcount_regs
  295 
  296         retq
  297 ENDPROC(ftrace_graph_caller)
 298 
  299 ENTRY(return_to_handler)
      /*
       * Traced functions "return" here when graph tracing hijacked their
       * return address. Preserve the function's return values (rax/rdx),
       * let the C handler record the exit and give back the original return
       * address, then jump to it.
       */
  300         UNWIND_HINT_EMPTY
  301         subq  $24, %rsp        /* 16 bytes for rax/rdx; presumably +8 for alignment — confirm */
  302 
  303         /* Save the return values */
  304         movq %rax, (%rsp)
  305         movq %rdx, 8(%rsp)
  306         movq %rbp, %rdi
  307 
  308         call ftrace_return_to_handler
  309 
      /* %rax now holds the original return address */
  310         movq %rax, %rdi
  311         movq 8(%rsp), %rdx
  312         movq (%rsp), %rax
  313         addq $24, %rsp
  314         JMP_NOSPEC %rdi        /* retpoline-safe jump back to the original caller */
  315 END(return_to_handler)
 316 #endif

/* [<][>][^][v][top][bottom][index][help] */