/* SPDX-License-Identifier: GPL-2.0
 *
 * arch/sh/lib/mcount.S
 *
 *  Copyright (C) 2008, 2009  Paul Mundt
 *  Copyright (C) 2008, 2009  Matt Fleming
 */
#include <asm/ftrace.h>
#include <asm/thread_info.h>
#include <asm/asm-offsets.h>

#define MCOUNT_ENTER()          \
        mov.l   r4, @-r15;      \
        mov.l   r5, @-r15;      \
        mov.l   r6, @-r15;      \
        mov.l   r7, @-r15;      \
        sts.l   pr, @-r15;      \
                                \
        mov.l   @(20,r15),r4;   \
        sts     pr, r5

#define MCOUNT_LEAVE()          \
        lds.l   @r15+, pr;      \
        mov.l   @r15+, r7;      \
        mov.l   @r15+, r6;      \
        mov.l   @r15+, r5;      \
        rts;                    \
         mov.l  @r15+, r4

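/*
 * MCOUNT_ENTER() saves the SH ABI's caller-saved argument registers
 * r4-r7 plus pr (five words, 20 bytes), then builds the tracer's two
 * arguments: r4 gets the word that was on top of the stack on entry
 * (the return address the profiled function pushed before calling
 * mcount) and r5 gets pr, the return site inside the profiled
 * function itself. MCOUNT_LEAVE() undoes all of this, popping r4 in
 * the rts delay slot.
 *
 * The hook called through r6 below is, roughly, a C function of the
 * classic mcount shape (a sketch; the authoritative typedef is the
 * generic ftrace_func_t):
 *
 *	void tracer(unsigned long ip, unsigned long parent_ip);
 */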
#ifdef CONFIG_STACK_DEBUG
/*
 * Perform diagnostic checks on the state of the kernel stack.
 *
 * Check for stack overflow. If there is less than 1KB free
 * then it has overflowed.
 *
 * Make sure the stack pointer contains a valid address. Valid
 * addresses for kernel stacks are anywhere after the bss
 * (after __bss_stop) and anywhere in init_thread_union (init_stack).
 */
#define STACK_CHECK()                                   \
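        /* mov #imm is only 8 bits wide, so rebuild THREAD_SIZE with shifts */ \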
        mov     #(THREAD_SIZE >> 10), r0;               \
        shll8   r0;                                     \
        shll2   r0;                                     \
                                                        \
        /* r1 = sp & (THREAD_SIZE - 1) */               \
        mov     #-1, r1;                                \
        add     r0, r1;                                 \
        and     r15, r1;                                \
                                                        \
        mov     #TI_SIZE, r3;                           \
        mov     #(STACK_WARN >> 8), r2;                 \
        shll8   r2;                                     \
        add     r3, r2;                                 \
                                                        \
        /* Is the stack overflowing? */                 \
        cmp/hi  r2, r1;                                 \
        bf      stack_panic;                            \
                                                        \
        /* If sp > __bss_stop then we're OK. */         \
        mov.l   .L_ebss, r1;                            \
        cmp/hi  r1, r15;                                \
        bt      1f;                                     \
                                                        \
        /* If sp < init_stack, we're not OK. */         \
        mov.l   .L_init_thread_union, r1;               \
        cmp/hs  r1, r15;                                \
        bf      stack_panic;                            \
                                                        \
        /* If sp is past the init stack's end (init_stack +      */  \
        /* THREAD_SIZE) yet still below __bss_stop, not OK.      */  \
        add     r0, r1;                                 \
        cmp/hs  r1, r15;                                \
        bt      stack_panic;                            \
1:
#else
#define STACK_CHECK()
#endif /* CONFIG_STACK_DEBUG */
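
/*
 * In rough C terms, STACK_CHECK() above does something like the
 * following sketch (illustration only; sp stands for r15 and
 * init_stack for init_thread_union, matching the labels used above):
 *
 *	unsigned long left = sp & (THREAD_SIZE - 1);
 *
 *	if (left <= STACK_WARN + TI_SIZE)
 *		stack_panic();		// almost no stack left
 *
 *	if (sp <= __bss_stop) {
 *		// not above the bss, so it must be inside the init stack
 *		if (sp < init_stack || sp >= init_stack + THREAD_SIZE)
 *			stack_panic();
 *	}
 */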

        .align 2
        .globl  _mcount
        .type   _mcount,@function
        .globl  mcount
        .type   mcount,@function
_mcount:
mcount:
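        /*
         * Both names are provided because toolchains differ on whether
         * the -pg profiling call is emitted as mcount or _mcount. Note
         * that the stack sanity check runs on every mcount call, even
         * when CONFIG_FUNCTION_TRACER is disabled: with
         * CONFIG_STACK_DEBUG the -pg hook exists essentially to feed
         * STACK_CHECK().
         */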
        STACK_CHECK()

#ifndef CONFIG_FUNCTION_TRACER
        rts
         nop
#else
        MCOUNT_ENTER()

#ifdef CONFIG_DYNAMIC_FTRACE
        .globl  mcount_call
mcount_call:
        mov.l   .Lftrace_stub, r6
#else
        mov.l   .Lftrace_trace_function, r6
        mov.l   ftrace_stub, r7
        cmp/eq  r6, r7
        bt      skip_trace
        mov.l   @r6, r6
#endif

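        /*
         * r6 now holds the tracer to invoke. Under dynamic ftrace the
         * literal load at mcount_call is a fixed patch site that starts
         * out pointing at ftrace_stub; in the static case the
         * ftrace_trace_function hook is effectively checked against
         * ftrace_stub so that an untraced kernel can branch straight to
         * skip_trace rather than make the indirect call.
         */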
        jsr     @r6
         nop

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
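        /*
         * Graph tracing is live once either of the generic
         * ftrace_graph_return/ftrace_graph_entry hooks has been pointed
         * away from its stub; in that case hand off to
         * ftrace_graph_caller, otherwise fall through to skip_trace.
         */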
        mov.l   .Lftrace_graph_return, r6
        mov.l   .Lftrace_stub, r7
        cmp/eq  r6, r7
        bt      1f

        mov.l   .Lftrace_graph_caller, r0
        jmp     @r0
         nop

1:
        mov.l   .Lftrace_graph_entry, r6
        mov.l   .Lftrace_graph_entry_stub, r7
        cmp/eq  r6, r7
        bt      skip_trace

        mov.l   .Lftrace_graph_caller, r0
        jmp     @r0
         nop

        .align 2
.Lftrace_graph_return:
        .long   ftrace_graph_return
.Lftrace_graph_entry:
        .long   ftrace_graph_entry
.Lftrace_graph_entry_stub:
        .long   ftrace_graph_entry_stub
.Lftrace_graph_caller:
        .long   ftrace_graph_caller
#endif /* CONFIG_FUNCTION_GRAPH_TRACER */

        .globl skip_trace
skip_trace:
        MCOUNT_LEAVE()

        .align 2
.Lftrace_trace_function:
        .long   ftrace_trace_function

#ifdef CONFIG_DYNAMIC_FTRACE
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
/*
 * NOTE: Do not move either ftrace_graph_call or ftrace_caller
 * as this will affect the calculation of GRAPH_INSN_OFFSET.
 */
        .globl ftrace_graph_call
ftrace_graph_call:
        mov.l   .Lskip_trace, r0
        jmp     @r0
         nop

        .align 2
.Lskip_trace:
        .long   skip_trace
#endif /* CONFIG_FUNCTION_GRAPH_TRACER */

        .globl ftrace_caller
ftrace_caller:
        MCOUNT_ENTER()

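        /*
         * ftrace_call is the dynamic ftrace patch site: mcount call
         * sites are NOPed out at boot and rewritten to call
         * ftrace_caller while tracing is on, and the tracer loaded
         * below is kept current by ftrace_update_ftrace_func() in
         * arch/sh/kernel/ftrace.c.
         */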
        .globl ftrace_call
ftrace_call:
        mov.l   .Lftrace_stub, r6
        jsr     @r6
         nop

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
        bra     ftrace_graph_call
         nop
#else
        MCOUNT_LEAVE()
#endif /* CONFIG_FUNCTION_GRAPH_TRACER */
#endif /* CONFIG_DYNAMIC_FTRACE */

        .align 2

/*
 * NOTE: From here on the locations of the .Lftrace_stub label and
 * ftrace_stub itself are fixed. Adding additional data here will skew
 * the displacement for the memory table and break the block replacement.
 * Place new labels either after the ftrace_stub body, or before
 * ftrace_caller. You have been warned.
 */
.Lftrace_stub:
        .long   ftrace_stub

        .globl  ftrace_stub
ftrace_stub:
        rts
         nop

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
        .globl  ftrace_graph_caller
ftrace_graph_caller:
        mov.l   2f, r1
        jmp     @r1
         nop
1:
        /*
         * MCOUNT_ENTER() pushed 5 registers onto the stack, so
         * the stack address containing our return address is
         * r15 + 20.
         */
        mov     #20, r0
        add     r15, r0
        mov     r0, r4

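        /*
         * prepare_ftrace_return() in arch/sh/kernel/ftrace.c records
         * the original return address and swaps the stack slot that r4
         * now points at over to return_to_handler. Roughly (a sketch of
         * the arch convention, not a definitive prototype):
         *
         *	void prepare_ftrace_return(unsigned long *parent,
         *				   unsigned long self_addr);
         */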
        mov.l   .Lprepare_ftrace_return, r0
        jsr     @r0
         nop

        MCOUNT_LEAVE()

        .align 2
2:      .long   skip_trace
.Lprepare_ftrace_return:
        .long   prepare_ftrace_return

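/*
 * Any function whose return address was hijacked by
 * prepare_ftrace_return() comes back here instead of to its real
 * caller. ftrace_return_to_handler() pops the real address off the
 * return stack; the traced function's r0/r1 return values must be
 * preserved around that call.
 */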
        .globl  return_to_handler
return_to_handler:
        /*
         * Save the return values.
         */
        mov.l   r0, @-r15
        mov.l   r1, @-r15

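        /*
         * First argument: frame pointer for the optional
         * HAVE_FUNCTION_GRAPH_FP_TEST sanity check. sh doesn't
         * implement that, so pass 0.
         */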
        mov     #0, r4

        mov.l   .Lftrace_return_to_handler, r0
        jsr     @r0
         nop

        /*
         * The return value from ftrace_return_to_handler is the real
         * address that we should return to.
         */
        lds     r0, pr
        mov.l   @r15+, r1
        rts
         mov.l  @r15+, r0


        .align 2
.Lftrace_return_to_handler:
        .long   ftrace_return_to_handler
#endif /* CONFIG_FUNCTION_GRAPH_TRACER */
#endif /* CONFIG_FUNCTION_TRACER */

#ifdef CONFIG_STACK_DEBUG
        .globl  stack_panic
stack_panic:
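        /*
         * In C terms this is simply:
         *
         *	dump_stack();
         *	panic("Stack error");
         */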
        mov.l   .Ldump_stack, r0
        jsr     @r0
         nop

        mov.l   .Lpanic, r0
        jsr     @r0
         mov.l  .Lpanic_s, r4

        rts
         nop

        .align 2
.L_init_thread_union:
        .long   init_thread_union
.L_ebss:
        .long   __bss_stop
.Lpanic:
        .long   panic
.Lpanic_s:
        .long   .Lpanic_str
.Ldump_stack:
        .long   dump_stack

        .section        .rodata
        .align 2
.Lpanic_str:
        .string "Stack error"
#endif /* CONFIG_STACK_DEBUG */
