arch/ia64/kernel/minstate.h

/* SPDX-License-Identifier: GPL-2.0 */

#include <asm/cache.h>

#include "entry.h"
#include <asm/native/inst.h>

#ifdef CONFIG_VIRT_CPU_ACCOUNTING_NATIVE
/* read ar.itc in advance, and use it before leaving bank 0 */
#define ACCOUNT_GET_STAMP                               \
(pUStk) mov.m r20=ar.itc;
#define ACCOUNT_SYS_ENTER                               \
(pUStk) br.call.spnt rp=account_sys_enter               \
        ;;
#else
#define ACCOUNT_GET_STAMP
#define ACCOUNT_SYS_ENTER
#endif
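
/*
 * With CONFIG_VIRT_CPU_ACCOUNTING_NATIVE enabled, ACCOUNT_GET_STAMP snapshots
 * ar.itc into r20 while still in bank 0 and ACCOUNT_SYS_ENTER branch-calls the
 * accounting hook account_sys_enter(); with it disabled, both macros expand
 * to nothing.
 */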

.section ".data..patch.rse", "a"
.previous
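
/*
 * ".data..patch.rse" (declared above) collects the patch site tagged with
 * .xdata4 in RSE_WORKAROUND below, so that boot-time code patching can
 * disable the workaround on processors that do not need it.
 */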

/*
 * DO_SAVE_MIN switches to the kernel stacks (if necessary) and saves
 * the minimum state necessary that allows us to turn psr.ic back
 * on.
 *
 * Assumed state upon entry:
 *      psr.ic: off
 *      r31:    contains saved predicates (pr)
 *
 * Upon exit, the state is as follows:
 *      psr.ic: off
 *       r2 = points to &pt_regs.r16
 *       r8 = contents of ar.ccv
 *       r9 = contents of ar.csd
 *      r10 = contents of ar.ssd
 *      r11 = FPSR_DEFAULT
 *      r12 = kernel sp (kernel virtual address)
 *      r13 = points to current task_struct (kernel virtual address)
 *      p15 = TRUE if psr.i is set in cr.ipsr
 *      predicate registers (other than p2, p3, and p15), b6, r3, r14, r15:
 *              preserved
 *
 * Note that psr.ic is NOT turned on by this macro.  This is so that
 * we can pass interruption state as arguments to a handler.
 */
#define IA64_NATIVE_DO_SAVE_MIN(__COVER,SAVE_IFS,EXTRA,WORKAROUND)                              \
        mov r16=IA64_KR(CURRENT);       /* M */                                                 \
        mov r27=ar.rsc;                 /* M */                                                 \
        mov r20=r1;                     /* A */                                                 \
        mov r25=ar.unat;                /* M */                                                 \
        MOV_FROM_IPSR(p0,r29);          /* M */                                                 \
        mov r26=ar.pfs;                 /* I */                                                 \
        MOV_FROM_IIP(r28);                      /* M */                                         \
        mov r21=ar.fpsr;                /* M */                                                 \
        __COVER;                                /* B;; (or nothing) */                          \
        ;;                                                                                      \
        adds r16=IA64_TASK_THREAD_ON_USTACK_OFFSET,r16;                                         \
        ;;                                                                                      \
        ld1 r17=[r16];                          /* load current->thread.on_ustack flag */       \
        st1 [r16]=r0;                           /* clear current->thread.on_ustack flag */      \
        adds r1=-IA64_TASK_THREAD_ON_USTACK_OFFSET,r16                                          \
        /* switch from user to kernel RBS: */                                                   \
        ;;                                                                                      \
        invala;                         /* M */                                                 \
        SAVE_IFS;                                                                               \
        cmp.eq pKStk,pUStk=r0,r17;              /* are we in kernel mode already? */            \
        ;;                                                                                      \
(pUStk) mov ar.rsc=0;           /* set enforced lazy mode, pl 0, little-endian, loadrs=0 */     \
        ;;                                                                                      \
(pUStk) mov.m r24=ar.rnat;                                                                      \
(pUStk) addl r22=IA64_RBS_OFFSET,r1;                    /* compute base of RBS */               \
(pKStk) mov r1=sp;                                      /* get sp  */                           \
        ;;                                                                                      \
(pUStk) lfetch.fault.excl.nt1 [r22];                                                            \
(pUStk) addl r1=IA64_STK_OFFSET-IA64_PT_REGS_SIZE,r1;   /* compute base of memory stack */      \
(pUStk) mov r23=ar.bspstore;                            /* save ar.bspstore */                  \
        ;;                                                                                      \
(pUStk) mov ar.bspstore=r22;                            /* switch to kernel RBS */              \
(pKStk) addl r1=-IA64_PT_REGS_SIZE,r1;                  /* if in kernel mode, use sp (r12) */   \
        ;;                                                                                      \
(pUStk) mov r18=ar.bsp;                                                                         \
(pUStk) mov ar.rsc=0x3;         /* set eager mode, pl 0, little-endian, loadrs=0 */             \
        adds r17=2*L1_CACHE_BYTES,r1;           /* really: biggest cache-line size */           \
        adds r16=PT(CR_IPSR),r1;                                                                \
        ;;                                                                                      \
        lfetch.fault.excl.nt1 [r17],L1_CACHE_BYTES;                                             \
        st8 [r16]=r29;          /* save cr.ipsr */                                              \
        ;;                                                                                      \
        lfetch.fault.excl.nt1 [r17];                                                            \
        tbit.nz p15,p0=r29,IA64_PSR_I_BIT;                                                      \
        mov r29=b0                                                                              \
        ;;                                                                                      \
        WORKAROUND;                                                                             \
        adds r16=PT(R8),r1;     /* initialize first base pointer */                             \
        adds r17=PT(R9),r1;     /* initialize second base pointer */                            \
(pKStk) mov r18=r0;             /* make sure r18 isn't NaT */                                   \
        ;;                                                                                      \
.mem.offset 0,0; st8.spill [r16]=r8,16;                                                         \
.mem.offset 8,0; st8.spill [r17]=r9,16;                                                         \
        ;;                                                                                      \
.mem.offset 0,0; st8.spill [r16]=r10,24;                                                        \
.mem.offset 8,0; st8.spill [r17]=r11,24;                                                        \
        ;;                                                                                      \
        st8 [r16]=r28,16;       /* save cr.iip */                                               \
        st8 [r17]=r30,16;       /* save cr.ifs */                                               \
(pUStk) sub r18=r18,r22;        /* r18=RSE.ndirty*8 */                                          \
        mov r8=ar.ccv;                                                                          \
        mov r9=ar.csd;                                                                          \
        mov r10=ar.ssd;                                                                         \
        movl r11=FPSR_DEFAULT;   /* L-unit */                                                   \
        ;;                                                                                      \
        st8 [r16]=r25,16;       /* save ar.unat */                                              \
        st8 [r17]=r26,16;       /* save ar.pfs */                                               \
        shl r18=r18,16;         /* compute ar.rsc to be used for "loadrs" */                    \
        ;;                                                                                      \
        st8 [r16]=r27,16;       /* save ar.rsc */                                               \
(pUStk) st8 [r17]=r24,16;       /* save ar.rnat */                                              \
(pKStk) adds r17=16,r17;        /* skip over ar_rnat field */                                   \
        ;;                      /* avoid RAW on r16 & r17 */                                    \
(pUStk) st8 [r16]=r23,16;       /* save ar.bspstore */                                          \
        st8 [r17]=r31,16;       /* save predicates */                                           \
(pKStk) adds r16=16,r16;        /* skip over ar_bspstore field */                               \
        ;;                                                                                      \
        st8 [r16]=r29,16;       /* save b0 */                                                   \
        st8 [r17]=r18,16;       /* save ar.rsc value for "loadrs" */                            \
        cmp.eq pNonSys,pSys=r0,r0       /* initialize pSys=0, pNonSys=1 */                      \
        ;;                                                                                      \
.mem.offset 0,0; st8.spill [r16]=r20,16;        /* save original r1 */                          \
.mem.offset 8,0; st8.spill [r17]=r12,16;                                                        \
        adds r12=-16,r1;        /* switch to kernel memory stack (with 16 bytes of scratch) */  \
        ;;                                                                                      \
.mem.offset 0,0; st8.spill [r16]=r13,16;                                                        \
.mem.offset 8,0; st8.spill [r17]=r21,16;        /* save ar.fpsr */                              \
        mov r13=IA64_KR(CURRENT);       /* establish `current' */                               \
        ;;                                                                                      \
.mem.offset 0,0; st8.spill [r16]=r15,16;                                                        \
.mem.offset 8,0; st8.spill [r17]=r14,16;                                                        \
        ;;                                                                                      \
.mem.offset 0,0; st8.spill [r16]=r2,16;                                                         \
.mem.offset 8,0; st8.spill [r17]=r3,16;                                                         \
        ACCOUNT_GET_STAMP                                                                       \
        adds r2=IA64_PT_REGS_R16_OFFSET,r1;                                                     \
        ;;                                                                                      \
        EXTRA;                                                                                  \
        movl r1=__gp;           /* establish kernel global pointer */                           \
        ;;                                                                                      \
        ACCOUNT_SYS_ENTER                                                                       \
        bsw.1;                  /* switch back to bank 1 (must be last in insn group) */        \
        ;;
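
/*
 * On exit, r2 points at &pt_regs.r16 but the second base pointer expected by
 * SAVE_REST (r3 = &pt_regs.r17) is not set up here.  Callers typically derive
 * it with
 *
 *      adds r3=8,r2            // set up second base pointer for SAVE_REST
 *
 * and turn psr.ic back on (ssm psr.ic followed by a serialize) before
 * invoking SAVE_REST below.
 */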

/*
 * SAVE_REST saves the remainder of pt_regs (with psr.ic on).
 *
 * Assumed state upon entry:
 *      psr.ic: on
 *      r2:     points to &pt_regs.r16
 *      r3:     points to &pt_regs.r17
 *      r8:     contents of ar.ccv
 *      r9:     contents of ar.csd
 *      r10:    contents of ar.ssd
 *      r11:    FPSR_DEFAULT
 *
 * Registers r14 and r15 are guaranteed not to be touched by SAVE_REST.
 */
#define SAVE_REST                               \
.mem.offset 0,0; st8.spill [r2]=r16,16;         \
.mem.offset 8,0; st8.spill [r3]=r17,16;         \
        ;;                                      \
.mem.offset 0,0; st8.spill [r2]=r18,16;         \
.mem.offset 8,0; st8.spill [r3]=r19,16;         \
        ;;                                      \
.mem.offset 0,0; st8.spill [r2]=r20,16;         \
.mem.offset 8,0; st8.spill [r3]=r21,16;         \
        mov r18=b6;                             \
        ;;                                      \
.mem.offset 0,0; st8.spill [r2]=r22,16;         \
.mem.offset 8,0; st8.spill [r3]=r23,16;         \
        mov r19=b7;                             \
        ;;                                      \
.mem.offset 0,0; st8.spill [r2]=r24,16;         \
.mem.offset 8,0; st8.spill [r3]=r25,16;         \
        ;;                                      \
.mem.offset 0,0; st8.spill [r2]=r26,16;         \
.mem.offset 8,0; st8.spill [r3]=r27,16;         \
        ;;                                      \
.mem.offset 0,0; st8.spill [r2]=r28,16;         \
.mem.offset 8,0; st8.spill [r3]=r29,16;         \
        ;;                                      \
.mem.offset 0,0; st8.spill [r2]=r30,16;         \
.mem.offset 8,0; st8.spill [r3]=r31,32;         \
        ;;                                      \
        mov ar.fpsr=r11;        /* M-unit */    \
        st8 [r2]=r8,8;          /* ar.ccv */    \
        adds r24=PT(B6)-PT(F7),r3;              \
        ;;                                      \
        stf.spill [r2]=f6,32;                   \
        stf.spill [r3]=f7,32;                   \
        ;;                                      \
        stf.spill [r2]=f8,32;                   \
        stf.spill [r3]=f9,32;                   \
        ;;                                      \
        stf.spill [r2]=f10;                     \
        stf.spill [r3]=f11;                     \
        adds r25=PT(B7)-PT(F11),r3;             \
        ;;                                      \
        st8 [r24]=r18,16;       /* b6 */        \
        st8 [r25]=r19,16;       /* b7 */        \
        ;;                                      \
        st8 [r24]=r9;           /* ar.csd */    \
        st8 [r25]=r10;          /* ar.ssd */    \
        ;;
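
/*
 * Once SAVE_REST completes the pt_regs frame, handlers normally pre-load rp
 * with the address of ia64_leave_kernel and then branch-call the C-level
 * handler, so the handler returns straight into the common kernel exit path
 * (see the illustrative sketch at the end of this file).
 */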

#define RSE_WORKAROUND                          \
(pUStk) extr.u r17=r18,3,6;                     \
(pUStk) sub r16=r18,r22;                        \
[1:](pKStk)     br.cond.sptk.many 1f;           \
        .xdata4 ".data..patch.rse",1b-.         \
        ;;                                      \
        cmp.ge p6,p7 = 33,r17;                  \
        ;;                                      \
(p6)    mov r17=0x310;                          \
(p7)    mov r17=0x308;                          \
        ;;                                      \
        cmp.leu p1,p0=r16,r17;                  \
(p1)    br.cond.sptk.many 1f;                   \
        dep.z r17=r26,0,62;                     \
        movl r16=2f;                            \
        ;;                                      \
        mov ar.pfs=r17;                         \
        dep r27=r0,r27,16,14;                   \
        mov b0=r16;                             \
        ;;                                      \
        br.ret.sptk b0;                         \
        ;;                                      \
2:                                              \
        mov ar.rsc=r0                           \
        ;;                                      \
        flushrs;                                \
        ;;                                      \
        mov ar.bspstore=r22                     \
        ;;                                      \
        mov r18=ar.bsp;                         \
        ;;                                      \
1:                                              \
        .pred.rel "mutex", pKStk, pUStk
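
/*
 * RSE_WORKAROUND (above) runs as the WORKAROUND step of DO_SAVE_MIN, after
 * ar.bspstore has been switched to the kernel register backing store.  At
 * that point r18 = ar.bsp, r22 = base of the kernel RBS, r26 = ar.pfs and
 * r27 = ar.rsc of the interrupted context.  For interruptions taken from
 * user mode, if the amount of dirty register-stack state (r18 - r22) exceeds
 * a threshold (0x310 or 0x308 bytes, selected from bits 3..8 of ar.bsp), the
 * macro returns (br.ret) to label 2, puts the RSE into enforced lazy mode
 * (ar.rsc=0), issues flushrs, sets ar.bspstore back to the kernel RBS base
 * and re-reads ar.bsp into r18 before continuing at label 1.  The branch
 * tagged [1:] is recorded in ".data..patch.rse" so boot code can patch the
 * workaround away on processors that do not need it.
 */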

#define SAVE_MIN_WITH_COVER     DO_SAVE_MIN(COVER, mov r30=cr.ifs, , RSE_WORKAROUND)
#define SAVE_MIN_WITH_COVER_R19 DO_SAVE_MIN(COVER, mov r30=cr.ifs, mov r15=r19, RSE_WORKAROUND)
#define SAVE_MIN                        DO_SAVE_MIN(     , mov r30=r0, , )
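
/*
 * Illustrative use (a sketch following the handler pattern in ivt.S, not code
 * from this file; my_c_handler and the out-register set-up are hypothetical):
 *
 *      SAVE_MIN_WITH_COVER             // cover, switch stacks, save minimal state
 *      alloc r14=ar.pfs,0,0,1,0        // hypothetical: one outgoing argument
 *      mov out0=cr.ifa                 // interruption state, read while psr.ic is off
 *      adds r3=8,r2                    // set up second base pointer for SAVE_REST
 *      ;;
 *      ssm psr.ic | PSR_DEFAULT_BITS   // turn interruption collection back on
 *      ;;
 *      srlz.i                          // make the psr.ic change take effect
 *      ;;
 * (p15) ssm psr.i                      // restore psr.i if it was set in cr.ipsr
 *      SAVE_REST                       // complete the pt_regs frame
 *      movl r14=ia64_leave_kernel
 *      ;;
 *      mov rp=r14                      // handler will "return" to ia64_leave_kernel
 *      br.call.sptk.many b6=my_c_handler  // return address in b6 is ignored
 */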
