root/arch/powerpc/mm/nohash/tlb_low.S

/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * This file contains low-level functions for performing various
 * types of TLB invalidations on various processors with no hash
 * table.
 *
 * This file implements the following functions for all no-hash
 * processors. Not all of them are implemented for every variant,
 * and some are implemented inline in tlbflush.h
 *
 *      - tlbil_va
 *      - tlbil_pid
 *      - tlbil_all
 *      - tlbivax_bcast
 *
 * Code mostly moved over from misc_32.S
 *
 *    Copyright (C) 1995-1996 Gary Thomas (gdt@linuxppc.org)
 *
 * Partially rewritten by Cort Dougan (cort@cs.nmt.edu)
 * Paul Mackerras, Kumar Gala and Benjamin Herrenschmidt.
 */

#include <asm/reg.h>
#include <asm/page.h>
#include <asm/cputable.h>
#include <asm/mmu.h>
#include <asm/ppc_asm.h>
#include <asm/asm-offsets.h>
#include <asm/processor.h>
#include <asm/bug.h>
#include <asm/asm-compat.h>
#include <asm/feature-fixups.h>

#if defined(CONFIG_40x)

/*
 * 40x implementation needs only tlbil_va
 */
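/*
 * As used below: r3 = effective address to look up, r4 = PID it is
 * mapped under. The MSR and the current PID are saved around the
 * search and restored afterwards.
 */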
_GLOBAL(__tlbil_va)
        /* We run the search with interrupts disabled because we have to change
         * the PID and we don't want to be preempted while that happens.
         */
        mfmsr   r5
        mfspr   r6,SPRN_PID
        wrteei  0
        mtspr   SPRN_PID,r4
        tlbsx.  r3, 0, r3
        mtspr   SPRN_PID,r6
        wrtee   r5
        bne     1f
        sync
        /* There are only 64 TLB entries, so r3 < 64, which means bit 25 is
         * clear. Since 25 is the V bit in the TLB_TAG, loading this value
         * will invalidate the TLB entry. */
        tlbwe   r3, r3, TLB_TAG
        isync
1:      blr

#elif defined(CONFIG_PPC_8xx)

/*
 * Nothing to do for 8xx, everything is inline
 */

#elif defined(CONFIG_44x) /* Includes 47x */

/*
 * 440 implementation uses tlbsx/we for tlbil_va and a full sweep
 * of the TLB for everything else.
 */
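/*
 * As used below: r3 = effective address to invalidate, r4 = PID/STID
 * that is written into the MMUCR before the search.
 */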
_GLOBAL(__tlbil_va)
        mfspr   r5,SPRN_MMUCR
        mfmsr   r10

        /*
         * We write 16 bits of STID since 47x supports that much; we
         * should never be passed out-of-bounds values on 440 (hopefully).
         */
        rlwimi  r5,r4,0,16,31

        /* We have to run the search with interrupts disabled, otherwise
         * an interrupt which causes a TLB miss can clobber the MMUCR
         * between the mtspr and the tlbsx.
         *
         * Critical and Machine Check interrupts take care of saving
         * and restoring MMUCR, so only normal interrupts have to be
         * taken care of.
         */
        wrteei  0
        mtspr   SPRN_MMUCR,r5
        tlbsx.  r6,0,r3
        bne     10f
        sync
BEGIN_MMU_FTR_SECTION
        b       2f
END_MMU_FTR_SECTION_IFSET(MMU_FTR_TYPE_47x)
        /* On 440 there are only 64 TLB entries, so r6 < 64, which means bit
         * 22 is clear.  Since 22 is the V bit in the TLB_PAGEID, loading this
         * value will invalidate the TLB entry.
         */
        tlbwe   r6,r6,PPC44x_TLB_PAGEID
        isync
10:     wrtee   r10
        blr
2:
#ifdef CONFIG_PPC_47x
        oris    r7,r6,0x8000    /* specify way explicitly */
        clrrwi  r4,r3,12        /* get an EPN for the hashing with V = 0 */
        ori     r4,r4,PPC47x_TLBE_SIZE
        tlbwe   r4,r7,0         /* write it */
        isync
        wrtee   r10
        blr
#else /* CONFIG_PPC_47x */
1:      trap
        EMIT_BUG_ENTRY 1b,__FILE__,__LINE__,0;
#endif /* !CONFIG_PPC_47x */

_GLOBAL(_tlbil_all)
_GLOBAL(_tlbil_pid)
BEGIN_MMU_FTR_SECTION
        b       2f
END_MMU_FTR_SECTION_IFSET(MMU_FTR_TYPE_47x)
        li      r3,0
        sync

        /* Load high watermark */
        lis     r4,tlb_44x_hwater@ha
        lwz     r5,tlb_44x_hwater@l(r4)

1:      tlbwe   r3,r3,PPC44x_TLB_PAGEID
        addi    r3,r3,1
        cmpw    0,r3,r5
        ble     1b

        isync
        blr
2:
#ifdef CONFIG_PPC_47x
        /* 476 variant. There's no simple way to do this; hopefully we can
         * limit the number of such full invalidates.
         */
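        /* Register usage in the sweep below (as read from the code):
         * r3 = current set (0..255), r4 = current way offset, r7 = way
         * selector for tlbwe, r8 = current boltmap word, r10 = pointer
         * into tlb_47x_boltmap. Ways marked bolted are never read or
         * invalidated, presumably so that pinned entries survive.
         */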
        mfmsr   r11             /* Interrupts off */
        wrteei  0
        li      r3,-1           /* Current set */
        lis     r10,tlb_47x_boltmap@h
        ori     r10,r10,tlb_47x_boltmap@l
        lis     r7,0x8000       /* Specify way explicitly */

        b       9f              /* For each set */

1:      li      r9,4            /* Number of ways */
        li      r4,0            /* Current way */
        li      r6,0            /* Default entry value 0 */
        andi.   r0,r8,1         /* Check if way 0 is bolted */
        mtctr   r9              /* Load way counter */
        bne-    3f              /* Bolted, skip loading it */

2:      /* For each way */
        or      r5,r3,r4        /* Make way|index for tlbre */
        rlwimi  r5,r5,16,8,15   /* Copy index into position */
        tlbre   r6,r5,0         /* Read entry */
3:      addis   r4,r4,0x2000    /* Next way */
        andi.   r0,r6,PPC47x_TLB0_VALID /* Valid entry ? */
        beq     4f              /* Nope, skip it */
        rlwimi  r7,r5,0,1,2     /* Insert way number */
        rlwinm  r6,r6,0,21,19   /* Clear V */
        tlbwe   r6,r7,0         /* Write it */
4:      bdnz    2b              /* Loop for each way */
        srwi    r8,r8,1         /* Next boltmap bit */
9:      cmpwi   cr1,r3,255      /* Last set done ? */
        addi    r3,r3,1         /* Next set */
        beq     cr1,1f          /* End of loop */
        andi.   r0,r3,0x1f      /* Need to load a new boltmap word ? */
        bne     1b              /* No, loop */
        lwz     r8,0(r10)       /* Load boltmap entry */
        addi    r10,r10,4       /* Next word */
        b       1b              /* Then loop */
1:      isync                   /* Sync shadows */
        wrtee   r11
#else /* CONFIG_PPC_47x */
1:      trap
        EMIT_BUG_ENTRY 1b,__FILE__,__LINE__,0;
#endif /* !CONFIG_PPC_47x */
        blr

#ifdef CONFIG_PPC_47x

/*
 * _tlbivax_bcast is only on 47x. We don't bother doing a runtime
 * check though; it will blow up soon enough if we mistakenly try
 * to use it on a 440.
 */
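/*
 * As used below: r3 = effective address to invalidate, r4 = PID/STID
 * written into the MMUCR before the broadcast tlbivax.
 */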
_GLOBAL(_tlbivax_bcast)
        mfspr   r5,SPRN_MMUCR
        mfmsr   r10
        rlwimi  r5,r4,0,16,31
        wrteei  0
        mtspr   SPRN_MMUCR,r5
        isync
        PPC_TLBIVAX(0, R3)
        isync
        eieio
        tlbsync
BEGIN_FTR_SECTION
        b       1f
END_FTR_SECTION_IFSET(CPU_FTR_476_DD2)
        sync
        wrtee   r10
        blr
/*
 * DD2 HW could hang if an instruction fetch happens before the msync
 * completes. Touch enough instruction cache lines to ensure cache hits.
 */
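/*
 * The sequence below grabs the current PC with a bl/mflr pair and then
 * uses icbt to touch the following three instruction cache lines at
 * 32-byte strides, so that the instructions after the sync are already
 * present in the I-cache when it completes.
 */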
1:      mflr    r9
        bl      2f
2:      mflr    r6
        li      r7,32
        PPC_ICBT(0,R6,R7)               /* touch next cache line */
        add     r6,r6,r7
        PPC_ICBT(0,R6,R7)               /* touch next cache line */
        add     r6,r6,r7
        PPC_ICBT(0,R6,R7)               /* touch next cache line */
        sync
        nop
        nop
        nop
        nop
        nop
        nop
        nop
        nop
        mtlr    r9
        wrtee   r10
        blr
#endif /* CONFIG_PPC_47x */

#elif defined(CONFIG_FSL_BOOKE)
/*
 * FSL BookE implementations.
 *
 * Since the feature sections use _SECTION_ELSE, the larger code path
 * has to come before the _SECTION_ELSE.
 */

/*
 * Flush MMU TLB on the local processor
 */
_GLOBAL(_tlbil_all)
BEGIN_MMU_FTR_SECTION
        li      r3,(MMUCSR0_TLBFI)@l
        mtspr   SPRN_MMUCSR0, r3
1:
        mfspr   r3,SPRN_MMUCSR0
        andi.   r3,r3,MMUCSR0_TLBFI@l
        bne     1b
MMU_FTR_SECTION_ELSE
        PPC_TLBILX_ALL(0,R0)
ALT_MMU_FTR_SECTION_END_IFCLR(MMU_FTR_USE_TLBILX)
        msync
        isync
        blr

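/*
 * Flush MMU TLB for a particular PID on the local processor.
 * As used below: r3 = PID, shifted into the MAS6 SPID field when
 * tlbilx is available; on parts without tlbilx the whole TLB is
 * flash-invalidated via MMUCSR0, as in _tlbil_all above.
 */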
_GLOBAL(_tlbil_pid)
BEGIN_MMU_FTR_SECTION
        slwi    r3,r3,16
        mfmsr   r10
        wrteei  0
        mfspr   r4,SPRN_MAS6    /* save MAS6 */
        mtspr   SPRN_MAS6,r3
        PPC_TLBILX_PID(0,R0)
        mtspr   SPRN_MAS6,r4    /* restore MAS6 */
        wrtee   r10
MMU_FTR_SECTION_ELSE
        li      r3,(MMUCSR0_TLBFI)@l
        mtspr   SPRN_MMUCSR0, r3
1:
        mfspr   r3,SPRN_MMUCSR0
        andi.   r3,r3,MMUCSR0_TLBFI@l
        bne     1b
ALT_MMU_FTR_SECTION_END_IFSET(MMU_FTR_USE_TLBILX)
        msync
        isync
        blr

/*
 * Flush MMU TLB for a particular address, but only on the local processor
 * (no broadcast)
 */
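/*
 * As used below: r3 = virtual address to invalidate, r4 = PID, which is
 * shifted into the MAS6 SPID field. A 4K page size and AS=0 are assumed.
 */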
_GLOBAL(__tlbil_va)
        mfmsr   r10
        wrteei  0
        slwi    r4,r4,16
        ori     r4,r4,(MAS6_ISIZE(BOOK3E_PAGESZ_4K))@l
        mtspr   SPRN_MAS6,r4            /* assume AS=0 for now */
BEGIN_MMU_FTR_SECTION
        tlbsx   0,r3
        mfspr   r4,SPRN_MAS1            /* check valid */
        andis.  r3,r4,MAS1_VALID@h
        beq     1f
        rlwinm  r4,r4,0,1,31
        mtspr   SPRN_MAS1,r4
        tlbwe
MMU_FTR_SECTION_ELSE
        PPC_TLBILX_VA(0,R3)
ALT_MMU_FTR_SECTION_END_IFCLR(MMU_FTR_USE_TLBILX)
        msync
        isync
1:      wrtee   r10
        blr
#elif defined(CONFIG_PPC_BOOK3E)
/*
 * New Book3E (>= 2.06) implementation
 *
 * Note: We may be able to get away without the interrupt masking stuff
 * if we save/restore MAS6 on exceptions that might modify it
 */
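/*
 * _tlbil_pid: invalidate all local TLB entries for a PID.
 * As used below: r3 = PID, shifted into the MAS6 SPID field before the
 * tlbilx (PID form). _tlbil_pid_noind additionally sets MAS6[SIND].
 */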
_GLOBAL(_tlbil_pid)
        slwi    r4,r3,MAS6_SPID_SHIFT
        mfmsr   r10
        wrteei  0
        mtspr   SPRN_MAS6,r4
        PPC_TLBILX_PID(0,R0)
        wrtee   r10
        msync
        isync
        blr

_GLOBAL(_tlbil_pid_noind)
        slwi    r4,r3,MAS6_SPID_SHIFT
        mfmsr   r10
        ori     r4,r4,MAS6_SIND
        wrteei  0
        mtspr   SPRN_MAS6,r4
        PPC_TLBILX_PID(0,R0)
        wrtee   r10
        msync
        isync
        blr

_GLOBAL(_tlbil_all)
        PPC_TLBILX_ALL(0,R0)
        msync
        isync
        blr

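/*
 * _tlbil_va: invalidate a single TLB entry on the local processor.
 * As used below: r3 = virtual address, r4 = PID (MAS6 SPID),
 * r5 = page size (MAS6 ISIZE), r6 = non-zero to target indirect
 * entries (sets MAS6 SIND). AS=0 is assumed.
 */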
_GLOBAL(_tlbil_va)
        mfmsr   r10
        wrteei  0
        cmpwi   cr0,r6,0
        slwi    r4,r4,MAS6_SPID_SHIFT
        rlwimi  r4,r5,MAS6_ISIZE_SHIFT,MAS6_ISIZE_MASK
        beq     1f
        rlwimi  r4,r6,MAS6_SIND_SHIFT,MAS6_SIND
1:      mtspr   SPRN_MAS6,r4            /* assume AS=0 for now */
        PPC_TLBILX_VA(0,R3)
        msync
        isync
        wrtee   r10
        blr

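/*
 * _tlbivax_bcast: broadcast invalidate of a single address, same
 * argument layout as _tlbil_va above (r3 = address, r4 = PID,
 * r5 = page size, r6 = indirect flag), followed by tlbsync to wait
 * for the other processors.
 */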
_GLOBAL(_tlbivax_bcast)
        mfmsr   r10
        wrteei  0
        cmpwi   cr0,r6,0
        slwi    r4,r4,MAS6_SPID_SHIFT
        rlwimi  r4,r5,MAS6_ISIZE_SHIFT,MAS6_ISIZE_MASK
        beq     1f
        rlwimi  r4,r6,MAS6_SIND_SHIFT,MAS6_SIND
1:      mtspr   SPRN_MAS6,r4            /* assume AS=0 for now */
        PPC_TLBIVAX(0,R3)
        eieio
        tlbsync
        sync
        wrtee   r10
        blr

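/*
 * set_context: switch to a new context.
 * As used below: r3 = new context/PID value written to SPRN_PID,
 * r4 = pointer to the new PGDIR, which is only stored for the Abatron
 * BDI2000 debugger when CONFIG_BDI_SWITCH is set.
 */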
_GLOBAL(set_context)
#ifdef CONFIG_BDI_SWITCH
        /* Context switch the PTE pointer for the Abatron BDI2000.
         * The PGDIR is the second parameter.
         */
        lis     r5, abatron_pteptrs@h
        ori     r5, r5, abatron_pteptrs@l
        stw     r4, 0x4(r5)
#endif
        mtspr   SPRN_PID,r3
        isync                   /* Force context change */
        blr
#else
#error Unsupported processor type !
#endif

#if defined(CONFIG_PPC_FSL_BOOK3E)
/*
 * extern void loadcam_entry(unsigned int index)
 *
 * Load TLBCAM[index] entry into the L2 CAM MMU
 * Must preserve r7, r8, r9, r10 and r11
 */
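/*
 * Note: loadcam_multi below keeps its own state in r7-r11 across its
 * calls to loadcam_entry, which is why those registers must be
 * preserved here.
 */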
_GLOBAL(loadcam_entry)
        mflr    r5
        LOAD_REG_ADDR_PIC(r4, TLBCAM)
        mtlr    r5
        mulli   r5,r3,TLBCAM_SIZE
        add     r3,r5,r4
        lwz     r4,TLBCAM_MAS0(r3)
        mtspr   SPRN_MAS0,r4
        lwz     r4,TLBCAM_MAS1(r3)
        mtspr   SPRN_MAS1,r4
        PPC_LL  r4,TLBCAM_MAS2(r3)
        mtspr   SPRN_MAS2,r4
        lwz     r4,TLBCAM_MAS3(r3)
        mtspr   SPRN_MAS3,r4
BEGIN_MMU_FTR_SECTION
        lwz     r4,TLBCAM_MAS7(r3)
        mtspr   SPRN_MAS7,r4
END_MMU_FTR_SECTION_IFSET(MMU_FTR_BIG_PHYS)
        isync
        tlbwe
        isync
        blr

/*
 * Load multiple TLB entries at once, using an alternate-space
 * trampoline so that we don't have to care about whether the same
 * TLB entry maps us before and after.
 *
 * r3 = first entry to write
 * r4 = number of entries to write
 * r5 = temporary tlb entry
 */
_GLOBAL(loadcam_multi)
        mflr    r8
        /* Don't switch to AS=1 if already there */
        mfmsr   r11
        andi.   r11,r11,MSR_IS
        bne     10f

        /*
         * Set up temporary TLB entry that is the same as what we're
         * running from, but in AS=1.
         */
        bl      1f
1:      mflr    r6
        tlbsx   0,r8
        mfspr   r6,SPRN_MAS1
        ori     r6,r6,MAS1_TS
        mtspr   SPRN_MAS1,r6
        mfspr   r6,SPRN_MAS0
        rlwimi  r6,r5,MAS0_ESEL_SHIFT,MAS0_ESEL_MASK
        mr      r7,r5
        mtspr   SPRN_MAS0,r6
        isync
        tlbwe
        isync

        /* Switch to AS=1 */
        mfmsr   r6
        ori     r6,r6,MSR_IS|MSR_DS
        mtmsr   r6
        isync

10:
        mr      r9,r3
        add     r10,r3,r4
2:      bl      loadcam_entry
        addi    r9,r9,1
        cmpw    r9,r10
        mr      r3,r9
        blt     2b

        /* Don't return to AS=0 if we were in AS=1 at function start */
        andi.   r11,r11,MSR_IS
        bne     3f

        /* Return to AS=0 and clear the temporary entry */
        mfmsr   r6
        rlwinm. r6,r6,0,~(MSR_IS|MSR_DS)
        mtmsr   r6
        isync

        li      r6,0
        mtspr   SPRN_MAS1,r6
        rlwinm  r6,r7,MAS0_ESEL_SHIFT,MAS0_ESEL_MASK
        oris    r6,r6,MAS0_TLBSEL(1)@h
        mtspr   SPRN_MAS0,r6
        isync
        tlbwe
        isync

3:
        mtlr    r8
        blr
#endif
