arch/xtensa/include/asm/initialize_mmu.h

/*
 * arch/xtensa/include/asm/initialize_mmu.h
 *
 * Initializes MMU:
 *
 *      For the new V3 MMU we remap the TLB from virtual == physical
 *      to the standard Linux mapping used in earlier MMUs.
 *
 *      For the new MMU we also support a new configuration register that
 *      specifies how the S32C1I instruction operates with the cache
 *      controller.
 *
 * This file is subject to the terms and conditions of the GNU General
 * Public License.  See the file "COPYING" in the main directory of
 * this archive for more details.
 *
 * Copyright (C) 2008 - 2012 Tensilica, Inc.
 *
 *   Marc Gauthier <marc@tensilica.com>
 *   Pete Delaney <piet@tensilica.com>
 */

#ifndef _XTENSA_INITIALIZE_MMU_H
#define _XTENSA_INITIALIZE_MMU_H

#include <asm/pgtable.h>
#include <asm/vectors.h>

#if XCHAL_HAVE_PTP_MMU
#define CA_BYPASS       (_PAGE_CA_BYPASS | _PAGE_HW_WRITE | _PAGE_HW_EXEC)
#define CA_WRITEBACK    (_PAGE_CA_WB     | _PAGE_HW_WRITE | _PAGE_HW_EXEC)
#else
#define CA_WRITEBACK    (0x4)
#endif

#ifdef __ASSEMBLY__

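/* Cores older than the RC-2009.0 hardware release have no ATOMCTL register. */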
#define XTENSA_HWVERSION_RC_2009_0 230000

        .macro  initialize_mmu

#if XCHAL_HAVE_S32C1I && (XCHAL_HW_MIN_VERSION >= XTENSA_HWVERSION_RC_2009_0)
/*
 * We have the Atomic Operation Control (ATOMCTL) register; initialize it.
 * For details see Documentation/xtensa/atomctl.rst.
 */
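/*
 * Per Documentation/xtensa/atomctl.rst, ATOMCTL holds three 2-bit fields,
 * one per access mode: writeback [5:4], writethrough [3:2] and bypass
 * [1:0].  Each field selects how S32C1I is performed for that mode:
 * 0: exception, 1: RCW transaction, 2: internal operation.
 */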
#if XCHAL_DCACHE_IS_COHERENT
        movi    a3, 0x25        /* For SMP/MX -- internal for writeback,
                                 * RCW otherwise
                                 */
#else
        movi    a3, 0x29        /* non-MX -- most cores use standard
                                 * memory controllers which usually
                                 * can't use RCW
                                 */
#endif
        wsr     a3, atomctl
#endif  /* XCHAL_HAVE_S32C1I &&
         * (XCHAL_HW_MIN_VERSION >= XTENSA_HWVERSION_RC_2009_0)
         */

#if defined(CONFIG_MMU) && XCHAL_HAVE_PTP_MMU && XCHAL_HAVE_SPANNING_WAY
/*
 * Have MMU v3
 */

#if !XCHAL_HAVE_VECBASE
# error "MMU v3 requires reloc vectors"
#endif

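        /*
         * The _call0 below only puts the address of the following _j
         * into a0, i.e. the current physical PC: the code at 1 uses it
         * to find the region it is running in, and the jump back through
         * the temporary mapping lands on the _j, which continues at 2.
         */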
        movi    a1, 0
        _call0  1f
        _j      2f

        .align  4
1:

#if CONFIG_KERNEL_LOAD_ADDRESS < 0x40000000ul
#define TEMP_MAPPING_VADDR 0x40000000
#else
#define TEMP_MAPPING_VADDR 0x00000000
#endif

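        /* TEMP_MAPPING_VADDR above is chosen so that the temporary
         * 512MB window does not overlap the region the kernel was
         * loaded into.
         */
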
        /* Step 1: invalidate the spanning-way mapping at TEMP_MAPPING_VADDR. */

        movi    a2, TEMP_MAPPING_VADDR | XCHAL_SPANNING_WAY
        idtlb   a2
        iitlb   a2
        isync

        /* Step 2: map the 128MB region at TEMP_MAPPING_VADDR to the paddr
         * containing this code and jump to the new mapping.
         */

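        /*
         * a3 = 128MB-aligned physical address of this code, with bypass
         * cache attributes; a7 = TEMP_MAPPING_VADDR in way 5, whose
         * pages are 128MB.
         */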
        srli    a3, a0, 27
        slli    a3, a3, 27
        addi    a3, a3, CA_BYPASS
        addi    a7, a2, 5 - XCHAL_SPANNING_WAY
        wdtlb   a3, a7
        witlb   a3, a7
        isync
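
        /*
         * a4 = a0's offset within the 128MB region, rebased onto
         * TEMP_MAPPING_VADDR; jumping there resumes execution at the
         * _j 2f above, now running through the temporary mapping.
         */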

        slli    a4, a0, 5
        srli    a4, a4, 5
        addi    a5, a2, -XCHAL_SPANNING_WAY
        add     a4, a4, a5
        jx      a4

        /* Step 3: unmap everything other than the current area.
         *         Start at a2 + 0x20000000, wrap around, and end just
         *         before a2.
         */
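        /* The additions wrap modulo 2^32, so after seven 512MB steps
         * a5 is back at a2 and only the temporary mapping remains.
         */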
2:      movi    a4, 0x20000000
        add     a5, a2, a4
3:      idtlb   a5
        iitlb   a5
        add     a5, a5, a4
        bne     a5, a2, 3b

        /* Step 4: Setup MMU with the requested static mappings. */

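        /* The {I,D}TLBCFG writes select the page sizes used by the
         * configurable TLB ways that hold these mappings.
         */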
        movi    a6, 0x01000000
        wsr     a6, ITLBCFG
        wsr     a6, DTLBCFG
        isync

        movi    a5, XCHAL_KSEG_CACHED_VADDR + XCHAL_KSEG_TLB_WAY
        movi    a4, XCHAL_KSEG_PADDR + CA_WRITEBACK
        wdtlb   a4, a5
        witlb   a4, a5

        movi    a5, XCHAL_KSEG_BYPASS_VADDR + XCHAL_KSEG_TLB_WAY
        movi    a4, XCHAL_KSEG_PADDR + CA_BYPASS
        wdtlb   a4, a5
        witlb   a4, a5

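        /* KSEG is now mapped twice: writeback-cached at
         * XCHAL_KSEG_CACHED_VADDR and uncached at XCHAL_KSEG_BYPASS_VADDR,
         * both backed by XCHAL_KSEG_PADDR.
         */
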
#ifdef CONFIG_XTENSA_KSEG_512M
        movi    a5, XCHAL_KSEG_CACHED_VADDR + 0x10000000 + XCHAL_KSEG_TLB_WAY
        movi    a4, XCHAL_KSEG_PADDR + 0x10000000 + CA_WRITEBACK
        wdtlb   a4, a5
        witlb   a4, a5

        movi    a5, XCHAL_KSEG_BYPASS_VADDR + 0x10000000 + XCHAL_KSEG_TLB_WAY
        movi    a4, XCHAL_KSEG_PADDR + 0x10000000 + CA_BYPASS
        wdtlb   a4, a5
        witlb   a4, a5
#endif

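        /* Map KIO the same way: a cached and a bypass view of
         * XCHAL_KIO_DEFAULT_PADDR.
         */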
        movi    a5, XCHAL_KIO_CACHED_VADDR + XCHAL_KIO_TLB_WAY
        movi    a4, XCHAL_KIO_DEFAULT_PADDR + CA_WRITEBACK
        wdtlb   a4, a5
        witlb   a4, a5

        movi    a5, XCHAL_KIO_BYPASS_VADDR + XCHAL_KIO_TLB_WAY
        movi    a4, XCHAL_KIO_DEFAULT_PADDR + CA_BYPASS
        wdtlb   a4, a5
        witlb   a4, a5

        isync

        /* Jump to self, using final mappings. */
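        /* movi a4, 1f resolves to the linked (KSEG virtual) address of
         * the label, so this jump moves the PC off the temporary
         * mapping and onto the final one.
         */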
        movi    a4, 1f
        jx      a4

1:
        /* Step 5: remove temporary mapping. */
        idtlb   a7
        iitlb   a7
        isync

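        /* Clear ptevaddr for a clean initial state; it is programmed
         * with the real page-table location once the kernel page
         * tables are set up.
         */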
        movi    a0, 0
        wsr     a0, ptevaddr
        rsync

#endif /* defined(CONFIG_MMU) && XCHAL_HAVE_PTP_MMU &&
          XCHAL_HAVE_SPANNING_WAY */

        .endm

        .macro  initialize_cacheattr

#if !defined(CONFIG_MMU) && (XCHAL_HAVE_TLBS || XCHAL_HAVE_MPU)
#if CONFIG_MEMMAP_CACHEATTR == 0x22222222 && XCHAL_HAVE_PTP_MMU
#error Default MEMMAP_CACHEATTR of 0x22222222 does not work with full MMU.
#endif

#if XCHAL_HAVE_MPU
        .data
        .align  4
.Lattribute_table:
        .long 0x000000, 0x1fff00, 0x1ddf00, 0x1eef00
        .long 0x006600, 0x000000, 0x000000, 0x000000
        .long 0x000000, 0x000000, 0x000000, 0x000000
        .long 0x000000, 0x000000, 0x000000, 0x000000
        .previous

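        /*
         * Walk the eight 512MB regions from 0xe0000000 down to 0, one
         * CONFIG_MEMMAP_CACHEATTR nibble each (highest nibble first),
         * allocating MPU entries downward from XCHAL_MPU_ENTRIES - 1
         * only when a region's attribute differs from its
         * predecessor's.  .Lattribute_table gives the MPU memory type
         * for each attribute nibble.
         */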
        movi    a3, .Lattribute_table
        movi    a4, CONFIG_MEMMAP_CACHEATTR
        movi    a5, 1
        movi    a6, XCHAL_MPU_ENTRIES
        movi    a10, 0x20000000
        movi    a11, -1
1:
        sub     a5, a5, a10
        extui   a8, a4, 28, 4
        beq     a8, a11, 2f
        addi    a6, a6, -1
        mov     a11, a8
2:
        addx4   a9, a8, a3
        l32i    a9, a9, 0
        or      a9, a9, a6
        wptlb   a9, a5
        slli    a4, a4, 4
        bgeu    a5, a10, 1b

#else
        movi    a5, XCHAL_SPANNING_WAY
        movi    a6, ~_PAGE_ATTRIB_MASK
        movi    a4, CONFIG_MEMMAP_CACHEATTR
        movi    a8, 0x20000000
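
        /*
         * For each 512MB region, starting at vaddr 0, splice the next
         * CONFIG_MEMMAP_CACHEATTR nibble (lowest first) into the
         * attribute bits of the spanning-way DTLB and ITLB entries;
         * the xor/and/xor sequence keeps the non-attribute bits.
         */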
1:
        rdtlb1  a3, a5
        xor     a3, a3, a4
        and     a3, a3, a6
        xor     a3, a3, a4
        wdtlb   a3, a5
        ritlb1  a3, a5
        xor     a3, a3, a4
        and     a3, a3, a6
        xor     a3, a3, a4
        witlb   a3, a5

        add     a5, a5, a8
        srli    a4, a4, 4
        bgeu    a5, a8, 1b

        isync
#endif
#endif

        .endm

#endif /*__ASSEMBLY__*/

#endif /* _XTENSA_INITIALIZE_MMU_H */
