/* SPDX-License-Identifier: GPL-2.0-only */
/*
 *  linux/arch/arm/mm/tlb-v7.S
 *
 *  Copyright (C) 1997-2002 Russell King
 *  Modified for ARMv7 by Catalin Marinas
 *
 *  ARM architecture version 7 TLB handling functions.
 *  These assume a split I/D TLB.
 */
#include <linux/init.h>
#include <linux/linkage.h>
#include <asm/assembler.h>
#include <asm/asm-offsets.h>
#include <asm/page.h>
#include <asm/tlbflush.h>
#include "proc-macros.S"

/*
 *      v7wbi_flush_user_tlb_range(start, end, vma)
 *
 *      Invalidate a range of TLB entries in the specified address space.
 *
 *      - start - start address (may not be aligned)
 *      - end   - end address (exclusive, may not be aligned)
 *      - vma   - vm_area_struct describing the address range
 *
 *      It is assumed that:
 *      - the "Invalidate single entry" instruction will invalidate
 *        both the I and the D TLBs on Harvard-style TLBs
 */
ENTRY(v7wbi_flush_user_tlb_range)
        vma_vm_mm r3, r2                        @ get vma->vm_mm
        mmid    r3, r3                          @ get vm_mm->context.id
        dsb     ish                             @ complete prior PTE writes
        mov     r0, r0, lsr #PAGE_SHIFT         @ align address
        mov     r1, r1, lsr #PAGE_SHIFT
        asid    r3, r3                          @ mask ASID
#ifdef CONFIG_ARM_ERRATA_720789
        ALT_SMP(W(mov)  r3, #0  )               @ erratum workaround: flush all ASIDs
        ALT_UP(W(nop)           )
#endif
        orr     r0, r3, r0, lsl #PAGE_SHIFT     @ Create initial MVA
        mov     r1, r1, lsl #PAGE_SHIFT
1:
#ifdef CONFIG_ARM_ERRATA_720789
        ALT_SMP(mcr     p15, 0, r0, c8, c3, 3)  @ TLB invalidate U MVA all ASID (shareable)
#else
        ALT_SMP(mcr     p15, 0, r0, c8, c3, 1)  @ TLB invalidate U MVA (shareable)
#endif
        ALT_UP(mcr      p15, 0, r0, c8, c7, 1)  @ TLB invalidate U MVA

        add     r0, r0, #PAGE_SZ                @ step to the next page
        cmp     r0, r1
        blo     1b                              @ loop until end of range
        dsb     ish                             @ complete the invalidation
        ret     lr
ENDPROC(v7wbi_flush_user_tlb_range)
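
For readers coming from the C side, the loop above can be modelled as the
following sketch. It is illustrative only, not kernel code: tlbimva() is a
hypothetical stand-in for the TLBIMVA coprocessor write, and MODEL_PAGE_SHIFT
assumes 4K pages; the alignment mirrors the lsr/lsl pairs above.

#include <stdint.h>

#define MODEL_PAGE_SHIFT 12
#define MODEL_PAGE_SIZE  (1UL << MODEL_PAGE_SHIFT)

/* Hypothetical stand-in for the TLB-invalidate-by-MVA coprocessor write. */
static void tlbimva(uint32_t mva)
{
        (void)mva;              /* mcr p15, 0, mva, c8, c3, 1 in the real code */
}

/* Each operand packs the page-aligned VA with the ASID in the low byte. */
static void flush_user_tlb_range_model(uint32_t start, uint32_t end,
                                       uint32_t asid)
{
        uint32_t mva = (start & ~(MODEL_PAGE_SIZE - 1)) | (asid & 0xff);

        end &= ~(MODEL_PAGE_SIZE - 1);  /* align down, as the lsr/lsl pair does */
        while (mva < end) {             /* cmp/blo: unsigned compare */
                tlbimva(mva);
                mva += MODEL_PAGE_SIZE; /* add r0, r0, #PAGE_SZ */
        }
}

As in the assembly, the ASID occupies only the low byte of each operand, so
the unsigned compare against the page-aligned end address still terminates
the loop correctly.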

/*
 *      v7wbi_flush_kern_tlb_range(start,end)
 *
 *      Invalidate a range of kernel TLB entries
 *
 *      - start - start address (may not be aligned)
 *      - end   - end address (exclusive, may not be aligned)
 */
ENTRY(v7wbi_flush_kern_tlb_range)
        dsb     ish                             @ complete prior PTE writes
        mov     r0, r0, lsr #PAGE_SHIFT         @ align address
        mov     r1, r1, lsr #PAGE_SHIFT
        mov     r0, r0, lsl #PAGE_SHIFT
        mov     r1, r1, lsl #PAGE_SHIFT
1:
#ifdef CONFIG_ARM_ERRATA_720789
        ALT_SMP(mcr     p15, 0, r0, c8, c3, 3)  @ TLB invalidate U MVA all ASID (shareable)
#else
        ALT_SMP(mcr     p15, 0, r0, c8, c3, 1)  @ TLB invalidate U MVA (shareable)
#endif
        ALT_UP(mcr      p15, 0, r0, c8, c7, 1)  @ TLB invalidate U MVA
        add     r0, r0, #PAGE_SZ                @ step to the next page
        cmp     r0, r1
        blo     1b                              @ loop until end of range
        dsb     ish                             @ complete the invalidation
        isb                                     @ discard prefetched instructions
        ret     lr
ENDPROC(v7wbi_flush_kern_tlb_range)
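
Neither routine is normally called directly. On a multi-TLB kernel, callers
such as flush_tlb_kernel_range() reach them through the cpu_tlb function
table declared in <asm/tlbflush.h>. The sketch below gives a rough C-level
picture of that dispatch; example_flush_kernel_range() is an illustrative
caller, not a kernel function.

struct vm_area_struct;                  /* opaque here; see <linux/mm_types.h> */

/* Per-CPU-type TLB operations, as declared in <asm/tlbflush.h>. */
struct cpu_tlb_fns {
        void (*flush_user_range)(unsigned long, unsigned long,
                                 struct vm_area_struct *);
        void (*flush_kern_range)(unsigned long, unsigned long);
        unsigned long tlb_flags;
};

extern struct cpu_tlb_fns cpu_tlb;      /* points at v7wbi_tlb_fns on ARMv7 */

/* Illustrative caller: on this processor the indirect call lands in
 * v7wbi_flush_kern_tlb_range above. */
static inline void example_flush_kernel_range(unsigned long start,
                                              unsigned long end)
{
        cpu_tlb.flush_kern_range(start, end);
}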

        __INIT

        /* define struct cpu_tlb_fns (see <asm/tlbflush.h> and proc-macros.S) */
        define_tlb_functions v7wbi, v7wbi_tlb_flags_up, flags_smp=v7wbi_tlb_flags_smp
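
The define_tlb_functions macro (from proc-macros.S) emits a v7wbi_tlb_fns
object laid out to match struct cpu_tlb_fns. Reusing the struct from the
sketch above, and only as an approximation (the real table is assembled
.long entries, with ALT_SMP/ALT_UP fixups selecting the flags word at boot),
it corresponds to:

void v7wbi_flush_user_tlb_range(unsigned long, unsigned long,
                                struct vm_area_struct *);
void v7wbi_flush_kern_tlb_range(unsigned long, unsigned long);

/* Approximate C view of the emitted table; not literal kernel code. */
const struct cpu_tlb_fns v7wbi_tlb_fns_model = {
        .flush_user_range = v7wbi_flush_user_tlb_range,
        .flush_kern_range = v7wbi_flush_kern_tlb_range,
        .tlb_flags        = 0,  /* v7wbi_tlb_flags_smp, or _up on UP builds */
};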
