arch/arm64/kernel/relocate_kernel.S

/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * kexec for arm64
 *
 * Copyright (C) Linaro.
 * Copyright (C) Huawei Futurewei Technologies.
 */

#include <linux/kexec.h>
#include <linux/linkage.h>

#include <asm/assembler.h>
#include <asm/kexec.h>
#include <asm/page.h>
#include <asm/sysreg.h>

/*
 * arm64_relocate_new_kernel - Put a 2nd stage image in place and boot it.
 *
 * The memory that the old kernel occupies may be overwritten when copying the
 * new image to its final location.  To ensure that the
 * arm64_relocate_new_kernel routine which does that copy is not overwritten,
 * all code and data needed by arm64_relocate_new_kernel must be between the
 * arm64_relocate_new_kernel entry point and the .Lcopy_end label below.  The
 * machine_kexec() routine will copy arm64_relocate_new_kernel to the kexec
 * control_code_page, a special page which has been set up to be preserved
 * during the copy operation.
 */
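
/*
 * The image to relocate is described by a kimage entry list: an array of
 * 64-bit words, each holding a page-aligned physical address with IND_*
 * flag bits in its low bits.  IND_DESTINATION sets the current copy
 * destination, IND_INDIRECTION points at the next page of entries,
 * IND_SOURCE names a source page to copy, and IND_DONE terminates the
 * list.
 */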
ENTRY(arm64_relocate_new_kernel)

        /* Setup the list loop variables. */
        mov     x18, x2                         /* x18 = dtb address */
        mov     x17, x1                         /* x17 = kimage_start */
        mov     x16, x0                         /* x16 = kimage_head */
        raw_dcache_line_size x15, x0            /* x15 = dcache line size */
        mov     x14, xzr                        /* x14 = entry ptr */
        mov     x13, xzr                        /* x13 = copy dest */

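        /*
         * If this code was entered at EL2, disable the EL2 MMU and
         * caches as well, so that both the copy below and the entry
         * into the new image happen with the MMU off, as the arm64
         * boot protocol expects.
         */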
        /* Clear the sctlr_el2 flags. */
        mrs     x0, CurrentEL
        cmp     x0, #CurrentEL_EL2
        b.ne    1f
        mrs     x0, sctlr_el2
        ldr     x1, =SCTLR_ELx_FLAGS
        bic     x0, x0, x1
        pre_disable_mmu_workaround
        msr     sctlr_el2, x0
        isb
1:

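        /*
         * An IND_DONE entry at the head of the list means every segment
         * is already at its final location (e.g. a crash kernel loaded
         * in place), so the copy loop can be skipped entirely.
         */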
        /* Check if the new image needs relocation. */
        tbnz    x16, IND_DONE_BIT, .Ldone

.Lloop:
        and     x12, x16, PAGE_MASK             /* x12 = addr */

        /* Test the entry flags. */
.Ltest_source:
        tbz     x16, IND_SOURCE_BIT, .Ltest_indirection

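        /*
         * The destination page is written below with the MMU and
         * D-cache off.  Invalidate any stale cache lines covering it
         * first, so that they cannot later be evicted on top of the
         * freshly copied data or be hit instead of it once the new
         * kernel enables its caches.
         */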
        /* Invalidate dest page to PoC. */
        mov     x0, x13
        add     x20, x0, #PAGE_SIZE
        sub     x1, x15, #1
        bic     x0, x0, x1
2:      dc      ivac, x0
        add     x0, x0, x15
        cmp     x0, x20
        b.lo    2b
        dsb     sy

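        /*
         * copy_page advances the dest/src pointers it is given and
         * clobbers the temporary registers, so hand it scratch copies
         * of the dest (x13) and source (x12) addresses.
         */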
        mov     x20, x13
        mov     x21, x12
        copy_page x20, x21, x0, x1, x2, x3, x4, x5, x6, x7

        /* dest += PAGE_SIZE */
        add     x13, x13, PAGE_SIZE
        b       .Lnext

.Ltest_indirection:
        tbz     x16, IND_INDIRECTION_BIT, .Ltest_destination

        /* ptr = addr */
        mov     x14, x12
        b       .Lnext

.Ltest_destination:
        tbz     x16, IND_DESTINATION_BIT, .Lnext

        /* dest = addr */
        mov     x13, x12

.Lnext:
        /* entry = *ptr++ */
        ldr     x16, [x14], #8

        /* while (!(entry & DONE)) */
        tbz     x16, IND_DONE_BIT, .Lloop

.Ldone:
        /* wait for writes from copy_page to finish */
        dsb     nsh
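        /*
         * The I-cache may hold stale lines for the pages that were just
         * written, so invalidate it before branching into them.
         */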
        ic      iallu
        dsb     nsh
        isb

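        /*
         * Enter the new image with the register state the arm64 boot
         * protocol expects: x0 = physical address of the device tree
         * blob, x1-x3 = 0 (reserved for future use).
         */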
        /* Start new image. */
        mov     x0, x18
        mov     x1, xzr
        mov     x2, xzr
        mov     x3, xzr
        br      x17

ENDPROC(arm64_relocate_new_kernel)

.ltorg

.align 3        /* To keep the 64-bit values below naturally aligned. */

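/*
 * .Lcopy_end marks the end of the code and data that machine_kexec()
 * copies to the control page; the .org below makes the build fail if
 * that copy would ever grow beyond KEXEC_CONTROL_PAGE_SIZE.
 */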
.Lcopy_end:
.org    KEXEC_CONTROL_PAGE_SIZE

/*
 * arm64_relocate_new_kernel_size - Number of bytes to copy to the
 * control_code_page.
 */
.globl arm64_relocate_new_kernel_size
arm64_relocate_new_kernel_size:
        .quad   .Lcopy_end - arm64_relocate_new_kernel
