arch/arm64/kvm/hyp-init.S

/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2012,2013 - ARM Ltd
 * Author: Marc Zyngier <marc.zyngier@arm.com>
 */

#include <linux/linkage.h>

#include <asm/assembler.h>
#include <asm/kvm_arm.h>
#include <asm/kvm_mmu.h>
#include <asm/pgtable-hwdef.h>
#include <asm/sysreg.h>
#include <asm/virt.h>

        .text
        .pushsection    .hyp.idmap.text, "ax"

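        /*
         * EL2 vector table. VBAR_EL2 ignores its bottom 11 bits, so the
         * table must be 2kB aligned (hence the .align 11 below), and each
         * ventry slot is padded out to 0x80 bytes. This code lives in
         * .hyp.idmap.text so that it is identity-mapped in the HYP page
         * tables and keeps executing while the EL2 MMU is switched on or
         * off.
         */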
        .align  11

ENTRY(__kvm_hyp_init)
        ventry  __invalid               // Synchronous EL2t
        ventry  __invalid               // IRQ EL2t
        ventry  __invalid               // FIQ EL2t
        ventry  __invalid               // Error EL2t

        ventry  __invalid               // Synchronous EL2h
        ventry  __invalid               // IRQ EL2h
        ventry  __invalid               // FIQ EL2h
        ventry  __invalid               // Error EL2h

        ventry  __do_hyp_init           // Synchronous 64-bit EL1
        ventry  __invalid               // IRQ 64-bit EL1
        ventry  __invalid               // FIQ 64-bit EL1
        ventry  __invalid               // Error 64-bit EL1

        ventry  __invalid               // Synchronous 32-bit EL1
        ventry  __invalid               // IRQ 32-bit EL1
        ventry  __invalid               // FIQ 32-bit EL1
        ventry  __invalid               // Error 32-bit EL1

__invalid:
        b       .

        /*
         * x0: HYP pgd
         * x1: HYP stack
         * x2: HYP vectors
         * x3: per-CPU offset
         */
__do_hyp_init:
        /* Check for a stub HVC call */
        cmp     x0, #HVC_STUB_HCALL_NR
        b.lo    __kvm_handle_stub_hvc
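        /*
         * Calling convention: x0 values below HVC_STUB_HCALL_NR are stub
         * hypercall numbers and are handed to __kvm_handle_stub_hvc; any
         * larger value is treated as the physical address of the HYP pgd
         * and we fall through into the full EL2 initialisation.
         */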

        phys_to_ttbr x4, x0
alternative_if ARM64_HAS_CNP
        orr     x4, x4, #TTBR_CNP_BIT
alternative_else_nop_endif
        msr     ttbr0_el2, x4
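        /*
         * phys_to_ttbr packs the pgd's physical address into the TTBR
         * BADDR layout (folding the upper address bits into TTBR[5:2]
         * when 52-bit PAs are configured). With ARM64_HAS_CNP, the CnP
         * bit (bit 0) marks these tables as common-not-private, so TLB
         * entries may be shared between CPUs using the same tables.
         */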

        mrs     x4, tcr_el1
        ldr     x5, =TCR_EL2_MASK
        and     x4, x4, x5
        mov     x5, #TCR_EL2_RES1
        orr     x4, x4, x5

        /*
         * The ID map may be configured to use an extended virtual address
         * range. This is only the case if system RAM is out of range for the
         * currently configured page size and VA_BITS, in which case we will
         * also need the extended virtual range for the HYP ID map, or we won't
         * be able to enable the EL2 MMU.
         *
         * However, at EL2, there is only one TTBR register, and we can't switch
         * between translation tables *and* update TCR_EL2.T0SZ at the same
         * time. Bottom line: we need to use the extended range with *both* our
         * translation tables.
         *
         * So use the same T0SZ value we use for the ID map.
         */
        ldr_l   x5, idmap_t0sz
        bfi     x4, x5, TCR_T0SZ_OFFSET, TCR_TxSZ_WIDTH
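        /*
         * The bfi copies TCR_TxSZ_WIDTH bits of x5 (idmap_t0sz) into x4 at
         * TCR_T0SZ_OFFSET, so TCR_EL2.T0SZ now matches the T0SZ used for
         * the kernel's ID map.
         */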

        /*
         * Set the PS bits in TCR_EL2.
         */
        tcr_compute_pa_size x4, #TCR_EL2_PS_SHIFT, x5, x6
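        /*
         * tcr_compute_pa_size reads ID_AA64MMFR0_EL1.PARange, clamps it to
         * the largest physical address size the kernel supports, and
         * inserts the result into the PS field of x4 (x5/x6 are scratch).
         */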

        msr     tcr_el2, x4

        mrs     x4, mair_el1
        msr     mair_el2, x4
        isb

        /* Invalidate any stale TLB entries left behind by the bootloader */
        tlbi    alle2
        dsb     sy

        /*
         * Preserve all the RES1 bits while setting the default flags,
         * as well as the EE bit on BE. Drop the A flag since the compiler
         * is allowed to generate unaligned accesses.
         */
        ldr     x4, =(SCTLR_EL2_RES1 | (SCTLR_ELx_FLAGS & ~SCTLR_ELx_A))
CPU_BE( orr     x4, x4, #SCTLR_ELx_EE)
        msr     sctlr_el2, x4
        isb
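        /*
         * The EL2 MMU and caches are now on. Execution carries on safely
         * because this code is part of the HYP idmap, i.e. it is mapped at
         * its own physical address in the page tables we just installed.
         */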

        /* Set the stack and new vectors */
        kern_hyp_va     x1
        mov     sp, x1
        msr     vbar_el2, x2

        /* Set tpidr_el2 for use by HYP */
        msr     tpidr_el2, x3
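        /*
         * tpidr_el2 holds this CPU's per-CPU offset so that HYP code can
         * locate its per-CPU data, mirroring the role tpidr_el1 plays for
         * the kernel at EL1.
         */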
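        /*
         * ELR_EL2 and SPSR_EL2 still hold the EL1 return context from the
         * HVC that brought us here, so the ERET below drops back to the
         * caller with EL2 now fully set up to run the HYP code.
         */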
        /* Hello, World! */
        eret
ENDPROC(__kvm_hyp_init)

ENTRY(__kvm_handle_stub_hvc)
        cmp     x0, #HVC_SOFT_RESTART
        b.ne    1f

        /* This is where we're about to jump, staying at EL2 */
        msr     elr_el2, x1
        mov     x0, #(PSR_F_BIT | PSR_I_BIT | PSR_A_BIT | PSR_D_BIT | PSR_MODE_EL2h)
        msr     spsr_el2, x0
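        /*
         * The PSTATE written to SPSR_EL2 keeps all DAIF exceptions masked
         * and selects EL2h, so the ERET at the end of the reset path stays
         * at EL2 and lands on the address loaded into ELR_EL2 above.
         */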

        /* Shuffle the arguments, and don't come back */
        mov     x0, x2
        mov     x1, x3
        mov     x2, x4
        b       reset

1:      cmp     x0, #HVC_RESET_VECTORS
        b.ne    1f

        /*
         * Set the HVC_RESET_VECTORS return value (0) before entering the
         * common reset path, so that x0-x2 are not clobbered if we came
         * here via HVC_SOFT_RESTART.
         */
        mov     x0, xzr
reset:
        /*
         * Reset kvm back to the hyp stub. Do not clobber x0-x4 in
         * case we came here via HVC_SOFT_RESTART.
         */
        mrs     x5, sctlr_el2
        ldr     x6, =SCTLR_ELx_FLAGS
        bic     x5, x5, x6              // Clear SCTLR_ELx.M, etc.
        pre_disable_mmu_workaround
        msr     sctlr_el2, x5
        isb
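        /*
         * EL2 is now running with the MMU and caches off, which is the
         * state the hyp stub expects. pre_disable_mmu_workaround inserts
         * any CPU erratum workaround needed right before the MMU goes off.
         */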

        /* Install stub vectors */
        adr_l   x5, __hyp_stub_vectors
        msr     vbar_el2, x5
        eret

1:      /* Bad stub call */
        ldr     x0, =HVC_STUB_ERR
        eret

ENDPROC(__kvm_handle_stub_hvc)

        .ltorg

        .popsection
