/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2012 - Virtual Open Systems and Columbia University
 * Author: Christoffer Dall <c.dall@virtualopensystems.com>
 */

#include <linux/linkage.h>
#include <asm/assembler.h>
#include <asm/unified.h>
#include <asm/asm-offsets.h>
#include <asm/kvm_asm.h>
#include <asm/kvm_arm.h>
#include <asm/kvm_mmu.h>
#include <asm/virt.h>

/********************************************************************
 * Hypervisor initialization
 *   - should be called with:
 *       r0 = top of Hyp stack (kernel VA)
 *       r1 = pointer to hyp vectors
 *       r2,r3 = Hypervisor pgd pointer
 *
 * The init scenario is:
 * - We jump in HYP with 3 parameters: runtime HYP pgd, runtime stack,
 *   runtime vectors
 * - Invalidate TLBs
 * - Set stack and vectors
 * - Setup the page tables
 * - Enable the MMU
 * - Profit! (or eret, if you only care about the code).
 *
 * Another possibility is to get a HYP stub hypercall.
 * We discriminate between the two by checking if r0 contains a value
 * that is less than HVC_STUB_HCALL_NR.
 */

	.text
	.pushsection	.hyp.idmap.text,"ax"
	.align 5
__kvm_hyp_init:
	.globl __kvm_hyp_init

	@ Hyp-mode exception vector
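	@ Only the sixth entry (offset 0x14, the one taken for HVC calls
	@ and traps coming from outside Hyp mode, including the kernel's
	@ HVC) is live; the other entries simply spin in place, as no
	@ other exception is expected during init.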
	W(b)	.
	W(b)	.
	W(b)	.
	W(b)	.
	W(b)	.
	W(b)	__do_hyp_init
	W(b)	.
	W(b)	.

__do_hyp_init:
	@ Check for a stub hypercall
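	@ (r0 holds either a small stub hypercall number or the Hyp
	@ stack top, a kernel VA well above HVC_STUB_HCALL_NR, so an
	@ unsigned compare is enough to tell the two apart.)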
	cmp	r0, #HVC_STUB_HCALL_NR
	blo	__kvm_handle_stub_hvc

	@ Set stack pointer
	mov	sp, r0

	@ Set HVBAR to point to the HYP vectors
	mcr	p15, 4, r1, c12, c0, 0	@ HVBAR

	@ Set the HTTBR to point to the hypervisor PGD pointer passed
	mcrr	p15, 4, rr_lo_hi(r2, r3), c2
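	@ (rr_lo_hi() orders the register pair so that the 64-bit PGD
	@ address is assembled correctly on both little- and big-endian
	@ kernels.)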

	@ Set the HTCR to the same shareability and cacheability
	@ settings as the non-secure TTBCR and with T0SZ == 0.
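	@ (With the long-descriptor format used by the Hyp translation
	@ regime, T0SZ == 0 gives the full 32-bit input address range.)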
	mrc	p15, 4, r0, c2, c0, 2	@ HTCR
	ldr	r2, =HTCR_MASK
	bic	r0, r0, r2
	mrc	p15, 0, r1, c2, c0, 2	@ TTBCR
	and	r1, r1, #(HTCR_MASK & ~TTBCR_T0SZ)
	orr	r0, r0, r1
	mcr	p15, 4, r0, c2, c0, 2	@ HTCR

	@ Use the same memory attributes for hyp. accesses as the kernel
	@ (copy MAIRx to HMAIRx).
	mrc	p15, 0, r0, c10, c2, 0
	mcr	p15, 4, r0, c10, c2, 0
	mrc	p15, 0, r0, c10, c2, 1
	mcr	p15, 4, r0, c10, c2, 1

	@ Invalidate the stale TLBs left behind by the bootloader
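	@ (TLBIALLH ignores the value in r0; the dsb makes sure the
	@ invalidation has completed before the MMU is enabled below.)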
	mcr	p15, 4, r0, c8, c7, 0	@ TLBIALLH
	dsb	ish

	@ Set the HSCTLR to:
	@  - ARM/THUMB exceptions: Kernel config (Thumb-2 kernel)
	@  - Endianness: Kernel config
	@  - Fast Interrupt Features: Kernel config
	@  - Write permission implies XN: disabled
	@  - Instruction cache: enabled
	@  - Data/Unified cache: enabled
	@  - MMU: enabled (this code must be run from an identity mapping)
	mrc	p15, 4, r0, c1, c0, 0	@ HSCTLR
	ldr	r2, =HSCTLR_MASK
	bic	r0, r0, r2
	mrc	p15, 0, r1, c1, c0, 0	@ SCTLR
	ldr	r2, =(HSCTLR_EE | HSCTLR_FI | HSCTLR_I | HSCTLR_C)
	and	r1, r1, r2
 ARM(	ldr	r2, =(HSCTLR_M)				)
 THUMB(	ldr	r2, =(HSCTLR_M | HSCTLR_TE)		)
	orr	r1, r1, r2
	orr	r0, r0, r1
	mcr	p15, 4, r0, c1, c0, 0	@ HSCTLR
	isb

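	@ Return to the caller of the HVC, now running with the HYP MMU,
	@ stack and vectors set up.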
	eret

ENTRY(__kvm_handle_stub_hvc)
	cmp	r0, #HVC_SOFT_RESTART
	bne	1f

	/* The target is expected in r1 */
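	/*
	 * Set ELR_hyp/SPSR so that the eret at "exit" below enters the
	 * target in HYP mode, once the "reset" sequence has turned the
	 * MMU off and restored the stub vectors.
	 */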
	msr	ELR_hyp, r1
	mrs	r0, cpsr
	bic	r0, r0, #MODE_MASK
	orr	r0, r0, #HYP_MODE
THUMB(	orr	r0, r0, #PSR_T_BIT	)
	msr	spsr_cxsf, r0
	b	reset

1:	cmp	r0, #HVC_RESET_VECTORS
	bne	1f
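	@ HVC_RESET_VECTORS falls through to "reset": MMU off, stub
	@ vectors back in HVBAR, then return 0 to the caller.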

reset:
	/* We're now in idmap, disable MMU */
	mrc	p15, 4, r1, c1, c0, 0	@ HSCTLR
	ldr	r0, =(HSCTLR_M | HSCTLR_A | HSCTLR_C | HSCTLR_I)
	bic	r1, r1, r0
	mcr	p15, 4, r1, c1, c0, 0	@ HSCTLR

	/*
	 * Install stub vectors, using ardb's VA->PA trick.
	 */
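	@ adr yields the run-time (physical, since this code runs from
	@ the idmap) address of the "0:" label; movw/movt add the
	@ link-time offset from that label to __hyp_stub_vectors, giving
	@ the stub vectors' physical address for HVBAR.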
0:	adr	r0, 0b					@ PA(0)
	movw	r1, #:lower16:__hyp_stub_vectors - 0b	@ VA(stub) - VA(0)
	movt	r1, #:upper16:__hyp_stub_vectors - 0b
	add	r1, r1, r0				@ PA(stub)
	mcr	p15, 4, r1, c12, c0, 0			@ HVBAR
	b	exit

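	@ Unknown stub hypercall: report HVC_STUB_ERR to the caller.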
1:	ldr	r0, =HVC_STUB_ERR
	eret

exit:
	mov	r0, #0
	eret
ENDPROC(__kvm_handle_stub_hvc)

	.ltorg

	.globl __kvm_hyp_init_end
__kvm_hyp_init_end:

	.popsection