2 * Copyright (c) 2018-2021 Maxime Villard, m00nbsd.net
5 * This code is part of the NVMM hypervisor.
7 * Redistribution and use in source and binary forms, with or without
8 * modification, are permitted provided that the following conditions
10 * 1. Redistributions of source code must retain the above copyright
11 * notice, this list of conditions and the following disclaimer.
12 * 2. Redistributions in binary form must reproduce the above copyright
13 * notice, this list of conditions and the following disclaimer in the
14 * documentation and/or other materials provided with the distribution.
16 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
17 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
18 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
19 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
20 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
21 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
22 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
23 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
24 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
25 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
29 #if defined(__NetBSD__)
30 /* Override user-land alignment before including asm.h */
31 #define ALIGN_DATA .align 8
32 #define ALIGN_TEXT .align 16,0x90
33 #define _ALIGN_TEXT ALIGN_TEXT
36 #include <machine/asm.h>
37 #elif defined(__DragonFly__)
39 #include <machine/asmacros.h>
49 #define VMCS_HOST_RSP 0x00006C14
51 #define HOST_SAVE_GPRS \
59 #define HOST_RESTORE_GPRS \
67 #define HOST_SAVE_RAX \
70 #define HOST_RESTORE_RAX \
73 #define HOST_SAVE_LDT \
77 #define HOST_RESTORE_LDT \
82 * We don't save RAX (done manually), but we do restore it.
/*
 * GUEST_SAVE_GPRS(reg): spill the guest's general-purpose registers into
 * the NVMM GPR state area whose base address is held in `reg`.  Each
 * register is stored at byte offset (NVMM_X64_GPR_<name> * 8) from that
 * base.  %rax is intentionally absent: the caller holds the state pointer
 * in %rax and saves the guest's RAX manually (see the comment above and
 * the #VMEXIT path).  %rsp is not handled here either — presumably it is
 * kept in the VMCS; confirm against the VMCS field accesses elsewhere in
 * this file.  Clobbers: nothing (stores only).
 */
#define GUEST_SAVE_GPRS(reg) \
	movq %rcx,(NVMM_X64_GPR_RCX * 8)(reg) ;\
	movq %rdx,(NVMM_X64_GPR_RDX * 8)(reg) ;\
	movq %rbx,(NVMM_X64_GPR_RBX * 8)(reg) ;\
	movq %rbp,(NVMM_X64_GPR_RBP * 8)(reg) ;\
	movq %rsi,(NVMM_X64_GPR_RSI * 8)(reg) ;\
	movq %rdi,(NVMM_X64_GPR_RDI * 8)(reg) ;\
	movq %r8,(NVMM_X64_GPR_R8 * 8)(reg) ;\
	movq %r9,(NVMM_X64_GPR_R9 * 8)(reg) ;\
	movq %r10,(NVMM_X64_GPR_R10 * 8)(reg) ;\
	movq %r11,(NVMM_X64_GPR_R11 * 8)(reg) ;\
	movq %r12,(NVMM_X64_GPR_R12 * 8)(reg) ;\
	movq %r13,(NVMM_X64_GPR_R13 * 8)(reg) ;\
	movq %r14,(NVMM_X64_GPR_R14 * 8)(reg) ;\
	movq %r15,(NVMM_X64_GPR_R15 * 8)(reg)
/*
 * GUEST_RESTORE_GPRS(reg): load the guest's general-purpose registers
 * from the NVMM GPR state area whose base address is held in `reg`,
 * mirroring GUEST_SAVE_GPRS.  Unlike the save path, RAX *is* restored
 * here — and it must be restored LAST: the callers in this file invoke
 * this macro as GUEST_RESTORE_GPRS(%rax), so overwriting %rax any
 * earlier would destroy the base pointer used by the remaining loads.
 * After expansion, `reg` (and every other GPR) holds guest state; no
 * host value survives.
 */
#define GUEST_RESTORE_GPRS(reg) \
	movq (NVMM_X64_GPR_RCX * 8)(reg),%rcx ;\
	movq (NVMM_X64_GPR_RDX * 8)(reg),%rdx ;\
	movq (NVMM_X64_GPR_RBX * 8)(reg),%rbx ;\
	movq (NVMM_X64_GPR_RBP * 8)(reg),%rbp ;\
	movq (NVMM_X64_GPR_RSI * 8)(reg),%rsi ;\
	movq (NVMM_X64_GPR_RDI * 8)(reg),%rdi ;\
	movq (NVMM_X64_GPR_R8 * 8)(reg),%r8 ;\
	movq (NVMM_X64_GPR_R9 * 8)(reg),%r9 ;\
	movq (NVMM_X64_GPR_R10 * 8)(reg),%r10 ;\
	movq (NVMM_X64_GPR_R11 * 8)(reg),%r11 ;\
	movq (NVMM_X64_GPR_R12 * 8)(reg),%r12 ;\
	movq (NVMM_X64_GPR_R13 * 8)(reg),%r13 ;\
	movq (NVMM_X64_GPR_R14 * 8)(reg),%r14 ;\
	movq (NVMM_X64_GPR_R15 * 8)(reg),%r15 ;\
	movq (NVMM_X64_GPR_RAX * 8)(reg),%rax
119 * %rdi = VA of guest GPR state
122 /* Save the Host GPRs. */
125 /* Save the Host LDT. */
128 /* Save the Host RAX. */
132 /* Save the Host RSP. */
133 movq $VMCS_HOST_RSP,%rdi
137 /* Restore the Guest GPRs. */
138 GUEST_RESTORE_GPRS(%rax)
152 * %rdi = VA of guest GPR state
155 /* Save the Host GPRs. */
158 /* Save the Host LDT. */
161 /* Save the Host RAX. */
165 /* Save the Host RSP. */
166 movq $VMCS_HOST_RSP,%rdi
170 /* Restore the Guest GPRs. */
171 GUEST_RESTORE_GPRS(%rax)
185 * The CPU jumps here after a #VMEXIT.
187 ENTRY(vmx_resume_rip)
188 /* Save the Guest GPRs. RAX done manually. */
191 GUEST_SAVE_GPRS(%rax)
193 movq %rbx,(NVMM_X64_GPR_RAX * 8)(%rax)
196 /* Restore the Host LDT. */
199 /* Restore the Host GPRs. */
206 ENTRY(vmx_insn_failvalid)
207 movq $.Lvmx_validstr,%rdi
209 END(vmx_insn_failvalid)
211 ENTRY(vmx_insn_failinvalid)
212 movq $.Lvmx_invalidstr,%rdi
214 END(vmx_insn_failinvalid)
219 .string "VMX fail valid\0"
221 .string "VMX fail invalid\0"