// Return codes for the vmlaunch wrappers below, matching the two VMX
// instruction failure modes: VMfailInvalid (CF set, no current VMCS)
// and VMfailValid (ZF set, error code stored in the VMCS).
#define VMX_FAIL_INVALID 1
#define VMX_FAIL_VALID 2
// VMCS field encodings (Intel SDM Vol. 3, Appendix B) for the host
// RSP/RIP values that the CPU loads on every VM exit.
#define VMCS_HOST_RSP 0x00006C14
#define VMCS_HOST_RIP 0x00006C16
11 #if defined(__V3_64BIT__)
// Store the guest's general-purpose registers into the vm_regs struct
// whose address is read from the `location` memory operand (slots are
// 8 bytes apart). %rax is used as the scratch pointer, so it must have
// been pushed before this macro expands — see the stack-layout notes at
// v3_vmx_exit_handler below. Several slots (0, 8, 24, 64, 72) are
// filled on lines elided from this view.
#define save_registers(location) \
    movq location, %rax; \
    movq %rbp, 16(%rax); \
    movq %rbx, 32(%rax); \
    movq %rdx, 40(%rax); \
    movq %rcx, 48(%rax); \
    movq %rbx, 56(%rax); /* NOTE(review): %rbx is presumably reloaded (with the saved guest %rsp, as in the 32-bit variant) on an elided line before this store — confirm */ \
    movq %r10, 80(%rax); \
    movq %r11, 88(%rax); \
    movq %r12, 96(%rax); \
    movq %r13, 104(%rax); \
    movq %r14, 112(%rax); \
    movq %r15, 120(%rax); \
// Inverse of save_registers: reload guest GPRs from the vm_regs struct
// addressed by `location`. The pointer load into %rax and most slot
// reloads occur on lines elided from this view.
#define restore_registers(location) \
    mov 104(%rax), %r13; \
    mov 112(%rax), %r14; \
    mov 120(%rax), %r15; \
    movq 56(%rax), %rbx; /* NOTE(review): slot 56 held a stack-derived value at save time; here it lands in %rbx — confirm intent against the elided lines */ \
// Capture guest control-register state into the struct addressed by the
// `location` memory operand; %rax is the scratch pointer (the preceding
// pushes and the individual field stores are on elided lines).
#define save_ctrl_regs(location) \
    movq location, %rax; \
// Inverse of save_ctrl_regs: reload guest control-register state from
// the struct addressed by `location` (field loads are on elided lines).
#define restore_ctrl_regs(location) \
    movq location, %rax; \
.globl v3_vmx_exit_handler
// 64-bit VM-exit entry point: HOST_RIP in the VMCS points here, so the
// CPU lands on this label with the stack exactly as vmlaunch left it
// (vm_regs ptr and ctrl_regs ptr on top).
// the save_* argument is a macro expansion; it has to jump past any pushes in the macro
// stack: vm_regs ptr, ctrl_regs_ptr
// save registers macro stack: vm_regs ptr, ctrl_regs ptr, pushed rax
// save_ctrl_regs macro stack: vm_regs ptr, ctrl_regs_ptr, pushed rax, pushed rbx
// Both macros jump past 2 saved values to reach their pointers, so both are 16(rsp)
save_registers(16(%rsp));
save_ctrl_regs(16(%rsp));
// Hand off to the C exit dispatcher (its SysV argument registers are
// loaded on lines elided from this view).
call v3_handle_vmx_exit
// NOTE(review): assumes %rdx = ctrl_regs ptr and %rdi = vm_regs ptr were
// reloaded after the call on elided lines — confirm before editing.
restore_ctrl_regs(%rdx);
restore_registers(%rdi);
.globl v3_vmx_vmlaunch
// 64-bit launch path, SysV AMD64 ABI:
// vm_regs = %rdi, guest_info * = %rsi, ctrl_regs = %rdx
// Program the VMCS so VM exits resume at v3_vmx_exit_handler on the
// current host stack (the VMWRITE instructions themselves are on lines
// elided from this view).
movq $VMCS_HOST_RSP, %rbx
movq $v3_vmx_exit_handler, %rax
movq $VMCS_HOST_RIP, %rbx
// Load guest control-register and GPR state immediately before entry.
restore_ctrl_regs(%rdx);
restore_registers(%rdi);
// Failure paths: VMLAUNCH signals VMfailValid (ZF) or VMfailInvalid (CF);
// map each onto this file's return codes. VMM_FAILURE is defined
// elsewhere in the project.
movq $VMX_FAIL_VALID, %rax
movq $VMX_FAIL_INVALID, %rax
movq $VMM_FAILURE, %rax
// 32-bit guest GPR save: store registers into the vm_regs struct whose
// address is read from the `location` memory operand (8-byte-strided
// slots, matching the 64-bit layout above). %eax is the scratch
// pointer; 8(%esp) supplies the stack-saved value stored at slot 56.
// BUG FIX: the macro was defined as "save_resgisters" (transposed
// letters) while the 32-bit exit handler below invokes
// save_registers(4(%esp)); renamed so the invocation expands.
#define save_registers(location) \
    movl location, %eax; \
    movl %esi, 8(%eax); \
    movl %ebp, 16(%eax); \
    movl %ebx, 32(%eax); \
    movl %edx, 40(%eax); \
    movl %ecx, 48(%eax); \
    movl 8(%esp), %ebx; \
    movl %ebx, 56(%eax); \
// Inverse of the 32-bit save macro: reload guest GPRs from the vm_regs
// struct addressed by `location`, using %eax as the scratch pointer
// (remaining slot reloads are on lines elided from this view).
#define restore_registers(location) \
    movl location, %eax; \
    movl 8(%eax), %esi; \
    movl 16(%eax), %ebp; \
    movl 32(%eax), %ebx; \
    movl 40(%eax), %edx; \
    movl 48(%eax), %ecx; \
.globl v3_vmx_exit_handler
// 32-bit VM-exit entry point: vm_regs ptr sits at 4(%esp) on entry
// (one saved value between %esp and the pointer, per the macro's
// fixed-offset convention).
save_registers(4(%esp))
call v3_handle_vmx_exit
// BUG FIX: this read %rdi, a 64-bit register that cannot be encoded in
// 32-bit code (assembly error). The 32-bit vmlaunch path below restores
// via %edi, so use %edi here as well.
// NOTE(review): assumes %edi still holds the vm_regs pointer at this
// point — confirm against the lines elided from this view.
restore_registers(%edi)
.globl v3_vmx_vmlaunch
// 32-bit launch path (cdecl; arguments on the stack). Program the VMCS
// so VM exits resume at v3_vmx_exit_handler on the current host stack
// (the VMWRITE instructions are on lines elided from this view).
movl $VMCS_HOST_RSP, %ebx
movl $v3_vmx_exit_handler, %eax
movl $VMCS_HOST_RIP, %ebx
// Load guest GPR state immediately before entry.
restore_registers(%edi)
// Failure paths: VMLAUNCH signals VMfailValid (ZF) or VMfailInvalid (CF).
movl $VMX_FAIL_VALID, %eax
// BUG FIX: was "$MVX_FAIL_INVALID" (transposed letters) — an undefined
// symbol; the constant defined at the top of this file, and used by the
// 64-bit path, is VMX_FAIL_INVALID.
movl $VMX_FAIL_INVALID, %eax
// VMM_FAILURE is defined elsewhere in the project.
movl $VMM_FAILURE, %eax