4 #define VMX_FAIL_INVALID 1 /* VMfailInvalid: VMX instruction failed with no current VMCS */
5 #define VMX_FAIL_VALID 2 /* VMfailValid: VMX instruction failed, error code stored in the VMCS */
8 #define VMCS_HOST_RSP 0x00006C14 /* VMCS field encoding: host RSP, loaded on VM exit */
9 #define VMCS_HOST_RIP 0x00006C16 /* VMCS field encoding: host RIP, loaded on VM exit */
11 #if defined(__V3_64BIT__)

/* save_registers(location): spill the general-purpose registers into the
 * GPR save area whose pointer is read from `location`, 8 bytes per slot
 * (offset = slot index * 8).  %rax is used as the base pointer, so %rax
 * itself cannot be preserved through this macro.
 * NOTE(review): several interior lines of this macro are not visible in
 * this chunk; the stores below are only the visible subset. */
13 #define save_registers(location) \
15 movq location, %rax; /* %rax = base of the save area */ \
18 movq %rbp, 16(%rax); \
20 movq %rbx, 32(%rax); \
21 movq %rdx, 40(%rax); \
22 movq %rcx, 48(%rax); \
25 movq %rbx, 56(%rax); /* NOTE(review): second store of %rbx -- unseen lines presumably reload %rbx first (cf. 32-bit variant, which stores 8(%esp) into slot 56) -- TODO confirm */ \
30 movq %r10, 80(%rax); \
31 movq %r11, 88(%rax); \
32 movq %r12, 96(%rax); \
33 movq %r13, 104(%rax); \
34 movq %r14, 112(%rax); \
35 movq %r15, 120(%rax); \
/* restore_registers(location): inverse of save_registers -- reload the
 * GPRs from the save area, same 8-bytes-per-slot layout.
 * NOTE(review): the base-pointer load into %rax and most slot reloads
 * fall outside this visible chunk. */
39 #define restore_registers(location) \
53 mov 104(%rax), %r13; \
54 mov 112(%rax), %r14; \
55 mov 120(%rax), %r15; \
57 movq 56(%rax), %rbx; /* slot 56 back into %rbx (see matching store in save_registers) */ \
/* VM-exit landing point (64-bit): VMCS HOST_RIP is programmed to resume
 * here.  Saves the guest GPRs, calls the C exit dispatcher, then restores
 * register state.  NOTE(review): the lines between these instructions are
 * not visible in this chunk. */
99 .globl v3_vmx_exit_handler
101 save_registers(8(%rsp)); /* pointer to the guest GPR area sits at 8(%rsp) -- TODO confirm against unseen setup lines */
107 call v3_handle_vmx_exit
118 restore_registers(%rdi); /* NOTE(review): %rdi presumably reloaded with the GPR-area pointer after the call (lines not shown) */
/* v3_vmx_vmlaunch (64-bit): program VMCS host RSP/RIP so that a VM exit
 * resumes at v3_vmx_exit_handler, restore the guest GPRs, and launch the
 * guest.  Error paths return VMX_FAIL_VALID / VMX_FAIL_INVALID /
 * VMM_FAILURE in %rax.  NOTE(review): the vmwrite/vmlaunch instructions
 * themselves fall outside this visible chunk. */
128 .globl v3_vmx_vmlaunch
137 movq $VMCS_HOST_RSP, %rbx /* VMCS field encoding; presumably vmwrite'd with the current %rsp (lines not shown) -- TODO confirm */
142 movq $v3_vmx_exit_handler, %rax
143 movq $VMCS_HOST_RIP, %rbx /* VM exits will resume at v3_vmx_exit_handler */
148 restore_registers(%rdi);
159 movq $VMX_FAIL_VALID, %rax /* launch failed with a valid error code in the VMCS */
166 movq $VMX_FAIL_INVALID, %rax /* launch failed with no current VMCS */
171 movq $VMM_FAILURE, %rax /* VMM_FAILURE is defined elsewhere in the project */
/* save_registers(location) -- 32-bit variant: spill the GPRs into the
 * save area addressed via `location` (8 bytes per slot; only the low
 * 4 bytes of each slot are written here).  %eax is the base pointer.
 * FIX: the macro name was misspelled "save_resgisters"; the invocation
 * site (save_registers(4(%esp)), original line 229) uses the correct
 * spelling, so the preprocessor left the call unexpanded and the 32-bit
 * build broke.  Renamed to match the invocation and the 64-bit variant.
 * NOTE(review): some interior lines of this macro are not visible in
 * this chunk. */
180 #define save_registers(location) \
182 movl location, %eax; /* %eax = base of the save area */ \
184 movl %esi, 8(%eax); \
185 movl %ebp, 16(%eax); \
187 movl %ebx, 32(%eax); \
188 movl %edx, 40(%eax); \
189 movl %ecx, 48(%eax); \
191 movl 8(%esp), %ebx; /* value at 8(%esp) -- presumably the saved stack pointer; TODO confirm against callers */ \
192 movl %ebx, 56(%eax); /* stored in slot 56, matching the 64-bit variant */ \
/* restore_registers(location) -- 32-bit variant: reload the GPRs from
 * the save area addressed via `location`, 8 bytes per slot (low 4 bytes
 * of each slot used).  %eax is the base pointer, so %eax itself cannot
 * be restored through this macro.
 * NOTE(review): some interior lines are not visible in this chunk. */
197 #define restore_registers(location) \
199 movl location, %eax; /* %eax = base of the save area */ \
201 movl 8(%eax), %esi; \
202 movl 16(%eax), %ebp; \
203 movl 32(%eax), %ebx; \
204 movl 40(%eax), %edx; \
205 movl 48(%eax), %ecx; \
/* VM-exit landing point (32-bit): VMCS HOST_RIP is programmed to resume
 * here.  Saves guest GPRs, calls the C exit dispatcher, restores state.
 * NOTE(review): the lines between these instructions are not visible. */
227 .globl v3_vmx_exit_handler
229 save_registers(4(%esp)) /* pointer to the guest GPR area sits at 4(%esp) -- TODO confirm against unseen setup lines */
234 call v3_handle_vmx_exit
244 restore_registers(%edi) /* FIX: was %rdi -- a 64-bit register that cannot be encoded in 32-bit code; the 32-bit macro does `movl location, %eax` and the parallel vmlaunch path (orig line 274) uses %edi */
/* v3_vmx_vmlaunch (32-bit): program VMCS host RSP/RIP so that a VM exit
 * resumes at v3_vmx_exit_handler, restore the guest GPRs, and launch.
 * Error paths return VMX_FAIL_VALID / VMX_FAIL_INVALID / VMM_FAILURE in
 * %eax.  NOTE(review): the vmwrite/vmlaunch instructions themselves fall
 * outside this visible chunk. */
254 .globl v3_vmx_vmlaunch
263 movl $VMCS_HOST_RSP, %ebx /* VMCS field encoding; presumably vmwrite'd with the current %esp (lines not shown) -- TODO confirm */
268 movl $v3_vmx_exit_handler, %eax
269 movl $VMCS_HOST_RIP, %ebx /* VM exits will resume at v3_vmx_exit_handler */
274 restore_registers(%edi)
284 movl $VMX_FAIL_VALID, %eax /* launch failed with a valid error code in the VMCS */
290 movl $VMX_FAIL_INVALID, %eax /* FIX: was misspelled $MVX_FAIL_INVALID (undefined symbol); the constant is VMX_FAIL_INVALID (orig line 4), matching the 64-bit path */
296 movl $VMM_FAILURE, %eax /* VMM_FAILURE is defined elsewhere in the project */