/* Error codes handed back to the C caller of v3_vmx_vmlaunch, named
 * after the VMX "VMfailInvalid" / "VMfailValid" launch outcomes. */
4 #define VMX_FAIL_INVALID 1
5 #define VMX_FAIL_VALID 2
/* VMCS field encodings (Intel SDM) for the host-state stack pointer
 * and instruction pointer that the CPU loads on every VM exit. */
8 #define VMCS_HOST_RSP 0x00006C14
9 #define VMCS_HOST_RIP 0x00006C16
11 #if defined(__V3_64BIT__)

/* save_registers(location) -- 64-bit variant.
 *   Loads a struct pointer from `location` into %rax, then spills the
 *   general-purpose registers into that struct at fixed 8-byte-spaced
 *   offsets.  %rax doubles as the base pointer, so the guest's own
 *   rax/rsp slots are handled on lines elided from this excerpt.
 * NOTE(review): several continuation lines of this macro are missing
 *   from this view; the 16(%rsp) load below presumably fetches a value
 *   pushed on the stack earlier -- TODO confirm against the full file. */
13 #define save_registers(location) \
15 movq location, %rax; \
18 movq %rbp, 16(%rax); \
20 movq %rbx, 32(%rax); \
21 movq %rdx, 40(%rax); \
22 movq %rcx, 48(%rax); \
24 movq 16(%rsp), %rbx; /* value previously saved on the stack */ \
25 movq %rbx, 56(%rax); \
30 movq %r10, 80(%rax); \
31 movq %r11, 88(%rax); \
32 movq %r12, 96(%rax); \
33 movq %r13, 104(%rax); \
34 movq %r14, 112(%rax); \
35 movq %r15, 120(%rax); \
/* restore_registers(location) -- 64-bit variant, counterpart of
 * save_registers: reloads the GPRs from the struct at `location`.
 * Only the tail of the macro (r13-r15) is visible in this excerpt;
 * the base-pointer load and remaining registers are on elided lines. */
39 #define restore_registers(location) \
54 mov 104(%rax), %r13; \
55 mov 112(%rax), %r14; \
56 mov 120(%rax), %r15; \
/* v3_vmx_exit_handler -- 64-bit VM-exit landing pad.  This label is
 * installed as the VMCS HOST_RIP (see v3_vmx_vmlaunch), so the CPU
 * jumps here on every VM exit.  It saves the guest GPRs into the
 * struct whose address sits at 8(%rsp), calls the C exit dispatcher,
 * and restores registers from the struct pointed to by %rdi.
 * NOTE(review): surrounding lines are elided in this excerpt; where
 * %rdi is loaded before restore_registers is not visible here --
 * confirm against the full file. */
96 .globl v3_vmx_exit_handler
98 save_registers(8(%rsp));
103 call v3_handle_vmx_exit
113 restore_registers(%rdi);
/* v3_vmx_vmlaunch -- 64-bit variant.  Programs the VMCS host-state
 * RSP/RIP fields (field encodings loaded into %rbx; the vmwrite
 * instructions themselves are on elided lines), restores guest
 * registers from the struct in %rdi, and launches the guest.  The
 * error paths load the status code expected by the C caller:
 * VMX_FAIL_VALID / VMX_FAIL_INVALID per the VMX error convention,
 * VMM_FAILURE otherwise. */
123 .globl v3_vmx_vmlaunch
132 movq $VMCS_HOST_RSP, %rbx /* VMCS field encoding for host RSP */
137 movq $v3_vmx_exit_handler, %rax /* HOST_RIP value: exit landing pad */
138 movq $VMCS_HOST_RIP, %rbx
143 restore_registers(%rdi);
154 movq $VMX_FAIL_VALID, %rax
161 movq $VMX_FAIL_INVALID, %rax
166 movq $VMM_FAILURE, %rax
/* save_registers(location) -- 32-bit variant.
 *   Loads a struct pointer from `location` into %eax and spills the
 *   GPRs into it at fixed 8-byte-spaced offsets (mirroring the 64-bit
 *   layout).  %eax is the base pointer, so the guest's own eax slot is
 *   handled on lines elided from this excerpt.
 * FIX(review): the macro name was misspelled "save_resgisters", so the
 *   `save_registers(4(%esp))` invocation in the 32-bit exit handler had
 *   no definition.  Renamed; the old misspelled name is kept below as a
 *   compatibility alias for any other caller. */
#define save_resgisters(location) save_registers(location)
175 #define save_registers(location) \
177 movl location, %eax; \
179 movl %esi, 8(%eax); \
180 movl %ebp, 16(%eax); \
182 movl %ebx, 32(%eax); \
183 movl %edx, 40(%eax); \
184 movl %ecx, 48(%eax); \
186 movl 8(%esp), %ebx; /* value previously saved on the stack */ \
187 movl %ebx, 56(%eax); \
/* restore_registers(location) -- 32-bit variant.
 *   Reloads the GPRs from the struct whose address is read from
 *   `location`, using %eax as the base pointer; offsets mirror the
 *   save macro.  Several continuation lines (the 0- and 24-byte slots
 *   among others) are elided from this excerpt. */
192 #define restore_registers(location) \
194 movl location, %eax; \
196 movl 8(%eax), %esi; \
197 movl 16(%eax), %ebp; \
198 movl 32(%eax), %ebx; \
199 movl 40(%eax), %edx; \
200 movl 48(%eax), %ecx; \
/* v3_vmx_exit_handler -- 32-bit VM-exit landing pad (VMCS HOST_RIP
 * target).  Saves the guest GPRs into the struct whose address is at
 * 4(%esp), dispatches to the C exit handler, then reloads registers.
 * FIX(review): restore_registers was passed %rdi -- a 64-bit register
 * that cannot be assembled in 32-bit code.  Changed to %edi, matching
 * the parallel 32-bit v3_vmx_vmlaunch path.  Where %edi is loaded is
 * on lines elided from this excerpt -- confirm against the full file. */
222 .globl v3_vmx_exit_handler
224 save_registers(4(%esp))
229 call v3_handle_vmx_exit
239 restore_registers(%edi)
/* v3_vmx_vmlaunch -- 32-bit variant.  Programs the VMCS host-state
 * RSP/RIP fields (vmwrite instructions on elided lines), restores the
 * guest registers from the struct in %edi, and launches the guest.
 * Error paths load the status code expected by the C caller.
 * FIX(review): the VMfailInvalid path referenced the undefined symbol
 * MVX_FAIL_INVALID; corrected to VMX_FAIL_INVALID, the constant
 * #defined at the top of this file. */
249 .globl v3_vmx_vmlaunch
258 movl $VMCS_HOST_RSP, %ebx /* VMCS field encoding for host RSP */
263 movl $v3_vmx_exit_handler, %eax /* HOST_RIP value: exit landing pad */
264 movl $VMCS_HOST_RIP, %ebx
269 restore_registers(%edi)
279 movl $VMX_FAIL_VALID, %eax
285 movl $VMX_FAIL_INVALID, %eax /* was: MVX_FAIL_INVALID (typo) */
291 movl $VMM_FAILURE, %eax