#define VMX_FAIL_INVALID 1 /* VMfailInvalid: VMX instruction failed with no current VMCS (CF=1) */
#define VMX_FAIL_VALID 2 /* VMfailValid: failed with an error code in the VM-instruction error field (ZF=1) */
#define VMCS_HOST_RSP 0x00006C14 /* VMCS field encoding for HOST_RSP (Intel SDM Vol. 3, Appendix B) */
#define VMCS_HOST_RIP 0x00006C16 /* VMCS field encoding for HOST_RIP (Intel SDM Vol. 3, Appendix B) */
11 #if defined(__V3_64BIT__)
/* Spill the general-purpose registers into the save area addressed by
 * `location`; each register gets an 8-byte slot (rbp@16, rbx@32, rdx@40,
 * rcx@48, ..., r15@120).  Several slots are filled on lines outside this
 * excerpt.  Clobbers %rax (area pointer) and %rbx (reused as scratch
 * after its own value has been saved). */
#define save_registers(location) \
    movq location, %rax; /* %rax = base of the register save area */ \
    movq %rbp, 16(%rax); \
    movq %rbx, 32(%rax); \
    movq %rdx, 40(%rax); \
    movq %rcx, 48(%rax); \
    movq 16(%rsp), %rbx; /* value stashed on the stack by the caller — presumably the saved %rsp; TODO confirm */ \
    movq %rbx, 56(%rax); /* ...stored into slot 56 via %rbx as scratch */ \
    movq %r10, 80(%rax); \
    movq %r11, 88(%rax); \
    movq %r12, 96(%rax); \
    movq %r13, 104(%rax); \
    movq %r14, 112(%rax); \
    movq %r15, 120(%rax); \
/* Reload the general-purpose registers from the save area; only the tail
 * of the macro (r13-r15) is visible in this excerpt — the base pointer is
 * presumably loaded into %rax on the elided lines, mirroring the save
 * macro above.  Note: these lines use suffix-less `mov` while the save
 * macro uses `movq`; equivalent here since the register operand fixes
 * the operand size. */
#define restore_registers(location) \
    mov 104(%rax), %r13; \
    mov 112(%rax), %r14; \
    mov 120(%rax), %r15; \
/* 64-bit VM-exit landing point: v3_vmx_vmlaunch writes this symbol's
 * address into the VMCS HOST_RIP field, so control arrives here directly
 * from the CPU on every VM exit.  Saves the guest GPRs, calls the C exit
 * dispatcher, then reloads guest state. */
.globl v3_vmx_exit_handler
    save_registers(8(%rsp)); /* pointer to the guest GPR area sits at 8(%rsp) — placed there before launch; TODO confirm */
    call v3_handle_vmx_exit /* C exit dispatcher; its return value is handled on elided lines */
    restore_registers(%rdi); /* %rdi = guest GPR area (SysV arg 0) — presumably reloaded on elided lines; TODO confirm */
/* 64-bit v3_vmx_vmlaunch: programs the VMCS host-state RSP/RIP fields
 * (the vmwrite instructions are on elided lines), loads the guest GPRs
 * from the caller-supplied area, and issues VMLAUNCH.  The three failure
 * paths visible below leave an error code in %rax. */
.globl v3_vmx_vmlaunch
    movq $VMCS_HOST_RSP, %rbx /* field encoding for the HOST_RSP vmwrite (vmwrite on an elided line) */
    movq $v3_vmx_exit_handler, %rax /* host resume point: absolute address of the exit handler */
    movq $VMCS_HOST_RIP, %rbx /* field encoding for the HOST_RIP vmwrite (elided) */
    restore_registers(%rdi); /* load guest GPRs just before VMLAUNCH */
    movq $VMX_FAIL_VALID, %rax /* failure: VMLAUNCH failed with a valid error code (ZF path, presumably) */
    movq $VMX_FAIL_INVALID, %rax /* failure: VMLAUNCH failed with no current VMCS (CF path, presumably) */
    movq $VMM_FAILURE, %rax /* failure: exit handler reported an error (VMM_FAILURE defined elsewhere) */
#define save_registers(location) /* fixed typo: was "save_resgisters", which left the save_registers() invocation in the 32-bit exit handler as an undefined symbol */ \
    movl location, %eax; /* %eax = base of the register save area */ \
    movl %esi, 8(%eax); \
    movl %ebp, 16(%eax); /* NOTE: slots keep the 8-byte spacing of the 64-bit layout; only the low 4 bytes of each slot are written */ \
    movl %ebx, 32(%eax); \
    movl %edx, 40(%eax); \
    movl %ecx, 48(%eax); \
    movl 8(%esp), %ebx; /* value stashed on the stack by the caller — presumably the saved %esp; TODO confirm */ \
    movl %ebx, 56(%eax); \
/* Reload the general-purpose registers from the save area addressed by
 * `location` — mirror of the 32-bit save macro above; the remaining
 * restores are on lines outside this excerpt.  Clobbers %eax (area
 * pointer). */
#define restore_registers(location) \
    movl location, %eax; /* %eax = base of the register save area */ \
    movl 8(%eax), %esi; \
    movl 16(%eax), %ebp; \
    movl 32(%eax), %ebx; \
    movl 40(%eax), %edx; \
    movl 48(%eax), %ecx; \
/* 32-bit VM-exit landing point (cdecl): saves the guest GPRs into the
 * area whose pointer is the first stack argument, then calls the C exit
 * dispatcher. */
.globl v3_vmx_exit_handler
    save_registers(4(%esp)) /* 4(%esp) = pointer to the guest GPR save area (first stack slot past the return address) */
    call v3_handle_vmx_exit /* C exit dispatcher; return handling is on elided lines */
241 restore_registers(%rdi)
/* 32-bit v3_vmx_vmlaunch: programs the VMCS host RSP/RIP fields
 * (vmwrites on elided lines), loads the guest GPRs, and issues VMLAUNCH;
 * the failure paths leave an error code in %eax. */
.globl v3_vmx_vmlaunch
    movl $VMCS_HOST_RSP, %ebx /* field encoding for the HOST_RSP vmwrite (vmwrite on an elided line) */
    movl $v3_vmx_exit_handler, %eax /* host resume point written to HOST_RIP */
    movl $VMCS_HOST_RIP, %ebx /* field encoding for the HOST_RIP vmwrite (elided) */
    restore_registers(%edi) /* load guest GPRs just before VMLAUNCH */
    movl $VMX_FAIL_VALID, %eax /* failure: VMLAUNCH failed with a valid error code */
287 movl $MVX_FAIL_INVALID, %eax
    movl $VMM_FAILURE, %eax /* failure: exit handler reported an error (VMM_FAILURE defined elsewhere) */