movl %edx, 40(%eax); \
movl %ecx, 48(%eax); \
pushl %ebx; \
- movl 4(%esp), %ebx; \
+ movl 8(%esp), %ebx; \
movl %ebx, 56(%eax); \
popl %ebx; \
popl %eax;
popl %eax;
-
-#elif __V3_64BIT__
-
-#endif
-
-
-v3_stgi:
- stgi;
- ret;
-
-v3_clgi:
- clgi;
- ret;
-
-
v3_svm_launch:
push %ebp;
movl %esp, %ebp;
popf;
pop %ebp;
ret
+
+
+
+#elif __V3_64BIT__
+
+// Save_SVM_Registers(location):
+//   Spill the guest GPRs (left in the hardware registers after #VMEXIT)
+//   into the GPR struct whose address is stored at `location`.
+//   Struct layout implied here: rdi@0, rsi@8, rbp@16, rsp@24, rbx@32,
+//   rdx@40, rcx@48, rax@56 -- TODO confirm against the guest-regs struct.
+//   The rsp slot (24) is zeroed: presumably the real guest rsp is kept in
+//   the VMCB, not in this struct -- verify in the C-side exit handler.
+//   NOTE(review): `location` is read AFTER the initial `push %rax`, so an
+//   %rsp-relative argument must already include that extra 8 bytes (the
+//   caller below passes 8(%rsp) for a value at 0(%rsp) before the macro).
+//   NOTE(review): after the two pushes (rax, rbx), 16(%rsp) is the
+//   caller's pre-macro stack top, NOT the pushed rax -- so the 56 slot
+//   receives the caller's stack-top word; confirm this is intended
+//   (guest rax itself is normally saved/restored via the VMCB by vmrun).
+#define Save_SVM_Registers(location) \
+ push %rax; \
+ mov location, %rax; \
+ mov %rdi, (%rax); \
+ mov %rsi, 8(%rax); \
+ mov %rbp, 16(%rax); \
+ movq $0, 24(%rax); \
+ mov %rbx, 32(%rax); \
+ mov %rdx, 40(%rax); \
+ mov %rcx, 48(%rax); \
+ push %rbx; \
+ mov 16(%rsp), %rbx; \
+ mov %rbx, 56(%rax); \
+ pop %rbx; \
+ pop %rax;
+
+
+// Restore_SVM_Registers(location):
+//   Load guest rdi/rsi/rbp/rbx/rdx/rcx from the GPR struct whose address
+//   is given by `location`. %rax is pushed/popped around the load so the
+//   caller's rax (the VMCB pointer needed by vmrun) survives.
+//   The rax (56) and rsp (24) slots are deliberately not loaded here --
+//   presumably vmrun supplies guest rax/rsp from the VMCB; confirm caller.
+//   NOTE(review): `location` is evaluated after the initial `push %rax`,
+//   so it must not be a naive %rsp-relative expression.
+#define Restore_SVM_Registers(location) \
+ push %rax; \
+ mov location, %rax; \
+ mov (%rax), %rdi; \
+ mov 8(%rax), %rsi; \
+ mov 16(%rax), %rbp; \
+ mov 32(%rax), %rbx; \
+ mov 40(%rax), %rdx; \
+ mov 48(%rax), %rcx; \
+ pop %rax;
+
+
+
+
+// PUSHA: save the SysV AMD64 callee-saved GPRs (rbp, rbx, r12-r15).
+// There is no hardware pusha in 64-bit mode, hence the macro; must be
+// paired with POPA below, which pops in the exact reverse order.
+#define PUSHA \
+ pushq %rbp; \
+ pushq %rbx; \
+ pushq %r12; \
+ pushq %r13; \
+ pushq %r14; \
+ pushq %r15;
+
+
+// POPA: restore the callee-saved GPRs in the reverse order of PUSHA.
+#define POPA \
+ popq %r15; \
+ popq %r14; \
+ popq %r13; \
+ popq %r12; \
+ popq %rbx; \
+ popq %rbp;
+
+// VMCB => RDI
+// vm_regs => RSI
+
+// v3_svm_launch: 64-bit world switch into an AMD SVM guest.
+//   In: rdi = VMCB address (vmload/vmrun/vmsave take it in rax),
+//       rsi = pointer to the guest GPR save area (see macros above).
+//   Saves host flags, fs/gs and the callee-saved GPRs, loads the guest
+//   GPRs, runs the guest until #VMEXIT, stores the guest GPRs back into
+//   the save area, then restores host state and returns.
+//   NOTE(review): assumes stgi/clgi handling is done by the caller.
+v3_svm_launch:
+ pushf;
+ push %fs;
+ push %gs;
+ PUSHA
+
+
+
+
+
+ // keep the guest GPR-area pointer on the stack across vmrun
+ pushq %rsi
+
+ // vmload/vmrun/vmsave all address the VMCB through rax
+ movq %rdi, %rax
+ Restore_SVM_Registers(%rsi);
+
+
+
+
+ vmload;
+ vmrun;
+ vmsave;
+
+
+ // stack top holds the saved vm_regs pointer; the macro's own
+ // `push %rax` shifts it to 8(%rsp) by the time `location` is read
+ Save_SVM_Registers(8(%rsp));
+
+ // drop the saved vm_regs pointer
+ addq $8, %rsp
+
+
+
+
+
+ POPA
+ pop %gs;
+ pop %fs;
+ popf;
+ ret
+
+
+#endif
+
+
+// v3_stgi: execute STGI (set the global interrupt flag, re-enabling
+// interrupt delivery after a prior CLGI) and return.
+v3_stgi:
+ stgi;
+ ret;
+
+// v3_clgi: execute CLGI (clear the global interrupt flag, blocking
+// interrupt delivery, typically around the vmrun path) and return.
+v3_clgi:
+ clgi;
+ ret;
+
+