#;  -*- fundamental -*-

.text
.align 4

#define clgi .byte 0x0F,0x01,0xDD
#define SVM_VM_HSAVE_PA_MSR .dword 0xc0010117


#ifdef __V3_32BIT__

// Note that RAX is saved in the VMCB, so we don't touch it here
// Save the guest GPRs into the register-save area at `location` after #VMEXIT.
// Slots are 8 bytes wide (same struct layout as the 64-bit macros below, where
// rdi/rsi/rbp sit at offsets 0/8/16); guest EAX lives in the VMCB so it is not
// stored here, and slot 24 is zeroed (presumably the RSP slot, which the VMCB
// also tracks -- confirm against struct v3_gprs).
// NOTE(review): the edi/esi/ebp stores mirror the 64-bit macro; confirm.
#define Save_SVM_Registers(location) 	\
	pushl	%eax; 			\
	movl	location, %eax;		\
	movl	%edi, (%eax);		\
	movl	%esi, 8(%eax);		\
	movl	%ebp, 16(%eax);		\
	movl	$0, 24(%eax);		\
	movl	%ebx, 32(%eax);		\
	movl	%edx, 40(%eax);		\
	movl	%ecx, 48(%eax);		\
	popl	%eax;


// Load the guest GPRs from the register-save area at `location` before VMRUN.
// EAX is preserved around the loads; guest EAX itself comes from the VMCB.
#define Restore_SVM_Registers(location) \
	pushl	%eax;			\
	movl	location, %eax;		\
	movl	(%eax), %edi;		\
	movl	8(%eax), %esi;		\
	movl	16(%eax), %ebp;		\
	movl	32(%eax), %ebx;		\
	movl	40(%eax), %edx;		\
	movl	48(%eax), %ecx;		\
	popl	%eax;
// 32 bit GCC passes arguments via stack:
//    8(%ebp) = guest VMCB pa
//   12(%ebp) = guest GPR save area
//   16(%ebp) = host VMCB pa    -- TODO(review): confirm against the C prototype
v3_svm_launch:
	push	%ebp;
	movl	%esp, %ebp;
	pushf;
	pusha;				// preserve host GPRs

	movl	16(%ebp), %eax;		// EAX = host VMCB pa
	vmsave;				// stash host state

	pushl	12(%ebp);		// remember guest GPR area for after VMRUN
	pushl	8(%ebp);		// remember guest VMCB pa

	Restore_SVM_Registers(8(%esp));	// 8 = EAX pushed inside the macro + vmcb slot
	popl	%eax;			// EAX = guest VMCB pa (vmload/vmrun/vmsave operand)

	vmload;				// load guest state
	vmrun;				// world switch; resumes here on #VMEXIT
	vmsave;				// stash guest state back into the guest VMCB

	Save_SVM_Registers(4(%esp));	// 4 = EAX pushed inside the macro
	addl	$4, %esp;		// drop the saved GPR-area pointer

	movl	16(%ebp), %eax;		// EAX = host VMCB pa
	vmload;				// restore host state

	popa;
	popf;

	// We don't detect failures here, so just return 0
	xorl	%eax, %eax;

	pop	%ebp;
	ret
#elif __V3_64BIT__

// Note that RAX is saved in the VMCB, so we don't touch it here
// Save the guest GPRs into the register-save area at `location` after #VMEXIT.
// Guest RAX lives in the VMCB so slot 56 is untouched; slot 24 is zeroed
// (presumably the RSP slot, which the VMCB also tracks -- confirm against
// struct v3_gprs).
#define Save_SVM_Registers(location) 	\
	pushq	%rax; 			\
	movq	location, %rax;		\
	movq	%rdi, (%rax);		\
	movq	%rsi, 8(%rax);		\
	movq	%rbp, 16(%rax);		\
	movq	$0, 24(%rax);		\
	movq	%rbx, 32(%rax);		\
	movq	%rdx, 40(%rax);		\
	movq	%rcx, 48(%rax);		\
	\
	movq	%r8, 64(%rax);		\
	movq	%r9, 72(%rax);		\
	movq	%r10, 80(%rax);		\
	movq	%r11, 88(%rax);		\
	movq	%r12, 96(%rax);		\
	movq	%r13, 104(%rax);	\
	movq	%r14, 112(%rax);	\
	movq	%r15, 120(%rax);	\
	popq	%rax;
// Load the guest GPRs from the register-save area at `location` before VMRUN.
// RAX is preserved around the loads (at the call site it holds the guest VMCB
// pa for vmload/vmrun); guest RAX itself comes from the VMCB.
// NOTE(review): the push/mov header and the rdi/rsi/rbp loads restore lines
// lost to the diff residue -- the unmatched `pop %rax` and the
// Restore_SVM_Registers(%rsi) call site require them; confirm.
#define Restore_SVM_Registers(location) \
	pushq	%rax;			\
	movq	location, %rax;		\
	movq	(%rax), %rdi;		\
	movq	8(%rax), %rsi;		\
	movq	16(%rax), %rbp;		\
	movq	32(%rax), %rbx;		\
	movq	40(%rax), %rdx;		\
	movq	48(%rax), %rcx;		\
	\
	movq	64(%rax), %r8;		\
	movq	72(%rax), %r9;		\
	movq	80(%rax), %r10;		\
	movq	88(%rax), %r11;		\
	movq	96(%rax), %r12;		\
	movq	104(%rax), %r13;	\
	movq	112(%rax), %r14;	\
	movq	120(%rax), %r15;	\
	popq	%rax;
// Push/pop the host GPRs that the guest may clobber across VMRUN.
// RAX is handled via the VMCB and RSP by the stack itself, so neither appears
// here.  POPA must mirror PUSHA exactly, in reverse order.
// NOTE(review): the r15 push/pop and the `#define POPA` header restore lines
// lost to the diff residue; confirm the pair stays symmetric.
#define PUSHA				\
	pushq	%rbp;			\
	pushq	%rbx;			\
	pushq	%r8;			\
	pushq	%r9;			\
	pushq	%r10;			\
	pushq	%r11;			\
	pushq	%r12;			\
	pushq	%r13;			\
	pushq	%r14;			\
	pushq	%r15;

#define POPA				\
	popq	%r15;			\
	popq	%r14;			\
	popq	%r13;			\
	popq	%r12;			\
	popq	%r11;			\
	popq	%r10;			\
	popq	%r9;			\
	popq	%r8;			\
	popq	%rbx;			\
	popq	%rbp;

// Note that this is only for 64 bit GCC; 32 bit GCC passes via stack.
// Arguments (SysV AMD64 register convention):
//   RDI = guest VMCB pa
//   RSI = guest GPR save area
//   RDX = host VMCB pa
v3_svm_launch:
	pushf;
	PUSHA

	pushq	%rdx;			// keep host VMCB pa for the final vmload
	movq	%rdx, %rax;
	vmsave;				// stash host state

	pushq	%rsi			// keep GPR-area ptr for Save_SVM_Registers

	movq	%rdi, %rax		// RAX = guest VMCB pa (vmload/vmrun/vmsave operand)
	Restore_SVM_Registers(%rsi);

	vmload;				// load guest state
	vmrun;				// world switch; resumes here on #VMEXIT
	vmsave;				// stash guest state back into the guest VMCB

	Save_SVM_Registers(8(%rsp));	// 8 = RAX pushed inside the macro
	addq	$8, %rsp		// drop the saved GPR-area pointer

	popq	%rax;			// RAX = host VMCB pa
	vmload;				// restore host state

	POPA
	popf;

	// NOTE(review): unlike the 32-bit path, RAX is not zeroed before ret, so
	// the "return value" is the host VMCB pa; the caller appears to ignore
	// it -- confirm.
	ret