movq %rdx, 40(%rax); \
movq %rcx, 48(%rax); \
pushq %rbx; \
- movq 16(%rsp), %rbx; \
+ movq 8(%rsp), %rbx; \
movq %rbx, 56(%rax); \
popq %rbx; \
\
movq %r15, 120(%rax); \
popq %rax;
-
/* Load the guest GPR state from the register save area at `location`
 * (a struct of 8-byte slots; slot layout inferred from the offsets below).
 * %rax is deliberately restored LAST: it is first used as the base pointer,
 * then overwritten with the guest value from offset 56 via a scratch %rbx,
 * so no host register survives this macro -- callers must not expect any.
 * NOTE(review): offsets 80/88/96 (r10-r12) were elided between diff hunks in
 * the patched source; reconstructed here -- confirm against the full file.
 * Offset 24 (guest rsp) is not restored here; presumably it lives in the
 * VMCS guest-state area -- TODO confirm.
 */
#define restore_registers(location) \
    mov location, %rax;     /* %rax = base of guest GPR save area */ \
    mov (%rax), %rdi; \
    mov 8(%rax), %rsi; \
    mov 16(%rax), %rbp; \
    mov 32(%rax), %rbx; \
    mov 40(%rax), %rdx; \
    mov 48(%rax), %rcx; \
    \
    mov 64(%rax), %r8; \
    mov 72(%rax), %r9; \
    mov 80(%rax), %r10; \
    mov 88(%rax), %r11; \
    mov 96(%rax), %r12; \
    mov 104(%rax), %r13; \
    mov 112(%rax), %r14; \
    mov 120(%rax), %r15; \
    pushq %rbx;             /* scratch: cannot load into %rax while it is the base */ \
    movq 56(%rax), %rbx; \
    movq %rbx, %rax;        /* finally clobber %rax with the guest value */ \
    popq %rbx;

/* Capture guest-modifiable control-register state into the ctrl_regs save
 * area at `location`.  Only %cr2 (page-fault linear address) is captured,
 * at offset 8.  %rax and %rbx are used as scratch and fully preserved.
 * NOTE: `location` is expanded AFTER the two pushes below -- a stack-relative
 * argument must account for the extra 16 bytes (the call sites pass 16(%rsp)
 * to reach what was 0(%rsp) at macro entry).
 */
#define save_ctrl_regs(location) \
    pushq %rax;             /* scratch: holds the save-area pointer */ \
    pushq %rbx;             /* scratch: %cr2 cannot be moved to memory directly */ \
    movq location, %rax; \
    movq %cr2, %rbx; \
    movq %rbx, 8(%rax); \
    popq %rbx; \
    popq %rax

/* Mirror of save_ctrl_regs: reload %cr2 from offset 8 of the ctrl_regs save
 * area at `location` before re-entering the guest, so a guest page-fault
 * handler reading %cr2 sees its own value.  %rax/%rbx scratch, preserved.
 * As with save_ctrl_regs, `location` is read after two pushes (+16 bytes for
 * stack-relative arguments); the call sites below pass a register, which is
 * unaffected.
 */
#define restore_ctrl_regs(location) \
    pushq %rax; \
    pushq %rbx; \
    movq location, %rax; \
    movq 8(%rax), %rbx;     /* %cr2 cannot be loaded from memory directly */ \
    movq %rbx, %cr2; \
    popq %rbx; \
    popq %rax
#define PUSHA \
push %rax; \
.align 8
.globl v3_vmx_exit_handler
v3_vmx_exit_handler:
    // On VM exit, VMCS_HOST_RSP points here with the stack laid out exactly
    // as v3_vmx_vmlaunch/vmresume left it (top down): ctrl_regs ptr,
    // vm_regs ptr, <PUSHA frame>, <pushed flags>.
    // Each save_* macro pushes TWO scratch quadwords before reading its
    // argument, so both pointers are reached at 16(%rsp) inside the macros:
    // save_registers reaches the vm_regs ptr (was 8(%rsp)) past 1 push + the
    // pointer above it; save_ctrl_regs reaches the ctrl_regs ptr (was
    // 0(%rsp)) past its 2 pushes.
    save_registers(16(%rsp));
    save_ctrl_regs(16(%rsp));
    addq $16, %rsp              // drop the two saved pointers
    POPA                        // restore host GPRs pushed at launch/resume
    popf                        // restore host flags pushed at launch/resume
    // SysV args for v3_handle_vmx_exit are already live in %rdi/%rsi/%rdx
    // (vm_regs, guest_info, ctrl_regs); stash them across the call so the
    // resume path can reuse them.
    pushq %rdi
    pushq %rsi
    pushq %rdx
    call v3_handle_vmx_exit
    testq %rax, %rax            // nonzero return => unrecoverable VMM error
    jnz .Lvmm_failure

v3_vmx_vmresume:
    pop %rdx                    // ctrl_regs (reverse of push order above)
    pop %rsi                    // guest_info
    pop %rdi                    // vm_regs
    pushf
    PUSHA
    pushq %rdi                  // rebuild the stack layout the next VM exit
    pushq %rdx                  // expects: ctrl_regs on top of vm_regs
    restore_ctrl_regs(%rdx);
    restore_registers(%rdi);
    vmresume
    // Fall through only if vmresume itself failed (success enters the guest).
    jz .Lfail_valid             // ZF set: VMfailValid, error code in VMCS
    jc .Lfail_invalid           // CF set: VMfailInvalid
    addq $16, %rsp              // drop the two pointers before returning
    jmp .Lreturn
.align 8
.globl v3_vmx_vmlaunch
// Called from C (SysV AMD64):
// vm_regs = %rdi, guest_info * = %rsi, ctrl_regs = %rdx
v3_vmx_vmlaunch:
    pushf                       // save host flags for the eventual VM exit
    PUSHA                       // save host GPRs
    pushq %rdi                  // vm_regs ptr at 8(%rsp) after next push...
    pushq %rdx                  // ...ctrl_regs ptr on top: the layout
                                // v3_vmx_exit_handler unwinds
    movq %rsp, %rax
    movq $VMCS_HOST_RSP, %rbx
    vmwrite %rax, %rbx          // host rsp to land on at VM exit
                                // NOTE(review): this vmwrite was elided
                                // between diff hunks (the jz/jc below test
                                // its ZF/CF result) -- confirm vs. full file
    jz .Lfail_valid
    jc .Lfail_invalid
    restore_ctrl_regs(%rdx);
    restore_registers(%rdi);
    vmlaunch
    // Reached only on failure: vmlaunch reports via ZF/CF like vmwrite.
    // NOTE(review): these two checks reconstructed from elided context.
    jz .Lfail_valid
    jc .Lfail_invalid
    jmp .Lreturn
// VM-entry failure with a valid VMCS: an error code is available via
// vmread of the VM-instruction-error field.  Stack still holds the two
// pointers plus the PUSHA/pushf frame from the launch/resume path.
.Lfail_valid:
    addq $16, %rsp              // drop vm_regs / ctrl_regs pointers
    POPA
    popf
    movq $VMX_FAIL_VALID, %rax
    jmp .Lreturn

// VM-entry failure with no current VMCS (CF set): no error code exists.
.Lfail_invalid:
    addq $16, %rsp
    POPA
    popf
    movq $VMX_FAIL_INVALID, %rax
    jmp .Lreturn

// v3_handle_vmx_exit returned nonzero.  Host GPRs/flags were already
// restored (POPA/popf) before the call; only the three pushed argument
// registers remain to discard.
.Lvmm_failure:
    addq $24, %rsp              // drop saved %rdi/%rsi/%rdx
    movq $VMM_FAILURE, %rax
    jmp .Lreturn