X-Git-Url: http://v3vee.org/palacios/gitweb/gitweb.cgi?p=palacios.git;a=blobdiff_plain;f=palacios%2Fsrc%2Fpalacios%2Fvmx_lowlevel.S;h=757d835081b5333b147eae9c1f63feae0610ddba;hp=f0e793f2c00cd06d5b76a8b413760a4acd5a051d;hb=3d2d01f344c7417adba8f7b8ac6501d45392222f;hpb=52a58bb7bdf06ca22ad6883f8095f8aa5ca4b8a4

diff --git a/palacios/src/palacios/vmx_lowlevel.S b/palacios/src/palacios/vmx_lowlevel.S
index f0e793f..757d835 100644
--- a/palacios/src/palacios/vmx_lowlevel.S
+++ b/palacios/src/palacios/vmx_lowlevel.S
@@ -21,7 +21,7 @@
     movq %rdx, 40(%rax);        \
     movq %rcx, 48(%rax);        \
     pushq %rbx;                 \
-    movq 16(%rsp), %rbx;        \
+    movq 8(%rsp), %rbx;         \
     movq %rbx, 56(%rax);        \
     popq %rbx;                  \
                                 \
@@ -35,16 +35,14 @@
     movq %r15, 120(%rax);       \
     popq %rax;

-
 #define restore_registers(location) \
-    push %rax;                  \
     mov location, %rax;         \
     mov (%rax), %rdi;           \
     mov 8(%rax), %rsi;          \
     mov 16(%rax), %rbp;         \
     mov 32(%rax), %rbx;         \
     mov 40(%rax), %rdx;         \
-    mov 48(%rax), %rcx;         \
+    mov 48(%rax), %rcx;         \
                                 \
     mov 64(%rax), %r8;          \
     mov 72(%rax), %r9;          \
@@ -54,9 +52,28 @@
     mov 104(%rax), %r13;        \
     mov 112(%rax), %r14;        \
     mov 120(%rax), %r15;        \
-    pop %rax;
-
-
+    pushq %rbx;                 \
+    movq 56(%rax), %rbx;        \
+    movq %rbx, %rax;            \
+    popq %rbx;
+
+#define save_ctrl_regs(location) \
+    pushq %rax;                 \
+    pushq %rbx;                 \
+    movq location, %rax;        \
+    movq %cr2, %rbx;            \
+    movq %rbx, 8(%rax);         \
+    popq %rbx;                  \
+    popq %rax
+
+#define restore_ctrl_regs(location) \
+    pushq %rax;                 \
+    pushq %rbx;                 \
+    movq location, %rax;        \
+    movq 8(%rax), %rbx;         \
+    movq %rbx, %cr2;            \
+    popq %rbx;                  \
+    popq %rax

 #define PUSHA    \
     push %rax;   \
@@ -92,89 +109,99 @@
     pop %rbx;    \
     pop %rax;

-.align 8
-.globl v3_vmx_exit_handler
-v3_vmx_exit_handler:
-    save_registers(8(%rsp));
-    addq $8, %rsp
-    POPA
-    popf
-    pushq %rdi
-    pushq %rsi
-    call v3_handle_vmx_exit
-
-    andq %rax, %rax
-    jnz .Lvmm_failure
-
-v3_vmx_vmresume:
-    pop %rsi
-    pop %rdi
-    pushf
-    PUSHA
-    pushq %rdi
+
+#define PRE_LAUNCH(return_target)       \
+    pushf;                              \
+    PUSHA;                              \
+    pushq %rdi;                         \
+    pushq %rdx;                         \
+                                        \
+    movq %rsp, %rax;                    \
+    movq $VMCS_HOST_RSP, %rbx;          \
+    vmwrite %rax, %rbx;                 \
+    jz .Lfail_valid;                    \
+    jc .Lfail_invalid;                  \
+                                        \
+    movq return_target, %rax;           \
+    movq $VMCS_HOST_RIP, %rbx;          \
+    vmwrite %rax, %rbx;                 \
+    jz .Lfail_valid;                    \
+    jc .Lfail_invalid;                  \
+                                        \
+    restore_ctrl_regs(%rdx);            \
     restore_registers(%rdi);

-    vmresume
-
-    jz .Lfail_valid
-    jc .Lfail_invalid
-    addq $8, %rsp
-    jmp .Lreturn
+

 .align 8
-.globl v3_vmx_vmlaunch
-// vm_regs = %rdi
-v3_vmx_vmlaunch:
-    cli
-    pushf
-    PUSHA
-    pushq %rdi
-
-    movq %rsp, %rax
-    movq $VMCS_HOST_RSP, %rbx
-    vmwrite %rax, %rbx
-    jz .Lfail_valid
-    jc .Lfail_invalid
+.globl v3_vmx_resume
+// vm_regs = %rdi, guest_info * = %rsi, ctrl_regs = %rdx
+v3_vmx_resume:
+
+    PRE_LAUNCH($vmx_resume_ret);

-    movq $v3_vmx_exit_handler, %rax
-    movq $VMCS_HOST_RIP, %rbx
-    vmwrite %rax, %rbx
+    vmresume
+
+vmx_resume_ret:
     jz .Lfail_valid
     jc .Lfail_invalid
+    jmp .Lnormal_exit

-    restore_registers(%rdi);
+
+.align 8
+.globl v3_vmx_launch
+// vm_regs = %rdi, guest_info * = %rsi, ctrl_regs = %rdx
+v3_vmx_launch:
+
+    PRE_LAUNCH($vmx_launch_ret);

     vmlaunch
+
+vmx_launch_ret:
     jz .Lfail_valid
     jc .Lfail_invalid
-    jmp .Lreturn
+    jmp .Lnormal_exit
+
+
+

 .Lfail_valid:
-    addq $8, %rsp
+    addq $16, %rsp
     POPA
     popf
     movq $VMX_FAIL_VALID, %rax
     jmp .Lreturn

 .Lfail_invalid:
-    addq $8, %rsp
+    addq $16, %rsp
     POPA
     popf
     movq $VMX_FAIL_INVALID, %rax
     jmp .Lreturn

 .Lvmm_failure:
-    addq $16, %rsp
+    addq $24, %rsp
     movq $VMM_FAILURE, %rax
     jmp .Lreturn

+
+.Lnormal_exit:
+    save_registers(16(%rsp));
+    save_ctrl_regs(16(%rsp));
+    addq $16, %rsp
+    POPA
+    popf
+    xorq %rax, %rax
+    jmp .Lreturn
+
+
 .Lreturn:
-    sti
     ret

 #else

-#define save_resgisters(location)      \
+#define save_registers(location)       \
     pushl %eax;                 \
     movl location, %eax;        \
     movl %edi, (%eax);          \
@@ -202,6 +229,25 @@ v3_vmx_vmlaunch:
     movl 48(%eax), %ecx;        \
     popl %eax;

+
+#define save_ctrl_regs(location) \
+    push %eax;                  \
+    push %ebx;                  \
+    movl location, %eax;        \
+    movl %cr2, %ebx;            \
+    movl %ebx, 8(%eax);         \
+    popl %ebx;                  \
+    popl %eax
+
+#define restore_ctrl_regs(location) \
+    push %eax;                  \
+    push %ebx;                  \
+    movl location, %eax;        \
+    movl 8(%eax), %ebx;         \
+    movl %ebx, %cr2;            \
+    popl %ebx;                  \
+    popl %eax
+
 #define PUSHA    \
     push %eax;   \
     push %ebx;   \
@@ -220,82 +266,86 @@ v3_vmx_vmlaunch:
     pop %ebx;    \
     pop %eax;

-.align 8
-.globl v3_vmx_exit_handler
-v3_vmx_exit_handler:
-    save_registers(4(%esp))
-    addl $8, %esp
-    POPA
-    popf
-    pushl %edi
-    call v3_handle_vmx_exit
-    andl %eax, %eax
-    jnz .Lvmm_failure
+
+#define PRE_LAUNCH(return_target)       \
+    pushf;                              \
+    PUSHA;                              \
+    pushl %edi;                         \
+    pushl %edx;                         \
+                                        \
+    movl %esp, %eax;                    \
+    movl $VMCS_HOST_RSP, %ebx;          \
+    vmwrite %eax, %ebx;                 \
+    jz .Lfail_valid;                    \
+    jc .Lfail_invalid;                  \
+                                        \
+    movl return_target, %eax;           \
+    movl $VMCS_HOST_RIP, %ebx;          \
+    vmwrite %eax, %ebx;                 \
+    jz .Lfail_valid;                    \
+    jc .Lfail_invalid;                  \
+                                        \
+    restore_ctrl_regs(%edx);            \
+    restore_registers(%edi);
+
+
+.align 4
+.globl v3_vmx_resume
+v3_vmx_resume:

-v3_vmx_vmresume:
-    popl %edi
-    pushf
-    PUSHA
-    pushl %edi
-    restore_registers(%rdi)
+    PRE_LAUNCH($vmx_resume_ret);

     vmresume

-    addl $8, %esp
+vmx_resume_ret:
     jz .Lfail_valid
     jc .Lfail_invalid
-    jmp .Lreturn
+    jmp .Lnormal_exit

-.align 8
-.globl v3_vmx_vmlaunch
+.align 4
+.globl v3_vmx_launch
 // vm_regs = %edi
-v3_vmx_vmlaunch:
-    cli
-    pushf
-    PUSHA
-    pushl %edi
-
-    movl %esp, %eax
-    movl $VMCS_HOST_RSP, %ebx
-    vmwrite %eax, %ebx
-    jz .Lfail_valid
-    jc .Lfail_invalid
+v3_vmx_launch:

-    movl $v3_vmx_exit_handler, %eax
-    movl $VMCS_HOST_RIP, %ebx
-    vmwrite %eax, %ebx
-    jz .Lfail_valid
-    jc .Lfail_invalid
-
-    restore_registers(%edi)
+    PRE_LAUNCH($vmx_launch_ret);

     vmlaunch
+
+vmx_launch_ret:
     jz .Lfail_valid
     jc .Lfail_invalid
-    jmp .Lreturn
+    jmp .Lnormal_exit

 .Lfail_valid:
     addl $8, %esp
     POPA
+    popf
     movl $VMX_FAIL_VALID, %eax
     jmp .Lreturn

 .Lfail_invalid:
-    addq $8, %esp
+    addl $8, %esp
     POPA
-    movl $MVX_FAIL_INVALID, %eax
+    popf
+    movl $VMX_FAIL_INVALID, %eax
     jmp .Lreturn

 .Lvmm_failure:
-    addq $8, %esp
-    POPA
+    addl $12, %esp
     movl $VMM_FAILURE, %eax
     jmp .Lreturn

-.Lreturn:
-    sti
+
+.Lnormal_exit:
+    save_registers(8(%esp));
+    save_ctrl_regs(8(%esp));
+    addl $8, %esp
+    POPA
     popf
+    xorl %eax, %eax
+    jmp .Lreturn
+
+.Lreturn:
     ret

 #endif