Palacios Public Git Repository

To check out Palacios, execute:

  git clone http://v3vee.org/palacios/palacios.web/palacios.git

This will give you the master branch. You probably want the devel branch or one of the release branches. To switch to the devel branch, simply execute:

  cd palacios
  git checkout --track -b devel origin/devel

The other branches are checked out the same way.
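For example, to track a release branch, substitute its name for devel. The branch name below is only a placeholder; run "git branch -r" inside the clone to see which remote branches actually exist:

  git checkout --track -b Release-X.Y origin/Release-X.Y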


major VMX update
palacios/src/palacios/vmx_lowlevel.S
index 3591463..aaae943 100644
@@ -1,13 +1,79 @@
+#;  -*- fundamental -*-
 
 #define VMX_SUCCESS         0
 #define VMX_FAIL_INVALID    1
 #define VMX_FAIL_VALID      2
+#define VMM_FAILURE         3
 
 #define VMCS_HOST_RSP       0x00006C14
+#define VMCS_HOST_RIP       0x00006C16
 
 #if defined(__V3_64BIT__)
 
-#define r(reg) %r##reg
+#define save_registers(location)       \
+       pushq   %rax;                   \
+       movq    location, %rax;         \
+       movq    %rdi, (%rax);           \
+       movq    %rsi, 8(%rax);          \
+       movq    %rbp, 16(%rax);         \
+       movq    $0, 24(%rax);           \
+       movq    %rbx, 32(%rax);         \
+       movq    %rdx, 40(%rax);         \
+       movq    %rcx, 48(%rax);         \
+       pushq   %rbx;                   \
+       movq    8(%rsp), %rbx;          \
+       movq    %rbx, 56(%rax);         \
+       popq    %rbx;                   \
+                                       \
+       movq    %r8, 64(%rax);          \
+       movq    %r9, 72(%rax);          \
+       movq    %r10, 80(%rax);         \
+       movq    %r11, 88(%rax);         \
+       movq    %r12, 96(%rax);         \
+       movq    %r13, 104(%rax);        \
+       movq    %r14, 112(%rax);        \
+       movq    %r15, 120(%rax);        \
+       popq    %rax;                   
+       
+#define restore_registers(location) \
+       mov     location, %rax;         \
+       mov     (%rax), %rdi;           \
+       mov     8(%rax), %rsi;          \
+       mov     16(%rax), %rbp;         \
+       mov     32(%rax), %rbx;         \
+       mov     40(%rax), %rdx;         \
+       mov 48(%rax), %rcx;             \
+                                       \
+       mov     64(%rax), %r8;          \
+       mov     72(%rax), %r9;          \
+       mov     80(%rax), %r10;         \
+       mov     88(%rax), %r11;         \
+       mov     96(%rax), %r12;         \
+       mov     104(%rax), %r13;        \
+       mov     112(%rax), %r14;        \
+       mov     120(%rax), %r15;        \
+       pushq %rbx;                     \
+       movq 56(%rax), %rbx;            \
+       movq %rbx, %rax;                \
+       popq %rbx;
+
+#define save_ctrl_regs(location)  \
+    pushq %rax;              \
+    pushq %rbx;              \
+    movq location, %rax;     \
+    movq %cr2, %rbx;         \
+    movq %rbx, 8(%rax);      \
+    popq %rbx;               \
+    popq %rax
+
+#define restore_ctrl_regs(location)  \
+    pushq %rax;              \
+    pushq %rbx;              \
+    movq location, %rax;     \
+    movq 8(%rax), %rbx;      \
+    movq %rbx, %cr2;         \
+    popq %rbx;               \
+    popq %rax
 
 #define PUSHA    \
     push %rax;   \
     pop %rcx;    \
     pop %rbx;    \
     pop %rax;    
+
+
+#define PRE_LAUNCH(return_target)      \
+    pushf;                             \
+    PUSHA;                             \
+    pushq %rdi;                                \
+    pushq %rdx;                                \
+                                       \
+    movq %rsp, %rax;                   \
+    movq $VMCS_HOST_RSP, %rbx;         \
+    vmwrite %rax, %rbx;                        \
+    jz .Lfail_valid;                   \
+    jc .Lfail_invalid;                 \
+                                       \
+    movq return_target, %rax;          \
+    movq $VMCS_HOST_RIP, %rbx;         \
+    vmwrite %rax, %rbx;                        \
+    jz .Lfail_valid;                   \
+    jc .Lfail_invalid;                 \
+                                       \
+    restore_ctrl_regs(%rdx);           \
+    restore_registers(%rdi);
+
+
+
+
+.align 8
+.globl v3_vmx_resume
+// vm_regs = %rdi, guest_info * = %rsi, ctrl_regs = %rdx
+v3_vmx_resume:
+
+    PRE_LAUNCH($vmx_resume_ret);
+
+    vmresume
+
+vmx_resume_ret:
+    jz .Lfail_valid
+    jc .Lfail_invalid
+    jmp .Lnormal_exit
+
+
+.align 8
+.globl v3_vmx_launch
+// vm_regs = %rdi, guest_info * = %rsi, ctrl_regs = %rdx
+v3_vmx_launch:
+
+    PRE_LAUNCH($vmx_launch_ret);
+
+    vmlaunch
+
+vmx_launch_ret:
+    jz .Lfail_valid
+    jc .Lfail_invalid
+    jmp .Lnormal_exit
+
+
+
+
+.Lfail_valid:
+    addq $16, %rsp
+    POPA
+    popf
+    movq $VMX_FAIL_VALID, %rax
+    jmp .Lreturn
+
+.Lfail_invalid:
+    addq $16, %rsp
+    POPA
+    popf
+    movq $VMX_FAIL_INVALID, %rax
+    jmp .Lreturn
+
+.Lvmm_failure:
+    addq $24, %rsp
+    movq $VMM_FAILURE, %rax
+    jmp .Lreturn
+
+
+.Lnormal_exit:
+    save_registers(16(%rsp));
+    save_ctrl_regs(16(%rsp));
+    addq $16, %rsp
+    POPA
+    popf
+    xorq %rax, %rax
+    jmp .Lreturn
+
+
+.Lreturn:
+    ret
+    
 #else
 
-#define r(reg) %e##reg
-  
+#define save_registers(location)       \
+       pushl   %eax;                   \
+       movl    location, %eax;         \
+       movl    %edi, (%eax);           \
+       movl    %esi, 8(%eax);          \
+       movl    %ebp, 16(%eax);         \
+       movl    $0, 24(%eax);           \
+       movl    %ebx, 32(%eax);         \
+       movl    %edx, 40(%eax);         \
+       movl    %ecx, 48(%eax);         \
+       pushl   %ebx;                   \
+       movl    8(%esp), %ebx;          \
+       movl    %ebx, 56(%eax);         \
+       popl    %ebx;                   \
+       popl    %eax;                   
+       
+
+#define restore_registers(location) \
+       pushl   %eax;                   \
+       movl    location, %eax;         \
+       movl    (%eax), %edi;           \
+       movl    8(%eax), %esi;          \
+       movl    16(%eax), %ebp;         \
+       movl    32(%eax), %ebx;         \
+       movl    40(%eax), %edx;         \
+       movl    48(%eax), %ecx;         \
+       popl    %eax;
 #define PUSHA    \
     push %eax;   \
     push %ebx;   \
     pop %ebx;    \
     pop %eax;
 
-#endif
-
 .align 8
 .globl v3_vmx_exit_handler
 v3_vmx_exit_handler:
-    PUSHA
-    call v3_vmx_handle_exit
+    save_registers(4(%esp))
+    addl $8, %esp
     POPA
+    popf
+    pushl %edi
+    call v3_handle_vmx_exit
+
+    andl %eax, %eax
+    jnz .Lvmm_failure
 
 v3_vmx_vmresume:
+    popl %edi
+    pushf
+    PUSHA
+    pushl %edi
+    restore_registers(%edi)
+
     vmresume
-    sti
+
+    addl $8, %esp
     jz .Lfail_valid
     jc .Lfail_invalid
     jmp .Lreturn
 
+.align 8
 .globl v3_vmx_vmlaunch
+// vm_regs = %edi
 v3_vmx_vmlaunch:
-    cli 
+    cli
     pushf
     PUSHA
+    pushl %edi
 
-    mov r(sp), r(ax)
-    mov $VMCS_HOST_RSP, r(bx)
-    vmwrite r(bx), r(ax)
+    movl %esp, %eax
+    movl $VMCS_HOST_RSP, %ebx
+    vmwrite %eax, %ebx
     jz .Lfail_valid
     jc .Lfail_invalid
 
+    movl $v3_vmx_exit_handler, %eax
+    movl $VMCS_HOST_RIP, %ebx
+    vmwrite %eax, %ebx
+    jz .Lfail_valid
+    jc .Lfail_invalid
+
+    restore_registers(%edi)
+
     vmlaunch
-    sti
     jz .Lfail_valid
     jc .Lfail_invalid
     jmp .Lreturn
 
 .Lfail_valid:
-    mov $VMX_FAIL_VALID, r(ax)
+    addl $8, %esp
+    POPA
+    movl $VMX_FAIL_VALID, %eax
     jmp .Lreturn
 
 .Lfail_invalid:
-    mov $VMX_FAIL_INVALID, r(ax)
+    addl $8, %esp
+    POPA
+    movl $VMX_FAIL_INVALID, %eax
     jmp .Lreturn
 
-.Lreturn:
+.Lvmm_failure:
+    addl $8, %esp
     POPA
+    movl $VMM_FAILURE, %eax
+    jmp .Lreturn
+
+.Lreturn:
+    sti
     popf
     ret
 
-
-
+#endif
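
For reference, the comments on the 64-bit entry points above document a System V AMD64 calling convention: the general-purpose register save area arrives in %rdi, the guest_info pointer in %rsi, and the control-register save area in %rdx, with %rax carrying back 0 on a normal VM exit or one of the failure codes defined at the top of the file. A minimal sketch of how the C side might declare these entry points is shown below; the struct type names are assumptions for illustration, not taken from this diff.

  /* Hedged sketch: possible C-side declarations for the new entry points.
   * Argument order follows the register comments in the assembly
   * (vm_regs -> %rdi, guest_info * -> %rsi, ctrl_regs -> %rdx).
   * The struct type names are assumptions, not taken from this diff. */
  struct v3_gprs;
  struct guest_info;
  struct v3_ctrl_regs;

  int v3_vmx_launch(struct v3_gprs * vm_regs,
                    struct guest_info * info,
                    struct v3_ctrl_regs * ctrl_regs);
  int v3_vmx_resume(struct v3_gprs * vm_regs,
                    struct guest_info * info,
                    struct v3_ctrl_regs * ctrl_regs);

  /* Per the constants at the top of vmx_lowlevel.S, the return value is
   * 0 on a normal exit, VMX_FAIL_INVALID or VMX_FAIL_VALID if a VMX
   * instruction failed, or VMM_FAILURE if the exit handler reported an
   * error. */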