Palacios Public Git Repository

To check out Palacios, execute

  git clone http://v3vee.org/palacios/palacios.web/palacios.git
This will give you the master branch. You probably want the devel branch or one of the release branches. To switch to the devel branch, simply execute
  cd palacios
  git checkout --track -b devel origin/devel
The other branches are similar.


ported 32 bit VMX launch code for nested entries
[palacios.git] / palacios / src / palacios / vmx_lowlevel.S
index 3591463..757d835 100644 (file)
@@ -1,13 +1,79 @@
+#;  -*- fundamental -*-
 
 #define VMX_SUCCESS         0
 #define VMX_FAIL_INVALID    1
 #define VMX_FAIL_VALID      2
+#define VMM_FAILURE         3
 
 #define VMCS_HOST_RSP       0x00006C14
+#define VMCS_HOST_RIP       0x00006C16
 
 #if defined(__V3_64BIT__)
 
-#define r(reg) %r##reg
+#define save_registers(location)       \
+       pushq   %rax;                   \
+       movq    location, %rax;         \
+       movq    %rdi, (%rax);           \
+       movq    %rsi, 8(%rax);          \
+       movq    %rbp, 16(%rax);         \
+       movq    $0, 24(%rax);           \
+       movq    %rbx, 32(%rax);         \
+       movq    %rdx, 40(%rax);         \
+       movq    %rcx, 48(%rax);         \
+       pushq   %rbx;                   \
+       movq    8(%rsp), %rbx;          \
+       movq    %rbx, 56(%rax);         \
+       popq    %rbx;                   \
+                                       \
+       movq    %r8, 64(%rax);          \
+       movq    %r9, 72(%rax);          \
+       movq    %r10, 80(%rax);         \
+       movq    %r11, 88(%rax);         \
+       movq    %r12, 96(%rax);         \
+       movq    %r13, 104(%rax);        \
+       movq    %r14, 112(%rax);        \
+       movq    %r15, 120(%rax);        \
+       popq    %rax;                   
+       
+#define restore_registers(location) \
+       mov     location, %rax;         \
+       mov     (%rax), %rdi;           \
+       mov     8(%rax), %rsi;          \
+       mov     16(%rax), %rbp;         \
+       mov     32(%rax), %rbx;         \
+       mov     40(%rax), %rdx;         \
+       mov 48(%rax), %rcx;             \
+                                       \
+       mov     64(%rax), %r8;          \
+       mov     72(%rax), %r9;          \
+       mov     80(%rax), %r10;         \
+       mov     88(%rax), %r11;         \
+       mov     96(%rax), %r12;         \
+       mov     104(%rax), %r13;        \
+       mov     112(%rax), %r14;        \
+       mov     120(%rax), %r15;        \
+       pushq %rbx;                     \
+       movq 56(%rax), %rbx;            \
+       movq %rbx, %rax;                \
+       popq %rbx;
+
+#define save_ctrl_regs(location)  \
+    pushq %rax;              \
+    pushq %rbx;              \
+    movq location, %rax;     \
+    movq %cr2, %rbx;         \
+    movq %rbx, 8(%rax);      \
+    popq %rbx;               \
+    popq %rax
+
+#define restore_ctrl_regs(location)  \
+    pushq %rax;              \
+    pushq %rbx;              \
+    movq location, %rax;     \
+    movq 8(%rax), %rbx;      \
+    movq %rbx, %cr2;         \
+    popq %rbx;               \
+    popq %rax
 
 #define PUSHA    \
     push %rax;   \
     pop %rcx;    \
     pop %rbx;    \
     pop %rax;    
+
+
+#define PRE_LAUNCH(return_target)      \
+    pushf;                             \
+    PUSHA;                             \
+    pushq %rdi;                                \
+    pushq %rdx;                                \
+                                       \
+    movq %rsp, %rax;                   \
+    movq $VMCS_HOST_RSP, %rbx;         \
+    vmwrite %rax, %rbx;                        \
+    jz .Lfail_valid;                   \
+    jc .Lfail_invalid;                 \
+                                       \
+    movq return_target, %rax;          \
+    movq $VMCS_HOST_RIP, %rbx;         \
+    vmwrite %rax, %rbx;                        \
+    jz .Lfail_valid;                   \
+    jc .Lfail_invalid;                 \
+                                       \
+    restore_ctrl_regs(%rdx);           \
+    restore_registers(%rdi);
+
+
+
+
+.align 8
+.globl v3_vmx_resume
+// vm_regs = %rdi, guest_info * = %rsi, ctrl_regs = %rdx
+v3_vmx_resume:
+
+    PRE_LAUNCH($vmx_resume_ret);
+
+    vmresume
+
+vmx_resume_ret:
+    jz .Lfail_valid
+    jc .Lfail_invalid
+    jmp .Lnormal_exit
+
+
+.align 8
+.globl v3_vmx_launch
+// vm_regs = %rdi, guest_info * = %rsi, ctrl_regs = %rdx
+v3_vmx_launch:
+
+    PRE_LAUNCH($vmx_launch_ret);
+
+    vmlaunch
+
+vmx_launch_ret:
+    jz .Lfail_valid
+    jc .Lfail_invalid
+    jmp .Lnormal_exit
+
+
+
+
+.Lfail_valid:
+    addq $16, %rsp
+    POPA
+    popf
+    movq $VMX_FAIL_VALID, %rax
+    jmp .Lreturn
+
+.Lfail_invalid:
+    addq $16, %rsp
+    POPA
+    popf
+    movq $VMX_FAIL_INVALID, %rax
+    jmp .Lreturn
+
+.Lvmm_failure:
+    addq $24, %rsp
+    movq $VMM_FAILURE, %rax
+    jmp .Lreturn
+
+
+.Lnormal_exit:
+    save_registers(16(%rsp));
+    save_ctrl_regs(16(%rsp));
+    addq $16, %rsp
+    POPA
+    popf
+    xorq %rax, %rax
+    jmp .Lreturn
+
+
+.Lreturn:
+    ret
+    
 #else
 
-#define r(reg) %e##reg
-  
+#define save_registers(location)       \
+       pushl   %eax;                   \
+       movl    location, %eax;         \
+       movl    %edi, (%eax);           \
+       movl    %esi, 8(%eax);          \
+       movl    %ebp, 16(%eax);         \
+       movl    $0, 24(%eax);           \
+       movl    %ebx, 32(%eax);         \
+       movl    %edx, 40(%eax);         \
+       movl    %ecx, 48(%eax);         \
+       pushl   %ebx;                   \
+       movl    8(%esp), %ebx;          \
+       movl    %ebx, 56(%eax);         \
+       popl    %ebx;                   \
+       popl    %eax;                   
+       
+
+#define restore_registers(location) \
+       pushl   %eax;                   \
+       movl    location, %eax;         \
+       movl    (%eax), %edi;           \
+       movl    8(%eax), %esi;          \
+       movl    16(%eax), %ebp;         \
+       movl    32(%eax), %ebx;         \
+       movl    40(%eax), %edx;         \
+       movl    48(%eax), %ecx;         \
+       popl    %eax;
+
+#define save_ctrl_regs(location)  \
+    push %eax;              \
+    push %ebx;              \
+    movl location, %eax;     \
+    movl %cr2, %ebx;         \
+    movl %ebx, 8(%eax);      \
+    popl %ebx;               \
+    popl %eax
+
+#define restore_ctrl_regs(location)  \
+    push %eax;              \
+    push %ebx;              \
+    movl location, %eax;     \
+    movl 8(%eax), %ebx;      \
+    movl %ebx, %cr2;         \
+    popl %ebx;               \
+    popl %eax
+
 #define PUSHA    \
     push %eax;   \
     push %ebx;   \
     pop %ebx;    \
     pop %eax;
 
-#endif
 
-.align 8
-.globl v3_vmx_exit_handler
-v3_vmx_exit_handler:
-    PUSHA
-    call v3_vmx_handle_exit
-    POPA
+#define PRE_LAUNCH(return_target)      \
+    pushf;                             \
+    PUSHA;                             \
+    pushl %edi;                                \
+    pushl %edx;                                \
+                                       \
+    movl %esp, %eax;                   \
+    movl $VMCS_HOST_RSP, %ebx;         \
+    vmwrite %eax, %ebx;                        \
+    jz .Lfail_valid;                   \
+    jc .Lfail_invalid;                 \
+                                       \
+    movl return_target, %eax;          \
+    movl $VMCS_HOST_RIP, %ebx;         \
+    vmwrite %eax, %ebx;                        \
+    jz .Lfail_valid;                   \
+    jc .Lfail_invalid;                 \
+                                       \
+    restore_ctrl_regs(%edx);           \
+    restore_registers(%edi);
+
+
+.align 4
+.globl v3_vmx_resume
+v3_vmx_resume:
+
+    PRE_LAUNCH($vmx_resume_ret);
 
-v3_vmx_vmresume:
     vmresume
-    sti
+
+vmx_resume_ret:
     jz .Lfail_valid
     jc .Lfail_invalid
-    jmp .Lreturn
+    jmp .Lnormal_exit
 
-.globl v3_vmx_vmlaunch
-v3_vmx_vmlaunch:
-    cli 
-    pushf
-    PUSHA
+.align 4
+.globl v3_vmx_launch
+// vm_regs = %edi
+v3_vmx_launch:
 
-    mov r(sp), r(ax)
-    mov $VMCS_HOST_RSP, r(bx)
-    vmwrite r(bx), r(ax)
-    jz .Lfail_valid
-    jc .Lfail_invalid
+    PRE_LAUNCH($vmx_launch_ret);
 
     vmlaunch
-    sti
+
+vmx_launch_ret:
     jz .Lfail_valid
     jc .Lfail_invalid
-    jmp .Lreturn
+    jmp .Lnormal_exit
 
 .Lfail_valid:
-    mov $VMX_FAIL_VALID, r(ax)
+    addl $8, %esp
+    POPA
+    popf
+    movl $VMX_FAIL_VALID, %eax
     jmp .Lreturn
 
 .Lfail_invalid:
-    mov $VMX_FAIL_INVALID, r(ax)
+    addl $8, %esp
+    POPA
+    popf
+    movl $VMX_FAIL_INVALID, %eax
     jmp .Lreturn
 
-.Lreturn:
+.Lvmm_failure:
+    addl $12, %esp
+    movl $VMM_FAILURE, %eax
+    jmp .Lreturn
+
+
+.Lnormal_exit:
+    save_registers(8(%esp));
+    save_ctrl_regs(8(%esp));
+    addl $8, %esp
     POPA
     popf
-    ret
-
+    xorl %eax, %eax
+    jmp .Lreturn
 
+.Lreturn:
+    ret
 
+#endif