/*
 * SVM return-code sentinels and raw SVM instruction encodings.
 *
 * The SVM instructions are emitted as literal opcode bytes so this file
 * assembles even with assemblers that predate SVM mnemonic support.
 * Encodings (see AMD APM Vol. 3):
 *   VMRUN = 0f 01 d8, VMLOAD = 0f 01 da, VMSAVE = 0f 01 db,
 *   STGI  = 0f 01 dc, CLGI   = 0f 01 dd
 * Hex digits are lowercase throughout for consistency with the rest of
 * the file (SVM_ERROR, SVM_VM_HSAVE_PA_MSR, vmrun).
 */
#define SVM_ERROR .dword 0xffffffff    /* failure sentinel (all-ones) */
#define SVM_SUCCESS .dword 0x00000000  /* success sentinel */

#define vmrun .byte 0x0f,0x01,0xd8     /* VMRUN  — enter guest; VMCB PA in rAX */
#define vmload .byte 0x0f,0x01,0xda    /* VMLOAD — load extra guest state from VMCB */
#define vmsave .byte 0x0f,0x01,0xdb    /* VMSAVE — save extra guest state to VMCB */
#define stgi .byte 0x0f,0x01,0xdc      /* STGI   — set global interrupt flag */
#define clgi .byte 0x0f,0x01,0xdd      /* CLGI   — clear global interrupt flag */

/* MSR holding the physical address of the host state-save area (VM_HSAVE_PA). */
#define SVM_VM_HSAVE_PA_MSR .dword 0xc0010117
/*
 * Save_SVM_Registers(location) — 32-bit variant.
 * Stores the general-purpose registers into a save area whose address is
 * read from `location` (a memory operand, e.g. a stack argument slot).
 * %eax is loaded with the save-area pointer first, so the previous %eax
 * value must be preserved elsewhere (not visible in this excerpt).
 * NOTE(review): slot offsets (16, 32, 40, 48, 56) step by 8, suggesting a
 * layout shared with the 64-bit variant below — confirm against the
 * structure definition.  Interior lines of this macro are elided here.
 */
#define Save_SVM_Registers(location) \
movl location, %eax; /* %eax = pointer to register save area */ \
movl %ebp, 16(%eax); \
movl %ebx, 32(%eax); \
movl %edx, 40(%eax); \
movl %ecx, 48(%eax); \
movl %ebx, 56(%eax); /* NOTE(review): %ebx presumably reloaded in an elided line before this second store — verify */ \
/*
 * Restore_SVM_Registers(location) — 32-bit variant.
 * Counterpart of Save_SVM_Registers: reloads the general-purpose registers
 * from the save area addressed by `location`.  %eax holds the base pointer
 * while the other registers load, so it is presumably restored last in a
 * line elided from this excerpt — verify.
 */
#define Restore_SVM_Registers(location) \
movl location, %eax; /* %eax = pointer to register save area */ \
movl 16(%eax), %ebp; \
movl 32(%eax), %ebx; \
movl 40(%eax), %edx; \
movl 48(%eax), %ecx; \
54 // 32 bit GCC passes arguments via stack
68 Restore_SVM_Registers(8(%esp));
75 Save_SVM_Registers(4(%esp));
/*
 * Save_SVM_Registers(location) — 64-bit variant.
 * Stores the general-purpose registers into the save area addressed by
 * `location`.  %rax is loaded with the save-area pointer first, so the
 * previous %rax value must be preserved elsewhere (not visible here).
 * Slot layout visible in this excerpt:
 *   16=rbp 32=rbx 40=rdx 48=rcx 56=(value taken from 16(%rsp))
 *   64=r8 72=r9 80=r10 88=r11 96=r12 104=r13 112=r14 120=r15
 * Interior lines of this macro are elided from this excerpt.
 */
#define Save_SVM_Registers(location) \
movq location, %rax; /* %rax = pointer to register save area */ \
movq %rbp, 16(%rax); \
movq %rbx, 32(%rax); \
movq %rdx, 40(%rax); \
movq %rcx, 48(%rax); \
movq 16(%rsp), %rbx; /* NOTE(review): picks up a value stashed on the stack — presumably the caller's original %rax or %rbx; confirm against the elided lines */ \
movq %rbx, 56(%rax); \
movq %r8, 64(%rax); \
movq %r9, 72(%rax); \
movq %r10, 80(%rax); \
movq %r11, 88(%rax); \
movq %r12, 96(%rax); \
movq %r13, 104(%rax); \
movq %r14, 112(%rax); \
movq %r15, 120(%rax); \
/*
 * Restore_SVM_Registers(location) — 64-bit variant.
 * Counterpart of the 64-bit Save_SVM_Registers above: reloads the
 * general-purpose registers from the save area addressed by `location`.
 * %rax serves as the base pointer while the other registers load, so it is
 * presumably restored last in a line elided from this excerpt — verify.
 * Explicit `movq` suffixes are used for consistency with the Save macro
 * (and the 32-bit macros' `movl`); with 64-bit register operands the
 * suffix is redundant to the assembler and the encoding is unchanged.
 */
#define Restore_SVM_Registers(location) \
movq location, %rax; /* %rax = pointer to register save area */ \
movq 16(%rax), %rbp; \
movq 32(%rax), %rbx; \
movq 40(%rax), %rdx; \
movq 48(%rax), %rcx; \
movq 80(%rax), %r10; \
movq 88(%rax), %r11; \
movq 96(%rax), %r12; \
movq 104(%rax), %r13; \
movq 112(%rax), %r14; \
movq 120(%rax), %r15; \
167 // Note that this is only for 64 bit GCC, 32 bit GCC passes via stack
183 Restore_SVM_Registers(%rsi);
191 Save_SVM_Registers(8(%rsp));