// Result markers emitted as data words (0xffffffff = error, 0 = success);
// presumably stored/compared by the surrounding test code — confirm at use sites.
#define SVM_ERROR .dword 0xffffffff
#define SVM_SUCCESS .dword 0x00000000
// AMD SVM instructions hand-encoded as raw bytes (opcodes 0F 01 /D8..DD) so
// this file assembles even when the assembler lacks the SVM mnemonics.
#define vmrun .byte 0x0f,0x01,0xd8
#define vmload .byte 0x0F,0x01,0xDA
#define vmsave .byte 0x0F,0x01,0xDB
#define stgi .byte 0x0F,0x01,0xDC
#define clgi .byte 0x0F,0x01,0xDD
// MSR number of VM_HSAVE_PA (physical address of the SVM host state-save area).
#define SVM_VM_HSAVE_PA_MSR .dword 0xc0010117
// Save_SVM_Registers(location): store the 32-bit general-purpose registers
// into the save area whose address is read from `location`; %eax serves as
// the area pointer and is therefore not preserved by this sequence.
// Offsets are byte offsets into the save area.
// NOTE(review): several store lines (other offsets) are elided from this
// excerpt — confirm the full layout against the complete file.
#define Save_SVM_Registers(location) \
movl location, %eax; \
movl %ebp, 16(%eax); \
movl %ebx, 32(%eax); \
movl %edx, 40(%eax); \
movl %ecx, 48(%eax); \
movl %ebx, 56(%eax); /* NOTE(review): %ebx stored again at 56 — presumably reloaded with another value by an elided line above; confirm */ \
// Restore_SVM_Registers(location): counterpart of Save_SVM_Registers — reload
// the 32-bit general-purpose registers from the save area addressed by
// `location` (loaded into %eax first, which serves as the area pointer here).
// NOTE(review): the macro continues past this excerpt (trailing '\'); some
// restore lines are not visible here.
#define Restore_SVM_Registers(location) \
movl location, %eax; \
movl 16(%eax), %ebp; \
movl 32(%eax), %ebx; \
movl 40(%eax), %edx; \
movl 48(%eax), %ecx; \
// 32 bit GCC passes arguments via stack
// NOTE(review): the %esp offsets below (8 vs 4) depend on the return address
// and any pushes laid down by elided code above these call sites — confirm
// against the full function bodies before changing either offset.
Restore_SVM_Registers(8(%esp));
Save_SVM_Registers(4(%esp));
// 64-bit variant of Save_SVM_Registers: spill the general-purpose registers
// into the save area addressed by `location`.  %rax holds the area pointer
// and is clobbered; %rbx is additionally used as scratch for the stack slot
// copied to offset 56 (safe: its original value was already stored at 32).
// NOTE(review): some store lines (other offsets) are elided from this excerpt.
#define Save_SVM_Registers(location) \
movq location, %rax; \
movq %rbp, 16(%rax); \
movq %rbx, 32(%rax); \
movq %rdx, 40(%rax); \
movq %rcx, 48(%rax); \
movq 16(%rsp), %rbx; /* fetch the value 16 bytes above %rsp — NOTE(review): which slot this is depends on elided pushes above; confirm */ \
movq %rbx, 56(%rax); /* ...and store it at offset 56 */ \
movq %r8, 64(%rax); \
movq %r9, 72(%rax); \
movq %r10, 80(%rax); \
movq %r11, 88(%rax); \
movq %r12, 96(%rax); \
movq %r13, 104(%rax); \
movq %r14, 112(%rax); \
movq %r15, 120(%rax); \
// 64-bit Restore_SVM_Registers: reload the registers from the save area at
// `location` (pointer kept in %rax).  Offsets mirror the 64-bit Save macro.
// Suffix-less `mov` is unambiguous here: operand size is implied by the
// 64-bit register operands.
// NOTE(review): restores for some offsets are elided from this excerpt and
// the macro continues past it (trailing '\') — confirm the full sequence.
#define Restore_SVM_Registers(location) \
mov location, %rax; \
mov 16(%rax), %rbp; \
mov 32(%rax), %rbx; \
mov 40(%rax), %rdx; \
mov 48(%rax), %rcx; \
mov 80(%rax), %r10; \
mov 88(%rax), %r11; \
mov 96(%rax), %r12; \
mov 104(%rax), %r13; \
mov 112(%rax), %r14; \
mov 120(%rax), %r15; \
// Note that this is only for 64 bit GCC, 32 bit GCC passes via stack
// NOTE(review): %rsi is the second System V AMD64 integer argument register;
// 8(%rsp) is a stack slot whose meaning depends on elided pushes above this
// point — confirm against the full function bodies.
Restore_SVM_Registers(%rsi);
Save_SVM_Registers(8(%rsp));