Palacios Public Git Repository

To check out Palacios, execute

  git clone http://v3vee.org/palacios/palacios.web/palacios.git

This will give you the master branch. You probably want the devel branch or one of the release branches. To switch to the devel branch, execute

  cd palacios
  git checkout --track -b devel origin/devel

The other branches can be checked out in the same way.
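
To see which branches are available before picking one, list the remote branches; the checkout pattern is the same as for devel, with the branch name of your choice substituted for the placeholder:

  git branch -r
  git checkout --track -b <branch> origin/<branch>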


Commit: fixed crash issue due to improperly saving host state
File: palacios/src/palacios/svm_lowlevel.S
#;  -*- fundamental -*-


.text
.align 4

.globl v3_svm_launch
.globl v3_stgi
.globl v3_clgi

#define SVM_ERROR .dword 0xffffffff
#define SVM_SUCCESS .dword 0x00000000

#define vmrun .byte 0x0f,0x01,0xd8
#define vmload .byte 0x0F,0x01,0xDA
#define vmsave .byte 0x0F,0x01,0xDB
#define stgi   .byte 0x0F,0x01,0xDC
#define clgi   .byte 0x0F,0x01,0xDD


#define SVM_VM_HSAVE_PA_MSR .dword 0xc0010117

#ifdef __V3_32BIT__

// Store the guest general purpose registers into the save area at 'location'
#define Save_SVM_Registers(location)    \
        pushl   %eax;                   \
        movl    location, %eax;         \
        movl    %edi, (%eax);           \
        movl    %esi, 8(%eax);          \
        movl    %ebp, 16(%eax);         \
        movl    $0, 24(%eax);           \
        movl    %ebx, 32(%eax);         \
        movl    %edx, 40(%eax);         \
        movl    %ecx, 48(%eax);         \
        pushl   %ebx;                   \
        movl    8(%esp), %ebx;          \
        movl    %ebx, 56(%eax);         \
        popl    %ebx;                   \
        popl    %eax;


// Load the guest general purpose registers from the save area at 'location'
#define Restore_SVM_Registers(location) \
        pushl   %eax;                   \
        movl    location, %eax;         \
        movl    (%eax), %edi;           \
        movl    8(%eax), %esi;          \
        movl    16(%eax), %ebp;         \
        movl    32(%eax), %ebx;         \
        movl    40(%eax), %edx;         \
        movl    48(%eax), %ecx;         \
        popl    %eax;


// 32 bit GCC passes arguments via stack:
//   8(%ebp) = VMCB, 12(%ebp) = vm_regs, 16(%ebp) = host VMCB

v3_svm_launch:
        push    %ebp;
        movl    %esp, %ebp;
        pushf;
        pusha;                          // save host GPRs

        movl    16(%ebp), %eax;         // host VMCB
        vmsave;                         // save host state

        pushl   12(%ebp);               // vm_regs
        pushl   8(%ebp);                // VMCB

        Restore_SVM_Registers(8(%esp)); // load guest GPRs from vm_regs
        popl    %eax;                   // eax = VMCB

        vmload;
        vmrun;
        vmsave;

        Save_SVM_Registers(4(%esp));    // store guest GPRs back to vm_regs

        addl    $4, %esp;


        movl    16(%ebp), %eax;         // host VMCB
        vmload;                         // restore host state

        popa;
        popf;
        pop     %ebp;
        ret


#elif __V3_64BIT__

// Store the guest general purpose registers into the save area at 'location'
#define Save_SVM_Registers(location)    \
        pushq   %rax;                   \
        movq    location, %rax;         \
        movq    %rdi, (%rax);           \
        movq    %rsi, 8(%rax);          \
        movq    %rbp, 16(%rax);         \
        movq    $0, 24(%rax);           \
        movq    %rbx, 32(%rax);         \
        movq    %rdx, 40(%rax);         \
        movq    %rcx, 48(%rax);         \
        pushq   %rbx;                   \
        movq    16(%rsp), %rbx;         \
        movq    %rbx, 56(%rax);         \
        popq    %rbx;                   \
                                        \
        movq    %r8, 64(%rax);          \
        movq    %r9, 72(%rax);          \
        movq    %r10, 80(%rax);         \
        movq    %r11, 88(%rax);         \
        movq    %r12, 96(%rax);         \
        movq    %r13, 104(%rax);        \
        movq    %r14, 112(%rax);        \
        movq    %r15, 120(%rax);        \
        popq    %rax;


// Load the guest general purpose registers from the save area at 'location'
#define Restore_SVM_Registers(location) \
        push    %rax;                   \
        mov     location, %rax;         \
        mov     (%rax), %rdi;           \
        mov     8(%rax), %rsi;          \
        mov     16(%rax), %rbp;         \
        mov     32(%rax), %rbx;         \
        mov     40(%rax), %rdx;         \
        mov     48(%rax), %rcx;         \
                                        \
        mov     64(%rax), %r8;          \
        mov     72(%rax), %r9;          \
        mov     80(%rax), %r10;         \
        mov     88(%rax), %r11;         \
        mov     96(%rax), %r12;         \
        mov     104(%rax), %r13;        \
        mov     112(%rax), %r14;        \
        mov     120(%rax), %r15;        \
        pop     %rax;




// There is no pusha/popa in 64 bit mode, so push/pop the host registers explicitly
#define PUSHA                           \
        pushq %rbp;                     \
        pushq %rbx;                     \
        pushq %r8;                      \
        pushq %r9;                      \
        pushq %r10;                     \
        pushq %r11;                     \
        pushq %r12;                     \
        pushq %r13;                     \
        pushq %r14;                     \
        pushq %r15;


#define POPA                            \
        popq %r15;                      \
        popq %r14;                      \
        popq %r13;                      \
        popq %r12;                      \
        popq %r11;                      \
        popq %r10;                      \
        popq %r9;                       \
        popq %r8;                       \
        popq %rbx;                      \
        popq %rbp;


// Note that this is only for 64 bit GCC, 32 bit GCC passes via stack
// VMCB => RDI
// vm_regs => RSI
// HOST VMCB => RDX

v3_svm_launch:
        pushf;
        PUSHA

        pushq   %rdx;                   // host VMCB
        movq    %rdx, %rax;
        vmsave;                         // save host state

        pushq   %rsi                    // vm_regs

        movq    %rdi, %rax              // rax = VMCB
        Restore_SVM_Registers(%rsi);    // load guest GPRs from vm_regs


        vmload;
        vmrun;
        vmsave;


        Save_SVM_Registers(8(%rsp));    // store guest GPRs back to vm_regs

        addq $8, %rsp                   // pop vm_regs


        popq %rax;                      // rax = host VMCB
        vmload;                         // restore host state

        POPA
        popf;
        ret


#endif


v3_stgi:
        stgi;
        ret;

v3_clgi:
        clgi;
        ret;

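
For reference, the calling convention documented in the comments above (VMCB in the first argument, the register save area in the second, the host VMCB in the third) implies a C-side interface roughly like the sketch below. This is an illustration only: the struct and parameter names, the exact types, and the surrounding call sequence are assumptions inferred from the save-area offsets and the SVM instructions used in the stub, not the actual Palacios headers.

/* Hypothetical sketch -- not the real Palacios declarations.
 * Field order mirrors the offsets used by Save_SVM_Registers /
 * Restore_SVM_Registers: 0=rdi, 8=rsi, 16=rbp, 24=rsp (zeroed),
 * 32=rbx, 40=rdx, 48=rcx, 56=rax, then r8-r15 in the 64 bit case. */
#include <stdint.h>

struct guest_gprs {
    uint64_t rdi, rsi, rbp, rsp, rbx, rdx, rcx, rax;
    uint64_t r8, r9, r10, r11, r12, r13, r14, r15;
};

/* vmcb_pa and host_vmcb_pa are the physical addresses that the stub
 * places in RAX for vmload/vmrun/vmsave. */
void v3_svm_launch(uint64_t vmcb_pa, struct guest_gprs * vm_regs,
                   uint64_t host_vmcb_pa);

void v3_stgi(void);   /* STGI: set the global interrupt flag */
void v3_clgi(void);   /* CLGI: clear the global interrupt flag */

/* Illustrative call sequence around a single guest entry. */
static void run_guest_once(uint64_t vmcb_pa, struct guest_gprs * regs,
                           uint64_t host_vmcb_pa) {
    v3_clgi();                                   /* block interrupts around VMRUN */
    v3_svm_launch(vmcb_pa, regs, host_vmcb_pa);  /* returns after #VMEXIT */
    v3_stgi();                                   /* re-enable global interrupts */
}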