Palacios Public Git Repository

To check out Palacios, execute

  git clone http://v3vee.org/palacios/palacios.web/palacios.git
This will give you the master branch. You probably want the devel branch or one of the release branches. To switch to the devel branch, simply execute
  cd palacios
  git checkout --track -b devel origin/devel
The other branches are similar.


bug fix for 32-bit ebp clobbering
[palacios.git] / palacios / src / palacios / svm_lowlevel.S
1 #;  -*- fundamental -*-
2
// Low-level AMD SVM (AMD-V) entry/exit stubs for Palacios.
// AT&T syntax; this .S file is run through the C preprocessor before GAS.
3 .text
4 .align 4
5
6 .globl v3_svm_launch
7 .globl v3_stgi
8 .globl v3_clgi
9
// Status constants (kept for documentation value; not referenced below).
10 #define SVM_ERROR .dword 0xffffffff
11 #define SVM_SUCCESS .dword 0x00000000
12
// SVM instructions hand-encoded as raw opcode bytes so this file
// assembles even with assemblers that predate SVM mnemonic support.
// vmrun/vmload/vmsave all take the VMCB physical address in rAX.
13 #define vmrun .byte 0x0f,0x01,0xd8
14 #define vmload .byte 0x0F,0x01,0xDA
15 #define vmsave .byte 0x0F,0x01,0xDB
16 #define stgi   .byte 0x0F,0x01,0xDC
17 #define clgi   .byte 0x0F,0x01,0xDD
18
19
// MSR holding the physical address of the host state-save area
// (AMD APM: VM_HSAVE_PA, MSR 0xC001_0117).  Not used in this file.
20 #define SVM_VM_HSAVE_PA_MSR .dword 0xc0010117
21
22
23 #ifdef __V3_32BIT__
24
25 // Note that RAX is saved in the VMCB, so we don't touch it here
26
// Save_SVM_Registers(location): store the just-exited guest's GPRs into
// the register-save struct whose address is read from `location`.
// Offsets advance by 8 because the struct apparently uses 64-bit slots
// even in 32-bit builds -- NOTE(review): layout inferred from these
// offsets; confirm against the guest register-file struct definition.
// Slot 24 (the rsp slot) is zeroed and slot 56 (rax) is skipped: RSP
// and RAX are captured in the VMCB, not here.
// %eax is used as scratch and preserved by push/pop, so an
// %esp-relative `location` must account for the extra 4 bytes this
// macro has pushed by the time `location` is evaluated.
27 #define Save_SVM_Registers(location)    \
28         pushl   %eax;                   \
29         movl    location, %eax;         \
30         movl    %edi, (%eax);           \
31         movl    %esi, 8(%eax);          \
32         movl    %ebp, 16(%eax);         \
33         movl    $0, 24(%eax);           \
34         movl    %ebx, 32(%eax);         \
35         movl    %edx, 40(%eax);         \
36         movl    %ecx, 48(%eax);         \
37         popl    %eax;                   
38         
39
// Restore_SVM_Registers(location): inverse of Save_SVM_Registers --
// load the guest's GPRs from the struct whose address is read from
// `location` (same 8-byte-slot layout; rsp/rax come from the VMCB).
// WARNING: this clobbers %ebp with the guest's value, so the caller
// cannot use a frame pointer again until host registers are restored
// (v3_svm_launch relies on pusha/popa for that -- the historical
// "ebp clobbering" fix).  %eax is scratch, preserved by push/pop; an
// %esp-relative `location` must allow for the extra 4 bytes pushed.
40 #define Restore_SVM_Registers(location) \
41         pushl   %eax;                   \
42         movl    location, %eax;         \
43         movl    (%eax), %edi;           \
44         movl    8(%eax), %esi;          \
45         movl    16(%eax), %ebp;         \
46         movl    32(%eax), %ebx;         \
47         movl    40(%eax), %edx;         \
48         movl    48(%eax), %ecx;         \
49         popl    %eax;
50
51
52 // 32 bit GCC passes arguments via stack
53
// v3_svm_launch (32-bit, cdecl):
//   8(%ebp)  = guest VMCB physical address
//   12(%ebp) = pointer to guest register-save struct (vm_regs)
//   16(%ebp) = host VMCB physical address
// Enters the guest with vmrun and returns after the next #VMEXIT.
// Always returns 0; failures are not detected here.
54 v3_svm_launch:
55         push    %ebp;
56         movl    %esp, %ebp;
57         pushf;
// pusha saves every host GPR (including %ebp) so the guest-register
// restore below may clobber them freely.
58         pusha;
59
// Save host state into the host VMCB (vmsave takes the VMCB physical
// address in %eax).
60         movl    16(%ebp), %eax;
61         vmsave;
62
// Stash vm_regs and the guest VMCB address on the stack: the guest
// register swap clobbers every GPR we could otherwise keep them in.
63         pushl   12(%ebp);
64         pushl   8(%ebp);
65
// Stack here: 0(%esp)=guest VMCB, 4(%esp)=vm_regs.  The macro pushes
// %eax first, so from inside it vm_regs is at 8(%esp).
66         Restore_SVM_Registers(8(%esp));
// Pop the guest VMCB physical address into %eax for vmload/vmrun.
67         popl    %eax;
68
69         vmload;
70         vmrun;
71         vmsave;
72
// #VMEXIT resumes here.  0(%esp)=vm_regs; through the macro's own
// push of %eax it is seen at 4(%esp).
73         Save_SVM_Registers(4(%esp));
74
// Drop the saved vm_regs pointer.
75         addl    $4, %esp;
76
// Restore all host GPRs (this also makes %ebp valid again) and flags.
77         popa;
78         popf;
79
// Reload host state saved at entry.
80         movl    16(%ebp), %eax;
81         vmload;
82
83         // We don't detect failures here, so just return 0
84         xorl    %eax, %eax 
85
86         pop     %ebp;
87         ret
88
89
90
91 #elif __V3_64BIT__
92
93 // Note that RAX is saved in the VMCB, so we don't touch it here
94
// Save_SVM_Registers(location): store the just-exited guest's GPRs
// into the 64-bit register-save struct whose address is read from
// `location`.  Slot 24 (rsp) is zeroed and slot 56 (rax) is skipped --
// both are captured in the VMCB.  %rax is scratch, preserved by
// push/pop, so an %rsp-relative `location` must account for the extra
// 8 bytes this macro has pushed when `location` is evaluated.
95 #define Save_SVM_Registers(location)    \
96         pushq   %rax;                   \
97         movq    location, %rax;         \
98         movq    %rdi, (%rax);           \
99         movq    %rsi, 8(%rax);          \
100         movq    %rbp, 16(%rax);         \
101         movq    $0, 24(%rax);           \
102         movq    %rbx, 32(%rax);         \
103         movq    %rdx, 40(%rax);         \
104         movq    %rcx, 48(%rax);         \
105                                         \
106         movq    %r8, 64(%rax);          \
107         movq    %r9, 72(%rax);          \
108         movq    %r10, 80(%rax);         \
109         movq    %r11, 88(%rax);         \
110         movq    %r12, 96(%rax);         \
111         movq    %r13, 104(%rax);        \
112         movq    %r14, 112(%rax);        \
113         movq    %r15, 120(%rax);        \
114         popq    %rax;                   
115         
116
// Restore_SVM_Registers(location): inverse of Save_SVM_Registers --
// load the guest's GPRs from the struct at `location` (rsp/rax come
// from the VMCB).  WARNING: clobbers %rbp with the guest's value; the
// caller must restore host registers via POPA before relying on it.
// %rax is scratch, preserved by push/pop around the loads.
117 #define Restore_SVM_Registers(location) \
118         push    %rax;                   \
119         mov     location, %rax;         \
120         mov     (%rax), %rdi;           \
121         mov     8(%rax), %rsi;          \
122         mov     16(%rax), %rbp;         \
123         mov     32(%rax), %rbx;         \
124         mov     40(%rax), %rdx;         \
125         mov     48(%rax), %rcx;         \
126                                         \
127         mov     64(%rax), %r8;          \
128         mov     72(%rax), %r9;          \
129         mov     80(%rax), %r10;         \
130         mov     88(%rax), %r11;         \
131         mov     96(%rax), %r12;         \
132         mov     104(%rax), %r13;        \
133         mov     112(%rax), %r14;        \
134         mov     120(%rax), %r15;        \
135         pop     %rax;
136
137
138
139
// PUSHA: save the host GPRs that the guest-register swap will clobber
// (%rbp, %rbx, %r8-%r15).  There is no hardware pusha instruction in
// 64-bit mode, hence the explicit sequence.  Argument registers
// rdi/rsi/rdx/rcx are not preserved here; v3_svm_launch keeps copies
// on the stack instead.  Must mirror POPA exactly, in reverse order.
140 #define PUSHA                           \
141         pushq %rbp;                     \
142         pushq %rbx;                     \
143         pushq %r8;                      \
144         pushq %r9;                      \
145         pushq %r10;                     \
146         pushq %r11;                     \
147         pushq %r12;                     \
148         pushq %r13;                     \
149         pushq %r14;                     \
150         pushq %r15;                     
151
152
// POPA: restore the host GPRs saved by PUSHA, in exact reverse order.
// Any change to PUSHA's register list or order must be mirrored here.
153 #define POPA                            \
154         popq %r15;                      \
155         popq %r14;                      \
156         popq %r13;                      \
157         popq %r12;                      \
158         popq %r11;                      \
159         popq %r10;                      \
160         popq %r9;                       \
161         popq %r8;                       \
162         popq %rbx;                      \
163         popq %rbp;                      
164
165
166 // Note that this is only for 64 bit GCC, 32 bit GCC passes via stack
167 // VMCB => RDI
168 // vm_regs => RSI
169 // HOST VMCB => RDX
170
// v3_svm_launch (64-bit, SysV AMD64):
//   %rdi = guest VMCB physical address
//   %rsi = pointer to guest register-save struct (vm_regs)
//   %rdx = host VMCB physical address
// Enters the guest with vmrun and returns after the next #VMEXIT.
// As in the 32-bit path, failures are not detected: always returns 0.
// (Previously the return value was whatever vmload left in %rax --
// i.e. the host VMCB address popped below -- which was garbage to any
// caller inspecting it; now zeroed for consistency with 32-bit.)
171 v3_svm_launch:
172         pushf;
173         PUSHA
174         
// Save host state into the host VMCB (vmsave takes the VMCB physical
// address in %rax), keeping a copy of the address on the stack for
// the final vmload on the way out.
175         pushq   %rdx;
176         movq    %rdx, %rax;
177         vmsave;
178
// Stash vm_regs: the guest-register swap clobbers %rsi.
179         pushq   %rsi
180
// Load the guest VMCB address into %rax now; the macro's push/pop of
// %rax preserves it across the guest-register restore.
181         movq    %rdi, %rax
182         Restore_SVM_Registers(%rsi);
183
184
185         vmload;
186         vmrun;
187         vmsave;
188
189
// #VMEXIT resumes here.  Stack: 0(%rsp)=vm_regs, 8(%rsp)=host VMCB;
// through the macro's own push of %rax, vm_regs is seen at 8(%rsp).
190         Save_SVM_Registers(8(%rsp));
191
// Drop the saved vm_regs pointer.
192         addq $8, %rsp
193
194
// Reload host state saved at entry.
195         popq %rax;
196         vmload;
197
        // We don't detect failures here, so just return 0
        // (matches the 32-bit implementation).
        xorq    %rax, %rax
198         POPA
199         popf;
200         ret
201
202
203 #endif
204
205
// v3_stgi: execute STGI (Set Global Interrupt flag), re-enabling
// interrupt delivery that a prior CLGI masked around VM entry.
206 v3_stgi:
207         stgi;
208         ret;
209
// v3_clgi: execute CLGI (Clear Global Interrupt flag), masking
// physical interrupt delivery (used around guest entry/exit).
210 v3_clgi:
211         clgi;
212         ret;
213         
214