Palacios Public Git Repository

To check out Palacios, execute:

  git clone http://v3vee.org/palacios/palacios.web/palacios.git

This will give you the master branch. You probably want the devel branch or one of the release branches. To switch to the devel branch, simply execute:

  cd palacios
  git checkout --track -b devel origin/devel

The other branches are checked out the same way.
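For example, assuming the remote has a release branch named Release-1.2 (the name here is purely illustrative; list the remote branches first to see what actually exists), the same pattern applies:

  git branch -r                                             # list the remote-tracking branches
  git checkout --track -b Release-1.2 origin/Release-1.2    # check out a release branch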


palacios/src/palacios/svm_lowlevel.S (from the commit "64 bit guests initially supported")
#;  -*- fundamental -*-


.text
.align 4

.globl v3_svm_launch
.globl v3_stgi
.globl v3_clgi

#define SVM_ERROR .dword 0xffffffff
#define SVM_SUCCESS .dword 0x00000000

// SVM instructions, encoded as raw opcode bytes
#define vmrun .byte 0x0f,0x01,0xd8
#define vmload .byte 0x0F,0x01,0xDA
#define vmsave .byte 0x0F,0x01,0xDB
#define stgi   .byte 0x0F,0x01,0xDC
#define clgi   .byte 0x0F,0x01,0xDD


#ifdef __V3_32BIT__

// Store the guest GPRs from the CPU registers into the register area at 'location'
#define Save_SVM_Registers(location)    \
        pushl   %eax;                   \
        movl    location, %eax;         \
        movl    %edi, (%eax);           \
        movl    %esi, 8(%eax);          \
        movl    %ebp, 16(%eax);         \
        movl    $0, 24(%eax);           \
        movl    %ebx, 32(%eax);         \
        movl    %edx, 40(%eax);         \
        movl    %ecx, 48(%eax);         \
        pushl   %ebx;                   \
        movl    8(%esp), %ebx;          \
        movl    %ebx, 56(%eax);         \
        popl    %ebx;                   \
        popl    %eax;


// Load the guest GPRs from the register area at 'location' into the CPU registers
#define Restore_SVM_Registers(location) \
        pushl   %eax;                   \
        movl    location, %eax;         \
        movl    (%eax), %edi;           \
        movl    8(%eax), %esi;          \
        movl    16(%eax), %ebp;         \
        movl    32(%eax), %ebx;         \
        movl    40(%eax), %edx;         \
        movl    48(%eax), %ecx;         \
        popl    %eax;

// 32-bit entry point: VMCB => 8(%ebp), vm_regs => 12(%ebp)
v3_svm_launch:
        push    %ebp;
        movl    %esp, %ebp;
        pushf;
        push    %fs;
        push    %gs;
        pusha;

        pushl   12(%ebp);               // vm_regs
        pushl   8(%ebp);                // VMCB

        Restore_SVM_Registers(8(%esp)); // load the guest GPRs
        popl    %eax;                   // VMCB address into EAX (vmrun takes the VMCB in rAX)

        vmload;
        vmrun;
        vmsave;

        Save_SVM_Registers(4(%esp));    // write the guest GPRs back to vm_regs

        addl    $4, %esp;

        popa;
        pop     %gs;
        pop     %fs;
        popf;
        pop     %ebp;
        ret



#elif __V3_64BIT__

// Store the guest GPRs from the CPU registers into the register area at 'location'
#define Save_SVM_Registers(location)    \
        pushq   %rax;                   \
        movq    location, %rax;         \
        movq    %rdi, (%rax);           \
        movq    %rsi, 8(%rax);          \
        movq    %rbp, 16(%rax);         \
        movq    $0, 24(%rax);           \
        movq    %rbx, 32(%rax);         \
        movq    %rdx, 40(%rax);         \
        movq    %rcx, 48(%rax);         \
        pushq   %rbx;                   \
        movq    16(%rsp), %rbx;         \
        movq    %rbx, 56(%rax);         \
        popq    %rbx;                   \
                                        \
        movq    %r8, 64(%rax);          \
        movq    %r9, 72(%rax);          \
        movq    %r10, 80(%rax);         \
        movq    %r11, 88(%rax);         \
        movq    %r12, 96(%rax);         \
        movq    %r13, 104(%rax);        \
        movq    %r14, 112(%rax);        \
        movq    %r15, 120(%rax);        \
        popq    %rax;

// Load the guest GPRs from the register area at 'location' into the CPU registers
#define Restore_SVM_Registers(location) \
        push    %rax;                   \
        mov     location, %rax;         \
        mov     (%rax), %rdi;           \
        mov     8(%rax), %rsi;          \
        mov     16(%rax), %rbp;         \
        mov     32(%rax), %rbx;         \
        mov     40(%rax), %rdx;         \
        mov     48(%rax), %rcx;         \
                                        \
        mov     64(%rax), %r8;          \
        mov     72(%rax), %r9;          \
        mov     80(%rax), %r10;         \
        mov     88(%rax), %r11;         \
        mov     96(%rax), %r12;         \
        mov     104(%rax), %r13;        \
        mov     112(%rax), %r14;        \
        mov     120(%rax), %r15;        \
        pop     %rax;


// 64-bit mode has no pusha/popa, so save and restore the host GPRs by hand
#define PUSHA                           \
        pushq %rbp;                     \
        pushq %rbx;                     \
        pushq %r8;                      \
        pushq %r9;                      \
        pushq %r10;                     \
        pushq %r11;                     \
        pushq %r12;                     \
        pushq %r13;                     \
        pushq %r14;                     \
        pushq %r15;

#define POPA                            \
        popq %r15;                      \
        popq %r14;                      \
        popq %r13;                      \
        popq %r12;                      \
        popq %r11;                      \
        popq %r10;                      \
        popq %r9;                       \
        popq %r8;                       \
        popq %rbx;                      \
        popq %rbp;

// VMCB => RDI
// vm_regs => RSI

v3_svm_launch:
        pushf;
        push    %fs;
        push    %gs;
        PUSHA

        pushq   %rsi                    // keep the vm_regs pointer across the guest run

        movq    %rdi, %rax              // VMCB address into RAX (vmrun takes the VMCB in rAX)
        Restore_SVM_Registers(%rsi);    // load the guest GPRs

        vmload;
        vmrun;
        vmsave;

        Save_SVM_Registers(8(%rsp));    // write the guest GPRs back to vm_regs

        addq    $8, %rsp                // drop the saved vm_regs pointer

        POPA
        pop     %gs;
        pop     %fs;
        popf;
        ret


#endif


// Set the global interrupt flag (enable interrupts)
v3_stgi:
        stgi;
        ret;

// Clear the global interrupt flag (disable interrupts)
v3_clgi:
        clgi;
        ret;
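For reference, the C side would declare these entry points roughly as below. This is only a sketch: the struct and parameter names are illustrative assumptions inferred from the register save/restore offsets and the "VMCB => RDI, vm_regs => RSI" convention above, not definitions taken from the Palacios headers.

/* Hypothetical declarations -- names and layout are illustrative only. */
#include <stdint.h>

struct v3_gprs {                 /* assumed layout, matching the 8-byte offsets above */
    uint64_t rdi;                /* offset  0 */
    uint64_t rsi;                /* offset  8 */
    uint64_t rbp;                /* offset 16 */
    uint64_t rsp;                /* offset 24, zeroed by Save_SVM_Registers */
    uint64_t rbx;                /* offset 32 */
    uint64_t rdx;                /* offset 40 */
    uint64_t rcx;                /* offset 48 */
    uint64_t rax;                /* offset 56 */
    uint64_t r8, r9, r10, r11;   /* offsets 64..88  (64-bit build only) */
    uint64_t r12, r13, r14, r15; /* offsets 96..120 (64-bit build only) */
};

/* First argument is the VMCB, second the guest GPR area. */
void v3_svm_launch(void * vmcb, struct v3_gprs * vm_regs);

void v3_stgi(void);              /* enable global interrupts  */
void v3_clgi(void);              /* disable global interrupts */

The stgi/clgi wrappers let C code toggle the global interrupt flag, for example around a call to v3_svm_launch.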