Palacios Public Git Repository

To check out Palacios, execute:

  git clone http://v3vee.org/palacios/palacios.web/palacios.git
This will give you the master branch. You probably want the devel branch or one of the release branches. To switch to the devel branch, execute:
  cd palacios
  git checkout --track -b devel origin/devel
The other branches are similar.


11cc3b003157fbf35f9ff55457ae1b576c354769
[palacios.git] / palacios / src / palacios / svm_lowlevel.S
#;  -*- fundamental -*-

// Low-level AMD SVM entry/exit stubs (AT&T syntax, run through cpp).

.text
.align 4

.globl v3_svm_launch
.globl v3_stgi
.globl v3_clgi

// Return-code data directives (not referenced in the visible code).
#define SVM_ERROR .dword 0xffffffff
#define SVM_SUCCESS .dword 0x00000000

// SVM instructions emitted as raw opcode bytes so the file assembles
// even with assemblers that predate the AMD-V mnemonics.
// vmrun/vmload/vmsave implicitly take a VMCB physical address in rAX.
#define vmrun .byte 0x0f,0x01,0xd8
#define vmload .byte 0x0F,0x01,0xDA
#define vmsave .byte 0x0F,0x01,0xDB
#define stgi   .byte 0x0F,0x01,0xDC
#define clgi   .byte 0x0F,0x01,0xDD

// MSR holding the physical address of the host state-save area
// (not referenced in the visible code).
#define SVM_VM_HSAVE_PA_MSR .dword 0xc0010117
23 #ifdef __V3_32BIT__
24
// Save_SVM_Registers(location): store the guest GPRs into the register
// struct whose address is at 'location' after a #VMEXIT.
// Note that RAX is saved in the VMCB, so we don't touch it here.
// Offsets step by 8 even in the 32-bit build, so the struct appears to
// use 64-bit slots -- TODO confirm against the vm_regs declaration.
// The slot at offset 24 (RSP position) is explicitly zeroed.
// NOTE(fix): the %ecx line previously had a space after the trailing
// backslash, which breaks the macro continuation under cpp.
#define Save_SVM_Registers(location)    \
        pushl   %eax;                   \
        movl    location, %eax;         \
        movl    %edi, (%eax);           \
        movl    %esi, 8(%eax);          \
        movl    %ebp, 16(%eax);         \
        movl    $0, 24(%eax);           \
        movl    %ebx, 32(%eax);         \
        movl    %edx, 40(%eax);         \
        movl    %ecx, 48(%eax);         \
        popl    %eax;
38
// Restore_SVM_Registers(location): load the guest GPRs from the register
// struct whose address is at 'location' before entering the guest.
// %eax is not loaded here -- the guest RAX comes from the VMCB on vmrun.
// The slot at offset 24 (RSP position) is skipped.
#define Restore_SVM_Registers(location) \
        pushl   %eax;                   \
        movl    location, %eax;         \
        movl    (%eax), %edi;           \
        movl    8(%eax), %esi;          \
        movl    16(%eax), %ebp;         \
        movl    32(%eax), %ebx;         \
        movl    40(%eax), %edx;         \
        movl    48(%eax), %ecx;         \
        popl    %eax;
49
50
// 32 bit GCC passes arguments via stack.  After the frame setup:
//    8(%ebp) = guest VMCB physical address
//   12(%ebp) = pointer to the guest GPR save area (vm_regs)
//   16(%ebp) = host VMCB physical address
// (argument roles inferred from usage below -- confirm against the
// C prototype of v3_svm_launch)

v3_svm_launch:
        push    %ebp;
        movl    %esp, %ebp;
        pushf;
        pusha;                          // save all host GPRs

        movl    16(%ebp), %eax;         // %eax = host VMCB
        vmsave;                         // stash host state

        pushl   12(%ebp);               // keep vm_regs across the guest run
        pushl   8(%ebp);                // keep guest VMCB on top

        // 8(%esp) compensates for the "pushl %eax" done inside the
        // macro; it resolves to the vm_regs pointer pushed above.
        Restore_SVM_Registers(8(%esp));
        popl    %eax;                   // %eax = guest VMCB

        vmload;
        vmrun;                          // run the guest until #VMEXIT
        vmsave;                         // write guest state back to its VMCB

        // 4(%esp) likewise allows for the macro's internal "pushl %eax";
        // it resolves to the vm_regs pointer still on the stack.
        Save_SVM_Registers(4(%esp));

        addl    $4, %esp;               // discard the vm_regs pointer

        movl    16(%ebp), %eax;         // %eax = host VMCB
        vmload;                         // restore host state

        popa;
        popf;
        pop     %ebp;
        ret
84
85
86
87 #elif __V3_64BIT__
88
// Save_SVM_Registers(location): store the guest GPRs into the register
// struct at 'location' after a #VMEXIT.
// Note that RAX is saved in the VMCB, so we don't touch it here
// (its slot at offset 56 is skipped).  The slot at offset 24 (RSP
// position) is explicitly zeroed.
#define Save_SVM_Registers(location)    \
        pushq   %rax;                   \
        movq    location, %rax;         \
        movq    %rdi, (%rax);           \
        movq    %rsi, 8(%rax);          \
        movq    %rbp, 16(%rax);         \
        movq    $0, 24(%rax);           \
        movq    %rbx, 32(%rax);         \
        movq    %rdx, 40(%rax);         \
        movq    %rcx, 48(%rax);         \
                                        \
        movq    %r8, 64(%rax);          \
        movq    %r9, 72(%rax);          \
        movq    %r10, 80(%rax);         \
        movq    %r11, 88(%rax);         \
        movq    %r12, 96(%rax);         \
        movq    %r13, 104(%rax);        \
        movq    %r14, 112(%rax);        \
        movq    %r15, 120(%rax);        \
        popq    %rax;
111
// Restore_SVM_Registers(location): load the guest GPRs from the register
// struct at 'location' before entering the guest.
// %rax is not loaded here -- the guest RAX comes from the VMCB on vmrun.
// The slots at offsets 24 (RSP) and 56 (RAX) are skipped.
// (Explicit q suffixes added for consistency with Save_SVM_Registers;
// the register operands already forced 64-bit operation.)
#define Restore_SVM_Registers(location) \
        pushq   %rax;                   \
        movq    location, %rax;         \
        movq    (%rax), %rdi;           \
        movq    8(%rax), %rsi;          \
        movq    16(%rax), %rbp;         \
        movq    32(%rax), %rbx;         \
        movq    40(%rax), %rdx;         \
        movq    48(%rax), %rcx;         \
                                        \
        movq    64(%rax), %r8;          \
        movq    72(%rax), %r9;          \
        movq    80(%rax), %r10;         \
        movq    88(%rax), %r11;         \
        movq    96(%rax), %r12;         \
        movq    104(%rax), %r13;        \
        movq    112(%rax), %r14;        \
        movq    120(%rax), %r15;        \
        popq    %rax;
131
132
133
134
// Push the host GPRs that must survive the guest run: the SysV
// callee-saved registers (%rbp, %rbx, %r12-%r15) plus %r8-%r11.
// POPA below must mirror this order exactly.
#define PUSHA                           \
        pushq %rbp;                     \
        pushq %rbx;                     \
        pushq %r8;                      \
        pushq %r9;                      \
        pushq %r10;                     \
        pushq %r11;                     \
        pushq %r12;                     \
        pushq %r13;                     \
        pushq %r14;                     \
        pushq %r15;
146
147
// Pop the registers saved by PUSHA, in exactly the reverse order.
#define POPA                            \
        popq %r15;                      \
        popq %r14;                      \
        popq %r13;                      \
        popq %r12;                      \
        popq %r11;                      \
        popq %r10;                      \
        popq %r9;                       \
        popq %r8;                       \
        popq %rbx;                      \
        popq %rbp;
159
160
// Note that this is only for 64 bit GCC, 32 bit GCC passes via stack.
// SysV AMD64 register arguments:
//   %rdi = guest VMCB physical address
//   %rsi = pointer to the guest GPR save area (vm_regs)
//   %rdx = host VMCB physical address

v3_svm_launch:
        pushf;
        PUSHA                           // save host GPRs

        pushq   %rdx;                   // keep host VMCB for the return path
        movq    %rdx, %rax;
        vmsave;                         // stash host state

        pushq   %rsi                    // keep vm_regs across the guest run

        movq    %rdi, %rax              // %rax = guest VMCB for vmload/vmrun
        Restore_SVM_Registers(%rsi);

        vmload;
        vmrun;                          // run the guest until #VMEXIT
        vmsave;                         // write guest state back to its VMCB

        // 8(%rsp) compensates for the "pushq %rax" done inside the
        // macro; it resolves to the vm_regs pointer pushed above.
        Save_SVM_Registers(8(%rsp));

        addq $8, %rsp                   // discard the vm_regs pointer

        popq %rax;                      // %rax = host VMCB
        vmload;                         // restore host state

        POPA
        popf;
        ret
196
197
198 #endif
199
200
// void v3_stgi(void) -- execute STGI (set the Global Interrupt Flag).
v3_stgi:
        stgi;
        ret;
204
// void v3_clgi(void) -- execute CLGI (clear the Global Interrupt Flag).
v3_clgi:
        clgi;
        ret;
208         
209