Palacios Public Git Repository

To check out Palacios, execute

  git clone http://v3vee.org/palacios/palacios.web/palacios.git
This will give you the master branch. You probably want the devel branch or one of the release branches. To switch to the devel branch, simply execute
  cd palacios
  git checkout --track -b devel origin/devel
The other branches are similar.


6dcd39bdf7110d520d775768949c22ce76832c7e
[palacios.git] / palacios / src / palacios / svm_lowlevel.S
1 #;  -*- fundamental -*-
2
3 .text
4 .align 4
5
6 .globl v3_svm_launch
7 .globl v3_stgi
8 .globl v3_clgi
9
10 #define SVM_ERROR .dword 0xffffffff
11 #define SVM_SUCCESS .dword 0x00000000
12
13 #define vmrun .byte 0x0f,0x01,0xd8
14 #define vmload .byte 0x0F,0x01,0xDA
15 #define vmsave .byte 0x0F,0x01,0xDB
16 #define stgi   .byte 0x0F,0x01,0xDC
17 #define clgi   .byte 0x0F,0x01,0xDD
18
19
20 #define SVM_VM_HSAVE_PA_MSR .dword 0xc0010117
21
22
23 #ifdef __V3_32BIT__
24
25 // Note that RAX is saved in the VMCB, so we don't touch it here
26
27 #define Save_SVM_Registers(location)    \
28         pushl   %eax;                   \
29         movl    location, %eax;         \
30         movl    %edi, (%eax);           \
31         movl    %esi, 8(%eax);          \
32         movl    %ebp, 16(%eax);         \
33         movl    $0, 24(%eax);           \
34         movl    %ebx, 32(%eax);         \
35         movl    %edx, 40(%eax);         \
36         movl    %ecx, 48(%eax);         \
37         popl    %eax;                   
38         
39
40 #define Restore_SVM_Registers(location) \
41         pushl   %eax;                   \
42         movl    location, %eax;         \
43         movl    (%eax), %edi;           \
44         movl    8(%eax), %esi;          \
45         movl    16(%eax), %ebp;         \
46         movl    32(%eax), %ebx;         \
47         movl    40(%eax), %edx;         \
48         movl    48(%eax), %ecx;         \
49         popl    %eax;
50
51
52 // 32 bit GCC passes arguments via stack
53
54 v3_svm_launch:
55         push    %ebp;
56         movl    %esp, %ebp;
57         pushf;
58         pusha;
59
60         movl    16(%ebp), %eax;
61         vmsave;
62
63         pushl   12(%ebp);
64         pushl   8(%ebp);
65
66         Restore_SVM_Registers(8(%esp));
67         popl    %eax;
68
69         vmload;
70         vmrun;
71         vmsave;
72
73         Save_SVM_Registers(4(%esp));
74
75         addl    $4, %esp;
76
77
78         movl    16(%ebp), %eax;
79         vmload;
80
81         popa;
82         popf;
83         pop     %ebp;
84         ret
85
86
87
88 #elif __V3_64BIT__
89
90 // Note that RAX is saved in the VMCB, so we don't touch it here
91
92 #define Save_SVM_Registers(location)    \
93         pushq   %rax;                   \
94         movq    location, %rax;         \
95         movq    %rdi, (%rax);           \
96         movq    %rsi, 8(%rax);          \
97         movq    %rbp, 16(%rax);         \
98         movq    $0, 24(%rax);           \
99         movq    %rbx, 32(%rax);         \
100         movq    %rdx, 40(%rax);         \
101         movq    %rcx, 48(%rax);         \
102                                         \
103         movq    %r8, 64(%rax);          \
104         movq    %r9, 72(%rax);          \
105         movq    %r10, 80(%rax);         \
106         movq    %r11, 88(%rax);         \
107         movq    %r12, 96(%rax);         \
108         movq    %r13, 104(%rax);        \
109         movq    %r14, 112(%rax);        \
110         movq    %r15, 120(%rax);        \
111         popq    %rax;                   
112         
113
114 #define Restore_SVM_Registers(location) \
115         push    %rax;                   \
116         mov     location, %rax;         \
117         mov     (%rax), %rdi;           \
118         mov     8(%rax), %rsi;          \
119         mov     16(%rax), %rbp;         \
120         mov     32(%rax), %rbx;         \
121         mov     40(%rax), %rdx;         \
122         mov     48(%rax), %rcx;         \
123                                         \
124         mov     64(%rax), %r8;          \
125         mov     72(%rax), %r9;          \
126         mov     80(%rax), %r10;         \
127         mov     88(%rax), %r11;         \
128         mov     96(%rax), %r12;         \
129         mov     104(%rax), %r13;        \
130         mov     112(%rax), %r14;        \
131         mov     120(%rax), %r15;        \
132         pop     %rax;
133
134
135
136
137 #define PUSHA                           \
138         pushq %rbp;                     \
139         pushq %rbx;                     \
140         pushq %r8;                      \
141         pushq %r9;                      \
142         pushq %r10;                     \
143         pushq %r11;                     \
144         pushq %r12;                     \
145         pushq %r13;                     \
146         pushq %r14;                     \
147         pushq %r15;                     
148
149
150 #define POPA                            \
151         popq %r15;                      \
152         popq %r14;                      \
153         popq %r13;                      \
154         popq %r12;                      \
155         popq %r11;                      \
156         popq %r10;                      \
157         popq %r9;                       \
158         popq %r8;                       \
159         popq %rbx;                      \
160         popq %rbp;                      
161
162
163 // Note that this is only for 64 bit GCC, 32 bit GCC passes via stack
164 // VMCB => RDI
165 // vm_regs => RSI
166 // HOST VMCB => RDX
167
168 v3_svm_launch:
169         pushf;
170         PUSHA
171         
172         pushq   %rdx;
173         movq    %rdx, %rax;
174         vmsave;
175
176         pushq   %rsi
177
178         movq    %rdi, %rax
179         Restore_SVM_Registers(%rsi);
180
181
182         vmload;
183         vmrun;
184         vmsave;
185
186
187         Save_SVM_Registers(8(%rsp));
188
189         addq $8, %rsp
190
191
192         popq %rax;
193         vmload;
194
195         POPA
196         popf;
197         ret
198
199
200 #endif
201
202
203 v3_stgi:
204         stgi;
205         ret;
206
207 v3_clgi:
208         clgi;
209         ret;
210         
211