svm_support.S: diff of r254677 (old) against r272195 (new)
 /*-
  * Copyright (c) 2013, Anish Gupta (akgupt3@gmail.com)
  * All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
  * are met:
  * 1. Redistributions of source code must retain the above copyright

--- 14 unchanged lines hidden ---

  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
  * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  */
 #include <machine/asmacros.h>

 #include "svm_assym.s"

 /*
- * Macros to save and restore GPRs.
+ * Be friendly to DTrace FBT's prologue/epilogue pattern matching.
+ *
+ * They are also responsible for saving/restoring the host %rbp across VMRUN.
  */
-#define SAVE_GPR_STATE(reg); \
-	movq %rbp, SCTX_RBP(reg); \
-	movq %rbx, SCTX_RBX(reg); \
-	movq %rcx, SCTX_RCX(reg); \
-	movq %r8, SCTX_R8(reg); \
-	movq %r9, SCTX_R9(reg); \
-	movq %r10, SCTX_R10(reg); \
-	movq %r11, SCTX_R11(reg); \
-	movq %r12, SCTX_R12(reg); \
-	movq %r13, SCTX_R13(reg); \
-	movq %r14, SCTX_R14(reg); \
-	movq %r15, SCTX_R15(reg); \
+#define VENTER push %rbp ; mov %rsp,%rbp
+#define VLEAVE pop %rbp

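The SCTX_* operands used throughout this file are assembler constants pulled in from the generated "svm_assym.s". As a rough sketch of the mechanism (the struct layout and field names below are assumptions for illustration, not the verbatim bhyve sources), FreeBSD produces such files from a genassym-style C source with the ASSYM() macro:

/* Hypothetical genassym-style source; layout and names are assumptions. */
#include <sys/param.h>
#include <sys/systm.h>
#include <sys/assym.h>

struct svm_regctx {
	uint64_t	sctx_rbp;
	uint64_t	sctx_rbx;
	uint64_t	sctx_rcx;
	uint64_t	sctx_r8;
	uint64_t	sctx_r9;
	uint64_t	sctx_r10;
	uint64_t	sctx_r11;
	/* ... %r12-%r15 and the guest %rdi/%rsi/%rdx slots continue ... */
};

ASSYM(SCTX_RBP, offsetof(struct svm_regctx, sctx_rbp));
ASSYM(SCTX_RBX, offsetof(struct svm_regctx, sctx_rbx));
ASSYM(SCTX_RCX, offsetof(struct svm_regctx, sctx_rcx));
ASSYM(SCTX_R8, offsetof(struct svm_regctx, sctx_r8));
ASSYM(SCTX_R9, offsetof(struct svm_regctx, sctx_r9));

The build turns each ASSYM() into a "#define SCTX_... <offset>" line in svm_assym.s, which is how the assembly can address fields of a C struct without hard-coding its layout.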
-#define LOAD_GPR_STATE(reg) \
-	movq SCTX_RBP(reg), %rbp; \
-	movq SCTX_RBX(reg), %rbx; \
-	movq SCTX_RCX(reg), %rcx; \
-	movq SCTX_R8(reg), %r8; \
-	movq SCTX_R9(reg), %r9; \
-	movq SCTX_R10(reg), %r10; \
-	movq SCTX_R11(reg), %r11; \
-	movq SCTX_R12(reg), %r12; \
-	movq SCTX_R13(reg), %r13; \
-	movq SCTX_R14(reg), %r14; \
-	movq SCTX_R15(reg), %r15; \
-
 /*
- * Macros to save and restore vcpu registers which are not
- * done by SVM.
+ * svm_launch(uint64_t vmcb, struct svm_regctx *gctx)
+ * %rdi: physical address of VMCB
+ * %rsi: pointer to guest context
  */
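Read as C declarations, the deleted and added comments describe two different interfaces. A sketch of both, with the return type of the new function assumed (the new comment does not state one):

#include <sys/types.h>

struct svm_regctx;	/* register save area; offsets come from svm_assym.s */

/*
 * Old interface (r254677), per the comment deleted further down:
 *
 *	int svm_launch(uint64_t vmcb_pa, struct svm_regctx *gswctx,
 *	    struct svm_regctx *hswctx);
 */

/* New interface (r272195): the explicit host context argument is gone. */
void svm_launch(uint64_t vmcb, struct svm_regctx *gctx);

The third argument disappears because the rewritten body keeps host state on the stack (the pushes plus VENTER/VLEAVE) instead of in a caller-supplied context structure.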
-#define SAVE_GUEST_STATE(reg) \
-	movq %rdi, SCTX_GUEST_RDI(reg); \
-	movq %rsi, SCTX_GUEST_RSI(reg); \
-	movq %rdx, SCTX_GUEST_RDX(reg); \
-	SAVE_GPR_STATE(reg)
-
-#define LOAD_GUEST_STATE(reg) \
-	movq SCTX_GUEST_RDI(reg), %rdi; \
-	movq SCTX_GUEST_RSI(reg), %rsi; \
-	movq SCTX_GUEST_RDX(reg), %rdx; \
-	LOAD_GPR_STATE(reg)
-
-/*
- * Macros to save and restore host registers which are not
- * saved by SVM.
- */
-#define SAVE_HOST_STATE(reg) \
-	mov %fs, SCTX_HOST_FS(reg); \
-	mov %gs, SCTX_HOST_GS(reg); \
-	movq %rsp, SCTX_HOST_RSP(reg); \
-	SAVE_GPR_STATE(reg)
-
-#define LOAD_HOST_STATE(reg) \
-	mov SCTX_HOST_FS(reg), %fs; \
-	mov SCTX_HOST_GS(reg), %gs; \
-	movq SCTX_HOST_RSP(reg), %rsp; \
-	LOAD_GPR_STATE(reg)
-
-/*
- * This is where virtual machine vcpu start execution.
- * int svm_launch(vmcb_pa, gswctx, hswctx)
- * vmcb_pa - VMCB physical address is in %rdi.
- * gswctx - Guest context is in %rsi.
- * hswctx - Host context is in %rdx.
- *
- * Note: SVM guarantees host RSP and RAX will be restored
- * back after guest exit. RAX is where VMCB Phy addr is so
- * we are left with only RSP. RSP will hold base for guest
- * software context which will have base for host software
- * context.
- */
 ENTRY(svm_launch)
+	VENTER

-	/* Save host GPRs. */
-	SAVE_HOST_STATE(%rdx)
-
 	/*
-	 * Move the parameters to final destinations.
-	 * RAX - VMCB phy addr.
-	 * RSP - Guest software context.
-	 * SCTX_GUEST_HOST(guest) - Host software context.
+	 * Host register state saved across a VMRUN.
+	 *
+	 * All "callee saved registers" except:
+	 * %rsp: because it is preserved by the processor across VMRUN.
+	 * %rbp: because it is saved/restored by the function prologue/epilogue.
 	 */
+	push %rbx
+	push %r12
+	push %r13
+	push %r14
+	push %r15
+
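The new comment leans on the SysV AMD64 ABI, whose callee-saved set is %rbx, %rbp, %rsp and %r12-%r15; only %rbx and %r12-%r15 need explicit pushes here. A small illustration of the same contract from the C side, assuming nothing beyond standard inline asm:

/*
 * Illustration only.  An asm statement that may clobber the callee-saved
 * GPRs must list them, and the compiler then saves and restores them
 * around the statement -- the same work the manual push/pop pairs in
 * svm_launch() do by hand.
 */
static inline void
clobber_callee_saved(void)
{
	__asm__ __volatile__ ("" : : : "rbx", "r12", "r13", "r14", "r15",
	    "memory");
}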
+	/* Save the physical address of the VMCB in %rax */
 	movq %rdi, %rax
-	movq %rsi, %rsp
-	movq %rdx, SCTX_GUEST_HCTX_BASE(%rsp)

-	/* Load guest context. */
-	LOAD_GUEST_STATE(%rsp)
+	push %rsi		/* push guest context pointer on the stack */

-	vmload %rax
+	/*
+	 * Restore guest state.
+	 */
+	movq SCTX_R8(%rsi), %r8
+	movq SCTX_R9(%rsi), %r9
+	movq SCTX_R10(%rsi), %r10
+	movq SCTX_R11(%rsi), %r11
+	movq SCTX_R12(%rsi), %r12
+	movq SCTX_R13(%rsi), %r13
+	movq SCTX_R14(%rsi), %r14
+	movq SCTX_R15(%rsi), %r15
+	movq SCTX_RBP(%rsi), %rbp
+	movq SCTX_RBX(%rsi), %rbx
+	movq SCTX_RCX(%rsi), %rcx
+	movq SCTX_RDX(%rsi), %rdx
+	movq SCTX_RDI(%rsi), %rdi
+	movq SCTX_RSI(%rsi), %rsi	/* %rsi must be restored last */

+	vmload %rax
 	vmrun %rax
-
 	vmsave %rax

-	/* Save guest state. */
-	SAVE_GUEST_STATE(%rsp)
+	pop %rax		/* pop guest context pointer from the stack */

-	/* Restore host context base in RDX. */
-	movq SCTX_GUEST_HCTX_BASE(%rsp), %rdx
-	/* Restore host GPRs. */
-	LOAD_HOST_STATE(%rdx)
+	/*
+	 * Save guest state.
+	 */
+	movq %r8, SCTX_R8(%rax)
+	movq %r9, SCTX_R9(%rax)
+	movq %r10, SCTX_R10(%rax)
+	movq %r11, SCTX_R11(%rax)
+	movq %r12, SCTX_R12(%rax)
+	movq %r13, SCTX_R13(%rax)
+	movq %r14, SCTX_R14(%rax)
+	movq %r15, SCTX_R15(%rax)
+	movq %rbp, SCTX_RBP(%rax)
+	movq %rbx, SCTX_RBX(%rax)
+	movq %rcx, SCTX_RCX(%rax)
+	movq %rdx, SCTX_RDX(%rax)
+	movq %rdi, SCTX_RDI(%rax)
+	movq %rsi, SCTX_RSI(%rax)

+	/* Restore host state */
+	pop %r15
+	pop %r14
+	pop %r13
+	pop %r12
+	pop %rbx
+
+	VLEAVE
 	ret
 END(svm_launch)
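For context, a hedged sketch of how a caller might drive the new entry point; the wrapper name and surrounding logic below are assumptions, not the actual bhyve svm.c:

#include <sys/types.h>

#include <machine/cpufunc.h>	/* disable_intr()/enable_intr() */

struct svm_regctx;

void svm_launch(uint64_t vmcb, struct svm_regctx *gctx);

/*
 * Hypothetical single world switch.  Interrupts stay disabled across
 * VMRUN so the host cannot take an interrupt while guest register
 * state is live in the CPU.
 */
static void
vcpu_world_switch(uint64_t vmcb_pa, struct svm_regctx *gctx)
{
	disable_intr();
	svm_launch(vmcb_pa, gctx);	/* vmload/vmrun/vmsave happen inside */
	enable_intr();

	/* The caller would now decode the #VMEXIT reason from the VMCB. */
}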