/* vmx_support.S, SVN revision 221828 */
1221828Sgrehan/*-
2221828Sgrehan * Copyright (c) 2011 NetApp, Inc.
3221828Sgrehan * All rights reserved.
4221828Sgrehan *
5221828Sgrehan * Redistribution and use in source and binary forms, with or without
6221828Sgrehan * modification, are permitted provided that the following conditions
7221828Sgrehan * are met:
8221828Sgrehan * 1. Redistributions of source code must retain the above copyright
9221828Sgrehan *    notice, this list of conditions and the following disclaimer.
10221828Sgrehan * 2. Redistributions in binary form must reproduce the above copyright
11221828Sgrehan *    notice, this list of conditions and the following disclaimer in the
12221828Sgrehan *    documentation and/or other materials provided with the distribution.
13221828Sgrehan *
14221828Sgrehan * THIS SOFTWARE IS PROVIDED BY NETAPP, INC ``AS IS'' AND
15221828Sgrehan * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
16221828Sgrehan * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
17221828Sgrehan * ARE DISCLAIMED.  IN NO EVENT SHALL NETAPP, INC OR CONTRIBUTORS BE LIABLE
18221828Sgrehan * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
19221828Sgrehan * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
20221828Sgrehan * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
21221828Sgrehan * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
22221828Sgrehan * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
23221828Sgrehan * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
24221828Sgrehan * SUCH DAMAGE.
25221828Sgrehan *
26221828Sgrehan * $FreeBSD$
27221828Sgrehan */
28221828Sgrehan
29221828Sgrehan#include <machine/asmacros.h>
30221828Sgrehan
31221828Sgrehan#include "vmx_assym.s"
32221828Sgrehan
/*
 * Load the guest registers saved in the 'vmxctx' that %rdi points to.
 *
 * Interrupts are disabled first: a page fault taken inside an interrupt
 * handler between writing %cr2 and entering the guest would trash the
 * guest's %cr2 value.
 *
 * %rax is used as scratch for the %cr2 load and is reloaded with the
 * guest value afterwards; %rdi itself is reloaded last because it is
 * the pointer used to address every other field.
 */
#define	VMX_GUEST_RESTORE						\
	cli;								\
	movq	VMXCTX_GUEST_CR2(%rdi),%rax;				\
	movq	%rax,%cr2;						\
	movq	VMXCTX_GUEST_RAX(%rdi),%rax;				\
	movq	VMXCTX_GUEST_RBX(%rdi),%rbx;				\
	movq	VMXCTX_GUEST_RCX(%rdi),%rcx;				\
	movq	VMXCTX_GUEST_RDX(%rdi),%rdx;				\
	movq	VMXCTX_GUEST_RSI(%rdi),%rsi;				\
	movq	VMXCTX_GUEST_RBP(%rdi),%rbp;				\
	movq	VMXCTX_GUEST_R8(%rdi),%r8;				\
	movq	VMXCTX_GUEST_R9(%rdi),%r9;				\
	movq	VMXCTX_GUEST_R10(%rdi),%r10;				\
	movq	VMXCTX_GUEST_R11(%rdi),%r11;				\
	movq	VMXCTX_GUEST_R12(%rdi),%r12;				\
	movq	VMXCTX_GUEST_R13(%rdi),%r13;				\
	movq	VMXCTX_GUEST_R14(%rdi),%r14;				\
	movq	VMXCTX_GUEST_R15(%rdi),%r15;				\
	movq	VMXCTX_GUEST_RDI(%rdi),%rdi; /* %rdi goes last */
60221828Sgrehan
/*
 * Decode the outcome of a 'vmlaunch' or 'vmresume' into 'reg' and store
 * it in the 'launch_error' field of the vmxctx addressed by %rsp.
 *
 * Must be invoked immediately after the VMX instruction, while RFLAGS
 * still reflects its result (Intel SDM convention):
 *	CF set		-> VMfailInvalid
 *	ZF set		-> VMfailValid (error number left in the VMCS)
 *	CF and ZF clear	-> success
 *
 * Clobbers 'reg'.
 *
 * NOTE(review): the final store assumes %rsp holds the vmxctx pointer
 * on the instruction-failure path -- verify the callers arrange this.
 */
#define	VM_INSTRUCTION_ERROR(reg)					\
	jnc 	1f;							\
	movl 	$VM_FAIL_INVALID,reg;		/* CF is set */		\
	jmp 	3f;							\
1:	jnz 	2f;							\
	movl 	$VM_FAIL_VALID,reg;		/* ZF is set */		\
	jmp 	3f;							\
2:	movl 	$VM_SUCCESS,reg;					\
3:	movl	reg,VMXCTX_LAUNCH_ERROR(%rsp)
70221828Sgrehan
71221828Sgrehan	.text
/*
 * int vmx_setjmp(struct vmxctx *ctxp)
 * %rdi = ctxp
 *
 * Record the host's callee-saved registers, stack pointer and return
 * address in 'ctxp'.  vmx_return() later uses this snapshot to resume
 * execution at our caller with an arbitrary return value.
 *
 * Returns VMX_RETURN_DIRECT ('0') on the direct fall-through, and the
 * value handed to vmx_return() ('1' for a vm exit via vmx_longjmp)
 * when "returning" a second time.
 */
ENTRY(vmx_setjmp)
	movq	%rbx,VMXCTX_HOST_RBX(%rdi)
	movq	%rbp,VMXCTX_HOST_RBP(%rdi)
	movq	%r12,VMXCTX_HOST_R12(%rdi)
	movq	%r13,VMXCTX_HOST_R13(%rdi)
	movq	%r14,VMXCTX_HOST_R14(%rdi)
	movq	%r15,VMXCTX_HOST_R15(%rdi)
	movq	%rsp,VMXCTX_HOST_RSP(%rdi)
	movq	(%rsp),%rax			/* caller's return address */
	movq	%rax,VMXCTX_HOST_RIP(%rdi)

	/*
	 * XXX save host debug registers
	 */
	movl	$VMX_RETURN_DIRECT,%eax
	ret
END(vmx_setjmp)
96221828Sgrehan
/*
 * void vmx_return(struct vmxctx *ctxp, int retval)
 * %rdi = ctxp
 * %rsi = retval
 *
 * Unwind to the context captured by vmx_setjmp(), making that earlier
 * call appear to return 'retval'.  Does not return to its own caller.
 */
ENTRY(vmx_return)
	/* Reload the callee-saved host registers and the host stack. */
	movq	VMXCTX_HOST_RBX(%rdi),%rbx
	movq	VMXCTX_HOST_RBP(%rdi),%rbp
	movq	VMXCTX_HOST_R12(%rdi),%r12
	movq	VMXCTX_HOST_R13(%rdi),%r13
	movq	VMXCTX_HOST_R14(%rdi),%r14
	movq	VMXCTX_HOST_R15(%rdi),%r15
	movq	VMXCTX_HOST_RSP(%rdi),%rsp

	/*
	 * Plant the saved %rip in the return-address slot so that 'ret'
	 * resumes in vmx_setjmp()'s caller.
	 */
	movq	VMXCTX_HOST_RIP(%rdi),%rax
	movq	%rax,(%rsp)

	/*
	 * XXX restore host debug registers
	 */
	movl	%esi,%eax			/* vmx_setjmp() returns 'retval' */
	ret
END(vmx_return)
121221828Sgrehan
/*
 * void vmx_longjmp(void)
 * %rsp = pointer to the 'vmxctx'
 *
 * VM-exit landing point.  On entry %rsp addresses the vmxctx
 * (presumably loaded from the VMCS host-state area on exit -- confirm
 * against the vmcs setup code).  Save the guest state that is not
 * automatically saved in the vmcs, then unwind to vmx_setjmp() via
 * vmx_return().
 */
ENTRY(vmx_longjmp)
	movq	%rax,VMXCTX_GUEST_RAX(%rsp)
	movq	%rbx,VMXCTX_GUEST_RBX(%rsp)
	movq	%rcx,VMXCTX_GUEST_RCX(%rsp)
	movq	%rdx,VMXCTX_GUEST_RDX(%rsp)
	movq	%rdi,VMXCTX_GUEST_RDI(%rsp)
	movq	%rsi,VMXCTX_GUEST_RSI(%rsp)
	movq	%rbp,VMXCTX_GUEST_RBP(%rsp)
	movq	%r8,VMXCTX_GUEST_R8(%rsp)
	movq	%r9,VMXCTX_GUEST_R9(%rsp)
	movq	%r10,VMXCTX_GUEST_R10(%rsp)
	movq	%r11,VMXCTX_GUEST_R11(%rsp)
	movq	%r12,VMXCTX_GUEST_R12(%rsp)
	movq	%r13,VMXCTX_GUEST_R13(%rsp)
	movq	%r14,VMXCTX_GUEST_R14(%rsp)
	movq	%r15,VMXCTX_GUEST_R15(%rsp)

	/* %rdi is saved above and is now free as scratch for %cr2. */
	movq	%cr2,%rdi
	movq	%rdi,VMXCTX_GUEST_CR2(%rsp)

	/* vmx_return(vmxctx, VMX_RETURN_LONGJMP) -- does not come back. */
	movq	%rsp,%rdi
	movq	$VMX_RETURN_LONGJMP,%rsi
	callq	vmx_return
END(vmx_longjmp)
153221828Sgrehan
/*
 * void vmx_resume(struct vmxctx *ctxp)
 * %rdi = ctxp
 *
 * Re-enter a previously launched guest with 'vmresume'.  On success,
 * control transfers to the guest and comes back through vmx_longjmp()
 * at the next vm exit.
 *
 * Although the return type is a 'void' this function may return indirectly
 * through vmx_setjmp() with a return value of 2 (VMX_RETURN_VMRESUME),
 * when 'vmresume' itself fails.
 */
ENTRY(vmx_resume)
	/*
	 * Restore guest state that is not automatically loaded from the vmcs.
	 * Note: this executes 'cli' and leaves %rdi holding the guest's
	 * value rather than 'ctxp'.
	 */
	VMX_GUEST_RESTORE

	vmresume

	/*
	 * Capture the reason why vmresume failed.
	 *
	 * NOTE(review): the macro stores through %rsp as the vmxctx
	 * pointer, and %rsp is also passed to vmx_return() below; the
	 * caller must arrange for %rsp to address the vmxctx on this
	 * failure path -- verify against the caller in vmx.c.
	 */
	VM_INSTRUCTION_ERROR(%eax)

	/* Return via vmx_setjmp with return value of VMX_RETURN_VMRESUME */
	movq	%rsp,%rdi
	movq	$VMX_RETURN_VMRESUME,%rsi
	callq	vmx_return
END(vmx_resume)
179221828Sgrehan
/*
 * void vmx_launch(struct vmxctx *ctxp)
 * %rdi = ctxp
 *
 * First-time entry of a guest with 'vmlaunch'.  On success, control
 * transfers to the guest and comes back through vmx_longjmp() at the
 * next vm exit.
 *
 * Although the return type is a 'void' this function may return indirectly
 * through vmx_setjmp() with a return value of 3 (VMX_RETURN_VMLAUNCH),
 * when 'vmlaunch' itself fails.
 */
ENTRY(vmx_launch)
	/*
	 * Restore guest state that is not automatically loaded from the vmcs.
	 * Note: this executes 'cli' and leaves %rdi holding the guest's
	 * value rather than 'ctxp'.
	 */
	VMX_GUEST_RESTORE

	vmlaunch

	/*
	 * Capture the reason why vmlaunch failed.
	 *
	 * NOTE(review): the macro stores through %rsp as the vmxctx
	 * pointer, and %rsp is also passed to vmx_return() below; the
	 * caller must arrange for %rsp to address the vmxctx on this
	 * failure path -- verify against the caller in vmx.c.
	 */
	VM_INSTRUCTION_ERROR(%eax)

	/* Return via vmx_setjmp with return value of VMX_RETURN_VMLAUNCH */
	movq	%rsp,%rdi
	movq	$VMX_RETURN_VMLAUNCH,%rsi
	callq	vmx_return
END(vmx_launch)
205