Searched refs:vmcb (Results 1 - 12 of 12) sorted by relevance

/barrelfish-2018-10-04/kernel/arch/x86_64/
vmkit.c
47 lpaddr = gen_phys_to_local_phys(dcb->guest_desc.vmcb.cap.u.frame.base);
48 amd_vmcb_t vmcb; local
49 amd_vmcb_initialize(&vmcb, (void *)local_phys_to_mem(lpaddr));
51 amd_vmcb_rip_wr(&vmcb, disp->dispatcher_run);
52 amd_vmcb_rsp_wr(&vmcb, 0);
53 amd_vmcb_rax_wr(&vmcb, 0);
54 amd_vmcb_rflags_wr_raw(&vmcb, USER_RFLAGS);
55 amd_vmcb_fs_selector_wr(&vmcb, 0);
56 amd_vmcb_gs_selector_wr(&vmcb, 0);
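
The vmkit.c hits show the kernel-side dispatch path: the VMCB frame capability stored in the DCB is translated to a kernel-mappable address, wrapped in a Mackerel-generated amd_vmcb_t handle, and the guest's entry state is written through the generated accessors. A minimal sketch assembling those hits into one routine (the function name and the struct types in its signature are illustrative, not from the source):

    /* Sketch of the dispatch-time VMCB setup seen in vmkit.c; the
     * amd_vmcb_*_wr calls are the Mackerel accessors the hits show. */
    static void guest_entry_setup(struct dcb *dcb,
                                  struct dispatcher_shared_generic *disp)
    {
        /* Frame cap's physical base -> local physical -> mapped kernel pointer. */
        lpaddr_t lpaddr = gen_phys_to_local_phys(dcb->guest_desc.vmcb.cap.u.frame.base);
        amd_vmcb_t vmcb;
        amd_vmcb_initialize(&vmcb, (void *)local_phys_to_mem(lpaddr));

        /* Guest resumes at the dispatcher's run entry with a clean register state. */
        amd_vmcb_rip_wr(&vmcb, disp->dispatcher_run);
        amd_vmcb_rsp_wr(&vmcb, 0);
        amd_vmcb_rax_wr(&vmcb, 0);
        amd_vmcb_rflags_wr_raw(&vmcb, USER_RFLAGS);
        amd_vmcb_fs_selector_wr(&vmcb, 0);
        amd_vmcb_gs_selector_wr(&vmcb, 0);
    }
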
svm_vmkit.c
132 : "a" (dcb->guest_desc.vmcb.cap.u.frame.base)
150 : "a" (dcb->guest_desc.vmcb.cap.u.frame.base)
174 vmload (lpaddr_t vmcb) { argument
175 __asm volatile ("vmload" : : "a" (vmcb) : "memory");
179 vmsave (lpaddr_t vmcb) { argument
180 __asm volatile ("vmsave" : : "a" (vmcb) : "memory");
192 vmload(gen_phys_to_local_phys(dcb->guest_desc.vmcb.cap.u.frame.base));
202 vmsave(gen_phys_to_local_phys(dcb->guest_desc.vmcb.cap.u.frame.base));
222 lpaddr = gen_phys_to_local_phys(dcb->guest_desc.vmcb.cap.u.frame.base);
223 amd_vmcb_t vmcb; local
[all...]
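
svm_vmkit.c wraps the two SVM state-transfer instructions; both take the VMCB's physical address in RAX, which is why the wrappers and the inline asm at lines 132/150 all use the "a" constraint. The wrappers, reassembled from the hits with comments added:

    static inline void vmload(lpaddr_t vmcb)
    {
        /* vmload loads additional guest state (FS/GS, TR, and related
         * MSRs) from the VMCB whose physical address is in RAX. */
        __asm volatile ("vmload" : : "a" (vmcb) : "memory");
    }

    static inline void vmsave(lpaddr_t vmcb)
    {
        /* vmsave stores the same state back to the VMCB. */
        __asm volatile ("vmsave" : : "a" (vmcb) : "memory");
    }

Callers pass the frame cap's base through gen_phys_to_local_phys() first (lines 192 and 202), since the capability stores a generic physical address rather than a local one.
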
syscall.c
610 lpaddr_t vmcs_base = dcb->guest_desc.vmcb.cap.u.frame.base;
628 lpaddr_t vmcs_base = dcb->guest_desc.vmcb.cap.u.frame.base;
646 lpaddr_t vmcs_base = dcb->guest_desc.vmcb.cap.u.frame.base;
660 lpaddr_t vmcs_base = dcb->guest_desc.vmcb.cap.u.frame.base;
725 err = caps_copy_to_cte(&dcb->guest_desc.vmcb, vmcb_cte, false, 0, 0);
757 /* dcb->guest_desc.vmcb = vmcb_cap->u.frame.base; */
1436 lpaddr_t lpaddr = gen_phys_to_local_phys(dcb_current->guest_desc.vmcb.cap.u.frame.base);
1437 amd_vmcb_t vmcb; local
1438 amd_vmcb_initialize(&vmcb, (void *)local_phys_to_mem(lpaddr));
1439 save_area->fs = amd_vmcb_fs_selector_rd(&vmcb);
[all...]
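
The syscall.c hits repeat a single idiom: fetch the guest's VMCB base out of the DCB, map it, and read or copy state (note the locals are named vmcs_base, Intel VMX terminology, although the field is the AMD vmcb). The read side around line 1436, with a comment added (save_area and its fs field are taken from the hit, not from the full source):

    lpaddr_t lpaddr = gen_phys_to_local_phys(dcb_current->guest_desc.vmcb.cap.u.frame.base);
    amd_vmcb_t vmcb;
    amd_vmcb_initialize(&vmcb, (void *)local_phys_to_mem(lpaddr));
    /* Pull the guest's FS selector out of the VMCB save area. */
    save_area->fs = amd_vmcb_fs_selector_rd(&vmcb);
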
/barrelfish-2018-10-04/usr/arrakismon/
realmode.c
25 set_vmcb_exit(amd_vmcb_t *vmcb, uint64_t code, uint64_t info1, uint64_t info2) argument
27 amd_vmcb_exitcode_wr(vmcb, code);
28 amd_vmcb_exitinfo1_wr(vmcb, info1);
29 amd_vmcb_exitinfo2_wr(vmcb, info2);
62 set_vmcb_exit(&env->vmcb, SVM_VMEXIT_IOIO, info1, M.x86.R_EIP);
91 if (amd_vmcb_exceptions_rd_raw(&env->vmcb) & (1 << num)) {
99 if (amd_vmcb_intercepts_rd(&env->vmcb).intn == 1) {
100 set_vmcb_exit(&env->vmcb, SVM_VMEXIT_SWINT, 0, 0);
156 M.x86.R_EAX = amd_vmcb_rax_rd(&g->vmcb);
161 M.x86.R_ESP = amd_vmcb_rsp_rd(&g->vmcb);
[all...]
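
realmode.c's helper fabricates a VMEXIT record in the VMCB, so instructions decoded by the bundled real-mode emulator (the M.x86 registers are its machine state) are funneled through the same exit-dispatch path as hardware-reported intercepts. Reassembled from the hits, with comments added:

    static void set_vmcb_exit(amd_vmcb_t *vmcb, uint64_t code,
                              uint64_t info1, uint64_t info2)
    {
        /* Write the same three fields hardware fills on a real #VMEXIT. */
        amd_vmcb_exitcode_wr(vmcb, code);
        amd_vmcb_exitinfo1_wr(vmcb, info1);
        amd_vmcb_exitinfo2_wr(vmcb, info2);
    }

    /* Line 62: an emulated I/O instruction is reported as an IOIO
     * intercept, with the emulator's EIP carried in exitinfo2. */
    set_vmcb_exit(&env->vmcb, SVM_VMEXIT_IOIO, info1, M.x86.R_EIP);
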
guest.c
273 #define INIT_DATA_SEGREG(vmcb,x) \
284 amd_vmcb_##x## _attrib_wr((vmcb), __sa); \
285 amd_vmcb_##x## _selector_wr((vmcb), 0x10); \
286 amd_vmcb_##x## _base_wr((vmcb), 0x0); \
287 amd_vmcb_##x## _limit_wr((vmcb), 0xffffffff); \
290 #define INIT_CODE_SEGREG(vmcb,x) \
301 amd_vmcb_##x## _attrib_wr((vmcb), __sa); \
302 amd_vmcb_##x## _selector_wr((vmcb), 8); \
303 amd_vmcb_##x## _base_wr((vmcb), 0x0); \
304 amd_vmcb_##x## _limit_wr((vmcb),
[all...]
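
The INIT_*_SEGREG macros paste the segment-register name into the Mackerel accessor names. In this arrakismon variant the values describe a flat protected-mode layout; expanding INIT_DATA_SEGREG(vmcb, ds) by hand gives roughly (the attribute struct __sa is built in the elided macro lines):

    amd_vmcb_ds_attrib_wr(vmcb, __sa);       /* attribute bits from __sa */
    amd_vmcb_ds_selector_wr(vmcb, 0x10);     /* flat data selector       */
    amd_vmcb_ds_base_wr(vmcb, 0x0);
    amd_vmcb_ds_limit_wr(vmcb, 0xffffffff);  /* full 4 GiB limit         */
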
svm.h
53 #define VMCB_WRITE_SEGREG_REALMODE(vmcb,reg,selector) \
55 amd_vmcb_ ##reg## _selector_wr((vmcb), (selector)); \
56 amd_vmcb_ ##reg## _base_wr((vmcb), (selector) << 4); \
57 amd_vmcb_ ##reg## _limit_wr((vmcb), ((selector) << 4) + 0xffff); \
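
The macro encodes real-mode segmentation directly: the base is the selector shifted left four bits, and the window spans the 64 KiB above it. For example, VMCB_WRITE_SEGREG_REALMODE(vmcb, es, 0xb800) (a hypothetical video-memory selector, not from the source) expands to roughly:

    amd_vmcb_es_selector_wr(vmcb, 0xb800);
    amd_vmcb_es_base_wr(vmcb, 0xb800 << 4);             /* = 0xb8000 */
    amd_vmcb_es_limit_wr(vmcb, (0xb800 << 4) + 0xffff); /* top of the 64 KiB window */
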
guest.h
56 amd_vmcb_t vmcb; member in struct:guest
110 return amd_vmcb_rax_rd(&g->vmcb);
116 amd_vmcb_rax_wr(&g->vmcb, val);
122 return amd_vmcb_rax_rd(&g->vmcb) & 0xffffffff;
128 uint64_t buf = amd_vmcb_rax_rd(&g->vmcb);
130 amd_vmcb_rax_wr(&g->vmcb, buf);
136 return amd_vmcb_rax_rd(&g->vmcb) & 0xffff;
142 uint64_t buf = amd_vmcb_rax_rd(&g->vmcb);
144 amd_vmcb_rax_wr(&g->vmcb, buf);
150 return amd_vmcb_rax_rd(&g->vmcb) >>
[all...]
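
guest.h exposes sized views of the guest's RAX: 64-bit reads go straight to the VMCB field, 32- and 16-bit reads mask it, and narrow writes are read-modify-write so the upper bits survive. A sketch of the pattern (the function names are illustrative; the hits show only the bodies):

    static inline uint32_t guest_get_eax(struct guest *g)
    {
        return amd_vmcb_rax_rd(&g->vmcb) & 0xffffffff;
    }

    static inline void guest_set_ax(struct guest *g, uint16_t val)
    {
        uint64_t buf = amd_vmcb_rax_rd(&g->vmcb);
        buf = (buf & ~0xffffULL) | val;   /* replace only the low 16 bits */
        amd_vmcb_rax_wr(&g->vmcb, buf);
    }
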
/barrelfish-2018-10-04/usr/vmkitmon/
svm.h
53 #define VMCB_WRITE_SEGREG_REALMODE(vmcb,reg,selector) \
55 amd_vmcb_ ##reg## _selector_wr((vmcb), (selector)); \
56 amd_vmcb_ ##reg## _base_wr((vmcb), (selector) << 4); \
57 amd_vmcb_ ##reg## _limit_wr((vmcb), ((selector) << 4) + 0xffff); \
realmode.c
35 set_vmcb_exit(amd_vmcb_t *vmcb, uint64_t code, uint64_t info1, uint64_t info2) argument
37 amd_vmcb_exitcode_wr(vmcb, code);
38 amd_vmcb_exitinfo1_wr(vmcb, info1);
39 amd_vmcb_exitinfo2_wr(vmcb, info2);
73 set_vmcb_exit(&env->vmcb, SVM_VMEXIT_IOIO, info1, M.x86.R_EIP);
108 if (amd_vmcb_exceptions_rd_raw(&env->vmcb) & (1 << num)) {
123 if (amd_vmcb_intercepts_rd(&env->vmcb).intn == 1) {
124 set_vmcb_exit(&env->vmcb, SVM_VMEXIT_SWINT, 0, 0);
185 M.x86.R_ESP = amd_vmcb_rsp_rd(&g->vmcb);
186 M.x86.R_EIP = amd_vmcb_rip_rd(&g->vmcb);
[all...]
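
Before reflecting an exception or a software interrupt into the guest, the emulator checks whether the monitor intercepts it; intercepted INTn events become a synthetic SVM_VMEXIT_SWINT instead of being delivered. The two checks from the hits, with comments added (num is assumed to be the vector being raised):

    /* Exception intercepts form a bitmap indexed by vector number. */
    if (amd_vmcb_exceptions_rd_raw(&env->vmcb) & (1 << num)) {
        /* ... report the exception as an intercept instead of injecting it ... */
    }

    /* Software interrupts (INT n) have a single dedicated intercept bit. */
    if (amd_vmcb_intercepts_rd(&env->vmcb).intn == 1) {
        set_vmcb_exit(&env->vmcb, SVM_VMEXIT_SWINT, 0, 0);
    }
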
guest.c
335 #define INIT_DATA_SEGREG(vmcb,x) \
342 amd_vmcb_##x## _attrib_wr((vmcb), __sa); \
343 amd_vmcb_##x## _selector_wr((vmcb), 0x0); \
344 amd_vmcb_##x## _base_wr((vmcb), 0x0); \
345 amd_vmcb_##x## _limit_wr((vmcb), 0xffff); \
348 #define INIT_CODE_SEGREG(vmcb,x) \
355 amd_vmcb_##x## _attrib_wr((vmcb), __sa); \
356 amd_vmcb_##x## _selector_wr((vmcb), 0xf000); \
357 amd_vmcb_##x## _base_wr((vmcb), 0xffff0000); \
358 amd_vmcb_##x## _limit_wr((vmcb),
[all...]
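
Unlike the arrakismon variant above, the vmkitmon copy initializes segments to the architectural x86 reset state: 64 KiB data segments with a null selector, and CS selector 0xf000 with base 0xffff0000, so the first instruction fetch (IP 0xfff0) lands at the reset vector 0xfffffff0. Expanding INIT_CODE_SEGREG(vmcb, cs) from the hits gives roughly:

    amd_vmcb_cs_attrib_wr(vmcb, __sa);      /* attribute bits from the elided lines */
    amd_vmcb_cs_selector_wr(vmcb, 0xf000);
    amd_vmcb_cs_base_wr(vmcb, 0xffff0000);  /* base + IP 0xfff0 = reset vector */
    /* the limit write at line 358 is truncated in the search output */
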
guest.h
65 amd_vmcb_t vmcb; member in struct:guest
120 return amd_vmcb_rax_rd(&g->vmcb);
130 amd_vmcb_rax_wr(&g->vmcb, val);
140 return amd_vmcb_rax_rd(&g->vmcb) & 0xffffffff;
150 uint64_t buf = amd_vmcb_rax_rd(&g->vmcb);
152 amd_vmcb_rax_wr(&g->vmcb, buf);
164 return amd_vmcb_rax_rd(&g->vmcb) & 0xffff;
174 uint64_t buf = amd_vmcb_rax_rd(&g->vmcb);
176 amd_vmcb_rax_wr(&g->vmcb, buf);
188 return amd_vmcb_rax_rd(&g->vmcb) >>
[all...]
/barrelfish-2018-10-04/kernel/include/
dispatch.h
31 struct cte vmcb; ///< The physical address of the AMD VMCB member in struct:guest
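
Everything above hangs off this one member: the kernel keeps the guest's VMCB as a frame capability in a cte slot of struct guest (embedded in the DCB; syscall.c line 725 installs it with caps_copy_to_cte), and every use first turns the cap's physical base into a mapped kernel pointer. A hypothetical helper collecting that idiom from vmkit.c and syscall.c (the kernel inlines the conversion at each site rather than defining this):

    static inline void *guest_vmcb_ptr(struct dcb *dcb)
    {
        lpaddr_t lp = gen_phys_to_local_phys(dcb->guest_desc.vmcb.cap.u.frame.base);
        return (void *)local_phys_to_mem(lp);
    }
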

Completed in 255 milliseconds