Searched refs:efer (Results 1 - 25 of 32) sorted by path


/linux-master/arch/x86/hyperv/
hv_vtl.c:120 input->vp_context.efer = __rdmsr(MSR_EFER);
ivm.c:322 vmsa->efer = native_read_msr(MSR_EFER);
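
Both Hyper-V paths above snapshot the host's live EFER into a save area (a VP context in hv_vtl.c, an SEV-SNP VMSA in ivm.c). For reference, the same register can be inspected from userspace through the msr driver; a minimal sketch, assuming the msr module is loaded and root privileges:

#include <fcntl.h>
#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>
#include <unistd.h>

#define MSR_EFER 0xc0000080	/* same MSR the kernel reads above */

int main(void)
{
	uint64_t efer;
	int fd = open("/dev/cpu/0/msr", O_RDONLY);

	/* pread() at offset == MSR number returns the 8-byte MSR value */
	if (fd < 0 || pread(fd, &efer, sizeof(efer), MSR_EFER) != sizeof(efer)) {
		perror("msr");
		return 1;
	}
	printf("EFER = %#" PRIx64 " (SCE=%d LME=%d LMA=%d NX=%d)\n", efer,
	       !!(efer & (1ULL << 0)), !!(efer & (1ULL << 8)),
	       !!(efer & (1ULL << 10)), !!(efer & (1ULL << 11)));
	close(fd);
	return 0;
}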
/linux-master/arch/x86/include/asm/
hyperv-tlfs.h:777 u64 efer; member in struct:hv_init_vp_context
kvm_host.h:765 u64 efer; member in struct:kvm_vcpu_arch
1648 int (*set_efer)(struct kvm_vcpu *vcpu, u64 efer);
2041 bool kvm_valid_efer(struct kvm_vcpu *vcpu, u64 efer);
realmode.h:52 u64 efer;
suspend_64.h:44 unsigned long efer; member in struct:saved_context
svm.h:314 u64 efer; member in struct:vmcb_save_area
370 u64 efer; member in struct:sev_es_save_area
/linux-master/arch/x86/include/uapi/asm/
kvm.h:150 __u64 efer; member in struct:kvm_sregs
161 __u64 efer; member in struct:kvm_sregs2
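
The efer members of kvm_sregs and kvm_sregs2 are how a VMM observes the guest's EFER from userspace. A sketch of reading it, assuming vcpu_fd was obtained earlier via KVM_CREATE_VCPU (error handling trimmed):

#include <linux/kvm.h>
#include <stdio.h>
#include <sys/ioctl.h>

void dump_guest_efer(int vcpu_fd)
{
	struct kvm_sregs sregs;

	/* KVM_GET_SREGS fills the special registers, including efer */
	if (ioctl(vcpu_fd, KVM_GET_SREGS, &sregs) == 0)
		printf("guest EFER = %#llx\n", (unsigned long long)sregs.efer);
}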
/linux-master/arch/x86/kernel/
sev.c:1066 vmsa->efer = EFER_SVME;
/linux-master/arch/x86/kvm/
emulate.c:783 u64 efer; local
788 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
792 if (efer & EFER_LMA)
800 if (efer & EFER_LMA)
809 if (efer & EFER_LMA) {
1512 u64 efer = 0; local
1514 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
1515 if (!(efer & EFER_LMA))
1686 u64 efer = 0; local
1688 ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
2407 u64 efer = 0; local
2466 u64 efer = 0; local
3909 u64 efer = 0; local
[all...]
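
The emulator never reads the MSR directly: it pulls EFER through ctxt->ops->get_msr() and branches on EFER_LMA to pick long-mode semantics. A standalone sketch of that pattern with a hypothetical minimal ops table (not KVM's actual types):

#include <stdbool.h>
#include <stdint.h>

#define MSR_EFER 0xc0000080
#define EFER_LMA (1ULL << 10)

struct emul_ops {
	/* the indirection lets the same emulator serve real and nested state */
	int (*get_msr)(void *ctxt, uint32_t msr, uint64_t *val);
};

static bool ctxt_in_long_mode(const struct emul_ops *ops, void *ctxt)
{
	uint64_t efer = 0;

	ops->get_msr(ctxt, MSR_EFER, &efer);
	return (efer & EFER_LMA) != 0;
}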
mmu.h:111 unsigned long cr4, u64 efer, gpa_t nested_cr3);
smm.c:82 CHECK_SMRAM64_OFFSET(efer, 0xFED0);
249 smram->efer = vcpu->arch.efer;
531 if (kvm_set_msr(vcpu, MSR_EFER, smstate->efer & ~EFER_LMA))
613 unsigned long cr4, efer; local
621 efer = 0;
622 kvm_set_msr(vcpu, MSR_EFER, efer);
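
Entry and exit pair up here: line 249 stores the live EFER into the 64-bit SMRAM image (at offset 0xFED0, per the CHECK_SMRAM64_OFFSET at line 82), and line 531 restores it with LMA masked off, since LMA is derived from CR0.PG and EFER.LME rather than loaded verbatim. As a standalone sketch (not KVM's code):

#include <stdint.h>

#define EFER_LMA (1ULL << 10)

/* RSM restore: take the SMRAM-saved EFER but let LMA be recomputed by
 * the normal CR0/EFER machinery instead of restoring it directly */
static inline uint64_t efer_for_rsm(uint64_t saved_efer)
{
	return saved_efer & ~EFER_LMA;
}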
smm.h:104 u64 efer; member in struct:kvm_smram_state_64
x86.c:987 if ((vcpu->arch.efer & EFER_LME) && !is_paging(vcpu) &&
998 if (!(vcpu->arch.efer & EFER_LME) && (cr0 & X86_CR0_PG) &&
1723 static bool __kvm_valid_efer(struct kvm_vcpu *vcpu, u64 efer) argument
1725 if (efer & EFER_AUTOIBRS && !guest_cpuid_has(vcpu, X86_FEATURE_AUTOIBRS))
1728 if (efer & EFER_FFXSR && !guest_cpuid_has(vcpu, X86_FEATURE_FXSR_OPT))
1731 if (efer & EFER_SVME && !guest_cpuid_has(vcpu, X86_FEATURE_SVM))
1734 if (efer & (EFER_LME | EFER_LMA) &&
1738 if (efer & EFER_NX && !guest_cpuid_has(vcpu, X86_FEATURE_NX))
1744 bool kvm_valid_efer(struct kvm_vcpu *vcpu, u64 efer) argument
1746 if (efer
1756 u64 efer = msr_info->data; local
[all...]
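
__kvm_valid_efer() is a gate: each EFER feature bit is legal only if the matching CPUID feature is exposed to the guest. A standalone sketch of the same pattern, with a hypothetical feature struct standing in for guest_cpuid_has():

#include <stdbool.h>
#include <stdint.h>

#define EFER_LME      (1ULL << 8)
#define EFER_LMA      (1ULL << 10)
#define EFER_NX       (1ULL << 11)
#define EFER_SVME     (1ULL << 12)
#define EFER_FFXSR    (1ULL << 14)
#define EFER_AUTOIBRS (1ULL << 21)

struct guest_feats {
	bool autoibrs, fxsr_opt, svm, lm, nx;
};

static bool valid_efer(const struct guest_feats *f, uint64_t efer)
{
	if ((efer & EFER_AUTOIBRS) && !f->autoibrs)
		return false;
	if ((efer & EFER_FFXSR) && !f->fxsr_opt)
		return false;
	if ((efer & EFER_SVME) && !f->svm)
		return false;
	if ((efer & (EFER_LME | EFER_LMA)) && !f->lm)
		return false;
	if ((efer & EFER_NX) && !f->nx)
		return false;
	return true;
}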
x86.h:148 return !!(vcpu->arch.efer & EFER_LMA);
/linux-master/arch/x86/kvm/mmu/
mmu.c:189 const u64 efer; member in struct:kvm_mmu_role_regs
214 BUILD_MMU_ROLE_REGS_ACCESSOR(efer, nx, EFER_NX);
215 BUILD_MMU_ROLE_REGS_ACCESSOR(efer, lma, EFER_LMA);
234 BUILD_MMU_ROLE_ACCESSOR(base, efer, nx);
235 BUILD_MMU_ROLE_ACCESSOR(ext, efer, lma);
252 .efer = vcpu->arch.efer,
5420 unsigned long cr4, u64 efer, gpa_t nested_cr3)
5426 .efer = efer,
5419 kvm_init_shadow_npt_mmu(struct kvm_vcpu *vcpu, unsigned long cr0, unsigned long cr4, u64 efer, gpa_t nested_cr3) argument
[all...]
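
The BUILD_MMU_ROLE_* lines generate one inline predicate per (register, bit) pair, so MMU role computation reads as is_efer_nx(...) instead of open-coded masking. A sketch of the generator pattern (the kernel's macro differs in detail):

#include <stdbool.h>
#include <stdint.h>

#define EFER_LMA (1ULL << 10)
#define EFER_NX  (1ULL << 11)

struct mmu_role_regs {
	uint64_t cr0, cr4, efer;
};

/* each invocation emits an inline helper testing one flag in the snapshot */
#define BUILD_ROLE_REGS_ACCESSOR(reg, name, flag)			\
static inline bool is_##reg##_##name(const struct mmu_role_regs *regs)	\
{									\
	return !!(regs->reg & (flag));					\
}

BUILD_ROLE_REGS_ACCESSOR(efer, nx,  EFER_NX)
BUILD_ROLE_REGS_ACCESSOR(efer, lma, EFER_LMA)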
/linux-master/arch/x86/kvm/svm/
nested.c:94 svm->vmcb01.ptr->save.efer,
287 if (CC(!(save->efer & EFER_SVME)))
302 if ((save->efer & EFER_LME) && (save->cr0 & X86_CR0_PG)) {
313 if (CC(!kvm_valid_efer(vcpu, save->efer)))
395 to->efer = from->efer;
570 svm_set_efer(vcpu, svm->nested.save.efer);
895 vmcb01->save.efer = vcpu->arch.efer;
941 to_save->efer
[all...]
svm.c:296 int svm_set_efer(struct kvm_vcpu *vcpu, u64 efer) argument
299 u64 old_efer = vcpu->arch.efer;
300 vcpu->arch.efer = efer;
304 efer |= EFER_NX;
306 if (!(efer & EFER_LMA))
307 efer &= ~EFER_LME;
310 if ((old_efer & EFER_SVME) != (efer & EFER_SVME)) {
311 if (!(efer & EFER_SVME)) {
330 vcpu->arch.efer
574 uint64_t efer; local
610 uint64_t efer; local
[all...]
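
Lines 296-311 show svm_set_efer() sanitizing what actually lands in the VMCB: vcpu->arch.efer keeps the guest-visible value, while the hardware copy may gain NX and lose LME when LMA is clear. A simplified pure-function reconstruction (not the kernel's exact logic, which keys the adjustment on npt_enabled):

#include <stdbool.h>
#include <stdint.h>

#define EFER_LME (1ULL << 8)
#define EFER_LMA (1ULL << 10)
#define EFER_NX  (1ULL << 11)

static uint64_t vmcb_efer(uint64_t guest_efer, bool shadow_paging)
{
	uint64_t efer = guest_efer;

	if (shadow_paging) {
		efer |= EFER_NX;		/* shadow paging assumes NX */
		if (!(efer & EFER_LMA))
			efer &= ~EFER_LME;	/* no LME while not in long mode */
	}
	return efer;
}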
svm.h:120 u64 efer; member in struct:vmcb_save_area_cached
557 int svm_set_efer(struct kvm_vcpu *vcpu, u64 efer);
/linux-master/arch/x86/kvm/vmx/
nested.c:2217 return vmx->vcpu.arch.efer | (EFER_LMA | EFER_LME);
2219 return vmx->vcpu.arch.efer & ~(EFER_LMA | EFER_LME);
2666 vcpu->arch.efer = nested_vmx_calc_efer(vmx, vmcs12);
2668 vmx_set_efer(vcpu, vcpu->arch.efer);
2962 !!(vcpu->arch.efer & EFER_LMA)))
4461 vmcs12->guest_ia32_efer = vcpu->arch.efer;
4538 vcpu->arch.efer = vmcs12->host_ia32_efer;
4540 vcpu->arch.efer |= (EFER_LMA | EFER_LME);
4542 vcpu->arch.efer &= ~(EFER_LMA | EFER_LME);
4543 vmx_set_efer(vcpu, vcpu->arch.efer);
[all...]
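
Lines 2217-2219 compute the EFER a nested guest should run with: if the VM-entry controls request IA-32e mode, LMA and LME are forced on; otherwise both are stripped. As a standalone sketch:

#include <stdbool.h>
#include <stdint.h>

#define EFER_LME (1ULL << 8)
#define EFER_LMA (1ULL << 10)

static uint64_t calc_nested_efer(uint64_t vcpu_efer, bool ia32e_entry)
{
	if (ia32e_entry)
		return vcpu_efer | (EFER_LMA | EFER_LME);
	return vcpu_efer & ~(EFER_LMA | EFER_LME);
}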
vmx.c:1101 u64 guest_efer = vmx->vcpu.arch.efer;
1126 (enable_ept && ((vmx->vcpu.arch.efer ^ host_efer) & EFER_NX))) {
1869 (vmx->vcpu.arch.efer & EFER_SCE);
3118 int vmx_set_efer(struct kvm_vcpu *vcpu, u64 efer) argument
3126 vcpu->arch.efer = efer;
3128 if (efer & EFER_LMA)
3133 if (KVM_BUG_ON(efer & EFER_LMA, vcpu->kvm))
3157 vmx_set_efer(vcpu, vcpu->arch.efer | EFER_LMA);
3162 vmx_set_efer(vcpu, vcpu->arch.efer
[all...]
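
vmx_set_efer() must keep EFER.LMA and the VM-entry "IA-32e mode guest" control (bit 9 of the entry controls) in agreement, which is what the branch at line 3128 does. A sketch of that coupling, with a hypothetical controls struct standing in for KVM's vm_entry_controls helpers:

#include <stdint.h>

#define EFER_LMA            (1ULL << 10)
#define VM_ENTRY_IA32E_MODE (1u << 9)

struct entry_controls { uint32_t bits; };

static void sync_entry_ctls(struct entry_controls *ctls, uint64_t efer)
{
	if (efer & EFER_LMA)
		ctls->bits |= VM_ENTRY_IA32E_MODE;
	else
		ctls->bits &= ~VM_ENTRY_IA32E_MODE;
}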
vmx.h:397 int vmx_set_efer(struct kvm_vcpu *vcpu, u64 efer);
x86_ops.h:73 int vmx_set_efer(struct kvm_vcpu *vcpu, u64 efer);
/linux-master/arch/x86/power/
cpu.c:117 rdmsrl(MSR_EFER, ctxt->efer);
210 wrmsrl(MSR_EFER, ctxt->efer);
/linux-master/arch/x86/realmode/
init.c:100 u64 efer; local
148 rdmsrl(MSR_EFER, efer);
149 trampoline_header->efer = efer & ~EFER_LMA;
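
Line 149 masks LMA out of the trampoline's EFER on purpose: the real-mode trampoline starts CPUs outside long mode, and hardware sets LMA itself once paging is enabled with LME set, so advertising LMA in the stored value would be wrong. Sketch:

#include <stdint.h>

#define EFER_LMA (1ULL << 10)

/* EFER image for the real-mode trampoline: everything but LMA survives */
static inline uint64_t trampoline_efer(uint64_t host_efer)
{
	return host_efer & ~EFER_LMA;
}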

Completed in 436 milliseconds
