Searched refs:efer (Results 1 - 25 of 31) sorted by relevance


/linux-master/arch/x86/include/asm/
  suspend_64.h:44   unsigned long efer;                          (member in struct saved_context)
  realmode.h:52     u64 efer;
  svm.h:314         u64 efer;                                    (member in struct vmcb_save_area)
  svm.h:370         u64 efer;                                    (member in struct sev_es_save_area)
/linux-master/include/xen/interface/hvm/
  hvm_vcpu.h:35     uint64_t efer;                               (member in struct vcpu_hvm_x86_32)
  hvm_vcpu.h:89     uint64_t efer;                               (member in struct vcpu_hvm_x86_64)
/linux-master/tools/testing/selftests/kvm/lib/x86_64/
  svm.c:73          uint64_t efer;                               (local)
  svm.c:75          efer = rdmsr(MSR_EFER);
  svm.c:76          wrmsr(MSR_EFER, efer | EFER_SVME);
  svm.c:90          save->efer = rdmsr(MSR_EFER);
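The selftest hits above (svm.c:73-90) show the usual read-modify-write sequence for enabling SVM inside the guest: read MSR_EFER, OR in EFER_SVME, and write the result back before touching any VMCB state. A minimal sketch of that sequence, assuming the KVM selftest header provides rdmsr()/wrmsr() and the MSR/bit constants; the helper name is illustrative, not from the source:

#include "processor.h"  /* selftest header assumed to supply rdmsr(), wrmsr(), MSR_EFER, EFER_SVME */

/* Illustrative helper: enable SVM for the current guest CPU by setting
 * EFER.SVME while preserving every other EFER bit. */
static void guest_enable_svme(void)
{
        uint64_t efer = rdmsr(MSR_EFER);

        wrmsr(MSR_EFER, efer | EFER_SVME);
}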
/linux-master/arch/x86/realmode/
  init.c:100        u64 efer;                                    (local)
  init.c:148        rdmsrl(MSR_EFER, efer);
  init.c:149        trampoline_header->efer = efer & ~EFER_LMA;
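The realmode hits (init.c:148-149) capture the boot CPU's EFER into the trampoline header with EFER.LMA masked off; LMA is a read-only status bit that the CPU sets on its own once long mode is active, so a saved image carrying it would be wrong to reload. A minimal sketch of that step, assuming the trampoline_header layout behind the realmode.h:52 hit above:

#include <asm/msr.h>        /* rdmsrl(), MSR_EFER, EFER_LMA */
#include <asm/realmode.h>   /* struct trampoline_header (assumed to match the hit above) */

/* Illustrative helper mirroring init.c:148-149: record EFER for the
 * realmode trampoline, minus the read-only LMA status bit. */
static void sketch_save_trampoline_efer(struct trampoline_header *th)
{
        u64 efer;

        rdmsrl(MSR_EFER, efer);
        th->efer = efer & ~EFER_LMA;
}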
/linux-master/arch/x86/kvm/svm/
  nested.c:94       svm->vmcb01.ptr->save.efer,
  nested.c:287      if (CC(!(save->efer & EFER_SVME)))
  nested.c:302      if ((save->efer & EFER_LME) && (save->cr0 & X86_CR0_PG)) {
  nested.c:313      if (CC(!kvm_valid_efer(vcpu, save->efer)))
  nested.c:395      to->efer = from->efer;
  nested.c:570      svm_set_efer(vcpu, svm->nested.save.efer);
  nested.c:895      vmcb01->save.efer = vcpu->arch.efer;
  nested.c:941      to_save->efer ...
  (additional matches in this file not shown)
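Taken together, the nested.c hits at lines 287, 302 and 313 are the EFER part of the vmcb12 sanity checking done before a nested VMRUN is honoured: L1 must itself have EFER.SVME set, a long-mode-plus-paging combination triggers further control-register checks, and the whole EFER image must pass kvm_valid_efer(). A simplified sketch of just those EFER checks, using the vmcb_save_area_cached type from the svm.h:117 hit below and assuming the usual KVM-internal declarations are in scope; the CC() tracing wrapper and the CR0/CR3/CR4 checks are left out:

/* Illustrative condensation of the EFER checks visible in nested.c; not
 * the full save-area validation. */
static bool sketch_nested_efer_ok(struct kvm_vcpu *vcpu,
                                  const struct vmcb_save_area_cached *save)
{
        /* VMRUN is only legal if the L1 guest has enabled SVM itself. */
        if (!(save->efer & EFER_SVME))
                return false;

        /* nested.c:302 gates extra control-register checks on "long mode
         * enabled with paging on"; those checks are omitted here. */

        /* The proposed EFER must be a value the vCPU may legally load. */
        if (!kvm_valid_efer(vcpu, save->efer))
                return false;

        return true;
}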
  svm.c:296         int svm_set_efer(struct kvm_vcpu *vcpu, u64 efer)    (argument)
  svm.c:299         u64 old_efer = vcpu->arch.efer;
  svm.c:300         vcpu->arch.efer = efer;
  svm.c:304         efer |= EFER_NX;
  svm.c:306         if (!(efer & EFER_LMA))
  svm.c:307         efer &= ~EFER_LME;
  svm.c:310         if ((old_efer & EFER_SVME) != (efer & EFER_SVME)) {
  svm.c:311         if (!(efer & EFER_SVME)) {
  svm.c:330         vcpu->arch.efer ...
  svm.c:574         uint64_t efer;                               (local)
  svm.c:610         uint64_t efer;                               (local)
  (additional matches in this file not shown)
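The KVM svm.c hits (svm.c:296-330) show svm_set_efer() tracking the architectural value in vcpu->arch.efer while massaging the value that actually reaches the VMCB: NX may be forced on, and a leftover LME is cleared when LMA is not set, and a change in the SVME bit triggers nested-virtualization setup or teardown. A sketch of just the value massaging, with the caveat that the real function applies these fixups conditionally depending on the paging configuration, and the SVME-transition handling at svm.c:310-330 is omitted:

/* Illustrative reduction of the EFER handling in svm_set_efer(); the real
 * function applies the NX/LME fixups only under certain paging setups and
 * also handles SVME enable/disable and VMCB dirtying. */
static u64 sketch_svm_vmcb_efer(struct kvm_vcpu *vcpu, u64 efer)
{
        vcpu->arch.efer = efer;          /* architectural value, as the guest sees it */

        efer |= EFER_NX;                 /* svm.c:304 */

        if (!(efer & EFER_LMA))          /* svm.c:306-307 */
                efer &= ~EFER_LME;

        return efer;                     /* value destined for the VMCB save area */
}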
  svm.h:117         u64 efer;                                    (member in struct vmcb_save_area_cached)
  svm.h:548         int svm_set_efer(struct kvm_vcpu *vcpu, u64 efer);
/linux-master/arch/x86/kvm/
  smm.c:82          CHECK_SMRAM64_OFFSET(efer, 0xFED0);
  smm.c:249         smram->efer = vcpu->arch.efer;
  smm.c:531         if (kvm_set_msr(vcpu, MSR_EFER, smstate->efer & ~EFER_LMA))
  smm.c:613         unsigned long cr4, efer;                     (local)
  smm.c:621         efer = 0;
  smm.c:622         kvm_set_msr(vcpu, MSR_EFER, efer);
  smm.h:104         u64 efer;                                    (member in struct kvm_smram_state_64)
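The smm.c hits trace EFER through an SMM round trip: on entry the live value is copied into the 64-bit SMRAM state map at offset 0xFED0 (smm.c:82/249), and on RSM it is pushed back through kvm_set_msr() with the read-only LMA bit stripped (smm.c:531), long mode being re-established by the rest of the state restore. A minimal sketch of the restore half, using the kvm_smram_state_64 layout from the smm.h:104 hit and assuming the KVM-internal declarations are in scope:

/* Illustrative restore step mirroring smm.c:531: write the EFER image
 * saved in SMRAM back to the vCPU, minus EFER.LMA.  Returns the
 * kvm_set_msr() result (0 on success). */
static int sketch_rsm_restore_efer(struct kvm_vcpu *vcpu,
                                   const struct kvm_smram_state_64 *smstate)
{
        return kvm_set_msr(vcpu, MSR_EFER, smstate->efer & ~EFER_LMA);
}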
  mmu.h:109         unsigned long cr4, u64 efer, gpa_t nested_cr3);
  emulate.c:783     u64 efer;                                    (local)
  emulate.c:788     ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
  emulate.c:792     if (efer & EFER_LMA)
  emulate.c:800     if (efer & EFER_LMA)
  emulate.c:809     if (efer & EFER_LMA) {
  emulate.c:1512    u64 efer = 0;                                (local)
  emulate.c:1514    ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
  emulate.c:1515    if (!(efer & EFER_LMA))
  emulate.c:1686    u64 efer = 0;                                (local)
  emulate.c:1688    ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
  emulate.c:2407    u64 efer = 0;                                (local)
  emulate.c:2466    u64 efer = 0;                                (local)
  emulate.c:3909    u64 efer = 0;                                (local)
  (additional matches in this file not shown)
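Nearly all of the emulate.c hits follow one pattern: the instruction emulator cannot reach vcpu->arch.efer directly, so it pulls EFER through its ops vtable (ctxt->ops->get_msr()) and keys 64-bit-mode decisions off EFER_LMA. A sketch of that recurring check, with an illustrative helper name that is not in the source:

/* Illustrative helper: report whether the emulated CPU is in long mode,
 * using the same get_msr()-then-test-LMA pattern as the hits above. */
static bool sketch_emul_long_mode(struct x86_emulate_ctxt *ctxt)
{
        u64 efer = 0;

        ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);

        return !!(efer & EFER_LMA);
}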
  x86.h:146         return !!(vcpu->arch.efer & EFER_LMA);
  x86.c:984         if ((vcpu->arch.efer & EFER_LME) && !is_paging(vcpu) &&
  x86.c:995         if (!(vcpu->arch.efer & EFER_LME) && (cr0 & X86_CR0_PG) &&
  x86.c:1720        static bool __kvm_valid_efer(struct kvm_vcpu *vcpu, u64 efer)    (argument)
  x86.c:1722        if (efer & EFER_AUTOIBRS && !guest_cpuid_has(vcpu, X86_FEATURE_AUTOIBRS))
  x86.c:1725        if (efer & EFER_FFXSR && !guest_cpuid_has(vcpu, X86_FEATURE_FXSR_OPT))
  x86.c:1728        if (efer & EFER_SVME && !guest_cpuid_has(vcpu, X86_FEATURE_SVM))
  x86.c:1731        if (efer & (EFER_LME | EFER_LMA) &&
  x86.c:1735        if (efer & EFER_NX && !guest_cpuid_has(vcpu, X86_FEATURE_NX))
  x86.c:1741        bool kvm_valid_efer(struct kvm_vcpu *vcpu, u64 efer)    (argument)
  x86.c:1743        if (efer ...
  x86.c:1753        u64 efer = msr_info->data;                   (local)
  (additional matches in this file not shown)
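The x86.c hits at 1720-1735 show the shape of __kvm_valid_efer(): each optional EFER bit is accepted only if the corresponding CPUID feature is exposed to the guest. A sketch that reproduces the checks fully visible in the listing (the long-mode check at x86.c:1731 is truncated above and therefore left out, as is the MSR-write path that calls into this); KVM-internal declarations such as guest_cpuid_has() are assumed to be in scope:

/* Illustrative condensation of the CPUID gating visible in __kvm_valid_efer(). */
static bool sketch_efer_bits_supported(struct kvm_vcpu *vcpu, u64 efer)
{
        if ((efer & EFER_AUTOIBRS) && !guest_cpuid_has(vcpu, X86_FEATURE_AUTOIBRS))
                return false;

        if ((efer & EFER_FFXSR) && !guest_cpuid_has(vcpu, X86_FEATURE_FXSR_OPT))
                return false;

        if ((efer & EFER_SVME) && !guest_cpuid_has(vcpu, X86_FEATURE_SVM))
                return false;

        if ((efer & EFER_NX) && !guest_cpuid_has(vcpu, X86_FEATURE_NX))
                return false;

        return true;
}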
/linux-master/arch/x86/power/
  cpu.c:117         rdmsrl(MSR_EFER, ctxt->efer);
  cpu.c:210         wrmsrl(MSR_EFER, ctxt->efer);
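The power/cpu.c pair is the suspend/resume symmetry for this MSR: EFER is captured into the saved_context (the suspend_64.h:44 hit) on the save path and written back verbatim on the restore path. A sketch of that symmetry; in the kernel these two lines sit inside the larger processor-state save/restore paths alongside the other MSRs and control registers:

/* Illustrative save/restore pair mirroring cpu.c:117 and cpu.c:210. */
static void sketch_save_efer(struct saved_context *ctxt)
{
        rdmsrl(MSR_EFER, ctxt->efer);
}

static void sketch_restore_efer(struct saved_context *ctxt)
{
        wrmsrl(MSR_EFER, ctxt->efer);
}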
/linux-master/tools/testing/selftests/kvm/x86_64/
  sync_regs_test.c:181    (run->s.regs.sregs.efer & EFER_LME),
  sync_regs_test.c:184    !!(run->s.regs.sregs.efer & EFER_LME));
/linux-master/tools/testing/selftests/kvm/include/x86_64/
  svm.h:209         u64 efer;                                    (member in struct vmcb_save_area)
/linux-master/arch/x86/hyperv/
  hv_vtl.c:121      input->vp_context.efer = __rdmsr(MSR_EFER);
/linux-master/arch/x86/include/uapi/asm/
  kvm.h:150         __u64 efer;                                  (member in struct kvm_sregs)
  kvm.h:161         __u64 efer;                                  (member in struct kvm_sregs2)
/linux-master/tools/arch/x86/include/uapi/asm/
  kvm.h:150         __u64 efer;                                  (member in struct kvm_sregs)
  kvm.h:161         __u64 efer;                                  (member in struct kvm_sregs2)
/linux-master/drivers/parport/
  parport_pc.c:1114    static void decode_winbond(int efer, int key, int devid, int devrev, int oldid)    (argument)
  parport_pc.c:1159    efer, key, devid, devrev, oldid, type);
  parport_pc.c:1162    show_parconfig_winbond(efer, key);
  parport_pc.c:1165    static void decode_smsc(int efer, int key, int devid, int devrev)    (argument)
  parport_pc.c:1191    efer, key, devid, devrev, type);
  parport_pc.c:1194    func(efer, key);
/linux-master/arch/x86/kvm/vmx/
  nested.c:2195     return vmx->vcpu.arch.efer | (EFER_LMA | EFER_LME);
  nested.c:2197     return vmx->vcpu.arch.efer & ~(EFER_LMA | EFER_LME);
  nested.c:2644     vcpu->arch.efer = nested_vmx_calc_efer(vmx, vmcs12);
  nested.c:2646     vmx_set_efer(vcpu, vcpu->arch.efer);
  nested.c:2940     !!(vcpu->arch.efer & EFER_LMA)))
  nested.c:4439     vmcs12->guest_ia32_efer = vcpu->arch.efer;
  nested.c:4516     vcpu->arch.efer = vmcs12->host_ia32_efer;
  nested.c:4518     vcpu->arch.efer |= (EFER_LMA | EFER_LME);
  nested.c:4520     vcpu->arch.efer &= ~(EFER_LMA | EFER_LME);
  nested.c:4521     vmx_set_efer(vcpu, vcpu->arch.efer);
  (additional matches in this file not shown)
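The vmx/nested.c hits at 2195/2197 and 4516-4521 are two sides of the same rule: whether building the L2 guest's EFER (nested_vmx_calc_efer()) or restoring the L1 host's EFER on VM-exit, KVM forces EFER.LMA and EFER.LME on or off together, driven by whether the relevant VM-entry/VM-exit control requests IA-32e mode. A self-contained sketch of that decision, with the vmcs12 control inspection reduced to a plain bool parameter:

/* Illustrative reduction of the LMA/LME handling at nested.c:2195/2197:
 * the control-field lookup is replaced by a caller-supplied flag. */
static u64 sketch_nested_efer(u64 efer, bool ia32e_mode)
{
        if (ia32e_mode)
                return efer | (EFER_LMA | EFER_LME);

        return efer & ~(EFER_LMA | EFER_LME);
}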
  vmx.c:1096        u64 guest_efer = vmx->vcpu.arch.efer;
  vmx.c:1121        (enable_ept && ((vmx->vcpu.arch.efer ^ host_efer) & EFER_NX))) {
  vmx.c:1864        (vmx->vcpu.arch.efer & EFER_SCE);
  vmx.c:3109        int vmx_set_efer(struct kvm_vcpu *vcpu, u64 efer)    (argument)
  vmx.c:3117        vcpu->arch.efer = efer;
  vmx.c:3119        if (efer & EFER_LMA)
  vmx.c:3124        if (KVM_BUG_ON(efer & EFER_LMA, vcpu->kvm))
  vmx.c:3148        vmx_set_efer(vcpu, vcpu->arch.efer | EFER_LMA);
  vmx.c:3153        vmx_set_efer(vcpu, vcpu->arch.efer ...
  (additional matches in this file not shown)
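On the VMX side, the hits at vmx.c:3148/3153 show mode transitions being funnelled through vmx_set_efer(): entering long mode re-calls it with EFER.LMA set in the tracked value, and the truncated companion call at vmx.c:3153 presumably does the opposite. A sketch of those two call sites, with the clearing half explicitly marked as an assumption since the listing cuts it off:

/* Illustrative wrappers around the call sites at vmx.c:3148/3153.  The
 * LMA-clearing variant is an assumption: the second call is truncated in
 * the search output and is shown here as the symmetric counterpart. */
static void sketch_enter_lmode(struct kvm_vcpu *vcpu)
{
        vmx_set_efer(vcpu, vcpu->arch.efer | EFER_LMA);     /* vmx.c:3148 */
}

static void sketch_exit_lmode(struct kvm_vcpu *vcpu)
{
        vmx_set_efer(vcpu, vcpu->arch.efer & ~EFER_LMA);    /* assumed counterpart */
}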
/linux-master/arch/x86/kvm/mmu/
  mmu.c:189         const u64 efer;                              (member in struct kvm_mmu_role_regs)
  mmu.c:214         BUILD_MMU_ROLE_REGS_ACCESSOR(efer, nx, EFER_NX);
  mmu.c:215         BUILD_MMU_ROLE_REGS_ACCESSOR(efer, lma, EFER_LMA);
  mmu.c:234         BUILD_MMU_ROLE_ACCESSOR(base, efer, nx);
  mmu.c:235         BUILD_MMU_ROLE_ACCESSOR(ext, efer, lma);
  mmu.c:252         .efer = vcpu->arch.efer,
  mmu.c:5420        kvm_init_shadow_npt_mmu(struct kvm_vcpu *vcpu, unsigned long cr0, unsigned long cr4, u64 efer, gpa_t nested_cr3)    (argument)
  mmu.c:5421        unsigned long cr4, u64 efer, gpa_t nested_cr3)
  mmu.c:5427        .efer = efer,
  (additional matches in this file not shown)
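Finally, the mmu.c hits show EFER being snapshotted into struct kvm_mmu_role_regs (mmu.c:189/252) and then consumed through generated one-bit accessors (the BUILD_MMU_ROLE_REGS_ACCESSOR lines), so MMU-role computation works from a stable copy rather than re-reading vCPU state. A sketch of what one of those generated accessors amounts to; the function name is illustrative, not the macro's actual output:

/* Illustrative stand-in for the accessor generated by
 * BUILD_MMU_ROLE_REGS_ACCESSOR(efer, nx, EFER_NX): a one-bit predicate
 * over the captured register snapshot. */
static inline bool sketch_role_regs_efer_nx(const struct kvm_mmu_role_regs *regs)
{
        return !!(regs->efer & EFER_NX);
}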

Completed in 308 milliseconds
