Lines Matching defs:msr_index

The matches below are a cross-reference listing for the identifier msr_index, apparently taken from KVM's nested VMX code (arch/x86/kvm/vmx/nested.c); the leading number on each line is the source line of the match in that file.

975:  u32 msr_index,
985:  if (msr_index == MSR_IA32_TSC) {
997:  if (kvm_get_msr(vcpu, msr_index, data)) {
999:  msr_index);
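Lines 975-999 fall in the helper (apparently nested_vmx_get_vmexit_msr_value() in recent kernels) that produces the value written into a vmcs12 VM-exit MSR-store entry on an emulated L2->L1 exit. MSR_IA32_TSC gets special treatment: if the MSR is in the autostore list, the value the hardware already saved at VM-exit is preferred over a fresh read, since a fresh read would also include the time spent emulating the exit. A minimal user-space sketch of that shape, with hypothetical stand-in names (struct msr_entry, find_msr_slot and get_vmexit_msr_value are not the kernel's identifiers, and the TSC transformation is reduced to adding an offset):

#include <stdbool.h>
#include <stdint.h>

#define MSR_IA32_TSC 0x00000010u

struct msr_entry { uint32_t index; uint64_t value; };

/* Linear scan of an MSR load/store list, mirroring the listing's
 * vmx_find_loadstore_msr_slot()-style lookup. */
static int find_msr_slot(const struct msr_entry *list, int nr, uint32_t msr_index)
{
	for (int i = 0; i < nr; i++)
		if (list[i].index == msr_index)
			return i;
	return -1;
}

/* Prefer the TSC value the hardware autostored at VM-exit; any other
 * MSR (or a missing slot) would fall back to an ordinary MSR read,
 * which is what the kvm_get_msr() call at line 997 does. */
static bool get_vmexit_msr_value(const struct msr_entry *autostore, int nr,
				 uint32_t msr_index, uint64_t tsc_offset,
				 uint64_t *data)
{
	if (msr_index == MSR_IA32_TSC) {
		int i = find_msr_slot(autostore, nr, MSR_IA32_TSC);
		if (i >= 0) {
			*data = autostore[i].value + tsc_offset;
			return true;
		}
	}
	return false; /* fallback read not modeled in this sketch */
}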
1055: static bool nested_msr_store_list_has_msr(struct kvm_vcpu *vcpu, u32 msr_index)
1067: if (e.index == msr_index)

1074: u32 msr_index)
1083: msr_autostore_slot = vmx_find_loadstore_msr_slot(autostore, msr_index);
1085: in_vmcs12_store_list = nested_msr_store_list_has_msr(vcpu, msr_index);
1098: msr_index);
1102: autostore->val[last].index = msr_index;
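Lines 1055-1102 keep the autostore list that L0 programs into hardware in sync with the VM-exit MSR-store list vmcs12 requests: the MSR is looked up in both lists, then appended to the autostore list if vmcs12 wants it stored and it is not there yet, or dropped if vmcs12 no longer lists it. A self-contained sketch of that add/remove dance, under the same hypothetical names as above (the capacity of 16 is illustrative, not the kernel's limit):

#include <stdbool.h>
#include <stdint.h>

struct msr_entry { uint32_t index; uint64_t value; };
struct msr_autostore { int nr; struct msr_entry val[16]; };

static int find_msr_slot(const struct msr_entry *list, int nr, uint32_t msr_index)
{
	for (int i = 0; i < nr; i++)
		if (list[i].index == msr_index)
			return i;
	return -1;
}

/* Reconcile the hardware-programmed autostore list with what the
 * guest's store list requests for one MSR. */
static void update_msr_autostore(struct msr_autostore *as,
				 bool in_vmcs12_store_list, uint32_t msr_index)
{
	int slot = find_msr_slot(as->val, as->nr, msr_index);

	if (in_vmcs12_store_list && slot < 0) {
		if (as->nr == 16)
			return;	/* list full; the real code warns and bails */
		as->val[as->nr++].index = msr_index;	/* cf. line 1102 */
	} else if (!in_vmcs12_store_list && slot >= 0) {
		/* Order is irrelevant, so swap in the last entry. */
		as->val[slot] = as->val[--as->nr];
	}
}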
1259: static void vmx_get_control_msr(struct nested_vmx_msrs *msrs, u32 msr_index,
1262: switch (msr_index) {

1289: vmx_restore_control_msr(struct vcpu_vmx *vmx, u32 msr_index, u64 data)
1294: vmx_get_control_msr(&vmcs_config.nested, msr_index, &lowp, &highp);
1306: vmx_get_control_msr(&vmx->nested.msrs, msr_index, &lowp, &highp);
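Lines 1259-1306 handle the VMX control capability MSRs, which pack two 32-bit halves: the low half reports allowed 0-settings (a set bit means the control must be 1) and the high half reports allowed 1-settings (a clear bit means the control must be 0). vmx_get_control_msr() hands back pointers to both halves, first from the host capabilities (vmcs_config.nested, line 1294) for validation and then from the per-vCPU copy (line 1306) for the update. The validation reduces to two subset checks; a hedged stand-alone version (is_bitwise_subset matches a kernel helper of that name, the rest are stand-ins):

#include <stdbool.h>
#include <stdint.h>

#define LOW32  0x00000000ffffffffULL /* allowed-0 settings: set => must be 1 */
#define HIGH32 0xffffffff00000000ULL /* allowed-1 settings: clear => must be 0 */

static bool is_bitwise_subset(uint64_t superset, uint64_t subset, uint64_t mask)
{
	superset &= mask;
	subset   &= mask;
	return (superset | subset) == superset;
}

/* Userspace may only narrow what the host reports: it can force more
 * bits to 1 and allow fewer bits to be 1, never the reverse. */
static bool control_msr_valid(uint64_t host, uint64_t requested)
{
	return is_bitwise_subset(host, requested, HIGH32) && /* must-be-0 stay 0 */
	       is_bitwise_subset(requested, host, LOW32);    /* must-be-1 stay 1 */
}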
1361: static u64 *vmx_get_fixed0_msr(struct nested_vmx_msrs *msrs, u32 msr_index)
1363: switch (msr_index) {

1373: static int vmx_restore_fixed0_msr(struct vcpu_vmx *vmx, u32 msr_index, u64 data)
1375: const u64 *msr = vmx_get_fixed0_msr(&vmcs_config.nested, msr_index);
1384: *vmx_get_fixed0_msr(&vmx->nested.msrs, msr_index) = data;
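Lines 1361-1384 are the analogous pair for the FIXED0 MSRs (IA32_VMX_CR0_FIXED0 / IA32_VMX_CR4_FIXED0), whose set bits name CR0/CR4 bits that must be 1 while in VMX operation. The restore rule runs in one direction only: userspace may declare additional bits fixed-to-1, which only constrains the guest further, but cannot clear any bit the host reports. As a hypothetical check:

#include <stdbool.h>
#include <stdint.h>

/* Every must-be-1 bit the host reports must survive in the new value. */
static bool fixed0_msr_valid(uint64_t host_fixed0, uint64_t requested)
{
	return (requested & host_fixed0) == host_fixed0;
}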
1393: int vmx_set_vmx_msr(struct kvm_vcpu *vcpu, u32 msr_index, u64 data)
1404: switch (msr_index) {
1426: return vmx_restore_control_msr(vmx, msr_index, data);
1431: return vmx_restore_fixed0_msr(vmx, msr_index, data);

1458: int vmx_get_vmx_msr(struct nested_vmx_msrs *msrs, u32 msr_index, u64 *pdata)
1460: switch (msr_index) {
1469: if (msr_index == MSR_IA32_VMX_PINBASED_CTLS)
1477: if (msr_index == MSR_IA32_VMX_PROCBASED_CTLS)
1485: if (msr_index == MSR_IA32_VMX_EXIT_CTLS)
1493: if (msr_index == MSR_IA32_VMX_ENTRY_CTLS)
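vmx_set_vmx_msr() (1393-1431) is the dispatch into the restore helpers above; vmx_get_vmx_msr() (1458-1493) reports the capabilities back. The repeated pattern at 1469-1493 handles the TRUE/non-TRUE MSR pairs: only one low/high pair of state is kept, and when the legacy (non-TRUE) variant is read, the architectural default-to-1 control bits are ORed back into the must-be-1 half. A sketch of that read path, with the pin-based pair's default1 class (bits 1, 2 and 4) used as an illustrative mask:

#include <stdbool.h>
#include <stdint.h>

/* Default-to-1 control bits for the pin-based pair (bits 1, 2, 4). */
#define PIN_ALWAYS_ON_WITHOUT_TRUE_MSR 0x00000016ULL

static uint64_t control_msr(uint32_t low, uint32_t high)
{
	return ((uint64_t)high << 32) | low;
}

/* One stored low/high pair serves both MSRs of the pair; the legacy
 * variant additionally reports the default1 bits as must-be-1. */
static uint64_t read_pinbased_ctls(uint32_t low, uint32_t high,
				   bool true_variant)
{
	uint64_t v = control_msr(low, high);

	if (!true_variant)
		v |= PIN_ALWAYS_ON_WITHOUT_TRUE_MSR;
	return v;
}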
6024: u32 msr_index = kvm_rcx_read(vcpu);
6038: if (msr_index >= 0xc0000000) {
6039: msr_index -= 0xc0000000;
6043: /* Then read the msr_index'th bit from this bitmap: */
6044: if (msr_index < 1024*8) {
6046: if (kvm_vcpu_read_guest(vcpu, bitmap + msr_index/8, &b, 1))
6048: return 1 & (b >> (msr_index & 7));
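Finally, lines 6024-6048 decide whether an L2 RDMSR/WRMSR exit belongs to L1 by testing L1's 4 KiB MSR bitmap, whose layout (per the Intel SDM) is four 1024-byte regions: read-low, read-high, write-low, write-high, with the high regions covering MSRs 0xc0000000-0xc0001fff rebased to 0. The snippet at 6046-6048 is exactly the byte-fetch-and-bit-test, done through a guest-memory read since the bitmap lives in L1 memory. A self-contained version of the lookup (msr_bitmap_intercepted is a hypothetical name; treating out-of-range MSRs as "always exit" matches the surrounding handler's default):

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

/*
 * VMX MSR bitmap layout (4 KiB):
 *   0x000: reads,  MSRs 0x00000000-0x00001fff
 *   0x400: reads,  MSRs 0xc0000000-0xc0001fff
 *   0x800: writes, MSRs 0x00000000-0x00001fff
 *   0xc00: writes, MSRs 0xc0000000-0xc0001fff
 * A set bit means the access causes a VM-exit.
 */
static bool msr_bitmap_intercepted(const uint8_t bitmap[4096],
				   uint32_t msr_index, bool write)
{
	size_t base = write ? 0x800 : 0x000;

	if (msr_index >= 0xc0000000) {
		msr_index -= 0xc0000000;   /* rebase the high range to 0 */
		base += 0x400;
	}
	if (msr_index >= 1024 * 8)         /* each region holds 8192 bits */
		return true;               /* outside either range: always exit */

	return (bitmap[base + msr_index / 8] >> (msr_index & 7)) & 1;
}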