Lines matching defs:kvm in arch/arm64/kvm/nested.c; each match is prefixed with its line number in the file.

7 #include <linux/kvm.h>
172 masks = vcpu->kvm->arch.sysreg_masks;
184 static void set_sysreg_masks(struct kvm *kvm, int sr, u64 res0, u64 res1)
188 kvm->arch.sysreg_masks->mask[i].res0 = res0;
189 kvm->arch.sysreg_masks->mask[i].res1 = res1;
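For context, set_sysreg_masks() records one RES0/RES1 pair per register in the per-VM table. Below is a minimal userspace sketch of how such a pair might be applied when sanitising a register value; the struct layout and the sanitise_value() helper are illustrative assumptions, not the kernel's definitions.

#include <stdint.h>
#include <stdio.h>

struct reg_mask {
	uint64_t res0;	/* bits that must read as zero */
	uint64_t res1;	/* bits that must read as one */
};

/* Clear the RES0 bits and force the RES1 bits of a raw value. */
static uint64_t sanitise_value(const struct reg_mask *m, uint64_t v)
{
	return (v & ~m->res0) | m->res1;
}

int main(void)
{
	struct reg_mask m = { .res0 = 0xff00, .res1 = 0x0001 };

	/* 0xffff sanitises to 0x00ff: RES0 bits cleared, bit 0 already set */
	printf("%#llx\n", (unsigned long long)sanitise_value(&m, 0xffff));
	return 0;
}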
192 int kvm_init_nv_sysregs(struct kvm *kvm)
197 mutex_lock(&kvm->arch.config_lock);
199 if (kvm->arch.sysreg_masks)
202 kvm->arch.sysreg_masks = kzalloc(sizeof(*(kvm->arch.sysreg_masks)),
204 if (!kvm->arch.sysreg_masks) {
210 kvm->arch.id_regs[i] = limit_nv_id_reg(IDX_IDREG(i),
211 kvm->arch.id_regs[i]);
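limit_nv_id_reg() (lines 210-211) narrows each ID register to what the nested-virt implementation can honour. A hedged sketch of the underlying field-clamping idea, assuming a 4-bit unsigned field; clamp_field() is a hypothetical helper, not the kernel's code.

#include <stdint.h>

/* Cap a 4-bit unsigned ID-register field at a supported maximum. */
static uint64_t clamp_field(uint64_t reg, unsigned int shift, uint64_t max)
{
	uint64_t field = (reg >> shift) & 0xf;

	if (field > max) {
		reg &= ~(0xfULL << shift);
		reg |= max << shift;
	}
	return reg;
}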
215 if (!kvm_has_feat_enum(kvm, ID_AA64MMFR1_EL1, VMIDBits, 16))
217 if (!kvm_has_feat(kvm, ID_AA64MMFR2_EL1, CnP, IMP))
219 set_sysreg_masks(kvm, VTTBR_EL2, res0, res1);
224 set_sysreg_masks(kvm, VTCR_EL2, res0, res1);
229 set_sysreg_masks(kvm, VMPIDR_EL2, res0, res1);
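Note that line 215 uses kvm_has_feat_enum() where the surrounding checks use kvm_has_feat(): VMIDBits is an enumerated field (0b0000 means 8 VMID bits, 0b0010 means 16), so only an exact match is meaningful, while ordinary feature fields are compared as "at least". A simplified sketch of the two flavours; the helper names and the flat 4-bit unsigned encoding are assumptions.

#include <stdbool.h>
#include <stdint.h>

static uint64_t id_field(uint64_t idreg, unsigned int shift)
{
	return (idreg >> shift) & 0xf;
}

/* kvm_has_feat() flavour: feature fields normally grow monotonically. */
static bool feat_at_least(uint64_t idreg, unsigned int shift, uint64_t min)
{
	return id_field(idreg, shift) >= min;
}

/* kvm_has_feat_enum() flavour: enumerated fields want an exact value. */
static bool feat_exactly(uint64_t idreg, unsigned int shift, uint64_t val)
{
	return id_field(idreg, shift) == val;
}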
234 if (!kvm_has_feat(kvm, ID_AA64MMFR1_EL1, TWED, IMP))
236 if (!kvm_has_feat(kvm, ID_AA64PFR1_EL1, MTE, MTE2))
238 if (!kvm_has_feat(kvm, ID_AA64MMFR2_EL1, EVT, TTLBxS))
240 if (!kvm_has_feat(kvm, ID_AA64PFR0_EL1, CSV2, CSV2_2) &&
241 !kvm_has_feat(kvm, ID_AA64PFR1_EL1, CSV2_frac, CSV2_1p2))
243 if (!kvm_has_feat(kvm, ID_AA64MMFR2_EL1, EVT, IMP))
245 if (!kvm_has_feat(kvm, ID_AA64PFR0_EL1, AMU, V1P1))
247 if (!kvm_has_feat(kvm, ID_AA64PFR0_EL1, RAS, V1P1))
249 if (!kvm_has_feat(kvm, ID_AA64MMFR2_EL1, FWB, IMP))
251 if (!kvm_has_feat(kvm, ID_AA64MMFR2_EL1, NV, NV2))
253 if (!kvm_has_feat(kvm, ID_AA64MMFR2_EL1, NV, IMP))
255 if (!(__vcpu_has_feature(&kvm->arch, KVM_ARM_VCPU_PTRAUTH_ADDRESS) &&
256 __vcpu_has_feature(&kvm->arch, KVM_ARM_VCPU_PTRAUTH_GENERIC)))
258 if (!kvm_has_feat(kvm, ID_AA64ISAR0_EL1, TME, IMP))
260 if (!kvm_has_feat(kvm, ID_AA64PFR0_EL1, RAS, IMP))
262 if (!kvm_has_feat(kvm, ID_AA64MMFR1_EL1, LO, IMP))
264 if (!kvm_has_feat(kvm, ID_AA64MMFR4_EL1, E2H0, IMP))
266 set_sysreg_masks(kvm, HCR_EL2, res0, res1);
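Every register block in this function follows the same accumulation shape: start res0/res1 from the register's architectural baseline, OR in the bits whose backing feature the VM lacks, then record the pair, as at line 266. A schematic, compilable sketch of that pattern; has_feat() and the bit constants are stand-ins for kvm_has_feat() and the real bit layout.

#include <stdbool.h>
#include <stdint.h>

#define EXAMPLE_FEAT_BITS	(0x3ULL << 59)	/* hypothetical positions */

static bool has_feat(int feat)	/* stand-in for kvm_has_feat() */
{
	(void)feat;
	return false;
}

static void build_masks(uint64_t *res0, uint64_t *res1)
{
	*res0 = 0;	/* or the register's architectural RES0 baseline */
	*res1 = 0;

	if (!has_feat(0 /* hypothetical feature ID */))
		*res0 |= EXAMPLE_FEAT_BITS;	/* absent feature: bits are RES0 */

	/* ...one such check per feature-gated field, then record the pair */
}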
271 if (!kvm_has_feat(kvm, ID_AA64ISAR3_EL1, PACM, TRIVIAL_IMP))
273 if (!kvm_has_feat(kvm, ID_AA64PFR2_EL1, FPMR, IMP))
275 if (!kvm_has_feat(kvm, ID_AA64PFR1_EL1, GCS, IMP))
277 if (!kvm_has_feat(kvm, ID_AA64ISAR2_EL1, SYSREG_128, IMP))
279 if (!kvm_has_feat(kvm, ID_AA64MMFR3_EL1, ADERR, DEV_ASYNC))
281 if (!kvm_has_feat(kvm, ID_AA64PFR1_EL1, DF2, IMP))
283 if (!kvm_has_feat(kvm, ID_AA64MMFR3_EL1, D128, IMP))
285 if (!kvm_has_feat(kvm, ID_AA64PFR1_EL1, THE, IMP))
287 if (!kvm_has_feat(kvm, ID_AA64MMFR3_EL1, SCTLRX, IMP))
289 if (!kvm_has_feat(kvm, ID_AA64MMFR3_EL1, TCRX, IMP))
291 if (!kvm_has_feat(kvm, ID_AA64ISAR2_EL1, MOPS, IMP))
293 if (!kvm_has_feat(kvm, ID_AA64MMFR1_EL1, CMOW, IMP))
295 if (!kvm_has_feat(kvm, ID_AA64PFR1_EL1, NMI, IMP))
297 if (!kvm_has_feat(kvm, ID_AA64PFR1_EL1, SME, IMP) ||
298     !kvm_has_feat(kvm, ID_AA64SMFR0_EL1, FA64, IMP))
300 if (!kvm_has_feat(kvm, ID_AA64ISAR1_EL1, XS, IMP))
302 if (!kvm_has_feat(kvm, ID_AA64ISAR1_EL1, LS64, LS64_V))
304 if (!kvm_has_feat(kvm, ID_AA64ISAR1_EL1, LS64, LS64))
306 if (!kvm_has_feat(kvm, ID_AA64ISAR1_EL1, LS64, LS64_ACCDATA))
308 set_sysreg_masks(kvm, HCRX_EL2, res0, res1);
312 if (!(__vcpu_has_feature(&kvm->arch, KVM_ARM_VCPU_PTRAUTH_ADDRESS) &&
313 __vcpu_has_feature(&kvm->arch, KVM_ARM_VCPU_PTRAUTH_GENERIC)))
317 if (!kvm_has_feat(kvm, ID_AA64MMFR1_EL1, LO, IMP))
321 if (!kvm_has_feat(kvm, ID_AA64PFR0_EL1, CSV2, CSV2_2) &&
322 !kvm_has_feat(kvm, ID_AA64PFR1_EL1, CSV2_frac, CSV2_1p2))
324 if (!kvm_has_feat(kvm, ID_AA64PFR0_EL1, GIC, IMP))
326 if (!kvm_has_feat(kvm, ID_AA64PFR0_EL1, RAS, IMP))
332 if (!kvm_has_feat(kvm, ID_AA64ISAR1_EL1, LS64, LS64_ACCDATA))
334 if (!kvm_has_feat(kvm, ID_AA64PFR1_EL1, GCS, IMP))
336 if (!kvm_has_feat(kvm, ID_AA64PFR1_EL1, SME, IMP))
338 if (!kvm_has_feat(kvm, ID_AA64PFR1_EL1, THE, IMP))
340 if (!kvm_has_feat(kvm, ID_AA64MMFR3_EL1, S1PIE, IMP))
342 if (!kvm_has_feat(kvm, ID_AA64MMFR3_EL1, S1POE, IMP))
344 if (!kvm_has_feat(kvm, ID_AA64MMFR3_EL1, S2POE, IMP))
346 if (!kvm_has_feat(kvm, ID_AA64MMFR3_EL1, AIE, IMP))
348 set_sysreg_masks(kvm, HFGRTR_EL2, res0 | __HFGRTR_EL2_RES0, res1);
349 set_sysreg_masks(kvm, HFGWTR_EL2, res0 | __HFGWTR_EL2_RES0, res1);
353 if (!kvm_has_feat(kvm, ID_AA64DFR0_EL1, DoubleLock, IMP))
355 if (!kvm_has_feat(kvm, ID_AA64DFR0_EL1, PMUVer, IMP))
362 if (!kvm_has_feat(kvm, ID_AA64DFR0_EL1, PMSVer, IMP))
369 if (!kvm_has_feat(kvm, ID_AA64DFR0_EL1, TraceVer, IMP))
377 if (!kvm_has_feat(kvm, ID_AA64DFR0_EL1, TraceBuffer, IMP))
382 if (!kvm_has_feat(kvm, ID_AA64DFR0_EL1, BRBE, IMP))
385 if (!kvm_has_feat(kvm, ID_AA64DFR0_EL1, PMSVer, V1P2))
387 set_sysreg_masks(kvm, HDFGRTR_EL2, res0 | HDFGRTR_EL2_RES0, res1);
390 if (!kvm_has_feat(kvm, ID_AA64DFR0_EL1, PMUVer, IMP))
392 if (!kvm_has_feat(kvm, ID_AA64DFR0_EL1, TraceVer, IMP))
394 if (!kvm_has_feat(kvm, ID_AA64DFR0_EL1, TraceFilt, IMP))
396 set_sysreg_masks(kvm, HDFGWTR_EL2, res0 | HDFGWTR_EL2_RES0, res1);
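Lines 387-396 show two reuse idioms: one feature scan can feed two registers (as at lines 348-349 for HFGRTR_EL2/HFGWTR_EL2), and the write-side debug mask extends the read-side res0 rather than rebuilding it. A compilable sketch of the second idiom; every name and bit value here is a placeholder.

#include <stdint.h>

struct pair { uint64_t res0, res1; };

static struct pair masks[2];	/* [0] read-side reg, [1] write-side reg */

static void record(int idx, uint64_t res0, uint64_t res1)
{
	masks[idx].res0 = res0;
	masks[idx].res1 = res1;
}

static void build_rw_pair(void)
{
	uint64_t res0 = 0, res1 = 0;

	res0 |= 0x1;			/* trap bit shared by reads and writes */
	record(0, res0 | 0x100, res1);	/* read side, plus its own RES0 bits */

	res0 |= 0x2;			/* write-only trap, added on top */
	record(1, res0 | 0x200, res1);	/* write side, plus its own RES0 bits */
}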
401 if (!kvm_has_feat(kvm, ID_AA64ISAR1_EL1, DPB, DPB2))
403 if (!kvm_has_feat(kvm, ID_AA64MMFR1_EL1, PAN, PAN2))
405 if (!kvm_has_feat(kvm, ID_AA64ISAR0_EL1, TLB, OS))
411 if (!kvm_has_feat(kvm, ID_AA64ISAR0_EL1, TLB, RANGE))
418 if (!kvm_has_feat(kvm, ID_AA64ISAR1_EL1, SPECRES, IMP))
421 if (!kvm_has_feat(kvm, ID_AA64DFR0_EL1, BRBE, IMP))
423 if (!kvm_has_feat(kvm, ID_AA64PFR1_EL1, GCS, IMP))
426 if (!kvm_has_feat(kvm, ID_AA64ISAR1_EL1, SPECRES, COSP_RCTX))
428 if (!kvm_has_feat(kvm, ID_AA64ISAR2_EL1, ATS1A, IMP))
430 set_sysreg_masks(kvm, HFGITR_EL2, res0, res1);
435 if (!kvm_has_feat(kvm, ID_AA64PFR0_EL1, AMU, V1P1))
437 set_sysreg_masks(kvm, HAFGRTR_EL2, res0, res1);
439 mutex_unlock(&kvm->arch.config_lock);
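Taken together, kvm_init_nv_sysregs() is an allocate-once-under-lock initialiser: take the per-VM config lock, return early if the mask table already exists, otherwise allocate and fill it before unlocking (lines 197-204 and 439). A userspace sketch of that shape, with pthreads standing in for the kernel mutex and all types and names assumed.

#include <pthread.h>
#include <stdint.h>
#include <stdlib.h>

struct masks { uint64_t res0, res1; };

struct vm {
	pthread_mutex_t config_lock;
	struct masks *sysreg_masks;
};

static int init_masks_once(struct vm *vm)
{
	int ret = 0;

	pthread_mutex_lock(&vm->config_lock);

	if (vm->sysreg_masks)		/* already set up: nothing to do */
		goto out;

	vm->sysreg_masks = calloc(1, sizeof(*vm->sysreg_masks));
	if (!vm->sysreg_masks) {
		ret = -1;		/* the kernel returns -ENOMEM here */
		goto out;
	}

	/* ...compute and store each register's RES0/RES1 pair... */
out:
	pthread_mutex_unlock(&vm->config_lock);
	return ret;
}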