Lines Matching refs:msr

54 ulong msr);
69 ulong msr = kvmppc_get_msr(vcpu);
70 return (msr & (MSR_IR|MSR_DR)) == MSR_DR;
75 ulong msr = kvmppc_get_msr(vcpu);
79 if ((msr & (MSR_IR|MSR_DR)) != MSR_DR)
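The two hits at lines 69-79 test MSR_IR and MSR_DR together: the condition the surrounding PR code treats as "split real mode" is data translation on (MSR_DR set) with instruction translation off (MSR_IR clear). A minimal standalone sketch of that predicate; the bit positions and the function name are illustrative, the kernel's definitions live in arch/powerpc/include/asm/reg.h:

    #include <stdbool.h>
    #include <stdio.h>

    typedef unsigned long ulong;

    /* Illustrative bit positions, not the authoritative kernel values. */
    #define MSR_IR (1UL << 5)   /* instruction relocation (translation) */
    #define MSR_DR (1UL << 4)   /* data relocation (translation) */

    /* True only when MSR.DR is set and MSR.IR is clear ("split real mode"). */
    static bool is_split_real(ulong msr)
    {
        return (msr & (MSR_IR | MSR_DR)) == MSR_DR;
    }

    int main(void)
    {
        printf("%d %d %d\n",
               is_split_real(MSR_DR),          /* 1: DR only           */
               is_split_real(MSR_IR | MSR_DR), /* 0: both translations */
               is_split_real(0));              /* 0: plain real mode   */
        return 0;
    }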
109 unsigned long msr, pc, new_msr, new_pc;
113 msr = kvmppc_get_msr(vcpu);
120 if (MSR_TM_TRANSACTIONAL(msr))
123 new_msr |= msr & MSR_TS_MASK;
127 kvmppc_set_srr1(vcpu, (msr & SRR1_MSR_BITS) | srr1_flags);
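Lines 109-127 are part of interrupt delivery: the current MSR is captured, a new MSR is built that carries over the guest's transaction-state bits (MSR_TS_MASK) when MSR_TM_TRANSACTIONAL() says the guest was transactional, and the saved MSR bits plus the caller's flags go into SRR1 (SRR0 presumably takes the old PC in lines not matched here). A hedged sketch of that save-and-switch pattern; struct, function, and mask values below are placeholders, not the kernel's:

    #include <stdio.h>

    typedef unsigned long long u64;

    /* Placeholder masks for illustration; the kernel's MSR_TS_MASK and
     * SRR1_MSR_BITS definitions are not reproduced here. */
    #define MSR_TS_MASK   (3ULL << 33)
    #define SRR1_MSR_BITS 0x783f0000ULL

    struct toy_vcpu {
        u64 pc, msr, srr0, srr1;
    };

    /* Save return state into SRR0/SRR1 and switch to the handler context,
     * carrying the guest's transaction-state bits into the new MSR. */
    static void inject_interrupt(struct toy_vcpu *v, u64 new_pc,
                                 u64 base_msr, u64 srr1_flags)
    {
        u64 new_msr = base_msr | (v->msr & MSR_TS_MASK);

        v->srr0 = v->pc;                                 /* return address */
        v->srr1 = (v->msr & SRR1_MSR_BITS) | srr1_flags; /* saved MSR bits */
        v->pc   = new_pc;
        v->msr  = new_msr;
    }

    int main(void)
    {
        struct toy_vcpu v = { .pc = 0x1000,
                              .msr = MSR_TS_MASK | 0x10000000ULL /* demo bits */ };

        inject_interrupt(&v, 0x700, 0, 0);
        printf("srr0=0x%llx srr1=0x%llx msr=0x%llx\n", v.srr0, v.srr1, v.msr);
        return 0;
    }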
466 static void kvmppc_set_msr_pr(struct kvm_vcpu *vcpu, u64 msr)
472 msr = (msr & ~MSR_HV) | MSR_ME;
475 printk(KERN_INFO "KVM: Set MSR to 0x%llx\n", msr);
483 if (!(msr & MSR_PR) && MSR_TM_TRANSACTIONAL(msr))
489 msr &= to_book3s(vcpu)->msr_mask;
490 kvmppc_set_msr_fast(vcpu, msr);
493 if (msr & MSR_POW) {
499 msr &= ~MSR_POW;
500 kvmppc_set_msr_fast(vcpu, msr);
515 if (!(msr & MSR_PR) && vcpu->arch.magic_page_pa) {
518 if (msr & MSR_DR)
534 !(old_msr & MSR_PR) && !(old_msr & MSR_SF) && (msr & MSR_SF)) {
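The hits from lines 466-534 are the PR-mode MSR setter, kvmppc_set_msr_pr(): the guest-written value is sanitized (MSR_HV is stripped, MSR_ME forced on), masked against to_book3s(vcpu)->msr_mask, and MSR_POW is treated as a sleep request and cleared rather than kept in the stored MSR. The later hits preload the magic-page segment when the guest is in kernel mode, and line 534 guards a kernel-mode 32-bit to 64-bit (MSR_SF) transition. A standalone sketch of the sanitize-and-mask step only, with illustrative bit values and a simplified mask:

    #include <stdio.h>

    typedef unsigned long long u64;

    /* Illustrative bit positions (see arch/powerpc/include/asm/reg.h). */
    #define MSR_HV  (1ULL << 60)  /* hypervisor state: never given to a PR guest  */
    #define MSR_ME  (1ULL << 12)  /* machine-check enable: always kept on         */
    #define MSR_POW (1ULL << 18)  /* power management: treated as a sleep request */

    /* Sanitize a guest-written MSR value the way a PR-style setter would:
     * strip MSR_HV, force MSR_ME, then apply the per-vcpu msr_mask. */
    static u64 sanitize_guest_msr(u64 msr, u64 msr_mask)
    {
        msr = (msr & ~MSR_HV) | MSR_ME;
        msr &= msr_mask;

        /* A set MSR_POW means "go to sleep"; in the real code the vcpu blocks
         * until an interrupt is pending and the bit is cleared afterwards, so
         * the stored MSR never keeps it. */
        msr &= ~MSR_POW;
        return msr;
    }

    int main(void)
    {
        u64 mask = ~0ULL & ~MSR_HV;

        printf("0x%llx\n", sanitize_guest_msr(MSR_HV | MSR_POW, mask));
        return 0;
    }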
799 void kvmppc_giveup_ext(struct kvm_vcpu *vcpu, ulong msr)
807 if (msr & MSR_VSX)
808 msr |= MSR_FP | MSR_VEC;
810 msr &= vcpu->arch.guest_owned_ext;
811 if (!msr)
815 printk(KERN_INFO "Giving up ext 0x%lx\n", msr);
818 if (msr & MSR_FP) {
824 if (t->regs->msr & MSR_FP)
830 if (msr & MSR_VEC) {
831 if (current->thread.regs->msr & MSR_VEC)
837 vcpu->arch.guest_owned_ext &= ~(msr | MSR_VSX);
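kvmppc_giveup_ext() (lines 799-837) hands FP/Altivec/VSX state back to the host. Two details show up in the hits: MSR_VSX implies MSR_FP | MSR_VEC because VSX instructions touch both register files, and the requested bits are intersected with guest_owned_ext so only facilities the guest actually owns get flushed. A userspace sketch of just that bookkeeping (the register saving itself is elided, and the bit values are illustrative):

    #include <stdio.h>

    typedef unsigned long ulong;

    /* Illustrative facility bits. */
    #define MSR_FP  (1UL << 13)
    #define MSR_VEC (1UL << 25)
    #define MSR_VSX (1UL << 23)

    /* Work out which facilities actually need to be given up, mirroring the
     * masking logic; returns the bits that were flushed. */
    static ulong facilities_to_give_up(ulong msr, ulong *guest_owned_ext)
    {
        /* VSX uses the FP and vector register files, so giving up VSX
         * means giving up FP and VMX as well. */
        if (msr & MSR_VSX)
            msr |= MSR_FP | MSR_VEC;

        /* Only facilities the guest currently owns matter. */
        msr &= *guest_owned_ext;
        if (!msr)
            return 0;

        /* ...the real code saves FP/VMX state to the vcpu here... */

        *guest_owned_ext &= ~(msr | MSR_VSX);
        return msr;
    }

    int main(void)
    {
        ulong owned = MSR_FP | MSR_VEC | MSR_VSX;
        ulong gave  = facilities_to_give_up(MSR_VSX, &owned);

        printf("gave up 0x%lx, still own 0x%lx\n", gave, owned);
        return 0;
    }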
862 ulong msr)
870 if (!(kvmppc_get_msr(vcpu) & msr)) {
875 if (msr == MSR_VSX) {
889 msr = MSR_FP | MSR_VEC | MSR_VSX;
893 msr &= ~vcpu->arch.guest_owned_ext;
894 if (!msr)
898 printk(KERN_INFO "Loading up ext 0x%lx\n", msr);
901 if (msr & MSR_FP) {
910 if (msr & MSR_VEC) {
921 t->regs->msr |= msr;
922 vcpu->arch.guest_owned_ext |= msr;
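kvmppc_handle_ext() (lines 862-922) is the other direction: on a facility-unavailable exit, if the guest's own MSR does not enable the facility the exit is reflected back to the guest (the body of that branch is outside these hits); otherwise VSX is widened to FP | VEC | VSX, already-owned facilities are skipped, the guest's register state is loaded, the bits are ORed into the host thread's regs->msr, and ownership is recorded in guest_owned_ext. A reduced sketch of that decision flow; the struct, fields, and bit values are stand-ins, not kernel APIs:

    #include <stdbool.h>
    #include <stdio.h>

    typedef unsigned long ulong;

    #define MSR_FP  (1UL << 13)   /* illustrative bit positions */
    #define MSR_VEC (1UL << 25)
    #define MSR_VSX (1UL << 23)

    struct ext_state {
        ulong guest_msr;        /* the guest's architected MSR             */
        ulong guest_owned_ext;  /* facilities whose state is live in HW    */
        ulong host_thread_msr;  /* stand-in for the host thread's regs->msr */
        bool  irq_reflected;    /* stand-in for queueing the interrupt     */
    };

    static void handle_facility_unavailable(struct ext_state *s, ulong msr)
    {
        /* Guest has the facility disabled: reflect the interrupt to it. */
        if (!(s->guest_msr & msr)) {
            s->irq_reflected = true;
            return;
        }

        /* VSX needs the FP and VMX register files loaded as well. */
        if (msr == MSR_VSX)
            msr = MSR_FP | MSR_VEC | MSR_VSX;

        /* Skip facilities whose state is already loaded for the guest. */
        msr &= ~s->guest_owned_ext;
        if (!msr)
            return;

        /* ...the real code loads FP/VMX register state here... */

        s->host_thread_msr |= msr;
        s->guest_owned_ext |= msr;
    }

    int main(void)
    {
        struct ext_state s = { .guest_msr = MSR_FP | MSR_VEC | MSR_VSX };

        handle_facility_unavailable(&s, MSR_VSX);
        printf("owned 0x%lx reflected %d\n", s.guest_owned_ext, s.irq_reflected);
        return 0;
    }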
936 lost_ext = vcpu->arch.guest_owned_ext & ~current->thread.regs->msr;
956 current->thread.regs->msr |= lost_ext;
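Lines 936 and 956 belong to the "lost ext" fixup: facility bits the guest owns but that the host has since cleared from the thread's MSR are computed as a difference and turned back on before re-entering the guest. The core is one mask expression; the names below are placeholders for the real vcpu/thread fields:

    #include <stdio.h>

    typedef unsigned long ulong;

    #define MSR_FP  (1UL << 13)   /* illustrative */
    #define MSR_VEC (1UL << 25)

    /* Bits the guest owns but the host thread's MSR no longer enables. */
    static ulong lost_ext_bits(ulong guest_owned_ext, ulong host_thread_msr)
    {
        return guest_owned_ext & ~host_thread_msr;
    }

    int main(void)
    {
        ulong host_msr = MSR_FP;   /* MSR_VEC got dropped by the host */
        ulong lost = lost_ext_bits(MSR_FP | MSR_VEC, host_msr);

        host_msr |= lost;          /* re-arm before guest entry */
        printf("lost=0x%lx msr=0x%lx\n", lost, host_msr);
        return 0;
    }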
1060 u64 msr = kvmppc_get_msr(vcpu);
1062 kvmppc_set_msr(vcpu, msr | MSR_SE);
1069 u64 msr = kvmppc_get_msr(vcpu);
1071 kvmppc_set_msr(vcpu, msr & ~MSR_SE);
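The last functional hits (lines 1060-1071) arm and disarm single-step by setting and clearing MSR_SE in the guest MSR via kvmppc_set_msr(). The pattern is a plain set/clear of one bit; the position used here is illustrative:

    #include <stdio.h>

    typedef unsigned long long u64;

    #define MSR_SE (1ULL << 10)   /* single-step enable; illustrative position */

    static u64 enable_single_step(u64 msr)  { return msr |  MSR_SE; }
    static u64 disable_single_step(u64 msr) { return msr & ~MSR_SE; }

    int main(void)
    {
        u64 msr = 0;

        msr = enable_single_step(msr);
        msr = disable_single_step(msr);
        printf("0x%llx\n", msr);   /* back to 0 */
        return 0;
    }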
1422 printk(KERN_EMERG "exit_nr=0x%x | pc=0x%lx | msr=0x%lx\n",