Lines matching refs: msr

132 static inline bool is_amd_pmu_msr(unsigned int msr)
138 if ((msr >= MSR_F15H_PERF_CTL &&
139 msr < MSR_F15H_PERF_CTR + (amd_num_counters * 2)) ||
140 (msr >= MSR_K7_EVNTSEL0 &&
141 msr < MSR_K7_PERFCTR0 + amd_num_counters))
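
A minimal stand-alone sketch of the range check visible at lines 132-141. The MSR constants and the amd_num_counters value are assumptions standing in for the kernel's msr-index.h definitions and the counter count probed at init; only the comparison logic is taken from the hits above.

#include <stdbool.h>
#include <stdio.h>

/* Assumed register addresses, mirroring msr-index.h */
#define MSR_K7_EVNTSEL0    0xc0010000u
#define MSR_K7_PERFCTR0    0xc0010004u
#define MSR_F15H_PERF_CTL  0xc0010200u
#define MSR_F15H_PERF_CTR  0xc0010201u

static unsigned int amd_num_counters = 4;  /* placeholder; the real value is probed at init */

/* True when msr lies in the Fam15h CTL/CTR pair bank or in the legacy K7 bank. */
static inline bool is_amd_pmu_msr(unsigned int msr)
{
	if ((msr >= MSR_F15H_PERF_CTL &&
	     msr < MSR_F15H_PERF_CTR + (amd_num_counters * 2)) ||
	    (msr >= MSR_K7_EVNTSEL0 &&
	     msr < MSR_K7_PERFCTR0 + amd_num_counters))
		return true;
	return false;
}

int main(void)
{
	printf("K7 EVNTSEL0: %d\n", is_amd_pmu_msr(MSR_K7_EVNTSEL0)); /* expect 1 */
	printf("unrelated:   %d\n", is_amd_pmu_msr(0x10));            /* expect 0 */
	return 0;
}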
198 static bool xen_intel_pmu_emulate(unsigned int msr, u64 *val, int type,
214 switch (msr) {
252 if (msr == MSR_CORE_PERF_GLOBAL_OVF_CTRL)
261 static bool xen_amd_pmu_emulate(unsigned int msr, u64 *val, bool is_read)
274 ((msr >= MSR_K7_EVNTSEL0) && (msr <= MSR_K7_PERFCTR3)))
275 msr = get_fam15h_addr(msr);
279 if (msr == amd_ctrls_base + off) {
283 } else if (msr == amd_counters_base + off) {
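
The fragments at lines 261-283 show legacy K7 event-select/counter addresses being folded onto their Fam15h aliases before a per-counter match against amd_ctrls_base and amd_counters_base. Below is a hedged, user-space sketch of that pattern; get_fam15h_addr's body, the amd_msr_step value, k7_counters_mirrored, and the shadow_* arrays are assumptions filling in around the visible lines, not copied from the file.

#include <stdbool.h>
#include <stdint.h>

#define MSR_K7_EVNTSEL0    0xc0010000u  /* assumed values */
#define MSR_K7_PERFCTR0    0xc0010004u
#define MSR_K7_PERFCTR3    0xc0010007u
#define MSR_F15H_PERF_CTL  0xc0010200u
#define MSR_F15H_PERF_CTR  0xc0010201u

/* Assumed layout: Fam15h interleaves CTL/CTR pairs two MSRs apart. */
static const uint32_t amd_ctrls_base    = MSR_F15H_PERF_CTL;
static const uint32_t amd_counters_base = MSR_F15H_PERF_CTR;
static const unsigned int amd_msr_step  = 2;
static const unsigned int amd_num_counters = 4;
static bool k7_counters_mirrored = true;

/* Stand-in for the shared vPMU shadow state. */
static uint64_t shadow_ctrl[4], shadow_ctr[4];

/* Map a legacy K7 event-select/counter address onto its Fam15h alias. */
static uint32_t get_fam15h_addr(uint32_t msr)
{
	if (msr >= MSR_K7_PERFCTR0)
		return amd_counters_base + (msr - MSR_K7_PERFCTR0) * amd_msr_step;
	return amd_ctrls_base + (msr - MSR_K7_EVNTSEL0) * amd_msr_step;
}

static bool xen_amd_pmu_emulate(unsigned int msr, uint64_t *val, bool is_read)
{
	unsigned int i, off = 0;

	if (k7_counters_mirrored &&
	    (msr >= MSR_K7_EVNTSEL0) && (msr <= MSR_K7_PERFCTR3))
		msr = get_fam15h_addr(msr);

	for (i = 0; i < amd_num_counters; i++, off += amd_msr_step) {
		if (msr == amd_ctrls_base + off) {
			if (is_read)
				*val = shadow_ctrl[i];
			else
				shadow_ctrl[i] = *val;
			return true;
		} else if (msr == amd_counters_base + off) {
			if (is_read)
				*val = shadow_ctr[i];
			else
				shadow_ctr[i] = *val;
			return true;
		}
	}
	return false;
}

int main(void)
{
	uint64_t v = 0x1234;
	/* Writing legacy MSR_K7_EVNTSEL0 lands in shadow_ctrl[0] via the alias. */
	xen_amd_pmu_emulate(MSR_K7_EVNTSEL0, &v, false);
	return shadow_ctrl[0] == 0x1234 ? 0 : 1;
}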
302 static bool pmu_msr_chk_emulated(unsigned int msr, uint64_t *val, bool is_read,
307 if (is_amd_pmu_msr(msr))
308 *emul = xen_amd_pmu_emulate(msr, val, is_read);
309 else if (is_intel_pmu_msr(msr, &type, &index))
310 *emul = xen_intel_pmu_emulate(msr, val, type, index, is_read);
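
Lines 302-310 suggest a dispatcher of the shape sketched below: the return value says whether the MSR belongs to a PMU at all, while *emul reports whether the access was satisfied from emulated state. The one-line stubs exist only so the sketch compiles on its own; the real classifiers and emulators are the functions hit above.

#include <stdbool.h>
#include <stdint.h>

/* Stubs standing in for the real classification/emulation helpers. */
static bool is_amd_pmu_msr(unsigned int msr) { (void)msr; return false; }
static bool is_intel_pmu_msr(unsigned int msr, int *type, int *index)
{ (void)msr; (void)type; (void)index; return false; }
static bool xen_amd_pmu_emulate(unsigned int msr, uint64_t *val, bool is_read)
{ (void)msr; (void)val; (void)is_read; return false; }
static bool xen_intel_pmu_emulate(unsigned int msr, uint64_t *val, int type,
				  int index, bool is_read)
{ (void)msr; (void)val; (void)type; (void)index; (void)is_read; return false; }

/*
 * Returns true when msr is a PMU register (AMD or Intel); *emul then says
 * whether the access was satisfied from the emulated state.
 */
static bool pmu_msr_chk_emulated(unsigned int msr, uint64_t *val, bool is_read,
				 bool *emul)
{
	int type = 0, index = 0;

	if (is_amd_pmu_msr(msr))
		*emul = xen_amd_pmu_emulate(msr, val, is_read);
	else if (is_intel_pmu_msr(msr, &type, &index))
		*emul = xen_intel_pmu_emulate(msr, val, type, index, is_read);
	else
		return false;

	return true;
}

int main(void)
{
	uint64_t val = 0;
	bool emul;
	/* A non-PMU MSR is not handled here at all. */
	return pmu_msr_chk_emulated(0x10, &val, true, &emul) ? 1 : 0;
}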
317 bool pmu_msr_read(unsigned int msr, uint64_t *val, int *err)
321 if (!pmu_msr_chk_emulated(msr, val, true, &emulated))
325 *val = err ? native_read_msr_safe(msr, err)
326 : native_read_msr(msr);
332 bool pmu_msr_write(unsigned int msr, uint32_t low, uint32_t high, int *err)
337 if (!pmu_msr_chk_emulated(msr, &val, false, &emulated))
342 *err = native_write_msr_safe(msr, low, high);
344 native_write_msr(msr, low, high);
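
A hedged reconstruction of the two wrappers behind the hits at lines 317-326 and 332-344: each first asks pmu_msr_chk_emulated() whether the access targets a PMU register, and only a non-emulated PMU access falls through to the native MSR accessors. The native_* and pmu_msr_chk_emulated stubs below are placeholders standing in for the real RDMSR/WRMSR wrappers so the sketch stands alone.

#include <stdbool.h>
#include <stdint.h>

/* Placeholder stubs; the kernel versions issue real RDMSR/WRMSR. */
static bool pmu_msr_chk_emulated(unsigned int msr, uint64_t *val, bool is_read,
				 bool *emul)
{ (void)msr; (void)val; (void)is_read; *emul = false; return true; }
static uint64_t native_read_msr(unsigned int msr) { (void)msr; return 0; }
static uint64_t native_read_msr_safe(unsigned int msr, int *err)
{ (void)msr; *err = 0; return 0; }
static int native_write_msr_safe(unsigned int msr, uint32_t lo, uint32_t hi)
{ (void)msr; (void)lo; (void)hi; return 0; }
static void native_write_msr(unsigned int msr, uint32_t lo, uint32_t hi)
{ (void)msr; (void)lo; (void)hi; }

/* Returns true if the MSR was handled here, either emulated or natively. */
bool pmu_msr_read(unsigned int msr, uint64_t *val, int *err)
{
	bool emulated;

	if (!pmu_msr_chk_emulated(msr, val, true, &emulated))
		return false;

	if (!emulated)
		*val = err ? native_read_msr_safe(msr, err)
			   : native_read_msr(msr);

	return true;
}

bool pmu_msr_write(unsigned int msr, uint32_t low, uint32_t high, int *err)
{
	uint64_t val = ((uint64_t)high << 32) | low;
	bool emulated;

	if (!pmu_msr_chk_emulated(msr, &val, false, &emulated))
		return false;

	if (!emulated) {
		if (err)
			*err = native_write_msr_safe(msr, low, high);
		else
			native_write_msr(msr, low, high);
	}

	return true;
}

int main(void)
{
	uint64_t v;
	int err = 0;

	pmu_msr_read(0xc1, &v, &err);        /* hypothetical MSR number */
	pmu_msr_write(0xc1, 0x1, 0x0, &err);
	return err;
}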
358 uint32_t msr;
361 msr = amd_counters_base + (counter * amd_msr_step);
362 return native_read_msr_safe(msr, &err);
379 uint32_t msr;
383 msr = MSR_CORE_PERF_FIXED_CTR0 + (counter & 0xffff);
385 msr = MSR_IA32_PERFCTR0 + counter;
387 return native_read_msr_safe(msr, &err);
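
The last two groups of hits come from the counter readers. The sketch below shows only the index-to-MSR mapping they imply: AMD counters sit amd_msr_step apart above amd_counters_base, while on Intel a type bit in the counter index (assumed here to be bit 30, as in the architectural RDPMC encoding) selects the fixed-counter bank and the low bits pick the counter within it. The constant values are assumptions mirroring msr-index.h.

#include <stdint.h>
#include <stdio.h>

#define MSR_IA32_PERFCTR0        0x000000c1u  /* assumed msr-index.h values */
#define MSR_CORE_PERF_FIXED_CTR0 0x00000309u
#define MSR_F15H_PERF_CTR        0xc0010201u

static const uint32_t amd_counters_base = MSR_F15H_PERF_CTR;
static const unsigned int amd_msr_step = 2;   /* CTL/CTR pairs are interleaved */

/* Assumed type bit: RDPMC selects fixed counters with bit 30 of the index. */
#define PMC_FIXED_BIT (1u << 30)

static uint32_t amd_pmc_to_msr(int counter)
{
	return amd_counters_base + (counter * amd_msr_step);
}

static uint32_t intel_pmc_to_msr(int counter)
{
	if (counter & PMC_FIXED_BIT)
		return MSR_CORE_PERF_FIXED_CTR0 + (counter & 0xffff);
	return MSR_IA32_PERFCTR0 + counter;
}

int main(void)
{
	printf("AMD   PMC2 -> %#x\n", amd_pmc_to_msr(2));                  /* 0xc0010205 */
	printf("Intel GP1  -> %#x\n", intel_pmc_to_msr(1));                /* 0xc2 */
	printf("Intel FIX1 -> %#x\n", intel_pmc_to_msr(PMC_FIXED_BIT | 1)); /* 0x30a */
	return 0;
}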