#ifndef __ASM_MSR_H
#define __ASM_MSR_H

#include <asm/msr-index.h>

#ifdef __KERNEL__
#ifndef __ASSEMBLY__

#include <asm/errno.h>

static inline unsigned long long native_read_msr(unsigned int msr)
{
	unsigned long long val;

	/* "=A" returns the 64-bit result in the edx:eax register pair */
	asm volatile("rdmsr" : "=A" (val) : "c" (msr));
	return val;
}

static inline unsigned long long native_read_msr_safe(unsigned int msr,
						      int *err)
{
	unsigned long long val;

	asm volatile("2: rdmsr ; xorl %0,%0\n"
		     "1:\n\t"
		     ".section .fixup,\"ax\"\n\t"
		     "3:  movl %3,%0 ; jmp 1b\n\t"
		     ".previous\n\t"
		     ".section __ex_table,\"a\"\n"
		     "   .align 4\n\t"
		     "   .long 2b,3b\n\t"
		     ".previous"
		     : "=r" (*err), "=A" (val)
		     : "c" (msr), "i" (-EFAULT));

	return val;
}
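
/*
 * How the *_safe variants recover: the __ex_table entry above pairs the
 * address of the rdmsr at label 2 with the fixup code at label 3.  If a
 * non-existent MSR raises #GP, the exception handler finds that entry,
 * the fixup stores -EFAULT in the error output, and execution resumes
 * at label 1 as if the instruction had completed.
 * native_write_msr_safe() below uses the same pattern for wrmsr.
 */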

static inline void native_write_msr(unsigned int msr, unsigned long long val)
{
	asm volatile("wrmsr" : : "c" (msr), "A" (val));
}

static inline int native_write_msr_safe(unsigned int msr,
					unsigned long long val)
{
	int err;
	asm volatile("2: wrmsr ; xorl %0,%0\n"
		     "1:\n\t"
		     ".section .fixup,\"ax\"\n\t"
		     "3:  movl %4,%0 ; jmp 1b\n\t"
		     ".previous\n\t"
		     ".section __ex_table,\"a\"\n"
		     "   .align 4\n\t"
		     "   .long 2b,3b\n\t"
		     ".previous"
		     : "=a" (err)
		     : "c" (msr), "0" ((u32)val), "d" ((u32)(val>>32)),
		       "i" (-EFAULT));
	return err;
}

static inline unsigned long long native_read_tsc(void)
{
	unsigned long long val;
	asm volatile("rdtsc" : "=A" (val));
	return val;
}

static inline unsigned long long native_read_pmc(int counter)
{
	unsigned long long val;

	/* rdpmc reads the performance counter selected by %ecx */
	asm volatile("rdpmc" : "=A" (val) : "c" (counter));
	return val;
}

#ifdef CONFIG_PARAVIRT
#include <asm/paravirt.h>
#else
#include <linux/errno.h>
/*
 * Access to machine-specific registers (available on 586 and better only).
 * Note: the rd* operations modify the parameters directly (without using
 * pointer indirection); this allows gcc to optimize better.
 */

#define rdmsr(msr,val1,val2)						\
	do {								\
		u64 __val = native_read_msr(msr);			\
		(val1) = (u32)__val;					\
		(val2) = (u32)(__val >> 32);				\
	} while (0)
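
/*
 * Usage sketch (illustrative only): read an MSR into its 32-bit halves
 * and write it back unchanged.  MSR_IA32_APICBASE comes from
 * <asm/msr-index.h>:
 *
 *	u32 lo, hi;
 *	rdmsr(MSR_IA32_APICBASE, lo, hi);
 *	wrmsr(MSR_IA32_APICBASE, lo, hi);
 */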

static inline void wrmsr(u32 __msr, u32 __low, u32 __high)
{
	native_write_msr(__msr, ((u64)__high << 32) | __low);
}

#define rdmsrl(msr,val)							\
	((val) = native_read_msr(msr))

#define wrmsrl(msr,val)	native_write_msr(msr, val)

/* wrmsr with exception handling */
static inline int wrmsr_safe(u32 __msr, u32 __low, u32 __high)
{
	return native_write_msr_safe(__msr, ((u64)__high << 32) | __low);
}
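
/*
 * Usage sketch (illustrative only): a safe write returns 0 on success
 * and -EFAULT if the MSR does not exist, instead of taking a fatal #GP:
 *
 *	if (wrmsr_safe(MSR_IA32_APICBASE, lo, hi))
 *		printk(KERN_WARNING "MSR write failed\n");
 */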

/* rdmsr with exception handling */
#define rdmsr_safe(msr,p1,p2)						\
	({								\
		int __err;						\
		u64 __val = native_read_msr_safe(msr, &__err);		\
		(*p1) = (u32)__val;					\
		(*p2) = (u32)(__val >> 32);				\
		__err;							\
	})
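
/*
 * Usage sketch (illustrative only): probe for an MSR that may not be
 * implemented on the current CPU:
 *
 *	u32 lo, hi;
 *	if (rdmsr_safe(MSR_IA32_PLATFORM_ID, &lo, &hi) == 0)
 *		... the MSR exists and lo/hi are valid ...
 */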

#define rdtscl(low)						\
	((low) = (u32)native_read_tsc())

#define rdtscll(val)						\
	((val) = native_read_tsc())
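
/*
 * Usage sketch (illustrative only): a crude cycle count around a code
 * section.  rdtsc is not a serializing instruction, so this is only
 * approximate:
 *
 *	unsigned long long t1, t2;
 *	rdtscll(t1);
 *	do_something();
 *	rdtscll(t2);
 *	cycles = t2 - t1;
 */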

/* 0x10 is MSR_IA32_TSC */
#define write_tsc(val1,val2) wrmsr(0x10, val1, val2)

#define rdpmc(counter,low,high)					\
	do {							\
		u64 _l = native_read_pmc(counter);		\
		(low)  = (u32)_l;				\
		(high) = (u32)(_l >> 32);			\
	} while (0)
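
/*
 * Usage sketch (illustrative only): read performance counter 0.  Note
 * that rdpmc raises #GP at CPL > 0 unless CR4.PCE is set, and for
 * out-of-range counter indices:
 *
 *	u32 lo, hi;
 *	rdpmc(0, lo, hi);
 */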
#endif	/* !CONFIG_PARAVIRT */

#ifdef CONFIG_SMP
void rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h);
void wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h);
int rdmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h);
int wrmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h);
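
/*
 * Usage sketch (illustrative only): read an MSR on another CPU.  The
 * SMP implementations run the access on the target CPU (via a cross-CPU
 * function call), so they must not be used from contexts that cannot
 * wait:
 *
 *	u32 lo, hi;
 *	rdmsr_on_cpu(1, MSR_IA32_APICBASE, &lo, &hi);
 */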
#else  /*  CONFIG_SMP  */
static inline void rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h)
{
	rdmsr(msr_no, *l, *h);
}
static inline void wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h)
{
	wrmsr(msr_no, l, h);
}
static inline int rdmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h)
{
	return rdmsr_safe(msr_no, l, h);
}
static inline int wrmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h)
{
	return wrmsr_safe(msr_no, l, h);
}
#endif  /*  CONFIG_SMP  */
#endif	/* !__ASSEMBLY__ */
#endif	/* __KERNEL__ */
#endif	/* __ASM_MSR_H */