#ifndef __ASM_X86_MSR_H_
#define __ASM_X86_MSR_H_

#include <asm/msr-index.h>

#ifndef __ASSEMBLY__
# include <linux/types.h>
#endif

#ifdef __KERNEL__
#ifndef __ASSEMBLY__

#include <asm/asm.h>
#include <asm/errno.h>
static inline unsigned long long native_read_tscp(unsigned int *aux)
{
	unsigned long low, high;
	asm volatile(".byte 0x0f,0x01,0xf9"
		     : "=a" (low), "=d" (high), "=c" (*aux));
	return low | ((u64)high << 32);
}
/*
 * The i386 calling convention returns a 64-bit value in edx:eax, while
 * x86_64 returns it in rax. Also, the "A" constraint does not really
 * mean rdx:rax on x86_64, so we need specialized behaviour for each
 * of them.
 */
#ifdef CONFIG_X86_64
#define DECLARE_ARGS(val, low, high)	unsigned low, high
#define EAX_EDX_VAL(val, low, high)	(low | ((u64)(high) << 32))
#define EAX_EDX_ARGS(val, low, high)	"a" (low), "d" (high)
#define EAX_EDX_RET(val, low, high)	"=a" (low), "=d" (high)
#else
#define DECLARE_ARGS(val, low, high)	unsigned long long val
#define EAX_EDX_VAL(val, low, high)	(val)
#define EAX_EDX_ARGS(val, low, high)	"A" (val)
#define EAX_EDX_RET(val, low, high)	"=A" (val)
#endif
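/*
 * For illustration (not in the original header): on x86_64,
 * native_read_msr() below expands to roughly
 *
 *	unsigned low, high;
 *	asm volatile("rdmsr" : "=a" (low), "=d" (high) : "c" (msr));
 *	return low | ((u64)high << 32);
 *
 * while on i386 the "A" constraint keeps the whole 64-bit value in
 * edx:eax, so no manual reassembly is needed.
 */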
static inline unsigned long long native_read_msr(unsigned int msr)
{
	DECLARE_ARGS(val, low, high);

	asm volatile("rdmsr" : EAX_EDX_RET(val, low, high) : "c" (msr));
	return EAX_EDX_VAL(val, low, high);
}
static inline unsigned long long native_read_msr_safe(unsigned int msr,
						      int *err)
{
	DECLARE_ARGS(val, low, high);

	asm volatile("2: rdmsr ; xor %0,%0\n"
		     "1:\n\t"
		     ".section .fixup,\"ax\"\n\t"
		     "3: mov %3,%0 ; jmp 1b\n\t"
		     ".previous\n\t"
		     ".section __ex_table,\"a\"\n"
		     _ASM_ALIGN "\n\t" _ASM_PTR " 2b,3b\n\t"
		     ".previous"
		     : "=r" (*err), EAX_EDX_RET(val, low, high)
		     : "c" (msr), "i" (-EFAULT));
	return EAX_EDX_VAL(val, low, high);
}
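/*
 * For illustration only: a fault leaves *err set to -EFAULT, so a caller
 * probing for a possibly-missing MSR does something like this
 * (MSR_IA32_PLATFORM_ID comes from <asm/msr-index.h>):
 *
 *	int err;
 *	u64 val = native_read_msr_safe(MSR_IA32_PLATFORM_ID, &err);
 *	if (err)
 *		val = 0;
 */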
static inline void native_write_msr(unsigned int msr,
				    unsigned low, unsigned high)
{
	asm volatile("wrmsr" : : "c" (msr), "a" (low), "d" (high));
}
static inline int native_write_msr_safe(unsigned int msr,
					unsigned low, unsigned high)
{
	int err;
	asm volatile("2: wrmsr ; xor %0,%0\n"
		     "1:\n\t"
		     ".section .fixup,\"ax\"\n\t"
		     "3: mov %4,%0 ; jmp 1b\n\t"
		     ".previous\n\t"
		     ".section __ex_table,\"a\"\n"
		     _ASM_ALIGN "\n\t" _ASM_PTR " 2b,3b\n\t"
		     ".previous"
		     : "=a" (err)
		     : "c" (msr), "0" (low), "d" (high),
		       "i" (-EFAULT));
	return err;
}
extern unsigned long long native_read_tsc(void);
static inline unsigned long long native_read_pmc(int counter)
{
	DECLARE_ARGS(val, low, high);

	asm volatile("rdpmc" : EAX_EDX_RET(val, low, high) : "c" (counter));
	return EAX_EDX_VAL(val, low, high);
}
#ifdef CONFIG_PARAVIRT
#include <asm/paravirt.h>
#else
#include <linux/errno.h>
/*
 * Access to machine-specific registers (available on 586 and better only).
 * Note: the rd* operations modify the parameters directly (without using
 * pointer indirection); this allows gcc to optimize better.
 */
#define rdmsr(msr,val1,val2)					\
	do {							\
		u64 __val = native_read_msr(msr);		\
		(val1) = (u32)__val;				\
		(val2) = (u32)(__val >> 32);			\
	} while (0)
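/*
 * Example for illustration (MSR_IA32_APICBASE comes from
 * <asm/msr-index.h>):
 *
 *	u32 lo, hi;
 *	rdmsr(MSR_IA32_APICBASE, lo, hi);
 *
 * lo and hi receive bits 0-31 and 32-63 of the MSR; note that the
 * arguments are plain lvalues, not pointers.
 */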
static inline void wrmsr(unsigned msr, unsigned low, unsigned high)
{
	native_write_msr(msr, low, high);
}
#define rdmsrl(msr,val)						\
	((val) = native_read_msr(msr))
#define wrmsrl(msr, val)						\
	native_write_msr(msr, (u32)((u64)(val)), (u32)((u64)(val) >> 32))
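/*
 * For illustration: the 64-bit variants avoid the manual split, e.g. a
 * (no-op) read and write-back of MSR_IA32_MISC_ENABLE from
 * <asm/msr-index.h>:
 *
 *	u64 misc;
 *	rdmsrl(MSR_IA32_MISC_ENABLE, misc);
 *	wrmsrl(MSR_IA32_MISC_ENABLE, misc);
 */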
/* wrmsr with exception handling */
static inline int wrmsr_safe(unsigned msr, unsigned low, unsigned high)
{
	return native_write_msr_safe(msr, low, high);
}
/* rdmsr with exception handling */
#define rdmsr_safe(msr,p1,p2)					\
	({							\
		int __err;					\
		u64 __val = native_read_msr_safe(msr, &__err);	\
		(*p1) = (u32)__val;				\
		(*p2) = (u32)(__val >> 32);			\
		__err;						\
	})
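/*
 * For illustration: rdmsr_safe() is a statement expression that
 * evaluates to the error code, so it can be tested directly
 * (-EIO here is just an example of a caller-chosen return value):
 *
 *	u32 lo, hi;
 *	if (rdmsr_safe(msr, &lo, &hi))
 *		return -EIO;
 */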
#define rdtscl(low)						\
	((low) = (u32)native_read_tsc())

#define rdtscll(val)						\
	((val) = native_read_tsc())
#define rdpmc(counter,low,high)					\
	do {							\
		u64 _l = native_read_pmc(counter);		\
		(low)  = (u32)_l;				\
		(high) = (u32)(_l >> 32);			\
	} while (0)
#define rdtscp(low, high, aux)					\
	do {							\
		unsigned long long _val = native_read_tscp(&(aux));	\
		(low) = (u32)_val;				\
		(high) = (u32)(_val >> 32);			\
	} while (0)
#define rdtscpll(val, aux) (val) = native_read_tscp(&(aux))
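/*
 * For illustration: rdtscp also returns the contents of IA32_TSC_AUX in
 * aux, which write_rdtscp_aux() below is typically used to load with the
 * CPU number, so callers can detect migration between two reads:
 *
 *	unsigned long long t;
 *	unsigned int cpu;
 *	rdtscpll(t, cpu);
 */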
#endif /* !CONFIG_PARAVIRT */
#define checking_wrmsrl(msr,val) wrmsr_safe(msr,(u32)(val),(u32)((val)>>32))

/* 0x10 is MSR_IA32_TSC and 0xc0000103 is MSR_TSC_AUX in <asm/msr-index.h> */
#define write_tsc(val1,val2) wrmsr(0x10, val1, val2)

#define write_rdtscp_aux(val) wrmsr(0xc0000103, val, 0)
#ifdef CONFIG_SMP
void rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h);
void wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h);
int rdmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h);
int wrmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h);
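/*
 * Usage for illustration: since MSRs are per-CPU state, these helpers
 * run the access on a specific CPU (note the pointer arguments, unlike
 * the rdmsr() macro above):
 *
 *	u32 lo, hi;
 *	rdmsr_on_cpu(1, MSR_IA32_APICBASE, &lo, &hi);
 */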
#else /* CONFIG_SMP */
static inline void rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h)
{
	rdmsr(msr_no, *l, *h);
}
static inline void wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h)
{
	wrmsr(msr_no, l, h);
}
static inline int rdmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h)
{
	return rdmsr_safe(msr_no, l, h);
}
static inline int wrmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h)
{
	return wrmsr_safe(msr_no, l, h);
}
#endif /* CONFIG_SMP */
#endif /* __ASSEMBLY__ */
#endif /* __KERNEL__ */

#endif /* __ASM_X86_MSR_H_ */