#ifndef __ASM_X86_MSR_H_
#define __ASM_X86_MSR_H_

#include <asm/msr-index.h>

#ifndef __ASSEMBLY__
# include <linux/types.h>
#endif

#ifdef __KERNEL__
#ifndef __ASSEMBLY__
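/*
 * rdtscp is emitted as raw opcode bytes (0x0f, 0x01, 0xf9) because old
 * assemblers do not know the mnemonic.  It returns the TSC in edx:eax
 * and the contents of MSR_TSC_AUX (0xc0000103) in ecx.
 */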
static inline unsigned long long native_read_tscp(int *aux)
{
	unsigned long low, high;
	asm volatile(".byte 0x0f,0x01,0xf9"
		     : "=a" (low), "=d" (high), "=c" (*aux));
	return low | ((u64)high << 32);
}

#define rdtscp(low, high, aux)					\
	do {							\
		unsigned long long _val = native_read_tscp(&(aux)); \
		(low) = (u32)_val;				\
		(high) = (u32)(_val >> 32);			\
	} while (0)

#define rdtscpll(val, aux) ((val) = native_read_tscp(&(aux)))
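
/*
 * Illustrative use (not part of this header): read the TSC together
 * with the per-cpu tag the kernel stored in MSR_TSC_AUX (see
 * write_rdtscp_aux() below):
 *
 *	unsigned long long t;
 *	int cpu;
 *
 *	rdtscpll(t, cpu);
 */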
#endif
#endif

#ifdef __i386__

#ifdef __KERNEL__
#ifndef __ASSEMBLY__

#include <asm/errno.h>

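/*
 * rdmsr/wrmsr operate on a 64-bit value split across edx:eax; on i386
 * the "=A" constraint binds exactly that register pair, so the 64-bit
 * result needs no explicit shifting here.
 */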
static inline unsigned long long native_read_msr(unsigned int msr)
{
	unsigned long long val;

	asm volatile("rdmsr" : "=A" (val) : "c" (msr));
	return val;
}

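/*
 * The _safe variants survive the #GP that a non-existent MSR raises:
 * the __ex_table entry pairs the faulting rdmsr/wrmsr (label 2) with a
 * fixup (label 3) that stores -EFAULT in the error output and resumes
 * at label 1.
 */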
static inline unsigned long long native_read_msr_safe(unsigned int msr,
						      int *err)
{
	unsigned long long val;

	asm volatile("2: rdmsr ; xorl %0,%0\n"
		     "1:\n\t"
		     ".section .fixup,\"ax\"\n\t"
		     "3: movl %3,%0 ; jmp 1b\n\t"
		     ".previous\n\t"
		     ".section __ex_table,\"a\"\n"
		     "   .align 4\n\t"
		     "   .long 2b,3b\n\t"
		     ".previous"
		     : "=r" (*err), "=A" (val)
		     : "c" (msr), "i" (-EFAULT));

	return val;
}

static inline void native_write_msr(unsigned int msr,
				    unsigned low, unsigned high)
{
	asm volatile("wrmsr" : : "c" (msr), "a" (low), "d" (high));
}

static inline int native_write_msr_safe(unsigned int msr,
					unsigned low, unsigned high)
{
	int err;
	asm volatile("2: wrmsr ; xorl %0,%0\n"
		     "1:\n\t"
		     ".section .fixup,\"ax\"\n\t"
		     "3: movl %4,%0 ; jmp 1b\n\t"
		     ".previous\n\t"
		     ".section __ex_table,\"a\"\n"
		     "   .align 4\n\t"
		     "   .long 2b,3b\n\t"
		     ".previous"
		     : "=a" (err)
		     : "c" (msr), "0" (low), "d" (high),
		       "i" (-EFAULT));
	return err;
}

static inline unsigned long long native_read_tsc(void)
{
	unsigned long long val;
	asm volatile("rdtsc" : "=A" (val));
	return val;
}

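/*
 * rdpmc reads the performance-monitoring counter selected by ecx into
 * edx:eax, so the same "=A" pairing applies.
 */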
static inline unsigned long long native_read_pmc(int counter)
{
	unsigned long long val;
	asm volatile("rdpmc" : "=A" (val) : "c" (counter));
	return val;
}

#ifdef CONFIG_PARAVIRT
#include <asm/paravirt.h>
#else
#include <linux/errno.h>
/*
 * Access to model-specific registers (available on 586 and better only).
 * Note: the rd* operations modify the parameters directly (without using
 * pointer indirection), which allows gcc to optimize better.
 */

#define rdmsr(msr, val1, val2)					\
	do {							\
		u64 __val = native_read_msr(msr);		\
		(val1) = (u32)__val;				\
		(val2) = (u32)(__val >> 32);			\
	} while (0)

static inline void wrmsr(unsigned msr, unsigned low, unsigned high)
{
	native_write_msr(msr, low, high);
}
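
/*
 * Illustrative use (not part of this header): read and write back the
 * APIC base MSR, using the constant from <asm/msr-index.h>:
 *
 *	u32 lo, hi;
 *
 *	rdmsr(MSR_IA32_APICBASE, lo, hi);
 *	wrmsr(MSR_IA32_APICBASE, lo, hi);
 */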

#define rdmsrl(msr, val)					\
	((val) = native_read_msr(msr))

#define wrmsrl(msr, val)					\
	native_write_msr(msr, (u32)((u64)(val)), (u32)((u64)(val) >> 32))

/* wrmsr with exception handling */
static inline int wrmsr_safe(unsigned msr, unsigned low, unsigned high)
{
	return native_write_msr_safe(msr, low, high);
}

/* rdmsr with exception handling */
#define rdmsr_safe(msr, p1, p2)					\
	({							\
		int __err;					\
		u64 __val = native_read_msr_safe(msr, &__err);	\
		(*p1) = (u32)__val;				\
		(*p2) = (u32)(__val >> 32);			\
		__err;						\
	})
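
/*
 * Illustrative use: probe an MSR that may not exist on this CPU
 * without oopsing on the #GP:
 *
 *	u32 lo, hi;
 *
 *	if (rdmsr_safe(MSR_IA32_PLATFORM_ID, &lo, &hi))
 *		return -ENODEV;
 */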

#define rdtscl(low)						\
	((low) = (u32)native_read_tsc())

#define rdtscll(val)						\
	((val) = native_read_tsc())

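/* MSR 0x10 is IA32_TIME_STAMP_COUNTER; writing it reloads the TSC */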
#define write_tsc(val1, val2) wrmsr(0x10, val1, val2)

#define rdpmc(counter, low, high)				\
	do {							\
		u64 _l = native_read_pmc(counter);		\
		(low) = (u32)_l;				\
		(high) = (u32)(_l >> 32);			\
	} while (0)
#endif /* !CONFIG_PARAVIRT */

#endif /* ! __ASSEMBLY__ */
#endif /* __KERNEL__ */

#else /* __i386__ */

#ifndef __ASSEMBLY__
#include <linux/errno.h>
/*
 * Access to model-specific registers (available on 586 and better only)
 * Note: the rd* operations modify the parameters directly (without using
 * pointer indirection), which allows gcc to optimize better.
 */

#define rdmsr(msr, val1, val2)					\
	__asm__ __volatile__("rdmsr"				\
			     : "=a" (val1), "=d" (val2)		\
			     : "c" (msr))

#define rdmsrl(msr, val) do {					\
	unsigned long a__, b__;					\
	__asm__ __volatile__("rdmsr"				\
			     : "=a" (a__), "=d" (b__)		\
			     : "c" (msr));			\
	(val) = a__ | (b__ << 32);				\
} while (0)

#define wrmsr(msr, val1, val2)					\
	__asm__ __volatile__("wrmsr"				\
			     : /* no outputs */			\
			     : "c" (msr), "a" (val1), "d" (val2))

#define wrmsrl(msr, val) wrmsr(msr, (__u32)((__u64)(val)), ((__u64)(val)) >> 32)
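
/*
 * Illustrative use: wrmsrl() takes a single 64-bit value and performs
 * the edx:eax split itself, e.g. when loading a syscall entry point
 * (my_syscall_entry is a stand-in name):
 *
 *	wrmsrl(MSR_LSTAR, (unsigned long)my_syscall_entry);
 */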

#define rdtsc(low, high)					\
	__asm__ __volatile__("rdtsc" : "=a" (low), "=d" (high))

#define rdtscl(low)						\
	__asm__ __volatile__("rdtsc" : "=a" (low) : : "edx")

#define rdtscll(val) do {					\
	unsigned int __a, __d;					\
	__asm__ __volatile__("rdtsc" : "=a" (__a), "=d" (__d)); \
	(val) = ((unsigned long)__a) | (((unsigned long)__d) << 32); \
} while (0)

#define write_tsc(val1, val2) wrmsr(0x10, val1, val2)

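/*
 * 0xc0000103 is MSR_TSC_AUX: whatever is written here comes back in
 * ecx on every rdtscp.
 */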
#define write_rdtscp_aux(val) wrmsr(0xc0000103, val, 0)

#define rdpmc(counter, low, high)				\
	__asm__ __volatile__("rdpmc"				\
			     : "=a" (low), "=d" (high)		\
			     : "c" (counter))

#ifdef __KERNEL__

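/*
 * On 64-bit kernels the exception-table entries are 8-byte addresses,
 * hence the .align 8 / .quad directives below (the 32-bit variants
 * above use .align 4 / .long).
 */
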
/* wrmsr with exception handling */
#define wrmsr_safe(msr, a, b) ({				\
	int ret__;						\
	asm volatile("2: wrmsr ; xorl %0,%0\n"			\
		     "1:\n\t"					\
		     ".section .fixup,\"ax\"\n\t"		\
		     "3: movl %4,%0 ; jmp 1b\n\t"		\
		     ".previous\n\t"				\
		     ".section __ex_table,\"a\"\n"		\
		     "   .align 8\n\t"				\
		     "   .quad 2b,3b\n\t"			\
		     ".previous"				\
		     : "=a" (ret__)				\
		     : "c" (msr), "0" (a), "d" (b), "i" (-EFAULT)); \
	ret__; })

#define checking_wrmsrl(msr, val) wrmsr_safe(msr, (u32)(val), (u32)((val) >> 32))

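/*
 * "=&bDS" is an early-clobbered output that gcc may place in ebx, edi
 * or esi, keeping it clear of the eax/ecx/edx registers rdmsr uses.
 */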
#define rdmsr_safe(msr, a, b) ({				\
	int ret__;						\
	asm volatile("1: rdmsr\n"				\
		     "2:\n"					\
		     ".section .fixup,\"ax\"\n"			\
		     "3: movl %4,%0\n"				\
		     "   jmp 2b\n"				\
		     ".previous\n"				\
		     ".section __ex_table,\"a\"\n"		\
		     " .align 8\n"				\
		     " .quad 1b,3b\n"				\
		     ".previous"				\
		     : "=&bDS" (ret__), "=a" (*(a)), "=d" (*(b)) \
		     : "c" (msr), "i" (-EIO), "0" (0));		\
	ret__; })

#endif /* __KERNEL__ */
#endif /* __ASSEMBLY__ */

#endif /* !__i386__ */

#ifndef __ASSEMBLY__
#ifdef __KERNEL__

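/*
 * These helpers run the MSR access on the given CPU: on SMP this goes
 * through smp_call_function_single(); the UP stubs below simply
 * execute locally.
 */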
#ifdef CONFIG_SMP
void rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h);
void wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h);
int rdmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h);
int wrmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h);
#else /* CONFIG_SMP */
static inline void rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h)
{
	rdmsr(msr_no, *l, *h);
}
static inline void wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h)
{
	wrmsr(msr_no, l, h);
}
static inline int rdmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h)
{
	return rdmsr_safe(msr_no, l, h);
}
static inline int wrmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h)
{
	return wrmsr_safe(msr_no, l, h);
}
#endif /* CONFIG_SMP */
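
/*
 * Illustrative use: read the APIC base MSR of CPU 1 from any CPU:
 *
 *	u32 lo, hi;
 *
 *	rdmsr_on_cpu(1, MSR_IA32_APICBASE, &lo, &hi);
 */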
#endif /* __KERNEL__ */
#endif /* __ASSEMBLY__ */

#endif /* __ASM_X86_MSR_H_ */