| Vegard Nossum | 77ef50a | 2008-06-18 17:08:48 +0200 | [diff] [blame] | 1 | #ifndef ASM_X86__MSR_H | 
 | 2 | #define ASM_X86__MSR_H | 
| Thomas Gleixner | be7baf8 | 2007-10-23 22:37:24 +0200 | [diff] [blame] | 3 |  | 
 | 4 | #include <asm/msr-index.h> | 
 | 5 |  | 
| Mike Frysinger | d43a331 | 2008-01-15 16:44:38 +0100 | [diff] [blame] | 6 | #ifndef __ASSEMBLY__ | 
 | 7 | # include <linux/types.h> | 
 | 8 | #endif | 
 | 9 |  | 
| Glauber de Oliveira Costa | 8f12dea | 2008-01-30 13:31:06 +0100 | [diff] [blame] | 10 | #ifdef __KERNEL__ | 
 | 11 | #ifndef __ASSEMBLY__ | 
| Glauber de Oliveira Costa | c210d24 | 2008-01-30 13:31:07 +0100 | [diff] [blame] | 12 |  | 
 | 13 | #include <asm/asm.h> | 
 | 14 | #include <asm/errno.h> | 
 | 15 |  | 
/*
 * Execute RDTSCP (encoded as raw bytes 0f 01 f9 so assemblers that
 * predate the mnemonic can still build this) and return the 64-bit
 * timestamp counter; the auxiliary value (TSC_AUX) is stored in *aux
 * via the "=c" output.
 */
static inline unsigned long long native_read_tscp(unsigned int *aux)
{
	unsigned long low, high;
	asm volatile(".byte 0x0f,0x01,0xf9"
		     : "=a" (low), "=d" (high), "=c" (*aux));
	/* Combine edx:eax into a single 64-bit value. */
	return low | ((u64)high << 32);
}
 | 23 |  | 
/*
 * i386 calling convention returns 64-bit value in edx:eax, while
 * x86_64 returns at rax. Also, the "A" constraint does not really
 * mean rdx:rax in x86_64, so we need specialized behaviour for each
 * architecture
 */
#ifdef CONFIG_X86_64
/* 64-bit: keep the two 32-bit halves separate and combine by hand. */
#define DECLARE_ARGS(val, low, high)	unsigned low, high
#define EAX_EDX_VAL(val, low, high)	((low) | ((u64)(high) << 32))
/* Input/output constraint lists naming eax ("a") and edx ("d"). */
#define EAX_EDX_ARGS(val, low, high)	"a" (low), "d" (high)
#define EAX_EDX_RET(val, low, high)	"=a" (low), "=d" (high)
#else
/* 32-bit: the "A" constraint denotes the edx:eax register pair. */
#define DECLARE_ARGS(val, low, high)	unsigned long long val
#define EAX_EDX_VAL(val, low, high)	(val)
#define EAX_EDX_ARGS(val, low, high)	"A" (val)
#define EAX_EDX_RET(val, low, high)	"=A" (val)
#endif
| Thomas Gleixner | be7baf8 | 2007-10-23 22:37:24 +0200 | [diff] [blame] | 41 |  | 
/*
 * Read the 64-bit contents of MSR @msr with the rdmsr instruction.
 * Faults (e.g. on a non-existent MSR) are NOT handled here; use
 * native_read_msr_safe() for that.
 */
static inline unsigned long long native_read_msr(unsigned int msr)
{
	DECLARE_ARGS(val, low, high);

	asm volatile("rdmsr" : EAX_EDX_RET(val, low, high) : "c" (msr));
	return EAX_EDX_VAL(val, low, high);
}
 | 49 |  | 
/*
 * Read MSR @msr with exception handling: on success *err is zeroed
 * (the xor) and the MSR value is returned; if rdmsr faults, the
 * exception-table fixup at label 3 stores -EFAULT into *err and
 * resumes at label 1, leaving the returned value undefined.
 * Symbolic [err]/[fault] operand names are used because the number
 * of EAX_EDX_RET outputs differs between 32- and 64-bit builds.
 */
static inline unsigned long long native_read_msr_safe(unsigned int msr,
						      int *err)
{
	DECLARE_ARGS(val, low, high);

	asm volatile("2: rdmsr ; xor %[err],%[err]\n"
		     "1:\n\t"
		     ".section .fixup,\"ax\"\n\t"
		     "3:  mov %[fault],%[err] ; jmp 1b\n\t"
		     ".previous\n\t"
		     _ASM_EXTABLE(2b, 3b)
		     : [err] "=r" (*err), EAX_EDX_RET(val, low, high)
		     : "c" (msr), [fault] "i" (-EFAULT));
	return EAX_EDX_VAL(val, low, high);
}
 | 65 |  | 
| Yinghai Lu | b05f78f | 2008-08-22 01:32:50 -0700 | [diff] [blame] | 66 | static inline unsigned long long native_read_msr_amd_safe(unsigned int msr, | 
 | 67 | 						      int *err) | 
 | 68 | { | 
 | 69 | 	DECLARE_ARGS(val, low, high); | 
 | 70 |  | 
| Thomas Gleixner | be7baf8 | 2007-10-23 22:37:24 +0200 | [diff] [blame] | 71 | 	asm volatile("2: rdmsr ; xor %0,%0\n" | 
 | 72 | 		     "1:\n\t" | 
 | 73 | 		     ".section .fixup,\"ax\"\n\t" | 
 | 74 | 		     "3:  mov %3,%0 ; jmp 1b\n\t" | 
 | 75 | 		     ".previous\n\t" | 
 | 76 | 		     _ASM_EXTABLE(2b, 3b) | 
 | 77 | 		     : "=r" (*err), EAX_EDX_RET(val, low, high) | 
| Yinghai Lu | b05f78f | 2008-08-22 01:32:50 -0700 | [diff] [blame] | 78 | 		     : "c" (msr), "D" (0x9c5a203a), "i" (-EFAULT)); | 
| Thomas Gleixner | be7baf8 | 2007-10-23 22:37:24 +0200 | [diff] [blame] | 79 | 	return EAX_EDX_VAL(val, low, high); | 
 | 80 | } | 
 | 81 |  | 
/*
 * Write @high:@low into MSR @msr with the wrmsr instruction.  The
 * "memory" clobber keeps the compiler from reordering memory accesses
 * around the write.  Faults are not handled; see
 * native_write_msr_safe().
 */
static inline void native_write_msr(unsigned int msr,
				    unsigned low, unsigned high)
{
	asm volatile("wrmsr" : : "c" (msr), "a"(low), "d" (high) : "memory");
}
 | 87 |  | 
/*
 * Write @high:@low into MSR @msr with exception handling.
 * Returns 0 on success; if wrmsr faults, the exception-table fixup
 * stores -EFAULT into the return value instead.  Note [err] uses the
 * "=a" constraint with "0" (low) tying the input low word to the same
 * register, so err reuses eax after the instruction completes.
 */
static inline int native_write_msr_safe(unsigned int msr,
					unsigned low, unsigned high)
{
	int err;
	asm volatile("2: wrmsr ; xor %[err],%[err]\n"
		     "1:\n\t"
		     ".section .fixup,\"ax\"\n\t"
		     "3:  mov %[fault],%[err] ; jmp 1b\n\t"
		     ".previous\n\t"
		     _ASM_EXTABLE(2b, 3b)
		     : [err] "=a" (err)
		     : "c" (msr), "0" (low), "d" (high),
		       [fault] "i" (-EFAULT)
		     : "memory");
	return err;
}
 | 104 |  | 
| Ingo Molnar | cdc7957 | 2008-01-30 13:32:39 +0100 | [diff] [blame] | 105 | extern unsigned long long native_read_tsc(void); | 
| Thomas Gleixner | be7baf8 | 2007-10-23 22:37:24 +0200 | [diff] [blame] | 106 |  | 
/*
 * Read the timestamp counter with rdtsc.  rdtsc_barrier() before and
 * after fences the (otherwise non-serializing) rdtsc so the reading
 * cannot be speculated across surrounding code.  __always_inline
 * because callers rely on this being a direct instruction, not a call.
 */
static __always_inline unsigned long long __native_read_tsc(void)
{
	DECLARE_ARGS(val, low, high);

	rdtsc_barrier();
	asm volatile("rdtsc" : EAX_EDX_RET(val, low, high));
	rdtsc_barrier();

	return EAX_EDX_VAL(val, low, high);
}
 | 117 |  | 
/*
 * Read performance-monitoring counter @counter with the rdpmc
 * instruction (counter index passed in ecx via "c").
 */
static inline unsigned long long native_read_pmc(int counter)
{
	DECLARE_ARGS(val, low, high);

	asm volatile("rdpmc" : EAX_EDX_RET(val, low, high) : "c" (counter));
	return EAX_EDX_VAL(val, low, high);
}
 | 125 |  | 
 | 126 | #ifdef CONFIG_PARAVIRT | 
 | 127 | #include <asm/paravirt.h> | 
| Thomas Gleixner | 96a388d | 2007-10-11 11:20:03 +0200 | [diff] [blame] | 128 | #else | 
| Thomas Gleixner | be7baf8 | 2007-10-23 22:37:24 +0200 | [diff] [blame] | 129 | #include <linux/errno.h> | 
 | 130 | /* | 
 | 131 |  * Access to machine-specific registers (available on 586 and better only) | 
 | 132 |  * Note: the rd* operations modify the parameters directly (without using | 
 | 133 |  * pointer indirection), this allows gcc to optimize better | 
 | 134 |  */ | 
 | 135 |  | 
/* Read MSR @msr and split the 64-bit result into @val1 (low 32 bits)
 * and @val2 (high 32 bits).  Writes the arguments directly, hence the
 * statement-macro form rather than a function. */
#define rdmsr(msr, val1, val2)					\
do {								\
	u64 __val = native_read_msr((msr));			\
	(val1) = (u32)__val;					\
	(val2) = (u32)(__val >> 32);				\
} while (0)
| Thomas Gleixner | be7baf8 | 2007-10-23 22:37:24 +0200 | [diff] [blame] | 142 |  | 
/* Write @high:@low to MSR @msr (non-paravirt, unchecked variant). */
static inline void wrmsr(unsigned msr, unsigned low, unsigned high)
{
	native_write_msr(msr, low, high);
}
 | 147 |  | 
/* Read MSR @msr into the single 64-bit lvalue @val. */
#define rdmsrl(msr, val)			\
	((val) = native_read_msr((msr)))

/* Write the 64-bit value @val to MSR @msr, splitting it into the
 * low/high 32-bit halves that wrmsr expects. */
#define wrmsrl(msr, val)						\
	native_write_msr((msr), (u32)((u64)(val)), (u32)((u64)(val) >> 32))
| Thomas Gleixner | be7baf8 | 2007-10-23 22:37:24 +0200 | [diff] [blame] | 153 |  | 
/* wrmsr with exception handling; returns 0 on success, -EFAULT if the
 * write faulted (see native_write_msr_safe()). */
static inline int wrmsr_safe(unsigned msr, unsigned low, unsigned high)
{
	return native_write_msr_safe(msr, low, high);
}
 | 159 |  | 
/* rdmsr with exception handling: stores the low/high halves through
 * pointers @p1/@p2 and evaluates to the error code (0 or -EFAULT). */
#define rdmsr_safe(msr, p1, p2)					\
({								\
	int __err;						\
	u64 __val = native_read_msr_safe((msr), &__err);	\
	(*p1) = (u32)__val;					\
	(*p2) = (u32)(__val >> 32);				\
	__err;							\
})
| Thomas Gleixner | be7baf8 | 2007-10-23 22:37:24 +0200 | [diff] [blame] | 169 |  | 
/* 64-bit rdmsr with exception handling: stores the MSR value through
 * @p and returns 0 on success or -EFAULT on fault. */
static inline int rdmsrl_safe(unsigned msr, unsigned long long *p)
{
	int err;

	*p = native_read_msr_safe(msr, &err);
	return err;
}
/* AMD variant of rdmsrl_safe(); see native_read_msr_amd_safe() for
 * the extra register setup it performs. */
static inline int rdmsrl_amd_safe(unsigned msr, unsigned long long *p)
{
	int err;

	*p = native_read_msr_amd_safe(msr, &err);
	return err;
}
| Andi Kleen | 1de87bd | 2008-03-22 10:59:28 +0100 | [diff] [blame] | 184 |  | 
/* Read only the low 32 bits of the TSC into @low. */
#define rdtscl(low)						\
	((low) = (u32)native_read_tsc())

/* Read the full 64-bit TSC into @val. */
#define rdtscll(val)						\
	((val) = native_read_tsc())

/* Read performance counter @counter, splitting the 64-bit result into
 * @low and @high. */
#define rdpmc(counter, low, high)			\
do {							\
	u64 _l = native_read_pmc((counter));		\
	(low)  = (u32)_l;				\
	(high) = (u32)(_l >> 32);			\
} while (0)

/* rdtscp: TSC split into @low/@high, auxiliary value into @aux. */
#define rdtscp(low, high, aux)					\
do {                                                            \
	unsigned long long _val = native_read_tscp(&(aux));     \
	(low) = (u32)_val;                                      \
	(high) = (u32)(_val >> 32);                             \
} while (0)

/* rdtscp with the TSC kept as one 64-bit value in @val. */
#define rdtscpll(val, aux) (val) = native_read_tscp(&(aux))
 | 206 |  | 
| Thomas Gleixner | be7baf8 | 2007-10-23 22:37:24 +0200 | [diff] [blame] | 207 | #endif	/* !CONFIG_PARAVIRT */ | 
 | 208 |  | 
| Thomas Gleixner | be7baf8 | 2007-10-23 22:37:24 +0200 | [diff] [blame] | 209 |  | 
/* Exception-checked 64-bit MSR write: splits @val and returns the
 * wrmsr_safe() error code. */
#define checking_wrmsrl(msr, val) wrmsr_safe((msr), (u32)(val),		\
					     (u32)((val) >> 32))

/* Write @val2:@val1 to MSR 0x10 — the TSC MSR, per the macro name. */
#define write_tsc(val1, val2) wrmsr(0x10, (val1), (val2))

/* Write @val to MSR 0xc0000103 (the rdtscp auxiliary value, per the
 * macro name); high half is always 0. */
#define write_rdtscp_aux(val) wrmsr(0xc0000103, (val), 0)
| Thomas Gleixner | be7baf8 | 2007-10-23 22:37:24 +0200 | [diff] [blame] | 216 |  | 
| Thomas Gleixner | be7baf8 | 2007-10-23 22:37:24 +0200 | [diff] [blame] | 217 | #ifdef CONFIG_SMP | 
| H. Peter Anvin | c6f3193 | 2008-08-25 17:27:21 -0700 | [diff] [blame] | 218 | int rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h); | 
 | 219 | int wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h); | 
| Thomas Gleixner | be7baf8 | 2007-10-23 22:37:24 +0200 | [diff] [blame] | 220 | int rdmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h); | 
 | 221 | int wrmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h); | 
 | 222 | #else  /*  CONFIG_SMP  */ | 
/* UP stub: @cpu is ignored, the MSR is read on the current (only)
 * CPU.  Always returns 0 to match the SMP API's error convention. */
static inline int rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h)
{
	rdmsr(msr_no, *l, *h);
	return 0;
}
/* UP stub: @cpu is ignored, the MSR is written on the current (only)
 * CPU.  Always returns 0 to match the SMP API's error convention. */
static inline int wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h)
{
	wrmsr(msr_no, l, h);
	return 0;
}
/* UP stub: exception-checked read on the current CPU; @cpu is
 * ignored.  Returns rdmsr_safe()'s error code. */
static inline int rdmsr_safe_on_cpu(unsigned int cpu, u32 msr_no,
				    u32 *l, u32 *h)
{
	return rdmsr_safe(msr_no, l, h);
}
/* UP stub: exception-checked write on the current CPU; @cpu is
 * ignored.  Returns wrmsr_safe()'s error code. */
static inline int wrmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h)
{
	return wrmsr_safe(msr_no, l, h);
}
 | 242 | #endif  /* CONFIG_SMP */ | 
| Glauber de Oliveira Costa | 751de83 | 2008-01-30 13:31:03 +0100 | [diff] [blame] | 243 | #endif /* __ASSEMBLY__ */ | 
| Glauber de Oliveira Costa | c210d24 | 2008-01-30 13:31:07 +0100 | [diff] [blame] | 244 | #endif /* __KERNEL__ */ | 
 | 245 |  | 
| Thomas Gleixner | be7baf8 | 2007-10-23 22:37:24 +0200 | [diff] [blame] | 246 |  | 
| Vegard Nossum | 77ef50a | 2008-06-18 17:08:48 +0200 | [diff] [blame] | 247 | #endif /* ASM_X86__MSR_H */ |