#ifndef __ASM_MSR_H
#define __ASM_MSR_H

#include <asm/msr-index.h>

#ifdef __KERNEL__
#ifndef __ASSEMBLY__

#include <asm/errno.h>

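/*
 * native_read_msr() issues "rdmsr" with the MSR index in ECX; the "=A"
 * constraint returns the EDX:EAX result pair as one 64-bit value.
 */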
static inline unsigned long long native_read_msr(unsigned int msr)
{
	unsigned long long val;

	asm volatile("rdmsr" : "=A" (val) : "c" (msr));
	return val;
}

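/*
 * Exception-safe variant: if "rdmsr" faults (e.g. the MSR does not exist),
 * the exception-table fixup sets *err to -EFAULT instead of oopsing; on
 * success *err is cleared to zero.
 */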
static inline unsigned long long native_read_msr_safe(unsigned int msr,
						       int *err)
{
	unsigned long long val;

	asm volatile("2: rdmsr ; xorl %0,%0\n"
		     "1:\n\t"
		     ".section .fixup,\"ax\"\n\t"
		     "3:  movl %3,%0 ; jmp 1b\n\t"
		     ".previous\n\t"
		     ".section __ex_table,\"a\"\n"
		     "   .align 4\n\t"
		     "   .long	2b,3b\n\t"
		     ".previous"
		     : "=r" (*err), "=A" (val)
		     : "c" (msr), "i" (-EFAULT));

	return val;
}

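/*
 * native_write_msr() issues "wrmsr" with the MSR index in ECX and the
 * 64-bit value split across EDX:EAX by the "A" constraint.
 */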
static inline void native_write_msr(unsigned int msr, unsigned long long val)
{
	asm volatile("wrmsr" : : "c" (msr), "A" (val));
}

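/*
 * Exception-safe variant of native_write_msr(): returns 0 on success or
 * -EFAULT if "wrmsr" faulted, using the same exception-table fixup scheme
 * as native_read_msr_safe().
 */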
static inline int native_write_msr_safe(unsigned int msr,
					 unsigned long long val)
{
	int err;
	asm volatile("2: wrmsr ; xorl %0,%0\n"
		     "1:\n\t"
		     ".section .fixup,\"ax\"\n\t"
		     "3:  movl %4,%0 ; jmp 1b\n\t"
		     ".previous\n\t"
		     ".section __ex_table,\"a\"\n"
		     "   .align 4\n\t"
		     "   .long	2b,3b\n\t"
		     ".previous"
		     : "=a" (err)
		     : "c" (msr), "0" ((u32)val), "d" ((u32)(val>>32)),
		       "i" (-EFAULT));
	return err;
}

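/* native_read_tsc(): read the 64-bit time-stamp counter with "rdtsc". */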
static inline unsigned long long native_read_tsc(void)
{
	unsigned long long val;
	asm volatile("rdtsc" : "=A" (val));
	return val;
}

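/*
 * native_read_pmc(): read the performance-monitoring counter selected by
 * 'counter', which "rdpmc" expects in ECX.
 */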
static inline unsigned long long native_read_pmc(int counter)
{
	unsigned long long val;
	asm volatile("rdpmc" : "=A" (val) : "c" (counter));
	return val;
}

#ifdef CONFIG_PARAVIRT
#include <asm/paravirt.h>
#else
#include <linux/errno.h>
/*
 * Access to machine-specific registers (available on 586 and better only).
 * Note: the rd* operations modify the parameters directly (without using
 * pointer indirection); this allows gcc to optimize better.
 */

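/*
 * Typical usage (illustrative sketch only; MSR_IA32_APICBASE is assumed
 * to be available from <asm/msr-index.h>):
 *
 *	u32 lo, hi;
 *
 *	rdmsr(MSR_IA32_APICBASE, lo, hi);
 *	wrmsr(MSR_IA32_APICBASE, lo, hi);
 */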
#define rdmsr(msr,val1,val2)						\
	do {								\
		u64 __val = native_read_msr(msr);			\
		(val1) = (u32)__val;					\
		(val2) = (u32)(__val >> 32);				\
	} while(0)

static inline void wrmsr(u32 __msr, u32 __low, u32 __high)
{
	native_write_msr(__msr, ((u64)__high << 32) | __low);
}

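/* 64-bit ("long") variants: read or write the whole MSR as one u64. */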
#define rdmsrl(msr,val)							\
	((val) = native_read_msr(msr))

#define wrmsrl(msr,val)	native_write_msr(msr, val)

/* wrmsr with exception handling */
static inline int wrmsr_safe(u32 __msr, u32 __low, u32 __high)
{
	return native_write_msr_safe(__msr, ((u64)__high << 32) | __low);
}

/* rdmsr with exception handling */
#define rdmsr_safe(msr,p1,p2)						\
	({								\
		int __err;						\
		u64 __val = native_read_msr_safe(msr, &__err);		\
		(*p1) = (u32)__val;					\
		(*p2) = (u32)(__val >> 32);				\
		__err;							\
	})

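/*
 * rdtscl() returns only the low 32 bits of the TSC, rdtscll() the full
 * 64-bit value; write_tsc() writes MSR 0x10 (the time-stamp counter).
 */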
#define rdtscl(low)						\
	((low) = (u32)native_read_tsc())

#define rdtscll(val)						\
	((val) = native_read_tsc())

#define write_tsc(val1,val2) wrmsr(0x10, val1, val2)

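/* rdpmc(): read performance counter 'counter' into the low/high pair. */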
#define rdpmc(counter,low,high)					\
	do {							\
		u64 _l = native_read_pmc(counter);		\
		(low)  = (u32)_l;				\
		(high) = (u32)(_l >> 32);			\
	} while(0)
#endif	/* !CONFIG_PARAVIRT */

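/*
 * Cross-CPU MSR access: the *_on_cpu() helpers perform the access on the
 * given CPU.  On uniprocessor builds they fall back to the plain
 * rdmsr/wrmsr variants on the current (only) CPU.
 */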
#ifdef CONFIG_SMP
void rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h);
void wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h);
int rdmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h);
int wrmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h);
#else  /*  CONFIG_SMP  */
static inline void rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h)
{
	rdmsr(msr_no, *l, *h);
}
static inline void wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h)
{
	wrmsr(msr_no, l, h);
}
static inline int rdmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h)
{
	return rdmsr_safe(msr_no, l, h);
}
static inline int wrmsr_safe_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h)
{
	return wrmsr_safe(msr_no, l, h);
}
#endif  /*  CONFIG_SMP  */
#endif /* __ASSEMBLY__ */
#endif /* __KERNEL__ */
#endif /* __ASM_MSR_H */