#ifndef X86_64_MSR_H
#define X86_64_MSR_H 1

#include <asm/msr-index.h>

#ifndef __ASSEMBLY__
/*
 * Access to machine-specific registers (available on 586 and better only).
 * Note: the rd* operations modify the parameters directly (without using
 * pointer indirection), which allows gcc to optimize better.
 */

#define rdmsr(msr,val1,val2) \
	__asm__ __volatile__("rdmsr" \
			     : "=a" (val1), "=d" (val2) \
			     : "c" (msr))


#define rdmsrl(msr,val) do { unsigned long a__,b__; \
	__asm__ __volatile__("rdmsr" \
			     : "=a" (a__), "=d" (b__) \
			     : "c" (msr)); \
	val = a__ | (b__<<32); \
} while(0)

#define wrmsr(msr,val1,val2) \
	__asm__ __volatile__("wrmsr" \
			     : /* no outputs */ \
			     : "c" (msr), "a" (val1), "d" (val2))

#define wrmsrl(msr,val) wrmsr(msr,(__u32)((__u64)(val)),((__u64)(val))>>32)

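/*
 * Illustrative usage sketch (not part of the original header, never
 * compiled): read a 64-bit MSR, set one bit and write it back with the
 * rdmsrl()/wrmsrl() helpers above.  MSR_EFER and _EFER_NX are expected to
 * come from <asm/msr-index.h>; the function name is made up for the example.
 */
#if 0
static inline void example_enable_nx(void)
{
	unsigned long efer;

	rdmsrl(MSR_EFER, efer);		/* eax/edx halves combined into one value */
	efer |= 1UL << _EFER_NX;	/* set the No-Execute enable bit */
	wrmsrl(MSR_EFER, efer);		/* split back into eax/edx by the macro */
}
#endif
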
/* wrmsr with exception handling */
#define wrmsr_safe(msr,a,b) ({ int ret__;			\
	asm volatile("2: wrmsr ; xorl %0,%0\n"			\
		     "1:\n\t"					\
		     ".section .fixup,\"ax\"\n\t"		\
		     "3: movl %4,%0 ; jmp 1b\n\t"		\
		     ".previous\n\t"				\
		     ".section __ex_table,\"a\"\n"		\
		     " .align 8\n\t"				\
		     " .quad 2b,3b\n\t"				\
		     ".previous"				\
		     : "=a" (ret__)				\
		     : "c" (msr), "0" (a), "d" (b), "i" (-EFAULT)); \
	ret__; })

#define checking_wrmsrl(msr,val) wrmsr_safe(msr,(u32)(val),(u32)((val)>>32))

#define rdmsr_safe(msr,a,b) \
	({ int ret__;						\
	   asm volatile ("1: rdmsr\n"				\
			 "2:\n"					\
			 ".section .fixup,\"ax\"\n"		\
			 "3: movl %4,%0\n"			\
			 " jmp 2b\n"				\
			 ".previous\n"				\
			 ".section __ex_table,\"a\"\n"		\
			 " .align 8\n"				\
			 " .quad 1b,3b\n"			\
			 ".previous":"=&bDS" (ret__), "=a"(*(a)), "=d"(*(b))\
			 :"c"(msr), "i"(-EIO), "0"(0));		\
	   ret__; })
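
/*
 * Illustrative usage sketch (not part of the original header, never
 * compiled): probe an MSR that may not exist on every CPU.  rdmsr_safe()
 * returns 0 on success and -EIO if the rdmsr faulted; wrmsr_safe() does the
 * same for writes, returning -EFAULT on a fault.  The function name below
 * is made up for the example.
 */
#if 0
static inline int example_probe_msr(u32 msr_no)
{
	u32 lo, hi;

	if (rdmsr_safe(msr_no, &lo, &hi))	/* non-zero: #GP was caught */
		return -EIO;			/* MSR not implemented */
	return wrmsr_safe(msr_no, lo, hi);	/* write the same value straight back */
}
#endif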

#define rdtsc(low,high) \
	__asm__ __volatile__("rdtsc" : "=a" (low), "=d" (high))

#define rdtscl(low) \
	__asm__ __volatile__ ("rdtsc" : "=a" (low) : : "edx")

#define rdtscp(low,high,aux) \
	asm volatile (".byte 0x0f,0x01,0xf9" : "=a" (low), "=d" (high), "=c" (aux))

#define rdtscll(val) do { \
	unsigned int __a,__d; \
	asm volatile("rdtsc" : "=a" (__a), "=d" (__d)); \
	(val) = ((unsigned long)__a) | (((unsigned long)__d)<<32); \
} while(0)

#define rdtscpll(val, aux) do { \
	unsigned long __a, __d; \
	asm volatile (".byte 0x0f,0x01,0xf9" : "=a" (__a), "=d" (__d), "=c" (aux)); \
	(val) = (__d << 32) | __a; \
} while (0)
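
/*
 * Illustrative usage sketch (not part of the original header, never
 * compiled): take two full 64-bit time stamp counter readings with
 * rdtscll() and return the difference in cycles.  The function name is
 * made up for the example; the delta is only meaningful when both reads
 * happen on the same CPU at the same TSC rate.
 */
#if 0
static inline unsigned long example_tsc_delta(void)
{
	unsigned long t0, t1;

	rdtscll(t0);		/* edx:eax combined into one 64-bit value */
	/* ... code being timed ... */
	rdtscll(t1);
	return t1 - t0;		/* elapsed cycles */
}
#endif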

#define write_tsc(val1,val2) wrmsr(0x10, val1, val2)

#define write_rdtscp_aux(val) wrmsr(0xc0000103, val, 0)

#define rdpmc(counter,low,high) \
	__asm__ __volatile__("rdpmc" \
			     : "=a" (low), "=d" (high) \
			     : "c" (counter))

static inline void cpuid(int op, unsigned int *eax, unsigned int *ebx,
			 unsigned int *ecx, unsigned int *edx)
{
	__asm__("cpuid"
		: "=a" (*eax),
		  "=b" (*ebx),
		  "=c" (*ecx),
		  "=d" (*edx)
		: "0" (op));
}

/* Some CPUID calls want 'count' to be placed in ecx */
static inline void cpuid_count(int op, int count, int *eax, int *ebx, int *ecx,
			       int *edx)
{
	__asm__("cpuid"
		: "=a" (*eax),
		  "=b" (*ebx),
		  "=c" (*ecx),
		  "=d" (*edx)
		: "0" (op), "c" (count));
}
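
/*
 * Illustrative usage sketch (not part of the original header, never
 * compiled): CPUID leaf 1 reports feature flags in edx/ecx, and sub-leafed
 * queries such as leaf 4 (Intel cache parameters) need the index passed in
 * ecx, which is what cpuid_count() provides.  The function name is made up
 * for the example.
 */
#if 0
static inline void example_cpuid_usage(void)
{
	unsigned int eax, ebx, ecx, edx;
	int a, b, c, d;

	cpuid(1, &eax, &ebx, &ecx, &edx);	/* edx/ecx now hold feature bits */
	cpuid_count(4, 0, &a, &b, &c, &d);	/* first cache-parameter sub-leaf */
}
#endif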

/*
 * CPUID functions returning a single datum
 */
static inline unsigned int cpuid_eax(unsigned int op)
{
	unsigned int eax;

	__asm__("cpuid"
		: "=a" (eax)
		: "0" (op)
		: "bx", "cx", "dx");
	return eax;
}
static inline unsigned int cpuid_ebx(unsigned int op)
{
	unsigned int eax, ebx;

	__asm__("cpuid"
		: "=a" (eax), "=b" (ebx)
		: "0" (op)
		: "cx", "dx");
	return ebx;
}
static inline unsigned int cpuid_ecx(unsigned int op)
{
	unsigned int eax, ecx;

	__asm__("cpuid"
		: "=a" (eax), "=c" (ecx)
		: "0" (op)
		: "bx", "dx");
	return ecx;
}
static inline unsigned int cpuid_edx(unsigned int op)
{
	unsigned int eax, edx;

	__asm__("cpuid"
		: "=a" (eax), "=d" (edx)
		: "0" (op)
		: "bx", "cx");
	return edx;
}

#ifdef CONFIG_SMP
void rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h);
void wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h);
#else  /* CONFIG_SMP */
static inline void rdmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 *l, u32 *h)
{
	rdmsr(msr_no, *l, *h);
}
static inline void wrmsr_on_cpu(unsigned int cpu, u32 msr_no, u32 l, u32 h)
{
	wrmsr(msr_no, l, h);
}
#endif  /* CONFIG_SMP */
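
/*
 * Illustrative usage sketch (not part of the original header, never
 * compiled): the *_on_cpu() helpers read or write an MSR on a specific CPU;
 * on UP builds they collapse to plain rdmsr()/wrmsr() as above.  The
 * function name is made up for the example.
 */
#if 0
static inline void example_msr_on_cpu(unsigned int cpu, u32 msr_no)
{
	u32 lo, hi;

	rdmsr_on_cpu(cpu, msr_no, &lo, &hi);	/* runs on 'cpu' when CONFIG_SMP is set */
	wrmsr_on_cpu(cpu, msr_no, lo, hi);	/* write the same value back */
}
#endif
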
#endif /* __ASSEMBLY__ */
#endif /* X86_64_MSR_H */