#include <linux/linkage.h>
#include <linux/errno.h>
#include <asm/asm.h>
#include <asm/msr.h>

#ifdef CONFIG_X86_64
/*
 * int native_{rdmsr,wrmsr}_safe_regs(u32 gprs[8]);
 *
 * reg layout: u32 gprs[eax, ecx, edx, ebx, esp, ebp, esi, edi]
 *
 */
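/*
 * Illustrative usage sketch (an assumption added for clarity, not part of
 * this file; these entry points are normally reached via the
 * rdmsr_safe_regs()/wrmsr_safe_regs() wrappers):
 *
 *	u32 gprs[8] = { 0 };
 *	gprs[1] = msr;			gprs[1] maps to %ecx, the MSR index
 *	err = native_rdmsr_safe_regs(gprs);
 *	low = gprs[0]; high = gprs[2];	%eax/%edx on success; err is -EIO
 *					if the MSR access faulted
 */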
.macro op_safe_regs op:req
ENTRY(native_\op\()_safe_regs)
	push	%rbx
	push	%rbp
	push	$0	/* Return value */
	push	%rdi
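	/*
	 * Stack layout from here (top down): saved %rdi (gprs pointer),
	 * return value (0), saved %rbp, saved %rbx, return address.
	 */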
	movl	(%rdi), %eax
	movl	4(%rdi), %ecx
	movl	8(%rdi), %edx
	movl	12(%rdi), %ebx
	movl	20(%rdi), %ebp
	movl	24(%rdi), %esi
	movl	28(%rdi), %edi
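	/* gprs[4] (offset 16, the %esp slot) is intentionally neither loaded nor stored */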
1:	\op
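	/* stash the %edi result in %r10d before %rdi is reloaded with the gprs pointer */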
2:	movl	%edi, %r10d
	pop	%rdi
	movl	%eax, (%rdi)
	movl	%ecx, 4(%rdi)
	movl	%edx, 8(%rdi)
	movl	%ebx, 12(%rdi)
	movl	%ebp, 20(%rdi)
	movl	%esi, 24(%rdi)
	movl	%r10d, 28(%rdi)
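	/* return value: 0, or -EIO patched in by the fixup at 3: */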
	pop	%rax
	pop	%rbp
	pop	%rbx
	ret
3:
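	/* the MSR access faulted: overwrite the on-stack return value with -EIO */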
	movq	$-EIO, 8(%rsp)
	jmp	2b
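	/* exception table entry: a fault at 1: is fixed up at 3: */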
	.section __ex_table,"ax"
	.balign 4
	.quad 1b, 3b
	.previous
ENDPROC(native_\op\()_safe_regs)
.endm

#else /* X86_32 */

.macro op_safe_regs op:req
ENTRY(native_\op\()_safe_regs)
	push	%ebx
	push	%ebp
	push	%esi
	push	%edi
	push	$0	/* Return value */
	push	%eax
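	/*
	 * Stack layout from here (top down): gprs pointer (%eax), return
	 * value (0), saved %edi, %esi, %ebp, %ebx, return address.
	 */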
	movl	4(%eax), %ecx
	movl	8(%eax), %edx
	movl	12(%eax), %ebx
	movl	20(%eax), %ebp
	movl	24(%eax), %esi
	movl	28(%eax), %edi
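	/* load gprs[0] last: %eax doubles as the gprs pointer until here */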
	movl	(%eax), %eax
1:	\op
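	/* 2: save the %eax result, fetch the gprs pointer from the stack,
	   store %eax through it, then drop the saved pointer slot */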
2:	push	%eax
	movl	4(%esp), %eax
	pop	(%eax)
	addl	$4, %esp
	movl	%ecx, 4(%eax)
	movl	%edx, 8(%eax)
	movl	%ebx, 12(%eax)
	movl	%ebp, 20(%eax)
	movl	%esi, 24(%eax)
	movl	%edi, 28(%eax)
	pop	%eax
	pop	%edi
	pop	%esi
	pop	%ebp
	pop	%ebx
	ret
3:
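	/* the MSR access faulted: set the on-stack return value to -EIO */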
	movl	$-EIO, 4(%esp)
	jmp	2b
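	/* exception table entry: a fault at 1: is fixed up at 3: */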
	.section __ex_table,"ax"
	.balign 4
	.long 1b, 3b
	.previous
ENDPROC(native_\op\()_safe_regs)
.endm

#endif

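/* instantiate both native_rdmsr_safe_regs and native_wrmsr_safe_regs */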
op_safe_regs rdmsr
op_safe_regs wrmsr