| H. Peter Anvin | 1965aae | 2008-10-22 22:26:29 -0700 | [diff] [blame] | 1 | #ifndef _ASM_X86_FTRACE_H | 
|  | 2 | #define _ASM_X86_FTRACE_H | 
| Abhishek Sagar | 395a59d | 2008-06-21 23:47:27 +0530 | [diff] [blame] | 3 |  | 
| Cyrill Gorcunov | d680fe4 | 2008-12-13 00:09:08 +0300 | [diff] [blame] | 4 | #ifdef __ASSEMBLY__ | 
|  | 5 |  | 
/* skip is set if the stack was already partially adjusted */
.macro MCOUNT_SAVE_FRAME skip=0
	/*
	 * We add enough stack to save all regs.
	 *
	 * SS, RAX, RCX, ... RIP are presumably struct pt_regs field
	 * offsets supplied by the ptrace/asm-offsets machinery —
	 * verify at the include site. SS+8 covers the full pt_regs
	 * frame plus the return address already on the stack.
	 */
	subq $(SS+8-\skip), %rsp
	/*
	 * Only the registers the callee may clobber / the argument
	 * registers are spilled into their pt_regs slots here.
	 */
	movq %rax, RAX(%rsp)
	movq %rcx, RCX(%rsp)
	movq %rdx, RDX(%rsp)
	movq %rsi, RSI(%rsp)
	movq %rdi, RDI(%rsp)
	movq %r8, R8(%rsp)
	movq %r9, R9(%rsp)
	/*
	 * Move RIP to its proper location: after the subq above, the
	 * caller's return address sits at SS+8(%rsp); copy it into the
	 * pt_regs RIP slot (clobbers %rdx, which was saved above).
	 */
	movq SS+8(%rsp), %rdx
	movq %rdx, RIP(%rsp)
.endm
|  | 23 |  | 
/*
 * Undo MCOUNT_SAVE_FRAME: reload the saved registers from their
 * pt_regs slots (in reverse order of the save) and release the
 * stack frame.  \skip must match the value given to the matching
 * MCOUNT_SAVE_FRAME invocation.
 */
.macro MCOUNT_RESTORE_FRAME skip=0
	movq R9(%rsp), %r9
	movq R8(%rsp), %r8
	movq RDI(%rsp), %rdi
	movq RSI(%rsp), %rsi
	movq RDX(%rsp), %rdx
	movq RCX(%rsp), %rcx
	movq RAX(%rsp), %rax
	/* Drop the frame allocated by MCOUNT_SAVE_FRAME */
	addq $(SS+8-\skip), %rsp
.endm
|  | 34 |  | 
|  | 35 | #endif | 
|  | 36 |  | 
#ifdef CONFIG_FUNCTION_TRACER
#ifdef CC_USING_FENTRY
/* Compiler was told to emit calls to __fentry__ rather than mcount */
# define MCOUNT_ADDR		((long)(__fentry__))
#else
/* Traditional -pg instrumentation: the hook is mcount */
# define MCOUNT_ADDR		((long)(mcount))
#endif
#define MCOUNT_INSN_SIZE	5 /* sizeof mcount call */
|  | 44 |  | 
| Steven Rostedt | 28fb5df | 2011-08-10 22:00:55 -0400 | [diff] [blame] | 45 | #ifdef CONFIG_DYNAMIC_FTRACE | 
| Steven Rostedt | 2f5f6ad | 2011-08-08 16:57:47 -0400 | [diff] [blame] | 46 | #define ARCH_SUPPORTS_FTRACE_OPS 1 | 
| Steven Rostedt | 08f6fba | 2012-04-30 16:20:23 -0400 | [diff] [blame] | 47 | #define ARCH_SUPPORTS_FTRACE_SAVE_REGS | 
|  | 48 | #endif | 
| Steven Rostedt | 2f5f6ad | 2011-08-08 16:57:47 -0400 | [diff] [blame] | 49 |  | 
| Abhishek Sagar | 395a59d | 2008-06-21 23:47:27 +0530 | [diff] [blame] | 50 | #ifndef __ASSEMBLY__ | 
|  | 51 | extern void mcount(void); | 
| Steven Rostedt | a192cd0 | 2012-05-30 13:26:37 -0400 | [diff] [blame] | 52 | extern atomic_t modifying_ftrace_code; | 
| Steven Rostedt | d57c5d5 | 2011-02-09 13:32:18 -0500 | [diff] [blame] | 53 | extern void __fentry__(void); | 
| Steven Rostedt | 68bf21a | 2008-08-14 15:45:08 -0400 | [diff] [blame] | 54 |  | 
/*
 * Translate a recorded mcount call-site address into the address
 * ftrace should actually patch.
 *
 * On x86 no per-architecture fixup is needed: recordmcount already
 * performs the necessary offset calculation at build time, so the
 * address is handed back unchanged.
 */
static inline unsigned long ftrace_call_adjust(unsigned long addr)
{
	return addr;
}
| Steven Rostedt | 31e8890 | 2008-11-14 16:21:19 -0800 | [diff] [blame] | 63 |  | 
|  | 64 | #ifdef CONFIG_DYNAMIC_FTRACE | 
|  | 65 |  | 
/*
 * Per-record architecture-private data for dynamic ftrace.
 * x86 needs nothing beyond the generic dyn_ftrace fields.
 */
struct dyn_arch_ftrace {
	/* No extra data needed for x86 */
};

/*
 * NOTE(review): implemented elsewhere; presumably handles int3 (#BP)
 * traps taken while ftrace is rewriting call sites — confirm against
 * the definition before relying on this description.
 */
int ftrace_int3_handler(struct pt_regs *regs);
|  | 71 |  | 
| Steven Rostedt | 31e8890 | 2008-11-14 16:21:19 -0800 | [diff] [blame] | 72 | #endif /*  CONFIG_DYNAMIC_FTRACE */ | 
| Steven Rostedt | a26a2a2 | 2008-10-31 00:03:22 -0400 | [diff] [blame] | 73 | #endif /* __ASSEMBLY__ */ | 
| Ingo Molnar | 4944dd6 | 2008-10-27 10:50:54 +0100 | [diff] [blame] | 74 | #endif /* CONFIG_FUNCTION_TRACER */ | 
| Abhishek Sagar | 395a59d | 2008-06-21 23:47:27 +0530 | [diff] [blame] | 75 |  | 
| H. Peter Anvin | 1965aae | 2008-10-22 22:26:29 -0700 | [diff] [blame] | 76 | #endif /* _ASM_X86_FTRACE_H */ |