/*
 * Copyright (C) 2017 Steven Rostedt, VMware Inc.
 */

#include <linux/linkage.h>
#include <asm/page_types.h>
#include <asm/segment.h>
#include <asm/export.h>
#include <asm/ftrace.h>

#ifdef CONFIG_FUNCTION_TRACER
#ifdef CONFIG_DYNAMIC_FTRACE

ENTRY(mcount)
|
|
|
|
ret
|
|
|
|
END(mcount)
ENTRY(ftrace_caller)
|
2017-03-23 22:33:50 +08:00
|
|
|
|
|
|
|
pushl %ebp
|
|
|
|
movl %esp, %ebp
|
|
|
|
|
2017-03-23 22:33:49 +08:00
|
|
|
pushl %eax
|
|
|
|
pushl %ecx
|
|
|
|
pushl %edx
|
|
|
|
pushl $0 /* Pass NULL as regs pointer */
|
2017-03-23 22:33:50 +08:00
|
|
|
movl 5*4(%esp), %eax
|
|
|
|
/* Copy original ebp into %edx */
|
|
|
|
movl 4*4(%esp), %edx
|
|
|
|
/* Get the parent ip */
|
|
|
|
movl 0x4(%edx), %edx
|
2017-03-23 22:33:49 +08:00
|
|
|
movl function_trace_op, %ecx
|
|
|
|
subl $MCOUNT_INSN_SIZE, %eax
|
|
|
|
|
|
|
|
.globl ftrace_call
|
|
|
|
ftrace_call:
|
|
|
|
call ftrace_stub
|
|
|
|
|
|
|
|
addl $4, %esp /* skip NULL pointer */
|
|
|
|
popl %edx
|
|
|
|
popl %ecx
|
|
|
|
popl %eax
|
2017-03-23 22:33:50 +08:00
|
|
|
popl %ebp
|
2017-03-23 22:33:49 +08:00
|
|
|
.Lftrace_ret:
|
|
|
|
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
|
|
|
|
.globl ftrace_graph_call
|
|
|
|
ftrace_graph_call:
|
|
|
|
jmp ftrace_stub
|
|
|
|
#endif
|
|
|
|
|
|
|
|
/* This is weak to keep gas from relaxing the jumps */
|
|
|
|
WEAK(ftrace_stub)
|
|
|
|
ret
|
|
|
|
END(ftrace_caller)
ENTRY(ftrace_regs_caller)
|
|
|
|
pushf /* push flags before compare (in cs location) */
|
|
|
|
|
|
|
|
/*
|
|
|
|
* i386 does not save SS and ESP when coming from kernel.
|
|
|
|
* Instead, to get sp, ®s->sp is used (see ptrace.h).
|
|
|
|
* Unfortunately, that means eflags must be at the same location
|
|
|
|
* as the current return ip is. We move the return ip into the
|
|
|
|
* ip location, and move flags into the return ip location.
|
|
|
|
*/
|
|
|
|
pushl 4(%esp) /* save return ip into ip slot */
|
|
|
|
|
|
|
|
pushl $0 /* Load 0 into orig_ax */
|
|
|
|
pushl %gs
|
|
|
|
pushl %fs
|
|
|
|
pushl %es
|
|
|
|
pushl %ds
|
|
|
|
pushl %eax
|
|
|
|
pushl %ebp
|
|
|
|
pushl %edi
|
|
|
|
pushl %esi
|
|
|
|
pushl %edx
|
|
|
|
pushl %ecx
|
|
|
|
pushl %ebx
|
|
|
|
|
|
|
|
movl 13*4(%esp), %eax /* Get the saved flags */
|
|
|
|
movl %eax, 14*4(%esp) /* Move saved flags into regs->flags location */
|
|
|
|
/* clobbering return ip */
|
|
|
|
movl $__KERNEL_CS, 13*4(%esp)
|
|
|
|
|
|
|
|
movl 12*4(%esp), %eax /* Load ip (1st parameter) */
|
|
|
|
subl $MCOUNT_INSN_SIZE, %eax /* Adjust ip */
|
|
|
|
movl 0x4(%ebp), %edx /* Load parent ip (2nd parameter) */
|
|
|
|
movl function_trace_op, %ecx /* Save ftrace_pos in 3rd parameter */
|
|
|
|
pushl %esp /* Save pt_regs as 4th parameter */
|
|
|
|
|
|
|
|
GLOBAL(ftrace_regs_call)
|
|
|
|
call ftrace_stub
|
|
|
|
|
|
|
|
addl $4, %esp /* Skip pt_regs */
|
|
|
|
movl 14*4(%esp), %eax /* Move flags back into cs */
|
|
|
|
movl %eax, 13*4(%esp) /* Needed to keep addl from modifying flags */
|
|
|
|
movl 12*4(%esp), %eax /* Get return ip from regs->ip */
|
|
|
|
movl %eax, 14*4(%esp) /* Put return ip back for ret */
|
|
|
|
|
|
|
|
popl %ebx
|
|
|
|
popl %ecx
|
|
|
|
popl %edx
|
|
|
|
popl %esi
|
|
|
|
popl %edi
|
|
|
|
popl %ebp
|
|
|
|
popl %eax
|
|
|
|
popl %ds
|
|
|
|
popl %es
|
|
|
|
popl %fs
|
|
|
|
popl %gs
|
|
|
|
addl $8, %esp /* Skip orig_ax and ip */
|
|
|
|
popf /* Pop flags at end (no addl to corrupt flags) */
|
|
|
|
jmp .Lftrace_ret
|
|
|
|
|
|
|
|
popf
|
|
|
|
jmp ftrace_stub
#else /* ! CONFIG_DYNAMIC_FTRACE */

ENTRY(mcount)
|
|
|
|
cmpl $__PAGE_OFFSET, %esp
|
|
|
|
jb ftrace_stub /* Paging not enabled yet? */
|
|
|
|
|
|
|
|
cmpl $ftrace_stub, ftrace_trace_function
|
|
|
|
jnz .Ltrace
|
|
|
|
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
|
|
|
|
cmpl $ftrace_stub, ftrace_graph_return
|
|
|
|
jnz ftrace_graph_caller
|
|
|
|
|
|
|
|
cmpl $ftrace_graph_entry_stub, ftrace_graph_entry
|
|
|
|
jnz ftrace_graph_caller
|
|
|
|
#endif
|
|
|
|
.globl ftrace_stub
|
|
|
|
ftrace_stub:
|
|
|
|
ret
|
|
|
|
|
|
|
|
/* taken from glibc */
|
|
|
|
.Ltrace:
|
|
|
|
pushl %eax
|
|
|
|
pushl %ecx
|
|
|
|
pushl %edx
|
|
|
|
movl 0xc(%esp), %eax
|
|
|
|
movl 0x4(%ebp), %edx
|
|
|
|
subl $MCOUNT_INSN_SIZE, %eax
|
|
|
|
|
|
|
|
call *ftrace_trace_function
|
|
|
|
|
|
|
|
popl %edx
|
|
|
|
popl %ecx
|
|
|
|
popl %eax
|
|
|
|
jmp ftrace_stub
|
|
|
|
END(mcount)
#endif /* CONFIG_DYNAMIC_FTRACE */

EXPORT_SYMBOL(mcount)
#endif /* CONFIG_FUNCTION_TRACER */

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
ENTRY(ftrace_graph_caller)
|
|
|
|
pushl %eax
|
|
|
|
pushl %ecx
|
|
|
|
pushl %edx
|
|
|
|
movl 0xc(%esp), %eax
|
|
|
|
lea 0x4(%ebp), %edx
|
|
|
|
movl (%ebp), %ecx
|
|
|
|
subl $MCOUNT_INSN_SIZE, %eax
|
|
|
|
call prepare_ftrace_return
|
|
|
|
popl %edx
|
|
|
|
popl %ecx
|
|
|
|
popl %eax
|
|
|
|
ret
|
|
|
|
END(ftrace_graph_caller)
.globl return_to_handler
|
|
|
|
return_to_handler:
|
|
|
|
pushl %eax
|
|
|
|
pushl %edx
|
|
|
|
movl %ebp, %eax
|
|
|
|
call ftrace_return_to_handler
|
|
|
|
movl %eax, %ecx
|
|
|
|
popl %edx
|
|
|
|
popl %eax
|
|
|
|
jmp *%ecx
#endif