author | David S. Miller <davem@davemloft.net> | 2010-04-13 07:35:24 +0200 |
---|---|---|
committer | David S. Miller <davem@davemloft.net> | 2010-04-13 07:35:24 +0200 |
commit | 63b754957371c23b7515399a977a2e1d361a036c (patch) | |
tree | d1e7cd39f91215970300adfffcdf96b5f7484c43 /arch/sparc/lib/mcount.S | |
parent | sparc64: Adjust __raw_local_irq_save() to cooperate in NMIs. (diff) | |
sparc64: Add HAVE_FUNCTION_TRACE_MCOUNT_TEST and tidy up.
Check function_trace_stop at ftrace_caller.
Toss mcount_call and the dummy call of ftrace_stub; they are unnecessary.
Document problems we'll have if the final kernel image link
ever turns on relaxation.
Properly size 'ftrace_call' so it looks right when inspecting
instructions under gdb et al.
Signed-off-by: David S. Miller <davem@davemloft.net>
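The HAVE_FUNCTION_TRACE_MCOUNT_TEST flag tells the ftrace core that the architecture's mcount/ftrace_caller entry code tests function_trace_stop itself, so the core does not need to interpose a C wrapper that performs the check on every traced call. Below is a minimal, self-contained userspace sketch of that wrapper-style check; trace_func, main and the sample addresses are made up for illustration, and this is not the kernel's actual implementation.

	/* Illustrative sketch only: mimics the generic "test before tracing"
	 * wrapper that an arch avoids by defining HAVE_FUNCTION_TRACE_MCOUNT_TEST
	 * and doing the test in its mcount/ftrace_caller assembly instead.
	 */
	#include <stdio.h>

	static int function_trace_stop;          /* non-zero: suppress tracing */

	static void trace_func(unsigned long ip, unsigned long parent_ip)
	{
		printf("trace: ip=%#lx caller=%#lx\n", ip, parent_ip);
	}

	/* Generic fallback: every traced call funnels through this check. */
	static void ftrace_test_stop_func(unsigned long ip, unsigned long parent_ip)
	{
		if (function_trace_stop)
			return;
		trace_func(ip, parent_ip);
	}

	int main(void)
	{
		ftrace_test_stop_func(0x1000, 0x2000);   /* traced */
		function_trace_stop = 1;
		ftrace_test_stop_func(0x1000, 0x2000);   /* suppressed */
		return 0;
	}

With the arch flag set, the brnz,pn on function_trace_stop in the patch below performs the same early-out directly in the entry stub, avoiding the extra C-level call.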
Diffstat
-rw-r--r-- | arch/sparc/lib/mcount.S | 22 |
1 file changed, 15 insertions, 7 deletions
diff --git a/arch/sparc/lib/mcount.S b/arch/sparc/lib/mcount.S
index 24b8b12deed2..7047997be0eb 100644
--- a/arch/sparc/lib/mcount.S
+++ b/arch/sparc/lib/mcount.S
@@ -96,13 +96,12 @@ mcount:
 #endif
 #ifdef CONFIG_FUNCTION_TRACER
 #ifdef CONFIG_DYNAMIC_FTRACE
-	mov		%o7, %o0
-	.globl		mcount_call
-mcount_call:
-	call		ftrace_stub
-	 mov		%o0, %o7
+	/* Do nothing, the retl/nop below is all we need. */
 #else
-	sethi		%hi(ftrace_trace_function), %g1
+	sethi		%hi(function_trace_stop), %g1
+	lduw		[%g1 + %lo(function_trace_stop)], %g2
+	brnz,pn		%g2, 1f
+	 sethi		%hi(ftrace_trace_function), %g1
 	sethi		%hi(ftrace_stub), %g2
 	ldx		[%g1 + %lo(ftrace_trace_function)], %g1
 	or		%g2, %lo(ftrace_stub), %g2
@@ -131,14 +130,23 @@ ftrace_stub:
 	.globl		ftrace_caller
 	.type		ftrace_caller,#function
 ftrace_caller:
+	sethi		%hi(function_trace_stop), %g1
 	mov		%i7, %o1
-	mov		%o7, %o0
+	lduw		[%g1 + %lo(function_trace_stop)], %g2
+	brnz,pn		%g2, ftrace_stub
+	 mov		%o7, %o0
 	.globl		ftrace_call
 ftrace_call:
+	/* If the final kernel link ever turns on relaxation, we'll need
+	 * to do something about this tail call.  Otherwise the linker
+	 * will rewrite the call into a branch and nop out the move
+	 * instruction.
+	 */
 	call		ftrace_stub
 	 mov		%o0, %o7
 	retl
 	 nop
+	.size		ftrace_call,.-ftrace_call
 	.size		ftrace_caller,.-ftrace_caller
 #endif
 #endif