author	David S. Miller <davem@davemloft.net>	2010-04-07 04:59:46 +0200
committer	David S. Miller <davem@davemloft.net>	2010-04-13 07:37:15 +0200
commit	a71d1d6bb1b26e566e5c06c37857f4cdc1664780 (patch)
tree	5b982f3bd77f47ef6ebbd2a9529a4efcb1df106e /arch/sparc
parent	sparc64: Use a separate counter for timer interrupts and NMI checks, like x86. (diff)
sparc64: Give a stack frame to the ftrace call sites.
It's the only way we'll be able to implement the function graph tracer properly. A positive is that we no longer have to worry about the linker over-optimizing the tail call, since we don't use a tail call any more.

Signed-off-by: David S. Miller <davem@davemloft.net>
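The patch leans on the SPARC register-window convention: a single save instruction allocates both a fresh register window and a stack frame, and ret/restore unwinds both in two instructions. A minimal sketch of that convention (illustrative only; example and some_callee are hypothetical names, and the -128 frame size mirrors the save %sp, -128, %sp used in the patch):

	.globl	example
	.type	example,#function
example:
	save	%sp, -128, %sp	/* new register window plus stack frame;
				 * the caller's %o7 is visible here as %i7 */
	call	some_callee	/* call writes its own address into %o7 */
	 nop			/* branch delay slot */
	ret			/* synthetic for jmpl %i7 + 8, %g0 */
	 restore		/* delay slot: pop window and frame */
	.size	example,.-example

With a frame like this in place the function graph tracer can hook the return path, which is impossible while mcount merely tail-calls the tracer out of the traced function's own frame.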
Diffstat (limited to 'arch/sparc')
 arch/sparc/lib/mcount.S | 31 ++++++++++++++++---------------
 1 file changed, 16 insertions(+), 15 deletions(-)
diff --git a/arch/sparc/lib/mcount.S b/arch/sparc/lib/mcount.S
index 73ed0f3aaa0c..153c80e62cf1 100644
--- a/arch/sparc/lib/mcount.S
+++ b/arch/sparc/lib/mcount.S
@@ -33,9 +33,13 @@ mcount:
 	or		%g2, %lo(ftrace_stub), %g2
 	cmp		%g1, %g2
 	be,pn		%icc, 1f
-	 mov		%i7, %o1
-	jmpl		%g1, %g0
-	 mov		%o7, %o0
+	 mov		%i7, %g2
+	save		%sp, -128, %sp
+	mov		%g2, %o1
+	jmpl		%g1, %o7
+	 mov		%i7, %o0
+	ret
+	 restore
 	/* not reached */
 1:
 #endif
@@ -57,21 +61,18 @@ ftrace_stub:
 	.type		ftrace_caller,#function
 ftrace_caller:
 	sethi		%hi(function_trace_stop), %g1
-	mov		%i7, %o1
-	lduw		[%g1 + %lo(function_trace_stop)], %g2
-	brnz,pn		%g2, ftrace_stub
-	 mov		%o7, %o0
+	mov		%i7, %g2
+	lduw		[%g1 + %lo(function_trace_stop)], %g3
+	brnz,pn		%g3, ftrace_stub
+	 nop
+	save		%sp, -128, %sp
+	mov		%g2, %o1
 	.globl		ftrace_call
 ftrace_call:
-	/* If the final kernel link ever turns on relaxation, we'll need
-	 * to do something about this tail call.  Otherwise the linker
-	 * will rewrite the call into a branch and nop out the move
-	 * instruction.
-	 */
 	call		ftrace_stub
-	 mov		%o0, %o7
-	retl
-	 nop
+	 mov		%i7, %o0
+	ret
+	 restore
 	.size		ftrace_call,.-ftrace_call
 	.size		ftrace_caller,.-ftrace_caller
 #endif
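Read back to back, the rewritten ftrace_caller works as follows (a sketch of the post-patch code from the hunk above; the comments are added here and are not part of the patch):

ftrace_caller:
	sethi	%hi(function_trace_stop), %g1
	mov	%i7, %g2		/* parent's return address; a global
					 * register survives the window switch */
	lduw	[%g1 + %lo(function_trace_stop)], %g3
	brnz,pn	%g3, ftrace_stub	/* tracing stopped: bail out */
	 nop				/* branch delay slot */
	save	%sp, -128, %sp		/* real stack frame; the old %o7 (the
					 * call site inside the traced
					 * function) is now %i7 */
	mov	%g2, %o1		/* second argument: parent ip */
ftrace_call:
	call	ftrace_stub		/* this site is patched by ftrace at
					 * runtime to point at the tracer */
	 mov	%i7, %o0		/* delay slot, first argument: the
					 * traced function's ip */
	ret
	 restore

The mcount hunk follows the same pattern, except that the tracer's address is loaded into %g1 and called indirectly with jmpl %g1, %o7. And since ftrace_call is now a genuine call/return rather than a tail call, the deleted comment about linker relaxation rewriting the tail call no longer applies.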