author    Peter Zijlstra <peterz@infradead.org>  2022-09-15 13:11:35 +0200
committer Peter Zijlstra <peterz@infradead.org>  2022-10-17 16:41:18 +0200
commit    eac828eaef295cd0cc8b58f55fa5c8401fdc2370
tree      6fe1ce228938debd23aea644f731f1505caa8152 /arch/x86/kernel
parent    x86/bpf: Emit call depth accounting if required
x86/ftrace: Remove ftrace_epilogue()
Remove the weird jumps to RET and simply use RET. This then promotes
ftrace_stub() to a real function, which becomes important for kcfi.

Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Link: https://lore.kernel.org/r/20220915111148.719080593@infradead.org
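Why this matters for kcfi: Clang's kcfi (-fsanitize=kcfi) prefixes every
address-taken C function with a type hash and instruments indirect call
sites to verify that hash before the call. ftrace_stub is reachable
through a function pointer in the generic ftrace code, so it needs to be
a real function the compiler can attach that preamble to, rather than a
bare label inside ftrace_epilogue(). Below is a minimal user-space
sketch of the pattern; the names and types (trace_fn, stub,
trace_function) are hypothetical stand-ins, not the kernel's real
definitions:

#include <stdio.h>

/* Stand-in for ftrace_func_t: the pointer type whose hash a
 * kcfi-instrumented indirect call site would check. */
typedef void (*trace_fn)(unsigned long ip, unsigned long parent_ip);

/* Stand-in for ftrace_stub: a real C function, so the compiler can
 * emit a kcfi type-hash preamble for it.  A raw asm label would lack
 * the preamble, and the indirect call below would trap under kcfi. */
static void stub(unsigned long ip, unsigned long parent_ip)
{
	(void)ip;
	(void)parent_ip;
}

static trace_fn trace_function = stub;	/* default: trace nothing */

int main(void)
{
	/* Indirect call site: under kcfi, this is where the callee's
	 * type hash is compared against trace_fn's hash. */
	trace_function(0x1000, 0x2000);
	puts("indirect call through stub ok");
	return 0;
}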
Diffstat (limited to 'arch/x86/kernel')
-rw-r--r--  arch/x86/kernel/ftrace_64.S | 21 ++++++---------------
1 file changed, 6 insertions(+), 15 deletions(-)
diff --git a/arch/x86/kernel/ftrace_64.S b/arch/x86/kernel/ftrace_64.S
index dfeb227de561..a90c55a6b481 100644
--- a/arch/x86/kernel/ftrace_64.S
+++ b/arch/x86/kernel/ftrace_64.S
@@ -172,20 +172,14 @@ SYM_INNER_LABEL(ftrace_call, SYM_L_GLOBAL)
*/
SYM_INNER_LABEL(ftrace_caller_end, SYM_L_GLOBAL)
ANNOTATE_NOENDBR
-
- jmp ftrace_epilogue
+ RET
SYM_FUNC_END(ftrace_caller);
STACK_FRAME_NON_STANDARD_FP(ftrace_caller)
-SYM_FUNC_START(ftrace_epilogue)
-/*
- * This is weak to keep gas from relaxing the jumps.
- */
-SYM_INNER_LABEL_ALIGN(ftrace_stub, SYM_L_WEAK)
+SYM_FUNC_START(ftrace_stub)
UNWIND_HINT_FUNC
- ENDBR
RET
-SYM_FUNC_END(ftrace_epilogue)
+SYM_FUNC_END(ftrace_stub)
SYM_FUNC_START(ftrace_regs_caller)
/* Save the current flags before any operations that can change them */
@@ -262,14 +256,11 @@ SYM_INNER_LABEL(ftrace_regs_caller_jmp, SYM_L_GLOBAL)
popfq
/*
- * As this jmp to ftrace_epilogue can be a short jump
- * it must not be copied into the trampoline.
- * The trampoline will add the code to jump
- * to the return.
+ * The trampoline will add the return.
*/
SYM_INNER_LABEL(ftrace_regs_caller_end, SYM_L_GLOBAL)
ANNOTATE_NOENDBR
- jmp ftrace_epilogue
+ RET
/* Swap the flags with orig_rax */
1: movq MCOUNT_REG_SIZE(%rsp), %rdi
@@ -280,7 +271,7 @@ SYM_INNER_LABEL(ftrace_regs_caller_end, SYM_L_GLOBAL)
/* Restore flags */
popfq
UNWIND_HINT_FUNC
- jmp ftrace_epilogue
+ RET
SYM_FUNC_END(ftrace_regs_caller)
STACK_FRAME_NON_STANDARD_FP(ftrace_regs_caller)
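The new comment above ftrace_regs_caller_end ("The trampoline will add
the return") refers to create_trampoline() in arch/x86/kernel/ftrace.c:
it copies the text between ftrace_regs_caller and ftrace_regs_caller_end
into a freshly allocated trampoline and appends the return itself, so
the RET placed after the _end label in this patch is never part of the
copy. A self-contained sketch of that copy step, with placeholder byte
values rather than real instruction encodings:

#include <stdio.h>
#include <string.h>

/* Stand-in for the bytes in [ftrace_regs_caller, ftrace_regs_caller_end). */
static const unsigned char caller_body[] = { 0x90, 0x90, 0x90 };  /* nops */
static const unsigned char ret_insn[]    = { 0xc3 };              /* ret  */

int main(void)
{
	unsigned char tramp[sizeof(caller_body) + sizeof(ret_insn)];

	/* Copy the body; the RET that sits after the _end label is
	 * deliberately outside the copied range... */
	memcpy(tramp, caller_body, sizeof(caller_body));

	/* ...so the trampoline builder supplies its own return. */
	memcpy(tramp + sizeof(caller_body), ret_insn, sizeof(ret_insn));

	printf("trampoline is %zu bytes\n", sizeof(tramp));
	return 0;
}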