author		Peter Zijlstra <peterz@infradead.org>	2022-03-08 18:30:31 +0300
committer	Peter Zijlstra <peterz@infradead.org>	2022-03-15 12:32:37 +0300
commit		e52fc2cf3f662828cc0d51c4b73bed73ad275fce
tree		66c13647656492fd38701ff5bf2adff25e6e4b20	/arch/x86/kernel/ftrace_64.S
parent		d15cb3dab1e4f00e29599a4f5e1f6678a530d270
download	linux-e52fc2cf3f662828cc0d51c4b73bed73ad275fce.tar.xz
x86/ibt,ftrace: Make function-graph play nice
The return trampoline must not use an indirect branch to return; while
that preserves the RSB, it is fundamentally incompatible with IBT.
Instead use a retpoline-like ROP gadget that defeats IBT while not
unbalancing the RSB.

And since ftrace_stub is no longer a plain RET, don't use it to copy
from. Since RET is a trivial instruction, poke it directly.

Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Acked-by: Josh Poimboeuf <jpoimboe@redhat.com>
Link: https://lore.kernel.org/r/20220308154318.347296408@infradead.org
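To illustrate the gadget shape the message describes, here is a minimal
standalone sketch (not the kernel code itself; the numeric label is ours,
the real code below uses .Ldo_rop): a direct CALL pushes the address of
the following instruction onto both the stack and the RSB, the gadget
overwrites that stack slot with the real target, and the RET then pops
the matching RSB entry, so call/return predictions stay balanced and no
indirect JMP is ever executed.

	/*
	 * Sketch only: branch to the address in %rdi without an
	 * indirect JMP, keeping the RSB balanced.
	 */
	call	1f		/* pushes &int3 on the stack and the RSB  */
	int3			/* the RSB predicts the RET returns here; */
				/* speculation that does so hits the trap */
1:
	mov	%rdi, (%rsp)	/* overwrite return slot with real target */
	ret			/* pops the RSB entry the CALL pushed and */
				/* transfers control to *%rdi             */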
Diffstat (limited to 'arch/x86/kernel/ftrace_64.S')
-rw-r--r--	arch/x86/kernel/ftrace_64.S	21
1 file changed, 17 insertions(+), 4 deletions(-)
diff --git a/arch/x86/kernel/ftrace_64.S b/arch/x86/kernel/ftrace_64.S
index 11ac028e30e4..e32b5cd6dc15 100644
--- a/arch/x86/kernel/ftrace_64.S
+++ b/arch/x86/kernel/ftrace_64.S
@@ -176,10 +176,10 @@ SYM_FUNC_END(ftrace_caller);
 
 SYM_FUNC_START(ftrace_epilogue)
 /*
  * This is weak to keep gas from relaxing the jumps.
- * It is also used to copy the RET for trampolines.
  */
 SYM_INNER_LABEL_ALIGN(ftrace_stub, SYM_L_WEAK)
 	UNWIND_HINT_FUNC
+	ENDBR
 	RET
 SYM_FUNC_END(ftrace_epilogue)
@@ -284,6 +284,7 @@ SYM_FUNC_START(__fentry__)
 	jnz trace
 
 SYM_INNER_LABEL(ftrace_stub, SYM_L_GLOBAL)
+	ENDBR
 	RET
 
 trace:
@@ -307,7 +308,7 @@ EXPORT_SYMBOL(__fentry__)
 
 #ifdef CONFIG_FUNCTION_GRAPH_TRACER
 SYM_FUNC_START(return_to_handler)
-	subq $24, %rsp
+	subq $16, %rsp
 
 	/* Save the return values */
 	movq %rax, (%rsp)
@@ -319,7 +320,19 @@ SYM_FUNC_START(return_to_handler)
 	movq %rax, %rdi
 	movq 8(%rsp), %rdx
 	movq (%rsp), %rax
-	addq $24, %rsp
-	JMP_NOSPEC rdi
+
+	addq $16, %rsp
+	/*
+	 * Jump back to the old return address. This cannot be JMP_NOSPEC rdi
+	 * since IBT would demand that contain ENDBR, which simply isn't so for
+	 * return addresses. Use a retpoline here to keep the RSB balanced.
+	 */
+	ANNOTATE_INTRA_FUNCTION_CALL
+	call .Ldo_rop
+	int3
+.Ldo_rop:
+	mov %rdi, (%rsp)
+	UNWIND_HINT_FUNC
+	RET
 SYM_FUNC_END(return_to_handler)
 #endif
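For background on why JMP_NOSPEC rdi cannot work here, a hedged sketch
(assuming CET-IBT is enabled; the function name is hypothetical): IBT
requires the target of every indirect CALL or JMP to begin with ENDBR
and raises a control-protection fault (#CP) otherwise, while RET is not
tracked by IBT. A return address points just past a call site, where no
ENDBR exists, so it is reachable by RET but not by an indirect JMP.

	/* Hypothetical IBT-legal indirect-branch target. */
SYM_FUNC_START(ibt_target)
	ENDBR			/* indirect jmp/call may land here */
	RET
SYM_FUNC_END(ibt_target)

	lea	ibt_target(%rip), %rax
	jmp	*%rax		/* OK: target starts with ENDBR */

	call	ibt_target	/* direct call: not IBT-tracked; the return */
				/* address after this call has no ENDBR, so */
				/* "jmp *%reg" to it would #CP-fault, while */
				/* a RET to the very same address is fine   */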