From: "Madhavan T. Venkataraman" <madvenka@xxxxxxxxxxxxxxxxxxx> Add unwind hints to the following: - Ftrace entry code - Interrupt and Exception handlers - Kretprobe trampoline Signed-off-by: Madhavan T. Venkataraman <madvenka@xxxxxxxxxxxxxxxxxxx> --- arch/arm64/kernel/entry-ftrace.S | 23 +++++++++++++++++++ arch/arm64/kernel/entry.S | 3 +++ arch/arm64/kernel/probes/kprobes_trampoline.S | 3 +++ 3 files changed, 29 insertions(+) diff --git a/arch/arm64/kernel/entry-ftrace.S b/arch/arm64/kernel/entry-ftrace.S index e535480a4069..6d68833e8cec 100644 --- a/arch/arm64/kernel/entry-ftrace.S +++ b/arch/arm64/kernel/entry-ftrace.S @@ -11,6 +11,7 @@ #include <asm/assembler.h> #include <asm/ftrace.h> #include <asm/insn.h> +#include <asm/unwind_hints.h> #ifdef CONFIG_DYNAMIC_FTRACE_WITH_REGS /* @@ -95,7 +96,14 @@ SYM_CODE_START(ftrace_common) mov x3, sp // regs SYM_INNER_LABEL(ftrace_call, SYM_L_GLOBAL) + /* + * Tracer functions are patched at ftrace_stub. Stack traces + * taken from tracer functions will end up here. Place an + * unwind hint based on the stackframe setup in ftrace_regs_entry. + */ bl ftrace_stub +SYM_INNER_LABEL(ftrace_call_entry, SYM_L_GLOBAL) + UNWIND_HINT_REGS PT_REGS_SIZE #ifdef CONFIG_FUNCTION_GRAPH_TRACER SYM_INNER_LABEL(ftrace_graph_call, SYM_L_GLOBAL) // ftrace_graph_caller(); @@ -134,10 +142,25 @@ SYM_CODE_START(ftrace_graph_caller) add x1, sp, #S_LR // parent_ip (callsite's LR) ldr x2, [sp, #PT_REGS_SIZE] // parent fp (callsite's FP) bl prepare_ftrace_return +SYM_INNER_LABEL(ftrace_graph_caller_entry, SYM_L_GLOBAL) + UNWIND_HINT_REGS PT_REGS_SIZE b ftrace_common_return SYM_CODE_END(ftrace_graph_caller) #endif +/* + * ftrace_regs_entry() sets up two stackframes - one for the callsite and + * one for the ftrace entry code. Unwind hints have been placed for the + * ftrace entry code above. We need an unwind hint for the callsite. Callsites + * are numerous. But the unwind hint required for all the callsites is the + * same. Define a dummy function here with the callsite unwind hint for the + * benefit of the unwinder. 
+ */ +SYM_CODE_START(ftrace_callsite) + UNWIND_HINT_FTRACE 16 // for the callsite + ret +SYM_CODE_END(ftrace_callsite) + #else /* CONFIG_DYNAMIC_FTRACE_WITH_REGS */ /* diff --git a/arch/arm64/kernel/entry.S b/arch/arm64/kernel/entry.S index ede028dee81b..95d5f3c08aa1 100644 --- a/arch/arm64/kernel/entry.S +++ b/arch/arm64/kernel/entry.S @@ -28,6 +28,7 @@ #include <asm/thread_info.h> #include <asm/asm-uaccess.h> #include <asm/unistd.h> +#include <asm/unwind_hints.h> .macro clear_gp_regs .irp n,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29 @@ -560,6 +561,7 @@ SYM_CODE_START_LOCAL(el\el\ht\()_\regsize\()_\label) .if \el == 0 b ret_to_user .else + UNWIND_HINT_REGS PT_REGS_SIZE b ret_to_kernel .endif SYM_CODE_END(el\el\ht\()_\regsize\()_\label) @@ -894,6 +896,7 @@ SYM_FUNC_START(call_on_irq_stack) /* Move to the new stack and call the function there */ mov sp, x16 blr x1 + UNWIND_HINT_IRQ 16 /* * Restore the SP from the FP, and restore the FP and LR from the frame diff --git a/arch/arm64/kernel/probes/kprobes_trampoline.S b/arch/arm64/kernel/probes/kprobes_trampoline.S index 9a6499bed58b..847cbb81ca33 100644 --- a/arch/arm64/kernel/probes/kprobes_trampoline.S +++ b/arch/arm64/kernel/probes/kprobes_trampoline.S @@ -6,6 +6,7 @@ #include <linux/linkage.h> #include <asm/asm-offsets.h> #include <asm/assembler.h> +#include <asm/unwind_hints.h> .text @@ -71,6 +72,8 @@ SYM_CODE_START(__kretprobe_trampoline) mov x0, sp bl trampoline_probe_handler + UNWIND_HINT_REGS PT_REGS_SIZE + /* * Replace trampoline address in lr with actual orig_ret_addr return * address. -- 2.25.1
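
The UNWIND_HINT_* annotations above follow the objtool-style pattern of
recording, for a given code address, where the saved register state sits
relative to SP and what kind of frame it is. Below is a minimal sketch of
what such a macro could expand to, assuming an x86-objtool-like record
emitted into a discarded section; the macro name, section name and field
layout are illustrative assumptions, not the actual definitions in this
series' asm/unwind_hints.h.

	/*
	 * Illustrative only: record "at this address, SP + sp_offset holds
	 * saved state of the given type" for the unwinder (or an offline
	 * checker) to consume.
	 */
	.macro	unwind_hint_sketch type:req, sp_offset=0
.Lhint_\@:
	.pushsection	.discard.unwind_hints
	.long	.Lhint_\@ - .		// code location being annotated
	.short	\sp_offset		// SP offset of the saved register state
	.byte	\type			// kind of frame (regs, irq, ftrace, ...)
	.balign	4
	.popsection
	.endm

In such a scheme, an annotation like UNWIND_HINT_REGS PT_REGS_SIZE would be
a thin wrapper that passes a "regs" type constant together with the pt_regs
frame size, and the unwinder compares the recorded hint against the frame it
computes when it reaches the annotated address.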