Add vmalloc and kernel address checks to prevent invalid accesses while
unwinding the stack.

Closes: https://lore.kernel.org/all/20230926105949.1025995-2-twuufnxlz@xxxxxxxxx/
Fixes: 5d8544e2d007 ("RISC-V: Generic library routines and assembly")
Reported-and-tested-by: syzbot+8d2757d62d403b2d9275@xxxxxxxxxxxxxxxxxxxxxxxxx
Link: https://lore.kernel.org/all/0000000000000170df0605ccf91a@xxxxxxxxxx/T/
Signed-off-by: Edward AD <twuufnxlz@xxxxxxxxx>
---
 arch/riscv/kernel/stacktrace.c | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/arch/riscv/kernel/stacktrace.c b/arch/riscv/kernel/stacktrace.c
index 64a9c093aef9..031a4a35c1d0 100644
--- a/arch/riscv/kernel/stacktrace.c
+++ b/arch/riscv/kernel/stacktrace.c
@@ -54,6 +54,9 @@ void notrace walk_stackframe(struct task_struct *task, struct pt_regs *regs,
 			break;
 		/* Unwind stack frame */
 		frame = (struct stackframe *)fp - 1;
+		if ((is_vmalloc_addr(frame) && !pfn_valid(page_to_pfn(vmalloc_to_page(frame)))) ||
+		    !virt_addr_valid(frame))
+			break;
 		sp = fp;
 		if (regs && (regs->epc == pc) && (frame->fp & 0x7)) {
 			fp = frame->ra;
-- 
2.25.1
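
For readability, here is a minimal standalone sketch of the validation the hunk
above performs, pulled out into a helper. The helper name frame_addr_invalid()
is hypothetical and not part of the patch; the kernel APIs it calls
(is_vmalloc_addr(), vmalloc_to_page(), page_to_pfn(), pfn_valid(),
virt_addr_valid()) are the same ones the added condition relies on.

	/*
	 * Sketch only: mirrors the condition added in walk_stackframe().
	 * Returns true when the candidate frame pointer should not be
	 * dereferenced.
	 */
	#include <linux/mm.h>
	#include <linux/vmalloc.h>

	static bool frame_addr_invalid(const void *frame)
	{
		/*
		 * Frame lies in the vmalloc area (e.g. a stack allocated
		 * with CONFIG_VMAP_STACK) but has no valid backing page.
		 */
		if (is_vmalloc_addr(frame) &&
		    !pfn_valid(page_to_pfn(vmalloc_to_page(frame))))
			return true;

		/* Frame is not a valid linear-map kernel address. */
		return !virt_addr_valid(frame);
	}

The intent is that the first clause covers stacks living in vmalloc space,
while the second rejects anything outside the kernel's linear mapping, so the
unwinder bails out instead of dereferencing a bogus frame pointer.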