6.12-stable review patch.  If anyone has any objections, please let me know.

------------------

From: Uros Bizjak <ubizjak@xxxxxxxxx>

[ Upstream commit 25cf4fbb596d730476afcc0fb87a9d708db14078 ]

The x86_32 __arch_{,try_}cmpxchg64_emu() macros use a CALL instruction
inside an asm statement. Use the ALT_OUTPUT_SP() macro to add the
required dependence on the %esp register.

Fixes: 79e1dd05d1a2 ("x86: Provide an alternative() based cmpxchg64()")
Signed-off-by: Uros Bizjak <ubizjak@xxxxxxxxx>
Signed-off-by: Peter Zijlstra (Intel) <peterz@xxxxxxxxxxxxx>
Link: https://lkml.kernel.org/r/20241103160954.3329-2-ubizjak@xxxxxxxxx
Signed-off-by: Sasha Levin <sashal@xxxxxxxxxx>
---
 arch/x86/include/asm/cmpxchg_32.h | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/arch/x86/include/asm/cmpxchg_32.h b/arch/x86/include/asm/cmpxchg_32.h
index 62cef2113ca74..fd1282a783ddb 100644
--- a/arch/x86/include/asm/cmpxchg_32.h
+++ b/arch/x86/include/asm/cmpxchg_32.h
@@ -94,7 +94,7 @@ static __always_inline bool __try_cmpxchg64_local(volatile u64 *ptr, u64 *oldp,
 	asm volatile(ALTERNATIVE(_lock_loc				\
 				 "call cmpxchg8b_emu",			\
 				 _lock "cmpxchg8b %a[ptr]", X86_FEATURE_CX8) \
-		     : "+a" (o.low), "+d" (o.high)			\
+		     : ALT_OUTPUT_SP("+a" (o.low), "+d" (o.high))	\
 		     : "b" (n.low), "c" (n.high), [ptr] "S" (_ptr)	\
 		     : "memory");					\
 									\
@@ -123,8 +123,8 @@ static __always_inline u64 arch_cmpxchg64_local(volatile u64 *ptr, u64 old, u64
 				 "call cmpxchg8b_emu",			\
 				 _lock "cmpxchg8b %a[ptr]", X86_FEATURE_CX8) \
 		     CC_SET(e)						\
-		     : CC_OUT(e) (ret),					\
-		       "+a" (o.low), "+d" (o.high)			\
+		     : ALT_OUTPUT_SP(CC_OUT(e) (ret),			\
+				     "+a" (o.low), "+d" (o.high))	\
 		     : "b" (n.low), "c" (n.high), [ptr] "S" (_ptr)	\
 		     : "memory");					\
 									\
-- 
2.43.0
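
For reviewers who don't have the ALT_OUTPUT_SP() definition at hand: below is
a minimal sketch of the constraint machinery this patch relies on, written
from memory of arch/x86/include/asm/asm.h and alternative.h rather than
copied from the 6.12 tree, so please verify against the actual headers. The
idea is that any asm statement containing a CALL must list the stack pointer
as an output operand so the compiler keeps the frame set up before emitting
it:

	/* asm.h: the stack pointer exposed as a register variable */
	register unsigned long current_stack_pointer asm(_ASM_SP);

	/*
	 * Output constraint for any inline asm that contains a CALL:
	 * "+r" tells the compiler the asm reads and writes the stack
	 * pointer, so the statement cannot be moved before the frame
	 * is established.
	 */
	#define ASM_CALL_CONSTRAINT	"+r" (current_stack_pointer)

	/* alternative.h: prepend that constraint to an output operand list */
	#define ALT_OUTPUT_SP(...)	ASM_CALL_CONSTRAINT, ## __VA_ARGS__

With that, the "call cmpxchg8b_emu" alternative in these macros carries the
same %esp dependence that other CALL-containing asm statements in the tree
already declare.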