Assign the register variables used in the inline assembly immediately
before the inline assembly block. This ensures the compiler doesn't
treat the assignments as dead code and optimize them away before the
asm statement consumes them.

Signed-off-by: Stephen Boyd <sboyd@xxxxxxxxxxxxxx>
---
 arch/arm/mach-msm/scm.c |    7 +++++--
 1 files changed, 5 insertions(+), 2 deletions(-)

diff --git a/arch/arm/mach-msm/scm.c b/arch/arm/mach-msm/scm.c
index ba57b5a..5eddf54 100644
--- a/arch/arm/mach-msm/scm.c
+++ b/arch/arm/mach-msm/scm.c
@@ -264,13 +264,16 @@ u32 scm_get_version(void)
 {
 	int context_id;
 	static u32 version = -1;
-	register u32 r0 asm("r0") = 0x1 << 8;
-	register u32 r1 asm("r1") = (u32)&context_id;
+	register u32 r0 asm("r0");
+	register u32 r1 asm("r1");
 
 	if (version != -1)
 		return version;
 
 	mutex_lock(&scm_lock);
+
+	r0 = 0x1 << 8;
+	r1 = (u32)&context_id;
 	asm volatile(
 		__asmeq("%0", "r1")
 		__asmeq("%1", "r0")
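
For reference, a minimal compile-only sketch of the pattern the patch moves
to, assuming an ARM EABI target. scm_sketch(), some_lock(), the local u32
typedef and the mov instruction are illustrative stand-ins, not the kernel
code: GCC only guarantees that a local register variable holds its value
while it is used as an asm operand, and the intervening call may clobber
r0-r3 under the ARM calling convention, so the assignments belong
immediately before the asm block.

/*
 * Sketch only: with the old placement (initializers at declaration
 * time), the call below would sit between the assignments and the asm
 * statement, and the compiler would be free to clobber or discard them.
 */
typedef unsigned int u32;		/* stand-in for the kernel's u32 */

extern void some_lock(void);		/* hypothetical: stands in for mutex_lock() */

static u32 scm_sketch(void)
{
	u32 context_id;
	register u32 r0 asm("r0");	/* pinned to r0, as in scm.c */
	register u32 r1 asm("r1");	/* pinned to r1 */

	some_lock();			/* function call may clobber r0-r3 */

	/* Assign right before the asm so the values are still live in r0/r1. */
	r0 = 0x1 << 8;
	r1 = (u32)&context_id;
	asm volatile(
		"mov	r0, r1\n"	/* stand-in for the real smc instruction */
		: "+r" (r0)
		: "r" (r1)
		: "r2", "r3", "memory");

	return r0;
}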