When the MPUSS hits off-mode, the L2 cache is lost. This patch adds the
necessary L2X0 maintenance operations and context restoration to the
low power code.

Signed-off-by: Santosh Shilimkar <santosh.shilimkar@xxxxxx>
Cc: Kevin Hilman <khilman@xxxxxx>
---
arch/arm/mach-omap2/include/mach/omap-secure.h | 5 +
arch/arm/mach-omap2/omap-mpuss-lowpower.c | 38 +++++++++-
arch/arm/mach-omap2/omap4-sar-layout.h | 4 +
arch/arm/mach-omap2/sleep44xx.S | 96 ++++++++++++++++++++++++
4 files changed, 142 insertions(+), 1 deletions(-)
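
Note for reviewers: the rough C sketch below illustrates the kind of helpers
this flow relies on on the omap-mpuss-lowpower.c side; it is not the patch's
code. The L2X0_AUXCTRL_OFFSET and L2X0_PREFETCH_CTRL_OFFSET names are assumed
to come from omap4-sar-layout.h, and the prototypes are repeated only to keep
the sketch self-contained.

/*
 * Illustrative sketch only -- not part of this patch. It shows how the
 * per-CPU L2 low power state (read back at L2X0_SAVE_OFFSET0/1 by the
 * sleep44xx.S code below) and the L2X0 controller context might be
 * recorded in SAR RAM before entering off-mode. The aux/prefetch
 * *_OFFSET names are assumptions.
 */
#include <linux/io.h>
#include <asm/hardware/cache-l2x0.h>

#include "omap4-sar-layout.h"

/* Provided elsewhere in mach-omap2; prototypes repeated for clarity. */
void __iomem *omap4_get_sar_ram_base(void);
void __iomem *omap4_get_l2cache_base(void);

/* Record the targeted L2 state for a CPU; 3 means the L2 content is lost. */
static void l2x0_pwrst_prepare(unsigned int cpu_id, unsigned int save_state)
{
	void __iomem *sar_base = omap4_get_sar_ram_base();
	unsigned int offset = cpu_id ? L2X0_SAVE_OFFSET1 : L2X0_SAVE_OFFSET0;

	__raw_writel(save_state, sar_base + offset);
}

/* Save the L2X0 control registers so they can be restored after wakeup. */
static void save_l2x0_context(void)
{
	void __iomem *sar_base = omap4_get_sar_ram_base();
	void __iomem *l2x0_base = omap4_get_l2cache_base();
	u32 val;

	val = __raw_readl(l2x0_base + L2X0_AUX_CTRL);
	__raw_writel(val, sar_base + L2X0_AUXCTRL_OFFSET);
	val = __raw_readl(l2x0_base + L2X0_PREFETCH_CTRL);
	__raw_writel(val, sar_base + L2X0_PREFETCH_CTRL_OFFSET);
}
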
diff --git a/arch/arm/mach-omap2/sleep44xx.S b/arch/arm/mach-omap2/sleep44xx.S
index 230ab8c..a7cce0b 100644
--- a/arch/arm/mach-omap2/sleep44xx.S
+++ b/arch/arm/mach-omap2/sleep44xx.S
@@ -32,6 +32,9 @@
 ppa_zero_params:
 	.word		0x0
+ppa_por_params:
+	.word		1, 0
+
/*
* =============================
* == CPU suspend finisher ==
@@ -130,6 +133,55 @@ skip_scu_gp_set:
 	mcrne	p15, 0, r0, c1, c0, 1
 	isb
 	dsb
+#ifdef CONFIG_CACHE_L2X0
+	/*
+	 * Clean and invalidate the L2 cache.
+	 * The common cache-l2x0.c functions can't be used here because
+	 * they take spinlocks, and at this point we are out of coherency
+	 * with the data cache disabled. The spinlock implementation uses
+	 * exclusive load/store instructions, which can fail when the data
+	 * cache is disabled, and OMAP4 has no external exclusive monitor
+	 * to fall back on, so the CPU could deadlock.
+	 */
+l2x_clean_inv:
+	bl	omap4_get_sar_ram_base
+	mov	r8, r0				@ SAR RAM base
+	mrc	p15, 0, r5, c0, c0, 5		@ Read MPIDR
+	ands	r5, r5, #0x0f			@ Extract CPU ID
+	ldreq	r0, [r8, #L2X0_SAVE_OFFSET0]	@ CPU0: saved L2 state
+	ldrne	r0, [r8, #L2X0_SAVE_OFFSET1]	@ CPU1: saved L2 state
+	cmp	r0, #3				@ L2 is lost only in off-mode
+	bne	do_WFI
+#ifdef CONFIG_PL310_ERRATA_727915
+	mov	r0, #0x03			@ Disable linefill/WB via debug ctrl
+	mov	r12, #OMAP4_MON_L2X0_DBG_CTRL_INDEX
+	DO_SMC
+#endif
+	bl	omap4_get_l2cache_base
+	mov	r2, r0
+	ldr	r0, =0xffff
+	str	r0, [r2, #L2X0_CLEAN_INV_WAY]	@ Clean+inv all 16 ways
+wait:
+	ldr	r0, [r2, #L2X0_CLEAN_INV_WAY]
+	ldr	r1, =0xffff
+	ands	r0, r0, r1
+	bne	wait				@ Poll until the operation completes
+#ifdef CONFIG_PL310_ERRATA_727915
+	mov	r0, #0x00			@ Re-enable linefill/WB
+	mov	r12, #OMAP4_MON_L2X0_DBG_CTRL_INDEX
+	DO_SMC
+#endif
+l2x_sync:
+	bl	omap4_get_l2cache_base
+	mov	r2, r0
+	mov	r0, #0x0
+	str	r0, [r2, #L2X0_CACHE_SYNC]	@ Issue a cache sync
+sync:
+	ldr	r0, [r2, #L2X0_CACHE_SYNC]
+	ands	r0, r0, #0x1
+	bne	sync				@ Wait for the sync to drain
+#endif
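
For readability, the L2 maintenance sequence added above is roughly the
following when expressed in C. This is an illustration only (the function
name is made up): the real code has to stay in assembly because it runs
with the data cache and coherency disabled, and the MPIDR/SAR state check
and the errata 727915 debug-control SMC calls are omitted here.

#include <linux/io.h>
#include <asm/hardware/cache-l2x0.h>

void __iomem *omap4_get_l2cache_base(void);

/* C equivalent of the l2x_clean_inv/l2x_sync assembly above (sketch). */
static void l2x0_clean_inv_all_sketch(void)
{
	void __iomem *l2x0_base = omap4_get_l2cache_base();

	/* Clean and invalidate all 16 ways (way mask 0xffff), then poll. */
	__raw_writel(0xffff, l2x0_base + L2X0_CLEAN_INV_WAY);
	while (__raw_readl(l2x0_base + L2X0_CLEAN_INV_WAY) & 0xffff)
		;

	/* Issue a cache sync and wait for the busy bit to clear. */
	__raw_writel(0x0, l2x0_base + L2X0_CACHE_SYNC);
	while (__raw_readl(l2x0_base + L2X0_CACHE_SYNC) & 0x1)
		;
}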