Rewrite arch_spin_lock() and arch_spin_lock_flags() to not re-enable
and disable the PSW_SM_I interrupt flag too often.

Signed-off-by: Helge Deller <deller@xxxxxx>

diff --git a/arch/parisc/include/asm/spinlock.h b/arch/parisc/include/asm/spinlock.h
index 197d2247e4db..51582eacb7ec 100644
--- a/arch/parisc/include/asm/spinlock.h
+++ b/arch/parisc/include/asm/spinlock.h
@@ -10,25 +10,34 @@
 static inline int arch_spin_is_locked(arch_spinlock_t *x)
 {
 	volatile unsigned int *a = __ldcw_align(x);
+	smp_mb();
 	return *a == 0;
 }
 
-#define arch_spin_lock(lock) arch_spin_lock_flags(lock, 0)
+static inline void arch_spin_lock(arch_spinlock_t *x)
+{
+	volatile unsigned int *a;
+
+	a = __ldcw_align(x);
+	while (__ldcw(a) == 0)
+		while (*a == 0)
+			cpu_relax();
+}
 
 static inline void arch_spin_lock_flags(arch_spinlock_t *x,
 					 unsigned long flags)
 {
 	volatile unsigned int *a;
+	unsigned long flags_dis;
 
 	a = __ldcw_align(x);
-	while (__ldcw(a) == 0)
+	while (__ldcw(a) == 0) {
+		local_save_flags(flags_dis);
+		local_irq_restore(flags);
 		while (*a == 0)
-			if (flags & PSW_SM_I) {
-				local_irq_enable();
-				cpu_relax();
-				local_irq_disable();
-			} else
-				cpu_relax();
+			cpu_relax();
+		local_irq_restore(flags_dis);
+	}
 }
 #define arch_spin_lock_flags arch_spin_lock_flags
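
For illustration only, below is a minimal userspace C sketch of the same
locking pattern: attempt the atomic acquire, and on contention save the
current (disabled) flags once, restore the caller's pre-irqsave flags so
interrupts can be serviced while read-spinning on the lock word, then put
the disabled state back before the next atomic attempt. This is not the
kernel code above: toy_spinlock_t, fake_irq_save_flags(),
fake_irq_restore() and cpu_relax_hint() are made-up stand-ins for the
parisc primitives, and the lock polarity is inverted (here 1 means held,
whereas ldcw treats 0 as locked).

#include <stdatomic.h>
#include <stdio.h>

/* Stand-ins for local_save_flags()/local_irq_restore(); assumptions for
 * illustration only -- in the kernel these manipulate the PSW_SM_I bit. */
static unsigned long fake_irq_flags = 1;	/* 1 = "interrupts enabled" */
static unsigned long fake_irq_save_flags(void) { return fake_irq_flags; }
static void fake_irq_restore(unsigned long f) { fake_irq_flags = f; }
static void cpu_relax_hint(void) { }		/* stand-in for cpu_relax() */

typedef struct { atomic_int locked; } toy_spinlock_t;	/* 0 = free, 1 = held */

static void toy_spin_lock_flags(toy_spinlock_t *l, unsigned long caller_flags)
{
	/* Atomic acquire attempt; on contention, spin with the caller's
	 * pre-irqsave flags (possibly interrupts enabled), then restore
	 * the disabled state before retrying -- the same shape as the
	 * rewritten arch_spin_lock_flags() above. */
	while (atomic_exchange_explicit(&l->locked, 1, memory_order_acquire)) {
		unsigned long flags_dis = fake_irq_save_flags();
		fake_irq_restore(caller_flags);
		while (atomic_load_explicit(&l->locked, memory_order_relaxed))
			cpu_relax_hint();	/* cheap read-only spin */
		fake_irq_restore(flags_dis);
	}
}

static void toy_spin_unlock(toy_spinlock_t *l)
{
	atomic_store_explicit(&l->locked, 0, memory_order_release);
}

int main(void)
{
	toy_spinlock_t l = { .locked = 0 };
	unsigned long flags = fake_irq_save_flags();	/* like spin_lock_irqsave() */

	fake_irq_restore(0);				/* "disable interrupts" */
	toy_spin_lock_flags(&l, flags);
	puts("lock acquired with interrupts nominally disabled");
	toy_spin_unlock(&l);
	fake_irq_restore(flags);			/* like spin_unlock_irqrestore() */
	return 0;
}

The point of the structure, as in the patch, is that the flags are saved
and restored once per contention round around the read-only spin, instead
of toggling the interrupt state on every iteration of the inner loop.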