From: David Daney <david.daney@xxxxxxxxxx>

We were doing:

	SRL	$r,$?,16
	ANDI	$r,$r,0xffff

The logical right shift by 16 leaves the upper 16 bits clear, so the
subsequent masking out of those bits is redundant and can safely be
removed.

Signed-off-by: David Daney <david.daney@xxxxxxxxxx>
---
 arch/mips/include/asm/spinlock.h | 4 ----
 1 file changed, 4 deletions(-)

diff --git a/arch/mips/include/asm/spinlock.h b/arch/mips/include/asm/spinlock.h
index 0b1dbd2..78d201f 100644
--- a/arch/mips/include/asm/spinlock.h
+++ b/arch/mips/include/asm/spinlock.h
@@ -71,7 +71,6 @@ static inline void arch_spin_lock(arch_spinlock_t *lock)
 		"	 nop					\n"
 		"	srl	%[my_ticket], %[ticket], 16	\n"
 		"	andi	%[ticket], %[ticket], 0xffff	\n"
-		"	andi	%[my_ticket], %[my_ticket], 0xffff \n"
 		"	bne	%[ticket], %[my_ticket], 4f	\n"
 		"	 subu	%[ticket], %[my_ticket], %[ticket] \n"
 		"2:						\n"
@@ -105,7 +104,6 @@ static inline void arch_spin_lock(arch_spinlock_t *lock)
 		"	beqz	%[my_ticket], 1b		\n"
 		"	 srl	%[my_ticket], %[ticket], 16	\n"
 		"	andi	%[ticket], %[ticket], 0xffff	\n"
-		"	andi	%[my_ticket], %[my_ticket], 0xffff \n"
 		"	bne	%[ticket], %[my_ticket], 4f	\n"
 		"	 subu	%[ticket], %[my_ticket], %[ticket] \n"
 		"2:						\n"
@@ -153,7 +151,6 @@ static inline unsigned int arch_spin_trylock(arch_spinlock_t *lock)
 		"						\n"
 		"1:	ll	%[ticket], %[ticket_ptr]	\n"
 		"	srl	%[my_ticket], %[ticket], 16	\n"
-		"	andi	%[my_ticket], %[my_ticket], 0xffff \n"
 		"	andi	%[now_serving], %[ticket], 0xffff \n"
 		"	bne	%[my_ticket], %[now_serving], 3f	\n"
 		"	 addu	%[ticket], %[ticket], %[inc]	\n"
@@ -178,7 +175,6 @@ static inline unsigned int arch_spin_trylock(arch_spinlock_t *lock)
 		"						\n"
 		"1:	ll	%[ticket], %[ticket_ptr]	\n"
 		"	srl	%[my_ticket], %[ticket], 16	\n"
-		"	andi	%[my_ticket], %[my_ticket], 0xffff \n"
 		"	andi	%[now_serving], %[ticket], 0xffff \n"
 		"	bne	%[my_ticket], %[now_serving], 3f	\n"
 		"	 addu	%[ticket], %[ticket], %[inc]	\n"
-- 
1.7.11.7
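
For reference, a minimal standalone C sketch (illustration only, not part
of the patch) of the identity the removed instructions relied on: a 32-bit
value shifted right logically by 16 already fits in 16 bits, so a further
mask with 0xffff can never change it.

	#include <assert.h>
	#include <stdint.h>
	#include <stdio.h>

	int main(void)
	{
		/* Exhaustively check every 32-bit ticket word: after a
		 * logical right shift by 16 (srl), bits 31..16 are zero,
		 * so a following "andi ..., 0xffff" is a no-op.
		 * Takes a few seconds to run. */
		for (uint64_t x = 0; x <= UINT32_MAX; x++) {
			uint32_t srl    = (uint32_t)x >> 16; /* srl  t, x, 16     */
			uint32_t masked = srl & 0xffffu;     /* andi t, t, 0xffff */
			assert(srl == masked);
		}
		printf("srl by 16 clears the upper 16 bits; the andi is redundant\n");
		return 0;
	}

The same holds on 64-bit MIPS, where srl sign-extends its 32-bit result:
bit 31 of that result is a shifted-in zero, so the upper half of the
register is cleared as well.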