commit d6ffe6067a54972564552ea45d320fb98db1ac5e upstream.

Some architectures define their own arch_test_bit and they also need
arch_test_bit_acquire, otherwise they won't compile.  We also clean up
the code by using the generic test_bit if that is equivalent to the
arch-specific version.

Signed-off-by: Mikulas Patocka <mpatocka@xxxxxxxxxx>
Cc: stable@xxxxxxxxxxxxxxx
Fixes: 8238b4579866 ("wait_on_bit: add an acquire memory barrier")
Signed-off-by: Linus Torvalds <torvalds@xxxxxxxxxxxxxxxxxxxx>
---
 arch/alpha/include/asm/bitops.h   |  7 +++++++
 arch/arc/include/asm/bitops.h     |  7 +++++++
 arch/frv/include/asm/bitops.h     |  7 +++++++
 arch/h8300/include/asm/bitops.h   |  3 ++-
 arch/hexagon/include/asm/bitops.h | 15 +++++++++++++++
 arch/ia64/include/asm/bitops.h    |  7 +++++++
 arch/m68k/include/asm/bitops.h    |  6 ++++++
 arch/mn10300/include/asm/bitops.h |  7 +++++++
 arch/s390/include/asm/bitops.h    |  7 +++++++
 arch/sh/include/asm/bitops-op32.h |  7 +++++++
 10 files changed, 72 insertions(+), 1 deletion(-)
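
[ Note, not part of the patch itself: each per-architecture
  test_bit_acquire() added below follows the same generic fallback
  pattern, a plain load of the containing word through
  smp_load_acquire().  A minimal sketch of that pattern, using the name
  generic_test_bit_acquire purely for illustration:

	static __always_inline bool
	generic_test_bit_acquire(unsigned long nr,
				 const volatile unsigned long *addr)
	{
		/* Select the word that holds bit 'nr'. */
		unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);

		/* Load that word with acquire semantics, extract the bit. */
		return 1UL & (smp_load_acquire(p) >> (nr & (BITS_PER_LONG-1)));
	}

  The acquire load pairs with a release on the side that clears the bit,
  which is the kind of ordering the wait_on_bit() change in the Fixes:
  commit relies on. ]
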
Index: linux-stable/arch/alpha/include/asm/bitops.h
===================================================================
--- linux-stable.orig/arch/alpha/include/asm/bitops.h	2022-10-27 14:24:02.000000000 +0200
+++ linux-stable/arch/alpha/include/asm/bitops.h	2022-10-27 14:24:02.000000000 +0200
@@ -289,6 +289,13 @@ test_bit(int nr, const volatile void * a
 	return (1UL & (((const int *) addr)[nr >> 5] >> (nr & 31))) != 0UL;
 }
 
+static __always_inline bool
+test_bit_acquire(unsigned long nr, const volatile unsigned long *addr)
+{
+	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
+	return 1UL & (smp_load_acquire(p) >> (nr & (BITS_PER_LONG-1)));
+}
+
 /*
  * ffz = Find First Zero in word. Undefined if no zero exists,
  * so code should check against ~0UL first..
Index: linux-stable/arch/hexagon/include/asm/bitops.h
===================================================================
--- linux-stable.orig/arch/hexagon/include/asm/bitops.h	2022-10-27 14:24:02.000000000 +0200
+++ linux-stable/arch/hexagon/include/asm/bitops.h	2022-10-27 14:24:02.000000000 +0200
@@ -186,7 +186,22 @@ static inline int __test_bit(int nr, con
 	return retval;
 }
 
+static inline int __test_bit_acquire(int nr, const volatile unsigned long *addr)
+{
+	int retval;
+
+	asm volatile(
+	"{P0 = tstbit(%1,%2); if (P0.new) %0 = #1; if (!P0.new) %0 = #0;}\n"
+	: "=&r" (retval)
+	: "r" (addr[BIT_WORD(nr)]), "r" (nr % BITS_PER_LONG)
+	: "p0", "memory"
+	);
+
+	return retval;
+}
+
 #define test_bit(nr, addr) __test_bit(nr, addr)
+#define test_bit_acquire(nr, addr) __test_bit_acquire(nr, addr)
 
 /*
  * ffz - find first zero in word.
Index: linux-stable/arch/ia64/include/asm/bitops.h
===================================================================
--- linux-stable.orig/arch/ia64/include/asm/bitops.h	2022-10-27 14:24:02.000000000 +0200
+++ linux-stable/arch/ia64/include/asm/bitops.h	2022-10-27 14:24:02.000000000 +0200
@@ -337,6 +337,13 @@ test_bit (int nr, const volatile void *a
 	return 1 & (((const volatile __u32 *) addr)[nr >> 5] >> (nr & 31));
 }
 
+static __always_inline bool
+test_bit_acquire(unsigned long nr, const volatile unsigned long *addr)
+{
+	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
+	return 1UL & (smp_load_acquire(p) >> (nr & (BITS_PER_LONG-1)));
+}
+
 /**
  * ffz - find the first zero bit in a long word
  * @x: The long word to find the bit in
Index: linux-stable/arch/m68k/include/asm/bitops.h
===================================================================
--- linux-stable.orig/arch/m68k/include/asm/bitops.h	2022-10-27 14:24:02.000000000 +0200
+++ linux-stable/arch/m68k/include/asm/bitops.h	2022-10-27 14:24:02.000000000 +0200
@@ -153,6 +153,12 @@ static inline int test_bit(int nr, const
 	return (vaddr[nr >> 5] & (1UL << (nr & 31))) != 0;
 }
 
+static __always_inline bool
+test_bit_acquire(unsigned long nr, const volatile unsigned long *addr)
+{
+	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
+	return 1UL & (smp_load_acquire(p) >> (nr & (BITS_PER_LONG-1)));
+}
 
 static inline int bset_reg_test_and_set_bit(int nr,
 		volatile unsigned long *vaddr)
Index: linux-stable/arch/s390/include/asm/bitops.h
===================================================================
--- linux-stable.orig/arch/s390/include/asm/bitops.h	2022-10-27 14:24:02.000000000 +0200
+++ linux-stable/arch/s390/include/asm/bitops.h	2022-10-27 14:24:02.000000000 +0200
@@ -215,6 +215,13 @@ static inline int test_bit(unsigned long
 	return (*addr >> (nr & 7)) & 1;
 }
 
+static __always_inline bool
+test_bit_acquire(unsigned long nr, const volatile unsigned long *addr)
+{
+	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
+	return 1UL & (smp_load_acquire(p) >> (nr & (BITS_PER_LONG-1)));
+}
+
 static inline int test_and_set_bit_lock(unsigned long nr,
 					volatile unsigned long *ptr)
 {
Index: linux-stable/arch/sh/include/asm/bitops-op32.h
===================================================================
--- linux-stable.orig/arch/sh/include/asm/bitops-op32.h	2022-10-27 14:24:02.000000000 +0200
+++ linux-stable/arch/sh/include/asm/bitops-op32.h	2022-10-27 14:24:02.000000000 +0200
@@ -140,4 +140,11 @@ static inline int test_bit(int nr, const
 	return 1UL & (addr[BIT_WORD(nr)] >> (nr & (BITS_PER_LONG-1)));
 }
 
+static __always_inline bool
+test_bit_acquire(unsigned long nr, const volatile unsigned long *addr)
+{
+	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
+	return 1UL & (smp_load_acquire(p) >> (nr & (BITS_PER_LONG-1)));
+}
+
 #endif /* __ASM_SH_BITOPS_OP32_H */
Index: linux-stable/arch/arc/include/asm/bitops.h
===================================================================
--- linux-stable.orig/arch/arc/include/asm/bitops.h	2022-10-27 14:24:02.000000000 +0200
+++ linux-stable/arch/arc/include/asm/bitops.h	2022-10-27 14:24:02.000000000 +0200
@@ -254,6 +254,13 @@ test_bit(unsigned int nr, const volatile
 	return ((mask & *addr) != 0);
 }
 
+static __always_inline bool
+test_bit_acquire(unsigned long nr, const volatile unsigned long *addr)
+{
+	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
+	return 1UL & (smp_load_acquire(p) >> (nr & (BITS_PER_LONG-1)));
+}
+
 #ifdef CONFIG_ISA_ARCOMPACT
 
 /*
Index: linux-stable/arch/h8300/include/asm/bitops.h
===================================================================
--- linux-stable.orig/arch/h8300/include/asm/bitops.h	2022-10-27 14:24:02.000000000 +0200
+++ linux-stable/arch/h8300/include/asm/bitops.h	2022-10-27 14:24:02.000000000 +0200
@@ -89,7 +89,8 @@ static inline int test_bit(int nr, const
 	return ret;
 }
 
-#define __test_bit(nr, addr) test_bit(nr, addr)
+#define __test_bit(nr, addr) test_bit(nr, addr)
+#define test_bit_acquire(nr, addr) test_bit(nr, addr)
 
 #define H8300_GEN_TEST_BITOP(FNNAME, OP)			\
 static inline int FNNAME(int nr, void *addr)			\
Index: linux-stable/arch/frv/include/asm/bitops.h
===================================================================
--- linux-stable.orig/arch/frv/include/asm/bitops.h	2022-10-27 14:24:02.000000000 +0200
+++ linux-stable/arch/frv/include/asm/bitops.h	2022-10-27 14:24:02.000000000 +0200
@@ -156,6 +156,13 @@ static inline int __test_bit(unsigned lo
 	__constant_test_bit((nr),(addr)) :	\
 	__test_bit((nr),(addr)))
 
+static __always_inline bool
+test_bit_acquire(unsigned long nr, const volatile unsigned long *addr)
+{
+	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
+	return 1UL & (smp_load_acquire(p) >> (nr & (BITS_PER_LONG-1)));
+}
+
 #include <asm-generic/bitops/find.h>
 
 /**
Index: linux-stable/arch/mn10300/include/asm/bitops.h
===================================================================
--- linux-stable.orig/arch/mn10300/include/asm/bitops.h	2022-10-27 14:24:02.000000000 +0200
+++ linux-stable/arch/mn10300/include/asm/bitops.h	2022-10-27 14:24:02.000000000 +0200
@@ -73,6 +73,13 @@ static inline int test_bit(unsigned long
 	return 1UL & (((const volatile unsigned int *) addr)[nr >> 5] >> (nr & 31));
 }
 
+static __always_inline bool
+test_bit_acquire(unsigned long nr, const volatile unsigned long *addr)
+{
+	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
+	return 1UL & (smp_load_acquire(p) >> (nr & (BITS_PER_LONG-1)));
+}
+
 /*
  * change bit
 */
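
[ Note, illustration only and not part of the patch: a hypothetical
  sketch of the reader/writer pairing the new primitive is meant for.
  The names 'my_flags' and MY_BIT_BUSY are made up, and the busy-wait
  stands in for the real wait_on_bit() machinery:

	static unsigned long my_flags;
	#define MY_BIT_BUSY	0

	/* Owner: publish the shared data, then release the bit. */
	static void finish_work(void)
	{
		/* ... stores to shared data go here ... */
		clear_bit_unlock(MY_BIT_BUSY, &my_flags);
	}

	/*
	 * Waiter: once the acquire load observes the bit clear, the
	 * stores made before clear_bit_unlock() are guaranteed visible.
	 */
	static void wait_for_work(void)
	{
		while (test_bit_acquire(MY_BIT_BUSY, &my_flags))
			cpu_relax();
	}
  ]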