Make the generic bitops return bool when returning the value of a tested
bit.

Signed-off-by: David Howells <dhowells@xxxxxxxxxx>
cc: Linus Torvalds <torvalds@xxxxxxxxxxxxxxxxxxxx>
cc: Matthew Wilcox <willy@xxxxxxxxxxxxx>
cc: Akinobu Mita <akinobu.mita@xxxxxxxxx>
cc: Arnd Bergmann <arnd@xxxxxxxx>
cc: Will Deacon <will@xxxxxxxxxx>
---
 include/asm-generic/bitops/atomic.h     |  6 +++---
 include/asm-generic/bitops/le.h         | 10 +++++-----
 include/asm-generic/bitops/lock.h       |  4 ++--
 include/asm-generic/bitops/non-atomic.h |  8 ++++----
 4 files changed, 14 insertions(+), 14 deletions(-)

diff --git a/include/asm-generic/bitops/atomic.h b/include/asm-generic/bitops/atomic.h
index 0e7316a86240..9b05e8634c09 100644
--- a/include/asm-generic/bitops/atomic.h
+++ b/include/asm-generic/bitops/atomic.h
@@ -29,7 +29,7 @@ static __always_inline void change_bit(unsigned int nr, volatile unsigned long *
 	atomic_long_xor(BIT_MASK(nr), (atomic_long_t *)p);
 }
 
-static inline int test_and_set_bit(unsigned int nr, volatile unsigned long *p)
+static inline bool test_and_set_bit(unsigned int nr, volatile unsigned long *p)
 {
 	long old;
 	unsigned long mask = BIT_MASK(nr);
@@ -42,7 +42,7 @@ static inline int test_and_set_bit(unsigned int nr, volatile unsigned long *p)
 	return !!(old & mask);
 }
 
-static inline int test_and_clear_bit(unsigned int nr, volatile unsigned long *p)
+static inline bool test_and_clear_bit(unsigned int nr, volatile unsigned long *p)
 {
 	long old;
 	unsigned long mask = BIT_MASK(nr);
@@ -55,7 +55,7 @@ static inline int test_and_clear_bit(unsigned int nr, volatile unsigned long *p)
 	return !!(old & mask);
 }
 
-static inline int test_and_change_bit(unsigned int nr, volatile unsigned long *p)
+static inline bool test_and_change_bit(unsigned int nr, volatile unsigned long *p)
 {
 	long old;
 	unsigned long mask = BIT_MASK(nr);
diff --git a/include/asm-generic/bitops/le.h b/include/asm-generic/bitops/le.h
index 188d3eba3ace..33355cf288f6 100644
--- a/include/asm-generic/bitops/le.h
+++ b/include/asm-generic/bitops/le.h
@@ -50,7 +50,7 @@ extern unsigned long find_next_bit_le(const void *addr,
 #error "Please fix <asm/byteorder.h>"
 #endif
 
-static inline int test_bit_le(int nr, const void *addr)
+static inline bool test_bit_le(int nr, const void *addr)
 {
 	return test_bit(nr ^ BITOP_LE_SWIZZLE, addr);
 }
@@ -75,22 +75,22 @@ static inline void __clear_bit_le(int nr, void *addr)
 	__clear_bit(nr ^ BITOP_LE_SWIZZLE, addr);
 }
 
-static inline int test_and_set_bit_le(int nr, void *addr)
+static inline bool test_and_set_bit_le(int nr, void *addr)
 {
 	return test_and_set_bit(nr ^ BITOP_LE_SWIZZLE, addr);
 }
 
-static inline int test_and_clear_bit_le(int nr, void *addr)
+static inline bool test_and_clear_bit_le(int nr, void *addr)
 {
 	return test_and_clear_bit(nr ^ BITOP_LE_SWIZZLE, addr);
 }
 
-static inline int __test_and_set_bit_le(int nr, void *addr)
+static inline bool __test_and_set_bit_le(int nr, void *addr)
 {
 	return __test_and_set_bit(nr ^ BITOP_LE_SWIZZLE, addr);
 }
 
-static inline int __test_and_clear_bit_le(int nr, void *addr)
+static inline bool __test_and_clear_bit_le(int nr, void *addr)
 {
 	return __test_and_clear_bit(nr ^ BITOP_LE_SWIZZLE, addr);
 }
diff --git a/include/asm-generic/bitops/lock.h b/include/asm-generic/bitops/lock.h
index 3ae021368f48..0e6acd059a59 100644
--- a/include/asm-generic/bitops/lock.h
+++ b/include/asm-generic/bitops/lock.h
@@ -15,8 +15,8 @@
  * the returned value is 0.
  * It can be used to implement bit locks.
 */
-static inline int test_and_set_bit_lock(unsigned int nr,
-					volatile unsigned long *p)
+static inline bool test_and_set_bit_lock(unsigned int nr,
+					 volatile unsigned long *p)
 {
 	long old;
 	unsigned long mask = BIT_MASK(nr);
diff --git a/include/asm-generic/bitops/non-atomic.h b/include/asm-generic/bitops/non-atomic.h
index 7e10c4b50c5d..7d916f677be3 100644
--- a/include/asm-generic/bitops/non-atomic.h
+++ b/include/asm-generic/bitops/non-atomic.h
@@ -55,7 +55,7 @@ static inline void __change_bit(int nr, volatile unsigned long *addr)
  * If two examples of this operation race, one can appear to succeed
  * but actually fail.  You must protect multiple accesses with a lock.
 */
-static inline int __test_and_set_bit(int nr, volatile unsigned long *addr)
+static inline bool __test_and_set_bit(int nr, volatile unsigned long *addr)
 {
 	unsigned long mask = BIT_MASK(nr);
 	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
@@ -74,7 +74,7 @@ static inline int __test_and_set_bit(int nr, volatile unsigned long *addr)
  * If two examples of this operation race, one can appear to succeed
  * but actually fail.  You must protect multiple accesses with a lock.
 */
-static inline int __test_and_clear_bit(int nr, volatile unsigned long *addr)
+static inline bool __test_and_clear_bit(int nr, volatile unsigned long *addr)
 {
 	unsigned long mask = BIT_MASK(nr);
 	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
@@ -85,7 +85,7 @@ static inline int __test_and_clear_bit(int nr, volatile unsigned long *addr)
 }
 
 /* WARNING: non atomic and it can be reordered! */
-static inline int __test_and_change_bit(int nr,
+static inline bool __test_and_change_bit(int nr,
 					    volatile unsigned long *addr)
 {
 	unsigned long mask = BIT_MASK(nr);
@@ -101,7 +101,7 @@ static inline int __test_and_change_bit(int nr,
  * @nr: bit number to test
  * @addr: Address to start counting from
 */
-static inline int test_bit(int nr, const volatile unsigned long *addr)
+static inline bool test_bit(int nr, const volatile unsigned long *addr)
 {
 	return 1UL & (addr[BIT_WORD(nr)] >> (nr & (BITS_PER_LONG-1)));
 }
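As an illustration of the effect at a call site (claim_slot() below is a
hypothetical caller, not part of this patch): the bool return reads
directly as a predicate, and the !!-normalisation of (old & mask) happens
once inside the bitop rather than at each call site.

#include <linux/bitops.h>

/*
 * Hypothetical caller, for illustration only.  test_and_set_bit()
 * returns the old bit value, so !test_and_set_bit() is true iff the
 * bit was previously clear and this caller has just set it, i.e. it
 * won the slot.
 */
static bool claim_slot(unsigned long *bitmap, unsigned int slot)
{
	return !test_and_set_bit(slot, bitmap);
}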