Add explicit KCSAN checks for bitops.

Signed-off-by: Marco Elver <elver@xxxxxxxxxx>
---
The same patch was previously sent, but at that point the updated bitops
instrumented infrastructure was not yet in mainline:
http://lkml.kernel.org/r/20191115115524.GA77379@xxxxxxxxxx

Note that test_bit() is an atomic bitop, and KCSAN treats it as such,
although it is in the non-atomic header. Currently it cannot be moved:
http://lkml.kernel.org/r/87pnh5dlmn.fsf@xxxxxxxxxxxxxxxxxxxxxxx
---
 include/asm-generic/bitops/instrumented-atomic.h     | 7 +++++++
 include/asm-generic/bitops/instrumented-lock.h       | 5 +++++
 include/asm-generic/bitops/instrumented-non-atomic.h | 8 ++++++++
 3 files changed, 20 insertions(+)

diff --git a/include/asm-generic/bitops/instrumented-atomic.h b/include/asm-generic/bitops/instrumented-atomic.h
index 18ce3c9e8eec..eb3abf7e5c08 100644
--- a/include/asm-generic/bitops/instrumented-atomic.h
+++ b/include/asm-generic/bitops/instrumented-atomic.h
@@ -12,6 +12,7 @@
 #define _ASM_GENERIC_BITOPS_INSTRUMENTED_ATOMIC_H
 
 #include <linux/kasan-checks.h>
+#include <linux/kcsan-checks.h>
 
 /**
  * set_bit - Atomically set a bit in memory
@@ -26,6 +27,7 @@
 static inline void set_bit(long nr, volatile unsigned long *addr)
 {
 	kasan_check_write(addr + BIT_WORD(nr), sizeof(long));
+	kcsan_check_atomic_write(addr + BIT_WORD(nr), sizeof(long));
 	arch_set_bit(nr, addr);
 }
 
@@ -39,6 +41,7 @@ static inline void set_bit(long nr, volatile unsigned long *addr)
 static inline void clear_bit(long nr, volatile unsigned long *addr)
 {
 	kasan_check_write(addr + BIT_WORD(nr), sizeof(long));
+	kcsan_check_atomic_write(addr + BIT_WORD(nr), sizeof(long));
 	arch_clear_bit(nr, addr);
 }
 
@@ -55,6 +58,7 @@ static inline void clear_bit(long nr, volatile unsigned long *addr)
 static inline void change_bit(long nr, volatile unsigned long *addr)
 {
 	kasan_check_write(addr + BIT_WORD(nr), sizeof(long));
+	kcsan_check_atomic_write(addr + BIT_WORD(nr), sizeof(long));
 	arch_change_bit(nr, addr);
 }
 
@@ -68,6 +72,7 @@ static inline void change_bit(long nr, volatile unsigned long *addr)
 static inline bool test_and_set_bit(long nr, volatile unsigned long *addr)
 {
 	kasan_check_write(addr + BIT_WORD(nr), sizeof(long));
+	kcsan_check_atomic_write(addr + BIT_WORD(nr), sizeof(long));
 	return arch_test_and_set_bit(nr, addr);
 }
 
@@ -81,6 +86,7 @@ static inline bool test_and_set_bit(long nr, volatile unsigned long *addr)
 static inline bool test_and_clear_bit(long nr, volatile unsigned long *addr)
 {
 	kasan_check_write(addr + BIT_WORD(nr), sizeof(long));
+	kcsan_check_atomic_write(addr + BIT_WORD(nr), sizeof(long));
 	return arch_test_and_clear_bit(nr, addr);
 }
 
@@ -94,6 +100,7 @@ static inline bool test_and_clear_bit(long nr, volatile unsigned long *addr)
 static inline bool test_and_change_bit(long nr, volatile unsigned long *addr)
 {
 	kasan_check_write(addr + BIT_WORD(nr), sizeof(long));
+	kcsan_check_atomic_write(addr + BIT_WORD(nr), sizeof(long));
 	return arch_test_and_change_bit(nr, addr);
 }
 
diff --git a/include/asm-generic/bitops/instrumented-lock.h b/include/asm-generic/bitops/instrumented-lock.h
index ec53fdeea9ec..2c80dca31e27 100644
--- a/include/asm-generic/bitops/instrumented-lock.h
+++ b/include/asm-generic/bitops/instrumented-lock.h
@@ -12,6 +12,7 @@
 #define _ASM_GENERIC_BITOPS_INSTRUMENTED_LOCK_H
 
 #include <linux/kasan-checks.h>
+#include <linux/kcsan-checks.h>
 
 /**
  * clear_bit_unlock - Clear a bit in memory, for unlock
@@ -23,6 +24,7 @@
 static inline void clear_bit_unlock(long nr, volatile unsigned long *addr)
 {
 	kasan_check_write(addr + BIT_WORD(nr), sizeof(long));
+	kcsan_check_atomic_write(addr + BIT_WORD(nr), sizeof(long));
 	arch_clear_bit_unlock(nr, addr);
 }
 
@@ -38,6 +40,7 @@ static inline void clear_bit_unlock(long nr, volatile unsigned long *addr)
 static inline void __clear_bit_unlock(long nr, volatile unsigned long *addr)
 {
 	kasan_check_write(addr + BIT_WORD(nr), sizeof(long));
+	kcsan_check_write(addr + BIT_WORD(nr), sizeof(long));
 	arch___clear_bit_unlock(nr, addr);
 }
 
@@ -53,6 +56,7 @@ static inline void __clear_bit_unlock(long nr, volatile unsigned long *addr)
 static inline bool test_and_set_bit_lock(long nr, volatile unsigned long *addr)
 {
 	kasan_check_write(addr + BIT_WORD(nr), sizeof(long));
+	kcsan_check_atomic_write(addr + BIT_WORD(nr), sizeof(long));
 	return arch_test_and_set_bit_lock(nr, addr);
 }
 
@@ -72,6 +76,7 @@ static inline bool
 clear_bit_unlock_is_negative_byte(long nr, volatile unsigned long *addr)
 {
 	kasan_check_write(addr + BIT_WORD(nr), sizeof(long));
+	kcsan_check_atomic_write(addr + BIT_WORD(nr), sizeof(long));
 	return arch_clear_bit_unlock_is_negative_byte(nr, addr);
 }
 /* Let everybody know we have it. */
diff --git a/include/asm-generic/bitops/instrumented-non-atomic.h b/include/asm-generic/bitops/instrumented-non-atomic.h
index 95ff28d128a1..8479af8b3309 100644
--- a/include/asm-generic/bitops/instrumented-non-atomic.h
+++ b/include/asm-generic/bitops/instrumented-non-atomic.h
@@ -12,6 +12,7 @@
 #define _ASM_GENERIC_BITOPS_INSTRUMENTED_NON_ATOMIC_H
 
 #include <linux/kasan-checks.h>
+#include <linux/kcsan-checks.h>
 
 /**
  * __set_bit - Set a bit in memory
@@ -25,6 +26,7 @@
 static inline void __set_bit(long nr, volatile unsigned long *addr)
 {
 	kasan_check_write(addr + BIT_WORD(nr), sizeof(long));
+	kcsan_check_write(addr + BIT_WORD(nr), sizeof(long));
 	arch___set_bit(nr, addr);
 }
 
@@ -40,6 +42,7 @@ static inline void __set_bit(long nr, volatile unsigned long *addr)
 static inline void __clear_bit(long nr, volatile unsigned long *addr)
 {
 	kasan_check_write(addr + BIT_WORD(nr), sizeof(long));
+	kcsan_check_write(addr + BIT_WORD(nr), sizeof(long));
 	arch___clear_bit(nr, addr);
 }
 
@@ -55,6 +58,7 @@ static inline void __clear_bit(long nr, volatile unsigned long *addr)
 static inline void __change_bit(long nr, volatile unsigned long *addr)
 {
 	kasan_check_write(addr + BIT_WORD(nr), sizeof(long));
+	kcsan_check_write(addr + BIT_WORD(nr), sizeof(long));
 	arch___change_bit(nr, addr);
 }
 
@@ -69,6 +73,7 @@ static inline void __change_bit(long nr, volatile unsigned long *addr)
 static inline bool __test_and_set_bit(long nr, volatile unsigned long *addr)
 {
 	kasan_check_write(addr + BIT_WORD(nr), sizeof(long));
+	kcsan_check_write(addr + BIT_WORD(nr), sizeof(long));
 	return arch___test_and_set_bit(nr, addr);
 }
 
@@ -83,6 +88,7 @@ static inline bool __test_and_set_bit(long nr, volatile unsigned long *addr)
 static inline bool __test_and_clear_bit(long nr, volatile unsigned long *addr)
 {
 	kasan_check_write(addr + BIT_WORD(nr), sizeof(long));
+	kcsan_check_write(addr + BIT_WORD(nr), sizeof(long));
 	return arch___test_and_clear_bit(nr, addr);
 }
 
@@ -97,6 +103,7 @@ static inline bool __test_and_clear_bit(long nr, volatile unsigned long *addr)
 static inline bool __test_and_change_bit(long nr, volatile unsigned long *addr)
 {
 	kasan_check_write(addr + BIT_WORD(nr), sizeof(long));
+	kcsan_check_write(addr + BIT_WORD(nr), sizeof(long));
 	return arch___test_and_change_bit(nr, addr);
 }
 
@@ -108,6 +115,7 @@ static inline bool __test_and_change_bit(long nr, volatile unsigned long *addr)
 static inline bool test_bit(long nr, const volatile unsigned long *addr)
 {
 	kasan_check_read(addr + BIT_WORD(nr), sizeof(long));
+	kcsan_check_atomic_read(addr + BIT_WORD(nr), sizeof(long));
 	return arch_test_bit(nr, addr);
 }
 
-- 
2.25.0.rc1.283.g88dfdc4193-goog
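
Illustrative aside, not part of the patch: with the checks above, KCSAN
classifies the plain bitops (__set_bit() and friends, instrumented with
kcsan_check_write()) as unmarked accesses, and the atomic bitops
(including test_bit(), per the note above) as marked accesses. The
kernel-style sketch below only illustrates that distinction; the shared
word "flags" and the writer()/reader() helpers are hypothetical, and the
two functions are assumed to run concurrently from different contexts.

#include <linux/bitops.h>

static unsigned long flags;

static void writer(void)
{
	/* __set_bit() is instrumented with kcsan_check_write(): a plain
	 * write, so concurrent accesses to the same word can be reported
	 * as a data race. */
	__set_bit(0, &flags);
}

static bool reader(void)
{
	/* test_bit() is instrumented with kcsan_check_atomic_read(): a
	 * marked access, matching the note that test_bit() is an atomic
	 * bitop despite living in the non-atomic header. */
	return test_bit(0, &flags);
}

Switching writer() to set_bit() would make the write side use
kcsan_check_atomic_write() as well; with both accesses marked, KCSAN
would not report this pair under its default configuration.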