On Tue, Oct 05, 2021 at 01:41:18PM +0200, Peter Zijlstra wrote:
> On Tue, Oct 05, 2021 at 12:58:47PM +0200, Marco Elver wrote:
> > +static __always_inline void kcsan_atomic_release(int memorder)
> > +{
> > +        if (memorder == __ATOMIC_RELEASE ||
> > +            memorder == __ATOMIC_SEQ_CST ||
> > +            memorder == __ATOMIC_ACQ_REL)
> > +                __kcsan_release();
> > +}
> > +
> >  #define DEFINE_TSAN_ATOMIC_LOAD_STORE(bits) \
> >          u##bits __tsan_atomic##bits##_load(const u##bits *ptr, int memorder); \
> >          u##bits __tsan_atomic##bits##_load(const u##bits *ptr, int memorder) \
> >          { \
> > +                kcsan_atomic_release(memorder); \
> >                  if (!IS_ENABLED(CONFIG_KCSAN_IGNORE_ATOMICS)) { \
> >                          check_access(ptr, bits / BITS_PER_BYTE, KCSAN_ACCESS_ATOMIC, _RET_IP_); \
> >                  } \
> > @@ -1156,6 +1187,7 @@ EXPORT_SYMBOL(__tsan_init);
> >          void __tsan_atomic##bits##_store(u##bits *ptr, u##bits v, int memorder); \
> >          void __tsan_atomic##bits##_store(u##bits *ptr, u##bits v, int memorder) \
> >          { \
> > +                kcsan_atomic_release(memorder); \
> >                  if (!IS_ENABLED(CONFIG_KCSAN_IGNORE_ATOMICS)) { \
> >                          check_access(ptr, bits / BITS_PER_BYTE, \
> >                                       KCSAN_ACCESS_WRITE | KCSAN_ACCESS_ATOMIC, _RET_IP_); \
> > @@ -1168,6 +1200,7 @@ EXPORT_SYMBOL(__tsan_init);
> >          u##bits __tsan_atomic##bits##_##op(u##bits *ptr, u##bits v, int memorder); \
> >          u##bits __tsan_atomic##bits##_##op(u##bits *ptr, u##bits v, int memorder) \
> >          { \
> > +                kcsan_atomic_release(memorder); \
> >                  if (!IS_ENABLED(CONFIG_KCSAN_IGNORE_ATOMICS)) { \
> >                          check_access(ptr, bits / BITS_PER_BYTE, \
> >                                       KCSAN_ACCESS_COMPOUND | KCSAN_ACCESS_WRITE | \
> > @@ -1200,6 +1233,7 @@ EXPORT_SYMBOL(__tsan_init);
> >          int __tsan_atomic##bits##_compare_exchange_##strength(u##bits *ptr, u##bits *exp, \
> >                                                                 u##bits val, int mo, int fail_mo) \
> >          { \
> > +                kcsan_atomic_release(mo); \
> >                  if (!IS_ENABLED(CONFIG_KCSAN_IGNORE_ATOMICS)) { \
> >                          check_access(ptr, bits / BITS_PER_BYTE, \
> >                                       KCSAN_ACCESS_COMPOUND | KCSAN_ACCESS_WRITE | \
> > @@ -1215,6 +1249,7 @@ EXPORT_SYMBOL(__tsan_init);
> >          u##bits __tsan_atomic##bits##_compare_exchange_val(u##bits *ptr, u##bits exp, u##bits val, \
> >                                                             int mo, int fail_mo) \
> >          { \
> > +                kcsan_atomic_release(mo); \
> >                  if (!IS_ENABLED(CONFIG_KCSAN_IGNORE_ATOMICS)) { \
> >                          check_access(ptr, bits / BITS_PER_BYTE, \
> >                                       KCSAN_ACCESS_COMPOUND | KCSAN_ACCESS_WRITE | \
> > @@ -1246,6 +1281,7 @@ DEFINE_TSAN_ATOMIC_OPS(64);
> >  void __tsan_atomic_thread_fence(int memorder);
> >  void __tsan_atomic_thread_fence(int memorder)
> >  {
> > +        kcsan_atomic_release(memorder);
> >          __atomic_thread_fence(memorder);
> >  }
> >  EXPORT_SYMBOL(__tsan_atomic_thread_fence);
>
> I find that very hard to read... kcsan_atomic_release() is not in fact a
> release. It might be a release if @memorder implies one.

Also, what does the "atomic" part signify? Is that because you're modeling
the difference in acquire/release semantics between
smp_load_acquire()/smp_store_release() and atomic*_{acquire,release}()?
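
For concreteness, a minimal sketch (not from the patch) of what the hooks
quoted above see at a builtin-atomic call site; publish_flag() and the u32
flag are made up for illustration:

#include <linux/types.h>

/*
 * Sketch only: with KCSAN's compiler instrumentation enabled, the __atomic
 * builtins below are rewritten into calls to the __tsan_atomic* hooks
 * quoted above, so kcsan_atomic_release() is reached with the call site's
 * memory order and only calls __kcsan_release() when that order implies a
 * release.
 */
void publish_flag(u32 *flag)
{
        /* instrumented as __tsan_atomic32_store(flag, 1, __ATOMIC_RELEASE) */
        __atomic_store_n(flag, 1, __ATOMIC_RELEASE);    /* __kcsan_release() runs */

        /* instrumented as __tsan_atomic32_store(flag, 0, __ATOMIC_RELAXED) */
        __atomic_store_n(flag, 0, __ATOMIC_RELAXED);    /* no release modeled */
}

Per the helper's check, __ATOMIC_SEQ_CST and __ATOMIC_ACQ_REL would be
treated like __ATOMIC_RELEASE here as well.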