Index: lib/libthr/pthread.map
===================================================================
--- lib/libthr/pthread.map
+++ lib/libthr/pthread.map
@@ -296,6 +296,14 @@
 	_thread_size_key;
 	_thread_state_running;
 	_thread_state_zoombie;
+
+	/* To avoid TSan false positives */
+	__tsan_mutex_create;
+	__tsan_mutex_destroy;
+	__tsan_mutex_pre_lock;
+	__tsan_mutex_post_lock;
+	__tsan_mutex_pre_unlock;
+	__tsan_mutex_post_unlock;
 };
 
 FBSD_1.1 {
Index: lib/libthr/thread/thr_malloc.c
===================================================================
--- lib/libthr/thread/thr_malloc.c
+++ lib/libthr/thread/thr_malloc.c
@@ -56,6 +56,13 @@
 	}
 	pagesizes = pagesizes_d;
 	_thr_umutex_init(&thr_malloc_umtx);
+	/*
+	 * TODO: should probably annotate other internal locks too (maybe even
+	 * move it down to the internal API). However, annotating only the
+	 * malloc lock appears to silence (almost?) all false positives, so
+	 * this can likely wait (and also has a lower performance overhead).
+	 */
+	__tsan_mutex_create(&thr_malloc_umtx, __tsan_mutex_write_reentrant);
 }
 
 static void
@@ -65,12 +72,14 @@
 
 	if (curthread == NULL)
 		return;
+	__tsan_mutex_pre_lock(&thr_malloc_umtx, 0);
 	curthread->locklevel++;
 	curtid = TID(curthread);
 	if ((uint32_t)thr_malloc_umtx.m_owner == curtid)
 		thr_malloc_umtx_level++;
 	else
 		_thr_umutex_lock(&thr_malloc_umtx, curtid);
+	__tsan_mutex_post_lock(&thr_malloc_umtx, 0, 1);
 }
 
 static void
@@ -79,12 +88,14 @@
 
 	if (curthread == NULL)
 		return;
+	__tsan_mutex_pre_unlock(&thr_malloc_umtx, 0);
 	if (thr_malloc_umtx_level > 0)
 		thr_malloc_umtx_level--;
 	else
 		_thr_umutex_unlock(&thr_malloc_umtx, TID(curthread));
 	curthread->locklevel--;
 	_thr_ast(curthread);
+	__tsan_mutex_post_unlock(&thr_malloc_umtx, 0);
 }
 
 void *
@@ -140,13 +151,15 @@
 void
 __thr_malloc_prefork(struct pthread *curthread)
 {
-
+	__tsan_mutex_pre_lock(&thr_malloc_umtx, 0);
 	_thr_umutex_lock(&thr_malloc_umtx, TID(curthread));
+	__tsan_mutex_post_lock(&thr_malloc_umtx, 0, 1);
 }
 
 void
 __thr_malloc_postfork(struct pthread *curthread)
 {
-
+	__tsan_mutex_pre_unlock(&thr_malloc_umtx, 0);
 	_thr_umutex_unlock(&thr_malloc_umtx, TID(curthread));
+	__tsan_mutex_post_unlock(&thr_malloc_umtx, 0);
 }
Index: lib/libthr/thread/thr_private.h
===================================================================
--- lib/libthr/thread/thr_private.h
+++ lib/libthr/thread/thr_private.h
@@ -1099,6 +1099,15 @@
 int	__Tthr_mutex_lock(pthread_mutex_t *);
 int	__Tthr_mutex_trylock(pthread_mutex_t *);
 
+/* These are required to avoid TSan false positives for libthr-internal locks. */
+#define	__tsan_mutex_write_reentrant	(1 << 1)
+void	__tsan_mutex_create(void *, unsigned) __weak_symbol;
+void	__tsan_mutex_destroy(void *, unsigned) __weak_symbol;
+void	__tsan_mutex_pre_lock(void *, unsigned) __weak_symbol;
+void	__tsan_mutex_post_lock(void *, unsigned, int) __weak_symbol;
+int	__tsan_mutex_pre_unlock(void *, unsigned) __weak_symbol;
+void	__tsan_mutex_post_unlock(void *, unsigned) __weak_symbol;
+
 __END_DECLS
 __NULLABILITY_PRAGMA_POP
Index: lib/libthr/thread/thr_umtx.c
===================================================================
--- lib/libthr/thread/thr_umtx.c
+++ lib/libthr/thread/thr_umtx.c
@@ -374,3 +374,36 @@
 	if (_thr_rwlock_unlock(rwlock))
 		PANIC("unlock error");
 }
+
+/*
+ * To avoid TSan false positives we provide weak versions of the TSan
+ * callbacks that are used to annotate mutexes. Without this we get tons of
+ * false positives each time __thr_malloc(), etc. is used.
+ * See tsan_interface.h inside compiler-rt.
+ */
+void __weak_symbol
+__tsan_mutex_create(void *addr __unused, unsigned flags __unused)
+{
+}
+void __weak_symbol
+__tsan_mutex_destroy(void *addr __unused, unsigned flags __unused)
+{
+}
+void __weak_symbol
+__tsan_mutex_pre_lock(void *addr __unused, unsigned flags __unused)
+{
+}
+void __weak_symbol
+__tsan_mutex_post_lock(void *addr __unused, unsigned flags __unused,
+    int recursion __unused)
+{
+}
+int __weak_symbol
+__tsan_mutex_pre_unlock(void *addr __unused, unsigned flags __unused)
+{
+	return (0);
+}
+void __weak_symbol
+__tsan_mutex_post_unlock(void *addr __unused, unsigned flags __unused)
+{
+}
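For reference, a minimal smoke test along these lines can be used to exercise the annotations; it is not part of the patch, and the file name, build flags, and the exact libthr code path are illustrative assumptions. In a TSan-instrumented binary, compiler-rt supplies the strong __tsan_mutex_* definitions, so the calls above describe thr_malloc_umtx to TSan; in an uninstrumented binary the weak no-op stubs in thr_umtx.c are linked instead. Repeatedly creating and joining threads from two spawner threads should drive libthr's internal allocator concurrently, which previously produced the false positives.

/*
 * tsan_thr_smoke.c (illustrative): hammer libthr paths that are expected to
 * take the internal malloc lock (thr_malloc_umtx) from several threads.
 */
#include <pthread.h>
#include <stddef.h>

#define	SPAWNS	200

static void *
noop(void *arg)
{
	return (arg);
}

static void *
spawner(void *arg)
{
	pthread_t t;
	int i;

	(void)arg;
	for (i = 0; i < SPAWNS; i++) {
		/*
		 * Creating and joining a thread allocates and frees
		 * per-thread state inside libthr, which should go through
		 * __thr_malloc/__thr_free and take thr_malloc_umtx.
		 */
		if (pthread_create(&t, NULL, noop, NULL) == 0)
			(void)pthread_join(t, NULL);
	}
	return (NULL);
}

int
main(void)
{
	pthread_t t1, t2;

	(void)pthread_create(&t1, NULL, spawner, NULL);
	(void)pthread_create(&t2, NULL, spawner, NULL);
	(void)pthread_join(t1, NULL);
	(void)pthread_join(t2, NULL);
	return (0);
}

Built with something like "cc -fsanitize=thread -g -O1 tsan_thr_smoke.c -o tsan_thr_smoke -lpthread" (assuming a toolchain with TSan support for the target), the program should run to completion with no TSan reports mentioning thr_malloc_umtx once libthr carries this change.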