Revert "[tsan] Don't use enum __tsan_memory_order in tsan interface" (
Browse files Browse the repository at this point in the history
#115032)

Reverts #114724

Breaks OSX builds
vitalybuka authored Nov 5, 2024
1 parent 380fd09 commit b14c436
Showing 4 changed files with 336 additions and 336 deletions.
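For context, the substance of what is being reverted: #114724 had changed every memory-order parameter in the public TSan atomic interface from the __tsan_memory_order enum to plain int; this revert restores the enum. In C++, int does not convert implicitly to an enum type, so callers must pass the enum constants again. A minimal caller-side sketch (the global and function below are illustrative, not part of the patch):

    #include <sanitizer/tsan_interface_atomic.h>

    __tsan_atomic32 g_counter;

    int load_counter() {
      // With the enum-typed signature restored, the memory order must be
      // one of the __tsan_memory_order_* constants; a bare int would no
      // longer compile in C++ without an explicit cast.
      return __tsan_atomic32_load(&g_counter, __tsan_memory_order_acquire);
    }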
169 changes: 87 additions & 82 deletions compiler-rt/include/sanitizer/tsan_interface_atomic.h
@@ -43,178 +43,183 @@ typedef enum {
 } __tsan_memory_order;
 
 __tsan_atomic8 SANITIZER_CDECL
-__tsan_atomic8_load(const volatile __tsan_atomic8 *a, int mo);
+__tsan_atomic8_load(const volatile __tsan_atomic8 *a, __tsan_memory_order mo);
 __tsan_atomic16 SANITIZER_CDECL
-__tsan_atomic16_load(const volatile __tsan_atomic16 *a, int mo);
+__tsan_atomic16_load(const volatile __tsan_atomic16 *a, __tsan_memory_order mo);
 __tsan_atomic32 SANITIZER_CDECL
-__tsan_atomic32_load(const volatile __tsan_atomic32 *a, int mo);
+__tsan_atomic32_load(const volatile __tsan_atomic32 *a, __tsan_memory_order mo);
 __tsan_atomic64 SANITIZER_CDECL
-__tsan_atomic64_load(const volatile __tsan_atomic64 *a, int mo);
+__tsan_atomic64_load(const volatile __tsan_atomic64 *a, __tsan_memory_order mo);
 #if __TSAN_HAS_INT128
-__tsan_atomic128 SANITIZER_CDECL
-__tsan_atomic128_load(const volatile __tsan_atomic128 *a, int mo);
+__tsan_atomic128 SANITIZER_CDECL __tsan_atomic128_load(
+    const volatile __tsan_atomic128 *a, __tsan_memory_order mo);
 #endif

 void SANITIZER_CDECL __tsan_atomic8_store(volatile __tsan_atomic8 *a,
-                                          __tsan_atomic8 v, int mo);
+                                          __tsan_atomic8 v,
+                                          __tsan_memory_order mo);
 void SANITIZER_CDECL __tsan_atomic16_store(volatile __tsan_atomic16 *a,
-                                           __tsan_atomic16 v, int mo);
+                                           __tsan_atomic16 v,
+                                           __tsan_memory_order mo);
 void SANITIZER_CDECL __tsan_atomic32_store(volatile __tsan_atomic32 *a,
-                                           __tsan_atomic32 v, int mo);
+                                           __tsan_atomic32 v,
+                                           __tsan_memory_order mo);
 void SANITIZER_CDECL __tsan_atomic64_store(volatile __tsan_atomic64 *a,
-                                           __tsan_atomic64 v, int mo);
+                                           __tsan_atomic64 v,
+                                           __tsan_memory_order mo);
 #if __TSAN_HAS_INT128
 void SANITIZER_CDECL __tsan_atomic128_store(volatile __tsan_atomic128 *a,
-                                            __tsan_atomic128 v, int mo);
+                                            __tsan_atomic128 v,
+                                            __tsan_memory_order mo);
 #endif

-__tsan_atomic8 SANITIZER_CDECL
-__tsan_atomic8_exchange(volatile __tsan_atomic8 *a, __tsan_atomic8 v, int mo);
+__tsan_atomic8 SANITIZER_CDECL __tsan_atomic8_exchange(
+    volatile __tsan_atomic8 *a, __tsan_atomic8 v, __tsan_memory_order mo);
 __tsan_atomic16 SANITIZER_CDECL __tsan_atomic16_exchange(
-    volatile __tsan_atomic16 *a, __tsan_atomic16 v, int mo);
+    volatile __tsan_atomic16 *a, __tsan_atomic16 v, __tsan_memory_order mo);
 __tsan_atomic32 SANITIZER_CDECL __tsan_atomic32_exchange(
-    volatile __tsan_atomic32 *a, __tsan_atomic32 v, int mo);
+    volatile __tsan_atomic32 *a, __tsan_atomic32 v, __tsan_memory_order mo);
 __tsan_atomic64 SANITIZER_CDECL __tsan_atomic64_exchange(
-    volatile __tsan_atomic64 *a, __tsan_atomic64 v, int mo);
+    volatile __tsan_atomic64 *a, __tsan_atomic64 v, __tsan_memory_order mo);
 #if __TSAN_HAS_INT128
 __tsan_atomic128 SANITIZER_CDECL __tsan_atomic128_exchange(
-    volatile __tsan_atomic128 *a, __tsan_atomic128 v, int mo);
+    volatile __tsan_atomic128 *a, __tsan_atomic128 v, __tsan_memory_order mo);
 #endif

-__tsan_atomic8 SANITIZER_CDECL
-__tsan_atomic8_fetch_add(volatile __tsan_atomic8 *a, __tsan_atomic8 v, int mo);
+__tsan_atomic8 SANITIZER_CDECL __tsan_atomic8_fetch_add(
+    volatile __tsan_atomic8 *a, __tsan_atomic8 v, __tsan_memory_order mo);
 __tsan_atomic16 SANITIZER_CDECL __tsan_atomic16_fetch_add(
-    volatile __tsan_atomic16 *a, __tsan_atomic16 v, int mo);
+    volatile __tsan_atomic16 *a, __tsan_atomic16 v, __tsan_memory_order mo);
 __tsan_atomic32 SANITIZER_CDECL __tsan_atomic32_fetch_add(
-    volatile __tsan_atomic32 *a, __tsan_atomic32 v, int mo);
+    volatile __tsan_atomic32 *a, __tsan_atomic32 v, __tsan_memory_order mo);
 __tsan_atomic64 SANITIZER_CDECL __tsan_atomic64_fetch_add(
-    volatile __tsan_atomic64 *a, __tsan_atomic64 v, int mo);
+    volatile __tsan_atomic64 *a, __tsan_atomic64 v, __tsan_memory_order mo);
 #if __TSAN_HAS_INT128
 __tsan_atomic128 SANITIZER_CDECL __tsan_atomic128_fetch_add(
-    volatile __tsan_atomic128 *a, __tsan_atomic128 v, int mo);
+    volatile __tsan_atomic128 *a, __tsan_atomic128 v, __tsan_memory_order mo);
 #endif

-__tsan_atomic8 SANITIZER_CDECL
-__tsan_atomic8_fetch_sub(volatile __tsan_atomic8 *a, __tsan_atomic8 v, int mo);
+__tsan_atomic8 SANITIZER_CDECL __tsan_atomic8_fetch_sub(
+    volatile __tsan_atomic8 *a, __tsan_atomic8 v, __tsan_memory_order mo);
 __tsan_atomic16 SANITIZER_CDECL __tsan_atomic16_fetch_sub(
-    volatile __tsan_atomic16 *a, __tsan_atomic16 v, int mo);
+    volatile __tsan_atomic16 *a, __tsan_atomic16 v, __tsan_memory_order mo);
 __tsan_atomic32 SANITIZER_CDECL __tsan_atomic32_fetch_sub(
-    volatile __tsan_atomic32 *a, __tsan_atomic32 v, int mo);
+    volatile __tsan_atomic32 *a, __tsan_atomic32 v, __tsan_memory_order mo);
 __tsan_atomic64 SANITIZER_CDECL __tsan_atomic64_fetch_sub(
-    volatile __tsan_atomic64 *a, __tsan_atomic64 v, int mo);
+    volatile __tsan_atomic64 *a, __tsan_atomic64 v, __tsan_memory_order mo);
 #if __TSAN_HAS_INT128
 __tsan_atomic128 SANITIZER_CDECL __tsan_atomic128_fetch_sub(
-    volatile __tsan_atomic128 *a, __tsan_atomic128 v, int mo);
+    volatile __tsan_atomic128 *a, __tsan_atomic128 v, __tsan_memory_order mo);
 #endif

-__tsan_atomic8 SANITIZER_CDECL
-__tsan_atomic8_fetch_and(volatile __tsan_atomic8 *a, __tsan_atomic8 v, int mo);
+__tsan_atomic8 SANITIZER_CDECL __tsan_atomic8_fetch_and(
+    volatile __tsan_atomic8 *a, __tsan_atomic8 v, __tsan_memory_order mo);
 __tsan_atomic16 SANITIZER_CDECL __tsan_atomic16_fetch_and(
-    volatile __tsan_atomic16 *a, __tsan_atomic16 v, int mo);
+    volatile __tsan_atomic16 *a, __tsan_atomic16 v, __tsan_memory_order mo);
 __tsan_atomic32 SANITIZER_CDECL __tsan_atomic32_fetch_and(
-    volatile __tsan_atomic32 *a, __tsan_atomic32 v, int mo);
+    volatile __tsan_atomic32 *a, __tsan_atomic32 v, __tsan_memory_order mo);
 __tsan_atomic64 SANITIZER_CDECL __tsan_atomic64_fetch_and(
-    volatile __tsan_atomic64 *a, __tsan_atomic64 v, int mo);
+    volatile __tsan_atomic64 *a, __tsan_atomic64 v, __tsan_memory_order mo);
 #if __TSAN_HAS_INT128
 __tsan_atomic128 SANITIZER_CDECL __tsan_atomic128_fetch_and(
-    volatile __tsan_atomic128 *a, __tsan_atomic128 v, int mo);
+    volatile __tsan_atomic128 *a, __tsan_atomic128 v, __tsan_memory_order mo);
 #endif

-__tsan_atomic8 SANITIZER_CDECL
-__tsan_atomic8_fetch_or(volatile __tsan_atomic8 *a, __tsan_atomic8 v, int mo);
+__tsan_atomic8 SANITIZER_CDECL __tsan_atomic8_fetch_or(
+    volatile __tsan_atomic8 *a, __tsan_atomic8 v, __tsan_memory_order mo);
 __tsan_atomic16 SANITIZER_CDECL __tsan_atomic16_fetch_or(
-    volatile __tsan_atomic16 *a, __tsan_atomic16 v, int mo);
+    volatile __tsan_atomic16 *a, __tsan_atomic16 v, __tsan_memory_order mo);
 __tsan_atomic32 SANITIZER_CDECL __tsan_atomic32_fetch_or(
-    volatile __tsan_atomic32 *a, __tsan_atomic32 v, int mo);
+    volatile __tsan_atomic32 *a, __tsan_atomic32 v, __tsan_memory_order mo);
 __tsan_atomic64 SANITIZER_CDECL __tsan_atomic64_fetch_or(
-    volatile __tsan_atomic64 *a, __tsan_atomic64 v, int mo);
+    volatile __tsan_atomic64 *a, __tsan_atomic64 v, __tsan_memory_order mo);
 #if __TSAN_HAS_INT128
 __tsan_atomic128 SANITIZER_CDECL __tsan_atomic128_fetch_or(
-    volatile __tsan_atomic128 *a, __tsan_atomic128 v, int mo);
+    volatile __tsan_atomic128 *a, __tsan_atomic128 v, __tsan_memory_order mo);
 #endif

-__tsan_atomic8 SANITIZER_CDECL
-__tsan_atomic8_fetch_xor(volatile __tsan_atomic8 *a, __tsan_atomic8 v, int mo);
+__tsan_atomic8 SANITIZER_CDECL __tsan_atomic8_fetch_xor(
+    volatile __tsan_atomic8 *a, __tsan_atomic8 v, __tsan_memory_order mo);
 __tsan_atomic16 SANITIZER_CDECL __tsan_atomic16_fetch_xor(
-    volatile __tsan_atomic16 *a, __tsan_atomic16 v, int mo);
+    volatile __tsan_atomic16 *a, __tsan_atomic16 v, __tsan_memory_order mo);
 __tsan_atomic32 SANITIZER_CDECL __tsan_atomic32_fetch_xor(
-    volatile __tsan_atomic32 *a, __tsan_atomic32 v, int mo);
+    volatile __tsan_atomic32 *a, __tsan_atomic32 v, __tsan_memory_order mo);
 __tsan_atomic64 SANITIZER_CDECL __tsan_atomic64_fetch_xor(
-    volatile __tsan_atomic64 *a, __tsan_atomic64 v, int mo);
+    volatile __tsan_atomic64 *a, __tsan_atomic64 v, __tsan_memory_order mo);
 #if __TSAN_HAS_INT128
 __tsan_atomic128 SANITIZER_CDECL __tsan_atomic128_fetch_xor(
-    volatile __tsan_atomic128 *a, __tsan_atomic128 v, int mo);
+    volatile __tsan_atomic128 *a, __tsan_atomic128 v, __tsan_memory_order mo);
 #endif

-__tsan_atomic8 SANITIZER_CDECL
-__tsan_atomic8_fetch_nand(volatile __tsan_atomic8 *a, __tsan_atomic8 v, int mo);
+__tsan_atomic8 SANITIZER_CDECL __tsan_atomic8_fetch_nand(
+    volatile __tsan_atomic8 *a, __tsan_atomic8 v, __tsan_memory_order mo);
 __tsan_atomic16 SANITIZER_CDECL __tsan_atomic16_fetch_nand(
-    volatile __tsan_atomic16 *a, __tsan_atomic16 v, int mo);
+    volatile __tsan_atomic16 *a, __tsan_atomic16 v, __tsan_memory_order mo);
 __tsan_atomic32 SANITIZER_CDECL __tsan_atomic32_fetch_nand(
-    volatile __tsan_atomic32 *a, __tsan_atomic32 v, int mo);
+    volatile __tsan_atomic32 *a, __tsan_atomic32 v, __tsan_memory_order mo);
 __tsan_atomic64 SANITIZER_CDECL __tsan_atomic64_fetch_nand(
-    volatile __tsan_atomic64 *a, __tsan_atomic64 v, int mo);
+    volatile __tsan_atomic64 *a, __tsan_atomic64 v, __tsan_memory_order mo);
 #if __TSAN_HAS_INT128
 __tsan_atomic128 SANITIZER_CDECL __tsan_atomic128_fetch_nand(
-    volatile __tsan_atomic128 *a, __tsan_atomic128 v, int mo);
+    volatile __tsan_atomic128 *a, __tsan_atomic128 v, __tsan_memory_order mo);
 #endif

 int SANITIZER_CDECL __tsan_atomic8_compare_exchange_weak(
-    volatile __tsan_atomic8 *a, __tsan_atomic8 *c, __tsan_atomic8 v, int mo,
-    int fail_mo);
+    volatile __tsan_atomic8 *a, __tsan_atomic8 *c, __tsan_atomic8 v,
+    __tsan_memory_order mo, __tsan_memory_order fail_mo);
 int SANITIZER_CDECL __tsan_atomic16_compare_exchange_weak(
-    volatile __tsan_atomic16 *a, __tsan_atomic16 *c, __tsan_atomic16 v, int mo,
-    int fail_mo);
+    volatile __tsan_atomic16 *a, __tsan_atomic16 *c, __tsan_atomic16 v,
+    __tsan_memory_order mo, __tsan_memory_order fail_mo);
 int SANITIZER_CDECL __tsan_atomic32_compare_exchange_weak(
-    volatile __tsan_atomic32 *a, __tsan_atomic32 *c, __tsan_atomic32 v, int mo,
-    int fail_mo);
+    volatile __tsan_atomic32 *a, __tsan_atomic32 *c, __tsan_atomic32 v,
+    __tsan_memory_order mo, __tsan_memory_order fail_mo);
 int SANITIZER_CDECL __tsan_atomic64_compare_exchange_weak(
-    volatile __tsan_atomic64 *a, __tsan_atomic64 *c, __tsan_atomic64 v, int mo,
-    int fail_mo);
+    volatile __tsan_atomic64 *a, __tsan_atomic64 *c, __tsan_atomic64 v,
+    __tsan_memory_order mo, __tsan_memory_order fail_mo);
 #if __TSAN_HAS_INT128
 int SANITIZER_CDECL __tsan_atomic128_compare_exchange_weak(
     volatile __tsan_atomic128 *a, __tsan_atomic128 *c, __tsan_atomic128 v,
-    int mo, int fail_mo);
+    __tsan_memory_order mo, __tsan_memory_order fail_mo);
 #endif

 int SANITIZER_CDECL __tsan_atomic8_compare_exchange_strong(
-    volatile __tsan_atomic8 *a, __tsan_atomic8 *c, __tsan_atomic8 v, int mo,
-    int fail_mo);
+    volatile __tsan_atomic8 *a, __tsan_atomic8 *c, __tsan_atomic8 v,
+    __tsan_memory_order mo, __tsan_memory_order fail_mo);
 int SANITIZER_CDECL __tsan_atomic16_compare_exchange_strong(
-    volatile __tsan_atomic16 *a, __tsan_atomic16 *c, __tsan_atomic16 v, int mo,
-    int fail_mo);
+    volatile __tsan_atomic16 *a, __tsan_atomic16 *c, __tsan_atomic16 v,
+    __tsan_memory_order mo, __tsan_memory_order fail_mo);
 int SANITIZER_CDECL __tsan_atomic32_compare_exchange_strong(
-    volatile __tsan_atomic32 *a, __tsan_atomic32 *c, __tsan_atomic32 v, int mo,
-    int fail_mo);
+    volatile __tsan_atomic32 *a, __tsan_atomic32 *c, __tsan_atomic32 v,
+    __tsan_memory_order mo, __tsan_memory_order fail_mo);
 int SANITIZER_CDECL __tsan_atomic64_compare_exchange_strong(
-    volatile __tsan_atomic64 *a, __tsan_atomic64 *c, __tsan_atomic64 v, int mo,
-    int fail_mo);
+    volatile __tsan_atomic64 *a, __tsan_atomic64 *c, __tsan_atomic64 v,
+    __tsan_memory_order mo, __tsan_memory_order fail_mo);
 #if __TSAN_HAS_INT128
 int SANITIZER_CDECL __tsan_atomic128_compare_exchange_strong(
     volatile __tsan_atomic128 *a, __tsan_atomic128 *c, __tsan_atomic128 v,
-    int mo, int fail_mo);
+    __tsan_memory_order mo, __tsan_memory_order fail_mo);
 #endif

 __tsan_atomic8 SANITIZER_CDECL __tsan_atomic8_compare_exchange_val(
-    volatile __tsan_atomic8 *a, __tsan_atomic8 c, __tsan_atomic8 v, int mo,
-    int fail_mo);
+    volatile __tsan_atomic8 *a, __tsan_atomic8 c, __tsan_atomic8 v,
+    __tsan_memory_order mo, __tsan_memory_order fail_mo);
 __tsan_atomic16 SANITIZER_CDECL __tsan_atomic16_compare_exchange_val(
-    volatile __tsan_atomic16 *a, __tsan_atomic16 c, __tsan_atomic16 v, int mo,
-    int fail_mo);
+    volatile __tsan_atomic16 *a, __tsan_atomic16 c, __tsan_atomic16 v,
+    __tsan_memory_order mo, __tsan_memory_order fail_mo);
 __tsan_atomic32 SANITIZER_CDECL __tsan_atomic32_compare_exchange_val(
-    volatile __tsan_atomic32 *a, __tsan_atomic32 c, __tsan_atomic32 v, int mo,
-    int fail_mo);
+    volatile __tsan_atomic32 *a, __tsan_atomic32 c, __tsan_atomic32 v,
+    __tsan_memory_order mo, __tsan_memory_order fail_mo);
 __tsan_atomic64 SANITIZER_CDECL __tsan_atomic64_compare_exchange_val(
-    volatile __tsan_atomic64 *a, __tsan_atomic64 c, __tsan_atomic64 v, int mo,
-    int fail_mo);
+    volatile __tsan_atomic64 *a, __tsan_atomic64 c, __tsan_atomic64 v,
+    __tsan_memory_order mo, __tsan_memory_order fail_mo);
 #if __TSAN_HAS_INT128
 __tsan_atomic128 SANITIZER_CDECL __tsan_atomic128_compare_exchange_val(
     volatile __tsan_atomic128 *a, __tsan_atomic128 c, __tsan_atomic128 v,
-    int mo, int fail_mo);
+    __tsan_memory_order mo, __tsan_memory_order fail_mo);
 #endif

-void SANITIZER_CDECL __tsan_atomic_thread_fence(int mo);
-void SANITIZER_CDECL __tsan_atomic_signal_fence(int mo);
+void SANITIZER_CDECL __tsan_atomic_thread_fence(__tsan_memory_order mo);
+void SANITIZER_CDECL __tsan_atomic_signal_fence(__tsan_memory_order mo);

 #ifdef __cplusplus
 } // extern "C"
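The compare-exchange family above follows the C11 convention: the weak and strong variants take a pointer to the expected value and return an int success flag, while the _val variants take the expected value directly and return the value that was observed. A usage sketch under those assumptions (the global and function names are illustrative):

    #include <sanitizer/tsan_interface_atomic.h>

    __tsan_atomic32 g_flag;

    bool try_claim() {
      __tsan_atomic32 expected = 0;
      // Succeeds only if g_flag was 0; like C11 compare_exchange, the
      // observed value is written back through 'expected' on failure.
      return __tsan_atomic32_compare_exchange_strong(
          &g_flag, &expected, 1, __tsan_memory_order_acq_rel,
          __tsan_memory_order_acquire);
    }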
27 changes: 13 additions & 14 deletions compiler-rt/lib/tsan/rtl/tsan_interceptors_mac.cpp
@@ -40,15 +40,14 @@ int setcontext(const ucontext_t *ucp);

 namespace __tsan {
 
-// The non-barrier versions of OSAtomic* functions are semantically
-// morder::relaxed, but the two variants (e.g. OSAtomicAdd32 and
-// OSAtomicAdd32Barrier) are actually aliases of each other, and we cannot have
-// different interceptors for them, because they're actually the same function.
-// Thus, we have to stay conservative and treat the non-barrier versions as
-// morder::acq_rel.
-static constexpr morder kMacOrderBarrier = morder::acq_rel;
-static constexpr morder kMacOrderNonBarrier = morder::acq_rel;
-static constexpr morder kMacFailureOrder = morder::relaxed;
+// The non-barrier versions of OSAtomic* functions are semantically mo_relaxed,
+// but the two variants (e.g. OSAtomicAdd32 and OSAtomicAdd32Barrier) are
+// actually aliases of each other, and we cannot have different interceptors for
+// them, because they're actually the same function. Thus, we have to stay
+// conservative and treat the non-barrier versions as mo_acq_rel.
+static constexpr morder kMacOrderBarrier = mo_acq_rel;
+static constexpr morder kMacOrderNonBarrier = mo_acq_rel;
+static constexpr morder kMacFailureOrder = mo_relaxed;
 
 # define OSATOMIC_INTERCEPTOR(return_t, t, tsan_t, f, tsan_atomic_f, mo) \
   TSAN_INTERCEPTOR(return_t, f, t x, volatile t *ptr) { \
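A hypothetical illustration of the aliasing the comment above describes, assuming the deprecated <libkern/OSAtomic.h> API on macOS; this is not part of the patch:

    #include <libkern/OSAtomic.h>
    #include <cstdio>

    int main() {
      // If both names resolve to the same function, TSan cannot install
      // distinct interceptors for them, so it conservatively treats the
      // non-barrier variant as acquire-release too.
      bool aliased = reinterpret_cast<void *>(&OSAtomicAdd32) ==
                     reinterpret_cast<void *>(&OSAtomicAdd32Barrier);
      std::printf("OSAtomicAdd32 aliases the Barrier variant: %d\n", aliased);
    }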
@@ -465,7 +464,7 @@ struct fake_shared_weak_count {
 // Shared and weak pointers in C++ maintain reference counts via atomics in
 // libc++.dylib, which are TSan-invisible, and this leads to false positives in
 // destructor code. These interceptors re-implements the whole functions so that
-// the morder::acq_rel semantics of the atomic decrement are visible.
+// the mo_acq_rel semantics of the atomic decrement are visible.
 //
 // Unfortunately, the interceptors cannot simply Acquire/Release some sync
 // object and call the original function, because it would have a race between
@@ -480,11 +479,11 @@ STDCXX_INTERCEPTOR(void, _ZNSt3__119__shared_weak_count16__release_sharedEv,

   SCOPED_TSAN_INTERCEPTOR(_ZNSt3__119__shared_weak_count16__release_sharedEv,
                           o);
-  if (__tsan_atomic64_fetch_add(&o->shared_owners, -1, morder::release) == 0) {
+  if (__tsan_atomic64_fetch_add(&o->shared_owners, -1, mo_release) == 0) {
     Acquire(thr, pc, (uptr)&o->shared_owners);
     o->on_zero_shared();
-    if (__tsan_atomic64_fetch_add(&o->shared_weak_owners, -1,
-                                  morder::release) == 0) {
+    if (__tsan_atomic64_fetch_add(&o->shared_weak_owners, -1, mo_release) ==
+        0) {
       Acquire(thr, pc, (uptr)&o->shared_weak_owners);
       o->on_zero_shared_weak();
     }
@@ -497,7 +496,7 @@ STDCXX_INTERCEPTOR(bool, _ZNSt3__114__shared_count16__release_sharedEv,
     return REAL(_ZNSt3__114__shared_count16__release_sharedEv)(o);
 
   SCOPED_TSAN_INTERCEPTOR(_ZNSt3__114__shared_count16__release_sharedEv, o);
-  if (__tsan_atomic64_fetch_add(&o->shared_owners, -1, morder::release) == 0) {
+  if (__tsan_atomic64_fetch_add(&o->shared_owners, -1, mo_release) == 0) {
     Acquire(thr, pc, (uptr)&o->shared_owners);
     o->on_zero_shared();
     return true;
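For readers unfamiliar with the reference-count pattern the interceptors above mirror: the decrement uses release ordering, and whichever thread drops the count to zero needs an acquire edge before destroying the object. A minimal std::atomic sketch (field and function names are illustrative, not libc++'s internals):

    #include <atomic>

    struct RefCounted {
      // libc++ biases the count: 0 means exactly one remaining owner.
      std::atomic<long> shared_owners{0};

      void on_zero_shared() {}  // would destroy the managed object

      void release_shared() {
        // fetch_sub returns the previous value, so 0 identifies the last
        // owner, and the release ordering publishes this owner's writes.
        if (shared_owners.fetch_sub(1, std::memory_order_release) == 0) {
          // The acquire fence makes every other owner's writes visible
          // before destruction. Inside libc++.dylib this edge is invisible
          // to TSan, which is why the interceptor re-implements the whole
          // function.
          std::atomic_thread_fence(std::memory_order_acquire);
          on_zero_shared();
        }
      }
    };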
[Diffs for the remaining two changed files are not shown.]
