path: root/lib/tsan/rtl/tsan_interface_atomic.cc
author    Kostya Serebryany <kcc@google.com>    2014-12-09 01:31:14 +0000
committer Kostya Serebryany <kcc@google.com>    2014-12-09 01:31:14 +0000
commit    a371b02fccce7d968732449b2f094380cfe3d4f8 (patch)
tree      27b1c59ad6f6c4cf6d7ff0bfd066bb4b1f041aa2 /lib/tsan/rtl/tsan_interface_atomic.cc
parent    e10c4275cb236f88e4b4015b66c23518a46e6cab (diff)
[tsan] remove TSAN_GO in favor of SANITIZER_GO
git-svn-id: https://llvm.org/svn/llvm-project/compiler-rt/trunk@223732 91177308-0d34-0410-b5e6-96231b3b80d8
Diffstat (limited to 'lib/tsan/rtl/tsan_interface_atomic.cc')
-rw-r--r--  lib/tsan/rtl/tsan_interface_atomic.cc  20
1 file changed, 10 insertions, 10 deletions
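The change below is a mechanical rename: every `TSAN_GO` preprocessor guard in this file becomes the shared `SANITIZER_GO` macro used across the sanitizer runtimes. A minimal standalone sketch of the guard pattern after the rename (only the `SANITIZER_GO` name comes from this commit; the strings and program around it are illustrative):

```cpp
#include <cstdio>

// SANITIZER_GO is defined by the build when compiling the Go flavor of the
// runtime; code that only makes sense for the C/C++ runtime is guarded out.
#ifndef SANITIZER_GO
static const char *kFlavor = "C/C++ runtime";  // default build
#else
static const char *kFlavor = "Go runtime";     // built with -DSANITIZER_GO
#endif

int main() {
  std::printf("building the %s flavor\n", kFlavor);
  return 0;
}
```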
diff --git a/lib/tsan/rtl/tsan_interface_atomic.cc b/lib/tsan/rtl/tsan_interface_atomic.cc
index ceb32bd60..9b6995116 100644
--- a/lib/tsan/rtl/tsan_interface_atomic.cc
+++ b/lib/tsan/rtl/tsan_interface_atomic.cc
@@ -32,7 +32,7 @@ typedef unsigned char a8;
typedef unsigned short a16; // NOLINT
typedef unsigned int a32;
typedef unsigned long long a64; // NOLINT
-#if !defined(TSAN_GO) && (defined(__SIZEOF_INT128__) \
+#if !defined(SANITIZER_GO) && (defined(__SIZEOF_INT128__) \
|| (__clang_major__ * 100 + __clang_minor__ >= 302))
__extension__ typedef __int128 a128;
# define __TSAN_HAS_INT128 1
@@ -40,7 +40,7 @@ __extension__ typedef __int128 a128;
# define __TSAN_HAS_INT128 0
#endif
-#ifndef TSAN_GO
+#ifndef SANITIZER_GO
// Protects emulation of 128-bit atomic operations.
static StaticSpinMutex mutex128;
#endif
@@ -125,7 +125,7 @@ template<typename T> T func_cas(volatile T *v, T cmp, T xch) {
// Atomic ops are executed under tsan internal mutex,
// here we assume that the atomic variables are not accessed
// from non-instrumented code.
-#if !defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_16) && !defined(TSAN_GO)
+#if !defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_16) && !defined(SANITIZER_GO)
a128 func_xchg(volatile a128 *v, a128 op) {
SpinMutexLock lock(&mutex128);
a128 cmp = *v;
@@ -198,7 +198,7 @@ static int SizeLog() {
// this leads to false negatives only in very obscure cases.
}
-#ifndef TSAN_GO
+#ifndef SANITIZER_GO
static atomic_uint8_t *to_atomic(const volatile a8 *a) {
return reinterpret_cast<atomic_uint8_t *>(const_cast<a8 *>(a));
}
@@ -234,7 +234,7 @@ static T NoTsanAtomicLoad(const volatile T *a, morder mo) {
return atomic_load(to_atomic(a), to_mo(mo));
}
-#if __TSAN_HAS_INT128 && !defined(TSAN_GO)
+#if __TSAN_HAS_INT128 && !defined(SANITIZER_GO)
static a128 NoTsanAtomicLoad(const volatile a128 *a, morder mo) {
SpinMutexLock lock(&mutex128);
return *a;
@@ -264,7 +264,7 @@ static void NoTsanAtomicStore(volatile T *a, T v, morder mo) {
atomic_store(to_atomic(a), v, to_mo(mo));
}
-#if __TSAN_HAS_INT128 && !defined(TSAN_GO)
+#if __TSAN_HAS_INT128 && !defined(SANITIZER_GO)
static void NoTsanAtomicStore(volatile a128 *a, a128 v, morder mo) {
SpinMutexLock lock(&mutex128);
*a = v;
@@ -456,7 +456,7 @@ static T AtomicCAS(ThreadState *thr, uptr pc,
return c;
}
-#ifndef TSAN_GO
+#ifndef SANITIZER_GO
static void NoTsanAtomicFence(morder mo) {
__sync_synchronize();
}
@@ -468,7 +468,7 @@ static void AtomicFence(ThreadState *thr, uptr pc, morder mo) {
#endif
// Interface functions follow.
-#ifndef TSAN_GO
+#ifndef SANITIZER_GO
// C/C++
@@ -867,7 +867,7 @@ void __tsan_atomic_signal_fence(morder mo) {
}
} // extern "C"
-#else // #ifndef TSAN_GO
+#else // #ifndef SANITIZER_GO
// Go
@@ -950,4 +950,4 @@ void __tsan_go_atomic64_compare_exchange(
*(bool*)(a+24) = (cur == cmp);
}
} // extern "C"
-#endif // #ifndef TSAN_GO
+#endif // #ifndef SANITIZER_GO
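The 128-bit paths touched above emulate atomic operations by serializing them under a single mutex when the target lacks a 16-byte compare-and-swap (`__GCC_HAVE_SYNC_COMPARE_AND_SWAP_16`). A standalone sketch of that fallback, using `std::mutex` in place of the runtime's `StaticSpinMutex` (function names here are illustrative, not taken from the file):

```cpp
#include <mutex>

__extension__ typedef __int128 a128;

// Single lock serializing all emulated 128-bit atomics, mirroring mutex128
// in tsan_interface_atomic.cc.
static std::mutex mutex128;

// Emulated exchange: read the old value and store the new one under the lock.
static a128 emulated_xchg(volatile a128 *v, a128 op) {
  std::lock_guard<std::mutex> lock(mutex128);
  a128 cmp = *v;
  *v = op;
  return cmp;
}

// Emulated compare-and-swap: store xch only if the current value equals *cmp;
// otherwise report the observed value back through *cmp.
static bool emulated_cas(volatile a128 *v, a128 *cmp, a128 xch) {
  std::lock_guard<std::mutex> lock(mutex128);
  if (*v == *cmp) {
    *v = xch;
    return true;
  }
  *cmp = *v;
  return false;
}
```

As the comment in the hunk at line 125 notes, this is sound only because the runtime executes atomic ops under its internal mutex and assumes the same variables are not touched concurrently from non-instrumented code.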