diff options
author | Kostya Serebryany <kcc@google.com> | 2012-05-10 13:48:04 +0000 |
---|---|---|
committer | Kostya Serebryany <kcc@google.com> | 2012-05-10 13:48:04 +0000 |
commit | 7ac41484ea322e0ea5774df681660269f5dc321e (patch) | |
tree | 85fd49d59eebae20d3a82770431fd26478d2611b /lib/tsan/rtl/tsan_sync.cc | |
parent | f2b1df7cb8f53f51bcb105e0d2930d99325bb681 (diff) |
[tsan] First commit of ThreadSanitizer (TSan) run-time library.
Algorithm description: http://code.google.com/p/thread-sanitizer/wiki/ThreadSanitizerAlgorithm
Status:
The tool is known to work on large real-life applications, but still has quite a few rough edges.
Nothing is guaranteed yet.
The tool works on x86_64 Linux.
Support for 64-bit MacOS 10.7+ is planned for late 2012.
Support for 32-bit OSes is doable, but problematic and not yet planned.
Further commits coming:
- tests
- makefiles
- documentation
- clang driver patch
The code was previously developed at http://code.google.com/p/data-race-test/source/browse/trunk/v2/
by Dmitry Vyukov and Kostya Serebryany with contributions from
Timur Iskhodzhanov, Alexander Potapenko, Alexey Samsonov and Evgeniy Stepanov.
git-svn-id: https://llvm.org/svn/llvm-project/compiler-rt/trunk@156542 91177308-0d34-0410-b5e6-96231b3b80d8
Diffstat (limited to 'lib/tsan/rtl/tsan_sync.cc')
-rw-r--r-- | lib/tsan/rtl/tsan_sync.cc | 177 |
1 file changed, 177 insertions, 0 deletions
diff --git a/lib/tsan/rtl/tsan_sync.cc b/lib/tsan/rtl/tsan_sync.cc new file mode 100644 index 000000000..0b31ab9ce --- /dev/null +++ b/lib/tsan/rtl/tsan_sync.cc @@ -0,0 +1,177 @@ +//===-- tsan_sync.cc --------------------------------------------*- C++ -*-===// +// +// The LLVM Compiler Infrastructure +// +// This file is distributed under the University of Illinois Open Source +// License. See LICENSE.TXT for details. +// +//===----------------------------------------------------------------------===// +// +// This file is a part of ThreadSanitizer (TSan), a race detector. +// +//===----------------------------------------------------------------------===// +#include "tsan_sync.h" +#include "tsan_placement_new.h" +#include "tsan_rtl.h" +#include "tsan_mman.h" + +namespace __tsan { + +SyncVar::SyncVar(uptr addr) + : mtx(MutexTypeSyncVar, StatMtxSyncVar) + , addr(addr) + , owner_tid(kInvalidTid) + , recursion() + , is_rw() + , is_recursive() + , is_broken() { +} + +SyncTab::Part::Part() + : mtx(MutexTypeSyncTab, StatMtxSyncTab) + , val() { +} + +SyncTab::SyncTab() { +} + +SyncTab::~SyncTab() { + for (int i = 0; i < kPartCount; i++) { + while (tab_[i].val) { + SyncVar *tmp = tab_[i].val; + tab_[i].val = tmp->next; + DestroyAndFree(tmp); + } + } +} + +SyncVar* SyncTab::GetAndLock(ThreadState *thr, uptr pc, + uptr addr, bool write_lock) { + Part *p = &tab_[PartIdx(addr)]; + { + ReadLock l(&p->mtx); + for (SyncVar *res = p->val; res; res = res->next) { + if (res->addr == addr) { + if (write_lock) + res->mtx.Lock(); + else + res->mtx.ReadLock(); + return res; + } + } + } + { + Lock l(&p->mtx); + SyncVar *res = p->val; + for (; res; res = res->next) { + if (res->addr == addr) + break; + } + if (res == 0) { + StatInc(thr, StatSyncCreated); + void *mem = internal_alloc(MBlockSync, sizeof(SyncVar)); + res = new(mem) SyncVar(addr); + res->creation_stack.ObtainCurrent(thr, pc); + res->next = p->val; + p->val = res; + } + if (write_lock) + res->mtx.Lock(); + else + 
res->mtx.ReadLock(); + return res; + } +} + +SyncVar* SyncTab::GetAndRemove(ThreadState *thr, uptr pc, uptr addr) { + Part *p = &tab_[PartIdx(addr)]; + SyncVar *res = 0; + { + Lock l(&p->mtx); + SyncVar **prev = &p->val; + res = *prev; + while (res) { + if (res->addr == addr) { + *prev = res->next; + break; + } + prev = &res->next; + res = *prev; + } + } + if (res) { + StatInc(thr, StatSyncDestroyed); + res->mtx.Lock(); + res->mtx.Unlock(); + } + return res; +} + +int SyncTab::PartIdx(uptr addr) { + return (addr >> 3) % kPartCount; +} + +StackTrace::StackTrace() + : n_() + , s_() { +} + +StackTrace::~StackTrace() { + Reset(); +} + +void StackTrace::Reset() { + if (s_) { + CHECK_NE(n_, 0); + internal_free(s_); + s_ = 0; + n_ = 0; + } +} + +void StackTrace::Init(const uptr *pcs, uptr cnt) { + Reset(); + if (cnt == 0) + return; + n_ = cnt; + s_ = (uptr*)internal_alloc(MBlockStackTrace, cnt * sizeof(s_[0])); + internal_memcpy(s_, pcs, cnt * sizeof(s_[0])); +} + +void StackTrace::ObtainCurrent(ThreadState *thr, uptr toppc) { + Reset(); + n_ = thr->shadow_stack_pos - &thr->shadow_stack[0]; + if (n_ + !!toppc == 0) + return; + s_ = (uptr*)internal_alloc(MBlockStackTrace, (n_ + !!toppc) * sizeof(s_[0])); + for (uptr i = 0; i < n_; i++) + s_[i] = thr->shadow_stack[i]; + if (toppc) { + s_[n_] = toppc; + n_++; + } +} + +void StackTrace::CopyFrom(const StackTrace& other) { + Reset(); + Init(other.Begin(), other.Size()); +} + +bool StackTrace::IsEmpty() const { + return n_ == 0; +} + +uptr StackTrace::Size() const { + return n_; +} + +uptr StackTrace::Get(uptr i) const { + CHECK_LT(i, n_); + return s_[i]; +} + +const uptr *StackTrace::Begin() const { + return s_; +} + +} // namespace __tsan |