diff options
author | Kostya Serebryany <kcc@google.com> | 2014-04-15 13:30:32 +0000 |
---|---|---|
committer | Kostya Serebryany <kcc@google.com> | 2014-04-15 13:30:32 +0000 |
commit | 170ad114f2d633267b2a9c87e5e0a70963707cd8 (patch) | |
tree | cb4e9747f1bda420418e83e814349bb5f98e813a /lib | |
parent | 69d526afcfd969cd208c97e9802f4b36e5d08d17 (diff) |
[asan] fix the allocator code to not use opaque data structure, which was larger than needed. This was a leftover of the allocator1=>allocator2 migration; thanks Yuri Gribov for reminding
git-svn-id: https://llvm.org/svn/llvm-project/compiler-rt/trunk@206280 91177308-0d34-0410-b5e6-96231b3b80d8
Diffstat (limited to 'lib')
-rw-r--r-- | lib/asan/asan_allocator.h | 41 | ||||
-rw-r--r-- | lib/asan/asan_allocator2.cc | 71 |
2 files changed, 57 insertions, 55 deletions
diff --git a/lib/asan/asan_allocator.h b/lib/asan/asan_allocator.h index 10c9eee8d..6b2324ae3 100644 --- a/lib/asan/asan_allocator.h +++ b/lib/asan/asan_allocator.h @@ -17,6 +17,7 @@ #include "asan_internal.h" #include "asan_interceptors.h" +#include "sanitizer_common/sanitizer_allocator.h" #include "sanitizer_common/sanitizer_list.h" namespace __asan { @@ -92,10 +93,46 @@ class AsanChunkFifoList: public IntrusiveList<AsanChunk> { uptr size_; }; +struct AsanMapUnmapCallback { + void OnMap(uptr p, uptr size) const; + void OnUnmap(uptr p, uptr size) const; +}; + +#if SANITIZER_CAN_USE_ALLOCATOR64 +# if defined(__powerpc64__) +const uptr kAllocatorSpace = 0xa0000000000ULL; +const uptr kAllocatorSize = 0x20000000000ULL; // 2T. +# else +const uptr kAllocatorSpace = 0x600000000000ULL; +const uptr kAllocatorSize = 0x40000000000ULL; // 4T. +# endif +typedef DefaultSizeClassMap SizeClassMap; +typedef SizeClassAllocator64<kAllocatorSpace, kAllocatorSize, 0 /*metadata*/, + SizeClassMap, AsanMapUnmapCallback> PrimaryAllocator; +#else // Fallback to SizeClassAllocator32. +static const uptr kRegionSizeLog = 20; +static const uptr kNumRegions = SANITIZER_MMAP_RANGE_SIZE >> kRegionSizeLog; +# if SANITIZER_WORDSIZE == 32 +typedef FlatByteMap<kNumRegions> ByteMap; +# elif SANITIZER_WORDSIZE == 64 +typedef TwoLevelByteMap<(kNumRegions >> 12), 1 << 12> ByteMap; +# endif +typedef CompactSizeClassMap SizeClassMap; +typedef SizeClassAllocator32<0, SANITIZER_MMAP_RANGE_SIZE, 16, + SizeClassMap, kRegionSizeLog, + ByteMap, + AsanMapUnmapCallback> PrimaryAllocator; +#endif // SANITIZER_CAN_USE_ALLOCATOR64 + +typedef SizeClassAllocatorLocalCache<PrimaryAllocator> AllocatorCache; +typedef LargeMmapAllocator<AsanMapUnmapCallback> SecondaryAllocator; +typedef CombinedAllocator<PrimaryAllocator, AllocatorCache, + SecondaryAllocator> Allocator; + + struct AsanThreadLocalMallocStorage { uptr quarantine_cache[16]; - // Allocator cache contains atomic_uint64_t which must be 8-byte aligned. 
- ALIGNED(8) uptr allocator2_cache[96 * (512 * 8 + 16)]; // Opaque. + AllocatorCache allocator2_cache; void CommitBack(); private: // These objects are allocated via mmap() and are zero-initialized. diff --git a/lib/asan/asan_allocator2.cc b/lib/asan/asan_allocator2.cc index ea0ffbe1c..e5626bf26 100644 --- a/lib/asan/asan_allocator2.cc +++ b/lib/asan/asan_allocator2.cc @@ -21,7 +21,6 @@ #include "asan_report.h" #include "asan_stack.h" #include "asan_thread.h" -#include "sanitizer_common/sanitizer_allocator.h" #include "sanitizer_common/sanitizer_flags.h" #include "sanitizer_common/sanitizer_internal_defs.h" #include "sanitizer_common/sanitizer_list.h" @@ -31,64 +30,30 @@ namespace __asan { -struct AsanMapUnmapCallback { - void OnMap(uptr p, uptr size) const { - PoisonShadow(p, size, kAsanHeapLeftRedzoneMagic); - // Statistics. - AsanStats &thread_stats = GetCurrentThreadStats(); - thread_stats.mmaps++; - thread_stats.mmaped += size; - } - void OnUnmap(uptr p, uptr size) const { - PoisonShadow(p, size, 0); - // We are about to unmap a chunk of user memory. - // Mark the corresponding shadow memory as not needed. - FlushUnneededASanShadowMemory(p, size); - // Statistics. - AsanStats &thread_stats = GetCurrentThreadStats(); - thread_stats.munmaps++; - thread_stats.munmaped += size; - } -}; - -#if SANITIZER_CAN_USE_ALLOCATOR64 -# if defined(__powerpc64__) -const uptr kAllocatorSpace = 0xa0000000000ULL; -const uptr kAllocatorSize = 0x20000000000ULL; // 2T. -# else -const uptr kAllocatorSpace = 0x600000000000ULL; -const uptr kAllocatorSize = 0x40000000000ULL; // 4T. -# endif -typedef DefaultSizeClassMap SizeClassMap; -typedef SizeClassAllocator64<kAllocatorSpace, kAllocatorSize, 0 /*metadata*/, - SizeClassMap, AsanMapUnmapCallback> PrimaryAllocator; -#else // Fallback to SizeClassAllocator32. 
-static const uptr kRegionSizeLog = 20; -static const uptr kNumRegions = SANITIZER_MMAP_RANGE_SIZE >> kRegionSizeLog; -# if SANITIZER_WORDSIZE == 32 -typedef FlatByteMap<kNumRegions> ByteMap; -# elif SANITIZER_WORDSIZE == 64 -typedef TwoLevelByteMap<(kNumRegions >> 12), 1 << 12> ByteMap; -# endif -typedef CompactSizeClassMap SizeClassMap; -typedef SizeClassAllocator32<0, SANITIZER_MMAP_RANGE_SIZE, 16, - SizeClassMap, kRegionSizeLog, - ByteMap, - AsanMapUnmapCallback> PrimaryAllocator; -#endif // SANITIZER_CAN_USE_ALLOCATOR64 - -typedef SizeClassAllocatorLocalCache<PrimaryAllocator> AllocatorCache; -typedef LargeMmapAllocator<AsanMapUnmapCallback> SecondaryAllocator; -typedef CombinedAllocator<PrimaryAllocator, AllocatorCache, - SecondaryAllocator> Allocator; +void AsanMapUnmapCallback::OnMap(uptr p, uptr size) const { + PoisonShadow(p, size, kAsanHeapLeftRedzoneMagic); + // Statistics. + AsanStats &thread_stats = GetCurrentThreadStats(); + thread_stats.mmaps++; + thread_stats.mmaped += size; +} +void AsanMapUnmapCallback::OnUnmap(uptr p, uptr size) const { + PoisonShadow(p, size, 0); + // We are about to unmap a chunk of user memory. + // Mark the corresponding shadow memory as not needed. + FlushUnneededASanShadowMemory(p, size); + // Statistics. + AsanStats &thread_stats = GetCurrentThreadStats(); + thread_stats.munmaps++; + thread_stats.munmaped += size; +} // We can not use THREADLOCAL because it is not supported on some of the // platforms we care about (OSX 10.6, Android). // static THREADLOCAL AllocatorCache cache; AllocatorCache *GetAllocatorCache(AsanThreadLocalMallocStorage *ms) { CHECK(ms); - CHECK_LE(sizeof(AllocatorCache), sizeof(ms->allocator2_cache)); - return reinterpret_cast<AllocatorCache *>(ms->allocator2_cache); + return &ms->allocator2_cache; } static Allocator allocator; |