author    Etienne Bergeron <etienneb@google.com>  2016-08-04 18:15:38 +0000
committer Etienne Bergeron <etienneb@google.com>  2016-08-04 18:15:38 +0000
commit    61fc5295eff10163308fa78056f19364c1687e1c (patch)
tree      3af890104728367afbbe94e9db9ccf581dc66453 /lib
parent    6c4c464850bb49047329fdd44b439bfd3dc3b0cd (diff)
[compiler-rt] Fix memory allocator for dynamic address space
Summary:
The sanitizer allocators can work with a dynamic address space (i.e. one
specified as ~0ULL). Unfortunately, that code path was broken in
GetMetadata and GetChunkIdx. This patch fixes both and moves the Win64
memory test to a dynamic address space; there is an ongoing migration to
move every allocator concept to a dynamic address space on Windows. For
better coverage, the unit tests now exercise the dynamic address space
on the other platforms too.

Reviewers: rnk, kcc

Subscribers: kubabrecka, dberris, llvm-commits, chrisha

Differential Revision: https://reviews.llvm.org/D23170

git-svn-id: https://llvm.org/svn/llvm-project/compiler-rt/trunk@277745 91177308-0d34-0410-b5e6-96231b3b80d8
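To make the fix concrete: with a constant, region-aligned space base the
region start can be recovered by masking the pointer directly, but with a
dynamic base the mask must be applied to the offset from the runtime base
and the base added back. A minimal standalone sketch of the two cases
(simplified names, not the allocator itself; kRegionSize here is an example
value, assumed to be a power of two as in the real allocator):

    #include <cstdint>
    using uptr = uint64_t;  // 64-bit, as in the 64-bit allocator

    constexpr uptr kRegionSize = 1ULL << 32;  // example; power of two

    // Constant base: regions are aligned to kRegionSize, so a mask suffices.
    uptr RegionBeginConstant(uptr p) {
      return p & ~(kRegionSize - 1);
    }

    // Dynamic base: only the *offset* from the runtime base is
    // region-aligned, so mask the offset and add the base back.
    uptr RegionBeginDynamic(uptr p, uptr space_beg) {
      return ((p - space_beg) & ~(kRegionSize - 1)) + space_beg;
    }

The old code used the constant-base form unconditionally, which is what
broke GetMetadata and GetChunkIdx when the space base is dynamic.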
Diffstat (limited to 'lib')
-rw-r--r--  lib/sanitizer_common/sanitizer_allocator_primary64.h   | 14
-rw-r--r--  lib/sanitizer_common/sanitizer_win.cc                  |  8
-rw-r--r--  lib/sanitizer_common/tests/sanitizer_allocator_test.cc | 32
3 files changed, 49 insertions(+), 5 deletions(-)
diff --git a/lib/sanitizer_common/sanitizer_allocator_primary64.h b/lib/sanitizer_common/sanitizer_allocator_primary64.h
index 754962837..c7c906bc3 100644
--- a/lib/sanitizer_common/sanitizer_allocator_primary64.h
+++ b/lib/sanitizer_common/sanitizer_allocator_primary64.h
@@ -94,6 +94,13 @@ class SizeClassAllocator64 {
     return P >= SpaceBeg() && P < SpaceEnd();
   }
 
+  uptr GetRegionBegin(const void *p) {
+    if (kUsingConstantSpaceBeg)
+      return reinterpret_cast<uptr>(p) & ~(kRegionSize - 1);
+    uptr space_beg = SpaceBeg();
+    return ((reinterpret_cast<uptr>(p) - space_beg) & ~(kRegionSize - 1)) + space_beg;
+  }
+
   uptr GetSizeClass(const void *p) {
     if (kUsingConstantSpaceBeg && (kSpaceBeg % kSpaceSize) == 0)
       return ((reinterpret_cast<uptr>(p)) / kRegionSize) % kNumClassesRounded;
@@ -106,7 +113,7 @@ class SizeClassAllocator64 {
     uptr size = SizeClassMap::Size(class_id);
     if (!size) return nullptr;
     uptr chunk_idx = GetChunkIdx((uptr)p, size);
-    uptr reg_beg = (uptr)p & ~(kRegionSize - 1);
+    uptr reg_beg = GetRegionBegin(p);
     uptr beg = chunk_idx * size;
     uptr next_beg = beg + size;
     if (class_id >= kNumClasses) return nullptr;
@@ -258,7 +265,10 @@ class SizeClassAllocator64 {
     return &regions[class_id];
   }
 
-  static uptr GetChunkIdx(uptr chunk, uptr size) {
+  uptr GetChunkIdx(uptr chunk, uptr size) {
+    if (!kUsingConstantSpaceBeg)
+      chunk -= SpaceBeg();
+
     uptr offset = chunk % kRegionSize;
     // Here we divide by a non-constant. This is costly.
     // size always fits into 32-bits. If the offset fits too, use 32-bit div.
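The division comment above refers to a standard micro-optimization: a
64-bit divide by a runtime value is noticeably more expensive than a
32-bit one on x86-64. A standalone sketch of the shape the method takes
after this patch (the tail beyond the hunk is reconstructed from the
comment, and the constants are assumed example values, so treat this as
illustrative rather than the file's exact code):

    #include <cstdint>
    using uptr = uint64_t;  // 64-bit, as in the 64-bit allocator
    using u32 = uint32_t;

    constexpr uptr kRegionSize = 1ULL << 32;  // example; power of two

    uptr GetChunkIdxSketch(uptr chunk, uptr size, uptr space_beg,
                           bool using_constant_space_beg) {
      // With a dynamic base, rebase the pointer so that the modulo and
      // division below operate on an offset from the start of the space.
      if (!using_constant_space_beg)
        chunk -= space_beg;
      uptr offset = chunk % kRegionSize;
      // size always fits into 32 bits; if the offset does too, the
      // cheaper 32-bit division can be used.
      if (offset >> 32 == 0)
        return (u32)offset / (u32)size;
      return offset / size;
    }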
diff --git a/lib/sanitizer_common/sanitizer_win.cc b/lib/sanitizer_common/sanitizer_win.cc
index cb4d7ddaa..83481769a 100644
--- a/lib/sanitizer_common/sanitizer_win.cc
+++ b/lib/sanitizer_common/sanitizer_win.cc
@@ -221,8 +221,12 @@ void *MmapFixedNoAccess(uptr fixed_addr, uptr size, const char *name) {
 }
 
 void *MmapNoAccess(uptr size) {
-  // FIXME: unsupported.
-  return nullptr;
+  void *res = VirtualAlloc(nullptr, size, MEM_RESERVE, PAGE_NOACCESS);
+  if (res == 0)
+    Report("WARNING: %s failed to "
+           "mprotect %p (%zd) bytes (error code: %d)\n",
+           SanitizerToolName, size, size, GetLastError());
+  return res;
 }
 
 bool MprotectNoAccess(uptr addr, uptr size) {
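For readers less familiar with the Win32 call introduced above:
VirtualAlloc with MEM_RESERVE claims a contiguous range of virtual
address space without committing any physical memory, and PAGE_NOACCESS
makes every touch of the range fault, which is exactly what a large
allocator reservation wants. A minimal standalone usage sketch
(hypothetical program, not part of the patch):

    #include <windows.h>
    #include <cstdio>

    int main() {
      // Reserve 1 TiB of address space: no physical backing, no access.
      // Sub-ranges are committed later with VirtualAlloc(MEM_COMMIT).
      SIZE_T size = 1ULL << 40;
      void *p = VirtualAlloc(nullptr, size, MEM_RESERVE, PAGE_NOACCESS);
      if (!p) {
        printf("reservation failed (error code: %lu)\n", GetLastError());
        return 1;
      }
      printf("reserved %zu bytes at %p\n", (size_t)size, p);
      VirtualFree(p, 0, MEM_RELEASE);  // release the whole reservation
      return 0;
    }

Note that the warning in the patch says "mprotect" even though the
failing call is VirtualAlloc; the wording appears to be reused from the
POSIX code path.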
diff --git a/lib/sanitizer_common/tests/sanitizer_allocator_test.cc b/lib/sanitizer_common/tests/sanitizer_allocator_test.cc
index 03f765ba0..cfb44a1af 100644
--- a/lib/sanitizer_common/tests/sanitizer_allocator_test.cc
+++ b/lib/sanitizer_common/tests/sanitizer_allocator_test.cc
@@ -30,7 +30,10 @@
 
 #if SANITIZER_CAN_USE_ALLOCATOR64
 #if SANITIZER_WINDOWS
-static const uptr kAllocatorSpace = 0x10000000000ULL;
+// On Windows 64-bit there is no easy way to find a large enough fixed address
+// space that is always available. Thus, a dynamically allocated address space
+// is used instead (i.e. ~(uptr)0).
+static const uptr kAllocatorSpace = ~(uptr)0;
 static const uptr kAllocatorSize = 0x10000000000ULL;  // 1T.
 static const u64 kAddressSpaceSize = 1ULL << 40;
 #else
@@ -41,6 +44,8 @@ static const u64 kAddressSpaceSize = 1ULL << 47;
 #endif
 
 typedef SizeClassAllocator64<
   kAllocatorSpace, kAllocatorSize, 16, DefaultSizeClassMap> Allocator64;
+typedef SizeClassAllocator64<
+  ~(uptr)0, kAllocatorSize, 16, DefaultSizeClassMap> Allocator64Dynamic;
 typedef SizeClassAllocator64<
   kAllocatorSpace, kAllocatorSize, 16, CompactSizeClassMap> Allocator64Compact;
@@ -158,6 +163,10 @@ TEST(SanitizerCommon, SizeClassAllocator64) {
   TestSizeClassAllocator<Allocator64>();
 }
 
+TEST(SanitizerCommon, SizeClassAllocator64Dynamic) {
+  TestSizeClassAllocator<Allocator64Dynamic>();
+}
+
 TEST(SanitizerCommon, SizeClassAllocator64Compact) {
   TestSizeClassAllocator<Allocator64Compact>();
 }
@@ -202,6 +211,10 @@ TEST(SanitizerCommon, SizeClassAllocator64MetadataStress) {
   SizeClassAllocatorMetadataStress<Allocator64>();
 }
 
+TEST(SanitizerCommon, SizeClassAllocator64DynamicMetadataStress) {
+  SizeClassAllocatorMetadataStress<Allocator64Dynamic>();
+}
+
 TEST(SanitizerCommon, SizeClassAllocator64CompactMetadataStress) {
   SizeClassAllocatorMetadataStress<Allocator64Compact>();
 }
@@ -238,6 +251,9 @@ void SizeClassAllocatorGetBlockBeginStress() {
 TEST(SanitizerCommon, SizeClassAllocator64GetBlockBegin) {
   SizeClassAllocatorGetBlockBeginStress<Allocator64>();
 }
+TEST(SanitizerCommon, SizeClassAllocator64DynamicGetBlockBegin) {
+  SizeClassAllocatorGetBlockBeginStress<Allocator64Dynamic>();
+}
 TEST(SanitizerCommon, SizeClassAllocator64CompactGetBlockBegin) {
   SizeClassAllocatorGetBlockBeginStress<Allocator64Compact>();
 }
@@ -484,6 +500,12 @@ TEST(SanitizerCommon, CombinedAllocator64) {
       SizeClassAllocatorLocalCache<Allocator64> > ();
 }
 
+TEST(SanitizerCommon, CombinedAllocator64Dynamic) {
+  TestCombinedAllocator<Allocator64Dynamic,
+      LargeMmapAllocator<>,
+      SizeClassAllocatorLocalCache<Allocator64Dynamic> > ();
+}
+
 TEST(SanitizerCommon, CombinedAllocator64Compact) {
   TestCombinedAllocator<Allocator64Compact,
       LargeMmapAllocator<>,
@@ -537,6 +559,11 @@ TEST(SanitizerCommon, SizeClassAllocator64LocalCache) {
       SizeClassAllocatorLocalCache<Allocator64> >();
 }
 
+TEST(SanitizerCommon, SizeClassAllocator64DynamicLocalCache) {
+  TestSizeClassAllocatorLocalCache<
+      SizeClassAllocatorLocalCache<Allocator64Dynamic> >();
+}
+
 TEST(SanitizerCommon, SizeClassAllocator64CompactLocalCache) {
   TestSizeClassAllocatorLocalCache<
       SizeClassAllocatorLocalCache<Allocator64Compact> >();
@@ -710,6 +737,9 @@ void TestSizeClassAllocatorIteration() {
 TEST(SanitizerCommon, SizeClassAllocator64Iteration) {
   TestSizeClassAllocatorIteration<Allocator64>();
 }
+TEST(SanitizerCommon, SizeClassAllocator64DynamicIteration) {
+  TestSizeClassAllocatorIteration<Allocator64Dynamic>();
+}
 #endif
 
 TEST(SanitizerCommon, SizeClassAllocator32Iteration) {