author     Alexander Potapenko <glider@google.com>   2018-07-20 16:28:49 +0000
committer  Alexander Potapenko <glider@google.com>   2018-07-20 16:28:49 +0000
commit     7d86834e82ca6d4bd80a18d40c405001243e2d5a (patch)
tree       ce836ad2829034fe45f0407eb8531f46579f1b55
parent     8ec2a959fa2bbc0be28643f05759e149677c7a91 (diff)
[MSan] run materializeChecks() before materializeStores()
When pointer checking is enabled, it's important that every pointer is checked before its value is used. For stores, MSan used to generate code that calculates the shadow/origin addresses from a pointer before checking it. For userspace this isn't a problem, because the shadow calculation code is quite simple and the compiler is able to move it after the check at -O2. But for KMSAN, getShadowOriginPtr() creates a runtime call, so we want the check to be performed strictly before that call.

Swapping materializeChecks() and materializeStores() resolves the issue: both functions insert code before the given IR location, so the new insertion order guarantees that the code calculating the shadow address sits between the address check and the memory access.

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@337571 91177308-0d34-0410-b5e6-96231b3b80d8
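Below is a minimal, self-contained C++ sketch of the insert-before-anchor argument above; it is not MemorySanitizer code, and the std::list, the strings, and the step names are illustrative stand-ins. Because both materialization steps insert before the same anchor, whatever is materialized second ends up closest to the anchor, and therefore after everything materialized first.

#include <iostream>
#include <list>
#include <string>

int main() {
  // The anchor models the original store instruction that instrumentation
  // is inserted in front of.
  std::list<std::string> block = {"store i32 %x"};
  auto anchor = block.begin();

  // Step 1 -- analogous to materializeChecks(): the address check is
  // inserted before the anchor.
  block.insert(anchor, "check that the shadow of %p is clean");

  // Step 2 -- analogous to materializeStores(): the shadow-address
  // computation and the shadow store are also inserted before the anchor,
  // so they land after the check from step 1.
  block.insert(anchor, "compute shadow address of %p (getShadowOriginPtr)");
  block.insert(anchor, "store the shadow of %x");

  for (const std::string &inst : block)
    std::cout << inst << "\n";
  // Prints:
  //   check that the shadow of %p is clean
  //   compute shadow address of %p (getShadowOriginPtr)
  //   store the shadow of %x
  //   store i32 %x
}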
-rw-r--r--   lib/Transforms/Instrumentation/MemorySanitizer.cpp             13
-rw-r--r--   test/Instrumentation/MemorySanitizer/check_access_address.ll    3
2 files changed, 9 insertions, 7 deletions
diff --git a/lib/Transforms/Instrumentation/MemorySanitizer.cpp b/lib/Transforms/Instrumentation/MemorySanitizer.cpp
index 7828fcc432c..40033fc7eb0 100644
--- a/lib/Transforms/Instrumentation/MemorySanitizer.cpp
+++ b/lib/Transforms/Instrumentation/MemorySanitizer.cpp
@@ -918,9 +918,6 @@ struct MemorySanitizerVisitor : public InstVisitor<MemorySanitizerVisitor> {
StoreInst *NewSI = IRB.CreateAlignedStore(Shadow, ShadowPtr, Alignment);
LLVM_DEBUG(dbgs() << " STORE: " << *NewSI << "\n");
- if (ClCheckAccessAddress)
- insertShadowCheck(Addr, NewSI);
-
if (SI->isAtomic())
SI->setOrdering(addReleaseOrdering(SI->getOrdering()));
@@ -1024,13 +1021,13 @@ struct MemorySanitizerVisitor : public InstVisitor<MemorySanitizerVisitor> {
InstrumentationList.size() + StoreList.size() >
(unsigned)ClInstrumentationWithCallThreshold;
- // Delayed instrumentation of StoreInst.
- // This may add new checks to be inserted later.
- materializeStores(InstrumentWithCalls);
-
// Insert shadow value checks.
materializeChecks(InstrumentWithCalls);
+ // Delayed instrumentation of StoreInst.
+ // This may not add new address checks.
+ materializeStores(InstrumentWithCalls);
+
return true;
}
@@ -1490,6 +1487,8 @@ struct MemorySanitizerVisitor : public InstVisitor<MemorySanitizerVisitor> {
/// Optionally, checks that the store address is fully defined.
void visitStoreInst(StoreInst &I) {
StoreList.push_back(&I);
+ if (ClCheckAccessAddress)
+ insertShadowCheck(I.getPointerOperand(), &I);
}
void handleCASOrRMW(Instruction &I) {
diff --git a/test/Instrumentation/MemorySanitizer/check_access_address.ll b/test/Instrumentation/MemorySanitizer/check_access_address.ll
index 38f29b71cdf..21bb4125606 100644
--- a/test/Instrumentation/MemorySanitizer/check_access_address.ll
+++ b/test/Instrumentation/MemorySanitizer/check_access_address.ll
@@ -38,11 +38,14 @@ entry:
; CHECK-LABEL: @Store
; CHECK: load {{.*}} @__msan_param_tls
+; Shadow calculations must happen after the check.
+; CHECK-NOT: xor
; CHECK: icmp
; CHECK: br i1
; CHECK: <label>
; CHECK: call void @__msan_warning_noreturn
; CHECK: <label>
+; CHECK: xor
; CHECK: store
; CHECK: store i32 %x
; CHECK: ret void