author    Evgeniy Stepanov <eugeni.stepanov@gmail.com>  2012-12-06 11:41:03 +0000
committer Evgeniy Stepanov <eugeni.stepanov@gmail.com>  2012-12-06 11:41:03 +0000
commit    4031b194acd50f35b75658f66ee3bb1b4afcfd25 (patch)
tree      bbcccc2a3fe47d96d3c8d974da061ca8c2f8acc2 /lib/Transforms/Instrumentation/MemorySanitizer.cpp
parent    6afe478e005bf9f112b32b7ec25879475adc1915 (diff)
[msan] Do not store origin for clean values.
Instead of unconditionally storing origin with every application store, only do this when the shadow of the stored value is != 0.

This change also delays instrumentation of stores until after the walk over the function's instructions, because adding new basic blocks confuses InstVisitor.

We only keep 1 origin value per 4 bytes of application memory. This change fixes the bug where a store of a single clean byte wiped the origin for the whole 4-byte area.

Since stores of uninitialized values are relatively uncommon, this change improves performance of track-origins mode by 5% median and by up to 47% on specs.

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@169490 91177308-0d34-0410-b5e6-96231b3b80d8
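For illustration, a minimal C++ sketch of the runtime behaviour the new instrumentation encodes around a 4-byte application store. All names here (fake_shadow_mem, instrumented_store, ...) are hypothetical and not part of the commit; the pass itself emits LLVM IR, as shown in the diff below. The shadow is written unconditionally, while the origin (one 32-bit slot per 4 bytes of application memory) is only written when the shadow is non-zero, i.e. when some bits of the stored value are uninitialized.

// Illustrative sketch only; this is not the code the pass emits.
#include <cstdint>
#include <unordered_map>

// Stand-ins for MSan's shadow and origin mappings (one origin per 4 bytes).
static std::unordered_map<uint32_t *, uint32_t> fake_shadow_mem;
static std::unordered_map<uint32_t *, uint32_t> fake_origin_mem;

void instrumented_store(uint32_t val, uint32_t val_shadow, uint32_t val_origin,
                        uint32_t *addr) {
  fake_shadow_mem[addr] = val_shadow;     // shadow store: always emitted
  if (val_shadow != 0)                    // guard added by this commit
    fake_origin_mem[addr] = val_origin;   // origin store: only for dirty values
  *addr = val;                            // the original application store
}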
Diffstat (limited to 'lib/Transforms/Instrumentation/MemorySanitizer.cpp')
-rw-r--r--  lib/Transforms/Instrumentation/MemorySanitizer.cpp  |  74
1 file changed, 57 insertions(+), 17 deletions(-)
diff --git a/lib/Transforms/Instrumentation/MemorySanitizer.cpp b/lib/Transforms/Instrumentation/MemorySanitizer.cpp
index 342751296b..4302843f80 100644
--- a/lib/Transforms/Instrumentation/MemorySanitizer.cpp
+++ b/lib/Transforms/Instrumentation/MemorySanitizer.cpp
@@ -102,6 +102,10 @@ static cl::opt<bool> ClHandleICmp("msan-handle-icmp",
cl::desc("propagate shadow through ICmpEQ and ICmpNE"),
cl::Hidden, cl::init(true));
+static cl::opt<bool> ClStoreCleanOrigin("msan-store-clean-origin",
+ cl::desc("store origin for clean (fully initialized) values"),
+ cl::Hidden, cl::init(false));
+
// This flag controls whether we check the shadow of the address
// operand of load or store. Such bugs are very rare, since load from
// a garbage address typically results in SEGV, but still happen
@@ -180,6 +184,8 @@ private:
uint64_t OriginOffset;
/// \brief Branch weights for error reporting.
MDNode *ColdCallWeights;
+ /// \brief Branch weights for origin store.
+ MDNode *OriginStoreWeights;
/// \brief The blacklist.
OwningPtr<BlackList> BL;
/// \brief An empty volatile inline asm that prevents callback merge.
@@ -308,6 +314,7 @@ bool MemorySanitizer::doInitialization(Module &M) {
OriginTy = IRB.getInt32Ty();
ColdCallWeights = MDBuilder(*C).createBranchWeights(1, 1000);
+ OriginStoreWeights = MDBuilder(*C).createBranchWeights(1, 1000);
// Insert a call to __msan_init/__msan_track_origins into the module's CTORs.
appendToGlobalCtors(M, cast<Function>(M.getOrInsertFunction(
@@ -382,6 +389,7 @@ struct MemorySanitizerVisitor : public InstVisitor<MemorySanitizerVisitor> {
ShadowOriginAndInsertPoint() : Shadow(0), Origin(0), OrigIns(0) { }
};
SmallVector<ShadowOriginAndInsertPoint, 16> InstrumentationList;
+ SmallVector<Instruction*, 16> StoreList;
MemorySanitizerVisitor(Function &F, MemorySanitizer &MS)
: F(F), MS(MS), VAHelper(CreateVarArgHelper(F, MS, *this)) {
@@ -391,6 +399,49 @@ struct MemorySanitizerVisitor : public InstVisitor<MemorySanitizerVisitor> {
<< F.getName() << "'\n");
}
+ void materializeStores() {
+ for (size_t i = 0, n = StoreList.size(); i < n; i++) {
+ StoreInst& I = *dyn_cast<StoreInst>(StoreList[i]);
+
+ IRBuilder<> IRB(&I);
+ Value *Val = I.getValueOperand();
+ Value *Addr = I.getPointerOperand();
+ Value *Shadow = getShadow(Val);
+ Value *ShadowPtr = getShadowPtr(Addr, Shadow->getType(), IRB);
+
+ StoreInst *NewSI = IRB.CreateAlignedStore(Shadow, ShadowPtr, I.getAlignment());
+ DEBUG(dbgs() << " STORE: " << *NewSI << "\n");
+ // If the store is volatile, add a check.
+ if (I.isVolatile())
+ insertCheck(Val, &I);
+ if (ClCheckAccessAddress)
+ insertCheck(Addr, &I);
+
+ if (ClTrackOrigins) {
+ if (ClStoreCleanOrigin || isa<StructType>(Shadow->getType())) {
+ IRB.CreateAlignedStore(getOrigin(Val), getOriginPtr(Addr, IRB), I.getAlignment());
+ } else {
+ Value *ConvertedShadow = convertToShadowTyNoVec(Shadow, IRB);
+
+ Constant *Cst = dyn_cast_or_null<Constant>(ConvertedShadow);
+ // TODO(eugenis): handle non-zero constant shadow by inserting an
+ // unconditional check (cannot simply fail compilation as this could
+ // be in dead code).
+ if (Cst)
+ continue;
+
+ Value *Cmp = IRB.CreateICmpNE(ConvertedShadow,
+ getCleanShadow(ConvertedShadow), "_mscmp");
+ Instruction *CheckTerm =
+ SplitBlockAndInsertIfThen(cast<Instruction>(Cmp), false, MS.OriginStoreWeights);
+ IRBuilder<> IRBNewBlock(CheckTerm);
+ IRBNewBlock.CreateAlignedStore(getOrigin(Val),
+ getOriginPtr(Addr, IRBNewBlock), I.getAlignment());
+ }
+ }
+ }
+ }
+
void materializeChecks() {
for (size_t i = 0, n = InstrumentationList.size(); i < n; i++) {
Instruction *Shadow = InstrumentationList[i].Shadow;
@@ -448,6 +499,11 @@ struct MemorySanitizerVisitor : public InstVisitor<MemorySanitizerVisitor> {
VAHelper->finalizeInstrumentation();
+ // Delayed instrumentation of StoreInst.
+ // This may add new checks to be inserted later.
+ materializeStores();
+
+ // Insert shadow value checks.
materializeChecks();
return true;
@@ -738,23 +794,7 @@ struct MemorySanitizerVisitor : public InstVisitor<MemorySanitizerVisitor> {
/// Optionally, checks that the store address is fully defined.
/// Volatile stores check that the value being stored is fully defined.
void visitStoreInst(StoreInst &I) {
- IRBuilder<> IRB(&I);
- Value *Val = I.getValueOperand();
- Value *Addr = I.getPointerOperand();
- Value *Shadow = getShadow(Val);
- Value *ShadowPtr = getShadowPtr(Addr, Shadow->getType(), IRB);
-
- StoreInst *NewSI = IRB.CreateAlignedStore(Shadow, ShadowPtr, I.getAlignment());
- DEBUG(dbgs() << " STORE: " << *NewSI << "\n");
- (void)NewSI;
- // If the store is volatile, add a check.
- if (I.isVolatile())
- insertCheck(Val, &I);
- if (ClCheckAccessAddress)
- insertCheck(Addr, &I);
-
- if (ClTrackOrigins)
- IRB.CreateAlignedStore(getOrigin(Val), getOriginPtr(Addr, IRB), I.getAlignment());
+ StoreList.push_back(&I);
}
// Vector manipulation.
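A note on the delayed-instrumentation part of this change: visitStoreInst now only records the store in StoreList, and materializeStores rewrites the recorded stores after the InstVisitor walk, because SplitBlockAndInsertIfThen creates new basic blocks, which the commit message notes confuses InstVisitor. Below is a generic, hedged sketch of that collect-then-materialize pattern in plain C++ (not LLVM code; all names are illustrative).

// Illustrative pattern only; names are hypothetical.
#include <vector>

struct Instr {};  // placeholder for an instruction

struct DelayedRewriter {
  std::vector<Instr *> Worklist;  // filled while walking the function

  // Called during the walk: only record the instruction, change nothing.
  void visitStore(Instr &I) { Worklist.push_back(&I); }

  // Called once after the walk: now it is safe to split blocks, add branches,
  // and insert new instructions without invalidating the traversal.
  void materialize() {
    for (size_t i = 0, n = Worklist.size(); i < n; ++i) {
      // ... rewrite Worklist[i] here, possibly adding new basic blocks ...
    }
  }
};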