author     Dan Gohman <gohman@apple.com>  2011-12-14 23:49:11 +0000
committer  Dan Gohman <gohman@apple.com>  2011-12-14 23:49:11 +0000
commit     f0426601977c3e386d2d26c72a2cca691dc42072 (patch)
tree       91fd661ded01669ee95c1d1d2115a010644545e8 /lib/Analysis/ValueTracking.cpp
parent     299b059cd675f1e5beac1a2383a44e062e2eef43 (diff)
Move Instruction::isSafeToSpeculativelyExecute out of VMCore and into Analysis
as a standalone function, since there's no need for it to be in VMCore. Also,
update it to use isKnownNonZero and other goodies available in Analysis,
making it more precise and enabling more aggressive optimization.

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@146610 91177308-0d34-0410-b5e6-96231b3b80d8
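For context, the sketch below shows how a client transform could consult the relocated helper; the tryHoist wrapper, the hoisting scenario, and the assumption that the companion declaration lands in llvm/Analysis/ValueTracking.h are illustrative, not part of this commit.

#include "llvm/Instruction.h"
#include "llvm/Analysis/ValueTracking.h"  // assumed home of the new declaration
#include "llvm/Target/TargetData.h"

using namespace llvm;

// Hypothetical helper: hoist Inst to InsertPt only if executing it
// unconditionally cannot trap or otherwise invoke undefined behavior.
// (Operand availability at InsertPt is assumed for brevity.)
static bool tryHoist(Instruction *Inst, Instruction *InsertPt,
                     const TargetData *TD) {
  if (!isSafeToSpeculativelyExecute(Inst, TD))
    return false;              // e.g. 'udiv %x, %y' with an unproven divisor
  Inst->moveBefore(InsertPt);  // e.g. 'udiv %x, 3': divisor is known nonzero
  return true;
}

The UDiv/URem and SDiv/SRem cases in the diff below are where isKnownNonZero and ComputeMaskedBits supply the extra precision mentioned above.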
Diffstat (limited to 'lib/Analysis/ValueTracking.cpp')
-rw-r--r--  lib/Analysis/ValueTracking.cpp  61
1 file changed, 61 insertions, 0 deletions
diff --git a/lib/Analysis/ValueTracking.cpp b/lib/Analysis/ValueTracking.cpp
index ecc83dfceb..ef19e065b7 100644
--- a/lib/Analysis/ValueTracking.cpp
+++ b/lib/Analysis/ValueTracking.cpp
@@ -1875,3 +1875,64 @@ bool llvm::onlyUsedByLifetimeMarkers(const Value *V) {
   }
   return true;
 }
+
+bool llvm::isSafeToSpeculativelyExecute(const Instruction *Inst,
+                                        const TargetData *TD) {
+  for (unsigned i = 0, e = Inst->getNumOperands(); i != e; ++i)
+    if (Constant *C = dyn_cast<Constant>(Inst->getOperand(i)))
+      if (C->canTrap())
+        return false;
+
+  switch (Inst->getOpcode()) {
+  default:
+    return true;
+  case Instruction::UDiv:
+  case Instruction::URem:
+    // x / y is undefined if y == 0, but calculations like x / 3 are safe.
+    return isKnownNonZero(Inst->getOperand(1), TD);
+  case Instruction::SDiv:
+  case Instruction::SRem: {
+    Value *Op = Inst->getOperand(1);
+    // x / y is undefined if y == 0
+    if (!isKnownNonZero(Op, TD))
+      return false;
+    // x / y might be undefined if y == -1
+    unsigned BitWidth = getBitWidth(Op->getType(), TD);
+    if (BitWidth == 0)
+      return false;
+    APInt KnownZero(BitWidth, 0);
+    APInt KnownOne(BitWidth, 0);
+    ComputeMaskedBits(Op, APInt::getAllOnesValue(BitWidth),
+                      KnownZero, KnownOne, TD);
+    return !!KnownZero;
+  }
+  case Instruction::Load: {
+    const LoadInst *LI = cast<LoadInst>(Inst);
+    if (!LI->isUnordered())
+      return false;
+    return LI->getPointerOperand()->isDereferenceablePointer();
+  }
+  case Instruction::Call:
+    return false; // The called function could have undefined behavior or
+                  // side-effects.
+                  // FIXME: We should special-case some intrinsics (bswap,
+                  // overflow-checking arithmetic, etc.)
+  case Instruction::VAArg:
+  case Instruction::Alloca:
+  case Instruction::Invoke:
+  case Instruction::PHI:
+  case Instruction::Store:
+  case Instruction::Ret:
+  case Instruction::Br:
+  case Instruction::IndirectBr:
+  case Instruction::Switch:
+  case Instruction::Unwind:
+  case Instruction::Unreachable:
+  case Instruction::Fence:
+  case Instruction::LandingPad:
+  case Instruction::AtomicRMW:
+  case Instruction::AtomicCmpXchg:
+  case Instruction::Resume:
+    return false; // Misc instructions which have effects
+  }
+}