 include/llvm/Support/MathExtras.h            | 7 +++++++
 lib/Target/Alpha/AlphaInstrInfo.td           | 4 ++--
 lib/Transforms/Scalar/LoopStrengthReduce.cpp | 6 +++---
 3 files changed, 12 insertions(+), 5 deletions(-)
diff --git a/include/llvm/Support/MathExtras.h b/include/llvm/Support/MathExtras.h
index 0fb2760b12..fd3ac9157b 100644
--- a/include/llvm/Support/MathExtras.h
+++ b/include/llvm/Support/MathExtras.h
@@ -425,6 +425,13 @@ inline uint64_t RoundUpToAlignment(uint64_t Value, uint64_t Align) {
   return ((Value + Align - 1) / Align) * Align;
 }
 
+/// abs64 - absolute value of a 64-bit int. Not all environments support
+/// "abs" on whatever their name for the 64-bit int type is. The absolute
+/// value of the largest negative number is undefined, as with "abs".
+inline int64_t abs64(int64_t x) {
+  return (x < 0) ? -x : x;
+}
+
 } // End llvm namespace
 
 #endif
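
The helper is small enough to exercise in isolation. A minimal standalone sketch of its behaviour, assuming only the definition added above (the main function and the test value are illustrative, not part of the patch):

    #include <stdint.h>
    #include <stdio.h>

    // Same body as the patch adds to MathExtras.h.
    inline int64_t abs64(int64_t x) {
      return (x < 0) ? -x : x;
    }

    int main() {
      int64_t v = -5000000000LL;               // does not fit in 32 bits
      printf("%lld\n", (long long)abs64(v));   // prints 5000000000
      // As the doc comment warns, abs64(INT64_MIN) is undefined, because
      // -INT64_MIN overflows int64_t.
      return 0;
    }

This sidesteps the portability problem the doc comment describes: plain C "abs" takes an int, and a 64-bit variant is not guaranteed to be available under a single name in every environment the tree supports.
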
diff --git a/lib/Target/Alpha/AlphaInstrInfo.td b/lib/Target/Alpha/AlphaInstrInfo.td
index ae9282564d..e73bdf9f6e 100644
--- a/lib/Target/Alpha/AlphaInstrInfo.td
+++ b/lib/Target/Alpha/AlphaInstrInfo.td
@@ -69,7 +69,7 @@ def nearP2X : SDNodeXForm<imm, [{
 }]>;
 def nearP2RemX : SDNodeXForm<imm, [{
   uint64_t x =
-    abs(N->getZExtValue() - getNearPower2((uint64_t)N->getZExtValue()));
+    abs64(N->getZExtValue() - getNearPower2((uint64_t)N->getZExtValue()));
   return getI64Imm(Log2_64(x));
 }]>;
@@ -124,7 +124,7 @@ def immRemP2 : PatLeaf<(imm), [{
                        getNearPower2((uint64_t)N->getZExtValue()));
 }]>;
 def immUExt8ME : PatLeaf<(imm), [{ //use this imm for mulqi
-  int64_t d = abs((int64_t)N->getZExtValue() -
+  int64_t d = abs64((int64_t)N->getZExtValue() -
                   (int64_t)getNearPower2((uint64_t)N->getZExtValue()));
   if (isPowerOf2_64(d)) return false;
   switch (d) {
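
Both Alpha predicates measure the distance from an immediate to its nearest power of two and then test whether that distance is itself a power of two, which lets a multiply by the immediate be lowered with shifts and an add or subtract (the immUExt8ME comment points at mulqi). A rough standalone sketch of that computation, where nearPower2 and remainderIsPow2 are illustrative stand-ins rather than the backend's getNearPower2 and PatLeaf code:

    #include <stdint.h>
    #include <stdio.h>

    // Same body as the helper added to MathExtras.h in this commit.
    inline int64_t abs64(int64_t x) { return (x < 0) ? -x : x; }

    // Stand-in for the backend's getNearPower2: the power of two closest
    // to v. Deliberately simple; assumes 1 <= v < 2^62.
    uint64_t nearPower2(uint64_t v) {
      uint64_t hi = 1;
      while (hi < v) hi <<= 1;
      uint64_t lo = hi >> 1;
      return (v - lo < hi - v) ? lo : hi;
    }

    // Distance from imm to its nearest power of two, taken through abs64 so
    // the signed 64-bit subtraction is handled the same way the patterns do,
    // then tested for being a power of two itself.
    bool remainderIsPow2(uint64_t imm) {
      int64_t d = abs64((int64_t)imm - (int64_t)nearPower2(imm));
      return d != 0 && ((uint64_t)d & ((uint64_t)d - 1)) == 0;
    }

    int main() {
      printf("%d\n", (int)remainderIsPow2(12));   // 12 = 16 - 4, prints 1
      return 0;
    }
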
diff --git a/lib/Transforms/Scalar/LoopStrengthReduce.cpp b/lib/Transforms/Scalar/LoopStrengthReduce.cpp
index 4f6d53179e..ed12d8683d 100644
--- a/lib/Transforms/Scalar/LoopStrengthReduce.cpp
+++ b/lib/Transforms/Scalar/LoopStrengthReduce.cpp
@@ -1013,7 +1013,7 @@ SCEVHandle LoopStrengthReduce::CheckForIVReuse(bool HasBaseReg,
       continue;
     int64_t SSInt = cast<SCEVConstant>(SI->first)->getValue()->getSExtValue();
     if (SI->first != Stride &&
-        (unsigned(abs(SInt)) < SSInt || (SInt % SSInt) != 0))
+        (unsigned(abs64(SInt)) < SSInt || (SInt % SSInt) != 0))
       continue;
     int64_t Scale = SInt / SSInt;
     // Check that this stride is valid for all the types used for loads and
@@ -1900,7 +1900,7 @@ ICmpInst *LoopStrengthReduce::ChangeCompareStride(Loop *L, ICmpInst *Cond,
       continue;
     int64_t SSInt = cast<SCEVConstant>(SI->first)->getValue()->getSExtValue();
     if (SSInt == CmpSSInt ||
-        abs(SSInt) < abs(CmpSSInt) ||
+        abs64(SSInt) < abs64(CmpSSInt) ||
         (SSInt % CmpSSInt) != 0)
       continue;
@@ -2336,7 +2336,7 @@ void LoopStrengthReduce::OptimizeLoopTermCond(Loop *L) {
         cast<SCEVConstant>(SI->first)->getValue()->getSExtValue();
       if (SSInt == SInt)
         return; // This can definitely be reused.
-      if (unsigned(abs(SSInt)) < SInt || (SSInt % SInt) != 0)
+      if (unsigned(abs64(SSInt)) < SInt || (SSInt % SInt) != 0)
         continue;
       int64_t Scale = SSInt / SInt;
       bool AllUsesAreAddresses = true;
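
All three LoopStrengthReduce call sites apply the same test: one stride can only be rewritten in terms of another when it is at least as large in magnitude and an exact multiple of it, yielding an integer scale. A compact sketch of that check with a made-up function name, stripped of the surrounding SCEV machinery:

    #include <stdint.h>

    // Same body as the helper added to MathExtras.h in this commit.
    inline int64_t abs64(int64_t x) { return (x < 0) ? -x : x; }

    // canReuseStride is a made-up name for the sketch. It mirrors the
    // abs64/modulo tests in the hunks above: Stride must be at least as
    // large in magnitude as OtherStride and an exact multiple of it.
    bool canReuseStride(int64_t Stride, int64_t OtherStride, int64_t &Scale) {
      if (OtherStride == 0)
        return false;
      if (abs64(Stride) < abs64(OtherStride) || (Stride % OtherStride) != 0)
        return false;
      Scale = Stride / OtherStride;
      return true;
    }

    int main() {
      int64_t Scale = 0;
      // A stride of 12 is three times a stride of 4, so it can reuse that IV.
      return canReuseStride(12, 4, Scale) && Scale == 3 ? 0 : 1;
    }
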