author     Dan Gohman <gohman@apple.com>   2009-10-09 00:10:36 +0000
committer  Dan Gohman <gohman@apple.com>   2009-10-09 00:10:36 +0000
commit     c9afeb35476d80a581127cd6e46b62a75727de30
tree       6609299ae5e2d8b6db0a101acb030aac8471b3ce /lib/Analysis
parent     c7692e0185faaab4a68386afc362d95be591b455
Add the ability to track HasNSW and HasNUW on more kinds of SCEV expressions.
git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@83601 91177308-0d34-0410-b5e6-96231b3b80d8
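For context, this patch extends the getAddExpr, getMulExpr, and getAddRecExpr entry points with optional no-unsigned-wrap (NUW) and no-signed-wrap (NSW) flags and records them on the newly created SCEV nodes. A minimal sketch of the resulting interfaces follows; the default-argument form is an assumption, since the corresponding ScalarEvolution.h change is outside this lib/Analysis diffstat:

// Sketch only: assumed declarations after this change. Default arguments
// would keep existing callers source-compatible; the actual header edit is
// not shown in the diff below.
const SCEV *getAddExpr(SmallVectorImpl<const SCEV *> &Ops,
                       bool HasNUW = false, bool HasNSW = false);
const SCEV *getMulExpr(SmallVectorImpl<const SCEV *> &Ops,
                       bool HasNUW = false, bool HasNSW = false);
const SCEV *getAddRecExpr(const SCEV *Start, const SCEV *Step, const Loop *L,
                          bool HasNUW = false, bool HasNSW = false);
const SCEV *getAddRecExpr(SmallVectorImpl<const SCEV *> &Operands,
                          const Loop *L,
                          bool HasNUW = false, bool HasNSW = false);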
Diffstat (limited to 'lib/Analysis')
-rw-r--r--  lib/Analysis/ScalarEvolution.cpp | 34
1 file changed, 22 insertions, 12 deletions
diff --git a/lib/Analysis/ScalarEvolution.cpp b/lib/Analysis/ScalarEvolution.cpp
index 12ad429..9300de1 100644
--- a/lib/Analysis/ScalarEvolution.cpp
+++ b/lib/Analysis/ScalarEvolution.cpp
@@ -1191,7 +1191,8 @@ namespace {
/// getAddExpr - Get a canonical add expression, or something simpler if
/// possible.
-const SCEV *ScalarEvolution::getAddExpr(SmallVectorImpl<const SCEV *> &Ops) {
+const SCEV *ScalarEvolution::getAddExpr(SmallVectorImpl<const SCEV *> &Ops,
+ bool HasNUW, bool HasNSW) {
assert(!Ops.empty() && "Cannot get empty add!");
if (Ops.size() == 1) return Ops[0];
#ifndef NDEBUG
@@ -1241,7 +1242,7 @@ const SCEV *ScalarEvolution::getAddExpr(SmallVectorImpl<const SCEV *> &Ops) {
return Mul;
Ops.erase(Ops.begin()+i, Ops.begin()+i+2);
Ops.push_back(Mul);
- return getAddExpr(Ops);
+ return getAddExpr(Ops, HasNUW, HasNSW);
}
// Check for truncates. If all the operands are truncated from the same
@@ -1296,7 +1297,7 @@ const SCEV *ScalarEvolution::getAddExpr(SmallVectorImpl<const SCEV *> &Ops) {
}
if (Ok) {
// Evaluate the expression in the larger type.
- const SCEV *Fold = getAddExpr(LargeOps);
+ const SCEV *Fold = getAddExpr(LargeOps, HasNUW, HasNSW);
// If it folds to something simple, use it. Otherwise, don't.
if (isa<SCEVConstant>(Fold) || isa<SCEVUnknown>(Fold))
return getTruncateExpr(Fold, DstType);
@@ -1516,16 +1517,19 @@ const SCEV *ScalarEvolution::getAddExpr(SmallVectorImpl<const SCEV *> &Ops) {
ID.AddPointer(Ops[i]);
void *IP = 0;
if (const SCEV *S = UniqueSCEVs.FindNodeOrInsertPos(ID, IP)) return S;
- SCEV *S = SCEVAllocator.Allocate<SCEVAddExpr>();
+ SCEVAddExpr *S = SCEVAllocator.Allocate<SCEVAddExpr>();
new (S) SCEVAddExpr(ID, Ops);
UniqueSCEVs.InsertNode(S, IP);
+ if (HasNUW) S->setHasNoUnsignedWrap(true);
+ if (HasNSW) S->setHasNoSignedWrap(true);
return S;
}
/// getMulExpr - Get a canonical multiply expression, or something simpler if
/// possible.
-const SCEV *ScalarEvolution::getMulExpr(SmallVectorImpl<const SCEV *> &Ops) {
+const SCEV *ScalarEvolution::getMulExpr(SmallVectorImpl<const SCEV *> &Ops,
+ bool HasNUW, bool HasNSW) {
assert(!Ops.empty() && "Cannot get empty mul!");
#ifndef NDEBUG
for (unsigned i = 1, e = Ops.size(); i != e; ++i)
@@ -1688,9 +1692,11 @@ const SCEV *ScalarEvolution::getMulExpr(SmallVectorImpl<const SCEV *> &Ops) {
ID.AddPointer(Ops[i]);
void *IP = 0;
if (const SCEV *S = UniqueSCEVs.FindNodeOrInsertPos(ID, IP)) return S;
- SCEV *S = SCEVAllocator.Allocate<SCEVMulExpr>();
+ SCEVMulExpr *S = SCEVAllocator.Allocate<SCEVMulExpr>();
new (S) SCEVMulExpr(ID, Ops);
UniqueSCEVs.InsertNode(S, IP);
+ if (HasNUW) S->setHasNoUnsignedWrap(true);
+ if (HasNSW) S->setHasNoSignedWrap(true);
return S;
}
@@ -1797,7 +1803,8 @@ const SCEV *ScalarEvolution::getUDivExpr(const SCEV *LHS,
/// getAddRecExpr - Get an add recurrence expression for the specified loop.
/// Simplify the expression as much as possible.
const SCEV *ScalarEvolution::getAddRecExpr(const SCEV *Start,
- const SCEV *Step, const Loop *L) {
+ const SCEV *Step, const Loop *L,
+ bool HasNUW, bool HasNSW) {
SmallVector<const SCEV *, 4> Operands;
Operands.push_back(Start);
if (const SCEVAddRecExpr *StepChrec = dyn_cast<SCEVAddRecExpr>(Step))
@@ -1808,14 +1815,15 @@ const SCEV *ScalarEvolution::getAddRecExpr(const SCEV *Start,
}
Operands.push_back(Step);
- return getAddRecExpr(Operands, L);
+ return getAddRecExpr(Operands, L, HasNUW, HasNSW);
}
/// getAddRecExpr - Get an add recurrence expression for the specified loop.
/// Simplify the expression as much as possible.
const SCEV *
ScalarEvolution::getAddRecExpr(SmallVectorImpl<const SCEV *> &Operands,
- const Loop *L) {
+ const Loop *L,
+ bool HasNUW, bool HasNSW) {
if (Operands.size() == 1) return Operands[0];
#ifndef NDEBUG
for (unsigned i = 1, e = Operands.size(); i != e; ++i)
@@ -1826,7 +1834,7 @@ ScalarEvolution::getAddRecExpr(SmallVectorImpl<const SCEV *> &Operands,
if (Operands.back()->isZero()) {
Operands.pop_back();
- return getAddRecExpr(Operands, L); // {X,+,0} --> X
+ return getAddRecExpr(Operands, L, HasNUW, HasNSW); // {X,+,0} --> X
}
// Canonicalize nested AddRecs in by nesting them in order of loop depth.
@@ -1855,7 +1863,7 @@ ScalarEvolution::getAddRecExpr(SmallVectorImpl<const SCEV *> &Operands,
}
if (AllInvariant)
// Ok, both add recurrences are valid after the transformation.
- return getAddRecExpr(NestedOperands, NestedLoop);
+ return getAddRecExpr(NestedOperands, NestedLoop, HasNUW, HasNSW);
}
// Reset Operands to its original state.
Operands[0] = NestedAR;
@@ -1870,9 +1878,11 @@ ScalarEvolution::getAddRecExpr(SmallVectorImpl<const SCEV *> &Operands,
ID.AddPointer(L);
void *IP = 0;
if (const SCEV *S = UniqueSCEVs.FindNodeOrInsertPos(ID, IP)) return S;
- SCEV *S = SCEVAllocator.Allocate<SCEVAddRecExpr>();
+ SCEVAddRecExpr *S = SCEVAllocator.Allocate<SCEVAddRecExpr>();
new (S) SCEVAddRecExpr(ID, Operands, L);
UniqueSCEVs.InsertNode(S, IP);
+ if (HasNUW) S->setHasNoUnsignedWrap(true);
+ if (HasNSW) S->setHasNoSignedWrap(true);
return S;
}
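
A hypothetical use of the new flags, for example building the canonical induction variable {0,+,1} with both wrap flags set. This caller is illustrative only and not part of the patch; SE, Ty, and L are assumed to be an in-scope ScalarEvolution instance, integer type, and loop:

// Illustrative only: construct {0,+,1}<nuw><nsw> for loop L via the new
// overload. The flags are recorded on the SCEVAddRecExpr node when it is
// first created (see the setHasNoUnsignedWrap/setHasNoSignedWrap calls
// added above); if an equivalent node was already uniqued, the early
// FindNodeOrInsertPos return is taken before the flags are applied.
const SCEV *Zero = SE.getConstant(Ty, 0);
const SCEV *One  = SE.getConstant(Ty, 1);
const SCEV *IndVar =
    SE.getAddRecExpr(Zero, One, L, /*HasNUW=*/true, /*HasNSW=*/true);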