path: root/lib/Transforms/Scalar/LoopStrengthReduce.cpp
author     Chris Lattner <sabre@nondot.org>    2007-05-19 01:22:21 +0000
committer  Chris Lattner <sabre@nondot.org>    2007-05-19 01:22:21 +0000
commit     fb3e1190fc33c93a7185695051d5aeeaddbae0ad (patch)
tree       c7abf0c8352f38918312404bc7dc03a909cfe073 /lib/Transforms/Scalar/LoopStrengthReduce.cpp
parent     b0e07dd19a5fb6c556fdc6e67e605103b30a7d9f (diff)
Handle negative strides much more optimally. This compiles X86/lsr-negative-stride.ll
into:

_t:
        movl 8(%esp), %ecx
        movl 4(%esp), %eax
        cmpl %ecx, %eax
        je LBB1_3       #bb17
LBB1_1: #bb
        cmpl %ecx, %eax
        jg LBB1_4       #cond_true
LBB1_2: #cond_false
        subl %eax, %ecx
        cmpl %ecx, %eax
        jne LBB1_1      #bb
LBB1_3: #bb17
        ret
LBB1_4: #cond_true
        subl %ecx, %eax
        cmpl %ecx, %eax
        jne LBB1_1      #bb
        jmp LBB1_3      #bb17

instead of:

_t:
        subl $4, %esp
        movl %esi, (%esp)
        movl 12(%esp), %ecx
        movl 8(%esp), %eax
        cmpl %ecx, %eax
        je LBB1_4       #bb17
LBB1_1: #bb.outer
        movl %ecx, %edx
        negl %edx
LBB1_2: #bb
        cmpl %ecx, %eax
        jle LBB1_5      #cond_false
LBB1_3: #cond_true
        addl %edx, %eax
        cmpl %ecx, %eax
        jne LBB1_2      #bb
LBB1_4: #bb17
        movl (%esp), %esi
        addl $4, %esp
        ret
LBB1_5: #cond_false
        movl %ecx, %edx
        subl %eax, %edx
        movl %eax, %esi
        addl %esi, %esi
        cmpl %ecx, %esi
        je LBB1_4       #bb17
LBB1_6: #cond_false.bb.outer_crit_edge
        movl %edx, %ecx
        jmp LBB1_1      #bb.outer

git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@37252 91177308-0d34-0410-b5e6-96231b3b80d8
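For reference, the test being compiled has roughly this shape (an illustrative C++ reconstruction from the assembly above, not the verbatim contents of X86/lsr-negative-stride.ll; names are assumed). Each arm subtracts one value from the other, so LSR sees induction variables whose stride is a negative, non-constant SCEV such as (-1 * a):

        // Sketch only: a subtract-based loop with non-constant negative strides.
        int t(int a, int b) {
          while (a != b) {      // bb / bb17
            if (a > b)
              a -= b;           // IV with stride (-1 * b)
            else
              b -= a;           // IV with stride (-1 * a)
          }
          return a;             // return value assumed for the sketch
        }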
Diffstat (limited to 'lib/Transforms/Scalar/LoopStrengthReduce.cpp')
-rw-r--r--  lib/Transforms/Scalar/LoopStrengthReduce.cpp | 29
1 file changed, 26 insertions(+), 3 deletions(-)
diff --git a/lib/Transforms/Scalar/LoopStrengthReduce.cpp b/lib/Transforms/Scalar/LoopStrengthReduce.cpp
index 1a9b9881d7..449cee3bda 100644
--- a/lib/Transforms/Scalar/LoopStrengthReduce.cpp
+++ b/lib/Transforms/Scalar/LoopStrengthReduce.cpp
@@ -987,6 +987,20 @@ static bool PartitionByIsUseOfPostIncrementedValue(const BasedUser &Val) {
return Val.isUseOfPostIncrementedValue;
}
+/// isNonConstantNegative - Return true if the specified SCEV is negated, but
+/// not a constant.
+static bool isNonConstantNegative(const SCEVHandle &Expr) {
+ SCEVMulExpr *Mul = dyn_cast<SCEVMulExpr>(Expr);
+ if (!Mul) return false;
+
+ // If there is a constant factor, it will be first.
+ SCEVConstant *SC = dyn_cast<SCEVConstant>(Mul->getOperand(0));
+ if (!SC) return false;
+
+ // Return true if the value is negative; this matches things like (-42 * V).
+ return SC->getValue()->getValue().isNegative();
+}
+
/// StrengthReduceStridedIVUsers - Strength reduce all of the users of a single
/// stride of IV. All of the users may have different starting values, and this
/// may not be the only stride (we know it is if isOnlyStride is true).
@@ -1104,15 +1118,24 @@ void LoopStrengthReduce::StrengthReduceStridedIVUsers(const SCEVHandle &Stride,
// Add common base to the new Phi node.
NewPHI->addIncoming(CommonBaseV, Preheader);
+ // If the stride is negative, insert a sub instead of an add for the
+ // increment.
+ bool isNegative = isNonConstantNegative(Stride);
+ SCEVHandle IncAmount = Stride;
+ if (isNegative)
+ IncAmount = SCEV::getNegativeSCEV(Stride);
+
// Insert the stride into the preheader.
- Value *StrideV = PreheaderRewriter.expandCodeFor(Stride, PreInsertPt,
+ Value *StrideV = PreheaderRewriter.expandCodeFor(IncAmount, PreInsertPt,
ReplacedTy);
if (!isa<ConstantInt>(StrideV)) ++NumVariable;
// Emit the increment of the base value before the terminator of the loop
// latch block, and add it to the Phi node.
- SCEVHandle IncExp = SCEVAddExpr::get(SCEVUnknown::get(NewPHI),
- SCEVUnknown::get(StrideV));
+ SCEVHandle IncExp = SCEVUnknown::get(StrideV);
+ if (isNegative)
+ IncExp = SCEV::getNegativeSCEV(IncExp);
+ IncExp = SCEVAddExpr::get(SCEVUnknown::get(NewPHI), IncExp);
IncV = Rewriter.expandCodeFor(IncExp, LatchBlock->getTerminator(),
ReplacedTy);
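The net effect on the expanded loop, sketched below in C++ with invented names (an illustration of the change in the emitted increment, not actual SCEVExpander output): previously the negative stride itself was expanded, so the loop carried a materialized negation (the negl/addl pair in the old assembly above); with this change the positive amount is expanded once in the preheader and the latch increment is emitted as a subtraction.

        // Hypothetical sketch; 'start', 'b', and 'iv' are assumed names.
        void example(int start, int b) {
          // Before this patch: the negated, non-constant stride is materialized
          // and added on every iteration.
          int inc = -b;                                   // in the preheader
          for (int iv = start; iv != b; iv = iv + inc) { /* loop body */ }

          // After this patch: expand the positive amount in the preheader and
          // emit the latch increment as a subtraction.
          int amt = b;                                    // in the preheader
          for (int iv = start; iv != b; iv = iv - amt) { /* loop body */ }
        }

Constant negative strides are deliberately left on the add path (isNonConstantNegative rejects them), since a constant such as -4 folds directly into the add's immediate and gains nothing from a subtract.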