about summary refs log tree commit diff stats
diff options
context:
space:
mode:
author	Owen Anderson <resistor@mac.com>	2011-04-06 23:35:59 +0000
committer	Owen Anderson <resistor@mac.com>	2011-04-06 23:35:59 +0000
commit	df298c9ea64eb335f63fc075d8ef6306682ffe75 (patch)
tree	f018cf1b4a2cb10c1385cddb772fd995eff0c0ae
parent	ef7fb17936ef38153e0a8c8146229d618722eb15 (diff)
download	external_llvm-df298c9ea64eb335f63fc075d8ef6306682ffe75.zip
external_llvm-df298c9ea64eb335f63fc075d8ef6306682ffe75.tar.gz
external_llvm-df298c9ea64eb335f63fc075d8ef6306682ffe75.tar.bz2
Teach the ARM peephole optimizer that RSB, RSC, ADC, and SBC can be used for folded comparisons, just like ADD and SUB.
git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@129038 91177308-0d34-0410-b5e6-96231b3b80d8
-rw-r--r--	lib/Target/ARM/ARMBaseInstrInfo.cpp	9
-rw-r--r--	test/CodeGen/ARM/sub.ll	9
2 files changed, 17 insertions(+), 1 deletion(-)
diff --git a/lib/Target/ARM/ARMBaseInstrInfo.cpp b/lib/Target/ARM/ARMBaseInstrInfo.cpp
index 1acad9d..30148c2 100644
--- a/lib/Target/ARM/ARMBaseInstrInfo.cpp
+++ b/lib/Target/ARM/ARMBaseInstrInfo.cpp
@@ -1618,10 +1618,17 @@ OptimizeCompareInstr(MachineInstr *CmpInstr, unsigned SrcReg, int CmpMask,
// Set the "zero" bit in CPSR.
switch (MI->getOpcode()) {
default: break;
+ case ARM::RSBri:
+ case ARM::RSCri:
case ARM::ADDri:
+ case ARM::ADCri:
case ARM::SUBri:
+ case ARM::SBCri:
+ case ARM::t2RSBri:
case ARM::t2ADDri:
- case ARM::t2SUBri: {
+ case ARM::t2ADCri:
+ case ARM::t2SUBri:
+ case ARM::t2SBCri: {
// Scan forward for the use of CPSR, if it's a conditional code requires
// checking of V bit, then this is not safe to do. If we can't find the
// CPSR use (i.e. used in another block), then it's not safe to perform
diff --git a/test/CodeGen/ARM/sub.ll b/test/CodeGen/ARM/sub.ll
index 7ada14d..555b18e 100644
--- a/test/CodeGen/ARM/sub.ll
+++ b/test/CodeGen/ARM/sub.ll
@@ -27,3 +27,12 @@ define i64 @f3(i64 %a) {
ret i64 %tmp
}
+define i32 @f4(i32 %x) {
+entry:
+; CHECK: f4
+; CHECK: rsbs
+ %sub = sub i32 1, %x
+ %cmp = icmp ugt i32 %sub, 0
+ %sel = select i1 %cmp, i32 1, i32 %sub
+ ret i32 %sel
+}