about | summary | refs | log | tree | commit | diff | stats
diff options
context:
space:
mode:
author: Chris Lattner <sabre@nondot.org> 2004-05-25 06:32:08 +0000
committer: Chris Lattner <sabre@nondot.org> 2004-05-25 06:32:08 +0000
commit 3c6a0d4ae2a5ffb19f2b6527c51c95ea0475ce9f (patch)
tree c3ed6ac57c248ab12f1351bd938cf076fc3b6ed7
parent 256b7a452ba55daedd499a06594568c4d630b77c (diff)
download: external_llvm-3c6a0d4ae2a5ffb19f2b6527c51c95ea0475ce9f.zip
external_llvm-3c6a0d4ae2a5ffb19f2b6527c51c95ea0475ce9f.tar.gz
external_llvm-3c6a0d4ae2a5ffb19f2b6527c51c95ea0475ce9f.tar.bz2
Implement InstCombine:shift.ll:test16, which turns (X >> C1) & C2 != C3
into (X & (C2 << C1)) != (C3 << C1), where the shift may be either left or right and the compare may be any one. This triggers 1546 times in 176.gcc alone, as it is a common pattern that occurs for bitfield accesses. git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@13740 91177308-0d34-0410-b5e6-96231b3b80d8
-rw-r--r--  lib/Transforms/Scalar/InstructionCombining.cpp  35
1 file changed, 35 insertions, 0 deletions
diff --git a/lib/Transforms/Scalar/InstructionCombining.cpp b/lib/Transforms/Scalar/InstructionCombining.cpp
index d31e7e8..6d39c50 100644
--- a/lib/Transforms/Scalar/InstructionCombining.cpp
+++ b/lib/Transforms/Scalar/InstructionCombining.cpp
@@ -1475,6 +1475,41 @@ Instruction *InstCombiner::visitSetCondInst(BinaryOperator &I) {
// integers at the end of their ranges...
//
if (ConstantInt *CI = dyn_cast<ConstantInt>(Op1)) {
+ if (Instruction *LHSI = dyn_cast<Instruction>(Op0))
+ if (LHSI->hasOneUse() && LHSI->getNumOperands() == 2 &&
+ isa<ConstantInt>(LHSI->getOperand(1))) {
+ // If this is: (X >> C1) & C2 != C3 (where any shift and any compare
+ // could exist), turn it into (X & (C2 << C1)) != (C3 << C1). This
+ // happens a LOT in code produced by the C front-end, for bitfield
+ // access.
+ if (LHSI->getOpcode() == Instruction::And &&
+ LHSI->getOperand(0)->hasOneUse())
+ if (ShiftInst *Shift = dyn_cast<ShiftInst>(LHSI->getOperand(0)))
+ if (ConstantUInt *ShAmt =
+ dyn_cast<ConstantUInt>(Shift->getOperand(1))) {
+ ConstantInt *AndCST = cast<ConstantInt>(LHSI->getOperand(1));
+
+ // We can fold this as long as we can't shift unknown bits into
+ // the mask. This can only happen with signed shift rights, as
+ // they sign-extend.
+ const Type *Ty = Shift->getType();
+ if (Shift->getOpcode() != Instruction::Shr ||
+ Shift->getType()->isUnsigned() ||
+ // To test for the bad case of the signed shr, see if any of
+ // the bits shifted in could be tested after the mask.
+ ConstantExpr::getAnd(ConstantExpr::getShl(ConstantInt::getAllOnesValue(Ty), ConstantUInt::get(Type::UByteTy, Ty->getPrimitiveSize()*8-ShAmt->getValue())), AndCST)->isNullValue()) {
+ unsigned ShiftOp = Shift->getOpcode() == Instruction::Shl
+ ? Instruction::Shr : Instruction::Shl;
+ I.setOperand(1, ConstantExpr::get(ShiftOp, CI, ShAmt));
+ LHSI->setOperand(1, ConstantExpr::get(ShiftOp, AndCST, ShAmt));
+ LHSI->setOperand(0, Shift->getOperand(0));
+ WorkList.push_back(Shift); // Shift is probably dead.
+ AddUsesToWorkList(I);
+ return &I;
+ }
+ }
+ }
+
// Simplify seteq and setne instructions...
if (I.getOpcode() == Instruction::SetEQ ||
I.getOpcode() == Instruction::SetNE) {