Reassociate.cpp revision 210299
1//===- Reassociate.cpp - Reassociate binary expressions -------------------===// 2// 3// The LLVM Compiler Infrastructure 4// 5// This file is distributed under the University of Illinois Open Source 6// License. See LICENSE.TXT for details. 7// 8//===----------------------------------------------------------------------===// 9// 10// This pass reassociates commutative expressions in an order that is designed 11// to promote better constant propagation, GCSE, LICM, PRE, etc. 12// 13// For example: 4 + (x + 5) -> x + (4 + 5) 14// 15// In the implementation of this algorithm, constants are assigned rank = 0, 16// function arguments are rank = 1, and other values are assigned ranks 17// corresponding to the reverse post order traversal of current function 18// (starting at 2), which effectively gives values in deep loops higher rank 19// than values not in loops. 20// 21//===----------------------------------------------------------------------===// 22 23#define DEBUG_TYPE "reassociate" 24#include "llvm/Transforms/Scalar.h" 25#include "llvm/Constants.h" 26#include "llvm/DerivedTypes.h" 27#include "llvm/Function.h" 28#include "llvm/Instructions.h" 29#include "llvm/IntrinsicInst.h" 30#include "llvm/Pass.h" 31#include "llvm/Assembly/Writer.h" 32#include "llvm/Support/CFG.h" 33#include "llvm/Support/Debug.h" 34#include "llvm/Support/ValueHandle.h" 35#include "llvm/Support/raw_ostream.h" 36#include "llvm/ADT/PostOrderIterator.h" 37#include "llvm/ADT/Statistic.h" 38#include "llvm/ADT/DenseMap.h" 39#include <algorithm> 40using namespace llvm; 41 42STATISTIC(NumLinear , "Number of insts linearized"); 43STATISTIC(NumChanged, "Number of insts reassociated"); 44STATISTIC(NumAnnihil, "Number of expr tree annihilated"); 45STATISTIC(NumFactor , "Number of multiplies factored"); 46 47namespace { 48 struct ValueEntry { 49 unsigned Rank; 50 Value *Op; 51 ValueEntry(unsigned R, Value *O) : Rank(R), Op(O) {} 52 }; 53 inline bool operator<(const ValueEntry &LHS, const ValueEntry &RHS) { 
54 return LHS.Rank > RHS.Rank; // Sort so that highest rank goes to start. 55 } 56} 57 58#ifndef NDEBUG 59/// PrintOps - Print out the expression identified in the Ops list. 60/// 61static void PrintOps(Instruction *I, const SmallVectorImpl<ValueEntry> &Ops) { 62 Module *M = I->getParent()->getParent()->getParent(); 63 dbgs() << Instruction::getOpcodeName(I->getOpcode()) << " " 64 << *Ops[0].Op->getType() << '\t'; 65 for (unsigned i = 0, e = Ops.size(); i != e; ++i) { 66 dbgs() << "[ "; 67 WriteAsOperand(dbgs(), Ops[i].Op, false, M); 68 dbgs() << ", #" << Ops[i].Rank << "] "; 69 } 70} 71#endif 72 73namespace { 74 class Reassociate : public FunctionPass { 75 DenseMap<BasicBlock*, unsigned> RankMap; 76 DenseMap<AssertingVH<>, unsigned> ValueRankMap; 77 bool MadeChange; 78 public: 79 static char ID; // Pass identification, replacement for typeid 80 Reassociate() : FunctionPass(&ID) {} 81 82 bool runOnFunction(Function &F); 83 84 virtual void getAnalysisUsage(AnalysisUsage &AU) const { 85 AU.setPreservesCFG(); 86 } 87 private: 88 void BuildRankMap(Function &F); 89 unsigned getRank(Value *V); 90 Value *ReassociateExpression(BinaryOperator *I); 91 void RewriteExprTree(BinaryOperator *I, SmallVectorImpl<ValueEntry> &Ops, 92 unsigned Idx = 0); 93 Value *OptimizeExpression(BinaryOperator *I, 94 SmallVectorImpl<ValueEntry> &Ops); 95 Value *OptimizeAdd(Instruction *I, SmallVectorImpl<ValueEntry> &Ops); 96 void LinearizeExprTree(BinaryOperator *I, SmallVectorImpl<ValueEntry> &Ops); 97 void LinearizeExpr(BinaryOperator *I); 98 Value *RemoveFactorFromExpression(Value *V, Value *Factor); 99 void ReassociateBB(BasicBlock *BB); 100 101 void RemoveDeadBinaryOp(Value *V); 102 }; 103} 104 105char Reassociate::ID = 0; 106static RegisterPass<Reassociate> X("reassociate", "Reassociate expressions"); 107 108// Public interface to the Reassociate pass 109FunctionPass *llvm::createReassociatePass() { return new Reassociate(); } 110 111void Reassociate::RemoveDeadBinaryOp(Value *V) { 112 
Instruction *Op = dyn_cast<Instruction>(V); 113 if (!Op || !isa<BinaryOperator>(Op) || !Op->use_empty()) 114 return; 115 116 Value *LHS = Op->getOperand(0), *RHS = Op->getOperand(1); 117 118 ValueRankMap.erase(Op); 119 Op->eraseFromParent(); 120 RemoveDeadBinaryOp(LHS); 121 RemoveDeadBinaryOp(RHS); 122} 123 124 125static bool isUnmovableInstruction(Instruction *I) { 126 if (I->getOpcode() == Instruction::PHI || 127 I->getOpcode() == Instruction::Alloca || 128 I->getOpcode() == Instruction::Load || 129 I->getOpcode() == Instruction::Invoke || 130 (I->getOpcode() == Instruction::Call && 131 !isa<DbgInfoIntrinsic>(I)) || 132 I->getOpcode() == Instruction::UDiv || 133 I->getOpcode() == Instruction::SDiv || 134 I->getOpcode() == Instruction::FDiv || 135 I->getOpcode() == Instruction::URem || 136 I->getOpcode() == Instruction::SRem || 137 I->getOpcode() == Instruction::FRem) 138 return true; 139 return false; 140} 141 142void Reassociate::BuildRankMap(Function &F) { 143 unsigned i = 2; 144 145 // Assign distinct ranks to function arguments 146 for (Function::arg_iterator I = F.arg_begin(), E = F.arg_end(); I != E; ++I) 147 ValueRankMap[&*I] = ++i; 148 149 ReversePostOrderTraversal<Function*> RPOT(&F); 150 for (ReversePostOrderTraversal<Function*>::rpo_iterator I = RPOT.begin(), 151 E = RPOT.end(); I != E; ++I) { 152 BasicBlock *BB = *I; 153 unsigned BBRank = RankMap[BB] = ++i << 16; 154 155 // Walk the basic block, adding precomputed ranks for any instructions that 156 // we cannot move. This ensures that the ranks for these instructions are 157 // all different in the block. 158 for (BasicBlock::iterator I = BB->begin(), E = BB->end(); I != E; ++I) 159 if (isUnmovableInstruction(I)) 160 ValueRankMap[&*I] = ++BBRank; 161 } 162} 163 164unsigned Reassociate::getRank(Value *V) { 165 Instruction *I = dyn_cast<Instruction>(V); 166 if (I == 0) { 167 if (isa<Argument>(V)) return ValueRankMap[V]; // Function argument. 168 return 0; // Otherwise it's a global or constant, rank 0. 
169 } 170 171 if (unsigned Rank = ValueRankMap[I]) 172 return Rank; // Rank already known? 173 174 // If this is an expression, return the 1+MAX(rank(LHS), rank(RHS)) so that 175 // we can reassociate expressions for code motion! Since we do not recurse 176 // for PHI nodes, we cannot have infinite recursion here, because there 177 // cannot be loops in the value graph that do not go through PHI nodes. 178 unsigned Rank = 0, MaxRank = RankMap[I->getParent()]; 179 for (unsigned i = 0, e = I->getNumOperands(); 180 i != e && Rank != MaxRank; ++i) 181 Rank = std::max(Rank, getRank(I->getOperand(i))); 182 183 // If this is a not or neg instruction, do not count it for rank. This 184 // assures us that X and ~X will have the same rank. 185 if (!I->getType()->isIntegerTy() || 186 (!BinaryOperator::isNot(I) && !BinaryOperator::isNeg(I))) 187 ++Rank; 188 189 //DEBUG(dbgs() << "Calculated Rank[" << V->getName() << "] = " 190 // << Rank << "\n"); 191 192 return ValueRankMap[I] = Rank; 193} 194 195/// isReassociableOp - Return true if V is an instruction of the specified 196/// opcode and if it only has one use. 197static BinaryOperator *isReassociableOp(Value *V, unsigned Opcode) { 198 if ((V->hasOneUse() || V->use_empty()) && isa<Instruction>(V) && 199 cast<Instruction>(V)->getOpcode() == Opcode) 200 return cast<BinaryOperator>(V); 201 return 0; 202} 203 204/// LowerNegateToMultiply - Replace 0-X with X*-1. 205/// 206static Instruction *LowerNegateToMultiply(Instruction *Neg, 207 DenseMap<AssertingVH<>, unsigned> &ValueRankMap) { 208 Constant *Cst = Constant::getAllOnesValue(Neg->getType()); 209 210 Instruction *Res = BinaryOperator::CreateMul(Neg->getOperand(1), Cst, "",Neg); 211 ValueRankMap.erase(Neg); 212 Res->takeName(Neg); 213 Neg->replaceAllUsesWith(Res); 214 Neg->eraseFromParent(); 215 return Res; 216} 217 218// Given an expression of the form '(A+B)+(D+C)', turn it into '(((A+B)+C)+D)'. 
219// Note that if D is also part of the expression tree that we recurse to 220// linearize it as well. Besides that case, this does not recurse into A,B, or 221// C. 222void Reassociate::LinearizeExpr(BinaryOperator *I) { 223 BinaryOperator *LHS = cast<BinaryOperator>(I->getOperand(0)); 224 BinaryOperator *RHS = cast<BinaryOperator>(I->getOperand(1)); 225 assert(isReassociableOp(LHS, I->getOpcode()) && 226 isReassociableOp(RHS, I->getOpcode()) && 227 "Not an expression that needs linearization?"); 228 229 DEBUG(dbgs() << "Linear" << *LHS << '\n' << *RHS << '\n' << *I << '\n'); 230 231 // Move the RHS instruction to live immediately before I, avoiding breaking 232 // dominator properties. 233 RHS->moveBefore(I); 234 235 // Move operands around to do the linearization. 236 I->setOperand(1, RHS->getOperand(0)); 237 RHS->setOperand(0, LHS); 238 I->setOperand(0, RHS); 239 240 ++NumLinear; 241 MadeChange = true; 242 DEBUG(dbgs() << "Linearized: " << *I << '\n'); 243 244 // If D is part of this expression tree, tail recurse. 245 if (isReassociableOp(I->getOperand(1), I->getOpcode())) 246 LinearizeExpr(I); 247} 248 249 250/// LinearizeExprTree - Given an associative binary expression tree, traverse 251/// all of the uses putting it into canonical form. This forces a left-linear 252/// form of the expression (((a+b)+c)+d), and collects information about the 253/// rank of the non-tree operands. 254/// 255/// NOTE: These intentionally destroys the expression tree operands (turning 256/// them into undef values) to reduce #uses of the values. This means that the 257/// caller MUST use something like RewriteExprTree to put the values back in. 258/// 259void Reassociate::LinearizeExprTree(BinaryOperator *I, 260 SmallVectorImpl<ValueEntry> &Ops) { 261 Value *LHS = I->getOperand(0), *RHS = I->getOperand(1); 262 unsigned Opcode = I->getOpcode(); 263 264 // First step, linearize the expression if it is in ((A+B)+(C+D)) form. 
265 BinaryOperator *LHSBO = isReassociableOp(LHS, Opcode); 266 BinaryOperator *RHSBO = isReassociableOp(RHS, Opcode); 267 268 // If this is a multiply expression tree and it contains internal negations, 269 // transform them into multiplies by -1 so they can be reassociated. 270 if (I->getOpcode() == Instruction::Mul) { 271 if (!LHSBO && LHS->hasOneUse() && BinaryOperator::isNeg(LHS)) { 272 LHS = LowerNegateToMultiply(cast<Instruction>(LHS), ValueRankMap); 273 LHSBO = isReassociableOp(LHS, Opcode); 274 } 275 if (!RHSBO && RHS->hasOneUse() && BinaryOperator::isNeg(RHS)) { 276 RHS = LowerNegateToMultiply(cast<Instruction>(RHS), ValueRankMap); 277 RHSBO = isReassociableOp(RHS, Opcode); 278 } 279 } 280 281 if (!LHSBO) { 282 if (!RHSBO) { 283 // Neither the LHS or RHS as part of the tree, thus this is a leaf. As 284 // such, just remember these operands and their rank. 285 Ops.push_back(ValueEntry(getRank(LHS), LHS)); 286 Ops.push_back(ValueEntry(getRank(RHS), RHS)); 287 288 // Clear the leaves out. 289 I->setOperand(0, UndefValue::get(I->getType())); 290 I->setOperand(1, UndefValue::get(I->getType())); 291 return; 292 } 293 294 // Turn X+(Y+Z) -> (Y+Z)+X 295 std::swap(LHSBO, RHSBO); 296 std::swap(LHS, RHS); 297 bool Success = !I->swapOperands(); 298 assert(Success && "swapOperands failed"); 299 Success = false; 300 MadeChange = true; 301 } else if (RHSBO) { 302 // Turn (A+B)+(C+D) -> (((A+B)+C)+D). This guarantees the RHS is not 303 // part of the expression tree. 304 LinearizeExpr(I); 305 LHS = LHSBO = cast<BinaryOperator>(I->getOperand(0)); 306 RHS = I->getOperand(1); 307 RHSBO = 0; 308 } 309 310 // Okay, now we know that the LHS is a nested expression and that the RHS is 311 // not. Perform reassociation. 312 assert(!isReassociableOp(RHS, Opcode) && "LinearizeExpr failed!"); 313 314 // Move LHS right before I to make sure that the tree expression dominates all 315 // values. 316 LHSBO->moveBefore(I); 317 318 // Linearize the expression tree on the LHS. 
319 LinearizeExprTree(LHSBO, Ops); 320 321 // Remember the RHS operand and its rank. 322 Ops.push_back(ValueEntry(getRank(RHS), RHS)); 323 324 // Clear the RHS leaf out. 325 I->setOperand(1, UndefValue::get(I->getType())); 326} 327 328// RewriteExprTree - Now that the operands for this expression tree are 329// linearized and optimized, emit them in-order. This function is written to be 330// tail recursive. 331void Reassociate::RewriteExprTree(BinaryOperator *I, 332 SmallVectorImpl<ValueEntry> &Ops, 333 unsigned i) { 334 if (i+2 == Ops.size()) { 335 if (I->getOperand(0) != Ops[i].Op || 336 I->getOperand(1) != Ops[i+1].Op) { 337 Value *OldLHS = I->getOperand(0); 338 DEBUG(dbgs() << "RA: " << *I << '\n'); 339 I->setOperand(0, Ops[i].Op); 340 I->setOperand(1, Ops[i+1].Op); 341 DEBUG(dbgs() << "TO: " << *I << '\n'); 342 MadeChange = true; 343 ++NumChanged; 344 345 // If we reassociated a tree to fewer operands (e.g. (1+a+2) -> (a+3) 346 // delete the extra, now dead, nodes. 347 RemoveDeadBinaryOp(OldLHS); 348 } 349 return; 350 } 351 assert(i+2 < Ops.size() && "Ops index out of range!"); 352 353 if (I->getOperand(1) != Ops[i].Op) { 354 DEBUG(dbgs() << "RA: " << *I << '\n'); 355 I->setOperand(1, Ops[i].Op); 356 DEBUG(dbgs() << "TO: " << *I << '\n'); 357 MadeChange = true; 358 ++NumChanged; 359 } 360 361 BinaryOperator *LHS = cast<BinaryOperator>(I->getOperand(0)); 362 assert(LHS->getOpcode() == I->getOpcode() && 363 "Improper expression tree!"); 364 365 // Compactify the tree instructions together with each other to guarantee 366 // that the expression tree is dominated by all of Ops. 367 LHS->moveBefore(I); 368 RewriteExprTree(LHS, Ops, i+1); 369} 370 371 372 373// NegateValue - Insert instructions before the instruction pointed to by BI, 374// that computes the negative version of the value specified. The negative 375// version of the value is returned, and BI is left pointing at the instruction 376// that should be processed next by the reassociation pass. 
377// 378static Value *NegateValue(Value *V, Instruction *BI) { 379 if (Constant *C = dyn_cast<Constant>(V)) 380 return ConstantExpr::getNeg(C); 381 382 // We are trying to expose opportunity for reassociation. One of the things 383 // that we want to do to achieve this is to push a negation as deep into an 384 // expression chain as possible, to expose the add instructions. In practice, 385 // this means that we turn this: 386 // X = -(A+12+C+D) into X = -A + -12 + -C + -D = -12 + -A + -C + -D 387 // so that later, a: Y = 12+X could get reassociated with the -12 to eliminate 388 // the constants. We assume that instcombine will clean up the mess later if 389 // we introduce tons of unnecessary negation instructions. 390 // 391 if (Instruction *I = dyn_cast<Instruction>(V)) 392 if (I->getOpcode() == Instruction::Add && I->hasOneUse()) { 393 // Push the negates through the add. 394 I->setOperand(0, NegateValue(I->getOperand(0), BI)); 395 I->setOperand(1, NegateValue(I->getOperand(1), BI)); 396 397 // We must move the add instruction here, because the neg instructions do 398 // not dominate the old add instruction in general. By moving it, we are 399 // assured that the neg instructions we just inserted dominate the 400 // instruction we are about to insert after them. 401 // 402 I->moveBefore(BI); 403 I->setName(I->getName()+".neg"); 404 return I; 405 } 406 407 // Okay, we need to materialize a negated version of V with an instruction. 408 // Scan the use lists of V to see if we have one already. 409 for (Value::use_iterator UI = V->use_begin(), E = V->use_end(); UI != E;++UI){ 410 User *U = *UI; 411 if (!BinaryOperator::isNeg(U)) continue; 412 413 // We found one! Now we have to make sure that the definition dominates 414 // this use. We do this by moving it to the entry block (if it is a 415 // non-instruction value) or right after the definition. These negates will 416 // be zapped by reassociate later, so we don't need much finesse here. 
417 BinaryOperator *TheNeg = cast<BinaryOperator>(U); 418 419 // Verify that the negate is in this function, V might be a constant expr. 420 if (TheNeg->getParent()->getParent() != BI->getParent()->getParent()) 421 continue; 422 423 BasicBlock::iterator InsertPt; 424 if (Instruction *InstInput = dyn_cast<Instruction>(V)) { 425 if (InvokeInst *II = dyn_cast<InvokeInst>(InstInput)) { 426 InsertPt = II->getNormalDest()->begin(); 427 } else { 428 InsertPt = InstInput; 429 ++InsertPt; 430 } 431 while (isa<PHINode>(InsertPt)) ++InsertPt; 432 } else { 433 InsertPt = TheNeg->getParent()->getParent()->getEntryBlock().begin(); 434 } 435 TheNeg->moveBefore(InsertPt); 436 return TheNeg; 437 } 438 439 // Insert a 'neg' instruction that subtracts the value from zero to get the 440 // negation. 441 return BinaryOperator::CreateNeg(V, V->getName() + ".neg", BI); 442} 443 444/// ShouldBreakUpSubtract - Return true if we should break up this subtract of 445/// X-Y into (X + -Y). 446static bool ShouldBreakUpSubtract(Instruction *Sub) { 447 // If this is a negation, we can't split it up! 448 if (BinaryOperator::isNeg(Sub)) 449 return false; 450 451 // Don't bother to break this up unless either the LHS is an associable add or 452 // subtract or if this is only used by one. 453 if (isReassociableOp(Sub->getOperand(0), Instruction::Add) || 454 isReassociableOp(Sub->getOperand(0), Instruction::Sub)) 455 return true; 456 if (isReassociableOp(Sub->getOperand(1), Instruction::Add) || 457 isReassociableOp(Sub->getOperand(1), Instruction::Sub)) 458 return true; 459 if (Sub->hasOneUse() && 460 (isReassociableOp(Sub->use_back(), Instruction::Add) || 461 isReassociableOp(Sub->use_back(), Instruction::Sub))) 462 return true; 463 464 return false; 465} 466 467/// BreakUpSubtract - If we have (X-Y), and if either X is an add, or if this is 468/// only used by an add, transform this into (X+(0-Y)) to promote better 469/// reassociation. 
470static Instruction *BreakUpSubtract(Instruction *Sub, 471 DenseMap<AssertingVH<>, unsigned> &ValueRankMap) { 472 // Convert a subtract into an add and a neg instruction. This allows sub 473 // instructions to be commuted with other add instructions. 474 // 475 // Calculate the negative value of Operand 1 of the sub instruction, 476 // and set it as the RHS of the add instruction we just made. 477 // 478 Value *NegVal = NegateValue(Sub->getOperand(1), Sub); 479 Instruction *New = 480 BinaryOperator::CreateAdd(Sub->getOperand(0), NegVal, "", Sub); 481 New->takeName(Sub); 482 483 // Everyone now refers to the add instruction. 484 ValueRankMap.erase(Sub); 485 Sub->replaceAllUsesWith(New); 486 Sub->eraseFromParent(); 487 488 DEBUG(dbgs() << "Negated: " << *New << '\n'); 489 return New; 490} 491 492/// ConvertShiftToMul - If this is a shift of a reassociable multiply or is used 493/// by one, change this into a multiply by a constant to assist with further 494/// reassociation. 495static Instruction *ConvertShiftToMul(Instruction *Shl, 496 DenseMap<AssertingVH<>, unsigned> &ValueRankMap) { 497 // If an operand of this shift is a reassociable multiply, or if the shift 498 // is used by a reassociable multiply or add, turn into a multiply. 499 if (isReassociableOp(Shl->getOperand(0), Instruction::Mul) || 500 (Shl->hasOneUse() && 501 (isReassociableOp(Shl->use_back(), Instruction::Mul) || 502 isReassociableOp(Shl->use_back(), Instruction::Add)))) { 503 Constant *MulCst = ConstantInt::get(Shl->getType(), 1); 504 MulCst = ConstantExpr::getShl(MulCst, cast<Constant>(Shl->getOperand(1))); 505 506 Instruction *Mul = 507 BinaryOperator::CreateMul(Shl->getOperand(0), MulCst, "", Shl); 508 ValueRankMap.erase(Shl); 509 Mul->takeName(Shl); 510 Shl->replaceAllUsesWith(Mul); 511 Shl->eraseFromParent(); 512 return Mul; 513 } 514 return 0; 515} 516 517// Scan backwards and forwards among values with the same rank as element i to 518// see if X exists. If X does not exist, return i. 
This is useful when 519// scanning for 'x' when we see '-x' because they both get the same rank. 520static unsigned FindInOperandList(SmallVectorImpl<ValueEntry> &Ops, unsigned i, 521 Value *X) { 522 unsigned XRank = Ops[i].Rank; 523 unsigned e = Ops.size(); 524 for (unsigned j = i+1; j != e && Ops[j].Rank == XRank; ++j) 525 if (Ops[j].Op == X) 526 return j; 527 // Scan backwards. 528 for (unsigned j = i-1; j != ~0U && Ops[j].Rank == XRank; --j) 529 if (Ops[j].Op == X) 530 return j; 531 return i; 532} 533 534/// EmitAddTreeOfValues - Emit a tree of add instructions, summing Ops together 535/// and returning the result. Insert the tree before I. 536static Value *EmitAddTreeOfValues(Instruction *I, SmallVectorImpl<Value*> &Ops){ 537 if (Ops.size() == 1) return Ops.back(); 538 539 Value *V1 = Ops.back(); 540 Ops.pop_back(); 541 Value *V2 = EmitAddTreeOfValues(I, Ops); 542 return BinaryOperator::CreateAdd(V2, V1, "tmp", I); 543} 544 545/// RemoveFactorFromExpression - If V is an expression tree that is a 546/// multiplication sequence, and if this sequence contains a multiply by Factor, 547/// remove Factor from the tree and return the new tree. 548Value *Reassociate::RemoveFactorFromExpression(Value *V, Value *Factor) { 549 BinaryOperator *BO = isReassociableOp(V, Instruction::Mul); 550 if (!BO) return 0; 551 552 SmallVector<ValueEntry, 8> Factors; 553 LinearizeExprTree(BO, Factors); 554 555 bool FoundFactor = false; 556 bool NeedsNegate = false; 557 for (unsigned i = 0, e = Factors.size(); i != e; ++i) { 558 if (Factors[i].Op == Factor) { 559 FoundFactor = true; 560 Factors.erase(Factors.begin()+i); 561 break; 562 } 563 564 // If this is a negative version of this factor, remove it. 
565 if (ConstantInt *FC1 = dyn_cast<ConstantInt>(Factor)) 566 if (ConstantInt *FC2 = dyn_cast<ConstantInt>(Factors[i].Op)) 567 if (FC1->getValue() == -FC2->getValue()) { 568 FoundFactor = NeedsNegate = true; 569 Factors.erase(Factors.begin()+i); 570 break; 571 } 572 } 573 574 if (!FoundFactor) { 575 // Make sure to restore the operands to the expression tree. 576 RewriteExprTree(BO, Factors); 577 return 0; 578 } 579 580 BasicBlock::iterator InsertPt = BO; ++InsertPt; 581 582 // If this was just a single multiply, remove the multiply and return the only 583 // remaining operand. 584 if (Factors.size() == 1) { 585 ValueRankMap.erase(BO); 586 BO->eraseFromParent(); 587 V = Factors[0].Op; 588 } else { 589 RewriteExprTree(BO, Factors); 590 V = BO; 591 } 592 593 if (NeedsNegate) 594 V = BinaryOperator::CreateNeg(V, "neg", InsertPt); 595 596 return V; 597} 598 599/// FindSingleUseMultiplyFactors - If V is a single-use multiply, recursively 600/// add its operands as factors, otherwise add V to the list of factors. 601/// 602/// Ops is the top-level list of add operands we're trying to factor. 603static void FindSingleUseMultiplyFactors(Value *V, 604 SmallVectorImpl<Value*> &Factors, 605 const SmallVectorImpl<ValueEntry> &Ops, 606 bool IsRoot) { 607 BinaryOperator *BO; 608 if (!(V->hasOneUse() || V->use_empty()) || // More than one use. 609 !(BO = dyn_cast<BinaryOperator>(V)) || 610 BO->getOpcode() != Instruction::Mul) { 611 Factors.push_back(V); 612 return; 613 } 614 615 // If this value has a single use because it is another input to the add 616 // tree we're reassociating and we dropped its use, it actually has two 617 // uses and we can't factor it. 618 if (!IsRoot) { 619 for (unsigned i = 0, e = Ops.size(); i != e; ++i) 620 if (Ops[i].Op == V) { 621 Factors.push_back(V); 622 return; 623 } 624 } 625 626 627 // Otherwise, add the LHS and RHS to the list of factors. 
628 FindSingleUseMultiplyFactors(BO->getOperand(1), Factors, Ops, false); 629 FindSingleUseMultiplyFactors(BO->getOperand(0), Factors, Ops, false); 630} 631 632/// OptimizeAndOrXor - Optimize a series of operands to an 'and', 'or', or 'xor' 633/// instruction. This optimizes based on identities. If it can be reduced to 634/// a single Value, it is returned, otherwise the Ops list is mutated as 635/// necessary. 636static Value *OptimizeAndOrXor(unsigned Opcode, 637 SmallVectorImpl<ValueEntry> &Ops) { 638 // Scan the operand lists looking for X and ~X pairs, along with X,X pairs. 639 // If we find any, we can simplify the expression. X&~X == 0, X|~X == -1. 640 for (unsigned i = 0, e = Ops.size(); i != e; ++i) { 641 // First, check for X and ~X in the operand list. 642 assert(i < Ops.size()); 643 if (BinaryOperator::isNot(Ops[i].Op)) { // Cannot occur for ^. 644 Value *X = BinaryOperator::getNotArgument(Ops[i].Op); 645 unsigned FoundX = FindInOperandList(Ops, i, X); 646 if (FoundX != i) { 647 if (Opcode == Instruction::And) // ...&X&~X = 0 648 return Constant::getNullValue(X->getType()); 649 650 if (Opcode == Instruction::Or) // ...|X|~X = -1 651 return Constant::getAllOnesValue(X->getType()); 652 } 653 } 654 655 // Next, check for duplicate pairs of values, which we assume are next to 656 // each other, due to our sorting criteria. 657 assert(i < Ops.size()); 658 if (i+1 != Ops.size() && Ops[i+1].Op == Ops[i].Op) { 659 if (Opcode == Instruction::And || Opcode == Instruction::Or) { 660 // Drop duplicate values for And and Or. 661 Ops.erase(Ops.begin()+i); 662 --i; --e; 663 ++NumAnnihil; 664 continue; 665 } 666 667 // Drop pairs of values for Xor. 
668 assert(Opcode == Instruction::Xor); 669 if (e == 2) 670 return Constant::getNullValue(Ops[0].Op->getType()); 671 672 // Y ^ X^X -> Y 673 Ops.erase(Ops.begin()+i, Ops.begin()+i+2); 674 i -= 1; e -= 2; 675 ++NumAnnihil; 676 } 677 } 678 return 0; 679} 680 681/// OptimizeAdd - Optimize a series of operands to an 'add' instruction. This 682/// optimizes based on identities. If it can be reduced to a single Value, it 683/// is returned, otherwise the Ops list is mutated as necessary. 684Value *Reassociate::OptimizeAdd(Instruction *I, 685 SmallVectorImpl<ValueEntry> &Ops) { 686 // Scan the operand lists looking for X and -X pairs. If we find any, we 687 // can simplify the expression. X+-X == 0. While we're at it, scan for any 688 // duplicates. We want to canonicalize Y+Y+Y+Z -> 3*Y+Z. 689 // 690 // TODO: We could handle "X + ~X" -> "-1" if we wanted, since "-X = ~X+1". 691 // 692 for (unsigned i = 0, e = Ops.size(); i != e; ++i) { 693 Value *TheOp = Ops[i].Op; 694 // Check to see if we've seen this operand before. If so, we factor all 695 // instances of the operand together. Due to our sorting criteria, we know 696 // that these need to be next to each other in the vector. 697 if (i+1 != Ops.size() && Ops[i+1].Op == TheOp) { 698 // Rescan the list, remove all instances of this operand from the expr. 699 unsigned NumFound = 0; 700 do { 701 Ops.erase(Ops.begin()+i); 702 ++NumFound; 703 } while (i != Ops.size() && Ops[i].Op == TheOp); 704 705 DEBUG(errs() << "\nFACTORING [" << NumFound << "]: " << *TheOp << '\n'); 706 ++NumFactor; 707 708 // Insert a new multiply. 709 Value *Mul = ConstantInt::get(cast<IntegerType>(I->getType()), NumFound); 710 Mul = BinaryOperator::CreateMul(TheOp, Mul, "factor", I); 711 712 // Now that we have inserted a multiply, optimize it. 
This allows us to 713 // handle cases that require multiple factoring steps, such as this: 714 // (X*2) + (X*2) + (X*2) -> (X*2)*3 -> X*6 715 Mul = ReassociateExpression(cast<BinaryOperator>(Mul)); 716 717 // If every add operand was a duplicate, return the multiply. 718 if (Ops.empty()) 719 return Mul; 720 721 // Otherwise, we had some input that didn't have the dupe, such as 722 // "A + A + B" -> "A*2 + B". Add the new multiply to the list of 723 // things being added by this operation. 724 Ops.insert(Ops.begin(), ValueEntry(getRank(Mul), Mul)); 725 726 --i; 727 e = Ops.size(); 728 continue; 729 } 730 731 // Check for X and -X in the operand list. 732 if (!BinaryOperator::isNeg(TheOp)) 733 continue; 734 735 Value *X = BinaryOperator::getNegArgument(TheOp); 736 unsigned FoundX = FindInOperandList(Ops, i, X); 737 if (FoundX == i) 738 continue; 739 740 // Remove X and -X from the operand list. 741 if (Ops.size() == 2) 742 return Constant::getNullValue(X->getType()); 743 744 Ops.erase(Ops.begin()+i); 745 if (i < FoundX) 746 --FoundX; 747 else 748 --i; // Need to back up an extra one. 749 Ops.erase(Ops.begin()+FoundX); 750 ++NumAnnihil; 751 --i; // Revisit element. 752 e -= 2; // Removed two elements. 753 } 754 755 // Scan the operand list, checking to see if there are any common factors 756 // between operands. Consider something like A*A+A*B*C+D. We would like to 757 // reassociate this to A*(A+B*C)+D, which reduces the number of multiplies. 758 // To efficiently find this, we count the number of times a factor occurs 759 // for any ADD operands that are MULs. 760 DenseMap<Value*, unsigned> FactorOccurrences; 761 762 // Keep track of each multiply we see, to avoid triggering on (X*4)+(X*4) 763 // where they are actually the same multiply. 
764 unsigned MaxOcc = 0; 765 Value *MaxOccVal = 0; 766 for (unsigned i = 0, e = Ops.size(); i != e; ++i) { 767 BinaryOperator *BOp = dyn_cast<BinaryOperator>(Ops[i].Op); 768 if (BOp == 0 || BOp->getOpcode() != Instruction::Mul || !BOp->use_empty()) 769 continue; 770 771 // Compute all of the factors of this added value. 772 SmallVector<Value*, 8> Factors; 773 FindSingleUseMultiplyFactors(BOp, Factors, Ops, true); 774 assert(Factors.size() > 1 && "Bad linearize!"); 775 776 // Add one to FactorOccurrences for each unique factor in this op. 777 SmallPtrSet<Value*, 8> Duplicates; 778 for (unsigned i = 0, e = Factors.size(); i != e; ++i) { 779 Value *Factor = Factors[i]; 780 if (!Duplicates.insert(Factor)) continue; 781 782 unsigned Occ = ++FactorOccurrences[Factor]; 783 if (Occ > MaxOcc) { MaxOcc = Occ; MaxOccVal = Factor; } 784 785 // If Factor is a negative constant, add the negated value as a factor 786 // because we can percolate the negate out. Watch for minint, which 787 // cannot be positivified. 788 if (ConstantInt *CI = dyn_cast<ConstantInt>(Factor)) 789 if (CI->getValue().isNegative() && !CI->getValue().isMinSignedValue()) { 790 Factor = ConstantInt::get(CI->getContext(), -CI->getValue()); 791 assert(!Duplicates.count(Factor) && 792 "Shouldn't have two constant factors, missed a canonicalize"); 793 794 unsigned Occ = ++FactorOccurrences[Factor]; 795 if (Occ > MaxOcc) { MaxOcc = Occ; MaxOccVal = Factor; } 796 } 797 } 798 } 799 800 // If any factor occurred more than one time, we can pull it out. 801 if (MaxOcc > 1) { 802 DEBUG(errs() << "\nFACTORING [" << MaxOcc << "]: " << *MaxOccVal << '\n'); 803 ++NumFactor; 804 805 // Create a new instruction that uses the MaxOccVal twice. If we don't do 806 // this, we could otherwise run into situations where removing a factor 807 // from an expression will drop a use of maxocc, and this can cause 808 // RemoveFactorFromExpression on successive values to behave differently. 
809 Instruction *DummyInst = BinaryOperator::CreateAdd(MaxOccVal, MaxOccVal); 810 SmallVector<Value*, 4> NewMulOps; 811 for (unsigned i = 0, e = Ops.size(); i != e; ++i) { 812 // Only try to remove factors from expressions we're allowed to. 813 BinaryOperator *BOp = dyn_cast<BinaryOperator>(Ops[i].Op); 814 if (BOp == 0 || BOp->getOpcode() != Instruction::Mul || !BOp->use_empty()) 815 continue; 816 817 if (Value *V = RemoveFactorFromExpression(Ops[i].Op, MaxOccVal)) { 818 NewMulOps.push_back(V); 819 Ops.erase(Ops.begin()+i); 820 --i; --e; 821 } 822 } 823 824 // No need for extra uses anymore. 825 delete DummyInst; 826 827 unsigned NumAddedValues = NewMulOps.size(); 828 Value *V = EmitAddTreeOfValues(I, NewMulOps); 829 830 // Now that we have inserted the add tree, optimize it. This allows us to 831 // handle cases that require multiple factoring steps, such as this: 832 // A*A*B + A*A*C --> A*(A*B+A*C) --> A*(A*(B+C)) 833 assert(NumAddedValues > 1 && "Each occurrence should contribute a value"); 834 (void)NumAddedValues; 835 V = ReassociateExpression(cast<BinaryOperator>(V)); 836 837 // Create the multiply. 838 Value *V2 = BinaryOperator::CreateMul(V, MaxOccVal, "tmp", I); 839 840 // Rerun associate on the multiply in case the inner expression turned into 841 // a multiply. We want to make sure that we keep things in canonical form. 842 V2 = ReassociateExpression(cast<BinaryOperator>(V2)); 843 844 // If every add operand included the factor (e.g. "A*B + A*C"), then the 845 // entire result expression is just the multiply "A*(B+C)". 846 if (Ops.empty()) 847 return V2; 848 849 // Otherwise, we had some input that didn't have the factor, such as 850 // "A*B + A*C + D" -> "A*(B+C) + D". Add the new multiply to the list of 851 // things being added by this operation. 
852 Ops.insert(Ops.begin(), ValueEntry(getRank(V2), V2)); 853 } 854 855 return 0; 856} 857 858Value *Reassociate::OptimizeExpression(BinaryOperator *I, 859 SmallVectorImpl<ValueEntry> &Ops) { 860 // Now that we have the linearized expression tree, try to optimize it. 861 // Start by folding any constants that we found. 862 bool IterateOptimization = false; 863 if (Ops.size() == 1) return Ops[0].Op; 864 865 unsigned Opcode = I->getOpcode(); 866 867 if (Constant *V1 = dyn_cast<Constant>(Ops[Ops.size()-2].Op)) 868 if (Constant *V2 = dyn_cast<Constant>(Ops.back().Op)) { 869 Ops.pop_back(); 870 Ops.back().Op = ConstantExpr::get(Opcode, V1, V2); 871 return OptimizeExpression(I, Ops); 872 } 873 874 // Check for destructive annihilation due to a constant being used. 875 if (ConstantInt *CstVal = dyn_cast<ConstantInt>(Ops.back().Op)) 876 switch (Opcode) { 877 default: break; 878 case Instruction::And: 879 if (CstVal->isZero()) // X & 0 -> 0 880 return CstVal; 881 if (CstVal->isAllOnesValue()) // X & -1 -> X 882 Ops.pop_back(); 883 break; 884 case Instruction::Mul: 885 if (CstVal->isZero()) { // X * 0 -> 0 886 ++NumAnnihil; 887 return CstVal; 888 } 889 890 if (cast<ConstantInt>(CstVal)->isOne()) 891 Ops.pop_back(); // X * 1 -> X 892 break; 893 case Instruction::Or: 894 if (CstVal->isAllOnesValue()) // X | -1 -> -1 895 return CstVal; 896 // FALLTHROUGH! 897 case Instruction::Add: 898 case Instruction::Xor: 899 if (CstVal->isZero()) // X [|^+] 0 -> X 900 Ops.pop_back(); 901 break; 902 } 903 if (Ops.size() == 1) return Ops[0].Op; 904 905 // Handle destructive annihilation due to identities between elements in the 906 // argument list here. 
907 switch (Opcode) { 908 default: break; 909 case Instruction::And: 910 case Instruction::Or: 911 case Instruction::Xor: { 912 unsigned NumOps = Ops.size(); 913 if (Value *Result = OptimizeAndOrXor(Opcode, Ops)) 914 return Result; 915 IterateOptimization |= Ops.size() != NumOps; 916 break; 917 } 918 919 case Instruction::Add: { 920 unsigned NumOps = Ops.size(); 921 if (Value *Result = OptimizeAdd(I, Ops)) 922 return Result; 923 IterateOptimization |= Ops.size() != NumOps; 924 } 925 926 break; 927 //case Instruction::Mul: 928 } 929 930 if (IterateOptimization) 931 return OptimizeExpression(I, Ops); 932 return 0; 933} 934 935 936/// ReassociateBB - Inspect all of the instructions in this basic block, 937/// reassociating them as we go. 938void Reassociate::ReassociateBB(BasicBlock *BB) { 939 for (BasicBlock::iterator BBI = BB->begin(); BBI != BB->end(); ) { 940 Instruction *BI = BBI++; 941 if (BI->getOpcode() == Instruction::Shl && 942 isa<ConstantInt>(BI->getOperand(1))) 943 if (Instruction *NI = ConvertShiftToMul(BI, ValueRankMap)) { 944 MadeChange = true; 945 BI = NI; 946 } 947 948 // Reject cases where it is pointless to do this. 949 if (!isa<BinaryOperator>(BI) || BI->getType()->isFloatingPointTy() || 950 BI->getType()->isVectorTy()) 951 continue; // Floating point ops are not associative. 952 953 // Do not reassociate boolean (i1) expressions. We want to preserve the 954 // original order of evaluation for short-circuited comparisons that 955 // SimplifyCFG has folded to AND/OR expressions. If the expression 956 // is not further optimized, it is likely to be transformed back to a 957 // short-circuited form for code gen, and the source order may have been 958 // optimized for the most likely conditions. 959 if (BI->getType()->isIntegerTy(1)) 960 continue; 961 962 // If this is a subtract instruction which is not already in negate form, 963 // see if we can convert it to X+-Y. 
/// ReassociateBB - Inspect all of the instructions in this basic block,
/// reassociating them as we go.
void Reassociate::ReassociateBB(BasicBlock *BB) {
  for (BasicBlock::iterator BBI = BB->begin(); BBI != BB->end(); ) {
    // Advance BBI past BI before transforming BI, so the iterator stays
    // valid if BI is replaced below.
    Instruction *BI = BBI++;
    // Canonicalize "X << C" (constant shift) into a multiply so it can take
    // part in multiply-tree reassociation.
    if (BI->getOpcode() == Instruction::Shl &&
        isa<ConstantInt>(BI->getOperand(1)))
      if (Instruction *NI = ConvertShiftToMul(BI, ValueRankMap)) {
        MadeChange = true;
        BI = NI;
      }

    // Reject cases where it is pointless to do this.
    if (!isa<BinaryOperator>(BI) || BI->getType()->isFloatingPointTy() ||
        BI->getType()->isVectorTy())
      continue;  // Floating point ops are not associative.

    // Do not reassociate boolean (i1) expressions.  We want to preserve the
    // original order of evaluation for short-circuited comparisons that
    // SimplifyCFG has folded to AND/OR expressions.  If the expression
    // is not further optimized, it is likely to be transformed back to a
    // short-circuited form for code gen, and the source order may have been
    // optimized for the most likely conditions.
    if (BI->getType()->isIntegerTy(1))
      continue;

    // If this is a subtract instruction which is not already in negate form,
    // see if we can convert it to X+-Y.
    if (BI->getOpcode() == Instruction::Sub) {
      if (ShouldBreakUpSubtract(BI)) {
        BI = BreakUpSubtract(BI, ValueRankMap);
        // Reset the BBI iterator in case BreakUpSubtract changed the
        // instruction it points to.
        BBI = BI;
        ++BBI;
        MadeChange = true;
      } else if (BinaryOperator::isNeg(BI)) {
        // Otherwise, this is a negation.  See if the operand is a multiply
        // tree and if this is not an inner node of a multiply tree (inner
        // negates are handled when the root of the tree is visited).
        if (isReassociableOp(BI->getOperand(1), Instruction::Mul) &&
            (!BI->hasOneUse() ||
             !isReassociableOp(BI->use_back(), Instruction::Mul))) {
          BI = LowerNegateToMultiply(BI, ValueRankMap);
          MadeChange = true;
        }
      }
    }

    // If this instruction is a commutative binary operator, process it.
    if (!BI->isAssociative()) continue;
    BinaryOperator *I = cast<BinaryOperator>(BI);

    // If this is an interior node of a reassociable tree, ignore it until we
    // get to the root of the tree, to avoid N^2 analysis.
    if (I->hasOneUse() && isReassociableOp(I->use_back(), I->getOpcode()))
      continue;

    // If this is an add tree that is used by a sub instruction, ignore it
    // until we process the subtract.
    if (I->hasOneUse() && I->getOpcode() == Instruction::Add &&
        cast<Instruction>(I->use_back())->getOpcode() == Instruction::Sub)
      continue;

    ReassociateExpression(I);
  }
}
Use a 1014 // stable_sort so that values with equal ranks will have their relative 1015 // positions maintained (and so the compiler is deterministic). Note that 1016 // this sorts so that the highest ranking values end up at the beginning of 1017 // the vector. 1018 std::stable_sort(Ops.begin(), Ops.end()); 1019 1020 // OptimizeExpression - Now that we have the expression tree in a convenient 1021 // sorted form, optimize it globally if possible. 1022 if (Value *V = OptimizeExpression(I, Ops)) { 1023 // This expression tree simplified to something that isn't a tree, 1024 // eliminate it. 1025 DEBUG(dbgs() << "Reassoc to scalar: " << *V << '\n'); 1026 I->replaceAllUsesWith(V); 1027 RemoveDeadBinaryOp(I); 1028 ++NumAnnihil; 1029 return V; 1030 } 1031 1032 // We want to sink immediates as deeply as possible except in the case where 1033 // this is a multiply tree used only by an add, and the immediate is a -1. 1034 // In this case we reassociate to put the negation on the outside so that we 1035 // can fold the negation into the add: (-X)*Y + Z -> Z-X*Y 1036 if (I->getOpcode() == Instruction::Mul && I->hasOneUse() && 1037 cast<Instruction>(I->use_back())->getOpcode() == Instruction::Add && 1038 isa<ConstantInt>(Ops.back().Op) && 1039 cast<ConstantInt>(Ops.back().Op)->isAllOnesValue()) { 1040 ValueEntry Tmp = Ops.pop_back_val(); 1041 Ops.insert(Ops.begin(), Tmp); 1042 } 1043 1044 DEBUG(dbgs() << "RAOut:\t"; PrintOps(I, Ops); dbgs() << '\n'); 1045 1046 if (Ops.size() == 1) { 1047 // This expression tree simplified to something that isn't a tree, 1048 // eliminate it. 1049 I->replaceAllUsesWith(Ops[0].Op); 1050 RemoveDeadBinaryOp(I); 1051 return Ops[0].Op; 1052 } 1053 1054 // Now that we ordered and optimized the expressions, splat them back into 1055 // the expression tree, removing any unneeded nodes. 
1056 RewriteExprTree(I, Ops); 1057 return I; 1058} 1059 1060 1061bool Reassociate::runOnFunction(Function &F) { 1062 // Recalculate the rank map for F 1063 BuildRankMap(F); 1064 1065 MadeChange = false; 1066 for (Function::iterator FI = F.begin(), FE = F.end(); FI != FE; ++FI) 1067 ReassociateBB(FI); 1068 1069 // We are done with the rank map. 1070 RankMap.clear(); 1071 ValueRankMap.clear(); 1072 return MadeChange; 1073} 1074 1075