
Lines Matching full:instruction

86     /// next instruction to optimize.  Xforms that can invalidate this should
124 bool OptimizeInst(Instruction *I);
125 bool OptimizeMemoryInst(Instruction *I, Value *Addr, Type *AccessTy);
128 bool MoveExtToFormExtLoad(Instruction *I);
129 bool OptimizeExtUses(Instruction *I);
283 // If the instruction before the branch (skipping debug info) isn't a phi
323 const Instruction *User = cast<Instruction>(*UI);
332 Instruction *Insn = dyn_cast<Instruction>(UPN->getIncomingValue(I));
455 /// OptimizeNoopCopyExpression - If the specified cast instruction is a noop
498 Instruction *User = cast<Instruction>(*UI);
554 Instruction *User = cast<Instruction>(*UI);
644 // If the iterator instruction was recursively deleted, start over at the
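
The comment at 644 (repeated at 1654) names the pass's iterator-safety idiom: a transform may recursively delete the very instruction being visited, so the loop restarts at the top of the block rather than advancing a dangling iterator. A minimal stand-alone sketch of that idiom, using a std::list of ints as a stand-in for a basic block (the even-value "transform" is purely illustrative, not the real OptimizeInst):

    #include <iterator>
    #include <list>

    using Instruction = int;               // stand-in for an IR instruction
    using Block = std::list<Instruction>;  // stand-in for a basic block

    // A transform that may erase the instruction it is handed, invalidating
    // the caller's iterator; returns true when it did so.
    static bool optimizeInst(Block &BB, Block::iterator It) {
      if (*It % 2 == 0) {
        BB.erase(It);
        return true;
      }
      return false;
    }

    int main() {
      Block BB = {1, 2, 3, 4, 5};
      for (auto It = BB.begin(); It != BB.end();) {
        // If the current instruction was recursively deleted, start over at
        // the beginning of the block instead of advancing a dead iterator.
        It = optimizeInst(BB, It) ? BB.begin() : std::next(It);
      }
    }
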
741 // return is the first instruction in the block.
801 // Make sure the call instruction is followed by an unconditional branch to
888 SmallVectorImpl<Instruction*> &AddrModeInsts;
892 /// the memory instruction that we're computing this address for.
894 Instruction *MemoryInst;
905 AddressingModeMatcher(SmallVectorImpl<Instruction*> &AMI,
907 Instruction *MI, ExtAddrMode &AM)
917 Instruction *MemoryInst,
918 SmallVectorImpl<Instruction*> &AddrModeInsts,
932 bool IsProfitableToFoldIntoAddressingMode(Instruction *I,
975 if (isa<Instruction>(ScaleReg) && // not a constant expr.
981 // this instruction.
983 AddrModeInsts.push_back(cast<Instruction>(ScaleReg));
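
Lines 888-983 revolve around the addressing-mode record the matcher fills in. For context, the shape being matched is BaseReg + Scale*ScaledReg + Offset; a simplified stand-in for that record and the scale-matching step at 975-983 follows (field and function names are assumptions modeled on the x86 [base + scale*index + disp] form, not the exact LLVM struct):

    #include <cstdint>

    struct Value;  // opaque stand-in for an IR value

    // Simplified addressing-mode record of the shape
    //   BaseReg + Scale * ScaledReg + Offset
    struct ExtAddrModeSketch {
      Value  *BaseReg   = nullptr;
      Value  *ScaledReg = nullptr;
      int64_t Scale     = 0;
      int64_t Offset    = 0;
    };

    // Try to fold another scaled register into the mode; only one scaled
    // register slot exists, so a second, different register cannot match.
    static bool matchScaledValue(ExtAddrModeSketch &AM, Value *Reg,
                                 int64_t Scale) {
      if (AM.ScaledReg && AM.ScaledReg != Reg)
        return false;
      AM.ScaledReg = Reg;
      AM.Scale += Scale;
      return true;
    }
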
997 static bool MightBeFoldableInst(Instruction *I) {
999 case Instruction::BitCast:
1004 case Instruction::PtrToInt:
1007 case Instruction::IntToPtr:
1010 case Instruction::Add:
1012 case Instruction::Mul:
1013 case Instruction::Shl:
1016 case Instruction::GetElementPtr:
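
MightBeFoldableInst (997-1016) is a cheap pre-filter: only the opcodes listed above can ever be absorbed into an addressing mode, so anything else stops the search early. A condensed stand-alone version of that filter, with the opcode list taken directly from the matches above (the enum is a stand-in for llvm::Instruction's opcodes):

    enum Opcode { BitCast, PtrToInt, IntToPtr, Add, Mul, Shl,
                  GetElementPtr, Other };

    // Cheap filter run before the full match: only these opcodes can be
    // absorbed into a [Base + Scale*Index + Disp] style addressing mode.
    static bool mightBeFoldable(Opcode Op) {
      switch (Op) {
      case BitCast: case PtrToInt: case IntToPtr:  // no-op address casts
      case Add: case Mul: case Shl:                // index arithmetic
      case GetElementPtr:                          // structured address math
        return true;
      default:
        return false;
      }
    }
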
1023 /// MatchOperationAddr - Given an instruction or constant expr, see if we can
1032 case Instruction::PtrToInt:
1035 case Instruction::IntToPtr:
1041 case Instruction::BitCast:
1052 case Instruction::Add: {
1074 //case Instruction::Or:
1077 case Instruction::Mul:
1078 case Instruction::Shl: {
1083 if (Opcode == Instruction::Shl)
1088 case Instruction::GetElementPtr: {
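
Line 1083 hints at how MatchOperationAddr unifies Mul and Shl: shl x, C scales by the same factor as mul x, 1<<C, so both opcodes feed one scale-matching path. A hedged sketch of that normalization, assuming the right-hand operand is a known constant:

    #include <cstdint>

    // Normalize a multiply or shift into a scale factor: 'shl x, C' is the
    // same scaling as 'mul x, 1 << C', so both feed one matching path.
    static bool getScaleFactor(bool IsShl, int64_t RHS, int64_t &Scale) {
      if (!IsShl) {
        Scale = RHS;                // plain multiply: RHS is the scale
        return true;
      }
      if (RHS < 0 || RHS > 62)
        return false;               // shift amount out of range for int64_t
      Scale = int64_t(1) << RHS;
      return true;
    }
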
1198 } else if (Instruction *I = dyn_cast<Instruction>(Addr)) {
1274 /// memory use. If we find an obviously non-foldable instruction, return true.
1276 static bool FindAllMemoryUses(Instruction *I,
1277 SmallVectorImpl<std::pair<Instruction*,unsigned> > &MemoryUses,
1278 SmallPtrSet<Instruction*, 16> &ConsideredInsts,
1280 // If we already considered this instruction, we're done.
1284 // If this is an obviously unfoldable instruction, bail out.
1315 if (FindAllMemoryUses(cast<Instruction>(U), MemoryUses, ConsideredInsts,
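
FindAllMemoryUses (1274-1315) is a recursive use-walk: follow every transitive user, record load/store users together with the operand index, and bail as soon as anything obviously unfoldable turns up. A minimal model with plain pointers and standard containers standing in for the LLVM types:

    #include <set>
    #include <utility>
    #include <vector>

    struct Inst {
      bool IsLoadOrStore = false;
      bool IsUnfoldable  = false;
      std::vector<std::pair<Inst *, unsigned>> Users;  // (user, operand #)
    };

    // Returns true if some transitive user can never fold the address.
    static bool findAllMemoryUses(
        Inst *I, std::vector<std::pair<Inst *, unsigned>> &MemoryUses,
        std::set<Inst *> &Considered) {
      if (!Considered.insert(I).second)
        return false;                    // already considered: done
      for (auto &[User, OpNo] : I->Users) {
        if (User->IsUnfoldable)
          return true;                   // obviously non-foldable: bail out
        if (User->IsLoadOrStore)
          MemoryUses.emplace_back(User, OpNo);
        else if (findAllMemoryUses(User, MemoryUses, Considered))
          return true;
      }
      return false;
    }
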
1326 /// that we know are live at the instruction already.
1334 if (!isa<Instruction>(Val) && !isa<Argument>(Val)) return true;
1343 // Check to see if this value is already used in the memory instruction's
1350 /// mode of the machine to fold the specified instruction into a load or store
1351 /// that ultimately uses it. However, the specified instruction has multiple
1371 IsProfitableToFoldIntoAddressingMode(Instruction *I, ExtAddrMode &AMBefore,
1375 // AMBefore is the addressing mode before this instruction was folded into it,
1376 // and AMAfter is the addressing mode after the instruction was folded. Get
1387 // lifetime wasn't extended by adding this instruction.
1393 // If folding this instruction (and its subexprs) didn't extend any live
1398 // If all uses of this instruction are ultimately load/store/inlineasm's,
1399 // check to see if their addressing modes will include this instruction. If
1402 SmallVector<std::pair<Instruction*,unsigned>, 16> MemoryUses;
1403 SmallPtrSet<Instruction*, 16> ConsideredInsts;
1407 // Now that we know that all uses of this instruction are part of a chain of
1410 // *actually* fold the instruction.
1411 SmallVector<Instruction*, 32> MatchedAddrModeInsts;
1413 Instruction *User = MemoryUses[i].first;
1426 // *actually* cover the shared instruction.
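
The comments at 1371-1426 spell out the profitability rule: folding a multi-use instruction duplicates it into each use's addressing mode, so it only pays off when every use is a memory op whose matched mode actually covers the shared instruction (then the original dies). A compressed model of that final decision:

    #include <algorithm>
    #include <vector>

    struct Inst;

    // MatchedInsts[i] holds the instructions that memory use i's matched
    // addressing mode would absorb. Folding the shared instruction I is
    // profitable only if every use's match actually covers I.
    static bool isProfitableToFold(
        Inst *I, const std::vector<std::vector<Inst *>> &MatchedInsts) {
      return std::all_of(MatchedInsts.begin(), MatchedInsts.end(),
                         [I](const std::vector<Inst *> &M) {
                           return std::find(M.begin(), M.end(), I) != M.end();
                         });
    }
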
1450 if (Instruction *I = dyn_cast<Instruction>(V))
1457 /// instruction selection will try to get the load or store to do as much
1464 bool CodeGenPrepare::OptimizeMemoryInst(Instruction *MemoryInst, Value *Addr,
1480 SmallVector<Instruction*, 16> AddrModeInsts;
1500 SmallVector<Instruction*, 16> NewAddrModeInsts;
1654 // If the iterator instruction was recursively deleted, start over at the
1694 bool CodeGenPrepare::MoveExtToFormExtLoad(Instruction *I) {
1730 bool CodeGenPrepare::OptimizeExtUses(Instruction *I) {
1745 if (!isa<Instruction>(Src) || DefBB != cast<Instruction>(Src)->getParent())
1751 Instruction *User = cast<Instruction>(*UI);
1765 Instruction *User = cast<Instruction>(*UI);
1775 DenseMap<BasicBlock*, Instruction*> InsertedTruncs;
1781 Instruction *User = cast<Instruction>(*UI);
1788 Instruction *&InsertedTrunc = InsertedTruncs[UserBB];
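
Lines 1775-1788 show the one-cast-per-block idiom: when sinking a trunc to each block that uses the wide value, cache the inserted instruction per block so later users in the same block share it. A stand-alone sketch, with std::map standing in for DenseMap and the creation step passed in as a callback:

    #include <functional>
    #include <map>

    struct BasicBlock {};
    struct Inst {};

    // One trunc per user block: the map slot is taken by reference, so the
    // instruction is created at most once per block and then reused.
    static Inst *getTruncFor(
        BasicBlock *UserBB, std::map<BasicBlock *, Inst *> &InsertedTruncs,
        const std::function<Inst *(BasicBlock *)> &Create) {
      Inst *&Slot = InsertedTruncs[UserBB];
      if (!Slot)
        Slot = Create(UserBB);
      return Slot;
    }
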
1852 // We have efficient codegen support for the select instruction.
1892 bool CodeGenPrepare::OptimizeInst(Instruction *I) {
1945 Instruction *NC = new BitCastInst(GEPI->getOperand(0), GEPI->getType(),
1987 Instruction *PrevNonDbgInst = NULL;
1989 Instruction *Insn = BI; ++BI;
1996 Instruction *VI = dyn_cast_or_null<Instruction>(DVI->getValue());
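
Lines 1987-1996 belong to the placement of llvm.dbg.value intrinsics: the loop walks a block remembering the last non-debug instruction, and a dbg.value whose described definition is not that instruction is a candidate for being moved next to its definition. A simplified model of that scan (the flag and pointer fields are stand-ins for the real intrinsic query):

    #include <list>

    struct Inst {
      bool IsDbgValue = false;
      Inst *DescribedDef = nullptr;  // for dbg.value: the defining instruction
    };

    // Scan a block, tracking the previous non-debug instruction; collect
    // each dbg.value whose described definition is not that instruction.
    static void findMisplacedDbgValues(std::list<Inst> &BB,
                                       std::list<Inst *> &Misplaced) {
      Inst *PrevNonDbg = nullptr;
      for (Inst &I : BB) {
        if (!I.IsDbgValue) {
          PrevNonDbg = &I;
          continue;
        }
        if (I.DescribedDef && I.DescribedDef != PrevNonDbg)
          Misplaced.push_back(&I);
      }
    }
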