//===- ValueMapper.cpp - Interface shared by lib/Transforms/Utils --------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines the MapValue function, which is shared by various parts of
// the lib/Transforms/Utils library.
//
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/Utils/ValueMapper.h"
#include "llvm/ADT/DenseSet.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DebugInfoMetadata.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/GlobalAlias.h"
#include "llvm/IR/GlobalVariable.h"
#include "llvm/IR/InlineAsm.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Operator.h"
using namespace llvm;

// Out-of-line methods to get the vtable etc. for these classes.
void ValueMapTypeRemapper::anchor() {}
void ValueMaterializer::anchor() {}

namespace {

/// A basic block used in a BlockAddress whose function body is not yet
/// materialized.
struct DelayedBasicBlock {
  BasicBlock *OldBB;
  std::unique_ptr<BasicBlock> TempBB;

  // Explicit move for MSVC.
  DelayedBasicBlock(DelayedBasicBlock &&X)
      : OldBB(std::move(X.OldBB)), TempBB(std::move(X.TempBB)) {}
  DelayedBasicBlock &operator=(DelayedBasicBlock &&X) {
    OldBB = std::move(X.OldBB);
    TempBB = std::move(X.TempBB);
    return *this;
  }

  DelayedBasicBlock(const BlockAddress &Old)
      : OldBB(Old.getBasicBlock()),
        TempBB(BasicBlock::Create(Old.getContext())) {}
};

struct WorklistEntry {
  enum EntryKind {
    MapGlobalInit,
    MapAppendingVar,
    MapGlobalAliasee,
    RemapFunction
  };
  struct GVInitTy {
    GlobalVariable *GV;
    Constant *Init;
  };
  struct AppendingGVTy {
    GlobalVariable *GV;
    Constant *InitPrefix;
  };
  struct GlobalAliaseeTy {
    GlobalAlias *GA;
    Constant *Aliasee;
  };

  unsigned Kind : 2;
  unsigned MCID : 29;
  unsigned AppendingGVIsOldCtorDtor : 1;
  unsigned AppendingGVNumNewMembers;
  union {
    GVInitTy GVInit;
    AppendingGVTy AppendingGV;
    GlobalAliaseeTy GlobalAliasee;
    Function *RemapF;
  } Data;
};

struct MappingContext {
  ValueToValueMapTy *VM;
  ValueMaterializer *Materializer = nullptr;

  /// Construct a MappingContext with a value map and materializer.
  explicit MappingContext(ValueToValueMapTy &VM,
                          ValueMaterializer *Materializer = nullptr)
      : VM(&VM), Materializer(Materializer) {}
};

class MDNodeMapper;
class Mapper {
  friend class MDNodeMapper;

#ifndef NDEBUG
  DenseSet<GlobalValue *> AlreadyScheduled;
#endif

  RemapFlags Flags;
  ValueMapTypeRemapper *TypeMapper;
  unsigned CurrentMCID = 0;
  SmallVector<MappingContext, 2> MCs;
  SmallVector<WorklistEntry, 4> Worklist;
  SmallVector<DelayedBasicBlock, 1> DelayedBBs;
  SmallVector<Constant *, 16> AppendingInits;

public:
  Mapper(ValueToValueMapTy &VM, RemapFlags Flags,
         ValueMapTypeRemapper *TypeMapper, ValueMaterializer *Materializer)
      : Flags(Flags), TypeMapper(TypeMapper),
        MCs(1, MappingContext(VM, Materializer)) {}

  /// ValueMapper should explicitly call \a flush() before destruction.
  ~Mapper() { assert(!hasWorkToDo() && "Expected to be flushed"); }

  bool hasWorkToDo() const { return !Worklist.empty(); }

  unsigned
  registerAlternateMappingContext(ValueToValueMapTy &VM,
                                  ValueMaterializer *Materializer = nullptr) {
    MCs.push_back(MappingContext(VM, Materializer));
    return MCs.size() - 1;
  }

  void addFlags(RemapFlags Flags);

  Value *mapValue(const Value *V);
  void remapInstruction(Instruction *I);
  void remapFunction(Function &F);

  Constant *mapConstant(const Constant *C) {
    return cast_or_null<Constant>(mapValue(C));
  }

  /// Map metadata.
  ///
  /// Find the mapping for MD. Guarantees that the return will be resolved
  /// (not an MDNode, or MDNode::isResolved() returns true).
  Metadata *mapMetadata(const Metadata *MD);

  void scheduleMapGlobalInitializer(GlobalVariable &GV, Constant &Init,
                                    unsigned MCID);
  void scheduleMapAppendingVariable(GlobalVariable &GV, Constant *InitPrefix,
                                    bool IsOldCtorDtor,
                                    ArrayRef<Constant *> NewMembers,
                                    unsigned MCID);
  void scheduleMapGlobalAliasee(GlobalAlias &GA, Constant &Aliasee,
                                unsigned MCID);
  void scheduleRemapFunction(Function &F, unsigned MCID);

  void flush();

private:
  void mapGlobalInitializer(GlobalVariable &GV, Constant &Init);
  void mapAppendingVariable(GlobalVariable &GV, Constant *InitPrefix,
                            bool IsOldCtorDtor,
                            ArrayRef<Constant *> NewMembers);
  void mapGlobalAliasee(GlobalAlias &GA, Constant &Aliasee);
  void remapFunction(Function &F, ValueToValueMapTy &VM);

  ValueToValueMapTy &getVM() { return *MCs[CurrentMCID].VM; }
  ValueMaterializer *getMaterializer() { return MCs[CurrentMCID].Materializer; }

  Value *mapBlockAddress(const BlockAddress &BA);

  /// Map metadata that doesn't require visiting operands.
  Optional<Metadata *> mapSimpleMetadata(const Metadata *MD);

  Metadata *mapToMetadata(const Metadata *Key, Metadata *Val);
  Metadata *mapToSelf(const Metadata *MD);
};

class MDNodeMapper {
  Mapper &M;

  /// Data about a node in \a UniquedGraph.
  struct Data {
    bool HasChanged = false;
    unsigned ID = ~0u;
    TempMDNode Placeholder;

    Data() {}
    Data(Data &&X)
        : HasChanged(std::move(X.HasChanged)), ID(std::move(X.ID)),
          Placeholder(std::move(X.Placeholder)) {}
    Data &operator=(Data &&X) {
      HasChanged = std::move(X.HasChanged);
      ID = std::move(X.ID);
      Placeholder = std::move(X.Placeholder);
      return *this;
    }
  };

  /// A graph of uniqued nodes.
  struct UniquedGraph {
    SmallDenseMap<const Metadata *, Data, 32> Info; // Node properties.
    SmallVector<MDNode *, 16> POT;                  // Post-order traversal.

    /// Propagate changed operands through the post-order traversal.
    ///
    /// Iteratively update \a Data::HasChanged for each node based on \a
    /// Data::HasChanged of its operands, until fixed point.
    void propagateChanges();

    /// Get a forward reference to a node to use as an operand.
    Metadata &getFwdReference(MDNode &Op);
  };

  /// Worklist of distinct nodes whose operands need to be remapped.
  SmallVector<MDNode *, 16> DistinctWorklist;

  // Storage for a UniquedGraph.
  SmallDenseMap<const Metadata *, Data, 32> InfoStorage;
  SmallVector<MDNode *, 16> POTStorage;

public:
  MDNodeMapper(Mapper &M) : M(M) {}

  /// Map a metadata node (and its transitive operands).
  ///
  /// Map all the (unmapped) nodes in the subgraph under \c N. The iterative
  /// algorithm handles distinct nodes and uniqued node subgraphs using
  /// different strategies.
  ///
  /// Distinct nodes are immediately mapped and added to \a DistinctWorklist
  /// using \a mapDistinctNode(). Their mapping can always be computed
  /// immediately without visiting operands, even if their operands change.
  ///
  /// The mapping for uniqued nodes depends on whether their operands change.
  /// \a mapTopLevelUniquedNode() traverses the transitive uniqued subgraph of
  /// a node to calculate uniqued node mappings in bulk. Distinct leaves are
  /// added to \a DistinctWorklist with \a mapDistinctNode().
  ///
  /// After mapping \c N itself, this function remaps the operands of the
  /// distinct nodes in \a DistinctWorklist until the entire subgraph under \c
  /// N has been mapped.
  Metadata *map(const MDNode &N);

private:
  /// Map a top-level uniqued node and the uniqued subgraph underneath it.
  ///
  /// This builds up a post-order traversal of the (unmapped) uniqued subgraph
  /// underneath \c FirstN and calculates the nodes' mapping. Each node uses
  /// the identity mapping (\a Mapper::mapToSelf()) as long as all of its
  /// operands use the identity mapping.
  ///
  /// The algorithm works as follows:
  ///
  ///  1. \a createPOT(): traverse the uniqued subgraph under \c FirstN and
  ///     save the post-order traversal in the given \a UniquedGraph, tracking
  ///     whether each node's operands change.
  ///
  ///  2. \a UniquedGraph::propagateChanges(): propagate changed operands
  ///     through the \a UniquedGraph until fixed point, following the rule
  ///     that if a node changes, any node that references it must also change.
  ///
  ///  3. \a mapNodesInPOT(): map the uniqued nodes, creating new uniqued nodes
  ///     (referencing new operands) where necessary.
  Metadata *mapTopLevelUniquedNode(const MDNode &FirstN);

  /// Try to map the operand of an \a MDNode.
  ///
  /// If \c Op is already mapped, return the mapping. If it's not an \a
  /// MDNode, compute and return the mapping. If it's a distinct \a MDNode,
  /// return the result of \a mapDistinctNode().
  ///
  /// \return None if \c Op is an unmapped uniqued \a MDNode.
  /// \post getMappedOp(Op) only returns None if this returns None.
  Optional<Metadata *> tryToMapOperand(const Metadata *Op);

  /// Map a distinct node.
  ///
  /// Return the mapping for the distinct node \c N, saving the result in \a
  /// DistinctWorklist for later remapping.
  ///
  /// \pre \c N is not yet mapped.
  /// \pre \c N.isDistinct().
  MDNode *mapDistinctNode(const MDNode &N);

  /// Get a previously mapped node.
  Optional<Metadata *> getMappedOp(const Metadata *Op) const;

  /// Create a post-order traversal of an unmapped uniqued node subgraph.
  ///
  /// This traverses the metadata graph deeply enough to map \c FirstN. It
  /// uses \a tryToMapOperand() (via \a Mapper::mapSimpleMetadata()), so any
  /// metadata that has already been mapped will not be part of the POT.
  ///
  /// Each node that has a changed operand from outside the graph (e.g., a
  /// distinct node, an already-mapped uniqued node, or \a ConstantAsMetadata)
  /// is marked with \a Data::HasChanged.
  ///
  /// \return \c true if any nodes in \c G have \a Data::HasChanged.
  /// \post \c G.POT is a post-order traversal ending with \c FirstN.
  /// \post \a Data::HasChanged in \c G.Info indicates whether any node needs
  /// to change because of operands outside the graph.
  bool createPOT(UniquedGraph &G, const MDNode &FirstN);

  /// Visit the operands of a uniqued node in the POT.
  ///
  /// Visit the operands in the range from \c I to \c E, returning the first
  /// uniqued node we find that isn't yet in \c G. \c I is always advanced to
  /// where to continue the loop through the operands.
  ///
  /// This sets \c HasChanged if any of the visited operands change.
  MDNode *visitOperands(UniquedGraph &G, MDNode::op_iterator &I,
                        MDNode::op_iterator E, bool &HasChanged);

  /// Map all the nodes in the given uniqued graph.
  ///
  /// This visits all the nodes in \c G in post-order, using the identity
  /// mapping or creating a new node depending on \a Data::HasChanged.
  ///
  /// \pre \a getMappedOp() returns None for nodes in \c G, but not for any of
  /// their operands outside of \c G.
  /// \pre \a Data::HasChanged is true for a node in \c G iff any of its
  /// operands have changed.
  /// \post \a getMappedOp() returns the mapped node for every node in \c G.
  void mapNodesInPOT(UniquedGraph &G);

  /// Remap a node's operands using the given functor.
  ///
  /// Iterate through the operands of \c N and update them in place using \c
  /// mapOperand.
  ///
  /// \pre N.isDistinct() or N.isTemporary().
  template <class OperandMapper>
  void remapOperands(MDNode &N, OperandMapper mapOperand);
};

} // end namespace

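// Illustrative sketch (editorial, not part of the original file): mapValue()
// below consults the current MappingContext's ValueMaterializer before falling
// back to the identity mapping for globals. Assuming ValueMaterializer exposes
// a virtual materialize(Value *) hook, as used by Mapper::mapValue(), a
// client-side materializer might look roughly like this; LazyGlobalMaterializer
// and getOrCreateDeclarationIn() are hypothetical names:
//
//   struct LazyGlobalMaterializer final : public ValueMaterializer {
//     Module &DstM; // hypothetical destination module
//     LazyGlobalMaterializer(Module &DstM) : DstM(DstM) {}
//     Value *materialize(Value *V) override {
//       // Materialize a declaration for globals on first use; returning
//       // nullptr lets the default mapping logic handle everything else.
//       if (auto *GV = dyn_cast<GlobalValue>(V))
//         return getOrCreateDeclarationIn(DstM, *GV);
//       return nullptr;
//     }
//   };
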
Value *Mapper::mapValue(const Value *V) {
  ValueToValueMapTy::iterator I = getVM().find(V);

  // If the value already exists in the map, use it.
  if (I != getVM().end()) {
    assert(I->second && "Unexpected null mapping");
    return I->second;
  }

  // If we have a materializer and it can materialize a value, use that.
  if (auto *Materializer = getMaterializer()) {
    if (Value *NewV = Materializer->materialize(const_cast<Value *>(V))) {
      getVM()[V] = NewV;
      return NewV;
    }
  }

  // Global values do not need to be seeded into the VM if they
  // are using the identity mapping.
  if (isa<GlobalValue>(V)) {
    if (Flags & RF_NullMapMissingGlobalValues)
      return nullptr;
    return getVM()[V] = const_cast<Value *>(V);
  }

  if (const InlineAsm *IA = dyn_cast<InlineAsm>(V)) {
    // Inline asm may need *type* remapping.
    FunctionType *NewTy = IA->getFunctionType();
    if (TypeMapper) {
      NewTy = cast<FunctionType>(TypeMapper->remapType(NewTy));

      if (NewTy != IA->getFunctionType())
        V = InlineAsm::get(NewTy, IA->getAsmString(), IA->getConstraintString(),
                           IA->hasSideEffects(), IA->isAlignStack());
    }

    return getVM()[V] = const_cast<Value *>(V);
  }

  if (const auto *MDV = dyn_cast<MetadataAsValue>(V)) {
    const Metadata *MD = MDV->getMetadata();

    if (auto *LAM = dyn_cast<LocalAsMetadata>(MD)) {
      // Look through to grab the local value.
      if (Value *LV = mapValue(LAM->getValue())) {
        if (V == LAM->getValue())
          return const_cast<Value *>(V);
        return MetadataAsValue::get(V->getContext(), ValueAsMetadata::get(LV));
      }

      // FIXME: always return nullptr once Verifier::verifyDominatesUse()
      // ensures metadata operands only reference defined SSA values.
      return (Flags & RF_IgnoreMissingLocals)
                 ? nullptr
                 : MetadataAsValue::get(V->getContext(),
                                        MDTuple::get(V->getContext(), None));
    }

    // If this is module-level metadata and we know that nothing at the module
    // level is changing, then use an identity mapping.
    if (Flags & RF_NoModuleLevelChanges)
      return getVM()[V] = const_cast<Value *>(V);

    // Map the metadata and turn it into a value.
    auto *MappedMD = mapMetadata(MD);
    if (MD == MappedMD)
      return getVM()[V] = const_cast<Value *>(V);
    return getVM()[V] = MetadataAsValue::get(V->getContext(), MappedMD);
  }

  // Okay, this either must be a constant (which may or may not be mappable) or
  // is something that is not in the mapping table.
  Constant *C = const_cast<Constant *>(dyn_cast<Constant>(V));
  if (!C)
    return nullptr;

  if (BlockAddress *BA = dyn_cast<BlockAddress>(C))
    return mapBlockAddress(*BA);

  auto mapValueOrNull = [this](Value *V) {
    auto Mapped = mapValue(V);
    assert((Mapped || (Flags & RF_NullMapMissingGlobalValues)) &&
           "Unexpected null mapping for constant operand without "
           "NullMapMissingGlobalValues flag");
    return Mapped;
  };

  // Otherwise, we have some other constant to remap. Start by checking to see
  // if all operands have an identity remapping.
  unsigned OpNo = 0, NumOperands = C->getNumOperands();
  Value *Mapped = nullptr;
  for (; OpNo != NumOperands; ++OpNo) {
    Value *Op = C->getOperand(OpNo);
    Mapped = mapValueOrNull(Op);
    if (!Mapped)
      return nullptr;
    if (Mapped != Op)
      break;
  }

  // See if the type mapper wants to remap the type as well.
  Type *NewTy = C->getType();
  if (TypeMapper)
    NewTy = TypeMapper->remapType(NewTy);

  // If the result type and all operands match up, then just insert an identity
  // mapping.
  if (OpNo == NumOperands && NewTy == C->getType())
    return getVM()[V] = C;

  // Okay, we need to create a new constant. We've already processed some or
  // all of the operands; set them all up now.
  SmallVector<Constant *, 8> Ops;
  Ops.reserve(NumOperands);
  for (unsigned j = 0; j != OpNo; ++j)
    Ops.push_back(cast<Constant>(C->getOperand(j)));

  // If one of the operands mismatched, push it and the remaining mapped
  // operands.
  if (OpNo != NumOperands) {
    Ops.push_back(cast<Constant>(Mapped));

    // Map the rest of the operands that aren't processed yet.
    for (++OpNo; OpNo != NumOperands; ++OpNo) {
      Mapped = mapValueOrNull(C->getOperand(OpNo));
      if (!Mapped)
        return nullptr;
      Ops.push_back(cast<Constant>(Mapped));
    }
  }
  Type *NewSrcTy = nullptr;
  if (TypeMapper)
    if (auto *GEPO = dyn_cast<GEPOperator>(C))
      NewSrcTy = TypeMapper->remapType(GEPO->getSourceElementType());

  if (ConstantExpr *CE = dyn_cast<ConstantExpr>(C))
    return getVM()[V] = CE->getWithOperands(Ops, NewTy, false, NewSrcTy);
  if (isa<ConstantArray>(C))
    return getVM()[V] = ConstantArray::get(cast<ArrayType>(NewTy), Ops);
  if (isa<ConstantStruct>(C))
    return getVM()[V] = ConstantStruct::get(cast<StructType>(NewTy), Ops);
  if (isa<ConstantVector>(C))
    return getVM()[V] = ConstantVector::get(Ops);
  // If this is a no-operand constant, it must be because the type was remapped.
  if (isa<UndefValue>(C))
    return getVM()[V] = UndefValue::get(NewTy);
  if (isa<ConstantAggregateZero>(C))
    return getVM()[V] = ConstantAggregateZero::get(NewTy);
  assert(isa<ConstantPointerNull>(C));
  return getVM()[V] = ConstantPointerNull::get(cast<PointerType>(NewTy));
}

Value *Mapper::mapBlockAddress(const BlockAddress &BA) {
  Function *F = cast<Function>(mapValue(BA.getFunction()));

  // F may not have materialized its initializer. In that case, create a
  // dummy basic block for now, and replace it once we've materialized all
  // the initializers.
  BasicBlock *BB;
  if (F->empty()) {
    DelayedBBs.push_back(DelayedBasicBlock(BA));
    BB = DelayedBBs.back().TempBB.get();
  } else {
    BB = cast_or_null<BasicBlock>(mapValue(BA.getBasicBlock()));
  }

  return getVM()[&BA] = BlockAddress::get(F, BB ? BB : BA.getBasicBlock());
}

Metadata *Mapper::mapToMetadata(const Metadata *Key, Metadata *Val) {
  getVM().MD()[Key].reset(Val);
  return Val;
}

Metadata *Mapper::mapToSelf(const Metadata *MD) {
  return mapToMetadata(MD, const_cast<Metadata *>(MD));
}

Optional<Metadata *> MDNodeMapper::tryToMapOperand(const Metadata *Op) {
  if (!Op)
    return nullptr;

  if (Optional<Metadata *> MappedOp = M.mapSimpleMetadata(Op)) {
#ifndef NDEBUG
    if (auto *CMD = dyn_cast<ConstantAsMetadata>(Op))
      assert((!*MappedOp || M.getVM().count(CMD->getValue()) ||
              M.getVM().getMappedMD(Op)) &&
             "Expected Value to be memoized");
    else
      assert((isa<MDString>(Op) || M.getVM().getMappedMD(Op)) &&
             "Expected result to be memoized");
#endif
    return *MappedOp;
  }

  const MDNode &N = *cast<MDNode>(Op);
  if (N.isDistinct())
    return mapDistinctNode(N);
  return None;
}

MDNode *MDNodeMapper::mapDistinctNode(const MDNode &N) {
  assert(N.isDistinct() && "Expected a distinct node");
  assert(!M.getVM().getMappedMD(&N) && "Expected an unmapped node");
  DistinctWorklist.push_back(cast<MDNode>(
      (M.Flags & RF_MoveDistinctMDs)
          ? M.mapToSelf(&N)
          : M.mapToMetadata(&N, MDNode::replaceWithDistinct(N.clone()))));
  return DistinctWorklist.back();
}

static ConstantAsMetadata *wrapConstantAsMetadata(const ConstantAsMetadata &CMD,
                                                  Value *MappedV) {
  if (CMD.getValue() == MappedV)
    return const_cast<ConstantAsMetadata *>(&CMD);
  return MappedV ? ConstantAsMetadata::getConstant(MappedV) : nullptr;
}

Optional<Metadata *> MDNodeMapper::getMappedOp(const Metadata *Op) const {
  if (!Op)
    return nullptr;

  if (Optional<Metadata *> MappedOp = M.getVM().getMappedMD(Op))
    return *MappedOp;

  if (isa<MDString>(Op))
    return const_cast<Metadata *>(Op);

  if (auto *CMD = dyn_cast<ConstantAsMetadata>(Op))
    return wrapConstantAsMetadata(*CMD, M.getVM().lookup(CMD->getValue()));

  return None;
}

Metadata &MDNodeMapper::UniquedGraph::getFwdReference(MDNode &Op) {
  auto Where = Info.find(&Op);
  assert(Where != Info.end() && "Expected a valid reference");

  auto &OpD = Where->second;
  if (!OpD.HasChanged)
    return Op;

  // Lazily construct a temporary node.
  if (!OpD.Placeholder)
    OpD.Placeholder = Op.clone();

  return *OpD.Placeholder;
}

template <class OperandMapper>
void MDNodeMapper::remapOperands(MDNode &N, OperandMapper mapOperand) {
  assert(!N.isUniqued() && "Expected distinct or temporary nodes");
  for (unsigned I = 0, E = N.getNumOperands(); I != E; ++I) {
    Metadata *Old = N.getOperand(I);
    Metadata *New = mapOperand(Old);

    if (Old != New)
      N.replaceOperandWith(I, New);
  }
}

namespace {
/// An entry in the worklist for the post-order traversal.
struct POTWorklistEntry {
  MDNode *N;              ///< Current node.
  MDNode::op_iterator Op; ///< Current operand of \c N.

  /// Keep a flag of whether operands have changed in the worklist to avoid
  /// hitting the map in \a UniquedGraph.
  bool HasChanged = false;

  POTWorklistEntry(MDNode &N) : N(&N), Op(N.op_begin()) {}
};
} // end namespace

bool MDNodeMapper::createPOT(UniquedGraph &G, const MDNode &FirstN) {
  assert(G.Info.empty() && "Expected a fresh traversal");
  assert(FirstN.isUniqued() && "Expected uniqued node in POT");

  // Construct a post-order traversal of the uniqued subgraph under FirstN.
  bool AnyChanges = false;
  SmallVector<POTWorklistEntry, 16> Worklist;
  Worklist.push_back(POTWorklistEntry(const_cast<MDNode &>(FirstN)));
  (void)G.Info[&FirstN];
  while (!Worklist.empty()) {
    // Start or continue the traversal through this node's operands.
    auto &WE = Worklist.back();
    if (MDNode *N = visitOperands(G, WE.Op, WE.N->op_end(), WE.HasChanged)) {
      // Push a new node to traverse first.
      Worklist.push_back(POTWorklistEntry(*N));
      continue;
    }

    // Push the node onto the POT.
    assert(WE.N->isUniqued() && "Expected only uniqued nodes");
    assert(WE.Op == WE.N->op_end() && "Expected to visit all operands");
    auto &D = G.Info[WE.N];
    AnyChanges |= D.HasChanged = WE.HasChanged;
    D.ID = G.POT.size();
    G.POT.push_back(WE.N);

    // Pop the node off the worklist.
    Worklist.pop_back();
  }
  return AnyChanges;
}

MDNode *MDNodeMapper::visitOperands(UniquedGraph &G, MDNode::op_iterator &I,
                                    MDNode::op_iterator E, bool &HasChanged) {
  while (I != E) {
    Metadata *Op = *I++; // Increment even on early return.
    if (Optional<Metadata *> MappedOp = tryToMapOperand(Op)) {
      // Check if the operand changes.
      HasChanged |= Op != *MappedOp;
      continue;
    }

    // A uniqued metadata node.
    MDNode &OpN = *cast<MDNode>(Op);
    assert(OpN.isUniqued() &&
           "Only uniqued operands cannot be mapped immediately");
    if (G.Info.insert(std::make_pair(&OpN, Data())).second)
      return &OpN; // This is a new one. Return it.
  }
  return nullptr;
}

void MDNodeMapper::UniquedGraph::propagateChanges() {
  bool AnyChanges;
  do {
    AnyChanges = false;
    for (MDNode *N : POT) {
      auto &D = Info[N];
      if (D.HasChanged)
        continue;

      if (!llvm::any_of(N->operands(), [&](const Metadata *Op) {
            auto Where = Info.find(Op);
            return Where != Info.end() && Where->second.HasChanged;
          }))
        continue;

      AnyChanges = D.HasChanged = true;
    }
  } while (AnyChanges);
}

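// Worked example (editorial, not part of the original file): for a uniqued
// chain !0 -> !1 -> !2 where only !2 directly references remapped metadata,
// createPOT() records the POT [!2, !1, !0] with only !2 marked HasChanged;
// propagateChanges() then marks !1 and finally !0, since each references a
// changed node; mapNodesInPOT() below clones all three, whereas any uniqued
// node whose operands are untouched is simply identity-mapped.
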
void MDNodeMapper::mapNodesInPOT(UniquedGraph &G) {
  // Construct uniqued nodes, building forward references as necessary.
  SmallVector<MDNode *, 16> CyclicNodes;
  for (auto *N : G.POT) {
    auto &D = G.Info[N];
    if (!D.HasChanged) {
      // The node hasn't changed.
      M.mapToSelf(N);
      continue;
    }

    // Remember whether this node had a placeholder.
    bool HadPlaceholder(D.Placeholder);

    // Clone the uniqued node and remap the operands.
    TempMDNode ClonedN = D.Placeholder ? std::move(D.Placeholder) : N->clone();
    remapOperands(*ClonedN, [this, &D, &G](Metadata *Old) {
      if (Optional<Metadata *> MappedOp = getMappedOp(Old))
        return *MappedOp;
      assert(G.Info[Old].ID > D.ID && "Expected a forward reference");
      return &G.getFwdReference(*cast<MDNode>(Old));
    });

    auto *NewN = MDNode::replaceWithUniqued(std::move(ClonedN));
    M.mapToMetadata(N, NewN);

    // Nodes that were referenced out of order in the POT are involved in a
    // uniquing cycle.
    if (HadPlaceholder)
      CyclicNodes.push_back(NewN);
  }

  // Resolve cycles.
  for (auto *N : CyclicNodes)
    if (!N->isResolved())
      N->resolveCycles();
}

Metadata *MDNodeMapper::map(const MDNode &N) {
  assert(DistinctWorklist.empty() && "MDNodeMapper::map is not recursive");
  assert(!(M.Flags & RF_NoModuleLevelChanges) &&
         "MDNodeMapper::map assumes module-level changes");

  // Require resolved nodes whenever metadata might be remapped.
  assert(N.isResolved() && "Unexpected unresolved node");

  Metadata *MappedN =
      N.isUniqued() ? mapTopLevelUniquedNode(N) : mapDistinctNode(N);
  while (!DistinctWorklist.empty())
    remapOperands(*DistinctWorklist.pop_back_val(), [this](Metadata *Old) {
      if (Optional<Metadata *> MappedOp = tryToMapOperand(Old))
        return *MappedOp;
      return mapTopLevelUniquedNode(*cast<MDNode>(Old));
    });
  return MappedN;
}

Metadata *MDNodeMapper::mapTopLevelUniquedNode(const MDNode &FirstN) {
  assert(FirstN.isUniqued() && "Expected uniqued node");

  // Create a post-order traversal of uniqued nodes under FirstN.
  UniquedGraph G;
  if (!createPOT(G, FirstN)) {
    // Return early if no nodes have changed.
    for (const MDNode *N : G.POT)
      M.mapToSelf(N);
    return &const_cast<MDNode &>(FirstN);
  }

  // Update graph with all nodes that have changed.
  G.propagateChanges();

  // Map all the nodes in the graph.
  mapNodesInPOT(G);

  // Return the original node, remapped.
  return *getMappedOp(&FirstN);
}

namespace {

struct MapMetadataDisabler {
  ValueToValueMapTy &VM;

  MapMetadataDisabler(ValueToValueMapTy &VM) : VM(VM) {
    VM.disableMapMetadata();
  }
  ~MapMetadataDisabler() { VM.enableMapMetadata(); }
};

} // end namespace

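// Illustrative sketch (editorial, not part of the original file):
// mapSimpleMetadata() below handles metadata that never needs an MDNodeMapper
// traversal. For example, under RF_NoModuleLevelChanges every metadata node is
// identity mapped, so a client cloning within a single module can expect
// something like the following, where N is a resolved MDNode already owned by
// the module:
//
//   ValueToValueMapTy VM;
//   ValueMapper IdentityMDMapper(VM, RF_NoModuleLevelChanges,
//                                /*TypeMapper=*/nullptr,
//                                /*Materializer=*/nullptr);
//   MDNode *Mapped = IdentityMDMapper.mapMDNode(*N); // Mapped == N
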
Optional<Metadata *> Mapper::mapSimpleMetadata(const Metadata *MD) {
  // If the value already exists in the map, use it.
  if (Optional<Metadata *> NewMD = getVM().getMappedMD(MD))
    return *NewMD;

  if (isa<MDString>(MD))
    return const_cast<Metadata *>(MD);

  // This is module-level metadata. If nothing at the module level is
  // changing, use an identity mapping.
  if ((Flags & RF_NoModuleLevelChanges))
    return const_cast<Metadata *>(MD);

  if (auto *CMD = dyn_cast<ConstantAsMetadata>(MD)) {
    // Disallow recursion into metadata mapping through mapValue.
    MapMetadataDisabler MMD(getVM());

    // Don't memoize ConstantAsMetadata. Instead of lasting until the
    // LLVMContext is destroyed, they can be deleted when the GlobalValue they
    // reference is destructed. These aren't super common, so the extra
    // indirection isn't that expensive.
    return wrapConstantAsMetadata(*CMD, mapValue(CMD->getValue()));
  }

  assert(isa<MDNode>(MD) && "Expected a metadata node");

  return None;
}

Metadata *Mapper::mapMetadata(const Metadata *MD) {
  assert(MD && "Expected valid metadata");
  assert(!isa<LocalAsMetadata>(MD) && "Unexpected local metadata");

  if (Optional<Metadata *> NewMD = mapSimpleMetadata(MD))
    return *NewMD;

  return MDNodeMapper(*this).map(*cast<MDNode>(MD));
}

void Mapper::flush() {
  // Flush out the worklist of global values.
  while (!Worklist.empty()) {
    WorklistEntry E = Worklist.pop_back_val();
    CurrentMCID = E.MCID;
    switch (E.Kind) {
    case WorklistEntry::MapGlobalInit:
      E.Data.GVInit.GV->setInitializer(mapConstant(E.Data.GVInit.Init));
      break;
    case WorklistEntry::MapAppendingVar: {
      unsigned PrefixSize = AppendingInits.size() - E.AppendingGVNumNewMembers;
      mapAppendingVariable(*E.Data.AppendingGV.GV,
                           E.Data.AppendingGV.InitPrefix,
                           E.AppendingGVIsOldCtorDtor,
                           makeArrayRef(AppendingInits).slice(PrefixSize));
      AppendingInits.resize(PrefixSize);
      break;
    }
    case WorklistEntry::MapGlobalAliasee:
      E.Data.GlobalAliasee.GA->setAliasee(
          mapConstant(E.Data.GlobalAliasee.Aliasee));
      break;
    case WorklistEntry::RemapFunction:
      remapFunction(*E.Data.RemapF);
      break;
    }
  }
  CurrentMCID = 0;

  // Finish logic for block addresses now that all global values have been
  // handled.
  while (!DelayedBBs.empty()) {
    DelayedBasicBlock DBB = DelayedBBs.pop_back_val();
    BasicBlock *BB = cast_or_null<BasicBlock>(mapValue(DBB.OldBB));
    DBB.TempBB->replaceAllUsesWith(BB ? BB : DBB.OldBB);
  }
}
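
// Note (editorial): the schedule*() entry points defined further below only
// record WorklistEntry items; flush() above performs the deferred mapping,
// switching CurrentMCID to each entry's recorded mapping context before
// running it. The FlushingMapper wrapper near the end of this file flushes any
// pending work when a public ValueMapper operation returns, and ~Mapper()
// asserts that nothing is left pending at destruction.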

void Mapper::remapInstruction(Instruction *I) {
  // Remap operands.
  for (Use &Op : I->operands()) {
    Value *V = mapValue(Op);
    // If we aren't ignoring missing entries, assert that something happened.
    if (V)
      Op = V;
    else
      assert((Flags & RF_IgnoreMissingLocals) &&
             "Referenced value not in value map!");
  }

  // Remap phi nodes' incoming blocks.
  if (PHINode *PN = dyn_cast<PHINode>(I)) {
    for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i) {
      Value *V = mapValue(PN->getIncomingBlock(i));
      // If we aren't ignoring missing entries, assert that something happened.
      if (V)
        PN->setIncomingBlock(i, cast<BasicBlock>(V));
      else
        assert((Flags & RF_IgnoreMissingLocals) &&
               "Referenced block not in value map!");
    }
  }

  // Remap attached metadata.
  SmallVector<std::pair<unsigned, MDNode *>, 4> MDs;
  I->getAllMetadata(MDs);
  for (const auto &MI : MDs) {
    MDNode *Old = MI.second;
    MDNode *New = cast_or_null<MDNode>(mapMetadata(Old));
    if (New != Old)
      I->setMetadata(MI.first, New);
  }

  if (!TypeMapper)
    return;

  // If the instruction's type is being remapped, do so now.
  if (auto CS = CallSite(I)) {
    SmallVector<Type *, 3> Tys;
    FunctionType *FTy = CS.getFunctionType();
    Tys.reserve(FTy->getNumParams());
    for (Type *Ty : FTy->params())
      Tys.push_back(TypeMapper->remapType(Ty));
    CS.mutateFunctionType(FunctionType::get(
        TypeMapper->remapType(I->getType()), Tys, FTy->isVarArg()));
    return;
  }
  if (auto *AI = dyn_cast<AllocaInst>(I))
    AI->setAllocatedType(TypeMapper->remapType(AI->getAllocatedType()));
  if (auto *GEP = dyn_cast<GetElementPtrInst>(I)) {
    GEP->setSourceElementType(
        TypeMapper->remapType(GEP->getSourceElementType()));
    GEP->setResultElementType(
        TypeMapper->remapType(GEP->getResultElementType()));
  }
  I->mutateType(TypeMapper->remapType(I->getType()));
}

void Mapper::remapFunction(Function &F) {
  // Remap the operands.
  for (Use &Op : F.operands())
    if (Op)
      Op = mapValue(Op);

  // Remap the metadata attachments.
  SmallVector<std::pair<unsigned, MDNode *>, 8> MDs;
  F.getAllMetadata(MDs);
  F.clearMetadata();
  for (const auto &I : MDs)
    F.addMetadata(I.first, *cast<MDNode>(mapMetadata(I.second)));

  // Remap the argument types.
  if (TypeMapper)
    for (Argument &A : F.args())
      A.mutateType(TypeMapper->remapType(A.getType()));

  // Remap the instructions.
  for (BasicBlock &BB : F)
    for (Instruction &I : BB)
      remapInstruction(&I);
}
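
// Illustrative sketch (editorial, not part of the original file):
// remapInstruction() and remapFunction() above only rewrite types when a
// ValueMapTypeRemapper is installed. Assuming ValueMapTypeRemapper exposes a
// virtual remapType(Type *) hook, as used throughout this file, a minimal
// remapper that swaps a single struct type (ignoring types derived from it)
// might look like this; OneTypeRemapper is a hypothetical name:
//
//   struct OneTypeRemapper final : public ValueMapTypeRemapper {
//     Type *From, *To;
//     OneTypeRemapper(Type *From, Type *To) : From(From), To(To) {}
//     Type *remapType(Type *SrcTy) override {
//       return SrcTy == From ? To : SrcTy;
//     }
//   };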

void Mapper::mapAppendingVariable(GlobalVariable &GV, Constant *InitPrefix,
                                  bool IsOldCtorDtor,
                                  ArrayRef<Constant *> NewMembers) {
  SmallVector<Constant *, 16> Elements;
  if (InitPrefix) {
    unsigned NumElements =
        cast<ArrayType>(InitPrefix->getType())->getNumElements();
    for (unsigned I = 0; I != NumElements; ++I)
      Elements.push_back(InitPrefix->getAggregateElement(I));
  }

  PointerType *VoidPtrTy;
  Type *EltTy;
  if (IsOldCtorDtor) {
    // FIXME: This upgrade is done during linking to support the C API. See
    // also IRLinker::linkAppendingVarProto() in IRMover.cpp.
    VoidPtrTy = Type::getInt8Ty(GV.getContext())->getPointerTo();
    auto &ST = *cast<StructType>(NewMembers.front()->getType());
    Type *Tys[3] = {ST.getElementType(0), ST.getElementType(1), VoidPtrTy};
    EltTy = StructType::get(GV.getContext(), Tys, false);
  }

  for (auto *V : NewMembers) {
    Constant *NewV;
    if (IsOldCtorDtor) {
      auto *S = cast<ConstantStruct>(V);
      auto *E1 = mapValue(S->getOperand(0));
      auto *E2 = mapValue(S->getOperand(1));
      Value *Null = Constant::getNullValue(VoidPtrTy);
      NewV =
          ConstantStruct::get(cast<StructType>(EltTy), E1, E2, Null, nullptr);
    } else {
      NewV = cast_or_null<Constant>(mapValue(V));
    }
    Elements.push_back(NewV);
  }

  GV.setInitializer(ConstantArray::get(
      cast<ArrayType>(GV.getType()->getElementType()), Elements));
}

void Mapper::scheduleMapGlobalInitializer(GlobalVariable &GV, Constant &Init,
                                          unsigned MCID) {
  assert(AlreadyScheduled.insert(&GV).second && "Should not reschedule");
  assert(MCID < MCs.size() && "Invalid mapping context");

  WorklistEntry WE;
  WE.Kind = WorklistEntry::MapGlobalInit;
  WE.MCID = MCID;
  WE.Data.GVInit.GV = &GV;
  WE.Data.GVInit.Init = &Init;
  Worklist.push_back(WE);
}

void Mapper::scheduleMapAppendingVariable(GlobalVariable &GV,
                                          Constant *InitPrefix,
                                          bool IsOldCtorDtor,
                                          ArrayRef<Constant *> NewMembers,
                                          unsigned MCID) {
  assert(AlreadyScheduled.insert(&GV).second && "Should not reschedule");
  assert(MCID < MCs.size() && "Invalid mapping context");

  WorklistEntry WE;
  WE.Kind = WorklistEntry::MapAppendingVar;
  WE.MCID = MCID;
  WE.Data.AppendingGV.GV = &GV;
  WE.Data.AppendingGV.InitPrefix = InitPrefix;
  WE.AppendingGVIsOldCtorDtor = IsOldCtorDtor;
  WE.AppendingGVNumNewMembers = NewMembers.size();
  Worklist.push_back(WE);
  AppendingInits.append(NewMembers.begin(), NewMembers.end());
}

void Mapper::scheduleMapGlobalAliasee(GlobalAlias &GA, Constant &Aliasee,
                                      unsigned MCID) {
  assert(AlreadyScheduled.insert(&GA).second && "Should not reschedule");
  assert(MCID < MCs.size() && "Invalid mapping context");

  WorklistEntry WE;
  WE.Kind = WorklistEntry::MapGlobalAliasee;
  WE.MCID = MCID;
  WE.Data.GlobalAliasee.GA = &GA;
  WE.Data.GlobalAliasee.Aliasee = &Aliasee;
  Worklist.push_back(WE);
}

void Mapper::scheduleRemapFunction(Function &F, unsigned MCID) {
  assert(AlreadyScheduled.insert(&F).second && "Should not reschedule");
  assert(MCID < MCs.size() && "Invalid mapping context");

  WorklistEntry WE;
  WE.Kind = WorklistEntry::RemapFunction;
  WE.MCID = MCID;
  WE.Data.RemapF = &F;
  Worklist.push_back(WE);
}

void Mapper::addFlags(RemapFlags Flags) {
  assert(!hasWorkToDo() && "Expected to have flushed the worklist");
  this->Flags = this->Flags | Flags;
}

static Mapper *getAsMapper(void *pImpl) {
  return reinterpret_cast<Mapper *>(pImpl);
}

namespace {

class FlushingMapper {
  Mapper &M;

public:
  explicit FlushingMapper(void *pImpl) : M(*getAsMapper(pImpl)) {
    assert(!M.hasWorkToDo() && "Expected to be flushed");
  }
  ~FlushingMapper() { M.flush(); }
  Mapper *operator->() const { return &M; }
};

} // end namespace

ValueMapper::ValueMapper(ValueToValueMapTy &VM, RemapFlags Flags,
                         ValueMapTypeRemapper *TypeMapper,
                         ValueMaterializer *Materializer)
    : pImpl(new Mapper(VM, Flags, TypeMapper, Materializer)) {}

ValueMapper::~ValueMapper() { delete getAsMapper(pImpl); }

unsigned
ValueMapper::registerAlternateMappingContext(ValueToValueMapTy &VM,
                                             ValueMaterializer *Materializer) {
  return getAsMapper(pImpl)->registerAlternateMappingContext(VM, Materializer);
}

void ValueMapper::addFlags(RemapFlags Flags) {
  FlushingMapper(pImpl)->addFlags(Flags);
}

Value *ValueMapper::mapValue(const Value &V) {
  return FlushingMapper(pImpl)->mapValue(&V);
}

Constant *ValueMapper::mapConstant(const Constant &C) {
  return cast_or_null<Constant>(mapValue(C));
}

Metadata *ValueMapper::mapMetadata(const Metadata &MD) {
  return FlushingMapper(pImpl)->mapMetadata(&MD);
}

MDNode *ValueMapper::mapMDNode(const MDNode &N) {
  return cast_or_null<MDNode>(mapMetadata(N));
}

void ValueMapper::remapInstruction(Instruction &I) {
  FlushingMapper(pImpl)->remapInstruction(&I);
}

void ValueMapper::remapFunction(Function &F) {
  FlushingMapper(pImpl)->remapFunction(F);
}

void ValueMapper::scheduleMapGlobalInitializer(GlobalVariable &GV,
                                               Constant &Init,
                                               unsigned MCID) {
  getAsMapper(pImpl)->scheduleMapGlobalInitializer(GV, Init, MCID);
}

void ValueMapper::scheduleMapAppendingVariable(GlobalVariable &GV,
                                               Constant *InitPrefix,
                                               bool IsOldCtorDtor,
                                               ArrayRef<Constant *> NewMembers,
                                               unsigned MCID) {
  getAsMapper(pImpl)->scheduleMapAppendingVariable(
      GV, InitPrefix, IsOldCtorDtor, NewMembers, MCID);
}

void ValueMapper::scheduleMapGlobalAliasee(GlobalAlias &GA, Constant &Aliasee,
                                           unsigned MCID) {
  getAsMapper(pImpl)->scheduleMapGlobalAliasee(GA, Aliasee, MCID);
}

void ValueMapper::scheduleRemapFunction(Function &F, unsigned MCID) {
  getAsMapper(pImpl)->scheduleRemapFunction(F, MCID);
}
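
// Illustrative usage sketch (editorial, not part of the original file): a
// typical client seeds a ValueToValueMapTy, for example mapping each old
// argument to its clone, and then remaps a cloned function body through the
// flushing entry points above. NewF is a hypothetical cloned function:
//
//   ValueToValueMapTy VM;
//   // ... seed VM with OldArg -> NewArg and other known mappings ...
//   ValueMapper VMapper(VM, RF_IgnoreMissingLocals, /*TypeMapper=*/nullptr,
//                       /*Materializer=*/nullptr);
//   for (BasicBlock &BB : *NewF)
//     for (Instruction &I : BB)
//       VMapper.remapInstruction(I);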