//===--- RewriteRope.cpp - Rope specialized for rewriter --------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the RewriteRope class, which is a powerful string
// class optimized for insertions/erases in the middle of the string.
//
//===----------------------------------------------------------------------===//

#include "clang/Rewrite/Core/RewriteRope.h"
#include "clang/Basic/LLVM.h"
#include <algorithm>
using namespace clang;

/// RewriteRope is a "strong" string class, designed to make insertions and
/// deletions in the middle of the string nearly constant time (really, they are
/// O(log N), but with a very low constant factor).
///
/// The implementation of this data structure is a conceptual linear sequence of
/// RopePiece elements.  Each RopePiece represents a view on a separately
/// allocated and reference counted string.  This means that splitting a very
/// long string can be done in constant time by splitting a RopePiece that
/// references the whole string into two rope pieces that reference each half.
/// Once split, another string can be inserted in between the two halves by
/// inserting a RopePiece in between the two others.  All of this is very
/// inexpensive: it takes time proportional to the number of RopePieces, not the
/// length of the strings they represent.
///
/// While a linear sequence of RopePieces is the conceptual model, the actual
/// implementation captures them in an adapted B+ Tree.  Using a B+ tree (a
/// tree that keeps the values in the leaves and where each node contains a
/// reasonable number of pointers to children/values) allows us to maintain
/// efficient operation when the RewriteRope contains a *huge* number of
/// RopePieces.  The basic idea of the B+ Tree is that it allows us to find
/// the RopePiece corresponding to some offset very efficiently, and it
/// automatically balances itself on insertions of RopePieces (which can happen
/// for both insertions and erases of string ranges).
///
/// The one wrinkle on the theory is that we don't attempt to keep the tree
/// properly balanced when erases happen.  Erases of string data can both insert
/// new RopePieces (e.g. when the middle of some other rope piece is deleted,
/// which results in two rope pieces, which is just like an insert) or it can
/// reduce the number of RopePieces maintained by the B+Tree.  In the case when
/// the number of RopePieces is reduced, we don't attempt to maintain the
/// standard 'invariant' that each node in the tree contains at least
/// 'WidthFactor' children/values.  For our use cases, this doesn't seem to
/// matter.
///
/// The implementation below is primarily implemented in terms of three classes:
///   RopePieceBTreeNode - Common base class for:
///
///     RopePieceBTreeLeaf - Directly manages up to '2*WidthFactor' RopePiece
///          nodes.  This directly represents a chunk of the string with those
///          RopePieces concatenated.
///     RopePieceBTreeInterior - An interior node in the B+ Tree, which manages
///          up to '2*WidthFactor' other nodes in the tree.


//===----------------------------------------------------------------------===//
// RopePieceBTreeNode Class
//===----------------------------------------------------------------------===//

namespace {
/// RopePieceBTreeNode - Common base class of RopePieceBTreeLeaf and
/// RopePieceBTreeInterior.  This provides some 'virtual' dispatching methods
/// and a flag that determines which subclass the instance is.  Also
/// important, this node knows the full extent of the node, including any
/// children that it has.  This allows efficient skipping over entire subtrees
/// when looking for an offset in the BTree.
class RopePieceBTreeNode {
protected:
  /// WidthFactor - This controls the number of K/V slots held in the BTree:
  /// how wide it is.  Each level of the BTree is guaranteed to have at least
  /// 'WidthFactor' elements in it (either ropepieces or children), (except
  /// the root, which may have less) and may have at most 2*WidthFactor
  /// elements.
  enum { WidthFactor = 8 };

  /// Size - This is the number of bytes of file this node (including any
  /// potential children) covers.
  unsigned Size;

  /// IsLeaf - True if this is an instance of RopePieceBTreeLeaf, false if it
  /// is an instance of RopePieceBTreeInterior.
  bool IsLeaf;

  RopePieceBTreeNode(bool isLeaf) : Size(0), IsLeaf(isLeaf) {}

  // The destructor is intentionally non-virtual and protected: deletion is
  // always funneled through Destroy(), which dispatches on IsLeaf to the
  // correct subclass destructor.
  ~RopePieceBTreeNode() = default;

public:
  bool isLeaf() const { return IsLeaf; }
  unsigned size() const { return Size; }

  /// Destroy - Delete this node and (recursively) all of its children, using
  /// the concrete subclass destructor.
  void Destroy();

  /// split - Split the range containing the specified offset so that we are
  /// guaranteed that there is a place to do an insertion at the specified
  /// offset.  The offset is relative, so "0" is the start of the node.
  ///
  /// If there is no space in this subtree for the extra piece, the extra tree
  /// node is returned and must be inserted into a parent.
  RopePieceBTreeNode *split(unsigned Offset);

  /// insert - Insert the specified ropepiece into this tree node at the
  /// specified offset.  The offset is relative, so "0" is the start of the
  /// node.
  ///
  /// If there is no space in this subtree for the extra piece, the extra tree
  /// node is returned and must be inserted into a parent.
  RopePieceBTreeNode *insert(unsigned Offset, const RopePiece &R);

  /// erase - Remove NumBytes from this node at the specified offset.  We are
  /// guaranteed that there is a split at Offset.
  void erase(unsigned Offset, unsigned NumBytes);

};
} // end anonymous namespace

//===----------------------------------------------------------------------===//
// RopePieceBTreeLeaf Class
//===----------------------------------------------------------------------===//

namespace {
/// RopePieceBTreeLeaf - Directly manages up to '2*WidthFactor' RopePiece
/// nodes.  This directly represents a chunk of the string with those
/// RopePieces concatenated.  Since this is a B+Tree, all values (in this case
/// instances of RopePiece) are stored in leaves like this.  To make iteration
/// over the leaves efficient, they maintain a singly linked list through the
/// NextLeaf field.  This allows the B+Tree forward iterator to be constant
/// time for all increments.
class RopePieceBTreeLeaf : public RopePieceBTreeNode {
  /// NumPieces - This holds the number of rope pieces currently active in the
  /// Pieces array.
  unsigned char NumPieces;

  /// Pieces - This tracks the file chunks currently in this leaf.
  ///
  RopePiece Pieces[2*WidthFactor];

  /// NextLeaf - This is a pointer to the next leaf in the tree, allowing
  /// efficient in-order forward iteration of the tree without traversal.
  /// PrevLeaf points at the *slot* (either a previous leaf's NextLeaf field,
  /// or null if we are the head) so unlinking does not need the owner object.
  RopePieceBTreeLeaf **PrevLeaf, *NextLeaf;
public:
  RopePieceBTreeLeaf() : RopePieceBTreeNode(true), NumPieces(0),
                         PrevLeaf(nullptr), NextLeaf(nullptr) {}
  ~RopePieceBTreeLeaf() {
    if (PrevLeaf || NextLeaf)
      removeFromLeafInOrder();
    clear();
  }

  bool isFull() const { return NumPieces == 2*WidthFactor; }

  /// clear - Remove all rope pieces from this leaf.
  void clear() {
    // Overwrite each piece with an empty RopePiece to drop its reference to
    // the shared string data.
    while (NumPieces)
      Pieces[--NumPieces] = RopePiece();
    Size = 0;
  }

  unsigned getNumPieces() const { return NumPieces; }

  const RopePiece &getPiece(unsigned i) const {
    assert(i < getNumPieces() && "Invalid piece ID");
    return Pieces[i];
  }

  const RopePieceBTreeLeaf *getNextLeafInOrder() const { return NextLeaf; }

  /// insertAfterLeafInOrder - Link this (currently unlinked) leaf into the
  /// in-order leaf list immediately after 'Node'.
  void insertAfterLeafInOrder(RopePieceBTreeLeaf *Node) {
    assert(!PrevLeaf && !NextLeaf && "Already in ordering");

    NextLeaf = Node->NextLeaf;
    if (NextLeaf)
      NextLeaf->PrevLeaf = &NextLeaf;
    PrevLeaf = &Node->NextLeaf;
    Node->NextLeaf = this;
  }

  /// removeFromLeafInOrder - Unlink this leaf from the in-order leaf list.
  void removeFromLeafInOrder() {
    if (PrevLeaf) {
      *PrevLeaf = NextLeaf;
      if (NextLeaf)
        NextLeaf->PrevLeaf = PrevLeaf;
    } else if (NextLeaf) {
      NextLeaf->PrevLeaf = nullptr;
    }
  }

  /// FullRecomputeSizeLocally - This method recomputes the 'Size' field by
  /// summing the size of all RopePieces.
  void FullRecomputeSizeLocally() {
    Size = 0;
    for (unsigned i = 0, e = getNumPieces(); i != e; ++i)
      Size += getPiece(i).size();
  }

  /// split - Split the range containing the specified offset so that we are
  /// guaranteed that there is a place to do an insertion at the specified
  /// offset.  The offset is relative, so "0" is the start of the node.
  ///
  /// If there is no space in this subtree for the extra piece, the extra tree
  /// node is returned and must be inserted into a parent.
  RopePieceBTreeNode *split(unsigned Offset);

  /// insert - Insert the specified ropepiece into this tree node at the
  /// specified offset.  The offset is relative, so "0" is the start of the
  /// node.
  ///
  /// If there is no space in this subtree for the extra piece, the extra tree
  /// node is returned and must be inserted into a parent.
  RopePieceBTreeNode *insert(unsigned Offset, const RopePiece &R);


  /// erase - Remove NumBytes from this node at the specified offset.  We are
  /// guaranteed that there is a split at Offset.
  void erase(unsigned Offset, unsigned NumBytes);

  static inline bool classof(const RopePieceBTreeNode *N) {
    return N->isLeaf();
  }
};
} // end anonymous namespace

/// split - Split the range containing the specified offset so that we are
/// guaranteed that there is a place to do an insertion at the specified
/// offset.  The offset is relative, so "0" is the start of the node.
///
/// If there is no space in this subtree for the extra piece, the extra tree
/// node is returned and must be inserted into a parent.
RopePieceBTreeNode *RopePieceBTreeLeaf::split(unsigned Offset) {
  // Find the insertion point.  We are guaranteed that there is a split at the
  // specified offset so find it.
  if (Offset == 0 || Offset == size()) {
    // Fastpath for a common case.  There is already a splitpoint at the end.
    return nullptr;
  }

  // Find the piece that this offset lands in.
  unsigned PieceOffs = 0;
  unsigned i = 0;
  while (Offset >= PieceOffs+Pieces[i].size()) {
    PieceOffs += Pieces[i].size();
    ++i;
  }

  // If there is already a split point at the specified offset, just return
  // success.
  if (PieceOffs == Offset)
    return nullptr;

  // Otherwise, we need to split piece 'i' at Offset-PieceOffs.  Convert Offset
  // to being Piece relative.
  unsigned IntraPieceOffset = Offset-PieceOffs;

  // We do this by shrinking the RopePiece and then doing an insert of the tail.
  RopePiece Tail(Pieces[i].StrData, Pieces[i].StartOffs+IntraPieceOffset,
                 Pieces[i].EndOffs);
  Size -= Pieces[i].size();
  Pieces[i].EndOffs = Pieces[i].StartOffs+IntraPieceOffset;
  Size += Pieces[i].size();

  return insert(Offset, Tail);
}


/// insert - Insert the specified RopePiece into this tree node at the
/// specified offset.  The offset is relative, so "0" is the start of the node.
///
/// If there is no space in this subtree for the extra piece, the extra tree
/// node is returned and must be inserted into a parent.
RopePieceBTreeNode *RopePieceBTreeLeaf::insert(unsigned Offset,
                                               const RopePiece &R) {
  // If this node is not full, insert the piece.
  if (!isFull()) {
    // Find the insertion point.  We are guaranteed that there is a split at the
    // specified offset so find it.
    unsigned i = 0, e = getNumPieces();
    if (Offset == size()) {
      // Fastpath for a common case.
      i = e;
    } else {
      unsigned SlotOffs = 0;
      for (; Offset > SlotOffs; ++i)
        SlotOffs += getPiece(i).size();
      assert(SlotOffs == Offset && "Split didn't occur before insertion!");
    }

    // For an insertion into a non-full leaf node, just insert the value in
    // its sorted position.  This requires moving later values over.
    for (; i != e; --e)
      Pieces[e] = Pieces[e-1];
    Pieces[i] = R;
    ++NumPieces;
    Size += R.size();
    return nullptr;
  }

  // Otherwise, if this leaf is full, split it in two halves.  Since this
  // node is full, it contains 2*WidthFactor values.  We move the first
  // 'WidthFactor' values to the LHS child (which we leave in this node) and
  // move the last 'WidthFactor' values into the RHS child.

  // Create the new node.
  RopePieceBTreeLeaf *NewNode = new RopePieceBTreeLeaf();

  // Move over the last 'WidthFactor' values from here to NewNode.
  std::copy(&Pieces[WidthFactor], &Pieces[2*WidthFactor],
            &NewNode->Pieces[0]);
  // Replace old pieces with null RopePieces to drop refcounts.
  std::fill(&Pieces[WidthFactor], &Pieces[2*WidthFactor], RopePiece());

  // Decrease the number of values in the two nodes.
  NewNode->NumPieces = NumPieces = WidthFactor;

  // Recompute the two nodes' size.
  NewNode->FullRecomputeSizeLocally();
  FullRecomputeSizeLocally();

  // Update the list of leaves.
  NewNode->insertAfterLeafInOrder(this);

  // These insertions can't fail.
  if (this->size() >= Offset)
    this->insert(Offset, R);
  else
    NewNode->insert(Offset - this->size(), R);
  return NewNode;
}

/// erase - Remove NumBytes from this node at the specified offset.  We are
/// guaranteed that there is a split at Offset.
void RopePieceBTreeLeaf::erase(unsigned Offset, unsigned NumBytes) {
  // Since we are guaranteed that there is a split at Offset, we start by
  // finding the Piece that starts there.
  unsigned PieceOffs = 0;
  unsigned i = 0;
  for (; Offset > PieceOffs; ++i)
    PieceOffs += getPiece(i).size();
  assert(PieceOffs == Offset && "Split didn't occur before erase!");

  unsigned StartPiece = i;

  // Figure out how many pieces completely cover 'NumBytes'.  We want to remove
  // all of them.
  for (; Offset+NumBytes > PieceOffs+getPiece(i).size(); ++i)
    PieceOffs += getPiece(i).size();

  // If we exactly include the last one, include it in the region to delete.
  if (Offset+NumBytes == PieceOffs+getPiece(i).size())
    PieceOffs += getPiece(i).size(), ++i;

  // If we completely cover some RopePieces, erase them now.
  if (i != StartPiece) {
    unsigned NumDeleted = i-StartPiece;
    for (; i != getNumPieces(); ++i)
      Pieces[i-NumDeleted] = Pieces[i];

    // Drop references to dead rope pieces.
    std::fill(&Pieces[getNumPieces()-NumDeleted], &Pieces[getNumPieces()],
              RopePiece());
    NumPieces -= NumDeleted;

    unsigned CoverBytes = PieceOffs-Offset;
    NumBytes -= CoverBytes;
    Size -= CoverBytes;
  }

  // If we completely removed some stuff, we could be done.
  if (NumBytes == 0) return;

  // Okay, now we might be erasing part of some Piece.  If this is the case,
  // then move the start point of the piece.
  assert(getPiece(StartPiece).size() > NumBytes);
  Pieces[StartPiece].StartOffs += NumBytes;

  // The size of this node just shrunk by NumBytes.
  Size -= NumBytes;
}

//===----------------------------------------------------------------------===//
// RopePieceBTreeInterior Class
//===----------------------------------------------------------------------===//

namespace {
/// RopePieceBTreeInterior - This represents an interior node in the B+Tree,
/// which holds up to 2*WidthFactor pointers to child nodes.
class RopePieceBTreeInterior : public RopePieceBTreeNode {
  /// NumChildren - This holds the number of children currently active in the
  /// Children array.
  unsigned char NumChildren;
  RopePieceBTreeNode *Children[2*WidthFactor];
public:
  RopePieceBTreeInterior() : RopePieceBTreeNode(false), NumChildren(0) {}

  RopePieceBTreeInterior(RopePieceBTreeNode *LHS, RopePieceBTreeNode *RHS)
    : RopePieceBTreeNode(false) {
    Children[0] = LHS;
    Children[1] = RHS;
    NumChildren = 2;
    Size = LHS->size() + RHS->size();
  }

  ~RopePieceBTreeInterior() {
    for (unsigned i = 0, e = getNumChildren(); i != e; ++i)
      Children[i]->Destroy();
  }

  bool isFull() const { return NumChildren == 2*WidthFactor; }

  unsigned getNumChildren() const { return NumChildren; }
  const RopePieceBTreeNode *getChild(unsigned i) const {
    assert(i < NumChildren && "invalid child #");
    return Children[i];
  }
  RopePieceBTreeNode *getChild(unsigned i) {
    assert(i < NumChildren && "invalid child #");
    return Children[i];
  }

  /// FullRecomputeSizeLocally - Recompute the Size field of this node by
  /// summing up the sizes of the child nodes.
  void FullRecomputeSizeLocally() {
    Size = 0;
    for (unsigned i = 0, e = getNumChildren(); i != e; ++i)
      Size += getChild(i)->size();
  }


  /// split - Split the range containing the specified offset so that we are
  /// guaranteed that there is a place to do an insertion at the specified
  /// offset.  The offset is relative, so "0" is the start of the node.
  ///
  /// If there is no space in this subtree for the extra piece, the extra tree
  /// node is returned and must be inserted into a parent.
  RopePieceBTreeNode *split(unsigned Offset);


  /// insert - Insert the specified ropepiece into this tree node at the
  /// specified offset.  The offset is relative, so "0" is the start of the
  /// node.
  ///
  /// If there is no space in this subtree for the extra piece, the extra tree
  /// node is returned and must be inserted into a parent.
  RopePieceBTreeNode *insert(unsigned Offset, const RopePiece &R);

  /// HandleChildPiece - A child propagated an insertion result up to us.
  /// Insert the new child, and/or propagate the result further up the tree.
  RopePieceBTreeNode *HandleChildPiece(unsigned i, RopePieceBTreeNode *RHS);

  /// erase - Remove NumBytes from this node at the specified offset.  We are
  /// guaranteed that there is a split at Offset.
  void erase(unsigned Offset, unsigned NumBytes);

  static inline bool classof(const RopePieceBTreeNode *N) {
    return !N->isLeaf();
  }
};
} // end anonymous namespace

/// split - Split the range containing the specified offset so that we are
/// guaranteed that there is a place to do an insertion at the specified
/// offset.  The offset is relative, so "0" is the start of the node.
///
/// If there is no space in this subtree for the extra piece, the extra tree
/// node is returned and must be inserted into a parent.
RopePieceBTreeNode *RopePieceBTreeInterior::split(unsigned Offset) {
  // Figure out which child to split.
  if (Offset == 0 || Offset == size())
    return nullptr;  // If we have an exact offset, we're already split.

  unsigned ChildOffset = 0;
  unsigned i = 0;
  for (; Offset >= ChildOffset+getChild(i)->size(); ++i)
    ChildOffset += getChild(i)->size();

  // If already split there, we're done.
  if (ChildOffset == Offset)
    return nullptr;

  // Otherwise, recursively split the child.
  if (RopePieceBTreeNode *RHS = getChild(i)->split(Offset-ChildOffset))
    return HandleChildPiece(i, RHS);
  return nullptr;  // Done!
}

/// insert - Insert the specified ropepiece into this tree node at the
/// specified offset.  The offset is relative, so "0" is the start of the
/// node.
///
/// If there is no space in this subtree for the extra piece, the extra tree
/// node is returned and must be inserted into a parent.
RopePieceBTreeNode *RopePieceBTreeInterior::insert(unsigned Offset,
                                                   const RopePiece &R) {
  // Find the insertion point.  We are guaranteed that there is a split at the
  // specified offset so find it.
  unsigned i = 0, e = getNumChildren();

  unsigned ChildOffs = 0;
  if (Offset == size()) {
    // Fastpath for a common case.  Insert at end of last child.
    i = e-1;
    ChildOffs = size()-getChild(i)->size();
  } else {
    for (; Offset > ChildOffs+getChild(i)->size(); ++i)
      ChildOffs += getChild(i)->size();
  }

  Size += R.size();

  // Insert at the end of this child.
  if (RopePieceBTreeNode *RHS = getChild(i)->insert(Offset-ChildOffs, R))
    return HandleChildPiece(i, RHS);

  return nullptr;
}

/// HandleChildPiece - A child propagated an insertion result up to us.
/// Insert the new child, and/or propagate the result further up the tree.
RopePieceBTreeNode *
RopePieceBTreeInterior::HandleChildPiece(unsigned i, RopePieceBTreeNode *RHS) {
  // Otherwise the child propagated a subtree up to us as a new child.  See if
  // we have space for it here.
  if (!isFull()) {
    // Insert RHS after child 'i'.  memmove is fine: Children holds raw
    // pointers (trivially copyable).
    if (i + 1 != getNumChildren())
      memmove(&Children[i+2], &Children[i+1],
              (getNumChildren()-i-1)*sizeof(Children[0]));
    Children[i+1] = RHS;
    ++NumChildren;
    return nullptr;
  }

  // Okay, this node is full.  Split it in half, moving WidthFactor children to
  // a newly allocated interior node.

  // Create the new node.
  RopePieceBTreeInterior *NewNode = new RopePieceBTreeInterior();

  // Move over the last 'WidthFactor' values from here to NewNode.
  memcpy(&NewNode->Children[0], &Children[WidthFactor],
         WidthFactor*sizeof(Children[0]));

  // Decrease the number of values in the two nodes.
  NewNode->NumChildren = NumChildren = WidthFactor;

  // Finally, insert the new child into the side that can (now) hold it.
  // These insertions can't fail.
  if (i < WidthFactor)
    this->HandleChildPiece(i, RHS);
  else
    NewNode->HandleChildPiece(i-WidthFactor, RHS);

  // Recompute the two nodes' size.
  NewNode->FullRecomputeSizeLocally();
  FullRecomputeSizeLocally();
  return NewNode;
}

/// erase - Remove NumBytes from this node at the specified offset.  We are
/// guaranteed that there is a split at Offset.
void RopePieceBTreeInterior::erase(unsigned Offset, unsigned NumBytes) {
  // This will shrink this node by NumBytes.
  Size -= NumBytes;

  // Find the first child that overlaps with Offset.
  unsigned i = 0;
  for (; Offset >= getChild(i)->size(); ++i)
    Offset -= getChild(i)->size();

  // Propagate the delete request into overlapping children, or completely
  // delete the children as appropriate.
  while (NumBytes) {
    RopePieceBTreeNode *CurChild = getChild(i);

    // If we are deleting something contained entirely in the child, pass on the
    // request.
    if (Offset+NumBytes < CurChild->size()) {
      CurChild->erase(Offset, NumBytes);
      return;
    }

    // If this deletion request starts somewhere in the middle of the child, it
    // must be deleting to the end of the child.
    if (Offset) {
      unsigned BytesFromChild = CurChild->size()-Offset;
      CurChild->erase(Offset, BytesFromChild);
      NumBytes -= BytesFromChild;
      // Start at the beginning of the next child.
      Offset = 0;
      ++i;
      continue;
    }

    // If the deletion request completely covers the child, delete it and move
    // the rest down.
    NumBytes -= CurChild->size();
    CurChild->Destroy();
    --NumChildren;
    if (i != getNumChildren())
      memmove(&Children[i], &Children[i+1],
              (getNumChildren()-i)*sizeof(Children[0]));
  }
}

//===----------------------------------------------------------------------===//
// RopePieceBTreeNode Implementation
//===----------------------------------------------------------------------===//

void RopePieceBTreeNode::Destroy() {
  // Manual dispatch to the right destructor; see the non-virtual protected
  // destructor in the class definition.
  if (RopePieceBTreeLeaf *Leaf = dyn_cast<RopePieceBTreeLeaf>(this))
    delete Leaf;
  else
    delete cast<RopePieceBTreeInterior>(this);
}

/// split - Split the range containing the specified offset so that we are
/// guaranteed that there is a place to do an insertion at the specified
/// offset.  The offset is relative, so "0" is the start of the node.
///
/// If there is no space in this subtree for the extra piece, the extra tree
/// node is returned and must be inserted into a parent.
RopePieceBTreeNode *RopePieceBTreeNode::split(unsigned Offset) {
  assert(Offset <= size() && "Invalid offset to split!");
  if (RopePieceBTreeLeaf *Leaf = dyn_cast<RopePieceBTreeLeaf>(this))
    return Leaf->split(Offset);
  return cast<RopePieceBTreeInterior>(this)->split(Offset);
}

/// insert - Insert the specified ropepiece into this tree node at the
/// specified offset.  The offset is relative, so "0" is the start of the
/// node.
///
/// If there is no space in this subtree for the extra piece, the extra tree
/// node is returned and must be inserted into a parent.
RopePieceBTreeNode *RopePieceBTreeNode::insert(unsigned Offset,
                                               const RopePiece &R) {
  assert(Offset <= size() && "Invalid offset to insert!");
  if (RopePieceBTreeLeaf *Leaf = dyn_cast<RopePieceBTreeLeaf>(this))
    return Leaf->insert(Offset, R);
  return cast<RopePieceBTreeInterior>(this)->insert(Offset, R);
}

/// erase - Remove NumBytes from this node at the specified offset.  We are
/// guaranteed that there is a split at Offset.
void RopePieceBTreeNode::erase(unsigned Offset, unsigned NumBytes) {
  assert(Offset+NumBytes <= size() && "Invalid offset to erase!");
  if (RopePieceBTreeLeaf *Leaf = dyn_cast<RopePieceBTreeLeaf>(this))
    return Leaf->erase(Offset, NumBytes);
  return cast<RopePieceBTreeInterior>(this)->erase(Offset, NumBytes);
}


//===----------------------------------------------------------------------===//
// RopePieceBTreeIterator Implementation
//===----------------------------------------------------------------------===//

// getCN - Reinterpret the iterator's opaque CurNode pointer as the leaf it is.
static const RopePieceBTreeLeaf *getCN(const void *P) {
  return static_cast<const RopePieceBTreeLeaf*>(P);
}

// begin iterator.
RopePieceBTreeIterator::RopePieceBTreeIterator(const void *n) {
  const RopePieceBTreeNode *N = static_cast<const RopePieceBTreeNode*>(n);

  // Walk down the left side of the tree until we get to a leaf.
  while (const RopePieceBTreeInterior *IN = dyn_cast<RopePieceBTreeInterior>(N))
    N = IN->getChild(0);

  // We must have at least one leaf.
  CurNode = cast<RopePieceBTreeLeaf>(N);

  // If we found a leaf that happens to be empty, skip over it until we get
  // to something full.
  while (CurNode && getCN(CurNode)->getNumPieces() == 0)
    CurNode = getCN(CurNode)->getNextLeafInOrder();

  if (CurNode)
    CurPiece = &getCN(CurNode)->getPiece(0);
  else  // Empty tree, this is an end() iterator.
    CurPiece = nullptr;
  CurChar = 0;
}

void RopePieceBTreeIterator::MoveToNextPiece() {
  // If there are more pieces in this leaf, advance within the leaf.
  if (CurPiece != &getCN(CurNode)->getPiece(getCN(CurNode)->getNumPieces()-1)) {
    CurChar = 0;
    ++CurPiece;
    return;
  }

  // Find the next non-empty leaf node.
  do
    CurNode = getCN(CurNode)->getNextLeafInOrder();
  while (CurNode && getCN(CurNode)->getNumPieces() == 0);

  if (CurNode)
    CurPiece = &getCN(CurNode)->getPiece(0);
  else  // Hit end().
    CurPiece = nullptr;
  CurChar = 0;
}

//===----------------------------------------------------------------------===//
// RopePieceBTree Implementation
//===----------------------------------------------------------------------===//

// getRoot - Reinterpret the tree's opaque Root pointer as a node.
static RopePieceBTreeNode *getRoot(void *P) {
  return static_cast<RopePieceBTreeNode*>(P);
}

RopePieceBTree::RopePieceBTree() {
  Root = new RopePieceBTreeLeaf();
}
RopePieceBTree::RopePieceBTree(const RopePieceBTree &RHS) {
  assert(RHS.empty() && "Can't copy non-empty tree yet");
  Root = new RopePieceBTreeLeaf();
}
RopePieceBTree::~RopePieceBTree() {
  getRoot(Root)->Destroy();
}

unsigned RopePieceBTree::size() const {
  return getRoot(Root)->size();
}

void RopePieceBTree::clear() {
  // If the root is already a leaf, just empty it in place; otherwise tear
  // down the whole tree and start over with a fresh empty leaf.
  if (RopePieceBTreeLeaf *Leaf = dyn_cast<RopePieceBTreeLeaf>(getRoot(Root)))
    Leaf->clear();
  else {
    getRoot(Root)->Destroy();
    Root = new RopePieceBTreeLeaf();
  }
}

void RopePieceBTree::insert(unsigned Offset, const RopePiece &R) {
  // #1. Split at Offset.  If the split overflows the root, grow the tree by
  // one level with a new interior root.
  if (RopePieceBTreeNode *RHS = getRoot(Root)->split(Offset))
    Root = new RopePieceBTreeInterior(getRoot(Root), RHS);

  // #2. Do the insertion.
  if (RopePieceBTreeNode *RHS = getRoot(Root)->insert(Offset, R))
    Root = new RopePieceBTreeInterior(getRoot(Root), RHS);
}

void RopePieceBTree::erase(unsigned Offset, unsigned NumBytes) {
  // #1. Split at Offset.
  if (RopePieceBTreeNode *RHS = getRoot(Root)->split(Offset))
    Root = new RopePieceBTreeInterior(getRoot(Root), RHS);

  // #2. Do the erasing.
  getRoot(Root)->erase(Offset, NumBytes);
}

//===----------------------------------------------------------------------===//
// RewriteRope Implementation
//===----------------------------------------------------------------------===//

/// MakeRopeString - This copies the specified byte range into some instance of
/// RopeRefCountString, and returns a RopePiece that represents it.  This uses
/// the AllocBuffer object to aggregate requests for small strings into one
/// allocation instead of doing tons of tiny allocations.
RopePiece RewriteRope::MakeRopeString(const char *Start, const char *End) {
  unsigned Len = End-Start;
  assert(Len && "Zero length RopePiece is invalid!");

  // If we have space for this string in the current alloc buffer, use it.
  if (AllocOffs+Len <= AllocChunkSize) {
    memcpy(AllocBuffer->Data+AllocOffs, Start, Len);
    AllocOffs += Len;
    return RopePiece(AllocBuffer, AllocOffs-Len, AllocOffs);
  }

  // If we don't have enough room because this specific allocation is huge,
  // just allocate a new rope piece for it alone.
  if (Len > AllocChunkSize) {
    // -1 because RopeRefCountString::Data is a one-element flexible tail.
    unsigned Size = End-Start+sizeof(RopeRefCountString)-1;
    RopeRefCountString *Res =
        reinterpret_cast<RopeRefCountString *>(new char[Size]);
    Res->RefCount = 0;
    memcpy(Res->Data, Start, End-Start);
    return RopePiece(Res, 0, End-Start);
  }

  // Otherwise, this was a small request but we just don't have space for it.
  // Make a new chunk and share it with later allocations.

  // NOTE(review): presumably offsetof comes in via the included headers
  // (<cstddef>/<stddef.h>) -- confirm.
  unsigned AllocSize = offsetof(RopeRefCountString, Data) + AllocChunkSize;
  RopeRefCountString *Res =
      reinterpret_cast<RopeRefCountString *>(new char[AllocSize]);
  Res->RefCount = 0;
  memcpy(Res->Data, Start, Len);
  // NOTE(review): the previous AllocBuffer is simply replaced here; it is
  // presumably kept alive/freed through the RopePieces that reference it --
  // verify the addRef/Release semantics in RewriteRope.h.
  AllocBuffer = Res;
  AllocOffs = Len;

  return RopePiece(AllocBuffer, 0, Len);
}