1 // Bitmap Allocator. -*- C++ -*- 2 3 // Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009 4 // Free Software Foundation, Inc. 5 // 6 // This file is part of the GNU ISO C++ Library. This library is free 7 // software; you can redistribute it and/or modify it under the 8 // terms of the GNU General Public License as published by the 9 // Free Software Foundation; either version 3, or (at your option) 10 // any later version. 11 12 // This library is distributed in the hope that it will be useful, 13 // but WITHOUT ANY WARRANTY; without even the implied warranty of 14 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 15 // GNU General Public License for more details. 16 17 // Under Section 7 of GPL version 3, you are granted additional 18 // permissions described in the GCC Runtime Library Exception, version 19 // 3.1, as published by the Free Software Foundation. 20 21 // You should have received a copy of the GNU General Public License and 22 // a copy of the GCC Runtime Library Exception along with this program; 23 // see the files COPYING3 and COPYING.RUNTIME respectively. If not, see 24 // <http://www.gnu.org/licenses/>. 25 26 /** @file ext/bitmap_allocator.h 27 * This file is a GNU extension to the Standard C++ Library. 28 */ 29 30 #ifndef _BITMAP_ALLOCATOR_H 31 #define _BITMAP_ALLOCATOR_H 1 32 33 #include <cstddef> // For std::size_t, and ptrdiff_t. 34 #include <bits/functexcept.h> // For __throw_bad_alloc(). 35 #include <utility> // For std::pair. 36 #include <functional> // For greater_equal, and less_equal. 37 #include <new> // For operator new. 38 #include <debug/debug.h> // _GLIBCXX_DEBUG_ASSERT 39 #include <ext/concurrence.h> 40 #include <bits/move.h> 41 42 /** @brief The constant in the expression below is the alignment 43 * required in bytes. 
*/
#define _BALLOC_ALIGN_BYTES 8

_GLIBCXX_BEGIN_NAMESPACE(__gnu_cxx)

  using std::size_t;
  using std::ptrdiff_t;

  namespace __detail
  {
    /** @class  __mini_vector bitmap_allocator.h bitmap_allocator.h
     *
     *  @brief  __mini_vector<> is a stripped down version of the
     *  full-fledged std::vector<>.
     *
     *  It is to be used only for built-in types or PODs. Notable
     *  differences are:
     *
     *  @detail
     *  1. Not all accessor functions are present.
     *  2. Used ONLY for PODs.
     *  3. No Allocator template argument. Uses ::operator new() to get
     *  memory, and ::operator delete() to free it.
     *  Caveat: The dtor does NOT free the memory allocated, so this a
     *  memory-leaking vector!
     */
    template<typename _Tp>
      class __mini_vector
      {
	// Non-copyable: declared private and intentionally left
	// undefined.
	__mini_vector(const __mini_vector&);
	__mini_vector& operator=(const __mini_vector&);

      public:
	typedef _Tp value_type;
	typedef _Tp* pointer;
	typedef _Tp& reference;
	typedef const _Tp& const_reference;
	typedef size_t size_type;
	typedef ptrdiff_t difference_type;
	typedef pointer iterator;

      private:
	// Same layout convention as std::vector: [_M_start, _M_finish)
	// holds the elements, [_M_finish, _M_end_of_storage) is spare
	// capacity.
	pointer _M_start;
	pointer _M_finish;
	pointer _M_end_of_storage;

	// Number of elements that can still be appended without
	// reallocating.
	size_type
	_M_space_left() const throw()
	{ return _M_end_of_storage - _M_finish; }

	pointer
	allocate(size_type __n)
	{ return static_cast<pointer>(::operator new(__n * sizeof(_Tp))); }

	// The size argument is ignored; ::operator delete does not
	// need it.
	void
	deallocate(pointer __p, size_type)
	{ ::operator delete(__p); }

      public:
	// Members used: size(), push_back(), pop_back(),
	// insert(iterator, const_reference), erase(iterator),
	// begin(), end(), back(), operator[].

	__mini_vector() : _M_start(0), _M_finish(0),
			  _M_end_of_storage(0)
	{ }

#if 0
	// Deliberately disabled: the pool data structures must outlive
	// normal static destruction order, so storage is never freed.
	~__mini_vector()
	{
	  if (this->_M_start)
	    {
	      this->deallocate(this->_M_start, this->_M_end_of_storage
			       - this->_M_start);
	    }
	}
#endif

	size_type
	size() const throw()
	{ return _M_finish - _M_start; }

	iterator
	begin() const throw()
	{ return this->_M_start; }

	iterator
	end() const throw()
	{ return this->_M_finish; }

	// Precondition: the vector is non-empty.
	reference
	back() const throw()
	{ return *(this->end() - 1); }

	// Unchecked element access.
	reference
	operator[](const size_type __pos) const throw()
	{ return this->_M_start[__pos]; }

	// Defined out of line below.
	void
	insert(iterator __pos, const_reference __x);

	void
	push_back(const_reference __x)
	{
	  if (this->_M_space_left())
	    {
	      *this->end() = __x;
	      ++this->_M_finish;
	    }
	  else
	    this->insert(this->end(), __x);
	}

	// NOTE(review): no underflow check — caller must ensure the
	// vector is non-empty.
	void
	pop_back() throw()
	{ --this->_M_finish; }

	void
	erase(iterator __pos) throw();

	// Drops all elements; capacity is retained.
	void
	clear() throw()
	{ this->_M_finish = this->_M_start; }
      };

    // Out of line function definitions.
    template<typename _Tp>
      void __mini_vector<_Tp>::
      insert(iterator __pos, const_reference __x)
      {
	if (this->_M_space_left())
	  {
	    // Enough capacity: shift [__pos, _M_finish) up by one
	    // slot, copying from the back down, then drop __x into
	    // the vacated position.
	    size_type __to_move = this->_M_finish - __pos;
	    iterator __dest = this->end();
	    iterator __src = this->end() - 1;

	    ++this->_M_finish;
	    while (__to_move)
	      {
		*__dest = *__src;
		--__dest; --__src; --__to_move;
	      }
	    *__pos = __x;
	  }
	else
	  {
	    // No capacity: grow geometrically (double, or 1 when
	    // empty), copy the prefix, the new element, then the
	    // suffix into the new buffer, and free the old one.
	    size_type __new_size = this->size() ? this->size() * 2 : 1;
	    iterator __new_start = this->allocate(__new_size);
	    iterator __first = this->begin();
	    iterator __start = __new_start;
	    while (__first != __pos)
	      {
		*__start = *__first;
		++__start; ++__first;
	      }
	    *__start = __x;
	    ++__start;
	    while (__first != this->end())
	      {
		*__start = *__first;
		++__start; ++__first;
	      }
	    if (this->_M_start)
	      this->deallocate(this->_M_start, this->size());

	    this->_M_start = __new_start;
	    this->_M_finish = __start;
	    this->_M_end_of_storage = this->_M_start + __new_size;
	  }
      }

    template<typename _Tp>
      void __mini_vector<_Tp>::
      erase(iterator __pos) throw()
      {
	// Shift every element after __pos down by one, then shrink.
	while (__pos + 1 != this->end())
	  {
	    *__pos = __pos[1];
	    ++__pos;
	  }
	--this->_M_finish;
      }


    // Minimal iterator_traits replacement covering the two iterator
    // shapes used in this file: class-type iterators and plain
    // pointers.
    template<typename _Tp>
      struct __mv_iter_traits
      {
	typedef typename _Tp::value_type value_type;
	typedef typename _Tp::difference_type difference_type;
      };

    template<typename _Tp>
      struct __mv_iter_traits<_Tp*>
      {
	typedef _Tp value_type;
	typedef ptrdiff_t difference_type;
      };

    enum
      {
	bits_per_byte = 8,
	// One bitmap word (a size_t) tracks this many blocks.
	bits_per_block = sizeof(size_t) * size_t(bits_per_byte)
      };

    // Binary search: first position in [__first, __last) for which
    // __comp(*__i, __val) is false — i.e. std::lower_bound, local to
    // avoid depending on <algorithm>.
    template<typename _ForwardIterator, typename _Tp, typename _Compare>
      _ForwardIterator
      __lower_bound(_ForwardIterator __first, _ForwardIterator __last,
		    const _Tp& __val, _Compare __comp)
      {
	typedef typename __mv_iter_traits<_ForwardIterator>::value_type
	  _ValueType;
	typedef typename __mv_iter_traits<_ForwardIterator>::difference_type
	  _DistanceType;

	_DistanceType __len = __last - __first;
	_DistanceType __half;
	_ForwardIterator __middle;

	while (__len > 0)
	  {
	    __half = __len >> 1;
	    __middle = __first;
	    __middle += __half;
	    if (__comp(*__middle, __val))
	      {
		__first = __middle;
		++__first;
		__len = __len - __half - 1;
	      }
	    else
	      __len = __half;
	  }
	return __first;
      }

    // Linear search: first element of [__first, __last) satisfying
    // __p — i.e. std::find_if, local to avoid <algorithm>.
    template<typename _InputIterator, typename _Predicate>
      inline _InputIterator
      __find_if(_InputIterator __first, _InputIterator __last, _Predicate __p)
      {
	while (__first != __last && !__p(*__first))
	  ++__first;
	return __first;
      }

    /** @brief The number of Blocks pointed to by the address pair
     *  passed to the function.
     */
    template<typename _AddrPair>
      inline size_t
      __num_blocks(_AddrPair __ap)
      { return (__ap.second - __ap.first) + 1; }

    /** @brief The number of Bit-maps pointed to by the address pair
     *  passed to the function.
     */
    template<typename _AddrPair>
      inline size_t
      __num_bitmaps(_AddrPair __ap)
      { return __num_blocks(__ap) / size_t(bits_per_block); }

    // _Tp should be a pointer type.
    template<typename _Tp>
      class _Inclusive_between
      : public std::unary_function<typename std::pair<_Tp, _Tp>, bool>
      {
	typedef _Tp pointer;
	pointer _M_ptr_value;
	typedef typename std::pair<_Tp, _Tp> _Block_pair;

      public:
	_Inclusive_between(pointer __ptr) : _M_ptr_value(__ptr)
	{ }

	// True iff the stored pointer lies within the closed range
	// [__bp.first, __bp.second].  Uses less_equal/greater_equal
	// function objects so the pointer comparison is well-defined.
	bool
	operator()(_Block_pair __bp) const throw()
	{
	  if (std::less_equal<pointer>()(_M_ptr_value, __bp.second)
	      && std::greater_equal<pointer>()(_M_ptr_value, __bp.first))
	    return true;
	  else
	    return false;
	}
      };

    // Used to pass a Functor to functions by reference.
template<typename _Functor>
      class _Functor_Ref
      : public std::unary_function<typename _Functor::argument_type,
				   typename _Functor::result_type>
      {
	_Functor& _M_fref;

      public:
	typedef typename _Functor::argument_type argument_type;
	typedef typename _Functor::result_type result_type;

	_Functor_Ref(_Functor& __fref) : _M_fref(__fref)
	{ }

	// Forwards the call to the referenced functor, so state the
	// functor accumulates survives the by-value passing done by
	// __find_if.
	result_type
	operator()(argument_type __arg)
	{ return _M_fref(__arg); }
      };

    /** @class  _Ffit_finder bitmap_allocator.h bitmap_allocator.h
     *
     *  @brief  The class which acts as a predicate for applying the
     *  first-fit memory allocation policy for the bitmap allocator.
     */
    // _Tp should be a pointer type, and _Alloc is the Allocator for
    // the vector.
    template<typename _Tp>
      class _Ffit_finder
      : public std::unary_function<typename std::pair<_Tp, _Tp>, bool>
      {
	typedef typename std::pair<_Tp, _Tp> _Block_pair;
	typedef typename __detail::__mini_vector<_Block_pair> _BPVector;
	typedef typename _BPVector::difference_type _Counter_type;

	// Bitmap word with a free bit, found by the last successful
	// call to operator(), and its index counted from the chunk's
	// first bitmap.
	size_t* _M_pbitmap;
	_Counter_type _M_data_offset;

      public:
	_Ffit_finder() : _M_pbitmap(0), _M_data_offset(0)
	{ }

	bool
	operator()(_Block_pair __bp) throw()
	{
	  // Set the _rover to the last physical location bitmap,
	  // which is the bitmap which belongs to the first free
	  // block. Thus, the bitmaps are in exact reverse order of
	  // the actual memory layout. So, we count down the bitmaps,
	  // which is the same as moving up the memory.

	  // If the used count stored at the start of the Bit Map headers
	  // is equal to the number of Objects that the current Block can
	  // store, then there is definitely no space for another single
	  // object, so just return false.
	  _Counter_type __diff =
	    __gnu_cxx::__detail::__num_bitmaps(__bp);

	  if (*(reinterpret_cast<size_t*>
		(__bp.first) - (__diff + 1))
	      == __gnu_cxx::__detail::__num_blocks(__bp))
	    return false;

	  // The bitmaps sit immediately below the data area, so the
	  // word at first - 1 is the first bitmap.
	  size_t* __rover = reinterpret_cast<size_t*>(__bp.first) - 1;

	  for (_Counter_type __i = 0; __i < __diff; ++__i)
	    {
	      _M_data_offset = __i;
	      if (*__rover)
		{
		  // Any non-zero word contains at least one free bit.
		  _M_pbitmap = __rover;
		  return true;
		}
	      --__rover;
	    }
	  return false;
	}


	size_t*
	_M_get() const throw()
	{ return _M_pbitmap; }

	// Offset, in blocks, of the found bitmap's first block within
	// its chunk.
	_Counter_type
	_M_offset() const throw()
	{ return _M_data_offset * size_t(bits_per_block); }
      };


    /** @class  _Bitmap_counter bitmap_allocator.h bitmap_allocator.h
     *
     *  @brief  The bitmap counter which acts as the bitmap
     *  manipulator, and manages the bit-manipulation functions and
     *  the searching and identification functions on the bit-map.
     */
    // _Tp should be a pointer type.
    template<typename _Tp>
      class _Bitmap_counter
      {
	typedef typename __detail::__mini_vector<typename std::pair<_Tp, _Tp> >
	  _BPVector;
	typedef typename _BPVector::size_type _Index_type;
	typedef _Tp pointer;

	_BPVector& _M_vbp;
	size_t* _M_curr_bmap;
	size_t* _M_last_bmap_in_block;
	_Index_type _M_curr_index;

      public:
	// Use the 2nd parameter with care. Make sure that such an
	// entry exists in the vector before passing that particular
	// index to this ctor.
	_Bitmap_counter(_BPVector& Rvbp, long __index = -1) : _M_vbp(Rvbp)
	{ this->_M_reset(__index); }

	void
	_M_reset(long __index = -1) throw()
	{
	  if (__index == -1)
	    {
	      // Sentinel state: _M_finished() will report true.
	      _M_curr_bmap = 0;
	      _M_curr_index = static_cast<_Index_type>(-1);
	      return;
	    }

	  _M_curr_index = __index;
	  _M_curr_bmap = reinterpret_cast<size_t*>
	    (_M_vbp[_M_curr_index].first) - 1;

	  _GLIBCXX_DEBUG_ASSERT(__index <= (long)_M_vbp.size() - 1);

	  // Bitmaps are stored in reverse order just below the data
	  // area, so the chunk's last bitmap is the lowest in memory.
	  _M_last_bmap_in_block = _M_curr_bmap
	    - ((_M_vbp[_M_curr_index].second
		- _M_vbp[_M_curr_index].first + 1)
	       / size_t(bits_per_block) - 1);
	}

	// Dangerous Function! Use with extreme care. Pass to this
	// function ONLY those values that are known to be correct,
	// otherwise this will mess up big time.
	void
	_M_set_internal_bitmap(size_t* __new_internal_marker) throw()
	{ _M_curr_bmap = __new_internal_marker; }

	bool
	_M_finished() const throw()
	{ return(_M_curr_bmap == 0); }

	// Advance to the next bitmap; when the current chunk's
	// bitmaps are exhausted, step to the next chunk (or into the
	// finished state past the last chunk).
	_Bitmap_counter&
	operator++() throw()
	{
	  if (_M_curr_bmap == _M_last_bmap_in_block)
	    {
	      if (++_M_curr_index == _M_vbp.size())
		_M_curr_bmap = 0;
	      else
		this->_M_reset(_M_curr_index);
	    }
	  else
	    --_M_curr_bmap;
	  return *this;
	}

	size_t*
	_M_get() const throw()
	{ return _M_curr_bmap; }

	// Start of the current chunk's data area.
	pointer
	_M_base() const throw()
	{ return _M_vbp[_M_curr_index].first; }

	// Offset, in blocks, of the current bitmap's first block
	// within the current chunk.
	_Index_type
	_M_offset() const throw()
	{
	  return size_t(bits_per_block)
	    * ((reinterpret_cast<size_t*>(this->_M_base())
		- _M_curr_bmap) - 1);
	}

	// Index of the current chunk in the block-pair vector.
	_Index_type
	_M_where() const throw()
	{ return _M_curr_index; }
      };

    /** @brief Mark a memory address as allocated by re-setting the
     *  corresponding bit in the bit-map.
515 */ 516 inline void 517 __bit_allocate(size_t* __pbmap, size_t __pos) throw() 518 { 519 size_t __mask = 1 << __pos; 520 __mask = ~__mask; 521 *__pbmap &= __mask; 522 } 523 524 /** @brief Mark a memory address as free by setting the 525 * corresponding bit in the bit-map. 526 */ 527 inline void 528 __bit_free(size_t* __pbmap, size_t __pos) throw() 529 { 530 size_t __mask = 1 << __pos; 531 *__pbmap |= __mask; 532 } 533 } // namespace __detail 534 535 /** @brief Generic Version of the bsf instruction. 536 */ 537 inline size_t 538 _Bit_scan_forward(size_t __num) 539 { return static_cast<size_t>(__builtin_ctzl(__num)); } 540 541 /** @class free_list bitmap_allocator.h bitmap_allocator.h 542 * 543 * @brief The free list class for managing chunks of memory to be 544 * given to and returned by the bitmap_allocator. 545 */ 546 class free_list 547 { 548 typedef size_t* value_type; 549 typedef __detail::__mini_vector<value_type> vector_type; 550 typedef vector_type::iterator iterator; 551 typedef __mutex __mutex_type; 552 553 struct _LT_pointer_compare 554 { 555 bool 556 operator()(const size_t* __pui, 557 const size_t __cui) const throw() 558 { return *__pui < __cui; } 559 }; 560 561 #if defined __GTHREADS 562 __mutex_type& 563 _M_get_mutex() 564 { 565 static __mutex_type _S_mutex; 566 return _S_mutex; 567 } 568 #endif 569 570 vector_type& 571 _M_get_free_list() 572 { 573 static vector_type _S_free_list; 574 return _S_free_list; 575 } 576 577 /** @brief Performs validation of memory based on their size. 578 * 579 * @param __addr The pointer to the memory block to be 580 * validated. 581 * 582 * @detail Validates the memory block passed to this function and 583 * appropriately performs the action of managing the free list of 584 * blocks by adding this block to the free list or deleting this 585 * or larger blocks from the free list. 
*/
    void
    _M_validate(size_t* __addr) throw()
    {
      vector_type& __free_list = _M_get_free_list();
      // At most 64 chunks are cached; beyond that, one chunk is
      // released back to the OS.
      const vector_type::size_type __max_size = 64;
      if (__free_list.size() >= __max_size)
	{
	  // Ok, the threshold value has been reached.  We determine
	  // which block to remove from the list of free blocks.
	  if (*__addr >= *__free_list.back())
	    {
	      // Ok, the new block is greater than or equal to the
	      // last block in the list of free blocks. We just free
	      // the new block.
	      ::operator delete(static_cast<void*>(__addr));
	      return;
	    }
	  else
	    {
	      // Deallocate the last block in the list of free lists,
	      // and insert the new one in its correct position.
	      ::operator delete(static_cast<void*>(__free_list.back()));
	      __free_list.pop_back();
	    }
	}

      // Just add the block to the list of free lists unconditionally.
      // The list is kept sorted by the size stored in each chunk's
      // first word, so _M_get can pick a best-sized chunk.
      iterator __temp = __gnu_cxx::__detail::__lower_bound
	(__free_list.begin(), __free_list.end(),
	 *__addr, _LT_pointer_compare());

      // We may insert the new free list before _temp;
      __free_list.insert(__temp, __addr);
    }

    /** @brief  Decides whether the wastage of memory is acceptable for
     *  the current memory request and returns accordingly.
     *
     *  @param  __block_size The size of the block available in the free
     *  list.
     *
     *  @param  __required_size The required size of the memory block.
     *
     *  @return  true if the wastage incurred is acceptable, else returns
     *  false.
     */
    bool
    _M_should_i_give(size_t __block_size,
		     size_t __required_size) throw()
    {
      // Hand out an existing chunk only when at most 36% of it would
      // go unused.
      const size_t __max_wastage_percentage = 36;
      if (__block_size >= __required_size &&
	  (((__block_size - __required_size) * 100 / __block_size)
	   < __max_wastage_percentage))
	return true;
      else
	return false;
    }

  public:
    /** @brief This function returns the block of memory to the
     *  internal free list.
     *
     *  @param  __addr The pointer to the memory block that was given
     *  by a call to the _M_get function.
     */
    inline void
    _M_insert(size_t* __addr) throw()
    {
#if defined __GTHREADS
      __gnu_cxx::__scoped_lock __bfl_lock(_M_get_mutex());
#endif
      // Call _M_validate to decide what should be done with
      // this particular free list.
      // Step back one word: _M_get stored the chunk size there.
      this->_M_validate(reinterpret_cast<size_t*>(__addr) - 1);
      // See discussion as to why this is 1!
    }

    /** @brief This function gets a block of memory of the specified
     *  size from the free list.
     *
     *  @param  __sz The size in bytes of the memory required.
     *
     *  @return  A pointer to the new memory block of size at least
     *  equal to that requested.
     */
    size_t*
    _M_get(size_t __sz) throw(std::bad_alloc);

    /** @brief This function just clears the internal Free List, and
     *  gives back all the memory to the OS.
     */
    void
    _M_clear();
  };


  // Forward declare the class.
  template<typename _Tp>
    class bitmap_allocator;

  // Specialize for void:
  template<>
    class bitmap_allocator<void>
    {
    public:
      typedef void* pointer;
      typedef const void* const_pointer;

      // Reference-to-void members are impossible.
      typedef void value_type;
      template<typename _Tp1>
	struct rebind
	{
	  typedef bitmap_allocator<_Tp1> other;
	};
    };

  /**
   *  @brief Bitmap Allocator, primary template.
*  @ingroup allocators
   */
  template<typename _Tp>
    class bitmap_allocator : private free_list
    {
    public:
      typedef size_t size_type;
      typedef ptrdiff_t difference_type;
      typedef _Tp* pointer;
      typedef const _Tp* const_pointer;
      typedef _Tp& reference;
      typedef const _Tp& const_reference;
      typedef _Tp value_type;
      typedef free_list::__mutex_type __mutex_type;

      template<typename _Tp1>
	struct rebind
	{
	  typedef bitmap_allocator<_Tp1> other;
	};

    private:
      // Rounds _BSize up to the next multiple of _AlignSize, at
      // compile time.
      template<size_t _BSize, size_t _AlignSize>
	struct aligned_size
	{
	  enum
	    {
	      modulus = _BSize % _AlignSize,
	      value = _BSize + (modulus ? _AlignSize - (modulus) : 0)
	    };
	};

      // One allocation unit: sizeof(value_type) rounded up to the
      // _BALLOC_ALIGN_BYTES boundary.
      struct _Alloc_block
      {
	char __M_unused[aligned_size<sizeof(value_type),
			_BALLOC_ALIGN_BYTES>::value];
      };


      // [first, last] addresses of one chunk's data blocks, bounds
      // inclusive.
      typedef typename std::pair<_Alloc_block*, _Alloc_block*> _Block_pair;

      typedef typename
      __detail::__mini_vector<_Block_pair> _BPVector;

#if defined _GLIBCXX_DEBUG
      // Complexity: O(lg(N)). Where, N is the number of block of size
      // sizeof(value_type).
      void
      _S_check_for_free_blocks() throw()
      {
	typedef typename
	  __gnu_cxx::__detail::_Ffit_finder<_Alloc_block*> _FFF;
	_FFF __fff;
	typedef typename _BPVector::iterator _BPiter;
	_BPiter __bpi =
	  __gnu_cxx::__detail::__find_if
	  (_S_mem_blocks.begin(), _S_mem_blocks.end(),
	   __gnu_cxx::__detail::_Functor_Ref<_FFF>(__fff));

	_GLIBCXX_DEBUG_ASSERT(__bpi == _S_mem_blocks.end());
      }
#endif

      /** @brief  Responsible for exponentially growing the internal
       *  memory pool.
       *
       *  @throw  std::bad_alloc. If memory can not be allocated.
       *
       *  @detail  Complexity: O(1), but internally depends upon the
       *  complexity of the function free_list::_M_get. The part where
       *  the bitmap headers are written has complexity: O(X),where X
       *  is the number of blocks of size sizeof(value_type) within
       *  the newly acquired block. Having a tight bound.
       */
      void
      _S_refill_pool() throw(std::bad_alloc)
      {
#if defined _GLIBCXX_DEBUG
	_S_check_for_free_blocks();
#endif

	// Chunk layout, low to high address:
	// [use-count word][__num_bitmaps bitmap words][data blocks].
	const size_t __num_bitmaps = (_S_block_size
				      / size_t(__detail::bits_per_block));
	const size_t __size_to_allocate = sizeof(size_t)
	  + _S_block_size * sizeof(_Alloc_block)
	  + __num_bitmaps * sizeof(size_t);

	size_t* __temp =
	  reinterpret_cast<size_t*>
	  (this->_M_get(__size_to_allocate));
	*__temp = 0;
	++__temp;

	// The Header information goes at the Beginning of the Block.
	_Block_pair __bp =
	  std::make_pair(reinterpret_cast<_Alloc_block*>
			 (__temp + __num_bitmaps),
			 reinterpret_cast<_Alloc_block*>
			 (__temp + __num_bitmaps)
			 + _S_block_size - 1);

	// Fill the Vector with this information.
	_S_mem_blocks.push_back(__bp);

	size_t __bit_mask = 0; // 0 Indicates all Allocated.
	__bit_mask = ~__bit_mask; // 1 Indicates all Free.

	for (size_t __i = 0; __i < __num_bitmaps; ++__i)
	  __temp[__i] = __bit_mask;

	// Each refill doubles the size of the next chunk.
	_S_block_size *= 2;
      }


      // Shared pool state; static, so every allocator instance of
      // this value_type uses the same pool.
      static _BPVector _S_mem_blocks;
      static size_t _S_block_size;
      static __gnu_cxx::__detail::
      _Bitmap_counter<_Alloc_block*> _S_last_request;
      static typename _BPVector::size_type _S_last_dealloc_index;
#if defined __GTHREADS
      static __mutex_type _S_mut;
#endif

    public:

      /** @brief  Allocates memory for a single object of size
       *  sizeof(_Tp).
       *
       *  @throw  std::bad_alloc. If memory can not be allocated.
       *
       *  @detail  Complexity: Worst case complexity is O(N), but that
       *  is hardly ever hit. If and when this particular case is
       *  encountered, the next few cases are guaranteed to have a
       *  worst case complexity of O(1)!
That's why this function
       *  performs very well on average. You can consider this
       *  function to have a complexity referred to commonly as:
       *  Amortized Constant time.
       */
      pointer
      _M_allocate_single_object() throw(std::bad_alloc)
      {
#if defined __GTHREADS
	__gnu_cxx::__scoped_lock __bit_lock(_S_mut);
#endif

	// The algorithm is something like this: The last_request
	// variable points to the last accessed Bit Map. When such a
	// condition occurs, we try to find a free block in the
	// current bitmap, or succeeding bitmaps until the last bitmap
	// is reached. If no free block turns up, we resort to First
	// Fit method.

	// WARNING: Do not re-order the condition in the while
	// statement below, because it relies on C++'s short-circuit
	// evaluation. The return from _S_last_request->_M_get() will
	// NOT be dereference able if _S_last_request->_M_finished()
	// returns true. This would inevitably lead to a NULL pointer
	// dereference if tinkered with.
	while (_S_last_request._M_finished() == false
	       && (*(_S_last_request._M_get()) == 0))
	  {
	    _S_last_request.operator++();
	  }

	if (__builtin_expect(_S_last_request._M_finished() == true, false))
	  {
	    // Fall Back to First Fit algorithm.
	    typedef typename
	      __gnu_cxx::__detail::_Ffit_finder<_Alloc_block*> _FFF;
	    _FFF __fff;
	    typedef typename _BPVector::iterator _BPiter;
	    _BPiter __bpi =
	      __gnu_cxx::__detail::__find_if
	      (_S_mem_blocks.begin(), _S_mem_blocks.end(),
	       __gnu_cxx::__detail::_Functor_Ref<_FFF>(__fff));

	    if (__bpi != _S_mem_blocks.end())
	      {
		// Search was successful. Ok, now mark the first bit from
		// the right as 0, meaning Allocated. This bit is obtained
		// by calling _M_get() on __fff.
		size_t __nz_bit = _Bit_scan_forward(*__fff._M_get());
		__detail::__bit_allocate(__fff._M_get(), __nz_bit);

		_S_last_request._M_reset(__bpi - _S_mem_blocks.begin());

		// Now, get the address of the bit we marked as allocated.
		pointer __ret = reinterpret_cast<pointer>
		  (__bpi->first + __fff._M_offset() + __nz_bit);
		// The use count lives one word below the chunk's
		// bitmaps (see _S_refill_pool's layout).
		size_t* __puse_count =
		  reinterpret_cast<size_t*>
		  (__bpi->first)
		  - (__gnu_cxx::__detail::__num_bitmaps(*__bpi) + 1);

		++(*__puse_count);
		return __ret;
	      }
	    else
	      {
		// Search was unsuccessful. We Add more memory to the
		// pool by calling _S_refill_pool().
		_S_refill_pool();

		// _M_Reset the _S_last_request structure to the first
		// free block's bit map.
		_S_last_request._M_reset(_S_mem_blocks.size() - 1);

		// Now, mark that bit as allocated.
	      }
	  }

	// _S_last_request holds a pointer to a valid bit map, that
	// points to a free block in memory.
	size_t __nz_bit = _Bit_scan_forward(*_S_last_request._M_get());
	__detail::__bit_allocate(_S_last_request._M_get(), __nz_bit);

	pointer __ret = reinterpret_cast<pointer>
	  (_S_last_request._M_base() + _S_last_request._M_offset() + __nz_bit);

	size_t* __puse_count = reinterpret_cast<size_t*>
	  (_S_mem_blocks[_S_last_request._M_where()].first)
	  - (__gnu_cxx::__detail::
	     __num_bitmaps(_S_mem_blocks[_S_last_request._M_where()]) + 1);

	++(*__puse_count);
	return __ret;
      }

      /** @brief  Deallocates memory that belongs to a single object of
       *  size sizeof(_Tp).
       *
       *  @detail  Complexity: O(lg(N)), but the worst case is not hit
       *  often! This is because containers usually deallocate memory
       *  close to each other and this case is handled in O(1) time by
       *  the deallocate function.
*/
      void
      _M_deallocate_single_object(pointer __p) throw()
      {
#if defined __GTHREADS
	__gnu_cxx::__scoped_lock __bit_lock(_S_mut);
#endif
	_Alloc_block* __real_p = reinterpret_cast<_Alloc_block*>(__p);

	typedef typename _BPVector::iterator _Iterator;
	typedef typename _BPVector::difference_type _Difference_type;

	_Difference_type __diff;
	long __displacement;

	_GLIBCXX_DEBUG_ASSERT(_S_last_dealloc_index >= 0);

	// Fast path: assume the pointer lives in the same chunk as
	// the previous deallocation.
	if (__gnu_cxx::__detail::_Inclusive_between<_Alloc_block*>
	    (__real_p) (_S_mem_blocks[_S_last_dealloc_index]))
	  {
	    _GLIBCXX_DEBUG_ASSERT(_S_last_dealloc_index
				  <= _S_mem_blocks.size() - 1);

	    // Initial Assumption was correct!
	    __diff = _S_last_dealloc_index;
	    __displacement = __real_p - _S_mem_blocks[__diff].first;
	  }
	else
	  {
	    // Slow path: linear search for the chunk containing the
	    // pointer, then cache its index for next time.
	    _Iterator _iter = __gnu_cxx::__detail::
	      __find_if(_S_mem_blocks.begin(),
			_S_mem_blocks.end(),
			__gnu_cxx::__detail::
			_Inclusive_between<_Alloc_block*>(__real_p));

	    _GLIBCXX_DEBUG_ASSERT(_iter != _S_mem_blocks.end());

	    __diff = _iter - _S_mem_blocks.begin();
	    __displacement = __real_p - _S_mem_blocks[__diff].first;
	    _S_last_dealloc_index = __diff;
	  }

	// Get the position of the iterator that has been found.
	const size_t __rotate = (__displacement
				 % size_t(__detail::bits_per_block));
	size_t* __bitmapC =
	  reinterpret_cast<size_t*>
	  (_S_mem_blocks[__diff].first) - 1;
	__bitmapC -= (__displacement / size_t(__detail::bits_per_block));

	__detail::__bit_free(__bitmapC, __rotate);
	size_t* __puse_count = reinterpret_cast<size_t*>
	  (_S_mem_blocks[__diff].first)
	  - (__gnu_cxx::__detail::__num_bitmaps(_S_mem_blocks[__diff]) + 1);

	_GLIBCXX_DEBUG_ASSERT(*__puse_count != 0);

	--(*__puse_count);

	if (__builtin_expect(*__puse_count == 0, false))
	  {
	    _S_block_size /= 2;

	    // We can safely remove this block.
	    // _Block_pair __bp = _S_mem_blocks[__diff];
	    this->_M_insert(__puse_count);
	    _S_mem_blocks.erase(_S_mem_blocks.begin() + __diff);

	    // Reset the _S_last_request variable to reflect the
	    // erased block. We do this to protect future requests
	    // after the last block has been removed from a particular
	    // memory Chunk, which in turn has been returned to the
	    // free list, and hence had been erased from the vector,
	    // so the size of the vector gets reduced by 1.
	    if ((_Difference_type)_S_last_request._M_where() >= __diff--)
	      _S_last_request._M_reset(__diff);

	    // If the Index into the vector of the region of memory
	    // that might hold the next address that will be passed to
	    // deallocated may have been invalidated due to the above
	    // erase procedure being called on the vector, hence we
	    // try to restore this invariant too.
	    if (_S_last_dealloc_index >= _S_mem_blocks.size())
	      {
		_S_last_dealloc_index =(__diff != -1 ? __diff : 0);
		_GLIBCXX_DEBUG_ASSERT(_S_last_dealloc_index >= 0);
	      }
	  }
      }

    public:
      bitmap_allocator() throw()
      { }

      bitmap_allocator(const bitmap_allocator&)
      { }

      template<typename _Tp1>
	bitmap_allocator(const bitmap_allocator<_Tp1>&) throw()
	{ }

      ~bitmap_allocator() throw()
      { }

      // Only single-object requests (__n == 1) use the bitmap pool;
      // larger requests go straight to ::operator new.
      pointer
      allocate(size_type __n)
      {
	if (__builtin_expect(__n > this->max_size(), false))
	  std::__throw_bad_alloc();

	if (__builtin_expect(__n == 1, true))
	  return this->_M_allocate_single_object();
	else
	  {
	    const size_type __b = __n * sizeof(value_type);
	    return reinterpret_cast<pointer>(::operator new(__b));
	  }
      }

      // The hint parameter is ignored.
      pointer
      allocate(size_type __n, typename bitmap_allocator<void>::const_pointer)
      { return allocate(__n); }

      void
      deallocate(pointer __p, size_type __n) throw()
      {
	if (__builtin_expect(__p != 0, true))
	  {
	    if (__builtin_expect(__n == 1, true))
	      this->_M_deallocate_single_object(__p);
	    else
	      ::operator delete(__p);
	  }
      }

      pointer
      address(reference __r) const
      { return &__r; }

      const_pointer
      address(const_reference __r) const
      { return &__r; }

      size_type
      max_size() const throw()
      { return size_type(-1) / sizeof(value_type); }

      void
      construct(pointer __p, const_reference __data)
      { ::new((void *)__p) value_type(__data); }

#ifdef __GXX_EXPERIMENTAL_CXX0X__
      template<typename... _Args>
	void
	construct(pointer __p, _Args&&... __args)
	{ ::new((void *)__p) _Tp(std::forward<_Args>(__args)...); }
#endif

      void
      destroy(pointer __p)
      { __p->~value_type(); }
    };

  // All instances share the same static pool, so any two allocators
  // (of any value_type) compare equal.
  template<typename _Tp1, typename _Tp2>
    bool
    operator==(const bitmap_allocator<_Tp1>&,
	       const bitmap_allocator<_Tp2>&) throw()
    { return true; }

  template<typename _Tp1, typename _Tp2>
    bool
    operator!=(const bitmap_allocator<_Tp1>&,
	       const bitmap_allocator<_Tp2>&) throw()
    { return false; }

  // Static member definitions.
  template<typename _Tp>
    typename bitmap_allocator<_Tp>::_BPVector
    bitmap_allocator<_Tp>::_S_mem_blocks;

  template<typename _Tp>
    size_t bitmap_allocator<_Tp>::_S_block_size =
    2 * size_t(__detail::bits_per_block);

  template<typename _Tp>
    typename __gnu_cxx::bitmap_allocator<_Tp>::_BPVector::size_type
    bitmap_allocator<_Tp>::_S_last_dealloc_index = 0;

  template<typename _Tp>
    __gnu_cxx::__detail::_Bitmap_counter
    <typename bitmap_allocator<_Tp>::_Alloc_block*>
    bitmap_allocator<_Tp>::_S_last_request(_S_mem_blocks);

#if defined __GTHREADS
  template<typename _Tp>
    typename bitmap_allocator<_Tp>::__mutex_type
    bitmap_allocator<_Tp>::_S_mut;
#endif

_GLIBCXX_END_NAMESPACE

#endif