Home | History | Annotate | Download | only in stl
      1 /*
      2  *
      3  * Copyright (c) 1996,1997
      4  * Silicon Graphics Computer Systems, Inc.
      5  *
      6  * Copyright (c) 1997
      7  * Moscow Center for SPARC Technology
      8  *
      9  * Copyright (c) 1999
     10  * Boris Fomitchev
     11  *
     12  * This material is provided "as is", with absolutely no warranty expressed
     13  * or implied. Any use is at your own risk.
     14  *
     15  * Permission to use or copy this software for any purpose is hereby granted
     16  * without fee, provided the above notices are retained on all copies.
     17  * Permission to modify the code and to distribute modified code is granted,
     18  * provided the above notices are retained, and a notice that the code was
     19  * modified is included with the above copyright notice.
     20  *
     21  */
     22 
     23 /* NOTE: This is an internal header file, included by other STL headers.
     24  *   You should not attempt to use it directly.
     25  */
     26 
     27 // rope<_CharT,_Alloc> is a sequence of _CharT.
     28 // Ropes appear to be mutable, but update operations
     29 // really copy enough of the data structure to leave the original
     30 // valid.  Thus ropes can be logically copied by just copying
     31 // a pointer value.
     32 
     33 #ifndef _STLP_INTERNAL_ROPE_H
     34 #define _STLP_INTERNAL_ROPE_H
     35 
     36 #ifndef _STLP_INTERNAL_ALGOBASE_H
     37 #  include <stl/_algobase.h>
     38 #endif
     39 
     40 #if !defined (_STLP_USE_NO_IOSTREAMS) && !defined (_STLP_INTERNAL_IOSFWD)
     41 #  include <stl/_iosfwd.h>
     42 #endif
     43 
     44 #ifndef _STLP_INTERNAL_ALLOC_H
     45 #  include <stl/_alloc.h>
     46 #endif
     47 
     48 #ifndef _STLP_INTERNAL_ITERATOR_H
     49 #  include <stl/_iterator.h>
     50 #endif
     51 
     52 #ifndef _STLP_INTERNAL_ALGO_H
     53 #  include <stl/_algo.h>
     54 #endif
     55 
     56 #ifndef _STLP_INTERNAL_FUNCTION_BASE_H
     57 #  include <stl/_function_base.h>
     58 #endif
     59 
     60 #ifndef _STLP_INTERNAL_NUMERIC_H
     61 #  include <stl/_numeric.h>
     62 #endif
     63 
     64 #ifndef _STLP_INTERNAL_HASH_FUN_H
     65 #  include <stl/_hash_fun.h>
     66 #endif
     67 
     68 #ifndef _STLP_CHAR_TRAITS_H
     69 #  include <stl/char_traits.h>
     70 #endif
     71 
     72 #ifndef _STLP_INTERNAL_THREADS_H
     73 #  include <stl/_threads.h>
     74 #endif
     75 
     76 #ifdef _STLP_SGI_THREADS
     77 #  include <mutex.h>
     78 #endif
     79 
     80 #ifndef _STLP_DONT_SUPPORT_REBIND_MEMBER_TEMPLATE
     81 #  define _STLP_CREATE_ALLOCATOR(__atype,__a, _Tp) (_Alloc_traits<_Tp,__atype>::create_allocator(__a))
     82 #else
     83 #  define _STLP_CREATE_ALLOCATOR(__atype,__a, _Tp) __stl_alloc_create(__a,(_Tp*)0)
     84 #endif
     85 
     86 _STLP_BEGIN_NAMESPACE
     87 
     88 // First a lot of forward declarations.  The standard seems to require
     89 // much stricter "declaration before use" than many of the implementations
     90 // that preceded it.
     91 template<class _CharT, _STLP_DFL_TMPL_PARAM(_Alloc, allocator<_CharT>) > class rope;
     92 template<class _CharT, class _Alloc> struct _Rope_RopeConcatenation;
     93 template<class _CharT, class _Alloc> struct _Rope_RopeRep;
     94 template<class _CharT, class _Alloc> struct _Rope_RopeLeaf;
     95 template<class _CharT, class _Alloc> struct _Rope_RopeFunction;
     96 template<class _CharT, class _Alloc> struct _Rope_RopeSubstring;
     97 template<class _CharT, class _Alloc> class _Rope_iterator;
     98 template<class _CharT, class _Alloc> class _Rope_const_iterator;
     99 template<class _CharT, class _Alloc> class _Rope_char_ref_proxy;
    100 template<class _CharT, class _Alloc> class _Rope_char_ptr_proxy;
    101 
    102 _STLP_MOVE_TO_PRIV_NAMESPACE
    103 
// Compile-time trait: _Ret is __true_type iff _CharT is a basic character
// type (char, or wchar_t when the platform has it).  Other rope code
// dispatches on this, e.g. to store a trailing eos character.
template <class _CharT>
struct _BasicCharType { typedef __false_type _Ret; };

_STLP_TEMPLATE_NULL
struct _BasicCharType<char> { typedef __true_type _Ret; };

#ifdef _STLP_HAS_WCHAR_T
_STLP_TEMPLATE_NULL
struct _BasicCharType<wchar_t> { typedef __true_type _Ret; };
#endif
    114 
    115 // Some helpers, so we can use the power algorithm on ropes.
    116 // See below for why this isn't local to the implementation.
    117 
    118 // This uses a nonstandard refcount convention.
    119 // The result has refcount 0.
// Binary functor that concatenates two ropes.  Together with
// __identity_element below, this lets the generic power algorithm be
// used on ropes (e.g. to build n-fold repetitions).
template<class _CharT, class _Alloc>
struct _Rope_Concat_fn
  : public binary_function<rope<_CharT,_Alloc>, rope<_CharT,_Alloc>,
                           rope<_CharT,_Alloc> > {
  rope<_CharT,_Alloc> operator() (const rope<_CharT,_Alloc>& __x,
                                  const rope<_CharT,_Alloc>& __y) {
    return __x + __y;
  }
};
    129 
// Identity element for rope concatenation: the empty rope.  Required by
// the power algorithm as the neutral starting value.
template <class _CharT, class _Alloc>
inline
rope<_CharT,_Alloc>
__identity_element(_Rope_Concat_fn<_CharT, _Alloc>)
{ return rope<_CharT,_Alloc>(); }
    135 
    136 _STLP_MOVE_TO_STD_NAMESPACE
    137 
    138 // Store an eos
// Integral character type: the eos is simply the zero value.
template <class _CharT>
inline void _S_construct_null_aux(_CharT *__p, const __true_type&)
{ *__p = 0; }

// Non-integral character type: default-construct the eos in place.
template <class _CharT>
inline void _S_construct_null_aux(_CharT *__p, const __false_type&)
{ _STLP_STD::_Construct(__p); }

// Store a terminating eos at *__p, dispatching at compile time on
// whether _CharT is an integral type (assignable from 0) or needs
// a real in-place construction.
template <class _CharT>
inline void _S_construct_null(_CharT *__p) {
  typedef typename _IsIntegral<_CharT>::_Ret _Char_Is_Integral;
  _S_construct_null_aux(__p, _Char_Is_Integral());
}
    152 
    153 // char_producers are logically functions that generate a section of
    154 // a string.  These can be converted to ropes.  The resulting rope
    155 // invokes the char_producer on demand.  This allows, for example,
    156 // files to be viewed as ropes without reading the entire file.
// Abstract source of characters.  A rope built from a char_producer
// calls operator() on demand to materialize sections of the string.
template <class _CharT>
class char_producer {
public:
  virtual ~char_producer() {}
  // Fill __buffer with __len characters of the produced string,
  // starting at logical position __start_pos.
  virtual void operator()(size_t __start_pos, size_t __len,
                          _CharT* __buffer) = 0;
  // Buffer should really be an arbitrary output iterator.
  // That way we could flatten directly into an ostream, etc.
  // This is thoroughly impossible, since iterator types don't
  // have runtime descriptions.
};
    168 
    169 // Sequence buffers:
    170 //
    171 // Sequence must provide an append operation that appends an
    172 // array to the sequence.  Sequence buffers are useful only if
    173 // appending an entire array is cheaper than appending element by element.
    174 // This is true for many string representations.
    175 // This should  perhaps inherit from ostream<sequence::value_type>
    176 // and be implemented correspondingly, so that they can be used
    177 // for formatted.  For the sake of portability, we don't do this yet.
    178 //
    179 // For now, sequence buffers behave as output iterators.  But they also
    180 // behave a little like basic_ostringstream<sequence::value_type> and a
    181 // little like containers.
    182 
    183 template<class _Sequence
    184 # if !(defined (_STLP_NON_TYPE_TMPL_PARAM_BUG) || \
    185        defined ( _STLP_NO_DEFAULT_NON_TYPE_PARAM ))
    186          , size_t _Buf_sz = 100
    187 #   if defined(__sgi) && !defined(__GNUC__)
    188 #   define __TYPEDEF_WORKAROUND
    189          ,class _V = typename _Sequence::value_type
    190 #   endif /* __sgi */
    191 # endif /* _STLP_NON_TYPE_TMPL_PARAM_BUG */
    192          >
    193 // The 3rd parameter works around a common compiler bug.
    194 class sequence_buffer : public iterator <output_iterator_tag, void, void, void, void> {
    195 public:
    196 # ifndef __TYPEDEF_WORKAROUND
    197   typedef typename _Sequence::value_type value_type;
    198   typedef sequence_buffer<_Sequence
    199 # if !(defined (_STLP_NON_TYPE_TMPL_PARAM_BUG) || \
    200        defined ( _STLP_NO_DEFAULT_NON_TYPE_PARAM ))
    201   , _Buf_sz
    202   > _Self;
    203 # else /* _STLP_NON_TYPE_TMPL_PARAM_BUG */
    204   > _Self;
    205   enum { _Buf_sz = 100};
    206 # endif /* _STLP_NON_TYPE_TMPL_PARAM_BUG */
    207   // # endif
    208 # else /* __TYPEDEF_WORKAROUND */
    209   typedef _V value_type;
    210   typedef sequence_buffer<_Sequence, _Buf_sz, _V> _Self;
    211 # endif /* __TYPEDEF_WORKAROUND */
    212 protected:
    213   _Sequence* _M_prefix;
    214   value_type _M_buffer[_Buf_sz];
    215   size_t     _M_buf_count;
    216 public:
    217   void flush() {
    218     _M_prefix->append(_M_buffer, _M_buffer + _M_buf_count);
    219     _M_buf_count = 0;
    220   }
    221   ~sequence_buffer() { flush(); }
    222   sequence_buffer() : _M_prefix(0), _M_buf_count(0) {}
    223   sequence_buffer(const _Self& __x) {
    224     _M_prefix = __x._M_prefix;
    225     _M_buf_count = __x._M_buf_count;
    226     _STLP_STD::copy(__x._M_buffer, __x._M_buffer + __x._M_buf_count, _M_buffer);
    227   }
    228   sequence_buffer(_Self& __x) {
    229     __x.flush();
    230     _M_prefix = __x._M_prefix;
    231     _M_buf_count = 0;
    232   }
    233   sequence_buffer(_Sequence& __s) : _M_prefix(&__s), _M_buf_count(0) {}
    234   _Self& operator= (_Self& __x) {
    235     __x.flush();
    236     _M_prefix = __x._M_prefix;
    237     _M_buf_count = 0;
    238     return *this;
    239   }
    240   _Self& operator= (const _Self& __x) {
    241     _M_prefix = __x._M_prefix;
    242     _M_buf_count = __x._M_buf_count;
    243     _STLP_STD::copy(__x._M_buffer, __x._M_buffer + __x._M_buf_count, _M_buffer);
    244     return *this;
    245   }
    246   void push_back(value_type __x) {
    247     if (_M_buf_count < _Buf_sz) {
    248       _M_buffer[_M_buf_count] = __x;
    249       ++_M_buf_count;
    250     } else {
    251       flush();
    252       _M_buffer[0] = __x;
    253       _M_buf_count = 1;
    254     }
    255   }
    256   void append(const value_type *__s, size_t __len) {
    257     if (__len + _M_buf_count <= _Buf_sz) {
    258       size_t __i = _M_buf_count;
    259       size_t __j = 0;
    260       for (; __j < __len; __i++, __j++) {
    261         _M_buffer[__i] = __s[__j];
    262       }
    263       _M_buf_count += __len;
    264     } else if (0 == _M_buf_count) {
    265       _M_prefix->append(__s, __s + __len);
    266     } else {
    267       flush();
    268       append(__s, __len);
    269     }
    270   }
    271   _Self& write(const value_type *__s, size_t __len) {
    272     append(__s, __len);
    273     return *this;
    274   }
    275   _Self& put(value_type __x) {
    276     push_back(__x);
    277     return *this;
    278   }
    279   _Self& operator=(const value_type& __rhs) {
    280     push_back(__rhs);
    281     return *this;
    282   }
    283   _Self& operator*() { return *this; }
    284   _Self& operator++() { return *this; }
    285   _Self& operator++(int) { return *this; }
    286 };
    287 
    288 // The following should be treated as private, at least for now.
// Abstract sink for rope traversal: a traversal hands the consumer
// successive chunks of the rope's characters.  When member templates are
// available the consumer is passed as a template parameter instead, so
// this class body is empty in that configuration.
template<class _CharT>
class _Rope_char_consumer {
#if !defined (_STLP_MEMBER_TEMPLATES)
public:
  //Without member templates we have to use run-time parameterization.
  // The symmetry with char_producer is accidental and temporary.
  virtual ~_Rope_char_consumer() {}
  // Process __len characters starting at __buffer.
  // NOTE(review): the bool result presumably signals whether to continue
  // the traversal -- confirm against the traversal code that calls this.
  virtual bool operator()(const _CharT* __buffer, size_t __len) = 0;
#endif
};
    299 
    300 //
    301 // What follows should really be local to rope.  Unfortunately,
    302 // that doesn't work, since it makes it impossible to define generic
    303 // equality on rope iterators.  According to the draft standard, the
    304 // template parameters for such an equality operator cannot be inferred
    305 // from the occurence of a member class as a parameter.
    306 // (SGI compilers in fact allow this, but the __result wouldn't be
    307 // portable.)
    308 // Similarly, some of the static member functions are member functions
    309 // only to avoid polluting the global namespace, and to circumvent
    310 // restrictions on type inference for template functions.
    311 //
    312 
    313 //
    314 // The internal data structure for representing a rope.  This is
    315 // private to the implementation.  A rope is really just a pointer
    316 // to one of these.
    317 //
    318 // A few basic functions for manipulating this data structure
    319 // are members of _RopeRep.  Most of the more complex algorithms
    320 // are implemented as rope members.
    321 //
    322 // Some of the static member functions of _RopeRep have identically
    323 // named functions in rope that simply invoke the _RopeRep versions.
    324 //
    325 
// Base representation of a rope tree node.  Concrete node kinds (leaf,
// concatenation, function, substring) derive from this; _M_tag records
// which one a given node is.  Nodes are shared between ropes and
// reference-counted via _Refcount_Base.
template<class _CharT, class _Alloc>
struct _Rope_RopeRep
  : public _Refcount_Base
{
  typedef _Rope_RopeRep<_CharT, _Alloc> _Self;
public:
  //
  // GAB: 11/09/05
  //
  // "__ROPE_DEPTH_SIZE" is set to one more than the "__ROPE_MAX_DEPTH".
  // This was originally just an addition of "__ROPE_MAX_DEPTH + 1"
  // but this addition causes the sunpro compiler to complain about
  // multiple declarations during the initialization of "_S_min_len".
  // Changed to be a fixed value and the sunpro compiler appears to
  // be happy???
  //
#  define __ROPE_MAX_DEPTH  45
#  define __ROPE_DEPTH_SIZE 46 // __ROPE_MAX_DEPTH + 1
  enum { _S_max_rope_depth = __ROPE_MAX_DEPTH };
  // Discriminator for the concrete node type derived from this rep.
  enum _Tag {_S_leaf, _S_concat, _S_substringfn, _S_function};
  // Apparently needed by VC++
  // The data fields of leaves are allocated with some
  // extra space, to accommodate future growth and for basic
  // character types, to hold a trailing eos character.
  enum { _S_alloc_granularity = 8 };

  _Tag _M_tag:8;          // Concrete node kind (see _Tag).
  bool _M_is_balanced:8;  // True if this subtree is known to be balanced.

  _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
  typedef _Alloc allocator_type;

  // The allocator lives inside the _M_size proxy (see below).
  allocator_type get_allocator() const { return allocator_type(_M_size);  }

  unsigned char _M_depth;  // Tree depth of this node; leaves are constructed with depth 0.
  _CharT* _STLP_VOLATILE _M_c_string;  // Lazily built flat copy; see comment block below.
  // Proxy bundling the allocator with this node's length, _M_size._M_data.
  _STLP_PRIV _STLP_alloc_proxy<size_t, _CharT, allocator_type> _M_size;

#ifdef _STLP_NO_ARROW_OPERATOR
  _Rope_RopeRep() : _Refcount_Base(1), _M_size(allocator_type(), 0) {
#  if defined (_STLP_CHECK_RUNTIME_COMPATIBILITY)
    _STLP_CHECK_RUNTIME_COMPATIBILITY();
#  endif
  }
#endif

  /* Flattened version of string, if needed.  */
  /* typically 0.                             */
  /* If it's not 0, then the memory is owned  */
  /* by this node.                            */
  /* In the case of a leaf, this may point to */
  /* the same memory as the data field.       */
  // Constructs a node with reference count 1 and no flattened string.
  _Rope_RopeRep(_Tag __t, unsigned char __d, bool __b, size_t _p_size,
                allocator_type __a) :
    _Refcount_Base(1),
    _M_tag(__t), _M_is_balanced(__b), _M_depth(__d), _M_c_string(0), _M_size(__a, _p_size) {
#if defined (_STLP_CHECK_RUNTIME_COMPATIBILITY)
    _STLP_CHECK_RUNTIME_COMPATIBILITY();
#endif
    }

  typedef _STLP_TYPENAME _STLP_PRIV _BasicCharType<_CharT>::_Ret _IsBasicCharType;

#if 0
  /* Please tell why this code is necessary if you uncomment it.
   * Problem with it is that rope implementation expect that _S_rounded_up_size(n)
   * returns a size > n in order to store the terminating null charater. When
   * instanciation type is not a char or wchar_t this is not guaranty resulting in
   * memory overrun.
   */
  static size_t _S_rounded_up_size_aux(size_t __n, __true_type const& /*_IsBasicCharType*/) {
    // Allow slop for in-place expansion.
    return (__n + _S_alloc_granularity) & ~(_S_alloc_granularity - 1);
  }

  static size_t _S_rounded_up_size_aux(size_t __n, __false_type const& /*_IsBasicCharType*/) {
    // Allow slop for in-place expansion.
    return (__n + _S_alloc_granularity - 1) & ~(_S_alloc_granularity - 1);
  }
#endif
  // fbp : moved from RopeLeaf
  // Round a leaf's allocation size up to the granularity, always leaving
  // at least one extra slot past __n (room for the trailing eos and for
  // in-place growth).
  static size_t _S_rounded_up_size(size_t __n)
  //{ return _S_rounded_up_size_aux(__n, _IsBasicCharType()); }
  { return (__n + _S_alloc_granularity) & ~(_S_alloc_granularity - 1); }

  // Destroy __len characters at __s and deallocate the array, whose real
  // allocated size is _S_rounded_up_size(__len).
  static void _S_free_string( _CharT* __s, size_t __len,
                             allocator_type __a) {
    _STLP_STD::_Destroy_Range(__s, __s + __len);
    //  This has to be a static member, so this gets a bit messy
#   ifndef _STLP_DONT_SUPPORT_REBIND_MEMBER_TEMPLATE
    __a.deallocate(__s, _S_rounded_up_size(__len));    //*ty 03/24/2001 - restored not to use __stl_alloc_rebind() since it is not defined under _STLP_MEMBER_TEMPLATE_CLASSES
#   else
    __stl_alloc_rebind (__a, (_CharT*)0).deallocate(__s, _S_rounded_up_size(__len));
#   endif
  }

  // Deallocate data section of a leaf.
  // This shouldn't be a member function.
  // But its hard to do anything else at the
  // moment, because it's templatized w.r.t.
  // an allocator.
  // Does nothing if __GC is defined.
  void _M_free_c_string();
  void _M_free_tree();
  // Deallocate t. Assumes t is not 0.
  // Drop one reference; free the whole subtree when it was the last one.
  void _M_unref_nonnil() {
    if (_M_decr() == 0) _M_free_tree();
  }
  void _M_ref_nonnil() {
    _M_incr();
  }
  // Null-tolerant variants of the two functions above.
  static void _S_unref(_Self* __t) {
    if (0 != __t) {
      __t->_M_unref_nonnil();
    }
  }
  static void _S_ref(_Self* __t) {
    if (0 != __t) __t->_M_incr();
  }
  //static void _S_free_if_unref(_Self* __t) {
  //  if (0 != __t && 0 == __t->_M_ref_count) __t->_M_free_tree();
  //}
};
    449 
// Leaf node: owns a flat array of characters.
template<class _CharT, class _Alloc>
struct _Rope_RopeLeaf : public _Rope_RopeRep<_CharT,_Alloc> {
public:
  _CharT* _M_data; /* Not necessarily 0 terminated. */
                                /* The allocated size is         */
                                /* _S_rounded_up_size(size), except */
                                /* in the GC case, in which it   */
                                /* doesn't matter.               */
private:
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
  typedef typename _RopeRep::_IsBasicCharType _IsBasicCharType;
  // For basic character types, the data array itself serves as the
  // flattened c-string (the rounded-up allocation leaves room for an eos).
  void _M_init(__true_type const& /*_IsBasicCharType*/) {
    this->_M_c_string = _M_data;
  }
  void _M_init(__false_type const& /*_IsBasicCharType*/) {}

public:
  _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
  typedef typename _RopeRep::allocator_type allocator_type;

  // Takes ownership of __d, an array of _p_size (> 0) characters.
  // Leaves are created balanced with depth 0.
  _Rope_RopeLeaf( _CharT* __d, size_t _p_size, allocator_type __a)
    : _Rope_RopeRep<_CharT,_Alloc>(_RopeRep::_S_leaf, 0, true, _p_size, __a),
      _M_data(__d) {
    _STLP_ASSERT(_p_size > 0)
    _M_init(_IsBasicCharType());
  }

# ifdef _STLP_NO_ARROW_OPERATOR
  _Rope_RopeLeaf() {}
  _Rope_RopeLeaf(const _Rope_RopeLeaf<_CharT, _Alloc>& ) {}
# endif

// The constructor assumes that d has been allocated with
  // the proper allocator and the properly padded size.
  // In contrast, the destructor deallocates the data:
  ~_Rope_RopeLeaf() {
    // Free a separately allocated flattened string first (if the c-string
    // aliases _M_data, _S_free_string below releases both at once).
    if (_M_data != this->_M_c_string) {
      this->_M_free_c_string();
    }
    _RopeRep::_S_free_string(_M_data, this->_M_size._M_data, this->get_allocator());
  }
};
    492 
// Interior node: the concatenation of two child ropes.  Its length is the
// sum of the children's lengths and its depth is one more than the deeper
// child's.  The destructor releases one reference on each child.
template<class _CharT, class _Alloc>
struct _Rope_RopeConcatenation : public _Rope_RopeRep<_CharT, _Alloc> {
private:
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;

public:
  _RopeRep* _M_left;
  _RopeRep* _M_right;
  _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
  typedef typename _RopeRep::allocator_type allocator_type;
  // Note: the constructor does not increment the children's refcounts,
  // but the destructor decrements them -- the node consumes one
  // reference to each child supplied by the caller.
  _Rope_RopeConcatenation(_RopeRep* __l, _RopeRep* __r, allocator_type __a)
    : _Rope_RopeRep<_CharT,_Alloc>(_RopeRep::_S_concat,
                                   (max)(__l->_M_depth, __r->_M_depth) + 1, false,
                                   __l->_M_size._M_data + __r->_M_size._M_data, __a), _M_left(__l), _M_right(__r)
  {}
# ifdef _STLP_NO_ARROW_OPERATOR
  _Rope_RopeConcatenation() {}
  _Rope_RopeConcatenation(const _Rope_RopeConcatenation<_CharT, _Alloc>&) {}
# endif

  ~_Rope_RopeConcatenation() {
    this->_M_free_c_string();
    _M_left->_M_unref_nonnil();
    _M_right->_M_unref_nonnil();
  }
};
    519 
// Node whose characters are produced lazily by a char_producer instead of
// being stored.  Lets e.g. a file be viewed as a rope without reading it.
template <class _CharT, class _Alloc>
struct _Rope_RopeFunction : public _Rope_RopeRep<_CharT, _Alloc> {
private:
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
public:
  char_producer<_CharT>* _M_fn;
  /*
   * Char_producer is owned by the
   * rope and should be explicitly
   * deleted when the rope becomes
   * inaccessible.
   */
  bool _M_delete_when_done;  // If true, the destructor deletes _M_fn.
  _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
  typedef typename _Rope_RopeRep<_CharT,_Alloc>::allocator_type allocator_type;
# ifdef _STLP_NO_ARROW_OPERATOR
  _Rope_RopeFunction() {}
  _Rope_RopeFunction(const _Rope_RopeFunction<_CharT, _Alloc>& ) {}
# endif

  // __f produces the node's _p_size (> 0) characters; __d selects whether
  // this node owns (and will delete) __f.
  _Rope_RopeFunction(char_producer<_CharT>* __f, size_t _p_size,
                     bool __d, allocator_type __a)
    : _Rope_RopeRep<_CharT,_Alloc>(_RopeRep::_S_function, 0, true, _p_size, __a), _M_fn(__f)
    , _M_delete_when_done(__d)
  { _STLP_ASSERT(_p_size > 0) }

  ~_Rope_RopeFunction() {
    this->_M_free_c_string();
    if (_M_delete_when_done) {
      delete _M_fn;
    }
  }
};
    553 
    554 /*
    555  * Substring results are usually represented using just
    556  * concatenation nodes.  But in the case of very long flat ropes
    557  * or ropes with a functional representation that isn't practical.
    558  * In that case, we represent the __result as a special case of
    559  * RopeFunction, whose char_producer points back to the rope itself.
    560  * In all cases except repeated substring operations and
    561  * deallocation, we treat the __result as a RopeFunction.
    562  */
template<class _CharT, class _Alloc>
struct _Rope_RopeSubstring : public char_producer<_CharT>, public _Rope_RopeFunction<_CharT,_Alloc> {
public:
  // XXX this whole class should be rewritten.
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
  _RopeRep *_M_base;      // not 0
  size_t _M_start;        // Offset of this substring within *_M_base.
  // char_producer interface: produce __req_len characters of the substring
  // starting at __start_pos, by delegating to the base node (offset by
  // _M_start).  Only function/substring and leaf bases are supported here;
  // other representations are excluded before this node type is chosen.
  /* virtual */ void operator()(size_t __start_pos, size_t __req_len,
                                _CharT* __buffer) {
    typedef _Rope_RopeFunction<_CharT,_Alloc> _RopeFunction;
    typedef _Rope_RopeLeaf<_CharT,_Alloc> _RopeLeaf;
    switch (_M_base->_M_tag) {
    case _RopeRep::_S_function:
    case _RopeRep::_S_substringfn:
      {
        // Forward to the base's producer with the start shifted by _M_start.
        char_producer<_CharT>* __fn =
          __STATIC_CAST(_RopeFunction*, _M_base)->_M_fn;
        _STLP_ASSERT(__start_pos + __req_len <= this->_M_size._M_data)
        _STLP_ASSERT(_M_start + this->_M_size._M_data <= _M_base->_M_size._M_data)
        (*__fn)(__start_pos + _M_start, __req_len, __buffer);
      }
      break;
    case _RopeRep::_S_leaf:
      {
        // Copy directly out of the leaf's character array.
        _CharT* __s =
          __STATIC_CAST(_RopeLeaf*, _M_base)->_M_data;
        _STLP_PRIV __ucopy_n(__s + __start_pos + _M_start, __req_len, __buffer);
      }
      break;
    default:
      _STLP_ASSERT(false)
        ;
    }
  }

  _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
  typedef typename _RopeRep::allocator_type allocator_type;

  // A substring of __b of length __l (> 0) starting at offset __s.
  // Constructed as a RopeFunction whose producer is this object itself;
  // the tag is then overwritten to _S_substringfn.  Takes a new reference
  // on the base node, released in the destructor.
  _Rope_RopeSubstring(_RopeRep* __b, size_t __s, size_t __l, allocator_type __a)
    : _Rope_RopeFunction<_CharT,_Alloc>(this, __l, false, __a),
      _M_base(__b), _M_start(__s) {
    _STLP_ASSERT(__l > 0)
    _STLP_ASSERT(__s + __l <= __b->_M_size._M_data)
    _M_base->_M_ref_nonnil();
    this->_M_tag = _RopeRep::_S_substringfn;
  }
  virtual ~_Rope_RopeSubstring()
  { _M_base->_M_unref_nonnil(); }
};
    612 
    613 /*
    614  * Self-destructing pointers to Rope_rep.
    615  * These are not conventional smart pointers.  Their
    616  * only purpose in life is to ensure that unref is called
    617  * on the pointer either at normal exit or if an exception
    618  * is raised.  It is the caller's responsibility to
    619  * adjust reference counts when these pointers are initialized
    620  * or assigned to.  (This convention significantly reduces
    621  * the number of potentially expensive reference count
    622  * updates.)
    623  */
// RAII holder for a _RopeRep pointer: the destructor unrefs whatever the
// pointer holds (null-tolerant).  See the comment block above: refcount
// bookkeeping on initialization/assignment is the caller's job.
template<class _CharT, class _Alloc>
struct _Rope_self_destruct_ptr {
  _Rope_RopeRep<_CharT,_Alloc>* _M_ptr;
  ~_Rope_self_destruct_ptr()
  { _Rope_RopeRep<_CharT,_Alloc>::_S_unref(_M_ptr); }
#   ifdef _STLP_USE_EXCEPTIONS
  // With exceptions the pointer must be initialized, since the destructor
  // can run before any assignment if a throw intervenes.
  _Rope_self_destruct_ptr() : _M_ptr(0) {}
#   else
  _Rope_self_destruct_ptr() {}
#   endif
  _Rope_self_destruct_ptr(_Rope_RopeRep<_CharT,_Alloc>* __p) : _M_ptr(__p) {}
  _Rope_RopeRep<_CharT,_Alloc>& operator*() { return *_M_ptr; }
  _Rope_RopeRep<_CharT,_Alloc>* operator->() { return _M_ptr; }
  operator _Rope_RopeRep<_CharT,_Alloc>*() { return _M_ptr; }
  // Raw pointer assignment: does NOT unref the previous value.
  _Rope_self_destruct_ptr<_CharT, _Alloc>&
  operator= (_Rope_RopeRep<_CharT,_Alloc>* __x)
  { _M_ptr = __x; return *this; }
};
    642 
    643 /*
    644  * Dereferencing a nonconst iterator has to return something
    645  * that behaves almost like a reference.  It's not possible to
    646  * return an actual reference since assignment requires extra
    647  * work.  And we would get into the same problems as with the
    648  * CD2 version of basic_string.
    649  */
// Proxy returned when dereferencing a non-const rope iterator (see the
// comment block above): reads convert to _CharT, writes go through
// operator= back into the rope.
template<class _CharT, class _Alloc>
class _Rope_char_ref_proxy {
  typedef _Rope_char_ref_proxy<_CharT, _Alloc> _Self;
  friend class rope<_CharT,_Alloc>;
  friend class _Rope_iterator<_CharT,_Alloc>;
  friend class _Rope_char_ptr_proxy<_CharT,_Alloc>;
  typedef _Rope_self_destruct_ptr<_CharT,_Alloc> _Self_destruct_ptr;
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
  typedef rope<_CharT,_Alloc> _My_rope;
  size_t _M_pos;           // Character position within *_M_root.
  _CharT _M_current;       // Cached character value; valid only if _M_current_valid.
  bool _M_current_valid;
  _My_rope* _M_root;     // The whole rope.
public:
  _Rope_char_ref_proxy(_My_rope* __r, size_t __p) :
    _M_pos(__p), _M_current_valid(false), _M_root(__r) {}
  _Rope_char_ref_proxy(const _Self& __x) :
    _M_pos(__x._M_pos), _M_current_valid(false), _M_root(__x._M_root) {}
  // Don't preserve cache if the reference can outlive the
  // expression.  We claim that's not possible without calling
  // a copy constructor or generating reference to a proxy
  // reference.  We declare the latter to have undefined semantics.
  _Rope_char_ref_proxy(_My_rope* __r, size_t __p, _CharT __c)
    : _M_pos(__p), _M_current(__c), _M_current_valid(true), _M_root(__r) {}
  inline operator _CharT () const;
  _Self& operator= (_CharT __c);
  _Rope_char_ptr_proxy<_CharT, _Alloc> operator& () const;
  // Proxy-to-proxy assignment copies the character value, not the binding.
  _Self& operator= (const _Self& __c) {
    return operator=((_CharT)__c);
  }
};
    681 
    682 #ifdef _STLP_FUNCTION_TMPL_PARTIAL_ORDER
// swap for proxy references: materialize each side as a plain _CharT and
// write the values back through the proxies' assignment operators.
// Proxies are taken by value since they are themselves reference-like.
template<class _CharT, class __Alloc>
inline void swap(_Rope_char_ref_proxy <_CharT, __Alloc > __a,
                 _Rope_char_ref_proxy <_CharT, __Alloc > __b) {
  _CharT __tmp = __a;
  __a = __b;
  __b = __tmp;
}
    690 #else
    691 // There is no really acceptable way to handle this.  The default
    692 // definition of swap doesn't work for proxy references.
    693 // It can't really be made to work, even with ugly hacks, since
    694 // the only unusual operation it uses is the copy constructor, which
    695 // is needed for other purposes.  We provide a macro for
    696 // full specializations, and instantiate the most common case.
    697 # define _ROPE_SWAP_SPECIALIZATION(_CharT, __Alloc) \
    698     inline void swap(_Rope_char_ref_proxy <_CharT, __Alloc > __a, \
    699                      _Rope_char_ref_proxy <_CharT, __Alloc > __b) { \
    700         _CharT __tmp = __a; \
    701         __a = __b; \
    702         __b = __tmp; \
    703     }
    704 
    705 _ROPE_SWAP_SPECIALIZATION(char, allocator<char>)
    706 
    707 # ifndef _STLP_NO_WCHAR_T
    708 _ROPE_SWAP_SPECIALIZATION(wchar_t, allocator<wchar_t>)
    709 # endif
    710 
    711 #endif /* !_STLP_FUNCTION_TMPL_PARTIAL_ORDER */
    712 
// Pointer-like proxy produced by taking the address of a
// _Rope_char_ref_proxy; dereferencing yields a fresh reference proxy for
// the same (rope, position) pair.
template<class _CharT, class _Alloc>
class _Rope_char_ptr_proxy {
  // XXX this class should be rewritten.
public:
  typedef _Rope_char_ptr_proxy<_CharT, _Alloc> _Self;
  friend class _Rope_char_ref_proxy<_CharT,_Alloc>;
  size_t _M_pos;                    // Character position within *_M_root.
  rope<_CharT,_Alloc>* _M_root;     // The whole rope.

  _Rope_char_ptr_proxy(const _Rope_char_ref_proxy<_CharT,_Alloc>& __x)
    : _M_pos(__x._M_pos), _M_root(__x._M_root) {}
  _Rope_char_ptr_proxy(const _Self& __x)
    : _M_pos(__x._M_pos), _M_root(__x._M_root) {}
  _Rope_char_ptr_proxy() {}
  // Only a null _CharT* may be converted -- asserts __x == 0.
  _Rope_char_ptr_proxy(_CharT* __x) : _M_pos(0), _M_root(0) {
    _STLP_ASSERT(0 == __x)
  }
  _Self& operator= (const _Self& __x) {
    _M_pos = __x._M_pos;
    _M_root = __x._M_root;
    return *this;
  }

  _Rope_char_ref_proxy<_CharT,_Alloc> operator*() const {
    return _Rope_char_ref_proxy<_CharT,_Alloc>(_M_root, _M_pos);
  }
};
    740 
    741 
    742 /*
    743  * Rope iterators:
    744  * Unlike in the C version, we cache only part of the stack
    745  * for rope iterators, since they must be efficiently copyable.
    746  * When we run out of cache, we have to reconstruct the iterator
    747  * value.
    748  * Pointers from iterators are not included in reference counts.
    749  * Iterators are assumed to be thread private.  Ropes can
    750  * be shared.
    751  */
template<class _CharT, class _Alloc>
class _Rope_iterator_base
/*   : public random_access_iterator<_CharT, ptrdiff_t>  */
{
  friend class rope<_CharT,_Alloc>;
  typedef _Rope_iterator_base<_CharT, _Alloc> _Self;
  typedef _Rope_RopeConcatenation<_CharT,_Alloc> _RopeConcat;
public:
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;

  enum { _S_path_cache_len = 4 }; // Must be <= 9 because of _M_path_direction.
  enum { _S_iterator_buf_len = 15 };
  size_t _M_current_pos;
  // The whole rope.
  _RopeRep* _M_root;
  // Starting position for current leaf
  size_t _M_leaf_pos;
  // Buffer possibly containing current char.
  _CharT* _M_buf_start;
  // Pointer to current char in buffer, != 0 ==> buffer valid.
  _CharT* _M_buf_ptr;
  // One past __last valid char in buffer.
  _CharT* _M_buf_end;

  // What follows is the path cache.  We go out of our
  // way to make this compact.
  // Path_end contains the bottom section of the path from
  // the root to the current leaf.
  struct {
#  if defined (__BORLANDC__) && (__BORLANDC__ < 0x560)
    _RopeRep const*_M_data[4];
#  else
    _RopeRep const*_M_data[_S_path_cache_len];
#  endif
  } _M_path_end;
  // Last valid __pos in path_end;
  // _M_path_end[0] ... _M_path_end[_M_leaf_index-1]
  // point to concatenation nodes.
  int _M_leaf_index;
  // (_M_path_directions >> __i) & 1 is 1
  // if we got from _M_path_end[leaf_index - __i - 1]
  // to _M_path_end[leaf_index - __i] by going to the
  // __right. Assumes path_cache_len <= 9.
  unsigned char _M_path_directions;
  // Short buffer for surrounding chars.
  // This is useful primarily for
  // RopeFunctions.  We put the buffer
  // here to avoid locking in the
  // multithreaded case.
  // The cached path is generally assumed to be valid
  // only if the buffer is valid.
  struct {
#  if defined (__BORLANDC__) && (__BORLANDC__ < 0x560)
    _CharT _M_data[15];
#  else
    _CharT _M_data[_S_iterator_buf_len];
#  endif
  } _M_tmp_buf;

  // Set buffer contents given path cache.
  static void _S_setbuf(_Rope_iterator_base<_CharT, _Alloc>& __x);
  // Set buffer contents and path cache.
  static void _S_setcache(_Rope_iterator_base<_CharT, _Alloc>& __x);
  // As above, but assumes path cache is valid for previous posn.
  static void _S_setcache_for_incr(_Rope_iterator_base<_CharT, _Alloc>& __x);
  _Rope_iterator_base() {}
  // Leaves the buffer invalid (_M_buf_ptr == 0); the cache is filled
  // lazily by _S_setcache on first dereference.
  _Rope_iterator_base(_RopeRep* __root, size_t __pos)
    : _M_current_pos(__pos),_M_root(__root),  _M_buf_ptr(0) {}
  void _M_incr(size_t __n);
  void _M_decr(size_t __n);
public:
  size_t index() const { return _M_current_pos; }
private:
  // Fix up the buffer pointers after a memberwise copy.  If __x's
  // buffer pointers aimed into its own _M_tmp_buf, retarget ours into
  // our copy of that buffer; otherwise they point into a leaf owned by
  // the rope and only _M_buf_end still needs copying (it is not copied
  // in the constructor init list / assignment body).
  void _M_copy_buf(const _Self& __x) {
    _M_tmp_buf = __x._M_tmp_buf;
    if (__x._M_buf_start == __x._M_tmp_buf._M_data) {
      _M_buf_start = _M_tmp_buf._M_data;
      _M_buf_end = _M_buf_start + (__x._M_buf_end - __x._M_buf_start);
      _M_buf_ptr = _M_buf_start + (__x._M_buf_ptr - __x._M_buf_start);
    } else {
      _M_buf_end = __x._M_buf_end;
    }
  }

public:
  // Note: _M_buf_end is deliberately left alone here; it is only
  // meaningful when _M_buf_ptr != 0, in which case _M_copy_buf sets it.
  _Rope_iterator_base(const _Self& __x) :
      _M_current_pos(__x._M_current_pos),
      _M_root(__x._M_root),
      _M_leaf_pos( __x._M_leaf_pos ),
      _M_buf_start(__x._M_buf_start),
      _M_buf_ptr(__x._M_buf_ptr),
      _M_path_end(__x._M_path_end),
      _M_leaf_index(__x._M_leaf_index),
      _M_path_directions(__x._M_path_directions)
      {
        if (0 != __x._M_buf_ptr) {
          _M_copy_buf(__x);
        }
      }
  _Self& operator = (const _Self& __x)
      {
        _M_current_pos = __x._M_current_pos;
        _M_root = __x._M_root;
        _M_buf_start = __x._M_buf_start;
        _M_buf_ptr = __x._M_buf_ptr;
        _M_path_end = __x._M_path_end;
        _M_leaf_index = __x._M_leaf_index;
        _M_path_directions = __x._M_path_directions;
        _M_leaf_pos = __x._M_leaf_pos;
        if (0 != __x._M_buf_ptr) {
          _M_copy_buf(__x);
        }
        return *this;
      }
};
    867 
    868 template<class _CharT, class _Alloc> class _Rope_iterator;
    869 
template<class _CharT, class _Alloc>
// Read-only random-access iterator over a rope.  Does NOT take part in
// root reference counting (see the ctor comment below), so it must not
// outlive the rope it refers to.
class _Rope_const_iterator : public _Rope_iterator_base<_CharT,_Alloc> {
  friend class rope<_CharT,_Alloc>;
  typedef  _Rope_const_iterator<_CharT, _Alloc> _Self;
  typedef _Rope_iterator_base<_CharT,_Alloc> _Base;
  //  protected:
public:
#   ifndef _STLP_HAS_NO_NAMESPACES
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
  // The one from the base class may not be directly visible.
#   endif
  _Rope_const_iterator(const _RopeRep* __root, size_t __pos):
    _Rope_iterator_base<_CharT,_Alloc>(__CONST_CAST(_RopeRep*,__root), __pos)
    // Only nonconst iterators modify root ref count
  {}
public:
  typedef _CharT reference;   // Really a value.  Returning a reference
                              // Would be a mess, since it would have
                              // to be included in refcount.
  typedef const _CharT* pointer;
  typedef _CharT value_type;
  typedef ptrdiff_t difference_type;
  typedef random_access_iterator_tag iterator_category;

public:
  _Rope_const_iterator() {}
  _Rope_const_iterator(const _Self& __x) :
    _Rope_iterator_base<_CharT,_Alloc>(__x) { }
  // Conversion from mutable to const iterator.
  _Rope_const_iterator(const _Rope_iterator<_CharT,_Alloc>& __x):
    _Rope_iterator_base<_CharT,_Alloc>(__x) {}
  _Rope_const_iterator(const rope<_CharT,_Alloc>& __r, size_t __pos) :
    _Rope_iterator_base<_CharT,_Alloc>(__r._M_tree_ptr._M_data, __pos) {}
  _Self& operator= (const _Self& __x) {
    _Base::operator=(__x);
    return *this;
  }
  // Non-const because it may (re)fill the character cache.  The #if
  // ladder works around name-lookup/overload quirks of older compilers
  // when calling the dependent base-class static _S_setcache.
  reference operator*() {
    if (0 == this->_M_buf_ptr)
#if defined(__clang__) || (defined(__GNUC__) && (__GNUC__ > 4 || __GNUC_MINOR__ >= 7))
      this->_S_setcache(*this);
#elif !defined (__DMC__)
      _S_setcache(*this);
#else
    { _Rope_iterator_base<_CharT, _Alloc>* __x = this; _S_setcache(*__x); }
#endif
    return *(this->_M_buf_ptr);
  }
  _Self& operator++()
      {
        // Fast path: stay within the cached buffer when possible;
        // otherwise fall back to the generic increment.
        if ( this->_M_buf_ptr != 0 ) {
          _CharT *__next = this->_M_buf_ptr + 1;
          if ( __next < this->_M_buf_end ) {
            this->_M_buf_ptr = __next;
            ++this->_M_current_pos;
            return *this;
          }
        }
        this->_M_incr(1);
        return *this;
      }
  _Self& operator+=(ptrdiff_t __n) {
    if (__n >= 0) {
      this->_M_incr(__n);
    } else {
      this->_M_decr(-__n);
    }
    return *this;
  }
  _Self& operator--() {
    this->_M_decr(1);
    return *this;
  }
  _Self& operator-=(ptrdiff_t __n) {
    if (__n >= 0) {
      this->_M_decr(__n);
    } else {
      this->_M_incr(-__n);
    }
    return *this;
  }
  // Postfix forms return a freshly-built iterator with no cache.
  _Self operator++(int) {
    size_t __old_pos = this->_M_current_pos;
    this->_M_incr(1);
    return _Rope_const_iterator<_CharT,_Alloc>(this->_M_root, __old_pos);
    // This makes a subsequent dereference expensive.
    // Perhaps we should instead copy the iterator
    // if it has a valid cache?
  }
  _Self operator--(int) {
    size_t __old_pos = this->_M_current_pos;
    this->_M_decr(1);
    return _Rope_const_iterator<_CharT,_Alloc>(this->_M_root, __old_pos);
  }
  // Defined out of line (after rope<> is complete).
  inline reference operator[](size_t __n);
};
    965 
template<class _CharT, class _Alloc>
// Mutable rope iterator.  Unlike _Rope_const_iterator it DOES hold a
// reference count on the root, so it can detect rope modification.
// Dereferencing yields a char-reference proxy, not a plain _CharT&.
class _Rope_iterator : public _Rope_iterator_base<_CharT,_Alloc> {
  friend class rope<_CharT,_Alloc>;
  typedef _Rope_iterator<_CharT, _Alloc> _Self;
  typedef _Rope_iterator_base<_CharT,_Alloc> _Base;
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;

public:
  rope<_CharT,_Alloc>* _M_root_rope;
  // root is treated as a cached version of this,
  // and is used to detect changes to the underlying
  // rope.
  // Root is included in the reference count.
  // This is necessary so that we can detect changes reliably.
  // Unfortunately, it requires careful bookkeeping for the
  // nonGC case.
  _Rope_iterator(rope<_CharT,_Alloc>* __r, size_t __pos);

  // Re-syncs the cached root with *_M_root_rope if the rope changed.
  void _M_check();
public:
  typedef _Rope_char_ref_proxy<_CharT,_Alloc>  reference;
  typedef _Rope_char_ref_proxy<_CharT,_Alloc>* pointer;
  typedef _CharT value_type;
  typedef ptrdiff_t difference_type;
  typedef random_access_iterator_tag iterator_category;
public:
  ~_Rope_iterator() {  //*TY 5/6/00 - added dtor to balance reference count
    _RopeRep::_S_unref(this->_M_root);
  }

  rope<_CharT,_Alloc>& container() { return *_M_root_rope; }
  _Rope_iterator() {
    this->_M_root = 0;  // Needed for reference counting.
  }
  _Rope_iterator(const  _Self& __x) :
    _Rope_iterator_base<_CharT,_Alloc>(__x) {
    _M_root_rope = __x._M_root_rope;
    // Base copy duplicated _M_root; account for the extra reference.
    _RopeRep::_S_ref(this->_M_root);
  }
  _Rope_iterator(rope<_CharT,_Alloc>& __r, size_t __pos);
  _Self& operator= (const  _Self& __x) {
    // Ref the new root before unref'ing the old one so that
    // self-assignment cannot destroy the shared representation.
    _RopeRep* __old = this->_M_root;
    _RopeRep::_S_ref(__x._M_root);
    _Base::operator=(__x);
    _M_root_rope = __x._M_root_rope;
    _RopeRep::_S_unref(__old);
    return *this;
  }
  reference operator*() {
    _M_check();
    // With a valid buffer we can hand the proxy the current value;
    // otherwise the proxy fetches it on demand.
    if (0 == this->_M_buf_ptr) {
      return reference(_M_root_rope, this->_M_current_pos);
    } else {
      return reference(_M_root_rope, this->_M_current_pos, *(this->_M_buf_ptr));
    }
  }
  _Self& operator++() {
    this->_M_incr(1);
    return *this;
  }
  _Self& operator+=(ptrdiff_t __n) {
    if (__n >= 0) {
      this->_M_incr(__n);
    } else {
      this->_M_decr(-__n);
    }
    return *this;
  }
  _Self& operator--() {
    this->_M_decr(1);
    return *this;
  }
  _Self& operator-=(ptrdiff_t __n) {
    if (__n >= 0) {
      this->_M_decr(__n);
    } else {
      this->_M_incr(-__n);
    }
    return *this;
  }
  _Self operator++(int) {
    size_t __old_pos = this->_M_current_pos;
    this->_M_incr(1);
    return _Self(_M_root_rope, __old_pos);
  }
  _Self operator--(int) {
    size_t __old_pos = this->_M_current_pos;
    this->_M_decr(1);
    return _Self(_M_root_rope, __old_pos);
  }
  reference operator[](ptrdiff_t __n) {
    return reference(_M_root_rope, this->_M_current_pos + __n);
  }
};
   1060 
# ifdef _STLP_USE_OLD_HP_ITERATOR_QUERIES
// Pre-iterator_traits (original HP STL) query functions: category is
// returned as a tag object; value/distance types are conveyed through
// the pointee type of a null pointer return.
template <class _CharT, class _Alloc>
inline random_access_iterator_tag
iterator_category(const _Rope_iterator<_CharT,_Alloc>&) {  return random_access_iterator_tag();}
template <class _CharT, class _Alloc>
inline _CharT* value_type(const _Rope_iterator<_CharT,_Alloc>&) { return 0; }
template <class _CharT, class _Alloc>
inline ptrdiff_t* distance_type(const _Rope_iterator<_CharT,_Alloc>&) { return 0; }
template <class _CharT, class _Alloc>
inline random_access_iterator_tag
iterator_category(const _Rope_const_iterator<_CharT,_Alloc>&) { return random_access_iterator_tag(); }
template <class _CharT, class _Alloc>
inline _CharT* value_type(const _Rope_const_iterator<_CharT,_Alloc>&) { return 0; }
template <class _CharT, class _Alloc>
inline ptrdiff_t* distance_type(const _Rope_const_iterator<_CharT,_Alloc>&) { return 0; }
#endif /* _STLP_USE_OLD_HP_ITERATOR_QUERIES */
   1077 
// Apply the consumer __c to the characters of __r in positions
// [__begin, __end).  NOTE(review): the bool return presumably reports
// whether traversal ran to completion — confirm against the definition.
template <class _CharT, class _Alloc, class _CharConsumer>
bool _S_apply_to_pieces(_CharConsumer& __c,
                        _Rope_RopeRep<_CharT, _Alloc> *__r,
                        size_t __begin, size_t __end);
                        // begin and end are assumed to be in range.
   1083 
   1084 template <class _CharT, class _Alloc>
   1085 class rope
   1086 #if defined (_STLP_USE_PARTIAL_SPEC_WORKAROUND)
   1087            : public __stlport_class<rope<_CharT, _Alloc> >
   1088 #endif
   1089 {
   1090   typedef rope<_CharT,_Alloc> _Self;
   1091 public:
   1092   typedef _CharT value_type;
   1093   typedef ptrdiff_t difference_type;
   1094   typedef size_t size_type;
   1095   typedef _CharT const_reference;
   1096   typedef const _CharT* const_pointer;
   1097   typedef _Rope_iterator<_CharT,_Alloc> iterator;
   1098   typedef _Rope_const_iterator<_CharT,_Alloc> const_iterator;
   1099   typedef _Rope_char_ref_proxy<_CharT,_Alloc> reference;
   1100   typedef _Rope_char_ptr_proxy<_CharT,_Alloc> pointer;
   1101 
   1102   friend class _Rope_iterator<_CharT,_Alloc>;
   1103   friend class _Rope_const_iterator<_CharT,_Alloc>;
   1104   friend struct _Rope_RopeRep<_CharT,_Alloc>;
   1105   friend class _Rope_iterator_base<_CharT,_Alloc>;
   1106   friend class _Rope_char_ptr_proxy<_CharT,_Alloc>;
   1107   friend class _Rope_char_ref_proxy<_CharT,_Alloc>;
   1108   friend struct _Rope_RopeSubstring<_CharT,_Alloc>;
   1109 
   1110   _STLP_DECLARE_RANDOM_ACCESS_REVERSE_ITERATORS;
   1111 
   1112 protected:
   1113   typedef _CharT* _Cstrptr;
   1114 
   1115   static _CharT _S_empty_c_str[1];
   1116 
   1117   enum { _S_copy_max = 23 };
   1118   // For strings shorter than _S_copy_max, we copy to
   1119   // concatenate.
   1120 
   1121   typedef _Rope_RopeRep<_CharT, _Alloc> _RopeRep;
   1122   typedef typename _RopeRep::_IsBasicCharType _IsBasicCharType;
   1123 
   1124 public:
   1125   _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
   1126   typedef _Alloc allocator_type;
   1127 
   1128 public:
   1129   // The only data member of a rope:
   1130   _STLP_PRIV _STLP_alloc_proxy<_RopeRep*, _CharT, allocator_type> _M_tree_ptr;
   1131 
   1132 public:
   1133   allocator_type get_allocator() const { return allocator_type(_M_tree_ptr); }
   1134 
   1135 public:
   1136   typedef _Rope_RopeConcatenation<_CharT,_Alloc> _RopeConcatenation;
   1137   typedef _Rope_RopeLeaf<_CharT,_Alloc> _RopeLeaf;
   1138   typedef _Rope_RopeFunction<_CharT,_Alloc> _RopeFunction;
   1139   typedef _Rope_RopeSubstring<_CharT,_Alloc> _RopeSubstring;
   1140 
   1141   // Retrieve a character at the indicated position.
   1142   static _CharT _S_fetch(_RopeRep* __r, size_type __pos);
   1143 
   1144   // Obtain a pointer to the character at the indicated position.
   1145   // The pointer can be used to change the character.
   1146   // If such a pointer cannot be produced, as is frequently the
   1147   // case, 0 is returned instead.
   1148   // (Returns nonzero only if all nodes in the path have a refcount
   1149   // of 1.)
   1150   static _CharT* _S_fetch_ptr(_RopeRep* __r, size_type __pos);
   1151 
   1152   static void _S_unref(_RopeRep* __t) {
   1153     _RopeRep::_S_unref(__t);
   1154   }
   1155   static void _S_ref(_RopeRep* __t) {
   1156     _RopeRep::_S_ref(__t);
   1157   }
   1158 
   1159   typedef _Rope_self_destruct_ptr<_CharT,_Alloc> _Self_destruct_ptr;
   1160 
   1161   // _Result is counted in refcount.
   1162   static _RopeRep* _S_substring(_RopeRep* __base,
   1163                                 size_t __start, size_t __endp1);
   1164 
   1165   static _RopeRep* _S_concat_char_iter(_RopeRep* __r,
   1166                                        const _CharT* __iter, size_t __slen);
   1167   // Concatenate rope and char ptr, copying __s.
   1168   // Should really take an arbitrary iterator.
   1169   // Result is counted in refcount.
   1170   static _RopeRep* _S_destr_concat_char_iter(_RopeRep* __r,
   1171                                              const _CharT* __iter, size_t __slen);
   1172     // As above, but one reference to __r is about to be
   1173     // destroyed.  Thus the pieces may be recycled if all
   1174     // relevent reference counts are 1.
   1175 
   1176   // General concatenation on _RopeRep.  _Result
   1177   // has refcount of 1.  Adjusts argument refcounts.
   1178   static _RopeRep* _S_concat_rep(_RopeRep* __left, _RopeRep* __right);
   1179 
   1180 public:
#if defined (_STLP_MEMBER_TEMPLATES)
  template <class _CharConsumer>
#else
  typedef _Rope_char_consumer<_CharT> _CharConsumer;
#endif
  // Apply __c to the characters in positions [__begin, __end);
  // positions are assumed to be in range (see _S_apply_to_pieces).
  void apply_to_pieces(size_t __begin, size_t __end,
                       _CharConsumer& __c) const
  { _S_apply_to_pieces(__c, _M_tree_ptr._M_data, __begin, __end); }
   1189 
   1190 protected:
   1191 
  // Buffer size (in _CharT) actually allocated for an __n-char leaf;
  // delegates to the representation's rounding policy.
  static size_t _S_rounded_up_size(size_t __n)
  { return _RopeRep::_S_rounded_up_size(__n); }
   1194 
   1195   // Allocate and construct a RopeLeaf using the supplied allocator
   1196   // Takes ownership of s instead of copying.
  static _RopeLeaf* _S_new_RopeLeaf(_CharT *__s,
                                    size_t _p_size, allocator_type __a) {
    // Allocate raw storage for the node, then placement-construct it.
    _RopeLeaf* __space = _STLP_CREATE_ALLOCATOR(allocator_type, __a,
                                                _RopeLeaf).allocate(1);
    _STLP_TRY {
      new(__space) _RopeLeaf(__s, _p_size, __a);
    }
    // If construction throws, give the raw node storage back (the
    // caller still owns __s at that point).
   _STLP_UNWIND(_STLP_CREATE_ALLOCATOR(allocator_type,__a,
                                       _RopeLeaf).deallocate(__space, 1))
    return __space;
  }
   1208 
  // Allocate and placement-construct a concatenation node.
  // NOTE(review): unlike _S_new_RopeLeaf there is no try/unwind here —
  // presumably the node constructors below cannot throw; confirm.
  static _RopeConcatenation* _S_new_RopeConcatenation(_RopeRep* __left, _RopeRep* __right,
                                                      allocator_type __a) {
   _RopeConcatenation* __space = _STLP_CREATE_ALLOCATOR(allocator_type, __a,
                                                        _RopeConcatenation).allocate(1);
    return new(__space) _RopeConcatenation(__left, __right, __a);
  }

  // Allocate and placement-construct a function node (characters
  // produced on demand by __f); __d => the node owns/deletes __f.
  static _RopeFunction* _S_new_RopeFunction(char_producer<_CharT>* __f,
                                            size_t _p_size, bool __d, allocator_type __a) {
   _RopeFunction* __space = _STLP_CREATE_ALLOCATOR(allocator_type, __a,
                                                   _RopeFunction).allocate(1);
    return new(__space) _RopeFunction(__f, _p_size, __d, __a);
  }

  // Allocate and placement-construct a substring node referring to
  // __l characters of __b starting at offset __s.
  static _RopeSubstring* _S_new_RopeSubstring(_Rope_RopeRep<_CharT,_Alloc>* __b, size_t __s,
                                              size_t __l, allocator_type __a) {
   _RopeSubstring* __space = _STLP_CREATE_ALLOCATOR(allocator_type, __a,
                                                    _RopeSubstring).allocate(1);
    return new(__space) _RopeSubstring(__b, __s, __l, __a);
  }
   1229 
  static
  // Build a leaf by COPYING _p_size chars from __s into a freshly
  // allocated, null-terminated buffer.  Returns 0 for an empty input
  // (the empty rope is represented by a null tree).
  _RopeLeaf* _S_RopeLeaf_from_unowned_char_ptr(const _CharT *__s,
                                               size_t _p_size, allocator_type __a) {
    if (0 == _p_size) return 0;

   _CharT* __buf = _STLP_CREATE_ALLOCATOR(allocator_type,__a, _CharT).allocate(_S_rounded_up_size(_p_size));

    _STLP_PRIV __ucopy_n(__s, _p_size, __buf);
    _S_construct_null(__buf + _p_size);

    _STLP_TRY {
      // Leaf takes ownership of __buf on success.
      return _S_new_RopeLeaf(__buf, _p_size, __a);
    }
    // On failure we still own __buf and must release it.
    _STLP_UNWIND(_RopeRep::_S_free_string(__buf, _p_size, __a))
    _STLP_RET_AFTER_THROW(0)
  }
   1246 
   1247 
   1248   // Concatenation of nonempty strings.
   1249   // Always builds a concatenation node.
   1250   // Rebalances if the result is too deep.
   1251   // Result has refcount 1.
   1252   // Does not increment left and right ref counts even though
   1253   // they are referenced.
   1254   static _RopeRep*
   1255   _S_tree_concat(_RopeRep* __left, _RopeRep* __right);
   1256 
   1257   // Concatenation helper functions
   1258   static _RopeLeaf*
   1259   _S_leaf_concat_char_iter(_RopeLeaf* __r,
   1260                            const _CharT* __iter, size_t __slen);
   1261   // Concatenate by copying leaf.
   1262   // should take an arbitrary iterator
   1263   // result has refcount 1.
   1264   static _RopeLeaf* _S_destr_leaf_concat_char_iter
   1265   (_RopeLeaf* __r, const _CharT* __iter, size_t __slen);
   1266   // A version that potentially clobbers __r if __r->_M_ref_count == 1.
   1267 
   1268 
   1269   // A helper function for exponentiating strings.
   1270   // This uses a nonstandard refcount convention.
   1271   // The result has refcount 0.
   1272   typedef _STLP_PRIV _Rope_Concat_fn<_CharT,_Alloc> _Concat_fn;
   1273 #if !defined (__GNUC__) || (__GNUC__ < 3)
   1274   friend _Concat_fn;
   1275 #else
   1276   friend struct _STLP_PRIV _Rope_Concat_fn<_CharT,_Alloc>;
   1277 #endif
   1278 
   1279 public:
  // Length of a null-terminated _CharT string, via char_traits.
  static size_t _S_char_ptr_len(const _CharT* __s) {
    return char_traits<_CharT>::length(__s);
  }
   1283 
   1284 public: /* for operators */
  // Adopt an existing tree __t (reference NOT incremented here; the
  // caller transfers its reference).  Public only for the operators.
  rope(_RopeRep* __t, const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, __t) { }
   1287 private:
   1288   // Copy __r to the _CharT buffer.
   1289   // Returns __buffer + __r->_M_size._M_data.
   1290   // Assumes that buffer is uninitialized.
   1291   static _CharT* _S_flatten(_RopeRep* __r, _CharT* __buffer);
   1292 
   1293   // Again, with explicit starting position and length.
   1294   // Assumes that buffer is uninitialized.
   1295   static _CharT* _S_flatten(_RopeRep* __r,
   1296                             size_t __start, size_t __len,
   1297                             _CharT* __buffer);
   1298 
   1299   // fbp : HP aCC prohibits access to protected min_len from within static methods ( ?? )
   1300 public:
   1301   static const unsigned long _S_min_len[__ROPE_DEPTH_SIZE];
   1302 protected:
   1303   static bool _S_is_balanced(_RopeRep* __r)
   1304   { return (__r->_M_size._M_data >= _S_min_len[__r->_M_depth]); }
   1305 
   1306   static bool _S_is_almost_balanced(_RopeRep* __r) {
   1307     return (__r->_M_depth == 0 ||
   1308             __r->_M_size._M_data >= _S_min_len[__r->_M_depth - 1]);
   1309   }
   1310 
   1311   static bool _S_is_roughly_balanced(_RopeRep* __r) {
   1312     return (__r->_M_depth <= 1 ||
   1313             __r->_M_size._M_data >= _S_min_len[__r->_M_depth - 2]);
   1314   }
   1315 
   1316   // Assumes the result is not empty.
   1317   static _RopeRep* _S_concat_and_set_balanced(_RopeRep* __left,
   1318                                               _RopeRep* __right) {
   1319     _RopeRep* __result = _S_concat_rep(__left, __right);
   1320     if (_S_is_balanced(__result)) __result->_M_is_balanced = true;
   1321     return __result;
   1322   }
   1323 
   1324   // The basic rebalancing operation.  Logically copies the
   1325   // rope.  The result has refcount of 1.  The client will
   1326   // usually decrement the reference count of __r.
   1327   // The result is within height 2 of balanced by the above
   1328   // definition.
   1329   static _RopeRep* _S_balance(_RopeRep* __r);
   1330 
   1331   // Add all unbalanced subtrees to the forest of balanceed trees.
   1332   // Used only by balance.
   1333   static void _S_add_to_forest(_RopeRep*__r, _RopeRep** __forest);
   1334 
   1335   // Add __r to forest, assuming __r is already balanced.
   1336   static void _S_add_leaf_to_forest(_RopeRep* __r, _RopeRep** __forest);
   1337 
   1338 #ifdef _STLP_DEBUG
   1339   // Print to stdout, exposing structure
   1340   static void _S_dump(_RopeRep* __r, int __indent = 0);
   1341 #endif
   1342 
   1343   // Return -1, 0, or 1 if __x < __y, __x == __y, or __x > __y resp.
   1344   static int _S_compare(const _RopeRep* __x, const _RopeRep* __y);
   1345 
   1346   void _STLP_FUNCTION_THROWS _M_throw_out_of_range() const;
   1347 
  // Replace our tree with __r, which the caller must already hold a
  // reference for (ownership transfers to this rope).
  // NOTE(review): the commented-out guard suggests self-reset
  // (__r == _M_tree_ptr._M_data) was once special-cased; as written,
  // a self-reset relies on the caller's extra reference keeping the
  // node alive across the _S_unref — confirm all call sites do that.
  void _M_reset(_RopeRep* __r) {
    //if (__r != _M_tree_ptr._M_data) {
      _S_unref(_M_tree_ptr._M_data);
      _M_tree_ptr._M_data = __r;
    //}
  }
   1354 
   1355 public:
   1356   bool empty() const { return 0 == _M_tree_ptr._M_data; }
   1357 
   1358   // Comparison member function.  This is public only for those
   1359   // clients that need a ternary comparison.  Others
   1360   // should use the comparison operators below.
   1361   int compare(const _Self& __y) const {
   1362     return _S_compare(_M_tree_ptr._M_data, __y._M_tree_ptr._M_data);
   1363   }
   1364 
  // Construct from a null-terminated C string (characters are copied).
  rope(const _CharT* __s, const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, _S_RopeLeaf_from_unowned_char_ptr(__s, _S_char_ptr_len(__s),__a))
  {}

  // Construct from the first __len characters of __s (copied; __s need
  // not be null-terminated).
  rope(const _CharT* __s, size_t __len,
       const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, (_S_RopeLeaf_from_unowned_char_ptr(__s, __len, __a)))
  {}
   1373 
  // Should perhaps be templatized with respect to the iterator type
  // and use Sequence_buffer.  (It should perhaps use sequence_buffer
  // even now.)
  // Construct from the character range [__s, __e) (copied).
  rope(const _CharT *__s, const _CharT *__e,
       const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, _S_RopeLeaf_from_unowned_char_ptr(__s, __e - __s, __a))
  {}

  // Construct from a rope iterator range: shares structure with the
  // source rope via a substring node instead of copying characters.
  rope(const const_iterator& __s, const const_iterator& __e,
       const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, _S_substring(__s._M_root, __s._M_current_pos,
                                    __e._M_current_pos))
  {}

  // Same, from mutable iterators.
  rope(const iterator& __s, const iterator& __e,
       const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, _S_substring(__s._M_root, __s._M_current_pos,
                                    __e._M_current_pos))
  {}
   1393 
  // Single-character rope: one leaf holding __c plus a terminating null.
  rope(_CharT __c, const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, (_RopeRep*)0) {
    _CharT* __buf = _M_tree_ptr.allocate(_S_rounded_up_size(1));

    _Copy_Construct(__buf, __c);
    _S_construct_null(__buf + 1);

    _STLP_TRY {
      // Leaf takes ownership of __buf on success.
      _M_tree_ptr._M_data = _S_new_RopeLeaf(__buf, 1, __a);
    }
    _STLP_UNWIND(_RopeRep::_S_free_string(__buf, 1, __a))
  }
   1406 
  // Rope of __n copies of __c.  Builds one leaf of up to
  // __exponentiate_threshold characters and "exponentiates" it via
  // repeated concatenation (__power), sharing the base leaf, then
  // appends a remainder leaf for __n % threshold characters.
  rope(size_t __n, _CharT __c,
       const allocator_type& __a = allocator_type()):
    _M_tree_ptr(__a, (_RopeRep*)0) {
    if (0 == __n)
      return;

    rope<_CharT,_Alloc> __result;
# define  __exponentiate_threshold size_t(32)
    _RopeRep* __remainder;
    rope<_CharT,_Alloc> __remainder_rope;

    // gcc-2.7.2 bugs
    typedef _STLP_PRIV _Rope_Concat_fn<_CharT,_Alloc> _Concat_fn;

    size_t __exponent = __n / __exponentiate_threshold;
    size_t __rest = __n % __exponentiate_threshold;
    // Build the remainder leaf (the final __rest characters), if any.
    if (0 == __rest) {
      __remainder = 0;
    } else {
      _CharT* __rest_buffer = _M_tree_ptr.allocate(_S_rounded_up_size(__rest));
      uninitialized_fill_n(__rest_buffer, __rest, __c);
      _S_construct_null(__rest_buffer + __rest);
      _STLP_TRY {
        __remainder = _S_new_RopeLeaf(__rest_buffer, __rest, __a);
      }
      _STLP_UNWIND(_RopeRep::_S_free_string(__rest_buffer, __rest, __a))
    }
    // Hand the remainder to a rope so it is released automatically.
    __remainder_rope._M_tree_ptr._M_data = __remainder;
    if (__exponent != 0) {
      // Build the threshold-sized base leaf, then repeat it
      // __exponent times by concatenation.
      _CharT* __base_buffer = _M_tree_ptr.allocate(_S_rounded_up_size(__exponentiate_threshold));
      _RopeLeaf* __base_leaf;
      rope<_CharT,_Alloc> __base_rope;
      uninitialized_fill_n(__base_buffer, __exponentiate_threshold, __c);
      _S_construct_null(__base_buffer + __exponentiate_threshold);
      _STLP_TRY {
        __base_leaf = _S_new_RopeLeaf(__base_buffer,
                                      __exponentiate_threshold, __a);
      }
      _STLP_UNWIND(_RopeRep::_S_free_string(__base_buffer,
                                            __exponentiate_threshold, __a))
      __base_rope._M_tree_ptr._M_data = __base_leaf;
      if (1 == __exponent) {
        __result = __base_rope;
        // One each for base_rope and __result
        //_STLP_ASSERT(2 == __result._M_tree_ptr._M_data->_M_ref_count)
      } else {
        __result = _STLP_PRIV __power(__base_rope, __exponent, _Concat_fn());
      }
      if (0 != __remainder) {
        __result += __remainder_rope;
      }
    } else {
      __result = __remainder_rope;
    }
    // Take a reference of our own; __result's reference dies with it.
    _M_tree_ptr._M_data = __result._M_tree_ptr._M_data;
    _M_tree_ptr._M_data->_M_ref_nonnil();
# undef __exponentiate_threshold
  }
   1465 
  // Default: the empty rope (null tree).
  rope(const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, (_RopeRep*)0) {}
   1468 
  // Construct a rope from a function that can compute its members
  // lazily; __delete_fn => the rope owns and eventually deletes __fn.
  // A zero-length result is the empty (null-tree) rope.
  rope(char_producer<_CharT> *__fn, size_t __len, bool __delete_fn,
       const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, (_RopeRep*)0) {
    _M_tree_ptr._M_data = (0 == __len) ?
      0 : _S_new_RopeFunction(__fn, __len, __delete_fn, __a);
  }
   1476 
  // Copy: O(1) — share the tree and bump its reference count.
  rope(const _Self& __x)
    : _M_tree_ptr(__x._M_tree_ptr, __x._M_tree_ptr._M_data) {
    _S_ref(_M_tree_ptr._M_data);
  }
   1481 
#if !defined (_STLP_NO_MOVE_SEMANTIC)
  // Move (STLport emulation): steal the tree, leave the source empty.
  // No refcount traffic is needed.
  rope(__move_source<_Self> __src)
    : _M_tree_ptr(__src.get()._M_tree_ptr, __src.get()._M_tree_ptr._M_data) {
    __src.get()._M_tree_ptr._M_data = 0;
  }
#endif
   1488 
  // Drop our reference; the tree is destroyed when the count hits zero.
  ~rope() {
    _S_unref(_M_tree_ptr._M_data);
  }

  // Assignment shares __x's tree.  Ref before _M_reset's unref so that
  // self-assignment is safe.
  _Self& operator=(const _Self& __x) {
    _STLP_ASSERT(get_allocator() == __x.get_allocator())
    _S_ref(__x._M_tree_ptr._M_data);
    _M_reset(__x._M_tree_ptr._M_data);
    return *this;
  }
   1499 
  // Release the tree and become the empty rope.
  void clear() {
    _S_unref(_M_tree_ptr._M_data);
    _M_tree_ptr._M_data = 0;
  }
  // Append one character; may recycle the old tree when it is
  // unshared (destructive concatenation).
  void push_back(_CharT __x) {
    _M_reset(_S_destr_concat_char_iter(_M_tree_ptr._M_data, &__x, 1));
  }
   1507 
  // Remove the last character by taking the [0, size-1) substring.
  // Precondition: the rope is non-empty (null tree would be deref'd).
  void pop_back() {
    _RopeRep* __old = _M_tree_ptr._M_data;
    _M_tree_ptr._M_data =
      _S_substring(_M_tree_ptr._M_data, 0, _M_tree_ptr._M_data->_M_size._M_data - 1);
    _S_unref(__old);
  }

  // Last character.  Precondition: the rope is non-empty.
  _CharT back() const {
    return _S_fetch(_M_tree_ptr._M_data, _M_tree_ptr._M_data->_M_size._M_data - 1);
  }
   1518 
  // Prepend a single character: build a one-character leaf and
  // concatenate it in front of the existing tree.
  void push_front(_CharT __x) {
    _RopeRep* __old = _M_tree_ptr._M_data;
    _RopeRep* __left =
      _S_RopeLeaf_from_unowned_char_ptr(&__x, 1, _M_tree_ptr);
    _STLP_TRY {
      _M_tree_ptr._M_data = _S_concat_rep(__left, _M_tree_ptr._M_data);
      _S_unref(__old);
      _S_unref(__left);
    }
    _STLP_UNWIND(_S_unref(__left))  // don't leak the new leaf if concat throws
  }

  // Remove the first character by rebuilding the substring [1, size()).
  // Precondition: non-empty rope (null tree would be dereferenced).
  void pop_front() {
    _RopeRep* __old = _M_tree_ptr._M_data;
    _M_tree_ptr._M_data = _S_substring(_M_tree_ptr._M_data, 1, _M_tree_ptr._M_data->_M_size._M_data);
    _S_unref(__old);
  }

  // First character.  Precondition: non-empty rope.
  _CharT front() const {
    return _S_fetch(_M_tree_ptr._M_data, 0);
  }
   1540 
  // Rebalance the underlying tree (repeated concatenation can leave it
  // skewed); the logical character sequence is unchanged.
  void balance() {
    _RopeRep* __old = _M_tree_ptr._M_data;
    _M_tree_ptr._M_data = _S_balance(_M_tree_ptr._M_data);
    _S_unref(__old);
  }

  // Flatten the whole rope into __buffer, which must have room for
  // size() characters.  The destination range is destroyed first, then
  // re-constructed from the tree contents.
  void copy(_CharT* __buffer) const {
    _STLP_STD::_Destroy_Range(__buffer, __buffer + size());
    _S_flatten(_M_tree_ptr._M_data, __buffer);
  }
   1551 
   1552   /*
   1553    * This is the copy function from the standard, but
   1554    * with the arguments reordered to make it consistent with the
   1555    * rest of the interface.
   1556    * Note that this guaranteed not to compile if the draft standard
   1557    * order is assumed.
   1558    */
   1559   size_type copy(size_type __pos, size_type __n, _CharT* __buffer) const {
   1560     size_t _p_size = size();
   1561     size_t __len = (__pos + __n > _p_size? _p_size - __pos : __n);
   1562 
   1563     _STLP_STD::_Destroy_Range(__buffer, __buffer + __len);
   1564     _S_flatten(_M_tree_ptr._M_data, __pos, __len, __buffer);
   1565     return __len;
   1566   }
   1567 
# ifdef _STLP_DEBUG
  // Print to stdout, exposing structure.  May be useful for
  // performance debugging.
  void dump() {
    _S_dump(_M_tree_ptr._M_data);
  }
# endif

  // Convert to 0 terminated string in new allocated memory.
  // Embedded 0s in the input do not terminate the copy.
  // (Defined out of line.)
  const _CharT* c_str() const;

  // As above, but also use the flattened representation as the
  // the new rope representation.  (Defined out of line.)
  const _CharT* replace_with_c_str();
   1583 
  // Reclaim memory for the c_str generated flattened string.
  // Intentionally undocumented, since it's hard to say when this
  // is safe for multiple threads.
  void delete_c_str () {
    if (0 == _M_tree_ptr._M_data) return;
    // If the cached C string *is* the leaf's own character array
    // (as set up by replace_with_c_str), it must not be freed here.
    if (_RopeRep::_S_leaf == _M_tree_ptr._M_data->_M_tag &&
        ((_RopeLeaf*)_M_tree_ptr._M_data)->_M_data ==
        _M_tree_ptr._M_data->_M_c_string) {
      // Representation shared
      return;
    }
    _M_tree_ptr._M_data->_M_free_c_string();
    _M_tree_ptr._M_data->_M_c_string = 0;
  }
   1598 
  // Unchecked read of the character at __pos (returns by value;
  // ropes do not hand out mutable references here).
  _CharT operator[] (size_type __pos) const {
    return _S_fetch(_M_tree_ptr._M_data, __pos);
  }

  // Checked read: throws out_of_range when __pos >= size().
  _CharT at(size_type __pos) const {
    if (__pos >= size()) _M_throw_out_of_range();
    return (*this)[__pos];
  }
   1607 
  // Const iteration over [0, size()).  Iterators store the tree root
  // and a character position.
  const_iterator begin() const {
    return(const_iterator(_M_tree_ptr._M_data, 0));
  }

  // An easy way to get a const iterator from a non-const container.
  const_iterator const_begin() const {
    return(const_iterator(_M_tree_ptr._M_data, 0));
  }

  const_iterator end() const {
    return(const_iterator(_M_tree_ptr._M_data, size()));
  }

  const_iterator const_end() const {
    return(const_iterator(_M_tree_ptr._M_data, size()));
  }
   1624 
  // Number of characters; the empty rope is a null tree.
  size_type size() const {
    return(0 == _M_tree_ptr._M_data? 0 : _M_tree_ptr._M_data->_M_size._M_data);
  }

  size_type length() const {
    return size();
  }

  size_type max_size() const {
    return _S_min_len[__ROPE_MAX_DEPTH-1] - 1;
    //  Guarantees that the result can be sufficiently
    //  balanced.  Longer ropes will probably still work,
    //  but it's harder to make guarantees.
  }
   1639 
  // Reverse const iteration, built on the forward const iterators.
  const_reverse_iterator rbegin() const {
    return const_reverse_iterator(end());
  }

  const_reverse_iterator const_rbegin() const {
    return const_reverse_iterator(end());
  }

  const_reverse_iterator rend() const {
    return const_reverse_iterator(begin());
  }

  const_reverse_iterator const_rend() const {
    return const_reverse_iterator(begin());
  }
  // The symmetric cases are intentionally omitted, since they're presumed
  // to be less common, and we don't handle them as well.
   1657 
  // The following should really be templatized.
  // The first argument should be an input iterator or
  // forward iterator with value_type _CharT.

  // Append __n characters from __iter.
  _Self& append(const _CharT* __iter, size_t __n) {
    _M_reset(_S_destr_concat_char_iter(_M_tree_ptr._M_data, __iter, __n));
    return *this;
  }

  // Append a 0-terminated string.
  _Self& append(const _CharT* __c_string) {
    size_t __len = _S_char_ptr_len(__c_string);
    append(__c_string, __len);
    return *this;
  }

  // Append the character range [__s, __e).
  _Self& append(const _CharT* __s, const _CharT* __e) {
    _M_reset(_S_destr_concat_char_iter(_M_tree_ptr._M_data, __s, __e - __s));
    return *this;
  }

  // Append the rope substring designated by two const iterators into
  // the same rope; the substring is built as a temporary tree and
  // concatenated on.
  _Self& append(const_iterator __s, const_iterator __e) {
    _STLP_ASSERT(__s._M_root == __e._M_root)
    _STLP_ASSERT(get_allocator() == __s._M_root->get_allocator())
    _Self_destruct_ptr __appendee(_S_substring(__s._M_root, __s._M_current_pos, __e._M_current_pos));
    _M_reset(_S_concat_rep(_M_tree_ptr._M_data, (_RopeRep*)__appendee));
    return *this;
  }

  // Append a single character.
  _Self& append(_CharT __c) {
    _M_reset(_S_destr_concat_char_iter(_M_tree_ptr._M_data, &__c, 1));
    return *this;
  }

  _Self& append() { return append(_CharT()); }  // XXX why?

  // Append another rope: O(1) tree concatenation, no character copy.
  _Self& append(const _Self& __y) {
    _STLP_ASSERT(__y.get_allocator() == get_allocator())
    _M_reset(_S_concat_rep(_M_tree_ptr._M_data, __y._M_tree_ptr._M_data));
    return *this;
  }

  // Append __n copies of __c (via a temporary fill-constructed rope).
  _Self& append(size_t __n, _CharT __c) {
    rope<_CharT,_Alloc> __last(__n, __c);
    return append(__last);
  }
   1702 
  // O(1) swap: exchanges the tree pointers (and allocators, as held by
  // the _M_tree_ptr pair).
  void swap(_Self& __b) {
    _M_tree_ptr.swap(__b._M_tree_ptr);
  }
#if defined (_STLP_USE_PARTIAL_SPEC_WORKAROUND) && !defined (_STLP_FUNCTION_TMPL_PARTIAL_ORDER)
  void _M_swap_workaround(_Self& __x) { swap(__x); }
#endif
   1709 
protected:
  // Result is included in refcount.
  // Build a new tree equal to __old with positions [__pos1, __pos2)
  // replaced by __r (or simply removed when __r is null).  __old and
  // __r themselves are not modified; intermediate trees are released
  // via _Self_destruct_ptr.
  static _RopeRep* replace(_RopeRep* __old, size_t __pos1,
                           size_t __pos2, _RopeRep* __r) {
    if (0 == __old) { _S_ref(__r); return __r; }
    _Self_destruct_ptr __left(_S_substring(__old, 0, __pos1));
    _Self_destruct_ptr __right(_S_substring(__old, __pos2, __old->_M_size._M_data));
    _STLP_MPWFIX_TRY  //*TY 06/01/2000 -
    _RopeRep* __result;

    if (0 == __r) {
      __result = _S_concat_rep(__left, __right);
    } else {
      _STLP_ASSERT(__old->get_allocator() == __r->get_allocator())
      _Self_destruct_ptr __left_result(_S_concat_rep(__left, __r));
      __result = _S_concat_rep(__left_result, __right);
    }
    return __result;
    _STLP_MPWFIX_CATCH  //*TY 06/01/2000 -
  }
   1730 
public:
  // Insert rope __r at position __p.  Throws out_of_range when
  // __p > size().
  void insert(size_t __p, const _Self& __r) {
    if (__p > size()) _M_throw_out_of_range();
    _STLP_ASSERT(get_allocator() == __r.get_allocator())
    _M_reset(replace(_M_tree_ptr._M_data, __p, __p, __r._M_tree_ptr._M_data));
  }

  // Insert __n copies of __c at position __p.
  void insert(size_t __p, size_t __n, _CharT __c) {
    rope<_CharT,_Alloc> __r(__n,__c);
    insert(__p, __r);
  }

  // Insert __n characters from __i at position __p.
  void insert(size_t __p, const _CharT* __i, size_t __n) {
    if (__p > size()) _M_throw_out_of_range();
    _Self_destruct_ptr __left(_S_substring(_M_tree_ptr._M_data, 0, __p));
    _Self_destruct_ptr __right(_S_substring(_M_tree_ptr._M_data, __p, size()));
    _Self_destruct_ptr __left_result(
                                     _S_concat_char_iter(__left, __i, __n));
    // _S_ destr_concat_char_iter should be safe here.
    // But as it stands it's probably not a win, since __left
    // is likely to have additional references.
    _M_reset(_S_concat_rep(__left_result, __right));
  }

  // Insert a 0-terminated string at position __p.
  void insert(size_t __p, const _CharT* __c_string) {
    insert(__p, __c_string, _S_char_ptr_len(__c_string));
  }

  // Insert a single character at position __p.
  void insert(size_t __p, _CharT __c) {
    insert(__p, &__c, 1);
  }

  // Insert a single default-constructed character at position __p.
  void insert(size_t __p) {
    _CharT __c = _CharT();
    insert(__p, &__c, 1);
  }

  // Insert the character range [__i, __j) at position __p.
  void insert(size_t __p, const _CharT* __i, const _CharT* __j) {
    _Self __r(__i, __j);
    insert(__p, __r);
  }

  // Insert the rope range [__i, __j) at position __p.
  void insert(size_t __p, const const_iterator& __i,
                          const const_iterator& __j) {
    _Self __r(__i, __j);
    insert(__p, __r);
  }

  void insert(size_t __p, const iterator& __i,
                          const iterator& __j) {
    _Self __r(__i, __j);
    insert(__p, __r);
  }
   1784 
  // (position, length) versions of replace operations:
  // Replace the __n characters at position __p with __r.  Throws
  // out_of_range when __p > size().  All other overloads funnel into
  // this one via a temporary rope.
  void replace(size_t __p, size_t __n, const _Self& __r) {
    if (__p > size()) _M_throw_out_of_range();
    _M_reset(replace(_M_tree_ptr._M_data, __p, __p + __n, __r._M_tree_ptr._M_data));
  }

  void replace(size_t __p, size_t __n,
               const _CharT* __i, size_t __i_len) {
    _Self __r(__i, __i_len);
    replace(__p, __n, __r);
  }

  void replace(size_t __p, size_t __n, _CharT __c) {
    _Self __r(__c);
    replace(__p, __n, __r);
  }

  void replace(size_t __p, size_t __n, const _CharT* __c_string) {
    _Self __r(__c_string);
    replace(__p, __n, __r);
  }

  void replace(size_t __p, size_t __n,
               const _CharT* __i, const _CharT* __j) {
    _Self __r(__i, __j);
    replace(__p, __n, __r);
  }

  void replace(size_t __p, size_t __n,
               const const_iterator& __i, const const_iterator& __j) {
    _Self __r(__i, __j);
    replace(__p, __n, __r);
  }

  void replace(size_t __p, size_t __n,
               const iterator& __i, const iterator& __j) {
    _Self __r(__i, __j);
    replace(__p, __n, __r);
  }
   1824 
  // Single character variants:
  // Overwrite the one character at position __p with __c, going through
  // a mutable iterator's proxy reference.  Throws out_of_range when
  // __p > size().
  void replace(size_t __p, _CharT __c) {
    if (__p > size()) _M_throw_out_of_range();
    iterator __i(this, __p);
    *__i = __c;
  }

  // The remaining variants replace the single character at __p with the
  // given replacement sequence (length 1 in the (p, n, ...) call).
  void replace(size_t __p, const _Self& __r) {
    replace(__p, 1, __r);
  }

  void replace(size_t __p, const _CharT* __i, size_t __i_len) {
    replace(__p, 1, __i, __i_len);
  }

  void replace(size_t __p, const _CharT* __c_string) {
    replace(__p, 1, __c_string);
  }

  void replace(size_t __p, const _CharT* __i, const _CharT* __j) {
    replace(__p, 1, __i, __j);
  }

  void replace(size_t __p, const const_iterator& __i,
                           const const_iterator& __j) {
    replace(__p, 1, __i, __j);
  }

  void replace(size_t __p, const iterator& __i,
                           const iterator& __j) {
    replace(__p, 1, __i, __j);
  }
   1857 
  // Erase, (position, size) variant.
  // Removes __n characters starting at __p.  Throws out_of_range when
  // __p > size(); __p + __n past the end is presumably clamped by
  // _S_substring inside replace() — confirm there.
  void erase(size_t __p, size_t __n) {
    if (__p > size()) _M_throw_out_of_range();
    _M_reset(replace(_M_tree_ptr._M_data, __p, __p + __n, 0));
  }
   1863 
   1864   // Erase, single character
   1865   void erase(size_t __p) {
   1866     erase(__p, __p + 1);
   1867   }
   1868 
  // Insert, iterator variants.
  // Each delegates to the position-based insert at __p.index() and
  // returns the (position-stable) iterator __p unchanged.
  iterator insert(const iterator& __p, const _Self& __r)
  { insert(__p.index(), __r); return __p; }
  iterator insert(const iterator& __p, size_t __n, _CharT __c)
  { insert(__p.index(), __n, __c); return __p; }
  iterator insert(const iterator& __p, _CharT __c)
  { insert(__p.index(), __c); return __p; }
  iterator insert(const iterator& __p )
  { insert(__p.index()); return __p; }
  iterator insert(const iterator& __p, const _CharT* c_string)
  { insert(__p.index(), c_string); return __p; }
  iterator insert(const iterator& __p, const _CharT* __i, size_t __n)
  { insert(__p.index(), __i, __n); return __p; }
  iterator insert(const iterator& __p, const _CharT* __i,
                  const _CharT* __j)
  { insert(__p.index(), __i, __j);  return __p; }
  iterator insert(const iterator& __p,
                  const const_iterator& __i, const const_iterator& __j)
  { insert(__p.index(), __i, __j); return __p; }
  iterator insert(const iterator& __p,
                  const iterator& __i, const iterator& __j)
  { insert(__p.index(), __i, __j); return __p; }
   1891 
  // Replace, range variants.
  // Replace the range [__p, __q) with the given replacement; each
  // delegates to the (position, length) overloads.
  void replace(const iterator& __p, const iterator& __q,
               const _Self& __r)
  { replace(__p.index(), __q.index() - __p.index(), __r); }
  void replace(const iterator& __p, const iterator& __q, _CharT __c)
  { replace(__p.index(), __q.index() - __p.index(), __c); }
  void replace(const iterator& __p, const iterator& __q,
               const _CharT* __c_string)
  { replace(__p.index(), __q.index() - __p.index(), __c_string); }
  void replace(const iterator& __p, const iterator& __q,
               const _CharT* __i, size_t __n)
  { replace(__p.index(), __q.index() - __p.index(), __i, __n); }
  void replace(const iterator& __p, const iterator& __q,
               const _CharT* __i, const _CharT* __j)
  { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
  void replace(const iterator& __p, const iterator& __q,
               const const_iterator& __i, const const_iterator& __j)
  { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
  void replace(const iterator& __p, const iterator& __q,
               const iterator& __i, const iterator& __j)
  { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
   1913 
  // Replace, iterator variants.
  // Replace the single character at __p; each delegates to the
  // single-position overloads above.
  void replace(const iterator& __p, const _Self& __r)
  { replace(__p.index(), __r); }
  void replace(const iterator& __p, _CharT __c)
  { replace(__p.index(), __c); }
  void replace(const iterator& __p, const _CharT* __c_string)
  { replace(__p.index(), __c_string); }
  void replace(const iterator& __p, const _CharT* __i, size_t __n)
  { replace(__p.index(), __i, __n); }
  void replace(const iterator& __p, const _CharT* __i, const _CharT* __j)
  { replace(__p.index(), __i, __j); }
  void replace(const iterator& __p, const_iterator __i,
               const_iterator __j)
  { replace(__p.index(), __i, __j); }
  void replace(const iterator& __p, iterator __i, iterator __j)
  { replace(__p.index(), __i, __j); }
   1930 
  // Iterator and range variants of erase
  // Erase [__p, __q) and return an iterator at the erasure point.
  iterator erase(const iterator& __p, const iterator& __q) {
    size_t __p_index = __p.index();
    erase(__p_index, __q.index() - __p_index);
    return iterator(this, __p_index);
  }
  // Erase the single character at __p.
  iterator erase(const iterator& __p) {
    size_t __p_index = __p.index();
    erase(__p_index, 1);
    return iterator(this, __p_index);
  }
   1942 
  // Substring of length __len starting at __start.  NOTE: unlike
  // basic_string::substr, the default length is 1 (a single character).
  // Only this overload range-checks __start; _S_substring presumably
  // clamps an end position past size() — confirm there.
  _Self substr(size_t __start, size_t __len = 1) const {
    if (__start > size()) _M_throw_out_of_range();
    return rope<_CharT,_Alloc>(_S_substring(_M_tree_ptr._M_data, __start, __start + __len));
  }

  // Substring [__start, __end) designated by mutable iterators.
  _Self substr(iterator __start, iterator __end) const {
    return rope<_CharT,_Alloc>(_S_substring(_M_tree_ptr._M_data, __start.index(), __end.index()));
  }

  // One-character substring at a mutable iterator.
  _Self substr(iterator __start) const {
    size_t __pos = __start.index();
    return rope<_CharT,_Alloc>(_S_substring(_M_tree_ptr._M_data, __pos, __pos + 1));
  }

  // Substring [__start, __end) designated by const iterators.
  _Self substr(const_iterator __start, const_iterator __end) const {
    // This might eventually take advantage of the cache in the
    // iterator.
    return rope<_CharT,_Alloc>(_S_substring(_M_tree_ptr._M_data, __start.index(), __end.index()));
  }

  // One-character substring at a const iterator.
  rope<_CharT,_Alloc> substr(const_iterator __start) {
    size_t __pos = __start.index();
    return rope<_CharT,_Alloc>(_S_substring(_M_tree_ptr._M_data, __pos, __pos + 1));
  }
   1967 
   1968 #include <stl/_string_npos.h>
   1969 
  // Find the first occurrence of rope __s at or after __pos.
  // Returns npos on failure (or size() under the old SGI semantics).
  size_type find(const _Self& __s, size_type __pos = 0) const {
    if (__pos >= size())
# ifndef _STLP_OLD_ROPE_SEMANTICS
      return npos;
# else
      return size();
# endif

    size_type __result_pos;
    const_iterator __result = _STLP_STD::search(const_begin() + (ptrdiff_t)__pos, const_end(), __s.begin(), __s.end() );
    __result_pos = __result.index();
# ifndef _STLP_OLD_ROPE_SEMANTICS
    if (__result_pos == size()) __result_pos = npos;
# endif
    return __result_pos;
  }
  // Single-character find; defined out of line.
  size_type find(_CharT __c, size_type __pos = 0) const;
  // Find a 0-terminated string at or after __pos.
  // NOTE(review): unlike the rope overload, __pos is not checked
  // against size() here before const_begin() + __pos — confirm that
  // callers never pass __pos past the end.
  size_type find(const _CharT* __s, size_type __pos = 0) const {
    size_type __result_pos;
    const_iterator __result = _STLP_STD::search(const_begin() + (ptrdiff_t)__pos, const_end(),
                                                __s, __s + _S_char_ptr_len(__s));
    __result_pos = __result.index();
# ifndef _STLP_OLD_ROPE_SEMANTICS
    if (__result_pos == size()) __result_pos = npos;
# endif
    return __result_pos;
  }
   1997 
  // Mutable iteration.  Mutable iterators hold a pointer to the rope
  // itself (not just the tree) so writes through them can update it.
  iterator mutable_begin() {
    return(iterator(this, 0));
  }

  iterator mutable_end() {
    return(iterator(this, size()));
  }

  reverse_iterator mutable_rbegin() {
    return reverse_iterator(mutable_end());
  }

  reverse_iterator mutable_rend() {
    return reverse_iterator(mutable_begin());
  }

  // Proxy reference to the character at __pos, through which a single
  // character can be written.
  reference mutable_reference_at(size_type __pos) {
    return reference(this, __pos);
  }
   2017 
# ifdef __STD_STUFF
  // Container-like extras enabled only under __STD_STUFF: mutable
  // operator[]/at via proxy references, no-op capacity functions, and
  // begin()/end() that return mutable iterators.
  reference operator[] (size_type __pos) {
    return reference(this, __pos);
  }

  reference at(size_type __pos) {
    if (__pos >= size()) _M_throw_out_of_range();
    return (*this)[__pos];
  }

  // Capacity interface is a formality: ropes manage their own storage.
  void resize(size_type, _CharT) {}
  void resize(size_type) {}
  void reserve(size_type = 0) {}
  size_type capacity() const {
    return max_size();
  }

  // Stuff below this line is dangerous because it's error prone.
  // I would really like to get rid of it.
  // copy function with funny arg ordering.
  size_type copy(_CharT* __buffer, size_type __n,
                 size_type __pos = 0) const {
    return copy(__pos, __n, __buffer);
  }

  iterator end() { return mutable_end(); }

  iterator begin() { return mutable_begin(); }

  reverse_iterator rend() { return mutable_rend(); }

  reverse_iterator rbegin() { return mutable_rbegin(); }

# else
  // Default build: non-const begin()/end() still return const
  // iterators, avoiding accidental (expensive) mutation through
  // proxy references.
  const_iterator end() { return const_end(); }

  const_iterator begin() { return const_begin(); }

  const_reverse_iterator rend() { return const_rend(); }

  const_reverse_iterator rbegin() { return const_rbegin(); }

# endif
   2062 }; //class rope
   2063 
// Out-of-class definition of rope::npos, needed only for gcc 2.96.
#if defined (__GNUC__) && (__GNUC__ == 2) && (__GNUC_MINOR__ == 96)
template <class _CharT, class _Alloc>
const size_t rope<_CharT, _Alloc>::npos = ~(size_t) 0;
#endif

// Random access through a const iterator: fetch the character __n
// positions past the iterator (returns by value).
template <class _CharT, class _Alloc>
inline _CharT
_Rope_const_iterator< _CharT, _Alloc>::operator[](size_t __n)
{ return rope<_CharT,_Alloc>::_S_fetch(this->_M_root, this->_M_current_pos + __n); }
   2073 
   2074 template <class _CharT, class _Alloc>
   2075 inline bool operator== (const _Rope_const_iterator<_CharT,_Alloc>& __x,
   2076                         const _Rope_const_iterator<_CharT,_Alloc>& __y) {
   2077   return (__x._M_current_pos == __y._M_current_pos &&
   2078           __x._M_root == __y._M_root);
   2079 }
   2080 
   2081 template <class _CharT, class _Alloc>
   2082 inline bool operator< (const _Rope_const_iterator<_CharT,_Alloc>& __x,
   2083                        const _Rope_const_iterator<_CharT,_Alloc>& __y)
   2084 { return (__x._M_current_pos < __y._M_current_pos); }
   2085 
#ifdef _STLP_USE_SEPARATE_RELOPS_NAMESPACE

// Remaining const-iterator relational operators, all derived from
// == and < above.
template <class _CharT, class _Alloc>
inline bool operator!= (const _Rope_const_iterator<_CharT,_Alloc>& __x,
                        const _Rope_const_iterator<_CharT,_Alloc>& __y)
{ return !(__x == __y); }

template <class _CharT, class _Alloc>
inline bool operator> (const _Rope_const_iterator<_CharT,_Alloc>& __x,
                       const _Rope_const_iterator<_CharT,_Alloc>& __y)
{ return __y < __x; }

template <class _CharT, class _Alloc>
inline bool operator<= (const _Rope_const_iterator<_CharT,_Alloc>& __x,
                        const _Rope_const_iterator<_CharT,_Alloc>& __y)
{ return !(__y < __x); }

template <class _CharT, class _Alloc>
inline bool operator>= (const _Rope_const_iterator<_CharT,_Alloc>& __x,
                        const _Rope_const_iterator<_CharT,_Alloc>& __y)
{ return !(__x < __y); }

#endif /* _STLP_USE_SEPARATE_RELOPS_NAMESPACE */
   2109 
// Distance between two const iterators (positional difference).
template <class _CharT, class _Alloc>
inline ptrdiff_t operator-(const _Rope_const_iterator<_CharT,_Alloc>& __x,
                           const _Rope_const_iterator<_CharT,_Alloc>& __y)
{ return (ptrdiff_t)__x._M_current_pos - (ptrdiff_t)__y._M_current_pos; }
   2114 
// Const-iterator arithmetic: produce a new iterator over the same tree
// at an offset position.
#if !defined( __MWERKS__ ) || __MWERKS__ >= 0x2000  // dwa 8/21/97  - "ambiguous access to overloaded function" bug.
template <class _CharT, class _Alloc>
inline _Rope_const_iterator<_CharT,_Alloc>
operator-(const _Rope_const_iterator<_CharT,_Alloc>& __x, ptrdiff_t __n)
{ return _Rope_const_iterator<_CharT,_Alloc>(__x._M_root, __x._M_current_pos - __n); }
# endif

template <class _CharT, class _Alloc>
inline _Rope_const_iterator<_CharT,_Alloc>
operator+(const _Rope_const_iterator<_CharT,_Alloc>& __x, ptrdiff_t __n)
{ return _Rope_const_iterator<_CharT,_Alloc>(__x._M_root, __x._M_current_pos + __n); }

template <class _CharT, class _Alloc>
inline _Rope_const_iterator<_CharT,_Alloc>
operator+(ptrdiff_t __n, const _Rope_const_iterator<_CharT,_Alloc>& __x)
{ return _Rope_const_iterator<_CharT,_Alloc>(__x._M_root, __x._M_current_pos + __n); }
   2131 
// Mutable-iterator equality compares position and the owning rope
// (_M_root_rope, not the tree root as in the const-iterator case).
template <class _CharT, class _Alloc>
inline bool operator== (const _Rope_iterator<_CharT,_Alloc>& __x,
                        const _Rope_iterator<_CharT,_Alloc>& __y) {
  return (__x._M_current_pos == __y._M_current_pos &&
          __x._M_root_rope == __y._M_root_rope);
}

// Ordering is purely positional.
template <class _CharT, class _Alloc>
inline bool operator< (const _Rope_iterator<_CharT,_Alloc>& __x,
                       const _Rope_iterator<_CharT,_Alloc>& __y)
{ return (__x._M_current_pos < __y._M_current_pos); }
   2143 
#if defined (_STLP_USE_SEPARATE_RELOPS_NAMESPACE)
// Remaining mutable-iterator relational operators, derived from
// == and < above.
template <class _CharT, class _Alloc>
inline bool operator!= (const _Rope_iterator<_CharT,_Alloc>& __x,
                        const _Rope_iterator<_CharT,_Alloc>& __y)
{ return !(__x == __y); }

template <class _CharT, class _Alloc>
inline bool operator> (const _Rope_iterator<_CharT,_Alloc>& __x,
                       const _Rope_iterator<_CharT,_Alloc>& __y)
{ return __y < __x; }

template <class _CharT, class _Alloc>
inline bool operator<= (const _Rope_iterator<_CharT,_Alloc>& __x,
                        const _Rope_iterator<_CharT,_Alloc>& __y)
{ return !(__y < __x); }

template <class _CharT, class _Alloc>
inline bool operator>= (const _Rope_iterator<_CharT,_Alloc>& __x,
                        const _Rope_iterator<_CharT,_Alloc>& __y)
{ return !(__x < __y); }
#endif /* _STLP_USE_SEPARATE_RELOPS_NAMESPACE */
   2165 
// Mutable-iterator distance and arithmetic; new iterators keep the same
// owning rope (_M_root_rope) at an offset position.
template <class _CharT, class _Alloc>
inline ptrdiff_t operator-(const _Rope_iterator<_CharT,_Alloc>& __x,
                           const _Rope_iterator<_CharT,_Alloc>& __y)
{ return (ptrdiff_t)__x._M_current_pos - (ptrdiff_t)__y._M_current_pos; }

#if !defined( __MWERKS__ ) || __MWERKS__ >= 0x2000  // dwa 8/21/97  - "ambiguous access to overloaded function" bug.
template <class _CharT, class _Alloc>
inline _Rope_iterator<_CharT,_Alloc>
operator-(const _Rope_iterator<_CharT,_Alloc>& __x,
          ptrdiff_t __n) {
  return _Rope_iterator<_CharT,_Alloc>(__x._M_root_rope, __x._M_current_pos - __n);
}
# endif

template <class _CharT, class _Alloc>
inline _Rope_iterator<_CharT,_Alloc>
operator+(const _Rope_iterator<_CharT,_Alloc>& __x,
          ptrdiff_t __n) {
  return _Rope_iterator<_CharT,_Alloc>(__x._M_root_rope, __x._M_current_pos + __n);
}

template <class _CharT, class _Alloc>
inline _Rope_iterator<_CharT,_Alloc>
operator+(ptrdiff_t __n, const _Rope_iterator<_CharT,_Alloc>& __x) {
  return _Rope_iterator<_CharT,_Alloc>(__x._M_root_rope, __x._M_current_pos + __n);
}
   2192 
// Concatenate two ropes: O(1) tree concatenation, no character copy.
template <class _CharT, class _Alloc>
inline rope<_CharT,_Alloc>
operator+ (const rope<_CharT,_Alloc>& __left,
           const rope<_CharT,_Alloc>& __right) {
  _STLP_ASSERT(__left.get_allocator() == __right.get_allocator())
  return rope<_CharT,_Alloc>(rope<_CharT,_Alloc>::_S_concat_rep(__left._M_tree_ptr._M_data, __right._M_tree_ptr._M_data));
  // Inlining this should make it possible to keep __left and __right in registers.
}
   2201 
   2202 template <class _CharT, class _Alloc>
   2203 inline rope<_CharT,_Alloc>&
   2204 operator+= (rope<_CharT,_Alloc>& __left,
   2205             const rope<_CharT,_Alloc>& __right) {
   2206   __left.append(__right);
   2207   return __left;
   2208 }
   2209 
// Concatenate a rope with a 0-terminated string.
template <class _CharT, class _Alloc>
inline rope<_CharT,_Alloc>
operator+ (const rope<_CharT,_Alloc>& __left,
           const _CharT* __right) {
  size_t __rlen = rope<_CharT,_Alloc>::_S_char_ptr_len(__right);
  return rope<_CharT,_Alloc>(rope<_CharT,_Alloc>::_S_concat_char_iter(__left._M_tree_ptr._M_data, __right, __rlen));
}

// Append a 0-terminated string.
template <class _CharT, class _Alloc>
inline rope<_CharT,_Alloc>&
operator+= (rope<_CharT,_Alloc>& __left,
            const _CharT* __right) {
  __left.append(__right);
  return __left;
}

// Concatenate a rope with a single character.
template <class _CharT, class _Alloc>
inline rope<_CharT,_Alloc>
operator+ (const rope<_CharT,_Alloc>& __left, _CharT __right) {
  return rope<_CharT,_Alloc>(rope<_CharT,_Alloc>::_S_concat_char_iter(__left._M_tree_ptr._M_data, &__right, 1));
}

// Append a single character.
template <class _CharT, class _Alloc>
inline rope<_CharT,_Alloc>&
operator+= (rope<_CharT,_Alloc>& __left, _CharT __right) {
  __left.append(__right);
  return __left;
}
   2238 
// Lexicographic comparison of ropes via rope::compare.
template <class _CharT, class _Alloc>
inline bool
operator< (const rope<_CharT,_Alloc>& __left,
           const rope<_CharT,_Alloc>& __right) {
  return __left.compare(__right) < 0;
}

template <class _CharT, class _Alloc>
inline bool
operator== (const rope<_CharT,_Alloc>& __left,
            const rope<_CharT,_Alloc>& __right) {
  return __left.compare(__right) == 0;
}
   2252 
#ifdef _STLP_USE_SEPARATE_RELOPS_NAMESPACE

// Remaining rope relational operators, derived from == and < above.
template <class _CharT, class _Alloc>
inline bool
operator!= (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
  return !(__x == __y);
}

template <class _CharT, class _Alloc>
inline bool
operator> (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
  return __y < __x;
}

template <class _CharT, class _Alloc>
inline bool
operator<= (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
  return !(__y < __x);
}

template <class _CharT, class _Alloc>
inline bool
operator>= (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
  return !(__x < __y);
}

// Inequality for character-pointer proxies, derived from == below.
template <class _CharT, class _Alloc>
inline bool operator!= (const _Rope_char_ptr_proxy<_CharT,_Alloc>& __x,
                        const _Rope_char_ptr_proxy<_CharT,_Alloc>& __y) {
  return !(__x == __y);
}

#endif /* _STLP_USE_SEPARATE_RELOPS_NAMESPACE */
   2286 
   2287 template <class _CharT, class _Alloc>
   2288 inline bool operator== (const _Rope_char_ptr_proxy<_CharT,_Alloc>& __x,
   2289                         const _Rope_char_ptr_proxy<_CharT,_Alloc>& __y) {
   2290   return (__x._M_pos == __y._M_pos && __x._M_root == __y._M_root);
   2291 }
   2292 
#if !defined (_STLP_USE_NO_IOSTREAMS)
// Stream inserter for rope; declared here only — the definition is not
// in this header (presumably in <stl/_rope.c>, included at the bottom
// of this file when !_STLP_LINK_TIME_INSTANTIATION).
template<class _CharT, class _Traits, class _Alloc>
basic_ostream<_CharT, _Traits>& operator<< (basic_ostream<_CharT, _Traits>& __o,
                                            const rope<_CharT, _Alloc>& __r);
#endif
   2298 
// Convenience typedefs for the common character types, analogous to
// string/wstring for basic_string.
typedef rope<char, allocator<char> > crope;
#if defined (_STLP_HAS_WCHAR_T)
typedef rope<wchar_t, allocator<wchar_t> > wrope;
#endif
   2303 
   2304 inline crope::reference __mutable_reference_at(crope& __c, size_t __i)
   2305 { return __c.mutable_reference_at(__i); }
   2306 
   2307 #if defined (_STLP_HAS_WCHAR_T)
   2308 inline wrope::reference __mutable_reference_at(wrope& __c, size_t __i)
   2309 { return __c.mutable_reference_at(__i); }
   2310 #endif
   2311 
#if defined (_STLP_FUNCTION_TMPL_PARTIAL_ORDER)
// Non-member swap forwarding to the member swap.
template <class _CharT, class _Alloc>
inline void swap(rope<_CharT,_Alloc>& __x, rope<_CharT,_Alloc>& __y)
{ __x.swap(__y); }
#else
// Compilers without partial ordering of function templates only get the
// concrete overloads for the predefined rope typedefs.
inline void swap(crope& __x, crope& __y) { __x.swap(__y); }
# ifdef _STLP_HAS_WCHAR_T  // dwa 8/21/97
inline void swap(wrope& __x, wrope& __y) { __x.swap(__y); }
# endif

#endif /* _STLP_FUNCTION_TMPL_PARTIAL_ORDER */
   2324 
   2325 
   2326 // Hash functions should probably be revisited later:
   2327 _STLP_TEMPLATE_NULL struct hash<crope> {
   2328   size_t operator()(const crope& __str) const {
   2329     size_t _p_size = __str.size();
   2330 
   2331     if (0 == _p_size) return 0;
   2332     return 13*__str[0] + 5*__str[_p_size - 1] + _p_size;
   2333   }
   2334 };
   2335 
   2336 #if defined (_STLP_HAS_WCHAR_T)  // dwa 8/21/97
   2337 _STLP_TEMPLATE_NULL struct hash<wrope> {
   2338   size_t operator()(const wrope& __str) const {
   2339     size_t _p_size = __str.size();
   2340 
   2341     if (0 == _p_size) return 0;
   2342     return 13*__str[0] + 5*__str[_p_size - 1] + _p_size;
   2343   }
   2344 };
   2345 #endif
   2346 
#if (!defined (_STLP_MSVC) || (_STLP_MSVC >= 1310))
// I couldn't get this to work with VC++
// Rotate specialized for rope iterators.  Only the declaration lives in
// this header; the definition is elsewhere (hence the DMC 'extern'
// workaround below).
template<class _CharT,class _Alloc>
#  if defined (__DMC__)
extern
#  endif
void _Rope_rotate(_Rope_iterator<_CharT, _Alloc> __first,
                  _Rope_iterator<_CharT, _Alloc> __middle,
                  _Rope_iterator<_CharT, _Alloc> __last);

// Concrete rotate overload for char-rope iterators, forwarding to
// _Rope_rotate so it is preferred over the generic algorithm.
inline void rotate(_Rope_iterator<char, allocator<char> > __first,
                   _Rope_iterator<char, allocator<char> > __middle,
                   _Rope_iterator<char, allocator<char> > __last)
{ _Rope_rotate(__first, __middle, __last); }
#endif
   2362 
   2363 template <class _CharT, class _Alloc>
   2364 inline _Rope_char_ref_proxy<_CharT, _Alloc>::operator _CharT () const {
   2365   if (_M_current_valid) {
   2366     return _M_current;
   2367   } else {
   2368     return _My_rope::_S_fetch(_M_root->_M_tree_ptr._M_data, _M_pos);
   2369   }
   2370 }
   2371 
#if defined (_STLP_CLASS_PARTIAL_SPECIALIZATION) && !defined (_STLP_NO_MOVE_SEMANTIC)
// Opt rope into STLport's internal move framework.
template <class _CharT, class _Alloc>
struct __move_traits<rope<_CharT, _Alloc> > {
  typedef __true_type implemented;
  // Completeness depends on the allocator:
  typedef typename __move_traits<_Alloc>::complete complete;
};
#endif
   2380 
   2381 _STLP_END_NAMESPACE
   2382 
   2383 #if !defined (_STLP_LINK_TIME_INSTANTIATION)
   2384 #  include <stl/_rope.c>
   2385 #endif
   2386 
   2387 #endif /* _STLP_INTERNAL_ROPE_H */
   2388 
   2389 // Local Variables:
   2390 // mode:C++
   2391 // End:
   2392