// Home | History | Annotate | Download | only in stl  (code-search viewer residue, not part of the original source)
      1 /*
      2  *
      3  * Copyright (c) 1996,1997
      4  * Silicon Graphics Computer Systems, Inc.
      5  *
      6  * Copyright (c) 1997
      7  * Moscow Center for SPARC Technology
      8  *
      9  * Copyright (c) 1999
     10  * Boris Fomitchev
     11  *
     12  * This material is provided "as is", with absolutely no warranty expressed
     13  * or implied. Any use is at your own risk.
     14  *
     15  * Permission to use or copy this software for any purpose is hereby granted
     16  * without fee, provided the above notices are retained on all copies.
     17  * Permission to modify the code and to distribute modified code is granted,
     18  * provided the above notices are retained, and a notice that the code was
     19  * modified is included with the above copyright notice.
     20  *
     21  */
     22 
     23 /* NOTE: This is an internal header file, included by other STL headers.
     24  *   You should not attempt to use it directly.
     25  */
     26 
     27 // rope<_CharT,_Alloc> is a sequence of _CharT.
     28 // Ropes appear to be mutable, but update operations
     29 // really copy enough of the data structure to leave the original
     30 // valid.  Thus ropes can be logically copied by just copying
     31 // a pointer value.
     32 
     33 #ifndef _STLP_INTERNAL_ROPE_H
     34 #define _STLP_INTERNAL_ROPE_H
     35 
     36 #ifndef _STLP_INTERNAL_ALGOBASE_H
     37 #  include <stl/_algobase.h>
     38 #endif
     39 
     40 #if !defined (_STLP_USE_NO_IOSTREAMS) && !defined (_STLP_INTERNAL_IOSFWD)
     41 #  include <stl/_iosfwd.h>
     42 #endif
     43 
     44 #ifndef _STLP_INTERNAL_ALLOC_H
     45 #  include <stl/_alloc.h>
     46 #endif
     47 
     48 #ifndef _STLP_INTERNAL_ITERATOR_H
     49 #  include <stl/_iterator.h>
     50 #endif
     51 
     52 #ifndef _STLP_INTERNAL_ALGO_H
     53 #  include <stl/_algo.h>
     54 #endif
     55 
     56 #ifndef _STLP_INTERNAL_FUNCTION_BASE_H
     57 #  include <stl/_function_base.h>
     58 #endif
     59 
     60 #ifndef _STLP_INTERNAL_NUMERIC_H
     61 #  include <stl/_numeric.h>
     62 #endif
     63 
     64 #ifndef _STLP_INTERNAL_HASH_FUN_H
     65 #  include <stl/_hash_fun.h>
     66 #endif
     67 
     68 #ifndef _STLP_CHAR_TRAITS_H
     69 #  include <stl/char_traits.h>
     70 #endif
     71 
     72 #ifndef _STLP_INTERNAL_THREADS_H
     73 #  include <stl/_threads.h>
     74 #endif
     75 
     76 #ifdef _STLP_SGI_THREADS
     77 #  include <mutex.h>
     78 #endif
     79 
     80 #ifndef _STLP_DONT_SUPPORT_REBIND_MEMBER_TEMPLATE
     81 #  define _STLP_CREATE_ALLOCATOR(__atype,__a, _Tp) (_Alloc_traits<_Tp,__atype>::create_allocator(__a))
     82 #else
     83 #  define _STLP_CREATE_ALLOCATOR(__atype,__a, _Tp) __stl_alloc_create(__a,(_Tp*)0)
     84 #endif
     85 
     86 _STLP_BEGIN_NAMESPACE
     87 
// First a lot of forward declarations.  The standard seems to require
// much stricter "declaration before use" than many of the implementations
// that preceded it.

// The user-visible rope class (defined later in this header).
template<class _CharT, _STLP_DFL_TMPL_PARAM(_Alloc, allocator<_CharT>) > class rope;
// Internal tree-node types: concatenation nodes, the common node base,
// leaves, function (lazy char_producer) nodes and substring nodes.
template<class _CharT, class _Alloc> struct _Rope_RopeConcatenation;
template<class _CharT, class _Alloc> struct _Rope_RopeRep;
template<class _CharT, class _Alloc> struct _Rope_RopeLeaf;
template<class _CharT, class _Alloc> struct _Rope_RopeFunction;
template<class _CharT, class _Alloc> struct _Rope_RopeSubstring;
// Iterators and the proxy classes that make ropes appear mutable.
template<class _CharT, class _Alloc> class _Rope_iterator;
template<class _CharT, class _Alloc> class _Rope_const_iterator;
template<class _CharT, class _Alloc> class _Rope_char_ref_proxy;
template<class _CharT, class _Alloc> class _Rope_char_ptr_proxy;
    101 
    102 _STLP_MOVE_TO_PRIV_NAMESPACE
    103 
// Type trait: yields __true_type when _CharT is a built-in character
// type (char, and wchar_t when available), __false_type otherwise.
// Used below to select eos-termination and buffer-sharing optimizations.
template <class _CharT>
struct _BasicCharType { typedef __false_type _Ret; };

_STLP_TEMPLATE_NULL
struct _BasicCharType<char> { typedef __true_type _Ret; };

#ifdef _STLP_HAS_WCHAR_T
_STLP_TEMPLATE_NULL
struct _BasicCharType<wchar_t> { typedef __true_type _Ret; };
#endif
    114 
// Some helpers, so we can use the power algorithm on ropes.
// See below for why this isn't local to the implementation.

// This uses a nonstandard refcount convention.
// The result has refcount 0.
// Binary functor whose operator() concatenates two ropes; this lets the
// generic power algorithm build a rope by repeated concatenation.
template<class _CharT, class _Alloc>
struct _Rope_Concat_fn
  : public binary_function<rope<_CharT,_Alloc>, rope<_CharT,_Alloc>,
                           rope<_CharT,_Alloc> > {
  // Returns a new rope that is __x followed by __y; inputs are unchanged.
  rope<_CharT,_Alloc> operator() (const rope<_CharT,_Alloc>& __x,
                                  const rope<_CharT,_Alloc>& __y) {
    return __x + __y;
  }
};
    129 
// Identity element for _Rope_Concat_fn under concatenation: the empty rope.
template <class _CharT, class _Alloc>
inline
rope<_CharT,_Alloc>
__identity_element(_Rope_Concat_fn<_CharT, _Alloc>)
{ return rope<_CharT,_Alloc>(); }
    135 
    136 _STLP_MOVE_TO_STD_NAMESPACE
    137 
// Store an eos (end-of-string terminator) at *__p.
// For integral character types a zero value is assigned directly;
// for other types the element is default-constructed in place.
template <class _CharT>
inline void _S_construct_null_aux(_CharT *__p, const __true_type&)
{ *__p = 0; }

template <class _CharT>
inline void _S_construct_null_aux(_CharT *__p, const __false_type&)
{ _STLP_STD::_Construct(__p); }

// Dispatch on whether _CharT is an integral type.
template <class _CharT>
inline void _S_construct_null(_CharT *__p) {
  typedef typename _IsIntegral<_CharT>::_Ret _Char_Is_Integral;
  _S_construct_null_aux(__p, _Char_Is_Integral());
}
    152 
// char_producers are logically functions that generate a section of
// a string.  These can be converted to ropes.  The resulting rope
// invokes the char_producer on demand.  This allows, for example,
// files to be viewed as ropes without reading the entire file.
template <class _CharT>
class char_producer {
public:
  virtual ~char_producer() {}
  // Fill __buffer with __len characters of the underlying sequence,
  // starting at __start_pos.  Concrete producers must override this.
  virtual void operator()(size_t __start_pos, size_t __len,
                          _CharT* __buffer) = 0;
  // Buffer should really be an arbitrary output iterator.
  // That way we could flatten directly into an ostream, etc.
  // This is thoroughly impossible, since iterator types don't
  // have runtime descriptions.
};
    168 
    169 // Sequence buffers:
    170 //
    171 // Sequence must provide an append operation that appends an
    172 // array to the sequence.  Sequence buffers are useful only if
    173 // appending an entire array is cheaper than appending element by element.
    174 // This is true for many string representations.
    175 // This should  perhaps inherit from ostream<sequence::value_type>
    176 // and be implemented correspondingly, so that they can be used
    177 // for formatted.  For the sake of portability, we don't do this yet.
    178 //
    179 // For now, sequence buffers behave as output iterators.  But they also
    180 // behave a little like basic_ostringstream<sequence::value_type> and a
    181 // little like containers.
    182 
// Output-iterator adaptor that batches single-element appends into a
// small local buffer and flushes them to the underlying _Sequence in
// chunks of up to _Buf_sz elements, amortizing the cost of append().
template<class _Sequence
# if !(defined (_STLP_NON_TYPE_TMPL_PARAM_BUG) || \
       defined ( _STLP_NO_DEFAULT_NON_TYPE_PARAM ))
         , size_t _Buf_sz = 100
#   if defined(__sgi) && !defined(__GNUC__)
#   define __TYPEDEF_WORKAROUND
         ,class _V = typename _Sequence::value_type
#   endif /* __sgi */
# endif /* _STLP_NON_TYPE_TMPL_PARAM_BUG */
         >
// The 3rd parameter works around a common compiler bug.
class sequence_buffer : public iterator <output_iterator_tag, void, void, void, void> {
public:
# ifndef __TYPEDEF_WORKAROUND
  typedef typename _Sequence::value_type value_type;
  typedef sequence_buffer<_Sequence
# if !(defined (_STLP_NON_TYPE_TMPL_PARAM_BUG) || \
       defined ( _STLP_NO_DEFAULT_NON_TYPE_PARAM ))
  , _Buf_sz
  > _Self;
# else /* _STLP_NON_TYPE_TMPL_PARAM_BUG */
  > _Self;
  enum { _Buf_sz = 100};
# endif /* _STLP_NON_TYPE_TMPL_PARAM_BUG */
  // # endif
# else /* __TYPEDEF_WORKAROUND */
  typedef _V value_type;
  typedef sequence_buffer<_Sequence, _Buf_sz, _V> _Self;
# endif /* __TYPEDEF_WORKAROUND */
protected:
  _Sequence* _M_prefix;            // destination sequence (not owned)
  value_type _M_buffer[_Buf_sz];   // pending, not-yet-appended elements
  size_t     _M_buf_count;         // number of valid elements in _M_buffer
public:
  // Append all buffered elements to the sequence and empty the buffer.
  void flush() {
    _M_prefix->append(_M_buffer, _M_buffer + _M_buf_count);
    _M_buf_count = 0;
  }
  ~sequence_buffer() { flush(); }
  sequence_buffer() : _M_prefix(0), _M_buf_count(0) {}
  sequence_buffer(const _Self& __x) {
    _M_prefix = __x._M_prefix;
    _M_buf_count = __x._M_buf_count;
    _STLP_STD::copy(__x._M_buffer, __x._M_buffer + __x._M_buf_count, _M_buffer);
  }
  // Copying from a non-const buffer flushes it first, so the new copy
  // can start out empty instead of duplicating pending elements.
  sequence_buffer(_Self& __x) {
    __x.flush();
    _M_prefix = __x._M_prefix;
    _M_buf_count = 0;
  }
  sequence_buffer(_Sequence& __s) : _M_prefix(&__s), _M_buf_count(0) {}
  _Self& operator= (_Self& __x) {
    __x.flush();
    _M_prefix = __x._M_prefix;
    _M_buf_count = 0;
    return *this;
  }
  _Self& operator= (const _Self& __x) {
    _M_prefix = __x._M_prefix;
    _M_buf_count = __x._M_buf_count;
    _STLP_STD::copy(__x._M_buffer, __x._M_buffer + __x._M_buf_count, _M_buffer);
    return *this;
  }
  // Buffer one element, flushing first if the buffer is full.
  void push_back(value_type __x) {
    if (_M_buf_count < _Buf_sz) {
      _M_buffer[_M_buf_count] = __x;
      ++_M_buf_count;
    } else {
      flush();
      _M_buffer[0] = __x;
      _M_buf_count = 1;
    }
  }
  // Append __len elements: copy into the buffer if they fit, pass
  // straight through to the sequence when the buffer is empty,
  // otherwise flush and retry.
  void append(const value_type *__s, size_t __len) {
    if (__len + _M_buf_count <= _Buf_sz) {
      size_t __i = _M_buf_count;
      size_t __j = 0;
      for (; __j < __len; __i++, __j++) {
        _M_buffer[__i] = __s[__j];
      }
      _M_buf_count += __len;
    } else if (0 == _M_buf_count) {
      _M_prefix->append(__s, __s + __len);
    } else {
      flush();
      append(__s, __len);
    }
  }
  // ostream-style conveniences.
  _Self& write(const value_type *__s, size_t __len) {
    append(__s, __len);
    return *this;
  }
  _Self& put(value_type __x) {
    push_back(__x);
    return *this;
  }
  // Output-iterator protocol: assignment stores; * and ++ are no-ops.
  _Self& operator=(const value_type& __rhs) {
    push_back(__rhs);
    return *this;
  }
  _Self& operator*() { return *this; }
  _Self& operator++() { return *this; }
  _Self& operator++(int) { return *this; }
};
    287 
// The following should be treated as private, at least for now.
// Dual of char_producer: an object that is handed successive chunks
// of a rope during traversal.
// NOTE(review): the bool result presumably tells the traversal whether
// to continue — confirm against the callers elsewhere in this file.
template<class _CharT>
class _Rope_char_consumer {
#if !defined (_STLP_MEMBER_TEMPLATES)
public:
  //Without member templates we have to use run-time parameterization.
  // The symmetry with char_producer is accidental and temporary.
  virtual ~_Rope_char_consumer() {}
  virtual bool operator()(const _CharT* __buffer, size_t __len) = 0;
#endif
};
    299 
    300 //
    301 // What follows should really be local to rope.  Unfortunately,
    302 // that doesn't work, since it makes it impossible to define generic
    303 // equality on rope iterators.  According to the draft standard, the
    304 // template parameters for such an equality operator cannot be inferred
// from the occurrence of a member class as a parameter.
    306 // (SGI compilers in fact allow this, but the __result wouldn't be
    307 // portable.)
    308 // Similarly, some of the static member functions are member functions
    309 // only to avoid polluting the global namespace, and to circumvent
    310 // restrictions on type inference for template functions.
    311 //
    312 
    313 //
    314 // The internal data structure for representing a rope.  This is
    315 // private to the implementation.  A rope is really just a pointer
    316 // to one of these.
    317 //
    318 // A few basic functions for manipulating this data structure
    319 // are members of _RopeRep.  Most of the more complex algorithms
    320 // are implemented as rope members.
    321 //
    322 // Some of the static member functions of _RopeRep have identically
    323 // named functions in rope that simply invoke the _RopeRep versions.
    324 //
    325 
// Base class for every node in a rope's tree representation.  A rope is
// just a reference-counted pointer to one of these.  Concrete node kinds
// (leaf, concatenation, function, substring) are distinguished by _M_tag
// and defined as derived structs below.
template<class _CharT, class _Alloc>
struct _Rope_RopeRep
  : public _Refcount_Base
{
  typedef _Rope_RopeRep<_CharT, _Alloc> _Self;
public:
  //
  // GAB: 11/09/05
  //
  // "__ROPE_DEPTH_SIZE" is set to one more than the "__ROPE_MAX_DEPTH".
  // This was originally just an addition of "__ROPE_MAX_DEPTH + 1"
  // but this addition causes the sunpro compiler to complain about
  // multiple declarations during the initialization of "_S_min_len".
  // Changed to be a fixed value and the sunpro compiler appears to
  // be happy???
  //
#  define __ROPE_MAX_DEPTH  45
#  define __ROPE_DEPTH_SIZE 46 // __ROPE_MAX_DEPTH + 1
  enum { _S_max_rope_depth = __ROPE_MAX_DEPTH };
  enum _Tag {_S_leaf, _S_concat, _S_substringfn, _S_function};
  // Apparently needed by VC++
  // The data fields of leaves are allocated with some
  // extra space, to accommodate future growth and for basic
  // character types, to hold a trailing eos character.
  enum { _S_alloc_granularity = 8 };

  _Tag _M_tag:8;          // concrete node kind
  bool _M_is_balanced:8;  // true if this subtree is known to be balanced

  _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
  typedef _Alloc allocator_type;

  // _M_size is an _STLP_alloc_proxy, which also stores the allocator.
  allocator_type get_allocator() const { return allocator_type(_M_size);  }

  unsigned char _M_depth;             // depth of this subtree
  _CharT* _STLP_VOLATILE _M_c_string; // cached flattened string (see below)
  _STLP_PRIV _STLP_alloc_proxy<size_t, _CharT, allocator_type> _M_size;

#ifdef _STLP_NO_ARROW_OPERATOR
  _Rope_RopeRep() : _Refcount_Base(1), _M_size(allocator_type(), 0) {
#  if defined (_STLP_CHECK_RUNTIME_COMPATIBILITY)
    _STLP_CHECK_RUNTIME_COMPATIBILITY();
#  endif
  }
#endif

  /* Flattened version of string, if needed.  */
  /* typically 0.                             */
  /* If it's not 0, then the memory is owned  */
  /* by this node.                            */
  /* In the case of a leaf, this may point to */
  /* the same memory as the data field.       */
  _Rope_RopeRep(_Tag __t, unsigned char __d, bool __b, size_t _p_size,
                allocator_type __a) :
    _Refcount_Base(1),
    _M_tag(__t), _M_is_balanced(__b), _M_depth(__d), _M_c_string(0), _M_size(__a, _p_size) {
#if defined (_STLP_CHECK_RUNTIME_COMPATIBILITY)
    _STLP_CHECK_RUNTIME_COMPATIBILITY();
#endif
    }

  typedef _STLP_TYPENAME _STLP_PRIV _BasicCharType<_CharT>::_Ret _IsBasicCharType;

#if 0
  /* Please tell why this code is necessary if you uncomment it.
   * Problem with it is that the rope implementation expects that _S_rounded_up_size(n)
   * returns a size > n in order to store the terminating null character. When
   * the instantiation type is not char or wchar_t this is not guaranteed, resulting in
   * memory overrun.
   */
  static size_t _S_rounded_up_size_aux(size_t __n, __true_type const& /*_IsBasicCharType*/) {
    // Allow slop for in-place expansion.
    return (__n + _S_alloc_granularity) & ~(_S_alloc_granularity - 1);
  }

  static size_t _S_rounded_up_size_aux(size_t __n, __false_type const& /*_IsBasicCharType*/) {
    // Allow slop for in-place expansion.
    return (__n + _S_alloc_granularity - 1) & ~(_S_alloc_granularity - 1);
  }
#endif
  // fbp : moved from RopeLeaf
  // Rounds __n up to the allocation granularity, always leaving at least
  // one extra slot (room for a trailing eos and in-place expansion).
  static size_t _S_rounded_up_size(size_t __n)
  //{ return _S_rounded_up_size_aux(__n, _IsBasicCharType()); }
  { return (__n + _S_alloc_granularity) & ~(_S_alloc_granularity - 1); }

  // Destroys and deallocates a character buffer of length __len that was
  // allocated with _S_rounded_up_size padding.
  static void _S_free_string( _CharT* __s, size_t __len,
                             allocator_type __a) {
    _STLP_STD::_Destroy_Range(__s, __s + __len);
    //  This has to be a static member, so this gets a bit messy
#   ifndef _STLP_DONT_SUPPORT_REBIND_MEMBER_TEMPLATE
    __a.deallocate(__s, _S_rounded_up_size(__len));    //*ty 03/24/2001 - restored not to use __stl_alloc_rebind() since it is not defined under _STLP_MEMBER_TEMPLATE_CLASSES
#   else
    __stl_alloc_rebind (__a, (_CharT*)0).deallocate(__s, _S_rounded_up_size(__len));
#   endif
  }

  // Deallocate data section of a leaf.
  // This shouldn't be a member function.
  // But its hard to do anything else at the
  // moment, because it's templatized w.r.t.
  // an allocator.
  // Does nothing if __GC is defined.
  void _M_free_c_string();
  void _M_free_tree();
  // Drop one reference; free the whole subtree when the count hits zero.
  // Assumes this node pointer is not 0.
  void _M_unref_nonnil() {
    if (_M_decr() == 0) _M_free_tree();
  }
  void _M_ref_nonnil() {
    _M_incr();
  }
  // Null-tolerant wrappers around the reference-count operations.
  static void _S_unref(_Self* __t) {
    if (0 != __t) {
      __t->_M_unref_nonnil();
    }
  }
  static void _S_ref(_Self* __t) {
    if (0 != __t) __t->_M_incr();
  }
  //static void _S_free_if_unref(_Self* __t) {
  //  if (0 != __t && 0 == __t->_M_ref_count) __t->_M_free_tree();
  //}
};
    449 
// Leaf node: owns a flat array of _M_size._M_data characters, allocated
// with _S_rounded_up_size padding.
template<class _CharT, class _Alloc>
struct _Rope_RopeLeaf : public _Rope_RopeRep<_CharT,_Alloc> {
public:
  _CharT* _M_data; /* Not necessarily 0 terminated. */
                                /* The allocated size is         */
                                /* _S_rounded_up_size(size), except */
                                /* in the GC case, in which it   */
                                /* doesn't matter.               */
private:
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
  typedef typename _RopeRep::_IsBasicCharType _IsBasicCharType;
  // For basic character types the data buffer doubles as the cached
  // flat c-string representation.
  void _M_init(__true_type const& /*_IsBasicCharType*/) {
    this->_M_c_string = _M_data;
  }
  void _M_init(__false_type const& /*_IsBasicCharType*/) {}

public:
  _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
  typedef typename _RopeRep::allocator_type allocator_type;

  // Takes ownership of __d, which must hold _p_size (> 0) characters.
  _Rope_RopeLeaf( _CharT* __d, size_t _p_size, allocator_type __a)
    : _Rope_RopeRep<_CharT,_Alloc>(_RopeRep::_S_leaf, 0, true, _p_size, __a),
      _M_data(__d) {
    _STLP_ASSERT(_p_size > 0)
    _M_init(_IsBasicCharType());
  }

# ifdef _STLP_NO_ARROW_OPERATOR
  _Rope_RopeLeaf() {}
  _Rope_RopeLeaf(const _Rope_RopeLeaf<_CharT, _Alloc>& ) {}
# endif

  // The constructor assumes that d has been allocated with
  // the proper allocator and the properly padded size.
  // In contrast, the destructor deallocates the data:
  ~_Rope_RopeLeaf() {
    // Only free the cached string separately when it is a distinct buffer.
    if (_M_data != this->_M_c_string) {
      this->_M_free_c_string();
    }
    _RopeRep::_S_free_string(_M_data, this->_M_size._M_data, this->get_allocator());
  }
};
    492 
// Interior node: the concatenation of two subtrees.  Holds one reference
// to each child; size is the sum of the children's sizes and depth is
// one more than the deeper child's.
template<class _CharT, class _Alloc>
struct _Rope_RopeConcatenation : public _Rope_RopeRep<_CharT, _Alloc> {
private:
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;

public:
  _RopeRep* _M_left;
  _RopeRep* _M_right;
  _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
  typedef typename _RopeRep::allocator_type allocator_type;
  _Rope_RopeConcatenation(_RopeRep* __l, _RopeRep* __r, allocator_type __a)
    : _Rope_RopeRep<_CharT,_Alloc>(_RopeRep::_S_concat,
                                   (max)(__l->_M_depth, __r->_M_depth) + 1, false,
                                   __l->_M_size._M_data + __r->_M_size._M_data, __a), _M_left(__l), _M_right(__r)
  {}
# ifdef _STLP_NO_ARROW_OPERATOR
  _Rope_RopeConcatenation() {}
  _Rope_RopeConcatenation(const _Rope_RopeConcatenation<_CharT, _Alloc>&) {}
# endif

  // Release the cached flat string and drop one reference from each child.
  ~_Rope_RopeConcatenation() {
    this->_M_free_c_string();
    _M_left->_M_unref_nonnil();
    _M_right->_M_unref_nonnil();
  }
};
    519 
// Function node: characters are produced on demand by a char_producer
// rather than being stored.  Size is fixed at construction; depth is 0.
template <class _CharT, class _Alloc>
struct _Rope_RopeFunction : public _Rope_RopeRep<_CharT, _Alloc> {
private:
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
public:
  char_producer<_CharT>* _M_fn;
  /*
   * Char_producer is owned by the
   * rope and should be explicitly
   * deleted when the rope becomes
   * inaccessible.
   */
  bool _M_delete_when_done;
  _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
  typedef typename _Rope_RopeRep<_CharT,_Alloc>::allocator_type allocator_type;
# ifdef _STLP_NO_ARROW_OPERATOR
  _Rope_RopeFunction() {}
  _Rope_RopeFunction(const _Rope_RopeFunction<_CharT, _Alloc>& ) {}
# endif

  // __d controls whether the destructor deletes __f.
  _Rope_RopeFunction(char_producer<_CharT>* __f, size_t _p_size,
                     bool __d, allocator_type __a)
    : _Rope_RopeRep<_CharT,_Alloc>(_RopeRep::_S_function, 0, true, _p_size, __a), _M_fn(__f)
    , _M_delete_when_done(__d)
  { _STLP_ASSERT(_p_size > 0) }

  ~_Rope_RopeFunction() {
    this->_M_free_c_string();
    if (_M_delete_when_done) {
      delete _M_fn;
    }
  }
};
    553 
    554 /*
    555  * Substring results are usually represented using just
    556  * concatenation nodes.  But in the case of very long flat ropes
    557  * or ropes with a functional representation that isn't practical.
    558  * In that case, we represent the __result as a special case of
    559  * RopeFunction, whose char_producer points back to the rope itself.
    560  * In all cases except repeated substring operations and
    561  * deallocation, we treat the __result as a RopeFunction.
    562  */
// Substring node (see the block comment above): a RopeFunction whose
// char_producer is the node itself, reading a window [_M_start,
// _M_start + size) out of the underlying base node.
template<class _CharT, class _Alloc>
struct _Rope_RopeSubstring : public char_producer<_CharT>, public _Rope_RopeFunction<_CharT,_Alloc> {
public:
  // XXX this whole class should be rewritten.
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
  _RopeRep *_M_base;      // not 0
  size_t _M_start;        // offset of this substring within *_M_base
  // char_producer interface: copy __req_len characters of the substring,
  // starting at __start_pos, into __buffer.
  /* virtual */ void operator()(size_t __start_pos, size_t __req_len,
                                _CharT* __buffer) {
    typedef _Rope_RopeFunction<_CharT,_Alloc> _RopeFunction;
    typedef _Rope_RopeLeaf<_CharT,_Alloc> _RopeLeaf;
    switch (_M_base->_M_tag) {
    case _RopeRep::_S_function:
    case _RopeRep::_S_substringfn:
      {
        // Delegate to the base node's producer, shifted by _M_start.
        char_producer<_CharT>* __fn =
          __STATIC_CAST(_RopeFunction*, _M_base)->_M_fn;
        _STLP_ASSERT(__start_pos + __req_len <= this->_M_size._M_data)
        _STLP_ASSERT(_M_start + this->_M_size._M_data <= _M_base->_M_size._M_data)
        (*__fn)(__start_pos + _M_start, __req_len, __buffer);
      }
      break;
    case _RopeRep::_S_leaf:
      {
        // Copy directly out of the flat leaf data.
        _CharT* __s =
          __STATIC_CAST(_RopeLeaf*, _M_base)->_M_data;
        _STLP_PRIV __ucopy_n(__s + __start_pos + _M_start, __req_len, __buffer);
      }
      break;
    default:
      // Substring nodes are only built over leaves and function nodes.
      _STLP_ASSERT(false)
        ;
    }
  }

  _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
  typedef typename _RopeRep::allocator_type allocator_type;

  // Note: passes `this` to the base as its own char_producer, then
  // re-tags itself as a substring node.  Takes one reference on __b.
  _Rope_RopeSubstring(_RopeRep* __b, size_t __s, size_t __l, allocator_type __a)
    : _Rope_RopeFunction<_CharT,_Alloc>(this, __l, false, __a),
      _M_base(__b), _M_start(__s) {
    _STLP_ASSERT(__l > 0)
    _STLP_ASSERT(__s + __l <= __b->_M_size._M_data)
    _M_base->_M_ref_nonnil();
    this->_M_tag = _RopeRep::_S_substringfn;
  }
  virtual ~_Rope_RopeSubstring()
  { _M_base->_M_unref_nonnil(); }
};
    612 
    613 /*
    614  * Self-destructing pointers to Rope_rep.
    615  * These are not conventional smart pointers.  Their
    616  * only purpose in life is to ensure that unref is called
    617  * on the pointer either at normal exit or if an exception
    618  * is raised.  It is the caller's responsibility to
    619  * adjust reference counts when these pointers are initialized
    620  * or assigned to.  (This convention significantly reduces
    621  * the number of potentially expensive reference count
    622  * updates.)
    623  */
// See the block comment above: a scope guard that calls _S_unref on the
// held node at destruction.  Reference counts are NOT adjusted on
// construction or assignment — that remains the caller's responsibility.
template<class _CharT, class _Alloc>
struct _Rope_self_destruct_ptr {
  _Rope_RopeRep<_CharT,_Alloc>* _M_ptr;
  ~_Rope_self_destruct_ptr()
  { _Rope_RopeRep<_CharT,_Alloc>::_S_unref(_M_ptr); }
#   ifdef _STLP_USE_EXCEPTIONS
  // With exceptions enabled the pointer must start out null so the
  // destructor is safe if an exception fires before assignment.
  _Rope_self_destruct_ptr() : _M_ptr(0) {}
#   else
  _Rope_self_destruct_ptr() {}
#   endif
  _Rope_self_destruct_ptr(_Rope_RopeRep<_CharT,_Alloc>* __p) : _M_ptr(__p) {}
  _Rope_RopeRep<_CharT,_Alloc>& operator*() { return *_M_ptr; }
  _Rope_RopeRep<_CharT,_Alloc>* operator->() { return _M_ptr; }
  operator _Rope_RopeRep<_CharT,_Alloc>*() { return _M_ptr; }
  _Rope_self_destruct_ptr<_CharT, _Alloc>&
  operator= (_Rope_RopeRep<_CharT,_Alloc>* __x)
  { _M_ptr = __x; return *this; }
};
    642 
    643 /*
    644  * Dereferencing a nonconst iterator has to return something
    645  * that behaves almost like a reference.  It's not possible to
    646  * return an actual reference since assignment requires extra
    647  * work.  And we would get into the same problems as with the
    648  * CD2 version of basic_string.
    649  */
// Proxy returned when a non-const rope element is dereferenced (see the
// block comment above): behaves like a reference to _CharT, with
// assignment performing the rope update.
template<class _CharT, class _Alloc>
class _Rope_char_ref_proxy {
  typedef _Rope_char_ref_proxy<_CharT, _Alloc> _Self;
  friend class rope<_CharT,_Alloc>;
  friend class _Rope_iterator<_CharT,_Alloc>;
  friend class _Rope_char_ptr_proxy<_CharT,_Alloc>;
  typedef _Rope_self_destruct_ptr<_CharT,_Alloc> _Self_destruct_ptr;
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
  typedef rope<_CharT,_Alloc> _My_rope;
  size_t _M_pos;          // index of the referenced character
  _CharT _M_current;      // cached value, meaningful iff _M_current_valid
  bool _M_current_valid;
  _My_rope* _M_root;     // The whole rope.
public:
  _Rope_char_ref_proxy(_My_rope* __r, size_t __p) :
    _M_pos(__p), _M_current_valid(false), _M_root(__r) {}
  _Rope_char_ref_proxy(const _Self& __x) :
    _M_pos(__x._M_pos), _M_current_valid(false), _M_root(__x._M_root) {}
  // Don't preserve cache if the reference can outlive the
  // expression.  We claim that's not possible without calling
  // a copy constructor or generating reference to a proxy
  // reference.  We declare the latter to have undefined semantics.
  _Rope_char_ref_proxy(_My_rope* __r, size_t __p, _CharT __c)
    : _M_pos(__p), _M_current(__c), _M_current_valid(true), _M_root(__r) {}
  inline operator _CharT () const;
  _Self& operator= (_CharT __c);
  _Rope_char_ptr_proxy<_CharT, _Alloc> operator& () const;
  _Self& operator= (const _Self& __c) {
    return operator=((_CharT)__c);
  }
};
    681 
#ifdef _STLP_FUNCTION_TMPL_PARTIAL_ORDER
// Proxy-aware swap: reads both proxies into plain characters and writes
// them back through the proxies' assignment operators.
template<class _CharT, class __Alloc>
inline void swap(_Rope_char_ref_proxy <_CharT, __Alloc > __a,
                 _Rope_char_ref_proxy <_CharT, __Alloc > __b) {
  _CharT __tmp = __a;
  __a = __b;
  __b = __tmp;
}
#else
// There is no really acceptable way to handle this.  The default
// definition of swap doesn't work for proxy references.
// It can't really be made to work, even with ugly hacks, since
// the only unusual operation it uses is the copy constructor, which
// is needed for other purposes.  We provide a macro for
// full specializations, and instantiate the most common case.
# define _ROPE_SWAP_SPECIALIZATION(_CharT, __Alloc) \
    inline void swap(_Rope_char_ref_proxy <_CharT, __Alloc > __a, \
                     _Rope_char_ref_proxy <_CharT, __Alloc > __b) { \
        _CharT __tmp = __a; \
        __a = __b; \
        __b = __tmp; \
    }

_ROPE_SWAP_SPECIALIZATION(char, allocator<char>)

# ifndef _STLP_NO_WCHAR_T
_ROPE_SWAP_SPECIALIZATION(wchar_t, allocator<wchar_t>)
# endif

#endif /* !_STLP_FUNCTION_TMPL_PARTIAL_ORDER */
    712 
// Proxy returned by _Rope_char_ref_proxy::operator&: behaves like a
// pointer to a rope character, stored as (rope, position).
template<class _CharT, class _Alloc>
class _Rope_char_ptr_proxy {
  // XXX this class should be rewritten.
public:
  typedef _Rope_char_ptr_proxy<_CharT, _Alloc> _Self;
  friend class _Rope_char_ref_proxy<_CharT,_Alloc>;
  size_t _M_pos;
  rope<_CharT,_Alloc>* _M_root;     // The whole rope.

  _Rope_char_ptr_proxy(const _Rope_char_ref_proxy<_CharT,_Alloc>& __x)
    : _M_pos(__x._M_pos), _M_root(__x._M_root) {}
  _Rope_char_ptr_proxy(const _Self& __x)
    : _M_pos(__x._M_pos), _M_root(__x._M_root) {}
  _Rope_char_ptr_proxy() {}
  // Only a null raw pointer may be converted; anything else is an error.
  _Rope_char_ptr_proxy(_CharT* __x) : _M_pos(0), _M_root(0) {
    _STLP_ASSERT(0 == __x)
  }
  _Self& operator= (const _Self& __x) {
    _M_pos = __x._M_pos;
    _M_root = __x._M_root;
    return *this;
  }

  // Dereferencing yields a reference proxy for the same position.
  _Rope_char_ref_proxy<_CharT,_Alloc> operator*() const {
    return _Rope_char_ref_proxy<_CharT,_Alloc>(_M_root, _M_pos);
  }
};
    740 
    741 
    742 /*
    743  * Rope iterators:
    744  * Unlike in the C version, we cache only part of the stack
    745  * for rope iterators, since they must be efficiently copyable.
    746  * When we run out of cache, we have to reconstruct the iterator
    747  * value.
    748  * Pointers from iterators are not included in reference counts.
    749  * Iterators are assumed to be thread private.  Ropes can
    750  * be shared.
    751  */
template<class _CharT, class _Alloc>
class _Rope_iterator_base
/*   : public random_access_iterator<_CharT, ptrdiff_t>  */
{
  friend class rope<_CharT,_Alloc>;
  typedef _Rope_iterator_base<_CharT, _Alloc> _Self;
  typedef _Rope_RopeConcatenation<_CharT,_Alloc> _RopeConcat;
public:
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;

  enum { _S_path_cache_len = 4 }; // Must be <= 9 because of _M_path_direction.
  enum { _S_iterator_buf_len = 15 };
  // Position in the rope of the character this iterator designates.
  size_t _M_current_pos;
  // The whole rope.
  _RopeRep* _M_root;
  // Starting position for current leaf
  size_t _M_leaf_pos;
  // Buffer possibly containing current char.
  _CharT* _M_buf_start;
  // Pointer to current char in buffer, != 0 ==> buffer valid.
  _CharT* _M_buf_ptr;
  // One past __last valid char in buffer.
  _CharT* _M_buf_end;

  // What follows is the path cache.  We go out of our
  // way to make this compact.
  // Path_end contains the bottom section of the path from
  // the root to the current leaf.
  struct {
#  if defined (__BORLANDC__) && (__BORLANDC__ < 0x560)
    _RopeRep const*_M_data[4];
#  else
    _RopeRep const*_M_data[_S_path_cache_len];
#  endif
  } _M_path_end;
  // Last valid __pos in path_end;
  // _M_path_end[0] ... _M_path_end[_M_leaf_index-1]
  // point to concatenation nodes.
  int _M_leaf_index;
  // (_M_path_directions >> __i) & 1 is 1
  // if we got from _M_path_end[leaf_index - __i - 1]
  // to _M_path_end[leaf_index - __i] by going to the
  // __right. Assumes path_cache_len <= 9.
  unsigned char _M_path_directions;
  // Short buffer for surrounding chars.
  // This is useful primarily for
  // RopeFunctions.  We put the buffer
  // here to avoid locking in the
  // multithreaded case.
  // The cached path is generally assumed to be valid
  // only if the buffer is valid.
  struct {
#  if defined (__BORLANDC__) && (__BORLANDC__ < 0x560)
    _CharT _M_data[15];
#  else
    _CharT _M_data[_S_iterator_buf_len];
#  endif
  } _M_tmp_buf;

  // Set buffer contents given path cache.
  static void _S_setbuf(_Rope_iterator_base<_CharT, _Alloc>& __x);
  // Set buffer contents and path cache.
  static void _S_setcache(_Rope_iterator_base<_CharT, _Alloc>& __x);
  // As above, but assumes path cache is valid for previous posn.
  static void _S_setcache_for_incr(_Rope_iterator_base<_CharT, _Alloc>& __x);
  _Rope_iterator_base() {}
  // Start with an invalid buffer (_M_buf_ptr == 0); the cache is
  // filled in lazily on first use.
  _Rope_iterator_base(_RopeRep* __root, size_t __pos)
    : _M_current_pos(__pos),_M_root(__root),  _M_buf_ptr(0) {}
  // Advance / retreat the logical position by __n characters
  // (defined elsewhere).
  void _M_incr(size_t __n);
  void _M_decr(size_t __n);
public:
  // Current character index within the rope.
  size_t index() const { return _M_current_pos; }
private:
  // Fix up buffer pointers after the caller has copied
  // _M_buf_start/_M_buf_ptr from __x.  If __x's cache lives in its
  // own _M_tmp_buf, re-aim our pointers into our copy of that buffer;
  // otherwise the pointers already reference rope-owned storage shared
  // by both iterators and only _M_buf_end still needs copying.
  void _M_copy_buf(const _Self& __x) {
    _M_tmp_buf = __x._M_tmp_buf;
    if (__x._M_buf_start == __x._M_tmp_buf._M_data) {
      _M_buf_start = _M_tmp_buf._M_data;
      _M_buf_end = _M_buf_start + (__x._M_buf_end - __x._M_buf_start);
      _M_buf_ptr = _M_buf_start + (__x._M_buf_ptr - __x._M_buf_start);
    } else {
      _M_buf_end = __x._M_buf_end;
    }
  }

public:
  // Copying duplicates the cached path/buffer only when the source's
  // buffer is valid.  _M_buf_end is deliberately absent from the
  // initializer list: it is meaningful only while _M_buf_ptr != 0,
  // and _M_copy_buf sets it in that case.
  _Rope_iterator_base(const _Self& __x) :
      _M_current_pos(__x._M_current_pos),
      _M_root(__x._M_root),
      _M_leaf_pos( __x._M_leaf_pos ),
      _M_buf_start(__x._M_buf_start),
      _M_buf_ptr(__x._M_buf_ptr),
      _M_path_end(__x._M_path_end),
      _M_leaf_index(__x._M_leaf_index),
      _M_path_directions(__x._M_path_directions)
      {
        if (0 != __x._M_buf_ptr) {
          _M_copy_buf(__x);
        }
      }
  _Self& operator = (const _Self& __x)
      {
        _M_current_pos = __x._M_current_pos;
        _M_root = __x._M_root;
        _M_buf_start = __x._M_buf_start;
        _M_buf_ptr = __x._M_buf_ptr;
        _M_path_end = __x._M_path_end;
        _M_leaf_index = __x._M_leaf_index;
        _M_path_directions = __x._M_path_directions;
        _M_leaf_pos = __x._M_leaf_pos;
        // As in the copy constructor, the buffer state is only
        // propagated when __x's cache is valid.
        if (0 != __x._M_buf_ptr) {
          _M_copy_buf(__x);
        }
        return *this;
      }
};
    867 
    868 template<class _CharT, class _Alloc> class _Rope_iterator;
    869 
// Const iterator over a rope.  Dereference returns the character by
// value (see the `reference` typedef below); the root's reference
// count is never modified by const iterators.
template<class _CharT, class _Alloc>
class _Rope_const_iterator : public _Rope_iterator_base<_CharT,_Alloc> {
  friend class rope<_CharT,_Alloc>;
  typedef  _Rope_const_iterator<_CharT, _Alloc> _Self;
  typedef _Rope_iterator_base<_CharT,_Alloc> _Base;
  //  protected:
public:
#   ifndef _STLP_HAS_NO_NAMESPACES
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
  // The one from the base class may not be directly visible.
#   endif
  _Rope_const_iterator(const _RopeRep* __root, size_t __pos):
    _Rope_iterator_base<_CharT,_Alloc>(__CONST_CAST(_RopeRep*,__root), __pos)
    // Only nonconst iterators modify root ref count
  {}
public:
  typedef _CharT reference;   // Really a value.  Returning a reference
                              // Would be a mess, since it would have
                              // to be included in refcount.
  typedef const _CharT* pointer;
  typedef _CharT value_type;
  typedef ptrdiff_t difference_type;
  typedef random_access_iterator_tag iterator_category;

public:
  _Rope_const_iterator() {}
  _Rope_const_iterator(const _Self& __x) :
    _Rope_iterator_base<_CharT,_Alloc>(__x) { }
  // A mutable iterator may be converted to a const one.
  _Rope_const_iterator(const _Rope_iterator<_CharT,_Alloc>& __x):
    _Rope_iterator_base<_CharT,_Alloc>(__x) {}
  _Rope_const_iterator(const rope<_CharT,_Alloc>& __r, size_t __pos) :
    _Rope_iterator_base<_CharT,_Alloc>(__r._M_tree_ptr._M_data, __pos) {}
  _Self& operator= (const _Self& __x) {
    _Base::operator=(__x);
    return *this;
  }
  // Dereference: fill the buffer cache lazily, then read from it.
  // The second branch is a workaround for the Digital Mars compiler.
  reference operator*() {
    if (0 == this->_M_buf_ptr)
#if !defined (__DMC__)
      _S_setcache(*this);
#else
    { _Rope_iterator_base<_CharT, _Alloc>* __x = this; _S_setcache(*__x); }
#endif
    return *(this->_M_buf_ptr);
  }
  // Pre-increment: fast path steps within the cached buffer and only
  // falls back to the general _M_incr when the buffer is exhausted.
  _Self& operator++()
      {
        if ( this->_M_buf_ptr != 0 ) {
          _CharT *__next = this->_M_buf_ptr + 1;
          if ( __next < this->_M_buf_end ) {
            this->_M_buf_ptr = __next;
            ++this->_M_current_pos;
            return *this;
          }
        }
        this->_M_incr(1);
        return *this;
      }
  _Self& operator+=(ptrdiff_t __n) {
    if (__n >= 0) {
      this->_M_incr(__n);
    } else {
      this->_M_decr(-__n);
    }
    return *this;
  }
  _Self& operator--() {
    this->_M_decr(1);
    return *this;
  }
  _Self& operator-=(ptrdiff_t __n) {
    if (__n >= 0) {
      this->_M_decr(__n);
    } else {
      this->_M_incr(-__n);
    }
    return *this;
  }
  // Post-increment returns a freshly constructed iterator without a
  // valid cache.
  _Self operator++(int) {
    size_t __old_pos = this->_M_current_pos;
    this->_M_incr(1);
    return _Rope_const_iterator<_CharT,_Alloc>(this->_M_root, __old_pos);
    // This makes a subsequent dereference expensive.
    // Perhaps we should instead copy the iterator
    // if it has a valid cache?
  }
  _Self operator--(int) {
    size_t __old_pos = this->_M_current_pos;
    this->_M_decr(1);
    return _Rope_const_iterator<_CharT,_Alloc>(this->_M_root, __old_pos);
  }
  inline reference operator[](size_t __n);
};
    963 
// Mutable iterator over a rope.  Dereference yields a character
// reference proxy, since an actual _CharT& cannot be handed out from
// a shared, reference-counted tree.
template<class _CharT, class _Alloc>
class _Rope_iterator : public _Rope_iterator_base<_CharT,_Alloc> {
  friend class rope<_CharT,_Alloc>;
  typedef _Rope_iterator<_CharT, _Alloc> _Self;
  typedef _Rope_iterator_base<_CharT,_Alloc> _Base;
  typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;

public:
  rope<_CharT,_Alloc>* _M_root_rope;
  // root is treated as a cached version of this,
  // and is used to detect changes to the underlying
  // rope.
  // Root is included in the reference count.
  // This is necessary so that we can detect changes reliably.
  // Unfortunately, it requires careful bookkeeping for the
  // nonGC case.
  _Rope_iterator(rope<_CharT,_Alloc>* __r, size_t __pos);

  // Re-validate the cached root against *_M_root_rope (defined
  // elsewhere).
  void _M_check();
public:
  typedef _Rope_char_ref_proxy<_CharT,_Alloc>  reference;
  typedef _Rope_char_ref_proxy<_CharT,_Alloc>* pointer;
  typedef _CharT value_type;
  typedef ptrdiff_t difference_type;
  typedef random_access_iterator_tag iterator_category;
public:
  ~_Rope_iterator() {  //*TY 5/6/00 - added dtor to balance reference count
    _RopeRep::_S_unref(this->_M_root);
  }

  rope<_CharT,_Alloc>& container() { return *_M_root_rope; }
  _Rope_iterator() {
    this->_M_root = 0;  // Needed for reference counting.
  }
  // Copying takes an additional reference on the cached root.
  _Rope_iterator(const  _Self& __x) :
    _Rope_iterator_base<_CharT,_Alloc>(__x) {
    _M_root_rope = __x._M_root_rope;
    _RopeRep::_S_ref(this->_M_root);
  }
  _Rope_iterator(rope<_CharT,_Alloc>& __r, size_t __pos);
  // The new root is referenced before the old one is released, so
  // self-assignment cannot destroy the tree prematurely.
  _Self& operator= (const  _Self& __x) {
    _RopeRep* __old = this->_M_root;
    _RopeRep::_S_ref(__x._M_root);
    _Base::operator=(__x);
    _M_root_rope = __x._M_root_rope;
    _RopeRep::_S_unref(__old);
    return *this;
  }
  // Dereference: when the buffer cache is valid, the cached character
  // value is passed along to the proxy as well.
  reference operator*() {
    _M_check();
    if (0 == this->_M_buf_ptr) {
      return reference(_M_root_rope, this->_M_current_pos);
    } else {
      return reference(_M_root_rope, this->_M_current_pos, *(this->_M_buf_ptr));
    }
  }
  _Self& operator++() {
    this->_M_incr(1);
    return *this;
  }
  _Self& operator+=(ptrdiff_t __n) {
    if (__n >= 0) {
      this->_M_incr(__n);
    } else {
      this->_M_decr(-__n);
    }
    return *this;
  }
  _Self& operator--() {
    this->_M_decr(1);
    return *this;
  }
  _Self& operator-=(ptrdiff_t __n) {
    if (__n >= 0) {
      this->_M_decr(__n);
    } else {
      this->_M_incr(-__n);
    }
    return *this;
  }
  _Self operator++(int) {
    size_t __old_pos = this->_M_current_pos;
    this->_M_incr(1);
    return _Self(_M_root_rope, __old_pos);
  }
  _Self operator--(int) {
    size_t __old_pos = this->_M_current_pos;
    this->_M_decr(1);
    return _Self(_M_root_rope, __old_pos);
  }
  reference operator[](ptrdiff_t __n) {
    return reference(_M_root_rope, this->_M_current_pos + __n);
  }
};
   1058 
# ifdef _STLP_USE_OLD_HP_ITERATOR_QUERIES
// Pre-standard (original HP STL) iterator trait queries: category,
// value type and distance type were obtained through overloaded free
// functions rather than iterator_traits.
template <class _CharT, class _Alloc>
inline random_access_iterator_tag
iterator_category(const _Rope_iterator<_CharT,_Alloc>&) {  return random_access_iterator_tag();}
template <class _CharT, class _Alloc>
inline _CharT* value_type(const _Rope_iterator<_CharT,_Alloc>&) { return 0; }
template <class _CharT, class _Alloc>
inline ptrdiff_t* distance_type(const _Rope_iterator<_CharT,_Alloc>&) { return 0; }
template <class _CharT, class _Alloc>
inline random_access_iterator_tag
iterator_category(const _Rope_const_iterator<_CharT,_Alloc>&) { return random_access_iterator_tag(); }
template <class _CharT, class _Alloc>
inline _CharT* value_type(const _Rope_const_iterator<_CharT,_Alloc>&) { return 0; }
template <class _CharT, class _Alloc>
inline ptrdiff_t* distance_type(const _Rope_const_iterator<_CharT,_Alloc>&) { return 0; }
#endif /* _STLP_USE_OLD_HP_ITERATOR_QUERIES */

// Apply the consumer __c to the pieces of __r overlapping
// [__begin, __end).
// NOTE(review): the bool result presumably reports whether the
// traversal ran to completion -- confirm in the implementation.
template <class _CharT, class _Alloc, class _CharConsumer>
bool _S_apply_to_pieces(_CharConsumer& __c,
                        _Rope_RopeRep<_CharT, _Alloc> *__r,
                        size_t __begin, size_t __end);
                        // begin and end are assumed to be in range.
   1081 
// rope<_CharT,_Alloc>: a sequence of _CharT represented as a
// reference-counted tree.  Update operations copy only as much of the
// structure as needed, so ropes may be logically copied by copying a
// pointer (see the file header comment).
template <class _CharT, class _Alloc>
class rope
#if defined (_STLP_USE_PARTIAL_SPEC_WORKAROUND)
           : public __stlport_class<rope<_CharT, _Alloc> >
#endif
{
  typedef rope<_CharT,_Alloc> _Self;
public:
  typedef _CharT value_type;
  typedef ptrdiff_t difference_type;
  typedef size_t size_type;
  typedef _CharT const_reference;
  typedef const _CharT* const_pointer;
  typedef _Rope_iterator<_CharT,_Alloc> iterator;
  typedef _Rope_const_iterator<_CharT,_Alloc> const_iterator;
  // Mutable access goes through proxies, never raw _CharT*&.
  typedef _Rope_char_ref_proxy<_CharT,_Alloc> reference;
  typedef _Rope_char_ptr_proxy<_CharT,_Alloc> pointer;

  friend class _Rope_iterator<_CharT,_Alloc>;
  friend class _Rope_const_iterator<_CharT,_Alloc>;
  friend struct _Rope_RopeRep<_CharT,_Alloc>;
  friend class _Rope_iterator_base<_CharT,_Alloc>;
  friend class _Rope_char_ptr_proxy<_CharT,_Alloc>;
  friend class _Rope_char_ref_proxy<_CharT,_Alloc>;
  friend struct _Rope_RopeSubstring<_CharT,_Alloc>;

  _STLP_DECLARE_RANDOM_ACCESS_REVERSE_ITERATORS;

protected:
  typedef _CharT* _Cstrptr;

  static _CharT _S_empty_c_str[1];

  enum { _S_copy_max = 23 };
  // For strings shorter than _S_copy_max, we copy to
  // concatenate.

  typedef _Rope_RopeRep<_CharT, _Alloc> _RopeRep;
  typedef typename _RopeRep::_IsBasicCharType _IsBasicCharType;

public:
  _STLP_FORCE_ALLOCATORS(_CharT, _Alloc)
  typedef _Alloc allocator_type;

public:
  // The only data member of a rope:
  // an allocator-aware proxy holding the root of the tree (null for
  // an empty rope).
  _STLP_PRIV _STLP_alloc_proxy<_RopeRep*, _CharT, allocator_type> _M_tree_ptr;

public:
  allocator_type get_allocator() const { return allocator_type(_M_tree_ptr); }

public:
  typedef _Rope_RopeConcatenation<_CharT,_Alloc> _RopeConcatenation;
  typedef _Rope_RopeLeaf<_CharT,_Alloc> _RopeLeaf;
  typedef _Rope_RopeFunction<_CharT,_Alloc> _RopeFunction;
  typedef _Rope_RopeSubstring<_CharT,_Alloc> _RopeSubstring;

  // Retrieve a character at the indicated position.
  static _CharT _S_fetch(_RopeRep* __r, size_type __pos);

  // Obtain a pointer to the character at the indicated position.
  // The pointer can be used to change the character.
  // If such a pointer cannot be produced, as is frequently the
  // case, 0 is returned instead.
  // (Returns nonzero only if all nodes in the path have a refcount
  // of 1.)
  static _CharT* _S_fetch_ptr(_RopeRep* __r, size_type __pos);

  // Forward reference-count maintenance to the rep class.
  static void _S_unref(_RopeRep* __t) {
    _RopeRep::_S_unref(__t);
  }
  static void _S_ref(_RopeRep* __t) {
    _RopeRep::_S_ref(__t);
  }

  typedef _Rope_self_destruct_ptr<_CharT,_Alloc> _Self_destruct_ptr;

  // _Result is counted in refcount.
  static _RopeRep* _S_substring(_RopeRep* __base,
                                size_t __start, size_t __endp1);

  static _RopeRep* _S_concat_char_iter(_RopeRep* __r,
                                       const _CharT* __iter, size_t __slen);
  // Concatenate rope and char ptr, copying __s.
  // Should really take an arbitrary iterator.
  // Result is counted in refcount.
  static _RopeRep* _S_destr_concat_char_iter(_RopeRep* __r,
                                             const _CharT* __iter, size_t __slen);
    // As above, but one reference to __r is about to be
    // destroyed.  Thus the pieces may be recycled if all
    // relevant reference counts are 1.

  // General concatenation on _RopeRep.  _Result
  // has refcount of 1.  Adjusts argument refcounts.
  static _RopeRep* _S_concat_rep(_RopeRep* __left, _RopeRep* __right);

public:
#if defined (_STLP_MEMBER_TEMPLATES)
  template <class _CharConsumer>
#else
  typedef _Rope_char_consumer<_CharT> _CharConsumer;
#endif
  // Apply __c to the rope's pieces overlapping [__begin, __end).
  void apply_to_pieces(size_t __begin, size_t __end,
                       _CharConsumer& __c) const
  { _S_apply_to_pieces(__c, _M_tree_ptr._M_data, __begin, __end); }
   1187 
protected:

  // Buffer size (in _CharT) actually allocated for an __n-character
  // leaf; the rounding policy is defined by the rep class.
  static size_t _S_rounded_up_size(size_t __n)
  { return _RopeRep::_S_rounded_up_size(__n); }

  // Allocate and construct a RopeLeaf using the supplied allocator
  // Takes ownership of s instead of copying.
  static _RopeLeaf* _S_new_RopeLeaf(_CharT *__s,
                                    size_t _p_size, allocator_type __a) {
    _RopeLeaf* __space = _STLP_CREATE_ALLOCATOR(allocator_type, __a,
                                                _RopeLeaf).allocate(1);
    _STLP_TRY {
      new(__space) _RopeLeaf(__s, _p_size, __a);
    }
   // If the leaf constructor throws, return the node storage to the
   // allocator before propagating the exception.
   _STLP_UNWIND(_STLP_CREATE_ALLOCATOR(allocator_type,__a,
                                       _RopeLeaf).deallocate(__space, 1))
    return __space;
  }
   1206 
  // Allocate and construct a concatenation node over two subtrees.
  // NOTE(review): unlike _S_new_RopeLeaf, the placement-news below are
  // not exception-guarded; presumably these node constructors cannot
  // throw -- confirm before changing them.
  static _RopeConcatenation* _S_new_RopeConcatenation(_RopeRep* __left, _RopeRep* __right,
                                                      allocator_type __a) {
   _RopeConcatenation* __space = _STLP_CREATE_ALLOCATOR(allocator_type, __a,
                                                        _RopeConcatenation).allocate(1);
    return new(__space) _RopeConcatenation(__left, __right, __a);
  }

  // Allocate and construct a function node wrapping a char_producer.
  // __d controls whether the producer is deleted with the node.
  static _RopeFunction* _S_new_RopeFunction(char_producer<_CharT>* __f,
                                            size_t _p_size, bool __d, allocator_type __a) {
   _RopeFunction* __space = _STLP_CREATE_ALLOCATOR(allocator_type, __a,
                                                   _RopeFunction).allocate(1);
    return new(__space) _RopeFunction(__f, _p_size, __d, __a);
  }

  // Allocate and construct a substring node over __b, starting at
  // __s with length __l.
  static _RopeSubstring* _S_new_RopeSubstring(_Rope_RopeRep<_CharT,_Alloc>* __b, size_t __s,
                                              size_t __l, allocator_type __a) {
   _RopeSubstring* __space = _STLP_CREATE_ALLOCATOR(allocator_type, __a,
                                                    _RopeSubstring).allocate(1);
    return new(__space) _RopeSubstring(__b, __s, __l, __a);
  }

  // Build a leaf by copying _p_size characters from __s (storage the
  // rope does not own).  Returns 0 for an empty string.  The copied
  // buffer is freed if leaf construction throws.
  static
  _RopeLeaf* _S_RopeLeaf_from_unowned_char_ptr(const _CharT *__s,
                                               size_t _p_size, allocator_type __a) {
    if (0 == _p_size) return 0;

   _CharT* __buf = _STLP_CREATE_ALLOCATOR(allocator_type,__a, _CharT).allocate(_S_rounded_up_size(_p_size));

    _STLP_PRIV __ucopy_n(__s, _p_size, __buf);
    _S_construct_null(__buf + _p_size);

    _STLP_TRY {
      return _S_new_RopeLeaf(__buf, _p_size, __a);
    }
    _STLP_UNWIND(_RopeRep::_S_free_string(__buf, _p_size, __a))
    _STLP_RET_AFTER_THROW(0)
  }
   1244 
   1245 
  // Concatenation of nonempty strings.
  // Always builds a concatenation node.
  // Rebalances if the result is too deep.
  // Result has refcount 1.
  // Does not increment left and right ref counts even though
  // they are referenced.
  static _RopeRep*
  _S_tree_concat(_RopeRep* __left, _RopeRep* __right);

  // Concatenation helper functions
  static _RopeLeaf*
  _S_leaf_concat_char_iter(_RopeLeaf* __r,
                           const _CharT* __iter, size_t __slen);
  // Concatenate by copying leaf.
  // should take an arbitrary iterator
  // result has refcount 1.
  static _RopeLeaf* _S_destr_leaf_concat_char_iter
  (_RopeLeaf* __r, const _CharT* __iter, size_t __slen);
  // A version that potentially clobbers __r if __r->_M_ref_count == 1.


  // A helper function for exponentiating strings.
  // This uses a nonstandard refcount convention.
  // The result has refcount 0.
  typedef _STLP_PRIV _Rope_Concat_fn<_CharT,_Alloc> _Concat_fn;
#if !defined (__GNUC__) || (__GNUC__ < 3)
  friend _Concat_fn;
#else
  friend struct _STLP_PRIV _Rope_Concat_fn<_CharT,_Alloc>;
#endif

public:
  // Length of a null-terminated string, in characters.
  static size_t _S_char_ptr_len(const _CharT* __s) {
    return char_traits<_CharT>::length(__s);
  }

public: /* for operators */
  // Wrap an existing tree; takes over the caller's reference
  // (the destructor will unref it).
  rope(_RopeRep* __t, const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, __t) { }
private:
  // Copy __r to the _CharT buffer.
  // Returns __buffer + __r->_M_size._M_data.
  // Assumes that buffer is uninitialized.
  static _CharT* _S_flatten(_RopeRep* __r, _CharT* __buffer);

  // Again, with explicit starting position and length.
  // Assumes that buffer is uninitialized.
  static _CharT* _S_flatten(_RopeRep* __r,
                            size_t __start, size_t __len,
                            _CharT* __buffer);

  // fbp : HP aCC prohibits access to protected min_len from within static methods ( ?? )
public:
  // Minimum length a tree of each depth must have to be considered
  // balanced (see the _S_is_*balanced predicates below).
  static const unsigned long _S_min_len[__ROPE_DEPTH_SIZE];
   1300 protected:
   1301   static bool _S_is_balanced(_RopeRep* __r)
   1302   { return (__r->_M_size._M_data >= _S_min_len[__r->_M_depth]); }
   1303 
   1304   static bool _S_is_almost_balanced(_RopeRep* __r) {
   1305     return (__r->_M_depth == 0 ||
   1306             __r->_M_size._M_data >= _S_min_len[__r->_M_depth - 1]);
   1307   }
   1308 
   1309   static bool _S_is_roughly_balanced(_RopeRep* __r) {
   1310     return (__r->_M_depth <= 1 ||
   1311             __r->_M_size._M_data >= _S_min_len[__r->_M_depth - 2]);
   1312   }
   1313 
   1314   // Assumes the result is not empty.
   1315   static _RopeRep* _S_concat_and_set_balanced(_RopeRep* __left,
   1316                                               _RopeRep* __right) {
   1317     _RopeRep* __result = _S_concat_rep(__left, __right);
   1318     if (_S_is_balanced(__result)) __result->_M_is_balanced = true;
   1319     return __result;
   1320   }
   1321 
  // The basic rebalancing operation.  Logically copies the
  // rope.  The result has refcount of 1.  The client will
  // usually decrement the reference count of __r.
  // The result is within height 2 of balanced by the above
  // definition.
  static _RopeRep* _S_balance(_RopeRep* __r);

  // Add all unbalanced subtrees to the forest of balanced trees.
  // Used only by balance.
  static void _S_add_to_forest(_RopeRep*__r, _RopeRep** __forest);

  // Add __r to forest, assuming __r is already balanced.
  static void _S_add_leaf_to_forest(_RopeRep* __r, _RopeRep** __forest);

#ifdef _STLP_DEBUG
  // Print to stdout, exposing structure
  static void _S_dump(_RopeRep* __r, int __indent = 0);
#endif

  // Return -1, 0, or 1 if __x < __y, __x == __y, or __x > __y resp.
  static int _S_compare(const _RopeRep* __x, const _RopeRep* __y);

  // Report an out-of-range position; does not return normally.
  void _STLP_FUNCTION_THROWS _M_throw_out_of_range() const;
   1345 
  // Install __r as the new tree, dropping our reference to the old
  // one.  The caller transfers ownership of a reference to __r.
  // NOTE(review): the commented-out guard suggests callers always
  // pass either a distinct tree or one carrying an extra reference;
  // confirm before passing an alias of the current tree.
  void _M_reset(_RopeRep* __r) {
    //if (__r != _M_tree_ptr._M_data) {
      _S_unref(_M_tree_ptr._M_data);
      _M_tree_ptr._M_data = __r;
    //}
  }
   1352 
   1353 public:
   1354   bool empty() const { return 0 == _M_tree_ptr._M_data; }
   1355 
   1356   // Comparison member function.  This is public only for those
   1357   // clients that need a ternary comparison.  Others
   1358   // should use the comparison operators below.
   1359   int compare(const _Self& __y) const {
   1360     return _S_compare(_M_tree_ptr._M_data, __y._M_tree_ptr._M_data);
   1361   }
   1362 
  // Construct from a null-terminated character string (copied).
  rope(const _CharT* __s, const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, _S_RopeLeaf_from_unowned_char_ptr(__s, _S_char_ptr_len(__s),__a))
  {}

  // Construct from the first __len characters of __s (copied).
  rope(const _CharT* __s, size_t __len,
       const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, (_S_RopeLeaf_from_unowned_char_ptr(__s, __len, __a)))
  {}

  // Should perhaps be templatized with respect to the iterator type
  // and use Sequence_buffer.  (It should perhaps use sequence_buffer
  // even now.)
  rope(const _CharT *__s, const _CharT *__e,
       const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, _S_RopeLeaf_from_unowned_char_ptr(__s, __e - __s, __a))
  {}

  // Substring of the rope underlying a pair of const_iterators.
  rope(const const_iterator& __s, const const_iterator& __e,
       const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, _S_substring(__s._M_root, __s._M_current_pos,
                                    __e._M_current_pos))
  {}

  rope(const iterator& __s, const iterator& __e,
       const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, _S_substring(__s._M_root, __s._M_current_pos,
                                    __e._M_current_pos))
  {}

  // Single-character rope.
  rope(_CharT __c, const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, (_RopeRep*)0) {
    _CharT* __buf = _M_tree_ptr.allocate(_S_rounded_up_size(1));

    _Copy_Construct(__buf, __c);
    _S_construct_null(__buf + 1);

    _STLP_TRY {
      _M_tree_ptr._M_data = _S_new_RopeLeaf(__buf, 1, __a);
    }
    _STLP_UNWIND(_RopeRep::_S_free_string(__buf, 1, __a))
  }

  // __n copies of __c.  Rather than allocating one huge leaf, this
  // builds one leaf of up to __exponentiate_threshold (32) characters
  // and exponentiates it by repeated concatenation, then appends a
  // leaf holding the remainder.
  rope(size_t __n, _CharT __c,
       const allocator_type& __a = allocator_type()):
    _M_tree_ptr(__a, (_RopeRep*)0) {
    if (0 == __n)
      return;

    rope<_CharT,_Alloc> __result;
# define  __exponentiate_threshold size_t(32)
    _RopeRep* __remainder;
    rope<_CharT,_Alloc> __remainder_rope;

    // gcc-2.7.2 bugs
    typedef _STLP_PRIV _Rope_Concat_fn<_CharT,_Alloc> _Concat_fn;

    size_t __exponent = __n / __exponentiate_threshold;
    size_t __rest = __n % __exponentiate_threshold;
    if (0 == __rest) {
      __remainder = 0;
    } else {
      // Leaf holding the final __rest characters.
      _CharT* __rest_buffer = _M_tree_ptr.allocate(_S_rounded_up_size(__rest));
      uninitialized_fill_n(__rest_buffer, __rest, __c);
      _S_construct_null(__rest_buffer + __rest);
      _STLP_TRY {
        __remainder = _S_new_RopeLeaf(__rest_buffer, __rest, __a);
      }
      _STLP_UNWIND(_RopeRep::_S_free_string(__rest_buffer, __rest, __a))
    }
    __remainder_rope._M_tree_ptr._M_data = __remainder;
    if (__exponent != 0) {
      // Full-threshold base leaf to be repeated __exponent times.
      _CharT* __base_buffer = _M_tree_ptr.allocate(_S_rounded_up_size(__exponentiate_threshold));
      _RopeLeaf* __base_leaf;
      rope<_CharT,_Alloc> __base_rope;
      uninitialized_fill_n(__base_buffer, __exponentiate_threshold, __c);
      _S_construct_null(__base_buffer + __exponentiate_threshold);
      _STLP_TRY {
        __base_leaf = _S_new_RopeLeaf(__base_buffer,
                                      __exponentiate_threshold, __a);
      }
      _STLP_UNWIND(_RopeRep::_S_free_string(__base_buffer,
                                            __exponentiate_threshold, __a))
      __base_rope._M_tree_ptr._M_data = __base_leaf;
      if (1 == __exponent) {
        __result = __base_rope;
        // One each for base_rope and __result
        //_STLP_ASSERT(2 == __result._M_tree_ptr._M_data->_M_ref_count)
      } else {
        __result = _STLP_PRIV __power(__base_rope, __exponent, _Concat_fn());
      }
      if (0 != __remainder) {
        __result += __remainder_rope;
      }
    } else {
      __result = __remainder_rope;
    }
    // Take an extra reference before the local ropes release theirs
    // on destruction.
    _M_tree_ptr._M_data = __result._M_tree_ptr._M_data;
    _M_tree_ptr._M_data->_M_ref_nonnil();
# undef __exponentiate_threshold
  }

  // Empty rope.
  rope(const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, (_RopeRep*)0) {}

  // Construct a rope from a function that can compute its members
  rope(char_producer<_CharT> *__fn, size_t __len, bool __delete_fn,
       const allocator_type& __a = allocator_type())
    : _M_tree_ptr(__a, (_RopeRep*)0) {
    _M_tree_ptr._M_data = (0 == __len) ?
      0 : _S_new_RopeFunction(__fn, __len, __delete_fn, __a);
  }

  // Copy: shares the tree, bumping its reference count.
  rope(const _Self& __x)
    : _M_tree_ptr(__x._M_tree_ptr, __x._M_tree_ptr._M_data) {
    _S_ref(_M_tree_ptr._M_data);
  }

#if !defined (_STLP_NO_MOVE_SEMANTIC)
  // Move: steal the tree, leaving the source empty.
  rope(__move_source<_Self> __src)
    : _M_tree_ptr(__src.get()._M_tree_ptr, __src.get()._M_tree_ptr._M_data) {
    __src.get()._M_tree_ptr._M_data = 0;
  }
#endif

  ~rope() {
    _S_unref(_M_tree_ptr._M_data);
  }
   1490 
  // Assignment shares __x's tree.  The new reference is taken before
  // the old one is released (inside _M_reset), so self-assignment is
  // safe.
  _Self& operator=(const _Self& __x) {
    _STLP_ASSERT(get_allocator() == __x.get_allocator())
    _S_ref(__x._M_tree_ptr._M_data);
    _M_reset(__x._M_tree_ptr._M_data);
    return *this;
  }

  // Release the tree; an empty rope has a null root.
  void clear() {
    _S_unref(_M_tree_ptr._M_data);
    _M_tree_ptr._M_data = 0;
  }
  // Append a single character (destructive concatenation: pieces may
  // be recycled when this rope holds the only reference).
  void push_back(_CharT __x) {
    _M_reset(_S_destr_concat_char_iter(_M_tree_ptr._M_data, &__x, 1));
  }
   1505 
  // Remove the last character by taking the substring [0, size()-1).
  // Precondition: the rope is non-empty (null tree is dereferenced).
  void pop_back() {
    _RopeRep* __old = _M_tree_ptr._M_data;
    _M_tree_ptr._M_data =
      _S_substring(_M_tree_ptr._M_data, 0, _M_tree_ptr._M_data->_M_size._M_data - 1);
    _S_unref(__old);
  }
   1512 
  // Last character.  Precondition: non-empty rope.
  _CharT back() const {
    return _S_fetch(_M_tree_ptr._M_data, _M_tree_ptr._M_data->_M_size._M_data - 1);
  }
   1516 
  // Prepend a single character: build a one-character leaf and
  // concatenate it in front.  If the concat throws, the unwind
  // handler releases the temporary leaf so nothing leaks.
  void push_front(_CharT __x) {
    _RopeRep* __old = _M_tree_ptr._M_data;
    _RopeRep* __left =
      _S_RopeLeaf_from_unowned_char_ptr(&__x, 1, _M_tree_ptr);
    _STLP_TRY {
      _M_tree_ptr._M_data = _S_concat_rep(__left, _M_tree_ptr._M_data);
      _S_unref(__old);
      _S_unref(__left);
    }
    _STLP_UNWIND(_S_unref(__left))
  }
   1528 
  // Remove the first character by taking the substring [1, size()).
  // Precondition: the rope is non-empty (null tree is dereferenced).
  void pop_front() {
    _RopeRep* __old = _M_tree_ptr._M_data;
    _M_tree_ptr._M_data = _S_substring(_M_tree_ptr._M_data, 1, _M_tree_ptr._M_data->_M_size._M_data);
    _S_unref(__old);
  }
   1534 
  // First character.  Precondition: non-empty rope.
  _CharT front() const {
    return _S_fetch(_M_tree_ptr._M_data, 0);
  }
   1538 
  // Rebalance the concatenation tree, replacing our tree with the
  // balanced equivalent and releasing the old one.
  void balance() {
    _RopeRep* __old = _M_tree_ptr._M_data;
    _M_tree_ptr._M_data = _S_balance(_M_tree_ptr._M_data);
    _S_unref(__old);
  }
   1544 
  // Flatten the whole rope into __buffer.  The destination range is
  // destroyed first, then size() characters are constructed in place.
  // __buffer must have room for size() characters.
  void copy(_CharT* __buffer) const {
    _STLP_STD::_Destroy_Range(__buffer, __buffer + size());
    _S_flatten(_M_tree_ptr._M_data, __buffer);
  }
   1549 
   1550   /*
   1551    * This is the copy function from the standard, but
   1552    * with the arguments reordered to make it consistent with the
   1553    * rest of the interface.
   1554    * Note that this guaranteed not to compile if the draft standard
   1555    * order is assumed.
   1556    */
   1557   size_type copy(size_type __pos, size_type __n, _CharT* __buffer) const {
   1558     size_t _p_size = size();
   1559     size_t __len = (__pos + __n > _p_size? _p_size - __pos : __n);
   1560 
   1561     _STLP_STD::_Destroy_Range(__buffer, __buffer + __len);
   1562     _S_flatten(_M_tree_ptr._M_data, __pos, __len, __buffer);
   1563     return __len;
   1564   }
   1565 
# ifdef _STLP_DEBUG
  // Print to stdout, exposing structure.  May be useful for
  // performance debugging.  Debug builds only.
  void dump() {
    _S_dump(_M_tree_ptr._M_data);
  }
# endif
   1573 
  // Convert to 0 terminated string in new allocated memory.
  // Embedded 0s in the input do not terminate the copy.
  const _CharT* c_str() const;

  // As above, but also use the flattened representation as the
  // the new rope representation.
  const _CharT* replace_with_c_str();

  // Reclaim memory for the c_str generated flattened string.
  // Intentionally undocumented, since it's hard to say when this
  // is safe for multiple threads.
  void delete_c_str () {
    if (0 == _M_tree_ptr._M_data) return;
    // If the tree is a single leaf whose data doubles as the cached
    // c_str (the replace_with_c_str case), the buffer is shared with
    // the rope representation and must not be freed.
    if (_RopeRep::_S_leaf == _M_tree_ptr._M_data->_M_tag &&
        ((_RopeLeaf*)_M_tree_ptr._M_data)->_M_data ==
        _M_tree_ptr._M_data->_M_c_string) {
      // Representation shared
      return;
    }
    _M_tree_ptr._M_data->_M_free_c_string();
    _M_tree_ptr._M_data->_M_c_string = 0;
  }
   1596 
  // Unchecked element access; returns the character by value since
  // the tree is immutable.  Use at() for range-checked access.
  _CharT operator[] (size_type __pos) const {
    return _S_fetch(_M_tree_ptr._M_data, __pos);
  }
   1600 
  // Range-checked element access; throws out_of_range for bad __pos.
  _CharT at(size_type __pos) const {
    if (__pos >= size()) _M_throw_out_of_range();
    return (*this)[__pos];
  }
   1605 
   1606   const_iterator begin() const {
   1607     return(const_iterator(_M_tree_ptr._M_data, 0));
   1608   }
   1609 
   1610   // An easy way to get a const iterator from a non-const container.
   1611   const_iterator const_begin() const {
   1612     return(const_iterator(_M_tree_ptr._M_data, 0));
   1613   }
   1614 
   1615   const_iterator end() const {
   1616     return(const_iterator(_M_tree_ptr._M_data, size()));
   1617   }
   1618 
   1619   const_iterator const_end() const {
   1620     return(const_iterator(_M_tree_ptr._M_data, size()));
   1621   }
   1622 
   1623   size_type size() const {
   1624     return(0 == _M_tree_ptr._M_data? 0 : _M_tree_ptr._M_data->_M_size._M_data);
   1625   }
   1626 
   1627   size_type length() const {
   1628     return size();
   1629   }
   1630 
  // Largest size for which rebalancing is guaranteed to succeed.
  size_type max_size() const {
    return _S_min_len[__ROPE_MAX_DEPTH-1] - 1;
    //  Guarantees that the result can be sufficiently
    //  balanced.  Longer ropes will probably still work,
    //  but it's harder to make guarantees.
  }
   1637 
  // Reverse traversal adaptors over the const iterators above.
  const_reverse_iterator rbegin() const {
    return const_reverse_iterator(end());
  }

  const_reverse_iterator const_rbegin() const {
    return const_reverse_iterator(end());
  }

  const_reverse_iterator rend() const {
    return const_reverse_iterator(begin());
  }

  const_reverse_iterator const_rend() const {
    return const_reverse_iterator(begin());
  }
  // The symmetric cases are intentionally omitted, since they're presumed
  // to be less common, and we don't handle them as well.
   1655 
  // The following should really be templatized.
  // The first argument should be an input iterator or
  // forward iterator with value_type _CharT.
  // Append __n characters from __iter.
  _Self& append(const _CharT* __iter, size_t __n) {
    _M_reset(_S_destr_concat_char_iter(_M_tree_ptr._M_data, __iter, __n));
    return *this;
  }

  // Append a 0-terminated string (length computed here).
  _Self& append(const _CharT* __c_string) {
    size_t __len = _S_char_ptr_len(__c_string);
    append(__c_string, __len);
    return *this;
  }

  // Append the character range [__s, __e).
  _Self& append(const _CharT* __s, const _CharT* __e) {
    _M_reset(_S_destr_concat_char_iter(_M_tree_ptr._M_data, __s, __e - __s));
    return *this;
  }

  // Append the rope range [__s, __e); both iterators must refer to
  // the same rope.  The substring holder releases its temporary on
  // scope exit, including on exceptions.
  _Self& append(const_iterator __s, const_iterator __e) {
    _STLP_ASSERT(__s._M_root == __e._M_root)
    _STLP_ASSERT(get_allocator() == __s._M_root->get_allocator())
    _Self_destruct_ptr __appendee(_S_substring(__s._M_root, __s._M_current_pos, __e._M_current_pos));
    _M_reset(_S_concat_rep(_M_tree_ptr._M_data, (_RopeRep*)__appendee));
    return *this;
  }

  // Append a single character.
  _Self& append(_CharT __c) {
    _M_reset(_S_destr_concat_char_iter(_M_tree_ptr._M_data, &__c, 1));
    return *this;
  }

  _Self& append() { return append(_CharT()); }  // XXX why?

  // Append another rope; concatenation shares both trees.
  _Self& append(const _Self& __y) {
    _STLP_ASSERT(__y.get_allocator() == get_allocator())
    _M_reset(_S_concat_rep(_M_tree_ptr._M_data, __y._M_tree_ptr._M_data));
    return *this;
  }

  // Append __n copies of __c, built as a temporary rope.
  _Self& append(size_t __n, _CharT __c) {
    rope<_CharT,_Alloc> __last(__n, __c);
    return append(__last);
  }
   1700 
  // Constant-time swap: exchanges the tree pointers (and allocators).
  void swap(_Self& __b) {
    _M_tree_ptr.swap(__b._M_tree_ptr);
  }
#if defined (_STLP_USE_PARTIAL_SPEC_WORKAROUND) && !defined (_STLP_FUNCTION_TMPL_PARTIAL_ORDER)
  void _M_swap_workaround(_Self& __x) { swap(__x); }
#endif
   1707 
protected:
  // Core splice primitive: returns a tree equal to __old with the
  // range [__pos1, __pos2) replaced by __r (or removed when __r is 0).
  // Result is included in refcount.
  static _RopeRep* replace(_RopeRep* __old, size_t __pos1,
                           size_t __pos2, _RopeRep* __r) {
    if (0 == __old) { _S_ref(__r); return __r; }
    // Temporary prefix/suffix substrings; the holders release them on
    // all exits, including exceptional ones.
    _Self_destruct_ptr __left(_S_substring(__old, 0, __pos1));
    _Self_destruct_ptr __right(_S_substring(__old, __pos2, __old->_M_size._M_data));
    _STLP_MPWFIX_TRY  //*TY 06/01/2000 -
    _RopeRep* __result;

    if (0 == __r) {
      __result = _S_concat_rep(__left, __right);
    } else {
      _STLP_ASSERT(__old->get_allocator() == __r->get_allocator())
      _Self_destruct_ptr __left_result(_S_concat_rep(__left, __r));
      __result = _S_concat_rep(__left_result, __right);
    }
    return __result;
    _STLP_MPWFIX_CATCH  //*TY 06/01/2000 -
  }
   1728 
public:
  // Insert a rope at position __p; implemented as a zero-width splice.
  void insert(size_t __p, const _Self& __r) {
    if (__p > size()) _M_throw_out_of_range();
    _STLP_ASSERT(get_allocator() == __r.get_allocator())
    _M_reset(replace(_M_tree_ptr._M_data, __p, __p, __r._M_tree_ptr._M_data));
  }

  // Insert __n copies of __c.
  void insert(size_t __p, size_t __n, _CharT __c) {
    rope<_CharT,_Alloc> __r(__n,__c);
    insert(__p, __r);
  }

  // Insert __n characters from __i, splicing prefix + chars + suffix.
  void insert(size_t __p, const _CharT* __i, size_t __n) {
    if (__p > size()) _M_throw_out_of_range();
    _Self_destruct_ptr __left(_S_substring(_M_tree_ptr._M_data, 0, __p));
    _Self_destruct_ptr __right(_S_substring(_M_tree_ptr._M_data, __p, size()));
    _Self_destruct_ptr __left_result(
                                     _S_concat_char_iter(__left, __i, __n));
    // _S_ destr_concat_char_iter should be safe here.
    // But as it stands it's probably not a win, since __left
    // is likely to have additional references.
    _M_reset(_S_concat_rep(__left_result, __right));
  }

  // Insert a 0-terminated string.
  void insert(size_t __p, const _CharT* __c_string) {
    insert(__p, __c_string, _S_char_ptr_len(__c_string));
  }

  // Insert a single character.
  void insert(size_t __p, _CharT __c) {
    insert(__p, &__c, 1);
  }

  // Insert a default-constructed character.
  void insert(size_t __p) {
    _CharT __c = _CharT();
    insert(__p, &__c, 1);
  }

  // Insert the character range [__i, __j).
  void insert(size_t __p, const _CharT* __i, const _CharT* __j) {
    _Self __r(__i, __j);
    insert(__p, __r);
  }

  // Insert the rope range [__i, __j) (const iterators).
  void insert(size_t __p, const const_iterator& __i,
                          const const_iterator& __j) {
    _Self __r(__i, __j);
    insert(__p, __r);
  }

  // Insert the rope range [__i, __j) (mutable iterators).
  void insert(size_t __p, const iterator& __i,
                          const iterator& __j) {
    _Self __r(__i, __j);
    insert(__p, __r);
  }
   1782 
  // (position, length) versions of replace operations:
  // Replace the range [__p, __p + __n) with the rope __r.
  void replace(size_t __p, size_t __n, const _Self& __r) {
    if (__p > size()) _M_throw_out_of_range();
    _M_reset(replace(_M_tree_ptr._M_data, __p, __p + __n, __r._M_tree_ptr._M_data));
  }

  // Replace with __i_len characters from __i.
  void replace(size_t __p, size_t __n,
               const _CharT* __i, size_t __i_len) {
    _Self __r(__i, __i_len);
    replace(__p, __n, __r);
  }

  // Replace with a single character.
  void replace(size_t __p, size_t __n, _CharT __c) {
    _Self __r(__c);
    replace(__p, __n, __r);
  }

  // Replace with a 0-terminated string.
  void replace(size_t __p, size_t __n, const _CharT* __c_string) {
    _Self __r(__c_string);
    replace(__p, __n, __r);
  }

  // Replace with the character range [__i, __j).
  void replace(size_t __p, size_t __n,
               const _CharT* __i, const _CharT* __j) {
    _Self __r(__i, __j);
    replace(__p, __n, __r);
  }

  // Replace with the rope range [__i, __j) (const iterators).
  void replace(size_t __p, size_t __n,
               const const_iterator& __i, const const_iterator& __j) {
    _Self __r(__i, __j);
    replace(__p, __n, __r);
  }

  // Replace with the rope range [__i, __j) (mutable iterators).
  void replace(size_t __p, size_t __n,
               const iterator& __i, const iterator& __j) {
    _Self __r(__i, __j);
    replace(__p, __n, __r);
  }
   1822 
  // Single character variants:
  // Overwrite the character at __p via a mutable iterator's proxy.
  void replace(size_t __p, _CharT __c) {
    if (__p > size()) _M_throw_out_of_range();
    iterator __i(this, __p);
    *__i = __c;
  }

  // The remaining variants replace the one character at __p with the
  // given content, delegating to the (position, length) overloads.
  void replace(size_t __p, const _Self& __r) {
    replace(__p, 1, __r);
  }

  void replace(size_t __p, const _CharT* __i, size_t __i_len) {
    replace(__p, 1, __i, __i_len);
  }

  void replace(size_t __p, const _CharT* __c_string) {
    replace(__p, 1, __c_string);
  }

  void replace(size_t __p, const _CharT* __i, const _CharT* __j) {
    replace(__p, 1, __i, __j);
  }

  void replace(size_t __p, const const_iterator& __i,
                           const const_iterator& __j) {
    replace(__p, 1, __i, __j);
  }

  void replace(size_t __p, const iterator& __i,
                           const iterator& __j) {
    replace(__p, 1, __i, __j);
  }
   1855 
  // Erase, (position, size) variant.
  // Removes __n characters starting at __p (a splice with empty
  // replacement).
  void erase(size_t __p, size_t __n) {
    if (__p > size()) _M_throw_out_of_range();
    _M_reset(replace(_M_tree_ptr._M_data, __p, __p + __n, 0));
  }
   1861 
   1862   // Erase, single character
   1863   void erase(size_t __p) {
   1864     erase(__p, __p + 1);
   1865   }
   1866 
  // Insert, iterator variants.
  // All delegate to the (index, ...) overloads and return the given
  // iterator (its index is unchanged by an insertion at that index).
  iterator insert(const iterator& __p, const _Self& __r)
  { insert(__p.index(), __r); return __p; }
  iterator insert(const iterator& __p, size_t __n, _CharT __c)
  { insert(__p.index(), __n, __c); return __p; }
  iterator insert(const iterator& __p, _CharT __c)
  { insert(__p.index(), __c); return __p; }
  iterator insert(const iterator& __p )
  { insert(__p.index()); return __p; }
  iterator insert(const iterator& __p, const _CharT* c_string)
  { insert(__p.index(), c_string); return __p; }
  iterator insert(const iterator& __p, const _CharT* __i, size_t __n)
  { insert(__p.index(), __i, __n); return __p; }
  iterator insert(const iterator& __p, const _CharT* __i,
                  const _CharT* __j)
  { insert(__p.index(), __i, __j);  return __p; }
  iterator insert(const iterator& __p,
                  const const_iterator& __i, const const_iterator& __j)
  { insert(__p.index(), __i, __j); return __p; }
  iterator insert(const iterator& __p,
                  const iterator& __i, const iterator& __j)
  { insert(__p.index(), __i, __j); return __p; }
   1889 
  // Replace, range variants.
  // Each replaces the range [__p, __q), converting the iterator pair
  // to a (position, length) call.
  void replace(const iterator& __p, const iterator& __q,
               const _Self& __r)
  { replace(__p.index(), __q.index() - __p.index(), __r); }
  void replace(const iterator& __p, const iterator& __q, _CharT __c)
  { replace(__p.index(), __q.index() - __p.index(), __c); }
  void replace(const iterator& __p, const iterator& __q,
               const _CharT* __c_string)
  { replace(__p.index(), __q.index() - __p.index(), __c_string); }
  void replace(const iterator& __p, const iterator& __q,
               const _CharT* __i, size_t __n)
  { replace(__p.index(), __q.index() - __p.index(), __i, __n); }
  void replace(const iterator& __p, const iterator& __q,
               const _CharT* __i, const _CharT* __j)
  { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
  void replace(const iterator& __p, const iterator& __q,
               const const_iterator& __i, const const_iterator& __j)
  { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
  void replace(const iterator& __p, const iterator& __q,
               const iterator& __i, const iterator& __j)
  { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
   1911 
  // Replace, iterator variants.
  // Each replaces the single character at __p with the given content.
  void replace(const iterator& __p, const _Self& __r)
  { replace(__p.index(), __r); }
  void replace(const iterator& __p, _CharT __c)
  { replace(__p.index(), __c); }
  void replace(const iterator& __p, const _CharT* __c_string)
  { replace(__p.index(), __c_string); }
  void replace(const iterator& __p, const _CharT* __i, size_t __n)
  { replace(__p.index(), __i, __n); }
  void replace(const iterator& __p, const _CharT* __i, const _CharT* __j)
  { replace(__p.index(), __i, __j); }
  void replace(const iterator& __p, const_iterator __i,
               const_iterator __j)
  { replace(__p.index(), __i, __j); }
  void replace(const iterator& __p, iterator __i, iterator __j)
  { replace(__p.index(), __i, __j); }
   1928 
  // Iterator and range variants of erase
  // Both return an iterator at the erase position in the new rope.
  iterator erase(const iterator& __p, const iterator& __q) {
    size_t __p_index = __p.index();
    erase(__p_index, __q.index() - __p_index);
    return iterator(this, __p_index);
  }
  iterator erase(const iterator& __p) {
    size_t __p_index = __p.index();
    erase(__p_index, 1);
    return iterator(this, __p_index);
  }
   1940 
  // Substring of __len characters starting at __start.  Note the
  // nonstandard default length of 1 (single character).  Sharing
  // makes this cheap: the result references the same tree.
  _Self substr(size_t __start, size_t __len = 1) const {
    if (__start > size()) _M_throw_out_of_range();
    return rope<_CharT,_Alloc>(_S_substring(_M_tree_ptr._M_data, __start, __start + __len));
  }

  // Substring [__start, __end) given by mutable iterators.
  _Self substr(iterator __start, iterator __end) const {
    return rope<_CharT,_Alloc>(_S_substring(_M_tree_ptr._M_data, __start.index(), __end.index()));
  }

  // Single-character substring at __start.
  _Self substr(iterator __start) const {
    size_t __pos = __start.index();
    return rope<_CharT,_Alloc>(_S_substring(_M_tree_ptr._M_data, __pos, __pos + 1));
  }

  // Substring [__start, __end) given by const iterators.
  _Self substr(const_iterator __start, const_iterator __end) const {
    // This might eventually take advantage of the cache in the
    // iterator.
    return rope<_CharT,_Alloc>(_S_substring(_M_tree_ptr._M_data, __start.index(), __end.index()));
  }

  // Single-character substring at a const iterator position.
  rope<_CharT,_Alloc> substr(const_iterator __start) {
    size_t __pos = __start.index();
    return rope<_CharT,_Alloc>(_S_substring(_M_tree_ptr._M_data, __pos, __pos + 1));
  }
   1965 
   1966 #include <stl/_string_npos.h>
   1967 
  // Find the first occurrence of rope __s at or after __pos.
  // Returns npos (or size() with the old rope semantics) on failure.
  size_type find(const _Self& __s, size_type __pos = 0) const {
    if (__pos >= size())
# ifndef _STLP_OLD_ROPE_SEMANTICS
      return npos;
# else
      return size();
# endif

    size_type __result_pos;
    const_iterator __result = _STLP_STD::search(const_begin() + (ptrdiff_t)__pos, const_end(), __s.begin(), __s.end() );
    __result_pos = __result.index();
# ifndef _STLP_OLD_ROPE_SEMANTICS
    if (__result_pos == size()) __result_pos = npos;
# endif
    return __result_pos;
  }
   1984   size_type find(_CharT __c, size_type __pos = 0) const;
   1985   size_type find(const _CharT* __s, size_type __pos = 0) const {
   1986     size_type __result_pos;
   1987     const_iterator __result = _STLP_STD::search(const_begin() + (ptrdiff_t)__pos, const_end(),
   1988                                                 __s, __s + _S_char_ptr_len(__s));
   1989     __result_pos = __result.index();
   1990 # ifndef _STLP_OLD_ROPE_SEMANTICS
   1991     if (__result_pos == size()) __result_pos = npos;
   1992 # endif
   1993     return __result_pos;
   1994   }
   1995 
  // Mutable iteration.  Mutable iterators are more expensive than
  // const ones (writes force copying), hence the explicit names.
  iterator mutable_begin() {
    return(iterator(this, 0));
  }

  iterator mutable_end() {
    return(iterator(this, size()));
  }

  reverse_iterator mutable_rbegin() {
    return reverse_iterator(mutable_end());
  }

  reverse_iterator mutable_rend() {
    return reverse_iterator(mutable_begin());
  }

  // Writable proxy for the character at __pos.
  reference mutable_reference_at(size_type __pos) {
    return reference(this, __pos);
  }
   2015 
# ifdef __STD_STUFF
  // Optional container-like interface: mutable operator[]/at return
  // proxy references, and begin()/end() return mutable iterators.
  reference operator[] (size_type __pos) {
    return reference(this, __pos);
  }

  reference at(size_type __pos) {
    if (__pos >= size()) _M_throw_out_of_range();
    return (*this)[__pos];
  }

  // Capacity operations are no-ops: ropes manage storage internally.
  void resize(size_type, _CharT) {}
  void resize(size_type) {}
  void reserve(size_type = 0) {}
  size_type capacity() const {
    return max_size();
  }

  // Stuff below this line is dangerous because it's error prone.
  // I would really like to get rid of it.
  // copy function with funny arg ordering.
  size_type copy(_CharT* __buffer, size_type __n,
                 size_type __pos = 0) const {
    return copy(__pos, __n, __buffer);
  }

  iterator end() { return mutable_end(); }

  iterator begin() { return mutable_begin(); }

  reverse_iterator rend() { return mutable_rend(); }

  reverse_iterator rbegin() { return mutable_rbegin(); }

# else

  // Default: even on non-const ropes, plain begin()/end() are const
  // iterators to avoid accidental (expensive) mutation.
  const_iterator end() { return const_end(); }

  const_iterator begin() { return const_begin(); }

  const_reverse_iterator rend() { return const_rend(); }

  const_reverse_iterator rbegin() { return const_rbegin(); }

# endif
   2060 }; //class rope
   2061 
#if defined (__GNUC__) && (__GNUC__ == 2) && (__GNUC_MINOR__ == 96)
// gcc 2.96 workaround: provide the out-of-class definition of the
// static npos member for this compiler only.
template <class _CharT, class _Alloc>
const size_t rope<_CharT, _Alloc>::npos = ~(size_t) 0;
#endif
   2066 
// Random access through a const iterator: fetch the character at
// offset __n from the iterator's current position.
template <class _CharT, class _Alloc>
inline _CharT
_Rope_const_iterator< _CharT, _Alloc>::operator[](size_t __n)
{ return rope<_CharT,_Alloc>::_S_fetch(this->_M_root, this->_M_current_pos + __n); }
   2071 
// Const-iterator comparisons: equality requires the same rope tree
// AND the same position; ordering compares positions only.
template <class _CharT, class _Alloc>
inline bool operator== (const _Rope_const_iterator<_CharT,_Alloc>& __x,
                        const _Rope_const_iterator<_CharT,_Alloc>& __y) {
  return (__x._M_current_pos == __y._M_current_pos &&
          __x._M_root == __y._M_root);
}

template <class _CharT, class _Alloc>
inline bool operator< (const _Rope_const_iterator<_CharT,_Alloc>& __x,
                       const _Rope_const_iterator<_CharT,_Alloc>& __y)
{ return (__x._M_current_pos < __y._M_current_pos); }

#ifdef _STLP_USE_SEPARATE_RELOPS_NAMESPACE

// Derived relational operators, expressed in terms of == and <.
template <class _CharT, class _Alloc>
inline bool operator!= (const _Rope_const_iterator<_CharT,_Alloc>& __x,
                        const _Rope_const_iterator<_CharT,_Alloc>& __y)
{ return !(__x == __y); }

template <class _CharT, class _Alloc>
inline bool operator> (const _Rope_const_iterator<_CharT,_Alloc>& __x,
                       const _Rope_const_iterator<_CharT,_Alloc>& __y)
{ return __y < __x; }

template <class _CharT, class _Alloc>
inline bool operator<= (const _Rope_const_iterator<_CharT,_Alloc>& __x,
                        const _Rope_const_iterator<_CharT,_Alloc>& __y)
{ return !(__y < __x); }

template <class _CharT, class _Alloc>
inline bool operator>= (const _Rope_const_iterator<_CharT,_Alloc>& __x,
                        const _Rope_const_iterator<_CharT,_Alloc>& __y)
{ return !(__x < __y); }

#endif /* _STLP_USE_SEPARATE_RELOPS_NAMESPACE */

// Iterator difference and offset arithmetic operate on positions;
// the underlying tree pointer is carried along unchanged.
template <class _CharT, class _Alloc>
inline ptrdiff_t operator-(const _Rope_const_iterator<_CharT,_Alloc>& __x,
                           const _Rope_const_iterator<_CharT,_Alloc>& __y)
{ return (ptrdiff_t)__x._M_current_pos - (ptrdiff_t)__y._M_current_pos; }

#if !defined( __MWERKS__ ) || __MWERKS__ >= 0x2000  // dwa 8/21/97  - "ambiguous access to overloaded function" bug.
template <class _CharT, class _Alloc>
inline _Rope_const_iterator<_CharT,_Alloc>
operator-(const _Rope_const_iterator<_CharT,_Alloc>& __x, ptrdiff_t __n)
{ return _Rope_const_iterator<_CharT,_Alloc>(__x._M_root, __x._M_current_pos - __n); }
# endif

template <class _CharT, class _Alloc>
inline _Rope_const_iterator<_CharT,_Alloc>
operator+(const _Rope_const_iterator<_CharT,_Alloc>& __x, ptrdiff_t __n)
{ return _Rope_const_iterator<_CharT,_Alloc>(__x._M_root, __x._M_current_pos + __n); }

template <class _CharT, class _Alloc>
inline _Rope_const_iterator<_CharT,_Alloc>
operator+(ptrdiff_t __n, const _Rope_const_iterator<_CharT,_Alloc>& __x)
{ return _Rope_const_iterator<_CharT,_Alloc>(__x._M_root, __x._M_current_pos + __n); }
   2129 
// Mutable-iterator comparisons: equality requires the same owning
// rope AND the same position; ordering compares positions only.
template <class _CharT, class _Alloc>
inline bool operator== (const _Rope_iterator<_CharT,_Alloc>& __x,
                        const _Rope_iterator<_CharT,_Alloc>& __y) {
  return (__x._M_current_pos == __y._M_current_pos &&
          __x._M_root_rope == __y._M_root_rope);
}

template <class _CharT, class _Alloc>
inline bool operator< (const _Rope_iterator<_CharT,_Alloc>& __x,
                       const _Rope_iterator<_CharT,_Alloc>& __y)
{ return (__x._M_current_pos < __y._M_current_pos); }

#if defined (_STLP_USE_SEPARATE_RELOPS_NAMESPACE)
// Derived relational operators, expressed in terms of == and <.
template <class _CharT, class _Alloc>
inline bool operator!= (const _Rope_iterator<_CharT,_Alloc>& __x,
                        const _Rope_iterator<_CharT,_Alloc>& __y)
{ return !(__x == __y); }

template <class _CharT, class _Alloc>
inline bool operator> (const _Rope_iterator<_CharT,_Alloc>& __x,
                       const _Rope_iterator<_CharT,_Alloc>& __y)
{ return __y < __x; }

template <class _CharT, class _Alloc>
inline bool operator<= (const _Rope_iterator<_CharT,_Alloc>& __x,
                        const _Rope_iterator<_CharT,_Alloc>& __y)
{ return !(__y < __x); }

template <class _CharT, class _Alloc>
inline bool operator>= (const _Rope_iterator<_CharT,_Alloc>& __x,
                        const _Rope_iterator<_CharT,_Alloc>& __y)
{ return !(__x < __y); }
#endif /* _STLP_USE_SEPARATE_RELOPS_NAMESPACE */

// Difference and offset arithmetic, as for the const iterator above.
template <class _CharT, class _Alloc>
inline ptrdiff_t operator-(const _Rope_iterator<_CharT,_Alloc>& __x,
                           const _Rope_iterator<_CharT,_Alloc>& __y)
{ return (ptrdiff_t)__x._M_current_pos - (ptrdiff_t)__y._M_current_pos; }

#if !defined( __MWERKS__ ) || __MWERKS__ >= 0x2000  // dwa 8/21/97  - "ambiguous access to overloaded function" bug.
template <class _CharT, class _Alloc>
inline _Rope_iterator<_CharT,_Alloc>
operator-(const _Rope_iterator<_CharT,_Alloc>& __x,
          ptrdiff_t __n) {
  return _Rope_iterator<_CharT,_Alloc>(__x._M_root_rope, __x._M_current_pos - __n);
}
# endif

template <class _CharT, class _Alloc>
inline _Rope_iterator<_CharT,_Alloc>
operator+(const _Rope_iterator<_CharT,_Alloc>& __x,
          ptrdiff_t __n) {
  return _Rope_iterator<_CharT,_Alloc>(__x._M_root_rope, __x._M_current_pos + __n);
}

template <class _CharT, class _Alloc>
inline _Rope_iterator<_CharT,_Alloc>
operator+(ptrdiff_t __n, const _Rope_iterator<_CharT,_Alloc>& __x) {
  return _Rope_iterator<_CharT,_Alloc>(__x._M_root_rope, __x._M_current_pos + __n);
}
   2190 
// Rope concatenation: builds a new rope sharing both operands' trees.
template <class _CharT, class _Alloc>
inline rope<_CharT,_Alloc>
operator+ (const rope<_CharT,_Alloc>& __left,
           const rope<_CharT,_Alloc>& __right) {
  _STLP_ASSERT(__left.get_allocator() == __right.get_allocator())
  return rope<_CharT,_Alloc>(rope<_CharT,_Alloc>::_S_concat_rep(__left._M_tree_ptr._M_data, __right._M_tree_ptr._M_data));
  // Inlining this should make it possible to keep __left and __right in registers.
}

template <class _CharT, class _Alloc>
inline rope<_CharT,_Alloc>&
operator+= (rope<_CharT,_Alloc>& __left,
            const rope<_CharT,_Alloc>& __right) {
  __left.append(__right);
  return __left;
}

// Concatenation with a 0-terminated string.
template <class _CharT, class _Alloc>
inline rope<_CharT,_Alloc>
operator+ (const rope<_CharT,_Alloc>& __left,
           const _CharT* __right) {
  size_t __rlen = rope<_CharT,_Alloc>::_S_char_ptr_len(__right);
  return rope<_CharT,_Alloc>(rope<_CharT,_Alloc>::_S_concat_char_iter(__left._M_tree_ptr._M_data, __right, __rlen));
}

template <class _CharT, class _Alloc>
inline rope<_CharT,_Alloc>&
operator+= (rope<_CharT,_Alloc>& __left,
            const _CharT* __right) {
  __left.append(__right);
  return __left;
}

// Concatenation with a single character.
template <class _CharT, class _Alloc>
inline rope<_CharT,_Alloc>
operator+ (const rope<_CharT,_Alloc>& __left, _CharT __right) {
  return rope<_CharT,_Alloc>(rope<_CharT,_Alloc>::_S_concat_char_iter(__left._M_tree_ptr._M_data, &__right, 1));
}

template <class _CharT, class _Alloc>
inline rope<_CharT,_Alloc>&
operator+= (rope<_CharT,_Alloc>& __left, _CharT __right) {
  __left.append(__right);
  return __left;
}
   2236 
// Lexicographic comparison via rope::compare (three-way result).
template <class _CharT, class _Alloc>
inline bool
operator< (const rope<_CharT,_Alloc>& __left,
           const rope<_CharT,_Alloc>& __right) {
  return __left.compare(__right) < 0;
}

template <class _CharT, class _Alloc>
inline bool
operator== (const rope<_CharT,_Alloc>& __left,
            const rope<_CharT,_Alloc>& __right) {
  return __left.compare(__right) == 0;
}
   2250 
#ifdef _STLP_USE_SEPARATE_RELOPS_NAMESPACE

// Derived relational operators for ropes, in terms of == and <.
template <class _CharT, class _Alloc>
inline bool
operator!= (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
  return !(__x == __y);
}

template <class _CharT, class _Alloc>
inline bool
operator> (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
  return __y < __x;
}

template <class _CharT, class _Alloc>
inline bool
operator<= (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
  return !(__y < __x);
}

template <class _CharT, class _Alloc>
inline bool
operator>= (const rope<_CharT,_Alloc>& __x, const rope<_CharT,_Alloc>& __y) {
  return !(__x < __y);
}

template <class _CharT, class _Alloc>
inline bool operator!= (const _Rope_char_ptr_proxy<_CharT,_Alloc>& __x,
                        const _Rope_char_ptr_proxy<_CharT,_Alloc>& __y) {
  return !(__x == __y);
}

#endif /* _STLP_USE_SEPARATE_RELOPS_NAMESPACE */

// Char-pointer proxies compare equal when they designate the same
// position in the same rope.
template <class _CharT, class _Alloc>
inline bool operator== (const _Rope_char_ptr_proxy<_CharT,_Alloc>& __x,
                        const _Rope_char_ptr_proxy<_CharT,_Alloc>& __y) {
  return (__x._M_pos == __y._M_pos && __x._M_root == __y._M_root);
}
   2290 
   2291 #if !defined (_STLP_USE_NO_IOSTREAMS)
// Stream inserter for ropes — declaration only; the definition lives in
// a separate translation unit (presumably stl/_rope.c, included below
// unless _STLP_LINK_TIME_INSTANTIATION is set).
template<class _CharT, class _Traits, class _Alloc>
basic_ostream<_CharT, _Traits>& operator<< (basic_ostream<_CharT, _Traits>& __o,
                                            const rope<_CharT, _Alloc>& __r);
   2295 #endif
   2296 
// Convenience typedefs for character ropes, mirroring the
// string/wstring pair provided for basic_string.
typedef rope<char, allocator<char> > crope;
#if defined (_STLP_HAS_WCHAR_T)
typedef rope<wchar_t, allocator<wchar_t> > wrope;
#endif
   2301 
   2302 inline crope::reference __mutable_reference_at(crope& __c, size_t __i)
   2303 { return __c.mutable_reference_at(__i); }
   2304 
   2305 #if defined (_STLP_HAS_WCHAR_T)
   2306 inline wrope::reference __mutable_reference_at(wrope& __c, size_t __i)
   2307 { return __c.mutable_reference_at(__i); }
   2308 #endif
   2309 
   2310 #if defined (_STLP_FUNCTION_TMPL_PARTIAL_ORDER)
   2311 template <class _CharT, class _Alloc>
   2312 inline void swap(rope<_CharT,_Alloc>& __x, rope<_CharT,_Alloc>& __y)
   2313 { __x.swap(__y); }
   2314 #else
   2315 
   2316 inline void swap(crope& __x, crope& __y) { __x.swap(__y); }
   2317 # ifdef _STLP_HAS_WCHAR_T  // dwa 8/21/97
   2318 inline void swap(wrope& __x, wrope& __y) { __x.swap(__y); }
   2319 # endif
   2320 
   2321 #endif /* _STLP_FUNCTION_TMPL_PARTIAL_ORDER */
   2322 
   2323 
   2324 // Hash functions should probably be revisited later:
   2325 _STLP_TEMPLATE_NULL struct hash<crope> {
   2326   size_t operator()(const crope& __str) const {
   2327     size_t _p_size = __str.size();
   2328 
   2329     if (0 == _p_size) return 0;
   2330     return 13*__str[0] + 5*__str[_p_size - 1] + _p_size;
   2331   }
   2332 };
   2333 
   2334 #if defined (_STLP_HAS_WCHAR_T)  // dwa 8/21/97
   2335 _STLP_TEMPLATE_NULL struct hash<wrope> {
   2336   size_t operator()(const wrope& __str) const {
   2337     size_t _p_size = __str.size();
   2338 
   2339     if (0 == _p_size) return 0;
   2340     return 13*__str[0] + 5*__str[_p_size - 1] + _p_size;
   2341   }
   2342 };
   2343 #endif
   2344 
   2345 #if (!defined (_STLP_MSVC) || (_STLP_MSVC >= 1310))
// Not available for MSVC older than 1310 (VC++ 7.1); see the guard above.
// Rotation for rope iterators — declaration only; the definition is
// presumably in stl/_rope.c (included below).  Guarded above so it is
// not declared for MSVC versions older than 1310.
template<class _CharT,class _Alloc>
#  if defined (__DMC__)
extern
#  endif
void _Rope_rotate(_Rope_iterator<_CharT, _Alloc> __first,
                  _Rope_iterator<_CharT, _Alloc> __middle,
                  _Rope_iterator<_CharT, _Alloc> __last);
   2354 
   2355 inline void rotate(_Rope_iterator<char, allocator<char> > __first,
   2356                    _Rope_iterator<char, allocator<char> > __middle,
   2357                    _Rope_iterator<char, allocator<char> > __last)
   2358 { _Rope_rotate(__first, __middle, __last); }
   2359 #endif
   2360 
   2361 template <class _CharT, class _Alloc>
   2362 inline _Rope_char_ref_proxy<_CharT, _Alloc>::operator _CharT () const {
   2363   if (_M_current_valid) {
   2364     return _M_current;
   2365   } else {
   2366     return _My_rope::_S_fetch(_M_root->_M_tree_ptr._M_data, _M_pos);
   2367   }
   2368 }
   2369 
   2370 #if defined (_STLP_CLASS_PARTIAL_SPECIALIZATION) && !defined (_STLP_NO_MOVE_SEMANTIC)
// Rope supports STLport's move framework; whether the move is "complete"
// is inherited from the allocator's own move traits.
template <class _CharT, class _Alloc>
struct __move_traits<rope<_CharT, _Alloc> > {
  typedef __true_type implemented;
  // Completeness depends on the allocator:
  typedef typename __move_traits<_Alloc>::complete complete;
};
   2377 #endif
   2378 
   2379 _STLP_END_NAMESPACE
   2380 
   2381 #if !defined (_STLP_LINK_TIME_INSTANTIATION)
   2382 #  include <stl/_rope.c>
   2383 #endif
   2384 
   2385 #endif /* _STLP_INTERNAL_ROPE_H */
   2386 
   2387 // Local Variables:
   2388 // mode:C++
   2389 // End:
   2390