1/*
2 * Copyright (c) 1997
3 * Silicon Graphics Computer Systems, Inc.
4 *
5 * Permission to use, copy, modify, distribute and sell this software
6 * and its documentation for any purpose is hereby granted without fee,
7 * provided that the above copyright notice appear in all copies and
8 * that both that copyright notice and this permission notice appear
9 * in supporting documentation.  Silicon Graphics makes no
10 * representations about the suitability of this software for any
11 * purpose.  It is provided "as is" without express or implied warranty.
12 */
13
14/* NOTE: This is an internal header file, included by other STL headers.
15 *   You should not attempt to use it directly.
16 */
17
18// rope<_CharT,_Alloc> is a sequence of _CharT.
19// Ropes appear to be mutable, but update operations
20// really copy enough of the data structure to leave the original
21// valid.  Thus ropes can be logically copied by just copying
22// a pointer value.
23
24#ifndef __SGI_STL_INTERNAL_ROPE_H
25# define __SGI_STL_INTERNAL_ROPE_H
26
27# ifdef __GC
28#   define __GC_CONST const
29# else
30#   define __GC_CONST   // constant except for deallocation
31# endif
32# ifdef __STL_SGI_THREADS
33#    include <mutex.h>
34# endif
35
36__STL_BEGIN_NAMESPACE
37
38#if defined(__sgi) && !defined(__GNUC__) && (_MIPS_SIM != _MIPS_SIM_ABI32)
39#pragma set woff 1174
40#endif
41
42// The _S_eos function is used for those functions that
43// convert to/from C-like strings to detect the end of the string.
44
45// The end-of-C-string character.
46// This is what the draft standard says it should be.
47template <class _CharT>
48inline _CharT _S_eos(_CharT*) { return _CharT(); }
49
50// Test for basic character types.
51// For basic character types leaves having a trailing eos.
52template <class _CharT>
53inline bool _S_is_basic_char_type(_CharT*) { return false; }
54template <class _CharT>
55inline bool _S_is_one_byte_char_type(_CharT*) { return false; }
56
57inline bool _S_is_basic_char_type(char*) { return true; }
58inline bool _S_is_one_byte_char_type(char*) { return true; }
59inline bool _S_is_basic_char_type(wchar_t*) { return true; }
60
61// Store an eos iff _CharT is a basic character type.
62// Do not reference _S_eos if it isn't.
63template <class _CharT>
64inline void _S_cond_store_eos(_CharT&) {}
65
66inline void _S_cond_store_eos(char& __c) { __c = 0; }
67inline void _S_cond_store_eos(wchar_t& __c) { __c = 0; }
68
69// char_producers are logically functions that generate a section of
70// a string.  These can be convereted to ropes.  The resulting rope
71// invokes the char_producer on demand.  This allows, for example,
72// files to be viewed as ropes without reading the entire file.
73template <class _CharT>
74class char_producer {
75    public:
76        virtual ~char_producer() {};
77        virtual void operator()(size_t __start_pos, size_t __len,
78                                _CharT* __buffer) = 0;
79        // Buffer should really be an arbitrary output iterator.
80        // That way we could flatten directly into an ostream, etc.
81        // This is thoroughly impossible, since iterator types don't
82        // have runtime descriptions.
83};
84
85// Sequence buffers:
86//
87// Sequence must provide an append operation that appends an
88// array to the sequence.  Sequence buffers are useful only if
89// appending an entire array is cheaper than appending element by element.
90// This is true for many string representations.
91// This should  perhaps inherit from ostream<sequence::value_type>
92// and be implemented correspondingly, so that they can be used
93// for formatted.  For the sake of portability, we don't do this yet.
94//
95// For now, sequence buffers behave as output iterators.  But they also
96// behave a little like basic_ostringstream<sequence::value_type> and a
97// little like containers.
98
99template<class _Sequence, size_t _Buf_sz = 100
100#   if defined(__sgi) && !defined(__GNUC__)
101#        define __TYPEDEF_WORKAROUND
102         ,class _V = typename _Sequence::value_type
103#   endif
104        >
105// The 3rd parameter works around a common compiler bug.
106class sequence_buffer : public output_iterator {
107    public:
108#       ifndef __TYPEDEF_WORKAROUND
109            typedef typename _Sequence::value_type value_type;
110#       else
111            typedef _V value_type;
112#       endif
113    protected:
114        _Sequence* _M_prefix;
115        value_type _M_buffer[_Buf_sz];
116        size_t     _M_buf_count;
117    public:
118        void flush() {
119            _M_prefix->append(_M_buffer, _M_buffer + _M_buf_count);
120            _M_buf_count = 0;
121        }
122        ~sequence_buffer() { flush(); }
123        sequence_buffer() : _M_prefix(0), _M_buf_count(0) {}
124        sequence_buffer(const sequence_buffer& __x) {
125            _M_prefix = __x._M_prefix;
126            _M_buf_count = __x._M_buf_count;
127            copy(__x._M_buffer, __x._M_buffer + __x._M_buf_count, _M_buffer);
128        }
129        sequence_buffer(sequence_buffer& __x) {
130            __x.flush();
131            _M_prefix = __x._M_prefix;
132            _M_buf_count = 0;
133        }
134        sequence_buffer(_Sequence& __s) : _M_prefix(&__s), _M_buf_count(0) {}
135        sequence_buffer& operator= (sequence_buffer& __x) {
136            __x.flush();
137            _M_prefix = __x._M_prefix;
138            _M_buf_count = 0;
139            return *this;
140        }
141        sequence_buffer& operator= (const sequence_buffer& __x) {
142            _M_prefix = __x._M_prefix;
143            _M_buf_count = __x._M_buf_count;
144            copy(__x._M_buffer, __x._M_buffer + __x._M_buf_count, _M_buffer);
145            return *this;
146        }
147        void push_back(value_type __x)
148        {
149            if (_M_buf_count < _Buf_sz) {
150                _M_buffer[_M_buf_count] = __x;
151                ++_M_buf_count;
152            } else {
153                flush();
154                _M_buffer[0] = __x;
155                _M_buf_count = 1;
156            }
157        }
158        void append(value_type* __s, size_t __len)
159        {
160            if (__len + _M_buf_count <= _Buf_sz) {
161                size_t __i = _M_buf_count;
162                size_t __j = 0;
163                for (; __j < __len; __i++, __j++) {
164                    _M_buffer[__i] = __s[__j];
165                }
166                _M_buf_count += __len;
167            } else if (0 == _M_buf_count) {
168                _M_prefix->append(__s, __s + __len);
169            } else {
170                flush();
171                append(__s, __len);
172            }
173        }
174        sequence_buffer& write(value_type* __s, size_t __len)
175        {
176            append(__s, __len);
177            return *this;
178        }
179        sequence_buffer& put(value_type __x)
180        {
181            push_back(__x);
182            return *this;
183        }
184        sequence_buffer& operator=(const value_type& __rhs)
185        {
186            push_back(__rhs);
187            return *this;
188        }
189        sequence_buffer& operator*() { return *this; }
190        sequence_buffer& operator++() { return *this; }
191        sequence_buffer& operator++(int) { return *this; }
192};
193
194// The following should be treated as private, at least for now.
195template<class _CharT>
196class _Rope_char_consumer {
197    public:
198        // If we had member templates, these should not be virtual.
199        // For now we need to use run-time parametrization where
200        // compile-time would do.  _Hence this should all be private
201        // for now.
202        // The symmetry with char_producer is accidental and temporary.
203        virtual ~_Rope_char_consumer() {};
204        virtual bool operator()(const _CharT* __buffer, size_t __len) = 0;
205};
206
207//
208// What follows should really be local to rope.  Unfortunately,
209// that doesn't work, since it makes it impossible to define generic
210// equality on rope iterators.  According to the draft standard, the
211// template parameters for such an equality operator cannot be inferred
212// from the occurence of a member class as a parameter.
213// (SGI compilers in fact allow this, but the __result wouldn't be
214// portable.)
215// Similarly, some of the static member functions are member functions
216// only to avoid polluting the global namespace, and to circumvent
217// restrictions on type inference for template functions.
218//
219
220template<class _CharT, class _Alloc=__STL_DEFAULT_ALLOCATOR(_CharT)> class rope;
221template<class _CharT, class _Alloc> struct _Rope_RopeConcatenation;
222template<class _CharT, class _Alloc> struct _Rope_RopeLeaf;
223template<class _CharT, class _Alloc> struct _Rope_RopeFunction;
224template<class _CharT, class _Alloc> struct _Rope_RopeSubstring;
225template<class _CharT, class _Alloc> class _Rope_iterator;
226template<class _CharT, class _Alloc> class _Rope_const_iterator;
227template<class _CharT, class _Alloc> class _Rope_char_ref_proxy;
228template<class _CharT, class _Alloc> class _Rope_char_ptr_proxy;
229
230//
231// The internal data structure for representing a rope.  This is
232// private to the implementation.  A rope is really just a pointer
233// to one of these.
234//
235// A few basic functions for manipulating this data structure
236// are members of _RopeRep.  Most of the more complex algorithms
237// are implemented as rope members.
238//
239// Some of the static member functions of _RopeRep have identically
240// named functions in rope that simply invoke the _RopeRep versions.
241//
242// A macro to introduce various allocation and deallocation functions
243// These need to be defined differently depending on whether or not
244// we are using standard conforming allocators, and whether the allocator
245// instances have real state.  Thus this macro is invoked repeatedly
246// with different definitions of __ROPE_DEFINE_ALLOC.
247// __ROPE_DEFINE_ALLOC(type,name) defines
248//   type * name_allocate(size_t) and
249//   void name_deallocate(tipe *, size_t)
250// Both functions may or may not be static.
251
252#define __ROPE_DEFINE_ALLOCS(__a) \
253        __ROPE_DEFINE_ALLOC(_CharT,_Data) /* character data */ \
254        typedef _Rope_RopeConcatenation<_CharT,__a> __C; \
255        __ROPE_DEFINE_ALLOC(__C,_C) \
256        typedef _Rope_RopeLeaf<_CharT,__a> __L; \
257        __ROPE_DEFINE_ALLOC(__L,_L) \
258        typedef _Rope_RopeFunction<_CharT,__a> __F; \
259        __ROPE_DEFINE_ALLOC(__F,_F) \
260        typedef _Rope_RopeSubstring<_CharT,__a> __S; \
261        __ROPE_DEFINE_ALLOC(__S,_S)
262
263//  Internal rope nodes potentially store a copy of the allocator
264//  instance used to allocate them.  This is mostly redundant.
265//  But the alternative would be to pass allocator instances around
266//  in some form to nearly all internal functions, since any pointer
267//  assignment may result in a zero reference count and thus require
268//  deallocation.
269//  The _Rope_rep_base class encapsulates
270//  the differences between SGI-style allocators and standard-conforming
271//  allocators.
272
273#ifdef __STL_USE_STD_ALLOCATORS
274
275#define __STATIC_IF_SGI_ALLOC  /* not static */
276
277// Base class for ordinary allocators.
278template <class _CharT, class _Allocator, bool _IsStatic>
279class _Rope_rep_alloc_base {
280public:
281  typedef typename _Alloc_traits<_CharT,_Allocator>::allocator_type
282          allocator_type;
283  allocator_type get_allocator() const { return _M_data_allocator; }
284  _Rope_rep_alloc_base(size_t __size, const allocator_type& __a)
285        : _M_size(__size), _M_data_allocator(__a) {}
286  size_t _M_size;       // This is here only to avoid wasting space
287                // for an otherwise empty base class.
288
289
290protected:
291    allocator_type _M_data_allocator;
292
293# define __ROPE_DEFINE_ALLOC(_Tp, __name) \
294        typedef typename \
295          _Alloc_traits<_Tp,_Allocator>::allocator_type __name##Allocator; \
296        /*static*/ _Tp * __name##_allocate(size_t __n) \
297          { return __name##Allocator(_M_data_allocator).allocate(__n); } \
298        void __name##_deallocate(_Tp* __p, size_t __n) \
299          { __name##Allocator(_M_data_allocator).deallocate(__p, __n); }
300  __ROPE_DEFINE_ALLOCS(_Allocator);
301# undef __ROPE_DEFINE_ALLOC
302};
303
304// Specialization for allocators that have the property that we don't
305//  actually have to store an allocator object.
306template <class _CharT, class _Allocator>
307class _Rope_rep_alloc_base<_CharT,_Allocator,true> {
308public:
309  typedef typename _Alloc_traits<_CharT,_Allocator>::allocator_type
310          allocator_type;
311  allocator_type get_allocator() const { return allocator_type(); }
312  _Rope_rep_alloc_base(size_t __size, const allocator_type&)
313                : _M_size(__size) {}
314  size_t _M_size;
315
316protected:
317
318# define __ROPE_DEFINE_ALLOC(_Tp, __name) \
319        typedef typename \
320          _Alloc_traits<_Tp,_Allocator>::_Alloc_type __name##Alloc; \
321        typedef typename \
322          _Alloc_traits<_Tp,_Allocator>::allocator_type __name##Allocator; \
323        static _Tp* __name##_allocate(size_t __n) \
324                { return __name##Alloc::allocate(__n); } \
325        void __name##_deallocate(_Tp *__p, size_t __n) \
326                { __name##Alloc::deallocate(__p, __n); }
327  __ROPE_DEFINE_ALLOCS(_Allocator);
328# undef __ROPE_DEFINE_ALLOC
329};
330
331template <class _CharT, class _Alloc>
332struct _Rope_rep_base
333  : public _Rope_rep_alloc_base<_CharT,_Alloc,
334                                _Alloc_traits<_CharT,_Alloc>::_S_instanceless>
335{
336  typedef _Rope_rep_alloc_base<_CharT,_Alloc,
337                               _Alloc_traits<_CharT,_Alloc>::_S_instanceless>
338          _Base;
339  typedef typename _Base::allocator_type allocator_type;
340  _Rope_rep_base(size_t __size, const allocator_type& __a)
341    : _Base(__size, __a) {}
342};
343
344#else /* !__STL_USE_STD_ALLOCATORS */
345
346#define __STATIC_IF_SGI_ALLOC static
347
348template <class _CharT, class _Alloc>
349class _Rope_rep_base {
350public:
351  typedef _Alloc allocator_type;
352  static allocator_type get_allocator() { return allocator_type(); }
353  _Rope_rep_base(size_t __size, const allocator_type&) : _M_size(__size) {}
354  size_t _M_size;
355
356protected:
357
358# define __ROPE_DEFINE_ALLOC(_Tp, __name) \
359        typedef simple_alloc<_Tp, _Alloc> __name##Alloc; \
360        static _Tp* __name##_allocate(size_t __n) \
361                { return __name##Alloc::allocate(__n); } \
362        static void __name##_deallocate(_Tp* __p, size_t __n) \
363                { __name##Alloc::deallocate(__p, __n); }
364  __ROPE_DEFINE_ALLOCS(_Alloc);
365# undef __ROPE_DEFINE_ALLOC
366};
367
368#endif /* __STL_USE_STD_ALLOCATORS */
369
370
371template<class _CharT, class _Alloc>
372struct _Rope_RopeRep : public _Rope_rep_base<_CharT,_Alloc> {
373    public:
374    enum { _S_max_rope_depth = 45 };
375    enum _Tag {_S_leaf, _S_concat, _S_substringfn, _S_function};
376    _Tag _M_tag:8;
377    bool _M_is_balanced:8;
378    unsigned char _M_depth;
379    __GC_CONST _CharT* _M_c_string;
380                        /* Flattened version of string, if needed.  */
381                        /* typically 0.                             */
382                        /* If it's not 0, then the memory is owned  */
383                        /* by this node.                            */
384                        /* In the case of a leaf, this may point to */
385                        /* the same memory as the data field.       */
386    typedef _Rope_rep_base<_CharT,_Alloc>::allocator_type allocator_type;
387    _Rope_RopeRep(_Tag __t, int __d, bool __b, size_t __size,
388                  allocator_type __a)
389        : _Rope_rep_base<_CharT,_Alloc>(__size, __a),
390          _M_tag(__t), _M_depth(__d), _M_is_balanced(__b), _M_c_string(0)
391    {
392#       ifndef __GC
393            _M_refcount = 1;
394            _M_init_refcount_lock();
395#       endif
396    }
397#   ifndef __GC
398#       if defined(__STL_WIN32THREADS)
399            long _M_refcount;   // InterlockedIncrement wants a long *
400#       else
401            size_t _M_refcount;
402#       endif
403        // We count references from rope instances
404        // and references from other rope nodes.  We
405        // do not count const_iterator references.
406        // Iterator references are counted so that rope modifications
407        // can be detected after the fact.
408        // Generally function results are counted, i.__e.
409        // a pointer returned by a function is included at the
410        // point at which the pointer is returned.
411        // The recipient should decrement the count if the
412        // __result is not needed.
413        // Generally function arguments are not reflected
414        // in the reference count.  The callee should increment
415        // the count before saving the argument someplace that
416        // will outlive the call.
417#   endif
418#   ifndef __GC
419#       ifdef __STL_SGI_THREADS
420            // Reference counting with multiple threads and no
421            // hardware or thread package support is pretty awful.
422            // Mutexes are normally too expensive.
423            // We'll assume a COMPARE_AND_SWAP(destp, __old, new)
424            // operation, which might be cheaper.
425#           if __mips < 3 || !(defined (_ABIN32) || defined(_ABI64))
426#               define __add_and_fetch(l,v) add_then_test((unsigned long*)l,v)
427#           endif
428            void _M_init_refcount_lock() {}
429            void _M_incr_refcount ()
430            {
431                __add_and_fetch(&_M_refcount, 1);
432            }
433            size_t _M_decr_refcount ()
434            {
435                return __add_and_fetch(&_M_refcount, (size_t)(-1));
436            }
437#       elif defined(__STL_WIN32THREADS)
438            void _M_init_refcount_lock() {}
439            void _M_incr_refcount ()
440            {
441                InterlockedIncrement(&_M_refcount);
442            }
443            size_t _M_decr_refcount ()
444            {
445                return InterlockedDecrement(&_M_refcount);
446            }
447#	elif defined(__STL_PTHREADS)
448            // This should be portable, but performance is expected
449            // to be quite awful.  This really needs platform specific
450            // code.
451            pthread_mutex_t _M_refcount_lock;
452            void _M_init_refcount_lock() {
453                pthread_mutex_init(&_M_refcount_lock, 0);
454            }
455            void _M_incr_refcount ()
456            {
457                pthread_mutex_lock(&_M_refcount_lock);
458                ++_M_refcount;
459                pthread_mutex_unlock(&_M_refcount_lock);
460            }
461            size_t _M_decr_refcount ()
462            {
463                size_t __result;
464                pthread_mutex_lock(&_M_refcount_lock);
465                __result = --_M_refcount;
466                pthread_mutex_unlock(&_M_refcount_lock);
467                return __result;
468            }
469#       else
470            void _M_init_refcount_lock() {}
471            void _M_incr_refcount ()
472            {
473                ++_M_refcount;
474            }
475            size_t _M_decr_refcount ()
476            {
477                --_M_refcount;
478                return _M_refcount;
479            }
480#       endif
481#   else
482        void _M_incr_refcount () {}
483#   endif
484#   ifdef __STL_USE_STD_ALLOCATORS
485        static void _S_free_string(__GC_CONST _CharT*, size_t __len,
486                                   allocator_type __a);
487#       define __STL_FREE_STRING(__s, __l, __a) _S_free_string(__s, __l, __a);
488#   else
489        static void _S_free_string(__GC_CONST _CharT*, size_t __len);
490#       define __STL_FREE_STRING(__s, __l, __a) _S_free_string(__s, __l);
491#   endif
492                        // Deallocate data section of a leaf.
493                        // This shouldn't be a member function.
494                        // But its hard to do anything else at the
495                        // moment, because it's templatized w.r.t.
496                        // an allocator.
497                        // Does nothing if __GC is defined.
498#   ifndef __GC
499          void _M_free_c_string();
500          void _M_free_tree();
501                        // Deallocate t. Assumes t is not 0.
502          void _M_unref_nonnil()
503          {
504              if (0 == _M_decr_refcount()) _M_free_tree();
505          }
506          void _M_ref_nonnil()
507          {
508              _M_incr_refcount();
509          }
510          static void _S_unref(_Rope_RopeRep* __t)
511          {
512              if (0 != __t) {
513                  __t->_M_unref_nonnil();
514              }
515          }
516          static void _S_ref(_Rope_RopeRep* __t)
517          {
518              if (0 != __t) __t->_M_incr_refcount();
519          }
520          static void _S_free_if_unref(_Rope_RopeRep* __t)
521          {
522              if (0 != __t && 0 == __t->_M_refcount) __t->_M_free_tree();
523          }
524#   else /* __GC */
525          void _M_unref_nonnil() {}
526          void _M_ref_nonnil() {}
527          static void _S_unref(_Rope_RopeRep*) {}
528          static void _S_ref(_Rope_RopeRep*) {}
529          static void _S_free_if_unref(_Rope_RopeRep*) {}
530#   endif
531
532};
533
534template<class _CharT, class _Alloc>
535struct _Rope_RopeLeaf : public _Rope_RopeRep<_CharT,_Alloc> {
536  public:
537    // Apparently needed by VC++
538    // The data fields of leaves are allocated with some
539    // extra space, to accomodate future growth and for basic
540    // character types, to hold a trailing eos character.
541    enum { _S_alloc_granularity = 8 };
542    static size_t _S_rounded_up_size(size_t __n) {
543        size_t __size_with_eos;
544
545        if (_S_is_basic_char_type((_CharT*)0)) {
546            __size_with_eos = __n + 1;
547        } else {
548            __size_with_eos = __n;
549        }
550#       ifdef __GC
551           return __size_with_eos;
552#       else
553           // Allow slop for in-place expansion.
554           return (__size_with_eos + _S_alloc_granularity-1)
555                        &~ (_S_alloc_granularity-1);
556#       endif
557    }
558    __GC_CONST _CharT* _M_data; /* Not necessarily 0 terminated. */
559                                /* The allocated size is         */
560                                /* _S_rounded_up_size(size), except */
561                                /* in the GC case, in which it   */
562                                /* doesn't matter.               */
563    typedef _Rope_rep_base<_CharT,_Alloc>::allocator_type allocator_type;
564    _Rope_RopeLeaf(__GC_CONST _CharT* __d, size_t __size, allocator_type __a)
565        : _Rope_RopeRep<_CharT,_Alloc>(_S_leaf, 0, true, __size, __a),
566	  _M_data(__d)
567        {
568        __stl_assert(__size > 0);
569        if (_S_is_basic_char_type((_CharT *)0)) {
570            // already eos terminated.
571            _M_c_string = __d;
572        }
573    }
574        // The constructor assumes that d has been allocated with
575        // the proper allocator and the properly padded size.
576        // In contrast, the destructor deallocates the data:
577# ifndef __GC
578    ~_Rope_RopeLeaf() {
579        if (_M_data != _M_c_string) {
580            _M_free_c_string();
581        }
582        __STL_FREE_STRING(_M_data, _M_size, get_allocator());
583    }
584# endif
585};
586
587template<class _CharT, class _Alloc>
588struct _Rope_RopeConcatenation : public _Rope_RopeRep<_CharT,_Alloc> {
589  public:
590    _Rope_RopeRep<_CharT,_Alloc>* _M_left;
591    _Rope_RopeRep<_CharT,_Alloc>* _M_right;
592    typedef _Rope_rep_base<_CharT,_Alloc>::allocator_type allocator_type;
593    _Rope_RopeConcatenation(_Rope_RopeRep<_CharT,_Alloc>* __l,
594                             _Rope_RopeRep<_CharT,_Alloc>* __r,
595                             allocator_type __a)
596      : _Rope_RopeRep<_CharT,_Alloc>(
597          _S_concat, max(__l->_M_depth, __r->_M_depth) + 1, false,
598          __l->_M_size + __r->_M_size, __a),
599      _M_left(__l), _M_right(__r)
600      {}
601# ifndef __GC
602    ~_Rope_RopeConcatenation() {
603        _M_free_c_string();
604        _M_left->_M_unref_nonnil();
605        _M_right->_M_unref_nonnil();
606    }
607# endif
608};
609
610template<class _CharT, class _Alloc>
611struct _Rope_RopeFunction : public _Rope_RopeRep<_CharT,_Alloc> {
612  public:
613    char_producer<_CharT>* _M_fn;
614#   ifndef __GC
615      bool _M_delete_when_done; // Char_producer is owned by the
616                                // rope and should be explicitly
617                                // deleted when the rope becomes
618                                // inaccessible.
619#   else
620      // In the GC case, we either register the rope for
621      // finalization, or not.  Thus the field is unnecessary;
622      // the information is stored in the collector data structures.
623      // We do need a finalization procedure to be invoked by the
624      // collector.
625      static void _S_fn_finalization_proc(void * __tree, void *) {
626        delete ((_Rope_RopeFunction *)__tree) -> _M_fn;
627      }
628#   endif
629    typedef _Rope_rep_base<_CharT,_Alloc>::allocator_type allocator_type;
630    _Rope_RopeFunction(char_producer<_CharT>* __f, size_t __size,
631                        bool __d, allocator_type __a)
632      :_Rope_RopeRep<_CharT,_Alloc>(_S_function, 0, true, __size, __a),
633       _M_fn(__f)
634#       ifndef __GC
635      , _M_delete_when_done(__d)
636#       endif
637          {
638        __stl_assert(__size > 0);
639#       ifdef __GC
640            if (__d) {
641                GC_REGISTER_FINALIZER(
642                  this, _Rope_RopeFunction::_S_fn_finalization_proc, 0, 0, 0);
643            }
644#       endif
645    }
646# ifndef __GC
647    ~_Rope_RopeFunction() {
648          _M_free_c_string();
649          if (_M_delete_when_done) {
650              delete _M_fn;
651          }
652    }
653# endif
654};
655// Substring results are usually represented using just
656// concatenation nodes.  But in the case of very long flat ropes
657// or ropes with a functional representation that isn't practical.
658// In that case, we represent the __result as a special case of
659// RopeFunction, whose char_producer points back to the rope itself.
660// In all cases except repeated substring operations and
661// deallocation, we treat the __result as a RopeFunction.
662template<class _CharT, class _Alloc>
663struct _Rope_RopeSubstring : public _Rope_RopeFunction<_CharT,_Alloc>,
664                             public char_producer<_CharT> {
665  public:
666    // XXX this whole class should be rewritten.
667    _Rope_RopeRep<_CharT,_Alloc>* _M_base;      // not 0
668    size_t _M_start;
669    virtual void operator()(size_t __start_pos, size_t __req_len,
670                            _CharT* __buffer) {
671        switch(_M_base->_M_tag) {
672            case _S_function:
673            case _S_substringfn:
674              {
675                char_producer<_CharT>* __fn =
676                        ((_Rope_RopeFunction<_CharT,_Alloc>*)_M_base)->_M_fn;
677                __stl_assert(__start_pos + __req_len <= _M_size);
678                __stl_assert(_M_start + _M_size <= _M_base->_M_size);
679                (*__fn)(__start_pos + _M_start, __req_len, __buffer);
680              }
681              break;
682            case _S_leaf:
683              {
684                __GC_CONST _CharT* __s =
685                        ((_Rope_RopeLeaf<_CharT,_Alloc>*)_M_base)->_M_data;
686                uninitialized_copy_n(__s + __start_pos + _M_start, __req_len,
687                                     __buffer);
688              }
689              break;
690            default:
691              __stl_assert(false);
692        }
693    }
694    typedef _Rope_rep_base<_CharT,_Alloc>::allocator_type allocator_type;
695    _Rope_RopeSubstring(_Rope_RopeRep<_CharT,_Alloc>* __b, size_t __s,
696                          size_t __l, allocator_type __a)
697      : _Rope_RopeFunction<_CharT,_Alloc>(this, __l, false, __a), _M_base(__b)
698      , _M_start(__s)
699    {
700        __stl_assert(__l > 0);
701        __stl_assert(__s + __l <= __b->_M_size);
702#       ifndef __GC
703            _M_base->_M_ref_nonnil();
704#       endif
705        _M_tag = _S_substringfn;
706    }
707    virtual ~_Rope_RopeSubstring()
708      {
709#       ifndef __GC
710          _M_base->_M_unref_nonnil();
711          // _M_free_c_string();  -- done by parent class
712#       endif
713      }
714};
715
716
717// Self-destructing pointers to Rope_rep.
718// These are not conventional smart pointers.  Their
719// only purpose in life is to ensure that unref is called
720// on the pointer either at normal exit or if an exception
721// is raised.  It is the caller's responsibility to
722// adjust reference counts when these pointers are initialized
723// or assigned to.  (This convention significantly reduces
724// the number of potentially expensive reference count
725// updates.)
726#ifndef __GC
727  template<class _CharT, class _Alloc>
728  struct _Rope_self_destruct_ptr {
729    _Rope_RopeRep<_CharT,_Alloc>* _M_ptr;
730    ~_Rope_self_destruct_ptr()
731      { _Rope_RopeRep<_CharT,_Alloc>::_S_unref(_M_ptr); }
732#   ifdef __STL_USE_EXCEPTIONS
733        _Rope_self_destruct_ptr() : _M_ptr(0) {};
734#   else
735        _Rope_self_destruct_ptr() {};
736#   endif
737    _Rope_self_destruct_ptr(_Rope_RopeRep<_CharT,_Alloc>* __p) : _M_ptr(__p) {}
738    _Rope_RopeRep<_CharT,_Alloc>& operator*() { return *_M_ptr; }
739    _Rope_RopeRep<_CharT,_Alloc>* operator->() { return _M_ptr; }
740    operator _Rope_RopeRep<_CharT,_Alloc>*() { return _M_ptr; }
741    _Rope_self_destruct_ptr& operator= (_Rope_RopeRep<_CharT,_Alloc>* __x)
742        { _M_ptr = __x; return *this; }
743  };
744#endif
745
746// Dereferencing a nonconst iterator has to return something
747// that behaves almost like a reference.  It's not possible to
748// return an actual reference since assignment requires extra
749// work.  And we would get into the same problems as with the
750// CD2 version of basic_string.
751template<class _CharT, class _Alloc>
752class _Rope_char_ref_proxy {
753    friend class rope<_CharT,_Alloc>;
754    friend class _Rope_iterator<_CharT,_Alloc>;
755    friend class _Rope_char_ptr_proxy<_CharT,_Alloc>;
756#   ifdef __GC
757        typedef _Rope_RopeRep<_CharT,_Alloc>* _Self_destruct_ptr;
758#   else
759        typedef _Rope_self_destruct_ptr<_CharT,_Alloc> _Self_destruct_ptr;
760#   endif
761    typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
762    typedef rope<_CharT,_Alloc> _My_rope;
763    size_t _M_pos;
764    _CharT _M_current;
765    bool _M_current_valid;
766    _My_rope* _M_root;     // The whole rope.
767  public:
768    _Rope_char_ref_proxy(_My_rope* __r, size_t __p) :
769        _M_pos(__p), _M_current_valid(false), _M_root(__r) {}
770    _Rope_char_ref_proxy(const _Rope_char_ref_proxy& __x) :
771        _M_pos(__x._M_pos), _M_current_valid(false), _M_root(__x._M_root) {}
772        // Don't preserve cache if the reference can outlive the
773        // expression.  We claim that's not possible without calling
774        // a copy constructor or generating reference to a proxy
775        // reference.  We declare the latter to have undefined semantics.
776    _Rope_char_ref_proxy(_My_rope* __r, size_t __p,
777                    _CharT __c) :
778        _M_pos(__p), _M_current(__c), _M_current_valid(true), _M_root(__r) {}
779    inline operator _CharT () const;
780    _Rope_char_ref_proxy& operator= (_CharT __c);
781    _Rope_char_ptr_proxy<_CharT,_Alloc> operator& () const;
782    _Rope_char_ref_proxy& operator= (const _Rope_char_ref_proxy& __c) {
783        return operator=((_CharT)__c);
784    }
785};
786
787#ifdef __STL_FUNCTION_TMPL_PARTIAL_ORDER
788    template<class _CharT, class __Alloc>
789    inline void swap(_Rope_char_ref_proxy <_CharT, __Alloc > __a,
790                     _Rope_char_ref_proxy <_CharT, __Alloc > __b) {
791        _CharT __tmp = __a;
792        __a = __b;
793        __b = __tmp;
794    }
795#else
796// There is no really acceptable way to handle this.  The default
797// definition of swap doesn't work for proxy references.
798// It can't really be made to work, even with ugly hacks, since
799// the only unusual operation it uses is the copy constructor, which
800// is needed for other purposes.  We provide a macro for
801// full specializations, and instantiate the most common case.
802# define _ROPE_SWAP_SPECIALIZATION(_CharT, __Alloc) \
803    inline void swap(_Rope_char_ref_proxy <_CharT, __Alloc > __a, \
804                     _Rope_char_ref_proxy <_CharT, __Alloc > __b) { \
805        _CharT __tmp = __a; \
806        __a = __b; \
807        __b = __tmp; \
808    }
809
810_ROPE_SWAP_SPECIALIZATION(char,__STL_DEFAULT_ALLOCATOR(char))
811
812#endif /* !__STL_FUNCTION_TMPL_PARTIAL_ORDER */
813
814template<class _CharT, class _Alloc>
815class _Rope_char_ptr_proxy {
816    // XXX this class should be rewritten.
817    friend class _Rope_char_ref_proxy<_CharT,_Alloc>;
818    size_t _M_pos;
819    rope<_CharT,_Alloc>* _M_root;     // The whole rope.
820  public:
821    _Rope_char_ptr_proxy(const _Rope_char_ref_proxy<_CharT,_Alloc>& __x)
822      : _M_pos(__x._M_pos), _M_root(__x._M_root) {}
823    _Rope_char_ptr_proxy(const _Rope_char_ptr_proxy& __x)
824      : _M_pos(__x._M_pos), _M_root(__x._M_root) {}
825    _Rope_char_ptr_proxy() {}
826    _Rope_char_ptr_proxy(_CharT* __x) : _M_root(0), _M_pos(0) {
827        __stl_assert(0 == __x);
828    }
829    _Rope_char_ptr_proxy&
830    operator= (const _Rope_char_ptr_proxy& __x) {
831        _M_pos = __x._M_pos;
832        _M_root = __x._M_root;
833        return *this;
834    }
835    friend bool operator==  __STL_NULL_TMPL_ARGS
836                (const _Rope_char_ptr_proxy<_CharT,_Alloc>& __x,
837                 const _Rope_char_ptr_proxy<_CharT,_Alloc>& __y);
838
839    _Rope_char_ref_proxy<_CharT,_Alloc> operator*() const {
840        return _Rope_char_ref_proxy<_CharT,_Alloc>(_M_root, _M_pos);
841    }
842};
843
844
845// Rope iterators:
846// Unlike in the C version, we cache only part of the stack
847// for rope iterators, since they must be efficiently copyable.
848// When we run out of cache, we have to reconstruct the iterator
849// value.
850// Pointers from iterators are not included in reference counts.
851// Iterators are assumed to be thread private.  Ropes can
852// be shared.
853
854#if defined(__sgi) && !defined(__GNUC__) && (_MIPS_SIM != _MIPS_SIM_ABI32)
855#pragma set woff 1375
856#endif
857
858template<class _CharT, class _Alloc>
859class _Rope_iterator_base
860  : public random_access_iterator<_CharT, ptrdiff_t> {
861    friend class rope<_CharT,_Alloc>;
862  public:
863    typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
864        // Borland doesnt want this to be protected.
865  protected:
866    enum { _S_path_cache_len = 4 }; // Must be <= 9.
867    enum { _S_iterator_buf_len = 15 };
868    size_t _M_current_pos;
869    _RopeRep* _M_root;     // The whole rope.
870    size_t _M_leaf_pos;    // Starting position for current leaf
871    __GC_CONST _CharT* _M_buf_start;
872                        // Buffer possibly
873                        // containing current char.
874    __GC_CONST _CharT* _M_buf_ptr;
875                        // Pointer to current char in buffer.
876                        // != 0 ==> buffer valid.
877    __GC_CONST _CharT* _M_buf_end;
878                        // One past __last valid char in buffer.
879    // What follows is the path cache.  We go out of our
880    // way to make this compact.
881    // Path_end contains the bottom section of the path from
882    // the root to the current leaf.
883    const _RopeRep* _M_path_end[_S_path_cache_len];
884    int _M_leaf_index;     // Last valid __pos in path_end;
885                        // _M_path_end[0] ... _M_path_end[leaf_index-1]
886                        // point to concatenation nodes.
887    unsigned char _M_path_directions;
888                          // (path_directions >> __i) & 1 is 1
889                          // iff we got from _M_path_end[leaf_index - __i - 1]
890                          // to _M_path_end[leaf_index - __i] by going to the
891                          // __right. Assumes path_cache_len <= 9.
892    _CharT _M_tmp_buf[_S_iterator_buf_len];
893                        // Short buffer for surrounding chars.
894                        // This is useful primarily for
895                        // RopeFunctions.  We put the buffer
896                        // here to avoid locking in the
897                        // multithreaded case.
898    // The cached path is generally assumed to be valid
899    // only if the buffer is valid.
900    static void _S_setbuf(_Rope_iterator_base& __x);
901                                        // Set buffer contents given
902                                        // path cache.
903    static void _S_setcache(_Rope_iterator_base& __x);
904                                        // Set buffer contents and
905                                        // path cache.
906    static void _S_setcache_for_incr(_Rope_iterator_base& __x);
907                                        // As above, but assumes path
908                                        // cache is valid for previous posn.
909    _Rope_iterator_base() {}
910    _Rope_iterator_base(_RopeRep* __root, size_t __pos)
911      : _M_root(__root), _M_current_pos(__pos), _M_buf_ptr(0) {}
912    void _M_incr(size_t __n);
913    void _M_decr(size_t __n);
914  public:
915    size_t index() const { return _M_current_pos; }
916    _Rope_iterator_base(const _Rope_iterator_base& __x) {
917        if (0 != __x._M_buf_ptr) {
918            *this = __x;
919        } else {
920            _M_current_pos = __x._M_current_pos;
921            _M_root = __x._M_root;
922            _M_buf_ptr = 0;
923        }
924    }
925};
926
927template<class _CharT, class _Alloc> class _Rope_iterator;
928
929template<class _CharT, class _Alloc>
930class _Rope_const_iterator : public _Rope_iterator_base<_CharT,_Alloc> {
931    friend class rope<_CharT,_Alloc>;
932  protected:
933    _Rope_const_iterator(const _RopeRep* __root, size_t __pos):
934                   _Rope_iterator_base<_CharT,_Alloc>(
935                     const_cast<_RopeRep*>(__root), __pos)
936                   // Only nonconst iterators modify root ref count
937    {}
938  public:
939    typedef _CharT reference;   // Really a value.  Returning a reference
940                                // Would be a mess, since it would have
941                                // to be included in refcount.
942    typedef const _CharT* pointer;
943
944  public:
945    _Rope_const_iterator() {};
946    _Rope_const_iterator(const _Rope_const_iterator& __x) :
947                                _Rope_iterator_base<_CharT,_Alloc>(__x) { }
948    _Rope_const_iterator(const _Rope_iterator<_CharT,_Alloc>& __x);
949    _Rope_const_iterator(const rope<_CharT,_Alloc>& __r, size_t __pos) :
950        _Rope_iterator_base<_CharT,_Alloc>(__r._M_tree_ptr, __pos) {}
951    _Rope_const_iterator& operator= (const _Rope_const_iterator& __x) {
952        if (0 != __x._M_buf_ptr) {
953            *(static_cast<_Rope_iterator_base<_CharT,_Alloc>*>(this)) = __x;
954        } else {
955            _M_current_pos = __x._M_current_pos;
956            _M_root = __x._M_root;
957            _M_buf_ptr = 0;
958        }
959        return(*this);
960    }
961    reference operator*() {
962        if (0 == _M_buf_ptr) _S_setcache(*this);
963        return *_M_buf_ptr;
964    }
965    _Rope_const_iterator& operator++() {
966        __GC_CONST _CharT* __next;
967        if (0 != _M_buf_ptr && (__next = _M_buf_ptr + 1) < _M_buf_end) {
968            _M_buf_ptr = __next;
969            ++_M_current_pos;
970        } else {
971            _M_incr(1);
972        }
973        return *this;
974    }
975    _Rope_const_iterator& operator+=(ptrdiff_t __n) {
976        if (__n >= 0) {
977            _M_incr(__n);
978        } else {
979            _M_decr(-__n);
980        }
981        return *this;
982    }
983    _Rope_const_iterator& operator--() {
984        _M_decr(1);
985        return *this;
986    }
987    _Rope_const_iterator& operator-=(ptrdiff_t __n) {
988        if (__n >= 0) {
989            _M_decr(__n);
990        } else {
991            _M_incr(-__n);
992        }
993        return *this;
994    }
995    _Rope_const_iterator operator++(int) {
996        size_t __old_pos = _M_current_pos;
997        _M_incr(1);
998        return _Rope_const_iterator<_CharT,_Alloc>(_M_root, __old_pos);
999        // This makes a subsequent dereference expensive.
1000        // Perhaps we should instead copy the iterator
1001        // if it has a valid cache?
1002    }
1003    _Rope_const_iterator operator--(int) {
1004        size_t __old_pos = _M_current_pos;
1005        _M_decr(1);
1006        return _Rope_const_iterator<_CharT,_Alloc>(_M_root, __old_pos);
1007    }
1008    friend _Rope_const_iterator<_CharT,_Alloc> operator- __STL_NULL_TMPL_ARGS
1009        (const _Rope_const_iterator<_CharT,_Alloc>& __x,
1010         ptrdiff_t __n);
1011    friend _Rope_const_iterator<_CharT,_Alloc> operator+ __STL_NULL_TMPL_ARGS
1012        (const _Rope_const_iterator<_CharT,_Alloc>& __x,
1013         ptrdiff_t __n);
1014    friend _Rope_const_iterator<_CharT,_Alloc> operator+ __STL_NULL_TMPL_ARGS
1015        (ptrdiff_t __n,
1016         const _Rope_const_iterator<_CharT,_Alloc>& __x);
1017    reference operator[](size_t __n) {
1018        return rope<_CharT,_Alloc>::_S_fetch(_M_root, _M_current_pos + __n);
1019    }
1020    friend bool operator== __STL_NULL_TMPL_ARGS
1021        (const _Rope_const_iterator<_CharT,_Alloc>& __x,
1022         const _Rope_const_iterator<_CharT,_Alloc>& __y);
1023    friend bool operator< __STL_NULL_TMPL_ARGS
1024        (const _Rope_const_iterator<_CharT,_Alloc>& __x,
1025         const _Rope_const_iterator<_CharT,_Alloc>& __y);
1026    friend ptrdiff_t operator- __STL_NULL_TMPL_ARGS
1027        (const _Rope_const_iterator<_CharT,_Alloc>& __x,
1028         const _Rope_const_iterator<_CharT,_Alloc>& __y);
1029};
1030
1031template<class _CharT, class _Alloc>
1032class _Rope_iterator : public _Rope_iterator_base<_CharT,_Alloc> {
1033    friend class rope<_CharT,_Alloc>;
1034  protected:
1035    rope<_CharT,_Alloc>* _M_root_rope;
1036        // root is treated as a cached version of this,
1037        // and is used to detect changes to the underlying
1038        // rope.
1039        // Root is included in the reference count.
1040        // This is necessary so that we can detect changes reliably.
1041        // Unfortunately, it requires careful bookkeeping for the
1042        // nonGC case.
1043    _Rope_iterator(rope<_CharT,_Alloc>* __r, size_t __pos)
1044      : _Rope_iterator_base<_CharT,_Alloc>(__r->_M_tree_ptr, __pos),
1045        _M_root_rope(__r)
1046      { _RopeRep::_S_ref(_M_root); }
1047
1048    void _M_check();
1049  public:
1050    typedef _Rope_char_ref_proxy<_CharT,_Alloc>  reference;
1051    typedef _Rope_char_ref_proxy<_CharT,_Alloc>* pointer;
1052
1053  public:
1054    rope<_CharT,_Alloc>& container() { return *_M_root_rope; }
1055    _Rope_iterator() {
1056        _M_root = 0;  // Needed for reference counting.
1057    };
1058    _Rope_iterator(const _Rope_iterator& __x) :
1059        _Rope_iterator_base<_CharT,_Alloc>(__x) {
1060        _M_root_rope = __x._M_root_rope;
1061        _RopeRep::_S_ref(_M_root);
1062    }
1063    _Rope_iterator(rope<_CharT,_Alloc>& __r, size_t __pos);
1064    ~_Rope_iterator() {
1065        _RopeRep::_S_unref(_M_root);
1066    }
1067    _Rope_iterator& operator= (const _Rope_iterator& __x) {
1068        _RopeRep* __old = _M_root;
1069
1070        _RopeRep::_S_ref(__x._M_root);
1071        if (0 != __x._M_buf_ptr) {
1072            _M_root_rope = __x._M_root_rope;
1073            *(static_cast<_Rope_iterator_base<_CharT,_Alloc>*>(this)) = __x;
1074        } else {
1075            _M_current_pos = __x._M_current_pos;
1076            _M_root = __x._M_root;
1077            _M_root_rope = __x._M_root_rope;
1078            _M_buf_ptr = 0;
1079        }
1080        _RopeRep::_S_unref(__old);
1081        return(*this);
1082    }
1083    reference operator*() {
1084        _M_check();
1085        if (0 == _M_buf_ptr) {
1086            return _Rope_char_ref_proxy<_CharT,_Alloc>(
1087               _M_root_rope, _M_current_pos);
1088        } else {
1089            return _Rope_char_ref_proxy<_CharT,_Alloc>(
1090               _M_root_rope, _M_current_pos, *_M_buf_ptr);
1091        }
1092    }
1093    _Rope_iterator& operator++() {
1094        _M_incr(1);
1095        return *this;
1096    }
1097    _Rope_iterator& operator+=(difference_type __n) {
1098        if (__n >= 0) {
1099            _M_incr(__n);
1100        } else {
1101            _M_decr(-__n);
1102        }
1103        return *this;
1104    }
1105    _Rope_iterator& operator--() {
1106        _M_decr(1);
1107        return *this;
1108    }
1109    _Rope_iterator& operator-=(difference_type __n) {
1110        if (__n >= 0) {
1111            _M_decr(__n);
1112        } else {
1113            _M_incr(-__n);
1114        }
1115        return *this;
1116    }
1117    _Rope_iterator operator++(int) {
1118        size_t __old_pos = _M_current_pos;
1119        _M_incr(1);
1120        return _Rope_iterator<_CharT,_Alloc>(_M_root_rope, __old_pos);
1121    }
1122    _Rope_iterator operator--(int) {
1123        size_t __old_pos = _M_current_pos;
1124        _M_decr(1);
1125        return _Rope_iterator<_CharT,_Alloc>(_M_root_rope, __old_pos);
1126    }
1127    reference operator[](ptrdiff_t __n) {
1128        return _Rope_char_ref_proxy<_CharT,_Alloc>(
1129          _M_root_rope, _M_current_pos + __n);
1130    }
1131    friend bool operator== __STL_NULL_TMPL_ARGS
1132        (const _Rope_iterator<_CharT,_Alloc>& __x,
1133         const _Rope_iterator<_CharT,_Alloc>& __y);
1134    friend bool operator< __STL_NULL_TMPL_ARGS
1135        (const _Rope_iterator<_CharT,_Alloc>& __x,
1136         const _Rope_iterator<_CharT,_Alloc>& __y);
1137    friend ptrdiff_t operator- __STL_NULL_TMPL_ARGS
1138        (const _Rope_iterator<_CharT,_Alloc>& __x,
1139         const _Rope_iterator<_CharT,_Alloc>& __y);
1140    friend _Rope_iterator<_CharT,_Alloc> operator- __STL_NULL_TMPL_ARGS
1141        (const _Rope_iterator<_CharT,_Alloc>& __x,
1142         ptrdiff_t __n);
1143    friend _Rope_iterator<_CharT,_Alloc> operator+ __STL_NULL_TMPL_ARGS
1144        (const _Rope_iterator<_CharT,_Alloc>& __x,
1145         ptrdiff_t __n);
1146    friend _Rope_iterator<_CharT,_Alloc> operator+ __STL_NULL_TMPL_ARGS
1147        (ptrdiff_t __n,
1148         const _Rope_iterator<_CharT,_Alloc>& __x);
1149
1150};
1151
1152#if defined(__sgi) && !defined(__GNUC__) && (_MIPS_SIM != _MIPS_SIM_ABI32)
1153#pragma reset woff 1375
1154#endif
1155
1156//  The rope base class encapsulates
1157//  the differences between SGI-style allocators and standard-conforming
1158//  allocators.
1159
1160#ifdef __STL_USE_STD_ALLOCATORS
1161
1162// Base class for ordinary allocators.
1163template <class _CharT, class _Allocator, bool _IsStatic>
1164class _Rope_alloc_base {
1165public:
1166  typedef _Rope_RopeRep<_CharT,_Allocator> _RopeRep;
1167  typedef typename _Alloc_traits<_CharT,_Allocator>::allocator_type
1168          allocator_type;
1169  allocator_type get_allocator() const { return _M_data_allocator; }
1170  _Rope_alloc_base(_RopeRep *__t, const allocator_type& __a)
1171        : _M_tree_ptr(__t), _M_data_allocator(__a) {}
1172  _Rope_alloc_base(const allocator_type& __a)
1173        : _M_data_allocator(__a) {}
1174
1175protected:
1176  // The only data members of a rope:
1177    allocator_type _M_data_allocator;
1178    _RopeRep* _M_tree_ptr;
1179
1180# define __ROPE_DEFINE_ALLOC(_Tp, __name) \
1181        typedef typename \
1182          _Alloc_traits<_Tp,_Allocator>::allocator_type __name##Allocator; \
1183        _Tp* __name##_allocate(size_t __n) const \
1184          { return __name##Allocator(_M_data_allocator).allocate(__n); } \
1185        void __name##_deallocate(_Tp *__p, size_t __n) const \
1186                { __name##Allocator(_M_data_allocator).deallocate(__p, __n); }
1187  __ROPE_DEFINE_ALLOCS(_Allocator)
1188# undef __ROPE_DEFINE_ALLOC
1189};
1190
1191// Specialization for allocators that have the property that we don't
1192//  actually have to store an allocator object.
1193template <class _CharT, class _Allocator>
1194class _Rope_alloc_base<_CharT,_Allocator,true> {
1195public:
1196  typedef _Rope_RopeRep<_CharT,_Allocator> _RopeRep;
1197  typedef typename _Alloc_traits<_CharT,_Allocator>::allocator_type
1198          allocator_type;
1199  allocator_type get_allocator() const { return allocator_type(); }
1200  _Rope_alloc_base(_RopeRep *__t, const allocator_type&)
1201                : _M_tree_ptr(__t) {}
1202  _Rope_alloc_base(const allocator_type&) {}
1203
1204protected:
1205  // The only data member of a rope:
1206    _RopeRep *_M_tree_ptr;
1207
1208# define __ROPE_DEFINE_ALLOC(_Tp, __name) \
1209        typedef typename \
1210          _Alloc_traits<_Tp,_Allocator>::_Alloc_type __name##Alloc; \
1211        typedef typename \
1212          _Alloc_traits<_Tp,_Allocator>::allocator_type __name##Allocator; \
1213        static _Tp* __name##_allocate(size_t __n) \
1214          { return __name##Alloc::allocate(__n); } \
1215        static void __name##_deallocate(_Tp *__p, size_t __n) \
1216          { __name##Alloc::deallocate(__p, __n); }
1217  __ROPE_DEFINE_ALLOCS(_Allocator)
1218# undef __ROPE_DEFINE_ALLOC
1219};
1220
1221template <class _CharT, class _Alloc>
1222struct _Rope_base
1223  : public _Rope_alloc_base<_CharT,_Alloc,
1224                            _Alloc_traits<_CharT,_Alloc>::_S_instanceless>
1225{
1226  typedef _Rope_alloc_base<_CharT,_Alloc,
1227                            _Alloc_traits<_CharT,_Alloc>::_S_instanceless>
1228          _Base;
1229  typedef typename _Base::allocator_type allocator_type;
1230  _Rope_base(_RopeRep* __t, const allocator_type& __a) : _Base(__t, __a) {}
1231  _Rope_base(const allocator_type& __a) : _Base(__a) {}
1232};
1233
1234#else /* !__STL_USE_STD_ALLOCATORS */
1235
1236template <class _CharT, class _Alloc>
1237class _Rope_base {
1238public:
1239  typedef _Rope_RopeRep<_CharT, _Alloc> _RopeRep;
1240  typedef _Alloc allocator_type;
1241  static allocator_type get_allocator() { return allocator_type(); }
1242  _Rope_base(_RopeRep * __t, const allocator_type&) : _M_tree_ptr(__t) {}
1243  _Rope_base(const allocator_type&) {}
1244
1245protected:
1246  // The only data member of a rope:
1247    _RopeRep* _M_tree_ptr;
1248
1249# define __ROPE_DEFINE_ALLOC(_Tp, __name) \
1250        typedef simple_alloc<_Tp, _Alloc> __name##Alloc; \
1251        static _Tp* __name##_allocate(size_t __n) \
1252                { return __name##Alloc::allocate(__n); } \
1253        static void __name##_deallocate(_Tp *__p, size_t __n) \
1254                { __name##Alloc::deallocate(__p, __n); }
1255  __ROPE_DEFINE_ALLOCS(_Alloc)
1256# undef __ROPE_DEFINE_ALLOC
1257};
1258
1259#endif /* __STL_USE_STD_ALLOCATORS */
1260
1261
1262template <class _CharT, class _Alloc>
1263class rope : public _Rope_base<_CharT,_Alloc> {
1264    public:
1265        typedef _CharT value_type;
1266        typedef ptrdiff_t difference_type;
1267        typedef size_t size_type;
1268        typedef _CharT const_reference;
1269        typedef const _CharT* const_pointer;
1270        typedef _Rope_iterator<_CharT,_Alloc> iterator;
1271        typedef _Rope_const_iterator<_CharT,_Alloc> const_iterator;
1272        typedef _Rope_char_ref_proxy<_CharT,_Alloc> reference;
1273        typedef _Rope_char_ptr_proxy<_CharT,_Alloc> pointer;
1274
1275        friend class _Rope_iterator<_CharT,_Alloc>;
1276        friend class _Rope_const_iterator<_CharT,_Alloc>;
1277        friend struct _Rope_RopeRep<_CharT,_Alloc>;
1278        friend class _Rope_iterator_base<_CharT,_Alloc>;
1279        friend class _Rope_char_ptr_proxy<_CharT,_Alloc>;
1280        friend class _Rope_char_ref_proxy<_CharT,_Alloc>;
1281        friend struct _Rope_RopeSubstring<_CharT,_Alloc>;
1282
1283    protected:
1284        typedef _Rope_base<_CharT,_Alloc> _Base;
1285        typedef typename _Base::allocator_type allocator_type;
1286#       ifdef __STL_USE_NAMESPACES
1287          using _Base::_M_tree_ptr;
1288#       endif
1289        typedef __GC_CONST _CharT* _Cstrptr;
1290#       ifdef __STL_SGI_THREADS
1291            static _Cstrptr _S_atomic_swap(_Cstrptr* __p, _Cstrptr __q) {
1292#               if __mips < 3 || !(defined (_ABIN32) || defined(_ABI64))
1293                    return (_Cstrptr) test_and_set((unsigned long*)__p,
1294                                                   (unsigned long)__q);
1295#               else
1296                    return (_Cstrptr) __test_and_set((unsigned long*)__p,
1297                                                     (unsigned long)__q);
1298#               endif
1299            }
1300#       elif defined(__STL_WIN32THREADS)
1301            static _Cstrptr _S_atomic_swap(_Cstrptr* __p, _Cstrptr __q) {
1302                return (_Cstrptr) InterlockedExchange(
1303                  (LPLONG)__p, (LONG)__q);
1304            }
1305#       elif defined(__STL_PTHREADS)
1306            // This should be portable, but performance is expected
1307            // to be quite awful.  This really needs platform specific
1308            // code.
1309            static pthread_mutex_t _S_swap_lock;
1310            static _Cstrptr _S_atomic_swap(_Cstrptr* __p, _Cstrptr __q) {
1311                pthread_mutex_lock(&_S_swap_lock);
1312                _Cstrptr __result = *__p;
1313                *__p = __q;
1314                pthread_mutex_unlock(&_S_swap_lock);
1315                return __result;
1316            }
1317#       else
1318            static _Cstrptr _S_atomic_swap(_Cstrptr* __p, _Cstrptr __q) {
1319                _Cstrptr __result = *__p;
1320                *__p = __q;
1321                return __result;
1322            }
1323#       endif
1324
1325        static _CharT _S_empty_c_str[1];
1326
1327        static bool _S_is0(_CharT __c) { return __c == _S_eos((_CharT*)0); }
1328        enum { _S_copy_max = 23 };
1329                // For strings shorter than _S_copy_max, we copy to
1330                // concatenate.
1331
1332        typedef _Rope_RopeRep<_CharT,_Alloc> _RopeRep;
1333        typedef _Rope_RopeConcatenation<_CharT,_Alloc> _RopeConcatenation;
1334        typedef _Rope_RopeLeaf<_CharT,_Alloc> _RopeLeaf;
1335        typedef _Rope_RopeFunction<_CharT,_Alloc> _RopeFunction;
1336        typedef _Rope_RopeSubstring<_CharT,_Alloc> _RopeSubstring;
1337
1338        // Retrieve a character at the indicated position.
1339        static _CharT _S_fetch(_RopeRep* __r, size_type __pos);
1340
1341#       ifndef __GC
1342            // Obtain a pointer to the character at the indicated position.
1343            // The pointer can be used to change the character.
1344            // If such a pointer cannot be produced, as is frequently the
1345            // case, 0 is returned instead.
1346            // (Returns nonzero only if all nodes in the path have a refcount
1347            // of 1.)
1348            static _CharT* _S_fetch_ptr(_RopeRep* __r, size_type __pos);
1349#       endif
1350
1351        static bool _S_apply_to_pieces(
1352                                // should be template parameter
1353                                _Rope_char_consumer<_CharT>& __c,
1354                                const _RopeRep* __r,
1355                                size_t __begin, size_t __end);
1356                                // begin and end are assumed to be in range.
1357
1358#       ifndef __GC
1359          static void _S_unref(_RopeRep* __t)
1360          {
1361              _RopeRep::_S_unref(__t);
1362          }
1363          static void _S_ref(_RopeRep* __t)
1364          {
1365              _RopeRep::_S_ref(__t);
1366          }
1367#       else /* __GC */
1368          static void _S_unref(_RopeRep*) {}
1369          static void _S_ref(_RopeRep*) {}
1370#       endif
1371
1372
1373#       ifdef __GC
1374            typedef _Rope_RopeRep<_CharT,_Alloc>* _Self_destruct_ptr;
1375#       else
1376            typedef _Rope_self_destruct_ptr<_CharT,_Alloc> _Self_destruct_ptr;
1377#       endif
1378
1379        // _Result is counted in refcount.
1380        static _RopeRep* _S_substring(_RopeRep* __base,
1381                                    size_t __start, size_t __endp1);
1382
1383        static _RopeRep* _S_concat_char_iter(_RopeRep* __r,
1384                                          const _CharT* __iter, size_t __slen);
1385                // Concatenate rope and char ptr, copying __s.
1386                // Should really take an arbitrary iterator.
1387                // Result is counted in refcount.
1388        static _RopeRep* _S_destr_concat_char_iter(_RopeRep* __r,
1389                                          const _CharT* __iter, size_t __slen)
1390                // As above, but one reference to __r is about to be
1391                // destroyed.  Thus the pieces may be recycled if all
1392                // relevent reference counts are 1.
1393#           ifdef __GC
1394                // We can't really do anything since refcounts are unavailable.
1395                { return _S_concat_char_iter(__r, __iter, __slen); }
1396#           else
1397                ;
1398#           endif
1399
1400        static _RopeRep* _S_concat(_RopeRep* __left, _RopeRep* __right);
1401                // General concatenation on _RopeRep.  _Result
1402                // has refcount of 1.  Adjusts argument refcounts.
1403
1404   public:
1405        void apply_to_pieces( size_t __begin, size_t __end,
1406                              _Rope_char_consumer<_CharT>& __c) const {
1407            _S_apply_to_pieces(__c, _M_tree_ptr, __begin, __end);
1408        }
1409
1410
1411   protected:
1412
1413        static size_t _S_rounded_up_size(size_t __n) {
1414            return _RopeLeaf::_S_rounded_up_size(__n);
1415        }
1416
1417        static size_t _S_allocated_capacity(size_t __n) {
1418            if (_S_is_basic_char_type((_CharT*)0)) {
1419                return _S_rounded_up_size(__n) - 1;
1420            } else {
1421                return _S_rounded_up_size(__n);
1422            }
1423        }
1424
1425        // Allocate and construct a RopeLeaf using the supplied allocator
1426        // Takes ownership of s instead of copying.
1427        static _RopeLeaf* _S_new_RopeLeaf(__GC_CONST _CharT *__s,
1428                                          size_t __size, allocator_type __a)
1429        {
1430#           ifdef __STL_USE_STD_ALLOCATORS
1431              _RopeLeaf* __space = _LAllocator(__a).allocate(1);
1432#           else
1433              _RopeLeaf* __space = _L_allocate(1);
1434#           endif
1435            return new(__space) _RopeLeaf(__s, __size, __a);
1436        }
1437
1438        static _RopeConcatenation* _S_new_RopeConcatenation(
1439                        _RopeRep* __left, _RopeRep* __right,
1440                        allocator_type __a)
1441        {
1442#           ifdef __STL_USE_STD_ALLOCATORS
1443              _RopeConcatenation* __space = _CAllocator(__a).allocate(1);
1444#           else
1445              _RopeConcatenation* __space = _C_allocate(1);
1446#           endif
1447            return new(__space) _RopeConcatenation(__left, __right, __a);
1448        }
1449
1450        static _RopeFunction* _S_new_RopeFunction(char_producer<_CharT>* __f,
1451                size_t __size, bool __d, allocator_type __a)
1452        {
1453#           ifdef __STL_USE_STD_ALLOCATORS
1454              _RopeFunction* __space = _FAllocator(__a).allocate(1);
1455#           else
1456              _RopeFunction* __space = _F_allocate(1);
1457#           endif
1458            return new(__space) _RopeFunction(__f, __size, __d, __a);
1459        }
1460
1461        static _RopeSubstring* _S_new_RopeSubstring(
1462                _Rope_RopeRep<_CharT,_Alloc>* __b, size_t __s,
1463                size_t __l, allocator_type __a)
1464        {
1465#           ifdef __STL_USE_STD_ALLOCATORS
1466              _RopeSubstring* __space = _SAllocator(__a).allocate(1);
1467#           else
1468              _RopeSubstring* __space = _S_allocate(1);
1469#           endif
1470            return new(__space) _RopeSubstring(__b, __s, __l, __a);
1471        }
1472
1473#       ifdef __STL_USE_STD_ALLOCATORS
1474          static
1475          _RopeLeaf* _S_RopeLeaf_from_unowned_char_ptr(const _CharT *__s,
1476                       size_t __size, allocator_type __a)
1477#         define __STL_ROPE_FROM_UNOWNED_CHAR_PTR(__s, __size, __a) \
1478                _S_RopeLeaf_from_unowned_char_ptr(__s, __size, __a)
1479#       else
1480          static
1481          _RopeLeaf* _S_RopeLeaf_from_unowned_char_ptr2(const _CharT* __s,
1482                                                        size_t __size)
1483#         define __STL_ROPE_FROM_UNOWNED_CHAR_PTR(__s, __size, __a) \
1484               _S_RopeLeaf_from_unowned_char_ptr2(__s, __size)
1485#       endif
1486        {
1487            if (0 == __size) return 0;
1488#           ifdef __STL_USE_STD_ALLOCATORS
1489              _CharT* __buf = __a.allocate(_S_rounded_up_size(__size));
1490#           else
1491              _CharT* __buf = _Data_allocate(_S_rounded_up_size(__size));
1492              allocator_type __a = allocator_type();
1493#           endif
1494
1495            uninitialized_copy_n(__s, __size, __buf);
1496            _S_cond_store_eos(__buf[__size]);
1497            __STL_TRY {
1498              return _S_new_RopeLeaf(__buf, __size, __a);
1499            }
1500            __STL_UNWIND(_RopeRep::__STL_FREE_STRING(__buf, __size, __a))
1501        }
1502
1503
1504        // Concatenation of nonempty strings.
1505        // Always builds a concatenation node.
1506        // Rebalances if the result is too deep.
1507        // Result has refcount 1.
1508        // Does not increment left and right ref counts even though
1509        // they are referenced.
1510        static _RopeRep*
1511        _S_tree_concat(_RopeRep* __left, _RopeRep* __right);
1512
1513        // Concatenation helper functions
1514        static _RopeLeaf*
1515        _S_leaf_concat_char_iter(_RopeLeaf* __r,
1516                                 const _CharT* __iter, size_t __slen);
1517                // Concatenate by copying leaf.
1518                // should take an arbitrary iterator
1519                // result has refcount 1.
1520#       ifndef __GC
1521          static _RopeLeaf* _S_destr_leaf_concat_char_iter
1522                        (_RopeLeaf* __r, const _CharT* __iter, size_t __slen);
1523          // A version that potentially clobbers __r if __r->_M_refcount == 1.
1524#       endif
1525
1526        // A helper function for exponentiating strings.
1527        // This uses a nonstandard refcount convention.
1528        // The result has refcount 0.
1529        struct _Concat_fn
1530                : public binary_function<rope<_CharT,_Alloc>,
1531                                         rope<_CharT,_Alloc>,
1532                                         rope<_CharT,_Alloc> > {
1533                rope operator() (const rope& __x, const rope& __y) {
1534                    return __x + __y;
1535                }
1536        };
1537
1538        // Needed by the call to "power" used to build ropes
1539        // consisting of n copies of a character.
1540        friend rope identity_element(_Concat_fn)
1541        { return rope<_CharT,_Alloc>(); }
1542
1543        static size_t _S_char_ptr_len(const _CharT* __s);
1544                        // slightly generalized strlen
1545
1546        rope(_RopeRep* __t, const allocator_type& __a = allocator_type())
1547          : _Base(__t,__a) { }
1548
1549
1550        // Copy __r to the _CharT buffer.
1551        // Returns __buffer + __r->_M_size.
1552        // Assumes that buffer is uninitialized.
1553        static _CharT* _S_flatten(_RopeRep* __r, _CharT* __buffer);
1554
1555        // Again, with explicit starting position and length.
1556        // Assumes that buffer is uninitialized.
1557        static _CharT* _S_flatten(_RopeRep* __r,
1558                                  size_t __start, size_t __len,
1559                                  _CharT* __buffer);
1560
1561        static const unsigned long
1562          _S_min_len[_RopeRep::_S_max_rope_depth + 1];
1563
1564        static bool _S_is_balanced(_RopeRep* __r)
1565                { return (__r->_M_size >= _S_min_len[__r->_M_depth]); }
1566
1567        static bool _S_is_almost_balanced(_RopeRep* __r)
1568                { return (__r->_M_depth == 0 ||
1569                          __r->_M_size >= _S_min_len[__r->_M_depth - 1]); }
1570
1571        static bool _S_is_roughly_balanced(_RopeRep* __r)
1572                { return (__r->_M_depth <= 1 ||
1573                          __r->_M_size >= _S_min_len[__r->_M_depth - 2]); }
1574
1575        // Assumes the result is not empty.
1576        static _RopeRep* _S_concat_and_set_balanced(_RopeRep* __left,
1577                                                     _RopeRep* __right)
1578        {
1579            _RopeRep* __result = _S_concat(__left, __right);
1580            if (_S_is_balanced(__result)) __result->_M_is_balanced = true;
1581            return __result;
1582        }
1583
1584        // The basic rebalancing operation.  Logically copies the
1585        // rope.  The result has refcount of 1.  The client will
1586        // usually decrement the reference count of __r.
1587        // The result is within height 2 of balanced by the above
1588        // definition.
1589        static _RopeRep* _S_balance(_RopeRep* __r);
1590
1591        // Add all unbalanced subtrees to the forest of balanceed trees.
1592        // Used only by balance.
1593        static void _S_add_to_forest(_RopeRep*__r, _RopeRep** __forest);
1594
1595        // Add __r to forest, assuming __r is already balanced.
1596        static void _S_add_leaf_to_forest(_RopeRep* __r, _RopeRep** __forest);
1597
1598        // Print to stdout, exposing structure
1599        static void _S_dump(_RopeRep* __r, int __indent = 0);
1600
1601        // Return -1, 0, or 1 if __x < __y, __x == __y, or __x > __y resp.
1602        static int _S_compare(const _RopeRep* __x, const _RopeRep* __y);
1603
1604   public:
1605        bool empty() const { return 0 == _M_tree_ptr; }
1606
1607        // Comparison member function.  This is public only for those
1608        // clients that need a ternary comparison.  Others
1609        // should use the comparison operators below.
1610        int compare(const rope& __y) const {
1611            return _S_compare(_M_tree_ptr, __y._M_tree_ptr);
1612        }
1613
1614        rope(const _CharT* __s, const allocator_type& __a = allocator_type())
1615        : _Base(__STL_ROPE_FROM_UNOWNED_CHAR_PTR(__s, _S_char_ptr_len(__s),
1616                                                 __a),__a)
1617        { }
1618
1619        rope(const _CharT* __s, size_t __len,
1620             const allocator_type& __a = allocator_type())
1621        : _Base(__STL_ROPE_FROM_UNOWNED_CHAR_PTR(__s, __len, __a), __a)
1622        { }
1623
1624        // Should perhaps be templatized with respect to the iterator type
1625        // and use Sequence_buffer.  (It should perhaps use sequence_buffer
1626        // even now.)
1627        rope(const _CharT *__s, const _CharT *__e,
1628             const allocator_type& __a = allocator_type())
1629        : _Base(__STL_ROPE_FROM_UNOWNED_CHAR_PTR(__s, __e - __s, __a), __a)
1630        { }
1631
1632        rope(const const_iterator& __s, const const_iterator& __e,
1633             const allocator_type& __a = allocator_type())
1634        : _Base(_S_substring(__s._M_root, __s._M_current_pos,
1635                             __e._M_current_pos), __a)
1636        { }
1637
1638        rope(const iterator& __s, const iterator& __e,
1639             const allocator_type& __a = allocator_type())
1640        : _Base(_S_substring(__s._M_root, __s._M_current_pos,
1641                             __e._M_current_pos), __a)
1642        { }
1643
1644        rope(_CharT __c, const allocator_type& __a = allocator_type())
1645        : _Base(__a)
1646        {
1647            _CharT* __buf = _Data_allocate(_S_rounded_up_size(1));
1648
1649            construct(__buf, __c);
1650            __STL_TRY {
1651                _M_tree_ptr = _S_new_RopeLeaf(__buf, 1, __a);
1652            }
1653            __STL_UNWIND(_RopeRep::__STL_FREE_STRING(__buf, 1, __a))
1654        }
1655
1656        rope(size_t __n, _CharT __c,
1657             const allocator_type& __a = allocator_type());
1658
1659        rope(const allocator_type& __a = allocator_type())
1660        : _Base(0, __a) {}
1661
1662        // Construct a rope from a function that can compute its members
1663        rope(char_producer<_CharT> *__fn, size_t __len, bool __delete_fn,
1664             const allocator_type& __a = allocator_type())
1665            : _Base(__a)
1666        {
1667            _M_tree_ptr = (0 == __len) ?
1668               0 : _S_new_RopeFunction(__fn, __len, __delete_fn, __a);
1669        }
1670
1671        rope(const rope& __x, const allocator_type& __a = allocator_type())
1672        : _Base(__x._M_tree_ptr, __a)
1673        {
1674            _S_ref(_M_tree_ptr);
1675        }
1676
1677        ~rope()
1678        {
1679            _S_unref(_M_tree_ptr);
1680        }
1681
1682        rope& operator=(const rope& __x)
1683        {
1684            _RopeRep* __old = _M_tree_ptr;
1685#           ifdef __STL_USE_STD_ALLOCATORS
1686              __stl_assert(get_allocator() == __x.get_allocator());
1687#           endif
1688            _M_tree_ptr = __x._M_tree_ptr;
1689            _S_ref(_M_tree_ptr);
1690            _S_unref(__old);
1691            return(*this);
1692        }
1693
1694        void push_back(_CharT __x)
1695        {
1696            _RopeRep* __old = _M_tree_ptr;
1697            _M_tree_ptr = _S_concat_char_iter(_M_tree_ptr, &__x, 1);
1698            _S_unref(__old);
1699        }
1700
1701        void pop_back()
1702        {
1703            _RopeRep* __old = _M_tree_ptr;
1704            _M_tree_ptr =
1705              _S_substring(_M_tree_ptr, 0, _M_tree_ptr->_M_size - 1);
1706            _S_unref(__old);
1707        }
1708
1709        _CharT back() const
1710        {
1711            return _S_fetch(_M_tree_ptr, _M_tree_ptr->_M_size - 1);
1712        }
1713
1714        void push_front(_CharT __x)
1715        {
1716            _RopeRep* __old = _M_tree_ptr;
1717            _RopeRep* __left =
1718              __STL_ROPE_FROM_UNOWNED_CHAR_PTR(&__x, 1, get_allocator());
1719            __STL_TRY {
1720              _M_tree_ptr = _S_concat(__left, _M_tree_ptr);
1721              _S_unref(__old);
1722              _S_unref(__left);
1723            }
1724            __STL_UNWIND(_S_unref(__left))
1725        }
1726
1727        void pop_front()
1728        {
1729            _RopeRep* __old = _M_tree_ptr;
1730            _M_tree_ptr = _S_substring(_M_tree_ptr, 1, _M_tree_ptr->_M_size);
1731            _S_unref(__old);
1732        }
1733
1734        _CharT front() const
1735        {
1736            return _S_fetch(_M_tree_ptr, 0);
1737        }
1738
1739        void balance()
1740        {
1741            _RopeRep* __old = _M_tree_ptr;
1742            _M_tree_ptr = _S_balance(_M_tree_ptr);
1743            _S_unref(__old);
1744        }
1745
1746        void copy(_CharT* __buffer) const {
1747            destroy(__buffer, __buffer + size());
1748            _S_flatten(_M_tree_ptr, __buffer);
1749        }
1750
1751        // This is the copy function from the standard, but
1752        // with the arguments reordered to make it consistent with the
1753        // rest of the interface.
1754        // Note that this guaranteed not to compile if the draft standard
1755        // order is assumed.
1756        size_type copy(size_type __pos, size_type __n, _CharT* __buffer) const
1757        {
1758            size_t __size = size();
1759            size_t __len = (__pos + __n > __size? __size - __pos : __n);
1760
1761            destroy(__buffer, __buffer + __len);
1762            _S_flatten(_M_tree_ptr, __pos, __len, __buffer);
1763            return __len;
1764        }
1765
1766        // Print to stdout, exposing structure.  May be useful for
1767        // performance debugging.
1768        void dump() {
1769            _S_dump(_M_tree_ptr);
1770        }
1771
1772        // Convert to 0 terminated string in new allocated memory.
1773        // Embedded 0s in the input do not terminate the copy.
1774        const _CharT* c_str() const;
1775
1776        // As above, but lso use the flattened representation as the
1777        // the new rope representation.
1778        const _CharT* replace_with_c_str();
1779
1780        // Reclaim memory for the c_str generated flattened string.
1781        // Intentionally undocumented, since it's hard to say when this
1782        // is safe for multiple threads.
1783        void delete_c_str () {
1784            if (0 == _M_tree_ptr) return;
1785            if (_RopeRep::_S_leaf == _M_tree_ptr->_M_tag &&
1786                ((_RopeLeaf*)_M_tree_ptr)->_M_data ==
1787                      _M_tree_ptr->_M_c_string) {
1788                // Representation shared
1789                return;
1790            }
1791#           ifndef __GC
1792              _M_tree_ptr->_M_free_c_string();
1793#           endif
1794            _M_tree_ptr->_M_c_string = 0;
1795        }
1796
1797        _CharT operator[] (size_type __pos) const {
1798            return _S_fetch(_M_tree_ptr, __pos);
1799        }
1800
1801        _CharT at(size_type __pos) const {
1802           // if (__pos >= size()) throw out_of_range;  // XXX
1803           return (*this)[__pos];
1804        }
1805
1806        const_iterator begin() const {
1807            return(const_iterator(_M_tree_ptr, 0));
1808        }
1809
1810        // An easy way to get a const iterator from a non-const container.
1811        const_iterator const_begin() const {
1812            return(const_iterator(_M_tree_ptr, 0));
1813        }
1814
1815        const_iterator end() const {
1816            return(const_iterator(_M_tree_ptr, size()));
1817        }
1818
1819        const_iterator const_end() const {
1820            return(const_iterator(_M_tree_ptr, size()));
1821        }
1822
1823        size_type size() const {
1824            return(0 == _M_tree_ptr? 0 : _M_tree_ptr->_M_size);
1825        }
1826
1827        size_type length() const {
1828            return size();
1829        }
1830
1831        size_type max_size() const {
1832            return _S_min_len[_RopeRep::_S_max_rope_depth-1] - 1;
1833            //  Guarantees that the result can be sufficirntly
1834            //  balanced.  Longer ropes will probably still work,
1835            //  but it's harder to make guarantees.
1836        }
1837
1838#     ifdef __STL_CLASS_PARTIAL_SPECIALIZATION
1839        typedef reverse_iterator<const_iterator> const_reverse_iterator;
1840#     else /* __STL_CLASS_PARTIAL_SPECIALIZATION */
1841        typedef reverse_iterator<const_iterator, value_type, const_reference,
1842                                 difference_type>  const_reverse_iterator;
1843#     endif /* __STL_CLASS_PARTIAL_SPECIALIZATION */
1844
1845        const_reverse_iterator rbegin() const {
1846            return const_reverse_iterator(end());
1847        }
1848
1849        const_reverse_iterator const_rbegin() const {
1850            return const_reverse_iterator(end());
1851        }
1852
1853        const_reverse_iterator rend() const {
1854            return const_reverse_iterator(begin());
1855        }
1856
1857        const_reverse_iterator const_rend() const {
1858            return const_reverse_iterator(begin());
1859        }
1860
1861        friend rope<_CharT,_Alloc>
1862        operator+ __STL_NULL_TMPL_ARGS (const rope<_CharT,_Alloc>& __left,
1863                                        const rope<_CharT,_Alloc>& __right);
1864
1865        friend rope<_CharT,_Alloc>
1866        operator+ __STL_NULL_TMPL_ARGS (const rope<_CharT,_Alloc>& __left,
1867                                        const _CharT* __right);
1868
1869        friend rope<_CharT,_Alloc>
1870        operator+ __STL_NULL_TMPL_ARGS (const rope<_CharT,_Alloc>& __left,
1871                                        _CharT __right);
1872
1873        // The symmetric cases are intentionally omitted, since they're presumed
1874        // to be less common, and we don't handle them as well.
1875
1876        // The following should really be templatized.
1877        // The first argument should be an input iterator or
1878        // forward iterator with value_type _CharT.
1879        rope& append(const _CharT* __iter, size_t __n) {
1880            _RopeRep* __result =
1881              _S_destr_concat_char_iter(_M_tree_ptr, __iter, __n);
1882            _S_unref(_M_tree_ptr);
1883            _M_tree_ptr = __result;
1884            return *this;
1885        }
1886
1887        rope& append(const _CharT* __c_string) {
1888            size_t __len = _S_char_ptr_len(__c_string);
1889            append(__c_string, __len);
1890            return(*this);
1891        }
1892
1893        rope& append(const _CharT* __s, const _CharT* __e) {
1894            _RopeRep* __result =
1895                _S_destr_concat_char_iter(_M_tree_ptr, __s, __e - __s);
1896            _S_unref(_M_tree_ptr);
1897            _M_tree_ptr = __result;
1898            return *this;
1899        }
1900
1901        rope& append(const_iterator __s, const_iterator __e) {
1902            __stl_assert(__s._M_root == __e._M_root);
1903#           ifdef __STL_USE_STD_ALLOCATORS
1904                __stl_assert(get_allocator() == __s._M_root->get_allocator());
1905#           endif
1906            _Self_destruct_ptr __appendee(_S_substring(
1907              __s._M_root, __s._M_current_pos, __e._M_current_pos));
1908            _RopeRep* __result =
1909              _S_concat(_M_tree_ptr, (_RopeRep*)__appendee);
1910            _S_unref(_M_tree_ptr);
1911            _M_tree_ptr = __result;
1912            return *this;
1913        }
1914
1915        rope& append(_CharT __c) {
1916            _RopeRep* __result =
1917              _S_destr_concat_char_iter(_M_tree_ptr, &__c, 1);
1918            _S_unref(_M_tree_ptr);
1919            _M_tree_ptr = __result;
1920            return *this;
1921        }
1922
1923        rope& append() { return append(_CharT()); }  // XXX why?
1924
1925        rope& append(const rope& __y) {
1926#           ifdef __STL_USE_STD_ALLOCATORS
1927              __stl_assert(__y.get_allocator() == get_allocator());
1928#           endif
1929            _RopeRep* __result = _S_concat(_M_tree_ptr, __y._M_tree_ptr);
1930            _S_unref(_M_tree_ptr);
1931            _M_tree_ptr = __result;
1932            return *this;
1933        }
1934
1935        rope& append(size_t __n, _CharT __c) {
1936            rope<_CharT,_Alloc> __last(__n, __c);
1937            return append(__last);
1938        }
1939
1940        void swap(rope& __b) {
1941#           ifdef __STL_USE_STD_ALLOCATORS
1942                __stl_assert(get_allocator() == __b.get_allocator());
1943#           endif
1944            _RopeRep* __tmp = _M_tree_ptr;
1945            _M_tree_ptr = __b._M_tree_ptr;
1946            __b._M_tree_ptr = __tmp;
1947        }
1948
1949
1950    protected:
1951        // Result is included in refcount.
1952        static _RopeRep* replace(_RopeRep* __old, size_t __pos1,
1953                                  size_t __pos2, _RopeRep* __r) {
1954            if (0 == __old) { _S_ref(__r); return __r; }
1955            _Self_destruct_ptr __left(
1956              _S_substring(__old, 0, __pos1));
1957            _Self_destruct_ptr __right(
1958              _S_substring(__old, __pos2, __old->_M_size));
1959            _RopeRep* __result;
1960
1961#           ifdef __STL_USE_STD_ALLOCATORS
1962                __stl_assert(__old->get_allocator() == __r->get_allocator());
1963#           endif
1964            if (0 == __r) {
1965                __result = _S_concat(__left, __right);
1966            } else {
1967                _Self_destruct_ptr __left_result(_S_concat(__left, __r));
1968                __result = _S_concat(__left_result, __right);
1969            }
1970            return __result;
1971        }
1972
1973    public:
1974        void insert(size_t __p, const rope& __r) {
1975            _RopeRep* __result =
1976              replace(_M_tree_ptr, __p, __p, __r._M_tree_ptr);
1977#           ifdef __STL_USE_STD_ALLOCATORS
1978                __stl_assert(get_allocator() == __r.get_allocator());
1979#           endif
1980            _S_unref(_M_tree_ptr);
1981            _M_tree_ptr = __result;
1982        }
1983
1984        void insert(size_t __p, size_t __n, _CharT __c) {
1985            rope<_CharT,_Alloc> __r(__n,__c);
1986            insert(__p, __r);
1987        }
1988
1989        void insert(size_t __p, const _CharT* __i, size_t __n) {
1990            _Self_destruct_ptr __left(_S_substring(_M_tree_ptr, 0, __p));
1991            _Self_destruct_ptr __right(_S_substring(_M_tree_ptr, __p, size()));
1992            _Self_destruct_ptr __left_result(
1993              _S_concat_char_iter(__left, __i, __n));
1994            _RopeRep* __result = _S_concat(__left_result, __right);
1995            _S_unref(_M_tree_ptr);
1996            _M_tree_ptr = __result;
1997        }
1998
1999        void insert(size_t __p, const _CharT* __c_string) {
2000            insert(__p, __c_string, _S_char_ptr_len(__c_string));
2001        }
2002
2003        void insert(size_t __p, _CharT __c) {
2004            insert(__p, &__c, 1);
2005        }
2006
2007        void insert(size_t __p) {
2008            _CharT __c = _CharT();
2009            insert(__p, &__c, 1);
2010        }
2011
2012        void insert(size_t __p, const _CharT* __i, const _CharT* __j) {
2013            rope __r(__i, __j);
2014            insert(__p, __r);
2015        }
2016
2017        void insert(size_t __p, const const_iterator& __i,
2018                              const const_iterator& __j) {
2019            rope __r(__i, __j);
2020            insert(__p, __r);
2021        }
2022
2023        void insert(size_t __p, const iterator& __i,
2024                              const iterator& __j) {
2025            rope __r(__i, __j);
2026            insert(__p, __r);
2027        }
2028
2029        // (position, length) versions of replace operations:
2030
2031        void replace(size_t __p, size_t __n, const rope& __r) {
2032            _RopeRep* __result =
2033              replace(_M_tree_ptr, __p, __p + __n, __r._M_tree_ptr);
2034            _S_unref(_M_tree_ptr);
2035            _M_tree_ptr = __result;
2036        }
2037
2038        void replace(size_t __p, size_t __n,
2039                     const _CharT* __i, size_t __i_len) {
2040            rope __r(__i, __i_len);
2041            replace(__p, __n, __r);
2042        }
2043
2044        void replace(size_t __p, size_t __n, _CharT __c) {
2045            rope __r(__c);
2046            replace(__p, __n, __r);
2047        }
2048
2049        void replace(size_t __p, size_t __n, const _CharT* __c_string) {
2050            rope __r(__c_string);
2051            replace(__p, __n, __r);
2052        }
2053
2054        void replace(size_t __p, size_t __n,
2055                     const _CharT* __i, const _CharT* __j) {
2056            rope __r(__i, __j);
2057            replace(__p, __n, __r);
2058        }
2059
2060        void replace(size_t __p, size_t __n,
2061                     const const_iterator& __i, const const_iterator& __j) {
2062            rope __r(__i, __j);
2063            replace(__p, __n, __r);
2064        }
2065
2066        void replace(size_t __p, size_t __n,
2067                     const iterator& __i, const iterator& __j) {
2068            rope __r(__i, __j);
2069            replace(__p, __n, __r);
2070        }
2071
2072        // Single character variants:
2073        void replace(size_t __p, _CharT __c) {
2074            iterator __i(this, __p);
2075            *__i = __c;
2076        }
2077
2078        void replace(size_t __p, const rope& __r) {
2079            replace(__p, 1, __r);
2080        }
2081
2082        void replace(size_t __p, const _CharT* __i, size_t __i_len) {
2083            replace(__p, 1, __i, __i_len);
2084        }
2085
2086        void replace(size_t __p, const _CharT* __c_string) {
2087            replace(__p, 1, __c_string);
2088        }
2089
2090        void replace(size_t __p, const _CharT* __i, const _CharT* __j) {
2091            replace(__p, 1, __i, __j);
2092        }
2093
2094        void replace(size_t __p, const const_iterator& __i,
2095                               const const_iterator& __j) {
2096            replace(__p, 1, __i, __j);
2097        }
2098
2099        void replace(size_t __p, const iterator& __i,
2100                               const iterator& __j) {
2101            replace(__p, 1, __i, __j);
2102        }
2103
2104        // Erase, (position, size) variant.
2105        void erase(size_t __p, size_t __n) {
2106            _RopeRep* __result = replace(_M_tree_ptr, __p, __p + __n, 0);
2107            _S_unref(_M_tree_ptr);
2108            _M_tree_ptr = __result;
2109        }
2110
2111        // Erase, single character
2112        void erase(size_t __p) {
2113            erase(__p, __p + 1);
2114        }
2115
2116        // Insert, iterator variants.
2117        iterator insert(const iterator& __p, const rope& __r)
2118                { insert(__p.index(), __r); return __p; }
2119        iterator insert(const iterator& __p, size_t __n, _CharT __c)
2120                { insert(__p.index(), __n, __c); return __p; }
2121        iterator insert(const iterator& __p, _CharT __c)
2122                { insert(__p.index(), __c); return __p; }
2123        iterator insert(const iterator& __p )
2124                { insert(__p.index()); return __p; }
2125        iterator insert(const iterator& __p, const _CharT* c_string)
2126                { insert(__p.index(), c_string); return __p; }
2127        iterator insert(const iterator& __p, const _CharT* __i, size_t __n)
2128                { insert(__p.index(), __i, __n); return __p; }
2129        iterator insert(const iterator& __p, const _CharT* __i,
2130                        const _CharT* __j)
2131                { insert(__p.index(), __i, __j);  return __p; }
2132        iterator insert(const iterator& __p,
2133                        const const_iterator& __i, const const_iterator& __j)
2134                { insert(__p.index(), __i, __j); return __p; }
2135        iterator insert(const iterator& __p,
2136                        const iterator& __i, const iterator& __j)
2137                { insert(__p.index(), __i, __j); return __p; }
2138
2139        // Replace, range variants.
2140        void replace(const iterator& __p, const iterator& __q,
2141                     const rope& __r)
2142                { replace(__p.index(), __q.index() - __p.index(), __r); }
2143        void replace(const iterator& __p, const iterator& __q, _CharT __c)
2144                { replace(__p.index(), __q.index() - __p.index(), __c); }
2145        void replace(const iterator& __p, const iterator& __q,
2146                     const _CharT* __c_string)
2147                { replace(__p.index(), __q.index() - __p.index(), __c_string); }
2148        void replace(const iterator& __p, const iterator& __q,
2149                     const _CharT* __i, size_t __n)
2150                { replace(__p.index(), __q.index() - __p.index(), __i, __n); }
2151        void replace(const iterator& __p, const iterator& __q,
2152                     const _CharT* __i, const _CharT* __j)
2153                { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
2154        void replace(const iterator& __p, const iterator& __q,
2155                     const const_iterator& __i, const const_iterator& __j)
2156                { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
2157        void replace(const iterator& __p, const iterator& __q,
2158                     const iterator& __i, const iterator& __j)
2159                { replace(__p.index(), __q.index() - __p.index(), __i, __j); }
2160
2161        // Replace, iterator variants.
2162        void replace(const iterator& __p, const rope& __r)
2163                { replace(__p.index(), __r); }
2164        void replace(const iterator& __p, _CharT __c)
2165                { replace(__p.index(), __c); }
2166        void replace(const iterator& __p, const _CharT* __c_string)
2167                { replace(__p.index(), __c_string); }
2168        void replace(const iterator& __p, const _CharT* __i, size_t __n)
2169                { replace(__p.index(), __i, __n); }
2170        void replace(const iterator& __p, const _CharT* __i, const _CharT* __j)
2171                { replace(__p.index(), __i, __j); }
2172        void replace(const iterator& __p, const_iterator __i,
2173                     const_iterator __j)
2174                { replace(__p.index(), __i, __j); }
2175        void replace(const iterator& __p, iterator __i, iterator __j)
2176                { replace(__p.index(), __i, __j); }
2177
2178        // Iterator and range variants of erase
2179        iterator erase(const iterator& __p, const iterator& __q) {
2180            size_t __p_index = __p.index();
2181            erase(__p_index, __q.index() - __p_index);
2182            return iterator(this, __p_index);
2183        }
2184        iterator erase(const iterator& __p) {
2185            size_t __p_index = __p.index();
2186            erase(__p_index, 1);
2187            return iterator(this, __p_index);
2188        }
2189
2190        rope substr(size_t __start, size_t __len = 1) const {
2191            return rope<_CharT,_Alloc>(
2192                        _S_substring(_M_tree_ptr, __start, __start + __len));
2193        }
2194
2195        rope substr(iterator __start, iterator __end) const {
2196            return rope<_CharT,_Alloc>(
2197                _S_substring(_M_tree_ptr, __start.index(), __end.index()));
2198        }
2199
2200        rope substr(iterator __start) const {
2201            size_t __pos = __start.index();
2202            return rope<_CharT,_Alloc>(
2203                        _S_substring(_M_tree_ptr, __pos, __pos + 1));
2204        }
2205
2206        rope substr(const_iterator __start, const_iterator __end) const {
2207            // This might eventually take advantage of the cache in the
2208            // iterator.
2209            return rope<_CharT,_Alloc>(
2210              _S_substring(_M_tree_ptr, __start.index(), __end.index()));
2211        }
2212
2213        rope<_CharT,_Alloc> substr(const_iterator __start) {
2214            size_t __pos = __start.index();
2215            return rope<_CharT,_Alloc>(
2216              _S_substring(_M_tree_ptr, __pos, __pos + 1));
2217        }
2218
2219        static const size_type npos;
2220
2221        size_type find(_CharT __c, size_type __pos = 0) const;
2222        size_type find(_CharT* __s, size_type __pos = 0) const {
2223            size_type __result_pos;
2224            const_iterator __result = search(const_begin() + __pos, const_end(),
2225                                           __s, __s + _S_char_ptr_len(__s));
2226            __result_pos = __result.index();
2227#           ifndef __STL_OLD_ROPE_SEMANTICS
2228                if (__result_pos == size()) __result_pos = npos;
2229#           endif
2230            return __result_pos;
2231        }
2232
2233        iterator mutable_begin() {
2234            return(iterator(this, 0));
2235        }
2236
2237        iterator mutable_end() {
2238            return(iterator(this, size()));
2239        }
2240
2241#     ifdef __STL_CLASS_PARTIAL_SPECIALIZATION
2242        typedef reverse_iterator<iterator> reverse_iterator;
2243#     else /* __STL_CLASS_PARTIAL_SPECIALIZATION */
2244        typedef reverse_iterator<iterator, value_type, reference,
2245                                 difference_type>  reverse_iterator;
2246#     endif /* __STL_CLASS_PARTIAL_SPECIALIZATION */
2247
2248        reverse_iterator mutable_rbegin() {
2249            return reverse_iterator(mutable_end());
2250        }
2251
2252        reverse_iterator mutable_rend() {
2253            return reverse_iterator(mutable_begin());
2254        }
2255
2256        reference mutable_reference_at(size_type __pos) {
2257            return reference(this, __pos);
2258        }
2259
2260#       ifdef __STD_STUFF
2261            reference operator[] (size_type __pos) {
2262                return _char_ref_proxy(this, __pos);
2263            }
2264
2265            reference at(size_type __pos) {
2266                // if (__pos >= size()) throw out_of_range;  // XXX
2267                return (*this)[__pos];
2268            }
2269
2270            void resize(size_type __n, _CharT __c) {}
2271            void resize(size_type __n) {}
2272            void reserve(size_type __res_arg = 0) {}
2273            size_type capacity() const {
2274                return max_size();
2275            }
2276
2277          // Stuff below this line is dangerous because it's error prone.
2278          // I would really like to get rid of it.
2279            // copy function with funny arg ordering.
2280              size_type copy(_CharT* __buffer, size_type __n,
2281                             size_type __pos = 0) const {
2282                return copy(__pos, __n, __buffer);
2283              }
2284
2285            iterator end() { return mutable_end(); }
2286
2287            iterator begin() { return mutable_begin(); }
2288
2289            reverse_iterator rend() { return mutable_rend(); }
2290
2291            reverse_iterator rbegin() { return mutable_rbegin(); }
2292
2293#       else
2294
2295            const_iterator end() { return const_end(); }
2296
2297            const_iterator begin() { return const_begin(); }
2298
2299            const_reverse_iterator rend() { return const_rend(); }
2300
2301            const_reverse_iterator rbegin() { return const_rbegin(); }
2302
2303#       endif
2304
2305};
2306
2307template <class _CharT, class _Alloc>
2308const rope<_CharT, _Alloc>::size_type rope<_CharT, _Alloc>::npos =
2309			(size_type)(-1);
2310
2311template <class _CharT, class _Alloc>
2312inline bool operator== (const _Rope_const_iterator<_CharT,_Alloc>& __x,
2313                        const _Rope_const_iterator<_CharT,_Alloc>& __y) {
2314  return (__x._M_current_pos == __y._M_current_pos &&
2315          __x._M_root == __y._M_root);
2316}
2317
2318template <class _CharT, class _Alloc>
2319inline bool operator< (const _Rope_const_iterator<_CharT,_Alloc>& __x,
2320                       const _Rope_const_iterator<_CharT,_Alloc>& __y) {
2321  return (__x._M_current_pos < __y._M_current_pos);
2322}
2323
2324template <class _CharT, class _Alloc>
2325inline ptrdiff_t operator-(const _Rope_const_iterator<_CharT,_Alloc>& __x,
2326                           const _Rope_const_iterator<_CharT,_Alloc>& __y) {
2327  return (ptrdiff_t)__x._M_current_pos - (ptrdiff_t)__y._M_current_pos;
2328}
2329
2330template <class _CharT, class _Alloc>
2331inline _Rope_const_iterator<_CharT,_Alloc>
2332operator-(const _Rope_const_iterator<_CharT,_Alloc>& __x, ptrdiff_t __n) {
2333  return _Rope_const_iterator<_CharT,_Alloc>(
2334            __x._M_root, __x._M_current_pos - __n);
2335}
2336
2337template <class _CharT, class _Alloc>
2338inline _Rope_const_iterator<_CharT,_Alloc>
2339operator+(const _Rope_const_iterator<_CharT,_Alloc>& __x, ptrdiff_t __n) {
2340  return _Rope_const_iterator<_CharT,_Alloc>(
2341           __x._M_root, __x._M_current_pos + __n);
2342}
2343
2344template <class _CharT, class _Alloc>
2345inline _Rope_const_iterator<_CharT,_Alloc>
2346operator+(ptrdiff_t __n, const _Rope_const_iterator<_CharT,_Alloc>& __x) {
2347  return _Rope_const_iterator<_CharT,_Alloc>(
2348           __x._M_root, __x._M_current_pos + __n);
2349}
2350
2351template <class _CharT, class _Alloc>
2352inline bool operator== (const _Rope_iterator<_CharT,_Alloc>& __x,
2353                        const _Rope_iterator<_CharT,_Alloc>& __y) {
2354  return (__x._M_current_pos == __y._M_current_pos &&
2355          __x._M_root_rope == __y._M_root_rope);
2356}
2357
2358template <class _CharT, class _Alloc>
2359inline bool operator< (const _Rope_iterator<_CharT,_Alloc>& __x,
2360                       const _Rope_iterator<_CharT,_Alloc>& __y) {
2361  return (__x._M_current_pos < __y._M_current_pos);
2362}
2363
2364template <class _CharT, class _Alloc>
2365inline ptrdiff_t operator-(const _Rope_iterator<_CharT,_Alloc>& __x,
2366                           const _Rope_iterator<_CharT,_Alloc>& __y) {
2367  return (ptrdiff_t)__x._M_current_pos - (ptrdiff_t)__y._M_current_pos;
2368}
2369
2370template <class _CharT, class _Alloc>
2371inline _Rope_iterator<_CharT,_Alloc>
2372operator-(const _Rope_iterator<_CharT,_Alloc>& __x,
2373          ptrdiff_t __n) {
2374  return _Rope_iterator<_CharT,_Alloc>(
2375    __x._M_root_rope, __x._M_current_pos - __n);
2376}
2377
2378template <class _CharT, class _Alloc>
2379inline _Rope_iterator<_CharT,_Alloc>
2380operator+(const _Rope_iterator<_CharT,_Alloc>& __x,
2381          ptrdiff_t __n) {
2382  return _Rope_iterator<_CharT,_Alloc>(
2383    __x._M_root_rope, __x._M_current_pos + __n);
2384}
2385
2386template <class _CharT, class _Alloc>
2387inline _Rope_iterator<_CharT,_Alloc>
2388operator+(ptrdiff_t __n, const _Rope_iterator<_CharT,_Alloc>& __x) {
2389  return _Rope_iterator<_CharT,_Alloc>(
2390    __x._M_root_rope, __x._M_current_pos + __n);
2391}
2392
2393template <class _CharT, class _Alloc>
2394inline
2395rope<_CharT,_Alloc>
2396operator+ (const rope<_CharT,_Alloc>& __left,
2397           const rope<_CharT,_Alloc>& __right)
2398{
2399#   ifdef __STL_USE_STD_ALLOCATORS
2400        __stl_assert(__left.get_allocator() == __right.get_allocator());
2401#   endif
2402    return rope<_CharT,_Alloc>(
2403      rope<_CharT,_Alloc>::_S_concat(__left._M_tree_ptr, __right._M_tree_ptr));
2404    // Inlining this should make it possible to keep __left and
2405    // __right in registers.
2406}
2407
2408template <class _CharT, class _Alloc>
2409inline
2410rope<_CharT,_Alloc>&
2411operator+= (rope<_CharT,_Alloc>& __left,
2412      const rope<_CharT,_Alloc>& __right)
2413{
2414    __left.append(__right);
2415    return __left;
2416}
2417
2418template <class _CharT, class _Alloc>
2419inline
2420rope<_CharT,_Alloc>
2421operator+ (const rope<_CharT,_Alloc>& __left,
2422           const _CharT* __right) {
2423    size_t __rlen = rope<_CharT,_Alloc>::_S_char_ptr_len(__right);
2424    return rope<_CharT,_Alloc>(
2425      rope<_CharT,_Alloc>::_S_concat_char_iter(
2426        __left._M_tree_ptr, __right, __rlen));
2427}
2428
2429template <class _CharT, class _Alloc>
2430inline
2431rope<_CharT,_Alloc>&
2432operator+= (rope<_CharT,_Alloc>& __left,
2433            const _CharT* __right) {
2434    __left.append(__right);
2435    return __left;
2436}
2437
2438template <class _CharT, class _Alloc>
2439inline
2440rope<_CharT,_Alloc>
2441operator+ (const rope<_CharT,_Alloc>& __left, _CharT __right) {
2442    return rope<_CharT,_Alloc>(
2443      rope<_CharT,_Alloc>::_S_concat_char_iter(
2444        __left._M_tree_ptr, &__right, 1));
2445}
2446
2447template <class _CharT, class _Alloc>
2448inline
2449rope<_CharT,_Alloc>&
2450operator+= (rope<_CharT,_Alloc>& __left, _CharT __right) {
2451    __left.append(__right);
2452    return __left;
2453}
2454
2455template <class _CharT, class _Alloc>
2456bool
2457operator< (const rope<_CharT,_Alloc>& __left,
2458           const rope<_CharT,_Alloc>& __right) {
2459    return __left.compare(__right) < 0;
2460}
2461
2462template <class _CharT, class _Alloc>
2463bool
2464operator== (const rope<_CharT,_Alloc>& __left,
2465            const rope<_CharT,_Alloc>& __right) {
2466    return __left.compare(__right) == 0;
2467}
2468
2469template <class _CharT, class _Alloc>
2470inline bool operator== (const _Rope_char_ptr_proxy<_CharT,_Alloc>& __x,
2471                        const _Rope_char_ptr_proxy<_CharT,_Alloc>& __y) {
2472        return (__x._M_pos == __y._M_pos && __x._M_root == __y._M_root);
2473}
2474
2475template<class _CharT, class _Alloc>
2476ostream& operator<< (ostream& __o, const rope<_CharT,_Alloc>& __r);
2477
2478typedef rope<char> crope;
2479typedef rope<wchar_t> wrope;
2480
2481inline crope::reference __mutable_reference_at(crope& __c, size_t __i)
2482{
2483    return __c.mutable_reference_at(__i);
2484}
2485
2486inline wrope::reference __mutable_reference_at(wrope& __c, size_t __i)
2487{
2488    return __c.mutable_reference_at(__i);
2489}
2490
2491#ifdef __STL_FUNCTION_TMPL_PARTIAL_ORDER
2492
2493template <class _CharT, class _Alloc>
2494inline void swap(rope<_CharT,_Alloc>& __x, rope<_CharT,_Alloc>& __y) {
2495  __x.swap(__y);
2496}
2497
2498#else
2499
2500inline void swap(crope __x, crope __y) { __x.swap(__y); }
2501inline void swap(wrope __x, wrope __y) { __x.swap(__y); }
2502
2503#endif /* __STL_FUNCTION_TMPL_PARTIAL_ORDER */
2504
2505// Hash functions should probably be revisited later:
2506__STL_TEMPLATE_NULL struct hash<crope>
2507{
2508  size_t operator()(const crope& __str) const
2509  {
2510    size_t __size = __str.size();
2511
2512    if (0 == __size) return 0;
2513    return 13*__str[0] + 5*__str[__size - 1] + __size;
2514  }
2515};
2516
2517
2518__STL_TEMPLATE_NULL struct hash<wrope>
2519{
2520  size_t operator()(const wrope& __str) const
2521  {
2522    size_t __size = __str.size();
2523
2524    if (0 == __size) return 0;
2525    return 13*__str[0] + 5*__str[__size - 1] + __size;
2526  }
2527};
2528
2529#if defined(__sgi) && !defined(__GNUC__) && (_MIPS_SIM != _MIPS_SIM_ABI32)
2530#pragma reset woff 1174
2531#endif
2532
2533__STL_END_NAMESPACE
2534
2535# include <ropeimpl.h>
2536
2537# endif /* __SGI_STL_INTERNAL_ROPE_H */
2538
2539// Local Variables:
2540// mode:C++
2541// End:
2542